import Ember from 'ember';
import moment from 'moment';
const {
computed
} = Ember;
export default Ember.TextField.extend({
classNames: ['ff-date-input'],
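// Bridge between the displayed string and the backing `date`:
// `get` formats the date for display; `set` only propagates a
// fully typed MM/DD/YYYY value upward via the `on-change` action.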
value: computed('date', {
get() {
const date = this.get('date');
return date ? moment(date).format('MM/DD/YYYY') : '';
},
set(key, formattedDate) {
if (formattedDate.match(/\d\d\/\d\d\/\d\d\d\d/)) {
const date = moment(formattedDate, 'MM/DD/YYYY');
if (!date.isSame(this.get('date'), 'day')) {
this.sendAction('on-change', date.toDate());
}
}
return formattedDate;
}
})
});
|
import React from "react";
class ZipForm extends React.Component {
constructor(props) {
super(props);
this.state = {
zipcode: ''
};
this.inputUpdated = this.inputUpdated.bind(this);
this.submitZipCode = this.submitZipCode.bind(this);
}
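// Hand the controlled zipcode value to the parent via the onSubmit prop, then clear the field.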
submitZipCode(e) {
e.preventDefault();
const { zipcode } = this.state;
const { onSubmit } = this.props;
onSubmit(zipcode);
this.setState({ zipcode: ''});
}
inputUpdated(e) {
const { value } = e.target;
this.setState({ zipcode: value });
}
render() {
return (
<div className="zip-form">
<form onSubmit={this.submitZipCode}>
<label htmlFor="zipcode">Zip Code</label>
<input
className="form-control"
type="text"
name="zipcode"
value={this.state.zipcode}
onChange={this.inputUpdated} />
<button type="submit" className="btn btn-success">Get the forecast!!</button>
</form>
</div>
)
}
}
export default ZipForm; |
import styled from "styled-components"
import React from "react"
import { Link } from "gatsby"
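// Wrapping Gatsby's Link in a function component lets styled-components attach its generated className to the Link.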
export const ButtonWrapper = styled(props => <Link {...props} />)`
padding: 0.5rem 0.75rem;
background-color: ${props => props.theme.colors.main1};
border-radius: 0.5rem;
color: ${props => props.theme.colors.light1};
font-weight: 700;
font-size: 0.875rem;
width: fit-content;
transition: filter 0.3s ease;
text-decoration: none;
&:hover,
&:focus {
filter: brightness(110%);
}
`
|
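// Babel-compiled CommonJS output; the original source was presumably a JSX component exported through withStyles.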
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _react = _interopRequireDefault(require("react"));
var _styles = require("@material-ui/styles");
var _GreenButton = _interopRequireDefault(require("./GreenButton"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const styles = {
root: {
width: '100%',
height: 255,
backgroundColor: '#444343',
display: 'flex',
alignItems: 'center'
},
section: {
width: 1024,
display: 'flex',
alignItems: 'center',
paddingLeft: 120
},
title: {
fontSize: 32,
color: '#ffffff',
fontFamily: 'FilsonSoftRegular'
},
button: {
marginLeft: 135
}
};
class SeeProduct extends _react.default.PureComponent {
render() {
const {
classes
} = this.props;
return _react.default.createElement("div", {
className: classes.root
}, _react.default.createElement("div", {
className: classes.section
}, _react.default.createElement("div", {
className: classes.title
}, "See our Products Section", _react.default.createElement("br", null), "for more details on yarn selection."), _react.default.createElement("div", {
className: classes.button
}, _react.default.createElement(_GreenButton.default, {
text: "Learn more"
}))));
}
}
var _default = (0, _styles.withStyles)(styles)(SeeProduct);
exports.default = _default; |
import React, { useContext, useState, useEffect } from 'react';
import { SiteContext } from '../../context/Site';
import { Card, Elevation, Icon } from '@blueprintjs/core';
import PageButton from '../pageButton/pageButton';
function List(props) {
const siteContext = useContext(SiteContext);
const [taskList, setTaskList] = useState([]);
const [page, setPage] = useState(1);
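// Client-side pagination: compute the slice bounds for the current page.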
const indexOfLastPost = page * siteContext.itemsPerPage;
const indexOfFirstPost = indexOfLastPost - siteContext.itemsPerPage;
const currentItems = props.list.slice(indexOfFirstPost, indexOfLastPost);
useEffect(() => {
setTaskList(currentItems);
}, [props.list, page]);
return (
<div>
{taskList.map(item => (
<Card key={item.id} elevation={Elevation.THREE}>
<div id={item.id}>
<div>
<Icon icon="cross" onClick={() => props.deleteItem(item.id)} />
<span onClick={() => props.toggleComplete(item.id)}>{item.complete ? 'Complete' : 'In Progress'}</span>
<p><small>Assigned to: {item.assignee}</small></p>
</div>
<p>{item.text}</p>
<p className="difficulty"><small>Difficulty: {item.difficulty}</small></p>
</div>
</Card>
))
}
<PageButton list={props.list} page={page} setPage={setPage}></PageButton>
</div>
);
}
export default List; |
import React from 'react';
import createSvgIcon from './utils/createSvgIcon';
export default createSvgIcon(React.createElement("path", {
d: "M20 4H4c-1.11 0-1.99.89-1.99 2L2 18c0 1.11.89 2 2 2h16c1.11 0 2-.89 2-2V6c0-1.11-.89-2-2-2zM9 11.5c0 .83-.67 1.5-1.5 1.5h-2v1.25c0 .41-.34.75-.75.75S4 14.66 4 14.25V10c0-.55.45-1 1-1h2.5c.83 0 1.5.67 1.5 1.5v1zm3.5 2.75c0 .41-.34.75-.75.75s-.75-.34-.75-.75v-4.5c0-.41.34-.75.75-.75s.75.34.75.75v4.5zm7.5-.04c0 .44-.35.79-.79.79-.25 0-.49-.12-.64-.33l-2.31-3.17v2.88c0 .34-.28.62-.62.62h-.01c-.35 0-.63-.28-.63-.62V9.83c0-.46.37-.83.83-.83.27 0 .52.13.67.35l2.25 3.15V9.62c0-.34.28-.62.62-.62h.01c.34 0 .62.28.62.62v4.59zM5.5 10.5h2v1h-2z"
}), 'FiberPinRounded'); |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
#
import base64
import json
import os
import tempfile
from collections import namedtuple
from logging import getLogger
from typing import IO, TYPE_CHECKING, Tuple
from Cryptodome.Cipher import AES
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from .compat import PKCS5_OFFSET, PKCS5_PAD, PKCS5_UNPAD
from .constants import UTF8
block_size = int(algorithms.AES.block_size / 8) # in bytes
if TYPE_CHECKING: # pragma: no cover
from .remote_storage_util import SnowflakeFileEncryptionMaterial
def matdesc_to_unicode(matdesc):
"""Convert Material Descriptor to Unicode String."""
return str(
json.dumps(
{
"queryId": matdesc.query_id,
"smkId": str(matdesc.smk_id),
"keySize": str(matdesc.key_size),
},
separators=(",", ":"),
)
)
"""
Material Description
"""
MaterialDescriptor = namedtuple(
"MaterialDescriptor",
["smk_id", "query_id", "key_size"], # SMK id # query id # key size, 128 or 256
)
"""
Metadata for encryption
"""
EncryptionMetadata = namedtuple("EncryptionMetadata", ["key", "iv", "matdesc"])
class SnowflakeEncryptionUtil(object):
@staticmethod
def get_secure_random(byte_length):
return os.urandom(byte_length)
@staticmethod
def encrypt_stream(
encryption_material: "SnowflakeFileEncryptionMaterial",
src: IO[bytes],
out: IO[bytes],
chunk_size: int = block_size * 4 * 1024,
) -> "EncryptionMetadata":
"""Reads content from src and write the encrypted content into out.
This function is sensitive to current position of src and out.
It does not seek to position 0 in neither stream objects before or after the encryption.
Args:
encryption_material: The encryption material for file.
src: The input stream.
out: The output stream.
chunk_size: The size of read chunks (Default value = block_size * 4 * 1024
Returns:
The encryption metadata.
"""
logger = getLogger(__name__)
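# The SF_USE_OPENSSL_ONLY environment variable selects the cryptography-library
# code path below; otherwise PyCryptodome's AES implementation is used.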
use_openssl_only = os.getenv("SF_USE_OPENSSL_ONLY", "False") == "True"
decoded_key = base64.standard_b64decode(
encryption_material.query_stage_master_key
)
key_size = len(decoded_key)
logger.debug("key_size = %s", key_size)
# Generate key for data encryption
iv_data = SnowflakeEncryptionUtil.get_secure_random(block_size)
file_key = SnowflakeEncryptionUtil.get_secure_random(key_size)
if not use_openssl_only:
data_cipher = AES.new(key=file_key, mode=AES.MODE_CBC, IV=iv_data)
else:
backend = default_backend()
cipher = Cipher(
algorithms.AES(file_key), modes.CBC(iv_data), backend=backend
)
encryptor = cipher.encryptor()
padded = False
while True:
chunk = src.read(chunk_size)
if len(chunk) == 0:
break
elif len(chunk) % block_size != 0:
chunk = PKCS5_PAD(chunk, block_size)
padded = True
if not use_openssl_only:
out.write(data_cipher.encrypt(chunk))
else:
out.write(encryptor.update(chunk))
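# PKCS#5: if the plaintext length was an exact multiple of the block size,
# a full block of padding (every byte equal to block_size) must still be appended.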
if not padded:
if not use_openssl_only:
out.write(
data_cipher.encrypt(block_size * chr(block_size).encode(UTF8))
)
else:
out.write(encryptor.update(block_size * chr(block_size).encode(UTF8)))
if use_openssl_only:
out.write(encryptor.finalize())
# encrypt key with QRMK
if not use_openssl_only:
key_cipher = AES.new(key=decoded_key, mode=AES.MODE_ECB)
enc_kek = key_cipher.encrypt(PKCS5_PAD(file_key, block_size))
else:
cipher = Cipher(algorithms.AES(decoded_key), modes.ECB(), backend=backend)
encryptor = cipher.encryptor()
enc_kek = (
encryptor.update(PKCS5_PAD(file_key, block_size)) + encryptor.finalize()
)
mat_desc = MaterialDescriptor(
smk_id=encryption_material.smk_id,
query_id=encryption_material.query_id,
key_size=key_size * 8,
)
metadata = EncryptionMetadata(
key=base64.b64encode(enc_kek).decode("utf-8"),
iv=base64.b64encode(iv_data).decode("utf-8"),
matdesc=matdesc_to_unicode(mat_desc),
)
return metadata
@staticmethod
def encrypt_file(
encryption_material: "SnowflakeFileEncryptionMaterial",
in_filename: str,
chunk_size: int = block_size * 4 * 1024,
tmp_dir: str = None,
) -> Tuple["EncryptionMetadata", str]:
"""Encrypts a file in a temporary directory.
Args:
encryption_material: The encryption material for file.
in_filename: The input file's name.
chunk_size: The size of read chunks (Default value = block_size * 4 * 1024).
tmp_dir: Temporary directory to use, optional (Default value = None).
Returns:
The encryption metadata and the encrypted file's location.
"""
logger = getLogger(__name__)
temp_output_fd, temp_output_file = tempfile.mkstemp(
text=False, dir=tmp_dir, prefix=os.path.basename(in_filename) + "#"
)
logger.debug(
"unencrypted file: %s, temp file: %s, tmp_dir: %s",
in_filename,
temp_output_file,
tmp_dir,
)
metadata = None
with open(in_filename, "rb") as infile:
with os.fdopen(temp_output_fd, "wb") as outfile:
metadata = SnowflakeEncryptionUtil.encrypt_stream(
encryption_material, infile, outfile, chunk_size
)
return metadata, temp_output_file
@staticmethod
def decrypt_file(
metadata,
encryption_material,
in_filename,
chunk_size=block_size * 4 * 1024,
tmp_dir=None,
):
"""Decrypts a file and stores the output in the temporary directory.
Args:
metadata: The file's metadata input.
encryption_material: The file's encryption material.
in_filename: The name of the input file.
chunk_size: The size of read chunks (Default value = block_size * 4 * 1024).
tmp_dir: Temporary directory to use, optional (Default value = None).
Returns:
The decrypted file's location.
"""
logger = getLogger(__name__)
use_openssl_only = os.getenv("SF_USE_OPENSSL_ONLY", "False") == "True"
key_base64 = metadata.key
iv_base64 = metadata.iv
decoded_key = base64.standard_b64decode(
encryption_material.query_stage_master_key
)
key_bytes = base64.standard_b64decode(key_base64)
iv_bytes = base64.standard_b64decode(iv_base64)
if not use_openssl_only:
key_cipher = AES.new(key=decoded_key, mode=AES.MODE_ECB)
file_key = PKCS5_UNPAD(key_cipher.decrypt(key_bytes))
data_cipher = AES.new(key=file_key, mode=AES.MODE_CBC, IV=iv_bytes)
else:
backend = default_backend()
cipher = Cipher(algorithms.AES(decoded_key), modes.ECB(), backend=backend)
decryptor = cipher.decryptor()
file_key = PKCS5_UNPAD(decryptor.update(key_bytes) + decryptor.finalize())
cipher = Cipher(
algorithms.AES(file_key), modes.CBC(iv_bytes), backend=backend
)
decryptor = cipher.decryptor()
temp_output_fd, temp_output_file = tempfile.mkstemp(
text=False, dir=tmp_dir, prefix=os.path.basename(in_filename) + "#"
)
total_file_size = 0
prev_chunk = None
logger.debug("encrypted file: %s, tmp file: %s", in_filename, temp_output_file)
with open(in_filename, "rb") as infile:
with os.fdopen(temp_output_fd, "wb") as outfile:
while True:
chunk = infile.read(chunk_size)
if len(chunk) == 0:
break
total_file_size += len(chunk)
if not use_openssl_only:
d = data_cipher.decrypt(chunk)
else:
d = decryptor.update(chunk)
outfile.write(d)
prev_chunk = d
if prev_chunk is not None:
total_file_size -= PKCS5_OFFSET(prev_chunk)
if use_openssl_only:
outfile.write(decryptor.finalize())
outfile.truncate(total_file_size)
return temp_output_file
|
from pandac.PandaModules import *
from toontown.toonbase.ToontownGlobals import *
from direct.interval.IntervalGlobal import *
from direct.fsm import ClassicFSM, State
from toontown.safezone import SafeZoneLoader
import random
from toontown.launcher import DownloadForceAcknowledge
from toontown.estate import House
from toontown.estate import Estate
from toontown.estate import HouseGlobals
import math
from toontown.coghq import MovingPlatform
from direct.directnotify import DirectNotifyGlobal
class EstateLoader(SafeZoneLoader.SafeZoneLoader):
notify = DirectNotifyGlobal.directNotify.newCategory('EstateLoader')
def __init__(self, hood, parentFSM, doneEvent):
SafeZoneLoader.SafeZoneLoader.__init__(self, hood, parentFSM, doneEvent)
del self.fsm
self.fsm = ClassicFSM.ClassicFSM('EstateLoader', [State.State('start', self.enterStart, self.exitStart, ['quietZone', 'estate', 'house']),
State.State('estate', self.enterEstate, self.exitEstate, ['quietZone']),
State.State('house', self.enterHouse, self.exitHouse, ['quietZone']),
State.State('quietZone', self.enterQuietZone, self.exitQuietZone, ['house', 'estate']),
State.State('final', self.enterFinal, self.exitFinal, ['start'])], 'start', 'final')
self.musicFile = 'phase_4/audio/bgm/TC_nbrhood.ogg'
self.activityMusicFile = 'phase_3.5/audio/bgm/TC_SZ_activity.ogg'
self.dnaFile = 'phase_5.5/dna/estate_1.pdna'
self.safeZoneStorageDNAFile = None
self.cloudSwitch = 0
self.id = MyEstate
self.estateOwnerId = None
self.branchZone = None
self.houseDoneEvent = 'houseDone'
self.estateDoneEvent = 'estateDone'
self.enteredHouse = None
self.houseNode = [None] * 6
self.houseModels = [None] * HouseGlobals.NUM_HOUSE_TYPES
self.houseId2house = {}
self.barrel = None
self.clouds = []
self.cloudTrack = None
self.sunMoonNode = None
self.fsm.enterInitialState()
def load(self):
SafeZoneLoader.SafeZoneLoader.load(self)
self.music = base.loader.loadMusic('phase_4/audio/bgm/TC_nbrhood.ogg')
self.underwaterSound = base.loader.loadSfx('phase_4/audio/sfx/AV_ambient_water.ogg')
self.swimSound = base.loader.loadSfx('phase_4/audio/sfx/AV_swim_single_stroke.ogg')
self.submergeSound = base.loader.loadSfx('phase_5.5/audio/sfx/AV_jump_in_water.ogg')
self.birdSound = map(base.loader.loadSfx, ['phase_4/audio/sfx/SZ_TC_bird1.ogg', 'phase_4/audio/sfx/SZ_TC_bird2.ogg', 'phase_4/audio/sfx/SZ_TC_bird3.ogg'])
self.cricketSound = map(base.loader.loadSfx, ['phase_4/audio/sfx/SZ_TC_bird1.ogg', 'phase_4/audio/sfx/SZ_TC_bird2.ogg', 'phase_4/audio/sfx/SZ_TC_bird3.ogg'])
if base.goonsEnabled:
invModel = loader.loadModel('phase_3.5/models/gui/inventory_icons')
self.invModels = []
from toontown.toonbase import ToontownBattleGlobals
for track in xrange(len(ToontownBattleGlobals.AvPropsNew)):
itemList = []
for item in xrange(len(ToontownBattleGlobals.AvPropsNew[track])):
itemList.append(invModel.find('**/' + ToontownBattleGlobals.AvPropsNew[track][item]))
self.invModels.append(itemList)
invModel.removeNode()
del invModel
def unload(self):
self.ignoreAll()
base.cr.estateMgr.leaveEstate()
self.estateOwnerId = None
self.estateZoneId = None
if self.place:
self.place.exit()
self.place.unload()
del self.place
del self.underwaterSound
del self.swimSound
del self.submergeSound
del self.birdSound
del self.cricketSound
for node in self.houseNode:
node.removeNode()
del self.houseNode
for model in self.houseModels:
model.removeNode()
del self.houseModels
del self.houseId2house
if self.sunMoonNode:
self.sunMoonNode.removeNode()
del self.sunMoonNode
self.sunMoonNode = None
if self.clouds:
for cloud in self.clouds:
cloud[0].removeNode()
del cloud[1]
del self.clouds
if self.barrel:
self.barrel.removeNode()
SafeZoneLoader.SafeZoneLoader.unload(self)
def enter(self, requestStatus):
self.estateOwnerId = requestStatus.get('ownerId', base.localAvatar.doId)
base.localAvatar.inEstate = 1
self.loadCloudPlatforms()
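# The 'and 0' below keeps this branch permanently disabled; clouds only appear if cloudSwitch was set elsewhere.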
if base.cloudPlatformsEnabled and 0:
self.setCloudSwitch(1)
if self.cloudSwitch:
self.setCloudSwitch(self.cloudSwitch)
SafeZoneLoader.SafeZoneLoader.enter(self, requestStatus)
def exit(self):
self.ignoreAll()
base.cr.cache.flush()
base.localAvatar.stopChat()
base.localAvatar.inEstate = 0
SafeZoneLoader.SafeZoneLoader.exit(self)
def createSafeZone(self, dnaFile):
SafeZoneLoader.SafeZoneLoader.createSafeZone(self, dnaFile)
self.loadHouses()
self.loadSunMoon()
def loadHouses(self):
for i in xrange(HouseGlobals.NUM_HOUSE_TYPES):
self.houseModels[i] = loader.loadModel(HouseGlobals.houseModels[i])
for i in xrange(6):
posHpr = HouseGlobals.houseDrops[i]
self.houseNode[i] = self.geom.attachNewNode('esHouse_' + str(i))
self.houseNode[i].setPosHpr(*posHpr)
def loadSunMoon(self):
self.sun = loader.loadModel('phase_4/models/props/sun.bam')
self.moon = loader.loadModel('phase_5.5/models/props/moon.bam')
self.sunMoonNode = self.geom.attachNewNode('sunMoon')
self.sunMoonNode.setPosHpr(0, 0, 0, 0, 0, 0)
if self.sun:
self.sun.reparentTo(self.sunMoonNode)
self.sun.setY(270)
self.sun.setScale(2)
self.sun.setBillboardPointEye()
if self.moon:
self.moon.setP(180)
self.moon.reparentTo(self.sunMoonNode)
self.moon.setY(-270)
self.moon.setScale(15)
self.moon.setBillboardPointEye()
self.sunMoonNode.setP(30)
def enterEstate(self, requestStatus):
self.notify.debug('enterEstate: requestStatus = %s' % requestStatus)
ownerId = requestStatus.get('ownerId')
if ownerId:
self.estateOwnerId = ownerId
zoneId = requestStatus['zoneId']
self.notify.debug('enterEstate, ownerId = %s, zoneId = %s' % (self.estateOwnerId, zoneId))
self.accept(self.estateDoneEvent, self.handleEstateDone)
self.place = Estate.Estate(self, self.estateOwnerId, zoneId, self.fsm.getStateNamed('estate'), self.estateDoneEvent)
base.cr.playGame.setPlace(self.place)
self.place.load()
self.place.enter(requestStatus)
self.estateZoneId = zoneId
def exitEstate(self):
self.notify.debug('exitEstate')
self.ignore(self.estateDoneEvent)
self.place.exit()
self.place.unload()
self.place = None
base.cr.playGame.setPlace(self.place)
base.cr.cache.flush()
return
def handleEstateDone(self, doneStatus = None):
if not doneStatus:
doneStatus = self.place.getDoneStatus()
how = doneStatus['how']
shardId = doneStatus['shardId']
hoodId = doneStatus['hoodId']
zoneId = doneStatus['zoneId']
avId = doneStatus.get('avId', -1)
ownerId = doneStatus.get('ownerId', -1)
if shardId != None or hoodId != MyEstate:
self.notify.debug('estate done, and we are backing out to a different hood/shard')
self.notify.debug('hoodId = %s, avId = %s' % (hoodId, avId))
self.doneStatus = doneStatus
messenger.send(self.doneEvent)
return
if how in ['tunnelIn',
'teleportIn',
'doorIn',
'elevatorIn']:
self.notify.debug('staying in estateloader')
self.fsm.request('quietZone', [doneStatus])
else:
self.notify.error('Exited hood with unexpected mode %s' % how)
return
def enterHouse(self, requestStatus):
ownerId = requestStatus.get('ownerId')
if ownerId:
self.estateOwnerId = ownerId
self.acceptOnce(self.houseDoneEvent, self.handleHouseDone)
self.place = House.House(self, self.estateOwnerId, self.fsm.getStateNamed('house'), self.houseDoneEvent)
base.cr.playGame.setPlace(self.place)
self.place.load()
self.place.enter(requestStatus)
def exitHouse(self):
self.ignore(self.houseDoneEvent)
self.place.exit()
self.place.unload()
self.place = None
base.cr.playGame.setPlace(self.place)
return
def handleHouseDone(self, doneStatus = None):
if not doneStatus:
doneStatus = self.place.getDoneStatus()
shardId = doneStatus['shardId']
hoodId = doneStatus['hoodId']
if shardId != None or hoodId != MyEstate:
self.doneStatus = doneStatus
messenger.send(self.doneEvent)
return
how = doneStatus['how']
if how in ['tunnelIn',
'teleportIn',
'doorIn',
'elevatorIn']:
self.fsm.request('quietZone', [doneStatus])
else:
self.notify.error('Exited hood with unexpected mode %s' % how)
return
def handleQuietZoneDone(self):
status = self.quietZoneStateData.getRequestStatus()
self.fsm.request(status['where'], [status])
def atMyEstate(self):
if self.estateOwnerId != None:
if self.estateOwnerId == base.localAvatar.getDoId():
return 1
else:
return 0
else:
self.notify.warning("We aren't in an estate")
return
def setHouse(self, houseId):
try:
houseDo = base.cr.doId2do[houseId]
self.enteredHouse = houseDo.house
except KeyError:
self.notify.debug("can't find house: %d" % houseId)
def startCloudPlatforms(self):
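# The early return below leaves the cloud-motion interval code unreachable (feature disabled in this build).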
return
if len(self.clouds):
self.cloudTrack = self.__cloudTrack()
self.cloudTrack.loop()
def stopCloudPlatforms(self):
if self.cloudTrack:
self.cloudTrack.pause()
del self.cloudTrack
self.cloudTrack = None
return
def __cloudTrack(self):
track = Parallel()
for cloud in self.clouds:
axis = cloud[1]
pos = cloud[0].getPos(render)
newPos = pos + axis * 30
reversePos = pos - axis * 30
track.append(Sequence(LerpPosInterval(cloud[0], 10, newPos), LerpPosInterval(cloud[0], 20, reversePos), LerpPosInterval(cloud[0], 10, pos)))
return track
def debugGeom(self, decomposed):
print 'numPrimitives = %d' % decomposed.getNumPrimitives()
for primIndex in xrange(decomposed.getNumPrimitives()):
prim = decomposed.getPrimitive(primIndex)
print 'prim = %s' % prim
print 'isIndexed = %d' % prim.isIndexed()
print 'prim.getNumPrimitives = %d' % prim.getNumPrimitives()
for basicPrim in xrange(prim.getNumPrimitives()):
print '%d start=%d' % (basicPrim, prim.getPrimitiveStart(basicPrim))
print '%d end=%d' % (basicPrim, prim.getPrimitiveEnd(basicPrim))
def loadOnePlatform(self, version, radius, zOffset, score, multiplier):
self.notify.debug('loadOnePlatform version=%d' % version)
cloud = NodePath('cloud-%d-%d' % (score, multiplier))
cloudModel = loader.loadModel('phase_5.5/models/estate/bumper_cloud')
cc = cloudModel.copyTo(cloud)
colCube = cc.find('**/collision')
colCube.setName('cloudSphere-0')
dTheta = 2.0 * math.pi / self.numClouds
cloud.reparentTo(self.cloudOrigin)
axes = [Vec3(1, 0, 0), Vec3(0, 1, 0), Vec3(0, 0, 1)]
cloud.setPos(radius * math.cos(version * dTheta), radius * math.sin(version * dTheta), 4 * random.random() + zOffset)
cloud.setScale(4.0)
self.clouds.append([cloud, random.choice(axes)])
def loadSkyCollision(self):
plane = CollisionPlane(Plane(Vec3(0, 0, -1), Point3(0, 0, 300)))
plane.setTangible(0)
planeNode = CollisionNode('cloudSphere-0')
planeNode.addSolid(plane)
self.cloudOrigin.attachNewNode(planeNode)
def loadCloudPlatforms(self):
self.cloudOrigin = self.geom.attachNewNode('cloudOrigin')
self.cloudOrigin.setZ(30)
self.loadSkyCollision()
self.numClouds = 12
pinballScore = PinballScoring[PinballCloudBumperLow]
for i in xrange(12):
self.loadOnePlatform(i, 40, 0, pinballScore[0], pinballScore[1])
pinballScore = PinballScoring[PinballCloudBumperMed]
for i in xrange(12):
self.loadOnePlatform(i, 60, 40, pinballScore[0], pinballScore[1])
pinballScore = PinballScoring[PinballCloudBumperHigh]
for i in xrange(12):
self.loadOnePlatform(i, 20, 80, pinballScore[0], pinballScore[1])
self.cloudOrigin.stash()
def setCloudSwitch(self, on):
self.cloudSwitch = on
if hasattr(self, 'cloudOrigin'):
if on:
self.cloudOrigin.unstash()
else:
self.cloudOrigin.stash()
|
import React from "react"
import { Link, graphql } from "gatsby"
import Bio from "../components/bio"
import Layout from "../components/layout"
import SEO from "../components/seo"
import { rhythm } from "../utils/typography"
import Button from "../components/button"
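// Gatsby runs the exported pageQuery (bottom of this file) at build time and injects the result as this.props.data.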
class IndexPage extends React.Component {
render() {
const { data } = this.props
const siteTitle = data.site.siteMetadata.title
const posts = data.allMdx.edges
return (
<Layout location={this.props.location} title={siteTitle}>
<SEO title="All posts" />
<Bio />
<div style={{ margin: "20px 0 40px" }}>
{posts.map(({ node }) => {
const title = node.frontmatter.title || node.fields.slug
return (
<div key={node.fields.slug}>
<h3
style={{
marginBottom: rhythm(1 / 4),
}}
>
<Link
style={{ boxShadow: `none` }}
to={`blog${node.fields.slug}`}
>
{title}
</Link>
</h3>
<small>{node.frontmatter.date}</small>
<p
dangerouslySetInnerHTML={{
__html: node.frontmatter.description || node.excerpt,
}}
/>
</div>
)
})}
</div>
</Layout>
)
}
}
export default IndexPage
export const pageQuery = graphql`
query {
site {
siteMetadata {
title
}
}
allMdx(sort: { fields: [frontmatter___date], order: DESC }) {
edges {
node {
excerpt
fields {
slug
}
frontmatter {
date(formatString: "MMMM DD, YYYY")
title
description
}
}
}
}
}
`
|
/*
Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.lang['sv']={"editor":"Rich Text Editor","editorPanel":"Rich Text Editor panel","common":{"editorHelp":"Tryck ALT 0 för hjälp","browseServer":"Bläddra på server","url":"URL","protocol":"Protokoll","upload":"Ladda upp","uploadSubmit":"Skicka till server","image":"Bild","flash":"Flash","form":"Formulär","checkbox":"Kryssruta","radio":"Alternativknapp","textField":"Textfält","textarea":"Textruta","hiddenField":"Dolt fält","button":"Knapp","select":"Flervalslista","imageButton":"Bildknapp","notSet":"<ej angivet>","id":"Id","name":"Namn","langDir":"Språkriktning","langDirLtr":"Vänster till Höger (VTH)","langDirRtl":"Höger till Vänster (HTV)","langCode":"Språkkod","longDescr":"URL-beskrivning","cssClass":"Stilmall","advisoryTitle":"Titel","cssStyle":"Stilmall","ok":"OK","cancel":"Avbryt","close":"Stäng","preview":"Förhandsgranska","resize":"Dra för att ändra storlek","generalTab":"Allmänt","advancedTab":"Avancerad","validateNumberFailed":"Värdet är inte ett nummer.","confirmNewPage":"Alla ändringar i innehållet kommer att förloras. Är du säker på att du vill ladda en ny sida?","confirmCancel":"Några av alternativen har ändrats. Är du säker på att du vill stänga dialogrutan?","options":"Alternativ","target":"Mål","targetNew":"Nytt fönster (_blank)","targetTop":"Översta fönstret (_top)","targetSelf":"Samma fönster (_self)","targetParent":"Föregående fönster (_parent)","langDirLTR":"Vänster till höger (LTR)","langDirRTL":"Höger till vänster (RTL)","styles":"Stil","cssClasses":"Stilmallar","width":"Bredd","height":"Höjd","align":"Justering","alignLeft":"Vänster","alignRight":"Höger","alignCenter":"Centrerad","alignJustify":"Justera till marginaler","alignTop":"Överkant","alignMiddle":"Mitten","alignBottom":"Nederkant","alignNone":"Ingen","invalidValue":"Felaktigt värde.","invalidHeight":"Höjd måste vara ett nummer.","invalidWidth":"Bredd måste vara ett nummer.","invalidCssLength":"Värdet för fältet \"%1\" måste vara ett positivt nummer med eller utan CSS-mätenheter (px, %, in, cm, mm, em, ex, pt, eller pc).","invalidHtmlLength":"Värdet för fältet \"%1\" måste vara ett positivt nummer med eller utan godkända HTML-mätenheter (px eller %).","invalidInlineStyle":"Det angivna värdet för style måste innehålla en eller flera tupler separerade med semikolon i följande format: \"name : value\"","cssLengthTooltip":"Ange ett nummer i pixlar eller ett nummer men godkänd CSS-mätenhet (px, %, in, cm, mm, em, ex, pt, eller pc).","unavailable":"%1<span class=\"cke_accessibility\">, Ej tillgänglig</span>"},"about":{"copy":"Copyright © $1. Alla rättigheter reserverade.","dlgTitle":"Om CKEditor","help":"Se $1 för hjälp.","moreInfo":"För information av licensiering besök vår hemsida:","title":"Om CKEditor","userGuide":"CKEditor User's Guide"},"basicstyles":{"bold":"Fet","italic":"Kursiv","strike":"Genomstruken","subscript":"Nedsänkta tecken","superscript":"Upphöjda tecken","underline":"Understruken"},"blockquote":{"toolbar":"Blockcitat"},"clipboard":{"copy":"Kopiera","copyError":"Säkerhetsinställningar i Er webbläsare tillåter inte åtgärden kopiera. Använd (Ctrl/Cmd+C) istället.","cut":"Klipp ut","cutError":"Säkerhetsinställningar i Er webbläsare tillåter inte åtgärden klipp ut. Använd (Ctrl/Cmd+X) istället.","paste":"Klistra in","pasteArea":"Paste Area","pasteMsg":"Var god och klistra in Er text i rutan nedan genom att använda (<strong>Ctrl/Cmd+V</strong>) klicka sen på OK.","securityMsg":"På grund av din webbläsares säkerhetsinställningar kan verktyget inte få åtkomst till urklippsdatan. Var god och använd detta fönster istället.","title":"Klistra in"},"contextmenu":{"options":"Context Menu Options"},"button":{"selectedLabel":"%1 (Vald)"},"toolbar":{"toolbarCollapse":"Dölj verktygsfält","toolbarExpand":"Visa verktygsfält","toolbarGroups":{"document":"Dokument","clipboard":"Urklipp/ångra","editing":"Redigering","forms":"Formulär","basicstyles":"Basstilar","paragraph":"Paragraf","links":"Länkar","insert":"Infoga","styles":"Stilar","colors":"Färger","tools":"Verktyg"},"toolbars":"Redigera verktygsfält"},"elementspath":{"eleLabel":"Elementets sökväg","eleTitle":"%1 element"},"format":{"label":"Teckenformat","panelTitle":"Teckenformat","tag_address":"Adress","tag_div":"Normal (DIV)","tag_h1":"Rubrik 1","tag_h2":"Rubrik 2","tag_h3":"Rubrik 3","tag_h4":"Rubrik 4","tag_h5":"Rubrik 5","tag_h6":"Rubrik 6","tag_p":"Normal","tag_pre":"Formaterad"},"horizontalrule":{"toolbar":"Infoga horisontal linje"},"image":{"alt":"Alternativ text","border":"Kant","btnUpload":"Skicka till server","button2Img":"Vill du omvandla den valda bildknappen på en enkel bild?","hSpace":"Horis. marginal","img2Button":"Vill du omvandla den valda bildknappen på en enkel bild?","infoTab":"Bildinformation","linkTab":"Länk","lockRatio":"Lås höjd/bredd förhållanden","menu":"Bildegenskaper","resetSize":"Återställ storlek","title":"Bildegenskaper","titleButton":"Egenskaper för bildknapp","upload":"Ladda upp","urlMissing":"Bildkällans URL saknas.","vSpace":"Vert. marginal","validateBorder":"Kantlinje måste vara ett heltal.","validateHSpace":"HSpace måste vara ett heltal.","validateVSpace":"VSpace måste vara ett heltal."},"indent":{"indent":"Öka indrag","outdent":"Minska indrag"},"fakeobjects":{"anchor":"Ankare","flash":"Flashanimation","hiddenfield":"Gömt fält","iframe":"iFrame","unknown":"Okänt objekt"},"link":{"acccessKey":"Behörighetsnyckel","advanced":"Avancerad","advisoryContentType":"Innehållstyp","advisoryTitle":"Titel","anchor":{"toolbar":"Infoga/Redigera ankarlänk","menu":"Egenskaper för ankarlänk","title":"Egenskaper för ankarlänk","name":"Ankarnamn","errorName":"Var god ange ett ankarnamn","remove":"Radera ankare"},"anchorId":"Efter element-id","anchorName":"Efter ankarnamn","charset":"Teckenuppställning","cssClasses":"Stilmall","emailAddress":"E-postadress","emailBody":"Innehåll","emailSubject":"Ämne","id":"Id","info":"Länkinformation","langCode":"Språkkod","langDir":"Språkriktning","langDirLTR":"Vänster till höger (VTH)","langDirRTL":"Höger till vänster (HTV)","menu":"Redigera länk","name":"Namn","noAnchors":"(Inga ankare kunde hittas)","noEmail":"Var god ange e-postadress","noUrl":"Var god ange länkens URL","other":"<annan>","popupDependent":"Beroende (endast Netscape)","popupFeatures":"Popup-fönstrets egenskaper","popupFullScreen":"Helskärm (endast IE)","popupLeft":"Position från vänster","popupLocationBar":"Adressfält","popupMenuBar":"Menyfält","popupResizable":"Resizable","popupScrollBars":"Scrolllista","popupStatusBar":"Statusfält","popupToolbar":"Verktygsfält","popupTop":"Position från sidans topp","rel":"Förhållande","selectAnchor":"Välj ett ankare","styles":"Stilmall","tabIndex":"Tabindex","target":"Mål","targetFrame":"<ram>","targetFrameName":"Målets ramnamn","targetPopup":"<popup-fönster>","targetPopupName":"Popup-fönstrets namn","title":"Länk","toAnchor":"Länk till ankare i texten","toEmail":"E-post","toUrl":"URL","toolbar":"Infoga/Redigera länk","type":"Länktyp","unlink":"Radera länk","upload":"Ladda upp"},"list":{"bulletedlist":"Infoga/ta bort punktlista","numberedlist":"Infoga/ta bort numrerad lista"},"magicline":{"title":"Infoga paragraf här"},"maximize":{"maximize":"Maximera","minimize":"Minimera"},"pastetext":{"button":"Klistra in som vanlig text","title":"Klistra in som vanlig text"},"pastefromword":{"confirmCleanup":"Texten du vill klistra in verkar vara kopierad från Word. Vill du rensa den innan du klistrar in den?","error":"Det var inte möjligt att städa upp den inklistrade data på grund av ett internt fel","title":"Klistra in från Word","toolbar":"Klistra in från Word"},"removeformat":{"toolbar":"Radera formatering"},"sourcearea":{"toolbar":"Källa"},"specialchar":{"options":"Alternativ för utökade tecken","title":"Välj utökat tecken","toolbar":"Klistra in utökat tecken"},"scayt":{"btn_about":"Om SCAYT","btn_dictionaries":"Ordlistor","btn_disable":"Inaktivera SCAYT","btn_enable":"Aktivera SCAYT","btn_langs":"Språk","btn_options":"Inställningar","text_title":"Stavningskontroll medan du skriver"},"stylescombo":{"label":"Anpassad stil","panelTitle":"Formatmallar","panelTitle1":"Blockstil","panelTitle2":"Inbäddad stil","panelTitle3":"Objektets stil"},"table":{"border":"Kantstorlek","caption":"Rubrik","cell":{"menu":"Cell","insertBefore":"Lägg till cell före","insertAfter":"Lägg till cell efter","deleteCell":"Radera celler","merge":"Sammanfoga celler","mergeRight":"Sammanfoga höger","mergeDown":"Sammanfoga ner","splitHorizontal":"Dela cell horisontellt","splitVertical":"Dela cell vertikalt","title":"Egenskaper för cell","cellType":"Celltyp","rowSpan":"Rad spann","colSpan":"Kolumnen spann","wordWrap":"Radbrytning","hAlign":"Horisontell justering","vAlign":"Vertikal justering","alignBaseline":"Baslinje","bgColor":"Bakgrundsfärg","borderColor":"Ramfärg","data":"Data","header":"Rubrik","yes":"Ja","no":"Nej","invalidWidth":"Cellens bredd måste vara ett nummer.","invalidHeight":"Cellens höjd måste vara ett nummer.","invalidRowSpan":"Radutvidgning måste vara ett heltal.","invalidColSpan":"Kolumn måste vara ett heltal.","chooseColor":"Välj"},"cellPad":"Cellutfyllnad","cellSpace":"Cellavstånd","column":{"menu":"Kolumn","insertBefore":"Lägg till kolumn före","insertAfter":"Lägg till kolumn efter","deleteColumn":"Radera kolumn"},"columns":"Kolumner","deleteTable":"Radera tabell","headers":"Rubriker","headersBoth":"Båda","headersColumn":"Första kolumnen","headersNone":"Ingen","headersRow":"Första raden","invalidBorder":"Ram måste vara ett nummer.","invalidCellPadding":"Luft i cell måste vara ett nummer.","invalidCellSpacing":"Luft i cell måste vara ett nummer.","invalidCols":"Antal kolumner måste vara ett nummer större än 0.","invalidHeight":"Tabellens höjd måste vara ett nummer.","invalidRows":"Antal rader måste vara större än 0.","invalidWidth":"Tabell måste vara ett nummer.","menu":"Tabellegenskaper","row":{"menu":"Rad","insertBefore":"Lägg till rad före","insertAfter":"Lägg till rad efter","deleteRow":"Radera rad"},"rows":"Rader","summary":"Sammanfattning","title":"Tabellegenskaper","toolbar":"Tabell","widthPc":"procent","widthPx":"pixlar","widthUnit":"enhet bredd"},"undo":{"redo":"Gör om","undo":"Ångra"},"wsc":{"btnIgnore":"Ignorera","btnIgnoreAll":"Ignorera alla","btnReplace":"Ersätt","btnReplaceAll":"Ersätt alla","btnUndo":"Ångra","changeTo":"Ändra till","errorLoading":"Tjänsten är ej tillgänglig: %s.","ieSpellDownload":"Stavningskontrollen är ej installerad. Vill du göra det nu?","manyChanges":"Stavningskontroll slutförd: %1 ord rättades.","noChanges":"Stavningskontroll slutförd: Inga ord rättades.","noMispell":"Stavningskontroll slutförd: Inga stavfel påträffades.","noSuggestions":"- Förslag saknas -","notAvailable":"Tyvärr är tjänsten ej tillgänglig nu","notInDic":"Saknas i ordlistan","oneChange":"Stavningskontroll slutförd: Ett ord rättades.","progress":"Stavningskontroll pågår...","title":"Kontrollera stavning","toolbar":"Stavningskontroll"}}; |
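// Auto-generated fuzzing harness: invokes Array.prototype.reduceRight with
// non-function arguments (which throws), records the result or "Error", and
// writes a serialized JSON report after a short delay.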
var callbackArguments = [];
var argument1 = 1.2514999785740526e+308;
var argument2 = 1.6705498558517827e+308;
var argument3 = {"49":"#,",",":126};
var base_0 = ["?","gRn","l@:","o","G","0","fhx`n?","M=+e{m",")q;"]
var r_0= undefined
try {
r_0 = base_0.reduceRight(argument1,argument2,argument3)
}
catch(e) {
r_0= "Error"
}
function serialize(array){
return array.map(function(a){
if (a === null || a == undefined) return a;
var name = a.constructor.name;
if (name==='Object' || name=='Boolean'|| name=='Array'||name=='Number'||name=='String')
return JSON.stringify(a);
return name;
});
}
setTimeout(function(){
require("fs").writeFileSync("./experiments/reduceRight/reduceRightRandom/test813.json",JSON.stringify({"baseObjects":serialize([base_0]),"returnObjects":serialize([r_0]),"callbackArgs":callbackArguments}))
},300) |
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
The module transforms.py_transform is implemented based on Python. It provides common
operations including OneHotOp.
"""
from .validators import check_one_hot_op, check_compose_list, check_random_apply, check_transforms_list, \
check_compose_call
from . import py_transforms_util as util
from .c_transforms import TensorOperation
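# Deterministic transforms carry a `random = False` attribute; Compose checks it
# so a pipeline made entirely of non-random transforms is itself flagged non-random.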
def not_random(function):
function.random = False
return function
class OneHotOp:
"""
Apply one-hot encoding to the input label, optionally smoothing it so the label distribution is softer and more continuous.
Args:
num_classes (int): Number of classes of objects in dataset.
It should be larger than the largest label number in the dataset.
smoothing_rate (float, optional): Adjustable hyperparameter for label smoothing level.
(Default=0.0 means no smoothing is applied.)
Examples:
>>> # Assume that dataset has 10 classes, thus the label ranges from 0 to 9
>>> transforms_list = [py_transforms.OneHotOp(num_classes=10, smoothing_rate=0.1)]
>>> transform = py_transforms.Compose(transforms_list)
>>> mnist_dataset = mnist_dataset.map(input_columns=["label"], operations=transform)
"""
@check_one_hot_op
def __init__(self, num_classes, smoothing_rate=0.0):
self.num_classes = num_classes
self.smoothing_rate = smoothing_rate
self.random = False
def __call__(self, label):
"""
Call method.
Args:
label (numpy.ndarray): label to be applied label smoothing.
Returns:
label (numpy.ndarray), label after smoothing is applied.
"""
return util.one_hot_encoding(label, self.num_classes, self.smoothing_rate)
class Compose:
"""
Compose a list of transforms.
.. Note::
Compose takes a list of transformations either provided in py_transforms or from user-defined implementation;
each can be an initialized transformation class or a lambda function, as long as the output from the last
transformation is a single tensor of type numpy.ndarray. See below for an example of how to use Compose
with py_transforms classes and check out FiveCrop or TenCrop for the use of them in conjunction with lambda
functions.
Args:
transforms (list): List of transformations to be applied.
Examples:
>>> image_folder_dataset_dir = "/path/to/image_folder_dataset_directory"
>>> # create a dataset that reads all files in dataset_dir with 8 threads
>>> image_folder_dataset = ds.ImageFolderDataset(image_folder_dataset_dir, num_parallel_workers=8)
>>> # create a list of transformations to be applied to the image data
>>> transform = py_transforms.Compose([py_vision.Decode(),
... py_vision.RandomHorizontalFlip(0.5),
... py_vision.ToTensor(),
... py_vision.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262)),
... py_vision.RandomErasing()])
>>> # apply the transform to the dataset through dataset.map function
>>> image_folder_dataset = image_folder_dataset.map(operations=transform, input_columns=["image"])
>>>
>>> # Compose can also be invoked implicitly, by just passing in a list of ops
>>> # the above example then becomes:
>>> transforms_list = [py_vision.Decode(),
... py_vision.RandomHorizontalFlip(0.5),
... py_vision.ToTensor(),
... py_vision.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262)),
... py_vision.RandomErasing()]
>>>
>>> # apply the transform to the dataset through dataset.map()
>>> image_folder_dataset_1 = image_folder_dataset_1.map(operations=transforms_list, input_columns=["image"])
>>>
>>> # Certain C++ and Python ops can be combined, but not all of them
>>> # An example of combined operations
>>> arr = [0, 1]
>>> dataset = ds.NumpySlicesDataset(arr, column_names=["cols"], shuffle=False)
>>> transformed_list = [py_transforms.OneHotOp(2), c_transforms.Mask(c_transforms.Relational.EQ, 1)]
>>> dataset = dataset.map(operations=transformed_list, input_columns=["cols"])
>>>
>>> # Here is an example of mixing vision ops
>>> import numpy as np
>>> op_list=[c_vision.Decode(),
... c_vision.Resize((224, 244)),
... py_vision.ToPIL(),
... np.array, # need to convert PIL image to a NumPy array to pass it to C++ operation
... c_vision.Resize((24, 24))]
>>> image_folder_dataset = image_folder_dataset.map(operations=op_list, input_columns=["image"])
"""
@check_compose_list
def __init__(self, transforms):
self.transforms = transforms
if all(hasattr(transform, "random") and not transform.random for transform in self.transforms):
self.random = False
@check_compose_call
def __call__(self, *args):
"""
Call method.
Returns:
the result of applying the composed transforms, in order, to *args.
"""
return util.compose(self.transforms, *args)
@staticmethod
def reduce(operations):
"""
Wraps adjacent Python operations in a Compose to allow mixing of Python and C++ operations
Args:
operations (list): list of tensor operations
Returns:
list, the reduced list of operations
"""
if len(operations) == 1:
if str(operations).find("c_transform") >= 0 or isinstance(operations[0], TensorOperation):
return operations
return [util.FuncWrapper(operations[0])]
new_ops, start_ind, end_ind = [], 0, 0
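# Walk the list, buffering runs of consecutive Python operations; each time a
# C++ operation appears, flush the buffered run as a single Compose.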
for i, op in enumerate(operations):
if str(op).find("c_transform") >= 0:
# reset counts
if start_ind != end_ind:
new_ops.append(Compose(operations[start_ind:end_ind]))
new_ops.append(op)
start_ind, end_ind = i + 1, i + 1
else:
end_ind += 1
# do additional check in case the last operation is a Python operation
if start_ind != end_ind:
new_ops.append(Compose(operations[start_ind:end_ind]))
return new_ops
class RandomApply:
"""
Randomly perform a series of transforms with a given probability.
Args:
transforms (list): List of transformations to apply.
prob (float, optional): The probability to apply the transformation list (default=0.5).
Examples:
>>> from mindspore.dataset.transforms.py_transforms import Compose
>>> transforms_list = [py_vision.RandomHorizontalFlip(0.5),
... py_vision.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262)),
... py_vision.RandomErasing()]
>>> transforms = Compose([py_vision.Decode(),
... py_transforms.RandomApply(transforms_list, prob=0.6),
... py_vision.ToTensor()])
>>> image_folder_dataset = image_folder_dataset.map(operations=transforms, input_columns=["image"])
"""
@check_random_apply
def __init__(self, transforms, prob=0.5):
self.prob = prob
self.transforms = transforms
def __call__(self, img):
"""
Call method.
Args:
img (PIL image): Image to which the list of transformations may be randomly applied.
Returns:
img (PIL image), Transformed image.
"""
return util.random_apply(img, self.transforms, self.prob)
class RandomChoice:
"""
Randomly select one transform from a series of transforms and apply it to the image.
Args:
transforms (list): List of transformations to be chosen from to apply.
Examples:
>>> from mindspore.dataset.transforms.py_transforms import Compose, RandomChoice
>>> transforms_list = [py_vision.RandomHorizontalFlip(0.5),
... py_vision.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262)),
... py_vision.RandomErasing()]
>>> transforms = Compose([py_vision.Decode(),
... py_transforms.RandomChoice(transforms_list),
... py_vision.ToTensor()])
>>> image_folder_dataset = image_folder_dataset.map(operations=transforms, input_columns=["image"])
"""
@check_transforms_list
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, img):
"""
Call method.
Args:
img (PIL image): Image to be transformed.
Returns:
img (PIL image), Transformed image.
"""
return util.random_choice(img, self.transforms)
class RandomOrder:
"""
Perform a series of transforms to the input PIL image in a random order.
Args:
transforms (list): List of the transformations to apply.
Examples:
>>> from mindspore.dataset.transforms.py_transforms import Compose
>>> transforms_list = [py_vision.RandomHorizontalFlip(0.5),
... py_vision.Normalize((0.491, 0.482, 0.447), (0.247, 0.243, 0.262)),
... py_vision.RandomErasing()]
>>> transforms = Compose([py_vision.Decode(),
... py_transforms.RandomOrder(transforms_list),
... py_vision.ToTensor()])
>>> image_folder_dataset = image_folder_dataset.map(operations=transforms, input_columns=["image"])
"""
@check_transforms_list
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, img):
"""
Call method.
Args:
img (PIL image): Image to which the transformations are applied in random order.
Returns:
img (PIL image), Transformed image.
"""
return util.random_order(img, self.transforms)
|
from pathlib import Path
from unittest import mock
from tests.cli_test_case import CliTestCase
class MinitestTest(CliTestCase):
test_files_dir = Path(__file__).parent.joinpath('../data/minitest/').resolve()
result_file_path = test_files_dir.joinpath('record_test_result.json')
@mock.patch('requests.request')
def test_record_test_minitest(self, mock_post):
result = self.cli('record', 'tests', '--session', self.session, 'minitest', str(self.test_files_dir) + "/")
self.assertEqual(result.exit_code, 0)
payload = self.gzipped_json_payload(mock_post)
expected = self.load_json_from_file(self.result_file_path)
self.assert_json_orderless_equal(expected, payload)
|
$(document).ready(function()
{
fetchReservationList();
function fetchReservationList(){
$('#for-release-table').DataTable({
processing: true,
serverSide: true,
ajax:"/for-release",
columns:[
{data: 'user_id', name: 'user_id'},
{data: 'name', name: 'name'},
{data: 'accession_no', name: 'accession_no'},
{data: 'title', name: 'title'},
{data: 'reservation_date', name: 'reservation_date'},
{data: 'action', name: 'action', orderable:false}
]
});
}
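// Delegated handler: release buttons are injected by DataTables after load,
// so the click listener is bound on document rather than on the buttons directly.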
$(document).on('click', '#btn-release', function()
{
let user_id, accession_no;
user_id = $(this).attr('user-id');
accession_no = $(this).attr('accession-no');
$('#user_id').val(user_id);
$('#acn_no').val(accession_no);
console.log(user_id);
console.log(accession_no);
});
}); |
import logging
from django.http import HttpResponse, JsonResponse
from django.utils.decorators import method_decorator
from django.utils.timezone import now
from rest_framework import serializers
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from datahub.company.models import CompanyPermission
from datahub.company.serializers import CompanySerializer
from datahub.core import statsd
from datahub.core.exceptions import APIBadRequestException, APIUpstreamException
from datahub.core.permissions import HasPermissions
from datahub.core.view_utils import enforce_request_content_type
from datahub.dnb_api.link_company import CompanyAlreadyDNBLinkedError, link_company_with_dnb
from datahub.dnb_api.queryset import get_company_queryset
from datahub.dnb_api.serializers import (
DNBCompanyChangeRequestSerializer,
DNBCompanyInvestigationSerializer,
DNBCompanyLinkSerializer,
DNBCompanySerializer,
DNBGetCompanyChangeRequestSerializer,
DNBMatchedCompanySerializer,
DUNSNumberSerializer,
)
from datahub.dnb_api.utils import (
create_investigation,
DNBServiceConnectionError,
DNBServiceError,
DNBServiceInvalidRequestError,
DNBServiceInvalidResponseError,
DNBServiceTimeoutError,
get_change_request,
get_company,
request_changes,
search_dnb,
)
logger = logging.getLogger(__name__)
class DNBCompanySearchView(APIView):
"""
View for searching DNB companies.
"""
permission_classes = (
HasPermissions(
f'company.{CompanyPermission.view_company}',
),
)
@method_decorator(enforce_request_content_type('application/json'))
def post(self, request):
"""
Proxy to DNB search API for POST requests. This will also hydrate results
with Data Hub company details if the company exists (and can be matched)
on Data Hub.
"""
upstream_response = search_dnb(
query_params=request.data,
request=request,
)
if upstream_response.status_code == status.HTTP_200_OK:
response_body = upstream_response.json()
response_body['results'] = self._format_and_hydrate(
response_body.get('results', []),
)
return JsonResponse(response_body)
return HttpResponse(
upstream_response.text,
status=upstream_response.status_code,
content_type=upstream_response.headers.get('content-type'),
)
def _get_datahub_companies_by_duns(self, duns_numbers):
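# Fetch all matching Data Hub companies in one query and index them by
# duns_number, avoiding a per-result lookup.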
datahub_companies = get_company_queryset().filter(duns_number__in=duns_numbers)
return {
company.duns_number: company for company in datahub_companies
}
def _get_datahub_company_data(self, datahub_company):
if datahub_company:
return DNBMatchedCompanySerializer(
datahub_company,
context={'request': self.request},
).data
return None
def _get_hydrated_results(self, dnb_results, datahub_companies_by_duns):
dnb_datahub_company_pairs = (
(
dnb_company,
self._get_datahub_company_data(
datahub_companies_by_duns.get(dnb_company['duns_number']),
),
) for dnb_company in dnb_results
)
return [
{
'dnb_company': dnb_company,
'datahub_company': datahub_company,
} for dnb_company, datahub_company in dnb_datahub_company_pairs
]
def _format_and_hydrate(self, dnb_results):
"""
Format each result from DNB such that there is a "dnb_company" key and
a "datahub_company" key. The value for "datahub_company" represents
the corresponding Company entry on Data Hub for the DNB result, if it
exists.
This changes a DNB result entry from:
{
"duns_number": "999999999",
"primary_name": "My Company LTD",
...
}
To:
{
"dnb_company": {
"duns_number": "999999999",
"primary_name": "My Company LTD",
...
},
"datahub_company": {
"id": "0f5216e0-849f-11e6-ae22-56b6b6499611",
"latest_interaction": {
"id": "e8c3534f-4f60-4c93-9880-09c22e4fc011",
"created_on": "2018-04-08T14:00:00Z",
"date": "2018-06-06",
"subject": "Meeting with Joe Bloggs"
}
}
}
"""
duns_numbers = [result['duns_number'] for result in dnb_results]
datahub_companies_by_duns = self._get_datahub_companies_by_duns(duns_numbers)
hydrated_results = self._get_hydrated_results(dnb_results, datahub_companies_by_duns)
return hydrated_results
class DNBCompanyCreateView(APIView):
"""
View for creating datahub company from DNB data.
"""
permission_classes = (
HasPermissions(
f'company.{CompanyPermission.view_company}',
f'company.{CompanyPermission.add_company}',
),
)
def post(self, request):
"""
Given a duns_number, get the data for the company from dnb-service
and create a record in DataHub.
"""
duns_serializer = DUNSNumberSerializer(data=request.data)
duns_serializer.is_valid(raise_exception=True)
duns_number = duns_serializer.validated_data['duns_number']
try:
dnb_company = get_company(duns_number, request)
except (DNBServiceConnectionError, DNBServiceError, DNBServiceInvalidResponseError) as exc:
raise APIUpstreamException(str(exc))
except DNBServiceInvalidRequestError as exc:
raise APIBadRequestException(str(exc))
company_serializer = DNBCompanySerializer(
data=dnb_company,
)
try:
company_serializer.is_valid(raise_exception=True)
except serializers.ValidationError:
message = 'Company data from DNB failed DH serializer validation'
extra_data = {
'formatted_dnb_company_data': dnb_company,
'dh_company_serializer_errors': company_serializer.errors,
}
logger.error(message, extra=extra_data)
raise
datahub_company = company_serializer.save(
created_by=request.user,
modified_by=request.user,
dnb_modified_on=now(),
)
statsd.incr('dnb.create.company')
return Response(
company_serializer.to_representation(datahub_company),
)
class DNBCompanyLinkView(APIView):
"""
View for linking a company to a DNB record.
"""
permission_classes = (
HasPermissions(
f'company.{CompanyPermission.view_company}',
f'company.{CompanyPermission.change_company}',
),
)
@method_decorator(enforce_request_content_type('application/json'))
def post(self, request):
"""
Given a Data Hub Company ID and a duns-number, link the Data Hub
Company to the D&B record.
"""
link_serializer = DNBCompanyLinkSerializer(data=request.data)
link_serializer.is_valid(raise_exception=True)
# This bit: validated_data['company_id'].id is weird but the alternative
# is to rename the field to `company_id` which would (1) still be weird
# and (2) leak the weirdness to the API
company_id = link_serializer.validated_data['company_id'].id
duns_number = link_serializer.validated_data['duns_number']
try:
company = link_company_with_dnb(company_id, duns_number, request.user)
except (
DNBServiceConnectionError,
DNBServiceInvalidResponseError,
DNBServiceError,
) as exc:
raise APIUpstreamException(str(exc))
except (
DNBServiceInvalidRequestError,
CompanyAlreadyDNBLinkedError,
) as exc:
raise APIBadRequestException(str(exc))
return Response(
CompanySerializer().to_representation(company),
)
class DNBCompanyChangeRequestView(APIView):
"""
View for requesting change/s to DNB companies.
"""
permission_classes = (
HasPermissions(
f'company.{CompanyPermission.view_company}',
f'company.{CompanyPermission.change_company}',
),
)
@method_decorator(enforce_request_content_type('application/json'))
def post(self, request):
"""
A thin wrapper around the dnb-service change request API.
"""
change_request_serializer = DNBCompanyChangeRequestSerializer(data=request.data)
change_request_serializer.is_valid(raise_exception=True)
try:
response = request_changes(**change_request_serializer.validated_data)
except (
DNBServiceConnectionError,
DNBServiceTimeoutError,
DNBServiceError,
) as exc:
raise APIUpstreamException(str(exc))
return Response(response)
def get(self, request):
"""
A thin wrapper around the dnb-service change request API.
"""
duns_number = request.query_params.get('duns_number', None)
change_request_status = request.query_params.get('status', None)
change_request_serializer = DNBGetCompanyChangeRequestSerializer(
data={'duns_number': duns_number, 'status': change_request_status},
)
change_request_serializer.is_valid(raise_exception=True)
try:
response = get_change_request(**change_request_serializer.validated_data)
except (
DNBServiceConnectionError,
DNBServiceTimeoutError,
DNBServiceError,
) as exc:
raise APIUpstreamException(str(exc))
return Response(response)
class DNBCompanyInvestigationView(APIView):
"""
View for creating a new investigation to get D&B to investigate and create a company record.
"""
permission_classes = (
HasPermissions(
f'company.{CompanyPermission.view_company}',
f'company.{CompanyPermission.change_company}',
),
)
@method_decorator(enforce_request_content_type('application/json'))
def post(self, request):
"""
A wrapper around the investigation API endpoint for dnb-service.
"""
investigation_serializer = DNBCompanyInvestigationSerializer(data=request.data)
investigation_serializer.is_valid(raise_exception=True)
data = {'company_details': investigation_serializer.validated_data}
company = data['company_details'].pop('company')
try:
response = create_investigation(data)
except (
DNBServiceConnectionError,
DNBServiceTimeoutError,
DNBServiceError,
) as exc:
raise APIUpstreamException(str(exc))
company.dnb_investigation_id = response['id']
company.pending_dnb_investigation = True
company.save()
return Response(response)
|
'use strict';
const Action = require('./Action');
const { Events } = require('../../util/Constants');
class MessageReactionRemoveEmoji extends Action {
handle(data) {
const channel = this.getChannel(data);
if (!channel || !channel.isTextBased()) return false;
const message = this.getMessage(data, channel);
if (!message) return false;
const reaction = this.getReaction(data, message);
if (!reaction) return false;
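// For fully cached messages, drop the emoji's entry from the local reaction cache before emitting.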
if (!message.partial) message.reactions.cache.delete(reaction.emoji.id ?? reaction.emoji.name);
/**
* Emitted when a bot removes an emoji reaction from a cached message.
* @event Client#messageReactionRemoveEmoji
* @param {MessageReaction} reaction The reaction that was removed
*/
this.client.emit(Events.MESSAGE_REACTION_REMOVE_EMOJI, reaction);
return { reaction };
}
}
module.exports = MessageReactionRemoveEmoji;
|
from pathlib import Path
base = """
<html lang="en">
<head>
<meta charset="utf-8">
<title>Attention-Guided CG</title>
<script src="https://code.jquery.com/jquery-3.4.1.min.js"
integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo=" crossorigin="anonymous"></script>
<style>
html,
body {
height: 100%;
max-height: 100%;
margin: 0px;
}
object {
width: 400px;
height: 200px;
}
iframe {
width: 100%;
height: 90%;
}
#goUp {
position: fixed;
bottom: 10px;
right: 10px;
background: lightskyblue;
padding: 14px 20px 6px;
border-radius: 109px;
font-size: 2em;
color: white;
cursor: pointer;
}
img {
margin: 5px;
}
</style>
</head>
<body>
<div id="goUp">^</div>
<div id="control">
<form action="">
<label for="epoch">Epoch: </label><input type="number" name="epoch" id="epochInput" value="0" />
<label for="domains">Domain: </label><select name="domains" id="selectDomain">
<option value="AB">AB</option>
<option value="A">A</option>
<option value="B">B</option>
</select>
<button type="submit">Go</button>
</form>
</div>
<h3 id="loading">loading...</h3>
<div id="imgs" style='display:none'>
"""
end = """
</div>
</body>
<script>
$(function () {
$("#goUp").click(() => {
$(window).scrollTop(0);
})
$("#loading").hide()
$("form").submit((e) => {
$("#loading").show();
$("#imgs").show()
e.preventDefault()
// alert($("#epochInput").val() + " " + )
const domain = $("#selectDomain option:selected").text();
const epoch = parseInt($("#epochInput").val(), 10);
$("img").hide()
$(".imgDiv").hide()
$("#imgs" + epoch).show()
console.log("#imgs" + epoch)
$("img").each((i, el) => {
let src = $(el).attr("src");
const ep = parseInt(src.match(/\d+/)[0], 10);
let dom;
if (domain !== "AB"){
if (src.indexOf("mask_") >= 0) {
dom = src.split("_")[1].indexOf(domain.toLowerCase())
} else {
dom = src.split("_")[0].indexOf(domain);
}
} else {
dom = 0;
}
console.log(ep, dom)
if (ep === epoch && dom >= 0) {
$(el).show()
}
})
$("#loading").hide();
})
});
</script>
</html>
"""
if __name__ == "__main__":
# 1. put this file alongside imgs/ and epoch_X.html files
# 2. run it $ python combine.py
# 3. open combined.html
# 4. wait a bit, there are a lot of images to load. When it's done,
    #    the "loading..." text will disappear
    # 5. select an epoch number and a domain (AB, A, or B)
# 6. click "go" or hit return key
files = Path().glob("*epoch*.html")
final = "<h1>Epochs</h1>"
files = sorted(files, key=lambda x: x.name)
for i, fi in enumerate(files):
with fi.open("r") as f:
id = fi.name.split("epoch_")[1].split(".html")[0]
final += f"<div style='display:none' class='imgDiv' id='imgs{id}'><h2 id='head{id}' class='epochHead'>Epoch {id} </h2>\n {f.read()}</div>"
# if i > 10:
# break
with open("combined.html", "w") as f:
f.write(base + final + end)
|
// MIT License:
//
// Copyright (c) 2010-2013, Joe Walnes
// 2013-2014, Drew Noakes
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
/**
* Smoothie Charts - http://smoothiecharts.org/
* (c) 2010-2013, Joe Walnes
* 2013-2014, Drew Noakes
*
* v1.0: Main charting library, by Joe Walnes
* v1.1: Auto scaling of axis, by Neil Dunn
* v1.2: fps (frames per second) option, by Mathias Petterson
* v1.3: Fix for divide by zero, by Paul Nikitochkin
* v1.4: Set minimum, top-scale padding, remove timeseries, add optional timer to reset bounds, by Kelley Reynolds
* v1.5: Set default frames per second to 50... smoother.
* .start(), .stop() methods for conserving CPU, by Dmitry Vyal
* options.interpolation = 'bezier' or 'line', by Dmitry Vyal
* options.maxValue to fix scale, by Dmitry Vyal
* v1.6: minValue/maxValue will always get converted to floats, by Przemek Matylla
* v1.7: options.grid.fillStyle may be a transparent color, by Dmitry A. Shashkin
* Smooth rescaling, by Kostas Michalopoulos
* v1.8: Set max length to customize number of live points in the dataset with options.maxDataSetLength, by Krishna Narni
* v1.9: Display timestamps along the bottom, by Nick and Stev-io
* (https://groups.google.com/forum/?fromgroups#!topic/smoothie-charts/-Ywse8FCpKI%5B1-25%5D)
* Refactored by Krishna Narni, to support timestamp formatting function
* v1.10: Switch to requestAnimationFrame, removed the now obsoleted options.fps, by Gergely Imreh
* v1.11: options.grid.sharpLines option added, by @drewnoakes
* Addressed warning seen in Firefox when seriesOption.fillStyle undefined, by @drewnoakes
* v1.12: Support for horizontalLines added, by @drewnoakes
* Support for yRangeFunction callback added, by @drewnoakes
* v1.13: Fixed typo (#32), by @alnikitich
* v1.14: Timer cleared when last TimeSeries removed (#23), by @davidgaleano
* Fixed diagonal line on chart at start/end of data stream, by @drewnoakes
* v1.15: Support for npm package (#18), by @dominictarr
* Fixed broken removeTimeSeries function (#24) by @davidgaleano
* Minor performance and tidying, by @drewnoakes
* v1.16: Bug fix introduced in v1.14 relating to timer creation/clearance (#23), by @drewnoakes
* TimeSeries.append now deals with out-of-order timestamps, and can merge duplicates, by @zacwitte (#12)
* Documentation and some local variable renaming for clarity, by @drewnoakes
* v1.17: Allow control over font size (#10), by @drewnoakes
* Timestamp text won't overlap, by @drewnoakes
* v1.18: Allow control of max/min label precision, by @drewnoakes
* Added 'borderVisible' chart option, by @drewnoakes
* Allow drawing series with fill but no stroke (line), by @drewnoakes
* v1.19: Avoid unnecessary repaints, and fixed flicker in old browsers having multiple charts in document (#40), by @asbai
* v1.20: Add SmoothieChart.getTimeSeriesOptions and SmoothieChart.bringToFront functions, by @drewnoakes
* v1.21: Add 'step' interpolation mode, by @drewnoakes
* v1.22: Add support for different pixel ratios. Also add optional y limit formatters, by @copacetic
* v1.23: Fix bug introduced in v1.22 (#44), by @drewnoakes
* v1.24: Fix bug introduced in v1.23, re-adding parseFloat to y-axis formatter defaults, by @siggy_sf
* v1.25: Fix bug seen when adding a data point to TimeSeries which is older than the current data, by @Nking92
* Draw time labels on top of series, by @comolosabia
* Add TimeSeries.clear function, by @drewnoakes
* v1.26: Add support for resizing on high device pixel ratio screens, by @copacetic
* v1.27: Fix bug introduced in v1.26 for non whole number devicePixelRatio values, by @zmbush
* v1.28: Add 'minValueScale' option, by @megawac
*/
;(function(exports) {
var Util = {
extend: function() {
arguments[0] = arguments[0] || {};
for (var i = 1; i < arguments.length; i++)
{
for (var key in arguments[i])
{
if (arguments[i].hasOwnProperty(key))
{
if (typeof(arguments[i][key]) === 'object') {
if (arguments[i][key] instanceof Array) {
arguments[0][key] = arguments[i][key];
} else {
arguments[0][key] = Util.extend(arguments[0][key], arguments[i][key]);
}
} else {
arguments[0][key] = arguments[i][key];
}
}
}
}
return arguments[0];
}
};
/**
* Initialises a new <code>TimeSeries</code> with optional data options.
*
* Options are of the form (defaults shown):
*
* <pre>
* {
* resetBounds: true, // enables/disables automatic scaling of the y-axis
* resetBoundsInterval: 3000 // the period between scaling calculations, in millis
* }
* </pre>
*
* Presentation options for TimeSeries are specified as an argument to <code>SmoothieChart.addTimeSeries</code>.
*
* @constructor
*/
function TimeSeries(options) {
this.options = Util.extend({}, TimeSeries.defaultOptions, options);
this.clear();
}
TimeSeries.defaultOptions = {
resetBoundsInterval: 3000,
resetBounds: true
};
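// Usage sketch for the options documented above (all names are defined in this file):
//   var series = new TimeSeries({ resetBounds: true, resetBoundsInterval: 5000 });
//   series.append(new Date().getTime(), 42);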
/**
* Clears all data and state from this TimeSeries object.
*/
TimeSeries.prototype.clear = function() {
this.data = [];
this.maxValue = Number.NaN; // The maximum value ever seen in this TimeSeries.
this.minValue = Number.NaN; // The minimum value ever seen in this TimeSeries.
};
/**
* Recalculate the min/max values for this <code>TimeSeries</code> object.
*
* This causes the graph to scale itself in the y-axis.
*/
TimeSeries.prototype.resetBounds = function() {
if (this.data.length) {
// Walk through all data points, finding the min/max value
this.maxValue = this.data[0][1];
this.minValue = this.data[0][1];
for (var i = 1; i < this.data.length; i++) {
var value = this.data[i][1];
if (value > this.maxValue) {
this.maxValue = value;
}
if (value < this.minValue) {
this.minValue = value;
}
}
} else {
// No data exists, so set min/max to NaN
this.maxValue = Number.NaN;
this.minValue = Number.NaN;
}
};
/**
* Adds a new data point to the <code>TimeSeries</code>, preserving chronological order.
*
* @param timestamp the position, in time, of this data point
* @param value the value of this data point
* @param sumRepeatedTimeStampValues if <code>timestamp</code> has an exact match in the series, this flag controls
* whether it is replaced, or the values summed (defaults to false.)
*/
TimeSeries.prototype.append = function(timestamp, value, sumRepeatedTimeStampValues) {
// Rewind until we hit an older timestamp
var i = this.data.length - 1;
while (i >= 0 && this.data[i][0] > timestamp) {
i--;
}
if (i === -1) {
// This new item is the oldest data
this.data.splice(0, 0, [timestamp, value]);
} else if (this.data.length > 0 && this.data[i][0] === timestamp) {
// Update existing values in the array
if (sumRepeatedTimeStampValues) {
// Sum this value into the existing 'bucket'
this.data[i][1] += value;
value = this.data[i][1];
} else {
// Replace the previous value
this.data[i][1] = value;
}
} else if (i < this.data.length - 1) {
// Splice into the correct position to keep timestamps in order
this.data.splice(i + 1, 0, [timestamp, value]);
} else {
// Add to the end of the array
this.data.push([timestamp, value]);
}
this.maxValue = isNaN(this.maxValue) ? value : Math.max(this.maxValue, value);
this.minValue = isNaN(this.minValue) ? value : Math.min(this.minValue, value);
};
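// Example of the duplicate-timestamp handling described above:
//   var ts = new TimeSeries();
//   ts.append(1000, 1);
//   ts.append(1000, 2);        // same timestamp, replace: ts.data -> [[1000, 2]]
//   ts.append(1000, 3, true);  // same timestamp, sum:     ts.data -> [[1000, 5]]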
TimeSeries.prototype.dropOldData = function(oldestValidTime, maxDataSetLength) {
// We must always keep one expired data point as we need this to draw the
// line that comes into the chart from the left, but any points prior to that can be removed.
var removeCount = 0;
while (this.data.length - removeCount >= maxDataSetLength && this.data[removeCount + 1][0] < oldestValidTime) {
removeCount++;
}
if (removeCount !== 0) {
this.data.splice(0, removeCount);
}
};
/**
* Initialises a new <code>SmoothieChart</code>.
*
* Options are optional, and should be of the form below. Just specify the values you
* need and the rest will be given sensible defaults as shown:
*
* <pre>
* {
* minValue: undefined, // specify to clamp the lower y-axis to a given value
* maxValue: undefined, // specify to clamp the upper y-axis to a given value
* maxValueScale: 1, // allows proportional padding to be added above the chart. for 10% padding, specify 1.1.
* minValueScale: 1, // allows proportional padding to be added below the chart. for 10% padding, specify 1.1.
* yRangeFunction: undefined, // function({min: , max: }) { return {min: , max: }; }
* scaleSmoothing: 0.125, // controls the rate at which y-value zoom animation occurs
* millisPerPixel: 20, // sets the speed at which the chart pans by
* enableDpiScaling: true, // support rendering at different DPI depending on the device
* yMinFormatter: function(min, precision) { // callback function that formats the min y value label
* return parseFloat(min).toFixed(precision);
* },
* yMaxFormatter: function(max, precision) { // callback function that formats the max y value label
* return parseFloat(max).toFixed(precision);
* },
* maxDataSetLength: 2,
* interpolation: 'bezier' // one of 'bezier', 'linear', or 'step'
* timestampFormatter: null, // optional function to format time stamps for bottom of chart
* // you may use SmoothieChart.timeFormatter, or your own: function(date) { return ''; }
* scrollBackwards: false, // reverse the scroll direction of the chart
* horizontalLines: [], // [ { value: 0, color: '#ffffff', lineWidth: 1 } ]
* grid:
* {
* fillStyle: '#000000', // the background colour of the chart
* lineWidth: 1, // the pixel width of grid lines
* strokeStyle: '#777777', // colour of grid lines
* millisPerLine: 1000, // distance between vertical grid lines
* sharpLines: false, // controls whether grid lines are 1px sharp, or softened
* verticalSections: 2, // number of vertical sections marked out by horizontal grid lines
* borderVisible: true // whether the grid lines trace the border of the chart or not
* },
* labels
* {
* disabled: false, // enables/disables labels showing the min/max values
* fillStyle: '#ffffff', // colour for text of labels,
* fontSize: 15,
* fontFamily: 'sans-serif',
* precision: 2
* }
* }
* </pre>
*
* @constructor
*/
function SmoothieChart(options) {
this.options = Util.extend({}, SmoothieChart.defaultChartOptions, options);
this.seriesSet = [];
this.currentValueRange = 1;
this.currentVisMinValue = 0;
this.lastRenderTimeMillis = 0;
}
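// Usage sketch: construct a chart with a subset of the options documented above;
// anything unspecified falls back to SmoothieChart.defaultChartOptions below.
//   var chart = new SmoothieChart({
//     millisPerPixel: 50,
//     interpolation: 'linear',
//     grid: { millisPerLine: 2000, verticalSections: 4 }
//   });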
SmoothieChart.defaultChartOptions = {
millisPerPixel: 20,
enableDpiScaling: true,
yMinFormatter: function(min, precision) {
return parseFloat(min).toFixed(precision);
},
yMaxFormatter: function(max, precision) {
return parseFloat(max).toFixed(precision);
},
maxValueScale: 1,
minValueScale: 1,
interpolation: 'bezier',
scaleSmoothing: 0.125,
maxDataSetLength: 2,
scrollBackwards: false,
grid: {
fillStyle: '#000000',
strokeStyle: '#777777',
lineWidth: 1,
sharpLines: false,
millisPerLine: 1000,
verticalSections: 2,
borderVisible: true
},
labels: {
fillStyle: '#ffffff',
disabled: false,
fontSize: 10,
fontFamily: 'monospace',
precision: 2
},
horizontalLines: []
};
// Based on http://inspirit.github.com/jsfeat/js/compatibility.js
SmoothieChart.AnimateCompatibility = (function() {
var requestAnimationFrame = function(callback, element) {
var requestAnimationFrame =
window.requestAnimationFrame ||
window.webkitRequestAnimationFrame ||
window.mozRequestAnimationFrame ||
window.oRequestAnimationFrame ||
window.msRequestAnimationFrame ||
function(callback) {
return window.setTimeout(function() {
callback(new Date().getTime());
}, 16);
};
return requestAnimationFrame.call(window, callback, element);
},
cancelAnimationFrame = function(id) {
var cancelAnimationFrame =
window.cancelAnimationFrame ||
function(id) {
clearTimeout(id);
};
return cancelAnimationFrame.call(window, id);
};
return {
requestAnimationFrame: requestAnimationFrame,
cancelAnimationFrame: cancelAnimationFrame
};
})();
SmoothieChart.defaultSeriesPresentationOptions = {
lineWidth: 1,
strokeStyle: '#ffffff'
};
/**
* Adds a <code>TimeSeries</code> to this chart, with optional presentation options.
*
* Presentation options should be of the form (defaults shown):
*
* <pre>
* {
* lineWidth: 1,
* strokeStyle: '#ffffff',
* fillStyle: undefined
* }
* </pre>
*/
SmoothieChart.prototype.addTimeSeries = function(timeSeries, options) {
this.seriesSet.push({timeSeries: timeSeries, options: Util.extend({}, SmoothieChart.defaultSeriesPresentationOptions, options)});
if (timeSeries.options.resetBounds && timeSeries.options.resetBoundsInterval > 0) {
timeSeries.resetBoundsTimerId = setInterval(
function() {
timeSeries.resetBounds();
},
timeSeries.options.resetBoundsInterval
);
}
};
/**
* Removes the specified <code>TimeSeries</code> from the chart.
*/
SmoothieChart.prototype.removeTimeSeries = function(timeSeries) {
// Find the correct timeseries to remove, and remove it
var numSeries = this.seriesSet.length;
for (var i = 0; i < numSeries; i++) {
if (this.seriesSet[i].timeSeries === timeSeries) {
this.seriesSet.splice(i, 1);
break;
}
}
// If a timer was operating for that timeseries, remove it
if (timeSeries.resetBoundsTimerId) {
// Stop resetting the bounds, if we were
clearInterval(timeSeries.resetBoundsTimerId);
}
};
/**
* Gets render options for the specified <code>TimeSeries</code>.
*
* As you may use a single <code>TimeSeries</code> in multiple charts with different formatting in each usage,
* these settings are stored in the chart.
*/
SmoothieChart.prototype.getTimeSeriesOptions = function(timeSeries) {
// Find the correct timeseries to remove, and remove it
var numSeries = this.seriesSet.length;
for (var i = 0; i < numSeries; i++) {
if (this.seriesSet[i].timeSeries === timeSeries) {
return this.seriesSet[i].options;
}
}
};
/**
* Brings the specified <code>TimeSeries</code> to the top of the chart. It will be rendered last.
*/
SmoothieChart.prototype.bringToFront = function(timeSeries) {
// Find the correct timeseries to remove, and remove it
var numSeries = this.seriesSet.length;
for (var i = 0; i < numSeries; i++) {
if (this.seriesSet[i].timeSeries === timeSeries) {
var set = this.seriesSet.splice(i, 1);
this.seriesSet.push(set[0]);
break;
}
}
};
/**
* Instructs the <code>SmoothieChart</code> to start rendering to the provided canvas, with specified delay.
*
* @param canvas the target canvas element
* @param delayMillis an amount of time to wait before a data point is shown. This can prevent the end of the series
* from appearing on screen, with new values flashing into view, at the expense of some latency.
*/
SmoothieChart.prototype.streamTo = function(canvas, delayMillis) {
this.canvas = canvas;
this.delay = delayMillis;
this.start();
};
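// End-to-end sketch using only the API defined in this file (assumes a
// <canvas id="chart"> element exists on the page):
//   var chart = new SmoothieChart();
//   var series = new TimeSeries();
//   chart.addTimeSeries(series, { strokeStyle: '#00ff00', lineWidth: 2 });
//   chart.streamTo(document.getElementById('chart'), 500 /* delayMillis */);
//   setInterval(function() {
//     series.append(new Date().getTime(), Math.random());
//   }, 250);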
/**
* Make sure the canvas has the optimal resolution for the device's pixel ratio.
*/
SmoothieChart.prototype.resize = function() {
// TODO this function doesn't handle the value of enableDpiScaling changing during execution
if (!this.options.enableDpiScaling || !window || window.devicePixelRatio === 1)
return;
var dpr = window.devicePixelRatio;
var width = parseInt(this.canvas.getAttribute('width'));
var height = parseInt(this.canvas.getAttribute('height'));
if (!this.originalWidth || (Math.floor(this.originalWidth * dpr) !== width)) {
this.originalWidth = width;
this.canvas.setAttribute('width', (Math.floor(width * dpr)).toString());
this.canvas.style.width = width + 'px';
this.canvas.getContext('2d').scale(dpr, dpr);
}
if (!this.originalHeight || (Math.floor(this.originalHeight * dpr) !== height)) {
this.originalHeight = height;
this.canvas.setAttribute('height', (Math.floor(height * dpr)).toString());
this.canvas.style.height = height + 'px';
this.canvas.getContext('2d').scale(dpr, dpr);
}
};
/**
* Starts the animation of this chart.
*/
SmoothieChart.prototype.start = function() {
if (this.frame) {
// We're already running, so just return
return;
}
// Renders a frame, and queues the next frame for later rendering
var animate = function() {
this.frame = SmoothieChart.AnimateCompatibility.requestAnimationFrame(function() {
this.render();
animate();
}.bind(this));
}.bind(this);
animate();
};
/**
* Stops the animation of this chart.
*/
SmoothieChart.prototype.stop = function() {
if (this.frame) {
SmoothieChart.AnimateCompatibility.cancelAnimationFrame(this.frame);
delete this.frame;
}
};
SmoothieChart.prototype.updateValueRange = function() {
// Calculate the current scale of the chart, from all time series.
var chartOptions = this.options,
chartMaxValue = Number.NaN,
chartMinValue = Number.NaN;
for (var d = 0; d < this.seriesSet.length; d++) {
// TODO(ndunn): We could calculate / track these values as they stream in.
var timeSeries = this.seriesSet[d].timeSeries;
if (!isNaN(timeSeries.maxValue)) {
chartMaxValue = !isNaN(chartMaxValue) ? Math.max(chartMaxValue, timeSeries.maxValue) : timeSeries.maxValue;
}
if (!isNaN(timeSeries.minValue)) {
chartMinValue = !isNaN(chartMinValue) ? Math.min(chartMinValue, timeSeries.minValue) : timeSeries.minValue;
}
}
// Scale the chartMaxValue to add padding at the top if required
if (chartOptions.maxValue != null) {
chartMaxValue = chartOptions.maxValue;
} else {
chartMaxValue *= chartOptions.maxValueScale;
}
// Set the minimum if we've specified one
if (chartOptions.minValue != null) {
chartMinValue = chartOptions.minValue;
} else {
chartMinValue -= Math.abs(chartMinValue * chartOptions.minValueScale - chartMinValue);
}
// If a custom range function is set, call it
if (this.options.yRangeFunction) {
var range = this.options.yRangeFunction({min: chartMinValue, max: chartMaxValue});
chartMinValue = range.min;
chartMaxValue = range.max;
}
if (!isNaN(chartMaxValue) && !isNaN(chartMinValue)) {
var targetValueRange = chartMaxValue - chartMinValue;
var valueRangeDiff = (targetValueRange - this.currentValueRange);
var minValueDiff = (chartMinValue - this.currentVisMinValue);
this.isAnimatingScale = Math.abs(valueRangeDiff) > 0.1 || Math.abs(minValueDiff) > 0.1;
this.currentValueRange += chartOptions.scaleSmoothing * valueRangeDiff;
this.currentVisMinValue += chartOptions.scaleSmoothing * minValueDiff;
}
this.valueRange = { min: chartMinValue, max: chartMaxValue };
};
SmoothieChart.prototype.render = function(canvas, time) {
var nowMillis = new Date().getTime();
if (!this.isAnimatingScale) {
// We're not animating. We can use the last render time and the scroll speed to work out whether
// we actually need to paint anything yet. If not, we can return immediately.
// Render at least every 1/6th of a second. The canvas may be resized, which there is
// no reliable way to detect.
var maxIdleMillis = Math.min(1000/6, this.options.millisPerPixel);
if (nowMillis - this.lastRenderTimeMillis < maxIdleMillis) {
return;
}
}
this.resize();
this.lastRenderTimeMillis = nowMillis;
canvas = canvas || this.canvas;
time = time || nowMillis - (this.delay || 0);
// Round time down to pixel granularity, so motion appears smoother.
time -= time % this.options.millisPerPixel;
var context = canvas.getContext('2d'),
chartOptions = this.options,
dimensions = { top: 0, left: 0, width: canvas.clientWidth, height: canvas.clientHeight },
// Calculate the threshold time for the oldest data points.
oldestValidTime = time - (dimensions.width * chartOptions.millisPerPixel),
valueToYPixel = function(value) {
var offset = value - this.currentVisMinValue;
return this.currentValueRange === 0
? dimensions.height
: dimensions.height - (Math.round((offset / this.currentValueRange) * dimensions.height));
}.bind(this),
timeToXPixel = function(t) {
if(chartOptions.scrollBackwards) {
return Math.round((time - t) / chartOptions.millisPerPixel);
}
return Math.round(dimensions.width - ((time - t) / chartOptions.millisPerPixel));
};
this.updateValueRange();
context.font = chartOptions.labels.fontSize + 'px ' + chartOptions.labels.fontFamily;
// Save the state of the canvas context, any transformations applied in this method
// will get removed from the stack at the end of this method when .restore() is called.
context.save();
// Move the origin.
context.translate(dimensions.left, dimensions.top);
// Create a clipped rectangle - anything we draw will be constrained to this rectangle.
// This prevents the occasional pixels from curves near the edges overrunning and creating
// screen cheese (that phrase should need no explanation).
context.beginPath();
context.rect(0, 0, dimensions.width, dimensions.height);
context.clip();
// Clear the working area.
context.save();
context.fillStyle = chartOptions.grid.fillStyle;
context.clearRect(0, 0, dimensions.width, dimensions.height);
context.fillRect(0, 0, dimensions.width, dimensions.height);
context.restore();
// Grid lines...
context.save();
context.lineWidth = chartOptions.grid.lineWidth;
context.strokeStyle = chartOptions.grid.strokeStyle;
// Vertical (time) dividers.
if (chartOptions.grid.millisPerLine > 0) {
context.beginPath();
for (var t = time - (time % chartOptions.grid.millisPerLine);
t >= oldestValidTime;
t -= chartOptions.grid.millisPerLine) {
var gx = timeToXPixel(t);
if (chartOptions.grid.sharpLines) {
gx -= 0.5;
}
context.moveTo(gx, 0);
context.lineTo(gx, dimensions.height);
}
context.stroke();
context.closePath();
}
// Horizontal (value) dividers.
for (var v = 1; v < chartOptions.grid.verticalSections; v++) {
var gy = Math.round(v * dimensions.height / chartOptions.grid.verticalSections);
if (chartOptions.grid.sharpLines) {
gy -= 0.5;
}
context.beginPath();
context.moveTo(0, gy);
context.lineTo(dimensions.width, gy);
context.stroke();
context.closePath();
}
// Bounding rectangle.
if (chartOptions.grid.borderVisible) {
context.beginPath();
context.strokeRect(0, 0, dimensions.width, dimensions.height);
context.closePath();
}
context.restore();
// Draw any horizontal lines...
if (chartOptions.horizontalLines && chartOptions.horizontalLines.length) {
for (var hl = 0; hl < chartOptions.horizontalLines.length; hl++) {
var line = chartOptions.horizontalLines[hl],
hly = Math.round(valueToYPixel(line.value)) - 0.5;
context.strokeStyle = line.color || '#ffffff';
context.lineWidth = line.lineWidth || 1;
context.beginPath();
context.moveTo(0, hly);
context.lineTo(dimensions.width, hly);
context.stroke();
context.closePath();
}
}
// For each data set...
for (var d = 0; d < this.seriesSet.length; d++) {
context.save();
var timeSeries = this.seriesSet[d].timeSeries,
dataSet = timeSeries.data,
seriesOptions = this.seriesSet[d].options;
// Delete old data that's moved off the left of the chart.
timeSeries.dropOldData(oldestValidTime, chartOptions.maxDataSetLength);
// Set style for this dataSet.
context.lineWidth = seriesOptions.lineWidth;
context.strokeStyle = seriesOptions.strokeStyle;
// Draw the line...
context.beginPath();
// Retain lastX, lastY for calculating the control points of bezier curves.
var firstX = 0, lastX = 0, lastY = 0;
for (var i = 0; i < dataSet.length && dataSet.length !== 1; i++) {
var x = timeToXPixel(dataSet[i][0]),
y = valueToYPixel(dataSet[i][1]);
if (i === 0) {
firstX = x;
context.moveTo(x, y);
} else {
switch (chartOptions.interpolation) {
case "linear":
case "line": {
context.lineTo(x,y);
break;
}
case "bezier":
default: {
// Great explanation of Bezier curves: http://en.wikipedia.org/wiki/Bezier_curve#Quadratic_curves
//
// Assuming A was the last point in the line plotted and B is the new point,
// we draw a curve with control points P and Q as below.
//
// A---P
// |
// |
// |
// Q---B
//
// Importantly, A and P are at the same y coordinate, as are B and Q. This is
// so adjacent curves appear to flow as one.
//
context.bezierCurveTo( // startPoint (A) is implicit from last iteration of loop
Math.round((lastX + x) / 2), lastY, // controlPoint1 (P)
                  Math.round((lastX + x) / 2), y, // controlPoint2 (Q)
x, y); // endPoint (B)
break;
}
case "step": {
context.lineTo(x,lastY);
context.lineTo(x,y);
break;
}
}
}
lastX = x; lastY = y;
}
if (dataSet.length > 1) {
if (seriesOptions.fillStyle) {
// Close up the fill region.
context.lineTo(dimensions.width + seriesOptions.lineWidth + 1, lastY);
context.lineTo(dimensions.width + seriesOptions.lineWidth + 1, dimensions.height + seriesOptions.lineWidth + 1);
context.lineTo(firstX, dimensions.height + seriesOptions.lineWidth);
context.fillStyle = seriesOptions.fillStyle;
context.fill();
}
if (seriesOptions.strokeStyle && seriesOptions.strokeStyle !== 'none') {
context.stroke();
}
context.closePath();
}
context.restore();
}
// Draw the axis values on the chart.
if (!chartOptions.labels.disabled && !isNaN(this.valueRange.min) && !isNaN(this.valueRange.max)) {
var maxValueString = chartOptions.yMaxFormatter(this.valueRange.max, chartOptions.labels.precision),
minValueString = chartOptions.yMinFormatter(this.valueRange.min, chartOptions.labels.precision),
labelPos = chartOptions.scrollBackwards ? 0 : dimensions.width - context.measureText(maxValueString).width - 2;
context.fillStyle = chartOptions.labels.fillStyle;
context.fillText(maxValueString, labelPos, chartOptions.labels.fontSize);
context.fillText(minValueString, labelPos, dimensions.height - 2);
}
// Display timestamps along x-axis at the bottom of the chart.
if (chartOptions.timestampFormatter && chartOptions.grid.millisPerLine > 0) {
var textUntilX = chartOptions.scrollBackwards
? context.measureText(minValueString).width
: dimensions.width - context.measureText(minValueString).width + 4;
for (var t = time - (time % chartOptions.grid.millisPerLine);
t >= oldestValidTime;
t -= chartOptions.grid.millisPerLine) {
var gx = timeToXPixel(t);
// Only draw the timestamp if it won't overlap with the previously drawn one.
if ((!chartOptions.scrollBackwards && gx < textUntilX) || (chartOptions.scrollBackwards && gx > textUntilX)) {
// Formats the timestamp based on user specified formatting function
// SmoothieChart.timeFormatter function above is one such formatting option
var tx = new Date(t),
ts = chartOptions.timestampFormatter(tx),
tsWidth = context.measureText(ts).width;
textUntilX = chartOptions.scrollBackwards
? gx + tsWidth + 2
: gx - tsWidth - 2;
context.fillStyle = chartOptions.labels.fillStyle;
if(chartOptions.scrollBackwards) {
context.fillText(ts, gx, dimensions.height - 2);
} else {
context.fillText(ts, gx - tsWidth, dimensions.height - 2);
}
}
}
}
context.restore(); // See .save() above.
};
// Sample timestamp formatting function
SmoothieChart.timeFormatter = function(date) {
function pad2(number) { return (number < 10 ? '0' : '') + number }
return pad2(date.getHours()) + ':' + pad2(date.getMinutes()) + ':' + pad2(date.getSeconds());
};
exports.TimeSeries = TimeSeries;
exports.SmoothieChart = SmoothieChart;
})(typeof exports === 'undefined' ? this : exports);
|
# -*- coding: utf-8 -*-
# File: training.py
import copy
import pprint
import re
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
import six
import tensorflow as tf
from ..compat import tfv1
from ..tfutils.common import get_tf_version_tuple
from ..tfutils.gradproc import ScaleGradient
from ..tfutils.tower import TrainTowerContext
from ..utils import logger
from ..utils.develop import HIDE_DOC
from .utils import (
GradientPacker, LeastLoadedDeviceSetter, aggregate_grads, allreduce_grads, allreduce_grads_hierarchical,
merge_grad_list, override_to_local_variable, split_grad_list)
__all__ = ["DataParallelBuilder"]
@six.add_metaclass(ABCMeta)
class GraphBuilder(object):
@abstractmethod
def build(*args, **kwargs):
pass
@contextmanager
def _maybe_reuse_vs(reuse):
if reuse:
with tf.variable_scope(tf.get_variable_scope(), reuse=True):
yield
else:
yield
class DataParallelBuilder(GraphBuilder):
def __init__(self, towers):
"""
Args:
towers(list[int]): list of GPU ids.
"""
if len(towers) > 1:
logger.info("[DataParallel] Training a model of {} towers.".format(len(towers)))
if not tf.test.is_built_with_cuda():
logger.error("[DataParallel] TensorFlow was not built with CUDA support!")
self.towers = towers
@staticmethod
def _check_grad_list(grad_list):
"""
Args:
grad_list: list of list of tuples, shape is Ngpu x Nvar x 2
"""
nvars = [len(k) for k in grad_list]
def basename(x):
return re.sub('tower[0-9]+/', '', x.op.name)
if len(set(nvars)) != 1:
names_per_gpu = [{basename(k[1]) for k in grad_and_vars} for grad_and_vars in grad_list]
inters = copy.copy(names_per_gpu[0])
for s in names_per_gpu:
inters &= s
for s in names_per_gpu:
s -= inters
logger.error("Unique trainable variables on towers: " + pprint.pformat(names_per_gpu))
raise ValueError("Number of gradients from each tower is different! " + str(nvars))
@staticmethod
def call_for_each_tower(
towers, func, devices=None, use_vs=None):
"""
Run `func` on all GPUs (towers) and return the results.
Args:
towers (list[int]): a list of GPU id.
func: a lambda to be called inside each tower
devices: a list of devices to be used. By default will use '/gpu:{tower}'
use_vs (list[bool]): list of use_vs to passed to TowerContext
Returns:
List of outputs of ``func``, evaluated on each tower.
"""
ret = []
if devices is not None:
assert len(devices) == len(towers)
if use_vs is not None:
assert len(use_vs) == len(towers)
tower_names = ['tower{}'.format(idx) for idx in range(len(towers))]
for idx, t in enumerate(towers):
device = devices[idx] if devices is not None else '/gpu:{}'.format(t)
usevs = use_vs[idx] if use_vs is not None else False
reuse = not usevs and idx > 0
with tfv1.device(device), _maybe_reuse_vs(reuse), TrainTowerContext(
tower_names[idx],
vs_name=tower_names[idx] if usevs else '',
index=idx, total=len(towers)):
if len(str(device)) < 10: # a device function doesn't have good string description
logger.info("Building graph for training tower {} on device {} ...".format(idx, device))
else:
logger.info("Building graph for training tower {} ...".format(idx))
# When use_vs is True, use LOCAL_VARIABLES,
# so these duplicated variables won't be saved by default.
with override_to_local_variable(enable=usevs):
ret.append(func())
return ret
@staticmethod
@HIDE_DOC
def build_on_towers(*args, **kwargs):
return DataParallelBuilder.call_for_each_tower(*args, **kwargs)
class SyncMultiGPUParameterServerBuilder(DataParallelBuilder):
"""
Data-parallel training in 'ParameterServer' mode.
It builds one tower on each GPU with
shared variable scope. It synchronizes the gradients computed
from each tower, averages them and applies to the shared variables.
It is an equivalent of ``--variable_update=parameter_server`` in
`tensorflow/benchmarks <https://github.com/tensorflow/benchmarks>`_.
"""
def __init__(self, towers, ps_device):
"""
Args:
towers(list[int]): list of GPU id
ps_device (str): either 'gpu' or 'cpu', where variables are stored.
"""
super(SyncMultiGPUParameterServerBuilder, self).__init__(towers)
assert ps_device in ['cpu', 'gpu']
self.ps_device = ps_device
def call_for_each_tower(self, tower_fn):
"""
Call the function `tower_fn` under :class:`TowerContext` for each tower.
Returns:
a list, contains the return values of `tower_fn` on each tower.
"""
raw_devices = ['/gpu:{}'.format(k) for k in self.towers]
if self.ps_device == 'gpu':
devices = [LeastLoadedDeviceSetter(d, raw_devices) for d in raw_devices]
else:
devices = [tf.train.replica_device_setter(
worker_device=d, ps_device='/cpu:0', ps_tasks=1) for d in raw_devices]
return DataParallelBuilder.build_on_towers(self.towers, tower_fn, devices)
def build(self, grad_list, get_opt_fn):
"""
Reduce the gradients, apply them with the optimizer,
and set self.grads to a list of (g, v), containing the averaged gradients.
Args:
grad_list ([[(grad, var), ...], ...]): #GPU lists to be reduced. Each is the gradients computed on each GPU.
get_opt_fn (-> tf.train.Optimizer): callable which returns an optimizer
Returns:
tf.Operation: the training op
"""
assert len(grad_list) == len(self.towers)
DataParallelBuilder._check_grad_list(grad_list)
# debug tower performance (without update):
# ops = [k[0] for k in grad_list[1]] + [k[0] for k in grad_list[0]]
# self.train_op = tf.group(*ops)
# return
self.grads = aggregate_grads(grad_list, colocation=True)
# grads = grad_list[0]
opt = get_opt_fn()
if self.ps_device == 'cpu':
with tf.device('/cpu:0'):
train_op = opt.apply_gradients(self.grads, name='train_op')
else:
train_op = opt.apply_gradients(self.grads, name='train_op')
return train_op
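# Usage sketch (normally driven by a tensorpack Trainer; tower_fn and get_opt_fn
# are user-supplied callables, shown here only to illustrate the call order):
#   builder = SyncMultiGPUParameterServerBuilder(towers=[0, 1], ps_device='cpu')
#   grad_list = builder.call_for_each_tower(tower_fn)  # tower_fn -> [(grad, var), ...]
#   train_op = builder.build(grad_list, get_opt_fn)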
class SyncMultiGPUReplicatedBuilder(DataParallelBuilder):
"""
Data-parallel training in "replicated" mode,
where each GPU contains a replicate of the whole model.
It will build one tower on each GPU under its own variable scope.
    Each gradient update is averaged or summed across all GPUs through NCCL.
It is an equivalent of ``--variable_update=replicated`` in
`tensorflow/benchmarks <https://github.com/tensorflow/benchmarks>`_.
"""
def __init__(self, towers, average, mode):
super(SyncMultiGPUReplicatedBuilder, self).__init__(towers)
self._average = average
assert mode in ['nccl', 'cpu', 'hierarchical'], mode
self._mode = mode
if self._mode == 'hierarchical' and len(towers) != 8:
logger.warn("mode='hierarchical' require >= 8 GPUs. Fallback to mode='nccl'.")
self._mode = 'nccl'
def call_for_each_tower(self, tower_fn):
"""
Call the function `tower_fn` under :class:`TowerContext` for each tower.
Returns:
a list, contains the return values of `tower_fn` on each tower.
"""
# if tower_fn returns [(grad, var), ...], this returns #GPU x #VAR x 2
return DataParallelBuilder.build_on_towers(
self.towers,
tower_fn,
# use no variable scope for the first tower
use_vs=[False] + [True] * (len(self.towers) - 1))
def build(self, grad_list, get_opt_fn):
"""
Reduce the gradients, apply them with the optimizer,
and set self.grads to #GPU number of lists of (g, v), containing the all-reduced gradients on each device.
Args:
grad_list ([[(grad, var), ...], ...]): #GPU lists to be reduced. Each is the gradients computed on each GPU.
get_opt_fn (-> tf.train.Optimizer): callable which returns an optimizer
Returns:
(tf.Operation, tf.Operation)
1. the training op.
            2. the op which syncs variables from GPU 0 to the other GPUs.
                It must be run once before training starts, and can optionally be
                run again later to sync non-trainable variables.
"""
assert len(grad_list) == len(self.towers)
raw_devices = ['/gpu:{}'.format(k) for k in self.towers]
DataParallelBuilder._check_grad_list(grad_list)
dtypes = {x[0].dtype.base_dtype for x in grad_list[0]}
dtypes_nccl_supported = [tf.float32, tf.float64]
if get_tf_version_tuple() >= (1, 8):
dtypes_nccl_supported.append(tf.float16)
valid_for_nccl = all(k in dtypes_nccl_supported for k in dtypes)
if self._mode == 'nccl' and not valid_for_nccl:
logger.warn("Cannot use mode='nccl' because some gradients have unsupported types. Fallback to mode='cpu'")
self._mode = 'cpu'
if self._mode in ['nccl', 'hierarchical']:
all_grads, all_vars = split_grad_list(grad_list)
# use allreduce from tf-benchmarks
# from .batch_allreduce import AllReduceSpecAlgorithm
# algo = AllReduceSpecAlgorithm('nccl', list(range(8)), 0, 10)
# all_grads, warmup_ops = algo.batch_all_reduce(all_grads, 1, True, False)
# print("WARMUP OPS", warmup_ops)
if self._mode == 'nccl':
all_grads = allreduce_grads(all_grads, average=self._average) # #gpu x #param
else:
packer = GradientPacker(len(raw_devices))
succ = packer.compute_strategy(all_grads[0])
if succ:
packed_grads = packer.pack_all(all_grads, raw_devices)
packed_grads_aggr = allreduce_grads_hierarchical(
packed_grads, raw_devices, average=self._average)
all_grads = packer.unpack_all(packed_grads_aggr, raw_devices)
else:
all_grads = allreduce_grads_hierarchical(all_grads, raw_devices, average=self._average)
self.grads = merge_grad_list(all_grads, all_vars)
elif self._mode == 'cpu':
agg_grad_and_vars = aggregate_grads(
grad_list, colocation=False,
devices=['/cpu:0'], average=self._average) # #param x 2
self.grads = [] # #gpu x #param x 2
for grad_and_vars in grad_list: # grad_and_vars: #paramx2
# take v from each tower, and g from average.
self.grads.append(
[(g, v) for (_, v), (g, _) in zip(grad_and_vars, agg_grad_and_vars)])
train_ops = []
opt = get_opt_fn()
with tf.name_scope('apply_gradients'):
for idx, grad_and_vars in enumerate(self.grads):
with tf.device(raw_devices[idx]):
# apply_gradients may create variables. Make them LOCAL_VARIABLES
with override_to_local_variable(enable=idx > 0):
train_ops.append(opt.apply_gradients(
grad_and_vars, name='apply_grad_{}'.format(idx)))
train_op = tf.group(*train_ops, name='train_op')
if len(self.towers) > 1:
with tf.name_scope('sync_variables'):
post_init_op = SyncMultiGPUReplicatedBuilder.get_post_init_ops()
else:
post_init_op = None
return train_op, post_init_op
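    # Usage sketch (hypothetical driver code; tower_fn and get_opt_fn are
    # user-supplied):
    #   builder = SyncMultiGPUReplicatedBuilder(towers=[0, 1], average=True, mode='nccl')
    #   grad_list = builder.call_for_each_tower(tower_fn)
    #   train_op, post_init_op = builder.build(grad_list, get_opt_fn)
    #   # run post_init_op once before training to copy tower0 variables to the rest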
# Adopt from https://github.com/tensorflow/benchmarks/blob/master/scripts/tf_cnn_benchmarks/variable_mgr.py
@staticmethod
def get_post_init_ops():
"""
Copy values of variables on GPU 0 to other GPUs.
"""
# literally all variables, because it's better to sync optimizer-internal variables as well
all_vars = tf.global_variables() + tf.local_variables()
var_by_name = {v.name: v for v in all_vars}
trainable_names = {x.name for x in tf.trainable_variables()}
post_init_ops = []
def log_failure(name, reason):
logger.warn("[ReplicatedTrainer] Do not know how to sync variable '{}' across GPUs. "
"Reason: {} ".format(name, reason))
assert name not in trainable_names, \
"The aforementioned variable is trainable, so this is probably a fatal error."
logger.warn(
"[ReplicatedTrainer] This variable is non-trainable. "
"Ignore this warning if you know it's OK to leave it out-of-sync.")
for v in all_vars:
if not v.name.startswith('tower'):
continue
if v.name.startswith('tower0'):
# in this trainer, the master name doesn't have the towerx/ prefix
log_failure(v.name, "Name should not have prefix 'tower0' in this trainer!")
continue # TODO some vars (EMA) may still startswith tower0
split_name = v.name.split('/')
prefix = split_name[0]
realname = '/'.join(split_name[1:])
if prefix in realname:
log_failure(v.name, "Prefix {} appears multiple times in its name!".format(prefix))
continue
copy_from = var_by_name.get(realname)
if copy_from is not None:
post_init_ops.append(v.assign(copy_from.read_value()))
else:
log_failure(v.name, "Cannot find {} in the graph!".format(realname))
logger.info(
"'sync_variables_from_main_tower' includes {} operations.".format(len(post_init_ops)))
return tf.group(*post_init_ops, name='sync_variables_from_main_tower')
class AsyncMultiGPUBuilder(DataParallelBuilder):
"""
Data-parallel training with async update.
It builds one tower on each GPU with shared variable scope.
Every tower computes the gradients and independently applies them to the
variables, without synchronizing and averaging across towers.
"""
def __init__(self, towers, scale_gradient=True):
"""
Args:
towers(list[int]): list of GPU ids.
scale_gradient (bool): if True, will scale each gradient by ``1.0/nr_gpu``.
"""
super(AsyncMultiGPUBuilder, self).__init__(towers)
self._scale_gradient = scale_gradient
def call_for_each_tower(self, tower_fn):
"""
Call the function `tower_fn` under :class:`TowerContext` for each tower.
Returns:
a list, contains the return values of `tower_fn` on each tower.
"""
ps_device = 'cpu' if len(self.towers) >= 4 else 'gpu'
raw_devices = ['/gpu:{}'.format(k) for k in self.towers]
if ps_device == 'gpu':
devices = [LeastLoadedDeviceSetter(d, raw_devices) for d in raw_devices]
else:
devices = [tf.train.replica_device_setter(
worker_device=d, ps_device='/cpu:0', ps_tasks=1) for d in raw_devices]
return DataParallelBuilder.build_on_towers(self.towers, tower_fn, devices)
def build(self, grad_list, get_opt_fn):
"""
Args:
grad_list ([[(grad, var), ...], ...]): #GPU lists to be reduced. Each is the gradients computed on each GPU.
get_opt_fn (-> tf.train.Optimizer): callable which returns an optimizer
Returns:
tf.Operation: the training op
"""
assert len(grad_list) == len(self.towers)
DataParallelBuilder._check_grad_list(grad_list)
if self._scale_gradient and len(self.towers) > 1:
# pretend to average the grads, in order to make async and
# sync have consistent effective learning rate
gradproc = ScaleGradient(('.*', 1.0 / len(self.towers)), verbose=False)
grad_list = [gradproc.process(gv) for gv in grad_list]
# Ngpu x Nvar x 2
train_ops = []
opt = get_opt_fn()
with tf.name_scope('async_apply_gradients'):
for i, grad_and_vars in enumerate(zip(*grad_list)):
# Ngpu x 2
v = grad_and_vars[0][1]
with tf.device(v.device):
# will call apply_gradients (therefore gradproc) multiple times
train_ops.append(opt.apply_gradients(
grad_and_vars, name='apply_grad_{}'.format(i)))
return tf.group(*train_ops, name='train_op')
|
import { SET_TWEETS } from './feedActions'
const initialState = {
tweets: []
}
export default function feed (state = initialState, action) {
switch (action.type) {
    case SET_TWEETS: {
      const nextState = Object.assign({}, state, { tweets: action.tweets })
      return nextState
    }
default:
return state
}
}
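// Usage sketch (assumes this reducer is mounted at the root of a Redux store):
//   store.dispatch({ type: SET_TWEETS, tweets: [{ id: 1, text: 'hi' }] })
//   store.getState().tweets // -> [{ id: 1, text: 'hi' }]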
|
/* global describe beforeEach it */
const {expect} = require('chai')
const request = require('supertest')
const db = require('../db')
const app = require('../index')
const User = db.model('user')
describe('User routes', () => {
beforeEach(() => {
return db.sync({force: true})
})
  describe('/api/users', () => {
const userEmail = '[email protected]'
const userFirst = 'Jasmine'
const userLast = 'Brown'
beforeEach(() => {
return User.create({
email: userEmail,
firstName: userFirst,
lastName: userLast,
isAdmin: false
})
})
xit('GET /api/users', async () => {
const res = await request(app)
.get('/api/users')
.expect(200)
expect(res.body).to.be.an('array')
expect(res.body[0].email).to.be.equal(userEmail)
})
}) // end describe('/api/users')
}) // end describe('User routes')
|
# -*- coding: utf-8 -*-
'''
Connection module for Amazon SQS
.. versionadded:: 2014.7.0
:configuration: This module accepts explicit sqs credentials but can also utilize
IAM roles assigned to the instance through Instance Profiles. Dynamic
credentials are then automatically obtained from AWS API and no further
configuration is necessary. More information available at:
.. code-block:: text
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
If IAM roles are not used you need to specify them either in a pillar or
in the minion's config file:
.. code-block:: yaml
sqs.keyid: GKTADJGHEIQSXMKKRBJ08H
sqs.key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
A region may also be specified in the configuration:
.. code-block:: yaml
sqs.region: us-east-1
If a region is not specified, the default is us-east-1.
It's also possible to specify key, keyid and region via a profile, either
as a passed in dict, or as a string to pull from pillars or minion config:
.. code-block:: yaml
myprofile:
keyid: GKTADJGHEIQSXMKKRBJ08H
key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs
region: us-east-1
:depends: boto3
'''
# keep lint from choking on _get_conn and _cache_id
# pylint: disable=E0602
from __future__ import absolute_import
# Import Python libs
import logging
import json
# Import Salt libs
import salt.ext.six as six
from salt.ext.six import string_types
from salt.ext.six.moves.urllib.parse import urlparse as _urlparse # pylint: disable=import-error,no-name-in-module
log = logging.getLogger(__name__)
__func_alias__ = {
'list_': 'list',
}
# Import third party libs
try:
# pylint: disable=unused-import
import boto3
import botocore
# pylint: enable=unused-import
logging.getLogger('boto3').setLevel(logging.CRITICAL)
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
def __virtual__():
'''
Only load if boto3 libraries exist.
'''
if not HAS_BOTO3:
return (False, 'The boto_sqs module could not be loaded: boto3 libraries not found')
__utils__['boto3.assign_funcs'](__name__, 'sqs', pack=__salt__)
return True
def _preprocess_attributes(attributes):
'''
Pre-process incoming queue attributes before setting them
'''
if isinstance(attributes, string_types):
attributes = json.loads(attributes)
def stringified(val):
# Some attributes take full json policy documents, but they take them
# as json strings. Convert the value back into a json string.
if isinstance(val, dict):
return json.dumps(val)
return val
return dict(
(attr, stringified(val)) for attr, val in six.iteritems(attributes)
)
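# Example of the stringification performed above (hypothetical policy value):
#   _preprocess_attributes({'Policy': {'Version': '2012-10-17'}})
#   # -> {'Policy': '{"Version": "2012-10-17"}'}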
def exists(name, region=None, key=None, keyid=None, profile=None):
'''
Check to see if a queue exists.
CLI Example:
.. code-block:: bash
salt myminion boto_sqs.exists myqueue region=us-east-1
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
conn.get_queue_url(QueueName=name)
except botocore.exceptions.ClientError as e:
missing_code = 'AWS.SimpleQueueService.NonExistentQueue'
if e.response.get('Error', {}).get('Code') == missing_code:
return {'result': False}
return {'error': __utils__['boto3.get_error'](e)}
return {'result': True}
def create(
name,
attributes=None,
region=None,
key=None,
keyid=None,
profile=None,
):
'''
Create an SQS queue.
CLI Example:
.. code-block:: bash
salt myminion boto_sqs.create myqueue region=us-east-1
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
if attributes is None:
attributes = {}
attributes = _preprocess_attributes(attributes)
try:
conn.create_queue(QueueName=name, Attributes=attributes)
except botocore.exceptions.ClientError as e:
return {'error': __utils__['boto3.get_error'](e)}
return {'result': True}
def delete(name, region=None, key=None, keyid=None, profile=None):
'''
Delete an SQS queue.
CLI Example:
.. code-block:: bash
salt myminion boto_sqs.delete myqueue region=us-east-1
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
url = conn.get_queue_url(QueueName=name)['QueueUrl']
conn.delete_queue(QueueUrl=url)
except botocore.exceptions.ClientError as e:
return {'error': __utils__['boto3.get_error'](e)}
return {'result': True}
def list_(prefix='', region=None, key=None, keyid=None, profile=None):
'''
Return a list of the names of all visible queues.
.. versionadded:: 2016.11.0
CLI Example:
.. code-block:: bash
salt myminion boto_sqs.list region=us-east-1
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
def extract_name(queue_url):
# Note: this logic taken from boto, so should be safe
return _urlparse(queue_url).path.split('/')[2]
try:
r = conn.list_queues(QueueNamePrefix=prefix)
# The 'QueueUrls' attribute is missing if there are no queues
urls = r.get('QueueUrls', [])
return {'result': [extract_name(url) for url in urls]}
except botocore.exceptions.ClientError as e:
return {'error': __utils__['boto3.get_error'](e)}
def get_attributes(name, region=None, key=None, keyid=None, profile=None):
'''
Return attributes currently set on an SQS queue.
CLI Example:
.. code-block:: bash
salt myminion boto_sqs.get_attributes myqueue
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
try:
url = conn.get_queue_url(QueueName=name)['QueueUrl']
r = conn.get_queue_attributes(QueueUrl=url, AttributeNames=['All'])
return {'result': r['Attributes']}
except botocore.exceptions.ClientError as e:
return {'error': __utils__['boto3.get_error'](e)}
def set_attributes(
name,
attributes,
region=None,
key=None,
keyid=None,
profile=None,
):
'''
Set attributes on an SQS queue.
CLI Example:
.. code-block:: bash
salt myminion boto_sqs.set_attributes myqueue '{ReceiveMessageWaitTimeSeconds: 20}' region=us-east-1
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
attributes = _preprocess_attributes(attributes)
try:
url = conn.get_queue_url(QueueName=name)['QueueUrl']
conn.set_queue_attributes(QueueUrl=url, Attributes=attributes)
except botocore.exceptions.ClientError as e:
return {'error': __utils__['boto3.get_error'](e)}
return {'result': True}
|
function test() {
this.data = 10;
this.data2 = [];
this.context.data = 10;
this.context.data2 = [];
}
angular.module("synergy.handlers", ["synergy.utils"])
.factory("SynergyHandlers", ["SynergyUtils", function (SynergyUtils) {
var Synergy = {control: {}};
Synergy.control.ArchiveDataFilter = function (allData) {
var filteredAssignments = {};
this.allData.issues = []; // resolved as unknown global variable
var self = this;
};
return Synergy.control;
}]); |
import Resolver from 'ember-resolver';
Resolver.reopen({
// eslint-disable-next-line ember/avoid-leaking-state-in-ember-objects
pluralizedTypes: {
ability: 'abilities'
}
});
export function initialize(/* application */) {}
export default { initialize };
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' This module exposes geometry data for the United States. It exposes a
dictionary ``data``, which is indexed by the two-tuples:
.. code-block:: python
(state_id, county_id)
that have the following dictionaries as the associated value:
.. code-block:: python
data[(1,1)]['name']
data[(1,1)]['state']
data[(1,1)]['detailed name']
data[(1,1)]['lats']
data[(1,1)]['lons']
Entries for ``'name'`` can have duplicates for certain states (e.g. Virginia).
The combination of ``'detailed name'`` and ``'state'`` will always be unique.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import csv
import xml.etree.ElementTree as et
# Bokeh imports
from ..util.sampledata import external_path, open_csv
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'data',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
def _read_data():
    '''
    Read US_Counties.csv and build the ``data`` dictionary described in the
    module docstring.
    '''
nan = float('NaN')
data = {}
with open_csv(external_path('US_Counties.csv')) as f:
next(f)
reader = csv.reader(f, delimiter=",", quotechar='"')
for row in reader:
name, dummy, state, dummy, geometry, dummy, dummy, dummy, det_name, state_id, county_id, dummy, dummy = row
xml = et.fromstring(geometry)
lats = []
lons = []
for i, poly in enumerate(xml.findall('.//outerBoundaryIs/LinearRing/coordinates')):
if i > 0:
lats.append(nan)
lons.append(nan)
coords = (c.split(',')[:2] for c in poly.text.split())
lat, lon = list(zip(*[(float(lat), float(lon)) for lon, lat in
coords]))
lats.extend(lat)
lons.extend(lon)
data[(int(state_id), int(county_id))] = {
'name' : name,
'detailed name' : det_name,
'state' : state,
'lats' : lats,
'lons' : lons,
}
return data
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
data = _read_data()
|
from __future__ import print_function
import argparse
import random
import torch
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
from torch.autograd import Variable
import numpy as np
from warpctc_pytorch import CTCLoss
import os
import utils
import dataset
import models.crnn as crnn
parser = argparse.ArgumentParser()
parser.add_argument('--trainroot', required=True, help='path to dataset')
parser.add_argument('--valroot', required=True, help='path to dataset')
parser.add_argument('--workers', type=int, help='number of data loading workers', default=2)
parser.add_argument('--batchSize', type=int, default=64, help='input batch size')
parser.add_argument('--imgH', type=int, default=64, help='the height / width of the input image to network')
parser.add_argument('--nh', type=int, default=100, help='size of the lstm hidden state')
parser.add_argument('--niter', type=int, default=25, help='number of epochs to train for')
parser.add_argument('--lr', type=float, default=1, help='learning rate, default=1')
parser.add_argument('--beta1', type=float, default=0.5, help='beta1 for adam. default=0.5')
parser.add_argument('--cuda', action='store_true', help='enables cuda')
parser.add_argument('--ngpu', type=int, default=1, help='number of GPUs to use')
parser.add_argument('--crnn', default='', help="path to crnn (to continue training)")
parser.add_argument('--alphabet', type=str, default='0123456789abcdefghijklmnopqrstuvwxyz')
parser.add_argument('--Diters', type=int, default=5, help='number of D iters per each G iter')
parser.add_argument('--experiment', default=None, help='Where to store samples and models')
parser.add_argument('--displayInterval', type=int, default=500, help='Interval to be displayed')
parser.add_argument('--n_test_disp', type=int, default=10, help='Number of samples to display when test')
parser.add_argument('--valInterval', type=int, default=500, help='Interval to be displayed')
parser.add_argument('--saveInterval', type=int, default=500, help='Interval to be displayed')
parser.add_argument('--adam', action='store_true', help='Whether to use adam (default is rmsprop)')
parser.add_argument('--adadelta', action='store_true', help='Whether to use adadelta (default is rmsprop)')
parser.add_argument('--keep_ratio', action='store_true', help='whether to keep ratio for image resize')
parser.add_argument('--random_sample', action='store_true', help='whether to sample the dataset with random sampler')
opt = parser.parse_args()
print(opt)
if opt.experiment is None:
opt.experiment = 'samples'
os.system('mkdir {0}'.format(opt.experiment))
opt.manualSeed = random.randint(1, 10000) # fix seed
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
np.random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)
cudnn.benchmark = True
if torch.cuda.is_available() and not opt.cuda:
print("WARNING: You have a CUDA device, so you should probably run with --cuda")
train_dataset = dataset.lmdbDataset(root=opt.trainroot)
assert train_dataset
if not opt.random_sample:
sampler = dataset.randomSequentialSampler(train_dataset, opt.batchSize)
else:
sampler = None
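# NOTE: newer PyTorch versions reject shuffle=True together with a custom
# sampler; if the DataLoader below raises, use shuffle=(sampler is None).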
train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=opt.batchSize,
shuffle=True, sampler=sampler,
num_workers=int(opt.workers),
collate_fn=dataset.alignCollate(imgH=opt.imgH, keep_ratio=opt.keep_ratio))
test_dataset = dataset.lmdbDataset(
root=opt.valroot, transform=dataset.resizeNormalize((100, 32)))
ngpu = int(opt.ngpu)
nh = int(opt.nh)
alphabet = opt.alphabet
nclass = len(alphabet) + 1
nc = 1
converter = utils.strLabelConverter(alphabet)
criterion = CTCLoss()
# custom weights initialization called on crnn
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data.normal_(0.0, 0.02)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
crnn = crnn.CRNN(opt.imgH, nc, nclass, nh, ngpu)
crnn.apply(weights_init)
if opt.crnn != '':
print('loading pretrained model from %s' % opt.crnn)
crnn.load_state_dict(torch.load(opt.crnn))
print(crnn)
image = torch.FloatTensor(opt.batchSize, 3, opt.imgH, opt.imgH)
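# The shapes here are only initial allocations; utils.loadData copies (and
# resizes to) each actual batch.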
text = torch.IntTensor(opt.batchSize * 5)
length = torch.IntTensor(opt.batchSize)
if opt.cuda:
crnn.cuda()
image = image.cuda()
criterion = criterion.cuda()
image = Variable(image)
text = Variable(text)
length = Variable(length)
# loss averager
loss_avg = utils.averager()
# setup optimizer
if opt.adam:
optimizer = optim.Adam(crnn.parameters(), lr=opt.lr,
betas=(opt.beta1, 0.999))
elif opt.adadelta:
optimizer = optim.Adadelta(crnn.parameters(), lr=opt.lr)
else:
optimizer = optim.RMSprop(crnn.parameters(), lr=opt.lr)
def val(net, dataset, criterion, max_iter=100):
print('Start val')
    for p in net.parameters():
p.requires_grad = False
net.eval()
data_loader = torch.utils.data.DataLoader(
dataset, shuffle=True, batch_size=opt.batchSize, num_workers=int(opt.workers))
val_iter = iter(data_loader)
i = 0
n_correct = 0
loss_avg = utils.averager()
    for i in range(min(max_iter, len(data_loader))):
        data = next(val_iter)
cpu_images, cpu_texts = data
batch_size = cpu_images.size(0)
utils.loadData(image, cpu_images)
t, l = converter.encode(cpu_texts)
utils.loadData(text, t)
utils.loadData(length, l)
        preds = net(image)
preds_size = Variable(torch.IntTensor([preds.size(0)] * batch_size))
cost = criterion(preds, text, preds_size, length) / batch_size
loss_avg.add(cost)
_, preds = preds.max(2)
preds = preds.squeeze(2)
preds = preds.transpose(1, 0).contiguous().view(-1)
sim_preds = converter.decode(preds.data, preds_size.data, raw=False)
for pred, target in zip(sim_preds, cpu_texts):
if pred == target.lower():
n_correct += 1
raw_preds = converter.decode(preds.data, preds_size.data, raw=True)
for raw_pred, pred, gt in zip(raw_preds, sim_preds, cpu_texts):
print('%-20s => %-20s, gt: %-20s' % (raw_pred, pred, gt))
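    # NOTE: this denominator assumes max_iter full batches; the final batch may be smaller.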
accuracy = n_correct / float(max_iter * opt.batchSize)
print('Test loss: %f, accuray: %f' % (loss_avg.val(), accuracy))
def trainBatch(net, criterion, optimizer):
    data = next(train_iter)
cpu_images, cpu_texts = data
batch_size = cpu_images.size(0)
utils.loadData(image, cpu_images)
t, l = converter.encode(cpu_texts)
utils.loadData(text, t)
utils.loadData(length, l)
    preds = net(image)
preds_size = Variable(torch.IntTensor([preds.size(0)] * batch_size))
cost = criterion(preds, text, preds_size, length) / batch_size
    net.zero_grad()
cost.backward()
optimizer.step()
return cost
for epoch in range(opt.niter):
train_iter = iter(train_loader)
i = 0
while i < len(train_loader):
for p in crnn.parameters():
p.requires_grad = True
crnn.train()
cost = trainBatch(crnn, criterion, optimizer)
loss_avg.add(cost)
i += 1
if i % opt.displayInterval == 0:
print('[%d/%d][%d/%d] Loss: %f' %
(epoch, opt.niter, i, len(train_loader), loss_avg.val()))
loss_avg.reset()
if i % opt.valInterval == 0:
val(crnn, test_dataset, criterion)
# do checkpointing
if i % opt.saveInterval == 0:
torch.save(
crnn.state_dict(), '{0}/netCRNN_{1}_{2}.pth'.format(opt.experiment, epoch, i))
|
# -*- coding: utf-8 -*-
# @Time : 2019/11/2 21:40
# @Author : 高冷
# @FileName : 正则表达式的常用匹配字符.py
# 1. Ordinary character classes
'''
.      -- matches any character except a newline; when the re.DOTALL flag is set, it also matches newlines.
?      -- matches 0 or 1 of the preceding expression (see the quantifiers in section 2)
^      -- matches the start of the string
$      -- matches the end of the string.
[...]  -- denotes a set of characters, listed individually: [amk] matches 'a', 'm' or 'k'
[^...] -- any character not in []: [^abc] matches any character except a, b or c.
Examples:
[Pp]ython   -- matches "Python" or "python"
rub[ye]     -- matches "ruby" or "rube"
[aeiou]     -- matches any one of the letters inside the brackets
[0-9]       -- matches any digit; equivalent to [0123456789]
[a-z]       -- matches any lowercase letter
[A-Z]       -- matches any uppercase letter
[a-zA-Z0-9] -- matches any letter or digit
[^aeiou]    -- matches any character except the letters aeiou
[^0-9]      -- matches any character except digits
'''
import re
s = "etc/resolv.conf', '/etc/extlinux.conf', '/etc/mtools.conf', '/etc/"
pattern1 = r"etc"
pattern2 = r"/etc/"
print(re.findall(pattern1, s))
print(re.findall(pattern2, s))
# 2. Special character classes (how many times a character occurs)
'''
re*      -- matches 0 or more of the preceding expression.
re+      -- matches 1 or more of the preceding expression.
re?      -- matches 0 or 1 of the fragment defined by the preceding expression, non-greedy
re{n}    -- matches exactly n of the preceding expression. For example, o{2} cannot match the "o" in "Bob", but matches the two o's in "food".
re{n,}   -- matches n or more of the preceding expression. For example, o{2,} cannot match the "o" in "Bob", but matches all the o's in "foooood". "o{1,}" is equivalent to "o+"; "o{0,}" is equivalent to "o*".
re{n,m}  -- matches n to m of the fragment defined by the preceding expression, greedy
'''
# 3. Special character classes (predefined classes)
'''
\w -- matches letters, digits and the underscore, equivalent to [A-Za-z0-9_]; Chinese characters also match
\W -- matches anything but letters, digits and the underscore, [^A-Za-z0-9_]
\s -- matches any whitespace character, equivalent to [ \t\n\r\f\v]
\S -- matches any non-whitespace character, equivalent to [^ \t\n\r\f\v]
\d -- matches any digit, equivalent to [0-9]
\D -- matches any non-digit, equivalent to [^0-9]
\A -- matches the start of the string
\Z -- matches the end of the string; if a trailing newline exists, it matches just before that newline
\b -- matches a word boundary, i.e. the position between a word and a space. For example, 'er\b' matches the 'er' in "never" but not the 'er' in "verb".
\B -- matches a non-word boundary. 'er\B' matches the 'er' in "verb" but not the 'er' in "never".
'''
import re
print(re.findall(r".","hello\n"))
print(re.findall(r"\d","dhjc987"))
print(re.findall(r"\D","dhjc987"))
print(re.findall(r"\s","hello\nworld\t你好\r"))
print(re.findall(r"\S","hello\nworld\t你好\r"))
print(re.findall(r"\w","ni号123_____%^&"))
print(re.findall(r"\W","ni号123_____%^&"))
|
/* @flow */
/**
* The Inverse [Gaussian error function](http://en.wikipedia.org/wiki/Error_function)
* returns a numerical approximation to the value that would have caused
* `errorFunction()` to return x.
*
* @param {number} x value of error function
* @returns {number} estimated inverted value
*/
function inverseErrorFunction(x /*: number */) /*: number */ {
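  // The constant a = 8*(pi - 3) / (3*pi*(4 - pi)) ~ 0.1400 comes from a
  // well-known closed-form approximation of the inverse error function
  // (often attributed to Sergei Winitzki).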
const a = (8 * (Math.PI - 3)) / (3 * Math.PI * (4 - Math.PI));
const inv = Math.sqrt(
Math.sqrt(
Math.pow(2 / (Math.PI * a) + Math.log(1 - x * x) / 2, 2) -
Math.log(1 - x * x) / a
) -
(2 / (Math.PI * a) + Math.log(1 - x * x) / 2)
);
if (x >= 0) {
return inv;
} else {
return -inv;
}
}
export default inverseErrorFunction;
|
#!/usr/bin/env python3
import unittest
import edict
class TestEdict(unittest.TestCase):
def test_parse_entry(self):
raw = '大丈夫 [だいじょうぶ(P);だいじょぶ] /(adj-na) (1) safe/all right/alright/OK/okay/sure/(adv) (2) certainly/surely/undoubtedly/(n) (3) (だいじょうぶ only) (arch) (See 大丈夫・だいじょうふ) great man/fine figure of a man/(P)/EntL1414150X/\n'
should_be = "Entry(kanji=[Word(text='大丈夫')], kana=[Word(text='だいじょうぶ', mark=['common']), Word(text='だいじょぶ')], definitions=[Definition(words=[Word(text='safe'), Word(text='all right'), Word(text='alright'), Word(text='OK'), Word(text='okay'), Word(text='sure')], pos=['adjectival nouns or quasi-adjectives (keiyodoshi)']), Definition(words=[Word(text='certainly'), Word(text='surely'), Word(text='undoubtedly')], pos=['adverb (fukushi)']), Definition(words=[Word(text='(だいじょうぶ only) great man', mark=['archaism']), Word(text='fine figure of a man')], pos=['noun (common) (futsuumeishi)'])])"
entry = edict.parse_entry(raw)
self.assertEqual(str(entry), should_be)
def test_parse_edict(self):
entries = edict.parse_dictionary('edict2')
self.assertEqual(len(entries), 177086)
if __name__ == '__main__':
unittest.main()
|
let fs = require('fs');
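// Template preprocessor: replaces ~...~ placeholders in index-template.html
// with id'd <span> elements and generates js/update-display.js, the code that
// keeps those elements in sync at runtime.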
function extractCode(x) {
if (x[1] === 'f') {
return 'format(' + x.slice(3, -2) + ')';
} else if (x[1] === 'i') {
return 'formatInt(' + x.slice(3, -2) + ')';
} else if (x[1] === 'q') {
return 'formatMaybeInt(' + x.slice(3, -2) + ')';
} else if (x[1] === 'r') {
return x.slice(3, -2);
} else if (x[1] === 's') {
return 'pluralize(' + x.slice(3, -2) + ', \'\', \'s\')';
} else if (x[1] === 't') {
if (x[2] === ' ') {
return 'formatTime(' + x.slice(3, -2) + ', {seconds: {f: format, s: false}, larger: {f: format, s: false}})';
} else if (x[2] === 'i') {
return 'formatTime(' + x.slice(4, -2) + ', {seconds: {f: formatInt, s: true}, larger: {f: format, s: false}})';
} else if (x[2] === 'q') {
return 'formatTime(' + x.slice(4, -2) + ', {seconds: {f: formatMaybeInt, s: true}, larger: {f: format, s: false}})';
} else if (x[2] === 's') {
return 'formatTime(' + x.slice(4, -2) + ', {seconds: {f: formatMaybeInt, s: true}, larger: {f: formatMaybeInt, s: true}})';
}
} else if (x[1] === 'y') {
return 'pluralize(' + x.slice(3, -2) + ', \'y\', \'ies\')';
}
}
function updateDisplayOneElement(x, i) {
return 'if (shouldUpdate("e' + i + '")) {e[' + i + '].textContent = ' + extractCode(x) + '};'
}
function styleNameToJsName(x) {
if (x === 'class') {
return 'className';
}
return x.replace(/-[a-z]/g, (y) => y[1].toUpperCase());
}
function updateDisplayOneStyle(x, i) {
return x.match(/ ~[-!.a-z]+=[^~]+~/g).map(function (y, j) {
let property = y.split('=')[0].slice(2).split('.').map(styleNameToJsName).join('.');
let head = '';
if (property[0] === '!') {
property = property.slice(1);
head = '!';
}
return head + 'if (shouldUpdate("b' + i + '")) {b[' + i + '].' + property + ' = ' + y.split('=').slice(1).join('=').slice(0, -1) + '};';
});
}
function cmp(a, b) {
return (a < b) ? -1 : ((a > b) ? 1 : 0);
}
function flatten(l) {
return [].concat.apply([], l.map(i => Array.isArray(i) ? i : [i]));
}
function getUntabbed(inTabs) {
let untabbed = {};
for (let i = 0; i < el1Number; i++) {
untabbed['e' + i] = true;
}
for (let i = 0; i < el2Number; i++) {
untabbed['b' + i] = true;
}
for (let i of inTabs) {
for (let j of i[1]) {
delete untabbed[j];
}
}
return Object.keys(untabbed).sort((a, b) => (-cmp(a[0], b[0])) || cmp(+a.slice(1), +b.slice(1)));
}
function makeUpdateDisplaySetup(setupList) {
return 'let e;\nlet b;\nlet majorDivs;\nlet majorDivTable;\nlet tickMap;\n\nlet shouldUpdate = x => majorDivTable[x].every(' +
'y => {if (!(y in tickMap)) {tickMap[y] = document.getElementById(y).style.display !== "none"}; ' +
'return tickMap[y]});\n\nfunction updateDisplayPageLoadSetup() {\n e = [' +
[...Array(el1Number)].map((_, i) => 'document.getElementById("e' + i + '")').join(', ') + '];\n b = [' +
[...Array(el2Number)].map((_, i) => 'document.getElementById("b' + i + '")').join(', ') + '];\n let majorDivsOrig = ' +
'[...document.getElementsByClassName("major-div")];\n majorDivs = majorDivsOrig.map(x => x.id);\n majorDivTable = {};\n for (let i of e.concat(b)) {' +
'majorDivTable[i.id] = majorDivsOrig.filter(j => j.contains(i) && !i.contains(j)).map(j => j.id)}' +
';\n}\n\nfunction updateDisplaySaveLoadSetup() {\n' + g(setupList, ' ') + '\n}';
}
let g = (l, s) => l.map(i => s + i).join('\n');
function makeUpdateDisplay(el1CodeList, el2CodeList, setupList, inTabs) {
let f = x => [el1CodeList, el2CodeList]['eb'.indexOf(x[0])][+x.slice(1)];
let untabbed = getUntabbed(inTabs);
let setupCode = makeUpdateDisplaySetup(setupList);
let updateDisplayCode = 'function updateDisplay() {\n tickMap = {};\n' + g(flatten(untabbed.map(f)), ' ') + '\n' +
inTabs.map(x => ' if (' + x[0][0] + '[' + x[0].slice(1) + '].style.display !== "none") {\n' +
g(flatten(x[1].map(f)), ' ') + '\n }').join('\n') + '\n}';
return setupCode + '\n\n' + updateDisplayCode;
}
let el1Number = 0;
let el2Number = 0;
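// Give each element that carries ~...~ style bindings a stable "b<N>" id and
// strip the binding markup from the emitted HTML.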
function dealWithElement(x) {
return x.includes('~') ? x.replace(/^<[-a-z]+/, (y) => y + ' id="b' + el2Number++ + '"').replace(/ ~[-!.a-z]+=[^~]+~/g, '') : x;
}
let files = process.argv.length > 2 ? process.argv.slice(2) : ['index-template.html', 'index.html', 'js/update-display.js'];
let time = Date.now();
fs.readFile(files[0], 'utf8', function(err, contents) {
let contentsWithTime = contents.replace(/%time%/g, time);
let newContents = contentsWithTime.replace(
/<[-a-z]+( [-a-z]+="[^"]+"| ~[-!.a-z]+=[^~]+~)*\/?>/g, dealWithElement).replace(
/~([fiqrsy]|t[iqs]?) [^~]+ ~/g, (x) => '<span id="e' + el1Number++ + '"></span>');
let el1CodeList = (contents.match(/~([fiqrsy]|t[iqs]?) [^~]+ ~/g) || []).map(updateDisplayOneElement);
let el2CodeList = (contents.match(/<[-a-z]+( [-a-z]+="[^"]+"| ~[-!.a-z]+=[^~]+~)*\/?>/g) || []).filter(x => x.includes('~')).map(updateDisplayOneStyle);
let setupList = flatten(el2CodeList.map(x => x.filter(y => y[0] === '!').map(y => y.slice(1))));
el2CodeList = el2CodeList.map(x => x.filter(y => y[0] !== '!'));
let inTabs = (newContents.match(/<tab .*?<\/tab>/gs) || []).map(
x => x.match(/id="[be]\d+"/g).map(y => y.slice(4, -1))).map(x => [x[0], x.slice(1)]);
let updateDisplay = makeUpdateDisplay(el1CodeList, el2CodeList, setupList, inTabs);
newContents = newContents.replace(/<tab /g, '<div ').replace(/<\/tab>/g, '</div>');
if (newContents.includes('~')) {
    let index = newContents.indexOf('~');
console.log('context for first tilde:\n' + newContents.slice(Math.max(0, index - 100), index + 100));
throw new Error('tilde found in output, check above for context');
}
fs.writeFile(files[1], newContents, function (err) {console.log(err)});
fs.writeFile(files[2], updateDisplay, function (err) {console.log(err)});
});
|
const { INTEGER } = require('sequelize')
module.exports = db =>
db.define('order_item', {
hours: {
type: INTEGER,
allowNull: false,
defaultValue: 1,
validate: {
notEmpty: true,
},
},
rate: {
type: INTEGER,
allowNull: false,
validate: {
notEmpty: true,
},
},
})
module.exports.associations = (OrderItem, {Order, Developer}) => {
OrderItem.belongsTo(Order)
OrderItem.belongsTo(Developer)
}
|
import { useState, useEffect } from 'react';
import PerfectScrollbar from 'react-perfect-scrollbar';
import PropTypes from 'prop-types';
import { format } from 'date-fns';
import {
ref,
uploadBytes,
getStorage,
listAll,
getDownloadURL
} from 'firebase/storage'
import {
Avatar,
Box,
Card,
Checkbox,
Table,
TextField,
TableBody,
TableCell,
TableHead,
TablePagination,
TableRow,
Typography,
Button,
} from '@mui/material';
import { getInitials } from '../../utils/get-initials';
import EditIcon from '@mui/icons-material/Edit';
import AddIcon from '@mui/icons-material/Add';
import { collAuth } from '../data-handling/college-crud';
import Dialog from '@mui/material/Dialog';
import DialogActions from '@mui/material/DialogActions';
import DialogContent from '@mui/material/DialogContent';
import DialogContentText from '@mui/material/DialogContentText';
import DialogTitle from '@mui/material/DialogTitle';
import { useAuth } from 'src/contexts/AuthContext'
import { useFormik } from 'formik';
import * as React from 'react';
import { db } from 'src/firebase/firebase-auth'
import { storage } from 'src/firebase/firebase-auth';
import * as Yup from 'yup';
// import { FormDialog } from 'src/components/college/college-list-toolbar';
import { deleteDoc, getDocs, collection, doc, onSnapshot, query, updateDoc, where } from '@firebase/firestore';
import imageFunc from 'src/components/college/college-list-toolbar';
import { getArchivelist, setArchivelist, setArchiveDisable } from '../userModel';
export default function FormDialog(props) {
const [open, setOpen] = React.useState(false);
var id = props.id
const formik = useFormik({
initialValues: {
college_code: props.college_code,
college_description: props.college_description,
college_logo: props.college_logo,
},
    validationSchema: Yup.object({
      college_code: Yup
        .string()
        .max(100)
        .required('College Code is required'),
      college_description: Yup
        .string()
        .max(100)
        .required('Description is required'),
      college_logo: Yup
        .string()
        .max(32)
        .required('College Logo is required'),
    }),
onSubmit: async() => {
}
});
const handleClickOpen = () => {
setOpen(true);
};
const handleClose = () => {
setOpen(false);
};
  const updateCollege = async() =>{
    // avoid shadowing the function name with the update payload
    const updatedFields = {
      coll_code: formik.values.college_code,
      coll_desc: formik.values.college_description,
      coll_logo: formik.values.college_logo,
    }
    const collegeRef = doc(db, "colleges", id);
    await updateDoc(collegeRef, updatedFields)
    handleClose()
  }
return (
<div style={{display : 'inline-block'}} >
<Button
startIcon={(<EditIcon fontSize="small" />)}
variant="outlined"
sx={{ mr: 1 }}
onClick={handleClickOpen}>
Update
</Button>
<Dialog open={open}
onClose={handleClose}>
<DialogTitle
display="flex"
justifyContent="center"
>Update Data</DialogTitle>
<DialogContent>
<Avatar alt={formik.values.college_code} src={formik.values.college_logo} sx={{marginLeft:'auto', marginRight:'auto', width:200, height:200}}/>
<TextField
margin="dense"
label="College Code"
type="text"
fullWidth
variant="outlined"
onChange={formik.handleChange}
value={formik.values.college_code}
name="college_code"
/>
<TextField
margin="dense"
label="Description"
type="text"
fullWidth
variant="outlined"
onChange={formik.handleChange}
value={formik.values.college_description}
name="college_description"
/>
</DialogContent>
<DialogActions>
<Box>
<Button
color="primary"
onClick={handleClose}>Cancel
</Button>
</Box>
<Box p={2}>
<Button
color="primary"
variant='contained'
onClick={updateCollege}>Done
</Button>
</Box>
</DialogActions>
</Dialog>
</div>
);
}
export const CollegeListResults = ({ customers, ...rest }) => {
const [selectedCollegeIds, setSelectedCollegeIds] = useState([]);
const [limit, setLimit] = useState(10);
const [page, setPage] = useState(0);
const [colleges, setColleges] = useState([]);
const [indexValue, setIndexValue] = useState(0)
const [limitValue, setLimitValue] = useState(limit)
const [checkAll, setCheckAll] = useState(false)
const [imagesList, setimageList] = React.useState([]);
const imageListRef = ref(storage, "CollegeLogos/")
React.useEffect(() =>
{
listAll(imageListRef).then((response) => {
response.items.forEach((item) => {
getDownloadURL(item).then((url) => {
setimageList((prev) => [...prev, url]);
});
});
console.log(response)
});
}, []);
function allColl()
{
const q = query(collection(db, "colleges"), where("archive", "==", false));
const unsubscribe = onSnapshot(q, (querySnapshot) => {
const subs = [];
querySnapshot.forEach((doc) => {
subs.push({ ...doc.data(), id: doc.id });
});
setColleges(subs)
});
}
useEffect(() => {
allColl()
}, []);
const handleSelectAll = (event) => {
setCheckAll(!checkAll)
var newSelected = []
if(event.target.checked){
colleges.map((data)=>{
newSelected.push(data.id)
})
setSelectedCollegeIds(newSelected)
setArchivelist(newSelected)
}
else{
setSelectedCollegeIds([])
setArchivelist('')
}
};
const handleSelectOne = (event, id) => {
if(event.target.checked){
const selectedIndex = selectedCollegeIds.indexOf(id);
let newSelectedCustomerIds = [];
if (selectedIndex === -1) {
newSelectedCustomerIds = newSelectedCustomerIds.concat(selectedCollegeIds, id);
} else if (selectedIndex === 0) {
newSelectedCustomerIds = newSelectedCustomerIds.concat(selectedCollegeIds.slice(1));
} else if (selectedIndex === selectedCollegeIds.length - 1) {
newSelectedCustomerIds = newSelectedCustomerIds.concat(selectedCollegeIds.slice(0, -1));
} else if (selectedIndex > 0) {
newSelectedCustomerIds = newSelectedCustomerIds.concat(
selectedCollegeIds.slice(0, selectedIndex),
selectedCollegeIds.slice(selectedIndex + 1)
);
}
setSelectedCollegeIds(newSelectedCustomerIds);
setArchivelist(newSelectedCustomerIds)
}
else{
setSelectedCollegeIds([]);
setArchivelist('')
}
};
const handleLimitChange = (event) => {
setLimitValue(event.target.value)
setIndexValue(0)
setLimit(event.target.value);
};
const handlePageChange = (event, newPage) => {
if(page > newPage){
setIndexValue(indexValue- limit)
setLimitValue(limitValue- limit)
}else{
setIndexValue(indexValue+ limit)
setLimitValue(limitValue+ limit)
}
setPage(newPage);
};
return (
<Card {...rest}>
<PerfectScrollbar>
<Box sx={{ minWidth: 1050 }}>
<Table>
<TableHead>
<TableRow>
<TableCell padding="checkbox">
<Checkbox
checked={checkAll}
color="primary"
indeterminate={
selectedCollegeIds.length > 0
&& selectedCollegeIds.length < customers.length
}
onChange={handleSelectAll}
/>
</TableCell>
<TableCell>
Logo
</TableCell>
<TableCell>
College Code
</TableCell>
<TableCell>
Description
</TableCell>
<TableCell>
Action
</TableCell>
</TableRow>
</TableHead>
<TableBody>
{colleges.slice(indexValue, limitValue).map((college) => (
<TableRow
hover
key={college.id}
selected={selectedCollegeIds.indexOf(college.id) !== -1}
>
<TableCell padding="checkbox">
<Checkbox
checked={selectedCollegeIds.indexOf(college.id) !== -1}
onChange={(event) => handleSelectOne(event, college.id)}
value="true"
/>
</TableCell>
<TableCell>
<Avatar alt={college.coll_code} src={college.coll_logo} sx={{width:100, height:100}}/>
</TableCell>
<TableCell>
<Box
sx={{
alignItems: 'center',
display: 'flex'
}}
>
{/* <Avatar
src={customer.avatarUrl}
sx={{ mr: 2 }}
>
{getInitials(customer.name)}
</Avatar> */}
<Typography
color="textPrimary"
variant="body1"
>
{college.coll_code}
</Typography>
</Box>
</TableCell>
<TableCell>
{college.coll_desc}
</TableCell>
<TableCell>
<FormDialog
id={college.id}
college_code={college.coll_code}
college_description={college.coll_desc}
college_logo={college.coll_logo}
>
</FormDialog>
</TableCell>
</TableRow>
))}
</TableBody>
</Table>
</Box>
</PerfectScrollbar>
<TablePagination
component="div"
count={customers.length}
onPageChange={handlePageChange}
onRowsPerPageChange={handleLimitChange}
page={page}
rowsPerPage={limit}
rowsPerPageOptions={[5, 10, 25]}
/>
</Card>
);
};
|
var _c_p_t_platform_specific_functions_8m =
[
[ "CPTGetCurrentContext", "_c_p_t_platform_specific_functions_8m.html#a3be5490002256d9807df1586581550b9", null ],
[ "CPTPopCGContext", "_c_p_t_platform_specific_functions_8m.html#af83544397fc336d1c14e66f2b7e473be", null ],
[ "CPTPushCGContext", "_c_p_t_platform_specific_functions_8m.html#af6d0cffe2f7fd5d5c9b96622d8472eee", null ]
]; |
#!/usr/bin/python3
# This file filters out the utterances with one channel, and
# only keeps utterances with two channels. The new uttname has
# the format dataset-uttname-speaker_ch1-speaker_ch2-channel
import os
import sys
def utt_spk_mapping(filename):
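    # Build utt->spk and spk->utt dictionaries from a Kaldi-style utt2spk file.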
utt2spk_dict = {}
spk2utt_dict = {}
with open(filename, 'r') as fh:
content = fh.readlines()
for line in content:
line = line.strip('\n')
uttname = line.split()[0]
spkname = line.split()[1]
utt2spk_dict[uttname] = spkname
if spkname in spk2utt_dict:
spk2utt_dict[spkname].append(uttname)
else:
spk2utt_dict[spkname] = [uttname]
return utt2spk_dict, spk2utt_dict
def utt_wav_mapping(filename):
utt2wav_dict = {}
with open(filename, 'r') as fh:
content = fh.readlines()
for line in content:
line = line.strip('\n')
uttname = line.split()[0]
wav = " ".join(line.split()[1:])
utt2wav_dict[uttname] = wav
return utt2wav_dict
def filter_wav(filename, utt2wav_dict, out_dir):
wav_scp_file = open("{}/wav.scp".format(out_dir), 'w')
utt2spk_file = open("{}/utt2spk".format(out_dir), 'w')
utt2singlechannel = {}
with open(filename, 'r') as fh:
content = fh.readlines()
for line in content:
line = line.strip('\n')
uttname_single = line.split()[0]
if line.split()[1] == "sox":
continue
speaker, dataset, uttname, channel = process_uttname(uttname_single)
utt = "{}-{}".format(dataset, uttname)
if utt in utt2singlechannel:
utt2singlechannel[utt].append(uttname_single)
else:
utt2singlechannel[utt] = [uttname_single]
total_utt = len(utt2singlechannel)
utt_list = list(utt2singlechannel.keys())
    # keep an utterance only if both of its channels are present in the dataset
for utt in utt_list:
if len(utt2singlechannel[utt]) != 2:
del utt2singlechannel[utt]
remain_utt = len(utt2singlechannel)
print("{} utts total, {} utts with two channels remain".format(total_utt, remain_utt))
utt_list = list(utt2singlechannel.keys())
utt_list.sort()
for utt in utt_list:
utt_singlechannel = utt2singlechannel[utt]
assert len(utt_singlechannel) == 2 # 2 channels
if (utt_singlechannel[0])[-1] in ['a', 'A', '1']:
utt_ch1 = utt_singlechannel[0]
utt_ch2 = utt_singlechannel[1]
elif (utt_singlechannel[0])[-1] in ['b', 'B', '2']:
utt_ch1 = utt_singlechannel[1]
utt_ch2 = utt_singlechannel[0]
else:
raise ValueError("Condition not defined.")
speaker_ch1, dataset_ch1, uttname_ch1, channel_ch1 = process_uttname(utt_ch1)
speaker_ch2, dataset_ch2, uttname_ch2, channel_ch2 = process_uttname(utt_ch2)
assert (dataset_ch1 == dataset_ch2) and (uttname_ch1 == uttname_ch2) and (channel_ch1 == "1") and (channel_ch2 == "2")
utt_ch1_new = "{}-{}-{}-{}-{}".format(dataset_ch1, uttname_ch1, speaker_ch1, speaker_ch2, channel_ch1)
utt_ch2_new = "{}-{}-{}-{}-{}".format(dataset_ch2, uttname_ch2, speaker_ch1, speaker_ch2, channel_ch2)
wav_scp_file.write("{} {}\n".format(utt_ch1_new, utt2wav_dict[utt_ch1]))
wav_scp_file.write("{} {}\n".format(utt_ch2_new, utt2wav_dict[utt_ch2]))
utt2spk_file.write("{} {}\n".format(utt_ch1_new, utt_ch1_new))
utt2spk_file.write("{} {}\n".format(utt_ch2_new, utt_ch2_new))
wav_scp_file.close()
utt2spk_file.close()
return 0
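# Parse a per-channel utterance name into (speaker, dataset, uttname, channel),
# handling the naming conventions of the corpora below (Mixer6, SRE,
# Switchboard Cellular, Switchboard-2).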
def process_uttname(uttname):
if len(uttname.split('_')) == 4 and uttname.split('_')[1] in ["MX6", "SRE08", "SRE10"]:
# Mixer6, SRE08, SRE10
uttname_split = uttname.split('_')
speaker = uttname_split[0]
dataset = uttname_split[1]
uttname = uttname_split[2]
if uttname_split[3] == "A":
channel = "1"
elif uttname_split[3] == "B":
channel = "2"
else:
print(uttname)
raise ValueError("Channel not defined.")
elif len(uttname.split('-')) == 4 and uttname.split('-')[1] in ["sre04", "sre05", "sre06"]:
# SRE04, 05, 06
uttname_split = uttname.split('-')
speaker = uttname_split[0]
dataset = uttname_split[1]
uttname = uttname_split[2]
if uttname_split[3] == "a":
channel = "1"
elif uttname_split[3] == "b":
channel = "2"
else:
print(uttname)
raise ValueError("Channel not defined.")
elif len(uttname.split('-')) == 2 and (uttname.split('-')[1]).split('_')[0] == "swbdc":
# Switchboard Cellular Part 1, Switchboard Cellular Part 2
speaker = uttname.split('-')[0]
info = (uttname.split('-')[1]).split('_')
assert len(info) == 4
dataset = info[0]
uttname = "_".join(info[1:3])
channel = info[3]
assert channel in ["1", "2"]
elif len(uttname.split('_')) == 5 and uttname.split('_')[0] == "sw":
# Switchboard-2 Phase I, Switchboard-2 Phase II, Switchboard-2 Phase III
uttname_split = uttname.split('_')
speaker = "_".join(uttname_split[:2])
dataset = "swbd2"
uttname = "_".join(uttname_split[2:4])
channel = uttname_split[4]
assert channel in ["1", "2"]
else:
print(uttname)
raise ValueError("Condition not defined.")
return speaker, dataset, uttname, channel
def main():
swbd_sre_dir = sys.argv[1]
out_dir = sys.argv[2]
if not os.path.exists(out_dir):
os.makedirs(out_dir)
utt2wav_dict = utt_wav_mapping("{}/wav.scp".format(swbd_sre_dir))
filter_wav("{}/wav.scp".format(swbd_sre_dir), utt2wav_dict, out_dir)
return 0
if __name__ == "__main__":
main()
|
'use strict';
// Summary:
// Build for production
const path = require('path');
const shell = require('shelljs');
const crypto = require('crypto');
// const utils = require('./lib/utils');
const webpack = require('webpack');
const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin;
const ProgressPlugin = require('webpack/lib/ProgressPlugin');
const ArgumentParser = require('argparse').ArgumentParser;
const parser = new ArgumentParser({
addHelp: true,
  description: 'Build for production.',
});
parser.addArgument(['--profile', '-p'], {
help: 'Whether to show profile of the bundle.',
action: 'storeTrue',
});
parser.addArgument(['--dist'], {
help: 'Whether it is a dist build.',
action: 'storeTrue',
});
parser.addArgument(['--demo'], {
help: 'Whether it is a demo build.',
action: 'storeTrue',
});
const args = parser.parseArgs();
const config = require('../webpack-config')(args.demo ? 'demo' : 'dist'); // eslint-disable-line
console.log('build args: ', args);
// Show profile of the build bundle
// https://github.com/th0r/webpack-bundle-analyzer
if (args.profile) {
config.plugins.push(new BundleAnalyzerPlugin({
// Can be `server`, `static` or `disabled`.
// In `server` mode analyzer will start HTTP server to show bundle report.
// In `static` mode single HTML file with bundle report will be generated.
// In `disabled` mode you can use this plugin to just generate Webpack Stats JSON file by setting `generateStatsFile` to `true`.
analyzerMode: 'static',
// Path to bundle report file that will be generated in `static` mode.
// Relative to bundles output directory.
reportFilename: 'report.html',
// Automatically open report in default browser
openAnalyzer: false,
// If `true`, Webpack Stats JSON file will be generated in bundles output directory
generateStatsFile: false,
// Options for `stats.toJson()` method.
// For example you can exclude sources of your modules from stats file with `source: false` option.
// See more options here: https://github.com/webpack/webpack/blob/webpack-1/lib/Stats.js#L21
statsOptions: null
}));
}
// Clean folder
const buildFolder = path.join(__dirname, '../build');
shell.rm('-rf', buildFolder);
shell.mkdir(buildFolder);
shell.mkdir(`${buildFolder}/static`);
// Bundle versioning using timestamp hash to prevent browser cache.
const timestamp = crypto
.createHash('md5')
.update(new Date().getTime().toString())
.digest('hex')
.substring(0, 10);
// Process index.html:
// 1. Remove dev vendors bundle
// 2. Add timestamp to main to prevent cache
let lines = shell.cat(path.join(__dirname, '../src/index.html')).split(/\r?\n/);
lines = lines.filter(line => line.indexOf('/.tmp/dev-vendors.js') < 0); // remove dev-vendors
let indexHtml = lines.join('\n');
indexHtml = indexHtml.replace('<img src="/src/images/logo_small.png" />', '<img src="/static/logo_small.png" />');
if (!args.dist) indexHtml = indexHtml.replace('/static/main.js', `/static/main.${timestamp}.js`);
shell.ShellString(indexHtml).to(path.join(buildFolder, 'index.html'));
// Copy favicon
shell.cp(path.join(__dirname, '../src/favicon.png'), buildFolder);
// Copy rekit logo
shell.cp(path.join(__dirname, '../src/images/logo_small.png'), path.join(buildFolder, 'static'));
// Webpack build
console.log('Building, it may take a few seconds...');
console.time('Done');
const compiler = webpack(config);
let lastPercentage = 0;
compiler.apply(new ProgressPlugin((percentage, msg) => {
percentage = Math.round(percentage * 10000) / 100;
if (/building modules/.test(msg) && percentage - lastPercentage < 8) {
return;
}
lastPercentage = percentage;
console.log(percentage + '%', msg);
}));
compiler.run((err, stats) => {
if (err) {
console.error(err.stack || err);
if (err.details) {
console.error(err.details);
}
return;
}
console.log(stats.toString({
colors: true,
}));
// Add timestamp hash to bundle file name.
if (!stats.hasErrors() && !args.dist) shell.mv(path.join(buildFolder, './static/main.js'), path.join(buildFolder, `/static/main.${timestamp}.js`));
// if build for npm, copy build folder to dist folder
if (args.dist) {
console.log('copy build folder to dist build.');
const distFolder = path.join(__dirname, '../dist');
shell.rm('-rf', distFolder);
shell.cp('-r', buildFolder, distFolder);
}
console.timeEnd('Done');
});
|
import { Document32 } from '..';
export default Document32;
|
const { ResponseStatus } = require("../enums");
const { TimerData } = require("../sequelize");
async function TimerRoutes(app){
app.post("/api/v1/initiate-timer", async (req,res,next)=>{
const { type } = req.body
if(type === "START"){
// Take start time, email, rate/min and currency from request body
let { startTime, email, ratePerMin, currency } = req.body
// If start time exist take it, else take the current time
startTime = startTime ? startTime : Date.now()
// Create entry in database
await TimerData.create({ startTime, email, ratePerMin, currency })
// Find the entry made
let user = await TimerData.findOne({where : { email : email }})
// Show it to user as response
res.status(200).end(JSON.stringify({ "status" : ResponseStatus.Error, "startTime": user.dataValues.startTime }))
}
else if (type === "STOP"){
// Take endtime and email from request body
let { endTime, email } = req.body
// If end time exist take it, else take the current time
      endTime = endTime ? endTime : Date.now()
try {
// Update where user email exist and endTime is null
await TimerData.update(
{ endTime },
{ where: { email : email, endTime : null }}
)
// Find updated user from last
let user = await TimerData.findOne({ where : { email, id : await TimerData.max('id', { where : { email }}) }})
// Calculate time different and hence find the cost
let timeDiff = Math.abs(new Date(user.endTime) - new Date(user.startTime))
let cost = (timeDiff/(1000 * 60)) * user.ratePerMin
// Update the newly fetched database reponse with the total cost
let user_response = { "status" : "SUCCESS", ...user.dataValues, "Total" : cost}
// Send response to the user
res.status(200).end(JSON.stringify(user_response))
}
catch (err) {
res.status(400).end(JSON.stringify(err))
}
}else{
// For any error send failed response to the user
res.status(400).end(JSON.stringify({"status" : "Failed" }))
}
})
}
module.exports = {
TimerRoutes
}; |
// @flow
import {
ACTIONS,
Lbry,
doNotify,
MODALS,
selectMyChannelClaims,
THUMBNAIL_STATUSES,
batchActions,
} from 'lbry-redux';
import { selectPendingPublishes } from 'redux/selectors/publish';
import type {
UpdatePublishFormData,
UpdatePublishFormAction,
PublishParams,
} from 'redux/reducers/publish';
import fs from 'fs';
import path from 'path';
type Action = UpdatePublishFormAction | { type: ACTIONS.CLEAR_PUBLISH };
type PromiseAction = Promise<Action>;
type Dispatch = (action: Action | PromiseAction | Array<Action>) => any;
type GetState = () => {};
export const doClearPublish = () => (dispatch: Dispatch): PromiseAction => {
dispatch({ type: ACTIONS.CLEAR_PUBLISH });
return dispatch(doResetThumbnailStatus());
};
export const doUpdatePublishForm = (publishFormValue: UpdatePublishFormData) => (
dispatch: Dispatch
): UpdatePublishFormAction =>
dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: { ...publishFormValue },
});
export const doResetThumbnailStatus = () => (dispatch: Dispatch): PromiseAction => {
dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
thumbnailPath: '',
},
});
return fetch('https://spee.ch/api/config/site/publishing')
.then(res => res.json())
.then(status => {
if (status.disabled) {
throw Error();
}
return dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.READY,
thumbnail: '',
nsfw: false,
},
});
})
.catch(() =>
dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.API_DOWN,
thumbnail: '',
nsfw: false,
},
})
);
};
export const doUploadThumbnail = (filePath: string, nsfw: boolean) => (dispatch: Dispatch) => {
const thumbnail = fs.readFileSync(filePath);
const fileExt = path.extname(filePath);
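  // Generate a random 24-character name for the uploaded thumbnail file.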
const makeid = () => {
let text = '';
const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
for (let i = 0; i < 24; i += 1) text += possible.charAt(Math.floor(Math.random() * 62));
return text;
};
const uploadError = (error = '') =>
dispatch(
batchActions(
{
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: { uploadThumbnailStatus: THUMBNAIL_STATUSES.API_DOWN },
},
        doNotify({ id: MODALS.ERROR, error })
)
);
dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: { uploadThumbnailStatus: THUMBNAIL_STATUSES.IN_PROGRESS },
});
const data = new FormData();
const name = makeid();
const blob = new Blob([thumbnail], { type: `image/${fileExt.slice(1)}` });
data.append('name', name);
data.append('file', blob);
data.append('nsfw', nsfw.toString());
return fetch('https://spee.ch/api/claim/publish', {
method: 'POST',
body: data,
})
.then(response => response.json())
.then(
json =>
json.success
? dispatch({
type: ACTIONS.UPDATE_PUBLISH_FORM,
data: {
uploadThumbnailStatus: THUMBNAIL_STATUSES.COMPLETE,
thumbnail: `${json.data.url}${fileExt}`,
},
})
: uploadError('Upload failed')
)
.catch(err => uploadError(err.message));
};
export const doPrepareEdit = (claim: any, uri: string) => (dispatch: Dispatch) => {
const {
name,
amount,
channel_name: channelName,
value: {
stream: { metadata },
},
} = claim;
const {
author,
description,
// use same values as default state
// fee will be undefined for free content
fee = {
amount: 0,
currency: 'LBC',
},
language,
license,
licenseUrl,
nsfw,
thumbnail,
title,
} = metadata;
const publishData = {
name,
channel: channelName,
bid: amount,
price: { amount: fee.amount, currency: fee.currency },
contentIsFree: !fee.amount,
author,
description,
fee,
language,
licenseType: license,
licenseUrl,
nsfw,
thumbnail,
title,
uri,
uploadThumbnailStatus: thumbnail ? THUMBNAIL_STATUSES.MANUAL : undefined,
};
dispatch({ type: ACTIONS.DO_PREPARE_EDIT, data: publishData });
};
export const doPublish = (params: PublishParams) => (dispatch: Dispatch, getState: () => {}) => {
const state = getState();
const myChannels = selectMyChannelClaims(state);
const {
name,
bid,
filePath,
description,
language,
license,
licenseUrl,
thumbnail,
nsfw,
channel,
title,
contentIsFree,
price,
uri,
sources,
} = params;
// get the claim id from the channel name, we will use that instead
const namedChannelClaim = myChannels.find(myChannel => myChannel.name === channel);
const channelId = namedChannelClaim ? namedChannelClaim.claim_id : '';
const fee = contentIsFree || !price.amount ? undefined : { ...price };
const metadata = {
title,
nsfw,
license,
licenseUrl,
language,
thumbnail,
};
if (fee) {
metadata.fee = fee;
}
if (description) {
metadata.description = description;
}
const publishPayload = {
name,
channel_id: channelId,
bid,
metadata,
};
if (filePath) {
publishPayload.file_path = filePath;
} else {
publishPayload.sources = sources;
}
dispatch({ type: ACTIONS.PUBLISH_START });
const success = () => {
dispatch({
type: ACTIONS.PUBLISH_SUCCESS,
data: { pendingPublish: { ...publishPayload } },
});
dispatch(doNotify({ id: MODALS.PUBLISH }, { uri }));
};
const failure = error => {
dispatch({ type: ACTIONS.PUBLISH_FAIL });
dispatch(doNotify({ id: MODALS.ERROR, error: error.message }));
};
return Lbry.publish(publishPayload).then(success, failure);
};
// Calls claim_list_mine until any pending publishes are confirmed
export const doCheckPendingPublishes = () => (dispatch: Dispatch, getState: GetState) => {
const state = getState();
const pendingPublishes = selectPendingPublishes(state);
let publishCheckInterval;
const checkFileList = () => {
Lbry.claim_list_mine().then(claims => {
const pendingPublishMap = {};
pendingPublishes.forEach(({ name }) => {
pendingPublishMap[name] = name;
});
const actions = [];
claims.forEach(claim => {
if (pendingPublishMap[claim.name]) {
actions.push({
type: ACTIONS.REMOVE_PENDING_PUBLISH,
data: {
name: claim.name,
},
});
delete pendingPublishMap[claim.name];
}
});
actions.push({
type: ACTIONS.FETCH_CLAIM_LIST_MINE_COMPLETED,
data: {
claims,
},
});
dispatch(batchActions(...actions));
if (!Object.keys(pendingPublishMap).length) {
clearInterval(publishCheckInterval);
}
});
};
if (pendingPublishes.length) {
checkFileList();
publishCheckInterval = setInterval(() => {
checkFileList();
}, 30000);
}
};
|
// @flow
import passport from "@outlinewiki/koa-passport";
import { type Context } from "koa";
import type { AccountProvisionerResult } from "../commands/accountProvisioner";
import { signIn } from "../utils/authentication";
export default function createMiddleware(providerName: string) {
return function passportMiddleware(ctx: Context) {
return passport.authorize(
providerName,
{ session: false },
async (err, _, result: AccountProvisionerResult) => {
if (err) {
console.error(err);
if (err.id) {
const notice = err.id.replace(/_/g, "-");
return ctx.redirect(`${err.redirectUrl || "/"}?notice=${notice}`);
}
if (process.env.NODE_ENV === "development") {
throw err;
}
return ctx.redirect(`/?notice=auth-error`);
}
// Handle errors from Azure which come in the format: message, Trace ID,
// Correlation ID, Timestamp in these two query string parameters.
const { error, error_description } = ctx.request.query;
if (error && error_description) {
console.error(error_description);
// Display only the descriptive message to the user, log the rest
const description = error_description.split("Trace ID")[0];
return ctx.redirect(`/?notice=auth-error&description=${description}`);
}
if (result.user.isSuspended) {
return ctx.redirect("/?notice=suspended");
}
await signIn(
ctx,
result.user,
result.team,
providerName,
result.isNewUser,
result.isNewTeam
);
}
)(ctx);
};
}
|
$.get('/api/top').done(function (data) {
//console.log(data);
    var type = []; // categories (product names)
    var sell = []; // data (sales figures)
$.each(data.products, function (k, v) {
type.push(v.product.name);
sell.push({value: v.sum_num, name: v.product.name})
})
// console.log(sell);
var myChart = echarts.init(document.getElementById('top'), 'macarons');
myChart.setOption({
title: {
            text: 'Top-selling products this month',
subtext: data.month_start + ' ~ ' + data.month_end,
x: 'center'
},
tooltip: {
trigger: 'item',
formatter: "{a} <br/>{b} : {c} ({d}%)"
},
legend: {
orient: 'vertical',
left: 'left',
data: type
},
series: [
{
                name: 'Sales volume',
type: 'pie',
radius: '55%',
center: ['50%', '50%'],
data: sell,
itemStyle: {
emphasis: {
shadowBlur: 10,
shadowOffsetX: 0,
shadowColor: 'rgba(0, 0, 0, 0.5)'
}
}
}
]
});
}); |
var verificar = window.document.getElementById('verificador')
verificar.addEventListener('click', verif)
function verif(){
var data = new Date()
var anoatual = data.getFullYear()
var anodigitado = window.document.getElementById('nascimento')
var res = window.document.getElementById('res')
if (anodigitado.value == 0 || Number(anodigitado.value) > anoatual){
        window.alert('[ERROR]: ENTER A VALID BIRTH YEAR')
} else {
var sexoselect = window.document.getElementsByName('rsex')
var idade = anoatual - Number(anodigitado.value)
var genero = ''
var img = document.createElement('img')
img.setAttribute('id', 'foto')
if(sexoselect[0].checked) {
        genero = `Man`
if( idade >= 0 && idade < 10){
img.setAttribute('src', 'images/foto-bebe-m.png')
} else if(idade < 21){
img.setAttribute('src', 'images/foto-jovem-m.png')
} else if (idade < 50){
img.setAttribute('src', 'images/foto-adulto-m.png')
} else {
img.setAttribute('src', 'images/foto-idoso-m.png')
}
}else if(sexoselect[1].checked){
        genero = `Woman`
if(idade >= 0 && idade < 10){
img.setAttribute('src' , 'images/foto-bebe-f.png')
} else if (idade < 21){
img.setAttribute('src' , 'images/foto-jovem-f.png')
} else if (idade < 50){
img.setAttribute('src' , 'images/foto-adulto-f.png')
} else {
img.setAttribute('src' , 'images/foto-idoso-f.png')
}
}
        res.innerHTML = `<p> We detected a ${genero} aged ${idade}.</p>`
res.appendChild(img)
}
} |
"use strict";
(self["webpackChunk"] = self["webpackChunk"] || []).push([["fonts/free-solid-svg-icons-faExpandAlt-js"],{
/***/ "./node_modules/@fortawesome/free-solid-svg-icons/faExpandAlt.js":
/*!***********************************************************************!*\
!*** ./node_modules/@fortawesome/free-solid-svg-icons/faExpandAlt.js ***!
\***********************************************************************/
/***/ ((__unused_webpack_module, exports) => {
Object.defineProperty(exports, "__esModule", ({ value: true }));
var prefix = 'fas';
var iconName = 'expand-alt';
var width = 448;
var height = 512;
var ligatures = [];
var unicode = 'f424';
var svgPathData = 'M212.686 315.314L120 408l32.922 31.029c15.12 15.12 4.412 40.971-16.97 40.971h-112C10.697 480 0 469.255 0 456V344c0-21.382 25.803-32.09 40.922-16.971L72 360l92.686-92.686c6.248-6.248 16.379-6.248 22.627 0l25.373 25.373c6.249 6.248 6.249 16.378 0 22.627zm22.628-118.628L328 104l-32.922-31.029C279.958 57.851 290.666 32 312.048 32h112C437.303 32 448 42.745 448 56v112c0 21.382-25.803 32.09-40.922 16.971L376 152l-92.686 92.686c-6.248 6.248-16.379 6.248-22.627 0l-25.373-25.373c-6.249-6.248-6.249-16.378 0-22.627z';
exports.definition = {
prefix: prefix,
iconName: iconName,
icon: [
width,
height,
ligatures,
unicode,
svgPathData
]};
exports.faExpandAlt = exports.definition;
exports.prefix = prefix;
exports.iconName = iconName;
exports.width = width;
exports.height = height;
exports.ligatures = ligatures;
exports.unicode = unicode;
exports.svgPathData = svgPathData;
/***/ })
}]); |
(self["webpackChunkwizzi_editor"] = self["webpackChunkwizzi_editor"] || []).push([["node_modules_monaco-editor_esm_vs_basic-languages_html_html_js"],{
/***/ "./node_modules/monaco-editor/esm/vs/basic-languages/html/html.js":
/*!************************************************************************!*\
!*** ./node_modules/monaco-editor/esm/vs/basic-languages/html/html.js ***!
\************************************************************************/
/***/ ((__unused_webpack_module, __webpack_exports__, __webpack_require__) => {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "conf": () => (/* binding */ conf),
/* harmony export */ "language": () => (/* binding */ language)
/* harmony export */ });
/* harmony import */ var _fillers_monaco_editor_core_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! ../fillers/monaco-editor-core.js */ "./node_modules/monaco-editor/esm/vs/basic-languages/fillers/monaco-editor-core.js");
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var EMPTY_ELEMENTS = [
'area',
'base',
'br',
'col',
'embed',
'hr',
'img',
'input',
'keygen',
'link',
'menuitem',
'meta',
'param',
'source',
'track',
'wbr'
];
var conf = {
wordPattern: /(-?\d*\.\d\w*)|([^\`\~\!\@\$\^\&\*\(\)\=\+\[\{\]\}\\\|\;\:\'\"\,\.\<\>\/\s]+)/g,
comments: {
blockComment: ['<!--', '-->']
},
brackets: [
['<!--', '-->'],
['<', '>'],
['{', '}'],
['(', ')']
],
autoClosingPairs: [
{ open: '{', close: '}' },
{ open: '[', close: ']' },
{ open: '(', close: ')' },
{ open: '"', close: '"' },
{ open: "'", close: "'" }
],
surroundingPairs: [
{ open: '"', close: '"' },
{ open: "'", close: "'" },
{ open: '{', close: '}' },
{ open: '[', close: ']' },
{ open: '(', close: ')' },
{ open: '<', close: '>' }
],
onEnterRules: [
{
beforeText: new RegExp("<(?!(?:" + EMPTY_ELEMENTS.join('|') + "))([_:\\w][_:\\w-.\\d]*)([^/>]*(?!/)>)[^<]*$", 'i'),
afterText: /^<\/([_:\w][_:\w-.\d]*)\s*>$/i,
action: {
indentAction: _fillers_monaco_editor_core_js__WEBPACK_IMPORTED_MODULE_0__.languages.IndentAction.IndentOutdent
}
},
{
beforeText: new RegExp("<(?!(?:" + EMPTY_ELEMENTS.join('|') + "))(\\w[\\w\\d]*)([^/>]*(?!/)>)[^<]*$", 'i'),
action: { indentAction: _fillers_monaco_editor_core_js__WEBPACK_IMPORTED_MODULE_0__.languages.IndentAction.Indent }
}
],
folding: {
markers: {
start: new RegExp('^\\s*<!--\\s*#region\\b.*-->'),
end: new RegExp('^\\s*<!--\\s*#endregion\\b.*-->')
}
}
};
var language = {
defaultToken: '',
tokenPostfix: '.html',
ignoreCase: true,
// The main tokenizer for our languages
tokenizer: {
root: [
[/<!DOCTYPE/, 'metatag', '@doctype'],
[/<!--/, 'comment', '@comment'],
[/(<)((?:[\w\-]+:)?[\w\-]+)(\s*)(\/>)/, ['delimiter', 'tag', '', 'delimiter']],
[/(<)(script)/, ['delimiter', { token: 'tag', next: '@script' }]],
[/(<)(style)/, ['delimiter', { token: 'tag', next: '@style' }]],
[/(<)((?:[\w\-]+:)?[\w\-]+)/, ['delimiter', { token: 'tag', next: '@otherTag' }]],
[/(<\/)((?:[\w\-]+:)?[\w\-]+)/, ['delimiter', { token: 'tag', next: '@otherTag' }]],
[/</, 'delimiter'],
[/[^<]+/] // text
],
doctype: [
[/[^>]+/, 'metatag.content'],
[/>/, 'metatag', '@pop']
],
comment: [
[/-->/, 'comment', '@pop'],
[/[^-]+/, 'comment.content'],
[/./, 'comment.content']
],
otherTag: [
[/\/?>/, 'delimiter', '@pop'],
[/"([^"]*)"/, 'attribute.value'],
[/'([^']*)'/, 'attribute.value'],
[/[\w\-]+/, 'attribute.name'],
[/=/, 'delimiter'],
[/[ \t\r\n]+/] // whitespace
],
// -- BEGIN <script> tags handling
// After <script
script: [
[/type/, 'attribute.name', '@scriptAfterType'],
[/"([^"]*)"/, 'attribute.value'],
[/'([^']*)'/, 'attribute.value'],
[/[\w\-]+/, 'attribute.name'],
[/=/, 'delimiter'],
[
/>/,
{
token: 'delimiter',
next: '@scriptEmbedded',
nextEmbedded: 'text/javascript'
}
],
[/[ \t\r\n]+/],
[/(<\/)(script\s*)(>)/, ['delimiter', 'tag', { token: 'delimiter', next: '@pop' }]]
],
// After <script ... type
scriptAfterType: [
[/=/, 'delimiter', '@scriptAfterTypeEquals'],
[
/>/,
{
token: 'delimiter',
next: '@scriptEmbedded',
nextEmbedded: 'text/javascript'
}
],
[/[ \t\r\n]+/],
[/<\/script\s*>/, { token: '@rematch', next: '@pop' }]
],
// After <script ... type =
scriptAfterTypeEquals: [
[
/"([^"]*)"/,
{
token: 'attribute.value',
switchTo: '@scriptWithCustomType.$1'
}
],
[
/'([^']*)'/,
{
token: 'attribute.value',
switchTo: '@scriptWithCustomType.$1'
}
],
[
/>/,
{
token: 'delimiter',
next: '@scriptEmbedded',
nextEmbedded: 'text/javascript'
}
],
[/[ \t\r\n]+/],
[/<\/script\s*>/, { token: '@rematch', next: '@pop' }]
],
// After <script ... type = $S2
scriptWithCustomType: [
[
/>/,
{
token: 'delimiter',
next: '@scriptEmbedded.$S2',
nextEmbedded: '$S2'
}
],
[/"([^"]*)"/, 'attribute.value'],
[/'([^']*)'/, 'attribute.value'],
[/[\w\-]+/, 'attribute.name'],
[/=/, 'delimiter'],
[/[ \t\r\n]+/],
[/<\/script\s*>/, { token: '@rematch', next: '@pop' }]
],
scriptEmbedded: [
[/<\/script/, { token: '@rematch', next: '@pop', nextEmbedded: '@pop' }],
[/[^<]+/, '']
],
// -- END <script> tags handling
// -- BEGIN <style> tags handling
// After <style
style: [
[/type/, 'attribute.name', '@styleAfterType'],
[/"([^"]*)"/, 'attribute.value'],
[/'([^']*)'/, 'attribute.value'],
[/[\w\-]+/, 'attribute.name'],
[/=/, 'delimiter'],
[
/>/,
{
token: 'delimiter',
next: '@styleEmbedded',
nextEmbedded: 'text/css'
}
],
[/[ \t\r\n]+/],
[/(<\/)(style\s*)(>)/, ['delimiter', 'tag', { token: 'delimiter', next: '@pop' }]]
],
// After <style ... type
styleAfterType: [
[/=/, 'delimiter', '@styleAfterTypeEquals'],
[
/>/,
{
token: 'delimiter',
next: '@styleEmbedded',
nextEmbedded: 'text/css'
}
],
[/[ \t\r\n]+/],
[/<\/style\s*>/, { token: '@rematch', next: '@pop' }]
],
// After <style ... type =
styleAfterTypeEquals: [
[
/"([^"]*)"/,
{
token: 'attribute.value',
switchTo: '@styleWithCustomType.$1'
}
],
[
/'([^']*)'/,
{
token: 'attribute.value',
switchTo: '@styleWithCustomType.$1'
}
],
[
/>/,
{
token: 'delimiter',
next: '@styleEmbedded',
nextEmbedded: 'text/css'
}
],
[/[ \t\r\n]+/],
[/<\/style\s*>/, { token: '@rematch', next: '@pop' }]
],
// After <style ... type = $S2
styleWithCustomType: [
[
/>/,
{
token: 'delimiter',
next: '@styleEmbedded.$S2',
nextEmbedded: '$S2'
}
],
[/"([^"]*)"/, 'attribute.value'],
[/'([^']*)'/, 'attribute.value'],
[/[\w\-]+/, 'attribute.name'],
[/=/, 'delimiter'],
[/[ \t\r\n]+/],
[/<\/style\s*>/, { token: '@rematch', next: '@pop' }]
],
styleEmbedded: [
[/<\/style/, { token: '@rematch', next: '@pop', nextEmbedded: '@pop' }],
[/[^<]+/, '']
]
// -- END <style> tags handling
}
};
// TESTED WITH:
// <!DOCTYPE html>
// <html>
// <head>
// <title>Monarch Workbench</title>
// <meta http-equiv="X-UA-Compatible" content="IE=edge" />
// <!----
// -- -- -- a comment -- -- --
// ---->
// <style bah="bah">
// body { font-family: Consolas; } /* nice */
// </style>
// </head
// >
// a = "asd"
// <body>
// <br/>
// <div
// class
// =
// "test"
// >
// <script>
// function() {
// alert("hi </ script>"); // javascript
// };
// </script>
// <script
// bah="asdfg"
// type="text/css"
// >
// .bar { text-decoration: underline; }
// </script>
// </div>
// </body>
// </html>
/***/ })
}]);
//# sourceMappingURL=node_modules_monaco-editor_esm_vs_basic-languages_html_html_js.559f31a1aeb666a9a7a5.chunk.js.map |
/**
 * Initialize the stakes management detail dialog
*/
var StakesInfoDlg = {
stakesInfoData : {}
};
/**
 * Clear the data
*/
StakesInfoDlg.clearData = function() {
this.stakesInfoData = {};
}
/**
 * Set a value in the dialog data
 *
 * @param key name of the field
 * @param val value of the field
*/
StakesInfoDlg.set = function(key, val) {
this.stakesInfoData[key] = (typeof val == "undefined") ? $("#" + key).val() : val;
return this;
}
/**
 * Get the value of a field in the dialog
 *
 * @param key name of the field
*/
StakesInfoDlg.get = function(key) {
return $("#" + key).val();
}
/**
 * Close this dialog
*/
StakesInfoDlg.close = function() {
parent.layer.close(window.parent.Stakes.layerIndex);
}
/**
 * Collect the form data
*/
StakesInfoDlg.collectData = function() {
this
.set('id')
.set('pid')
.set('issue')
.set('rid')
.set('uid')
.set('ruid')
.set('money')
.set('status')
.set('createTime');
}
/**
 * Submit an add request
*/
StakesInfoDlg.addSubmit = function() {
this.clearData();
this.collectData();
    // submit the data
    var ajax = new $ax(Feng.ctxPath + "/stakes/add", function(data){
        Feng.success("Added successfully!");
        window.parent.Stakes.table.refresh();
        StakesInfoDlg.close();
    },function(data){
        Feng.error("Add failed! " + data.responseJSON.message + "!");
});
ajax.set(this.stakesInfoData);
ajax.start();
}
/**
 * Submit an update request
*/
StakesInfoDlg.editSubmit = function() {
this.clearData();
this.collectData();
    // submit the data
    var ajax = new $ax(Feng.ctxPath + "/stakes/update", function(data){
        Feng.success("Updated successfully!");
        window.parent.Stakes.table.refresh();
        StakesInfoDlg.close();
    },function(data){
        Feng.error("Update failed! " + data.responseJSON.message + "!");
});
ajax.set(this.stakesInfoData);
ajax.start();
}
$(function() {
});
|
export * from '@styled-icons/icomoon/Spinner9';
|
const routes = require('./project-routes');
class Project {
constructor(requestHelper, routeHelper, md) {
this.requestHelper = requestHelper;
this.routeHelper = routeHelper;
this.md = md;
}
getProjectUsers(params) {
const path = this.routeHelper.interpolate(routes.GET_PROJECT_USERS, {
projectId: params.projectId
}, {
offset: params.offset,
limit: params.limit
});
return this.requestHelper.get(path);
}
createDataPermission({ projectId, expression, category, title }) {
return this.md.createMetadata({
projectId,
body: {
userFilter: {
content: { expression },
meta: { category, title }
}
}
});
}
deleteDataPermission({ projectId, dataPermissionId }) {
return this.md.deleteMetadata({ projectId, id: dataPermissionId });
}
}
module.exports = Project;
|
import React from 'react';
const SvgComponent = props => (
<svg width={84} height={37} fill="none" {...props}>
<rect x={0.5} y={0.5} width={83} height={36} rx={3.5} fill="#fff" stroke="#212121" />
</svg>
);
export default SvgComponent;
|
/**
* Test the repo list item
*/
import React from 'react';
import { shallow, render } from 'enzyme';
import { IntlProvider } from 'react-intl';
import ListItem from 'components/ListItem';
import { RepoListItem } from '../index';
const renderComponent = (props = {}) => render(
<IntlProvider locale="en">
<RepoListItem {...props} />
</IntlProvider>
);
describe('<RepoListItem />', () => {
let item;
// Before each test reset the item data for safety
beforeEach(() => {
item = {
owner: {
login: 'jeffbski',
},
html_url: 'https://github.com/jeffbski/react-boilerplate-logic',
name: 'react-boilerplate',
open_issues_count: 20,
full_name: 'jeffbski/react-boilerplate-logic',
};
});
it('should render a ListItem', () => {
const renderedComponent = shallow(
<RepoListItem item={item} />
);
expect(renderedComponent.find(ListItem).length).toBe(1);
});
it('should not render the current username', () => {
const renderedComponent = renderComponent({
item,
currentUser: item.owner.login,
});
expect(renderedComponent.text()).not.toContain(item.owner.login);
});
it('should render usernames that are not the current one', () => {
const renderedComponent = renderComponent({
item,
currentUser: 'nikgraf',
});
expect(renderedComponent.text()).toContain(item.owner.login);
});
it('should render the repo name', () => {
const renderedComponent = renderComponent({ item });
expect(renderedComponent.text()).toContain(item.name);
});
it('should render the issue count', () => {
const renderedComponent = renderComponent({ item });
expect(renderedComponent.text()).toContain(item.open_issues_count);
});
it('should render the IssueIcon', () => {
const renderedComponent = renderComponent({ item });
expect(renderedComponent.find('svg').length).toBe(1);
});
});
|
from tests.app.constants import ACTIVE, ERROR, IDLE
from django.utils.translation import gettext_lazy as _
DEBUG = True
USE_TZ = True
SECRET_KEY = "dummy"
DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
INSTALLED_APPS = [
"django.forms",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.sites",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.humanize",
"backoffice_extensions",
"tests.app.apps.TestAppConfig",
"tests.backoffice.apps.BackofficeAppConfig",
]
ROOT_URLCONF = "tests.urls"
SITE_ID = 1
USE_I18N = True
LANGUAGE_CODE = "en"
LANGUAGES = [("en", "English")]
LOCALE_PATHS = ["./backoffice_extensions/locale/"]
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"OPTIONS": {
"debug": DEBUG,
"loaders": [
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.template.context_processors.i18n",
"django.template.context_processors.media",
"django.template.context_processors.static",
"django.template.context_processors.tz",
"django.contrib.messages.context_processors.messages",
],
},
}
]
STATIC_URL = "/static/"
WSGI_APPLICATION = "tests.wsgi.application"
# DJANGO BACKOFFICE
# ------------------------------------------------------------------------------
BACKOFFICE_STATUS_TAG_CLASSES = {
IDLE: "is-warning",
ACTIVE: "is-success",
ERROR: "is-danger",
}
BACKOFFICE_SIDEBAR_CONFIG = [
{
"label": _("Data"),
"sections": {
"user": {"label": _("User"), "permission": None,},
"stuff": {"label": _("Stuff"), "permission": None,},
},
}
]
|
/* istanbul instrument in package npmdoc_scrap */
/*jslint
bitwise: true,
browser: true,
maxerr: 8,
maxlen: 96,
node: true,
nomen: true,
regexp: true,
stupid: true
*/
(function () {
'use strict';
var local;
// run shared js-env code - pre-init
(function () {
// init local
local = {};
// init modeJs
local.modeJs = (function () {
try {
return typeof navigator.userAgent === 'string' &&
typeof document.querySelector('body') === 'object' &&
typeof XMLHttpRequest.prototype.open === 'function' &&
'browser';
} catch (errorCaughtBrowser) {
return module.exports &&
typeof process.versions.node === 'string' &&
typeof require('http').createServer === 'function' &&
'node';
}
}());
// init global
local.global = local.modeJs === 'browser'
? window
: global;
switch (local.modeJs) {
// re-init local from window.local
case 'browser':
local = local.global.utility2.objectSetDefault(
local.global.utility2_rollup || local.global.local,
local.global.utility2
);
break;
// re-init local from example.js
case 'node':
local = (local.global.utility2_rollup || require('utility2'))
.requireReadme();
break;
}
// export local
local.global.local = local;
}());
// run shared js-env code - function
(function () {
return;
}());
switch (local.modeJs) {
// run browser js-env code - function
case 'browser':
break;
// run node js-env code - function
case 'node':
break;
}
// run shared js-env code - post-init
(function () {
return;
}());
switch (local.modeJs) {
// run browser js-env code - post-init
case 'browser':
local.testCase_browser_nullCase = local.testCase_browser_nullCase || function (
options,
onError
) {
/*
* this function will test the browser's null-case handling-behavior
*/
onError(null, options);
};
// run tests
local.nop(local.modeTest &&
document.querySelector('#testRunButton1') &&
document.querySelector('#testRunButton1').click());
break;
// run node js-env code - post-init
/* istanbul ignore next */
case 'node':
local.testCase_buildApidoc_default = local.testCase_buildApidoc_default || function (
options,
onError
) {
/*
* this function will test buildApidoc's default handling-behavior
*/
options = { modulePathList: module.paths };
local.buildApidoc(options, onError);
};
local.testCase_buildApp_default = local.testCase_buildApp_default || function (
options,
onError
) {
/*
* this function will test buildApp's default handling-behavior
*/
local.testCase_buildReadme_default(options, local.onErrorThrow);
local.testCase_buildLib_default(options, local.onErrorThrow);
local.testCase_buildTest_default(options, local.onErrorThrow);
local.testCase_buildCustomOrg_default(options, local.onErrorThrow);
options = [];
local.buildApp(options, onError);
};
local.testCase_buildCustomOrg_default = local.testCase_buildCustomOrg_default ||
function (options, onError) {
/*
* this function will test buildCustomOrg's default handling-behavior
*/
options = {};
local.buildCustomOrg(options, onError);
};
local.testCase_buildLib_default = local.testCase_buildLib_default || function (
options,
onError
) {
/*
* this function will test buildLib's default handling-behavior
*/
options = {};
local.buildLib(options, onError);
};
local.testCase_buildReadme_default = local.testCase_buildReadme_default || function (
options,
onError
) {
/*
* this function will test buildReadme's default handling-behavior
*/
options = {};
local.buildReadme(options, onError);
};
local.testCase_buildTest_default = local.testCase_buildTest_default || function (
options,
onError
) {
/*
* this function will test buildTest's default handling-behavior
*/
options = {};
local.buildTest(options, onError);
};
local.testCase_webpage_default = local.testCase_webpage_default || function (
options,
onError
) {
/*
* this function will test webpage's default handling-behavior
*/
options = { modeCoverageMerge: true, url: local.serverLocalHost + '?modeTest=1' };
local.browserTest(options, onError);
};
// run test-server
local.testRunServer(local);
break;
}
}());
|
import React, {Component} from 'react';
import {EntypoPaperPlane, EntypoMic} from 'react-entypo';
//import Audio from './Audio';
class Input extends Component {
constructor(props) {
super(props);
this.state = {value: ''};
//this.handleAudio = this.handleAudio.bind(this);
this.handleChange = this.handleChange.bind(this);
//this.handleListen = this.handleListen.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
}
/*handleAudio(e) {
const last = e.results.length - 1;
const value = this.state.value + e.results[last][0].transcript;
this.setState({value});
}*/
handleChange(e) {
const value = e.target.value;
if (value.length >= 256) {
alert('Please stay within the 256 character limit');
return;
}
this.setState({value});
}
/*handleListen() {
this.audio.listen();
}*/
handleSubmit(e) {
e.preventDefault();
this.props.onSubmit(this.state.value);
this.setState({value: ''});
}
componentDidMount() {
this._text.focus();
//this.audio = new Audio(this.handleAudioStart, this.handleAudio, this.handleAudioError);
}
render(){
return(
<form className="text-form"
onSubmit={this.handleSubmit}>
<input className="text-input"
type="text"
name="inputText"
placeholder="Enter your message"
value={this.state.value}
ref={input => this._text = input}
onChange={this.handleChange}
autoComplete="off"
required />
{/* <button className="btn-voice"
type="button"
value="Voice"
onClick={this.handleListen}>
<EntypoMic/>
</button> */}
<button className="btn-send"
type="submit"
value="Send">
<EntypoPaperPlane/> Send
</button>
</form>
);
}
}
export default Input;
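// Usage sketch (illustrative): the parent owns message delivery and passes a
// handler down via the onSubmit prop consumed in handleSubmit above, e.g.
//   <Input onSubmit={text => chatService.send(text)} />
// where chatService is a hypothetical transport, not part of this file.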
|
describe('Adwords forwarder', function () {
var MessageType = {
SessionStart: 1,
SessionEnd: 2,
PageView: 3,
PageEvent: 4,
CrashReport: 5,
OptOut: 6,
Commerce: 16
},
EventType = {
Unknown: 0,
Navigation: 1,
Location: 2,
Search: 3,
Transaction: 4,
UserContent: 5,
UserPreference: 6,
Social: 7,
Other: 8,
Media: 9,
getName: function () {
return 'blahblah';
}
},
ProductActionType = {
Unknown: 0,
AddToCart: 1,
RemoveFromCart: 2,
Checkout: 3,
CheckoutOption: 4,
Click: 5,
ViewDetail: 6,
Purchase: 7,
Refund: 8,
AddToWishlist: 9,
RemoveFromWishlist: 10,
},
IdentityType = {
Other: 0,
CustomerId: 1,
Facebook: 2,
Twitter: 3,
Google: 4,
Microsoft: 5,
Yahoo: 6,
Email: 7,
Alias: 8,
FacebookCustomAudienceId: 9,
getName: function () { return 'CustomerID'; }
},
ReportingService = function () {
var self = this;
this.id = null;
this.event = null;
this.cb = function (id, event) {
self.id = id;
self.event = event;
};
this.reset = function () {
this.id = null;
this.event = null;
};
},
google_trackConversion_mock = function (data) {
window.google_track_data = data;
window.google_track_called = true;
},
google_track_data = null,
google_track_called = false,
reportService = new ReportingService();
before(function () {
mParticle.EventType = EventType;
mParticle.ProductActionType = ProductActionType;
mParticle.IdentityType = IdentityType;
mParticle.generateHash = function (name) {
var hash = 0,
i = 0,
character;
if (!name) {
return null;
}
name = name.toString().toLowerCase();
if (Array.prototype.reduce) {
return name.split("").reduce(function (a, b) { a = ((a << 5) - a) + b.charCodeAt(0); return a & a; }, 0);
}
if (name.length === 0) {
return hash;
}
for (i = 0; i < name.length; i++) {
character = name.charCodeAt(i);
hash = ((hash << 5) - hash) + character;
hash = hash & hash;
}
return hash;
};
window.google_trackConversion = google_trackConversion_mock;
});
function checkCommonProperties(){
window.google_track_data.should.have.property("google_conversion_language", "en");
window.google_track_data.should.have.property("google_conversion_color", "ffffff")
window.google_track_data.should.have.property("google_conversion_format", "3")
window.google_track_data.should.have.property("google_conversion_id", 'AW-123123123')
}
describe('Legacy Conversion Async', function () {
describe("Page View Conversion Label", function () {
before(function () {
var map = [{ "maptype": "EventClassDetails.Id", "value": "pageViewLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageView + "" + 'Homepage') }]
mParticle.forwarder.init({
labels: JSON.stringify(map),
conversionId: 'AW-123123123'
}, reportService.cb, true, true);
});
it('should have conversion labels for page view', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: 'Homepage',
EventDataType: MessageType.PageView,
EventAttributes: {
showcase: 'something',
test: 'thisoneshouldgetmapped',
mp: 'rock'
}
});
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
checkCommonProperties();
window.google_track_data.should.have.property('google_conversion_label', "pageViewLabel123");
done();
});
});
describe("Page Event Conversion Label", function () {
before(function () {
var map = [{ "maptype": "EventClass.Id", "value": "pageEventLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageEvent + "" + EventType.Navigation + 'Homepage') }]
mParticle.forwarder.init({
labels: JSON.stringify(map),
conversionId: 'AW-123123123'
}, reportService.cb, true, true);
});
it('should have conversion labels for page event', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: 'Homepage',
EventDataType: MessageType.PageEvent,
EventCategory: EventType.Navigation,
EventAttributes: {
showcase: 'something',
test: 'thisoneshouldgetmapped',
mp: 'rock'
}
});
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
checkCommonProperties();
window.google_track_data.should.have.property('google_conversion_label', "pageEventLabel123");
done();
});
});
describe("Commerce Event Conversion Label", function () {
before(function () {
var map = [{ "maptype": "EventClassDetails.Id", "value": "commerceLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.Commerce + "" + "eCommerce - Purchase") }]
mParticle.forwarder.init({
labels: JSON.stringify(map),
conversionId: 'AW-123123123'
}, reportService.cb, true, true);
});
it('should have conversion labels for commerce event', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: "eCommerce - Purchase",
EventDataType: MessageType.Commerce,
ProductAction: {
ProductActionType: ProductActionType.Purchase,
ProductList: [
{
Sku: '12345',
Name: 'iPhone 6',
Category: 'Phones',
Brand: 'iPhone',
Variant: '6',
Price: 400,
CouponCode: null,
Quantity: 1
}
],
TransactionId: 123,
Affiliation: 'my-affiliation',
TotalAmount: 450,
TaxAmount: 40,
ShippingAmount: 10,
},
CurrencyCode: "USD"
});
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
checkCommonProperties();
window.google_track_data.should.have.property('google_conversion_label', "commerceLabel123");
done();
});
})
describe("Custom Parameters", function () {
before(function () {
var labels = [
{ "maptype": "EventClass.Id", "value": "pageEventLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageEvent + "" + EventType.Navigation + 'Homepage') },
{ "maptype": "EventClassDetails.Id", "value": "pageViewLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageView + "" + 'Homepage') },
{ "maptype": "EventClassDetails.Id", "value": "commerceLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.Commerce + "" + "eCommerce - Purchase") },
];
var attr = [
{ "maptype": "EventAttributeClass.Id", "value": "mycustomprop", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageEvent + "" + EventType.Navigation + 'attributekey') },
{ "maptype": "EventAttributeClassDetails.Id", "value": "title", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageView + "" + 'title') },
{ "maptype": "EventAttributeClassDetails.Id", "value": "sale", "map": "0", "jsmap": mParticle.generateHash(MessageType.Commerce + "" + 'sale') }
];
mParticle.forwarder.init({
labels: JSON.stringify(labels),
customParameters: JSON.stringify(attr),
conversionId: 'AW-123123123'
}, reportService.cb, true, true);
});
it('should have custom params for page event', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: 'Homepage',
EventDataType: MessageType.PageEvent,
EventCategory: EventType.Navigation,
EventAttributes: {
attributekey: 'attributevalue'
}
});
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
checkCommonProperties();
window.google_track_data.should.have.property('google_custom_params');
Object.keys(window.google_track_data.google_custom_params).length.should.be.equal(1);
window.google_track_data.google_custom_params.should.have.property('mycustomprop', 'attributevalue')
done();
});
it('should have custom params for page view', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: 'Homepage',
EventDataType: MessageType.PageView,
EventAttributes: {
title: 'my page view'
}
});
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
checkCommonProperties();
window.google_track_data.should.have.property('google_custom_params');
Object.keys(window.google_track_data.google_custom_params).length.should.be.equal(1);
window.google_track_data.google_custom_params.should.have.property('title', 'my page view');
done();
});
it('should have custom params for commerce events', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: "eCommerce - Purchase",
EventDataType: MessageType.Commerce,
EventAttributes: {
sale: 'seasonal sale'
},
ProductAction: {
ProductActionType: ProductActionType.Purchase,
ProductList: [
{
Sku: '12345',
Name: 'iPhone 6',
Category: 'Phones',
Brand: 'iPhone',
Variant: '6',
Price: 400,
CouponCode: null,
Quantity: 1
}
],
TransactionId: 123,
Affiliation: 'my-affiliation',
TotalAmount: 450,
TaxAmount: 40,
ShippingAmount: 10,
},
CurrencyCode: "USD"
});
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
checkCommonProperties();
window.google_track_data.should.have.property('google_custom_params');
Object.keys(window.google_track_data.google_custom_params).length.should.be.equal(1);
window.google_track_data.google_custom_params.should.have.property('sale', 'seasonal sale');
done();
});
});
describe("Unmapped conversion labels", function () {
before(function () {
var map = [{ "maptype": "EventClassDetails.Id", "value": "commerceLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.Commerce + "" + "eCommerce - Purchase") }]
mParticle.forwarder.init({
labels: JSON.stringify(map),
conversionId: 'AW-123123123'
}, reportService.cb, true, true);
});
it('should not forward unmapped events', function (done) {
var failMessage = mParticle.forwarder.process({
EventName: 'Something random',
EventDataType: MessageType.Commerce,
EventAttributes: {
showcase: 'something'
}
});
failMessage.should.not.be.null();
failMessage.should.be.containEql("Can't send to forwarder")
done();
});
});
describe("Bad Label Json", function () {
before(function () {
// The ids are calculated based on the events used in the tests below so they must match exactly.
mParticle.forwarder.init({
labels: 'baaaaaddddddd json',
conversionId: 'AW-123123123'
}, reportService.cb, true, true);
});
it('should not forward with bad labels json', function (done) {
var failMessage = mParticle.forwarder.process({
EventName: 'Something random',
EventDataType: MessageType.Commerce,
EventAttributes: {
showcase: 'something'
}
});
failMessage.should.not.be.null();
failMessage.should.be.containEql("Can't send to forwarder")
done();
});
});
describe("Bad Custom Parameters Json", function () {
before(function () {
// The ids are calculated based on the events used in the tests below so they must match exactly.
mParticle.forwarder.init({
customParameters: 'sdpfuhasdflasdjfnsdjfsdjfn really baddd json',
conversionId: 'AW-123123123'
}, reportService.cb, true, true);
});
it('should not forward with bad custom parameters json', function (done) {
var failMessage = mParticle.forwarder.process({
EventName: 'Something random',
EventDataType: MessageType.Commerce,
EventAttributes: {
showcase: 'something'
}
});
failMessage.should.not.be.null();
failMessage.should.be.containEql("Can't send to forwarder")
done();
});
});
});
describe('GTAG Conversions', function () {
describe('Initializing GTAG', function () {
it('should disable gtag and dataLayer by default', function (done) {
var map = [{ "maptype": "EventClassDetails.Id", "value": "pageViewLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageView + "" + 'Homepage') }]
mParticle.forwarder.init({
labels: JSON.stringify(map),
conversionId: 'AW-123123123'
}, reportService.cb, true, true);
(typeof window.gtag === 'undefined').should.be.true();
(typeof window.dataLayer === 'undefined').should.be.true();
done();
});
it('should initialize gtag and dataLayer when user opts in', function (done) {
var map = [{ "maptype": "EventClassDetails.Id", "value": "pageViewLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageView + "" + 'Homepage') }]
mParticle.forwarder.init({
labels: JSON.stringify(map),
enableGtag: 'True',
conversionId: 'AW-123123123'
}, reportService.cb, 1, true);
window.gtag.should.be.ok();
window.dataLayer.should.be.ok();
done();
});
});
describe("Page View Conversion Label", function () {
before(function () {
window.dataLayer = undefined;
var map = [{ "maptype": "EventClassDetails.Id", "value": "pageViewLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageView + "" + 'Homepage') }]
mParticle.forwarder.init({
enableGtag: 'True',
labels: JSON.stringify(map),
conversionId: '123123123'
}, reportService.cb, 1, true);
});
it('should have conversion labels for page view', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: 'Homepage',
EventDataType: MessageType.PageView,
EventAttributes: {
showcase: 'something',
test: 'thisoneshouldgetmapped',
mp: 'rock'
}
});
var expectedDataLayer = [
'event',
'conversion',
{
'send-to': 'AW-123123123/pageViewLabel123'
}
];
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
window.dataLayer.should.match([expectedDataLayer]);
done();
});
});
describe("Page Event Conversion Label", function () {
before(function () {
window.dataLayer = undefined;
var map = [{ "maptype": "EventClass.Id", "value": "pageEventLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageEvent + "" + EventType.Navigation + 'Homepage') }]
mParticle.forwarder.init({
enableGtag: 'True',
labels: JSON.stringify(map),
conversionId: '123123123'
}, reportService.cb, 1, true);
});
it('should have conversion labels for page event', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: 'Homepage',
EventDataType: MessageType.PageEvent,
EventCategory: EventType.Navigation,
EventAttributes: {
showcase: 'something',
test: 'thisoneshouldgetmapped',
mp: 'rock'
}
});
var expectedDataLayer = [
'event',
'conversion',
{
'send-to': 'AW-123123123/pageEventLabel123'
}
];
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
window.dataLayer.should.match([expectedDataLayer]);
done();
});
});
describe("Commerce Event Conversion Label", function () {
before(function () {
window.dataLayer = undefined;
var map = [{ "maptype": "EventClassDetails.Id", "value": "commerceLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.Commerce + "" + "eCommerce - Purchase") }]
mParticle.forwarder.init({
enableGtag: 'True',
labels: JSON.stringify(map),
conversionId: '123123123'
}, reportService.cb, 1, true);
});
it('should have conversion labels for commerce event', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: "eCommerce - Purchase",
EventDataType: MessageType.Commerce,
ProductAction: {
ProductActionType: ProductActionType.Purchase,
ProductList: [
{
Sku: '12345',
Name: 'iPhone 6',
Category: 'Phones',
Brand: 'iPhone',
Variant: '6',
Price: 400,
CouponCode: null,
Quantity: 1
}
],
TransactionId: 123,
Affiliation: 'my-affiliation',
TotalAmount: 450,
TaxAmount: 40,
ShippingAmount: 10,
},
CurrencyCode: "USD"
});
var expectedDataLayer = [
'event',
'conversion',
{
'send-to': 'AW-123123123/commerceLabel123',
order_id: 123,
value: 450,
currency: 'USD',
}
];
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
window.dataLayer.should.match([expectedDataLayer]);
done();
});
})
describe("Custom Parameters", function () {
before(function () {
window.dataLayer = undefined;
var labels = [
{ "maptype": "EventClass.Id", "value": "pageEventLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageEvent + "" + EventType.Navigation + 'Homepage') },
{ "maptype": "EventClassDetails.Id", "value": "pageViewLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageView + "" + 'Homepage') },
{ "maptype": "EventClassDetails.Id", "value": "commerceLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.Commerce + "" + "eCommerce - Purchase") },
];
var attr = [
{ "maptype": "EventAttributeClass.Id", "value": "mycustomprop", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageEvent + "" + EventType.Navigation + 'attributekey') },
{ "maptype": "EventAttributeClassDetails.Id", "value": "title", "map": "0", "jsmap": mParticle.generateHash(MessageType.PageView + "" + 'title') },
{ "maptype": "EventAttributeClassDetails.Id", "value": "sale", "map": "0", "jsmap": mParticle.generateHash(MessageType.Commerce + "" + 'sale') }
]
mParticle.forwarder.init({
enableGtag: 'True',
labels: JSON.stringify(labels),
customParameters: JSON.stringify(attr),
conversionId: '123123123'
}, reportService.cb, 1, true);
});
it('should have custom params for page event', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: 'Homepage',
EventDataType: MessageType.PageEvent,
EventCategory: EventType.Navigation,
EventAttributes: {
attributekey: 'attributevalue'
}
});
var expectedDataLayer = [
'event',
'conversion',
{
'send-to': 'AW-123123123/pageEventLabel123',
mycustomprop: 'attributevalue'
}
];
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
window.dataLayer.should.match([expectedDataLayer]);
done();
});
it('should have custom params for page view', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: 'Homepage',
EventDataType: MessageType.PageView,
EventAttributes: {
title: 'my page title'
}
});
var expectedDataLayer = [
'event',
'conversion',
{
'send-to': 'AW-123123123/pageViewLabel123',
title: 'my page title'
}
];
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
window.dataLayer.should.matchAny(expectedDataLayer);
done();
});
it('should have custom params for commerce event', function (done) {
var successMessage = mParticle.forwarder.process({
EventName: "eCommerce - Purchase",
EventDataType: MessageType.Commerce,
EventAttributes: {
sale: 'seasonal sale'
},
ProductAction: {
ProductActionType: ProductActionType.Purchase,
ProductList: [
{
Sku: '12345',
Name: 'iPhone 6',
Category: 'Phones',
Brand: 'iPhone',
Variant: '6',
Price: 400,
CouponCode: null,
Quantity: 1
}
],
TransactionId: 123,
Affiliation: 'my-affiliation',
TotalAmount: 450,
TaxAmount: 40,
ShippingAmount: 10,
},
CurrencyCode: "USD"
});
var expectedDataLayer = [
'event',
'conversion',
{
'send-to': 'AW-123123123/commerceLabel123',
currency: 'USD',
language: 'en',
remarketing_only: false,
sale: 'seasonal sale',
value: 450
}
];
successMessage.should.not.be.null();
successMessage.should.be.equal("Successfully sent to GoogleAdWords")
window.dataLayer.should.matchAny(expectedDataLayer);
done();
});
});
describe("Unmapped conversion labels", function () {
before(function () {
window.dataLayer = undefined;
var map = [{ "maptype": "EventClassDetails.Id", "value": "commerceLabel123", "map": "0", "jsmap": mParticle.generateHash(MessageType.Commerce + "" + "eCommerce - Purchase") }]
mParticle.forwarder.init({
enableGtag: 'True',
labels: JSON.stringify(map),
conversionId: '123123123'
}, reportService.cb, 1, true);
});
it('should not forward unmapped events', function (done) {
var failMessage = mParticle.forwarder.process({
EventName: 'Something random',
EventDataType: MessageType.Commerce,
EventAttributes: {
showcase: 'something'
}
});
failMessage.should.not.be.null();
failMessage.should.be.containEql("Can't send to forwarder")
window.dataLayer.length.should.eql(0)
done();
});
});
describe("Bad Label Json", function () {
before(function () {
window.dataLayer = undefined;
// The ids are calculated based on the events used in the tests below so they must match exactly.
mParticle.forwarder.init({
enableGtag: 'True',
labels: 'baaaaaddddddd json',
conversionId: '123123123'
}, reportService.cb, 1, true);
});
it('should not forward with bad labels json', function (done) {
var failMessage = mParticle.forwarder.process({
EventName: 'Something random',
EventDataType: MessageType.Commerce,
EventAttributes: {
showcase: 'something'
}
});
failMessage.should.not.be.null();
failMessage.should.be.containEql("Can't send to forwarder")
window.dataLayer.length.should.eql(0)
done();
});
});
describe("Bad Custom Parameters Json", function () {
before(function () {
window.dataLayer = undefined;
// The ids are calculated based on the events used in the tests below so they must match exactly.
mParticle.forwarder.init({
enableGtag: 'True',
customParameters: 'sdpfuhasdflasdjfnsdjfsdjfn really baddd json',
conversionId: '123123123'
}, reportService.cb, 1, true);
});
it('should not forward with bad custom parameters json', function (done) {
var failMessage = mParticle.forwarder.process({
EventName: 'Something random',
EventDataType: MessageType.Commerce,
EventAttributes: {
showcase: 'something'
}
});
failMessage.should.not.be.null();
failMessage.should.be.containEql("Can't send to forwarder")
done();
});
});
});
});
|
import pytest
from django.contrib.auth.models import AnonymousUser
from django.http.response import Http404
from django.test import RequestFactory
from d_react.users.models import User
from d_react.users.tests.factories import UserFactory
from d_react.users.views import (
UserRedirectView,
UserUpdateView,
user_detail_view,
)
pytestmark = pytest.mark.django_db
class TestUserUpdateView:
"""
TODO:
extracting view initialization code as class-scoped fixture
would be great if only pytest-django supported non-function-scoped
fixture db access -- this is a work-in-progress for now:
https://github.com/pytest-dev/pytest-django/pull/258
"""
def test_get_success_url(self, user: User, rf: RequestFactory):
view = UserUpdateView()
request = rf.get("/fake-url/")
request.user = user
view.request = request
assert view.get_success_url() == f"/users/{user.username}/"
def test_get_object(self, user: User, rf: RequestFactory):
view = UserUpdateView()
request = rf.get("/fake-url/")
request.user = user
view.request = request
assert view.get_object() == user
class TestUserRedirectView:
def test_get_redirect_url(self, user: User, rf: RequestFactory):
view = UserRedirectView()
request = rf.get("/fake-url")
request.user = user
view.request = request
assert view.get_redirect_url() == f"/users/{user.username}/"
class TestUserDetailView:
def test_authenticated(self, user: User, rf: RequestFactory):
request = rf.get("/fake-url/")
request.user = UserFactory()
response = user_detail_view(request, username=user.username)
assert response.status_code == 200
def test_not_authenticated(self, user: User, rf: RequestFactory):
request = rf.get("/fake-url/")
request.user = AnonymousUser() # type: ignore
response = user_detail_view(request, username=user.username)
assert response.status_code == 302
assert response.url == f"/accounts/login/?next=/fake-url/"
def test_case_sensitivity(self, rf: RequestFactory):
request = rf.get("/fake-url/")
request.user = UserFactory(username="UserName")
with pytest.raises(Http404):
user_detail_view(request, username="username")
|
( function( $ ) {
/**
* @param $scope The Widget wrapper element as a jQuery element
* @param $ The jQuery alias
*/
var WidgetHelloWorldHandler = function( $scope, $ ) {
console.log( $scope );
};
// Make sure you run this code under Elementor.
$( window ).on( 'elementor/frontend/init', function() {
elementorFrontend.hooks.addAction( 'frontend/element_ready/hello-world.default', WidgetHelloWorldHandler );
} );
} )( jQuery );
|
'use strict';
module.exports = {
name: 'Set Position',
menu: './menu/menu.js',
script: './client/client-bundle.js',
style: './style/style.css'
};
|
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
import React, { forwardRef } from 'react';
import PropTypes from 'prop-types';
var Shuffle = /*#__PURE__*/forwardRef(function (_ref, ref) {
var color = _ref.color,
size = _ref.size,
rest = _objectWithoutProperties(_ref, ["color", "size"]);
return /*#__PURE__*/React.createElement("svg", _extends({
ref: ref,
xmlns: "http://www.w3.org/2000/svg",
viewBox: "0 0 16 16",
width: size,
height: size,
fill: color
}, rest), /*#__PURE__*/React.createElement("path", {
fillRule: "evenodd",
d: "M0 3.5A.5.5 0 0 1 .5 3H1c2.202 0 3.827 1.24 4.874 2.418.49.552.865 1.102 1.126 1.532.26-.43.636-.98 1.126-1.532C9.173 4.24 10.798 3 13 3v1c-1.798 0-3.173 1.01-4.126 2.082A9.624 9.624 0 0 0 7.556 8a9.624 9.624 0 0 0 1.317 1.918C9.828 10.99 11.204 12 13 12v1c-2.202 0-3.827-1.24-4.874-2.418A10.595 10.595 0 0 1 7 9.05c-.26.43-.636.98-1.126 1.532C4.827 11.76 3.202 13 1 13H.5a.5.5 0 0 1 0-1H1c1.798 0 3.173-1.01 4.126-2.082A9.624 9.624 0 0 0 6.444 8a9.624 9.624 0 0 0-1.317-1.918C4.172 5.01 2.796 4 1 4H.5a.5.5 0 0 1-.5-.5z"
}), /*#__PURE__*/React.createElement("path", {
d: "M13 5.466V1.534a.25.25 0 0 1 .41-.192l2.36 1.966c.12.1.12.284 0 .384l-2.36 1.966a.25.25 0 0 1-.41-.192zm0 9v-3.932a.25.25 0 0 1 .41-.192l2.36 1.966c.12.1.12.284 0 .384l-2.36 1.966a.25.25 0 0 1-.41-.192z"
}));
});
Shuffle.propTypes = {
color: PropTypes.string,
size: PropTypes.oneOfType([PropTypes.string, PropTypes.number])
};
Shuffle.defaultProps = {
color: 'currentColor',
size: '1em'
};
export default Shuffle; |
const baseURL = "https://cnodejs.org/api/v1"
module.exports = {
hostUrl: baseURL,
wechat: {
appId: "wx3039b960a183e45c",
appSecret: "66fc3a8087885ddee3bc24d6bbf7ef52",
scope: 'snsapi_userinfo'
},
redis: {
host: '127.0.0.1',
port: '6379',
pass: ''
}
} |
#from __future__ import annotations
from e2cnn import gspaces
from e2cnn import kernels
from .general_r2 import GeneralOnR2
from typing import Union, Tuple, Callable, List
from e2cnn.group import Representation
from e2cnn.group import Group
from e2cnn.group import CyclicGroup
from e2cnn.group import cyclic_group
import numpy as np
__all__ = ["TrivialOnR2"]
class TrivialOnR2(GeneralOnR2):
def __init__(self, fibergroup = None):
r"""
Describes the plane :math:`\R^2` without considering any origin-preserving symmetry.
This is modeled by choosing the trivial fiber group :math:`\{e\}`.
.. note ::
This models the symmetries of conventional *Convolutional Neural Networks* which are not equivariant to
origin preserving transformations such as rotations and reflections.
Args:
fibergroup (Group, optional): use an already existing instance of the symmetry group.
By default, it builds a new instance of the trivial group.
"""
if fibergroup is None:
fibergroup = cyclic_group(1)
else:
assert isinstance(fibergroup, CyclicGroup) and fibergroup.order() == 1
name = "Trivial"
super(TrivialOnR2, self).__init__(fibergroup, name)
def restrict(self, id):
r"""
Build the :class:`~e2cnn.group.GSpace` associated with the subgroup of the current fiber group identified by
the input ``id``.
As the trivial group contains only one element, there are no other subgroups.
The only accepted input value is ``id=1`` and returns this same group.
This functionality is implemented only for consistency with the other G-spaces.
Args:
id (int): the order of the subgroup
Returns:
a tuple containing
- **gspace**: the restricted gspace
- **back_map**: a function mapping an element of the subgroup to itself in the fiber group of the original space
- **subgroup_map**: a function mapping an element of the fiber group of the original space to itself in the subgroup (returns ``None`` if the element is not in the subgroup)
"""
group, mapping, child = self.fibergroup.subgroup(id)
return gspaces.TrivialOnR2(fibergroup=group), mapping, child
def _basis_generator(
self, in_repr, out_repr, rings, sigma, **kwargs,
):
r"""
Method that builds the analytical basis that spans the space of equivariant filters which
are intertwiners between the representations induced from the representation ``in_repr`` and ``out_repr``.
Either ``maximum_frequency`` or ``maximum_offset`` must be set in the keyword arguments.
Args:
in_repr: the input representation
out_repr: the output representation
rings: radii of the rings where to sample the bases
sigma: parameters controlling the width of each ring where the bases are sampled.
Keyword Args:
maximum_frequency (int): the maximum frequency allowed in the basis vectors
maximum_offset (int): the maximum frequencies offset for each basis vector with respect to its base ones
(sum and difference of the frequencies of the input and the output representations)
Returns:
the basis built
"""
maximum_frequency = None
maximum_offset = None
if "maximum_frequency" in kwargs and kwargs["maximum_frequency"] is not None:
maximum_frequency = kwargs["maximum_frequency"]
assert isinstance(maximum_frequency, int) and maximum_frequency >= 0
if "maximum_offset" in kwargs and kwargs["maximum_offset"] is not None:
maximum_offset = kwargs["maximum_offset"]
assert isinstance(maximum_offset, int) and maximum_offset >= 0
assert maximum_frequency is not None or maximum_offset is not None, (
"Error! Either the maximum frequency or the maximum offset for the"
" frequencies must be set"
)
return kernels.kernels_Trivial_act_R2(
in_repr,
out_repr,
rings,
sigma,
maximum_frequency,
max_offset=maximum_offset,
)
def _basespace_action(self, input, element):
assert self.fibergroup.is_element(element)
return input.copy()
def __eq__(self, other):
if isinstance(other, TrivialOnR2):
return self.fibergroup == other.fibergroup
else:
return False
def __hash__(self):
return hash(self.name)
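# Minimal usage sketch (illustrative; requires e2cnn installed and mirrors the
# docstrings above):
#
#   gspace = TrivialOnR2()
#   assert gspace.fibergroup.order() == 1
#   # id=1 is the only valid restriction and yields an equivalent gspace
#   sub_gspace, back_map, subgroup_map = gspace.restrict(1)
#   assert sub_gspace == gspace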
|
import React, { useContext } from "react";
import { AuthContext } from "../../context/Auth/AuthProvider";
import { Navbar } from "../shared/Navbar";
import { Header } from "../shared/Header";
import { AdminHome } from "./AdminHome";
import { BrowserRouter as Router, Route, Switch } from "react-router-dom";
import { AddEmployee } from "./AddEmployee";
import { EditEmployee } from "./EditEmployee";
export const AdminDashboard = () => {
const { logout } = useContext(AuthContext);
return (
<div>
<Navbar signOut={logout} />
<Header />
<Router>
<Switch>
<Route path='/admin' exact component={AdminHome} />
<Route path='/admin/addEmployee'>
<AddEmployee
props={{
name: "",
email: "",
password: "",
role: "",
team: "",
salary: "",
update: false,
}}
/>
</Route>
<Route path='/admin/editEmployee/:id'>
<EditEmployee
props={{
name: "",
email: "",
password: "",
role: "",
team: "",
salary: "",
update: false,
}}
/>
</Route>
</Switch>
</Router>
</div>
);
};
|
import React from 'react'
import PropTypes from 'prop-types'
import { Link, graphql } from 'gatsby'
import Helmet from 'react-helmet'
import { readingTime as readingTimeHelper } from '@tryghost/helpers'
import routing from '../utils/routing'
import { Layout, HeaderPost, AuthorList, PreviewPosts, ImgSharp } from '../components/common'
import { Comments, TableOfContents, Newsletter } from '../components/common'
import { StickyNavContainer } from '../components/common/effects'
import { MetaData } from '../components/common/meta'
import { PostClass } from '../components/common/helpers'
/**
* Single post view (/:slug)
*
* This file renders a single post and loads all the content.
*
*/
const Post = ({ data, location, pageContext }) => {
const post = data.ghostPost
const prevPost = data.prev
const nextPost = data.next
const previewPosts = data.allGhostPost.edges
const readingTime = readingTimeHelper(post)
const featImg = post.feature_image
const fluidFeatureImg = post.featureImageSharp && post.featureImageSharp.childImageSharp && post.featureImageSharp.childImageSharp.fluid
const postClass = PostClass({ tags: post.tags, isFeatured: post.featured, isImage: featImg && true })
const primaryTagCount = pageContext.primaryTagCount
const transformedHtml = post.childHtmlRehype && post.childHtmlRehype.html
const toc = post.childHtmlRehype && post.childHtmlRehype.tableOfContents || []
return (
<>
<MetaData data={data} location={location} type="article"/>
<Helmet>
<style type="text/css">{`${post.codeinjection_styles}`}</style>
</Helmet>
<StickyNavContainer throttle={300} isPost={true} activeClass="nav-post-title-active" render={ sticky => (
<Layout isPost={true} sticky={sticky}
header={<HeaderPost sticky={sticky} title={post.title}/>}
previewPosts={<PreviewPosts posts={previewPosts} primaryTagCount={primaryTagCount} prev={prevPost} next={nextPost}/>}>
<div className="inner">
<article className={`post-full ${postClass}`}>
<header className="post-full-header">
{ post.primary_tag &&
<section className="post-full-tags">
<Link to={routing(post.primary_tag.url, post.primary_tag.slug)}>{post.primary_tag.name}</Link>
</section>
}
<h1 ref={sticky && sticky.anchorRef} className="post-full-title">{post.title}</h1>
{ post.custom_excerpt &&
<p className="post-full-custom-excerpt">{post.custom_excerpt}</p>
}
<div className="post-full-byline">
<section className="post-full-byline-content">
<AuthorList authors={post.authors} isPost={true}/>
<section className="post-full-byline-meta">
<h4 className="author-name">
{post.authors.map((author, i) => (
<Link key={i} to={routing(author.url, author.slug)}>{author.name}</Link>
))}
</h4>
<div className="byline-meta-content">
<time className="byline-meta-date" dateTime={post.published_at}>
{post.published_at_pretty}
</time>
<span className="byline-reading-time"><span className="bull">•</span> {readingTime}</span>
</div>
</section>
</section>
</div>
</header>
<figure className="post-full-image">
<ImgSharp fluidClass="kg-card kg-code-card" fluidImg={fluidFeatureImg} srcImg={featImg} title={post.title}/>
</figure>
<section className="post-full-content">
<TableOfContents toc={toc} url={routing(post.url, post.slug)}/>
<div className="post-content load-external-scripts"
dangerouslySetInnerHTML={{ __html: transformedHtml || post.html }}/>
</section>
<Newsletter />
<Comments id={post.id}/>
</article>
</div>
</Layout>
)}/>
</>
)
}
Post.propTypes = {
data: PropTypes.shape({
ghostPost: PropTypes.shape({
codeinjection_styles: PropTypes.string,
url: PropTypes.string.isRequired,
slug: PropTypes.string.isRequired,
id: PropTypes.string.isRequired,
title: PropTypes.string.isRequired,
html: PropTypes.string.isRequired,
custom_excerpt: PropTypes.string,
feature_image: PropTypes.string,
featured: PropTypes.bool,
tags: PropTypes.arrayOf(
PropTypes.object.isRequired,
),
authors: PropTypes.arrayOf(
PropTypes.object.isRequired,
).isRequired,
primary_tag: PropTypes.shape({
name: PropTypes.string,
slug: PropTypes.string.isRequired,
url: PropTypes.string.isRequired,
}),
published_at: PropTypes.string.isRequired,
published_at_pretty: PropTypes.string.isRequired,
featureImageSharp: PropTypes.object,
childHtmlRehype: PropTypes.shape({
html: PropTypes.string,
tableOfContents: PropTypes.arrayOf(
PropTypes.object,
),
}),
}).isRequired,
prev: PropTypes.object,
next: PropTypes.object,
allGhostPost: PropTypes.object.isRequired,
}).isRequired,
location: PropTypes.object.isRequired,
pageContext: PropTypes.object,
}
export default Post
export const postQuery = graphql`
query($slug: String!, $prev: String!, $next: String!, $tag: String!, $limit: Int!, $skip: Int!) {
ghostPost: ghostPost(slug: { eq: $slug }) {
...GhostPostFields
}
prev: ghostPost(slug: { eq: $prev }) {
...GhostPostFields
}
next: ghostPost(slug: { eq: $next }) {
...GhostPostFields
}
allGhostPost(
sort: { order: DESC, fields: [published_at] },
filter: {slug: { ne: $slug },tags: {elemMatch: {slug: {eq: $tag}}}},
limit: $limit,
skip: $skip
) {
edges {
node {
...GhostPostFields
}
}
}
}
`
|
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
softmax run define
"""
import math
import numpy as np
import akg
from akg.ops.nn import softmax
from akg.utils import kernel_exec as utils
from akg.utils.format_transform import get_bytes
from tensorio import compare_tensor
from base import get_rtol_atol
from gen_random import random_gaussian
def compute_blockdim(shape, axis, dtype):
# strategy: all the shape before reduce axis can be used for multicore
blockdim_limit = 2 if utils.product_is_mini() else 32
blockdim = 1
if isinstance(shape, int):
shape = [shape]
if axis < 0:
axis += len(shape)
if isinstance(shape, (list, tuple)):
for i, sh in enumerate(shape):
if not isinstance(sh, int):
raise TypeError("Shape to compute blockdim must be a list/tuple of integer")
if i == axis:
if sh < 32 / get_bytes(dtype):
# when reduce axis is too small, multicore may not always increase performance
blockdim = 1
break
blockdim = blockdim * sh
else:
raise TypeError("Shape to compute blockdim must be a list/tuple of integer")
return min(blockdim_limit, blockdim)
def softmax_execute(shape, dtype, axis, kernel_name, attrs):
if 'tuning' in attrs.keys():
t = attrs.get("tuning", False)
kernel_name = attrs.get("kernel_name", False)
mod = softmax_compile(shape, dtype, axis, kernel_name, attrs, tuning=t)
if t:
expect, inputs, output = gen_data(axis, dtype, shape)
return mod, expect, (inputs, output)
else:
return mod
else:
mod = softmax_compile(shape, dtype, axis, kernel_name, attrs)
expect, inputs, output = gen_data(axis, dtype, shape)
args = [inputs, output]
if attrs.get("dynamic"):
for i in range(len(shape)):
args.append(shape[i])
blockdim = compute_blockdim(shape, axis, dtype)
args.append(blockdim)
acuOutput = utils.mod_launch(mod, args, outputs=(1,), expect=expect)
rtol, atol = get_rtol_atol("softmax", dtype)
testCaseRes = compare_tensor(acuOutput, expect, rtol=rtol, atol=atol, equal_nan=True)
return inputs, acuOutput, expect, testCaseRes
def gen_data(axis, dtype, shape):
if isinstance(axis, (list, tuple)):
axis = axis[0]
inputs = random_gaussian(shape, miu=1, sigma=0.1).astype(dtype)
inputsSub = inputs - np.max(inputs, axis=axis, keepdims=True)
inputsExp = np.exp(inputsSub)
expect = inputsExp / np.sum(inputsExp, axis=axis, keepdims=True)
outShape = expect.shape
output = np.full(outShape, np.nan, dtype)
return expect, inputs, output
def softmax_compile(shape, dtype, axis, kernel_name, attrs, tuning=False):
if attrs is not None and attrs.get("dynamic"):
var_shape = []
for i in range(len(shape)):
var_shape.append(akg.tvm.var("I" + str(i)))
build_shape = var_shape
else:
build_shape = shape
return utils.op_build_test(softmax.softmax, [build_shape], [dtype], op_attrs=[axis], kernel_name=kernel_name, attrs=attrs, tuning=tuning)
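# Reference sanity check (illustrative): gen_data computes the numpy softmax
# used as the expected output, so every slice along the reduce axis sums to 1.
#
#   expect, inputs, output = gen_data(axis=1, dtype="float16", shape=(4, 16))
#   assert np.allclose(expect.sum(axis=1), 1.0, atol=1e-2)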
|
import ActionTypes from '../actionTypes';
export const inputNumber = value => ({
type: ActionTypes.INPUT_NUMBER,
payload: { value }
});
export const inputOperation = value => ({
type: ActionTypes.INPUT_OPERATION,
payload: { value }
});
export const inputDecimal = () => ({
type: ActionTypes.INPUT_DECIMAL
});
export const inputOperator = operator => ({
type: ActionTypes.INPUT_OPERATOR,
payload: { operator }
});
export const result = operator => ({
type: ActionTypes.RESULT,
payload: { operator }
});
|
from portality.core import app
from portality.lib import httputil
import esprit, json
from portality.api.v2.client import models
DOAJ_RETRY_CODES = [
408, # request timeout
429, # rate limited
502, # bad gateway; retry to see if the gateway can re-establish connection
503, # service unavailable; retry to see if it comes back
504 # gateway timeout; retry to see if it responds next time
]
class DOAJException(Exception):
def __init__(self, msg, *args, **kwargs):
self.message = msg
super(DOAJException, self).__init__(*args, **kwargs)
class DOAJv1API(object):
CLASSMAP = {
"journal" : models.Journal,
"journals" : models.Journal,
}
SEARCH_TYPES = [
"applications",
"articles",
"journals"
]
def __init__(self, api_base_url=None, api_key=None):
self.api_base_url = api_base_url if api_base_url else app.config.get("DOAJ_API2_BASE_URL", "https://doaj.org/api/v2/")
self.api_key = api_key
if not self.api_base_url.endswith("/"):
self.api_base_url += "/"
def doaj_url(self, endpoint=None, type=None, object_id=None, additional_path=None, params=None):
"""
build the api request url
:param endpoint: the endpoint we're sending to. Unless "search" you can probably leave this out
:param type: the type we're making the request to; could be application, applications, articles, journals
:param object_id: the id of the object; only respected if type is given
:param additional_path: additional path elements to add on the end of the constructed url (used mostly for search criteria)
:param params: url parameters as a dict - these will be escaped for you, so just pass the values as they are
:return:
"""
url = self.api_base_url
if endpoint is not None:
url += endpoint + "/"
if type is not None:
url += type
if object_id is not None:
url += "/" + object_id
if additional_path is not None:
if not url.endswith("/"):
url += "/"
url += additional_path
qs = ""
if params is not None:
for k, v in params.items():
if qs != "":
qs += "&"
qs += httputil.quote(k) + "=" + httputil.quote(str(v))
if qs != "":
url += "?" + qs
return url
###################################################
## Methods for handling search queries
def field_search(self, type, field, value, quote=True, page=1, page_size=10, sort_by=None, sort_dir=None):
qb = ANDQueryBuilder()
qb.add_string_field(field, value, quote)
return self.built_search(type, qb, page=page, page_size=page_size, sort_by=sort_by, sort_dir=sort_dir)
def built_search(self, type, query_builder, page=1, page_size=10, sort_by=None, sort_dir=None):
return self.string_search(type, query_builder.make_query(), page=page, page_size=page_size, sort_by=sort_by, sort_dir=sort_dir)
def string_search(self, type, query_string, page=1, page_size=10, sort_by=None, sort_dir=None):
# check this search is against an allowed type
if type not in self.SEARCH_TYPES:
raise DOAJException("Type {x} is not a supported search type".format(x=type))
# construct the url parameters
params = {"page" : page, "pageSize" : page_size}
if sort_by is not None:
sort = sort_by
if sort_dir is not None:
sort += ":" + sort_dir
params["sort"] = sort
url = self.doaj_url("search", type, additional_path=httputil.quote(query_string), params=params)
resp = httputil.get(url, retry_codes=DOAJ_RETRY_CODES)
j = resp.json()
klazz = self.CLASSMAP.get(type)
if klazz is None:
raise DOAJException("Type {x} does not have a class representation in the client".format(x=type))
obs = [klazz(r) for r in j.get("results", [])]
return obs
def field_search_iterator(self, type, field, value, quoted=True, page_size=100, sort_by=None, sort_dir=None):
qb = ANDQueryBuilder()
qb.add_string_field(field, value, quoted)
return self.built_search_iterator(type, qb, page_size=page_size, sort_by=sort_by, sort_dir=sort_dir)
def built_search_iterator(self, type, query_builder, page_size=100, sort_by=None, sort_dir=None):
return self.string_search_iterator(type, query_builder.make_query(), page_size=page_size, sort_by=sort_by, sort_dir=sort_dir)
def string_search_iterator(self, type, query_string, page_size=100, sort_by=None, sort_dir=None):
page = 1
while True:
results = self.string_search(type, query_string, page=page, page_size=page_size, sort_by=sort_by, sort_dir=sort_dir)
if len(results) == 0:
break
for r in results:
yield r
page += 1
###################################################
## methods for article CRUD
def create_article(self, article):
# support either the article object or the dict representation
article_data = article
if isinstance(article, models.Article):
article_data = article.data
url = self.doaj_url(type="articles", params={"api_key" : self.api_key})
resp = httputil.post(url, data=json.dumps(article_data), headers={"Content-Type" : "application/json"}, retry_codes=DOAJ_RETRY_CODES)
if resp.status_code == 400:
raise DOAJException("Bad request against DOAJ API: '{x}'".format(x=resp.json().get("error", "no error provided")))
elif resp.status_code == 403:
raise DOAJException("Forbidden action - your API key was incorrect, or you are trying to add an article with an ISSN you don't own")
elif resp.status_code == 401:
raise DOAJException("Authentication failed, your API key was probably wrong")
j = resp.json()
return j.get("id"), j.get("location")
class ANDQueryBuilder(object):
def __init__(self):
self.fields = []
def add_string_field(self, field, value, quote=True):
self.fields.append((field, value, quote))
def make_query(self):
q = ""
for field, val, quote in self.fields:
if q != "":
q += " AND "
wrap = "\"" if quote else ""
q += field + ":" + wrap + val + wrap
return q
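# Usage sketch (illustrative; a real run needs network access and, for CRUD
# calls, a valid API key):
#
#   api = DOAJv1API()
#   qb = ANDQueryBuilder()
#   qb.add_string_field("bibjson.publisher", "Public Library of Science")
#   for journal in api.built_search_iterator("journals", qb):
#       print(journal.data.get("id"))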
################################################################
# Old DOAJ client for use against the public search endpoint
#
# This is deprecated, and you should use the DOAJv1API above for full
# access to all Search and CRUD activities
class DOAJSearchClient(object):
def __init__(self, search_base=None, query_endpoint=None, search_type=None, search_port=None):
self.search_base = search_base if search_base else app.config.get("DOAJ_BASE_URL", "https://doaj.org")
self.query_endpoint = query_endpoint if query_endpoint else app.config.get("DOAJ_QUERY_ENDPOINT", "query")
self.search_type = search_type if search_type else app.config.get("DOAJ_SEARCH_TYPE", "journal,article")
self.search_port = search_port if search_port else app.config.get("DOAJ_SEARCH_PORT", 80)
# FIXME: we have turned off SSL verification for the moment, for convenience of working with the new
# https-everywhere policy of the DOAJ
self.conn = esprit.raw.Connection(self.search_base, self.query_endpoint, port=self.search_port, verify_ssl=False)
def object_search(self, query):
try:
resp = esprit.raw.search(self.conn, type=self.search_type, query=query, method="GET")
results = esprit.raw.unpack_result(resp)
return results
except Exception:
app.logger.exception("Got exception talking to DOAJ query endpoint")
return None
def journals_by_issns(self, issns):
if not isinstance(issns, list):
issns = [issns]
q = IssnQuery("journal", issns)
return self.object_search(q.query())
class IssnQuery(object):
def __init__(self, type, issn):
self.type = type
self.issn = issn
def query(self):
return {
"query" : {
"bool" : {
"must" : [
{"terms" : {"index.issn.exact" : self.issn}},
{"term" : {"_type" : self.type}}
]
}
}
} |
/** @jsx jsx */
import { jsx, Grid } from 'theme-ui'
import GatsbyLink from './GatsbyLink'
import Container from './Container'
const apps = [
{ id: 'app-index', title: 'Home', path: '/' },
{ id: 'dice-game', title: 'Dice Roll', path: '/dice/' },
{ id: 'lottery-generator', title: 'Lottery Numbers', path: '/lottery/' },
{ id: 'luggage-tracker', title: 'Luggage Tracker', path: '/luggage/' },
{ id: 'pizza-calculator', title: 'Pizza Calculator', path: '/pizza/' },
{ id: 'monsters-rolodex', title: 'Monsters Rolodex', path: '/rolodex/' },
{ id: 'coin-flip', title: 'Flip Coin', path: '/coin/' },
]
export default function NavLinkList() {
return (
<Container variant="layout.aside">
<Grid
sx={{
gridTemplateColumns: `repeat(auto-fit, minmax(128px, 1fr))`,
}}
>
{apps.map(app => (
<GatsbyLink
key={app.id}
to={app.path}
sx={{
placeSelf: 'center',
}}
>
{app.title}
</GatsbyLink>
))}
</Grid>
</Container>
)
}
|
'use strict';
import * as Chart from 'chart.js';
import ArrayElementBase, {defaults} from './base';
Chart.defaults.global.elements.plots = Object.assign({}, defaults);
const Plots = Chart.elements.Plots = ArrayElementBase.extend({
draw() {
const ctx = this._chart.ctx;
const vm = this._view;
const plots = vm.plots;
const vert = this.isVertical();
this._drawItems(vm, plots, ctx, vert);
},
_getBounds() {
const vm = this._view;
const vert = this.isVertical();
const plots = vm.plots;
if (!plots) {
return {
left: 0,
top: 0,
right: 0,
bottom: 0
};
}
if (vert) {
const {x, width} = vm;
const x0 = x - width / 2;
return {
left: x0,
top: plots.max,
right: x0 + width,
bottom: plots.min
};
}
const {y, height} = vm;
const y0 = y - height / 2;
return {
left: plots.min,
top: y0,
right: plots.max,
bottom: y0 + height
};
},
height() {
const vm = this._view;
return vm.base - Math.min(vm.boxplot.q1, vm.boxplot.q3);
},
getArea() {
const vm = this._view;
const iqr = Math.abs(vm.boxplot.q3 - vm.boxplot.q1);
if (this.isVertical()) {
return iqr * vm.width;
}
return iqr * vm.height;
}
});
export default Plots;
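// Usage sketch (illustrative): importing this module registers the element on
// Chart.elements, so custom controller code could instantiate it and query its
// bounding box, e.g.
//   const plots = new Chart.elements.Plots({ _chart: chart, _view: view });
//   const bounds = plots._getBounds();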
|
# Generated by Django 3.1.6 on 2022-05-13 16:56
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('notif', '0002_notif_canceled'),
]
operations = [
migrations.RenameField(
model_name='notif',
old_name='canceled',
new_name='read',
),
]
|
output = {result: $.create(path.normalize($.path))}
|
module.exports.permissionRequired = 0
module.exports.run = async (client, message, args, config, queue) => {
const serverQueue = queue.get(message.guild.id)
if (!serverQueue) return message.channel.send("❌ There is nothing playing right now!")
return message.channel.send(`🎶 Now playing **${serverQueue.songs[0].title}**`)
} |
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createEslintConfig = void 0;
const tslib_1 = require("tslib");
const fs_extra_1 = (0, tslib_1.__importDefault)(require("fs-extra"));
const path_1 = (0, tslib_1.__importDefault)(require("path"));
const utils_1 = require("./utils");
async function createEslintConfig({ pkg, rootDir, writeFile, }) {
const isReactLibrary = Boolean((0, utils_1.getReactVersion)(pkg));
const config = {
extends: [
'react-app',
'prettier/@typescript-eslint',
'plugin:prettier/recommended',
],
settings: {
react: {
// Fix for https://github.com/jaredpalmer/tsdx/issues/279
version: isReactLibrary ? 'detect' : '999.999.999',
},
},
};
if (!writeFile) {
return config;
}
const file = path_1.default.join(rootDir, '.eslintrc.js');
try {
await fs_extra_1.default.writeFile(file, `module.exports = ${JSON.stringify(config, null, 2)}`, { flag: 'wx' });
}
catch (e) {
if (e.code === 'EEXIST') {
console.error('Error trying to save the Eslint configuration file:', `${file} already exists.`);
}
else {
console.error(e);
}
return config;
}
}
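// NOTE: when the .eslintrc.js write succeeds, the function falls through and
// resolves to undefined; config is only returned when writeFile is false or
// when the write fails.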
exports.createEslintConfig = createEslintConfig;
|
webpackHotUpdate("app",{
/***/ "./src/objects/compoundCrate.ts":
/*!**************************************!*\
!*** ./src/objects/compoundCrate.ts ***!
\**************************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
eval("\nObject.defineProperty(exports, \"__esModule\", { value: true });\nvar CompoundCrate = /** @class */ (function () {\n function CompoundCrate(game, crates, image, label) {\n var _this = this;\n this.sprite = game.matter.add.sprite(0, 0, image, 0, { label: label });\n this.crates = crates;\n var crateBodies = [];\n crates.forEach(function (e) {\n crateBodies.push(e.crate);\n e.owner = _this;\n });\n var yOffset = image == 'crate' ? 0.15 : 0;\n var compoundBody = game.matter.body.create({\n parts: crateBodies,\n inertia: Infinity,\n render: { sprite: { xOffset: 0, yOffset: yOffset } },\n isStatic: true,\n ignoreGravity: false,\n });\n this.sprite.setExistingBody(compoundBody);\n // this.sprite.body.render.sprite.xOffset = 0;\n // this.sprite.body.render.sprite.yOffset = -10;\n // this.sprite.setPosition(x, y);\n this.onFire = false;\n }\n return CompoundCrate;\n}());\nexports.default = CompoundCrate;\n\n\n//# sourceURL=webpack:///./src/objects/compoundCrate.ts?");
/***/ }),
/***/ "./src/objects/crate.ts":
/*!******************************!*\
!*** ./src/objects/crate.ts ***!
\******************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
eval("\nObject.defineProperty(exports, \"__esModule\", { value: true });\nvar Crate = /** @class */ (function () {\n // timeIgnite: number;\n function Crate(game, x, y, label, owner) {\n this.crate = game.matter.bodies.rectangle(x, y, 50, 50, {\n render: { sprite: { xOffset: 0, yOffset: 0.15 } },\n label: label,\n inertia: Infinity,\n });\n // this.crate.setRectangle(100, 50, {\n // render: { sprite: { xOffset: 0, yOffset: 0.15 } },\n // label: label,\n // inertia: Infinity,\n // });\n this.crate.bounce = 0;\n this.onFire = false;\n this.neighbors = new Set();\n this.fireSprite = null;\n this.owner = owner;\n console.log(this.crate);\n // this.timeIgnite = null;\n }\n return Crate;\n}());\nexports.default = Crate;\n\n\n//# sourceURL=webpack:///./src/objects/crate.ts?");
/***/ }),
/***/ "./src/scenes/game-scene.ts":
/*!**********************************!*\
!*** ./src/scenes/game-scene.ts ***!
\**********************************/
/*! no static exports found */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
eval("\nvar __extends = (this && this.__extends) || (function () {\n var extendStatics = function (d, b) {\n extendStatics = Object.setPrototypeOf ||\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\n function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };\n return extendStatics(d, b);\n };\n return function (d, b) {\n extendStatics(d, b);\n function __() { this.constructor = d; }\n d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());\n };\n})();\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.GameScene = void 0;\nvar player_1 = __webpack_require__(/*! ../objects/player */ \"./src/objects/player.ts\");\nvar crate_1 = __webpack_require__(/*! ../objects/crate */ \"./src/objects/crate.ts\");\nvar compoundCrate_1 = __webpack_require__(/*! ../objects/compoundCrate */ \"./src/objects/compoundCrate.ts\");\nvar sceneConfig = {\n active: false,\n visible: false,\n key: 'Game',\n};\nvar player;\nvar platforms;\nvar cursors;\nvar wasd;\n// interface Crate {\n// crate: any;\n// onFire: boolean;\n// neighbors: Set<Crate>;\n// fireSprite: Phaser.GameObjects.Sprite;\n// }\nvar crates = {};\nvar compounds = {};\nvar fire;\nvar fireActive = false;\nvar house;\nvar TILE_SIZE = 50;\nvar xTiles = Math.floor(800 / TILE_SIZE);\nvar yTiles = Math.floor(600 / TILE_SIZE);\nvar bat;\nvar tiles = [];\nfor (var i = 0; i < xTiles; i++) {\n var row = [];\n for (var j = 0; j < yTiles; j++) {\n row.push(new Set());\n }\n tiles.push(row);\n}\nfunction getTile(x, y) {\n return [Math.floor(x / 50), Math.floor(y / 50)];\n}\nfunction clearTiles() {\n for (var i = 0; i < tiles.length; i++) {\n for (var j = 0; j < tiles[0].length; j++) {\n tiles[i][j].clear();\n }\n }\n}\nfunction igniteCompound(game, curr, destroyFire) {\n if (destroyFire) {\n fire.destroy();\n }\n console.log(curr);\n if (curr.onFire) {\n return;\n }\n curr.onFire = true;\n curr.crates.forEach(function (e) {\n igniteCrate(game, e);\n });\n game.time.delayedCall(1000, function () {\n if (curr.sprite.active) {\n curr.sprite.destroy();\n }\n });\n}\nfunction igniteCrate(game, currCrate) {\n if (currCrate.onFire) {\n return;\n }\n currCrate.onFire = true;\n currCrate.fireSprite = game.add.sprite(currCrate.crate.position.x, currCrate.crate.position.y - 10, 'squareFire');\n currCrate.fireSprite.anims.play('squareFire', false);\n currCrate.fireSprite.alpha = 0.7;\n game.time.delayedCall(1000, function () {\n if (currCrate.fireSprite.active) {\n currCrate.fireSprite.destroy();\n }\n var fireDisappear = game.add.sprite(currCrate.crate.position.x, currCrate.crate.position.y - 10, 'fireDisappear');\n fireDisappear.anims.play('fireDisappear', false, true);\n fireDisappear.once('animationcomplete', function () {\n fireDisappear.destroy();\n });\n var pos = getTile(currCrate.crate.position.x, currCrate.crate.position.y);\n var x = pos[0];\n var y = pos[1];\n var candidates = [\n [x - 1, y],\n [x + 1, y],\n [x, y + 1],\n [x, y - 1],\n ];\n console.log(candidates);\n console.log(compounds['house']);\n console.log(tiles);\n for (var i = 0; i < candidates.length; i++) {\n var x_1 = candidates[i][0];\n var y_1 = candidates[i][1];\n if (x_1 >= 0 && x_1 < xTiles && y_1 >= 0 && y_1 < yTiles) {\n tiles[x_1][y_1].forEach(function (e) {\n console.log('triggered');\n igniteCompound(game, e.owner, false);\n });\n }\n }\n // if (currCrate.crate) {\n // currCrate.crate.destroy();\n // }\n });\n}\nvar GameScene = /** @class */ (function (_super) {\n 
__extends(GameScene, _super);\n function GameScene() {\n var _this = _super.call(this, sceneConfig) || this;\n _this.speed = 200;\n return _this;\n }\n GameScene.prototype.preload = function () {\n this.load.image('background', 'assets/backgrounds/TutorialBackground1.png');\n this.load.image('ground', 'assets/squares/platform.png');\n this.load.image('house', 'assets/squares/house.png');\n this.load.spritesheet('fireball', 'assets/fireball.png', { frameWidth: 38, frameHeight: 19 });\n this.load.spritesheet('dude', 'assets/dude.png', { frameWidth: 32, frameHeight: 48 });\n this.load.spritesheet('crate', 'assets/squares/crate.png', { frameWidth: 79, frameHeight: 80 });\n this.load.spritesheet('squareFire', 'assets/squares/squareFire.png', { frameWidth: 79, frameHeight: 80 });\n this.load.spritesheet('fireDisappear', 'assets/squares/fireDisappear.png', { frameWidth: 84, frameHeight: 133 });\n };\n GameScene.prototype.create = function () {\n this.add.image(400, 300, 'background');\n this.matter.world.setBounds(0, 0, 800, 600, 32, true, true, false, true);\n // platforms = this.matter.add.sprite(400, 568, 'ground');\n player = new player_1.default(this);\n this.anims.create({\n key: 'cratepic',\n frames: this.anims.generateFrameNumbers('crate', { start: 0, end: 0 }),\n frameRate: 10,\n repeat: -1,\n });\n this.anims.create({\n key: 'squareFire',\n frames: this.anims.generateFrameNumbers('squareFire', { start: 0, end: 5 }),\n frameRate: 30,\n repeat: -1,\n });\n this.anims.create({\n key: 'fireball',\n frames: this.anims.generateFrameNumbers('fireball', { start: 0, end: 1 }),\n frameRate: 10,\n repeat: -1,\n });\n this.anims.create({\n key: 'fireDisappear',\n frames: this.anims.generateFrameNumbers('fireDisappear', { start: 0, end: 39 }),\n frameRate: 60,\n });\n cursors = this.input.keyboard.createCursorKeys();\n wasd = this.input.keyboard.addKeys('W,S,A,D');\n for (var i = 0; i < 5; i += 1) {\n var label = 'crate_' + i;\n crates[label] = new crate_1.default(this, 400, 550 - i * 50, label, null);\n compounds[label] = new compoundCrate_1.default(this, new Set([crates[label]]), 'crate', label);\n // hash instead, tileize\n }\n //demo house\n var tempowner = [];\n for (var i = 0; i < 3; i += 1) {\n for (var j = 0; j < 4; j += 1) {\n var label = 'cratehouse_' + i + j;\n crates[label] = new crate_1.default(this, 500 + i * 50, 550 - j * 50, label, null);\n tempowner.push(crates[label]);\n }\n }\n compounds['house'] = new compoundCrate_1.default(this, new Set(tempowner), 'house', 'house');\n var game = this;\n this.matter.world.on('collisionstart', function (event) {\n // Loop through all of the collision pairs\n Object.keys(crates).forEach(function (key) {\n var crate = crates[key];\n crate.neighbors.clear();\n });\n var pairs = event.pairs;\n for (var i = 0; i < pairs.length; i++) {\n var bodyA = pairs[i].bodyA;\n var bodyB = pairs[i].bodyB;\n // sensor collisions\n if (pairs[i].isSensor) {\n var playerBody = void 0;\n var otherBody = void 0;\n if (bodyA.isSensor) {\n playerBody = bodyA;\n otherBody = bodyB;\n }\n else if (bodyB.isSensor) {\n playerBody = bodyB;\n otherBody = bodyB;\n }\n if (playerBody.label === 'groundSensor' && otherBody.label != 'fire') {\n player.touchingGround = true;\n }\n }\n // fire collision\n if (bodyA.label === 'fire' && bodyB.label.includes('crate')) {\n igniteCompound(game, crates[bodyB.label].owner, true);\n }\n if (bodyB.label === 'fire' && bodyA.label.includes('crate')) {\n igniteCompound(game, crates[bodyA.label].owner, true);\n }\n if (bodyA.label === 'fire' && 
bodyB.label.includes('house')) {\n // Object.keys(house.crates).forEach((key) => {\n // igniteCrate(game, house.crates[key], true);\n // });\n }\n if (bodyB.label === 'fire' && bodyA.label.includes('house')) {\n // igniteHouse(game, house, true);\n }\n // if (bodyA.label.includes('crate') && bodyB.label.includes('crate')) {\n // addNeighbor(crates[bodyA.label], crates[bodyB.label]);\n // }\n }\n });\n // this.matter.world.on('collisionactive', function (event) {\n // // Loop through all of the collision pairs\n // const pairs = event.pairs;\n // for (let i = 0; i < pairs.length; i++) {\n // const bodyA = pairs[i].bodyA;\n // const bodyB = pairs[i].bodyB;\n // if (bodyA.label.includes('crate') && bodyB.label.includes('crate')) {\n // addNeighbor(crates[bodyA.label], crates[bodyB.label]);\n // }\n // }\n // });\n };\n GameScene.prototype.update = function () {\n clearTiles();\n // add to tiles\n Object.keys(crates).forEach(function (key) {\n var curr = crates[key];\n var pos = getTile(curr.crate.position.x, curr.crate.position.y);\n tiles[pos[0]][pos[1]].add(curr);\n });\n if (wasd.A.isDown) {\n player.moveLeft();\n }\n else if (wasd.D.isDown) {\n player.moveRight();\n }\n else {\n player.turn();\n }\n if (wasd.W.isDown && player.touchingGround) {\n player.jump();\n }\n if ((cursors.right.isDown || cursors.down.isDown || cursors.up.isDown || cursors.left.isDown) &&\n fireActive === false) {\n fire = this.matter.add.sprite(player.getX(), player.getY(), 'fireball', null, {\n isSensor: true,\n label: 'fire',\n });\n if (cursors.left.isDown) {\n fire.setRotation(Math.PI);\n }\n if (cursors.down.isDown) {\n fire.setRotation(Math.PI / 2);\n }\n if (cursors.up.isDown) {\n fire.setRotation((3 * Math.PI) / 2);\n }\n fire.anims.play('fireball', true);\n fire.setIgnoreGravity(true);\n var xDir = cursors.right.isDown ? 1 : -1;\n var xVel = cursors.right.isDown || cursors.left.isDown ? 10 : 0;\n fire.setVelocityX(xVel * xDir);\n var yDir = cursors.down.isDown ? 1 : -1;\n var yVel = cursors.down.isDown || cursors.up.isDown ? 10 : 0;\n fire.setVelocityY(yVel * yDir);\n fireActive = true;\n setTimeout(function () {\n if (fireActive) {\n fireActive = false;\n fire.destroy();\n }\n }, 500);\n }\n Object.keys(crates).forEach(function (key) {\n var crate = crates[key];\n if (crate.fireSprite != null) {\n crate.fireSprite.x = crate.crate.position.x;\n crate.fireSprite.y = crate.crate.position.y - 10;\n }\n });\n };\n return GameScene;\n}(Phaser.Scene));\nexports.GameScene = GameScene;\n\n\n//# sourceURL=webpack:///./src/scenes/game-scene.ts?");
/***/ })
}) |
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
from pkg_resources import parse_version
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if parse_version(kaitaistruct.__version__) < parse_version('0.9'):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Imported2(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self._read()
def _read(self):
self.one = self._io.read_u1()
|
import React, { Component } from 'react'
import PropTypes from 'prop-types'
import AccountListItem from '../../account-list-item/'
export default class SendDropdownList extends Component {
static propTypes = {
accounts: PropTypes.array,
closeDropdown: PropTypes.func,
onSelect: PropTypes.func,
activeAddress: PropTypes.string,
};
getListItemIcon (accountAddress, activeAddress) {
return accountAddress === activeAddress
? <i className={`fa fa-check fa-lg`} style={ { color: '#02c9b1' } }/>
: null
}
render () {
const {
accounts,
closeDropdown,
onSelect,
activeAddress,
} = this.props
return (<div>
<div
className="send-v2__from-dropdown__close-area"
onClick={() => closeDropdown()}
/>
<div className="send-v2__from-dropdown__list">
{accounts.map((account, index) => <AccountListItem
account={account}
className="account-list-item__dropdown"
handleClick={() => {
onSelect(account)
closeDropdown()
}}
icon={this.getListItemIcon(account.address, activeAddress)}
key={`send-dropdown-account-#${index}`}
/>)}
</div>
</div>)
}
}
SendDropdownList.contextTypes = {
t: PropTypes.func,
}
|
import { Grouping } from './groupingTryOut.js';
export class IndividualsGrouper {
constructor(interactiveCanvas, individuals, groupingLayouter){
this.interactiveCanvas = interactiveCanvas;
// Set up groupingLayouter
this.groupingLayouter = groupingLayouter;
this.groupingLayouter.setParentIdKey("parentId");
this.groupingLayouter.setIdKey("id");
this.rawIndividuals = individuals;
this.groupingKeys = "root";
this.headGrouping = new Grouping(
this,
this.groupingKey,
[],
[],
[],
[],
this.rawIndividuals.slice(),
this.groupingLayouter);
this.headGrouping.initializeRootGrouping();
}
getGroupingStructure(){
return this.headGrouping.getGroupingStructure();
}
addGrouping(groupingKey){
this.headGrouping.addGrouping(groupingKey);
this.interactiveCanvas.updateNodes(this.getGroupingStructure(), groupingKey)
this.interactiveCanvas.animateNodes();
}
removeGrouping(groupingKey){
this.headGrouping.removeGrouping(groupingKey);
this.interactiveCanvas.updateNodes(this.getGroupingStructure())
this.interactiveCanvas.animateNodes();
}
getUniqueValuesForKey(key){
return this.getUniqueValues(this.rawIndividuals, key);
}
getUniqueValues(individuals, key){
if(Array.isArray(individuals[0][key])){
return this.getUniqueValuesForArrayGrouping(individuals, key)
} else {
            return [...new Set(individuals.map((individual) => individual[key]))]
}
}
getUniqueValuesForArrayGrouping(individuals, key){
let values = new Set();
individuals.forEach((individual) => {
let individualValues = this.getSortedValue(individual[key]);
if(individualValues !== "missing"){
individualValues.forEach((value) => {
values.add(value);
})
}
});
let permutations = this.getPermutationsForValues(Array.from(values));
permutations.push("missing");
return permutations;
}
getSortedValue(value){
if(Array.isArray(value)) return value.sort();
return value;
}
getPermutationsForValues(array){
let permutations = [];
let intermediate = [];
array.forEach((entry) => {
intermediate.push(entry);
if(intermediate.length > 1) {
permutations.push([entry])
}
permutations.push([...intermediate]);
})
return permutations.map(permutation => permutation.toString());
}
} |
# -*- coding: utf-8 -*-
'''
Initialization entry point prepared specifically for the wapi application
'''
'''
Unified request interception and unified error handling
'''
from api.interceptors.Auth import *
from api.interceptors.ErrorHandler import *
'''
Blueprint feature: blueprint configuration for all URLs
'''
from api.controllers.route import *
|
game.PlayScreen = me.ScreenObject.extend({
init: function() {
me.audio.play("theme", true);
// lower audio volume on firefox browser
var vol = me.device.ua.contains("Firefox") ? 0.3 : 0.5;
me.audio.setVolume(vol);
this.parent(this);
},
onResetEvent: function() {
me.audio.stop("theme");
if (!game.data.muted){
me.audio.play("theme", true);
}
me.input.bindKey(me.input.KEY.SPACE, "fly", true);
game.data.score = 0;
game.data.steps = 0;
game.data.start = false;
game.data.newHiscore = false;
me.game.world.addChild(new BackgroundLayer('bg', 1));
this.ground = new TheGround();
me.game.world.addChild(this.ground, 11);
this.HUD = new game.HUD.Container();
me.game.world.addChild(this.HUD);
this.bird = me.pool.pull("clumsy", 60, me.game.viewport.height/2 - 100);
me.game.world.addChild(this.bird, 10);
//inputs
me.input.bindPointer(me.input.mouse.LEFT, me.input.KEY.SPACE);
this.getReady = new me.SpriteObject(
me.video.getWidth()/2 - 200,
me.video.getHeight()/2 - 100,
me.loader.getImage('getready')
);
me.game.world.addChild(this.getReady, 11);
var fadeOut = new me.Tween(this.getReady).to({alpha: 0}, 2000)
.easing(me.Tween.Easing.Linear.None)
.onComplete(function() {
game.data.start = true;
me.game.world.addChild(new PipeGenerator(), 0);
}).start();
},
onDestroyEvent: function() {
me.audio.stopTrack('theme');
// free the stored instance
this.HUD = null;
this.bird = null;
me.input.unbindKey(me.input.KEY.SPACE);
me.input.unbindPointer(me.input.mouse.LEFT);
}
});
|
// Until we decide we want to use a real cache like Redis,
// we'll just keep users in an array and look there first.
let userCache = [];
const cache = {
getUser: function(login) {
return userCache.find(f => f.login === login);
},
storeUser: function(user) {
userCache = userCache.filter(f => f.login !== user.login);
userCache.push(user);
}
};
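// Example (a sketch): storeUser de-duplicates by login, so the cache keeps at
// most one record per user:
//   cache.storeUser({ login: 'octocat', name: 'Mona' });
//   cache.getUser('octocat'); // -> { login: 'octocat', name: 'Mona' }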
module.exports = cache;
|
// Global vars
var pymChild = null;
var isMobile = false;
var skipLabels = [ 'Group', 'key', 'values' ];
/*
* Initialize the graphic.
*/
var onWindowLoaded = function() {
if (Modernizr.svg) {
formatData();
pymChild = new pym.Child({
renderCallback: render
});
} else {
pymChild = new pym.Child({});
}
pymChild.onMessage('on-screen', function(bucket) {
ANALYTICS.trackEvent('on-screen', bucket);
});
pymChild.onMessage('scroll-depth', function(data) {
data = JSON.parse(data);
ANALYTICS.trackEvent('scroll-depth', data.percent, data.seconds);
});
}
/*
* Format graphic data for processing by D3.
*/
var formatData = function() {
DATA.forEach(function(d) {
d['key'] = d['Group'];
d['values'] = [];
_.each(d, function(v, k) {
if (_.contains(skipLabels, k)) {
return;
}
d['values'].push({ 'label': k, 'amt': +v });
delete d[k];
});
delete d['Group'];
});
}
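/*
 * Example (illustrative): a DATA row of { Group: 'Adults', Yes: '40', No: '60' }
 * becomes { key: 'Adults', values: [{ label: 'Yes', amt: 40 },
 * { label: 'No', amt: 60 }] } after formatData() runs.
 */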
/*
* Render the graphic(s). Called by pym with the container width.
*/
var render = function(containerWidth) {
if (!containerWidth) {
containerWidth = DEFAULT_WIDTH;
}
if (containerWidth <= MOBILE_THRESHOLD) {
isMobile = true;
} else {
isMobile = false;
}
// Render the chart!
renderGroupedBarChart({
container: '#grouped-bar-chart',
width: containerWidth,
data: DATA
});
// Update iframe
if (pymChild) {
pymChild.sendHeight();
}
}
/*
* Render a bar chart.
*/
var renderGroupedBarChart = function(config) {
/*
* Setup chart container.
*/
var labelColumn = 'label';
var valueColumn = 'amt';
var numGroups = config['data'].length;
var numGroupBars = config['data'][0]['values'].length;
var barHeight = 25;
var barGapInner = 2;
var barGap = 10;
var groupHeight = (barHeight * numGroupBars) + (barGapInner * (numGroupBars - 1))
var labelWidth = 120;
var labelMargin = 6;
var valueGap = 6;
var margins = {
top: 0,
right: 15,
bottom: 20,
left: (labelWidth + labelMargin)
};
var ticksX = 7;
var roundTicksFactor = 5;
// Calculate actual chart dimensions
var chartWidth = config['width'] - margins['left'] - margins['right'];
var chartHeight = (((((barHeight + barGapInner) * numGroupBars) - barGapInner) + barGap) * numGroups) - barGap + barGapInner;
// Clear existing graphic (for redraw)
var containerElement = d3.select(config['container']);
containerElement.html('');
/*
* Create D3 scale objects.
*/
var min = d3.min(config['data'], function(d) {
return d3.min(d['values'], function(v) {
return Math.floor(v[valueColumn] / roundTicksFactor) * roundTicksFactor;
});
});
if (min > 0) {
min = 0;
}
var max = d3.max(config['data'], function(d) {
return d3.max(d['values'], function(v) {
return Math.ceil(v[valueColumn] / roundTicksFactor) * roundTicksFactor;
});
});
var xScale = d3.scale.linear()
.domain([min, max])
.range([0, chartWidth]);
var yScale = d3.scale.linear()
.range([chartHeight, 0]);
var colorScale = d3.scale.ordinal()
.domain(d3.keys(config['data'][0]['values']).filter(function(d) {
if (!_.contains(skipLabels, d)) {
return d;
}
}))
        .range([COLORS['orange3'], '#7e7e7e', '#bbb']);
/*
* Render a color legend.
*/
var legend = containerElement.append('ul')
.attr('class', 'key')
.selectAll('g')
.data(config['data'][0]['values'])
.enter().append('li')
.attr('class', function(d, i) {
return 'key-item key-' + i + ' ' + classify(d[labelColumn]);
});
legend.append('b')
.style('background-color', function(d) {
return colorScale(d[labelColumn]);
});
legend.append('label')
.text(function(d) {
return d[labelColumn];
});
/*
* Create the root SVG element.
*/
var chartWrapper = containerElement.append('div')
.attr('class', 'graphic-wrapper');
var chartElement = chartWrapper.append('svg')
.attr('width', chartWidth + margins['left'] + margins['right'])
.attr('height', chartHeight + margins['top'] + margins['bottom'])
.append('g')
.attr('transform', 'translate(' + margins['left'] + ',' + margins['top'] + ')');
/*
* Create D3 axes.
*/
var xAxis = d3.svg.axis()
.scale(xScale)
.orient('bottom')
.ticks(ticksX)
.tickFormat(function(d) {
return d.toFixed(0) + '%';
});
/*
* Render axes to chart.
*/
chartElement.append('g')
.attr('class', 'x axis')
.attr('transform', makeTranslate(0, chartHeight))
.call(xAxis);
/*
* Render grid to chart.
*/
var xAxisGrid = function() {
return xAxis;
};
chartElement.append('g')
.attr('class', 'x grid')
.attr('transform', makeTranslate(0, chartHeight))
.call(xAxisGrid()
.tickSize(-chartHeight, 0, 0)
.tickFormat('')
);
/*
* Render bars to chart.
*/
var barGroups = chartElement.selectAll('.bars')
.data(config['data'])
.enter()
.append('g')
.attr('class', 'g bars')
.attr('transform', function(d, i) {
if (i == 0) {
return makeTranslate(0, 0);
}
return makeTranslate(0, (groupHeight + barGap) * i);
});
barGroups.selectAll('rect')
.data(function(d) {
return d['values'];
})
.enter()
.append('rect')
.attr('x', function(d) {
if (d[valueColumn] >= 0) {
return xScale(0);
}
return xScale(d[valueColumn]);
})
.attr('y', function(d, i) {
if (i == 0) {
return 0;
}
return (barHeight * i) + (barGapInner * i);
})
.attr('width', function(d) {
return Math.abs(xScale(0) - xScale(d[valueColumn]));
})
.attr('height', barHeight)
.style('fill', function(d) {
return colorScale(d[labelColumn]);
})
.attr('class', function(d) {
return 'y-' + d[labelColumn];
});
/*
* Render 0-line.
*/
if (min < 0) {
chartElement.append('line')
.attr('class', 'zero-line')
.attr('x1', xScale(0))
.attr('x2', xScale(0))
.attr('y1', 0)
.attr('y2', chartHeight);
}
/*
* Render bar labels.
*/
chartWrapper.append('ul')
.attr('class', 'labels')
.attr('style', formatStyle({
'width': labelWidth + 'px',
'top': margins['top'] + 'px',
'left': '0'
}))
.selectAll('li')
.data(config['data'])
.enter()
.append('li')
.attr('style', function(d,i) {
var top = (groupHeight + barGap) * i;
if (i == 0) {
top = 0;
}
return formatStyle({
'width': (labelWidth - 10) + 'px',
'height': barHeight + 'px',
'left': '0px',
                'top': top + 'px'
});
})
.attr('class', function(d,i) {
return classify(d['key']);
})
.append('span')
.text(function(d) {
return d['key']
});
/*
* Render bar values.
*/
barGroups.append('g')
.attr('class', 'value')
.selectAll('text')
.data(function(d) {
return d['values'];
})
.enter()
.append('text')
.text(function(d) {
var v = d[valueColumn].toFixed(0);
if (d[valueColumn] > 0 && v == 0) {
v = '<1';
}
return v + '%';
})
.attr('x', function(d) {
return xScale(d[valueColumn]);
})
.attr('y', function(d, i) {
if (i == 0) {
return 0;
}
return (barHeight * i) + barGapInner;
})
.attr('dx', function(d) {
var xStart = xScale(d[valueColumn]);
var textWidth = this.getComputedTextLength()
// Negative case
if (d[valueColumn] < 0) {
var outsideOffset = -(valueGap + textWidth);
if (xStart + outsideOffset < 0) {
d3.select(this).classed('in', true)
return valueGap;
} else {
d3.select(this).classed('out', true)
return outsideOffset;
}
// Positive case
} else {
if (xStart + valueGap + textWidth > chartWidth) {
d3.select(this).classed('in', true)
return -(valueGap + textWidth);
} else {
d3.select(this).classed('out', true)
return valueGap;
}
}
})
.attr('dy', (barHeight / 2) + 4);
}
/*
* Initially load the graphic
* (NB: Use window.load to ensure all images have loaded)
*/
window.onload = onWindowLoaded;
|
jQuery(function($){
$.supersized({
// Functionality
slide_interval : 4000, // Length between transitions
transition : 1, // 0-None, 1-Fade, 2-Slide Top, 3-Slide Right, 4-Slide Bottom, 5-Slide Left, 6-Carousel Right, 7-Carousel Left
transition_speed : 1000, // Speed of transition
performance : 1, // 0-Normal, 1-Hybrid speed/quality, 2-Optimizes image quality, 3-Optimizes transition speed // (Only works for Firefox/IE, not Webkit)
// Size & Position
min_width : 0, // Min width allowed (in pixels)
min_height : 0, // Min height allowed (in pixels)
vertical_center : 1, // Vertically center background
horizontal_center : 1, // Horizontally center background
fit_always : 0, // Image will never exceed browser width or height (Ignores min. dimensions)
fit_portrait : 1, // Portrait images will not exceed browser height
fit_landscape : 0, // Landscape images will not exceed browser width
// Components
slide_links : 'blank', // Individual links for each slide (Options: false, 'num', 'name', 'blank')
slides : [ // Slideshow Images
{image : 'img/backgrounds/ball.jpg'},
{image : 'img/backgrounds/pasto.jpg'}
//{image : 'img/backgrounds/3.jpg'}
]
});
});
|
import firebase from 'firebase';
const config = {
apiKey: "AIzaSyByoBI7xpOAB5JsiVx-wOjVt7FstnD1Oyk",
authDomain: "comedero-3f0f9.firebaseapp.com",
databaseURL: "https://comedero-3f0f9.firebaseio.com",
storageBucket: "comedero-3f0f9.appspot.com",
messagingSenderId: "924672332128"
};
export const firebaseApp = firebase.initializeApp(config)
export const firebaseDb = firebase.database();
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @format
* @flow
*/
export type CapturedError = {
+componentName: ?string,
+componentStack: string,
+error: mixed,
+errorBoundary: ?{},
+errorBoundaryFound: boolean,
+errorBoundaryName: string | null,
+willRetry: boolean,
};
import type {ExtendedError} from './Devtools/parseErrorStack';
import {handleException, SyntheticError} from './ExceptionsManager';
/**
* Intercept lifecycle errors and ensure they are shown with the correct stack
* trace within the native redbox component.
*/
function showErrorDialog(capturedError: CapturedError): boolean {
const {componentStack, error} = capturedError;
let errorToHandle;
// Typically Errors are thrown but eg strings or null can be thrown as well.
if (error instanceof Error) {
errorToHandle = (error: ExtendedError);
} else if (typeof error === 'string') {
errorToHandle = (new SyntheticError(error): ExtendedError);
} else {
errorToHandle = (new SyntheticError('Unspecified error'): ExtendedError);
}
try {
errorToHandle.componentStack = componentStack;
} catch (e) {}
handleException(errorToHandle, false);
// Return false here to prevent ReactFiberErrorLogger default behavior of
// logging error details to console.error. Calls to console.error are
// automatically routed to the native redbox controller, which we've already
// done above by calling ExceptionsManager.
return false;
}
module.exports = {showErrorDialog};
|
goog.provide('os.ui.modal');
/**
* @param {string} target A selector used to identify the parent for the modal
* @param {string} markup The markup to compile
*/
os.ui.modal.create = function(target, markup) {
var compile = /** @type {!angular.$compile} */ (os.ui.injector.get('$compile'));
var scope = /** @type {!angular.Scope} */ (os.ui.injector.get('$rootScope').$new());
$(target).append(/** @type {Element} */ (compile(markup)(scope)[0]));
};
/**
* Opens a modal
* @param {!angular.JQLite} el
* @param {Object<string, *>=} opt_options
*/
os.ui.modal.open = function(el, opt_options) {
el.modal(opt_options).on('hide', function() {
// let the animation complete
setTimeout(function() {
// and then remove it
el.scope().$destroy();
el.remove();
}, 1500);
});
};
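// Example usage (a sketch; assumes a Bootstrap-style modal in the markup):
//   os.ui.modal.create('#main-page', '<div class="modal">...</div>');
//   os.ui.modal.open($('#main-page .modal'), {'show': true});
// The compiled scope is destroyed ~1.5s after the modal hides.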
|
export function getStorageInfo(ctx) {
ctx.username = sessionStorage.getItem("username");
ctx.fullName = sessionStorage.getItem("fullName");
ctx.userId = sessionStorage.getItem("userId");
ctx.loggedIn = sessionStorage.getItem("authtoken") !== null;
}
export function getPartials() {
return {
header: "./templates/common/header.hbs",
footer: "./templates/common/footer.hbs"
}
}
export default {
getStorageInfo,
getPartials
}; |
const axios = require('axios');
exports.homeRoutes = (req, res) => {
// Make a get request to /api/items
axios.get('http://localhost:3000/api/items')
.then(function(response){
res.render('index', { items : response.data });
})
.catch(err =>{
res.send(err);
})
}
exports.add_item = (req, res) =>{
res.render('add_item');
}
exports.update_item = (req, res) =>{
axios.get('http://localhost:3000/api/items', { params : { id : req.query.id }})
.then(function(itemdata){
res.render("update_item", { item : itemdata.data})
})
.catch(err =>{
res.send(err);
})
} |
(this.webpackJsonpfrontend_base_dapp=this.webpackJsonpfrontend_base_dapp||[]).push([[3],{632:function(t,e,n){"use strict";n.r(e),n.d(e,"getCLS",(function(){return m})),n.d(e,"getFCP",(function(){return S})),n.d(e,"getFID",(function(){return F})),n.d(e,"getLCP",(function(){return k})),n.d(e,"getTTFB",(function(){return C}));var i,a,r,o,u=function(t,e){return{name:t,value:void 0===e?-1:e,delta:0,entries:[],id:"v1-".concat(Date.now(),"-").concat(Math.floor(8999999999999*Math.random())+1e12)}},c=function(t,e){try{if(PerformanceObserver.supportedEntryTypes.includes(t)){if("first-input"===t&&!("PerformanceEventTiming"in self))return;var n=new PerformanceObserver((function(t){return t.getEntries().map(e)}));return n.observe({type:t,buffered:!0}),n}}catch(t){}},f=function(t,e){var n=function n(i){"pagehide"!==i.type&&"hidden"!==document.visibilityState||(t(i),e&&(removeEventListener("visibilitychange",n,!0),removeEventListener("pagehide",n,!0)))};addEventListener("visibilitychange",n,!0),addEventListener("pagehide",n,!0)},s=function(t){addEventListener("pageshow",(function(e){e.persisted&&t(e)}),!0)},p="function"==typeof WeakSet?new WeakSet:new Set,d=function(t,e,n){var i;return function(){e.value>=0&&(n||p.has(e)||"hidden"===document.visibilityState)&&(e.delta=e.value-(i||0),(e.delta||void 0===i)&&(i=e.value,t(e)))}},m=function(t,e){var n,i=u("CLS",0),a=function(t){t.hadRecentInput||(i.value+=t.value,i.entries.push(t),n())},r=c("layout-shift",a);r&&(n=d(t,i,e),f((function(){r.takeRecords().map(a),n()})),s((function(){i=u("CLS",0),n=d(t,i,e)})))},v=-1,l=function(){return"hidden"===document.visibilityState?0:1/0},h=function(){f((function(t){var e=t.timeStamp;v=e}),!0)},g=function(){return v<0&&(v=l(),h(),s((function(){setTimeout((function(){v=l(),h()}),0)}))),{get timeStamp(){return v}}},S=function(t,e){var n,i=g(),a=u("FCP"),r=function(t){"first-contentful-paint"===t.name&&(f&&f.disconnect(),t.startTime<i.timeStamp&&(a.value=t.startTime,a.entries.push(t),p.add(a),n()))},o=performance.getEntriesByName("first-contentful-paint")[0],f=o?null:c("paint",r);(o||f)&&(n=d(t,a,e),o&&r(o),s((function(i){a=u("FCP"),n=d(t,a,e),requestAnimationFrame((function(){requestAnimationFrame((function(){a.value=performance.now()-i.timeStamp,p.add(a),n()}))}))})))},y={passive:!0,capture:!0},E=new Date,w=function(t,e){i||(i=e,a=t,r=new Date,b(removeEventListener),L())},L=function(){if(a>=0&&a<r-E){var t={entryType:"first-input",name:i.type,target:i.target,cancelable:i.cancelable,startTime:i.timeStamp,processingStart:i.timeStamp+a};o.forEach((function(e){e(t)})),o=[]}},T=function(t){if(t.cancelable){var e=(t.timeStamp>1e12?new Date:performance.now())-t.timeStamp;"pointerdown"==t.type?function(t,e){var n=function(){w(t,e),a()},i=function(){a()},a=function(){removeEventListener("pointerup",n,y),removeEventListener("pointercancel",i,y)};addEventListener("pointerup",n,y),addEventListener("pointercancel",i,y)}(e,t):w(e,t)}},b=function(t){["mousedown","keydown","touchstart","pointerdown"].forEach((function(e){return t(e,T,y)}))},F=function(t,e){var n,r=g(),m=u("FID"),v=function(t){t.startTime<r.timeStamp&&(m.value=t.processingStart-t.startTime,m.entries.push(t),p.add(m),n())},l=c("first-input",v);n=d(t,m,e),l&&f((function(){l.takeRecords().map(v),l.disconnect()}),!0),l&&s((function(){var r;m=u("FID"),n=d(t,m,e),o=[],a=-1,i=null,b(addEventListener),r=v,o.push(r),L()}))},k=function(t,e){var n,i=g(),a=u("LCP"),r=function(t){var 
e=t.startTime;e<i.timeStamp&&(a.value=e,a.entries.push(t)),n()},o=c("largest-contentful-paint",r);if(o){n=d(t,a,e);var m=function(){p.has(a)||(o.takeRecords().map(r),o.disconnect(),p.add(a),n())};["keydown","click"].forEach((function(t){addEventListener(t,m,{once:!0,capture:!0})})),f(m,!0),s((function(i){a=u("LCP"),n=d(t,a,e),requestAnimationFrame((function(){requestAnimationFrame((function(){a.value=performance.now()-i.timeStamp,p.add(a),n()}))}))}))}},C=function(t){var e,n=u("TTFB");e=function(){try{var e=performance.getEntriesByType("navigation")[0]||function(){var t=performance.timing,e={entryType:"navigation",startTime:0};for(var n in t)"navigationStart"!==n&&"toJSON"!==n&&(e[n]=Math.max(t[n]-t.navigationStart,0));return e}();if(n.value=n.delta=e.responseStart,n.value<0)return;n.entries=[e],t(n)}catch(t){}},"complete"===document.readyState?setTimeout(e,0):addEventListener("pageshow",e)}}}]);
//# sourceMappingURL=3.9c02fced.chunk.js.map |
import React from 'react';
import PropTypes from 'prop-types';
import { FormattedMessage } from 'react-intl';
import isPackage from '../isPackage';
export default class EResourceType extends React.Component {
static propTypes = {
resource: PropTypes.shape({
_object: PropTypes.shape({
pti: PropTypes.shape({
titleInstance: PropTypes.shape({
label: PropTypes.string,
publicationType: PropTypes.oneOfType([
PropTypes.shape({ label: PropTypes.string }),
PropTypes.string,
]),
})
})
}),
reference_object: PropTypes.shape({
publicationType: PropTypes.string,
}),
publicationType: PropTypes.oneOfType([
PropTypes.shape({ label: PropTypes.string }),
PropTypes.string,
]),
})
}
render() {
const { resource } = this.props;
if (!resource) return null;
if (isPackage(resource)) {
return <FormattedMessage id="stripes-erm-components.package" />;
}
return (
resource?._object?.pti?.titleInstance?.publicationType?.label ||
// eslint-disable-next-line camelcase
resource?.reference_object?.publicationType ||
resource?.publicationType?.label ||
<FormattedMessage id="stripes-erm-components.title" />
);
}
}
|
import React from 'react';
import { Row, Col, Card, Progress } from 'reactstrap';
export default function LivePreviewExample() {
return (
<>
<Row>
<Col md="6" xl="3">
<Card className="p-3 mb-5">
<div className="align-box-row">
<div className="text-first font-size-xl font-weight-bold pr-2">
55%
</div>
<div className="flex-grow-1">
<Progress
animated
className="progress-bar-rounded progress-sm"
color="first"
value="55"
/>
</div>
</div>
<div className="text-black-50 pt-2">Expenses target</div>
</Card>
</Col>
<Col md="6" xl="3">
<Card className="p-3 mb-5">
<div className="align-box-row">
<div className="text-success font-size-xl font-weight-bold pr-2">
76%
</div>
<div className="flex-grow-1">
<Progress
animated
className="progress-bar-rounded progress-sm"
color="success"
value="76"
/>
</div>
</div>
<div className="text-black-50 pt-2">Sales target</div>
</Card>
</Col>
<Col md="6" xl="3">
<Card className="p-3 mb-5 bg-asteroid">
<div className="align-box-row">
<div className="text-danger font-size-xl font-weight-bold pr-2">
61%
</div>
<div className="flex-grow-1">
<Progress
animated
className="progress-bar-rounded bg-white-50 progress-sm"
color="danger"
value="61"
/>
</div>
</div>
<div className="text-white-50 pt-2">Income target</div>
</Card>
</Col>
<Col md="6" xl="3">
<Card className="p-3 mb-5 bg-midnight-bloom">
<div className="align-box-row">
<div className="text-warning font-size-xl font-weight-bold pr-2">
83%
</div>
<div className="flex-grow-1">
<Progress
animated
className="progress-bar-rounded bg-white-50 progress-sm"
color="warning"
value="83"
/>
</div>
</div>
<div className="text-white-50 pt-2">Spendings target</div>
</Card>
</Col>
</Row>
</>
);
}
|
# Copyright (c) 2017 Dell Inc. or its subsidiaries.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from copy import deepcopy
import datetime
import hashlib
import re
from cinder.objects.group import Group
from oslo_log import log as logging
from oslo_utils import strutils
import six
from cinder import exception
from cinder.i18n import _
from cinder.objects import fields
from cinder.volume import utils as vol_utils
from cinder.volume import volume_types
LOG = logging.getLogger(__name__)
# SHARED CONSTANTS
ISCSI = 'iscsi'
FC = 'fc'
INTERVAL = 'interval'
RETRIES = 'retries'
VOLUME_ELEMENT_NAME_PREFIX = 'OS-'
VMAX_AFA_MODELS = ['VMAX250F', 'VMAX450F', 'VMAX850F', 'VMAX950F']
MAX_SRP_LENGTH = 16
TRUNCATE_5 = 5
TRUNCATE_27 = 27
ARRAY = 'array'
SLO = 'slo'
WORKLOAD = 'workload'
SRP = 'srp'
PORTGROUPNAME = 'storagetype:portgroupname'
DEVICE_ID = 'device_id'
INITIATOR_CHECK = 'initiator_check'
SG_NAME = 'storagegroup_name'
MV_NAME = 'maskingview_name'
IG_NAME = 'init_group_name'
PARENT_SG_NAME = 'parent_sg_name'
CONNECTOR = 'connector'
VOL_NAME = 'volume_name'
EXTRA_SPECS = 'extra_specs'
HOST_NAME = 'short_host_name'
IS_RE = 'replication_enabled'
DISABLECOMPRESSION = 'storagetype:disablecompression'
REP_SYNC = 'Synchronous'
REP_ASYNC = 'Asynchronous'
REP_METRO = 'Metro'
REP_MODE = 'rep_mode'
RDF_SYNC_STATE = 'synchronized'
RDF_SYNCINPROG_STATE = 'syncinprog'
RDF_CONSISTENT_STATE = 'consistent'
RDF_SUSPENDED_STATE = 'suspended'
RDF_FAILEDOVER_STATE = 'failed over'
RDF_ACTIVE = 'active'
RDF_ACTIVEACTIVE = 'activeactive'
RDF_ACTIVEBIAS = 'activebias'
METROBIAS = 'metro_bias'
DEFAULT_PORT = 8443
CLONE_SNAPSHOT_NAME = "snapshot_for_clone"
# Multiattach constants
IS_MULTIATTACH = 'multiattach'
OTHER_PARENT_SG = 'other_parent_sg_name'
FAST_SG = 'fast_managed_sg'
NO_SLO_SG = 'no_slo_sg'
# Cinder.conf vmax configuration
VMAX_SERVER_IP = 'san_ip'
VMAX_USER_NAME = 'san_login'
VMAX_PASSWORD = 'san_password'
VMAX_SERVER_PORT_NEW = 'san_api_port'
VMAX_SERVER_PORT_OLD = 'san_rest_port'
VMAX_ARRAY = 'vmax_array'
VMAX_WORKLOAD = 'vmax_workload'
VMAX_SRP = 'vmax_srp'
VMAX_SERVICE_LEVEL = 'vmax_service_level'
VMAX_PORT_GROUPS = 'vmax_port_groups'
VMAX_SNAPVX_UNLINK_LIMIT = 'vmax_snapvx_unlink_limit'
class VMAXUtils(object):
"""Utility class for Rest based VMAX volume drivers.
This Utility class is for VMAX volume drivers based on Unisphere Rest API.
"""
def __init__(self):
"""Utility class for Rest based VMAX volume drivers."""
def get_host_short_name(self, host_name):
"""Returns the short name for a given qualified host name.
Checks the host name to see if it is the fully qualified host name
and returns part before the dot. If there is no dot in the host name
the full host name is returned.
:param host_name: the fully qualified host name
:returns: string -- the short host_name
"""
host_array = host_name.split('.')
if len(host_array) > 1:
short_host_name = host_array[0]
else:
short_host_name = host_name
return self.generate_unique_trunc_host(short_host_name)
@staticmethod
def get_volumetype_extra_specs(volume, volume_type_id=None):
"""Gets the extra specs associated with a volume type.
:param volume: the volume dictionary
:param volume_type_id: Optional override for volume.volume_type_id
:returns: dict -- extra_specs - the extra specs
:raises: VolumeBackendAPIException
"""
extra_specs = {}
try:
if volume_type_id:
type_id = volume_type_id
else:
type_id = volume.volume_type_id
if type_id is not None:
extra_specs = volume_types.get_volume_type_extra_specs(type_id)
except Exception as e:
LOG.debug('Exception getting volume type extra specs: %(e)s',
{'e': six.text_type(e)})
return extra_specs
@staticmethod
def get_short_protocol_type(protocol):
"""Given the protocol type, return I for iscsi and F for fc.
:param protocol: iscsi or fc
:returns: string -- 'I' for iscsi or 'F' for fc
"""
if protocol.lower() == ISCSI.lower():
return 'I'
elif protocol.lower() == FC.lower():
return 'F'
else:
return protocol
@staticmethod
def truncate_string(str_to_truncate, max_num):
"""Truncate a string by taking first and last characters.
:param str_to_truncate: the string to be truncated
:param max_num: the maximum number of characters
:returns: string -- truncated string or original string
"""
if len(str_to_truncate) > max_num:
new_num = len(str_to_truncate) - max_num // 2
first_chars = str_to_truncate[:max_num // 2]
last_chars = str_to_truncate[new_num:]
str_to_truncate = first_chars + last_chars
return str_to_truncate
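        # Example: truncate_string("volume_group_name_12345", 10) keeps the
        # first five and last five characters, giving "volum12345".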
@staticmethod
def get_time_delta(start_time, end_time):
"""Get the delta between start and end time.
:param start_time: the start time
:param end_time: the end time
:returns: string -- delta in string H:MM:SS
"""
delta = end_time - start_time
return six.text_type(datetime.timedelta(seconds=int(delta)))
def get_default_storage_group_name(
self, srp_name, slo, workload, is_compression_disabled=False,
is_re=False, rep_mode=None):
"""Determine default storage group from extra_specs.
:param srp_name: the name of the srp on the array
:param slo: the service level string e.g Bronze
:param workload: the workload string e.g DSS
:param is_compression_disabled: flag for disabling compression
:param is_re: flag for replication
:param rep_mode: flag to indicate replication mode
:returns: storage_group_name
"""
if slo and workload:
prefix = ("OS-%(srpName)s-%(slo)s-%(workload)s"
% {'srpName': srp_name, 'slo': slo,
'workload': workload})
if is_compression_disabled:
prefix += "-CD"
else:
prefix = "OS-no_SLO"
if is_re:
prefix += self.get_replication_prefix(rep_mode)
storage_group_name = ("%(prefix)s-SG" % {'prefix': prefix})
return storage_group_name
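        # Example: ('SRP_1', 'Diamond', 'OLTP') with compression disabled
        # yields 'OS-SRP_1-Diamond-OLTP-CD-SG'; with no SLO the name collapses
        # to 'OS-no_SLO-SG' (plus any replication suffix).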
@staticmethod
def get_volume_element_name(volume_id):
"""Get volume element name follows naming convention, i.e. 'OS-UUID'.
:param volume_id: Openstack volume ID containing uuid
:returns: volume element name in format of OS-UUID
"""
element_name = volume_id
uuid_regex = (re.compile(
r'[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}',
re.I))
match = uuid_regex.search(volume_id)
if match:
volume_uuid = match.group()
element_name = ("%(prefix)s%(volumeUUID)s"
% {'prefix': VOLUME_ELEMENT_NAME_PREFIX,
'volumeUUID': volume_uuid})
LOG.debug(
"get_volume_element_name elementName: %(elementName)s.",
{'elementName': element_name})
return element_name
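        # Example: a volume id of '0ba36864-2d57-4c5b-9f61-0d2aa3dbb3d8' maps
        # to 'OS-0ba36864-2d57-4c5b-9f61-0d2aa3dbb3d8'; an id with no embedded
        # UUID passes through unchanged.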
@staticmethod
def modify_snapshot_prefix(snapshot_name, manage=False, unmanage=False):
"""Modify a Snapshot prefix on VMAX backend.
Prepare a snapshot name for manage/unmanage snapshot process either
by adding or removing 'OS-' prefix.
:param snapshot_name: the old snapshot backend display name
:param manage: (bool) if the operation is managing a snapshot
:param unmanage: (bool) if the operation is unmanaging a snapshot
:return: snapshot name ready for backend VMAX assignment
"""
new_snap_name = None
if manage:
new_snap_name = ("%(prefix)s%(snapshot_name)s"
% {'prefix': 'OS-',
'snapshot_name': snapshot_name})
if unmanage:
snap_split = snapshot_name.split("-", 1)
if snap_split[0] == 'OS':
new_snap_name = snap_split[1]
return new_snap_name
def generate_unique_trunc_host(self, host_name):
"""Create a unique short host name under 16 characters.
:param host_name: long host name
:returns: truncated host name
"""
if host_name and len(host_name) > 16:
host_name = host_name.lower()
m = hashlib.md5()
m.update(host_name.encode('utf-8'))
uuid = m.hexdigest()
new_name = ("%(host)s%(uuid)s"
% {'host': host_name[-6:],
'uuid': uuid})
host_name = self.truncate_string(new_name, 16)
return host_name
def get_pg_short_name(self, portgroup_name):
"""Create a unique port group name under 12 characters.
:param portgroup_name: long portgroup_name
:returns: truncated portgroup_name
"""
if portgroup_name and len(portgroup_name) > 12:
portgroup_name = portgroup_name.lower()
m = hashlib.md5()
m.update(portgroup_name.encode('utf-8'))
uuid = m.hexdigest()
new_name = ("%(pg)s%(uuid)s"
% {'pg': portgroup_name[-6:],
'uuid': uuid})
portgroup_name = self.truncate_string(new_name, 12)
return portgroup_name
@staticmethod
def get_default_oversubscription_ratio(max_over_sub_ratio):
"""Override ratio if necessary.
The over subscription ratio will be overridden if the user supplied
max oversubscription ratio is less than 1.
:param max_over_sub_ratio: user supplied over subscription ratio
:returns: max_over_sub_ratio
"""
if max_over_sub_ratio < 1.0:
LOG.info("The user supplied value for max_over_subscription "
"ratio is less than 1.0. Using the default value of "
"20.0 instead...")
max_over_sub_ratio = 20.0
return max_over_sub_ratio
def get_temp_snap_name(self, source_device_id):
"""Construct a temporary snapshot name for clone operation
:param source_device_id: the source device id
:return: snap_name
"""
snap_name = ("temp-%(device)s-%(snap_name)s"
% {'device': source_device_id,
'snap_name': CLONE_SNAPSHOT_NAME})
return snap_name
@staticmethod
def get_array_and_device_id(volume, external_ref):
"""Helper function for manage volume to get array name and device ID.
:param volume: volume object from API
:param external_ref: the existing volume object to be manged
:returns: string value of the array name and device ID
"""
device_id = external_ref.get(u'source-name', None)
LOG.debug("External_ref: %(er)s", {'er': external_ref})
if not device_id:
device_id = external_ref.get(u'source-id', None)
host = volume.host
host_list = host.split('+')
array = host_list[(len(host_list) - 1)]
if device_id:
if len(device_id) != 5:
error_message = (_("Device ID: %(device_id)s is invalid. "
"Device ID should be exactly 5 digits.") %
{'device_id': device_id})
LOG.error(error_message)
raise exception.VolumeBackendAPIException(
message=error_message)
LOG.debug("Get device ID of existing volume - device ID: "
"%(device_id)s, Array: %(array)s.",
{'device_id': device_id,
'array': array})
else:
exception_message = (_("Source volume device ID is required."))
raise exception.VolumeBackendAPIException(
message=exception_message)
return array, device_id
@staticmethod
def is_compression_disabled(extra_specs):
"""Check is compression is to be disabled.
:param extra_specs: extra specifications
:returns: boolean
"""
do_disable_compression = False
if DISABLECOMPRESSION in extra_specs:
if strutils.bool_from_string(extra_specs[DISABLECOMPRESSION]):
do_disable_compression = True
return do_disable_compression
def change_compression_type(self, is_source_compr_disabled, new_type):
"""Check if volume type have different compression types
:param is_source_compr_disabled: from source
:param new_type: from target
:returns: boolean
"""
extra_specs = new_type['extra_specs']
is_target_compr_disabled = self.is_compression_disabled(extra_specs)
if is_target_compr_disabled == is_source_compr_disabled:
return False
else:
return True
def change_replication(self, vol_is_replicated, new_type):
"""Check if volume types have different replication status.
:param vol_is_replicated: from source
:param new_type: from target
:return: bool
"""
is_tgt_rep = self.is_replication_enabled(new_type['extra_specs'])
return vol_is_replicated != is_tgt_rep
@staticmethod
def is_replication_enabled(extra_specs):
"""Check if replication is to be enabled.
:param extra_specs: extra specifications
:returns: bool - true if enabled, else false
"""
replication_enabled = False
if IS_RE in extra_specs:
replication_enabled = True
return replication_enabled
@staticmethod
def get_replication_config(rep_device_list):
"""Gather necessary replication configuration info.
:param rep_device_list: the replication device list from cinder.conf
:returns: rep_config, replication configuration dict
"""
rep_config = {}
if not rep_device_list:
return None
else:
target = rep_device_list[0]
try:
rep_config['array'] = target['target_device_id']
rep_config['srp'] = target['remote_pool']
rep_config['rdf_group_label'] = target['rdf_group_label']
rep_config['portgroup'] = target['remote_port_group']
except KeyError as ke:
error_message = (_("Failed to retrieve all necessary SRDF "
"information. Error received: %(ke)s.") %
{'ke': six.text_type(ke)})
LOG.exception(error_message)
raise exception.VolumeBackendAPIException(
message=error_message)
allow_extend = target.get('allow_extend', 'false')
if strutils.bool_from_string(allow_extend):
rep_config['allow_extend'] = True
else:
rep_config['allow_extend'] = False
rep_mode = target.get('mode', '')
if rep_mode.lower() in ['async', 'asynchronous']:
rep_config['mode'] = REP_ASYNC
elif rep_mode.lower() == 'metro':
rep_config['mode'] = REP_METRO
metro_bias = target.get('metro_use_bias', 'false')
if strutils.bool_from_string(metro_bias):
rep_config[METROBIAS] = True
else:
rep_config[METROBIAS] = False
allow_delete_metro = target.get('allow_delete_metro', 'false')
if strutils.bool_from_string(allow_delete_metro):
rep_config['allow_delete_metro'] = True
else:
rep_config['allow_delete_metro'] = False
else:
rep_config['mode'] = REP_SYNC
return rep_config
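        # Example (values illustrative): a replication_device entry with
        # target_device_id 000197800124, remote_pool SRP_1, rdf_group_label
        # os-rdf-1, remote_port_group os-pg and mode async produces
        # {'array': '000197800124', 'srp': 'SRP_1', 'rdf_group_label':
        #  'os-rdf-1', 'portgroup': 'os-pg', 'allow_extend': False,
        #  'mode': 'Asynchronous'}.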
@staticmethod
def is_volume_failed_over(volume):
"""Check if a volume has been failed over.
:param volume: the volume object
:returns: bool
"""
if volume is not None:
if volume.get('replication_status') and (
volume.replication_status ==
fields.ReplicationStatus.FAILED_OVER):
return True
return False
@staticmethod
def update_volume_model_updates(volume_model_updates,
volumes, group_id, status='available'):
"""Update the volume model's status and return it.
:param volume_model_updates: list of volume model update dicts
:param volumes: volumes object api
:param group_id: consistency group id
:param status: string value reflects the status of the member volume
:returns: volume_model_updates - updated volumes
"""
LOG.info("Updating status for group: %(id)s.", {'id': group_id})
if volumes:
for volume in volumes:
volume_model_updates.append({'id': volume.id,
'status': status})
else:
LOG.info("No volume found for group: %(cg)s.", {'cg': group_id})
return volume_model_updates
@staticmethod
def get_grp_volume_model_update(volume, volume_dict, group_id):
"""Create and return the volume model update on creation.
:param volume: volume object
:param volume_dict: the volume dict
:param group_id: consistency group id
:returns: model_update
"""
LOG.info("Updating status for group: %(id)s.", {'id': group_id})
model_update = ({'id': volume.id, 'status': 'available',
'provider_location': six.text_type(volume_dict)})
return model_update
@staticmethod
def update_extra_specs(extraspecs):
"""Update extra specs.
:param extraspecs: the additional info
:returns: extraspecs
"""
try:
pool_details = extraspecs['pool_name'].split('+')
extraspecs[SLO] = pool_details[0]
if len(pool_details) == 4:
extraspecs[WORKLOAD] = pool_details[1]
extraspecs[SRP] = pool_details[2]
extraspecs[ARRAY] = pool_details[3]
else:
# Assume no workload given in pool name
extraspecs[SRP] = pool_details[1]
extraspecs[ARRAY] = pool_details[2]
extraspecs[WORKLOAD] = 'NONE'
except KeyError:
LOG.error("Error parsing SLO, workload from"
" the provided extra_specs.")
return extraspecs
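        # Example: pool_name 'Diamond+OLTP+SRP_1+000197800123' gives
        # slo='Diamond', workload='OLTP', srp='SRP_1', array='000197800123';
        # a three-part pool name carries no workload, which defaults to 'NONE'.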
def get_volume_group_utils(self, group, interval, retries):
"""Standard utility for generic volume groups.
:param group: the generic volume group object to be created
:param interval: Interval in seconds between retries
:param retries: Retry count
:returns: array, intervals_retries_dict
:raises: VolumeBackendAPIException
"""
arrays = set()
# Check if it is a generic volume group instance
if isinstance(group, Group):
for volume_type in group.volume_types:
extra_specs = self.update_extra_specs(volume_type.extra_specs)
arrays.add(extra_specs[ARRAY])
else:
msg = (_("Unable to get volume type ids."))
LOG.error(msg)
raise exception.VolumeBackendAPIException(message=msg)
if len(arrays) != 1:
if not arrays:
msg = (_("Failed to get an array associated with "
"volume group: %(groupid)s.")
% {'groupid': group.id})
else:
msg = (_("There are multiple arrays "
"associated with volume group: %(groupid)s.")
% {'groupid': group.id})
LOG.error(msg)
raise exception.VolumeBackendAPIException(message=msg)
array = arrays.pop()
intervals_retries_dict = {INTERVAL: interval, RETRIES: retries}
return array, intervals_retries_dict
def update_volume_group_name(self, group):
"""Format id and name consistency group.
:param group: the generic volume group object
:returns: group_name -- formatted name + id
"""
group_name = ""
if group.name is not None and group.name != group.id:
group_name = (
self.truncate_string(
group.name, TRUNCATE_27) + "_")
group_name += group.id
return group_name
@staticmethod
def add_legacy_pools(pools):
"""Add legacy pools to allow extending a volume after upgrade.
:param pools: the pool list
:return: pools - the updated pool list
"""
extra_pools = []
for pool in pools:
if 'none' in pool['pool_name'].lower():
extra_pools.append(pool)
for pool in extra_pools:
try:
slo = pool['pool_name'].split('+')[0]
srp = pool['pool_name'].split('+')[2]
array = pool['pool_name'].split('+')[3]
except IndexError:
slo = pool['pool_name'].split('+')[0]
srp = pool['pool_name'].split('+')[1]
array = pool['pool_name'].split('+')[2]
new_pool_name = ('%(slo)s+%(srp)s+%(array)s'
% {'slo': slo, 'srp': srp, 'array': array})
new_pool = deepcopy(pool)
new_pool['pool_name'] = new_pool_name
pools.append(new_pool)
return pools
def check_replication_matched(self, volume, extra_specs):
"""Check volume type and group type.
This will make sure they do not conflict with each other.
:param volume: volume to be checked
:param extra_specs: the extra specifications
:raises: InvalidInput
"""
# If volume is not a member of group, skip this check anyway.
if not volume.group:
return
vol_is_re = self.is_replication_enabled(extra_specs)
group_is_re = volume.group.is_replicated
if vol_is_re != group_is_re:
msg = _('Replication must be enabled or disabled for both the '
'volume and its group. Volume replication status: '
'%(vol_status)s, group replication status: '
'%(group_status)s') % {
'vol_status': vol_is_re, 'group_status': group_is_re}
raise exception.InvalidInput(reason=msg)
@staticmethod
def check_rep_status_enabled(group):
"""Check replication status for group.
Group status must be enabled before proceeding with certain
operations.
:param group: the group object
:raises: InvalidInput
"""
if group.is_replicated:
if group.replication_status != fields.ReplicationStatus.ENABLED:
msg = (_('Replication status should be %s for '
'replication-enabled group.')
% fields.ReplicationStatus.ENABLED)
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
else:
LOG.debug('Replication is not enabled on group %s, '
'skip status check.', group.id)
@staticmethod
def get_replication_prefix(rep_mode):
"""Get the replication prefix.
Replication prefix for storage group naming is based on whether it is
synchronous, asynchronous, or metro replication mode.
:param rep_mode: the replication mode (sync, async, or metro)
:return: prefix
"""
if rep_mode == REP_ASYNC:
prefix = "-RA"
elif rep_mode == REP_METRO:
prefix = "-RM"
else:
prefix = "-RE"
return prefix
@staticmethod
def get_async_rdf_managed_grp_name(rep_config):
"""Get the name of the group used for async replication management.
:param rep_config: the replication configuration
:return: group name
"""
async_grp_name = ("OS-%(rdf)s-%(mode)s-rdf-sg"
% {'rdf': rep_config['rdf_group_label'],
'mode': rep_config['mode']})
LOG.debug("The async/ metro rdf managed group name is %(name)s",
{'name': async_grp_name})
return async_grp_name
def is_metro_device(self, rep_config, extra_specs):
"""Determine if a volume is a Metro enabled device.
:param rep_config: the replication configuration
:param extra_specs: the extra specifications
:return: bool
"""
is_metro = (self.is_replication_enabled(extra_specs)
and rep_config is not None
and rep_config['mode'] == REP_METRO)
return is_metro
def does_vol_need_rdf_management_group(self, extra_specs):
"""Determine if a volume is a Metro or Async.
:param extra_specs: the extra specifications
:return: bool
"""
if (self.is_replication_enabled(extra_specs) and
extra_specs.get(REP_MODE, None) in
[REP_ASYNC, REP_METRO]):
return True
return False
def derive_default_sg_from_extra_specs(self, extra_specs, rep_mode=None):
"""Get the name of the default sg from the extra specs.
:param extra_specs: extra specs
:param rep_mode: the replication mode
:returns: default sg - string
"""
do_disable_compression = self.is_compression_disabled(
extra_specs)
rep_enabled = self.is_replication_enabled(extra_specs)
return self.get_default_storage_group_name(
extra_specs[SRP], extra_specs[SLO],
extra_specs[WORKLOAD],
is_compression_disabled=do_disable_compression,
is_re=rep_enabled, rep_mode=rep_mode)
@staticmethod
def merge_dicts(d1, *args):
"""Merge dictionaries
:param d1: dict 1
:param *args: one or more dicts
:returns: merged dict
"""
d2 = {}
for d in args:
d2 = d.copy()
d2.update(d1)
d1 = d2
return d2
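# Illustrative note (not part of the driver): keys in d1 take precedence
# over the later dicts, e.g.
#   merge_dicts({'a': 1}, {'a': 2, 'b': 3}) == {'a': 1, 'b': 3}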
@staticmethod
def get_temp_failover_grp_name(rep_config):
"""Get the temporary group name used for failover.
:param rep_config: the replication config
:return: temp_grp_name
"""
temp_grp_name = ("OS-%(rdf)s-temp-rdf-sg"
% {'rdf': rep_config['rdf_group_label']})
LOG.debug("The temp rdf managed group name is %(name)s",
{'name': temp_grp_name})
return temp_grp_name
def get_child_sg_name(self, host_name, extra_specs):
"""Get the child storage group name for a masking view.
:param host_name: the short host name
:param extra_specs: the extra specifications
:return: child sg name, compression flag, rep flag, short pg name
"""
do_disable_compression = False
pg_name = self.get_pg_short_name(extra_specs[PORTGROUPNAME])
rep_enabled = self.is_replication_enabled(extra_specs)
if extra_specs[SLO]:
slo_wl_combo = self.truncate_string(
extra_specs[SLO] + extra_specs[WORKLOAD], 10)
unique_name = self.truncate_string(extra_specs[SRP], 12)
child_sg_name = (
"OS-%(shortHostName)s-%(srpName)s-%(combo)s-%(pg)s"
% {'shortHostName': host_name,
'srpName': unique_name,
'combo': slo_wl_combo,
'pg': pg_name})
do_disable_compression = self.is_compression_disabled(
extra_specs)
if do_disable_compression:
child_sg_name = ("%(child_sg_name)s-CD"
% {'child_sg_name': child_sg_name})
else:
child_sg_name = (
"OS-%(shortHostName)s-No_SLO-%(pg)s"
% {'shortHostName': host_name, 'pg': pg_name})
if rep_enabled:
rep_mode = extra_specs.get(REP_MODE, None)
child_sg_name += self.get_replication_prefix(rep_mode)
return child_sg_name, do_disable_compression, rep_enabled, pg_name
@staticmethod
def change_multiattach(extra_specs, new_type_extra_specs):
"""Check if a change in multiattach is required for retype.
:param extra_specs: the source type extra specs
:param new_type_extra_specs: the target type extra specs
:return: bool
"""
is_src_multiattach = vol_utils.is_replicated_str(
extra_specs.get('multiattach'))
is_tgt_multiattach = vol_utils.is_replicated_str(
new_type_extra_specs.get('multiattach'))
return is_src_multiattach != is_tgt_multiattach
@staticmethod
def is_volume_manageable(source_vol):
"""Check if a volume with verbose description is valid for management.
:param source_vol: the verbose volume dict
:return: bool True/False
"""
vol_head = source_vol['volumeHeader']
# VMAX disk geometry uses cylinders, so volume sizes are matched to
# the nearest full cylinder size: 1GB = 547cyl = 1026MB
if vol_head['capMB'] < 1026 or not vol_head['capGB'].is_integer():
return False
if (vol_head['numSymDevMaskingViews'] > 0 or
vol_head['mapped'] is True or
source_vol['maskingInfo']['masked'] is True):
return False
if (vol_head['status'] != 'Ready' or
vol_head['serviceState'] != 'Normal' or
vol_head['emulationType'] != 'FBA' or
vol_head['configuration'] != 'TDEV' or
vol_head['system_resource'] is True or
vol_head['private'] is True or
vol_head['encapsulated'] is True or
vol_head['reservationInfo']['reserved'] is True):
return False
for key, value in source_vol['rdfInfo'].items():
if value is True:
return False
if source_vol['timeFinderInfo']['snapVXTgt'] is True:
return False
if vol_head['nameModifier'][0:3] == 'OS-':
return False
return True
@staticmethod
def is_snapshot_manageable(source_vol):
"""Check if a volume with snapshot description is valid for management.
:param source_vol: the verbose volume dict
:return: bool True/False
"""
vol_head = source_vol['volumeHeader']
if not source_vol['timeFinderInfo']['snapVXSrc']:
return False
# VMAX disk geometry uses cylinders, so volume sizes are matched to
# the nearest full cylinder size: 1GB = 547cyl = 1026MB
if (vol_head['capMB'] < 1026 or
not vol_head['capGB'].is_integer()):
return False
if (vol_head['emulationType'] != 'FBA' or
vol_head['configuration'] != 'TDEV' or
vol_head['private'] is True or
vol_head['system_resource'] is True):
return False
snap_gen_info = (source_vol['timeFinderInfo']['snapVXSession'][0][
'srcSnapshotGenInfo'][0]['snapshotHeader'])
if (snap_gen_info['snapshotName'][0:3] == 'OS-' or
snap_gen_info['snapshotName'][0:5] == 'temp-'):
return False
if (snap_gen_info['expired'] is True
or snap_gen_info['generation'] > 0):
return False
return True
|
/**
* API configuration file
* @author Tang Bo Hao
*/
var commonAPIs = exports.commonAPIs = {
account_info: 'get_account_info'
, users_info: 'get_users_info'
, friends_ids: 'get_friends_ids'
, appfriends_ids: 'get_appfriends_ids'
, appfriends_info:'get_appfriends_info'
, is_app_user: 'get_is_app_user'
};
// Basic Sina App
var WeiboAPI = exports.WeiboAPI = {
// Basic Info
apiurl: 'http://api.t.sina.com.cn'
, result_format: '.json'
// specific api
, public_timeline: '/statuses/public_timeline'
, friends_timeline: '/statuses/friends_timeline'
, comments_timeline: '/statuses/comments_timeline'
, user_timeline: '/statuses/user_timeline'
, mentions: '/statuses/mentions'
, followers: '/statuses/followers'
, friends: '/statuses/friends'
, favorites: '/favorites'
, favorites_create: '/favorites/create'
, favorites_destroy: '/favorites/destroy/{{id}}'
, counts: '/statuses/counts'
, status_show: '/statuses/show/{{id}}'
, update: '/statuses/update'
, upload: '/statuses/upload'
, repost: '/statuses/repost'
, repost_timeline: '/statuses/repost_timeline'
, comment: '/statuses/comment'
, reply: '/statuses/reply'
, comment_destroy: '/statuses/comment_destroy/{{id}}'
, comments: '/statuses/comments'
, destroy: '/statuses/destroy/{{id}}'
, destroy_msg: '/direct_messages/destroy/{{id}}'
, direct_messages: '/direct_messages'
, sent_direct_messages: '/direct_messages/sent'
, new_message: '/direct_messages/new'
, verify_credentials: '/account/verify_credentials'
, rate_limit_status: '/account/rate_limit_status'
, friendships_create: '/friendships/create'
, friendships_destroy: '/friendships/destroy'
, friendships_show: '/friendships/show'
, reset_count: '/statuses/reset_count'
, user_show: '/users/show/{{id}}'
// Authorize
, oauth_authorize: '/oauth/authorize'
, oauth_request_token: '/oauth/request_token'
, oauth_access_token: '/oauth/access_token'
};
// common api
WeiboAPI[commonAPIs.account_info] = WeiboAPI.verify_credentials;
// Sina WeiYouXi
var WeiyxAPI = exports.WeiyxAPI = {
// QueryKey NS
NAMESPACE: "wyx_"
// Basic Info
, apiurl: 'http://api.weibo.com/game/1/'
, result_format: '.json'
// specific Api
, user_show: 'user/show' // get user info
, user_show_batch: 'user/show_batch' // get user info batch
, user_friends: 'user/friends' // get friends' info (limit 20)
, user_friend_ids: 'user/friend_ids' // get all friends ids
, user_app_friends: 'user/app_friends' // get info of all friends who installed the app
, user_app_friend_ids: 'user/app_friend_ids' // get ids of all friends who installed the app
, user_are_friends: 'user/are_friends' // check if they are friends
, friendships_show: 'friendships/show' // return two friends' relationship
, application_is_fan: 'application/is_fan' // check if the user is a fan
, application_is_user: 'application/is_user' // check if the user installed the app
, application_scored: 'application/scored' // check if the user scored the app
, application_rate_limit_status: 'application/rate_limit_status' // get app api limit
, notice_send: 'Notice/send' // send a single notification [closed now]
, invite_ignore_game_all: 'invite/ignore_game_all' // ignore all game invitation
, invite_ignore_game: 'invite/ignore_game' // ignore some user's game invitation
, pay_get_token: 'pay/get_token' // get payment token
, pay_order_status: 'pay/order_status' // check payment order status
, achievements_set: 'achievements/set' // set achievement
, achievements_get: 'achievements/get' // get achievement
, leaderboards_set: 'leaderboards/set' // set leader board
, leaderboards_get_friends: 'leaderboards/get_friends' // get friends leader boards
, leaderboards_increment: 'leaderboards/increment' // like 'set', but using increment
, leaderboards_get_total: 'leaderboards/get_total' // get total leader board
, engage_get_user_status: 'engage/get_user_status' // get user engagement info
// Authorize Oauth2
//, oauth_site: 'http://game.weibo.com/oauth/'
//, oauth_authorize: 'auth/'
//, oauth_token: 'auth/token'
};
// common api
WeiyxAPI[commonAPIs.account_info] = WeiyxAPI.user_show;
WeiyxAPI[commonAPIs.users_info] = WeiyxAPI.user_show_batch;
WeiyxAPI[commonAPIs.friends_ids] = WeiyxAPI.user_friend_ids;
WeiyxAPI[commonAPIs.appfriends_ids] = WeiyxAPI.user_app_friend_ids;
WeiyxAPI[commonAPIs.appfriends_info] = WeiyxAPI.user_app_friends;
WeiyxAPI[commonAPIs.is_app_user] = WeiyxAPI.application_is_user;
// RenRen
var RenRenAPI = exports.RenRenAPI = {
// Basic Info
apiurl: 'http://api.renren.com/restserver.do'
, result_format: 'JSON'
// specific Api
, users_getInfo: 'users.getInfo' // get user info
, users_isAppUser: 'users.isAppUser' //check is app user
, friends_get: 'friends.get' // get friends ids
, friends_getAppUsers: 'friends.getAppUsers' // get app friends ids
, friends_getAppFriends:'friends.getAppFriends' // get app friends info
// Authorize Oauth2
, oauth_site: 'https://graph.renren.com/'
, oauth_authorize: 'oauth/authorize'
, oauth_token: 'oauth/token'
};
// common api
RenRenAPI[commonAPIs.account_info] = RenRenAPI.users_getInfo;
RenRenAPI[commonAPIs.users_info] = RenRenAPI.users_getInfo;
RenRenAPI[commonAPIs.friends_ids] = RenRenAPI.friends_get;
RenRenAPI[commonAPIs.appfriends_ids] = RenRenAPI.friends_getAppUsers;
RenRenAPI[commonAPIs.appfriends_info] = RenRenAPI.friends_getAppFriends;
RenRenAPI[commonAPIs.is_app_user] = RenRenAPI.users_isAppUser;
var QQAPI = exports.QQAPI = {
user_get_info: '/v3/user/get_info'
, user_get_multi_info: '/v3/user/get_multi_info'
, user_is_setup: '/v3/user/is_setup'
, user_is_vip: '/v3/user/is_vip'
, user_is_login: '/v3/user/is_login'
, relation_is_friend: '/v3/relation/is_friend'
, relation_get_app_friends: '/v3/relation/get_app_friends'
, pay_buy_goods: '/v3/pay/buy_goods'
, csec_capture_get: '/v3/csec/capture_get'
, csec_capture_verify: '/v3/csec/capture_verify'
, csec_word_filter:'/v3/csec/word_filter'
, msgqueue_send_msg: '/msgqueue/send_msg'
, msgqueue_recv_msg: '/msgqueue/recv_msg'
, msgqueue_delete_msg: '/msgqueue/delete_msg'
}
QQAPI[commonAPIs.account_info] = QQAPI.user_get_info;
QQAPI[commonAPIs.users_info] = QQAPI.user_get_multi_info;
QQAPI[commonAPIs.friends_ids] = QQAPI.relation_get_app_friends;
QQAPI[commonAPIs.appfriends_ids] = QQAPI.relation_get_app_friends;
QQAPI[commonAPIs.appfriends_info] = QQAPI.relation_get_app_friends;
QQAPI[commonAPIs.is_app_user] = QQAPI.user_is_setup;
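// Illustrative sketch (not part of this config): a caller could resolve a
// platform-independent API name to a concrete request URL, e.g. for Weibo:
//
//   var path = WeiboAPI[commonAPIs.account_info]; // '/account/verify_credentials'
//   var url = WeiboAPI.apiurl + path + WeiboAPI.result_format;
//   // => 'http://api.t.sina.com.cn/account/verify_credentials.json'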
|
/*
* @Author: your name
* @Date: 2020-04-09 15:25:17
* @LastEditTime: 2020-04-27 09:11:59
* @LastEditors: Please set LastEditors
* @Description: In User Settings Edit
* @FilePath: \gswl-web\src\utils\validate.js
*/
/**
* Email address
* @param {*} s
*/
export function isEmail(s) {
return /^([a-zA-Z0-9._-])+@([a-zA-Z0-9_-])+((\.[a-zA-Z0-9_-]{2,3}){1,2})$/.test(s)
}
/**
* Mobile phone number
* @param {*} s
*/
export function isMobile(s) {
return /^1[0-9]{10}$/.test(s)
}
/**
* Landline phone number
* @param {*} s
*/
export function isPhone(s) {
return /^([0-9]{3,4}-)?[0-9]{7,8}$/.test(s)
}
/**
* Regex for at most five uppercase letters
* @param {*} s
*/
export function isZM(s) {
return /^[A-Z]{0,5}$/.test(s)
}
/**
* Mobile or landline phone number
* @param {*} s
*/
export function isPhoneOrMobile(s) {
return isMobile(s) || isPhone(s)
}
/**
* Mobile or landline phone number - form validation
* @param {*} rule
* @param {*} value
* @param {*} callback
*/
export function isPhoneOrMobileByForm(rule, value, callback) {
if (!value || isPhoneOrMobile(value)) {
callback()
} else {
callback('请输入正确的手机号或者电话号码!')
}
}
/**
* URL
* @param {*} s
*/
export function isURL(s) {
return /^http[s]?:\/\/.*/.test(s)
}
/**
* Vehicle license plate number
* @param {*} s
*/
export const isVehicleNumber = s => {
return /^([京津晋冀蒙辽吉黑沪苏浙皖闽赣鲁豫鄂湘粤桂琼渝川贵云藏陕甘青宁新][ABCDEFGHJKLMNPQRSTUVWXY][1-9DF][1-9ABCDEFGHJKLMNPQRSTUVWXYZ]\d{3}[1-9DF]|[京津晋冀蒙辽吉黑沪苏浙皖闽赣鲁豫鄂湘粤桂琼渝川贵云藏陕甘青宁新][ABCDEFGHJKLMNPQRSTUVWXY][\dABCDEFGHJKLNMxPQRSTUVWXYZ]{5})$/.test(
s
)
}
/**
* Vehicle license plate validation
* @param {*} rule
* @param {*} value
* @param {*} callback
*/
export function isVehicleNumberByForm(rule, value, callback) {
if (!value || isVehicleNumber(value)) {
callback()
} else {
callback('请输入正确的车牌号!')
}
}
/**
* National ID card number
* @param {*} s
*/
export function isSFZ(s) {
// 350128199710140036
//
return /^\d{6}(18|19|20)?\d{2}(0[1-9]|1[012])(0[1-9]|[12]\d|3[01])\d{3}(\d|[xX])$/.test(s)
}
/**
* At most five uppercase letters
* @param {*} rule
* @param {*} value
* @param {*} callback
*/
export function isZMOK(rule, value, callback) {
if (!value || isZM(value)) {
callback()
} else {
callback('请输入不大于五位的大写字母!')
}
}
export function isSFZByForm(rule, value, callback) {
if (!value || isSFZ(value)) {
callback()
} else {
callback('请输入正确的身份证号!')
}
}
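/**
 * Illustrative usage sketch (assumes an Element UI style form; the `rules`
 * object below is hypothetical, not part of this module):
 *
 *   const rules = {
 *     phone: [{ validator: isPhoneOrMobileByForm, trigger: 'blur' }],
 *     plate: [{ validator: isVehicleNumberByForm, trigger: 'blur' }]
 *   }
 */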
|
const Express = require('express');
const http = require('http');
const harakiri = require('../../');
const app = new Express();
const server = new http.Server(app);
const port = 3000;
app.get('/', (req, res) => res.send('Hello!!'));
// Open http://localhost:3000/loop in the browser to execute a blocking operation
app.get('/loop', (req, res) => {
while(true);
});
server.listen(port, (err) => {
if(err) {
console.log(err);
} else {
// Better to start observation after the server has started,
// to avoid flagging blocking work that may happen while the project boots
harakiri();
console.log(`Listening on port ${port}`);
}
});
|
#!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
"""Client actions related to plist files."""
from __future__ import absolute_import
from __future__ import unicode_literals
import io
import types
from binplist import binplist
from grr_response_client import actions
from grr_response_client import vfs
from grr_response_core.lib import plist as plist_lib
from grr_response_core.lib.rdfvalues import plist as rdf_plist
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
class PlistQuery(actions.ActionPlugin):
"""Parses the plist request specified and returns the results.
PlistQuery allows you to obtain data from a plist, optionally only if it
matches the given filter.
Querying for a plist is done in two steps. First, its contents are
retrieved.
For plists where the top level element is a dict, you can use the key
parameter of the PlistRequest to specify a path into the dict to retrieve.
When specifying a key, the requested key values are places under a dictionary
key called "key".
Whether you've specified a key or not, the query parameter allows you to
filter the results based on the values retrieved.
"""
in_rdfvalue = rdf_plist.PlistRequest
out_rdfvalues = [rdf_protodict.RDFValueArray]
MAX_PLIST_SIZE = 1024 * 1024 * 100 # 100 MB
def Run(self, args):
# TODO(hanuszczak): Why are these instance variables?
self.context = args.context
self.filter_query = args.query
with vfs.VFSOpen(args.pathspec, progress_callback=self.Progress) as fd:
data = fd.Read(self.MAX_PLIST_SIZE)
plist = binplist.readPlist(io.BytesIO(data))
# Create the query parser
parser = plist_lib.PlistFilterParser(unicode(self.filter_query)).Parse()
filter_imp = plist_lib.PlistFilterImplementation
matcher = parser.Compile(filter_imp)
if self.context:
# Obtain the values for the context using the value expander
value_expander = filter_imp.FILTERS["ValueExpander"]
iterator = value_expander().Expand(plist, self.context)
else:
# If we didn't get a context, the context is the whole plist
iterator = [plist]
reply = rdf_protodict.RDFValueArray()
for item in iterator:
# As we're setting the context manually, we need to account for types
if isinstance(item, types.ListType):
for sub_item in item:
partial_plist = plist_lib.PlistValueToPlainValue(sub_item)
if matcher.Matches(partial_plist):
reply.Append(sub_item)
else:
partial_plist = plist_lib.PlistValueToPlainValue(item)
if matcher.Matches(partial_plist):
reply.Append(partial_plist)
self.SendReply(reply)
|
import aiohttp
import json
import logging
log = logging.getLogger()
CARBONITEX_API_BOTDATA = 'https://www.carbonitex.net/discord/data/botdata.php'
DISCORD_BOTS_API = 'https://bots.discord.pw/api'
class Carbonitex:
"""Cog for updating carbonitex.net and bots.discord.pw bot information."""
def __init__(self, bot):
self.bot = bot
self.session = aiohttp.ClientSession()
def __unload(self):
# pray it closes
self.bot.loop.create_task(self.session.close())
async def update(self):
carbon_payload = {
'key': self.bot.carbon_key,
'servercount': len(self.bot.servers)
}
async with self.session.post(CARBONITEX_API_BOTDATA, data=carbon_payload) as resp:
log.info('Carbon statistics returned {0.status} for {1}'.format(resp, carbon_payload))
payload = json.dumps({
'server_count': len(self.bot.servers)
})
headers = {
'authorization': self.bot.bots_key,
'content-type': 'application/json'
}
url = '{0}/bots/{1.user.id}/stats'.format(DISCORD_BOTS_API, self.bot)
async with self.session.post(url, data=payload, headers=headers) as resp:
log.info('DBots statistics returned {0.status} for {1}'.format(resp, payload))
async def on_server_join(self, server):
await self.update()
async def on_server_remove(self, server):
await self.update()
async def on_ready(self):
await self.update()
def setup(bot):
bot.add_cog(Carbonitex(bot))
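# Illustrative usage sketch (the module path is an assumption; the bot must
# expose the carbon_key and bots_key attributes this cog reads):
#
#   bot.load_extension('cogs.carbonitex')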
|
var express = require('express');
var router = express.Router();
var config = require('../libs/config');
//security
var crypto = require('crypto');
var jwt = require('jsonwebtoken');
//logging
var intel = require('intel');
var log = require('../libs/log')('console', intel.DEBUG);
// database
var ConnectionFabric = require('../libs/connection-fabric');
var User = ConnectionFabric.defaultConnection.import('../models/user');
var sendResponse = require('../libs/response-callback');
router.post('/signin', function(req, res) {
// find user
User.findOne({
where: {
email: req.body.email
}
}).then(function(user) {
if(!user) {
log.info('Authentication failed. User not found');
sendResponse(res, 400, 'User not found');
return;
}
// check if password matches
var hash = crypto
.createHash("sha1")
.update(req.body.password)
.digest('hex');
if (user.password != hash) {
log.info('Authentication failed. Wrong password');
sendResponse(res, 400, 'Wrong password');
return;
}
// if user is found and password is right
var tokenString = jwt.sign({
idUser: user.idUser
}, config.get('secret'), {
expiresIn: 60*60*24 // expires in 24 hours
});
//get user profile
User.findById(user.idUser).then(function(user) {
if(!user) {
sendResponse(res, 400, 'User with passed id not found');
} else {
res.code = 200;
res.json({
token: tokenString,
user : {
idUser : user.idUser,
email : user.email,
username : user.username,
idGroup : user.group
}
});
}
}, function(err) {
log.error('Internal error(%d): %s', err.code, err.message);
sendResponse(res, 500, 'Server error')
});
}, function(err) {
log.error('Internal error(%d): %s', err.code, err.message);
sendResponse(res, 500, 'Server error')
});
});
router.post('/refresh', function(req, res) {
var token = req.headers['x-access-token'];
jwt.verify(token, config.get('secret'), function(err, decoded) {
if (err) {
log.error('Internal error(%d): %s', err.code, err.message);
sendResponse(res, 400, 'Token prolongation failed');
return;
}
log.info('Refreshing token for user with id %s', decoded.idUser);
var tokenString = jwt.sign({
idUser: decoded.idUser
}, config.get('secret'), {
expiresIn: 60*60 // expires in 60 minutes
});
res.code = 200;
res.json({
token: tokenString
});
});
});
router.post('/signup', function(req, res) {
User.findOne({ where:
{
email: req.body.email
}
}).then(function(user) {
//check if user already exists
if(user) {
log.info('User with email %s already exists', req.body.email);
sendResponse(res, 400, 'User with this email already exists');
return;
}
var hash = crypto
.createHash("sha1")
.update(req.body.password)
.digest('hex');
//it's okay, save user
User.create(
{
email: req.body.email,
password: hash,
username: req.body.username
}).then(function (user) {
log.info('User with email %s successfully created!', user.email);
res.code = 200;
res.json({
idUser : user.idUser
});
}, function(err) {
log.error('Internal error when creating user(%d): %s', err.code, err.message);
sendResponse(res, 500, 'Server error')
});
}, function(err) {
log.error('Internal error(%d): %s', err.code, err.message);
sendResponse(res, 500, 'Server error')
});
});
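// Illustrative mounting sketch (the path and filename are assumptions, not
// part of this module):
//
//   var auth = require('./routes/auth');
//   app.use('/api/auth', auth); // POST /api/auth/signin, /signup, /refresh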
module.exports = router; |
const { Client, CommandInteraction } = require("discord.js");
const config = require('../../config');
const qdb = require('quick.db');
const ydb = new qdb.table("yetkili");//pasador code
const idb = new qdb.table("isimler");
module.exports = {
name: "kadın",
description: "Kullanıcıları Kadın Olarak Kaydetmenizi Sağlar",
options: [
{
type: "USER",
name: "user",
description: "Kullanıcı Belirtin",
required: true
},
{
type: "STRING",
name: "isim",
description: "Kayıt Etmek İstediğiniz Adı Belirtin",
required: false
},
{
type: "NUMBER",
name: "yaş",
description: "Kayıt Etmek İstediğiniz Yaşı Belirtin",
required: false
}
],
/**
*
* @param {Client} client
* @param {CommandInteraction} interaction
* @param {String[]} args
*/
async execute (client, interaction, args) {
// definition of user
let user = args[0];
let member = interaction.guild.members.cache.get(user);
// perm & some useful things
if(!config.RegisterHammer.some(role => interaction.member.roles.cache.get(role)) && !interaction.member.permissions.has("ADMINISTRATOR")) return interaction.followUp({ content: "Bu komudu kullanmak için <@&" + config.RegisterHammer.map(role => role).join(", ") + "> rol(ler)ine veya \`ADMINISTRATOR\` yetkisine sahip olmalısın." })
if(member.id === interaction.member.id) return interaction.followUp({ content: "Kendinizi kayıt edemezsiniz." });
if(member.roles.highest.position >= interaction.member.roles.highest.position) return interaction.followUp({ content: "Bu kullanıcıyı kayıt edemezsin." });
// roles add & check
if(config.womanRoles.some(role => member.roles.cache.get(role))) return interaction.followUp({ content: "Bu kullanıcı zaten kayıtlı" })
member.roles.cache.has(config.boosterRole) ? member.roles.set([config.womanRoles[0], config.womanRoles[1], config.unregRole]) : member.roles.set([config.womanRoles[0], config.womanRoles[1]]);
// set & fix nickname
const fix = member.user.username.includes(config.serverTag) ? config.serverTag : config.unTag;
const fix_name = args[1].charAt(0).replace("i", "İ").toLocaleUpperCase() + args[1].slice(1).toLocaleLowerCase();
const name = `${fix} ${fix_name} | ${args[2]}`
member.setNickname(name)
// message
interaction.followUp({ content: `${member} başarıyla <@&${config.womanRoles[0]}> olarak kayıt edildi` })
// let's use the dbs
ydb.add(`yetkili.${interaction.member.id}.toplam`, +1);
ydb.add(`yetkili.${interaction.member.id}.kadın`, +1);
idb.push(`kullanici.${member.id}.isimler`, {
Name: name,
Process: `<@&${config.womanRoles[0]}>`
});
idb.set(`kullanici.${member.id}.sonisim`, {
Name: name
})
},
};
|
/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang( 'stylescombo_i', 'de', {
label: 'Stil',
panelTitle: 'Formatierungenstil',
panelTitle1: 'Block Stilart',
panelTitle2: 'Inline Stilart',
panelTitle3: 'Objekt Stilart'
} );
|
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(require("os"));
const util = __importStar(require("util"));
const fs = __importStar(require("fs"));
const toolCache = __importStar(require("@actions/tool-cache"));
const core = __importStar(require("@actions/core"));
const table = __importStar(require("table"));
const semver = __importStar(require("semver"));
const toolrunner_1 = require("@actions/exec/lib/toolrunner");
const fileHelper = __importStar(require("./fileHelper"));
const utils = __importStar(require("./utils"));
const inputHelper = __importStar(require("./inputHelper"));
exports.DOCKLE_EXIT_CODE = 5;
exports.LEVEL_INFO = "INFO";
exports.dockleToolName = "dockle";
const stableDockleVersion = "0.2.4";
const dockleLatestReleaseUrl = "https://api.github.com/repos/goodwithtech/dockle/releases/latest";
const KEY_DETAILS = "details";
const KEY_CODE = "code";
const KEY_TITLE = "title";
const KEY_LEVEL = "level";
const KEY_ALERTS = "alerts";
const LEVEL_FATAL = "FATAL";
const LEVEL_WARN = "WARN";
const LEVEL_IGNORE = "IGNORE";
const LEVEL_SKIP = "SKIP";
const TITLE_COUNT = "COUNT";
const TITLE_LEVEL = "LEVEL";
const TITLE_VULNERABILITY_ID = "VULNERABILITY ID";
const TITLE_TITLE = "TITLE";
const TITLE_SEVERITY = "SEVERITY";
const TITLE_DESCRIPTION = "DESCRIPTION";
function runDockle() {
return __awaiter(this, void 0, void 0, function* () {
const docklePath = yield getDockle();
const imageName = inputHelper.imageName;
const dockleOptions = yield getDockleExecOptions();
console.log("Scanning for CIS and best practice violations...");
let dockleArgs = ['-f', 'json', '-o', getOutputPath(), '--exit-level', exports.LEVEL_INFO, '--exit-code', exports.DOCKLE_EXIT_CODE.toString(), imageName];
const dockleToolRunner = new toolrunner_1.ToolRunner(docklePath, dockleArgs, dockleOptions);
const dockleStatus = yield dockleToolRunner.exec();
utils.addLogsToDebug(getDockleLogPath());
return dockleStatus;
});
}
exports.runDockle = runDockle;
function getDockle() {
return __awaiter(this, void 0, void 0, function* () {
const latestDockleVersion = yield getLatestDockleVersion();
let cachedToolPath = toolCache.find(exports.dockleToolName, latestDockleVersion);
if (!cachedToolPath) {
let dockleDownloadPath;
const dockleDownloadUrl = getDockleDownloadUrl(latestDockleVersion);
const dockleDownloadDir = `${process.env['GITHUB_WORKSPACE']}/_temp/tools/dockle`;
core.debug(util.format("Could not find dockle in cache, downloading from %s", dockleDownloadUrl));
try {
dockleDownloadPath = yield toolCache.downloadTool(dockleDownloadUrl, dockleDownloadDir);
}
catch (error) {
throw new Error(util.format("Failed to download dockle from %s", dockleDownloadUrl));
}
const untarredDocklePath = yield toolCache.extractTar(dockleDownloadPath);
cachedToolPath = yield toolCache.cacheDir(untarredDocklePath, exports.dockleToolName, latestDockleVersion);
}
const dockleToolPath = cachedToolPath + "/" + exports.dockleToolName;
fs.chmodSync(dockleToolPath, "777");
core.debug(util.format("Dockle executable found at path ", dockleToolPath));
return dockleToolPath;
});
}
exports.getDockle = getDockle;
function getOutputPath() {
const dockleOutputPath = `${fileHelper.getContainerScanDirectory()}/dockleoutput.json`;
return dockleOutputPath;
}
exports.getOutputPath = getOutputPath;
function getDockleLogPath() {
const dockleLogPath = `${fileHelper.getContainerScanDirectory()}/docklelog`;
return dockleLogPath;
}
exports.getDockleLogPath = getDockleLogPath;
function getSummary(dockleStatus) {
let summary = '';
switch (dockleStatus) {
case 0:
summary = 'No CIS benchmark violations were detected in the container image.';
break;
case exports.DOCKLE_EXIT_CODE:
summary = getCisSummary();
break;
default:
summary = 'An error occurred while scanning the container image for CIS benchmark violations.';
break;
}
return `- ${summary}`;
}
exports.getSummary = getSummary;
function getText(dockleStatus) {
let clusteredViolations = '';
const cisIdsByLevel = getCisIdsByLevel(dockleStatus);
for (let level in cisIdsByLevel) {
if (cisIdsByLevel[level].length > 0) {
clusteredViolations = `${clusteredViolations}\n- **${level}**:\n${cisIdsByLevel[level].join('\n')}`;
}
}
return `**Best Practices Violations** -${clusteredViolations ? clusteredViolations : '\nNone found.'}`;
}
exports.getText = getText;
function getFilteredOutput() {
const dockleOutputJson = getDockleOutput();
let filteredVulnerabilities = [];
dockleOutputJson[KEY_DETAILS].forEach(cis => {
if (cis[KEY_LEVEL] != LEVEL_IGNORE) {
let vulnObject = {
[KEY_CODE]: cis[KEY_CODE],
[KEY_TITLE]: cis[KEY_TITLE],
[KEY_LEVEL]: cis[KEY_LEVEL],
[KEY_ALERTS]: cis[KEY_ALERTS][0]
};
filteredVulnerabilities.push(vulnObject);
}
});
return filteredVulnerabilities;
}
exports.getFilteredOutput = getFilteredOutput;
function getDockleEnvVariables() {
return __awaiter(this, void 0, void 0, function* () {
let dockleEnv = {};
for (let key in process.env) {
dockleEnv[key] = process.env[key] || "";
}
const username = inputHelper.username;
const password = inputHelper.password;
if (username && password) {
dockleEnv["DOCKLE_USERNAME"] = username;
dockleEnv["DOCKLE_PASSWORD"] = password;
}
return dockleEnv;
});
}
function getLevelsToInclude() {
return [LEVEL_FATAL, LEVEL_WARN, exports.LEVEL_INFO];
}
function getCisIdsByLevel(dockleStatus) {
const levels = getLevelsToInclude();
let cisIdsByLevel = {};
if (dockleStatus === exports.DOCKLE_EXIT_CODE) {
const dockleOutputJson = getDockleOutput();
const dockleDetails = dockleOutputJson['details'];
for (let level of levels) {
cisIdsByLevel[level] = dockleDetails
.filter(dd => dd['level'].toUpperCase() === level)
.map(dd => dd['code']);
}
}
return cisIdsByLevel;
}
function getDockleOutput() {
const path = getOutputPath();
return fileHelper.getFileJson(path);
}
function getCisSummary() {
const dockleOutputJson = getDockleOutput();
let cisSummary = 'Best practices test summary -';
const dockleSummary = dockleOutputJson['summary'];
const includedLevels = getLevelsToInclude();
if (dockleSummary) {
for (let level in dockleSummary) {
if (includedLevels.includes(level.toUpperCase())) {
const levelCount = dockleSummary[level];
const isBold = levelCount > 0;
cisSummary = isBold
? `${cisSummary}\n**${level.toUpperCase()}**: **${dockleSummary[level]}**`
: `${cisSummary}\n${level.toUpperCase()}: ${dockleSummary[level]}`;
}
}
}
return cisSummary;
}
function getLatestDockleVersion() {
return __awaiter(this, void 0, void 0, function* () {
return toolCache.downloadTool(dockleLatestReleaseUrl).then((downloadPath) => {
const response = JSON.parse(fs.readFileSync(downloadPath, 'utf8').toString().trim());
if (!response.tag_name) {
return stableDockleVersion;
}
return semver.clean(response.tag_name);
}, (error) => {
core.warning(util.format("Failed to read latest dockle verison from %s. Using default stable version %s", dockleLatestReleaseUrl, stableDockleVersion));
return stableDockleVersion;
});
});
}
function getDockleDownloadUrl(dockleVersion) {
const curOS = os.type();
switch (curOS) {
case "Linux":
return util.format("https://github.com/goodwithtech/dockle/releases/download/v%s/dockle_%s_Linux-64bit.tar.gz", dockleVersion, dockleVersion);
case "Darwin":
return util.format("https://github.com/goodwithtech/dockle/releases/download/v%s/dockle_%s_macOS-64bit.tar.gz", dockleVersion, dockleVersion);
default:
throw new Error(util.format("Container scanning is not supported on %s currently", curOS));
}
}
function getDockleExecOptions() {
return __awaiter(this, void 0, void 0, function* () {
const dockleEnv = yield getDockleEnvVariables();
return {
env: dockleEnv,
ignoreReturnCode: true,
outStream: fs.createWriteStream(getDockleLogPath())
};
});
}
function printFormattedOutput() {
const dockleOutputJson = getDockleOutput();
let rows = [];
let titles = [TITLE_VULNERABILITY_ID, TITLE_TITLE, TITLE_SEVERITY, TITLE_DESCRIPTION];
rows.push(titles);
dockleOutputJson[KEY_DETAILS].forEach(cis => {
if (cis[KEY_LEVEL] != LEVEL_IGNORE) {
let row = [];
row.push(cis[KEY_CODE]);
row.push(cis[KEY_TITLE]);
row.push(cis[KEY_LEVEL]);
row.push(cis[KEY_ALERTS][0]);
rows.push(row);
}
});
let widths = [25, 25, 15, 55];
console.log(table.table(rows, utils.getConfigForTable(widths)));
}
exports.printFormattedOutput = printFormattedOutput;
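// Illustrative flow sketch (not part of this module): a caller typically runs
// the scan and then renders the results, e.g.
//
//   const status = await runDockle(); // 0 = clean, DOCKLE_EXIT_CODE = violations found
//   console.log(getSummary(status));
//   if (status === exports.DOCKLE_EXIT_CODE) printFormattedOutput();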
|
"use strict";
function __export(m) {
for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
}
Object.defineProperty(exports, "__esModule", { value: true });
__export(require("../core"));
__export(require("./client/client"));
__export(require("./lib/simple_command"));
// var unhandled_rejection_1 = require("./lib/unhandled_rejection");
// exports.initUnhandledRejection = unhandled_rejection_1.init;
var rejectify_1 = require("./lib/rejectify");
exports.rejectifyValue = rejectify_1.rejectifyValue;
exports.rejectifyErrorCode = rejectify_1.rejectifyErrorCode;
__export(require("./host/chain_host"));
const chain_host_1 = require("./host/chain_host");
const core_1 = require("../core");
let host = new chain_host_1.ChainHost();
exports.host = host;
host.registerNet('tcp', (commandOptions) => {
let _host = commandOptions.get('host');
if (!_host) {
console.error('invalid tcp host');
return;
}
let port = commandOptions.get('port');
if (!port) {
console.error('invalid tcp port');
return;
}
let peers = commandOptions.get('peers');
if (!peers) {
peers = [];
}
else {
peers = peers.split(';');
}
let nodeType = core_1.StaticOutNode(core_1.TcpNode);
return new nodeType({ host: _host, port }, peers);
});
host.registerNet('bdt', (commandOptions) => {
let _host = commandOptions.get('host');
if (!_host) {
console.error('invalid bdt host');
return;
}
let port = commandOptions.get('port');
if (!port) {
console.error('no bdt port');
return;
}
port = port.split('|');
let udpport = 0;
let tcpport = parseInt(port[0]);
if (port.length === 1) {
udpport = tcpport + 10;
}
else {
udpport = parseInt(port[1]);
}
if (isNaN(tcpport) || isNaN(udpport)) {
console.error('invalid bdt port');
return;
}
let peerid = commandOptions.get('peerid');
if (!peerid) {
// derive a default peerid from the configured host and tcp port
peerid = `${_host}:${tcpport}`;
}
let snPeers = commandOptions.get('sn');
if (!snPeers) {
console.error('no sn');
return;
}
let snconfig = snPeers.split('@');
if (snconfig.length !== 4) {
console.error('invalid sn: <SN_PEERID>@<SN_IP>@<SN_TCP_PORT>@<SN_UDP_PORT>');
return;
}
const snPeer = {
peerid: `${snconfig[0]}`,
eplist: [
`4@${snconfig[1]}@${snconfig[2]}@t`,
`4@${snconfig[1]}@${snconfig[3]}@u`
]
};
let bdt_logger = {
level: commandOptions.get('bdt_log_level') || 'info',
// set the log directory
file_dir: commandOptions.get('dataDir') + '/log',
};
return new core_1.BdtNode({ host: _host, tcpport, udpport, peerid, snPeer, bdtLoggerOptions: bdt_logger });
});
|
'use strict';
/* jshint ignore:start */
/**
* This code was generated by
* \ / _ _ _| _ _
* | (_)\/(_)(_|\/| |(/_ v1.0.0
* / /
*/
/* jshint ignore:end */
var Holodeck = require('../../../holodeck'); /* jshint ignore:line */
var Request = require(
'../../../../../lib/http/request'); /* jshint ignore:line */
var Response = require(
'../../../../../lib/http/response'); /* jshint ignore:line */
var RestException = require(
'../../../../../lib/base/RestException'); /* jshint ignore:line */
var Twilio = require('../../../../../lib'); /* jshint ignore:line */
var client;
var holodeck;
describe('PlayerStreamer', function() {
beforeEach(function() {
holodeck = new Holodeck();
client = new Twilio('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'AUTHTOKEN', {
httpClient: holodeck
});
});
it('should generate valid fetch request',
function(done) {
holodeck.mock(new Response(500, {}));
var promise = client.media.v1.playerStreamer('VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').fetch();
promise.then(function() {
throw new Error('failed');
}, function(error) {
expect(error.constructor).toBe(RestException.prototype.constructor);
done();
}).done();
var sid = 'VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX';
var url = `https://media.twilio.com/v1/PlayerStreamers/${sid}`;
holodeck.assertHasRequest(new Request({
method: 'GET',
url: url
}));
}
);
it('should generate valid fetch response',
function(done) {
var body = {
'account_sid': 'ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'date_created': '2015-07-30T20:00:00Z',
'date_updated': '2015-07-30T20:00:00Z',
'status': 'created',
'video': true,
'sid': 'VJcafebabecafebabecafebabecafebabe',
'status_callback': 'http://www.example.com',
'status_callback_method': 'POST',
'ended_reason': null,
'url': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe',
'max_duration': 300,
'links': {
'timed_metadata': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/TimedMetadata',
'playback_grant': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant'
}
};
holodeck.mock(new Response(200, body));
var promise = client.media.v1.playerStreamer('VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').fetch();
promise.then(function(response) {
expect(response).toBeDefined();
done();
}, function() {
throw new Error('failed');
}).done();
}
);
it('should generate valid create request',
function(done) {
holodeck.mock(new Response(500, {}));
var promise = client.media.v1.playerStreamer.create();
promise.then(function() {
throw new Error('failed');
}, function(error) {
expect(error.constructor).toBe(RestException.prototype.constructor);
done();
}).done();
var url = 'https://media.twilio.com/v1/PlayerStreamers';
holodeck.assertHasRequest(new Request({
method: 'POST',
url: url
}));
}
);
it('should generate valid create response',
function(done) {
var body = {
'account_sid': 'ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'date_created': '2015-07-30T20:00:00Z',
'date_updated': '2015-07-30T20:00:00Z',
'status': 'created',
'video': true,
'sid': 'VJcafebabecafebabecafebabecafebabe',
'status_callback': 'http://www.example.com',
'status_callback_method': 'POST',
'ended_reason': null,
'url': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe',
'max_duration': 300,
'links': {
'timed_metadata': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/TimedMetadata',
'playback_grant': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant'
}
};
holodeck.mock(new Response(201, body));
var promise = client.media.v1.playerStreamer.create();
promise.then(function(response) {
expect(response).toBeDefined();
done();
}, function() {
throw new Error('failed');
}).done();
}
);
it('should generate valid update request',
function(done) {
holodeck.mock(new Response(500, {}));
var opts = {'status': 'ended'};
var promise = client.media.v1.playerStreamer('VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update(opts);
promise.then(function() {
throw new Error('failed');
}, function(error) {
expect(error.constructor).toBe(RestException.prototype.constructor);
done();
}).done();
var sid = 'VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX';
var url = `https://media.twilio.com/v1/PlayerStreamers/${sid}`;
var values = {'Status': 'ended', };
holodeck.assertHasRequest(new Request({
method: 'POST',
url: url,
data: values
}));
}
);
it('should generate valid update response',
function(done) {
var body = {
'account_sid': 'ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'date_created': '2015-07-30T20:00:00Z',
'date_updated': '2015-07-30T20:01:00Z',
'status': 'ended',
'video': true,
'sid': 'VJcafebabecafebabecafebabecafebabe',
'status_callback': 'http://www.example.com',
'status_callback_method': 'POST',
'ended_reason': 'ended-via-api',
'url': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe',
'max_duration': 300,
'links': {
'timed_metadata': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/TimedMetadata',
'playback_grant': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant'
}
};
holodeck.mock(new Response(200, body));
var opts = {'status': 'ended'};
var promise = client.media.v1.playerStreamer('VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update(opts);
promise.then(function(response) {
expect(response).toBeDefined();
done();
}, function() {
throw new Error('failed');
}).done();
}
);
it('should treat the first each arg as a callback',
function(done) {
var body = {
'meta': {
'page': 0,
'page_size': 10,
'first_page_url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=10&Page=0',
'previous_page_url': null,
'url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=10&Page=0',
'next_page_url': null,
'key': 'player_streamers'
},
'player_streamers': [
{
'account_sid': 'ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'date_created': '2015-07-30T20:00:00Z',
'date_updated': '2015-07-30T20:01:00Z',
'status': 'ended',
'video': true,
'sid': 'VJcafebabecafebabecafebabecafebabe',
'status_callback': 'http://www.example.com',
'status_callback_method': 'POST',
'ended_reason': 'ended-via-api',
'url': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe',
'max_duration': 300,
'links': {
'timed_metadata': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/TimedMetadata',
'playback_grant': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant'
}
}
]
};
holodeck.mock(new Response(200, body));
client.media.v1.playerStreamer.each(() => done());
}
);
it('should treat the second arg as a callback',
function(done) {
var body = {
'meta': {
'page': 0,
'page_size': 10,
'first_page_url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=10&Page=0',
'previous_page_url': null,
'url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=10&Page=0',
'next_page_url': null,
'key': 'player_streamers'
},
'player_streamers': [
{
'account_sid': 'ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'date_created': '2015-07-30T20:00:00Z',
'date_updated': '2015-07-30T20:01:00Z',
'status': 'ended',
'video': true,
'sid': 'VJcafebabecafebabecafebabecafebabe',
'status_callback': 'http://www.example.com',
'status_callback_method': 'POST',
'ended_reason': 'ended-via-api',
'url': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe',
'max_duration': 300,
'links': {
'timed_metadata': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/TimedMetadata',
'playback_grant': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant'
}
}
]
};
holodeck.mock(new Response(200, body));
client.media.v1.playerStreamer.each({pageSize: 20}, () => done());
holodeck.assertHasRequest(new Request({
method: 'GET',
url: 'https://media.twilio.com/v1/PlayerStreamers',
params: {PageSize: 20},
}));
}
);
it('should find the callback in the opts object',
function(done) {
var body = {
'meta': {
'page': 0,
'page_size': 10,
'first_page_url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=10&Page=0',
'previous_page_url': null,
'url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=10&Page=0',
'next_page_url': null,
'key': 'player_streamers'
},
'player_streamers': [
{
'account_sid': 'ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'date_created': '2015-07-30T20:00:00Z',
'date_updated': '2015-07-30T20:01:00Z',
'status': 'ended',
'video': true,
'sid': 'VJcafebabecafebabecafebabecafebabe',
'status_callback': 'http://www.example.com',
'status_callback_method': 'POST',
'ended_reason': 'ended-via-api',
'url': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe',
'max_duration': 300,
'links': {
'timed_metadata': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/TimedMetadata',
'playback_grant': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant'
}
}
]
};
holodeck.mock(new Response(200, body));
client.media.v1.playerStreamer.each({callback: () => done()}, () => fail('wrong callback!'));
}
);
it('should generate valid list request',
function(done) {
holodeck.mock(new Response(500, {}));
var promise = client.media.v1.playerStreamer.list();
promise.then(function() {
throw new Error('failed');
}, function(error) {
expect(error.constructor).toBe(RestException.prototype.constructor);
done();
}).done();
var url = 'https://media.twilio.com/v1/PlayerStreamers';
holodeck.assertHasRequest(new Request({
method: 'GET',
url: url
}));
}
);
it('should generate valid read_empty response',
function(done) {
var body = {
'meta': {
'page': 0,
'page_size': 10,
'first_page_url': 'https://media.twilio.com/v1/PlayerStreamers?Status=started&Order=asc&PageSize=10&Page=0',
'previous_page_url': null,
'url': 'https://media.twilio.com/v1/PlayerStreamers?Status=started&Order=asc&PageSize=10&Page=0',
'next_page_url': null,
'key': 'player_streamers'
},
'player_streamers': []
};
holodeck.mock(new Response(200, body));
var promise = client.media.v1.playerStreamer.list();
promise.then(function(response) {
expect(response).toBeDefined();
done();
}, function() {
throw new Error('failed');
}).done();
}
);
it('should generate valid read_items response',
function(done) {
var body = {
'meta': {
'page': 0,
'page_size': 10,
'first_page_url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=10&Page=0',
'previous_page_url': null,
'url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=10&Page=0',
'next_page_url': null,
'key': 'player_streamers'
},
'player_streamers': [
{
'account_sid': 'ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'date_created': '2015-07-30T20:00:00Z',
'date_updated': '2015-07-30T20:01:00Z',
'status': 'ended',
'video': true,
'sid': 'VJcafebabecafebabecafebabecafebabe',
'status_callback': 'http://www.example.com',
'status_callback_method': 'POST',
'ended_reason': 'ended-via-api',
'url': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe',
'max_duration': 300,
'links': {
'timed_metadata': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/TimedMetadata',
'playback_grant': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant'
}
}
]
};
holodeck.mock(new Response(200, body));
var promise = client.media.v1.playerStreamer.list();
promise.then(function(response) {
expect(response).toBeDefined();
done();
}, function() {
throw new Error('failed');
}).done();
}
);
it('should generate valid read_items_page_larger_than_max response',
function(done) {
var body = {
'meta': {
'page': 0,
'page_size': 100,
'first_page_url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=100&Page=0',
'previous_page_url': null,
'url': 'https://media.twilio.com/v1/PlayerStreamers?Status=ended&Order=desc&PageSize=100&Page=0',
'next_page_url': null,
'key': 'player_streamers'
},
'player_streamers': [
{
'account_sid': 'ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'date_created': '2015-07-30T20:00:00Z',
'date_updated': '2015-07-30T20:01:00Z',
'status': 'ended',
'video': true,
'sid': 'VJcafebabecafebabecafebabecafebabe',
'status_callback': 'http://www.example.com',
'status_callback_method': 'POST',
'ended_reason': 'ended-via-api',
'url': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe',
'max_duration': 300,
'links': {
'timed_metadata': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/TimedMetadata',
'playback_grant': 'https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant'
}
}
]
};
holodeck.mock(new Response(200, body));
var promise = client.media.v1.playerStreamer.list();
promise.then(function(response) {
expect(response).toBeDefined();
done();
}, function() {
throw new Error('failed');
}).done();
}
);
});
|
# encoding: utf-8
"""
flow.py
Created by Thomas Mangin on 2010-01-14.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
# Do not use __slots__ here, we never create enough of them to be worth it
# And it really break complex inheritance
from struct import pack
from struct import unpack
from exabgp.protocol.ip import IP
from exabgp.protocol.ip import NoIP
from exabgp.protocol.family import AFI
from exabgp.protocol.family import SAFI
from exabgp.bgp.message import OUT
from exabgp.bgp.message.notification import Notify
from exabgp.bgp.message.update.nlri.cidr import CIDR
from exabgp.protocol import Protocol
from exabgp.protocol.ip.icmp import ICMPType
from exabgp.protocol.ip.icmp import ICMPCode
from exabgp.protocol.ip.fragment import Fragment
from exabgp.protocol.ip.tcp.flag import TCPFlag
from exabgp.bgp.message.update.nlri.nlri import NLRI
from exabgp.bgp.message.update.nlri.qualifier.rd import RouteDistinguisher
# =================================================================== Flow Components
class IComponent (object):
# all have ID
# should have an interface for serialisation and put it here
pass
class CommonOperator (object):
# power (2,x) is the same as 1 << x which is what the RFC say the len is
power = {0:1, 1:2, 2:4, 3:8,}
rewop = {1:0, 2:1, 4:2, 8:3,}
len_position = 0x30
EOL = 0x80 # 0b10000000
AND = 0x40 # 0b01000000
LEN = 0x30 # 0b00110000
NOP = 0x00
OPERATOR = 0xFF ^ (EOL | LEN)
@staticmethod
def eol (data):
return data & CommonOperator.EOL
@staticmethod
def operator (data):
return data & CommonOperator.OPERATOR
@staticmethod
def length (data):
return 1 << ((data & CommonOperator.LEN) >> 4)
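# Illustrative note (hypothetical byte): for an operator byte 0xB1,
# eol(0xB1) is truthy (bit 0x80 set), the LEN bits are 0x30 so
# length(0xB1) == 1 << 3 == 8, and operator(0xB1) masks EOL and LEN away,
# leaving 0x01.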
class NumericOperator (CommonOperator):
# reserved= 0x08 # 0b00001000
TRUE = 0x00 # 0b00000000
LT = 0x04 # 0b00000100
GT = 0x02 # 0b00000010
EQ = 0x01 # 0b00000001
class BinaryOperator (CommonOperator):
# reserved= 0x0C # 0b00001100
INCLUDE = 0x00
NOT = 0x02 # 0b00000010
MATCH = 0x01 # 0b00000001
def _len_to_bit (value):
return NumericOperator.rewop[value] << 4
def _bit_to_len (value):
return NumericOperator.power[(value & CommonOperator.len_position) >> 4]
def _number (string):
value = 0
for c in string:
value = (value << 8) + ord(c)
return value
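# Illustrative note: _number reads a byte string big-endian, e.g.
# _number('\x01\x02') == (1 << 8) + 2 == 258.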
# def short (value):
# return (ord(value[0]) << 8) + ord(value[1])
# Interface ..................
class IPv4 (object):
afi = AFI.ipv4
class IPv6 (object):
afi = AFI.ipv6
class IPrefix (object):
pass
# Protocol
class IPrefix4 (IPrefix,IComponent,IPv4):
# Must be defined in subclasses
CODE = -1
NAME = ''
# not used, just present for simplifying the nlri generation
operations = 0x0
def __init__ (self, raw, netmask):
self.nlri = CIDR(raw,netmask)
def pack (self):
raw = self.nlri.pack()
# ID is defined in subclasses
return "%s%s" % (chr(self.ID),raw) # pylint: disable=E1101
def __str__ (self):
return str(self.nlri)
class IPrefix6 (IPrefix,IComponent,IPv6):
# Must be defined in subclasses
CODE = -1
NAME = ''
# not used, just present for simplifying the nlri generation
operations = 0x0
def __init__ (self, raw, netmask, offset):
self.nlri = CIDR(raw,netmask)
self.offset = offset
def pack (self):
raw = self.nlri.packed_ip()
# ID is defined in subclasses
return "%s%s%s%s" % (chr(self.ID),chr(self.nlri.mask),chr(self.offset),raw) # pylint: disable=E1101
def __str__ (self):
return "%s/%s" % (self.nlri,self.offset)
class IOperation (IComponent):
# need to implement encode which encode the value of the operator
def __init__ (self, operations, value):
self.operations = operations
self.value = value
self.first = None # handled by pack/str
def pack (self):
l,v = self.encode(self.value)
op = self.operations | _len_to_bit(l)
return "%s%s" % (chr(op),v)
def encode (self, value):
raise NotImplementedError('this method must be implemented by subclasses')
def decode (self, value):
raise NotImplementedError('this method must be implemented by subclasses')
# class IOperationIPv4 (IOperation):
# def encode (self, value):
# return 4, socket.pton(socket.AF_INET,value)
class IOperationByte (IOperation):
def encode (self, value):
return 1,chr(value)
def decode (self, bgp):
return ord(bgp[0]),bgp[1:]
class IOperationByteShort (IOperation):
def encode (self, value):
if value < (1 << 8):
return 1,chr(value)
return 2,pack('!H',value)
def decode (self, bgp):
return unpack('!H',bgp[:2])[0],bgp[2:]
class IOperationByteShortLong (IOperation):
def encode (self, value):
if value < (1 << 8):
return 1,chr(value)
if value < (1 << 16):
return 2,pack('!H',value)
return 4,pack('!L',value)
# XXX: buggy ?? as it assumes 4 bytes but may be less
def decode (self, bgp):
return unpack('!L',bgp[:4])[0],bgp[4:]
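# Illustrative note (hypothetical values): encode picks the smallest of the
# three sizes, e.g. encode(80) -> (1, '\x50'), encode(8080) -> (2, '\x1f\x90')
# and encode(70000) -> (4, '\x00\x01\x11\x70').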
# String representation for Numeric and Binary Tests
class NumericString (object):
operations = None
value = None
_string = {
NumericOperator.TRUE: 'true',
NumericOperator.LT: '<',
NumericOperator.GT: '>',
NumericOperator.EQ: '=',
NumericOperator.LT | NumericOperator.EQ: '<=',
NumericOperator.GT | NumericOperator.EQ: '>=',
NumericOperator.AND | NumericOperator.LT: '&<',
NumericOperator.AND | NumericOperator.GT: '&>',
NumericOperator.AND | NumericOperator.EQ: '&=',
NumericOperator.AND | NumericOperator.LT | NumericOperator.EQ: '&<=',
NumericOperator.AND | NumericOperator.GT | NumericOperator.EQ: '&>=',
}
def __str__ (self):
return "%s%s" % (self._string[self.operations & (CommonOperator.EOL ^ 0xFF)], self.value)
class BinaryString (object):
operations = None
value = None
_string = {
BinaryOperator.INCLUDE: '',
BinaryOperator.NOT: '!',
BinaryOperator.MATCH: '=',
BinaryOperator.AND | BinaryOperator.NOT: '&!',
BinaryOperator.AND | BinaryOperator.MATCH: '&=',
}
def __str__ (self):
return "%s%s" % (self._string[self.operations & (CommonOperator.EOL ^ 0xFF)], self.value)
# Components ..............................
def converter (function, klass=None):
def _integer (value):
if klass is None:
return function(value)
try:
return klass(value)
except ValueError:
return function(value)
return _integer
def decoder (function, klass=int):
def _inner (value):
return klass(function(value))
return _inner
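# Illustrative sketch of how these helpers are used by the components below
# (behaviour of Protocol/Protocol.Name is assumed from their use here):
#   FlowIPProtocol.converter('tcp')  # tries Protocol('tcp'), falls back to Protocol.Name on ValueError
#   FlowIPProtocol.decoder('\x06')   # ord() then Protocol(6), i.e. TCP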
def PacketLength (data):
	_str_bad_length = "cloudflare already found that invalid max-packet length for you .."
	number = int(data)
	if number < 0 or number > 0xFFFF:
raise ValueError(_str_bad_length)
return number
def PortValue (data):
_str_bad_port = "you tried to set an invalid port number .."
number = int(data)
if number < 0 or number > 0xFFFF:
raise ValueError(_str_bad_port)
return number
def DSCPValue (data):
	_str_bad_dscp = "you tried to filter a flow using an invalid dscp for a component .."
	number = int(data)
	if number < 0 or number > 0x3F:  # DSCP is a 6 bit field (RFC 2474)
		raise ValueError(_str_bad_dscp)
	return number
def ClassValue (data):
	_str_bad_class = "you tried to filter a flow using an invalid traffic class for a component .."
	number = int(data)
	if number < 0 or number > 0xFF:  # the IPv6 traffic class is an 8 bit field (RFC 2460)
		raise ValueError(_str_bad_class)
	return number
def LabelValue (data):
_str_bad_label = "you tried to filter a flow using an invalid traffic label for a component .."
number = int(data)
	if number < 0 or number > 0xFFFFF:  # the IPv6 flow label is a 20 bit field
raise ValueError(_str_bad_label)
return number
# Protocol Shared
class FlowDestination (object):
ID = 0x01
NAME = 'destination'
class FlowSource (object):
ID = 0x02
NAME = 'source'
# Prefix
class Flow4Destination (IPrefix4,FlowDestination):
NAME = 'destination'
# Prefix
class Flow4Source (IPrefix4,FlowSource):
NAME = 'source'
# Prefix
class Flow6Destination (IPrefix6,FlowDestination):
NAME = 'destination'
# Prefix
class Flow6Source (IPrefix6,FlowSource):
NAME = 'source'
class FlowIPProtocol (IOperationByte,NumericString,IPv4):
ID = 0x03
NAME = 'protocol'
converter = staticmethod(converter(Protocol.Name,Protocol))
decoder = staticmethod(decoder(ord,Protocol))
class FlowNextHeader (IOperationByte,NumericString,IPv6):
ID = 0x03
NAME = 'next-header'
converter = staticmethod(converter(Protocol.Name,Protocol))
decoder = staticmethod(decoder(ord,Protocol))
class FlowAnyPort (IOperationByteShort,NumericString,IPv4,IPv6):
ID = 0x04
NAME = 'port'
converter = staticmethod(converter(PortValue))
decoder = staticmethod(_number)
class FlowDestinationPort (IOperationByteShort,NumericString,IPv4,IPv6):
ID = 0x05
NAME = 'destination-port'
converter = staticmethod(converter(PortValue))
decoder = staticmethod(_number)
class FlowSourcePort (IOperationByteShort,NumericString,IPv4,IPv6):
ID = 0x06
NAME = 'source-port'
converter = staticmethod(converter(PortValue))
decoder = staticmethod(_number)
class FlowICMPType (IOperationByte,NumericString,IPv4,IPv6):
ID = 0x07
NAME = 'icmp-type'
converter = staticmethod(converter(ICMPType.Name))
decoder = staticmethod(decoder(_number,ICMPType))
class FlowICMPCode (IOperationByte,NumericString,IPv4,IPv6):
ID = 0x08
NAME = 'icmp-code'
converter = staticmethod(converter(ICMPCode.Name))
decoder = staticmethod(decoder(_number,ICMPCode))
class FlowTCPFlag (IOperationByte,BinaryString,IPv4,IPv6):
ID = 0x09
NAME = 'tcp-flags'
converter = staticmethod(converter(TCPFlag.Name))
decoder = staticmethod(decoder(ord,TCPFlag))
class FlowPacketLength (IOperationByteShort,NumericString,IPv4,IPv6):
ID = 0x0A
NAME = 'packet-length'
converter = staticmethod(converter(PacketLength))
decoder = staticmethod(_number)
# RFC2474
class FlowDSCP (IOperationByteShort,NumericString,IPv4):
ID = 0x0B
NAME = 'dscp'
converter = staticmethod(converter(DSCPValue))
decoder = staticmethod(_number)
# RFC2460
class FlowTrafficClass (IOperationByte,NumericString,IPv6):
ID = 0x0B
NAME = 'traffic-class'
converter = staticmethod(converter(ClassValue))
decoder = staticmethod(_number)
# BinaryOperator
class FlowFragment (IOperationByteShort,BinaryString,IPv4):
ID = 0x0C
NAME = 'fragment'
converter = staticmethod(converter(Fragment.Name))
decoder = staticmethod(decoder(ord,Fragment))
# draft-raszuk-idr-flow-spec-v6-01
class FlowFlowLabel (IOperationByteShortLong,NumericString,IPv6):
ID = 0x0D
NAME = 'flow-label'
converter = staticmethod(converter(LabelValue))
decoder = staticmethod(_number)
# ..........................................................
decode = {AFI.ipv4: {}, AFI.ipv6: {}}
factory = {AFI.ipv4: {}, AFI.ipv6: {}}
for content in dir():
kls = globals().get(content,None)
if not isinstance(kls,type(IComponent)):
continue
if not issubclass(kls,IComponent):
continue
_ID = getattr(kls,'ID',None)
if not _ID:
continue
_afis = []
if issubclass(kls,IPv4):
_afis.append(AFI.ipv4)
if issubclass(kls,IPv6):
_afis.append(AFI.ipv6)
for _afi in _afis:
factory[_afi][_ID] = kls
name = getattr(kls,'NAME')
if issubclass(kls, IOperation):
if issubclass(kls, BinaryString):
decode[_afi][_ID] = 'binary'
elif issubclass(kls, NumericString):
decode[_afi][_ID] = 'numeric'
else:
raise RuntimeError('invalid class defined (string)')
elif issubclass(kls, IPrefix):
decode[_afi][_ID] = 'prefix'
else:
			raise RuntimeError('invalid class defined (type)')
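# After this loop, for example, FlowAnyPort (ID 0x04, valid for both address
# families, numeric) ends up registered as factory[AFI.ipv4][0x04] with
# decode[AFI.ipv4][0x04] == 'numeric' (and likewise under AFI.ipv6).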
# ..........................................................
def _unique ():
value = 0
while True:
yield value
value += 1
unique = _unique()
class Flow (NLRI):
def __init__ (self, afi=AFI.ipv4,safi=SAFI.flow_ip,nexthop=None,rd=None):
NLRI.__init__(self,afi,safi)
self.rules = {}
self.action = OUT.ANNOUNCE
self.nexthop = IP.unpack(nexthop) if nexthop else NoIP
self.rd = rd
self.unique = unique.next()
def __eq__ (self,other):
return self.index() == other.index()
def __ne__ (self, other):
return not self.__eq__(other)
def __hash__ (self):
return hash(self.pack())
def __len__ (self):
return len(self.pack())
def add (self, rule):
ID = rule.ID
if ID in (FlowDestination.ID,FlowSource.ID):
if ID in self.rules:
return False
if ID == FlowDestination.ID:
pair = self.rules.get(FlowSource.ID,[])
else:
pair = self.rules.get(FlowDestination.ID,[])
if pair:
if rule.afi != pair[0].afi:
return False
self.rules.setdefault(ID,[]).append(rule)
return True
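	# add() therefore accepts at most one source and one destination prefix,
	# and refuses to mix an IPv4 prefix with an IPv6 one within the same flow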
# The API requires addpath, but it is irrelevant here.
def pack (self, addpath=None):
ordered_rules = []
		# the ordering of components by type is an RFC requirement
		for ID in sorted(self.rules.keys()):
			rules = self.rules[ID]
			# clear the end-of-list marker on every operation of this component ..
			# (the format does not prevent two contradictory rules, in which case no packet can ever match)
			for rule in rules:
				rule.operations &= (CommonOperator.EOL ^ 0xFF)
			# .. and set it on the last rule only
			rules[-1].operations |= CommonOperator.EOL
			if ID not in (FlowDestination.ID,FlowSource.ID):
				ordered_rules.append(chr(ID))
			ordered_rules.append(''.join(rule.pack() for rule in rules))
components = ''.join(ordered_rules)
if self.safi == SAFI.flow_vpn:
components = self.rd.pack() + components
		l = len(components)
		if l < 0xF0:
			data = "%s%s" % (chr(l),components)
		elif l <= 0x0FFF:
			data = "%s%s" % (pack('!H',l | 0xF000),components)
		else:
			raise Notify(3,0,"flow rule too big for NLRI: a length over 0xFFF bytes can not be encoded")
# data = "%s" % chr(0)
return data
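	# Framing sketch (RFC 5575): the NLRI is length-prefixed; one length byte
	# when under 0xF0, otherwise two bytes with the high nibble set to 0xF and
	# the remaining 12 bits carrying the length; for flow-vpn the route
	# distinguisher precedes the components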
def extensive (self):
string = []
for index in sorted(self.rules):
rules = self.rules[index]
s = []
for idx,rule in enumerate(rules):
				# separate the rules with ' ' unless this one is AND-chained to the previous
if idx and not rule.operations & NumericOperator.AND:
s.append(' ')
s.append(rule)
line = ''.join(str(_) for _ in s)
if len(s) > 1:
line = '[ %s ]' % line
string.append(' %s %s' % (rules[0].NAME,line))
nexthop = ' next-hop %s' % self.nexthop if self.nexthop is not NoIP else ''
rd = str(self.rd) if self.rd else ''
return 'flow' + ''.join(string) + rd + nexthop
def __str__ (self):
return self.extensive()
def _json (self):
string = []
for index in sorted(self.rules):
rules = self.rules[index]
s = []
for idx,rule in enumerate(rules):
				# separate the rules with ', ' unless this one is AND-chained to the previous
if idx and not rule.operations & NumericOperator.AND:
s.append(', ')
s.append('"%s"' % rule)
string.append(' "%s": [ %s ]' % (rules[0].NAME,''.join(str(_) for _ in s).replace('""','')))
nexthop = ', "next-hop": "%s"' % self.nexthop if self.nexthop is not NoIP else ''
rd = ', %s' % self.rd.json() if self.rd else ''
compatibility = ', "string": "%s"' % self.extensive()
return '{' + ','.join(string) + rd + nexthop + compatibility + ' }'
def json (self):
# this is a stop gap so flow route parsing does not crash exabgp
# delete unique when this is fixed
return '"flow-%d": %s' % (self.unique,self._json())
def index (self):
return self.pack()
@classmethod
def unpack (cls, afi, safi, bgp, has_multiple_path, nexthop, action):
total = len(bgp)
length,bgp = ord(bgp[0]),bgp[1:]
if length & 0xF0 == 0xF0: # bigger than 240
extra,bgp = ord(bgp[0]),bgp[1:]
length = ((length & 0x0F) << 16) + extra
if length > len(bgp):
raise Notify(3,10,'invalid length at the start of the the flow')
over = bgp[length:]
bgp = bgp[:length]
nlri = Flow(afi,safi,nexthop)
nlri.action = action
if safi == SAFI.flow_vpn:
nlri.rd = RouteDistinguisher(bgp[:8])
bgp = bgp[8:]
seen = []
while bgp:
what,bgp = ord(bgp[0]),bgp[1:]
if what not in decode.get(afi,{}):
				raise Notify(3,10,'unknown flowspec component %d received for this address family' % what)
seen.append(what)
if sorted(seen) != seen:
raise Notify(3,10,'components are not sent in the right order %s' % seen)
decoded = decode[afi][what]
klass = factory[afi][what]
if decoded == 'prefix':
if afi == AFI.ipv4:
_,rd,_,mask,size,prefix,left = NLRI._nlri(afi,safi,bgp,action,False)
adding = klass(prefix,mask)
if not nlri.add(adding):
raise Notify(3,10,'components are incompatible (two sources, two destinations, mix ipv4/ipv6) %s' % seen)
# logger.parser(LazyFormat("added flow %s (%s) payload " % (klass.NAME,adding),bgp[:-len(left)]))
bgp = left
else:
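					# the v6 encoding carries an extra offset byte between the
					# prefix length and the prefix: pull it out so NLRI._nlri
					# can parse the remaining length/prefix as usual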
byte,bgp = bgp[1],bgp[0]+bgp[2:]
offset = ord(byte)
_,rd,_,mask,size,prefix,left = NLRI._nlri(afi,safi,bgp,action,False)
adding = klass(prefix,mask,offset)
if not nlri.add(adding):
raise Notify(3,10,'components are incompatible (two sources, two destinations, mix ipv4/ipv6) %s' % seen)
# logger.parser(LazyFormat("added flow %s (%s) payload " % (klass.NAME,adding),bgp[:-len(left)]))
bgp = left
else:
end = False
while not end:
byte,bgp = ord(bgp[0]),bgp[1:]
end = CommonOperator.eol(byte)
operator = CommonOperator.operator(byte)
length = CommonOperator.length(byte)
value,bgp = bgp[:length],bgp[length:]
adding = klass.decoder(value)
nlri.add(klass(operator,adding))
# logger.parser(LazyFormat("added flow %s (%s) operator %d len %d payload " % (klass.NAME,adding,byte,length),value))
return total-len(over)-len(bgp),nlri
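# Illustrative usage sketch (Python 2 era API; the raw prefix bytes below are
# an assumption for the example, matching 10.0.0.1/32):
#   flow = Flow(afi=AFI.ipv4)
#   flow.add(Flow4Destination('\x0a\x00\x00\x01',32))
#   flow.add(FlowDestinationPort(NumericOperator.EQ,80))
#   wire = flow.pack()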