/*=========================================================================
*
* Copyright NumFOCUS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#include "itkMath.h"
#include "itkNormalVariateGenerator.h"
namespace itk
{
namespace Statistics
{
NormalVariateGenerator::NormalVariateGenerator()
{
m_Scale = 30000000.0;
m_Rscale = 1.0 / m_Scale;
m_Rcons = 1.0 / (2.0 * 1024.0 * 1024.0 * 1024.0);
m_Gausssave = nullptr;
this->Initialize(0);
}
NormalVariateGenerator::~NormalVariateGenerator() = default;
void
NormalVariateGenerator::PrintSelf(std::ostream & os, Indent indent) const
{
Superclass::PrintSelf(os, indent);
os << indent << "Scale: " << m_Scale << std::endl;
os << indent << "Rscale: " << m_Rscale << std::endl;
os << indent << "Rcons: " << m_Rcons << std::endl;
os << indent << "ELEN: " << m_ELEN << std::endl;
os << indent << "LEN: " << m_LEN << std::endl;
os << indent << "LMASK: " << m_LMASK << std::endl;
os << indent << "TLEN: " << m_TLEN << std::endl;
os << indent << "gaussfaze: " << m_Gaussfaze << std::endl;
os << indent << "gausssave: " << m_Gausssave << std::endl;
os << indent << "GScale: " << m_GScale << std::endl;
os << indent << "vec1: " << m_Vec1 << std::endl;
os << indent << "nslew: " << m_Nslew << std::endl;
os << indent << "irs: " << m_Irs << std::endl;
os << indent << "lseed: " << m_Lseed << std::endl;
os << indent << "chic1: " << m_Chic1 << std::endl;
os << indent << "chic2: " << m_Chic2 << std::endl;
os << indent << "actualRSD: " << m_ActualRSD << std::endl;
}
void
NormalVariateGenerator::Initialize(int randomSeed)
{
// The random seed argument was originally getpid()
double fake;
m_Lseed = randomSeed;
m_Irs = randomSeed;
m_Gaussfaze = 1;
m_Nslew = 0;
m_GScale = m_Rscale;
// At one stage, we need to generate a random variable Z such that
// (TLEN * Z*Z) has a Chi-squared-TLEN density. Now, a var with
// an approximate Chi-sq-K distn can be got as
// 0.5 * (C + A*n)**2 where n has unit Normal distn,
// A = (1 + 1 / (8K)), C*C = 2K - A*A (For large K)
// So we form Z as (sqrt (1 / 2TLEN)) * (C + A*n)
// or:
// Z = (sqrt (1/2TLEN)) * A * (B + n)
// where:
// B = C / A.
// We set m_Chic1 = A * std::sqrt(0.5 / TLEN), m_Chic2 = B
fake = 1.0 + 0.125 / m_TLEN; // This is A
m_Chic2 = std::sqrt(2.0 * m_TLEN - fake * fake) / fake;
m_Chic1 = fake * std::sqrt(0.5 / m_TLEN);
m_ActualRSD = 0.0;
}
double
NormalVariateGenerator::GetVariate()
{
if (--m_Gaussfaze)
{
return m_GScale * m_Gausssave[m_Gaussfaze];
}
else
{
return FastNorm();
}
}
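/* Illustrative usage (a sketch, not part of the original source): the class
   follows the usual ITK object-factory pattern, so a caller would typically
   write

     auto generator = itk::Statistics::NormalVariateGenerator::New();
     generator->Initialize(2003);              // reseed with any integer
     double sample = generator->GetVariate();  // one N(0, 1) deviate

   GetVariate() hands out cached deviates from the pool and calls FastNorm()
   to refill the pool once it is exhausted. */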
/* ----------------------------------------------------- */
double
NormalVariateGenerator::FastNorm()
{
int i;
int inc = 0;
int skew;
int stride;
int mask = 0;
int p;
int q;
int r;
int s;
int t;
int * pa = nullptr;
int * pb = nullptr;
int * pc = nullptr;
int * pd = nullptr;
int * pe;
int * p0 = nullptr;
int mtype;
int stype;
double ts;
double tr;
double tx;
double ty;
double tz;
/* See if time to make a new set of 'original' deviates */
/* or at least to correct for a drift in sum-of-squares */
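/* (m_Nslew & 0xFF) == 0 on every 256th pass, triggering the sum-of-squares
   correction in the renormalize section below; the full pool of Normal
   deviates is regenerated when (m_Nslew & 0xFFFF) == 0, i.e. on every
   65536th pass. */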
if (!(m_Nslew & 0xFF))
{
goto renormalize;
}
startpass:
/* Count passes */
++m_Nslew;
/* Reset index into Saved values */
m_Gaussfaze = m_TLEN - 1; /* We will steal the last one */
/* Update pseudo-random and use to choose type of rotation */
m_Lseed = static_cast<int>(69069 * static_cast<long>(m_Lseed) + 33331);
m_Irs = SignedShiftXOR(m_Irs);
t = static_cast<int>(static_cast<long>(m_Irs) + static_cast<long>(m_Lseed));
if (t < 0)
{
t = ~t;
}
/* This gives us 31 random bits in t */
/* We need ELEN to fix initial index into LEN, ELEN-1 to fix an odd
stride, 2 to fix matrix type and maybe 1 for scantype, making
2*ELEN + 2 in all, and leaving 29 - 2*ELEN unused
*/
t = t >> (29 - 2 * m_ELEN); /* Discard unwanted digits */
skew = (m_LEN - 1) & t;
t = t >> m_ELEN;
skew = 4 * skew; /* To give a word index to group of 4 */
stride = (m_LEN / 2 - 1) & t;
t = t >> (m_ELEN - 1);
stride = 8 * stride + 4; /* To give an odd num of 4-groups */
mtype = t & 3;
/* Leaves a bit for stype, but not currently used */
/* Use last bits of m_Nslew to determine scanning pattern */
stype = m_Nslew & 3;
switch (stype)
{
case 0: /* From consecutive in top to scattered in bot */
inc = 1;
mask = m_LMASK;
pa = m_Vec1;
pb = pa + m_LEN;
pc = pb + m_LEN;
pd = pc + m_LEN;
p0 = m_Vec1 + 4 * m_LEN;
goto scanset;
case 1: /* From consec in bot to scatt in top */
inc = 1;
mask = m_LMASK;
pa = m_Vec1 + 4 * m_LEN;
pb = pa + m_LEN;
pc = pb + m_LEN;
pd = pc + m_LEN;
p0 = m_Vec1;
goto scanset;
case 2: /* From consec in even to scatt in odd */
inc = 2;
mask = 2 * m_LMASK;
skew *= 2;
stride *= 2;
pa = m_Vec1 + 1;
pb = pa + 2 * m_LEN;
pc = pb + 2 * m_LEN;
pd = pc + 2 * m_LEN;
p0 = m_Vec1;
goto scanset;
case 3: /* From consec in odd to scatt in even */
inc = 2;
mask = 2 * m_LMASK;
skew *= 2;
stride *= 2;
pa = m_Vec1;
pb = pa + 2 * m_LEN;
pc = pb + 2 * m_LEN;
pd = pc + 2 * m_LEN;
p0 = m_Vec1 + 1;
goto scanset;
} /* End of scan pattern cases */
scanset:
m_Gausssave = m_Vec1;
/* Set loop count */
i = m_LEN;
/* Use mtype to select matrix */
switch (mtype)
{
case 0:
goto matrix0;
case 1:
goto matrix1;
case 2:
goto matrix2;
case 3:
goto matrix3;
}
matrix0:
pa += (inc * (m_LEN - 1));
mpass0:
skew = (skew + stride) & mask;
pe = p0 + skew;
p = -*pa;
q = -*pb;
r = *pc;
s = *pd;
t = (p + q + r + s) >> 1;
p = t - p;
q = t - q;
r = t - r;
s = t - s;
/* Have new values in p,q,r,s. Place and save replaced vals */
t = -*pe;
*pe = p;
pe += inc;
p = *pe;
*pe = q;
pe += inc;
q = -*pe;
*pe = r;
pe += inc;
r = *pe;
*pe = s;
/* Have vals in p,q,r,t */
s = (p + q + r + t) >> 1;
*pa = s - p;
pa -= inc;
*pb = s - q;
pb += inc;
*pc = s - r;
pc += inc;
*pd = s - t;
pd += inc;
if (--i)
{
goto mpass0;
}
goto endpass;
matrix1:
pb += (inc * (m_LEN - 1));
mpass1:
skew = (skew + stride) & mask;
pe = p0 + skew;
p = -*pa;
q = *pb;
r = *pc;
s = -*pd;
t = (p + q + r + s) >> 1;
p = t - p;
q = t - q;
r = t - r;
s = t - s;
/* Have new values in p,q,r,s. Place and save replaced vals */
t = *pe;
*pe = p;
pe += inc;
p = -*pe;
*pe = q;
pe += inc;
q = -*pe;
*pe = r;
pe += inc;
r = *pe;
*pe = s;
/* Have vals in p,q,r,t */
s = (p + q + r + t) >> 1;
*pa = s - p;
pa += inc;
*pb = s - t;
pb -= inc;
*pc = s - q;
pc += inc;
*pd = s - r;
pd += inc;
if (--i)
{
goto mpass1;
}
goto endpass;
matrix2:
pc += (inc * (m_LEN - 1));
mpass2:
skew = (skew + stride) & mask;
pe = p0 + skew;
p = *pa;
q = -*pb;
r = *pc;
s = -*pd;
t = (p + q + r + s) >> 1;
p = t - p;
q = t - q;
r = t - r;
s = t - s;
/* Have new values in p,q,r,s. Place and save replaced vals */
t = *pe;
*pe = p;
pe += inc;
p = *pe;
*pe = q;
pe += inc;
q = -*pe;
*pe = r;
pe += inc;
r = -*pe;
*pe = s;
/* Have vals in p,q,r,t */
s = (p + q + r + t) >> 1;
*pa = s - r;
pa += inc;
*pb = s - p;
pb += inc;
*pc = s - q;
pc -= inc;
*pd = s - t;
pd += inc;
if (--i)
{
goto mpass2;
}
goto endpass;
matrix3:
pd += (inc * (m_LEN - 1));
mpass3:
skew = (skew + stride) & mask;
pe = p0 + skew;
p = *pa;
q = *pb;
r = -*pc;
s = -*pd;
t = (p + q + r + s) >> 1;
p = t - p;
q = t - q;
r = t - r;
s = t - s;
/* Have new values in p,q,r,s. Place and save replaced vals */
t = -*pe;
*pe = p;
pe += inc;
p = *pe;
*pe = q;
pe += inc;
q = *pe;
*pe = r;
pe += inc;
r = -*pe;
*pe = s;
/* Have vals in p,q,r,t */
s = (p + q + r + t) >> 1;
*pa = s - q;
pa += inc;
*pb = s - r;
pb += inc;
*pc = s - t;
pc += inc;
*pd = s - p;
pd -= inc;
if (--i)
{
goto mpass3;
}
goto endpass;
endpass:
/* Choose a value for m_GScale which will make the sum-of-squares have
the variance of Chi-Sq (TLEN), i.e., 2*TLEN. Choose a value from
Chi-Sq (TLEN) using the method described in initnorm.
The Normal variate is obtained from m_Gausssave[TLEN-1], which is
not used by the caller.
*/
ts = m_Chic1 * (m_Chic2 + m_GScale * m_Vec1[m_TLEN - 1]);
/* m_TLEN * ts * ts has ChiSq (m_TLEN) distribution */
m_GScale = m_Rscale * ts * m_ActualRSD;
return (m_GScale * m_Vec1[0]);
renormalize:
if (m_Nslew & 0xFFFF)
{
goto recalcsumsq;
}
/* Here, replace the whole pool with conventional Normal variates */
ts = 0.0;
p = 0;
nextpair:
m_Lseed = static_cast<int>(69069 * static_cast<long>(m_Lseed) + 33331);
m_Irs = SignedShiftXOR(m_Irs);
r = static_cast<int>(static_cast<long>(m_Irs) + static_cast<long>(m_Lseed));
tx = m_Rcons * r;
m_Lseed = static_cast<int>(69069 * static_cast<long>(m_Lseed) + 33331);
m_Irs = SignedShiftXOR(m_Irs);
r = static_cast<int>(static_cast<long>(m_Irs) + static_cast<long>(m_Lseed));
ty = m_Rcons * r;
tr = tx * tx + ty * ty;
if ((tr > 1.0) || (tr < 0.1))
{
goto nextpair;
}
m_Lseed = static_cast<int>(69069 * static_cast<long>(m_Lseed) + 33331);
m_Irs = SignedShiftXOR(m_Irs);
r = static_cast<int>(static_cast<long>(m_Irs) + static_cast<long>(m_Lseed));
if (r < 0)
{
r = ~r;
}
tz = -2.0 * std::log((r + 0.5) * m_Rcons); /* Sum of squares */
ts += tz;
tz = std::sqrt(tz / tr);
m_Vec1[p++] = static_cast<int>(m_Scale * tx * tz);
m_Vec1[p++] = static_cast<int>(m_Scale * ty * tz);
if (p < m_TLEN)
{
goto nextpair;
}
/* Horrid, but good enough */
/* Calc correction factor to make sum of squares = TLEN */
ts = m_TLEN / ts; /* Should be close to 1.0 */
tr = std::sqrt(ts);
for (p = 0; p < m_TLEN; ++p)
{
tx = m_Vec1[p] * tr;
m_Vec1[p] = static_cast<int>((tx < 0.0) ? (tx - 0.5) : (tx + 0.5));
}
recalcsumsq:
/* Calculate actual sum of squares for correction */
ts = 0.0;
for (p = 0; p < m_TLEN; ++p)
{
tx = m_Vec1[p];
ts += (tx * tx);
}
/* Now ts should be Scale*Scale*TLEN or thereabouts */
ts = std::sqrt(ts / (m_Scale * m_Scale * m_TLEN));
m_ActualRSD = 1.0 / ts; /* Reciprocal of actual Standard Devtn */
goto startpass;
}
} // namespace Statistics
} // namespace itk
package export
import (
"encoding/json"
"github.com/unravela/gitwrk"
"io"
"time"
)
type jsonRecord struct {
When time.Time `json:"when"`
Author string `json:"author"`
ScmType string `json:"scm_type"`
ScmScope string `json:"scm_scope"`
Spent string `json:"spent"`
SpentMinutes int `json:"spent_minutes"`
}
// JSON renders the collection of work logs as JSON.
// For better control over JSON format and names, we're mapping WorkLog into
// jsonRecord first. Then we're marshalling jsonRecords.
func JSON(wlogs gitwrk.WorkLogs, out io.Writer) {
records := make([]jsonRecord, len(wlogs))
for i, wlog := range wlogs {
records[i] = jsonRecord{
When: wlog.When,
Author: wlog.Author,
ScmType: wlog.Scm.Type,
ScmScope: wlog.Scm.Scope,
Spent: wlog.Spent.String(),
SpentMinutes: int(wlog.Spent.Minutes()),
}
}
bytes, _ := json.MarshalIndent(records, "", "\t")
out.Write(bytes)
}
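// Example usage (a sketch; assumes the work logs were collected elsewhere,
// e.g. via the gitwrk scanning API, and that "os" is imported):
//
//	wlogs := gitwrk.WorkLogs{ /* ... */ }
//	export.JSON(wlogs, os.Stdout)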
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.rob.demo.predictor;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.PrintWriter;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Submission for the ROB Number Predictor Benchmark. Uses the sum of the last
* value and the difference between the last value and the next-to-last value
* in a given sequence as the prediction result.
*
* @author <NAME> <<EMAIL>>
*/
public class AddDiffOfLastTwoValues {
public static void main(String[] args) {
if (args.length != 2) {
System.out.println("Usage: <input-file> <output-file>");
System.exit(-1);
}
File inputFile = new File(args[0]);
File outputFile = new File(args[1]);
File outputFolder = outputFile.getParentFile();
if (outputFolder != null) {
if (!outputFolder.exists()) {
outputFolder.mkdirs();
}
}
try (
BufferedReader in = new BufferedReader(new FileReader(inputFile));
PrintWriter out = new PrintWriter(new FileWriter(outputFile))
) {
String line;
while ((line = in.readLine()) != null) {
if (!line.trim().equals("")) {
String[] tokens = line.split(":");
String seqId = tokens[0];
String[] values = tokens[1].split(",");
int last = Integer.parseInt(values[values.length - 1]);
int nextToLast = Integer.parseInt(values[values.length - 2]);
int prediction = last + (last - nextToLast);
out.println(seqId + ":" + prediction);
}
}
} catch (java.io.IOException ex) {
Logger.getGlobal().log(Level.SEVERE, "RUN", ex);
System.exit(-1);
}
}
}
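/*
 * Hypothetical input/output illustration (the line format is inferred from
 * the parser above; the sequence id and values are made up):
 *
 *   input line:   seq42:1,3,5,7
 *   output line:  seq42:9          (7 + (7 - 5))
 */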
import React, { Component } from 'react';
import { RouteComponentProps } from "react-router-dom";
import { connect } from 'react-redux';
import { IStoreState } from '../../global/constants';
import * as actions from './flow/actions';
import './style.css';
import { IHomeStoreState } from './flow/constants';
import { IStarInfo } from '../../services/constants';
import RepoListItem from '../../components/RepoListItem';
import FullScreenLoad from '../../components/FullScreenLoad';
import MoreButton from '../../components/MoreButton';
import Modal from '../../components/Modal';
interface IActionsProps {
fetchStars: (page: number) => void;
pin: (index: number, pined: boolean, star: any) => void;
clean: () => void;
}
interface IProps extends RouteComponentProps<any>, IActionsProps {
home: IHomeStoreState;
}
class HomePage extends Component<IProps> {
public i: number;
public once: boolean;
constructor(props:IProps){
super(props);
this.i = 0;
this.once = true;
}
componentDidMount(){
this.againFetchStars();
}
componentWillUnmount(){
// Clean up any in-memory data that may need to be released
this.props.clean();
}
public againFetchStars = () => {
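// Advance the page counter and fetch the next page of starred repos;
// stops once the store reports there is no more data (no_data).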
const { no_data } = this.props.home;
if (!no_data) {
this.i = this.i + 1;
this.props.fetchStars(this.i);
}
}
public onPin = (star: IStarInfo, index: number) => {
this.props.pin(index, true, star);
}
public renderStarList(){
const home = this.props.home;
const { stars } = home;
return (
<div className="home-star-container">
{
stars.map((star, i) => {
return (
<RepoListItem key={star.id} star={ star } index={i} onPinChanged={this.onPin} />
)
})
}
</div>
)
}
public renderStarsList(){
const home = this.props.home;
const { reload, no_data } = home;
return (
<div>
<div className="home-more-container">
{this.renderStarList()}
<MoreButton
reload={ reload }
noData={no_data}
onAgainChanged={this.againFetchStars}
/>
</div>
</div>
)
}
public render(){
const { home } = this.props;
if (home.stars.length > 0) {
if (this.once) {
this.once = false;
}
}
const { openModal } = home;
return (
<div className="home-container">
{
this.once ? <FullScreenLoad/> : this.renderStarsList()
}
<Modal
text="已经存在:请在Pin列表中操作此项"
open={openModal}
/>
</div>
);
}
}
const mapStateToProps = (state: IStoreState) => {
const { home } = state;
return {
home
}
}
export default connect(mapStateToProps, actions)(HomePage);
package com.jhl.admin.entity;
import lombok.Data;
import lombok.Getter;
import lombok.Setter;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
public class ConnectionStat {
ConcurrentHashMap<String, InternalEntry> cache = new ConcurrentHashMap<>();
@Getter
@Setter
private volatile Long lastBlock = 0L;
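/**
 * Records the most recent connection count for a host. Note that an
 * existing entry is overwritten with the new count rather than accumulated.
 */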
public void createOrAdd(String host, int count) {
InternalEntry entry = cache.get(host);
if (entry != null) {
entry.setCount(count);
entry.setLastSetTime(System.currentTimeMillis());
} else {
InternalEntry internalEntry = new InternalEntry();
internalEntry.setCount(count);
internalEntry.setLastSetTime(System.currentTimeMillis());
cache.put(host, internalEntry);
}
}
private static final Long _5MINUTES = 5 * 60 * 1000L; // five minutes, in milliseconds
/**
 * Returns the global total of counts recorded within the recent time window;
 * entries older than {@code _5MINUTES} are evicted from the cache.
 *
 * @return the aggregated connection count
 */
public Integer getTotal() {
final long currentTimeMillis = System.currentTimeMillis();
AtomicInteger total = new AtomicInteger();
List<String> removeKeys = new ArrayList<>();
cache.forEach((key, value) -> {
if ((currentTimeMillis - value.getLastSetTime()) < _5MINUTES) {
total.addAndGet(value.count);
} else {
removeKeys.add(key);
}
});
removeKeys.forEach(cache::remove);
return total.get();
}
@Data
private class InternalEntry {
private Integer count;
private long lastSetTime;
}
}
#include <touchgfx/hal/Types.hpp>
FONT_LOCATION_FLASH_PRAGMA
KEEP extern const uint8_t unicodes_NanumGothicCoding_20_4bpp[] FONT_LOCATION_FLASH_ATTRIBUTE =
{
// Unicode: [0x0020, ]
// (Has no glyph data)
// Unicode: [0x0031, ]
0x00, 0x70, 0x4F, 0x00, 0xF9, 0x5F, 0xA0, 0x4E, 0x5F, 0xC1, 0x12, 0x5F, 0x00, 0x10, 0x5F, 0x00,
0x10, 0x5F, 0x00, 0x10, 0x5F, 0x00, 0x10, 0x5F, 0x00, 0x10, 0x5F, 0x00, 0x10, 0x5F, 0x00, 0x10,
0x5F, 0x00, 0x10, 0x5F, 0x00, 0x10, 0x5F, 0x00, 0x10, 0x5F, 0x00, 0x00, 0x4F,
// Unicode: [0x0032, ]
0x00, 0xC7, 0xEF, 0x09, 0x00, 0x80, 0x8E, 0x65, 0xCE, 0x00, 0x10, 0x00, 0x00, 0xF3, 0x06, 0x00,
0x00, 0x00, 0xD0, 0x0A, 0x00, 0x00, 0x00, 0xD0, 0x0A, 0x00, 0x00, 0x00, 0xF0, 0x08, 0x00, 0x00,
0x00, 0xF7, 0x02, 0x00, 0x00, 0x10, 0xAF, 0x00, 0x00, 0x00, 0xB0, 0x1E, 0x00, 0x00, 0x00, 0xF8,
0x03, 0x00, 0x00, 0x50, 0x6F, 0x00, 0x00, 0x00, 0xF4, 0x09, 0x00, 0x00, 0x20, 0xAF, 0x00, 0x00,
0x00, 0xE0, 0x4E, 0x44, 0x44, 0x04, 0xF2, 0xFF, 0xFF, 0xFF, 0x0F,
// Unicode: [0x0033, ]
0x80, 0xFD, 0xAE, 0x03, 0x20, 0x7D, 0x65, 0xFC, 0x04, 0x00, 0x00, 0x00, 0xEA, 0x00, 0x00, 0x00,
0x40, 0x1F, 0x00, 0x00, 0x00, 0xF5, 0x00, 0x00, 0x00, 0xC0, 0x0B, 0x00, 0x20, 0xD6, 0x2D, 0x00,
0xD0, 0xFF, 0x1C, 0x00, 0x00, 0x43, 0xC6, 0x3E, 0x00, 0x00, 0x00, 0x90, 0x0D, 0x00, 0x00, 0x00,
0xF3, 0x02, 0x00, 0x00, 0x40, 0x3F, 0x00, 0x00, 0x00, 0xEC, 0xA0, 0x69, 0x75, 0xEE, 0x03, 0xC6,
0xFE, 0x9E, 0x01, 0x00,
// Unicode: [0x003F, ]
0xC5, 0xFE, 0x9D, 0x01, 0xB8, 0x67, 0xF9, 0x0D, 0x00, 0x00, 0x40, 0x6F, 0x00, 0x00, 0x00, 0x9E,
0x00, 0x00, 0x10, 0x7F, 0x00, 0x00, 0x90, 0x1F, 0x00, 0x00, 0xF6, 0x05, 0x00, 0x30, 0x7F, 0x00,
0x00, 0xD0, 0x0A, 0x00, 0x00, 0xF3, 0x02, 0x00, 0x00, 0xF6, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00,
0x00, 0x30, 0x00, 0x00, 0x00, 0xFA, 0x04, 0x00, 0x00, 0xE8, 0x02, 0x00,
// Unicode: [0xAE30, ]
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x50, 0x06, 0x11, 0x11, 0x11, 0x01, 0x00, 0x00, 0xCA, 0xF2,
0xFF, 0xFF, 0xFF, 0x2F, 0x00, 0xA0, 0x0C, 0x21, 0x22, 0x22, 0xF3, 0x04, 0x00, 0xCA, 0x00, 0x00,
0x00, 0x30, 0x3F, 0x00, 0xA0, 0x0C, 0x00, 0x00, 0x00, 0xF6, 0x00, 0x00, 0xCA, 0x00, 0x00, 0x00,
0xB0, 0x0B, 0x00, 0xA0, 0x0C, 0x00, 0x00, 0x20, 0x5F, 0x00, 0x00, 0xCA, 0x00, 0x00, 0x00, 0xDA,
0x00, 0x00, 0xA0, 0x0C, 0x00, 0x00, 0xF4, 0x04, 0x00, 0x00, 0xCA, 0x00, 0x00, 0xE2, 0x09, 0x00,
0x00, 0xA0, 0x0C, 0x00, 0xE2, 0x0B, 0x00, 0x00, 0x00, 0xCA, 0x00, 0xF5, 0x0B, 0x00, 0x00, 0x00,
0xA0, 0x1C, 0xFA, 0x08, 0x00, 0x00, 0x00, 0x00, 0xCA, 0xD5, 0x03, 0x00, 0x00, 0x00, 0x00, 0xA0,
0x0C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xCA, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xA0, 0x0C,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xCA, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x0A,
// Unicode: [0xB4C0, ]
0x00, 0x20, 0x33, 0x33, 0x33, 0x33, 0x33, 0x03, 0x00, 0x00, 0xF2, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0x3F, 0x00, 0x00, 0xF4, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF5, 0x01, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0xF5, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF5, 0x01,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF4, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xF3, 0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0x5E, 0x00, 0x00, 0x30, 0x54, 0x55, 0x55, 0x55, 0x55, 0x15,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x21, 0x22, 0x22, 0x22, 0x22, 0x22,
0x22, 0x22, 0x12, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xCF, 0x00, 0x00, 0xE0, 0x08,
0x00, 0xB0, 0x0B, 0x00, 0x00, 0x00, 0x00, 0xE0, 0x08, 0x00, 0xB0, 0x0B, 0x00, 0x00, 0x00, 0x00,
0xE0, 0x08, 0x00, 0xB0, 0x0B, 0x00, 0x00, 0x00, 0x00, 0xE0, 0x08, 0x00, 0xB0, 0x0B, 0x00, 0x00,
0x00, 0x00, 0xE0, 0x08, 0x00, 0xB0, 0x0B, 0x00, 0x00, 0x00, 0x00, 0xE0, 0x08, 0x00, 0xB0, 0x0B,
0x00, 0x00, 0x00, 0x00, 0x90, 0x05, 0x00, 0x70, 0x07, 0x00, 0x00,
// Unicode: [0xBD07, ]
0x00, 0x60, 0x03, 0x00, 0x00, 0x00, 0x30, 0x06, 0x00, 0x00, 0xE0, 0x07, 0x00, 0x00, 0x00, 0x70,
0x0E, 0x00, 0x00, 0xE0, 0x07, 0x00, 0x00, 0x00, 0x70, 0x0E, 0x00, 0x00, 0xE0, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0x0E, 0x00, 0x00, 0xE0, 0x28, 0x22, 0x22, 0x22, 0x82, 0x0E, 0x00, 0x00, 0xE0, 0x07,
0x00, 0x00, 0x00, 0x70, 0x0E, 0x00, 0x00, 0xE0, 0x5A, 0x55, 0x55, 0x55, 0xA5, 0x0E, 0x00, 0x00,
0x80, 0xDD, 0xDD, 0xFF, 0xDD, 0xDD, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0xAB, 0x00, 0x00, 0x00,
0x00, 0x21, 0x22, 0x22, 0x22, 0xBC, 0x22, 0x22, 0x22, 0x12, 0xFC, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xFF, 0xFF, 0xCF, 0x10, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x01, 0x00, 0x00, 0x00, 0x00,
0x89, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xDF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x90, 0xEE, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF9, 0x43, 0x8F, 0x00, 0x00, 0x00,
0x00, 0x00, 0xC3, 0x3E, 0x00, 0xE3, 0x5D, 0x00, 0x00, 0x00, 0xC7, 0xAF, 0x01, 0x00, 0x10, 0xF8,
0x9E, 0x01, 0x00, 0x8C, 0x02, 0x00, 0x00, 0x00, 0x10, 0xA6, 0x00,
// Unicode: [0xC2DC, ]
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x65, 0x00, 0x00, 0x20, 0x09, 0x00, 0x00, 0x00, 0xCA,
0x00, 0x00, 0x40, 0x1F, 0x00, 0x00, 0x00, 0xCA, 0x00, 0x00, 0x50, 0x0F, 0x00, 0x00, 0x00, 0xCA,
0x00, 0x00, 0x70, 0x0F, 0x00, 0x00, 0x00, 0xCA, 0x00, 0x00, 0x80, 0x0D, 0x00, 0x00, 0x00, 0xCA,
0x00, 0x00, 0xC0, 0x0B, 0x00, 0x00, 0x00, 0xCA, 0x00, 0x00, 0xF0, 0x0F, 0x00, 0x00, 0x00, 0xCA,
0x00, 0x00, 0xF5, 0x6F, 0x00, 0x00, 0x00, 0xCA, 0x00, 0x00, 0xBB, 0xE7, 0x00, 0x00, 0x00, 0xCA,
0x00, 0x40, 0x4F, 0xD0, 0x09, 0x00, 0x00, 0xCA, 0x00, 0xD0, 0x0A, 0x30, 0x5F, 0x00, 0x00, 0xCA,
0x00, 0xEA, 0x01, 0x00, 0xF7, 0x05, 0x00, 0xCA, 0x90, 0x4F, 0x00, 0x00, 0x80, 0x1F, 0x00, 0xCA,
0xB0, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0xCA, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xCA,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xCA, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xCA,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xA8,
// Unicode: [0xC5D0, ]
0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x02, 0x80, 0x03, 0x00, 0x41, 0x04, 0x00, 0x00, 0xC7, 0x00,
0x5F, 0x00, 0xF5, 0xFE, 0x5F, 0x00, 0x80, 0x0D, 0xF0, 0x05, 0xF3, 0x07, 0x90, 0x3F, 0x00, 0xD8,
0x00, 0x5F, 0xB0, 0x0C, 0x00, 0xD0, 0x0B, 0x80, 0x0D, 0xF0, 0x05, 0x7F, 0x00, 0x00, 0xF7, 0x00,
0xD8, 0x00, 0x5F, 0xF2, 0x04, 0x00, 0x40, 0x2F, 0x80, 0x0D, 0xF0, 0x35, 0x2F, 0x00, 0x00, 0xF2,
0xBC, 0xDD, 0x00, 0x5F, 0xF3, 0x03, 0x00, 0x30, 0x9F, 0xC8, 0x0D, 0xF0, 0x15, 0x5F, 0x00, 0x00,
0xF5, 0x01, 0xD8, 0x00, 0x5F, 0xE0, 0x09, 0x00, 0xA0, 0x0E, 0x80, 0x0D, 0xF0, 0x05, 0xE8, 0x01,
0x10, 0x8F, 0x00, 0xD8, 0x00, 0x5F, 0x10, 0xCE, 0x54, 0xDD, 0x00, 0x80, 0x0D, 0xF0, 0x05, 0x20,
0xFA, 0xAF, 0x01, 0x00, 0xD8, 0x00, 0x5F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x0D, 0xF0, 0x05,
0x00, 0x00, 0x00, 0x00, 0x00, 0xD8, 0x00, 0x5F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x0D, 0xF0,
0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC7, 0x00, 0x5F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
0xC0, 0x04,
// Unicode: [0xC791, ]
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x09, 0x00, 0x00, 0xA1, 0xAA, 0xAA, 0xAA, 0x06, 0x00,
0xF6, 0x00, 0x00, 0x10, 0x88, 0x88, 0x88, 0xED, 0x00, 0x60, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00,
0xE1, 0x08, 0x00, 0xF6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x90, 0x1F, 0x00, 0x60, 0x0F, 0x00, 0x00,
0x00, 0x00, 0x60, 0x5F, 0x00, 0x00, 0xF6, 0x99, 0x09, 0x00, 0x00, 0x50, 0xFF, 0x06, 0x00, 0x60,
0x9F, 0x99, 0x00, 0x00, 0x80, 0x7F, 0xF6, 0x1A, 0x00, 0xF6, 0x00, 0x00, 0x00, 0xC3, 0x4E, 0x00,
0xD3, 0x8F, 0x60, 0x0F, 0x00, 0x00, 0xFA, 0x1B, 0x00, 0x00, 0x70, 0x08, 0xF6, 0x00, 0x00, 0x90,
0x04, 0x00, 0x00, 0x00, 0x00, 0x50, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x80, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0x09, 0x00, 0x00, 0x00, 0x53, 0x55, 0x55,
0x55, 0x55, 0xFA, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x0F, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0xF6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x0F,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x40, 0x0C, 0x00, 0x00,
// Unicode: [0xD558, ]
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 0x08, 0x00, 0x00, 0x00, 0xA8, 0xAA, 0xAA, 0x02, 0x00,
0xF6, 0x00, 0x00, 0x00, 0x60, 0x88, 0x88, 0x18, 0x00, 0x60, 0x0F, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0xF6, 0x00, 0x00, 0x41, 0x44, 0x44, 0x44, 0x44, 0x44, 0x60, 0x0F, 0x00, 0x60,
0xEE, 0xEE, 0xEE, 0xEE, 0xEE, 0x2E, 0xF6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60,
0x0F, 0x00, 0x00, 0x00, 0x40, 0xA9, 0x17, 0x00, 0x00, 0xF6, 0x00, 0x00, 0x00, 0xA0, 0xAF, 0xC8,
0x6F, 0x00, 0x60, 0xEF, 0xEE, 0x00, 0x80, 0x2E, 0x00, 0x60, 0x3F, 0x00, 0xF6, 0x44, 0x04, 0x00,
0x7E, 0x00, 0x00, 0xD0, 0x09, 0x60, 0x0F, 0x00, 0x00, 0xF0, 0x06, 0x00, 0x00, 0xAB, 0x00, 0xF6,
0x00, 0x00, 0x00, 0xBC, 0x00, 0x00, 0xF1, 0x06, 0x60, 0x0F, 0x00, 0x00, 0x30, 0xAF, 0x13, 0xD4,
0x0D, 0x00, 0xF6, 0x00, 0x00, 0x00, 0x20, 0xFB, 0xFF, 0x09, 0x00, 0x60, 0x0F, 0x00, 0x00, 0x00,
0x00, 0x10, 0x00, 0x00, 0x00, 0xF6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x60, 0x0F,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xF6, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x40, 0x0D, 0x00, 0x00,
// Unicode: [0xD638, ]
0x00, 0x00, 0x00, 0x33, 0x33, 0x23, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xBF, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x20, 0xDD, 0xDD, 0xDD, 0xDD,
0xDD, 0xDD, 0xDD, 0x02, 0x00, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x00, 0x00, 0x00, 0x20,
0xB8, 0xDD, 0x7B, 0x01, 0x00, 0x00, 0x00, 0x00, 0xF6, 0x8D, 0x66, 0xE8, 0x4F, 0x00, 0x00, 0x00,
0x20, 0xAF, 0x00, 0x00, 0x00, 0xEC, 0x00, 0x00, 0x00, 0x50, 0x4F, 0x00, 0x00, 0x00, 0xF6, 0x03,
0x00, 0x00, 0x30, 0x8F, 0x00, 0x00, 0x00, 0xFA, 0x01, 0x00, 0x00, 0x00, 0xF9, 0x49, 0x22, 0xA4,
0x7F, 0x00, 0x00, 0x00, 0x00, 0x50, 0xFB, 0xFF, 0xBE, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xAC, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xAC, 0x00, 0x00, 0x00, 0x00, 0x54, 0x55,
0x55, 0x55, 0xCD, 0x55, 0x55, 0x55, 0x35, 0xDA, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xDD, 0xAD
};
Dynamical transitions in a modulated Landau-Zener model with finite driving fields
We investigate a special time-dependent quantum model which assumes the Landau-Zener driving form but with an overall modulation of the intensity of the pulsing field. We demonstrate that the dynamics of the system, including the two-level case as well as its multi-level extension, is exactly solvable analytically. Differing from the original Landau-Zener model, the nonadiabatic effect of the evolution in the present driving process does not destroy the desired population transfer. As the sweep protocol employs only finite driving fields which tend to zero asymptotically, the cutoff error due to the truncation of the driving pulse to a finite time interval turns out to be negligibly small. Furthermore, we investigate the noise effect on the driving protocol due to the dissipation of the surrounding environment. The losses of fidelity in the protocol caused by both the phase damping process and the random spin flip noise are estimated by solving numerically the corresponding master equations within the Markovian regime.
I. INTRODUCTION
Exactly solvable time-dependent quantum systems attract increasing interest owing to their role in the design of quantum control. In particular, to model dynamical processes or target quantum states for atomic and molecular systems, nonadiabatic transitions induced by time-varying external fields are often involved, and a theoretical proposal of a driving protocol with the desired dynamics is generally a prerequisite to accomplish the corresponding quantum tasks.
The Landau-Zener (LZ) model and its analogs, represented by the Hamiltonian

H(t) = Ω_x(t) J_x + Ω_z(t) J_z,   (1)

are the most frequently exploited proposals for such driving protocols. Here J_{x,z} denote the angular-momentum operators and Ω_{x,z}(t) account for the two components of the driving field along the x and z axes, respectively. Owing to the explicit time dependence of H(t), the general solution to this kind of system is highly nontrivial even for the simplest two-level case, i.e., with the azimuthal quantum number j = 1/2. For the standard LZ sweep, with Ω_x constant and Ω_z(t) varying linearly with time, this two-level model is exactly solvable and the transition probability induced by the evolution over t ∈ (−∞, ∞) is well known as the LZ formula. Notably, the LZ model has a wide range of applications in physics as well as in chemistry, including LZ interferometry, the transfer of charge, chemical reactions, controllable manipulation of qubit and qutrit systems, and so on.
The so-called counter-diabatic protocol (also named the transitionless protocol, or shortcuts to adiabaticity) has been proposed to generate exact dynamical evolution which targets the adiabatic eigenstates, e.g., of a given Hamiltonian of form (1). Typically, this kind of protocol exploits a reverse-engineering strategy, introducing an auxiliary counter-diabatic driving term to ensure the desired evolution. We would also like to mention another reverse-engineering algorithm proposed in Ref. , where a parametric connection is established between the evolution operator and the control field of the Hamiltonian. In comparison, while the latter method is able to generate the LZ-type protocol with the two driving components of Eq. (1), its applications have been restricted to two-level systems.
Apart from the models constructed through the mentioned reverse-engineering methods, analytically exactly solvable time-dependent quantum systems are relatively rare, and the known examples are mostly concentrated on the two-level system, for example, the Rosen-Zener, Allen-Eberly, Demkov-Kunike, and Bambini-Berman models. In a recent work, a tangent-pulse driven model was proposed which is shown to be analytically solvable not only for the two-level case but also for its multi-level extension. The nonadiabatic dynamics generated by the model itself can serve as a desirable protocol for population transfer without the need of any auxiliary fields. While the ideal design assumes an infinite chirping field, it was demonstrated that for an imperfect scanning process with truncation, the cutoff error caused to the population transfer can be suppressed arbitrarily by enhancing the scanning rate of the protocol.
In this paper we propose a modulated LZ model and explore the generated dynamics for quantum control. In particular, we demonstrate that the model offers an alternative protocol for nonadiabatic population transfer which retains the advantages previously displayed by the tangent-pulse driven model: the nonadiabatic evolution can realize complete population transfer and no auxiliary field is required; the model is genuinely solvable and can be extended to the multi-level system. Furthermore, since the present protocol employs only fields of finite intensity, it avoids the nonrealistic design of infinite driving assumed in the original LZ model and other analogous schemes. Meanwhile, the cutoff error in the protocol due to the truncation of the scanning pulse to a finite time interval is shown to be negligibly small. To evaluate further the feasibility of the scheme in real systems, we investigate the noise effect on the protocol under dissipation. We solve numerically the master equations associated with the dephasing process and the random spin flip process within the Markovian regime, and estimate the loss of fidelity caused by the detrimental influence of the noise.
The rest of the paper is organized as follows. In Sec. II we introduce the modulated LZ model and demonstrate that the dynamics of the model governed by the time-dependent Schrödinger equation is exactly solvable. We employ the method proposed by Lewis and Riesenfeld (LR) and exhibit explicitly the dynamical invariant of the model. In Sec. III we focus on the dynamical transitions in the model and describe the corresponding process of nonadiabatic population transfer for the two-level case as well as for its multi-level extension. In particular, we show that the intermediate transitions induced by the nonadiabatic effect do not destroy the desired state transfer. The noise effect on the fidelity of the protocol due to the dissipation of the environment is investigated in Sec. IV. Finally, a summary of the paper is presented in Sec. V.
II. DESCRIPTION OF THE MODEL AND ITS EXACT SOLUTION
The driven model considered here is described explicitly by the Hamiltonian of Eq. (2), where the amplitude η and the sweep frequency ν are fixed constants and the coefficient κ is related to them through Eq. (3). Here we have set ℏ = 1 so that η (≡ η/ℏ) possesses the same dimension as ν. While the model keeps the property of the original LZ model that the ratio between the field components along the z and x axes, Ω_z(t)/Ω_x(t), increases linearly with time, an overall modulation of the field amplitude is exploited in the present sweep process. The schematic of the scanning pulses Ω_{x,z}(t) over t ∈ (−∞, ∞) is depicted in Fig. 1. Note that the modulation enables the model to avoid the nonrealistic ingredient of an infinite driving field assumed in the original model.
We now show that the dynamics of the system governed by the Schrödinger equation i∂_t|ψ(t)⟩ = H(t)|ψ(t)⟩ is exactly solvable. To this end, we recall the dynamical invariant introduced by the LR method. That is, a time-dependent quantum system can be solved exactly if it possesses a dynamical invariant, i.e., an observable I(t) that satisfies

∂I(t)/∂t + (1/i)[I(t), H(t)] = 0.   (4)

The peculiar property of such an invariant is that its instantaneous eigenvector, denoted by |φ_m(t)⟩, differs from the basic solution of the Schrödinger equation only by a phase factor: |ψ_m(t)⟩ = e^{iΦ_m(t)}|φ_m(t)⟩, with the LR phase Φ_m(t) given by the time integral of ⟨φ_m(t')|[i∂_{t'} − H(t')]|φ_m(t')⟩ (Eq. (5)). Exact analytical expressions of the LR invariant have been found for particular classes of time-dependent quantum systems. Intriguingly, the above system is shown to possess an invariant of the form I(t) = α(t)·J (Eq. (6)). It is direct to verify that the specified α_i(t) satisfy the precession equation α̇ = Ω(t) × α, and thus the relation of Eq. (4) is fulfilled.
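To see why such an invariant exists independently of the pulse details, the following su(2) check is useful (a standard computation, using only H(t) = Ω(t)·J with Ω = (Ω_x, 0, Ω_z) and I(t) = α(t)·J):

\[
\frac{\partial I}{\partial t} + \frac{1}{i}\,[I, H]
= \dot{\boldsymbol{\alpha}}\cdot\mathbf{J} + (\boldsymbol{\alpha}\times\boldsymbol{\Omega})\cdot\mathbf{J}
= \bigl(\dot{\boldsymbol{\alpha}} - \boldsymbol{\Omega}\times\boldsymbol{\alpha}\bigr)\cdot\mathbf{J},
\]

so Eq. (4) holds precisely when α(t) precesses as α̇ = Ω(t) × α(t); the model's specific α_i(t) are constructed to satisfy this Bloch-type equation.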
To calculate the LR phase presented in Eq. (5), one notices that |α(t)| = 1, so the I(t) of Eq. (6) can be written as a rotated J_z whose eigenvectors are |φ_m(t)⟩, in which |m⟩ (m = −j, −j + 1, · · · , j) represents the eigenstate of J_z. With these notations, the two terms contained in the kernel of the integral of Eq. (5) can be worked out straightforwardly. It happens that the first term, ⟨φ_m(t)|i∂_t|φ_m(t)⟩, which denotes a nonadiabatic counterpart of the geometric connection of the adiabatic evolution, always vanishes in the present system. The second term, ⟨φ_m(t)|H(t)|φ_m(t)⟩, identifies the diabatic energy levels E_m(t) of the system. As κ → 1, they recover the adiabatic levels E^ad_m(t). We illustrate both E^ad_m(t) and E_m(t) for the j = 1/2 case in Fig. 1(b). The rigorous dynamical solution achieved above is applicable to the general angular-momentum system with an arbitrary azimuthal quantum number j. This marks a significant difference from the original LZ model, since the exact LZ formula of the latter, which was obtained as an asymptotic result of Weber's parabolic cylinder functions, applies only to the two-level system. Moreover, it is worth stressing that the demonstration of an overall dynamical invariant for the above model is highly nontrivial, as the original model does not possess such an invariant. As will be shown below, it implies that the survival probability of the adiabatic state in this model, despite the existence of intermediate transitions associated with nonadiabatic effects, tends asymptotically to unity for the overall evolution. This suggests that the nonadiabatic evolution of the model can serve as a protocol for complete population transfer.
III. DYNAMICAL TRANSITIONS IN THE MODEL
A. Protocols for nonadiabatic population transfer
Following the expression of Eq. (6), I(t) evolves from −J_z to J_z along a geodesic curve in the Bloch space during the overall evolution t ∈ (−∞, ∞). Since the eigenstates of I(t) are transported in parallel without transitions among them, an initial eigenstate |m⟩ will evolve to the ending state |−m⟩ as t → ∞. Therefore, up to a phase term, the generated dynamics yields the complete population transfer |m⟩ ↔ |−m⟩ for the system, whether the sweep process is adiabatic or nonadiabatic.
For an irreducible space spanned by the angular-momentum operators with a specific quantum number j, an explicit expression for the basis state |φ_m(t)⟩ can be obtained from the rotation-matrix elements D^j_{m'm}(θ) ≡ ⟨m'|e^{iθJ_y}|m⟩ (Eq. (9)), which have explicit expressions for the specified j; the index m' of the summation is taken over −j, −j + 1, · · · , j. Specifically, for the two-level system with j = 1/2, one has

|φ_±(t)⟩ = e^{±iϕ/2} cos[θ(t)/2] |±⟩ ± e^{∓iϕ/2} sin[θ(t)/2] |∓⟩,   (10)

where we have used the notation |±⟩ for |±1/2⟩. Accordingly, the phase-equipped dynamical basis |ψ_±(t)⟩ is obtained straightforwardly, and the process of the population transfer is then characterized explicitly. The driving field in the present scheme has finite intensity and tends to zero asymptotically as t → ±∞. In a practical scanning process the driving field should be pulsed over a finite time duration with truncation, that is, t ∈ [−τ_c, τ_c]. It turns out that the cutoff of the pulse has very limited influence on the transition probability. For simplicity, let us take the above two-level case as an example. The transition probability induced by the sweep over the period t ∈ [−τ_c, τ_c] is defined by P ≡ |⟨−|U(τ_c, −τ_c)|+⟩|² (or equally by |⟨+|U(τ_c, −τ_c)|−⟩|²), in which U(τ_c, −τ_c) accounts for the generated evolution operator

U(τ_c, −τ_c) = Σ_± e^{iΦ_±(τ_c, −τ_c)} |φ_±(τ_c)⟩⟨φ_±(−τ_c)|.   (13)

A straightforward calculation then yields the loss of fidelity of the population transfer, defined by P_δ ≡ 1 − P, which is bounded by the factor (1 + ν²τ_c²)^{−1}. It is seen that for ντ_c ≥ 10π, the population transfer is realized with a high fidelity: P_δ ≲ 10^{−3}.
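As a quick arithmetic illustration of the quoted bound (no input beyond the numbers above): with ντ_c = 10π,

\[
P_\delta \;\le\; \frac{1}{1+\nu^2\tau_c^2} \;=\; \frac{1}{1+100\pi^2} \;\approx\; \frac{1}{9.9\times 10^{2}} \;\approx\; 1.0\times 10^{-3},
\]

which reproduces the stated fidelity level P_δ ≲ 10^{−3}.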
B. Nonadiabaticity-induced transitions and survival probabilities of the adiabatic states in the multi-level systems
Dynamic control of a multi-channel nonadiabatic process is usually a more challenging task, and there have been extensive studies on the multi-state version of the Landau-Zener model. Intriguingly, the model proposed above applies directly to the multi-level system, and the multi-channel dynamical transitions can be manifested by exploring the model with higher quantum number j. For the cases of j = 1 and j = 3/2, the corresponding representation matrices D^j(θ) can be written out explicitly (Eqs. (15) and (16)). [Figure caption fragments: (b) the j = 1 model with an initial state |0⟩, in which the intermediate transitions to |ψ^ad_1(t)⟩ and to |ψ^ad_{−1}(t)⟩ have an equal probability; (c) the j = 3/2 model in which the initial state is |3/2⟩ (n = 3/2) and all the elements T_mn (m = ±3/2, ±1/2) are characterized; (d) the j = 3/2 model in which the initial state is |1/2⟩ (n = 1/2).] The corresponding evolutions are depicted in Fig. 2, in which the initial states are taken to be |1⟩ and |3/2⟩, respectively.
To further characterize the nonadiabatic effects in the dynamical evolution, we evaluate the matrix of transition probabilities T_mn = |⟨ψ^ad_m(t)|ψ_n(t)⟩|², in which |ψ^ad_m(t)⟩ stands for the instantaneous adiabatic eigenvector of the Hamiltonian (2). The diagonal elements of the matrix T represent the survival probabilities of the adiabatic basis states, and the off-diagonal ones describe unambiguously the nonadiabaticity-induced transitions between these adiabatic states along the evolution. It is recognized that the basis states |ψ_m(t)⟩, which are identical to |ψ^ad_m(t)⟩ at the initial time t → −∞, will exhibit intermediate transitions during the evolution. However, as |ψ_m(t)⟩ recovers |ψ^ad_m(t)⟩ (up to a phase factor) at t → ∞, the desired population transfer is not destroyed by these nonadiabaticity-induced transitions. In Fig. 3 we illustrate these phenomena in detail for the models with j = 1 and j = 3/2.
IV. NOISE EFFECTS IN THE PRESENCE OF DISSIPATION
In realistic systems the noise due to the surrounding environment is inevitable. The influence of system-bath coupling on the transition probability of the original LZ model has been studied in various contexts. In the following we investigate the noise effect on the dynamics of the present modulated LZ model. Typically, we focus on the two-level system and estimate the population transfer in the presence of spin flip noise, which can arise when the interaction of the spin system with its fermionic reservoir is involved. Within the Markovian regime, the evolution of the system is described by the master equation of Eq. (17), where γ_i (i = x, y, z) accounts for the damping rate of the corresponding spin flip process. Since we only consider the j = 1/2 case, it is convenient to introduce the Bloch vector (ρ_x, ρ_y, ρ_z) to describe the elements of the density operator, that is, ρ_x = ρ_{+−} + ρ_{−+}, ρ_y = −i(ρ_{+−} − ρ_{−+}), and ρ_z = ρ_{++} − ρ_{−−}. According to Eq. (17), these components satisfy the closed set of equations (18). For the situation γ_x = γ_y = 0, the loss mechanism reduces to a pure phase damping process which does not lead to direct transitions between the two levels |±⟩. Nevertheless, as the dephasing process alters the trajectory of the dynamical evolution generated by H(t), it results in an imperfect effect on the desired population transfer. To characterize the influence of the noise on the dynamical evolution, the central task is to compute the fidelity F(t) = |⟨φ_+(t)|ρ(t)|φ_+(t)⟩|, in which |φ_+(t)⟩ is given explicitly in Eq. (10). F(t) describes the overlap between the actual time-evolving state and the target dynamical basis |φ_+(t)⟩. Starting from the initial state |+⟩, we solve numerically the set of equations (18) for both the dephasing process and the random spin flip process with γ_{x,y,z} = γ. For the dephasing process, reduction of the fidelity mainly occurs in the vicinity of the point t = 0. We have chosen the time duration ντ_c = 10π, for which the influence of the cutoff error is negligible. The result shows that the driving protocol is insensitive to the dephasing, and a fidelity higher than 0.99 is obtained even for a ratio γ/ν ≲ 10^{−2}. On the other hand, the spin flip noise with a homogeneous damping rate exerts detrimental effects on the desired state transfer continuously over the whole time evolution. In this calculation we choose ντ_c = 8; besides the noise effect, the dramatic truncation of the scanning process here has only a slight influence on the population transfer. The resulting time evolution of the fidelity F(t) is illustrated in Fig. 4, in which different values of the ratio γ/ν are assumed.
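For concreteness, a minimal sketch of the Bloch equations follows, assuming the dissipator takes the standard Lindblad form ρ̇ = −i[H, ρ] + Σ_i γ_i(σ_iρσ_i − ρ); this is an assumption consistent with the Markovian treatment described above, though conventions in the model's Eqs. (17)-(18) may differ:

\[
\dot\rho_x = -\Omega_z\rho_y - 2(\gamma_y+\gamma_z)\rho_x,\qquad
\dot\rho_y = \Omega_z\rho_x - \Omega_x\rho_z - 2(\gamma_x+\gamma_z)\rho_y,\qquad
\dot\rho_z = \Omega_x\rho_y - 2(\gamma_x+\gamma_y)\rho_z.
\]

In this form the pure dephasing case γ_x = γ_y = 0 damps only the transverse components ρ_x and ρ_y, consistent with the observation that it induces no direct transitions between |±⟩.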
V. CONCLUSION
We have investigated the exact dynamics of a modulated LZ model and exploited it as a design for nonadiabatic quantum control. Differing from the original LZ model, this modulated model possesses an analytical dynamical invariant over the whole time domain, and the generated dynamics is fully solvable analytically. While serving as a protocol for population transfer, the model possesses the following distinct advantages: 1) the nonadiabatic dynamics generated by the model itself can realize complete population transfer; 2) the protocol uses only finite driving fields, which avoids the nonrealistic ingredient of infinite driving assumed in the original LZ and other analogous protocols. Furthermore, the scheme is applicable to multi-level systems, which offers an unambiguous scenario to manifest the multi-channel transitions induced by the nonadiabatic effects in the state transfer process.
As the noise due to the dissipative environment leads to detrimental effects on the desired control process, we have also investigated the loss of fidelity of the protocol when the system is subjected to dissipation. The numerical calculations reveal that the protocol is not sensitive to pure phase damping noise. On the other hand, to obtain high-fidelity population transfer in the presence of spin flip noise with a homogeneous damping rate γ, our calculation shows that a requirement on the scanning rate of the protocol, ν/γ ≳ 10³, should in general be satisfied. Suppose that the coherence time γ^{−1} is of order ∼10² µs (which is achievable for the electron spin of the nitrogen-vacancy center in diamond); then an approximate evaluation yields that the sweep frequency should be ν ≳ 10 MHz and the time duration of the pulse τ_c ∼ π µs. Taking κ = 0.6 (cf. Fig. 1), one gets η ∼ 12.5 MHz. Potential experimental implementation of the protocol in physical systems is highly expected.
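The closing numbers can be traced through directly (simple arithmetic on the quoted figures, with the high-fidelity condition ντ_c = 10π taken from Sec. III):

\[
\gamma^{-1}\sim 10^{2}\,\mu\text{s}\;\Rightarrow\;\gamma\sim 10\,\text{kHz},\qquad
\nu \gtrsim 10^{3}\,\gamma = 10\,\text{MHz},\qquad
\tau_c = \frac{10\pi}{\nu} \approx \pi\,\mu\text{s}.
\]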
/**
* Mock implementation of SoapServiceDescriptor that also implements AxisCompatible.
*/
public class MockAxisCompatibleServiceDescriptor implements AdsServiceDescriptor,
AxisCompatible {
public static final String VERSION = "v201511";
@Override
public Class<?> getLocatorClass() throws ClassNotFoundException {
return MockAxisServiceLocator.class;
}
@Override
public Class<?> getInterfaceClass() throws ServiceException {
return MockAxisServiceInterface.class;
}
/**
* Returns {@code /mocks/}{@link #VERSION}{@code /MockAxisService} appended to endpointServer
* after removing any trailing forward slash from endpointServer.
*
* <p>For example, given endpointServer of {@code https://ads.google.com/} or
* {@code https://ads.google.com}, this method returns
* {@code https://ads.google.com/mocks/v201511/MockAxisService}.
*/
@Override
public String getEndpointAddress(String endpointServer) throws MalformedURLException {
String trimmedEndPointServer = StringUtils.removeEnd(endpointServer, "/");
return String.format("%s/mocks/%s/MockAxisService", trimmedEndPointServer, VERSION);
}
/**
* Returns {@link #VERSION}.
*/
@Override
public String getVersion() {
return VERSION;
}
} |
import * as React from 'react';
import layerMouseTouchEvents from '../layer-events-hoc';
import { mount } from 'enzyme';
import { withContext } from 'recompose';
const PropTypes = require('prop-types'); // tslint:disable-line
class MockComponent extends React.Component<any> {
public render() {
return <h1>{this.props.id}</h1>;
}
}
const LayerHOC = layerMouseTouchEvents(MockComponent);
describe('layer-events-hoc', () => {
let onMock;
let LayerHOCWithContext;
beforeEach(() => {
onMock = jest.fn();
LayerHOCWithContext = withContext(
{
map: PropTypes.object
},
() => ({
map: {
on: onMock
}
})
)(LayerHOC);
});
it('Should default the id if none is passed', () => {
const res = mount(<LayerHOCWithContext />);
expect(res.find('h1').text()).toBe('layer-1');
});
it('should listen all mouse and touch events', () => {
const res = mount(<LayerHOCWithContext />);
const events = [
'click',
'mouseenter',
'mouseleave',
'mousedown',
'touchstart'
];
expect(onMock.mock.calls.map(call => call[0])).toEqual(events);
});
});
/**
* This panel allows the user to select and configure a classifier, set the
* attribute of the current dataset to be used as the class, and perform an
* Experiment (like in the Experimenter) with this Classifier/Dataset
* combination. The results of the experiment runs are stored in a result
* history so that previous results are accessible. <p/>
*
* Based on the ClassifierPanel code (by Len Trigg, Mark Hall and
* Richard Kirkby).
*
* @author FracPete (fracpete at waikato dot ac dot nz)
* @version $Revision: 2029 $
*/
public class CollectiveClassifierPanel
extends JPanel
implements CapabilitiesFilterChangeListener, ExplorerPanel, LogHandler {
/** for serialization. */
private static final long serialVersionUID = 2078066653508312179L;
/** the key for the model. */
public final static String KEY_MODEL = "model";
/** the key for the predictions. */
public final static String KEY_PREDICTIONS = "predictions";
/** the key for the errors. */
public final static String KEY_ERRORS = "errors";
/** the maximum length for relation names. */
public final static int MAX_RELATIONNAME_LENGTH = 15;
/** the parent frame. */
protected Explorer m_Explorer = null;
/** Lets the user configure the classifier. */
protected GenericObjectEditor m_ClassifierEditor = new GenericObjectEditor();
/** The panel showing the current classifier selection. */
protected PropertyPanel m_CEPanel = new PropertyPanel(m_ClassifierEditor);
/** The output area for classification results. */
protected JTextArea m_OutText = new JTextArea(20, 40);
/** The destination for log/status messages. */
protected Logger m_Log = new SysErrLog();
/** The buffer saving object for saving output. */
protected SaveBuffer m_SaveOut = new SaveBuffer(m_Log, this);
/** A panel controlling results viewing. */
protected ResultHistoryPanel m_History = new ResultHistoryPanel(m_OutText);
/** the panel for the options (evaluation, parameters, class). */
protected JPanel m_PanelOptions = new JPanel(new BorderLayout());
/** The type of evaluation: cross-validation/random split/unlabeled+test set. */
protected JComboBox m_EvalCombo = new JComboBox(new String[]{"Cross-validation", "Random split", "Unlabeled/Test set"});
/** the label for the CV parameters. */
protected JPanel m_CVPanel = new JPanel();
/** The label for the number of folds. */
protected JLabel m_CVFoldsLabel = new JLabel("Folds");
/** the number of folds. */
protected JTextField m_CVFoldsText = new JTextField("10", 10);
/** The label for the CV seed value. */
protected JLabel m_CVSeedLabel = new JLabel("Seed");
/** the CV seed value. */
protected JTextField m_CVSeedText = new JTextField("1", 10);
/** The label for the CV swap folds. */
protected JLabel m_CVSwapFoldsLabel = new JLabel("Swap folds");
/** the CV swap folds checkbox. */
protected JCheckBox m_CVSwapFoldsCheckBox = new JCheckBox();
/** the label for the random split parameters. */
protected JPanel m_SplitPanel = new JPanel();
/** The label for the percentage for the random split. */
protected JLabel m_SplitPercLabel = new JLabel("Percent");
/** the percentage for the random split. */
protected JTextField m_SplitPercText = new JTextField("10", 10);
/** The label for the random split seed value. */
protected JLabel m_SplitSeedLabel = new JLabel("Seed");
/** the random split seed value. */
protected JTextField m_SplitSeedText = new JTextField("1", 10);
/** The label for the random split preserve order. */
protected JLabel m_SplitPreserveOrderLabel = new JLabel("Preserve order");
/** the random split preserve order checkbox. */
protected JCheckBox m_SplitPreserveOrderCheckBox = new JCheckBox();
/** the label for the test set parameters. */
protected JPanel m_TestPanel = new JPanel();
/** The label for the unlabeled set file. */
protected JLabel m_UnlabeledFileLabel = new JLabel("Unlabeled set");
/** the load unlabeled set file button. */
protected JButton m_UnlabeledFileButton = new JButton(ComponentHelper.getImageIcon("open.gif"));
/** the remove unlabeled set file button. */
protected JButton m_UnlabeledFileButtonRemove = new JButton(ComponentHelper.getImageIcon("delete.gif"));
/** The label depicting the unlabeled set file state. */
protected JLabel m_UnlabeledFileLabelState = new JLabel("");
/** The label for the test set file. */
protected JLabel m_TestFileLabel = new JLabel("Test set");
/** the load test set file button. */
protected JButton m_TestFileButton = new JButton(ComponentHelper.getImageIcon("open.gif"));
/** the remove unlabeled set file button. */
protected JButton m_TestFileButtonRemove = new JButton(ComponentHelper.getImageIcon("delete.gif"));
/** The label depicting the test set file state. */
protected JLabel m_TestFileLabelState = new JLabel("");
/** Lets the user select the class column. */
protected JComboBox m_ClassCombo = new JComboBox();
/** Click to start running the experiment. */
protected JButton m_StartBut = new JButton("Start");
/** Click to stop a running experiment. */
protected JButton m_StopBut = new JButton("Stop");
/** Stop the class combo from taking up too much space. */
private Dimension COMBO_SIZE = new Dimension(200, m_StartBut.getPreferredSize().height);
/** The main set of instances we're playing with. */
protected Instances m_Instances;
/** A thread that classification runs in. */
protected Thread m_RunThread;
/** the file chooser for loading the additional datasets. */
protected ConverterFileChooser m_DatasetFileChooser;
/** the current unlabeled set. */
protected Instances m_UnlabeledSet;
/** the current test set. */
protected Instances m_TestSet;
/** for saving models. */
protected JFileChooser m_ModelFileChooser;
/** Lets the user configure the ClassificationOutput. */
protected GenericObjectEditor m_ClassificationOutputEditor = new GenericObjectEditor(true);
/** ClassificationOutput configuration. */
protected PropertyPanel m_ClassificationOutputPanel = new PropertyPanel(m_ClassificationOutputEditor);
/**
* Creates the Experiment panel.
*/
public CollectiveClassifierPanel() {
m_DatasetFileChooser = new ConverterFileChooser(new File(ExplorerDefaults.getInitialDirectory()));
m_ModelFileChooser = new JFileChooser();
ExtensionFileFilter filter = new ExtensionFileFilter("model", "Model files");
m_ModelFileChooser.addChoosableFileFilter(filter);
m_ModelFileChooser.setFileFilter(filter);
m_OutText.setEditable(false);
m_OutText.setFont(new Font("Monospaced", Font.PLAIN, 12));
m_OutText.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
m_OutText.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
if ((e.getModifiers() & InputEvent.BUTTON1_MASK)
!= InputEvent.BUTTON1_MASK) {
m_OutText.selectAll();
}
}
});
m_History.setBorder(BorderFactory.createTitledBorder("Result list (right-click for options)"));
m_ClassifierEditor.setClassType(CollectiveClassifier.class);
m_ClassifierEditor.setValue(new YATSI());
m_ClassifierEditor.addPropertyChangeListener(new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent e) {
repaint();
}
});
m_EvalCombo.setToolTipText("The type of evaluation to be performed");
m_EvalCombo.setEnabled(false);
m_EvalCombo.setPreferredSize(COMBO_SIZE);
m_EvalCombo.setMaximumSize(COMBO_SIZE);
m_EvalCombo.setMinimumSize(COMBO_SIZE);
m_EvalCombo.setSelectedIndex(0);
m_EvalCombo.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
int selected = m_EvalCombo.getSelectedIndex();
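// Show only the parameter sub-panel matching the selected evaluation type.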
if (selected == 0) {
m_PanelOptions.remove(m_SplitPanel);
m_PanelOptions.remove(m_TestPanel);
m_PanelOptions.add(m_CVPanel, BorderLayout.CENTER);
}
else if (selected == 1) {
m_PanelOptions.remove(m_CVPanel);
m_PanelOptions.remove(m_TestPanel);
m_PanelOptions.add(m_SplitPanel, BorderLayout.CENTER);
}
else if (selected == 2) {
m_PanelOptions.remove(m_CVPanel);
m_PanelOptions.remove(m_SplitPanel);
m_PanelOptions.add(m_TestPanel, BorderLayout.CENTER);
}
invalidate();
validate();
doLayout();
repaint();
}
});
m_ClassificationOutputEditor.setClassType(AbstractOutput.class);
m_ClassificationOutputEditor.setValue(new Null());
m_CVFoldsText.setToolTipText("Number of folds for cross-validation");
m_CVSeedText.setToolTipText("Seed value for randomizing data");
m_CVSwapFoldsCheckBox.setToolTipText("Swaps train/test set");
m_SplitPercText.setToolTipText("Percentage to use for training data");
m_SplitSeedText.setToolTipText("Seed value for randomizing data");
m_SplitPreserveOrderCheckBox.setToolTipText("Preserves the order in the data, suppresses randomization");
m_UnlabeledFileButton.setToolTipText("Click to select unlabeled set");
m_UnlabeledFileButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
int retVal = m_DatasetFileChooser.showOpenDialog(CollectiveClassifierPanel.this);
if (retVal != ConverterFileChooser.APPROVE_OPTION)
return;
AbstractFileLoader loader = m_DatasetFileChooser.getLoader();
try {
m_UnlabeledSet = loader.getDataSet();
m_UnlabeledSet = CollectiveHelper.removeLabels(m_UnlabeledSet, false);
updateFileLabel(m_UnlabeledFileLabelState, m_UnlabeledSet);
}
catch (Exception ex) {
ex.printStackTrace();
JOptionPane.showMessageDialog(
CollectiveClassifierPanel.this,
"Failed to load data from '" + m_DatasetFileChooser.getSelectedFile() + "':\n" + ex);
m_UnlabeledSet = null;
updateFileLabel(m_UnlabeledFileLabelState, m_UnlabeledSet);
}
}
});
m_UnlabeledFileButtonRemove.setToolTipText("Click to remove unlabeled set");
m_UnlabeledFileButtonRemove.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
m_UnlabeledSet = null;
updateFileLabel(m_UnlabeledFileLabelState, m_UnlabeledSet);
}
});
m_TestFileButton.setToolTipText("Click to select test set");
m_TestFileButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
int retVal = m_DatasetFileChooser.showOpenDialog(CollectiveClassifierPanel.this);
if (retVal != ConverterFileChooser.APPROVE_OPTION)
return;
AbstractFileLoader loader = m_DatasetFileChooser.getLoader();
try {
m_TestSet = loader.getDataSet();
updateFileLabel(m_TestFileLabelState, m_TestSet);
}
catch (Exception ex) {
ex.printStackTrace();
JOptionPane.showMessageDialog(
CollectiveClassifierPanel.this,
"Failed to load data from '" + m_DatasetFileChooser.getSelectedFile() + "':\n" + ex);
m_TestSet = null;
updateFileLabel(m_TestFileLabelState, m_TestSet);
}
}
});
m_TestFileButtonRemove.setToolTipText("Click to remove test set");
m_TestFileButtonRemove.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
m_TestSet = null;
updateFileLabel(m_TestFileLabelState, m_TestSet);
}
});
m_ClassCombo.setToolTipText("Select the attribute to use as the class");
m_ClassCombo.setEnabled(false);
m_ClassCombo.setPreferredSize(COMBO_SIZE);
m_ClassCombo.setMaximumSize(COMBO_SIZE);
m_ClassCombo.setMinimumSize(COMBO_SIZE);
m_ClassCombo.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
updateCapabilitiesFilter(m_ClassifierEditor.getCapabilitiesFilter());
}
});
m_StartBut.setToolTipText("Starts the evaluation");
m_StartBut.setEnabled(false);
m_StartBut.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
startClassifier();
}
});
m_StopBut.setToolTipText("Stops a running evaluation");
m_StopBut.setEnabled(false);
m_StopBut.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
stopClassifier();
}
});
m_History.setHandleRightClicks(false);
// see if we can popup a menu for the selected result
m_History.getList().addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
if (((e.getModifiers() & InputEvent.BUTTON1_MASK)
!= InputEvent.BUTTON1_MASK) || e.isAltDown()) {
int index = m_History.getList().locationToIndex(e.getPoint());
if (index != -1) {
String name = m_History.getNameAtIndex(index);
showPopup(name, e.getX(), e.getY());
} else {
showPopup(null, e.getX(), e.getY());
}
}
}
});
// Layout the GUI
JPanel pClassifier = new JPanel();
pClassifier.setBorder(
BorderFactory.createCompoundBorder(
BorderFactory.createTitledBorder("Classifier"),
BorderFactory.createEmptyBorder(0, 5, 5, 5)));
pClassifier.setLayout(new BorderLayout());
pClassifier.add(m_CEPanel, BorderLayout.NORTH);
m_PanelOptions.setBorder(
BorderFactory.createCompoundBorder(
BorderFactory.createTitledBorder("Evaluation options"),
BorderFactory.createEmptyBorder(0, 5, 5, 5)));
GridBagConstraints gbC;
GridBagLayout gbL;
// CV
gbL = new GridBagLayout();
m_CVPanel.setLayout(gbL);
// CV/Folds
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.gridy = 0;
gbC.gridx = 0;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_CVFoldsLabel, gbC);
m_CVPanel.add(m_CVFoldsLabel);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.HORIZONTAL;
gbC.gridy = 0;
gbC.gridx = 1;
gbC.weightx = 100;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_CVFoldsText, gbC);
m_CVPanel.add(m_CVFoldsText);
// CV/Seed
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.gridy = 1;
gbC.gridx = 0;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_CVSeedLabel, gbC);
m_CVPanel.add(m_CVSeedLabel);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.HORIZONTAL;
gbC.gridy = 1;
gbC.gridx = 1;
gbC.weightx = 100;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_CVSeedText, gbC);
m_CVPanel.add(m_CVSeedText);
// CV/swap folds
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.gridy = 2;
gbC.gridx = 0;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_CVSwapFoldsLabel, gbC);
m_CVPanel.add(m_CVSwapFoldsLabel);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.HORIZONTAL;
gbC.gridy = 2;
gbC.gridx = 1;
gbC.weightx = 100;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_CVSwapFoldsCheckBox, gbC);
m_CVPanel.add(m_CVSwapFoldsCheckBox);
m_PanelOptions.add(m_CVPanel, BorderLayout.CENTER);
// random split
gbL = new GridBagLayout();
m_SplitPanel.setLayout(gbL);
// random split/percentage
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.gridy = 0;
gbC.gridx = 0;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_SplitPercLabel, gbC);
m_SplitPanel.add(m_SplitPercLabel);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.HORIZONTAL;
gbC.gridy = 0;
gbC.gridx = 1;
gbC.weightx = 100;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_SplitPercText, gbC);
m_SplitPanel.add(m_SplitPercText);
// random split/Seed
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.gridy = 1;
gbC.gridx = 0;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_SplitSeedLabel, gbC);
m_SplitPanel.add(m_SplitSeedLabel);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.HORIZONTAL;
gbC.gridy = 1;
gbC.gridx = 1;
gbC.weightx = 100;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_SplitSeedText, gbC);
m_SplitPanel.add(m_SplitSeedText);
// random split/preserve order
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.gridy = 2;
gbC.gridx = 0;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_SplitPreserveOrderLabel, gbC);
m_SplitPanel.add(m_SplitPreserveOrderLabel);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.HORIZONTAL;
gbC.gridy = 2;
gbC.gridx = 1;
gbC.weightx = 100;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_SplitPreserveOrderCheckBox, gbC);
m_SplitPanel.add(m_SplitPreserveOrderCheckBox);
// test set
gbL = new GridBagLayout();
m_TestPanel.setLayout(gbL);
// unlabeled set/file
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.gridy = 0;
gbC.gridx = 0;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_UnlabeledFileLabel, gbC);
m_TestPanel.add(m_UnlabeledFileLabel);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.NONE;
gbC.gridy = 0;
gbC.gridx = 1;
gbC.weightx = 0;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_UnlabeledFileButton, gbC);
m_TestPanel.add(m_UnlabeledFileButton);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.NONE;
gbC.gridy = 0;
gbC.gridx = 2;
gbC.weightx = 0;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_UnlabeledFileButtonRemove, gbC);
m_TestPanel.add(m_UnlabeledFileButtonRemove);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.HORIZONTAL;
gbC.gridy = 0;
gbC.gridx = 3;
gbC.weightx = 100;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
m_UnlabeledFileLabelState = new JLabel("none");
gbL.setConstraints(m_UnlabeledFileLabelState, gbC);
m_TestPanel.add(m_UnlabeledFileLabelState);
// test set/file
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.gridy = 1;
gbC.gridx = 0;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_TestFileLabel, gbC);
m_TestPanel.add(m_TestFileLabel);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.NONE;
gbC.gridy = 1;
gbC.gridx = 1;
gbC.weightx = 0;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_TestFileButton, gbC);
m_TestPanel.add(m_TestFileButton);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.NONE;
gbC.gridy = 1;
gbC.gridx = 2;
gbC.weightx = 0;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
gbL.setConstraints(m_TestFileButtonRemove, gbC);
m_TestPanel.add(m_TestFileButtonRemove);
gbC = new GridBagConstraints();
gbC.anchor = GridBagConstraints.WEST;
gbC.fill = GridBagConstraints.HORIZONTAL;
gbC.gridy = 1;
gbC.gridx = 3;
gbC.weightx = 100;
gbC.ipadx = 20;
gbC.insets = new Insets(2, 5, 2, 5);
m_TestFileLabelState = new JLabel("none");
gbL.setConstraints(m_TestFileLabelState, gbC);
m_TestPanel.add(m_TestFileLabelState);
// Evaluation
JPanel pEval = new JPanel(new FlowLayout(FlowLayout.LEFT));
JLabel lEval = new JLabel("Evaluation");
pEval.add(lEval);
pEval.add(m_EvalCombo);
m_PanelOptions.add(pEval, BorderLayout.NORTH);
// output
JPanel pOut = new JPanel(new FlowLayout(FlowLayout.LEFT));
JLabel lOut = new JLabel("Output");
pOut.add(lOut);
pOut.add(m_ClassificationOutputPanel);
m_PanelOptions.add(pOut, BorderLayout.SOUTH);
// class
JPanel pClass = new JPanel();
pClass.setLayout(new GridLayout(2, 2));
pClass.add(m_ClassCombo);
m_ClassCombo.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
JPanel ssButs = new JPanel();
ssButs.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
ssButs.setLayout(new GridLayout(1, 2, 5, 5));
ssButs.add(m_StartBut);
ssButs.add(m_StopBut);
pClass.add(ssButs);
JPanel pOptionsButtons = new JPanel(new BorderLayout());
pOptionsButtons.add(m_PanelOptions, BorderLayout.CENTER);
pOptionsButtons.add(pClass, BorderLayout.SOUTH);
JPanel pOutput = new JPanel();
pOutput.setBorder(BorderFactory.createTitledBorder("Evaluation output"));
pOutput.setLayout(new BorderLayout());
final JScrollPane js = new JScrollPane(m_OutText);
pOutput.add(js, BorderLayout.CENTER);
js.getViewport().addChangeListener(new ChangeListener() {
private int lastHeight;
public void stateChanged(ChangeEvent e) {
JViewport vp = (JViewport)e.getSource();
int h = vp.getViewSize().height;
if (h != lastHeight) { // i.e. an addition not just a user scrolling
lastHeight = h;
int x = h - vp.getExtentSize().height;
vp.setViewPosition(new Point(0, x));
}
}
});
JPanel pOptionsHistory = new JPanel(new BorderLayout());
pOptionsHistory.add(pOptionsButtons, BorderLayout.NORTH);
pOptionsHistory.add(m_History, BorderLayout.CENTER);
JPanel pOptionsHistoryOutput = new JPanel(new BorderLayout());
pOptionsHistoryOutput.add(pOptionsHistory, BorderLayout.WEST);
pOptionsHistoryOutput.add(pOutput, BorderLayout.CENTER);
setLayout(new BorderLayout());
add(pClassifier, BorderLayout.NORTH);
add(pOptionsHistoryOutput, BorderLayout.CENTER);
}
/**
* Sets the Logger to receive informational messages.
*
* @param newLog the Logger that will now get info messages
*/
public void setLog(Logger newLog) {
m_Log = newLog;
}
/**
* Tells the panel to use a new set of instances.
*
* @param inst a set of Instances
*/
public void setInstances(Instances inst) {
m_Instances = inst;
String[] attribNames = new String [m_Instances.numAttributes()];
for (int i = 0; i < attribNames.length; i++) {
String type = "";
switch (m_Instances.attribute(i).type()) {
case Attribute.NOMINAL:
type = "(Nom) ";
break;
case Attribute.NUMERIC:
type = "(Num) ";
break;
case Attribute.STRING:
type = "(Str) ";
break;
case Attribute.DATE:
type = "(Dat) ";
break;
case Attribute.RELATIONAL:
type = "(Rel) ";
break;
default:
type = "(???) ";
}
attribNames[i] = type + m_Instances.attribute(i).name();
}
m_ClassCombo.setModel(new DefaultComboBoxModel(attribNames));
if (attribNames.length > 0) {
if (inst.classIndex() == -1)
m_ClassCombo.setSelectedIndex(attribNames.length - 1);
else
m_ClassCombo.setSelectedIndex(inst.classIndex());
m_EvalCombo.setEnabled(true);
m_ClassCombo.setEnabled(true);
m_CVPanel.setEnabled(true);
m_SplitPanel.setEnabled(true);
m_TestPanel.setEnabled(true);
m_StartBut.setEnabled(m_RunThread == null);
m_StopBut.setEnabled(m_RunThread != null);
}
else {
m_StartBut.setEnabled(false);
m_StopBut.setEnabled(false);
}
}
/**
* Handles constructing a popup menu with visualization options.
*
* @param name the name of the result history list entry clicked on by
* the user
* @param x the x coordinate for popping up the menu
* @param y the y coordinate for popping up the menu
*/
protected void showPopup(String name, int x, int y) {
JPopupMenu result;
JMenuItem menuitem;
final String selectedName;
final Hashtable<String,Object> additional;
result = new JPopupMenu();
selectedName = name;
if (selectedName != null)
additional = (Hashtable<String,Object>) m_History.getSelectedObject();
else
additional = new Hashtable<String,Object>();
menuitem = new JMenuItem("View in main window");
menuitem.setEnabled(selectedName != null);
menuitem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
m_History.setSingle(selectedName);
}
});
result.add(menuitem);
menuitem = new JMenuItem("View in separate window");
menuitem.setEnabled(selectedName != null);
menuitem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
m_History.openFrame(selectedName);
}
});
result.add(menuitem);
menuitem = new JMenuItem("Save result buffer");
menuitem.setEnabled(selectedName != null);
menuitem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
saveBuffer(selectedName);
}
});
result.add(menuitem);
menuitem = new JMenuItem("Delete result buffer");
menuitem.setEnabled(selectedName != null);
menuitem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
m_History.removeResult(selectedName);
}
});
result.add(menuitem);
result.addSeparator();
menuitem = new JMenuItem("Save model");
menuitem.setEnabled((selectedName != null) && additional.containsKey(KEY_MODEL));
menuitem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
saveModel((Object[]) additional.get(KEY_MODEL));
}
});
result.add(menuitem);
result.addSeparator();
menuitem = new JMenuItem("Visualize errors");
menuitem.setEnabled((selectedName != null) && additional.containsKey(KEY_ERRORS));
menuitem.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
visualizeClassifierErrors((VisualizePanel) additional.get(KEY_ERRORS));
}
});
result.add(menuitem);
result.show(m_History.getList(), x, y);
}
/**
* outputs the header for the predictions on the data.
*
* @param outBuff the buffer to add the output to
* @param output for generating the classification output
* @param title the title to print
*/
protected void printPredictionsHeader(StringBuffer outBuff, AbstractOutput output, String title) {
if (output.generatesOutput())
outBuff.append("=== Predictions on " + title + " ===\n\n");
}
/**
* Cross-validates the classifier.
*
* @param classifier the classifier to evaluate
* @param output for collecting the test results
* @param outBuff the buffer for the output
* @return the generated evaluation
   * @throws Exception if evaluation fails
*/
protected CollectiveEvaluation crossValidate(CollectiveClassifier classifier, AbstractOutput output, StringBuffer outBuff) throws Exception {
CollectiveEvaluation eval;
Instances train = new Instances(m_Instances);
train.setClassIndex(m_ClassCombo.getSelectedIndex());
eval = new CollectiveEvaluation(train);
int folds = Integer.parseInt(m_CVFoldsText.getText());
int seed = Integer.parseInt(m_CVSeedText.getText());
eval.setSwapFolds(m_CVSwapFoldsCheckBox.isSelected());
if (output != null) {
printPredictionsHeader(outBuff, output, "test data");
eval.crossValidateModel(classifier, train, folds, new Random(seed), output);
}
else {
eval.crossValidateModel(classifier, train, folds, new Random(seed));
}
return eval;
}
/**
* Evaluates the classifier on a random split.
*
* @param classifier the classifier to evaluate
* @param output for collecting the test results
* @param outBuff the buffer for the output
* @return the generated evaluation
   * @throws Exception if evaluation fails
*/
protected CollectiveEvaluation evalRandomSplit(CollectiveClassifier classifier, AbstractOutput output, StringBuffer outBuff) throws Exception {
CollectiveEvaluation eval;
Instances train = new Instances(m_Instances);
Instances test;
double percentage = Double.parseDouble(m_SplitPercText.getText());
int seed = Integer.parseInt(m_SplitSeedText.getText());
if (!m_SplitPreserveOrderCheckBox.isSelected())
train.randomize(new Random(seed));
int trainSize = (int) Math.round(train.numInstances() * percentage / 100);
int testSize = train.numInstances() - trainSize;
test = new Instances(train, trainSize, testSize);
train = new Instances(train, 0, trainSize);
train.setClassIndex(m_ClassCombo.getSelectedIndex());
test.setClassIndex(m_ClassCombo.getSelectedIndex());
Instances unlabeled = CollectiveHelper.removeLabels(test, true);
eval = new CollectiveEvaluation(train);
classifier.buildClassifier(train, unlabeled);
if (output != null) {
printPredictionsHeader(outBuff, output, "test split");
output.printHeader();
eval.evaluateModel(classifier, test, output);
output.printFooter();
}
else {
eval.evaluateModel(classifier, test);
}
return eval;
}
/**
* Evaluates the classifier on a test set.
*
* @param classifier the classifier to evaluate
* @param output for collecting the test results
* @param outBuff the buffer for the output
* @return the generated evaluation
   * @throws Exception if evaluation fails
*/
protected CollectiveEvaluation evalTestSet(CollectiveClassifier classifier, AbstractOutput output, StringBuffer outBuff) throws Exception {
CollectiveEvaluation eval;
if (m_UnlabeledSet == null)
throw new IllegalStateException("No unlabeled dataset set!");
Instances train = new Instances(m_Instances);
train.setClassIndex(m_ClassCombo.getSelectedIndex());
Instances unlabeled = new Instances(m_UnlabeledSet);
unlabeled.setClassIndex(m_ClassCombo.getSelectedIndex());
if (!train.equalHeaders(unlabeled))
throw new IllegalStateException(train.equalHeadersMsg(unlabeled));
eval = new CollectiveEvaluation(train);
classifier.buildClassifier(train, unlabeled);
if (m_TestSet != null) {
Instances test = new Instances(m_TestSet);
test.setClassIndex(m_ClassCombo.getSelectedIndex());
if (!train.equalHeaders(test))
throw new IllegalStateException(train.equalHeadersMsg(test));
if (output != null) {
printPredictionsHeader(outBuff, output, "test set");
output.printHeader();
eval.evaluateModel(classifier, test, output);
output.printFooter();
}
else {
eval.evaluateModel(classifier, test);
}
}
return eval;
}
/**
* Starts running the currently configured classifier.
*/
protected void startClassifier() {
if (m_RunThread == null) {
synchronized (this) {
m_StartBut.setEnabled(false);
m_StopBut.setEnabled(true);
}
m_RunThread = new Thread() {
@Override
public void run() {
// set up everything:
m_Log.statusMessage("Setting up...");
try {
CollectiveEvaluation eval = null;
CollectiveClassifier classifier = (CollectiveClassifier) AbstractClassifier.makeCopy((CollectiveClassifier) m_ClassifierEditor.getValue());
String title = "";
boolean model = false;
boolean hasPred = false;
StringBuffer outBuff = new StringBuffer();
Instances header = new Instances(m_Instances, 0);
header.setClassIndex(m_ClassCombo.getSelectedIndex());
AbstractOutput output = (AbstractOutput) m_ClassificationOutputEditor.getValue();
if (output instanceof Null)
output = null;
if (output != null) {
output.setHeader(header);
output.setBuffer(outBuff);
}
// run information
outBuff.append("=== Run information ===\n\n");
outBuff.append("Scheme: " + Utils.toCommandLine(classifier) + "\n");
outBuff.append("Relation: " + m_Instances.relationName() + '\n');
outBuff.append("Instances: " + m_Instances.numInstances() + '\n');
outBuff.append("Attributes: " + m_Instances.numAttributes() + '\n');
if (m_Instances.numAttributes() < 100) {
for (int i = 0; i < m_Instances.numAttributes(); i++)
outBuff.append(" " + m_Instances.attribute(i).name() + '\n');
}
else {
outBuff.append(" [list of attributes omitted]\n");
}
outBuff.append("Test mode: ");
switch (m_EvalCombo.getSelectedIndex()) {
case 0: // CV mode
outBuff.append("" + m_CVFoldsText.getText() + "-fold cross-validation\n");
break;
case 1: // Random split
outBuff.append("random split " + m_SplitPercText.getText() + "% train, remainder test" + (m_SplitPreserveOrderCheckBox.isSelected() ? " (order preserved)" : "") + "\n");
break;
case 2: // Test set
if (m_UnlabeledSet == null)
throw new IllegalStateException("No unlabeled dataset set!");
outBuff.append("unlabeled set: " + m_UnlabeledSet.numInstances() + " instances\n");
if (m_TestSet != null)
outBuff.append("test set: " + m_UnlabeledSet.numInstances() + " instances\n");
break;
default:
outBuff.append("unknown\n");
break;
}
outBuff.append("\n");
m_Log.logMessage("Started evaluation for " + m_ClassifierEditor.getValue().getClass().getName());
if (m_Log instanceof TaskLogger)
((TaskLogger)m_Log).taskStarted();
// evaluating
m_Log.statusMessage("Evaluating...");
// cross-validation
if (m_EvalCombo.getSelectedIndex() == 0) {
title = "Cross-validation";
hasPred = true;
eval = crossValidate(classifier, output, outBuff);
}
// random split
else if (m_EvalCombo.getSelectedIndex() == 1) {
title = "Random split";
model = true;
hasPred = true;
eval = evalRandomSplit(classifier, output, outBuff);
}
// test set
else if (m_EvalCombo.getSelectedIndex() == 2) {
title = "Supplied test set";
model = true;
hasPred = (m_TestSet != null);
eval = evalTestSet(classifier, output, outBuff);
}
else {
throw new IllegalArgumentException("Unknown evaluation type: " + m_EvalCombo.getSelectedItem());
}
// assemble output
if (model) {
outBuff.append("\n=== Model ===\n");
outBuff.append("\n");
outBuff.append(classifier.toString());
outBuff.append("\n");
}
if (hasPred) {
outBuff.append("\n" + eval.toSummaryString("=== " + title + " ===\n", false));
outBuff.append("\n" + eval.toClassDetailsString());
outBuff.append("\n" + eval.toMatrixString());
}
// additional information
Hashtable<String,Object> additional = new Hashtable<String,Object>();
// 1. model
if (model)
additional.put(KEY_MODEL, new Object[]{classifier, new Instances(m_Instances, 0)});
// 2. predictions
if (hasPred && eval.predictions() != null)
additional.put(KEY_PREDICTIONS, eval.predictions());
// 3. errors
if (hasPred && eval.predictions() != null) {
DataGenerator generator = new DataGenerator(eval);
PlotData2D plotdata = generator.getPlotData();
plotdata.setPlotName(generator.getPlotInstances().relationName());
VisualizePanel visualizePanel = new VisualizePanel();
visualizePanel.addPlot(plotdata);
visualizePanel.setColourIndex(plotdata.getPlotInstances().classIndex());
if ((visualizePanel.getXIndex() == 0) && (visualizePanel.getYIndex() == 1)) {
try {
visualizePanel.setXIndex(visualizePanel.getInstances().classIndex()); // class
visualizePanel.setYIndex(visualizePanel.getInstances().classIndex() - 1); // predicted class
}
catch (Exception e) {
// ignored
}
}
additional.put(KEY_ERRORS, visualizePanel);
}
String name = m_ClassifierEditor.getValue().getClass().getName().replaceAll("weka\\.classifiers\\.", "");
SimpleDateFormat df = new SimpleDateFormat("HH:mm:ss");
name = df.format(new Date()) + " - " + name;
m_History.addResult(name, outBuff);
m_History.addObject(name, additional);
m_History.setSingle(name);
m_Log.statusMessage("Evaluation finished.");
m_Log.statusMessage("OK");
}
catch (Exception ex) {
ex.printStackTrace();
m_Log.logMessage(ex.getMessage());
JOptionPane.showMessageDialog(
CollectiveClassifierPanel.this,
"Problem evaluating:\n" + ex.getMessage(),
"Evaluation",
JOptionPane.ERROR_MESSAGE);
m_Log.statusMessage("Problem evaluating");
}
finally {
synchronized (this) {
m_StartBut.setEnabled(true);
m_StopBut.setEnabled(false);
m_RunThread = null;
}
if (m_Log instanceof TaskLogger)
((TaskLogger)m_Log).taskFinished();
}
}
};
m_RunThread.setPriority(Thread.MIN_PRIORITY);
m_RunThread.start();
}
}
/**
* Save the currently selected experiment output to a file.
*
* @param name the name of the buffer to save
*/
protected void saveBuffer(String name) {
StringBuffer sb = m_History.getNamedBuffer(name);
if (sb != null) {
if (m_SaveOut.save(sb))
m_Log.logMessage("Save successful.");
}
}
  /**
   * Saves the model from the currently selected result to a file.
   *
   * @param data the model data to save (classifier and training header)
   */
protected void saveModel(Object[] data) {
int retVal = m_ModelFileChooser.showSaveDialog(CollectiveClassifierPanel.this);
if (retVal != JFileChooser.APPROVE_OPTION)
return;
try {
SerializationHelper.writeAll(m_ModelFileChooser.getSelectedFile().getAbsolutePath(), data);
m_Log.logMessage("Model saved successfully");
}
catch (Exception ex) {
String msg = "Failed to save model to '" + m_ModelFileChooser.getSelectedFile() + "': " + ex;
m_Log.logMessage(msg);
JOptionPane.showMessageDialog(CollectiveClassifierPanel.this, msg);
}
}
/**
* Pops up a VisualizePanel for visualizing the data and errors for the
* classifier from the currently selected item in the results list.
*
* @param sp the VisualizePanel to pop up.
*/
protected void visualizeClassifierErrors(VisualizePanel sp) {
if (sp != null) {
JFrame jf = new javax.swing.JFrame("Classifier Visualize: " + sp.getName());
jf.setSize(600, 400);
jf.getContentPane().setLayout(new BorderLayout());
jf.getContentPane().add(sp, BorderLayout.CENTER);
jf.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
jf.setVisible(true);
}
}
/**
* Stops the currently running evaluation (if any).
*/
protected void stopClassifier() {
if (m_RunThread != null) {
m_RunThread.interrupt();
// This is deprecated (and theoretically the interrupt should do).
m_RunThread.stop();
}
}
  /**
   * Shortens the text to at most the specified number of characters. If the
   * text is too long, it gets truncated to "max-3" characters and "..." gets
   * appended.
   *
   * @param text the text to shorten, if necessary
   * @param max the maximum number of characters
   * @return the (potentially) shortened text
   */
protected String shorten(String text, int max) {
if (text.length() <= max)
return text;
else
return text.substring(0, max - 3) + "...";
}
  /**
   * Updates the file label with the relation name of the given dataset.
   *
   * @param label the label to update
   * @param data the dataset whose relation name to display, null to reset the label
   */
protected void updateFileLabel(JLabel label, Instances data) {
if (data == null) {
label.setText("none");
label.setToolTipText(null);
}
else {
label.setText(shorten(data.relationName(), MAX_RELATIONNAME_LENGTH));
label.setToolTipText(data.relationName());
}
}
/**
* updates the capabilities filter of the GOE.
*
* @param filter the new filter to use
*/
protected void updateCapabilitiesFilter(Capabilities filter) {
Instances tempInst;
Capabilities filterClass;
if (filter == null) {
m_ClassifierEditor.setCapabilitiesFilter(new Capabilities(null));
return;
}
if (!ExplorerDefaults.getInitGenericObjectEditorFilter())
tempInst = new Instances(m_Instances, 0);
else
tempInst = new Instances(m_Instances);
tempInst.setClassIndex(m_ClassCombo.getSelectedIndex());
try {
filterClass = Capabilities.forInstances(tempInst);
}
catch (Exception e) {
filterClass = new Capabilities(null);
}
// set new filter
m_ClassifierEditor.setCapabilitiesFilter(filterClass);
}
/**
* method gets called in case of a change event.
*
* @param e the associated change event
*/
public void capabilitiesFilterChanged(CapabilitiesFilterChangeEvent e) {
if (e.getFilter() == null)
updateCapabilitiesFilter(null);
else
updateCapabilitiesFilter((Capabilities) e.getFilter().clone());
}
/**
* Sets the Explorer to use as parent frame (used for sending notifications
* about changes in the data).
*
* @param parent the parent frame
*/
public void setExplorer(Explorer parent) {
m_Explorer = parent;
}
/**
* returns the parent Explorer frame.
*
* @return the parent
*/
public Explorer getExplorer() {
return m_Explorer;
}
/**
* Returns the title for the tab in the Explorer.
*
* @return the title of this tab
*/
public String getTabTitle() {
return "Collective";
}
/**
* Returns the tooltip for the tab in the Explorer.
*
* @return the tooltip of this tab
*/
public String getTabTitleToolTip() {
return "Collective classification";
}
} |
def remove(self, ip, no_write=False):
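        """
        Removes an IP address from the ban list, writing a 'remove' record
        unless no_write is set.

        :param ip: the IP address to remove
        :param no_write: if True, do not write a 'remove' record to the IP record store
        :return: True if the IP was present in the ban list and removed, False otherwise
        """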
if not no_write:
self.ip_record.write(ip, record_type='remove')
self.ip_record.remove(ip, record_types=['.block', '.permanent', '.add'])
entry = self._ip_ban_list.get(ip)
if not entry:
return False
del self._ip_ban_list[ip]
return True |
Once you’ve exhausted the jokes about green thumbs, red eyes, and the hilarity of growing weed instead of blooms, the questions remain about how to go about growing your own marijuana, if you want to.
Amendment 64 allows home cultivation of marijuana, up to six plants per adult. (Denver’s rules allow a household of two or more adults to cultivate a total of a dozen cannabis plants.) That’s going to appeal to those partakers who are old enough to be leery about openly buying a substance that still remains, on the federal level, strictly illegal.
And it’s going to appeal to many who live in a Colorado town or county that doesn’t allow retail stores — and there are quite a few of those, especially on the Eastern Plains. Even within the county of Denver, the retail marijuana scene is a confusing patchwork: Legal in Glendale, but not in Englewood; legal in Denver but not yet in Edgewater.
Even in jurisdictions that don’t allow retail marijuana shops, it’s legal to grow your own on your own property, within the specifications of Amendment 64.
But for home growers, there’s been the problem of where to acquire seeds or cuttings. Unless they received it as a gift, they couldn’t obtain cannabis plant material without risking a step on the wrong side of the law. That changes on Jan. 1 when retail marijuana shops will open and can legally sell plants and seed.
Still, it’s not like home growers can call a Colorado State University extension agent for growing advice; those experts are prohibited from answering any questions related to marijuana. Even though Amendment 64 specifically allows advice on growing marijuana, greenhouse and nursery staff often shy away from the subject.
“It’s crazy,” said Sharon Harris, executive director of the Colorado Nursery and Greenhouse Association.
“We started getting those calls when the bill first passed for medical marijuana, and our attorney advised us not to talk about it. It’s legal in Colorado, but still a federal offense. We’re waiting to see how the legal retail sales work out, but our position will not change until the U.S. attorney general says, ‘OK, here’s the deal.’ It’s quite a quagmire.”
So don’t expect help from the usual horticultural resources. Instead, look at hydroponic indoor gardening retail outlets, and start-ups like Cannabis University, which offers a $250 one-day class in possessing and growing marijuana.
Be advised: Under the law, growing your own marijuana requires keeping your plants in an “enclosed, locked space” that is not open or public. That’s pretty broadly written, but a safe interpretation would mean a basement room or closet that can be locked.
No more than three of your plants can be in the flowering stage at one time. And it remains illegal to sell marijuana you grow.
To answer basic questions for novice pot-growers (potters?), we interviewed George Archambault, owner of MileHydro, Ben Holmes of Centennial Seeds, and Michelle LaMay of Cannabis University.
Q: What will I need to start growing weed?
Holmes: The basics are one container per plant; potting soil; fertilizer; a good-quality grow light; and seeds.
Archambault: Ideally, you’ll have a controlled environment, with fresh air coming in through a ventilation system and exhaust air going out the opposite end of the room, because plants don’t do well in stagnant air. If you use a controlled environment like a tent or cabinet, you’ll want a thermostat to make sure the room stays at the same temperature instead of getting too hot.
LaMay: A grow light with a vegging bulb and flowering bulb, a controlled environment, like a room or a tent; nutrient supplements; an outside air source; a carbon filter; a thermometer; an oscillating fan to move the air about; a can fan to pull air out through the carbon filter; timers; a PH tester for the water; a five-gallon water container; pots; growing medium; tarps for the floor, even with a grow tent; and only highest-quality extension cords, if you must use extension cords at all.
Q: So what’ll that cost?
Holmes: For a very basic set-up, around $500. Figure $20 for the containers, $40 or so for the soil, another $40 for the fertilizer and nutrients, $300 for a decent grow light, another $100 for a vegging bulb and a flowering bulb. Figure on spending $5 to $10 per seed, but prices vary widely. Some seeds cost $1,000 apiece.
Archambault: Five hundred is cutting a lot of corners. I’d say more like $1,000.
Q: Does it make more sense to try to grow hydroponically?
Archambault: I don’t advise new growers to start right in with hydroponics. That means spending at least $1,000 on equipment, and that’s a lot for a beginner. And you’re out all that money if you’re not successful.
Q: How much space would a $500 dirt set-up require?
Holmes: That’d be for a 4-by-4-foot area, so you’ll need only one grow light, plus one grow light with a white-blue vegging light bulb and an orange-red flowering light bulb.
Q: What’s a “vegging light bulb”?
Holmes: In indoor gardens, you mimic the spring and summer growing period with a light that’s on the white/blue spectrum. In the vegging state, you’re encouraging the plant to produce leaves, with a goal of growing the plant to half the size you want it to be when you harvest it. The rule of thumb is giving the plant 18 hours of light in the vegging [short for vegetative growth] stage. So if you want a 3-foot-tall plant at the harvest stage, you want to veg it until the plant is a foot and a half tall.
Q: Then what?
Holmes: When it reaches half the size you want it to be, then you have to trick it into flowering by making the plant think it’s fall. The flower is what people want from a marijuana plant, because you harvest the flower buds. So then you switch to the orange-red light bulb. That makes the plant think it’s fall, and it will induce flowering. During the flowering stage, you’ll want to give it 12 hours of light on, and rest it in the dark for 12 hours.
Q: That sounds like a ton of work. Is it easier to start with cuttings?
LaMay: Cuttings are easily accessible from friends or the medical marijuana dispensary or, soon, the retail store. They are about $10 each. They must be quarantined and doused aggressively with organic neem oil over 20 days.
Archambault: Start from seeds. I’ve never met anyone who bought a clone from a dispensary that wasn’t infested with spider mites or powdery mildew. It’s an indoor growing issue. The worst thing you can do is buy a plant that has a lot of insects.
Holmes: No! Start with seeds. We urge people not to buy cuttings, and my dispensary clients will hate me for saying that, but the worst thing you can do is buy their cuttings because they’re infested.
Q: Where can I buy seeds?
Archambault: After Jan. 1, 2014, you can buy cannabis seeds in Colorado without a medical marijuana card. Seeds and cuttings will be sold at state-licensed marijuana retail stores. But remember, you can’t tell whether a seed is male or female. You have to wait until it germinates. It takes about a month to see the telltale signs of the first budding flowers. The males only grow leaves.
Q: Why does it matter whether the seed is male or female?
Holmes: Only the female seeds produce flowers, which is the crop you want. Some companies sell what they call “feminized seeds” that have a higher probability of being female. But regularly-bred seeds is what we recommend.
Q: Is growing marijuana comparable to starting tomatoes or other garden plants?
Holmes: Yes, it’s like growing a tomato. Marijuana is a plant that’s very sensitive to over-feeding. You need to lime the soil, because they don’t like acid soil. And I just use Miracle-Gro. I use that on everything. If you ate my tomatoes or zucchini, or smoked my weed, you’d come back for more. You don’t need to buy a lot of supplements and amendments and products. You need a bucket of dirt and a well-thought-out fertilizer plan, not 20 different fertilizers and nutrients. The best thing is to keep it simple.
Q: I have relatives who live in states that haven’t legalized marijuana. Will they be able to tell I’m growing it?
Archambault: Well, the plants are still going to release that telltale aroma. Hydroponic stores sell odor mitigation systems. Carbon filters are the most effective. If your grow system is in a basement room that nobody uses, maybe they won’t notice.
Q: What about pets?
Holmes: Cats will be kind of curious. Pets are disease-carriers, and your pet could infect your plants. Make your grow room off-limits to your pets.
Claire Martin: 303-954-1477, [email protected] or twitter.com/byclairemartin |
/*
 * 1. Find pivot by partitioning
* 2. Perform recursive sort
*/
func quicksort(arr []int, low, high int) {
if low < high {
pi := partition(arr, low, high)
quicksort(arr, low, pi-1)
quicksort(arr, pi+1, high)
}
} |
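
// partition is referenced above but not included in the original snippet;
// this is a minimal Lomuto-scheme sketch of the assumed helper. It uses
// arr[high] as the pivot, moves smaller elements to its left, and returns
// the pivot's final index.
func partition(arr []int, low, high int) int {
	pivot := arr[high]
	i := low - 1
	for j := low; j < high; j++ {
		if arr[j] < pivot {
			i++
			arr[i], arr[j] = arr[j], arr[i]
		}
	}
	arr[i+1], arr[high] = arr[high], arr[i+1]
	return i + 1
}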
// UpdateAutoScalingGroup applies the given autoscaling.UpdateAutoScalingGroupInput
// to the group, returning an error if the underlying API call fails.
func (c *AutoScaling) UpdateAutoScalingGroup(input *autoscaling.UpdateAutoScalingGroupInput) error {
if _, err := c.Client.UpdateAutoScalingGroup(input); err != nil {
return fmt.Errorf("update autoscaling group: %v", err)
}
return nil
} |
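
// Example usage (the group name and sizes are hypothetical), assuming c is
// an initialized *AutoScaling wrapper around the AWS SDK client:
//
//	input := &autoscaling.UpdateAutoScalingGroupInput{
//		AutoScalingGroupName: aws.String("my-asg"),
//		MinSize:              aws.Int64(1),
//		MaxSize:              aws.Int64(10),
//	}
//	if err := c.UpdateAutoScalingGroup(input); err != nil {
//		log.Fatal(err)
//	}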
<reponame>zglue/zglue_nuttx<filename>arch/arm/include/nrf52/nrf52832_irq.h
/****************************************************************************************************
 * arch/arm/include/nrf52/nrf52832_irq.h
*
* Copyright (C) 2018 <NAME>. All rights reserved.
* Author: <NAME> <<EMAIL>>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* 3. Neither the name NuttX nor the names of its contributors may be
* used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************************************/
#ifndef __ARCH_ARM_INCLUDE_NRF52_NRF52_IRQ_H
#define __ARCH_ARM_INCLUDE_NRF52_NRF52_IRQ_H
/****************************************************************************************************
* Included Files
****************************************************************************************************/
/****************************************************************************************************
* Pre-processor Definitions
****************************************************************************************************/
/* Cortex-M4 External interrupts (vectors >= 16) */
#define NRF52_IRQ_POWER_CLOCK (NRF52_IRQ_EXTINT+0)  /* Power control and clock control */
#define NRF52_IRQ_RADIO       (NRF52_IRQ_EXTINT+1)  /* 2.4 GHz radio */
#define NRF52_IRQ_UART0       (NRF52_IRQ_EXTINT+2)  /* UART/UARTE 0 */
#define NRF52_IRQ_SPI_TWI_0   (NRF52_IRQ_EXTINT+3)  /* SPI/SPIM/SPIS/TWI/TWIM/TWIS 0 */
#define NRF52_IRQ_SPI_TWI_1   (NRF52_IRQ_EXTINT+4)  /* SPI/SPIM/SPIS/TWI/TWIM/TWIS 1 */
#define NRF52_IRQ_NFCT        (NRF52_IRQ_EXTINT+5)  /* NFC tag interface */
#define NRF52_IRQ_GPIOTE      (NRF52_IRQ_EXTINT+6)  /* GPIO tasks and events */
#define NRF52_IRQ_SAADC       (NRF52_IRQ_EXTINT+7)  /* Successive-approximation ADC */
#define NRF52_IRQ_TIMER0      (NRF52_IRQ_EXTINT+8)  /* Timer 0 */
#define NRF52_IRQ_TIMER1      (NRF52_IRQ_EXTINT+9)  /* Timer 1 */
#define NRF52_IRQ_TIMER2      (NRF52_IRQ_EXTINT+10) /* Timer 2 */
#define NRF52_IRQ_RTC0        (NRF52_IRQ_EXTINT+11) /* Real-time counter 0 */
#define NRF52_IRQ_TEMP        (NRF52_IRQ_EXTINT+12) /* Temperature sensor */
#define NRF52_IRQ_RNG         (NRF52_IRQ_EXTINT+13) /* Random number generator */
#define NRF52_IRQ_ECB         (NRF52_IRQ_EXTINT+14) /* AES ECB mode encryption */
#define NRF52_IRQ_CCM_AAR     (NRF52_IRQ_EXTINT+15) /* AES CCM mode encryption / accelerated address resolver */
#define NRF52_IRQ_WDT         (NRF52_IRQ_EXTINT+16) /* Watchdog timer */
#define NRF52_IRQ_RTC1        (NRF52_IRQ_EXTINT+17) /* Real-time counter 1 */
#define NRF52_IRQ_QDEC        (NRF52_IRQ_EXTINT+18) /* Quadrature decoder */
#define NRF52_IRQ_COMP_LPCOMP (NRF52_IRQ_EXTINT+19) /* Comparator / low-power comparator */
#define NRF52_IRQ_SWI0_EGU0   (NRF52_IRQ_EXTINT+20) /* Software interrupt 0 / event generator unit 0 */
#define NRF52_IRQ_SWI1_EGU1   (NRF52_IRQ_EXTINT+21) /* Software interrupt 1 / event generator unit 1 */
#define NRF52_IRQ_SWI2_EGU2   (NRF52_IRQ_EXTINT+22) /* Software interrupt 2 / event generator unit 2 */
#define NRF52_IRQ_SWI3_EGU3   (NRF52_IRQ_EXTINT+23) /* Software interrupt 3 / event generator unit 3 */
#define NRF52_IRQ_SWI4_EGU4   (NRF52_IRQ_EXTINT+24) /* Software interrupt 4 / event generator unit 4 */
#define NRF52_IRQ_SWI5_EGU5   (NRF52_IRQ_EXTINT+25) /* Software interrupt 5 / event generator unit 5 */
#define NRF52_IRQ_TIMER3      (NRF52_IRQ_EXTINT+26) /* Timer 3 */
#define NRF52_IRQ_TIMER4      (NRF52_IRQ_EXTINT+27) /* Timer 4 */
#define NRF52_IRQ_PWM0        (NRF52_IRQ_EXTINT+28) /* Pulse width modulation unit 0 */
#define NRF52_IRQ_PDM         (NRF52_IRQ_EXTINT+29) /* Pulse density modulation (digital microphone) interface */
/* 30-31 Reserved */
#define NRF52_IRQ_MWU         (NRF52_IRQ_EXTINT+32) /* Memory watch unit */
#define NRF52_IRQ_PWM1        (NRF52_IRQ_EXTINT+33) /* Pulse width modulation unit 1 */
#define NRF52_IRQ_PWM2        (NRF52_IRQ_EXTINT+34) /* Pulse width modulation unit 2 */
#define NRF52_IRQ_SPI2        (NRF52_IRQ_EXTINT+35) /* SPI/SPIM/SPIS 2 */
#define NRF52_IRQ_RTC2        (NRF52_IRQ_EXTINT+36) /* Real-time counter 2 */
#define NRF52_IRQ_I2S         (NRF52_IRQ_EXTINT+37) /* Inter-IC sound interface */
#define NRF52_IRQ_FPU         (NRF52_IRQ_EXTINT+38) /* Floating point unit exception */
#define NRF52_IRQ_NEXTINT     (39)
#define NRF52_IRQ_NIRQS       (NRF52_IRQ_EXTINT+NRF52_IRQ_NEXTINT)
/* Total number of IRQ numbers */
#define NR_VECTORS NRF52_IRQ_NIRQS
#define NR_IRQS NRF52_IRQ_NIRQS
#endif /* __ARCH_ARM_INCLUDE_NRF52_NRF52_IRQ_H */
|
public class PrintAreaVisitor implements ShapeVisitor {
@Override
public void visit(Rectangle rectangle) {
double area = rectangle.getLength() * rectangle.getWidth();
System.out.println("Rectangle area: " + area);
}
@Override
public void visit(Circle circle) {
double area = Math.PI * (circle.getRadius() * circle.getRadius());
System.out.println("Circle area: " + area);
}
}
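
// ShapeVisitor, Rectangle, and Circle are not shown in the original snippet;
// the following is a minimal sketch of the assumed types. Each element class
// exposes an accept method that performs the double dispatch back to the
// visitor.
interface ShapeVisitor {
    void visit(Rectangle rectangle);
    void visit(Circle circle);
}

class Rectangle {
    private final double length;
    private final double width;

    Rectangle(double length, double width) {
        this.length = length;
        this.width = width;
    }

    double getLength() { return length; }
    double getWidth() { return width; }

    void accept(ShapeVisitor visitor) { visitor.visit(this); }
}

class Circle {
    private final double radius;

    Circle(double radius) { this.radius = radius; }

    double getRadius() { return radius; }

    void accept(ShapeVisitor visitor) { visitor.visit(this); }
}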
|
<filename>loader/loader_test.go
package loader
import (
	"io/ioutil"
	"os"
	"path/filepath"
	"sort"
	"sync"
	"sync/atomic"
	"testing"
	"time"

	stats "github.com/lyft/gostats"
	logger "github.com/sirupsen/logrus"
	"github.com/stretchr/testify/require"
)
var nullScope = stats.NewStore(stats.NewNullSink(), false)
func init() {
lvl, _ := logger.ParseLevel("DEBUG")
logger.SetLevel(lvl)
}
func makeFileInDir(assert *require.Assertions, path string, text string) {
tmpdir, err := ioutil.TempDir("", "")
assert.NoError(err)
defer os.RemoveAll(tmpdir)
tmpfile := filepath.Join(tmpdir, filepath.Base(path))
err = ioutil.WriteFile(tmpfile, []byte(text), os.ModePerm)
assert.NoError(err)
err = os.MkdirAll(filepath.Dir(path), os.ModeDir|os.ModePerm)
assert.NoError(err)
// We use rename since creating a file and writing to it is too slow.
// This is because creating the directory triggers the loader's watcher
// causing it to scan the directory and if we need to create + write to
// the file there is a chance the loader will store the contents of an
// empty file, which is a race.
//
// This is okay because in prod we symlink files into place so we don't
// need to worry about reading empty/partial files.
//
err = os.Rename(tmpfile, path)
assert.NoError(err)
}
func TestNilRuntime(t *testing.T) {
assert := require.New(t)
loader := New("", "", nullScope, &SymlinkRefresher{RuntimePath: ""}, AllowDotFiles)
snapshot := loader.Snapshot()
assert.Equal("", snapshot.Get("foo"))
assert.Equal(uint64(100), snapshot.GetInteger("bar", 100))
assert.True(snapshot.FeatureEnabled("baz", 100))
assert.False(snapshot.FeatureEnabled("blah", 0))
}
func TestSymlinkRefresher(t *testing.T) {
assert := require.New(t)
// Setup base test directory.
tempDir, err := ioutil.TempDir("", "runtime_test")
assert.NoError(err)
defer os.RemoveAll(tempDir)
// Make test files for first runtime snapshot.
makeFileInDir(assert, tempDir+"/testdir1/app/file1", "hello")
makeFileInDir(assert, tempDir+"/testdir1/app/dir/file2", "world")
makeFileInDir(assert, tempDir+"/testdir1/app/dir2/file3", "\n 34 ")
assert.NoError(err)
err = os.Symlink(tempDir+"/testdir1", tempDir+"/current")
assert.NoError(err)
loader := New(tempDir+"/current", "app", nullScope, &SymlinkRefresher{RuntimePath: tempDir + "/current"}, AllowDotFiles)
runtime_update := make(chan int)
loader.AddUpdateCallback(runtime_update)
snapshot := loader.Snapshot()
assert.Equal("", snapshot.Get("foo"))
assert.Equal(uint64(5), snapshot.GetInteger("foo", 5))
assert.Equal("hello", snapshot.Get("file1"))
assert.Equal(uint64(6), snapshot.GetInteger("file1", 6))
assert.Equal("world", snapshot.Get("dir.file2"))
assert.Equal(uint64(7), snapshot.GetInteger("dir.file2", 7))
assert.Equal(uint64(34), snapshot.GetInteger("dir2.file3", 100))
info, _ := os.Stat(tempDir + "/testdir1/app/file1")
assert.Equal(info.ModTime(), snapshot.GetModified("file1"))
keys := snapshot.Keys()
sort.Strings(keys)
assert.EqualValues([]string{"dir.file2", "dir2.file3", "file1"}, keys)
	// Make test files for the second runtime snapshot.
makeFileInDir(assert, tempDir+"/testdir2/app/file1", "hello2")
makeFileInDir(assert, tempDir+"/testdir2/app/dir/file2", "world2")
makeFileInDir(assert, tempDir+"/testdir2/app/dir2/file3", "100")
err = os.Symlink(tempDir+"/testdir2", tempDir+"/current_new")
assert.NoError(err)
err = os.Rename(tempDir+"/current_new", tempDir+"/current")
assert.NoError(err)
<-runtime_update
time.Sleep(100 * time.Millisecond)
snapshot = loader.Snapshot()
assert.Equal("", snapshot.Get("foo"))
assert.Equal("hello2", snapshot.Get("file1"))
assert.Equal("world2", snapshot.Get("dir.file2"))
assert.Equal(uint64(100), snapshot.GetInteger("dir2.file3", 0))
assert.True(snapshot.FeatureEnabled("dir2.file3", 0))
keys = snapshot.Keys()
sort.Strings(keys)
assert.EqualValues([]string{"dir.file2", "dir2.file3", "file1"}, keys)
}
func TestIgnoreDotfiles(t *testing.T) {
assert := require.New(t)
// Setup base test directory.
tempDir, err := ioutil.TempDir("", "runtime_test")
assert.NoError(err)
defer os.RemoveAll(tempDir)
// Make test files for runtime snapshot.
makeFileInDir(assert, tempDir+"/testdir1/app/dir3/.file4", ".file4")
makeFileInDir(assert, tempDir+"/testdir1/app/.dir/file5", ".dir")
assert.NoError(err)
loaderIgnoreDotfiles := New(tempDir+"/testdir1", "app", nullScope, &SymlinkRefresher{RuntimePath: tempDir + "/testdir1"}, IgnoreDotFiles)
snapshot := loaderIgnoreDotfiles.Snapshot()
assert.Equal("", snapshot.Get("dir3..file4"))
assert.Equal("", snapshot.Get(".dir.file5"))
loaderIncludeDotfiles := New(tempDir+"/testdir1", "app", nullScope, &SymlinkRefresher{RuntimePath: tempDir + "/testdir1"}, AllowDotFiles)
snapshot = loaderIncludeDotfiles.Snapshot()
assert.Equal(".file4", snapshot.Get("dir3..file4"))
assert.Equal(".dir", snapshot.Get(".dir.file5"))
}
func TestDirectoryRefresher(t *testing.T) {
assert := require.New(t)
// Setup base test directory.
tempDir, err := ioutil.TempDir("", "dir_runtime_test")
assert.NoError(err)
defer os.RemoveAll(tempDir)
appDir := tempDir + "/app"
err = os.MkdirAll(appDir, os.ModeDir|os.ModePerm)
assert.NoError(err)
loader := New(tempDir, "app", nullScope, &DirectoryRefresher{}, AllowDotFiles)
runtime_update := make(chan int)
loader.AddUpdateCallback(runtime_update)
snapshot := loader.Snapshot()
assert.Equal("", snapshot.Get("file1"))
makeFileInDir(assert, appDir+"/file1", "hello")
// Wait for the update
<-runtime_update
snapshot = loader.Snapshot()
assert.Equal("hello", snapshot.Get("file1"))
// Mimic a file change in directory
makeFileInDir(assert, appDir+"/file2", "hello2")
// Wait for the update
<-runtime_update
snapshot = loader.Snapshot()
assert.Equal("hello2", snapshot.Get("file2"))
// Write to the file
f, err := os.OpenFile(appDir+"/file2", os.O_RDWR, os.ModeAppend)
assert.NoError(err)
_, err = f.WriteString("hello3")
assert.NoError(err)
f.Sync()
// Wait for the update
<-runtime_update
snapshot = loader.Snapshot()
assert.Equal("hello3", snapshot.Get("file2"))
}
func TestOnRuntimeChanged(t *testing.T) {
tmpdir, err := ioutil.TempDir("", "goruntime-*")
if err != nil {
t.Fatal(err)
}
defer func() {
if err := os.RemoveAll(tmpdir); err != nil {
t.Error(err)
}
}()
dir, base := filepath.Split(tmpdir)
ll := Loader{
watchPath: dir,
subdirectory: base,
stats: newLoaderStats(stats.NewStore(stats.NewNullSink(), false)),
}
const Timeout = time.Second * 3
t.Run("Nil", func(t *testing.T) {
defer func() {
if e := recover(); e == nil {
t.Fatal("expected panic")
}
}()
ll.AddUpdateCallback(nil)
})
t.Run("One", func(t *testing.T) {
cb := make(chan int, 1)
ll.AddUpdateCallback(cb)
go ll.onRuntimeChanged()
select {
case i := <-cb:
if i != 1 {
t.Errorf("Callback: got: %d want: %d", i, 1)
}
case <-time.After(Timeout):
t.Fatalf("Time out after: %s", Timeout)
}
})
t.Run("Blocking", func(t *testing.T) {
done := make(chan struct{})
cb := make(chan int)
ll.AddUpdateCallback(cb)
go func() {
ll.onRuntimeChanged()
close(done)
}()
select {
case <-done:
// Ok
case <-time.After(Timeout):
t.Fatalf("Time out after: %s", Timeout)
}
})
t.Run("Many", func(t *testing.T) {
cbs := make([]chan int, 10)
for i := range cbs {
cbs[i] = make(chan int, 1)
ll.AddUpdateCallback(cbs[i])
}
go ll.onRuntimeChanged()
for _, cb := range cbs {
select {
case i := <-cb:
if i != 1 {
t.Errorf("Callback: got: %d want: %d", i, 1)
}
case <-time.After(Timeout):
t.Fatalf("Time out after: %s", Timeout)
}
}
})
t.Run("ManyDelayed", func(t *testing.T) {
total := new(int64)
wg := new(sync.WaitGroup)
ready := make(chan struct{})
cbs := make([]chan int, 10)
for i := range cbs {
cbs[i] = make(chan int) // blocking
ll.AddUpdateCallback(cbs[i])
wg.Add(1)
go func(cb chan int) {
defer wg.Done()
<-ready
atomic.AddInt64(total, int64(<-cb))
}(cbs[i])
}
done := make(chan struct{})
go func() {
ll.onRuntimeChanged()
close(done)
}()
select {
case <-done:
// Ok
case <-time.After(Timeout):
t.Fatalf("Time out after: %s", Timeout)
}
if n := atomic.LoadInt64(total); n != 0 {
t.Errorf("Expected %d channels to be signaled got: %d", 0, n)
}
close(ready)
wg.Wait()
if n := atomic.LoadInt64(total); n != 10 {
t.Errorf("Expected %d channels to be signaled got: %d", 10, n)
}
})
t.Run("ManyBlocking", func(t *testing.T) {
cbs := make([]chan int, 10)
for i := range cbs {
cbs[i] = make(chan int)
ll.AddUpdateCallback(cbs[i])
}
done := make(chan struct{})
go func() {
ll.onRuntimeChanged()
close(done)
}()
select {
case <-done:
// Ok
case <-time.After(Timeout):
t.Fatalf("Time out after: %s", Timeout)
}
})
}
func BenchmarkSnapshot(b *testing.B) {
var ll Loader
for i := 0; i < b.N; i++ {
ll.Snapshot()
}
}
func BenchmarkSnapshot_Parallel(b *testing.B) {
ll := new(Loader)
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
ll.Snapshot()
}
})
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.plugin.bigquery;
import com.facebook.presto.common.predicate.TupleDomain;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.OptionalLong;
import static com.google.common.base.MoreObjects.toStringHelper;
import static java.util.Objects.requireNonNull;
public class BigQueryTableHandle
implements ConnectorTableHandle
{
private final String projectId;
private final String schemaName;
private final String tableName;
private final String type;
private final TupleDomain<ColumnHandle> constraint;
private final Optional<List<ColumnHandle>> projectedColumns;
private final OptionalLong limit;
@JsonCreator
public BigQueryTableHandle(
@JsonProperty("projectId") String projectId,
@JsonProperty("schemaName") String schemaName,
@JsonProperty("tableName") String tableName,
@JsonProperty("type") String type,
@JsonProperty("constraint") TupleDomain<ColumnHandle> constraint,
@JsonProperty("projectedColumns") Optional<List<ColumnHandle>> projectedColumns,
@JsonProperty("limit") OptionalLong limit)
{
this.projectId = requireNonNull(projectId, "projectId is null");
this.schemaName = requireNonNull(schemaName, "schemaName is null");
this.tableName = requireNonNull(tableName, "tableName is null");
this.type = requireNonNull(type, "type is null");
this.constraint = requireNonNull(constraint, "constraint is null");
this.projectedColumns = requireNonNull(projectedColumns, "projectedColumns is null");
this.limit = requireNonNull(limit, "limit is null");
}
public static BigQueryTableHandle from(TableInfo tableInfo)
{
TableId tableId = tableInfo.getTableId();
String type = tableInfo.getDefinition().getType().toString();
        return new BigQueryTableHandle(tableId.getProject(), tableId.getDataset(), tableId.getTable(), type, TupleDomain.all(), Optional.empty(), OptionalLong.empty());
}
@JsonProperty
public String getProjectId()
{
return projectId;
}
@JsonProperty
public String getSchemaName()
{
return schemaName;
}
@JsonProperty
public String getTableName()
{
return tableName;
}
@JsonProperty
public String getType()
{
return type;
}
@JsonProperty
public TupleDomain<ColumnHandle> getConstraint()
{
return constraint;
}
@JsonProperty
public Optional<List<ColumnHandle>> getProjectedColumns()
{
return projectedColumns;
}
@JsonProperty
public OptionalLong getLimit()
{
return limit;
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BigQueryTableHandle that = (BigQueryTableHandle) o;
return Objects.equals(projectId, that.projectId) &&
Objects.equals(schemaName, that.schemaName) &&
Objects.equals(tableName, that.tableName) &&
                Objects.equals(type, that.type) &&
Objects.equals(constraint, that.constraint) &&
Objects.equals(projectedColumns, that.projectedColumns) &&
Objects.equals(limit, that.limit);
}
@Override
public int hashCode()
{
return Objects.hash(projectId, schemaName, tableName, type, constraint, projectedColumns, limit);
}
@Override
public String toString()
{
return toStringHelper(this)
.add("projectId", projectId)
.add("schemaName", schemaName)
.add("tableName", tableName)
.add("type", type)
.add("constraint", constraint)
.add("projectedColumns", projectedColumns)
.add("limit", limit)
.toString();
}
public TableId getTableId()
{
return TableId.of(projectId, schemaName, tableName);
}
BigQueryTableHandle withConstraint(TupleDomain<ColumnHandle> newConstraint)
{
return new BigQueryTableHandle(projectId, schemaName, tableName, type, newConstraint, projectedColumns, limit);
}
BigQueryTableHandle withProjectedColumns(List<ColumnHandle> newProjectedColumns)
{
return new BigQueryTableHandle(projectId, schemaName, tableName, type, constraint, Optional.of(newProjectedColumns), limit);
}
}
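// Usage sketch (illustrative, not from the connector itself): a handle built via
// from(tableInfo) starts unconstrained and is refined during planning, e.g.
// handle.withConstraint(domain).withProjectedColumns(columns).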
|
package com.grosner.painter.slider;
import android.graphics.Color;
import android.os.Build;
import android.support.v7.app.ActionBar;
import com.grosner.painter.exception.UseActionBarSliderException;
/**
* Created by: andrewgrosner
* Date: 7/17/14.
* Contributors: {}
* Description:
*/
public class SliderUtils {
    /**
     * Based on how far the drawer has slid, this calculates the color to display.
     * @param slideOffset the current slide offset, from 0 (closed) to 1 (fully open)
     * @param mStartColor the color shown at offset 0
     * @param mEndColor   the color shown at offset 1
     * @return the linearly interpolated RGB color for the given offset
     */
public static int calculateColor(float slideOffset, int mStartColor, int mEndColor){
int red = (int)Math.abs((slideOffset * Color.red(mEndColor)) + ((1 - slideOffset) * Color.red(mStartColor)));
int green = (int)Math.abs((slideOffset * Color.green(mEndColor)) + ((1 - slideOffset) * Color.green(mStartColor)));
int blue = (int)Math.abs((slideOffset * Color.blue(mEndColor)) + ((1 - slideOffset) * Color.blue(mStartColor)));
return Color.rgb(red, green, blue);
}
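    // Illustrative example: halfway through a red-to-blue slide,
    // calculateColor(0.5f, Color.RED, Color.BLUE) yields roughly rgb(127, 0, 127).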
    /**
     * Checks that the given object is not an ActionBar (support or native);
     * throws a {@link UseActionBarSliderException} if it is.
     * @param object the object to check
     */
static void checkForActionBarObject(Object object){
boolean isActionBar;
isActionBar = (object instanceof ActionBar);
if(!isActionBar && Build.VERSION.SDK_INT>=11){
isActionBar = (object instanceof android.app.ActionBar);
}
if(isActionBar){
throw new UseActionBarSliderException();
}
}
}
|
import json
import logging
from copy import copy


class ConfigLoader:
"""
Class that provides static methods to load configuration descriptions (in form of dictionaries) from JSON files
"""
logger = logging.getLogger('ConfigLoader')
_dfl_config = {
"RESULTS_DIR": 'results',
"SKIP_EXPIRED": True,
"RETRY_TASKS": True,
"RETRY_TASKS_ON_ERROR": False,
"ABRUPT_TASK_KILL": True,
"RECOVER_AFTER_DISCONNECT": False,
"LOG_OUTPUTS": True,
"ENABLE_ROOT": False,
"SERVER_PORT": 30000,
"MAX_REQUESTS": 20,
"RETRY_INTERVAL": 600,
"RETRY_PERIOD": 30,
"PRE_SEND_INTERVAL": 600,
"WORKLOAD_PADDING": 20,
"SESSION_WAIT": 60,
"NUMA_CORES_FAULTS": None,
"NUMA_CORES_BENCHMARKS": None,
"HOSTS": [],
"AUX_COMMANDS": []
}
@staticmethod
def getConfig(file=None):
"""
Loads a configuration dictionary from an input file, or return the default configuration
:param file: Path to a JSON file containing configuration entries for FINJ. If None is supplied, the default
configuration is used
:return: A configuration dictionary for FINJ
"""
if file is None:
return copy(ConfigLoader._dfl_config)
else:
cfg = copy(ConfigLoader._dfl_config)
            try:
                with open(file) as f:
                    cfg.update(json.load(f))
            except (FileNotFoundError, IOError, ValueError):
                ConfigLoader.logger.error("Configuration file %s cannot be read", file)
return cfg |
Navy SEALs are shown training in this undated photo.
The first female Navy SEAL candidates could arrive at pre-training as early as mid-May, and the Marines will start sending women ground combat candidates to boot camp as soon as April, according to time lines released Thursday by the Defense Department.
That would mean the first women could start the infamously tough SEAL training course in Coronado in late August — potentially being assigned to SEAL teams as early as October 2017, if they make the cut.
Defense Secretary Ashton Carter's office said he has approved implementation plans by each of America's armed services to allow women in nearly 220,000 previously closed combat jobs.
It's a move building since 2013, when then-Defense Secretary Leon Panetta announced that he would lift the ban on women in combat. Many observers see it as an acknowledgment that women have proven themselves in the line of fire during the Iraq and Afghanistan wars.
"Having reviewed their exceptionally thorough work, I am pleased all of the services developed plans that will effectively carry out this change and make us even better in the future," Carter said in a statement.
A Marine Corps spokesman said recruiters are poised to offer ground combat contracts to potential female recruits as soon as they get the signal from Marine Corps Commandant Gen. Robert Neller.
"They are ready," said Capt. Philip Kulczewski, a Marine Corps headquarters spokesman.
Women who want to seek ground combat jobs will have to pass the same rigorous initial strength test taken by men, including three pull-ups, among other things. Females who don't want to become infantry Marines aren't required to do pull-ups at present.
Pull-ups, a classic test of upper-body strength, have been considered a stumbling block for women. Critics of the current system have said the military has done women a disservice by setting a lower bar for them.
Lt. Col. Kate Germano is one of those.
Germano is former commander of the 4th Recruit Training Battalion at Marine Corps Recruit Depot Parris Island, S.C., the sole training unit for women recruits in the Marine Corps. She was removed for being "hostile," but her supporters said she was targeted for pushing tough reforms to hold women recruits to higher standards.
"I think the biggest takeaway that I have for the plan is that it doesn't address the overall quality issues we have with female recruits right out the gate," Germano said.
It would make more sense to hold all recruits, men and women, to the more rigorous standards regardless of their assignment, she said. The plan "only increases the standards for women who self-identify for ground combat."
On the other hand, former Marine Capt. Jacquie Atkinson said she sees the single standard for men and women headed to combat arms positions as an important step forward.
"It's important for leaders to not give the impression that anyone gets a free ride," she said.
When she led a combat engineering unit, she was one of the few women around, and in some instances, the only woman some of her colleagues ever served with, let alone reported to.
"I had a very good experience there and I upheld the standards. I was stronger in some cases and I ran faster than some men," said Atkinson, who is now running for Congress as a Republican in San Diego's 52nd District.
As women start entering Marine infantry units, the goal will be to assign two or more junior enlisted females to the same unit. If that's not possible because of low numbers, a single woman will still be assigned, because at least two female leaders are expected to be present in a unit before her arrival.
On the Marine officer side, women who want to lead infantry units will have to pass the Infantry Officer Course. In the three years the Marine Corps spent evaluating women in infantry roles, female hopefuls struggled to succeed in the grueling 10-week curriculum, which includes long hikes and an obstacle course.
An official comment released on Thursday said, "As a result of our research, the Marine Corps instituted clearly defined gender neutral, operationally relevant, individual performance standards across the spectrum of Marine training."
The U.S. Army has a significant chunk of the jobs that will now open to women.
The Army's plan shows it initiating gender-neutral training this month and starting to assign women to operational units next February.
The first applications from female Navy officers who want to join the nation's 2,750 elite SEALs were due March 4. Officials at Coronado's Naval Special Warfare Command said Thursday they won't comment on the gender of applicants.
A SEAL spokesman said the dates mentioned in the plan simply reflect the first opportunities open to female candidates.
"It would be premature to speculate as to when the first women would be available for SEAL or SWCC (special warfare combatant-craft crewmen) training, or when the first female SEALs would receive their Trident pins," Cmdr. Jason Salata said.
"Any expansion of the force will be a deliberate process; it will not take hours, days or weeks, but months and potentially years."
In the broad public debate surrounding females entering the last bastions of all-male combat, common questions include how units will accommodate women in what is generally a no-privacy, cheek-to-jowl atmosphere in the field.
The Naval Special Warfare Command plan includes details on how to handle that.
During initial training, SEAL officials plan to house female SEAL candidates on a separate floor or wing most of the time and add extra security cameras. The Navy will also build separate bathrooms at San Clemente Island, where special warfare sailors do advanced maneuvers. Total cost: $275,000.
A memo from Rear Adm. Brian Losey, the top SEAL commander, said that history has shown that quotas don't work and won't be attempted and that physical standards will remain neutral as to gender.
The current average physical screening test score for enlisted SEAL applicants is 18 pull-ups, 82 sit-ups and 87 push-ups, each in two minutes.
An estimated 65 percent of male officer candidates survive the 21-week SEAL training course known as Basic Underwater Demolition/SEAL, or BUD/S, and go on to graduate.
A much lower percentage of male enlisted candidates — 28 percent — make the cut.
The full SEAL training pipeline — from pre-BUD/S preparation classes to post-BUD/S advanced courses — is 63 weeks.
Staff writer Joshua Stewart contributed to this report.
[email protected]
___
(c)2016 The San Diego Union-Tribune
Visit The San Diego Union-Tribune at www.sandiegouniontribune.com
Distributed by Tribune Content Agency, LLC. |
// src/traits/yolol_ops.rs
use crate::traits::InnerBounds;
pub trait YololOps: InnerBounds
{
fn yolol_add(self, right: Self) -> Self;
fn yolol_sub(self, right: Self) -> Self;
fn yolol_mul(self, right: Self) -> Self;
// Division may create an error (divide by 0) so we need to return an option
fn yolol_div(self, right: Self) -> Option<Self>;
fn yolol_mod(self, right: Self) -> Self;
fn would_overflow_add(self, right: Self) -> bool;
fn would_underflow_add(self, right: Self) -> bool;
fn would_overflow_mul(self, right: Self) -> bool;
fn would_underflow_mul(self, right: Self) -> bool;
}
impl<T: InnerBounds> YololOps for T
{
// For these, use this algorithm: https://stackoverflow.com/questions/199333/how-do-i-detect-unsigned-integer-multiply-overflow
// Use the checked operations to check overflow in the operations on min and max
fn yolol_add(self, right: Self) -> Self
{
match self.checked_add(&right)
{
Some(num) => num,
None if self.would_overflow_add(right) => T::max_value(),
None if self.would_underflow_add(right) => T::min_value(),
None => {
panic!("[yolol_add] Unknown failure occurred with adding values! Operation: ({} + {})", self, right)
}
}
}
fn yolol_sub(self, right: Self) -> Self
{
match self.checked_sub(&right)
{
Some(num) => num,
None if self.would_overflow_add(-right) => T::max_value(),
None if self.would_underflow_add(-right) => T::min_value(),
None => {
panic!("[yolol_sub] Unknown failure occurred with subtracting values! Operation: ({} - {})", self, right)
}
}
}
fn yolol_mul(self, right: Self) -> Self
{
match self.checked_mul(&right)
{
Some(num) => num,
None if self.would_overflow_mul(right) => T::max_value(),
None if self.would_underflow_mul(right) => T::min_value(),
None => {
panic!("[yolol_mul] Unknown failure occurred with multiplying values! Operation: ({} * {})", self, right)
}
}
}
fn yolol_div(self, right: Self) -> Option<Self>
{
match self.checked_div(&right)
{
Some(num) => Some(num),
None if right == T::zero() => None,
None if self.would_overflow_mul(right) => Some(T::max_value()),
None if self.would_underflow_mul(right) => Some(T::min_value()),
None => {
panic!("[yolol_div] Unknown failure occurred with dividing values! Operation: ({} / {})", self, right)
}
}
}
fn yolol_mod(self, right: Self) -> Self
{
match self.checked_rem(&right)
{
Some(num) => num,
None if right == T::zero() => T::zero(),
None if self.would_overflow_mul(T::one() / right) => T::max_value(),
None if self.would_underflow_mul(T::one() / right) => T::min_value(),
None => {
panic!("[yolol_mod] Unknown failure occurred with moduloing values! Operation: ({} % {})", self, right)
}
}
}
fn would_overflow_add(self, right: Self) -> bool
{
self > T::max_value() - right
}
fn would_underflow_add(self, right: Self) -> bool
{
self < T::min_value() - right
}
// TODO: add special case for flipping sign at bottom of range
fn would_overflow_mul(self, right: Self) -> bool
{
let div = match T::max_value().checked_div(&right)
{
Some(num) => num,
None => return false
};
self > div
}
// TODO: add special case for flipping sign at bottom of range
fn would_underflow_mul(self, right: Self) -> bool
{
let div = match T::min_value().checked_div(&right)
{
Some(num) => num,
None => return false
};
self < div
}
}
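// Behaviour sketch (assuming a bounded signed inner type): arithmetic clamps at the
// type bounds instead of wrapping, e.g. T::max_value().yolol_add(T::one()) == T::max_value(),
// and x.yolol_div(T::zero()) == None, so the caller decides how to surface the error.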
|
def overlay(frm, to):
    app = FS.app
    lower = ':'.join(frm)
    upper = to + '.upper'
    if already_overlayed(frm, upper):
        app('debug', 'Already overlayed', target=to)
return
work = to + '.work'
name = env.get('DA_CLS')
for k in upper, work, to:
if not exists(k):
os.mkdir(k)
overlay = [
'mount',
'-t',
'overlay',
name,
'-o',
'lowerdir=%s,upperdir=%s,workdir=%s' % (lower, upper, work),
to,
]
return FS.sp_call(*overlay) |
#-----------------------------------------------------------------------------#
#pointing.py
#
#NPS Night Skies Program
#
#Last updated: 2016/11/15
#
#This script calculates the actual pointed azimuth (AZ) and altitude (ALT) using
#the solved RA and Dec values from the image headers. If the images are not
#solved, the RA, Dec, AZ, and ALT values are interpolated.
# (1) Read in the solved RA and Dec values from the image header.
# (2) Update the coordinates to the observed date.
# (3) Translate to the azimuth and altitude given the LAST and the longitude.
# (4) Write the output to file
# (5) Insert the interpolated AZ and ALT in the output file
# (6) Update the headers with the interpolated RA and Dec if the images are
# not solved.
#
#Note: In order to use the ACP objects, the Python must be a 32-bit version.
#
#Input:
# (1) Calibrated images
#
#Output:
# (1) pointerr_%s.txt
#
#History:
# Dan Duriscoe -- Created in visual basic as "calc_pointing_error_v4.vbs"
# Li-Wei Hung -- Cleaned, improved, and translated to Python
# Davyd Betchkal -- Plotted the pointing error by image number
#
#-----------------------------------------------------------------------------#
from astropy.coordinates import SkyCoord
from astropy.io import fits
from glob import glob, iglob
from scipy.interpolate import UnivariateSpline
from win32com.client import Dispatch
import pdb
import numpy as n
import matplotlib.pyplot as plt
import os
# Local Source
import filepath
#-----------------------------------------------------------------------------#
def get_last(JD, longitude):
'''
This function calculates the local apparent sidereal time given
the Julian Date (JD) and the longitude [deg] of the observing site.
This calculation is based on the information from http://aa.usno.navy.mil/faq/docs/GAST.php
The maximum error resulting from the use of these formulas
for sidereal time over the period 2000-2100 is 0.432 seconds.
Parameters
----------
JD: float, the julian day of observation.
    longitude: float, east longitude of the observing site [deg]
Returns
-------
    float, the local apparent sidereal time [hr]
'''
D = JD - 2451545.0 #number of days from 2000 January 1, 12h UT
GMST = 18.697374558 + 24.06570982441908*D #Greenwich mean sidereal time [hr]
Omega = n.deg2rad(125.04 - 0.052954*D) #longitude of the ascending node of the Moon [rad]
L = n.deg2rad(280.47 + 0.98565*D) #mean Longitude of the Sun [rad]
Del_Phi = -0.000319*n.sin(Omega)-0.000024*n.sin(2*L) #nutation in longitude
eps = n.deg2rad(23.4393 - 0.0000004*D) #obliquity [rad]
eqeq = Del_Phi*n.cos(eps) #equation of the equinoxes
GAST = GMST + eqeq #Greenwich apparent sidereal time [hr]
LAST = (GAST+longitude/360*24)%24 #local apparent sidereal time [hr]
return LAST
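# Quick sanity check (illustrative): at JD = 2451545.0 (the J2000.0 epoch) and
# longitude 0 deg, D = 0, so LAST reduces to GMST + eqeq, roughly 18.70 hr.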
def interp_coord(filenames, solved_outputs):
'''
Interpolate the True_AZ and True_ALT for images that are not solved and
update the RA and DEC with the interpolated values in the header.
'''
util = Dispatch('ACP.Util')
solved, Input_AZ, Input_ALT, True_AZ, True_ALT = solved_outputs
fi = n.array([int(filenames[i][-7:-4]) for i in range(len(filenames))])
w = [0,15,30,40,45] # the number of last image in every elevation row
for i in range(len(w)-1):
wf = (fi>w[i]) & (fi<=w[i+1])
if not any(filenames[wf]): continue
if i==0: #using the second row of image in elevation for interpolation
wi = (solved>w[i+1]) & (solved<=w[i+2])
k = min(3, sum(wi)-1)
A = UnivariateSpline(solved[wi]-15, True_AZ[wi], k=1)
E = UnivariateSpline(solved[wi]-15, True_ALT[wi]-25, k=k)
else:
wi = (solved>w[i]) & (solved<=w[i+1])
k = min(3, sum(wi)-1)
A = UnivariateSpline(solved[wi], True_AZ[wi], k=k)
E = UnivariateSpline(solved[wi], True_ALT[wi], k=k)
for fn in filenames[wf]:
#insert the interpolated Obs_AZ and Obs_ALT
f = fits.open(fn, mode='update')
H = f[0].header
j = int(fn[-7:-4])
entry = [j,H['AZ'],H['ALT'],float(A(j)),float(E(j))]
solved_outputs = n.insert(solved_outputs,j-1,entry,axis=1)
#update the RA and DEC in the header with the interpolated values
LAST = get_last(H['JD'],H['LONGITUD']) #local apparent sidereal time
ct = util.Newct(H['LATITUDE'],LAST)
ct.Azimuth = float(A(j))
ct.Elevation = float(E(j))
c = SkyCoord(ct.RightAscension, ct.Declination, unit=('hour','deg'))
f[0].header['RA'] = c.ra.to_string(unit='hour',sep=' ',precision=2)
f[0].header['DEC'] = c.dec.to_string(unit='deg',sep=' ',precision=1)
f.flush()
f.close()
return solved_outputs.T
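# Note: interp_coord patches the unsolved FITS headers in place *and* returns the
# pointing table with the interpolated rows inserted (transposed to one row per image).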
def pointing_err(dnight, sets):
'''
This module is calculating the pointing error of each image.
'''
star = Dispatch('NOVAS.Star')
site = Dispatch('NOVAS.Site')
util = Dispatch('ACP.Util')
p = Dispatch('PinPoint.Plate')
#looping through all the sets in that night
for s in sets:
calsetp = filepath.calibdata + dnight + '/S_0' + s[0] + '/'
#read in the header to set the site object's parameter
        H = fits.open(calsetp+'ib001.fit',uint=False)[0].header
site.longitude = H['LONGITUD']
site.latitude = H['LATITUDE']
site.height = 0
        #calculate the temperature-pressure correction for refraction
temp = (H['AMTEMP_F']-32)/1.8 + 273 #temperature [K]
pres = (1-(0.0065*H['ELEVATIO']/288.15))**5.3 #pressure [atm]
tpco = pres*(283/temp) #correction
#refraction at 7.5 altitude
refraction = tpco*(1/n.tan(n.deg2rad(7.5+7.31/11.9)))/60
#just for V band
solved=[]; notsolved=[]
True_AZ=[]; True_ALT=[]; Input_AZ=[]; Input_ALT=[]
for fn in iglob(calsetp+'ib???.fit'):
fns = fn[:-4]+'s'+fn[-4:]
if os.path.exists(fns):
H = fits.open(fns)[0].header
fnsolved = fns
else:
H = fits.open(fn)[0].header
fnsolved = fn
#calculating the pointing error only if the plate is solved
if 'PLTSOLVD' not in H or H['PLTSOLVD']==False:
notsolved.append(fn)
continue
solved.append(int(fn[-7:-4]))
p.attachFits(fnsolved)
star.RightAscension = p.RightAscension
star.Declination = p.Declination
JD = H['JD'] #Julian Date
TJD = util.Julian_TJD(JD) #Terrestrial Julian Date
#updated star's coordinates at the observed date/time and site
StarTopo = star.GetTopocentricPosition(TJD, site, False)
#local apparent sidereal time [hr]
LAST = get_last(JD, H['LONGITUD'])
#new CoordinateTransform object
ct = util.Newct(H['LATITUDE'],LAST)
ct.RightAscension = StarTopo.RightAscension
ct.Declination = StarTopo.Declination
Input_AZ.append(H['AZ'])
Input_ALT.append(H['ALT'])
True_AZ.append(ct.Azimuth)
#correct for atmospheric refraction on images 1-15
if int(fn[-7:-4]) < 16:
True_ALT.append(ct.Elevation + refraction)
else:
True_ALT.append(ct.Elevation)
p.DetachFITS()
        #interpolate the True_AZ and True_ALT for images that are not solved
pterr = n.array([solved,Input_AZ,Input_ALT,True_AZ,True_ALT])
pterr = interp_coord(n.array(notsolved), pterr)
# calculate errors
pErr = pterr.T
pErr[3][n.where((pErr[1]==0)& (pErr[3]>180))] -= 360
azmErr = (pErr[1] - pErr[3])*n.cos(n.deg2rad(pErr[4]))
altErr = pErr[2] - pErr[4]
totErr = n.sqrt(n.power(azmErr,2) + n.power(altErr,2))
#create a pointing error plot
errorPlot = plt.figure(figsize=(20,10))
ax = errorPlot.add_subplot(111)
plt.suptitle("Pointing Error by Image Number", fontsize=25, verticalalignment='top')
plt.title("Data Set " + s[0], fontsize=20)
plt.plot(pErr[0], azmErr, linestyle="-.", marker="o", markerfacecolor='None',
markersize=4, color = "darkorange", alpha=0.7, label="Azimuth Error")
plt.plot(pErr[0], altErr, linestyle="--", marker="o",
markersize=4, color = "darkgreen", alpha=0.7, label="Altitude Error")
plt.plot(pErr[0], totErr, linestyle="-", linewidth=2, marker="o",
markersize=6, color = "black", alpha=1, label="Total Error")
plt.axhline(0, color="black", linestyle="-", alpha=0.5, zorder=-10)
plt.ylim(-3, 3)
plt.ylabel("Error in Degrees", fontsize=20, labelpad = 10)
plt.xlabel("Image Number", fontsize=20, labelpad = 15)
plt.xticks(n.arange(0, 50, 5))
plt.legend(loc='upper left', markerscale=1.8, fontsize=18, framealpha=0.3)
ax.tick_params(axis='both', which='major', labelsize=15)
plt.text(0.5, -2.8, "Average Total Error: " + '{:.3f}'.format(totErr.mean()) + u'\N{DEGREE SIGN}', fontsize=18)
errorPlot.savefig(filepath.calibdata+dnight+'/pointerr_%s.png' %s[0])
#saving the output file
outfile = filepath.calibdata+dnight+'/pointerr_%s.txt' %s[0]
nformat = ['%4.f','%8.f','%8.1f','%8.2f','%8.2f']
H = 'file Input_AZ Input_ALT Obs_AZ Obs_ALT' #column names
n.savetxt(outfile,pterr,fmt=nformat,header=H)
if __name__ == "__main__":
print("Hi, running from the console.")
pointing_err('FCNA160803', ['1st',])
|
Venezuelan equine encephalitis virus: comparison of infectivity and virulence of strains V-38 and P676 in donkeys.
Two strains of Venezuelan equine encephalitis (VEE) virus were examined for the ability to replicate in, as well as to produce death among, donkeys. One, a low-passage strain known as P676, was originally isolated from mosquitoes in Venezuela. The other, strain V-38, was isolated from a horse brain in 1938 and had undergone an unknown number of laboratory passages; it is used extensively for the preparation of inactivated VEE vaccine. Both strains were found to be approximately equal in their ability to infect donkeys. However, a quantity as small as 50% hamster intraperitoneal infectious units of strain V-38 resulted in fatal infection. On the other hand, as much as 631 million infectious units of strain P676 were nonfatal in one of two donkeys. It appears that strain V-38 is approximately 100 million times more virulent than strain P676 in equine species. One donkey which received strain P676 demonstrated a biphasic pattern of clinical illness and viremia, and there is suggestive evidence that another animal experienced a second and fatal clinical response 3 weeks after virus inoculation. |
import { ScheduleEvent } from '../../garoon/schedule';
import { d_2021_09_18 } from '../../utils/__test__/mock-datetime';
import { SyntaxGeneratorFactory } from '../syntax-generator-factory';
const domain = 'domain';
const htmlSyntaxGenerator = new SyntaxGeneratorFactory().create('html');
const markdownSyntaxGenerator = new SyntaxGeneratorFactory().create('markdown');
const mockRegularEvent: ScheduleEvent = {
'id': '2652936',
'subject': '予定1',
'startTime': {
'year': 2021,
'month': 9,
'date': 19,
'hour': 9,
'minute': 0,
},
'endTime': {
'year': 2021,
'month': 9,
'date': 19,
'hour': 10,
'minute': 0,
},
'eventType': 'REGULAR',
'eventMenu': '',
'attendees': [
{
'id': '1',
'name': 'ユーザー1',
},
],
'visibilityType': 'PUBLIC',
'isAllDay': false,
'isStartOnly': false,
};
const mockRegularEventWithEventMenu: ScheduleEvent = {
'id': '2652938',
'subject': '予定3',
'startTime': {
'year': 2021,
'month': 9,
'date': 19,
'hour': 10,
'minute': 0,
},
'endTime': {
'year': 2021,
'month': 9,
'date': 19,
'hour': 10,
'minute': 30,
},
'eventType': 'REGULAR',
'eventMenu': '打合',
'attendees': [
{
'id': '1',
'name': 'ユーザー1',
},
],
'visibilityType': 'PUBLIC',
'isAllDay': false,
'isStartOnly': false,
};
const mockAllDayEvent: ScheduleEvent = {
'id': '2652943',
'subject': '非公開予定',
'startTime': {
'year': 2021,
'month': 9,
'date': 19,
'hour': 0,
'minute': 0,
},
'endTime': {
'year': 2021,
'month': 9,
'date': 19,
'hour': 23,
'minute': 59,
},
'eventType': 'REGULAR',
'eventMenu': '',
'attendees': [
{
'id': '1',
'name': 'ユーザー1',
},
],
'visibilityType': 'PRIVATE',
'isAllDay': true,
'isStartOnly': false,
};
const mockAllDayEventWithEventMenu: ScheduleEvent = {
'id': '2652940',
'subject': '終日予定',
'startTime': {
'year': 2021,
'month': 9,
'date': 19,
'hour': 0,
'minute': 0,
},
'endTime': {
'year': 2021,
'month': 9,
'date': 19,
'hour': 23,
'minute': 59,
},
'eventType': 'REGULAR',
'eventMenu': '休み',
'attendees': [
{
'id': '1',
'name': 'ユーザー1',
},
],
'visibilityType': 'PUBLIC',
'isAllDay': true,
'isStartOnly': false,
};
// TODO: implement these tests if time permits
// const mockMyGroupRegularEvent = {};
// const mockMyGroupEventWithEventMenu = {};
// const mockMyGroupAllDayEvent = {};
// const mockMyGroupAllDayEventWithEventMenu = {};
describe('createTitle', () => {
test('html', () => {
expect(htmlSyntaxGenerator.createTitle(d_2021_09_18)).toBe('<span>[ 2021-09-18 の予定 ]</span>');
});
test('markdown', () => {
expect(markdownSyntaxGenerator.createTitle(d_2021_09_18)).toBe('[ 2021-09-18 の予定 ]');
});
});
describe('createEvents', () => {
test.each([
[
mockRegularEvent,
`<span><span>09:00-10:00</span> <a href="https://${domain}/g/schedule/view.csp?event=2652936">予定1</a></span>`,
],
[
mockRegularEventWithEventMenu,
`<span><span>10:00-10:30</span> <span style="background-color: #3182dc; display: inline-block; margin-right: 3px; padding: 2px 2px 1px; color: rgb(255, 255, 255); font-size: 11.628px; border-radius: 2px; line-height: 1.1;">打合</span> <a href="https://${domain}/g/schedule/view.csp?event=2652938">予定3</a></span>`,
],
[
mockAllDayEvent,
`<span><span style="background-color: #9acd32; display: inline-block; margin-right: 3px; padding: 2px 2px 1px; color: rgb(255, 255, 255); font-size: 11.628px; border-radius: 2px; line-height: 1.1;">終日</span> <a href="https://${domain}/g/schedule/view.csp?event=2652943">非公開予定</a></span>`,
],
[
mockAllDayEventWithEventMenu,
`<span><span style="background-color: #9acd32; display: inline-block; margin-right: 3px; padding: 2px 2px 1px; color: rgb(255, 255, 255); font-size: 11.628px; border-radius: 2px; line-height: 1.1;">終日</span> <span style="background-color: #f44848; display: inline-block; margin-right: 3px; padding: 2px 2px 1px; color: rgb(255, 255, 255); font-size: 11.628px; border-radius: 2px; line-height: 1.1;">休み</span> <a href="https://${domain}/g/schedule/view.csp?event=2652940">終日予定</a></span>`,
],
])('html', (event, expected) => {
expect(htmlSyntaxGenerator.createEvents(domain, [event])).toBe(expected);
});
test.each([
[mockRegularEvent, `09:00-10:00 [予定1](https://${domain}/g/schedule/view.csp?event=2652936)`],
[
mockRegularEventWithEventMenu,
`10:00-10:30 <span style="color: #3182dc;">[打合]</span> [予定3](https://${domain}/g/schedule/view.csp?event=2652938)`,
],
[
mockAllDayEvent,
`<span style="color: #9acd32;">[終日]</span> [非公開予定](https://${domain}/g/schedule/view.csp?event=2652943)`,
],
[
mockAllDayEventWithEventMenu,
`<span style="color: #9acd32;">[終日]</span> <span style="color: #f44848;">[休み]</span> [終日予定](https://${domain}/g/schedule/view.csp?event=2652940)`,
],
])('markdown', (event, expected) => {
expect(markdownSyntaxGenerator.createEvents(domain, [event])).toBe(expected);
});
});
|
// Repo: hamarb123/dotnet-api-docs
// <Internal>
// File name: ipaddress.cpp
// The snippets contained here apply to:
// 1) System.Net.IPAddress.AddressFamily, snippet3.
// 2) System.Net.IPAddress.ScopeId, snippet3.
// more
// </Internal>
// <Snippet1>
// This program shows how to use the IPAddress class to obtain a server's
// IP addresses and related information.
#using <System.dll>
using namespace System;
using namespace System::Net;
using namespace System::Net::Sockets;
using namespace System::Text::RegularExpressions;
/**
* The IPAddresses method obtains the selected server IP address information.
* It then displays the type of address family supported by the server and its
* IP address in standard and byte format.
**/
void IPAddresses( String^ server )
{
try
{
System::Text::ASCIIEncoding^ ASCII = gcnew System::Text::ASCIIEncoding;
// Get server related information.
IPHostEntry^ heserver = Dns::GetHostEntry( server );
// Loop on the AddressList
System::Collections::IEnumerator^ myEnum = heserver->AddressList->GetEnumerator();
while ( myEnum->MoveNext() )
{
IPAddress^ curAdd = safe_cast<IPAddress^>(myEnum->Current);
//<Snippet3>
// Display the type of address family supported by the server. If the
// server is IPv6-enabled this value is: InterNetworkV6. If the server
// is also IPv4-enabled there will be an additional value of InterNetwork.
Console::WriteLine( "AddressFamily: {0}", curAdd->AddressFamily );
// Display the ScopeId property in case of IPV6 addresses.
if ( curAdd->AddressFamily.ToString() == ProtocolFamily::InterNetworkV6.ToString() )
Console::WriteLine( "Scope Id: {0}", curAdd->ScopeId );
//</Snippet3>
// Display the server IP address in the standard format. In
// IPv4 the format will be dotted-quad notation, in IPv6 it will be
         // in colon-hexadecimal notation.
Console::WriteLine( "Address: {0}", curAdd );
// Display the server IP address in byte format.
Console::Write( "AddressBytes: " );
//<Snippet2>
array<Byte>^bytes = curAdd->GetAddressBytes();
for ( int i = 0; i < bytes->Length; i++ )
{
Console::Write( bytes[ i ] );
}
// </Snippet2>
Console::WriteLine( "\r\n" );
}
}
catch ( Exception^ e )
{
Console::WriteLine( "[DoResolve] Exception: {0}", e );
}
}
// This IPAddressAdditionalInfo displays additional server address information.
void IPAddressAdditionalInfo()
{
try
{
// Display the flags that show if the server supports IPv4 or IPv6
// address schemas.
Console::WriteLine( "\r\nSupportsIPv4: {0}", Socket::SupportsIPv4 );
Console::WriteLine( "SupportsIPv6: {0}", Socket::SupportsIPv6 );
if ( Socket::SupportsIPv6 )
{
// Display the server Any address. This IP address indicates that the server
// should listen for client activity on all network interfaces.
Console::WriteLine( "\r\nIPv6Any: {0}", IPAddress::IPv6Any );
// Display the server loopback address.
Console::WriteLine( "IPv6Loopback: {0}", IPAddress::IPv6Loopback );
// Used during autoconfiguration first phase.
Console::WriteLine( "IPv6None: {0}", IPAddress::IPv6None );
Console::WriteLine( "IsLoopback(IPv6Loopback): {0}", IPAddress::IsLoopback( IPAddress::IPv6Loopback ) );
}
Console::WriteLine( "IsLoopback(Loopback): {0}", IPAddress::IsLoopback( IPAddress::Loopback ) );
}
catch ( Exception^ e )
{
Console::WriteLine( "[IPAddresses] Exception: {0}", e );
}
}
int main()
{
array<String^>^args = Environment::GetCommandLineArgs();
String^ server = nullptr;
// Define a regular expression to parse user's input.
// This is a security check. It allows only
// alphanumeric input string between 2 to 40 character long.
Regex^ rex = gcnew Regex( "^[a-zA-Z]\\w{1,39}$" );
if ( args->Length < 2 )
{
// If no server name is passed as an argument to this program, use the current
// server name as default.
server = Dns::GetHostName();
Console::WriteLine( "Using current host: {0}", server );
}
else
{
server = args[ 1 ];
if ( !(rex->Match(server))->Success )
{
Console::WriteLine( "Input string format not allowed." );
return -1;
}
}
// Get the list of the addresses associated with the requested server.
IPAddresses( server );
// Get additional address information.
IPAddressAdditionalInfo();
}
// </Snippet1>
|
def recognize_raw(self, wav, locale=None):
if locale is None:
locale = self.locale
if locale not in LOCALES:
raise ValueError('unsupported locale: ' + locale)
with audio._open_wav(wav) as w:
if w.getnchannels() != 1:
raise ValueError('can only recognize single channel audio')
content_type = '; '.join((
'audio/wav',
'codec="audio/pcm"',
'samplerate=8000',
'sourcerate={}'.format(w.getframerate()),
'trustsourcerate=true'
))
params = '&'.join((
'scenarios=ulm',
'appid=D4D52672-91D7-4C74-8AD8-42B1D98141A5',
'locale={}'.format(locale),
'device.os="Windows OS"',
'version=3.0',
'format=json',
'instanceid=565D69FF-E928-4B7E-87DA-9A750B96D9E3',
'requestid={}'.format(uuid.uuid4())
))
r = requests.post(
_API_SCOPE + '/recognize?' + params,
data=wav,
headers={
'Content-Type': content_type,
'Accept': 'application/json;text/xml',
'Authorization': 'Bearer ' + self._get_token(),
},
)
r.raise_for_status()
return r.json() |
def c_2d_plane_2n_points_bipartite_graph():
import networkx as nx
from networkx.algorithms.flow import dinitz
N = int(input())
A, B, C, D = [], [], [], []
for _ in range(N):
a, b = [int(i) for i in input().split()]
A.append(a)
B.append(b)
for _ in range(N):
c, d = [int(i) for i in input().split()]
C.append(c)
D.append(d)
    # Red points are numbered 1..N; blue points are numbered N+1..2N
    # The source is node 0 and the sink is node 2N+1
graph = nx.DiGraph()
graph.add_nodes_from(range(2 * N + 2))
for i in range(N):
for j in range(N):
if A[i] < C[j] and B[i] < D[j]:
graph.add_edge(i + 1, j + N + 1, capacity=1)
for i in range(1, N + 1):
graph.add_edge(0, i, capacity=1)
for j in range(N + 1, 2 * N + 1):
graph.add_edge(j, 2 * N + 1, capacity=1)
return dinitz(graph, 0, 2 * N + 1).graph['flow_value']
print(c_2d_plane_2n_points_bipartite_graph()) |
def swap(items, i, j):
    assert type(items) is list
    # idiomatic Python swap; no temporary variable needed
    items[i], items[j] = items[j], items[i] |
/* Repo: srl295/icu, file: locexp/lxbrk.c */
/**********************************************************************
* Copyright (C) 1999-2006, International Business Machines
* Corporation and others. All Rights Reserved.
***********************************************************************/
#include "locexp.h"
/* routines having to do with the break iteration sample */
/*****************************************************************************
 *
 * Explorer for break iteration
 */
void showExploreBreak(LXContext *lx, const char *locale)
{
UChar pattern[1024];
UNumberFormat *nf = NULL; /* numfmt in the current locale */
UNumberFormat *nf_default = NULL; /* numfmt in the default locale */
UNumberFormat *nf_spellout = NULL;
UErrorCode status = U_ZERO_ERROR;
const UChar *defaultValueErr = 0;
const UChar *localValueErr = 0;
showKeyAndStartItem(lx, "EXPLORE_Break", FSWF("EXPLORE_Break", "Explore > Numbers"), locale, FALSE, U_ZERO_ERROR);
u_fprintf(lx->OUT, "%S<p>", FSWF("formatExample_Break_What","This example demonstrates break iteration"));
exploreFetchNextPattern(lx, pattern, strstr(lx->queryString,"EXPLORE_Break"));
/*
*
*
* REAL CODE GOES HERE
*
*
*
*/
showExploreCloseButton(lx, locale, "Break");
u_fprintf(lx->OUT, "</td><td align=\"left\" valign=\"top\">");
printHelpTag(lx, "EXPLORE_Break", NULL);
showKeyAndEndItem(lx, "EXPLORE_Break", locale);
}
|
# Quick bracket-sequence check: rejects strings that start with ')', end with '(', or have odd length
def solve():
napis = input()
if(napis[0] == ')' or (len(napis) % 2 == 1) or (napis[-1] == '(')):
print("No")
else:
print("Yes")
tt = int(input())
for x in range(tt):
solve()
|
# Imports assumed from the OpenVINO Model Optimizer utilities this helper relies on
import logging as log

import numpy as np

from mo.front.common.partial_infer.utils import dynamic_dimension, dynamic_dimension_value, \
    make_equal_rank, shape_array


def bi_directional_shape_broadcasting(input_shape_1: np.array, input_shape_2: np.array):
shape_1 = input_shape_1.copy()
shape_2 = input_shape_2.copy()
shape_1, shape_2 = make_equal_rank(shape_1, shape_2)
result = list()
for left, right in zip(shape_1, shape_2):
if left != right and left != 1 and right != 1 and left is not dynamic_dimension and \
right is not dynamic_dimension:
log.debug('The shape "{}" cannot be broadcasted to "{}"'.format(input_shape_1, input_shape_2))
return None
if left is not dynamic_dimension and right is not dynamic_dimension:
result.append(max(left, right))
elif left is not dynamic_dimension and left != 1:
result.append(left)
elif right is not dynamic_dimension and right != 1:
result.append(right)
else:
result.append(dynamic_dimension_value)
return shape_array(result) |
package org.perm.testgenerator;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.InvalidPropertiesFormatException;
import java.util.Vector;
import org.perm.testgenerator.dataset.DataAndQueryGenerator;
public class TestInfoHolder {
private static TestInfoHolder instance;
private DataAndQueryGenerator generator;
private String genName;
private Vector<String> markedErrors;
private TestInfoHolder () {
markedErrors = new Vector<String> ();
}
public static TestInfoHolder getInstance () {
if (instance == null) {
instance = new TestInfoHolder ();
}
return instance;
}
public DataAndQueryGenerator getCurrentGenerator () {
return generator;
}
public void setGenerator (String name) throws InvalidPropertiesFormatException, FileNotFoundException, IOException {
generator = new DataAndQueryGenerator (ConnectionOptions.getInstance().getPath() + "/" + name + ".xml");
genName = name;
}
public String getGeneratorName () {
return genName;
}
public Vector<String> getMarkedErrors () {
return markedErrors;
}
public void resetMarkedErrors () {
markedErrors = new Vector<String> ();
}
public void addMarkedError (String error) {
markedErrors.add(error);
}
}
|
// Imports assumed for the Caliper, Guava, crinch and JSR-305 APIs used below
import java.math.BigInteger;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Random;

import javax.annotation.Nonnull;

import com.google.caliper.Benchmark;
import com.google.caliper.api.VmOptions;
import com.google.caliper.runner.CaliperMain;
import com.google.common.base.Preconditions;
import com.tomgibara.crinch.bits.BitReader;
import com.tomgibara.crinch.bits.BitVector;
import com.tomgibara.crinch.bits.ByteArrayBitReader;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

/**
* Benchmark tool for mnemonic index generation methods (aligned 11-bit reading).
*/
@VmOptions("-XX:-TieredCompilation")
@SuppressFBWarnings("PREDICTABLE_RANDOM")
class IndexGeneratorBenchmark {
private static final byte[] INPUT = new byte[128 / 8];
private static final byte[] CHECKSUM = new byte[256 / 8];
static {
Random rng = new Random(0x1234);
rng.nextBytes(INPUT);
rng.nextBytes(CHECKSUM);
}
@Benchmark
public int bigIntegerMethod(int reps) {
int dummy = 0;
for (int rep = 0; rep < reps; rep++) {
byte[] entropy = INPUT;
byte[] checksum = Arrays.copyOf(CHECKSUM, CHECKSUM.length);
/* Convert the length to bits for purpose of BIP0039 specification match-up */
int entropyBitCount = entropy.length * 8;
int checksumBitCount = entropyBitCount / 32;
int mnemonicSentenceLength = (entropyBitCount + checksumBitCount) / 11;
BigInteger entropyBytes = new BigInteger(1, entropy);
BigInteger checksumBytes = new BigInteger(1, checksum);
/* We need only 'first' checksumBitCount from the full checksumBytes,
* so we need to shift to the right checksum.length * 8 - bits
*/
checksumBytes = checksumBytes.shiftRight(checksum.length * 8 - checksumBitCount);
/* We need to 'tack on' the checksumBitCount to the right of the entropyBytes.
* Shift the entropyBytes to the left checksumBitCount and 'or' the values.
*/
BigInteger sentenceSource = entropyBytes.shiftLeft(checksumBitCount).or(checksumBytes);
int[] indexValues = new int[mnemonicSentenceLength];
for (int i = 0; i < mnemonicSentenceLength; i++) {
/* Extract the 11-bit chunks out (in reverse order due to shifting optimization) */
int index = sentenceSource.intValue() & ((1 << 11) - 1);
indexValues[mnemonicSentenceLength - i - 1] = index;
sentenceSource = sentenceSource.shiftRight(11);
}
dummy += indexValues[0];
}
return dummy;
}
/* Very very specialized, could use array input, but nope */
private static BitSet bytesToBitSet(int totalBits, byte[] entropy, byte[] checksum) {
Preconditions.checkNotNull(entropy);
Preconditions.checkNotNull(checksum);
Preconditions.checkArgument(totalBits > 0 && totalBits <= (entropy.length * 8 + checksum.length * 8));
BitSet bits = new BitSet(totalBits);
int checksumOffset = entropy.length * 8;
int checksumLength = totalBits - checksumOffset;
for (int i = 0; i < entropy.length; ++i) {
for (int j = 0; j < 8; ++j) {
if ((entropy[i] & (1 << (7 - j))) != 0) {
bits.set((i * 8) + j);
}
}
}
for (int i = 0; i < checksum.length; ++i) {
for (int j = 0; j < 8; ++j) {
int index = i * 8 + j;
if (index >= checksumLength) {
return bits;
}
if ((checksum[i] & (1 << (7 - j))) != 0) {
bits.set(index + checksumOffset);
}
}
}
return bits;
}
@Benchmark
public int bitsetMethod(int reps) {
int dummy = 0;
for (int rep = 0; rep < reps; rep++) {
byte[] entropy = INPUT;
byte[] checksum = Arrays.copyOf(CHECKSUM, CHECKSUM.length);
/* Convert the length to bits for purpose of BIP0039 specification match-up */
int entropyBitCount = entropy.length * 8;
int checksumBitCount = entropyBitCount / 32;
int mnemonicSentenceLength = (entropyBitCount + checksumBitCount) / 11;
BitSet entropyBits = bytesToBitSet(entropyBitCount + checksumBitCount, entropy, checksum);
/* Take each group of 11 bits and convert to an integer
* that will be used to index into the word list.
*/
int[] indexValues = new int[mnemonicSentenceLength];
for (int i = 0; i < mnemonicSentenceLength; ++i) {
int index = 0;
for (int j = 0; j < 11; ++j) {
index <<= 1;
if (entropyBits.get((i * 11) + j)) {
index |= 0x1;
}
}
indexValues[i] = index;
}
dummy += indexValues[0];
}
return dummy;
}
private static boolean[] bytesToBits(byte[] data) {
Preconditions.checkNotNull(data);
boolean[] bits = new boolean[data.length * 8];
for (int i = 0; i < data.length; ++i) {
for (int j = 0; j < 8; ++j) {
bits[(i * 8) + j] = (data[i] & (1 << (7 - j))) != 0;
}
}
return bits;
}
@Nonnull
private static boolean[] bytesToBits(int totalBits, @Nonnull byte[] data) {
Preconditions.checkNotNull(data);
Preconditions.checkArgument(totalBits > 0 && totalBits <= data.length * 8);
boolean[] bits = new boolean[totalBits];
int offset = 0;
for (int i = 0; i < data.length; ++i) {
for (int j = 0; j < 8; ++j) {
bits[offset] = (data[i] & (1 << (7 - j))) != 0;
offset++;
if (offset >= totalBits) {
return bits;
}
}
}
return bits;
}
@Benchmark
int joinedBooleanMethod(int reps) {
int dummy = 0;
for (int rep = 0; rep < reps; rep++) {
byte[] entropy = INPUT;
byte[] joined = new byte[entropy.length + 256 / 8];
System.arraycopy(entropy, 0, joined, 0, entropy.length);
System.arraycopy(CHECKSUM, 0, joined, entropy.length, CHECKSUM.length);
/* Convert the length to bits for purpose of BIP0039 specification match-up */
int entropyBitCount = entropy.length * 8;
int checksumBitCount = entropyBitCount / 32;
int mnemonicSentenceLength = (entropyBitCount + checksumBitCount) / 11;
boolean[] concatBits = bytesToBits(entropyBitCount + checksumBitCount, joined);
/* Take each group of 11 bits and convert to an integer
* that will be used to index into the word list.
*/
int[] indexValues = new int[mnemonicSentenceLength];
for (int i = 0; i < mnemonicSentenceLength; ++i) {
int index = 0;
for (int j = 0; j < 11; ++j) {
index <<= 1;
if (concatBits[(i * 11) + j]) {
index |= 0x1;
}
}
indexValues[i] = index;
}
dummy += indexValues[0];
}
return dummy;
}
@Benchmark
public int crinchBitReaderMethod(int reps) {
int dummy = 0;
for (int rep = 0; rep < reps; rep++) {
byte[] entropy = INPUT;
byte[] joined = new byte[entropy.length + 256 / 8];
System.arraycopy(entropy, 0, joined, 0, entropy.length);
System.arraycopy(CHECKSUM, 0, joined, entropy.length, CHECKSUM.length);
/* Convert the length to bits for purpose of BIP0039 specification match-up */
int entropyBitCount = entropy.length * 8;
int checksumBitCount = entropyBitCount / 32;
int totalBitCount = entropyBitCount + checksumBitCount;
int mnemonicSentenceLength = totalBitCount / 11;
BitReader bitReader = new ByteArrayBitReader(joined);
int[] indexValues = new int[mnemonicSentenceLength];
for (int i = 0; i < mnemonicSentenceLength; i++) {
int index = bitReader.read(11);
indexValues[i] = index;
}
dummy += indexValues[0];
}
return dummy;
}
@Benchmark
public int crinchBitVectorMethod(int reps) {
int dummy = 0;
for (int rep = 0; rep < reps; rep++) {
byte[] entropy = INPUT;
byte[] joined = new byte[entropy.length + 256 / 8];
System.arraycopy(entropy, 0, joined, 0, entropy.length);
System.arraycopy(CHECKSUM, 0, joined, entropy.length, CHECKSUM.length);
/* Convert the length to bits for purpose of BIP0039 specification match-up */
int entropyBitCount = entropy.length * 8;
int checksumBitCount = entropyBitCount / 32;
int totalBitCount = entropyBitCount + checksumBitCount;
int mnemonicSentenceLength = totalBitCount / 11;
BitVector bitVector = new BitVector(joined.length * 8);
bitVector.setBytes(0, joined, 0, joined.length * 8);
int offset = 0;
int[] indexValues = new int[mnemonicSentenceLength];
for (int i = 0; i < mnemonicSentenceLength; i++) {
int index = (int)bitVector.getBits(offset, 11);
indexValues[i] = index;
offset += 11;
}
dummy += indexValues[0];
}
return dummy;
}
public static void main(String[] args) {
CaliperMain.main(IndexGeneratorBenchmark.class, args);
}
} |
The Vocal Teacher of Ten Thousand: E. Azalia Hackley as Community Music Educator, 1910–22
Known as “the vocal teacher of ten thousand,” Emma Azalia Hackley (1867-1922) dedicated much of her career to the promotion of music activities in black communities. She organized dozens of community gatherings at which she taught music reading fundamentals, techniques of singing, and appreciation of the classics to huge groups of amateur musicians and the general public. This essay introduces aspects of the late nineteenth-century musical landscape in the United States that preceded Hackley's entry into music education, and recounts her career as a community music educator. The article concludes with the identification of more recent manifestations of the philosophical underpinnings that informed her teaching, thereby establishing connections between her work and contemporary educational thought. Hackley's own writings serve as the primary source material for this discussion whenever possible, supplemented by first-hand accounts of her activities that appeared in the black press and those written by her associates. Her accomplishments add an important chapter to the history of community music education which has thus far received little scholarly attention. |
/**
* Write a description of class maker here.
*
* @author Bailey Cross
* @version 1 (Probably final version as well)
*/
import java.util.Random;
import java.util.Scanner;

public class maker
{
public static void main(String args[]){
Random rand = new Random();
Scanner sc = new Scanner(System.in);
int x = rand.nextInt(2);
while(!sc.nextLine().equals("q")){
x = rand.nextInt(2);
if(x == 0){
System.out.println("We should get Chipotle");
}
else{
System.out.println("We should get Chinese");
}
}
}
} |
// FetchAgent populates the minimal Agent struct with data anyone can see
func FetchAgent(id string, agent *Agent) error {
gid, err := toGid(id)
if err != nil {
Log.Error(err)
return err
}
if gid == "" {
return fmt.Errorf("unknown agent (redundant check?): %s", id)
}
	err = db.QueryRow("SELECT u.gid, u.iname, u.level, u.VVerified, u.VBlacklisted, u.Vid, u.RocksVerified FROM agent u WHERE u.gid = ?", gid).Scan(
&agent.Gid, &agent.Name, &agent.Level, &agent.Verified, &agent.Blacklisted, &agent.EnlID, &agent.RocksVerified)
if err != nil {
Log.Error(err)
return err
}
return nil
} |
/**
* Checks if the block is farmable.
*
* @param event PlayerInteractEvent
* @return true if block is farmable
*/
private boolean isPlayerBlockFarmable(PlayerInteractEvent event) {
boolean isGrassOrDirt = event.getClickedBlock().getType() == Material.GRASS_BLOCK || event.getClickedBlock().getType() == Material.DIRT;
boolean isTopBlockAir = event.getClickedBlock().getRelative(BlockFace.UP).getType() == Material.AIR;
return event.hasBlock() && isGrassOrDirt && isTopBlockAir;
} |
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import org.apache.curator.RetryPolicy;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.recipes.locks.InterProcessReadWriteLock;
import org.apache.curator.retry.ExponentialBackoffRetry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Created by alain on 16/9/17.
*/
public class CacheClusterViewerFactory {
private static final Logger logger = LoggerFactory.getLogger(CacheClusterViewerFactory.class);
private static volatile CacheClusterViewer cacheClusterViewer = null;
    /**
     * This function locks the cache cluster to guarantee the integrity of the CacheClusterMeta
     * held by CacheClusterViewer, so it must not be called while the cluster is locked.
     * <p>
     * On cache cluster servers, this function should be called before starting cache services to
     * make sure a deadlock does not happen.
     * <p>
     * On cache cluster clients, it is recommended that this method be called only once, before
     * getCacheClusterViewer is called; all subsequent calls of getCacheClusterViewer will then
     * return the same CacheClusterViewer instance.
     */
public static void configure(Config config) throws Exception {
if (config == null) {
config = ConfigFactory.load();
}
String zookeeperConnectionUrl = config.getString("zookeeper.connection_url");
RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
CuratorFramework zkClient = CuratorFrameworkFactory.newClient(zookeeperConnectionUrl, retryPolicy);
zkClient.start();
InterProcessReadWriteLock clusterGlobalLock =
new InterProcessReadWriteLock(zkClient, Constants.CACHE_CLUSTER_PATH);
clusterGlobalLock.readLock().acquire();
try {
cacheClusterViewer = new CacheClusterViewer(config);
} finally {
try {
clusterGlobalLock.readLock().release();
} catch (Exception e) {
logger.error(String.format("failed to release clusterGlobalLock on zknode[%s]",
Constants.CACHE_CLUSTER_PATH), e);
}
}
zkClient.close();
}
public static CacheClusterViewer getCacheClusterViewer() {
if (cacheClusterViewer == null) {
throw new RuntimeException(
"CacheClusterViewerFactory not configured, please configure before using it.");
}
return cacheClusterViewer;
}
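    // Expected call order (per the javadoc above; not enforced here): call
    // configure(...) once at startup while the cluster is unlocked, then call
    // getCacheClusterViewer() from anywhere afterwards.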
} |
James May has said that he is colleagues, "not mates" with his Grand Tour co-hosts Jeremy Clarkson and Richard Hammond, in an interview that also saw him call Clarkson a "knob".
Speaking to The Telegraph (Premium), May said that he has "never really changed [his] view of Jeremy, which is that he is a 'knob', as [he] said on the news" [after Clarkson's assault of a producer] and that Clarkson is "a bit of a Stuckist: part of him is actually locked in the 1920s."
May said the most annoying thing about his other co-host Hammond is "his face. His chirpiness and his silly little beard".
He added that he knew Hammond had recovered from his near-fatal supercar crash 10 years ago because he emerged "just as much of a t**t as he always was".
May's comments were published almost immediately in the wake of controversy over a remark Hammond made on the latest episode of The Grand Tour, where the 47-year-old motoring show host suggested that men who eat ice cream are gay.
On the sixth episode of the first season, which was broadcast on 23 December via Amazon Prime, Hammond said he didn't eat ice cream, which had "something to do with being straight".
He was responding to an observation by Clarkson, who said that you would not be able to eat a chocolate Magnum inside a Volvo for fear of ruining the white leather seats.
Clarkson and May appeared baffled by Hammond's comments, and Clarkson seemed to make an attempt to steer the conversation back to his original joke about eating chocolate in the Volvo.
People on Twitter have reacted negatively to Hammond's comments, accusing him of homophobia and criticising Amazon Prime for allowing the moment to be aired.
At the time of writing Hammond had not responded to the controversy. |
import { colors } from '@0x/react-shared';
import * as React from 'react';
import { styled } from 'ts/style/theme';
import { dash, rotate } from 'ts/style/keyframes';
interface SpinnerSvgProps {
color: string;
size: number;
viewBox?: string;
}
const SpinnerSvg: React.StatelessComponent<SpinnerSvgProps> = props => <svg {...props} />;
const StyledSpinner = styled(SpinnerSvg)`
animation: ${rotate} 3s linear infinite;
    margin: ${props => `-${props.size / 2}px 0 0 -${props.size / 2}px`};
    width: ${props => `${props.size}px`};
height: ${props => `${props.size}px`};
& .path {
stroke: ${props => props.color};
stroke-linecap: round;
animation: ${dash} 2.5s ease-in-out infinite;
}
`;
export interface SpinnerProps {
size?: number;
strokeSize?: number;
color?: string;
}
export const Spinner: React.StatelessComponent<SpinnerProps> = ({ size, strokeSize, color }) => {
const c = size / 2;
const r = c - strokeSize;
return (
<StyledSpinner color={color} size={size} viewBox={`0 0 ${size} ${size}`}>
<circle className="path" cx={c} cy={c} r={r} fill="none" strokeWidth={strokeSize} />
</StyledSpinner>
);
};
Spinner.defaultProps = {
size: 50,
color: colors.mediumBlue,
strokeSize: 4,
};
Spinner.displayName = 'Spinner';
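// Illustrative usage: <Spinner /> renders with the defaults above (50px, medium
// blue, 4px stroke); <Spinner size={24} strokeSize={2} /> overrides them.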
|
// Repo: artem1458/ts-auto-mock
import { createMock } from 'ts-auto-mock';
import {
TypeUnion,
TypeUnionEmptyObject,
TypeUnionFunction,
TypeUnionObject,
TypeUnionToken,
TypeUnionTokenAllBoolean,
TypeUnionTokenNumber,
TypeUnionTokenSameBoolean,
} from '../utils/types/typeUnion';
describe('for literal', () => {
describe('with a specific string', () => {
interface Interface {
a: 'string2';
}
    it('should set the literal string', () => {
const properties: Interface = createMock<Interface>();
expect(properties.a).toBe('string2');
});
});
describe('with a specific number', () => {
interface Interface {
a: 2;
}
    it('should set the literal number', () => {
const properties: Interface = createMock<Interface>();
expect(properties.a).toBe(2);
});
});
describe('with import', () => {
interface Interface {
literal: TypeUnion;
}
it('should set the first one', () => {
const properties: Interface = createMock<Interface>();
expect(properties.literal).toBe('1');
});
});
describe('with import token string', () => {
interface Interface {
literal: TypeUnionToken;
}
it('should set the first one', () => {
const properties: Interface = createMock<Interface>();
expect(properties.literal).toBe('a');
});
});
describe('with import token number', () => {
interface Interface {
literal: TypeUnionTokenNumber;
}
it('should set the first one', () => {
const properties: Interface = createMock<Interface>();
expect(properties.literal).toBe(1);
});
});
describe('with import token with same boolean', () => {
interface Interface {
literal: TypeUnionTokenSameBoolean;
}
it('should set value', () => {
const properties: Interface = createMock<Interface>();
expect(properties.literal).toBe(true);
});
});
describe('with import token with all kind of boolean values', () => {
interface Interface {
literal: TypeUnionTokenAllBoolean;
}
it('should set the first boolean value', () => {
const properties: Interface = createMock<Interface>();
expect(properties.literal).toBe(true);
});
});
describe('with import object', () => {
interface Interface {
literal: TypeUnionObject;
}
it('should set the first one', () => {
const properties: Interface = createMock<Interface>();
expect(properties.literal).toEqual({
a: '',
});
});
});
describe('with import function', () => {
interface Interface {
literal: TypeUnionFunction;
}
it('should set the first one', () => {
const properties: Interface = createMock<Interface>();
expect(properties.literal()).toBeUndefined();
});
});
describe('with import empty object', () => {
interface Interface {
literal: TypeUnionEmptyObject;
}
it('should set the first one', () => {
const properties: Interface = createMock<Interface>();
expect(properties.literal).toEqual({});
});
});
});
|
The following is a list of directories focused on Audio and Video Podcasts - That do not charge for you to be listed - Links for the most part take you directly to the submit page.
Note: Never pay to have your podcast listed in a directory - it is not worth it and you will not get any return on your investment. Please report any directories below that violate that principle to [email protected]
DIRECTORIES & APPS (197) - Updated 19-March-2018
iTunes - iTunes podcast directory - Need iTunes installed to make entry and need an iTunes account.
Google Play Music - If with Libsyn - go HERE FIRST
Stitcher - Great place to pick up new listeners
Libsyn - The Podcast Source App - WP8 & Windows 8 App
TuneIn Radio - New submit page.
OverCast App - There is no submission page - it pulls from the iTunes directory - but this is an important app to make sure your show is in and working well - check the iOS only App - here
(For the Apps - Downcast, iCatcher, Overcast, PocketCasts & RSSRadio - They pull their directory from the iTunes Public API - so make sure you are in iTunes. Instacast Directory is based on shows where X number of users have manually subscribed to your feed.)
aCast - email submission - No longer recommending this as they are asking you about audience size and making it a selective entry. Plus asking questions they should be able to get from your RSS feed. Something just does not look right with this - stay away.
All Podcasts - podcast directory - Ping form - Giving Error messages for all pings
Audio-Podcast.fm - General Podcast Directory
Blogdigger - Search for the latest Audio and more.
Blubrry - Need to register to add.
BritCaster - Directory of UK podcasters
CastRoller - Need to register to add.
Digital Podcasts - Need to register to add.
DoubleTwist (App) - Select "Request New Podcast" and enter RSS feed and other info in message box.
gigadial - podcast directory
iPodder - General Podcast Directory
Learn Out Loud - Podcast Directory - email sub. needed.
Lisn.cc - Podcast Directory meets Pinterest.
MeFeedia - Video Podcast Directory
Miro Guide - Part of Miro Player - Free Registration Needed - Giving errors.
mirpod - Podcast Directory
New Time Radio - podcast directory with long history
Outdoor Radio Network - Fishing, Hunting, Boating Podcast & Videocast Directory
OzPodcasts - Australian Podcasts
Player.fm - Audio Podcast Directory
Plazoo.com - RSS Search (submit does not work with Safari)
Podcast.com - Currently for sale (again) - no submissions at this time.
Podcast.tv - A Video Podcast Directory
podcast Alley - Need to register to add.
Podcast Blaster - General Podcast Directory
Podcast Bunker - Link removed per request of Podcast Bunker
PodcastDirectory - General Podcast Directory
Podcast-Directory.co.uk - General Podcast Directory
Podcastellano.com - Spanish Language Podcast Directory
Podcast Pup - Search Portal for Podcasts
PodcastZoom - Need to register to add.
NEW - poddirectory - New General Podcast Directory
Podfeed - Need to register to add. Submit page not working
podKicker - Smartphone Aggregator App - goes to blank screen may not be working
Podsonoro - Spanish Language Podcast directory
Podster.de - German Language Podcast Directory
PublicRadioFan.com - Directory of Public Radio Programs being podcasted.
QueerCasters - Directory of Gay and Lesbian Podcast
RadioPublic - General Audio Podcast Directory
RECAP - Podcasts for Educators
Replay Media Guide - Podcast directory
ShortOrange - submit page giving error message - use contact link
The Running Podcast List - Podcasts about Running - email submission top of page
Two Thumbs Up Media - General Podcast directory
US Government Podcasts - Submit form is dead - there are contact us links to use now
Vodcasts.TV - Video Podcast Directory
Women in Podcasting - Need to register to add.
If you would like your site added here - send an e-mail to [email protected]
DEAD POOL
Blackberry Podcasts - submission page is now dead.
Zune Marketplace - Submission currently offline - need to submit via email - podcasts at microsoft dot com
01Podcast.com - DEAD - French Audio / Video Podcast Directory
01Vlog.com - DEAD - Video Podcast Directory
2RSS.Com - DEAD - Podcast Directory
amigofish - gone to sleep with the fishes.
AOL Podcast Beta - Podcast Directory, read Terms First
A Podcast Like That - Audio and Video Podcast Directory
Apodo - Audios - Spanish Language Podcast Directory
AUDIOFEEDS.ORG - DEAD - Someone forgot to feed them
blawg - Directory of law related podcasts
Blinkx.tv - Guess they Blinkxed first
BlogExplosion - DEAD - It Imploded
Blog Resource - DEAD - Was not resourceful enough
BlogUniverse - Asking for $ - F That
Canada Podcasts.ca - Heading up to the Great White 404
Cast Register - DEAD - No need to register here
Castwing - They did not have a wing or a prayer
Church Podcasts - DEAD - Now in the afterlife
ClickCaster - No Longer taking submissions
Contentious - No Longer taking submissions
DailySplice - DEAD - Submits don't work
DIGG Podcast Beta - They buried this feature
DJconneXion - Dead - What you get for improper use of an X
Dramapod.com - Audio Drama Podcast directory
Elpodio.com - Spanish Language Podcasts Directory
EPN - The Education Podcast Network - Lesson Learned
EveryPodcast - DEAD - guess there were too many podcasts
Experience Podcasting - or not - DEAD
Family Friendly Podcasts - Dead - Not very podcaster friendly
Feed The Network - DEAD - Died of Starvation
Feeds4All - But Directory for no one
Feedzie - DEAD - Deadzie
FeedOoyoo - Asking for $ - F That
Fluctu8 - Dead - Too much Fluctu8'g
fonpodsa - DEAD - This one is phinished
genwi - no more podcast submissions
GetAPodcast.com - DEAD - Got a Tombstone
goFish - submission page gone
GospelPodcasting.com - Dead - Unlike the lord this one is not coming back.
Hwy777.com - Christian Podcast Directory
iBizRadio - No Biz Model - Dead
idiotvox - Directory w/ search, reviews & original content
iPodder.org - iDEAD
iPodderX - X'd out now Transistr
iTunes Links - DEAD - I predicted this death a long time ago
iTunesTracks.co.uk - DEAD - No tracks to follow now.
kedora - DEAD - Stick a feather in this fedora
Last 100 podCasts - DEAD - A century in the making.
Loomia - removed submission page
Lusocast - DEAD
Melodeo - Mobilcast - Podcast directory / network to supply podcasts on Mobile phones.
Music Only Podcasts - DEAD - Just playing taps now
My Podcast Center - DEAD - Not very Centric
Nature Podcasts - DEAD
Odeo - Podcast Directory
OOYHAA - DEAD
Open Media Network - DEAD - Closed Media Network
PCastBaby - DEAD - Thrown out with the bath water.
PCCaster - DEAD - Hello I'm a PC - ugggghhh - then silence
Personal Growth Podcast Directory - DEAD - Stopped Growing
Pluggd.com - Submission page gone - site to follow??
Pocketcasting - DEAD - just lintcasting now
Podanza - DEADanza
PodBean - Submit page 404'g - email [email protected]
Podblaster.net - DEAD
podblaze - Went out in a blaze of glory or not
podcast411 Directory - Spammed to death
Podcast.com - Podcast Directory. Scroll down for submission.
podcast.de - Uber Dead - German Language Podcast Directory
podcast.com.ar - El Dead - Spanish Language Podcasts Directory
podcast.net - DEAD - Swish
podcast Central - Dead even on the edges
Podcast Charts - DEAD - This one is off the charts
PodcastDirectory.com.au - DEAD
Podcast Empire - DEAD - The Emperor was missing more than clothes.
Podcast-es.org - El Dead - Spanish Language Podcast Directory
PodcastFerret - They just weaselled out on us.
PodcastFusion - DEAD - Maybe they should have tried fission
podcasting news - Stop the presses they stopped the site.
Podcasting-Station.com - Went off the Air
Podcast Pickle - DEAD - This one is Pickled and placed on the shelf
Podcast Player Online - Was not the player they thought they were
Podcasters Who's Who - Directory for podcasts. Free Registration required.
Podcasterworld.com - DEAD - Its a small world after all
PodcastFix - DEAD - the fix was in
Podcast Like That - DEAD like that
PodCastingList.org - DEAD - de-listed
Podcast in Guatemala - Podcasts from Guatemala
Podcast Mania - DEAD - Podcast Depresia
Podcast SA - South African Podcast directory
Podcast Salad - Video Podcast Directory
Podcast Sniper - DEAD - Someone Sniped them
Podcast Style - General Podcast Directory, email submission is needed - but seems not to be working. Soon to be DEAD
Podcast Shuffle - DEAD - Shorter lifespan than the Ickey Shuffle
PodcastVideos.org - DEAD
Pod Farm - DEAD - EIEIO
podfeeder - DEAD - no one feed it
podfeed.net - Podcast Directory with Voting
Podlog - English Podcast directory
Podlog.de - German Podcast directory
Podlook - Chinese Language Podcast Directory
Podmopolis - Podcast Directory
podnova.com - Directory combined with aggregator for one-click subscription
PodOmatic - no longer taking submissions
Podfeed.nl - Dutch Podcast Directory
PodGator.net - DEAD - Gator Bait
Pod-Planet - DEAD - same fate as Pluto
PodPusher - DEAD - Pushing up daisies
Podscope - Down Periscope down site
Podseek.net - DEAD - We searched but could not find.
PodSpider - DEAD - itsy bitsy
Podutainment - DEADutainment
Podzinger - submission page is gone, is site to follow?
PotKast - DAHEAD - Not winning any spelling bees in the Afterlife
PromoPicker - DEAD - Picked clean
Qpodder - Oh Snap it is dead
RSS Podcast Directory - Did not know the difference between RSS and CSS
RSS Network - DEAD - Submit page 404'g
SciFiPods - DEAD
Sahfor - podcast directory, looking for money or reciprocal link for you to be added. No reason to do either.
SingingFish - DEAD - AOL'd
Small Business Podcast Directory - Business was just too small
Speecha - General Podcast Directory
Sportpodcasts - DEAD - no games here
syndic8 - Dead - Lesson on why you don't spell your name with a number
Swell.am (App) - Apple Purchased this app and shut it down
The Hard Pod Cafe - Learned the Hard way this is not a good biz model
The podlounge - Too much lounging not enough podding
The Star Wars Podcast Network - DEAD - the Force was not with them
The Pod Ship - currently under maintenance
Tiny Nibbles - Sex Podcast directory, You will need to contact Violet Blue to get your show added.
UK Podcasts - UK Podcast Directory
TSFPN - DEAD
ViCasting - podcast directory - registration needed to enter podcast.
Vital Podcasts - DEAD - vital signs not good
VLOGDIR - DEAD
VLogList.com - DEAD - Added to the Deadpool List
Vodstock - DEAD - out of stock
Yahoo Podcast Beta - DEAD - In like a Lion out like a Lamb.
Yahoo Media Search - Can't even google it
YakkYakk.com - Akk Akk Akk - Dead
YouLoud - DEAD - was not loud enough
ZENcast - Nothing more Zen than Death |
// repo: andtan91/polars
import {NullValues} from "./datatypes";
import pli from "./internals/polars_internal";
import {DataFrame, dfWrapper} from "./dataframe";
import {isPath} from "./utils";
import {LazyDataFrame} from "./lazy/dataframe";
import {Readable, Stream} from "stream";
import {concat} from "./functions";
type ScanIPCOptions = {
numRows?: number;
cache?: boolean;
rechunk?: boolean;
}
type ScanParquetOptions = {
numRows?: number;
parallel?: boolean;
cache?: boolean;
rechunk?: boolean;
}
type ReadCsvOptions = {
batchSize?: number;
columns?: Array<string>;
commentChar?: string;
encoding?: "utf8" | "utf8-lossy";
endRows?: number;
hasHeader?: boolean;
ignoreErrors?: boolean;
inferSchemaLength?: number;
lowMemory?: boolean;
nullValues?: NullValues;
numThreads?: number;
parseDates?: boolean;
projection?: Array<number>;
quoteChar?: string;
rechunk?: boolean;
sep?: string;
startRows?: number;
};
type ReadJsonOptions = {
inferSchemaLength?: number;
batchSize?: number;
};
type ReadParquetOptions = {
columns?: string[];
projection?: number[];
numRows?: number;
parallel?: boolean;
rechunk?: boolean;
}
type ReadIPCOptions = {
columns?: string[];
projection?: number[];
numRows?: number;
}
const readCsvDefaultOptions: Partial<ReadCsvOptions> = {
inferSchemaLength: 50,
batchSize: 10000,
ignoreErrors: true,
hasHeader: true,
sep: ",",
rechunk: false,
startRows: 0,
encoding: "utf8",
lowMemory: false,
parseDates: true,
};
const readJsonDefaultOptions: Partial<ReadJsonOptions> = {
batchSize: 10000,
inferSchemaLength: 50
};
// utility to read streams as lines.
class LineBatcher extends Stream.Transform {
#lines: Buffer[];
#accumulatedLines: number;
#batchSize: number;
constructor(options) {
super(options);
this.#lines = [];
this.#accumulatedLines = 0;
this.#batchSize = options.batchSize;
}
_transform(chunk, _encoding, done) {
    let begin = 0;
    let i = 0;
while (i < chunk.length) {
if (chunk[i] === 10) { // '\n'
this.#accumulatedLines++;
if (this.#accumulatedLines == this.#batchSize) {
this.#lines.push(chunk.subarray(begin, i + 1));
this.push(Buffer.concat(this.#lines));
this.#lines = [];
this.#accumulatedLines = 0;
begin = i + 1;
}
}
i++;
}
this.#lines.push(chunk.subarray(begin));
done();
}
_flush(done) {
this.push(Buffer.concat(this.#lines));
done();
}
}
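// Usage sketch: LineBatcher emits newline-aligned buffers of roughly
// `batchSize` lines each, e.g.
//   someReadable.pipe(new LineBatcher({batchSize: 10000})).on("data", handleBatch);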
// helper functions
function readCSVBuffer(buff, options) {
return dfWrapper(pli.df.readCSVBuffer({...readCsvDefaultOptions, ...options, buff}));
}
function readCSVPath(path, options) {
return dfWrapper(pli.df.readCSVPath({...readCsvDefaultOptions, ...options, path}));
}
function readJSONBuffer(buff, options) {
return dfWrapper(pli.df.readJSONBuffer({...readJsonDefaultOptions, ...options, buff}));
}
function readJSONPath(path, options) {
return dfWrapper(pli.df.readJSONPath({...readJsonDefaultOptions, ...options, path}));
}
function readParquetBuffer(buff, options) {
return dfWrapper(pli.df.readParquetBuffer({...options, buff}));
}
function readParquetPath(path, options) {
return dfWrapper(pli.df.readParquetPath({...options, path}));
}
function readIPCBuffer(buff, options) {
return dfWrapper(pli.df.readIPCBuffer({...options, buff}));
}
function readIPCPath(path, options) {
return dfWrapper(pli.df.readIPCPath({...options, path}));
}
/**
* __Read a CSV file or string into a Dataframe.__
* ___
* @param pathOrBody - path or buffer or string
* - path: Path to a file or a file like string. Any valid filepath can be used. Example: `file.csv`.
* - body: String or buffer to be read as a CSV
* @param options
* @param options.inferSchemaLength -Maximum number of lines to read to infer schema. If set to 0, all columns will be read as pl.Utf8.
* If set to `null`, a full table scan will be done (slow).
* @param options.batchSize - Number of lines to read into the buffer at once. Modify this to change performance.
* @param options.hasHeader - Indicate if first row of dataset is header or not. If set to False first row will be set to `column_x`,
* `x` being an enumeration over every column in the dataset.
* @param options.ignoreErrors -Try to keep reading lines if some lines yield errors.
* @param options.endRows -After n rows are read from the CSV, it stops reading.
* During multi-threaded parsing, an upper bound of `n` rows
* cannot be guaranteed.
* @param options.startRows -Start reading after `startRows` position.
* @param options.projection -Indices of columns to select. Note that column indices start at zero.
* @param options.sep -Character to use as delimiter in the file.
* @param options.columns -Columns to select.
* @param options.rechunk -Make sure that all columns are contiguous in memory by aggregating the chunks into a single array.
* @param options.encoding -Allowed encodings: `utf8`, `utf8-lossy`. Lossy means that invalid utf8 values are replaced with `�` character.
* @param options.numThreads -Number of threads to use in csv parsing. Defaults to the number of physical cpu's of your system.
* @param options.dtype -Overwrite the dtypes during inference.
 * @param options.lowMemory - Reduce memory usage at the expense of performance.
* @param options.commentChar - character that indicates the start of a comment line, for instance '#'.
 * @param options.quoteChar - character that is used for csv quoting, default = ''. Set to null to turn special handling and escaping of quotes off.
* @param options.nullValues - Values to interpret as null values. You can provide a
* - `string` -> all values encountered equal to this string will be null
* - `Array<string>` -> A null value per column.
* - `Record<string,string>` -> An object or map that maps column name to a null value string.Ex. {"column_1": 0}
* @param options.parseDates -Whether to attempt to parse dates or not
* @returns DataFrame
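 * @example
 * ```
 * // a minimal sketch: an inline CSV body (not a file path) is parsed directly
 * > const df = pl.readCSV("a,b\n1,2\n3,4")
 * > df.shape
 * {height: 2, width: 2}
 * ```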
*/
export function readCSV(pathOrBody: string | Buffer, options?: Partial<ReadCsvOptions>): DataFrame;
export function readCSV(pathOrBody, options?) {
const extensions = [".tsv", ".csv"];
if (Buffer.isBuffer(pathOrBody)) {
return readCSVBuffer(pathOrBody, options);
}
if (typeof pathOrBody === "string") {
const inline = !isPath(pathOrBody, extensions);
if (inline) {
return readCSVBuffer(Buffer.from(pathOrBody, "utf-8"), options);
} else {
return readCSVPath(pathOrBody, options);
}
} else {
throw new Error("must supply either a path or body");
}
}
/**
* __Lazily read from a CSV file or multiple files via glob patterns.__
*
* This allows the query optimizer to push down predicates and
* projections to the scan level, thereby potentially reducing
* memory overhead.
* ___
* @param path path to a file
* @param options.hasHeader - Indicate if first row of dataset is header or not. If set to False first row will be set to `column_x`,
* `x` being an enumeration over every column in the dataset.
* @param options.sep -Character to use as delimiter in the file.
* @param options.commentChar - character that indicates the start of a comment line, for instance '#'.
 * @param options.quoteChar - character that is used for csv quoting, default = ''. Set to null to turn special handling and escaping of quotes off.
* @param options.startRows -Start reading after `startRows` position.
* @param options.nullValues - Values to interpret as null values. You can provide a
* - `string` -> all values encountered equal to this string will be null
* - `Array<string>` -> A null value per column.
* - `Record<string,string>` -> An object or map that maps column name to a null value string.Ex. {"column_1": 0}
* @param options.ignoreErrors -Try to keep reading lines if some lines yield errors.
* @param options.cache Cache the result after reading.
* @param options.inferSchemaLength -Maximum number of lines to read to infer schema. If set to 0, all columns will be read as pl.Utf8.
* If set to `null`, a full table scan will be done (slow).
* @param options.batchSize - Number of lines to read into the buffer at once. Modify this to change performance.
* @param options.endRows -After n rows are read from the CSV, it stops reading.
* During multi-threaded parsing, an upper bound of `n` rows
* cannot be guaranteed.
* @param options.rechunk -Make sure that all columns are contiguous in memory by aggregating the chunks into a single array.
 * @param options.lowMemory - Reduce memory usage at the expense of performance.
* ___
*
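 * @example
 * ```
 * // a lazy-scan sketch; "my.csv" is a hypothetical file path
 * > const df = pl.scanCSV("my.csv").select("a").collectSync()
 * ```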
*/
export function scanCSV(path: string, options?: Partial<ReadCsvOptions>): LazyDataFrame
export function scanCSV(path, options?) {
options = {...readCsvDefaultOptions, ...options};
return LazyDataFrame(pli.ldf.scanCSV({path, ...options}));
}
/**
* __Read a JSON file or string into a DataFrame.__
*
* _Note: Currently only newline delimited JSON is supported_
* @param pathOrBody - path or buffer or string
 *   - path: Path to a file or a file like string. Any valid filepath can be used. Example: `file.json`.
 *   - body: String or buffer to be read as JSON
* @param options
* @param options.inferSchemaLength -Maximum number of lines to read to infer schema. If set to 0, all columns will be read as pl.Utf8.
* If set to `null`, a full table scan will be done (slow).
* @param options.batchSize - Number of lines to read into the buffer at once. Modify this to change performance.
* @returns ({@link DataFrame})
* @example
* ```
* const jsonString = `
 * {"a": 1, "b": "foo", "c": 3}
* {"a": 2, "b": "bar", "c": 6}
* `
* > const df = pl.readJSON(jsonString)
* > console.log(df)
* shape: (2, 3)
* ╭─────┬─────┬─────╮
* │ a ┆ b ┆ c │
* │ --- ┆ --- ┆ --- │
* │ i64 ┆ str ┆ i64 │
* ╞═════╪═════╪═════╡
* │ 1 ┆ foo ┆ 3 │
* ├╌╌╌╌╌┼╌╌╌╌╌┼╌╌╌╌╌┤
* │ 2 ┆ bar ┆ 6 │
* ╰─────┴─────┴─────╯
* ```
*/
export function readJSON(pathOrBody: string | Buffer, options?: Partial<ReadJsonOptions>): DataFrame
export function readJSON(pathOrBody, options?) {
const extensions = [".ndjson", ".json", ".jsonl"];
if (Buffer.isBuffer(pathOrBody)) {
return readJSONBuffer(pathOrBody, options);
}
if (typeof pathOrBody === "string") {
const inline = !isPath(pathOrBody, extensions);
if (inline) {
return readJSONBuffer(Buffer.from(pathOrBody, "utf-8"), options);
} else {
return readJSONPath(pathOrBody, options);
}
} else {
throw new Error("must supply either a path or body");
}
}
/**
* Read into a DataFrame from a parquet file.
* @param pathOrBuffer
* Path to a file, list of files, or a file like object. If the path is a directory, that directory will be used
* as partition aware scan.
* @param options.columns Columns to select. Accepts a list of column names.
 * @param options.numRows Stop reading from the parquet file after reading ``numRows`` rows.
* @param options.parallel Read the parquet file in parallel. The single threaded reader consumes less memory.
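 * @example
 * ```
 * // a sketch with a hypothetical path; `columns` narrows the read
 * > const df = pl.readParquet("data.parquet", {columns: ["a", "b"]})
 * ```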
*/
export function readParquet(pathOrBody: string | Buffer, options?: ReadParquetOptions): DataFrame
export function readParquet(pathOrBody, options?) {
if (Buffer.isBuffer(pathOrBody)) {
return readParquetBuffer(pathOrBody, options);
}
if (typeof pathOrBody === "string") {
const inline = !isPath(pathOrBody, [".parquet"]);
if (inline) {
return readParquetBuffer(Buffer.from(pathOrBody, "utf-8"), options);
} else {
return readParquetPath(pathOrBody, options);
}
} else {
throw new Error("must supply either a path or body");
}
}
/**
* __Lazily read from a parquet file or multiple files via glob patterns.__
* ___
* This allows the query optimizer to push down predicates and projections to the scan level,
* thereby potentially reducing memory overhead.
 * @param path Path to a file or glob pattern
 * @param options.numRows Stop reading from the parquet file after reading ``numRows`` rows.
* @param options.cache Cache the result after reading.
* @param options.parallel Read the parquet file in parallel. The single threaded reader consumes less memory.
* @param options.rechunk In case of reading multiple files via a glob pattern rechunk the final DataFrame into contiguous memory chunks.
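 * @example
 * ```
 * // a sketch; the glob pattern is hypothetical
 * > const lf = pl.scanParquet("data/*.parquet", {cache: true})
 * ```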
*/
export function scanParquet(path: string, options?: ScanParquetOptions): LazyDataFrame
export function scanParquet(path, options?) {
return LazyDataFrame(pli.ldf.scanParquet({path, ...options}));
}
/**
* __Read into a DataFrame from Arrow IPC (Feather v2) file.__
* ___
* @param pathOrBody - path or buffer or string
* - path: Path to a file or a file like string. Any valid filepath can be used. Example: `file.ipc`.
* - body: String or buffer to be read as Arrow IPC
* @param options.columns Columns to select. Accepts a list of column names.
 * @param options.numRows Stop reading from the IPC file after reading ``numRows`` rows.
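 * @example
 * ```
 * // a sketch with a hypothetical path
 * > const df = pl.readIPC("file.ipc", {numRows: 100})
 * ```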
*/
export function readIPC(pathOrBody: string | Buffer, options?: ReadIPCOptions): DataFrame
export function readIPC(pathOrBody, options?) {
if (Buffer.isBuffer(pathOrBody)) {
return readIPCBuffer(pathOrBody, options);
}
if (typeof pathOrBody === "string") {
const inline = !isPath(pathOrBody, [".ipc"]);
if (inline) {
return readIPCBuffer(Buffer.from(pathOrBody, "utf-8"), options);
} else {
return readIPCPath(pathOrBody, options);
}
} else {
throw new Error("must supply either a path or body");
}
}
/**
* __Lazily read from an Arrow IPC (Feather v2) file or multiple files via glob patterns.__
* ___
* @param path Path to a IPC file.
* @param options.numRows Stop reading from IPC file after reading ``numRows``
* @param options.cache Cache the result after reading.
 * @param options.rechunk Reallocate to contiguous memory when all chunks/files are parsed.
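 * @example
 * ```
 * // a sketch with a hypothetical path
 * > const lf = pl.scanIPC("file.ipc", {numRows: 100})
 * ```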
*/
export function scanIPC(path: string, options?: ScanIPCOptions): LazyDataFrame
export function scanIPC(path, options?) {
return LazyDataFrame(pli.ldf.scanIPC({path, ...options}));
}
/**
* __Read a stream into a Dataframe.__
*
* **Warning:** this is much slower than `scanCSV` or `readCSV`
*
* This will consume the entire stream into a single buffer and then call `readCSV`
* Only use it when you must consume from a stream, or when performance is not a major consideration
*
* ___
* @param stream - readable stream containing csv data
* @param options
* @param options.inferSchemaLength -Maximum number of lines to read to infer schema. If set to 0, all columns will be read as pl.Utf8.
* If set to `null`, a full table scan will be done (slow).
* @param options.batchSize - Number of lines to read into the buffer at once. Modify this to change performance.
* @param options.hasHeader - Indicate if first row of dataset is header or not. If set to False first row will be set to `column_x`,
* `x` being an enumeration over every column in the dataset.
* @param options.ignoreErrors -Try to keep reading lines if some lines yield errors.
* @param options.endRows -After n rows are read from the CSV, it stops reading.
* During multi-threaded parsing, an upper bound of `n` rows
* cannot be guaranteed.
* @param options.startRows -Start reading after `startRows` position.
* @param options.projection -Indices of columns to select. Note that column indices start at zero.
* @param options.sep -Character to use as delimiter in the file.
* @param options.columns -Columns to select.
* @param options.rechunk -Make sure that all columns are contiguous in memory by aggregating the chunks into a single array.
* @param options.encoding -Allowed encodings: `utf8`, `utf8-lossy`. Lossy means that invalid utf8 values are replaced with `�` character.
* @param options.numThreads -Number of threads to use in csv parsing. Defaults to the number of physical cpu's of your system.
* @param options.dtype -Overwrite the dtypes during inference.
 * @param options.lowMemory - Reduce memory usage at the expense of performance.
* @param options.commentChar - character that indicates the start of a comment line, for instance '#'.
 * @param options.quoteChar - character that is used for csv quoting, default = ''. Set to null to turn special handling and escaping of quotes off.
* @param options.nullValues - Values to interpret as null values. You can provide a
* - `string` -> all values encountered equal to this string will be null
* - `Array<string>` -> A null value per column.
* - `Record<string,string>` -> An object or map that maps column name to a null value string.Ex. {"column_1": 0}
* @param options.parseDates -Whether to attempt to parse dates or not
* @returns Promise<DataFrame>
*
* @example
* ```
* >>> const readStream = new Stream.Readable({read(){}});
* >>> readStream.push(`a,b\n`);
* >>> readStream.push(`1,2\n`);
* >>> readStream.push(`2,2\n`);
* >>> readStream.push(`3,2\n`);
* >>> readStream.push(`4,2\n`);
* >>> readStream.push(null);
*
* >>> pl.readCSVStream(readStream).then(df => console.log(df));
* shape: (4, 2)
* ┌─────┬─────┐
* │ a ┆ b │
* │ --- ┆ --- │
* │ i64 ┆ i64 │
* ╞═════╪═════╡
* │ 1 ┆ 2 │
* ├╌╌╌╌╌┼╌╌╌╌╌┤
* │ 2 ┆ 2 │
* ├╌╌╌╌╌┼╌╌╌╌╌┤
* │ 3 ┆ 2 │
* ├╌╌╌╌╌┼╌╌╌╌╌┤
* │ 4 ┆ 2 │
* └─────┴─────┘
* ```
*/
export function readCSVStream(stream: Readable, options?: ReadCsvOptions): Promise<DataFrame>
export function readCSVStream(stream, options?) {
let batchSize = options?.batchSize ?? 10000;
let count = 0;
let end = options?.endRows ?? Number.POSITIVE_INFINITY;
return new Promise((resolve, reject) => {
const s = stream.pipe(new LineBatcher({batchSize}));
const chunks: any[] = [];
s.on("data", (chunk) => {
// early abort if 'end rows' is specified
if (count <= end) {
chunks.push(chunk);
} else {
s.end();
}
count += batchSize;
}).on("end", () => {
try {
let buff = Buffer.concat(chunks);
const df = readCSVBuffer(buff, options);
resolve(df);
} catch (err) {
reject(err);
}
});
});
}
/**
* __Read a newline delimited JSON stream into a DataFrame.__
*
* @param stream - readable stream containing json data
* @param options
* @param options.inferSchemaLength -Maximum number of lines to read to infer schema. If set to 0, all columns will be read as pl.Utf8.
* If set to `null`, a full table scan will be done (slow).
* Note: this is done per batch
* @param options.batchSize - Number of lines to read into the buffer at once. Modify this to change performance.
* @example
* ```
* >>> const readStream = new Stream.Readable({read(){}});
* >>> readStream.push(`${JSON.stringify({a: 1, b: 2})} \n`);
* >>> readStream.push(`${JSON.stringify({a: 2, b: 2})} \n`);
* >>> readStream.push(`${JSON.stringify({a: 3, b: 2})} \n`);
* >>> readStream.push(`${JSON.stringify({a: 4, b: 2})} \n`);
* >>> readStream.push(null);
*
* >>> pl.readJSONStream(readStream).then(df => console.log(df));
* shape: (4, 2)
* ┌─────┬─────┐
* │ a ┆ b │
* │ --- ┆ --- │
* │ i64 ┆ i64 │
* ╞═════╪═════╡
* │ 1 ┆ 2 │
* ├╌╌╌╌╌┼╌╌╌╌╌┤
* │ 2 ┆ 2 │
* ├╌╌╌╌╌┼╌╌╌╌╌┤
* │ 3 ┆ 2 │
* ├╌╌╌╌╌┼╌╌╌╌╌┤
* │ 4 ┆ 2 │
* └─────┴─────┘
* ```
*/
export function readJSONStream(stream: Readable, options?: ReadJsonOptions): Promise<DataFrame>
export function readJSONStream(stream, options?) {
let batchSize = options?.batchSize ?? 10000;
return new Promise((resolve, reject) => {
const chunks: any[] = [];
stream
.pipe(new LineBatcher({batchSize}))
.on("data", (chunk) => {
try {
const df = readJSONBuffer(chunk, options);
chunks.push(df);
} catch (err) {
reject(err);
}
})
.on("end", () => {
try {
const df = concat(chunks);
resolve(df);
} catch (err) {
reject(err);
}
});
});
}
|
Image caption: Roshen's owner Petro Poroshenko is a pro-European politician
Russian authorities have taken over a Ukrainian-owned sweet factory in the southern Russian city of Lipetsk, Ukraine's government says.
Russian riot police moved in on the Roshen factory and halted production on Wednesday, the finance and economic ministries said.
A spokesperson for the firm confirmed the "plant is closed".
Roshen is controlled by Ukrainian businessman and pro-European MP Petro Poroshenko.
He is planning to stand in May's presidential elections.
Ukraine's finance and economic ministries said Russian authorities had shown no documentation giving them the right "to burst on to the company's property and halt production".
Alexander Zolotarev, press spokesman for Roshen CIS, said the Lipetsk plant closed after police arrived and sent all the workers home.
'Chocolate war'
Last July, Russia's consumer watchdog banned sales of Roshen sweets, citing health concerns.
But many observers thought it to be Moscow's response to Mr Poroshenko's political activities rather than concern for the welfare of Russia's chocolate fans. The ban was eventually lifted in November.
Mr Poroshenko was among opposition figures in Ukraine who called for the country to resist Russia's invitation to join the Moscow-led Customs Union and instead integrate with Europe.
Image caption: Petro Poroshenko has occupied top posts in Ukrainian politics
Ukraine's seventh richest man, according to Forbes magazine, with an estimated fortune of $1.3bn (£787m), Mr Poroshenko was a supporter of the Maidan protests that ultimately led to the overthrow of Ukraine's president Viktor Yanukovych last month.
The factory's closure comes at a time of high tensions between Ukraine and Russia following the outcome of Sunday's referendum in Crimea to secede from Ukraine and join the Russian Federation.
Angered by what they see as Russian intervention on Ukrainian territory, the US and the EU are considering escalating sanctions against those they suspect of playing a part in the crisis.
Russia says Crimeans voted overwhelmingly to join the Federation and so the result of the referendum should be respected. Foreign Minister Sergei Lavrov said sanctions would be "illegitimate". |
// src/main/java/depindr/json/JsonFingerprintParser.java
package depindr.json;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import depindr.model.entity.Dependency;
import lombok.extern.slf4j.Slf4j;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@Slf4j
public class JsonFingerprintParser {
public List<Dependency> parseTechnologiesFile(String filePath) {
JsonConfigurationDTO configurationDTO = getConfigurationDTO(filePath);
if (configurationDTO == null)
return Collections.emptyList();
return configurationDTO.getTechnologies().stream()
.map(DependencyJsonDTO::toDependency)
.collect(Collectors.toList());
}
public JsonConfigurationDTO getConfigurationDTO(String filePath){
Gson gson = new Gson();
try {
return gson.fromJson(new FileReader(Paths.get(filePath).toFile()), JsonConfigurationDTO.class);
} catch (FileNotFoundException e) {
log.error("Could not read JSON file!", e);
        } catch (JsonSyntaxException e) {
            log.error("File " + filePath + " could not be parsed as a JSON Technology file!", e);
}
return null;
}
}
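// Usage sketch (hypothetical file path):
//   List<Dependency> deps = new JsonFingerprintParser().parseTechnologiesFile("technologies.json");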
|
# NOTE: Base, engine, and Face are assumed to be the project's SQLAlchemy
# declarative base, engine, and model, defined elsewhere in this package.
def create_table() -> None:
Base.metadata.create_all(engine)
table: str = Face.__tablename__
print(table) |
// LogData - wrapper function to use golang's built in logger and append all operational data to a central log file
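//
// Usage sketch (assumes "log" and "os" are imported by the surrounding file):
//   _ = LogData("operator connected") // appends a timestamped line to .lupo.log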
func LogData(data string) error {
file, err := os.OpenFile(".lupo.log", os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0644)
if err != nil {
return err
}
defer file.Close()
log.SetOutput(file)
log.Print(data)
return nil
} |
def transform(self, X):
if not isinstance(X, dask.array.core.Array):
raise ValueError("Only dask.Array is supported for X")
return self._transform(
X, n_dims=2, delayed=True, output_collection_type="cupy"
) |
/**
* This function prints out the vector to the file specified by name.
* The vector is printed out in a triangular format, the same as the way
* the arrays are displayed in matrices.h. This function is for testing purposes.
* @param temp
* @param name
*/
void SubMatrix::printInFormat(vector<short>& temp, char* name)
{
char nameOfFile[30];
strcpy(nameOfFile, name);
strcat(nameOfFile, ".out");
ofstream outfile(nameOfFile);
if(!outfile)
cerr<<"oops failed to open !!!\n";
outfile<<"short "<<name<<"[]{\n";
int numOnCurrentLine = 1;
int soFar = 0;
for(int i = 0; i < (int)temp.size(); i++)
{
if(soFar == numOnCurrentLine)
{
outfile<<"\n";
soFar = 0;
numOnCurrentLine++;
}
		if((temp[i] > 9) || (temp[i] < 0))
		{
			outfile <<" "<< temp[i]<<",";
		}
		else
		{
			// pad single-digit values with an extra space so the columns align
			outfile <<"  "<< temp[i]<<",";
		}
soFar++;
if((i + 1) == (int)temp.size() - 1)
{
			if((temp[i+1] > 9) || (temp[i+1] < 0))
			{
				outfile <<" "<< temp[i + 1]<<"};\n";
			}
			else
			{
				// extra space keeps a final single-digit entry aligned
				outfile <<"  "<< temp[i + 1]<<"};\n";
			}
break;
}
}
ofstream outfile2("temp.out");
for(int i = 0; i < (int)temp.size(); i++)
{
outfile2 << temp[i] << " ";
}
} |
/**
* @class Reader
* @brief A class to read paces from an ASCII file.
*/
class Reader {
ifstream currentFile;
public:
vector<Pace *> readAllFile(string filename);
Reader(string filename);
bool openFile(string filename);
Pace * readNext();
void closeFile();
}; |
const { version } = require('../package.json')
require('isomorphic-fetch')
const defaultInit = {
headers: {
'User-Agent': `SaaSkit ${version}`,
'Authorization': `Bearer ${process.env.SAASKIT_SECRET ?? 'anonymous'}`
}
}
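// Usage sketch (hypothetical endpoint): the default headers above are merged into every request.
// fetcher("https://api.example.com/users/1").then(console.log);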
export const fetcher = (url: string, init?: RequestInit) => fetch(url, {...defaultInit, ...init}).then(res => res.json()) |
Review article: Seymour Lubetzky Writings on the classical art of cataloguing
This volume is of major importance for much more than its historical value in the annals of cataloguing. I think 'classical' in this context should be taken as signifying ideas firmly based on consistent reasoning, not just as traditional cataloguing in comparison with more 'mechanical' approaches to the topic, or as a practice fixed in some bygone 'golden age'. There is no timewarp here: the skill and clarity of Lubetzky's views, his expression of them and his emphasis on objectives and principles, make his writings a useful model in any current re-thinking of access provision by author and title. |
print("Bad" if [sum([x[i]==x[i+1] for i in range(len(x)-1)]) for x in [str(input())]]!=[0] else "Good") |
def seg_count_file(self):
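        """Build per-sample ploidy placeholders, a chromosome -> bin-coordinate map,
        and the segment copy-number array from the tab-delimited input file."""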
prior_ploidy = {}
bin_tracking_dict = Tool_Box.VivifiedDictionary()
line_num = 0
seg_copy_array = self.array_builder()
seg_count = list(csv.reader(open(self.input_file), delimiter='\t'))
for line in seg_count:
if line_num > 0:
bin_tracking_dict[line[0]][line_num] = (line[1], line[2])
elif line_num == 0:
label_list = line
for i in range(len(label_list)):
if i > 2:
prior_ploidy[label_list[i]] = [-1, False, 0, 0, 0]
line_num += 1
if not eval(self.chrY):
with suppress(KeyError):
bin_tracking_dict.pop("chrY")
return prior_ploidy, bin_tracking_dict, seg_copy_array |
/**
* This is the implementation of the reader for multiturn data
*
* @author kfuchsbe
*
*/
public class MultiturnMeasurementReaderImpl implements
MultiturnMeasurementReader, AlohaBeanFactoryAware,
MachineElementsManagerAware {
/** the bean-factory to use to create other objects */
private AlohaBeanFactory alohaBeanFactory;
/** the data reader to use for reading the data */
private MultiturnParser multiturnParser;
/** the machineElements manager */
private MachineElementsManager machineElementsManager;
/**
* a file filter for dispersion-data
*/
private FileFilter fileFilter = new FileFilter() {
@Override
public boolean accept(File f) {
if (f.isDirectory()) {
return true;
} else {
return isHandling(f);
}
}
@Override
public String getDescription() {
return MultiturnMeasurementReaderImpl.this.getDescription();
}
};
@InitMethod
public void init() {
this.multiturnParser = getAlohaBeanFactory().create(
MultiturnParserImpl.class);
}
@Override
public MultiturnMeasurement read(List<File> files,
ModelDelegate modelDelegate, MeasurementReaderOptions options) throws ReaderException {
if ((files.size() < 1) || (files.size() > 2)) {
throw new ReaderException(
files.size()
+ " files were given but this reader can only handle 1 or 2 files.\r\n"
+ "When 2 files are given one must contain the horizontal and one the vertical data.");
}
MultiturnData data = getAlohaBeanFactory().create(
MultiturnDataImpl.class);
String name = "";
for (File file : files) {
try {
if (name.length() > 0) {
name += ", ";
}
name += file.getName();
getMultiturnParser().parse(file, data);
} catch (MultiturnParserException e) {
throw new ReaderException(
"Could not load multiturn data from file '"
+ file.getAbsolutePath() + "'.", e);
}
}
/* ensure that the unavailable bpms are deactivated */
getMachineElementsManager().activateAvailableMonitors(
Arrays.asList(new MultiturnData[] { data }));
MultiturnDifferenceDataImpl diffData = getAlohaBeanFactory().create(
MultiturnDifferenceDataImpl.class);
MultiturnMeasurement measurement = new MultiturnMeasurementImpl(name,
modelDelegate, data, diffData);
getAlohaBeanFactory().configure(measurement);
diffData.setMeasurement(measurement);
return measurement;
}
@Override
public String getDescription() {
return "Multiturn measurement data";
}
@Override
public FileFilter getFileFilter() {
return this.fileFilter;
}
@Override
public boolean isHandling(List<File> files) {
if ((files.size() < 1) || (files.size() > 2)) {
return false;
}
for (File file : files) {
if (!isHandling(file)) {
return false;
}
}
return true;
}
private boolean isHandling(File file) {
return file.getName().toLowerCase().endsWith(".mtdata");
}
@Override
public void setAlohaBeanFactory(AlohaBeanFactory alohaBeanFactory) {
this.alohaBeanFactory = alohaBeanFactory;
}
private AlohaBeanFactory getAlohaBeanFactory() {
return this.alohaBeanFactory;
}
private MultiturnParser getMultiturnParser() {
return multiturnParser;
}
@Override
public void setMachineElementsManager(
MachineElementsManager machineElementsManager) {
this.machineElementsManager = machineElementsManager;
}
private MachineElementsManager getMachineElementsManager() {
return this.machineElementsManager;
}
@Override
public boolean requiresOptions() {
return false;
}
@Override
public URI proposedModelDefinitionUri(List<File> files) {
return null;
}
} |
import {Component, OnDestroy, OnInit} from '@angular/core';
import {DatabaseService} from './services/database.service';
import {SettingsService} from './services/settings.service';
@Component({
selector: 'app-strongbox-database',
templateUrl: './app.component.html',
styleUrls: ['./app.component.scss']
})
export class AppComponent implements OnInit, OnDestroy {
constructor(private databaseService: DatabaseService, private settingsService: SettingsService) {
}
ngOnInit(): void {
if (this.settingsService.getSavedConfigurations().length >= 1) {
// Autoconnect to the first database source
this.databaseService.connect(this.settingsService.getConfigurationByIndex(0));
}
}
ngOnDestroy(): void {
this.databaseService.disconnect();
}
}
|
package release
import (
"context"
"yunion.io/x/onecloud/pkg/httperrors"
"yunion.io/x/onecloud/pkg/mcclient"
"yunion.io/x/pkg/errors"
"yunion.io/x/kubecomps/pkg/kubeserver/api"
"yunion.io/x/kubecomps/pkg/kubeserver/models"
)
func init() {
models.GetReleaseManager().RegisterDriver(newInternalDriver())
}
func newInternalDriver() models.IReleaseDriver {
return new(internalDriver)
}
type internalDriver struct{}
func (d *internalDriver) GetType() api.RepoType {
return api.RepoTypeInternal
}
func (d *internalDriver) ValidateCreateData(ctx context.Context, userCred mcclient.TokenCredential, ownerCred mcclient.IIdentityProvider, data *api.ReleaseCreateInput) (*api.ReleaseCreateInput, error) {
if data.NamespaceId != "" {
return nil, httperrors.NewNotAcceptableError("%s release can not specify namespace", d.GetType())
}
if data.ClusterId != "" {
return nil, httperrors.NewNotAcceptableError("%s release can not specify cluster", d.GetType())
}
data.NamespaceId = ownerCred.GetProjectId()
sysCls, err := models.ClusterManager.GetSystemCluster()
if err != nil {
return nil, err
}
if sysCls == nil {
return nil, httperrors.NewNotFoundError("system cluster not found")
}
data.ClusterId = sysCls.GetId()
nsData := new(api.NamespaceCreateInputV2)
nsData.Name = ownerCred.GetProjectId()
nsData.ClusterId = sysCls.GetId()
ns, err := models.GetNamespaceManager().EnsureNamespace(ctx, userCred, ownerCred, sysCls, nsData)
if err != nil {
return nil, errors.Wrap(err, "ensure namespace")
}
data.NamespaceId = ns.GetId()
return data, nil
}
func (d *internalDriver) CustomizeCreate(ctx context.Context, userCred mcclient.TokenCredential, ownerCred mcclient.IIdentityProvider, release *models.SRelease, data *api.ReleaseCreateInput) error {
release.ClusterId = data.ClusterId
release.NamespaceId = data.NamespaceId
return nil
}
|
/*
* Copyright (C) 2018 Veritas Technologies LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fluidops.fedx.evaluation;
import java.util.function.Supplier;
import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.common.iteration.EmptyIteration;
import org.eclipse.rdf4j.common.iteration.Iterations;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.BooleanQuery;
import org.eclipse.rdf4j.query.GraphQuery;
import org.eclipse.rdf4j.query.MalformedQueryException;
import org.eclipse.rdf4j.query.Operation;
import org.eclipse.rdf4j.query.Query;
import org.eclipse.rdf4j.query.QueryEvaluationException;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.impl.EmptyBindingSet;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.RepositoryException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fluidops.fedx.algebra.ExclusiveGroup;
import com.fluidops.fedx.endpoint.Endpoint;
import com.fluidops.fedx.evaluation.iterator.CloseDependentConnectionIteration;
import com.fluidops.fedx.evaluation.iterator.GraphToBindingSetConversionIteration;
import com.fluidops.fedx.evaluation.iterator.SingleBindingSetIteration;
import com.fluidops.fedx.exception.ExceptionUtil;
import com.fluidops.fedx.monitoring.Monitoring;
import com.fluidops.fedx.structures.QueryType;
import com.fluidops.fedx.util.FedXUtil;
import com.fluidops.fedx.util.QueryStringUtil;
public abstract class TripleSourceBase implements TripleSource
{
private static final Logger log = LoggerFactory.getLogger(TripleSourceBase.class);
protected final Monitoring monitoringService;
protected final Endpoint endpoint;
public TripleSourceBase(Monitoring monitoring, Endpoint endpoint) {
this.monitoringService = monitoring;
this.endpoint = endpoint;
}
@Override
public CloseableIteration<BindingSet, QueryEvaluationException> getStatements(
String preparedQuery, QueryType queryType)
throws RepositoryException, MalformedQueryException,
QueryEvaluationException
{
return withConnection((conn, resultHolder) -> {
switch (queryType) {
case SELECT:
monitorRemoteRequest();
TupleQuery tQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, preparedQuery);
applyMaxExecutionTimeUpperBound(tQuery);
disableInference(tQuery);
resultHolder.set(tQuery.evaluate());
return;
case CONSTRUCT:
monitorRemoteRequest();
GraphQuery gQuery = conn.prepareGraphQuery(QueryLanguage.SPARQL, preparedQuery);
applyMaxExecutionTimeUpperBound(gQuery);
disableInference(gQuery);
resultHolder.set(new GraphToBindingSetConversionIteration(gQuery.evaluate()));
return;
case ASK:
monitorRemoteRequest();
boolean hasResults = false;
try (RepositoryConnection _conn = conn) {
BooleanQuery bQuery = _conn.prepareBooleanQuery(QueryLanguage.SPARQL, preparedQuery);
applyMaxExecutionTimeUpperBound(bQuery);
disableInference(bQuery);
hasResults = bQuery.evaluate();
}
resultHolder.set(booleanToBindingSetIteration(hasResults));
return;
default:
throw new UnsupportedOperationException("Operation not supported for query type " + queryType);
}
});
}
@Override
public boolean hasStatements(Resource subj,
IRI pred, Value obj, Resource... contexts) throws RepositoryException
{
try (RepositoryConnection conn = endpoint.getConnection()) {
return conn.hasStatement(subj, pred, obj, false, contexts);
}
}
@Override
public boolean hasStatements(ExclusiveGroup group, BindingSet bindings)
throws RepositoryException, MalformedQueryException,
QueryEvaluationException {
monitorRemoteRequest();
String preparedAskQuery = QueryStringUtil.askQueryString(group, bindings);
try (RepositoryConnection conn = endpoint.getConnection()) {
BooleanQuery query = conn.prepareBooleanQuery(QueryLanguage.SPARQL, preparedAskQuery);
disableInference(query);
applyMaxExecutionTimeUpperBound(query);
return query.evaluate();
}
}
protected void monitorRemoteRequest() {
monitoringService.monitorRemoteRequest(endpoint);
}
private CloseableIteration<BindingSet, QueryEvaluationException> booleanToBindingSetIteration(boolean hasResult) {
if (hasResult)
return new SingleBindingSetIteration(EmptyBindingSet.getInstance());
return new EmptyIteration<BindingSet, QueryEvaluationException>();
}
/**
* Set includeInference to disabled explicitly.
*
* @param query
*/
protected void disableInference(Query query) {
// set includeInferred to false explicitly
try {
query.setIncludeInferred(false);
} catch (Exception e) {
log.debug("Failed to set include inferred: " + e.getMessage());
log.trace("Details:", e);
}
}
/**
* Apply an upper bound of the maximum execution time using
* {@link FedXUtil#applyMaxQueryExecutionTime(Operation)}.
*
* @param operation the operation
*/
protected void applyMaxExecutionTimeUpperBound(Operation operation) {
FedXUtil.applyMaxQueryExecutionTime(operation);
}
private <T> CloseableIteration<T, QueryEvaluationException> closeConn(RepositoryConnection dependentConn,
CloseableIteration<T, QueryEvaluationException> inner) {
return new CloseDependentConnectionIteration<T>(inner, dependentConn);
}
/**
* Convenience method to perform an operation on a {@link RepositoryConnection}.
* This method takes care for closing resources as well error handling. The
* resulting iteration has to be supplied to the {@link ResultHolder}.
*
* @param operation the {@link ConnectionOperation}
* @return the resulting iteration
*/
protected <T> CloseableIteration<T, QueryEvaluationException> withConnection(ConnectionOperation<T> operation) {
ResultHolder<T> resultHolder = new ResultHolder<>();
RepositoryConnection conn = endpoint.getConnection();
try {
operation.perform(conn, resultHolder);
CloseableIteration<T, QueryEvaluationException> res = resultHolder.get();
// do not wrap Empty Iterations
if (res instanceof EmptyIteration) {
conn.close();
return res;
}
return closeConn(conn, res);
} catch (Throwable t) {
// handle all other exception case
Iterations.closeCloseable(resultHolder.get());
conn.close();
throw ExceptionUtil.traceExceptionSource(endpoint, t, "");
}
}
/**
* Interface defining the operation to be perform on the connection
*
* <p>
* Typical pattern
* </p>
*
* <pre>
* CloseableIteration<BindingSet, QueryEvaluationException> res = withConnection((conn, resultHolder) -> {
* // do something with conn
* resultHolder.set(...)
* });
*
* </pre>
*
* @author <NAME>
*
* @param <T>
* @see TripleSourceBase#withConnection(ConnectionOperation)
*/
protected static interface ConnectionOperation<T> {
public void perform(RepositoryConnection conn, ResultHolder<T> resultHolder);
}
/**
* Holder for a result iteration to be used with
* {@link TripleSourceBase#withConnection(ConnectionOperation)}. Note that the
* result holder should also be set with temporary results to properly allow
* error handling.
*
* @author <NAME>
*
* @param <T>
*/
protected static class ResultHolder<T> implements Supplier<CloseableIteration<T, QueryEvaluationException>> {
protected CloseableIteration<T, QueryEvaluationException> result;
public void set(CloseableIteration<T, QueryEvaluationException> result) {
this.result = result;
}
@Override
public CloseableIteration<T, QueryEvaluationException> get() {
return result;
}
}
}
|
Interphase FISH analysis of cell cycle genes in asbestos-treated human mesothelial cells (HMC), SV40-transformed HMC (MeT-5A) and mesothelioma cells (COLO).
The epidemiologic association between asbestos exposure and human malignant mesothelioma is well established. However, the molecular mechanisms linking asbestos exposure of humans and the subsequent mesothelioma formation is not well understood. The most frequent genetic changes found so far in human malignant mesothelioma (HMM) are deletions and point mutations in the tumor suppressor genes p16INK4a and NF2. Whereas homozygous deletions appear to be the predominant mechanism leading to p16/CDKN2A inactivation, inactivating point mutations coupled with allelic loss mainly occur at the NF2 locus. In the present study, asbestos-treated human mesothelial cells (HMC), SV40-transformed human mesothelial cells (MeT-5A) and a human mesothelioma cell line (COLO) were investigated for genetic changes of cell cycle genes (cyclin D1, p16INK4a, RB1, CDK2) using multicolor fluorescence in situ hybridization (mFISH) in interphase cells. The results show that cyclin D1 is unaffected in all investigated cells. The p16INK4a gene locus was shown to be mutated in COLO cells but not in HMC. After labeling of CDK2 and RB1, hemizygous loss of one allele of each gene was observed in asbestos-treated HMC whereas gene amplification of these genes was detectable in MeT-5A and COLO cells. Our data indicate that disarrangement of the RB1 dependent pathway seems to be involved in mesothelioma formation. |
/**
* Store tickets.
*
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
*/
public class StoreTickets extends GrammarAction<KrbCredContainer>
{
/** The logger */
private static final Logger LOG = LoggerFactory.getLogger( StoreTickets.class );
/** Speedup for logs */
private static final boolean IS_DEBUG = LOG.isDebugEnabled();
/**
* {@inheritDoc}
*/
public void action( KrbCredContainer krbCredContainer ) throws DecoderException
{
TLV tlv = krbCredContainer.getCurrentTLV();
// The Length should not be null
if ( tlv.getLength() == 0 )
{
LOG.error( I18n.err( I18n.ERR_01308_ZERO_LENGTH_TLV ) );
// This will generate a PROTOCOL_ERROR
throw new DecoderException( I18n.err( I18n.ERR_01309_EMPTY_TLV ) );
}
// decoder for Ticket
Asn1Decoder decoder = new Asn1Decoder();
// Ticket container
TicketContainer ticketContainer = new TicketContainer( krbCredContainer.getStream() );
krbCredContainer.rewind();
// decode Ticket
decoder.decode( krbCredContainer.getStream(), ticketContainer );
Ticket ticket = ticketContainer.getTicket();
// add Ticket to the list of tickets
krbCredContainer.getKrbCred().addTicket( ticket );
// Update the expected length for the current TLV
tlv.setExpectedLength( tlv.getExpectedLength() - tlv.getLength() );
// Update the parent
krbCredContainer.updateParent();
if ( IS_DEBUG )
{
LOG.debug( "Ticket : {}", ticket );
}
}
} |
// src/pages/random/index.ts
import "./components/getRandomFromForm";
import "./components/getYesOrNoForm";
import "../../components/rm-tabs/components/rm-tab";
import "./components/getRandomNumberForm";
import "../../components/pages-navigation-menu";
import "../../components/rm-tabs";
import { LitElement, html, css, TemplateResult } from 'lit';
import { cache } from 'lit/directives/cache.js';
import { customElement, property } from 'lit/decorators.js';
import { commonStyles, pageStyles } from '../../styles/common';
import { EPageRandomTabs, pageRandomTabs, tagName } from "./definitions";
import { cssFlexFullAlign, size, cssSquare } from "../../styles/utils";
import { svgRandomExperiment } from "../../components/rm-icon/icons";
import { TRmTab } from "../../components/rm-tabs/definitions";
@customElement(tagName)
export class PageExperimentRandom extends LitElement {
@property() public selectedTabName = EPageRandomTabs.YES_NO;
render(): TemplateResult {
return html`
<div class="page">
<pages-navigation-menu></pages-navigation-menu>
<div class="page-content">
<h2 class="page-header">
<rm-icon class="header-icon" .icon=${svgRandomExperiment}></rm-icon>
Random tools
</h2>
<div class="page-tabs">
<rm-tabs
.tabs=${pageRandomTabs}
.selectedTabName=${this.selectedTabName}
.onTabSelected=${this.onTabSelected}
></rm-tabs>
</div>
${cache(this.renderTabContent())}
</div>
</div>
`;
}
protected onTabSelected = (tab: TRmTab): void => {
this.selectedTabName = tab.name as EPageRandomTabs;
};
protected renderTabContent = (): TemplateResult => {
switch (this.selectedTabName) {
case (EPageRandomTabs.YES_NO):
return html`<get-yes-or-no-form></get-yes-or-no-form>`;
case (EPageRandomTabs.RANDOM_FROM):
return html`<get-random-from-form></get-random-from-form>`;
case (EPageRandomTabs.RANDOM_NUMBER):
return html`<get-random-number-form></get-random-number-form>`;
}
};
static styles = css/* language=css */ `
${commonStyles}
${pageStyles}
.page-header {
${cssFlexFullAlign()};
}
.page-tabs {
margin-bottom: ${size(2)};
}
.header-icon {
${cssSquare(6)};
margin-right: ${size(1)};
}
`;
}
declare global {
interface HTMLElementTagNameMap {
[tagName]: PageExperimentRandom;
}
}
|
/**
* Return the file name for a given table name.
*
* @param tableName The table name
* @return The file name
*/
protected final String fileNameForTable(String tableName) {
String filename = tableNameToFileNameMap.get(tableName.toUpperCase());
if (filename == null) throw new IllegalArgumentException("No such table [" + tableName + "]");
return filename;
} |
Abstract P232: Associations Between Income And Education With ASCVD Risk, NHANES, 1999-2018
Introduction:
Factors used to calculate cardiovascular disease (CVD) risk scores include age, sex, race, cholesterol, blood pressure, diabetes, and smoking status, but exclude social determinants of health (SDOH).
Objectives:
To determine whether SDOH factors are independently associated with CVD risk.
Methods:
The National Health and Nutrition Examination Survey is an annual, cross-sectional representative survey of the US adult population. We combined years 1999-2018 and included participants age 40-79 without CVD history, and with information to calculate CVD risk (n=23,877). Ten-year risk of ASCVD was calculated using the AHA/ACC pooled cohort equation. SDOH were conceptualized using self-reported educational attainment and income. We used linear regression models to estimate the association between SDOH and ASCVD risk adjusting for age, sex, race, marital status, and insurance. All analyses accounted for the complex survey design.
Results:
Among US adults, income of ≥ $100k was reported in 23% (SE: 1.1) of those with CVD risk <5%, but only 10% (SE: 0.9) of those with CVD risk ≥ 20%. Similarly, having graduated college was reported in 38% (SE: 1.1) of those with CVD risk < 5% but only 20% (SE: 0.9) of those with CVD risk ≥ 20%. From fully adjusted linear regression models, compared to those with highest income (>$100k), income < $25k was associated with a 5.3% (95% CI: 4.5, 6.1) greater CVD risk, while individuals at income levels $25k-$44.9k, $45k-$74.9k, and $75k-$99.9k, experienced a 3.2% (95% CI: 2.4, 4.1), 1.5% (95% CI: 0.8, 1.5), and 1.1% (95% CI: 0.5, 1.7), greater CVD risk, respectively. Additionally, compared to college graduates, less than a high school education was associated with a 4.4% (95% CI: 3.7, 5.1) greater CVD risk, while high school graduates or some amount of college still experienced a 2.8% (95% CI: 2.3, 3.3) and 1.6% (95% CI: 1.2, 2.1) greater CVD risk, respectively.
Conclusion:
In a large US population-based sample of adults, we found strong graded associations between lower income and lower educational attainment with greater CVD risk. Though SDOH are not inputs into calculators of ASCVD risk, they are strongly associated with ASCVD risk scores.
|
/*
* Copyright (c) 2020, Mulesoft, LLC. All rights reserved.
* Use of this source code is governed by a BSD 3-Clause License
* license that can be found in the LICENSE.txt file.
*/
package com.mulesoft.tools.migration.library.mule.steps.pom;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.mulesoft.tools.migration.project.model.ApplicationModel;
import com.mulesoft.tools.migration.project.model.pom.Parent;
import com.mulesoft.tools.migration.project.model.pom.PomModel;
import com.mulesoft.tools.migration.tck.ReportVerification;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import org.codehaus.plexus.util.xml.pull.XmlPullParserException;
import org.junit.Rule;
import org.junit.Test;
public class UpdateProjectParentTest {
private static final String POM = "/pommodel/simple-pom/pom.xml";
private static final String PARENT_POM = "/pommodel/simple-pom/parent-pom.xml";
@Rule
public ReportVerification report = new ReportVerification();
@Test
public void executeWithoutProjectParentGavAndWithoutParent() throws IOException, XmlPullParserException, URISyntaxException {
UpdateProjectParent updateProjectParent = new UpdateProjectParent();
final ApplicationModel applicationModelMock = mock(ApplicationModel.class);
when(applicationModelMock.getProjectPomParent()).thenReturn(Optional.empty());
updateProjectParent.setApplicationModel(applicationModelMock);
Path pomPath = Paths.get(getClass().getResource(POM).toURI());
PomModel model = new PomModel.PomModelBuilder().withPom(pomPath).build();
try {
updateProjectParent.execute(model, report.getReport());
      assertFalse(model.getParent().isPresent());
} catch (RuntimeException e) {
      fail("no exception should be thrown");
}
}
@Test
public void executeWithoutProjectParentGavAndWithParent() throws URISyntaxException, IOException, XmlPullParserException {
UpdateProjectParent updateProjectParent = new UpdateProjectParent();
final ApplicationModel applicationModelMock = mock(ApplicationModel.class);
when(applicationModelMock.getProjectPomParent()).thenReturn(Optional.empty());
updateProjectParent.setApplicationModel(applicationModelMock);
Path pomPath = Paths.get(getClass().getResource(PARENT_POM).toURI());
PomModel model = new PomModel.PomModelBuilder().withPom(pomPath).build();
try {
updateProjectParent.execute(model, report.getReport());
assertTrue(model.getParent().isPresent());
assertEquals("com.mule.parent", model.getParent().get().getGroupId());
assertEquals("parent-rest", model.getParent().get().getArtifactId());
assertEquals("1.0.0", model.getParent().get().getVersion());
} catch (RuntimeException e) {
      fail("no exception should be thrown");
}
}
@Test
public void executeWithProjectParentGavAndWithoutParent() throws URISyntaxException, IOException, XmlPullParserException {
UpdateProjectParent updateProjectParent = new UpdateProjectParent();
final ApplicationModel applicationModelMock = mock(ApplicationModel.class);
Parent parent = new Parent.ParentBuilder().withGroupId("com.mule").withArtifactId("rest-parent").withVersion("2.0.0").build();
when(applicationModelMock.getProjectPomParent()).thenReturn(Optional.of(parent));
updateProjectParent.setApplicationModel(applicationModelMock);
Path pomPath = Paths.get(getClass().getResource(POM).toURI());
PomModel model = new PomModel.PomModelBuilder().withPom(pomPath).build();
try {
updateProjectParent.execute(model, report.getReport());
      assertFalse(model.getParent().isPresent());
} catch (RuntimeException e) {
      fail("no exception should be thrown");
}
}
@Test
public void executeWithGavAndParent() throws URISyntaxException, IOException, XmlPullParserException {
UpdateProjectParent updateProjectParent = new UpdateProjectParent();
final ApplicationModel applicationModelMock = mock(ApplicationModel.class);
Parent parent = new Parent.ParentBuilder().withGroupId("com.mule").withArtifactId("rest-parent").withVersion("2.0.0").build();
when(applicationModelMock.getProjectPomParent()).thenReturn(Optional.of(parent));
updateProjectParent.setApplicationModel(applicationModelMock);
Path pomPath = Paths.get(getClass().getResource(PARENT_POM).toURI());
PomModel model = new PomModel.PomModelBuilder().withPom(pomPath).build();
parent = model.getParent().get();
    assertEquals("com.mule.parent", parent.getGroupId());
    assertEquals("parent-rest", parent.getArtifactId());
    assertEquals("1.0.0", parent.getVersion());
try {
updateProjectParent.execute(model, report.getReport());
Parent afterParent = model.getParent().get();
      assertEquals("com.mule", afterParent.getGroupId());
      assertEquals("rest-parent", afterParent.getArtifactId());
      assertEquals("2.0.0", afterParent.getVersion());
} catch (RuntimeException e) {
      fail("no exception should be thrown");
}
}
}
|
import { isBoolean } from '../index'
describe('Return correct flag', () => {
test('Test boolean', () => {
expect(isBoolean(true)).toBeTruthy()
expect(isBoolean(false)).toBeTruthy()
})
  test('Test familiar truthy or falsy values', () => {
expect(isBoolean(0)).toBeFalsy()
expect(isBoolean('')).toBeFalsy()
expect(isBoolean(undefined)).toBeFalsy()
expect(isBoolean(null)).toBeFalsy()
expect(isBoolean(1)).toBeFalsy()
expect(isBoolean({})).toBeFalsy()
})
})
|
import inspect
import typing

# SQLType, String, and SchemaError are assumed to be defined elsewhere in this module.


class Column:
"""Class to define a column in a :class:`Model`."""
__slots__ = (
"column_type",
"index",
"primary_key",
"nullable",
"default",
"unique",
"name",
"index_name",
)
def __init__(
self,
column_type: typing.Union[typing.Type[SQLType], SQLType],
*,
index: bool = False,
primary_key: bool = False,
nullable: bool = False,
unique: bool = False,
default: typing.Optional[typing.Any] = None,
name: typing.Optional[str] = None
):
if inspect.isclass(column_type):
column_type = column_type()
if not isinstance(column_type, SQLType):
raise TypeError("Cannot have a non-SQLType derived column_type")
if default is not None:
if not nullable:
                # Coerce the default to the column's Python type; raise if that is impossible.
if not isinstance(default, column_type.python):
try:
default = column_type.python(default)
except TypeError:
raise TypeError(
"Column default cannot be of different type than column_type"
)
self.column_type = column_type
self.index = index
self.unique = unique
self.primary_key = primary_key
self.nullable = nullable
self.default = default
self.name = name
self.index_name = None # to be filled later
if sum(map(bool, (unique, primary_key, default is not None))) > 1:
raise SchemaError(
"'unique', 'primary_key', and 'default' are mutually exclusive."
)
def generate_create_table_sql(self) -> str:
"""Generates the SQL for this column for the ``CREATE TABLE`` statement."""
builder = [self.name, self.column_type.to_sql()]
default = self.default
if default is not None:
builder.append("DEFAULT")
if isinstance(default, str) and isinstance(self.column_type, String):
builder.append("'%s'" % default)
elif isinstance(default, bool):
builder.append(str(default).upper())
else:
builder.append("(%s)" % default)
elif self.unique:
builder.append("UNIQUE")
if not self.nullable:
builder.append("NOT NULL")
return " ".join(builder) |
#include <stdio.h>

int main(void)
{
    // Count apples weighing 200 g (sumap) and 100 g (sumai).
    int cant, sumap = 0, sumai = 0;
    scanf("%d", &cant);
    int apple[cant];
    for (int i = 0; i < cant; i++) {
        scanf("%d", &apple[i]);
        if (apple[i] == 200)
            sumap += 1;
        else
            sumai += 1;
    }
    // Only one weight present: that count must be even to split evenly.
    if (sumai == 0 || sumap == 0) {
        if ((sumap == 0 && sumai % 2 == 0) || (sumai == 0 && sumap % 2 == 0))
            printf("YES");
        else
            printf("NO");
    }
    // Both weights present: the total (in 100 g units) must be even,
    // and a single apple can never be split.
    else {
        if ((sumap * 2 + sumai) % 2 == 0 && cant != 1)
            printf("YES");
        else
            printf("NO");
    }
    return 0;
}
|
<filename>tests/integration/main.rs
mod merge_test;
mod utils;
|
package mocks
import (
"fmt"
"net"
"time"
"github.com/puppetlabs/lumogon/utils"
)
// MockNetConn is a configurable mock of net.Conn; each method delegates to
// the corresponding *Fn field when it is set, and panics otherwise.
type MockNetConn struct {
ReadFn func(b []byte) (n int, err error)
WriteFn func(b []byte) (n int, err error)
CloseFn func() error
LocalAddrFn func() net.Addr
RemoteAddrFn func() net.Addr
SetDeadlineFn func(t time.Time) error
SetReadDeadlineFn func(t time.Time) error
SetWriteDeadlineFn func(t time.Time) error
}
// Read delegates to ReadFn when set; otherwise it panics.
func (c MockNetConn) Read(b []byte) (n int, err error) {
if c.ReadFn != nil {
fmt.Println("[MockNetConn] In ", utils.CurrentFunctionName())
fmt.Println("[MockNetConn] - len(b): ", len(b))
return c.ReadFn(b)
}
panic(fmt.Sprintf("No function defined for: %s", utils.CurrentFunctionName()))
}
// Write delegates to WriteFn when set; otherwise it panics.
func (c MockNetConn) Write(b []byte) (n int, err error) {
if c.WriteFn != nil {
fmt.Println("[MockNetConn] In ", utils.CurrentFunctionName())
fmt.Println("[MockNetConn] - b: ", b)
return c.WriteFn(b)
}
panic(fmt.Sprintf("No function defined for: %s", utils.CurrentFunctionName()))
}
// Close delegates to CloseFn when set; otherwise it panics.
func (c MockNetConn) Close() error {
if c.CloseFn != nil {
fmt.Println("[MockNetConn] In ", utils.CurrentFunctionName())
return c.CloseFn()
}
panic(fmt.Sprintf("No function defined for: %s", utils.CurrentFunctionName()))
}
// LocalAddr delegates to LocalAddrFn when set; otherwise it panics.
func (c MockNetConn) LocalAddr() net.Addr {
if c.LocalAddrFn != nil {
fmt.Println("[MockNetConn] In ", utils.CurrentFunctionName())
return c.LocalAddrFn()
}
panic(fmt.Sprintf("No function defined for: %s", utils.CurrentFunctionName()))
}
// RemoteAddr delegates to RemoteAddrFn when set; otherwise it panics.
func (c MockNetConn) RemoteAddr() net.Addr {
if c.RemoteAddrFn != nil {
fmt.Println("[MockNetConn] In ", utils.CurrentFunctionName())
return c.RemoteAddrFn()
}
panic(fmt.Sprintf("No function defined for: %s", utils.CurrentFunctionName()))
}
// SetDeadline delegates to SetDeadlineFn when set; otherwise it panics.
func (c MockNetConn) SetDeadline(t time.Time) error {
if c.SetDeadlineFn != nil {
fmt.Println("[MockNetConn] In ", utils.CurrentFunctionName())
fmt.Println("[MockNetConn] - t: ", t)
return c.SetDeadlineFn(t)
}
panic(fmt.Sprintf("No function defined for: %s", utils.CurrentFunctionName()))
}
// SetReadDeadline delegates to SetReadDeadlineFn when set; otherwise it panics.
func (c MockNetConn) SetReadDeadline(t time.Time) error {
if c.SetReadDeadlineFn != nil {
fmt.Println("[MockNetConn] In ", utils.CurrentFunctionName())
fmt.Println("[MockNetConn] - t: ", t)
return c.SetReadDeadlineFn(t)
}
panic(fmt.Sprintf("No function defined for: %s", utils.CurrentFunctionName()))
}
// SetWriteDeadline delegates to SetWriteDeadlineFn when set; otherwise it panics.
func (c MockNetConn) SetWriteDeadline(t time.Time) error {
if c.SetWriteDeadlineFn != nil {
fmt.Println("[MockNetConn] In ", utils.CurrentFunctionName())
fmt.Println("[MockNetConn] - t: ", t)
return c.SetWriteDeadlineFn(t)
}
panic(fmt.Sprintf("No function defined for: %s", utils.CurrentFunctionName()))
}
|
package utils
import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/bazelbuild/buildtools/build"
	"github.com/bazelbuild/buildtools/warn"
)
// Diagnostics contains diagnostic information returned by formatter and linter
type Diagnostics struct {
Success bool `json:"success"` // overall success (whether all files are formatted properly and have no warnings)
Files []*FileDiagnostics `json:"files"` // diagnostics per file
}
// Format formats a Diagnostics object either as plain text or as json
func (d *Diagnostics) Format(format string, verbose bool) string {
switch format {
case "text", "":
var output strings.Builder
for _, f := range d.Files {
for _, w := range f.Warnings {
formatString := "%s:%d: %s: %s (%s)\n"
if !w.Actionable {
formatString = "%s:%d: %s: %s [%s]\n"
}
output.WriteString(fmt.Sprintf(formatString,
f.Filename,
w.Start.Line,
w.Category,
w.Message,
w.URL))
}
if !f.Formatted {
output.WriteString(fmt.Sprintf("%s # reformat\n", f.Filename))
}
}
return output.String()
case "json":
var result []byte
if verbose {
result, _ = json.MarshalIndent(*d, "", " ")
} else {
result, _ = json.Marshal(*d)
}
return string(result) + "\n"
}
return ""
}
// FileDiagnostics contains diagnostics information for a file
type FileDiagnostics struct {
Filename string `json:"filename"`
Formatted bool `json:"formatted"`
Valid bool `json:"valid"`
Warnings []*warning `json:"warnings"`
}
type warning struct {
Start position `json:"start"`
End position `json:"end"`
Category string `json:"category"`
Actionable bool `json:"actionable"`
Message string `json:"message"`
URL string `json:"url"`
}
type position struct {
Line int `json:"line"`
Column int `json:"column"`
}
// NewDiagnostics returns a new Diagnostics object
func NewDiagnostics(fileDiagnostics ...*FileDiagnostics) *Diagnostics {
diagnostics := &Diagnostics{
Success: true,
Files: fileDiagnostics,
}
for _, file := range diagnostics.Files {
if !file.Formatted || len(file.Warnings) > 0 {
diagnostics.Success = false
break
}
}
return diagnostics
}
// NewFileDiagnostics returns a new FileDiagnostics object
func NewFileDiagnostics(filename string, warnings []*warn.Finding) *FileDiagnostics {
fileDiagnostics := FileDiagnostics{
Filename: filename,
Formatted: true,
Valid: true,
Warnings: []*warning{},
}
for _, w := range warnings {
fileDiagnostics.Warnings = append(fileDiagnostics.Warnings, &warning{
Start: makePosition(w.Start),
End: makePosition(w.End),
Category: w.Category,
Actionable: w.Actionable,
Message: w.Message,
URL: w.URL,
})
}
return &fileDiagnostics
}
// InvalidFileDiagnostics returns a new FileDiagnostics object for an invalid file
func InvalidFileDiagnostics(filename string) *FileDiagnostics {
fileDiagnostics := &FileDiagnostics{
Filename: filename,
Formatted: false,
Valid: false,
Warnings: []*warning{},
}
if filename == "" {
fileDiagnostics.Filename = "<stdin>"
}
return fileDiagnostics
}
func makePosition(p build.Position) position {
return position{
Line: p.Line,
Column: p.LineRune,
}
}
|
import { Component, Inject, OnDestroy, OnInit } from '@angular/core';
import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog';
import { Subscription } from 'rxjs';
import { map } from 'rxjs/operators';
import { UserLogin } from 'src/app/model/User/user';
import { ApiService } from 'src/app/services/API/api.service';
@Component({
selector: 'app-admin-confirmation',
templateUrl: './admin-confirmation.component.html',
styleUrls: ['./admin-confirmation.component.scss']
})
export class AdminConfirmationComponent implements OnInit, OnDestroy {
user: UserLogin = {
username: '',
password: ''
};
  sub: Subscription;
constructor(public dialogRef: MatDialogRef<AdminConfirmationComponent>,@Inject(MAT_DIALOG_DATA) public data?: any, private apiService?: ApiService) { }
ngOnInit(): void {
}
authenticate(){
if(this.user.username && this.user.password){
this.sub = this.apiService.authenticate(this.user)
.pipe(map(user => {
        return user.user.role === 'Admin';
})).subscribe(
res => {
this.dialogRef.close(res);
}
);
}
}
ngOnDestroy(){
if(this.sub){
this.sub.unsubscribe();
}
}
}
|
<filename>rayvision_houdini/hanalyse/__init__.py
#! /usr/bin/env python
#coding=utf-8
import sys
import os
script_version = "py" + str(sys.version_info[0])  # e.g. "py2" or "py3"
script_path = os.path.dirname(__file__)
sys.path.append(script_path)
print("python executable is: " + sys.executable)
print("python version is: " + sys.version)
print("import Analyze path: " + script_path)
sys.stdout.flush()
exec("from " + script_version + ".hanalyse import *")
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020. Distributed under the terms of the MIT License.
from dataclasses import dataclass
from itertools import groupby
from typing import List
import numpy as np
from scipy.spatial import HalfspaceIntersection
@dataclass
class SingleDefectEnergy:
name: str
charge: int
energy: float
correction: float
@dataclass
class DefectEnergy:
name: str
charges: List[int]
energies: List[float]
corrections: List[float]
    def cross_points(self, ef_min, ef_max):
        """Return the charge-state transition (cross) points between Fermi
        levels ef_min and ef_max, found via a half-space intersection of the
        charge-state energy lines."""
large_minus_number = -1e4
half_spaces = []
for charge, energy, correction in zip(self.charges, self.energies, self.corrections):
corrected_energy = energy + correction
half_spaces.append([-charge, 1, -corrected_energy])
half_spaces.append([-1, 0, ef_min])
half_spaces.append([1, 0, -ef_max])
half_spaces.append([0, -1, large_minus_number])
feasible_point = np.array([(ef_min + ef_max) / 2, -1e3])
hs = HalfspaceIntersection(np.array(half_spaces), feasible_point)
boundary_points = []
inner_cross_points = []
for intersection in hs.intersections:
x, y = np.round(intersection, 8)
if ef_min < x < ef_max:
inner_cross_points.append([x, y])
elif y > large_minus_number:
boundary_points.append([x, y])
return CrossPoints(inner_cross_points, boundary_points)
def __str__(self):
lines = []
for charge, energy, correction in zip(self.charges, self.energies, self.corrections):
lines.append(f"{self.name:>10} {charge:>4} {energy:12.4f} {correction:12.4f}")
return "\n".join(lines)
@dataclass
class CrossPoints:
inner_cross_points: List[List[float]]
boundary_points: List[List[float]]
@property
def all_sorted_points(self):
return sorted(self.boundary_points + self.inner_cross_points,
key=lambda v: v[0])
@property
def t_all_sorted_points(self):
return np.transpose(np.array(self.all_sorted_points)).tolist()
@property
def t_inner_cross_points(self):
return np.transpose(np.array(self.inner_cross_points)).tolist()
@property
def t_boundary_points(self):
return np.transpose(np.array(self.boundary_points)).tolist()
@property
def charges(self):
result = []
for i, j in zip(self.all_sorted_points[:-1], self.all_sorted_points[1:]):
dx = j[0] - i[0]
dy = j[1] - i[1]
result.append(int(round(dy / dx)))
return result
@property
def charge_list(self):
charges = [None] + self.charges + [None]
return list(zip(charges[:-1], charges[1:]))
def __str__(self):
lines = []
for point in self.all_sorted_points:
lines.append(f"{point[0]:12.4f} {point[1]:12.4f}")
return "\n".join(lines)
def make_defect_energies(single_energies: List[SingleDefectEnergy]
) -> List[DefectEnergy]:
sorted_energies = sorted(single_energies, key=lambda x: x.name)
result = []
for _, grouped_energies in groupby(sorted_energies, lambda x: x.name):
charges = []
energies = []
corrections = []
for single_energy in grouped_energies:
charges.append(single_energy.charge)
energies.append(single_energy.energy)
corrections.append(single_energy.correction)
result.append(DefectEnergy(single_energy.name, charges, energies, corrections))
return result
|
// Copyright (C) 2018 rameshvk. All rights reserved.
// Use of this source code is governed by a MIT-style license
// that can be found in the LICENSE file.
package refs
import "github.com/dotchain/dot/changes"
// Path represents a reference to a value at a specific path. A nil
// or empty path refers to the root value.
//
// This is an immutable type -- none of the methods modify the
// provided path itself.
//
// This only handles the standard set of changes. Custom changes
// should implement the PathMerger interface.
//
// If no such method is implemented by the change, the change is
// ignored as if it has no side-effects.
type Path []interface{}
// Merge implements Ref.Merge
func (p Path) Merge(c changes.Change) (Ref, changes.Change) {
if result := Merge(p, c); result != nil {
return Path(result.P), result.Scoped
}
return InvalidRef, nil
}
// Equal implements Ref.Equal
func (p Path) Equal(other Ref) bool {
o, ok := other.(Path)
if !ok {
return false
}
if len(p) != len(o) {
return false
}
for kk, elt := range p {
if o[kk] != elt {
return false
}
}
return true
}
// Merge merges a path with a change. If the path is invalidated, it
// returns nil. Otherwise, it returns the updated path. The version of
// the change that can be applied to just the object at the path
// itself is in Affected. Unaffected holds the changes that do not
// concern the provided path.
//
// Custom changes should implement the PathMerger interface or the
// change will be considered as not affecting the path in any way
//
// For most purposes, the Path type is a better fit than directly
// calling Merge.
func Merge(p []interface{}, c changes.Change) *MergeResult {
if len(p) == 0 {
return &MergeResult{nil, c, c, nil}
}
switch c := c.(type) {
case changes.Replace:
return nil
case changes.Splice:
return mergeSplice(p, c)
case changes.Move:
return mergeMove(p, c)
case changes.PathChange:
idx := 0
for len(p) > idx && len(c.Path) > idx {
if p[idx] == c.Path[idx] {
idx++
continue
}
return &MergeResult{P: p, Unaffected: c}
}
if len(p) == idx {
unaff := c
c.Path = c.Path[idx:]
return &MergeResult{p, c, unaff, nil}
}
return Merge(p[idx:], c.Change).addPathPrefix(p[:idx])
case changes.ChangeSet:
result := &MergeResult{P: p}
for _, cx := range c {
result = result.join(Merge(result.P, cx))
if result == nil {
return nil
}
}
return result
case PathMerger:
return c.MergePath(p)
}
return &MergeResult{P: p, Unaffected: c}
}
func mergeMove(p []interface{}, c changes.Move) *MergeResult {
idx := c.MapIndex(p[0].(int))
return &MergeResult{
P: append([]interface{}{idx}, p[1:]...),
Unaffected: c,
}
}
func mergeSplice(p []interface{}, c changes.Splice) *MergeResult {
idx, ok := c.MapIndex(p[0].(int))
if ok {
return nil
}
return &MergeResult{
P: append([]interface{}{idx}, p[1:]...),
Unaffected: c,
}
}
|
/**
 * Minimalistic model of a graph with integer-labeled vertices and string-labeled edges.
*
* f0: [sourceId_k,sourceLabel_k,targetId_k,targetLabel_k,..]
* f1: [edgeLabel_0,..,edgeLabel_k]
*/
public class LabeledGraphIntString extends Tuple2<int[], String[]> {
/**
 * Number of array fields used to store information about a single edge's source and target.
*/
private static final int EDGE_LENGTH = 4;
/**
* Relative offset of an edge's source vertex id.
*/
private static final int SOURCE_ID = 0;
/**
* Relative offset of an edge's source vertex label.
*/
private static final int SOURCE_LABEL = 1;
/**
* Relative offset of an edge's target vertex id.
*/
private static final int TARGET_ID = 2;
/**
* Relative offset of an edge's target vertex label.
*/
private static final int TARGET_LABEL = 3;
/**
* Default constructor.
*/
public LabeledGraphIntString() {
}
/**
* Valued constructor.
*
* @param edges array of edge information
* @param edgeLabels array of edge labels
*/
private LabeledGraphIntString(int[] edges, String[] edgeLabels) {
super(edges, edgeLabels);
}
/**
* Factory method to create an empty graph.
*
* @return empty graph
*/
public static LabeledGraphIntString getEmptyOne() {
return new LabeledGraphIntString(new int[0], new String[0]);
}
/**
* Convenience method to add an edge.
* @param sourceId source vertex id
* @param sourceLabel source vertex label
* @param label edge label
* @param targetId target vertex id
* @param targetLabel target vertex label
*/
public void addEdge(int sourceId, int sourceLabel, String label, int targetId, int targetLabel) {
setEdges(ArrayUtils.addAll(getEdges(), sourceId, sourceLabel, targetId, targetLabel));
setEdgeLabels(ArrayUtils.add(getEdgeLabels(), label));
}
/**
* Convenience method.
*
* @return edge count
*/
public int size() {
return getEdgeLabels().length;
}
// GETTERS AND SETTERS
/**
* Getter.
* @param id edge id
 * @return source vertex id
*/
public int getSourceId(int id) {
return getEdges()[id * EDGE_LENGTH + SOURCE_ID];
}
/**
* Getter.
* @param id edge id
* @return source vertex label
*/
public int getSourceLabel(int id) {
return getEdges()[id * EDGE_LENGTH + SOURCE_LABEL];
}
/**
* Getter.
* @param id edge id
* @return edge label
*/
public String getEdgeLabel(int id) {
return this.f1[id];
}
/**
* Getter.
* @param id edge id
* @return target vertex id
*/
public int getTargetId(int id) {
return getEdges()[id * EDGE_LENGTH + TARGET_ID];
}
/**
* Getter.
* @param id edge id
* @return target vertex label
*/
public int getTargetLabel(int id) {
return getEdges()[id * EDGE_LENGTH + TARGET_LABEL];
}
public String[] getEdgeLabels() {
return this.f1;
}
private void setEdgeLabels(String[] edgeLabels) {
this.f1 = edgeLabels;
}
private int[] getEdges() {
return this.f0;
}
private void setEdges(int[] edges) {
this.f0 = edges;
}
} |
Think you know The Thinker? Think Again.
Located in front of Grawemeyer Hall, it is the first large-scale bronze cast of The Thinker. In 2012, conservators cleaned the corrosion and restored him to a black-green patina. Close-up detail of hand after restoration. The Thinker is often at the center of things on campus, including the annual sorority Bid Day events held each fall. Close-up detail of Rodin's signature on the sculpture.
He's a masterpiece among us — the real deal.
The Thinker statue that sits in front of Grawemeyer Hall is the first large-scale bronze cast of The Thinker. French sculptor Auguste Rodin personally supervised the casting in Paris. It came out of the mold Dec. 25, 1903, and was completed in early 1904. The Thinker design has been cast many times. Eight were made before Rodin's death in 1917.
As the first large-scale Thinker ever cast, UofL's Thinker claims priority as the most original.
He has been around.
Rodin sent The Thinker to the 1904 World's Fair. It was owned privately in Baltimore and later was displayed in the Walters Art Museum there. When Baltimore purchased another Thinker, the museum sold the sculpture to the estate of lawyer and art lover Arthur Hopkins, which bought it for the city of Louisville. The city decided to put The Thinker at UofL.
The Thinker has sat in front of Grawemeyer Hall since 1949.
He used to be green.
Chemical reactions of acids in rainwater with copper compounds in the bronze had turned our Thinker green. Between December 2011 and February 2012, conservators cleaned the corrosion and gave him a black-over-green patina similar to that on other versions of The Thinker.
The Thinker goes way back.
The origins of The Thinker date to 1880. Rodin originally conceived of The Thinker as a statue to be installed at the top of a pair of monumental doors he'd been commissioned to design for a museum of decorative arts. He envisioned the figure as "The Inferno" poet Dante looking down on hell. Rodin called the entire piece The Gates of Hell.
Rodin refined the design over the next 20 years, although it never served its original purpose.
He's recognized everywhere.
Many art historians consider The Thinker to be the most famous sculpture in the world. Its image has been used in media campaigns and it even played an important role in the early days of television when it was incorporated in a popular TV show called "The Many Loves of Dobie Gillis," set on a college campus. |
// Construct a DenseMatrix with a specific shape
inline DenseMatrix(const index_type num_rows, const index_type num_cols,
const value_type val = 0)
: base(num_rows, num_cols, num_rows * num_cols),
_values("matrix", size_t(num_rows), size_t(num_cols)) {
assign(num_rows, num_cols, val);
} |
// Print the sum of the digits of a number
#include <stdio.h>
#include <conio.h>

int main(void)
{
    int remainder, sum = 0, n;
    clrscr();
    printf("Enter n : ");
    scanf("%d", &n);
    while (n > 0)
    {
        remainder = n % 10;
        sum = sum + remainder;
        n = n / 10;
    }
    printf("Sum of digits : %d", sum);
    getch();
    return 0;
}
Online daily assessment of dose change in head and neck radiotherapy without dose‐recalculation
Abstract Background Head and neck cancers are commonly treated with radiation therapy, but due to possible volume changes, plan adaptation may be required during the course of treatment. Currently, plan adaptations consume significant clinical resources. Existing methods to evaluate the need for plan adaptation requires deformable image registration (DIR) to a new CT simulation or daily cone beam CT (CBCT) images and the recalculation of the dose distribution. In this study, we explore a tool to assist the decision for plan adaptation using a CBCT without re‐computation of dose, allowing for rapid online assessment. Methods This study involved 18 head and neck cancer patients treated with CBCT image guidance who had their treatment plan modified based on a new CT simulation (ReCT). Dose changes were estimated using different methods and compared to the current gold standard of using DIR between the planning CT scan (PCT) and ReCT with recomputed dose. The first and second methods used DIR between the PCT and daily CBCT with the planned dose or recalculated dose from the ReCT respectively, with the dose transferred to the CBCT using rigid registration. The necessity of plan adaptation was assessed by the change in dose to 95% of the planning target volume (D95) and mean dose to the parotids. Results The treatment plans were adapted clinically for all 18 patients but only 7 actually needed an adaptation yielding 11 unnecessary adaptations. Applying a method using the daily CBCT with the planned dose distribution would have yielded only four unnecessary adaptations and no missed adaptations: a significant improvement from that done clinically. Conclusion Using the DIR between the planning CT and daily CBCT can flag cases for plan adaptation before every fraction while not requiring a new re‐planning CT scan and dose recalculation.
| INTRODUCTION
Radiation therapy is a standard treatment option for a variety of cancers, where the precise geometric targeting of tumors can be exploited for achieving better tumor control while limiting healthy tissue damage. The specific targeting and attenuation of radiation are unique to the patient's anatomy at the time of the planning CT (PCT) simulation, but these conditions are difficult to maintain throughout an entire course of treatment due to changes in anatomy. To account for changes in patient anatomy, plan modification may be required during the treatment course to ensure accurate targeting. Plan adaptation has been shown to improve treatment outcomes by promoting better tumor control and limiting toxicities, 5,6 but this procedure entails additional costs of re-imaging, replanning, and additional quality assurance. Although the potential benefits of plan adaptation are obvious, no guidelines on decisionmaking and optimal time for re-planning are available.
Plan adaptation has been reported for various treatment sites including lung, 7 prostate, and head and neck cancers. 11,12 Across all treatment sites, adaptation is necessary due to tumor shrinkage, weight loss or other significant anatomical changes that impact the dose distribution (e.g., lung collapse or re-inflation). Specifically for head and neck cancers, large volume changes are common and often detected by external examination or through poor fitting of immobilization devices, but minor changes can go unnoticed. However, relatively minor anatomy changes may still have a significant effect on the dose distribution and are more difficult to discern by visual inspection of anatomy alone.
More precise and conformal radiation treatments available with modern techniques may need more plan adaptations to provide consistent target dose coverage and healthy tissue sparing with a changing anatomy. For making a decision on the necessity of plan adaptation in clinical practice, efficient daily evaluation of the delivered dose distribution on the modified anatomy is required. Different methods have been presented on detecting volume changes 13 and landmark movements, 14 but most rely solely on visual inspection by clinicians. These visual inspections may not be consistent as shown by inter-observer studies. 15 Several groups have presented adaptation strategies and schedules throughout treatment. 16,17 A recent study using the same dataset as in this study has produced a method of detecting anatomical differences to flag consideration of plan re-evaluation without considering the dose distribution. 18 Currently, cone beam CT (CBCT) imaging is routinely used for patient alignment and anatomy monitoring, but can also be used for dosimetric assessment of actual radiation delivery. Dose calculations on CBCTs are possible with the results varying between reported studies 19-21 because of inferior image quality and tissue densitometry. Performing reliable analysis of the dose to the target and organs at risk would require contouring of relevant structures on the daily CBCT image. An attractive alternative is to employ deformable image registration (DIR) to transfer contour information from the planning CT study for analysis. DIR has been shown to produce a variety of results depending on the algorithm used, original contouring accuracy and imaging modalities (i.e., CT simulation, MRI or CBCT).
Unfortunately, registration between different imaging modalities has been shown to have worse accuracy 22 especially for CBCT images due to limited image quality and artifacts.
There are two primary effects of anatomical deformations on a radiation treatment: 1) movement of voxels and regions of interest (ROIs) relative to the planned dose distribution, and 2) change in the dose distribution itself due to re-arrangement of voxels or density changes therein. The current gold standard (GS) for determining whether to adapt a treatment plan involves a new CT simulation (ReCT), dose calculation, and DIR to map contours from the PCT. This procedure is time-consuming and expensive but accounts for both effects of anatomical deformation and is applied when gross anatomical changes are suspected.
The best alternative without a new CT simulation involves using DIR to warp the planning CT to match the daily anatomy from the CBCT and performing a dose calculation, as proposed by Veiga et al., 23 and accounts for both effects of anatomical deformations. However, in practice the dose recalculation can be difficult and time-consuming. It is usually performed off-line, which limits its routine daily use at the treatment unit. What if you could determine the necessity of plan adaptation without a new CT scan and dose calculation? Without re-computation of the dose, only the movement of voxels and ROIs relative to the planned dose distribution is considered, but not the change to the dose distribution itself. The dose distribution is assumed to be robust and only mildly affected by the re-arrangement of the voxels. In this study, we explore the results of using the CBCT without a dose calculation and a CBCT with a dose calculation and compare both to the current gold standard. The goal is to see if assessing the movement of ROIs relative to the planned dose distribution provides enough dose information to properly trigger the plan adaptation process, when compared to the current clinical practice of visual inspection.
2.A | Patient studies
For this study, 18 patients who received multi-fractionated radiotherapy for head and neck cancer and had plan adaptation during the treatment course were selected. Each patient had a CT scan taken before treatment (range 4-30 days) and used for planning (i.e., PCT), daily pre-treatment CBCT studies, and another CT re-taken during treatment (ReCT) when anatomy changes were deemed significant (day "X"). All deformable image registrations were performed with software from MIM Maestro (version 6.5, MIM Software Inc., Cleveland, OH, USA) using the default DIR algorithm, an intensity-based free-form algorithm with a sum-of-squared-differences similarity metric. 25 The mean registration error using MIM Maestro between two kVCTs was shown to be 1.7 mm by Kirby et al. 22 using a deformable head and neck phantom.
2.B | Dose distribution estimation
To determine the necessity of plan adaptation, an estimation of the dose distribution "of the day" was required, and three estimation methods are presented and compared to the current gold standard, which requires a re-planning CT. The first method (CBCT_P) used DIR to map the contours from the PCT to the daily CBCT, with the planned dose distribution rigidly registered to the daily CBCT as shown in Fig. 1. The second method (CBCT_R) used DIR to map the contours from the PCT to the daily CBCT, with the recalculated dose (from the ReCT) rigidly registered to the daily CBCT. The third method (ReCT_P) used DIR to map the contours from the planning CT to the ReCT, with the planned dose distribution rigidly registered to the ReCT. The gold standard method (ReCT_R) applied DIR to map contours from the PCT to the ReCT, with the dose recalculated on the ReCT. Both dose distributions (planned and recalculated) were obtained using the original treatment plan parameters and beams; the plan was not re-optimized. The rigid registration process used 6 degrees of freedom and simulated the alignment of the CBCT study to the PCT (or ReCT) performed by the radiation therapists in the clinic before each fraction. In total, four separate methods estimated the daily dose distribution using the CBCT or ReCT as the secondary CT study, with the planned or recomputed dose. For clarity, each method is referred to by the secondary image used (CBCT or ReCT) and by whether the planned or ReCT dose was used, denoted by subscript P or R, respectively. All dose estimation methods are illustrated in Fig. 2, showing all four investigated combinations.
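As an illustrative aside (a sketch using only this paper's shorthand, not code from any clinical tool), the four combinations can be enumerated as:

from itertools import product

secondary_images = ("CBCT", "ReCT")  # study the PCT contours are deformed onto
dose_sources = ("P", "R")            # planned (P) vs recalculated ReCT (R) dose
methods = ["%s_%s" % (img, d) for img, d in product(secondary_images, dose_sources)]
print(methods)  # ['CBCT_P', 'CBCT_R', 'ReCT_P', 'ReCT_R']; ReCT_R is the gold standard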
2.C | Voxel-to-voxel dose comparison
The clinically relevant comparison of the dose results obtained by different estimations requires evaluation on a voxel-to-voxel basis.
Every voxel in the PCT study can have a different dose value in fraction X (when the ReCT was ordered), depending on the secondary CT study used for image registration and on the dose distribution. Comparison with any other method is done by calculating the relative dose difference to the GS (RD_j) for a specific structure j across each individual voxel i, between a test method (T) and the GS, averaged over all N_j voxels within all 18 patients p:
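A plausible form of Eq. (1), assuming a per-voxel absolute relative difference averaged over the N_j voxels of structure j and over the 18 patients (the exact normalization is an assumption, not the paper's verbatim formula):

RD_j = \frac{1}{18} \sum_{p=1}^{18} \frac{1}{N_j} \sum_{i=1}^{N_j} \frac{\lvert D_{i}^{T} - D_{i}^{GS} \rvert}{D_{i}^{GS}} \times 100\% \qquad (1)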
Voxel-to-voxel analysis was performed for the right and left parotids because they were present in all image studies, incurred significant deformation, and are frequently positioned close to the target volume. The analysis was also performed for the spinal cord because it is a clinically important structure. For the PTV, the dose was determined at each voxel using the CBCT_P and CBCT_R methods. The threshold criterion for adaptation was for the dose to 95% of the volume (D95) to be below the prescribed dose.
2.D | Test for the necessity of plan adaptation
The D95 parameter was selected following recommendations for evaluating the target coverage. 26 Only the primary PTV was analyzed for each patient.
3.A | Voxel-wise dose comparison
The relative dose differences RD_j given by Eq. (1) for each method are shown in Table 1 for the ipsilateral and contralateral parotids and the spinal cord. The error caused only by the changed dose distribution is represented by the ReCT_P row, and the CBCT_R row represents the error caused only by the DIR between different imaging modalities. The CBCT_P row represents the error when both effects were present.
| Test for necessity of adaptation
The parotid mean dose estimates using CBCT_P and CBCT_R are compared to the gold standard.
| DISCUSSION
In a standard workflow, the only dose distribution always available is the one calculated using the initial CT simulation for planning purposes. Theoretically, the dose gradients from the planned dose distribution indicate what dose differences may occur due to specific anatomical changes. Dose gradients are mainly defined by the original beam geometry relative to the planned iso-center, which is not affected by deformation. Without extensive deformation, these gradients can be maintained and could predict dose change when combined with a deformation field. However, with large volume or density reductions within the beam's path, significant changes to the dose gradients may occur.
[Fig. 2. Schema describing the daily dose estimation using DIR from the planning CT to either the daily CBCT or the ReCT study. Two different dose distributions computed on the PCT or ReCT are transferred to the moving image using a 6-degree-of-freedom rigid registration. The gold standard method, using the ReCT and the recomputed dose, is highlighted in yellow. Day X is when the ReCT was ordered due to observed significant anatomical changes.]
The average relative dose differences RD_j for each organ presented in Table 1 show that, for both parotids and the spinal cord, the RD_j from the CBCT_R method (which results from DIR error alone) is similar to that from the ReCT_P method, whose error comes from using the planned instead of the recomputed dose. The CBCT_P RD_j includes both sources of error but is less than the sum of the errors in the ReCT_P and CBCT_R methods.
It has been shown that DIR error is specific to the algorithm used 27,28 and to image quality. 22
[Fig. 4. The difference between predicted D95 and the prescribed dose for the PTV using (a) the CBCT_P and (b) the CBCT_R methods, compared to ReCT_R (gold standard). Values are presented as the difference from the prescribed dose. The dashed line represents the conservative criterion (within 1 Gy of the threshold). ReCT_R is the DIR to the ReCT using the recalculated dose; CBCT_P is the DIR to the daily CBCT using the planned dose; CBCT_R is the DIR to the daily CBCT using the recalculated dose.]
According to the gold standard, plan adaptation was not necessary in all 18 of these cases, with 11 of the original patient plans still within clinical tolerances. Clinical decisions of plan adaptation were made before the re-scan using personal experience, which explains the discrepancy in adaptation rates between our GS and those decided clinically. Our results have shown that both methods using daily CBCT studies (CBCT_R and CBCT_P) yielded very conservative results and missed no required adaptations. If the simplest prediction method (CBCT_P) had been used, only four patients would have been unnecessarily re-scanned and adapted. This demonstrates that using the DIR to the CBCT of the day without a dose calculation (the CBCT_P method) can determine when to adapt a treatment plan better than was done clinically, avoiding a number of unnecessary CT simulations and re-planning efforts. Performing an additional dose calculation in CBCT_R caught two additional unnecessary plan adaptations at the cost of additional computation time, whereas without a dose computation the procedure can be completed within one minute, allowing for an efficient "adapt or not" decision online.
| CONCLUSION
Improvements in IGRT and conformal radiation delivery have made adaptive radiation therapy a reality, but steps need to be taken to ensure its efficiency. Practical implementation requires an efficient method of daily evaluation and decision-making to determine when plan adaptation is truly necessary. The method of dose evaluation using on-board CBCT imaging alone is limited by the necessity for dose calculation, contouring and image registration. We have shown that the daily CBCT image mapped back to the planning CT without a dose calculation can provide sufficient information for the important decision of when to re-plan. The goal is to prevent the use of unnecessary additional CT simulations and dose computations with a quick online evaluation. Further research needs to be performed with more patients and other treatment sites including abdomen and thorax and for treatment techniques that will produce a different landscape of dose gradients.
ACKNOWLEDGMENT
The authors thank Dr. Bryan Schaly for supplying CT data and discussion.
CONF LICT OF I NTEREST
The authors declare no conflict of interest. |
<reponame>SanjayRai/software_acceleration_framework_with_Xilinx_HLS
#! /usr/bin/python
import subprocess
import sys

if len(sys.argv) != 4:
    print "Wrong arguments\n\n\t ./rd_test.py 0xf7c00000 num_bytes srai_wr.bin"
else:
    byte_count = 0
    filename = sys.argv[3]
    FP = open(filename, 'wb')
    try:
        base_addr = int(sys.argv[1], 16)
        num_bytes = int(sys.argv[2])  # parse as int: comparing against the raw string never terminates
        while byte_count < num_bytes:
            # Capture the value printed by iotools instead of discarding it;
            # the assumed output format (one hex value per read) may differ.
            cmd_strng = "iotools mmio_read8 %s" % (hex(base_addr))
            value = int(subprocess.check_output(cmd_strng, shell=True), 16)
            FP.write(chr(value & 0xFF))  # write the byte that was read
            base_addr = base_addr + 1
            byte_count = byte_count + 1
    finally:
        FP.close()
|
package org.fkjava.security;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.fkjava.security.interceptor.UserHolderInterceptor;
import org.fkjava.security.service.SecurityService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.security.authentication.dao.DaoAuthenticationProvider;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.authentication.SimpleUrlAuthenticationFailureHandler;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.ResourceHandlerRegistry;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
@SpringBootApplication
@ComponentScan("org.fkjava")
@EnableJpaRepositories
public class SecurityConfig extends WebSecurityConfigurerAdapter implements WebMvcConfigurer{
@Autowired
private SecurityService securityService;
@Autowired
private PasswordEncoder passwordEncoder;
	// Custom AuthenticationProvider that does not hide the "user not found" exception.
	// Spring Security creates an AuthenticationProvider automatically by default,
	// but it will not do so if the developer provides one.
@Override
protected void configure(AuthenticationManagerBuilder auth) throws Exception {
		// Do not call the super.configure(auth) method
DaoAuthenticationProvider dap = new DaoAuthenticationProvider();
dap.setHideUserNotFoundExceptions(false);
dap.setUserDetailsService(securityService);
dap.setPasswordEncoder(passwordEncoder);
auth.authenticationProvider(dap);
}
@Override
public void addInterceptors(InterceptorRegistry registry) {
		// Register the interceptor with Spring; intercept the root path and everything under it
registry.addInterceptor(new UserHolderInterceptor()).addPathPatterns("/**");
}
@Override
protected void configure(HttpSecurity http) throws Exception {
String loginPage = "/security/login";
SimpleUrlAuthenticationFailureHandler fh = new SimpleUrlAuthenticationFailureHandler(loginPage + "?error") {
@Override
public void onAuthenticationFailure(HttpServletRequest request, HttpServletResponse response,
AuthenticationException exception) throws IOException, ServletException {
				// Put the login name into the session
request.getSession().setAttribute("loginName", request.getParameter("loginName"));
super.onAuthenticationFailure(request, response, exception);
}
};
		http.authorizeRequests() // authorize requests
				.antMatchers(loginPage, "/css/**", "/js/**", "/webjars/**", "/static/**")
				.permitAll() // no access check for these paths
				.anyRequest() // all other requests
				.authenticated() // accessible only after authentication
				.and() // and
				.formLogin() // use form-based login
				.loginPage(loginPage) // location of the login page, default is /login
				.loginProcessingUrl("/security/do-login") // URL that processes the login request
				.usernameParameter("loginName") // parameter name for the login name
				.passwordParameter("password") // parameter name for the password
				.failureHandler(fh)
				.and().logout() // configure logout
				.logoutUrl("/security/do-logout") // logout URL
				//.and().httpBasic();
				.and().csrf(); // enable CSRF (cross-site request forgery) protection
}
@Override
public void addResourceHandlers(ResourceHandlerRegistry registry) {
		// The framework automatically maps the static-file root to /static, /public, and /resources
}
@Override
public void addViewControllers(ViewControllerRegistry registry) {
		// Dynamically register URL-to-view mappings, avoiding controllers that contain almost no code
registry.addViewController("/security/login")
.setViewName("security/login");
		// Set the home page
registry.addViewController("/").setViewName("security/index");
}
public static void main(String[] args) {
SpringApplication.run(SecurityConfig.class, args);
}
}
|
Skin Barrier Function Defect-A Marker of Recalcitrant Tinea Infections
Dermatophytosis is a common infection of the skin, hair or nails, i.e., colonization of keratinized tissue caused by dermatophytes, a group of related filamentous fungi. These infections are caused by species of three genera—Trichophyton, Epidermophyton, and Microsporum. Among all fungal infections, infections caused by the dermatophytes are the most frequent forms of human infections, affecting more than 20%–25% of the world’s population.
Introduction
Dermatophytosis is a common infection of the skin, hair or nails, i.e., colonization of keratinized tissue caused by dermatophytes, a group of related filamentous fungi. These infections are caused by species of three genera-Trichophyton, Epidermophyton, and Microsporum. Among all fungal infections, infections caused by the dermatophytes are the most frequent forms of human infections, affecting more than 20%-25% of the world's population. Based on their natural habitat, dermatophytes are classified into three groups-geophilic, zoophilic, and anthropophilic species. Superficial fungal infections had always been simple to treat with the basket of antifungal agents available; however, recently, an alarming trend in these dermatoses is being observed, with a substantial change in the clinical profile of patients associated with an increase in the number of chronic, recurrent, and recalcitrant dermatophytoses. Recalcitrant dermatophytosis refers to relapse, recurrences, reinfection, persistence, or chronic infections, and possibly microbiological resistance. Dermatophytosis is considered to be recurrent when there is recurrence of the disease (lesions) within 4 weeks of completion of approved systemic therapy. Relapse denotes the occurrence of dermatophytosis (lesions) after a longer infection-free interval (6-8 weeks) in a patient who has been cured clinically. Dermatophytosis is considered to be chronic when a patient has suffered from the disease for more than 6 months to 1 year, with or without recurrence, in spite of being adequately treated. Nowadays, antifungal resistance is also thought to be an important cause of treatment failure in dermatophytosis. Other factors, such as barrier defects, remain neglected.
Transepidermal water loss (TEWL) measurement is the most widely used objective measurement for assessing the barrier function of the skin. TEWL represents the diffusion of condensed water through a fixed area of stratum corneum to the skin surface per unit time and is measured in grams/m²/hour.
Our study highlights the role of transepidermal water loss from the lesional skin and its effect on the cure rate and relapse in patients of Tinea cruris.
Materials and Method
This was a hospital based prospective comparative study. A total of 200 patients of tinea cruris, attending the dermatology outpatient department of Sawai Mansingh Hospital, Jaipur, not on any topical or systemic treatment previously were included in the study. A written and informed consent was obtained from each patient. The study was conducted over a period of 1 year.
The cases were diagnosed clinically and by KOH examination. Ethical clearance was obtained for the study.
TEWL Measurement
TEWL was calculated in these patients from the lesional skin over the right inguinal region, according to guidelines developed by 5 th International Conference on Occupational and Environmental exposure of skin to chemicals (OEESC) using Tewameter TM300 open chamber probe of Courage and Khazaka, Cologne, Germany.
Transepidermal water loss is calculated by measuring the water vapor pressure (VP) gradient at the skin surface, which is considered constant in the absence of external convection currents. In the open-chamber method, the VP gradient is calculated by measuring the difference in VP between two distinct points aligned perpendicularly to the skin surface. VP is calculated as the product of RH (relative humidity) and saturated VP, which is dependent on temperature.
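A minimal sketch of this open-chamber calculation, assuming the standard Magnus formula for saturated VP; the sensor spacing and the flux proportionality constant are illustrative assumptions, not the Tewameter TM300's internal calibration:

import math

def saturated_vp(temp_c):
    # Saturation vapor pressure in hPa (Magnus formula).
    return 6.112 * math.exp(17.62 * temp_c / (243.12 + temp_c))

def tewl(rh_lower, t_lower, rh_upper, t_upper, sensor_gap_m=0.005):
    # Approximate TEWL from two RH/temperature sensor pairs aligned
    # perpendicular to the skin surface inside the open chamber.
    vp_lower = (rh_lower / 100.0) * saturated_vp(t_lower)  # hPa
    vp_upper = (rh_upper / 100.0) * saturated_vp(t_upper)
    gradient = (vp_lower - vp_upper) / sensor_gap_m  # hPa per metre
    k = 0.68  # illustrative flux constant, (g/m^2/h) per (hPa/m)
    return k * gradient

print(tewl(rh_lower=65, t_lower=30, rh_upper=50, t_upper=28))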
Prior to measurement of TEWL and/or skin hydration, the study participant were acclimatized to the measurement environment to avoid errors caused by environmental temperature or sweating.
In accordance to OEESC guidelines, the affected area was left open for 20 minutes prior to the test at relative humidity of 50% and ambient temperature of 22°C.
TEWL was measured in grams/m²/hour. A value of >25 grams/m²/hour was considered abnormal or critical. These patients were divided into 2 groups on the basis of TEWL values.
Group A included patients of tinea cruris with abnormal TEWL, whereas Group B included age- and sex-matched patients of tinea cruris with normal TEWL. Both groups were given 200 mg itraconazole OD plus oral antihistamines. Although itraconazole 200 mg as a single daily dose is not a USFDA-approved regimen, the skin department in our institution follows both regimens, i.e., itraconazole 200 mg OD as well as 100 mg BD. Little difference is observed in cure rates between the two regimens. Cure was defined on the basis of clinical observation and KOH examination. Culture studies could not be performed.
Cure rate of the two groups was compared 1 month later, whereas, recurrence of the two groups was compared 3 months later.
MedCalc version 16.4 software was used to analyze data presented as proportions.
Chi square test was used for analysis and P value <0.05 was taken as significant.
Results
In Group A, i.e., patients of tinea cruris with abnormal TEWL, only 28% (n = 28) of the patients showed clinical improvement at the end of 1 month. Of those cured, 78.57% (n = 22) showed recurrence 3 months after completion of therapy.
In Group B, i.e., patients of tinea cruris with normal TEWL, 69% (n = 69) of the patients showed clinical improvement at the end of 1 month. Of those cured, only 21.74% (n = 15) showed recurrence 3 months after completion of therapy.
The difference between the two groups was statistically significant (P value <0.005). Therefore, our study concludes that tinea cruris patients with abnormal TEWL have significantly poorer cure rates and significantly higher recurrence rates. This highlights the fact that stratum corneum hydration and TEWL significantly influence the cure rate and recurrence rate of superficial dermatophytosis.
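As a quick check, a sketch of the corresponding chi-squared test of independence; the 2x2 cure table is reconstructed from the percentages above (with n = 100 per group, the percentages equal the counts):

from scipy.stats import chi2_contingency

table = [[28, 72],  # Group A: cured, not cured
         [69, 31]]  # Group B: cured, not cured
chi2, p, dof, expected = chi2_contingency(table)
print("chi2 = %.2f, p = %.2g" % (chi2, p))  # p falls well below 0.005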
Discussion
In recent years, the scenario of dermatophytosis has been changing, with an alarming rise in the number of difficult-to-treat chronic and recurrent dermatophytoses in our country. There has been an increasing trend of patients presenting with decreasing cure rates and frequent relapses within a few days to weeks of stopping antifungal treatment. The causes of such recalcitrant dermatophytosis are multifactorial, as listed in Table 1. A significant but questionable cause of refractory fungal infections is drug resistance.
The "90-60 Rule" proposed by Rex and Pfaller states that infections caused by fungal isolates with MICs considered susceptible respond favorably to appropriate therapy approximately 90% of the time, whereas infections caused by isolates with MICs considered resistant still respond favorably approximately 60% of the time; factors other than antifungal resistance therefore probably affect the cure rate. A study by Sardana et al. concluded that in vitro resistance to antifungals is not very common and should not be frequently labeled as a cause of treatment failure. Out of the many causes listed above, the one examined in our study was the role of barrier function defect. The stratum corneum, the main permeability barrier, is formed from extracellular lipids and corneocytes during epidermal differentiation of the skin. The main extracellular stratum corneum lipids are cholesterol, free fatty acids, and ceramides. The initial step in dermatophyte infections is stratum corneum penetration and proliferation. A disturbance in barrier function is accompanied or caused by changes in epidermal proliferation and differentiation. The increased epidermal proliferation leads to expression of the proliferation-associated cytokeratins K6, K16, and K17. Interestingly, the filaggrin and involucrin proteins are also downregulated in lesional skin of tinea cruris, and therefore these patients have a compromised barrier.
This was supported by a study in which dermatophytoses, except tinea pedis and tinea manuum, showed a highly significant increase in TEWL compared with adjacent infection-free skin. In the context of our study, either the fungal strain is more invasive, resulting in acceleration of the above process and poor response to treatment, or the barrier defect may be a primary phenomenon, which needs confirmation by further studies.
It is important to understand that patients with an atopic background have a selective or induced immune deficit for dermatophytic infections in addition to barrier function impairment. We did not segregate our patients according to their atopic background, which is a limitation of our study. Our study stresses the fact that barrier defect is an understated cause of recalcitrant superficial fungal infections and therefore should not be neglected. A concomitant or adjuvant application of barrier repair formulations is of great value in both treating and preventing recurrences of dermatophytosis.
Limitations
There were a few limitations in our study, i.e. we limited our study to tinea cruris only, culture studies for species identification were not done, only itraconazole was used as a prototype antifungal drug in our study and our patients were not segregated according to atopic background.
Conclusion
Our study is an attempt to correlate the barrier function properties of skin with recalcitrant superficial fungal infections. Cases of tinea cruris with abnormal TEWL show a significant decrease in cure rates and significantly higher relapse rates among those initially cured.
This finding should urge the researchers to lay focus on factors other than just drug resistance.
Skin barrier dysfunction and increased TEWL is one such factor. |
def inventory(game):
inventory = game.player.inventory
    if not inventory:
outputter.display_game_text("You aren't carrying anything.")
else:
item_names = [item.full_name for item in inventory]
items_list = concatenate_items(item_names)
outputter.display_game_text('You are carrying {}.'.format(items_list)) |
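
# A minimal sketch of the concatenate_items helper used above; its real
# implementation lives elsewhere in this project, so this is an assumption:
# it joins item names into a natural-language list such as
# "a sword, a key and a map".
def concatenate_items(item_names):
    if len(item_names) == 1:
        return item_names[0]
    return '{} and {}'.format(', '.join(item_names[:-1]), item_names[-1])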
New York City has appointed three city employees to be board members for the struggling nonprofit organization that runs the South Street Seaport Museum as the city works to find a steward to help operate the museum.
But the Department of Cultural Affairs said on Monday that the city had not formally taken control of the organization, which remains an independent nonprofit.
To maintain its standing as a nonprofit, the museum needs at least three board members, Danai Pointer, a department spokeswoman, said.
Last month the Museum of the City of New York decided to pull out of running the institution, having deemed its current condition unworkable. The museum has been struggling with financial problems that were exacerbated by Hurricane Sandy.
“During this time our hope is that a successor steward will take responsibility for the museum’s mission and collection,” said Kate D. Levin, the cultural affairs commissioner.
Officials said the three employees serving temporarily as trustees had volunteered for the role. Two of them, Christie Huus and David Sheehan, were appointed on behalf of the mayor’s office. The third, Tracey Knuckles, was appointed on behalf of the cultural affairs commissioner.
The board, whose appointment was reported by The Wall Street Journal, then named Jonathan Boulware, the museum’s waterfront director, as an interim president who will oversee its operations and collections, including its historic ships.
The museum is hoping to find another entity to take over the organization. If no group comes forward, state officials will help determine whether to close the museum or disperse its collection. |
The Flip Side of “Spice”: The Adverse Effects of Synthetic Cannabinoids as Discussed on a Swedish Internet Forum
Background Synthetic cannabinoids in smoking mixtures (such as Spice) or as raw powder are sold for recreational use as an alternative to herbal cannabis (hashish and marijuana). Although clinical case studies have documented an array of side effects, there is also information available on Internet-based drug discussion forums. Aim Our study investigates experiences of side effects from the use of synthetic cannabinoids, as described and anonymously shared on Swedish online discussion forums. Methods A systematic search yielded 254 unique and publicly available self-reports from the Swedish forum flashback.org. These texts were analysed thematically, which resulted in 32 sub-themes that were combined into three overarching themes. Results & Conclusion The experiences of negative side effects were described as (1) adverse reactions during acute intoxication; (2) hangover the day after intoxication; (3) dependency and withdrawal after long-term use. The first theme was characterized by an array of fierce and unpredictable side effects such as tachycardia, anxiety, fear and nausea. The acute intoxication reactions were congruent with the side effects published in clinical case studies. The day after intoxication was marked by residual effects of dullness, apathy, nausea and headache. Long-term use resulted in dependency and experiences of being emotionally numb and disconnected. Furthermore, withdrawal was described as sweating, shaking, loss of appetite and insomnia. Both the hangover and the long-term effects have previously received little scientific attention and need to be investigated further. Drug-related Internet discussion forums constitute an overlooked source of information which can aid in the identification of previously unknown risks and effects.
Introduction
Synthetic cannabinoid receptor agonists, with effects similar to those of herbal cannabis (hashish and marijuana), have gained increasing popularity, especially among teens and young adults (e.g. Harris & Brown, 2013). These substances have been on the recreational drug market since around 2004 and are easily purchased on the Internet, where they are purported to be a legal alternative to cannabis or are disguised as incense, etc. (Dresen, Ferreirós, Pütz, Westphal, Zimmermann, & Auwärter, 2010; Harris & Brown, 2013). Before they were introduced for recreational use, most novel cannabinoids with cannabis-like activity had been synthesised by researchers in order to explore their pharmaceutical potential (EMCDDA, 2013). For a historical review of the emergence and abuse of synthetic cannabinoids, see Ashton (2012) and Fattore and Fratta (2011). The use of synthetic cannabinoids has also spread outside Europe, to countries including Ukraine, Taiwan, Japan, the USA, Australia and New Zealand (Ashton, 2012; Fattore & Fratta, 2011). A Swedish study (CAN, 2013) that analysed the patterns of Internet drug use showed that 3.6% of students aged 14-15 and 6.8% of students aged 16-17 had used Spice-like smoking mixtures. Furthermore, a Swedish laboratory study (Helander, Beck, Hägerkvist, & Hulthén, 2013) identified synthetic cannabinoids as the most common substance (36%) among 103 patients admitted to emergency departments with drug intoxications. Despite repeated legal actions in several countries, including Sweden, to ban a number of synthetic cannabinoids, clandestine chemists keep synthesising new and uncontrolled substances to circumvent existing drug laws (King, 2013).
Many of the synthetic cannabinoids are most likely full agonists at cannabinoid receptors, which, in comparison with the partial agonist effects of tetrahydrocannabinol (THC) in herbal cannabis, seems to result in stronger potency and psychoactive impact (Atwood, Huffman, Straiker, & Mackie, 2010). This also implies that the adverse effects are both different and more extensive than those induced by herbal cannabis. Several reports have documented various and significant side effects of synthetic cannabinoids, including anxiety, paranoia, spasm, tachycardia, pain and dependency (Ashton, 2012; Bebarta, Ramirez, & Varney, 2012; Forrester, Kleinschmidt, Schwarz, & Young, 2011; Green, Kavanagh, & Young, 2003; Gunderson, Haughey, Ait-Daoud, Joshi, & Hart, 2012; Hermanns-Clausen, Kneisel, Szabo, & Auwärter, 2012; Hoyte, Jacob, Monte, Al-Jumaan, Bronstein, & Heard, 2012; Schneir, Cullen, & Ly, 2011; Seely, Lapoint, Moran, & Fattore, 2012). Most reports focus on acute intoxication effects, and are mainly based on clinical case studies from hospitals or health care centres. However, medical reports in scientific journals are less likely to reach the attention of Spice-interested adolescents, whose primary source of information when searching for sensitive and health-related issues appears to be the Internet (Borzekowski & Rickert, 2001; Gray, Klein, Noyce, Sesselberg, & Cantrill, 2005). This study seeks to investigate the experienced negative side effects ascribed to the use of synthetic cannabinoids by analysing self-reports published anonymously on Swedish online discussion forums.
Data collection
The data source for this study was identified by searching for Swedish online forums where experiences induced by synthetic cannabinoids were publicly discussed. An initial Google search (in Swedish) was conducted with keywords reflecting drug experiences. The first 10 hits from every keyword search were further screened for websites with drug-related discussions. Nine websites were found.
All searches were done in August 2013.
In the second screening, each website was explored in depth for reports and discussions of the use of synthetic cannabinoids. All but one website were excluded because of lack of reports, a defective search function, or hidden and password-protected content. In the end, the website used as the data source was flashback.org, which is Sweden's largest Internet discussion forum, with over 800,000 members and around 2 million unique visitors each week. Flashback was started in May 2000 and is dedicated to discussions in general, not only drugs. The next step therefore involved using the local search engine at flashback.org to find experiences of negative side effects induced by synthetic cannabinoids. All but a few of the reports resulting from the local search appeared to be part of a bundled discussion thread especially designated to collect all reports of negative side effects in one place. This thread was found to be a very good source of data for our study, as it had been col-
Participants
The reports were written and published by 254 anonymous users of the online community flashback.org. No information about gender or age was stated, although there was reason to believe that the users were 18 years or older, since this is the age limit for a user account at flashback.org.
However, the age limit can be bypassed by creating a user account with a false age statement. Dosage was not always stated, or was stated only in very non-specific terms. The route of administration was exclusively smoking. The discussions did not reveal whether the users smoked the synthetic cannabinoids in the form of raw powder or as a smoking mixture.
Analysis
The data were analysed with thematic analysis (Braun & Clarke, 2006; Hayes, 2000) in order to identify themes of recurrent experiences related to negative side effects induced by synthetic cannabinoids. The focus of this study was primarily to identify themes at an explicit level rather than at an implicit or interpretive level. The analysis process was characterised by as much openness and as bias-free an attitude towards the data as possible, and can be summarised by the five phases outlined below.
• In the first two phases, every item in the entire set of data was coded systematically and manually and transferred to a new Word document; this coding resulted in a total of 617 coded elements (CE).
• Phase 3 was the process of identifying recurring patterns by sorting, relating and combining the codes into overarching and potential themes.
• Phase 4 meant reviewing and refining the potential themes. In this phase the raw data and codes were repeatedly checked for consistency. The themes were also checked for both internal homogeneity (coherent data within a theme) and external heterogeneity (distinction between themes).
• Phase 5 involved naming and identifying the essence of each theme, which resulted in a written analysis with supporting quotations from the data set.
Throughout the process, a checklist of criteria for good thematic analysis (Braun & Clarke, 2006) was followed carefully in order to ensure a reliable analysis. Each step of the analysis process was given equal attention and the analysis was carried out with as little distorting preconceptualisation about the data as possible. The results of the analysis were repeatedly matched with the original dataset to check for verification and consistency.
Ethical considerations
The experience reports used as the data source for this study were published anonymously on a public Internet forum. Although the writers were thus unidentifiable, user aliases and URLs connected with specific reports were nevertheless not collected.
Results
A total of 617 coded elements were identified. Table 1 presents the 32 sub-themes that emerged from the analysis, sorted by the total number of CEs, together with the prevalence of CEs in each of the overarching themes: (1) Adverse reactions during acute intoxication (AI); (2) Hangover the day after intoxication (DA); (3) Dependency and withdrawal after long-term use (LT). Some sub-themes were partly related to more than one overarching theme, as the time at which experiences within a sub-theme took place differed (see Table 1).
During the acute phase, the most severely described and commonly experienced side effects were fear, nausea, tachycardia and respiratory difficulties. These experiences sometimes appeared together, creating a downward spiral with increasing and panic-like discomfort. Other and less commonly depicted side effects marking the acute phase were dehydration, memory impairment, pain, spasms and fever. Many experiences were depicted as fierce and appeared to be unpredictable in terms of when they occurred. A well-known fact about drugs in general is that the effects are highly affected by expectations (set) and the circumstances surrounding the use (setting) (Metzner, 1998). There are probably other explanations for the numerous and intense side effects. For example, adverse effects are known to be dose-dependent (Ashton, 2012), which the users also mentioned in the reports. The high potency levels of powdery synthetic cannabinoids, smoked directly or blended into herbal smoking mixtures, are of serious concern because of the risks associated with synthetic cannabinoids (EMCDDA, 2013).
Table 4. The ten most prevalent sub-themes in the third overarching theme: dependency and withdrawal after long-term use.
Withdrawal and dependence: 35
Memory impairment: 20
Mood swings: 20
Disconnected and emotionally numb: 17
Sleeping problems: 17
Loss of appetite: 17
Sweating: 16
Sluggish and dull: 11
Rashes and acne: 11
Nausea and dizziness: 10
Total: 174
In addition, the day-after effects correspond with previous reports (Winstock & Barratt, 2013) and appear to resemble alcohol hangover as described by Swift and Davidson (1998). In general, the day-after effects were depicted as hard in comparison with herbal cannabis, which is known to produce few or moderate residual effects (Chait & Perry, 1994; Chait, Fischman, & Schuster, 1985). Synthetic cannabinoids' stronger potency (Atwood et al., 2010) may explain the apparent discrepancy in terms of hangover symptoms. Smith (2002) showed that the overall research is highly ambiguous, which is believed to reflect the influence of non-substance-specific factors like personality.
package client // assumed package name; the original fragment omits the package clause

import (
	"context"
	"net"
	"net/http"
)

// newConnection takes a scheme and address and creates a connection from it.
// Every request made through the returned client is dialled to this fixed
// scheme/address pair; the request URL's host is ignored.
func newConnection(scheme, address string) Connection {
client := http.Client{
Transport: &http.Transport{
DialContext: func(_ context.Context, _, _ string) (net.Conn, error) {
return net.Dial(scheme, address)
},
},
}
newConn := Connection{
client: &client,
address: address,
scheme: scheme,
}
return newConn
} |
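
// exampleUnixConnection is an assumed usage sketch from within the same
// package (Connection's fields are unexported). The socket path and URL are
// illustrative, not taken from the original source; the URL's host is
// ignored because the custom DialContext above always dials the fixed address.
func exampleUnixConnection() (*http.Response, error) {
	conn := newConnection("unix", "/run/example/api.sock")
	return conn.client.Get("http://unix/v1/status")
}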
/*
* connect the TCP DNS QUERY (IPv6)
*/
int
connect6(void)
{
struct exchange *x;
int ret;
int idx;
struct epoll_event ev;
ret = clock_gettime(CLOCK_REALTIME, &last);
if (ret < 0) {
perror("clock_gettime(connect)");
fatal = 1;
(void) pthread_kill(master, SIGTERM);
return -errno;
}
if (xfree >= 0) {
idx = xfree;
x = xlist + idx;
ret = pthread_mutex_lock(&mtxfree);
if (ret != 0) {
fprintf(stderr, "pthread_mutex_lock(connect6)");
fatal = 1;
(void) pthread_kill(master, SIGTERM);
return -ret;
}
ISC_REMOVE(xfreel, x);
ret = pthread_mutex_unlock(&mtxfree);
if (ret != 0) {
fprintf(stderr, "pthread_mutex_unlock(connect6)");
fatal = 1;
(void) pthread_kill(master, SIGTERM);
return -ret;
}
} else if (xused < xlast) {
idx = xused;
x = xlist + idx;
xused++;
} else
return -ENOMEM;
memset(x, 0, sizeof(*x));
memset(&ev, 0, sizeof(ev));
x->next = -1;
x->prev = NULL;
x->ts0 = last;
x->sock = getsock6();
if (x->sock < 0) {
int result = x->sock;
x->sock = -1;
ret = pthread_mutex_lock(&mtxfree);
if (ret != 0) {
fprintf(stderr, "pthread_mutex_lock(connect6)");
fatal = 1;
(void) pthread_kill(master, SIGTERM);
return -ret;
}
ISC_INSERT(xfree, xfreel, x);
ret = pthread_mutex_unlock(&mtxfree);
if (ret != 0) {
fprintf(stderr, "pthread_mutex_unlock(connect6)");
fatal = 1;
(void) pthread_kill(master, SIGTERM);
return -ret;
}
return result;
}
ret = pthread_mutex_lock(&mtxconn);
if (ret != 0) {
fprintf(stderr, "pthread_mutex_lock(connect6)");
fatal = 1;
(void) pthread_kill(master, SIGTERM);
return -ret;
}
x->state = X_CONN;
ISC_INSERT(xconn, xconnl, x);
ret = pthread_mutex_unlock(&mtxconn);
if (ret != 0) {
fprintf(stderr, "pthread_mutex_unlock(connect6)");
fatal = 1;
(void) pthread_kill(master, SIGTERM);
return -ret;
}
ev.events = EPOLLOUT | EPOLLET | EPOLLONESHOT;
ev.data.fd = idx;
if (epoll_ctl(epoll_ofd, EPOLL_CTL_ADD, x->sock, &ev) < 0) {
perror("epoll_ctl(add output)");
fatal = 1;
(void) pthread_kill(master, SIGTERM);
return -errno;
}
x->order = xccount++;
x->id = (uint16_t) random();
#if 0
if (random_query > 0)
x->rnd = (uint32_t) random();
#endif
return idx;
} |
/**
* Validates entries on Guest register user forms.
*/
@Component("guestRegisterValidator")
public class GuestRegisterValidator implements Validator
{
private static final String CHECK_PWD = "checkPwd";
@Override
public boolean supports(final Class<?> aClass)
{
return GuestRegisterForm.class.equals(aClass);
}
@Override
public void validate(final Object object, final Errors errors)
{
final GuestRegisterForm guestRegisterForm = (GuestRegisterForm) object;
final String newPasswd = guestRegisterForm.getPwd();
final String checkPasswd = guestRegisterForm.getCheckPwd();
final boolean termsCheck = guestRegisterForm.isTermsCheck();
if (StringUtils.isNotEmpty(newPasswd) && StringUtils.isNotEmpty(checkPasswd) && !StringUtils.equals(newPasswd, checkPasswd))
{
errors.rejectValue(CHECK_PWD, "validation.checkPwd.equals");
}
else
{
if (StringUtils.isEmpty(newPasswd))
{
errors.rejectValue("pwd", "register.pwd.invalid");
}
else if (StringUtils.length(newPasswd) < 6 || StringUtils.length(newPasswd) > 255)
{
errors.rejectValue("pwd", "register.pwd.invalid");
}
if (StringUtils.isEmpty(checkPasswd))
{
errors.rejectValue(CHECK_PWD, "register.checkPwd.invalid");
}
else if (StringUtils.length(checkPasswd) < 6 || StringUtils.length(checkPasswd) > 255)
{
errors.rejectValue(CHECK_PWD, "register.checkPwd.invalid");
}
}
validateTermsAndConditions(errors, termsCheck);
}
protected void validateTermsAndConditions(final Errors errors, final boolean termsCheck)
{
if (!termsCheck)
{
errors.rejectValue("termsCheck", "register.terms.not.accepted");
}
}
} |
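
/*
 * Assumed usage sketch (the form setters and wiring below are illustrative,
 * not taken from the original source): validate a populated form manually
 * and inspect the collected errors.
 *
 *   final GuestRegisterForm form = new GuestRegisterForm();
 *   form.setPwd("secret1");
 *   form.setCheckPwd("secret2");
 *   form.setTermsCheck(true);
 *   final Errors errors = new BeanPropertyBindingResult(form, "guestRegisterForm");
 *   new GuestRegisterValidator().validate(form, errors);
 *   // errors now contains "validation.checkPwd.equals" for checkPwd,
 *   // because the two passwords differ.
 */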
/// The possible impurity measures for training.
#[derive(Clone, Copy)]
pub enum SplitQuality {
Gini,
Entropy,
}
/// The set of hyperparameters that can be specified for fitting a
/// [decision tree](struct.DecisionTree.html).
#[derive(Clone, Copy)]
pub struct DecisionTreeParams {
pub n_classes: u64,
pub split_quality: SplitQuality,
pub max_depth: Option<u64>,
pub min_samples_split: u64,
pub min_samples_leaf: u64,
pub min_impurity_decrease: f64,
}
/// A helper struct to build the hyperparameters for a decision tree.
pub struct DecisionTreeParamsBuilder {
n_classes: u64,
split_quality: SplitQuality,
max_depth: Option<u64>,
min_samples_split: u64,
min_samples_leaf: u64,
min_impurity_decrease: f64,
}
impl DecisionTreeParamsBuilder {
pub fn n_classes(mut self, n_classes: u64) -> Self {
self.n_classes = n_classes;
self
}
pub fn split_quality(mut self, split_quality: SplitQuality) -> Self {
self.split_quality = split_quality;
self
}
pub fn max_depth(mut self, max_depth: Option<u64>) -> Self {
self.max_depth = max_depth;
self
}
pub fn min_samples_split(mut self, min_samples_split: u64) -> Self {
self.min_samples_split = min_samples_split;
self
}
pub fn min_samples_leaf(mut self, min_samples_leaf: u64) -> Self {
self.min_samples_leaf = min_samples_leaf;
self
}
pub fn min_impurity_decrease(mut self, min_impurity_decrease: f64) -> Self {
self.min_impurity_decrease = min_impurity_decrease;
self
}
pub fn build(&self) -> DecisionTreeParams {
DecisionTreeParams::build(
self.n_classes,
self.split_quality,
self.max_depth,
self.min_samples_split,
self.min_samples_leaf,
self.min_impurity_decrease,
)
}
}
impl DecisionTreeParams {
/// Defaults are provided if the optional parameters are not specified:
/// * `split_quality = SplitQuality::Gini`
/// * `max_depth = None`
/// * `min_samples_split = 2`
/// * `min_samples_leaf = 1`
/// * `min_impurity_decrease = 0.00001`
pub fn new(n_classes: u64) -> DecisionTreeParamsBuilder {
DecisionTreeParamsBuilder {
            n_classes,
split_quality: SplitQuality::Gini,
max_depth: None,
min_samples_split: 2,
min_samples_leaf: 1,
min_impurity_decrease: 0.00001,
}
}
fn build(
n_classes: u64,
split_quality: SplitQuality,
max_depth: Option<u64>,
min_samples_split: u64,
min_samples_leaf: u64,
min_impurity_decrease: f64,
) -> Self {
// TODO: Check parameters
DecisionTreeParams {
n_classes,
split_quality,
max_depth,
min_samples_split,
min_samples_leaf,
min_impurity_decrease,
}
}
}
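
/// A usage sketch (the values are illustrative, not from the original
/// source): build hyperparameters for a 3-class tree, overriding a few of
/// the documented defaults while the rest keep their default values.
fn example_params() -> DecisionTreeParams {
    DecisionTreeParams::new(3)
        .split_quality(SplitQuality::Entropy)
        .max_depth(Some(8))
        .min_samples_leaf(5)
        .build()
}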
|
package ch.epfl.imhof;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.fail;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
public class GraphTest {
@Test
public void constructorImmutability() {
HashMap<Integer, Set<Integer>> adjacencyList = createAdjacencyList();
Graph<Integer> graph = new Graph<>(adjacencyList);
int immutableCount = 0;
for (Map.Entry<Integer, Set<Integer>> mapping: adjacencyList.entrySet()) {
Set<Integer> valid = new HashSet<>(mapping.getValue());
mapping.getValue().clear();
assertEquals(valid, graph.neighborsOf(mapping.getKey()));
try {
graph.neighborsOf(mapping.getKey()).clear();
} catch(UnsupportedOperationException e) {
immutableCount += 1;
}
}
adjacencyList.clear();
assertNotEquals(graph.nodes().size(), 0);
try {
graph.nodes().clear();
} catch(UnsupportedOperationException e) {
immutableCount += 1;
}
if (immutableCount != createAdjacencyList().size() + 1) {
fail("Constructor does not guarantee immutability.");
}
}
@Test
public void nodesComplete() {
HashMap<Integer, Set<Integer>> adjacencyList = createAdjacencyList();
Graph<Integer> graph = new Graph<>(adjacencyList);
assertEquals(adjacencyList.keySet(), graph.nodes());
}
@Test
public void correctNeighborsOf() {
HashMap<Integer, Set<Integer>> adjacencyList = createAdjacencyList();
Graph<Integer> graph = new Graph<>(adjacencyList);
for (Map.Entry<Integer, Set<Integer>> mapping: adjacencyList.entrySet()) {
assertEquals(mapping.getValue(), graph.neighborsOf(mapping.getKey()));
}
}
@Test
public void builderBuilt() {
HashMap<Integer, Set<Integer>> adjacencyList = createAdjacencyList();
Graph.Builder<Integer> graphBuilder = new Graph.Builder<>();
Set<Pair> edges = new HashSet<>();
for (Integer from: adjacencyList.keySet()) {
for (Integer to: adjacencyList.get(from)) {
edges.add(new Pair(from, to));
}
}
for (Integer node: adjacencyList.keySet()) {
graphBuilder.addNode(node);
}
for (Pair edge: edges) {
graphBuilder.addEdge(edge.start, edge.end);
}
Graph<Integer> graph = graphBuilder.build();
assertEquals(adjacencyList.keySet(), graph.nodes());
for (Map.Entry<Integer, Set<Integer>> mapping: adjacencyList.entrySet()) {
assertEquals(mapping.getValue(), graph.neighborsOf(mapping.getKey()));
}
}
@Test(expected = IllegalArgumentException.class)
public void neighborsOfThrowsExceptionWhenUnknownNode() {
new Graph<Integer>(Collections.emptyMap()).neighborsOf(1);
}
@Test(expected = IllegalArgumentException.class)
public void builderThrowsExceptionWhenUnknownNode1() {
Graph.Builder<Integer> graphBuilder = new Graph.Builder<>();
Integer node = 1;
graphBuilder.addNode(node);
graphBuilder.addEdge(node, 2);
}
@Test(expected = IllegalArgumentException.class)
public void builderThrowsExceptionWhenUnknownNode2() {
Graph.Builder<Integer> graphBuilder = new Graph.Builder<>();
Integer node = 1;
graphBuilder.addNode(node);
graphBuilder.addEdge(2, node);
}
    // Represents an undirected edge as a canonically ordered pair; equals and
    // hashCode are needed so the HashSet<Pair> above deduplicates (a,b)/(b,a).
    private final class Pair {
        final Integer start, end;
        Pair(Integer start, Integer end) {
            if (start < end) {
                this.start = start;
                this.end = end;
            } else {
                this.start = end;
                this.end = start;
            }
        }
        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Pair)) {
                return false;
            }
            Pair other = (Pair) o;
            return start.equals(other.start) && end.equals(other.end);
        }
        @Override
        public int hashCode() {
            return 31 * start.hashCode() + end.hashCode();
        }
    }
private HashMap<Integer, Set<Integer>> createAdjacencyList() {
HashMap<Integer, Set<Integer>> adjacencyList = new HashMap<>();
adjacencyList.put(1, createNodeSet(6));
adjacencyList.put(2, createNodeSet(5));
adjacencyList.put(3, createNodeSet(5,6,10));
adjacencyList.put(4, createNodeSet());
adjacencyList.put(5, createNodeSet(2,3,10));
adjacencyList.put(6, createNodeSet(1,3));
adjacencyList.put(10, createNodeSet(3,5));
return adjacencyList;
}
private Set<Integer> createNodeSet(Integer... edges) {
Set<Integer> nodeSet = new HashSet<>();
for (Integer edge: edges) {
nodeSet.add(edge);
}
return nodeSet;
}
}
|
/**
* Verifies that we can get a new connection.
*/
@Test
void createsNewConnection() {
EventHubConnectionProcessor processor = Mono.fromCallable(() -> connection).repeat()
.subscribeWith(eventHubConnectionProcessor);
StepVerifier.create(processor)
.expectNext(connection)
.expectComplete()
.verify(timeout);
} |
/**
* Tests the number of node points in the curves.
*/
@Test
public void testDataInBundle() {
final Pair<MulticurveProviderDiscount, CurveBuildingBlockBundle> result = CURVE_BUILDER.getBuilder().buildCurves(VALUATION_DATE, FIXINGS);
final MulticurveProviderDiscount curves = result.getFirst();
assertEquals(curves.getDiscountingCurves().size(), 1);
assertEquals(curves.getForwardIborCurves().size(), 1);
assertEquals(curves.getForwardONCurves().size(), 1);
final YieldAndDiscountCurve discounting = curves.getCurve(Currency.USD);
assertEquals(discounting.getNumberOfParameters(), OIS_TENORS.length + 1);
assertEquals(discounting, curves.getCurve(IndexConverter.toIndexOn(FED_FUNDS_INDEX)));
assertEquals(curves.getCurve(IndexConverter.toIborIndex(LIBOR_INDEX)).getNumberOfParameters(), LIBOR_SWAP_TENORS.length + 1);
} |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.desktopvirtualization.models;
import com.azure.resourcemanager.desktopvirtualization.fluent.models.ExpandMsixImageInner;
import java.time.OffsetDateTime;
import java.util.List;
/** An immutable client-side representation of ExpandMsixImage. */
public interface ExpandMsixImage {
/**
* Gets the id property: Fully qualified resource Id for the resource.
*
* @return the id value.
*/
String id();
/**
* Gets the name property: The name of the resource.
*
* @return the name value.
*/
String name();
/**
* Gets the type property: The type of the resource.
*
* @return the type value.
*/
String type();
/**
* Gets the packageAlias property: Alias of MSIX Package.
*
* @return the packageAlias value.
*/
String packageAlias();
/**
* Gets the imagePath property: VHD/CIM image path on Network Share.
*
* @return the imagePath value.
*/
String imagePath();
/**
* Gets the packageName property: Package Name from appxmanifest.xml.
*
* @return the packageName value.
*/
String packageName();
/**
* Gets the packageFamilyName property: Package Family Name from appxmanifest.xml. Contains Package Name and
* Publisher name.
*
* @return the packageFamilyName value.
*/
String packageFamilyName();
/**
* Gets the packageFullName property: Package Full Name from appxmanifest.xml.
*
* @return the packageFullName value.
*/
String packageFullName();
/**
* Gets the displayName property: User friendly Name to be displayed in the portal.
*
* @return the displayName value.
*/
String displayName();
/**
* Gets the packageRelativePath property: Relative Path to the package inside the image.
*
* @return the packageRelativePath value.
*/
String packageRelativePath();
/**
* Gets the isRegularRegistration property: Specifies how to register Package in feed.
*
* @return the isRegularRegistration value.
*/
Boolean isRegularRegistration();
/**
* Gets the isActive property: Make this version of the package the active one across the hostpool.
*
* @return the isActive value.
*/
Boolean isActive();
/**
* Gets the packageDependencies property: List of package dependencies.
*
* @return the packageDependencies value.
*/
List<MsixPackageDependencies> packageDependencies();
/**
* Gets the version property: Package Version found in the appxmanifest.xml.
*
* @return the version value.
*/
String version();
/**
* Gets the lastUpdated property: Date Package was last updated, found in the appxmanifest.xml.
*
* @return the lastUpdated value.
*/
OffsetDateTime lastUpdated();
/**
* Gets the packageApplications property: List of package applications.
*
* @return the packageApplications value.
*/
List<MsixPackageApplications> packageApplications();
/**
* Gets the inner com.azure.resourcemanager.desktopvirtualization.fluent.models.ExpandMsixImageInner object.
*
* @return the inner object.
*/
ExpandMsixImageInner innerModel();
}
|
/// Initializes a SDL platform instance and configures imgui.
///
/// This function configures imgui-rs in the following ways:
///
/// * backend flags are updated
/// * keys are configured
/// * platform name is set
pub fn init(imgui: &mut Context) -> SdlPlatform {
let io = imgui.io_mut();
io.backend_flags.insert(BackendFlags::HAS_MOUSE_CURSORS);
io.backend_flags.insert(BackendFlags::HAS_SET_MOUSE_POS);
io[Key::Tab] = Scancode::Tab as _;
io[Key::LeftArrow] = Scancode::Left as _;
io[Key::RightArrow] = Scancode::Right as _;
io[Key::UpArrow] = Scancode::Up as _;
io[Key::DownArrow] = Scancode::Down as _;
io[Key::PageUp] = Scancode::PageUp as _;
io[Key::PageDown] = Scancode::PageDown as _;
io[Key::Home] = Scancode::Home as _;
io[Key::End] = Scancode::End as _;
io[Key::Insert] = Scancode::Insert as _;
io[Key::Delete] = Scancode::Delete as _;
io[Key::Backspace] = Scancode::Backspace as _;
io[Key::Space] = Scancode::Space as _;
io[Key::Enter] = Scancode::Return as _;
io[Key::Escape] = Scancode::Escape as _;
io[Key::KeyPadEnter] = Scancode::KpEnter as _;
io[Key::A] = Scancode::A as _;
io[Key::C] = Scancode::C as _;
io[Key::V] = Scancode::V as _;
io[Key::X] = Scancode::X as _;
io[Key::Y] = Scancode::Y as _;
io[Key::Z] = Scancode::Z as _;
imgui.set_platform_name(Some(format!(
"imgui-sdl2-support {}",
env!("CARGO_PKG_VERSION")
)));
SdlPlatform {
cursor_instance: None,
last_frame: Instant::now(),
mouse_buttons: [Button::new(); 5],
}
} |
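
// Assumed usage sketch (the per-event and per-frame methods live elsewhere
// in this crate and are not shown in this fragment; names outside this file
// are illustrative):
//
//     let mut imgui = Context::create();
//     let mut platform = init(&mut imgui);
//     // for each SDL event:  platform.handle_event(&mut imgui, &event);
//     // before each frame:   platform.prepare_frame(&mut imgui, &window, &event_pump);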
def log_watcher_error_simplified(args, new_log):
    """Forward watcher output to the error log, dropping a trailing newline."""
    if not new_log:
        return
    log = args[WATCHEDIO_LOG]
    if new_log.endswith("\n"):
        new_log = new_log[:-1]
    log.cl_error(new_log)
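
# Assumed usage sketch: `args` maps well-known keys to collaborators, and the
# log object exposes cl_error(). The names below are illustrative, not taken
# from the original source.
#
#   args = {WATCHEDIO_LOG: console_log}
#   log_watcher_error_simplified(args, "disk full\n")   # logs "disk full"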
/**
* Created by Administrator on 16-3-20.
*/
public class ViewListPresenter implements Presenter {
private EntryListView entryListView;
private final UseCase getEntriesUseCase;
    public ViewListPresenter(UseCase getEntriesUseCase) {
        this.getEntriesUseCase = getEntriesUseCase;
    }
public void setView(@NonNull EntryListView view) {
this.entryListView = view;
}
@Override
public void resume() {
}
@Override
public void pause() {
}
@Override
public void destroy() {
this.getEntriesUseCase.unsubscribe();
this.entryListView = null;
}
    /**
     * Initializes the presenter by starting to retrieve the entry list.
     */
    public void initialize() {
        this.loadEntryList();
    }
    /**
     * Loads all entries.
     */
    private void loadEntryList() {
        this.hideViewRetry();
        this.showViewLoading();
        this.getEntries();
    }
public void onUserClicked(DisplayEntry entry) {
this.entryListView.viewEntry(entry);
}
private void showViewLoading() {
this.entryListView.showLoading();
}
private void hideViewLoading() {
this.entryListView.hideLoading();
}
private void showViewRetry() {
this.entryListView.showRetry();
}
private void hideViewRetry() {
this.entryListView.hideRetry();
}
    private void showErrorMessage() {
        // TODO: needs to be implemented
    }
private void showEntriesInView(GetEntriesUseCase.EntriesResult ret) {
this.entryListView.renderList(ret);
}
private void getEntries() {
this.getEntriesUseCase.execute(new EntriesSubscriber());
}
private final class EntriesSubscriber extends DefaultSubscriber<ReturnResult> {
        @Override public void onCompleted() {
            hideViewLoading();
        }
@Override public void onError(Throwable e) {
hideViewLoading();
// showErrorMessage(new DefaultErrorBundle((Exception) e)); //....
showViewRetry();
}
@Override public void onNext(ReturnResult ret) {
showEntriesInView((GetEntriesUseCase.EntriesResult)ret);
}
}
} |
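
/*
 * Assumed lifecycle sketch (caller side, not part of the original class):
 *
 *   presenter.setView(entryListView); // attach the view before use
 *   presenter.initialize();           // shows loading and executes the use case
 *   ...
 *   presenter.destroy();              // unsubscribes and releases the view
 */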
import pandas as pd


def stack(df):
    """Stack a DataFrame into a single-column frame, renaming the resulting
    (row, column) MultiIndex entries to "<column>_<row>"."""
    df_out = df.stack()
    df_out.index = df_out.index.map('{0[1]}_{0[0]}'.format)
    if isinstance(df_out, pd.Series):
        df_out = df_out.to_frame()
    return df_out
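
# A worked example (illustrative): stack() flattens the columns into the
# index, and the map above renames each (row, column) pair to "<column>_<row>".
#
#   df = pd.DataFrame({'a': [1, 2], 'b': [3, 4]}, index=['x', 'y'])
#   stack(df)
#   #      0
#   # a_x  1
#   # b_x  3
#   # a_y  2
#   # b_y  4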
# Read n (unused) and k, then sort the scores in non-increasing order.
_, k = input().split()
k = int(k)
scores = sorted(map(int, input().split()), reverse=True)
# Everyone scoring at least as much as the k-th place finisher advances,
# provided their score is positive. Since the scores are sorted, we can
# stop at the first one that fails the test.
threshold = scores[k - 1]
count = 0
for s in scores:
    if s >= threshold and s > 0:
        count += 1
    else:
        break
print(count)
|