package leet1464;
import java.util.Comparator;
import java.util.PriorityQueue;
public class Solution {
/*
 * Note: this solution does not account for negative numbers or zero; it only
 * works for positive inputs (LeetCode 1464 guarantees nums[i] >= 1).
 */
public int maxProduct(int[] nums) {
// Max-heap: the comparator orders larger values first, so peek() returns the largest pairwise product
PriorityQueue<Integer> queue = new PriorityQueue<>(new Comparator<Integer>() {
@Override
public int compare(Integer o1, Integer o2) {
return o2 - o1;
}
});
for (int i = 0; i < nums.length; i++) {
for (int j = i; j < nums.length; j++) {
if(i != j){
int num = (nums[i] - 1) * (nums[j] - 1);
queue.add(num);
}
}
}
return queue.peek();
}
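/*
 * A minimal O(n) alternative (a sketch, not part of the original solution):
 * the answer only needs the two largest values, so track them in one pass
 * instead of pushing every pairwise product onto a heap. Like the method
 * above, this assumes positive inputs (LeetCode 1464 guarantees nums[i] >= 1).
 */
public int maxProductLinear(int[] nums) {
// largest and second-largest values seen so far
int max1 = Integer.MIN_VALUE;
int max2 = Integer.MIN_VALUE;
for (int num : nums) {
if (num > max1) {
max2 = max1;
max1 = num;
} else if (num > max2) {
max2 = num;
}
}
return (max1 - 1) * (max2 - 1);
}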
}
|
def is_middleware(target):
    """Return True if target is a callable flagged as face middleware."""
    return callable(target) and bool(getattr(target, 'is_face_middleware', False))
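# Usage sketch (hypothetical names): middleware is any callable carrying a
# truthy `is_face_middleware` attribute.
#
#     def auth(request): ...
#     auth.is_face_middleware = True
#     assert is_middleware(auth)
#     assert not is_middleware(lambda request: None)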
|
import '@babel/polyfill';
import * as express from 'express';
import * as http from 'http';
import * as fs from 'fs';
import * as https from 'https';
import * as PrettyError from 'pretty-error';
import * as React from 'react';
import * as path from 'path';
import { renderToString } from 'react-dom/server';
import Html from './components/Html';
import { configureStore } from 'common/reduck/store';
import { Root } from './Root';
import { router } from 'common/router';
import Helmet from 'react-helmet';
import { StaticRouterContext } from 'react-router';
import { errorHandle } from './utils/errorHandler';
import { prefetchData } from 'utils/prefetchData';
import { loadLocales, getClientLanguage } from './utils/loadLocales';
import { cache } from './utils/cache';
import { localeReducerInitialState } from 'modules/locale/reducer';
import { userReducerInitialState } from 'modules/user/reducer';
import * as cookieParser from 'cookie-parser';
import * as Loadable from 'react-loadable';
import { getBundles } from 'react-loadable/webpack';
// tslint:disable-next-line:no-var-requires
const preloadStats = require('./react-loadable.json');
const pe = new PrettyError();
pe.start();
// The server code must export a function
// (`parameters` may contain some miscellaneous library-specific stuff)
export default function(parameters) {
const server: express.Application = express();
server.use(cookieParser());
server.get('/server.js', (req, res) => {
res.writeHead(404);
res.end();
});
// serve our static stuff like index.css
server.use(express.static(global.boil.dist, { index: false }));
// Simple api
server.post('/api/user/:id', (req, res) => {
res.json({
login: req.params.id,
id: 9361325,
avatar_url: 'https://avatars3.githubusercontent.com/u/9361325?v=4',
});
});
// send all requests to index.html so browserHistory works
server.get('*', cache(), (req, res) => {
const assets = parameters.chunks();
const clientLanguages = getClientLanguage(req);
const context: StaticRouterContext = {};
const modules: string[] = [];
loadLocales(clientLanguages).then((locales) => {
const store = configureStore({
user: { ...userReducerInitialState, count: 50 },
locale: {
...localeReducerInitialState,
currentLocale:
clientLanguages.length !== 0
? clientLanguages[0]
: localeReducerInitialState.currentLocale,
locales,
},
});
prefetchData(store, router, req.url).then(() => {
const content = renderToString(
<Loadable.Capture report={(moduleName) => modules.push(moduleName)}>
<Root {...{ store, context, url: req.url }} />
</Loadable.Capture>,
);
const helmet = Helmet.renderStatic();
const preloaded: any[] = getBundles(preloadStats, modules);
errorHandle(context, res);
res.send(`<!doctype html>\n
${renderToString(
<Html {...{ store, helmet, assets, content, preloaded }} />,
)}`);
});
});
});
let hServer;
if (global.boil.https) {
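// Local/self-signed certificate options; requestCert and rejectUnauthorized
// are disabled, which is only suitable for development.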
const options = {
hostname: 'demo.local',
key: fs.readFileSync('cert/server.key'),
cert: fs.readFileSync('cert/server.crt'),
requestCert: false,
rejectUnauthorized: false,
};
hServer = https.createServer(options, server);
} else {
hServer = http.createServer(server);
}
Loadable.preloadAll()
.then(() => {
hServer.listen(global.boil.port, global.boil.host, (err) => {
if (err) {
console.error(err);
return;
}
console.info(
`Ssr server started at \x1b[36mhttp${
global.boil.https ? 's' : ''
}://${global.boil.host}:${global.boil.port}\x1b[0m.`,
);
if (global.boil.isDevelopment) {
console.log(`🔥🔥🔥 Tip 🔥🔥🔥
You can use it for debugging on all devices on your network 📱`);
}
if (global.boil.hostname && global.boil.isDevelopment) {
console.info(
`For a better experience on the current device you can use \x1b[36mhttp${
global.boil.https ? 's' : ''
}://${global.boil.hostname}:${global.boil.port} instead\x1b[0m.`,
);
}
});
})
.catch((err) => {
console.log(err);
});
}
|
// internal/game/tetrimino/opiece_test.go
package tetrimino
import "testing"
var oPieceTests = map[orientation]tetriminoTestCase{
spawn: tetriminoTestCase{
expectedMaxY: tetriminoCoordTest{
y: 3,
ignoreX: true,
},
expectedMinY: tetriminoCoordTest{
y: 2,
ignoreX: true,
},
expectedMaxX: tetriminoCoordTest{
x: 2,
ignoreY: true,
},
expectedMinX: tetriminoCoordTest{
x: 1,
ignoreY: true,
},
},
clockwise: tetriminoTestCase{
expectedMaxY: tetriminoCoordTest{
y: 3,
ignoreX: true,
},
expectedMinY: tetriminoCoordTest{
y: 2,
ignoreX: true,
},
expectedMaxX: tetriminoCoordTest{
x: 2,
ignoreY: true,
},
expectedMinX: tetriminoCoordTest{
x: 1,
ignoreY: true,
},
},
opposite: tetriminoTestCase{
expectedMaxY: tetriminoCoordTest{
y: 3,
ignoreX: true,
},
expectedMinY: tetriminoCoordTest{
y: 2,
ignoreX: true,
},
expectedMaxX: tetriminoCoordTest{
x: 2,
ignoreY: true,
},
expectedMinX: tetriminoCoordTest{
x: 1,
ignoreY: true,
},
},
counterclockwise: tetriminoTestCase{
expectedMaxY: tetriminoCoordTest{
y: 3,
ignoreX: true,
},
expectedMinY: tetriminoCoordTest{
y: 2,
ignoreX: true,
},
expectedMaxX: tetriminoCoordTest{
x: 2,
ignoreY: true,
},
expectedMinX: tetriminoCoordTest{
x: 1,
ignoreY: true,
},
},
}
func TestOPiece(t *testing.T) {
piece := newOPiece(4, 4)
testPiece(t, piece, oPieceTests)
piece = newOPiece(10, 24)
// no wall-kicks for O piece
oPieceWallKickTests := make(map[orientation]map[orientation][]wallKickTest)
if err := testRotationTests(piece, oPieceWallKickTests); err != nil {
t.Errorf("%s", err)
}
}
|
// This file is part of Substrate.
// Copyright (C) 2018-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! GRANDPA block finality proof generation and check.
//!
//! Finality of block B is proved by providing:
//! 1) the justification for the descendant block F;
//! 2) headers sub-chain (B; F] if B != F;
//! 3) proof of GRANDPA::authorities() if the set changes at block F.
//!
//! Since the earliest possible justification is returned, the GRANDPA authority set
//! at block F is guaranteed to be the same as at block B (this is because a block
//! that enacts a new GRANDPA authority set always comes with a justification). It also
//! means that the `set_id` is the same at blocks B and F.
//!
//! Let U be the last finalized block known to the caller. If the authority set has changed
//! several times in the (U; F] interval, multiple finality proof fragments are returned (one
//! for each authority set change) and they must be verified in order.
//!
//! The finality proof provider can choose how to provide the finality proof. An incomplete
//! finality proof (one that finalizes some block C that is an ancestor of B and a descendant
//! of U) may be returned.
use log::trace;
use std::sync::Arc;
use finality_grandpa::BlockNumberOps;
use parity_scale_codec::{Encode, Decode};
use sp_blockchain::{Backend as BlockchainBackend, Error as ClientError, Result as ClientResult};
use sp_runtime::{
Justification, generic::BlockId,
traits::{NumberFor, Block as BlockT, Header as HeaderT, One},
};
use sc_client_api::backend::Backend;
use sp_finality_grandpa::AuthorityId;
use crate::authorities::AuthoritySetChanges;
use crate::justification::GrandpaJustification;
use crate::SharedAuthoritySet;
use crate::VoterSet;
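/// Maximum number of unknown headers to attach to a finality proof; bounds the
/// proof size when the caller is far behind the latest finalized block.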
const MAX_UNKNOWN_HEADERS: usize = 100_000;
/// Finality proof provider for serving network requests.
pub struct FinalityProofProvider<BE, Block: BlockT> {
backend: Arc<BE>,
shared_authority_set: Option<SharedAuthoritySet<Block::Hash, NumberFor<Block>>>,
}
impl<B, Block: BlockT> FinalityProofProvider<B, Block>
where
B: Backend<Block> + Send + Sync + 'static,
{
/// Create new finality proof provider using:
///
/// - backend for accessing blockchain data;
/// - authority_provider for calling and proving runtime methods;
/// - shared_authority_set for accessing authority set data.
pub fn new(
backend: Arc<B>,
shared_authority_set: Option<SharedAuthoritySet<Block::Hash, NumberFor<Block>>>,
) -> Self {
FinalityProofProvider {
backend,
shared_authority_set,
}
}
/// Create new finality proof provider for the service using:
///
/// - backend for accessing blockchain data;
/// - storage_provider, which is generally a client;
/// - shared_authority_set for accessing authority set data.
pub fn new_for_service(
backend: Arc<B>,
shared_authority_set: Option<SharedAuthoritySet<Block::Hash, NumberFor<Block>>>,
) -> Arc<Self> {
Arc::new(Self::new(backend, shared_authority_set))
}
}
impl<B, Block> FinalityProofProvider<B, Block>
where
Block: BlockT,
NumberFor<Block>: BlockNumberOps,
B: Backend<Block> + Send + Sync + 'static,
{
/// Prove finality for the given block number by returning a Justification for the last block of
/// the authority set.
pub fn prove_finality(
&self,
block: NumberFor<Block>
) -> Result<Option<Vec<u8>>, FinalityProofError> {
let authority_set_changes = if let Some(changes) = self
.shared_authority_set
.as_ref()
.map(SharedAuthoritySet::authority_set_changes)
{
changes
} else {
return Ok(None);
};
prove_finality::<_, _, GrandpaJustification<Block>>(
&*self.backend.blockchain(),
authority_set_changes,
block,
)
}
}
/// Finality for block B is proved by providing:
/// 1) the justification for the descendant block F;
/// 2) headers sub-chain (B; F] if B != F;
#[derive(Debug, PartialEq, Encode, Decode, Clone)]
pub struct FinalityProof<Header: HeaderT> {
/// The hash of block F for which justification is provided.
pub block: Header::Hash,
/// Justification of the block F.
pub justification: Vec<u8>,
/// The set of headers in the range (B; F] that we believe are unknown to the caller. Ordered.
pub unknown_headers: Vec<Header>,
}
/// Errors occurring when trying to prove finality
#[derive(Debug, derive_more::Display, derive_more::From)]
pub enum FinalityProofError {
/// The requested block has not yet been finalized.
#[display(fmt = "Block not yet finalized")]
BlockNotYetFinalized,
/// The requested block is not covered by authority set changes. Likely this means the block is
/// in the latest authority set, and the subscription API is more appropriate.
#[display(fmt = "Block not covered by authority set changes")]
BlockNotInAuthoritySetChanges,
/// Errors originating from the client.
Client(sp_blockchain::Error),
}
fn prove_finality<Block, B, J>(
blockchain: &B,
authority_set_changes: AuthoritySetChanges<NumberFor<Block>>,
block: NumberFor<Block>,
) -> Result<Option<Vec<u8>>, FinalityProofError>
where
Block: BlockT,
B: BlockchainBackend<Block>,
J: ProvableJustification<Block::Header>,
{
// Early-return if we are sure that there are no blocks finalized AFTER the requested block
let info = blockchain.info();
if info.finalized_number <= block {
let err = format!(
"Requested finality proof for descendant of #{} while we only have finalized #{}.",
block,
info.finalized_number,
);
trace!(target: "afg", "{}", &err);
return Err(FinalityProofError::BlockNotYetFinalized);
}
// Get set_id the block belongs to, and the last block of the set which should contain a
// Justification we can use to prove the requested block.
let (_, last_block_for_set) = if let Some(id) = authority_set_changes.get_set_id(block) {
id
} else {
trace!(
target: "afg",
"AuthoritySetChanges does not cover the requested block #{}. \
Maybe the subscription API is more appropriate.",
block,
);
return Err(FinalityProofError::BlockNotInAuthoritySetChanges);
};
// Get the Justification stored at the last block of the set
let last_block_for_set_id = BlockId::Number(last_block_for_set);
let justification =
if let Some(justification) = blockchain.justification(last_block_for_set_id)? {
justification
} else {
trace!(
target: "afg",
"No justification found when making finality proof for {}. Returning empty proof.",
block,
);
return Ok(None);
};
// Collect all headers from the requested block until the last block of the set
let unknown_headers = {
let mut headers = Vec::new();
let mut current = block + One::one();
loop {
if current >= last_block_for_set || headers.len() >= MAX_UNKNOWN_HEADERS {
break;
}
headers.push(blockchain.expect_header(BlockId::Number(current))?);
current += One::one();
}
headers
};
Ok(Some(
FinalityProof {
block: blockchain.expect_block_hash_from_id(&last_block_for_set_id)?,
justification,
unknown_headers,
}
.encode(),
))
}
/// Check GRANDPA proof-of-finality for the given block.
///
/// Returns the vector of headers that MUST be validated + imported
/// AND if at least one of those headers is invalid, all other MUST be considered invalid.
///
/// This is currently not used, and exists primarily as an example of how to check finality proofs.
#[cfg(test)]
fn check_finality_proof<Header: HeaderT, J>(
current_set_id: u64,
current_authorities: sp_finality_grandpa::AuthorityList,
remote_proof: Vec<u8>,
) -> ClientResult<FinalityProof<Header>>
where
J: ProvableJustification<Header>,
{
let proof = FinalityProof::<Header>::decode(&mut &remote_proof[..])
.map_err(|_| ClientError::BadJustification("failed to decode finality proof".into()))?;
let justification: J = Decode::decode(&mut &proof.justification[..])
.map_err(|_| ClientError::JustificationDecode)?;
justification.verify(current_set_id, &current_authorities)?;
Ok(proof)
}
/// Justification used to prove block finality.
pub trait ProvableJustification<Header: HeaderT>: Encode + Decode {
/// Verify justification with respect to authorities set and authorities set id.
fn verify(&self, set_id: u64, authorities: &[(AuthorityId, u64)]) -> ClientResult<()>;
/// Decode and verify justification.
fn decode_and_verify(
justification: &Justification,
set_id: u64,
authorities: &[(AuthorityId, u64)],
) -> ClientResult<Self> {
let justification =
Self::decode(&mut &**justification).map_err(|_| ClientError::JustificationDecode)?;
justification.verify(set_id, authorities)?;
Ok(justification)
}
}
impl<Block: BlockT> ProvableJustification<Block::Header> for GrandpaJustification<Block>
where
NumberFor<Block>: BlockNumberOps,
{
fn verify(&self, set_id: u64, authorities: &[(AuthorityId, u64)]) -> ClientResult<()> {
let authorities = VoterSet::new(authorities.iter().cloned()).ok_or(
ClientError::Consensus(sp_consensus::Error::InvalidAuthoritiesSet),
)?;
GrandpaJustification::verify_with_voter_set(self, set_id, &authorities)
}
}
#[cfg(test)]
pub(crate) mod tests {
use super::*;
use crate::authorities::AuthoritySetChanges;
use sp_core::crypto::Public;
use sp_finality_grandpa::AuthorityList;
use sc_client_api::NewBlockState;
use sc_client_api::in_mem::Blockchain as InMemoryBlockchain;
use substrate_test_runtime_client::runtime::{Block, Header, H256};
pub(crate) type FinalityProof = super::FinalityProof<Header>;
#[derive(Debug, PartialEq, Encode, Decode)]
pub struct TestJustification(pub (u64, AuthorityList), pub Vec<u8>);
impl ProvableJustification<Header> for TestJustification {
fn verify(&self, set_id: u64, authorities: &[(AuthorityId, u64)]) -> ClientResult<()> {
if (self.0).0 != set_id || (self.0).1 != authorities {
return Err(ClientError::BadJustification("test".into()));
}
Ok(())
}
}
#[derive(Debug, PartialEq, Encode, Decode)]
pub struct TestBlockJustification(TestJustification, u64, H256);
impl ProvableJustification<Header> for TestBlockJustification {
fn verify(&self, set_id: u64, authorities: &[(AuthorityId, u64)]) -> ClientResult<()> {
self.0.verify(set_id, authorities)
}
}
fn header(number: u64) -> Header {
let parent_hash = match number {
0 => Default::default(),
_ => header(number - 1).hash(),
};
Header::new(
number,
H256::from_low_u64_be(0),
H256::from_low_u64_be(0),
parent_hash,
Default::default(),
)
}
fn test_blockchain() -> InMemoryBlockchain<Block> {
let blockchain = InMemoryBlockchain::<Block>::new();
blockchain
.insert(header(0).hash(), header(0), Some(vec![0]), None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(1).hash(), header(1), Some(vec![1]), None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(2).hash(), header(2), None, None, NewBlockState::Best)
.unwrap();
blockchain
.insert(header(3).hash(), header(3), Some(vec![3]), None, NewBlockState::Final)
.unwrap();
blockchain
}
#[test]
fn finality_proof_fails_if_no_more_last_finalized_blocks() {
let blockchain = test_blockchain();
blockchain
.insert(header(4).hash(), header(4), Some(vec![1]), None, NewBlockState::Best)
.unwrap();
blockchain
.insert(header(5).hash(), header(5), Some(vec![2]), None, NewBlockState::Best)
.unwrap();
let mut authority_set_changes = AuthoritySetChanges::empty();
authority_set_changes.append(0, 5);
// The last finalized block is 3, so we cannot provide further justifications.
let proof_of_4 = prove_finality::<_, _, TestJustification>(
&blockchain,
authority_set_changes,
*header(4).number(),
);
assert!(matches!(proof_of_4, Err(FinalityProofError::BlockNotYetFinalized)));
}
#[test]
fn finality_proof_is_none_if_no_justification_known() {
let blockchain = test_blockchain();
blockchain
.insert(header(4).hash(), header(4), None, None, NewBlockState::Final)
.unwrap();
let mut authority_set_changes = AuthoritySetChanges::empty();
authority_set_changes.append(0, 4);
// Block 4 is finalized without justification
// => we can't prove finality of 3
let proof_of_3 = prove_finality::<_, _, TestJustification>(
&blockchain,
authority_set_changes,
*header(3).number(),
)
.unwrap();
assert_eq!(proof_of_3, None);
}
#[test]
fn finality_proof_check_fails_when_proof_decode_fails() {
// When we can't decode proof from Vec<u8>
check_finality_proof::<_, TestJustification>(
1,
vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)],
vec![42],
)
.unwrap_err();
}
#[test]
fn finality_proof_check_fails_when_proof_is_empty() {
// When decoded proof has zero length
check_finality_proof::<_, TestJustification>(
1,
vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)],
Vec::<TestJustification>::new().encode(),
)
.unwrap_err();
}
#[test]
fn finality_proof_check_works() {
let auth = vec![(AuthorityId::from_slice(&[3u8; 32]), 1u64)];
let finality_proof = FinalityProof {
block: header(2).hash(),
justification: TestJustification((1, auth.clone()), vec![7]).encode(),
unknown_headers: Vec::new(),
};
let proof = check_finality_proof::<_, TestJustification>(
1,
auth.clone(),
finality_proof.encode(),
)
.unwrap();
assert_eq!(proof, finality_proof);
}
#[test]
fn finality_proof_using_authority_set_changes_fails_with_undefined_start() {
let blockchain = test_blockchain();
let auth = vec![(AuthorityId::from_slice(&[1u8; 32]), 1u64)];
let just4 = TestJustification((0, auth.clone()), vec![4]).encode();
let just7 = TestJustification((1, auth.clone()), vec![7]).encode();
blockchain
.insert(header(4).hash(), header(4), Some(just4), None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(5).hash(), header(5), None, None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(6).hash(), header(6), None, None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(7).hash(), header(7), Some(just7.clone()), None, NewBlockState::Final)
.unwrap();
// We have stored the correct block number for the relevant set, but as we are missing the
// block for the preceding set the start is not well-defined.
let mut authority_set_changes = AuthoritySetChanges::empty();
authority_set_changes.append(1, 7);
let proof_of_5 = prove_finality::<_, _, TestJustification>(
&blockchain,
authority_set_changes,
*header(5).number(),
);
assert!(matches!(proof_of_5, Err(FinalityProofError::BlockNotInAuthoritySetChanges)));
}
#[test]
fn finality_proof_using_authority_set_changes_works() {
let blockchain = test_blockchain();
let auth = vec![(AuthorityId::from_slice(&[1u8; 32]), 1u64)];
let just4 = TestJustification((0, auth.clone()), vec![4]).encode();
let just7 = TestJustification((1, auth.clone()), vec![7]).encode();
blockchain
.insert(header(4).hash(), header(4), Some(just4), None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(5).hash(), header(5), None, None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(6).hash(), header(6), None, None, NewBlockState::Final)
.unwrap();
blockchain
.insert(header(7).hash(), header(7), Some(just7.clone()), None, NewBlockState::Final)
.unwrap();
let mut authority_set_changes = AuthoritySetChanges::empty();
authority_set_changes.append(0, 4);
authority_set_changes.append(1, 7);
let proof_of_5: FinalityProof = Decode::decode(
&mut &prove_finality::<_, _, TestJustification>(
&blockchain,
authority_set_changes,
*header(5).number(),
)
.unwrap()
.unwrap()[..],
)
.unwrap();
assert_eq!(
proof_of_5,
FinalityProof {
block: header(7).hash(),
justification: just7,
unknown_headers: vec![header(6)],
}
);
}
}
|
/**
* Logging implementation of the TraceListener method that is called just
* after the formatter listener is called.
*
* @param generateEvent the generate event.
*/
public void generated(GenerateEvent generateEvent)
{
counters[TYPE_GENERATED]++;
Hashtable attrs = new Hashtable();
attrs.put("event", "generated");
StringBuffer buf = new StringBuffer(" <eventtype ");
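// Map the serializer event type to an XML-like trace element.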
switch (generateEvent.m_eventtype)
{
case SerializerTrace.EVENTTYPE_STARTDOCUMENT :
buf.append("type=\"STARTDOCUMENT\">");
break;
case SerializerTrace.EVENTTYPE_ENDDOCUMENT :
buf.append("type=\"ENDDOCUMENT\">");
break;
case SerializerTrace.EVENTTYPE_STARTELEMENT :
buf.append("type=\"STARTELEMENT\">" + generateEvent.m_name);
break;
case SerializerTrace.EVENTTYPE_ENDELEMENT :
buf.append("type=\"ENDELEMENT\">" + generateEvent.m_name);
break;
case SerializerTrace.EVENTTYPE_CHARACTERS :
String chars1 = new String(generateEvent.m_characters, generateEvent.m_start, generateEvent.m_length);
buf.append("type=\"CHARACTERS\">" + chars1);
break;
case SerializerTrace.EVENTTYPE_CDATA :
String chars2 = new String(generateEvent.m_characters, generateEvent.m_start, generateEvent.m_length);
buf.append("type=\"CDATA\">" + chars2);
break;
case SerializerTrace.EVENTTYPE_COMMENT :
buf.append("type=\"COMMENT\">" + generateEvent.m_data);
break;
case SerializerTrace.EVENTTYPE_PI :
buf.append("type=\"PI\">" + generateEvent.m_name + ", " + generateEvent.m_data);
break;
case SerializerTrace.EVENTTYPE_ENTITYREF :
buf.append("type=\"ENTITYREF\">" + generateEvent.m_name);
break;
case SerializerTrace.EVENTTYPE_IGNORABLEWHITESPACE :
buf.append("type=\"IGNORABLEWHITESPACE\">");
break;
// Guard against unrecognized event types producing a malformed element.
default :
buf.append("type=\"UNKNOWN\">");
break;
}
buf.append("</eventtype>\n");
setLastItem(buf.toString());
logger.logElement(level, TRACE_LISTENER_DUMP, attrs, buf.toString());
}
|
// acidicMercury8/xray-1.0
// Magic Software, Inc.
// http://www.magic-software.com
// Copyright (c) 2000-2002. All Rights Reserved
//
// Source code from Magic Software is supplied under the terms of a license
// agreement and may not be copied or disclosed except in accordance with the
// terms of that agreement. The various license agreements may be found at
// the Magic Software web site. This file is subject to the license
//
// FREE SOURCE CODE
// http://www.magic-software.com/License/free.pdf
#include "ImageInterp3D.h"
#include "MgcTriangleMesh.h"
using namespace Mgc;
#include <algorithm>
#include <cfloat>
using namespace std;
//----------------------------------------------------------------------------
ImageInterp3D::ImageInterp3D (int iXBound, int iYBound, int iZBound,
int* aiData)
:
ImageInt3D(iXBound,iYBound,iZBound,(Eint*)aiData)
{
m_iXYProduct = iXBound*iYBound;
}
//----------------------------------------------------------------------------
ImageInterp3D::ImageInterp3D (const char* acFilename)
:
ImageInt3D(acFilename)
{
if ( m_aiBound )
m_iXYProduct = m_aiBound[0]*m_aiBound[1];
else
m_iXYProduct = 0;
}
//----------------------------------------------------------------------------
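// GetFunction: piecewise-linear interpolation over a tetrahedral decomposition
// of the voxel cell containing rkP. Each cell is split into five tetrahedra,
// and the parity test below alternates the decomposition between adjacent
// cells so that shared faces agree. Points outside the image bounds return 0.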
float ImageInterp3D::GetFunction (const Vector3& rkP) const
{
int iX = (int) rkP.x;
if ( iX < 0 || iX >= m_aiBound[0]-1 )
return 0.0f;
int iY = (int) rkP.y;
if ( iY < 0 || iY >= m_aiBound[1]-1 )
return 0.0f;
int iZ = (int) rkP.z;
if ( iZ < 0 || iZ >= m_aiBound[2]-1 )
return 0.0f;
float fDX = rkP.x - iX, fDY = rkP.y - iY, fDZ = rkP.z - iZ;
int i000 = iX + m_aiBound[0]*(iY + m_aiBound[1]*iZ);
int i100 = i000 + 1;
int i010 = i000 + m_aiBound[0];
int i110 = i100 + m_aiBound[0];
int i001 = i000 + m_iXYProduct;
int i101 = i100 + m_iXYProduct;
int i011 = i010 + m_iXYProduct;
int i111 = i110 + m_iXYProduct;
float fF000 = (float) m_atData[i000];
float fF100 = (float) m_atData[i100];
float fF010 = (float) m_atData[i010];
float fF110 = (float) m_atData[i110];
float fF001 = (float) m_atData[i001];
float fF101 = (float) m_atData[i101];
float fF011 = (float) m_atData[i011];
float fF111 = (float) m_atData[i111];
float fC0, fC1, fC2, fInterp;
if ( (iX & 1) ^ (iY & 1) ^ (iZ & 1) )
{
if ( fDX - fDY - fDZ >= 0.0f )
{
// 1205
fInterp =
(1.0f-(1.0f-fDX)-fDY-fDZ)*fF100 +
(1.0f-fDX)*fF000 +
fDY*fF110 +
fDZ*fF101;
}
else if ( fDX - fDY + fDZ <= 0.0f )
{
// 3027
fInterp =
(1.0f-fDX-(1.0f-fDY)-fDZ)*fF010 +
fDX*fF110 +
(1.0f-fDY)*fF000 +
fDZ*fF011;
}
else if ( fDX + fDY - fDZ <= 0.0f )
{
// 4750
fInterp =
(1.0f-fDX-fDY-(1.0f-fDZ))*fF001 +
fDX*fF101 +
fDY*fF011 +
(1.0f-fDZ)*fF000;
}
else if ( fDX + fDY + fDZ >= 2.0f )
{
// 6572
fInterp =
(1.0f-(1.0f-fDX)-(1.0f-fDY)-(1.0f-fDZ))*fF111 +
(1.0f-fDX)*fF011 +
(1.0f-fDY)*fF101 +
(1.0f-fDZ)*fF110;
}
else
{
// 0752
fC0 = 0.5f*(-fDX+fDY+fDZ);
fC1 = 0.5f*(fDX-fDY+fDZ);
fC2 = 0.5f*(fDX+fDY-fDZ);
fInterp =
(1.0f-fC0-fC1-fC2)*fF000 +
fC0*fF011 +
fC1*fF101 +
fC2*fF110;
}
}
else
{
if ( fDX + fDY + fDZ <= 1.0f )
{
// 0134
fInterp =
(1.0f-fDX-fDY-fDZ)*fF000 +
fDX*fF100 +
fDY*fF010 +
fDZ*fF001;
}
else if ( fDX + fDY - fDZ >= 1.0f )
{
// 2316
fInterp =
(1.0f-(1.0f-fDX)-(1.0f-fDY)-fDZ)*fF110 +
(1.0f-fDX)*fF010 +
(1.0f-fDY)*fF100 +
fDZ*fF111;
}
else if ( fDX - fDY + fDZ >= 1.0f )
{
// 5461
fInterp =
(1.0f-(1.0f-fDX)-fDY-(1.0f-fDZ))*fF101 +
(1.0f-fDX)*fF001 +
fDY*fF111 +
(1.0f-fDZ)*fF100;
}
else if ( -fDX + fDY + fDZ >= 1.0f )
{
// 7643
fInterp =
(1.0f-fDX-(1.0f-fDY)-(1.0f-fDZ))*fF011 +
fDX*fF111 +
(1.0f-fDY)*fF001 +
(1.0f-fDZ)*fF010;
}
else
{
// 6314
fC0 = 0.5f*((1.0f-fDX)-(1.0f-fDY)+(1.0f-fDZ));
fC1 = 0.5f*(-(1.0f-fDX)+(1.0f-fDY)+(1.0f-fDZ));
fC2 = 0.5f*((1.0f-fDX)+(1.0f-fDY)-(1.0f-fDZ));
fInterp =
(1.0f-fC0-fC1-fC2)*fF111 +
fC0*fF010 +
fC1*fF100 +
fC2*fF001;
}
}
return fInterp;
}
//----------------------------------------------------------------------------
Vector3 ImageInterp3D::GetGradient (const Vector3& rkP) const
{
int iX = (int) rkP.x;
if ( iX < 0 || iX >= m_aiBound[0]-1 )
return Vector3::ZERO;
int iY = (int) rkP.y;
if ( iY < 0 || iY >= m_aiBound[1]-1 )
return Vector3::ZERO;
int iZ = (int) rkP.z;
if ( iZ < 0 || iZ >= m_aiBound[2]-1 )
return Vector3::ZERO;
float fDX = rkP.x - iX, fDY = rkP.y - iY, fDZ = rkP.z - iZ;
int i000 = iX + m_aiBound[0]*(iY + m_aiBound[1]*iZ);
int i100 = i000 + 1;
int i010 = i000 + m_aiBound[0];
int i110 = i100 + m_aiBound[0];
int i001 = i000 + m_iXYProduct;
int i101 = i100 + m_iXYProduct;
int i011 = i010 + m_iXYProduct;
int i111 = i110 + m_iXYProduct;
float fF000 = (float) m_atData[i000];
float fF100 = (float) m_atData[i100];
float fF010 = (float) m_atData[i010];
float fF110 = (float) m_atData[i110];
float fF001 = (float) m_atData[i001];
float fF101 = (float) m_atData[i101];
float fF011 = (float) m_atData[i011];
float fF111 = (float) m_atData[i111];
Vector3 kInterp;
if ( (iX & 1) ^ (iY & 1) ^ (iZ & 1) )
{
if ( fDX - fDY - fDZ >= 0.0f )
{
// 1205
kInterp.x = + fF100 - fF000;
kInterp.y = - fF100 + fF110;
kInterp.z = - fF100 + fF101;
}
else if ( fDX - fDY + fDZ <= 0.0f )
{
// 3027
kInterp.x = - fF010 + fF110;
kInterp.y = + fF010 - fF000;
kInterp.z = - fF010 + fF011;
}
else if ( fDX + fDY - fDZ <= 0.0f )
{
// 4750
kInterp.x = - fF001 + fF101;
kInterp.y = - fF001 + fF011;
kInterp.z = + fF001 - fF000;
}
else if ( fDX + fDY + fDZ >= 2.0f )
{
// 6572
kInterp.x = + fF111 - fF011;
kInterp.y = + fF111 - fF101;
kInterp.z = + fF111 - fF110;
}
else
{
// 0752
kInterp.x = 0.5f*(-fF000-fF011+fF101+fF110);
kInterp.y = 0.5f*(-fF000+fF011-fF101+fF110);
kInterp.z = 0.5f*(-fF000+fF011+fF101-fF110);
}
}
else
{
if ( fDX + fDY + fDZ <= 1.0f )
{
// 0134
kInterp.x = - fF000 + fF100;
kInterp.y = - fF000 + fF010;
kInterp.z = - fF000 + fF001;
}
else if ( fDX + fDY - fDZ >= 1.0f )
{
// 2316
kInterp.x = + fF110 - fF010;
kInterp.y = + fF110 - fF100;
kInterp.z = - fF110 + fF111;
}
else if ( fDX - fDY + fDZ >= 1.0f )
{
// 5461
kInterp.x = + fF101 - fF001;
kInterp.y = - fF101 + fF111;
kInterp.z = + fF101 - fF100;
}
else if ( -fDX + fDY + fDZ >= 1.0f )
{
// 7643
kInterp.x = - fF011 + fF111;
kInterp.y = + fF011 - fF001;
kInterp.z = + fF011 - fF010;
}
else
{
// 6314
kInterp.x = 0.5f*(fF111-fF010+fF100-fF001);
kInterp.y = 0.5f*(fF111+fF010-fF100-fF001);
kInterp.z = 0.5f*(fF111-fF010-fF100+fF001);
}
}
return kInterp;
}
//----------------------------------------------------------------------------
void ImageInterp3D::ExtractLevelSetLinear (int iLevel, int& riVertexQuantity,
Vertex*& rakVertex, int& riEdgeQuantity, Edge*& rakEdge,
int& riTriangleQuantity, Triangle*& rakTriangle)
{
VMap kVMap;
ESet kESet;
TSet kTSet;
m_iNextIndex = 0;
// adjust image so level set is F(x,y,z) = 0
int i;
for (i = 0; i < m_iQuantity; i++)
m_atData[i] = m_atData[i] - iLevel;
int iXBoundM1 = m_aiBound[0] - 1;
int iYBoundM1 = m_aiBound[1] - 1;
int iZBoundM1 = m_aiBound[2] - 1;
for (int iZ = 0, iZP = 1; iZ < iZBoundM1; iZ++, iZP++)
{
int iZParity = (iZ & 1);
for (int iY = 0, iYP = 1; iY < iYBoundM1; iY++, iYP++)
{
int iYParity = (iY & 1);
for (int iX = 0, iXP = 1; iX < iXBoundM1; iX++, iXP++)
{
int iXParity = (iX & 1);
int i000 = iX + m_aiBound[0]*(iY + m_aiBound[1]*iZ);
int i100 = i000 + 1;
int i010 = i000 + m_aiBound[0];
int i110 = i100 + m_aiBound[0];
int i001 = i000 + m_iXYProduct;
int i101 = i100 + m_iXYProduct;
int i011 = i010 + m_iXYProduct;
int i111 = i110 + m_iXYProduct;
int iF000 = m_atData[i000];
int iF100 = m_atData[i100];
int iF010 = m_atData[i010];
int iF110 = m_atData[i110];
int iF001 = m_atData[i001];
int iF101 = m_atData[i101];
int iF011 = m_atData[i011];
int iF111 = m_atData[i111];
if ( iXParity ^ iYParity ^ iZParity )
{
// 1205
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iXP,iY,iZ,iF100,iXP,iYP,iZ,iF110,iX,iY,iZ,iF000,iXP,
iY,iZP,iF101);
// 3027
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iX,iYP,iZ,iF010,iX,iY,iZ,iF000,iXP,iYP,iZ,iF110,iX,
iYP,iZP,iF011);
// 4750
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iX,iY,iZP,iF001,iX,iYP,iZP,iF011,iXP,iY,iZP,iF101,iX,
iY,iZ,iF000);
// 6572
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iXP,iYP,iZP,iF111,iXP,iY,iZP,iF101,iX,iYP,iZP,iF011,
iXP,iYP,iZ,iF110);
// 0752
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iX,iY,iZ,iF000,iX,iYP,iZP,iF011,iXP,iY,iZP,iF101,iXP,
iYP,iZ,iF110);
}
else
{
// 0134
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iX,iY,iZ,iF000,iXP,iY,iZ,iF100,iX,iYP,iZ,iF010,iX,iY,
iZP,iF001);
// 2316
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iXP,iYP,iZ,iF110,iX,iYP,iZ,iF010,iXP,iY,iZ,iF100,iXP,
iYP,iZP,iF111);
// 5461
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iXP,iY,iZP,iF101,iX,iY,iZP,iF001,iXP,iYP,iZP,iF111,
iXP,iY,iZ,iF100);
// 7643
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iX,iYP,iZP,iF011,iXP,iYP,iZP,iF111,iX,iY,iZP,iF001,iX,
iYP,iZ,iF010);
// 6314
ProcessTetrahedron(iLevel,kVMap,kESet,kTSet,
iXP,iYP,iZP,iF111,iX,iYP,iZ,iF010,iXP,iY,iZ,iF100,iX,
iY,iZP,iF001);
}
}
}
}
// readjust image so level set is F(x,y,z) = L
for (i = 0; i < m_iQuantity; i++)
m_atData[i] = m_atData[i] + iLevel;
// pack vertices into an array
riVertexQuantity = kVMap.size();
if ( riVertexQuantity > 0 )
{
rakVertex = new Vertex[riVertexQuantity];
VIterator pkVIter;
for (pkVIter = kVMap.begin(); pkVIter != kVMap.end(); pkVIter++)
rakVertex[pkVIter->second] = pkVIter->first;
}
else
{
rakVertex = NULL;
}
// pack edges into an array
riEdgeQuantity = kESet.size();
if ( riEdgeQuantity > 0 )
{
rakEdge = new Edge[riEdgeQuantity];
copy(kESet.begin(),kESet.end(),rakEdge);
}
else
{
rakEdge = NULL;
}
// pack triangles into an array
riTriangleQuantity = kTSet.size();
if ( riTriangleQuantity > 0 )
{
rakTriangle = new Triangle[riTriangleQuantity];
copy(kTSet.begin(),kTSet.end(),rakTriangle);
}
else
{
rakTriangle = NULL;
}
}
//----------------------------------------------------------------------------
int ImageInterp3D::AddVertex (VMap& rkVMap, int iXNumer, int iXDenom,
int iYNumer, int iYDenom, int iZNumer, int iZDenom)
{
#ifdef _DEBUG
int iX = (int)(iXNumer/(float)iXDenom);
int iY = (int)(iYNumer/(float)iYDenom);
int iZ = (int)(iZNumer/(float)iZDenom);
assert( 0 <= iX && iX < m_aiBound[0]
&& 0 <= iY && iY < m_aiBound[1]
&& 0 <= iZ && iZ < m_aiBound[2] );
#endif
Vertex kVertex(iXNumer,iXDenom,iYNumer,iYDenom,iZNumer,iZDenom);
VIterator pkVIter = rkVMap.find(kVertex);
if ( pkVIter != rkVMap.end() )
{
// Vertex already in map, just return its unique index.
return pkVIter->second;
}
else
{
// Vertex not in map, insert it and assign it a unique index.
int i = m_iNextIndex++;
rkVMap.insert(make_pair(kVertex,i));
return i;
}
}
//----------------------------------------------------------------------------
void ImageInterp3D::AddEdge (VMap& rkVMap, ESet& rkESet, int iXNumer0,
int iXDenom0, int iYNumer0, int iYDenom0, int iZNumer0, int iZDenom0,
int iXNumer1, int iXDenom1, int iYNumer1, int iYDenom1, int iZNumer1,
int iZDenom1)
{
#ifdef _DEBUG
int iX = (int)(iXNumer0/(float)iXDenom0);
int iY = (int)(iYNumer0/(float)iYDenom0);
int iZ = (int)(iZNumer0/(float)iZDenom0);
assert( 0 <= iX && iX < m_aiBound[0]
&& 0 <= iY && iY < m_aiBound[1]
&& 0 <= iZ && iZ < m_aiBound[2] );
iX = (int)(iXNumer1/(float)iXDenom1);
iY = (int)(iYNumer1/(float)iYDenom1);
iZ = (int)(iZNumer1/(float)iZDenom1);
assert( 0 <= iX && iX < m_aiBound[0]
&& 0 <= iY && iY < m_aiBound[1]
&& 0 <= iZ && iZ < m_aiBound[2] );
#endif
int iV0 = AddVertex(rkVMap,iXNumer0,iXDenom0,iYNumer0,iYDenom0,iZNumer0,
iZDenom0);
int iV1 = AddVertex(rkVMap,iXNumer1,iXDenom1,iYNumer1,iYDenom1,iZNumer1,
iZDenom1);
rkESet.insert(Edge(iV0,iV1));
}
//----------------------------------------------------------------------------
void ImageInterp3D::AddTriangle (VMap& rkVMap, ESet& rkESet, TSet& rkTSet,
int iXNumer0, int iXDenom0, int iYNumer0, int iYDenom0, int iZNumer0,
int iZDenom0, int iXNumer1, int iXDenom1, int iYNumer1, int iYDenom1,
int iZNumer1, int iZDenom1, int iXNumer2, int iXDenom2, int iYNumer2,
int iYDenom2, int iZNumer2, int iZDenom2)
{
#ifdef _DEBUG
int iX = (int)(iXNumer0/(float)iXDenom0);
int iY = (int)(iYNumer0/(float)iYDenom0);
int iZ = (int)(iZNumer0/(float)iZDenom0);
assert( 0 <= iX && iX < m_aiBound[0]
&& 0 <= iY && iY < m_aiBound[1]
&& 0 <= iZ && iZ < m_aiBound[2] );
iX = (int)(iXNumer1/(float)iXDenom1);
iY = (int)(iYNumer1/(float)iYDenom1);
iZ = (int)(iZNumer1/(float)iZDenom1);
assert( 0 <= iX && iX < m_aiBound[0]
&& 0 <= iY && iY < m_aiBound[1]
&& 0 <= iZ && iZ < m_aiBound[2] );
iX = (int)(iXNumer2/(float)iXDenom2);
iY = (int)(iYNumer2/(float)iYDenom2);
iZ = (int)(iZNumer2/(float)iZDenom2);
assert( 0 <= iX && iX < m_aiBound[0]
&& 0 <= iY && iY < m_aiBound[1]
&& 0 <= iZ && iZ < m_aiBound[2] );
#endif
int iV0 = AddVertex(rkVMap,iXNumer0,iXDenom0,iYNumer0,iYDenom0,iZNumer0,
iZDenom0);
int iV1 = AddVertex(rkVMap,iXNumer1,iXDenom1,iYNumer1,iYDenom1,iZNumer1,
iZDenom1);
int iV2 = AddVertex(rkVMap,iXNumer2,iXDenom2,iYNumer2,iYDenom2,iZNumer2,
iZDenom2);
// nothing to do if triangle already exists
Triangle kT(iV0,iV1,iV2);
if ( rkTSet.find(kT) != rkTSet.end() )
return;
// prevent double-sided triangles
int iSave = kT.m_i1;
kT.m_i1 = kT.m_i2;
kT.m_i2 = iSave;
if ( rkTSet.find(kT) != rkTSet.end() )
return;
rkESet.insert(Edge(iV0,iV1));
rkESet.insert(Edge(iV1,iV2));
rkESet.insert(Edge(iV2,iV0));
// compute triangle normal assuming counterclockwise ordering
Vector3 kV0(
iXNumer0/(float)iXDenom0,
iYNumer0/(float)iYDenom0,
iZNumer0/(float)iZDenom0);
Vector3 kV1(
iXNumer1/(float)iXDenom1,
iYNumer1/(float)iYDenom1,
iZNumer1/(float)iZDenom1);
Vector3 kV2(
iXNumer2/(float)iXDenom2,
iYNumer2/(float)iYDenom2,
iZNumer2/(float)iZDenom2);
Vector3 kE0 = kV1 - kV0;
Vector3 kE1 = kV2 - kV0;
Vector3 kN = kE0.Cross(kE1);
// choose triangle orientation based on gradient direction
const float fOneThird = 1.0f/3.0f;
Vector3 kCentroid = fOneThird*(kV0+kV1+kV2);
Vector3 kGrad = GetGradient(kCentroid);
if ( kGrad.Dot(kN) <= 0.0f )
rkTSet.insert(Triangle(iV0,iV1,iV2));
else
rkTSet.insert(Triangle(iV0,iV2,iV1));
}
//----------------------------------------------------------------------------
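// ProcessTetrahedron: extract the level-set surface within one tetrahedron.
// Each vertex value iF0..iF3 is classified as positive, negative, or zero
// (the case labels such as "+++-" below encode these signs), and edge
// crossings are stored as exact rational coordinates (numerator/denominator
// pairs) so that shared vertices merge exactly across tetrahedra.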
void ImageInterp3D::ProcessTetrahedron (int iLevel, VMap& rkVM,
ESet& rkES, TSet& rkTS, int iX0, int iY0, int iZ0, int iF0,
int iX1, int iY1, int iZ1, int iF1, int iX2, int iY2, int iZ2, int iF2,
int iX3, int iY3, int iZ3, int iF3)
{
int iXN0, iYN0, iZN0, iD0;
int iXN1, iYN1, iZN1, iD1;
int iXN2, iYN2, iZN2, iD2;
int iXN3, iYN3, iZN3, iD3;
if ( iF0 != 0 )
{
// convert to case +***
if ( iF0 < 0 )
{
iF0 = -iF0;
iF1 = -iF1;
iF2 = -iF2;
iF3 = -iF3;
}
if ( iF1 > 0 )
{
if ( iF2 > 0 )
{
if ( iF3 > 0 )
{
// ++++
return;
}
else if ( iF3 < 0 )
{
// +++-
iD0 = iF0 - iF3;
iXN0 = iF0*iX3 - iF3*iX0;
iYN0 = iF0*iY3 - iF3*iY0;
iZN0 = iF0*iZ3 - iF3*iZ0;
iD1 = iF1 - iF3;
iXN1 = iF1*iX3 - iF3*iX1;
iYN1 = iF1*iY3 - iF3*iY1;
iZN1 = iF1*iZ3 - iF3*iZ1;
iD2 = iF2 - iF3;
iXN2 = iF2*iX3 - iF3*iX2;
iYN2 = iF2*iY3 - iF3*iY2;
iZN2 = iF2*iZ3 - iF3*iZ2;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
}
else
{
// +++0
AddVertex(rkVM,iX3,1,iY3,1,iZ3,1);
}
}
else if ( iF2 < 0 )
{
iD0 = iF0 - iF2;
iXN0 = iF0*iX2 - iF2*iX0;
iYN0 = iF0*iY2 - iF2*iY0;
iZN0 = iF0*iZ2 - iF2*iZ0;
iD1 = iF1 - iF2;
iXN1 = iF1*iX2 - iF2*iX1;
iYN1 = iF1*iY2 - iF2*iY1;
iZN1 = iF1*iZ2 - iF2*iZ1;
if ( iF3 > 0 )
{
// ++-+
iD2 = iF3 - iF2;
iXN2 = iF3*iX2 - iF2*iX3;
iYN2 = iF3*iY2 - iF2*iY3;
iZN2 = iF3*iZ2 - iF2*iZ3;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
}
else if ( iF3 < 0 )
{
// ++--
iD2 = iF0 - iF3;
iXN2 = iF0*iX3 - iF3*iX0;
iYN2 = iF0*iY3 - iF3*iY0;
iZN2 = iF0*iZ3 - iF3*iZ0;
iD3 = iF1 - iF3;
iXN3 = iF1*iX3 - iF3*iX1;
iYN3 = iF1*iY3 - iF3*iY1;
iZN3 = iF1*iZ3 - iF3*iZ1;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
AddTriangle(rkVM,rkES,rkTS,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN3,iD3,iYN3,iD3,iZN3,iD3,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
}
else
{
// ++-0
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX3,1,iY3,1,iZ3,1);
}
}
else
{
if ( iF3 > 0 )
{
// ++0+
AddVertex(rkVM,iX2,1,iY2,1,iZ2,1);
}
else if ( iF3 < 0 )
{
// ++0-
iD0 = iF0 - iF3;
iXN0 = iF0*iX3 - iF3*iX0;
iYN0 = iF0*iY3 - iF3*iY0;
iZN0 = iF0*iZ3 - iF3*iZ0;
iD1 = iF1 - iF3;
iXN1 = iF1*iX3 - iF3*iX1;
iYN1 = iF1*iY3 - iF3*iY1;
iZN1 = iF1*iZ3 - iF3*iZ1;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX2,1,iY2,1,iZ2,1);
}
else
{
// ++00
AddEdge(rkVM,rkES,iX2,1,iY2,1,iZ2,1,iX3,1,iY3,1,iZ3,1);
}
}
}
else if ( iF1 < 0 )
{
if ( iF2 > 0 )
{
iD0 = iF0 - iF1;
iXN0 = iF0*iX1 - iF1*iX0;
iYN0 = iF0*iY1 - iF1*iY0;
iZN0 = iF0*iZ1 - iF1*iZ0;
iD1 = iF2 - iF1;
iXN1 = iF2*iX1 - iF1*iX2;
iYN1 = iF2*iY1 - iF1*iY2;
iZN1 = iF2*iZ1 - iF1*iZ2;
if ( iF3 > 0 )
{
// +-++
iD2 = iF3 - iF1;
iXN2 = iF3*iX1 - iF1*iX3;
iYN2 = iF3*iY1 - iF1*iY3;
iZN2 = iF3*iZ1 - iF1*iZ3;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
}
else if ( iF3 < 0 )
{
// +-+-
iD2 = iF0 - iF3;
iXN2 = iF0*iX3 - iF3*iX0;
iYN2 = iF0*iY3 - iF3*iY0;
iZN2 = iF0*iZ3 - iF3*iZ0;
iD3 = iF2 - iF3;
iXN3 = iF2*iX3 - iF3*iX2;
iYN3 = iF2*iY3 - iF3*iY2;
iZN3 = iF2*iZ3 - iF3*iZ2;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
AddTriangle(rkVM,rkES,rkTS,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN3,iD3,iYN3,iD3,iZN3,iD3,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
}
else
{
// +-+0
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX3,1,iY3,1,iZ3,1);
}
}
else if ( iF2 < 0 )
{
iD0 = iF1 - iF0;
iXN0 = iF1*iX0 - iF0*iX1;
iYN0 = iF1*iY0 - iF0*iY1;
iZN0 = iF1*iZ0 - iF0*iZ1;
iD1 = iF2 - iF0;
iXN1 = iF2*iX0 - iF0*iX2;
iYN1 = iF2*iY0 - iF0*iY2;
iZN1 = iF2*iZ0 - iF0*iZ2;
if ( iF3 > 0 )
{
// +--+
iD2 = iF1 - iF3;
iXN2 = iF1*iX3 - iF3*iX1;
iYN2 = iF1*iY3 - iF3*iY1;
iZN2 = iF1*iZ3 - iF3*iZ1;
iD3 = iF2 - iF3;
iXN3 = iF2*iX3 - iF3*iX2;
iYN3 = iF2*iY3 - iF3*iY2;
iZN3 = iF2*iZ3 - iF3*iZ2;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
AddTriangle(rkVM,rkES,rkTS,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN3,iD3,iYN3,iD3,iZN3,iD3,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
}
else if ( iF3 < 0 )
{
// +---
iD2 = iF3 - iF0;
iXN2 = iF3*iX0 - iF0*iX3;
iYN2 = iF3*iY0 - iF0*iY3;
iZN2 = iF3*iZ0 - iF0*iZ3;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iXN2,iD2,iYN2,iD2,iZN2,iD2);
}
else
{
// +--0
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX3,1,iY3,1,iZ3,1);
}
}
else
{
iD0 = iF1 - iF0;
iXN0 = iF1*iX0 - iF0*iX1;
iYN0 = iF1*iY0 - iF0*iY1;
iZN0 = iF1*iZ0 - iF0*iZ1;
if ( iF3 > 0 )
{
// +-0+
iD1 = iF1 - iF3;
iXN1 = iF1*iX3 - iF3*iX1;
iYN1 = iF1*iY3 - iF3*iY1;
iZN1 = iF1*iZ3 - iF3*iZ1;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX2,1,iY2,1,iZ2,1);
}
else if ( iF3 < 0 )
{
// +-0-
iD1 = iF3 - iF0;
iXN1 = iF3*iX0 - iF0*iX3;
iYN1 = iF3*iY0 - iF0*iY3;
iZN1 = iF3*iZ0 - iF0*iZ3;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX2,1,iY2,1,iZ2,1);
}
else
{
// +-00
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iX2,1,iY2,1,iZ2,1,
iX3,1,iY3,1,iZ3,1);
}
}
}
else
{
if ( iF2 > 0 )
{
if ( iF3 > 0 )
{
// +0++
AddVertex(rkVM,iX1,1,iY1,1,iZ1,1);
}
else if ( iF3 < 0 )
{
// +0+-
iD0 = iF0 - iF3;
iXN0 = iF0*iX3 - iF3*iX0;
iYN0 = iF0*iY3 - iF3*iY0;
iZN0 = iF0*iZ3 - iF3*iZ0;
iD1 = iF2 - iF3;
iXN1 = iF2*iX3 - iF3*iX2;
iYN1 = iF2*iY3 - iF3*iY2;
iZN1 = iF2*iZ3 - iF3*iZ2;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX1,1,iY1,1,iZ1,1);
}
else
{
// +0+0
AddEdge(rkVM,rkES,iX1,1,iY1,1,iZ1,1,iX3,1,iY3,1,iZ3,1);
}
}
else if ( iF2 < 0 )
{
iD0 = iF2 - iF0;
iXN0 = iF2*iX0 - iF0*iX2;
iYN0 = iF2*iY0 - iF0*iY2;
iZN0 = iF2*iZ0 - iF0*iZ2;
if ( iF3 > 0 )
{
// +0-+
iD1 = iF2 - iF3;
iXN1 = iF2*iX3 - iF3*iX2;
iYN1 = iF2*iY3 - iF3*iY2;
iZN1 = iF2*iZ3 - iF3*iZ2;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX1,1,iY1,1,iZ1,1);
}
else if ( iF3 < 0 )
{
// +0--
iD1 = iF0 - iF3;
iXN1 = iF0*iX3 - iF3*iX0;
iYN1 = iF0*iY3 - iF3*iY0;
iZN1 = iF0*iZ3 - iF3*iZ0;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX1,1,iY1,1,iZ1,1);
}
else
{
// +0-0
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iX1,1,iY1,1,iZ1,1,
iX3,1,iY3,1,iZ3,1);
}
}
else
{
if ( iF3 > 0 )
{
// +00+
AddEdge(rkVM,rkES,iX1,1,iY1,1,iZ1,1,iX2,1,iY2,1,iZ2,1);
}
else if ( iF3 < 0 )
{
// +00-
iD0 = iF0 - iF3;
iXN0 = iF0*iX3 - iF3*iX0;
iYN0 = iF0*iY3 - iF3*iY0;
iZN0 = iF0*iZ3 - iF3*iZ0;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iX1,1,iY1,1,iZ1,1,
iX2,1,iY2,1,iZ2,1);
}
else
{
// +000
AddTriangle(rkVM,rkES,rkTS,
iX1,1,iY1,1,iZ1,1,
iX2,1,iY2,1,iZ2,1,
iX3,1,iY3,1,iZ3,1);
}
}
}
}
else if ( iF1 != 0 )
{
// convert to case 0+**
if ( iF1 < 0 )
{
iF1 = -iF1;
iF2 = -iF2;
iF3 = -iF3;
}
if ( iF2 > 0 )
{
if ( iF3 > 0 )
{
// 0+++
AddVertex(rkVM,iX0,1,iY0,1,iZ0,1);
}
else if ( iF3 < 0 )
{
// 0++-
iD0 = iF2 - iF3;
iXN0 = iF2*iX3 - iF3*iX2;
iYN0 = iF2*iY3 - iF3*iY2;
iZN0 = iF2*iZ3 - iF3*iZ2;
iD1 = iF1 - iF3;
iXN1 = iF1*iX3 - iF3*iX1;
iYN1 = iF1*iY3 - iF3*iY1;
iZN1 = iF1*iZ3 - iF3*iZ1;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX0,1,iY0,1,iZ0,1);
}
else
{
// 0++0
AddEdge(rkVM,rkES,iX0,1,iY0,1,iZ0,1,iX3,1,iY3,1,iZ3,1);
}
}
else if ( iF2 < 0 )
{
iD0 = iF2 - iF1;
iXN0 = iF2*iX1 - iF1*iX2;
iYN0 = iF2*iY1 - iF1*iY2;
iZN0 = iF2*iZ1 - iF1*iZ2;
if ( iF3 > 0 )
{
// 0+-+
iD1 = iF2 - iF3;
iXN1 = iF2*iX3 - iF3*iX2;
iYN1 = iF2*iY3 - iF3*iY2;
iZN1 = iF2*iZ3 - iF3*iZ2;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX0,1,iY0,1,iZ0,1);
}
else if ( iF3 < 0 )
{
// 0+--
iD1 = iF1 - iF3;
iXN1 = iF1*iX3 - iF3*iX1;
iYN1 = iF1*iY3 - iF3*iY1;
iZN1 = iF1*iZ3 - iF3*iZ1;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iXN1,iD1,iYN1,iD1,iZN1,iD1,
iX0,1,iY0,1,iZ0,1);
}
else
{
// 0+-0
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iX0,1,iY0,1,iZ0,1,
iX3,1,iY3,1,iZ3,1);
}
}
else
{
if ( iF3 > 0 )
{
// 0+0+
AddEdge(rkVM,rkES,iX0,1,iY0,1,iZ0,1,iX2,1,iY2,1,iZ2,1);
}
else if ( iF3 < 0 )
{
// 0+0-
iD0 = iF1 - iF3;
iXN0 = iF1*iX3 - iF3*iX1;
iYN0 = iF1*iY3 - iF3*iY1;
iZN0 = iF1*iZ3 - iF3*iZ1;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iX0,1,iY0,1,iZ0,1,
iX2,1,iY2,1,iZ2,1);
}
else
{
// 0+00
AddTriangle(rkVM,rkES,rkTS,
iX0,1,iY0,1,iZ0,1,
iX2,1,iY2,1,iZ2,1,
iX3,1,iY3,1,iZ3,1);
}
}
}
else if ( iF2 != 0 )
{
// convert to case 00+*
if ( iF2 < 0 )
{
iF2 = -iF2;
iF3 = -iF3;
}
if ( iF3 > 0 )
{
// 00++
AddEdge(rkVM,rkES,iX0,1,iY0,1,iZ0,1,iX1,1,iY1,1,iZ1,1);
}
else if ( iF3 < 0 )
{
// 00+-
iD0 = iF2 - iF3;
iXN0 = iF2*iX3 - iF3*iX2;
iYN0 = iF2*iY3 - iF3*iY2;
iZN0 = iF2*iZ3 - iF3*iZ2;
AddTriangle(rkVM,rkES,rkTS,
iXN0,iD0,iYN0,iD0,iZN0,iD0,
iX0,1,iY0,1,iZ0,1,
iX1,1,iY1,1,iZ1,1);
}
else
{
// 00+0
AddTriangle(rkVM,rkES,rkTS,
iX0,1,iY0,1,iZ0,1,
iX1,1,iY1,1,iZ1,1,
iX3,1,iY3,1,iZ3,1);
}
}
else if ( iF3 != 0 )
{
// cases 000+ or 000-
AddTriangle(rkVM,rkES,rkTS,
iX0,1,iY0,1,iZ0,1,
iX1,1,iY1,1,iZ1,1,
iX2,1,iY2,1,iZ2,1);
}
else
{
// case 0000
AddTriangle(rkVM,rkES,rkTS,
iX0,1,iY0,1,iZ0,1,
iX1,1,iY1,1,iZ1,1,
iX2,1,iY2,1,iZ2,1);
AddTriangle(rkVM,rkES,rkTS,
iX0,1,iY0,1,iZ0,1,
iX1,1,iY1,1,iZ1,1,
iX3,1,iY3,1,iZ3,1);
AddTriangle(rkVM,rkES,rkTS,
iX0,1,iY0,1,iZ0,1,
iX2,1,iY2,1,iZ2,1,
iX3,1,iY3,1,iZ3,1);
AddTriangle(rkVM,rkES,rkTS,
iX1,1,iY1,1,iZ1,1,
iX2,1,iY2,1,iZ2,1,
iX3,1,iY3,1,iZ3,1);
}
}
//----------------------------------------------------------------------------
//----------------------------------------------------------------------------
// Vertex
//----------------------------------------------------------------------------
ImageInterp3D::Vertex::Vertex (int iXNumer, int iXDenom, int iYNumer,
int iYDenom, int iZNumer, int iZDenom)
{
if ( iXDenom > 0 )
{
m_iXNumer = iXNumer;
m_iXDenom = iXDenom;
}
else
{
m_iXNumer = -iXNumer;
m_iXDenom = -iXDenom;
}
if ( iYDenom > 0 )
{
m_iYNumer = iYNumer;
m_iYDenom = iYDenom;
}
else
{
m_iYNumer = -iYNumer;
m_iYDenom = -iYDenom;
}
if ( iZDenom > 0 )
{
m_iZNumer = iZNumer;
m_iZDenom = iZDenom;
}
else
{
m_iZNumer = -iZNumer;
m_iZDenom = -iZDenom;
}
}
//----------------------------------------------------------------------------
bool ImageInterp3D::Vertex::operator< (const Vertex& rkVertex) const
{
// Support for STL maps. I tried using the equal_to comparison and had
// two problems.
// 1. typedef map<Vertex,int,equal_to<Vertex> > VMap;
// The space between the last two '>' symbols *must* occur, otherwise
// the compiler complains about a missing ',' (go figure).
// 2. The VMap 'find' calls were failing when they should have
// succeeded. I have no idea why.
unsigned int auiValue0[6] =
{
*(unsigned int*)&m_iXNumer,
*(unsigned int*)&m_iXDenom,
*(unsigned int*)&m_iYNumer,
*(unsigned int*)&m_iYDenom,
*(unsigned int*)&m_iZNumer,
*(unsigned int*)&m_iZDenom
};
unsigned int auiValue1[6] =
{
*(unsigned int*)&rkVertex.m_iXNumer,
*(unsigned int*)&rkVertex.m_iXDenom,
*(unsigned int*)&rkVertex.m_iYNumer,
*(unsigned int*)&rkVertex.m_iYDenom,
*(unsigned int*)&rkVertex.m_iZNumer,
*(unsigned int*)&rkVertex.m_iZDenom
};
for (int i = 0; i < 6; i++)
{
if ( auiValue0[i] < auiValue1[i] )
return true;
if ( auiValue0[i] > auiValue1[i] )
return false;
}
return false;
}
//----------------------------------------------------------------------------
void ImageInterp3D::Vertex::GetTriple (Vector3& rkMeshVertex)
const
{
rkMeshVertex.x = float(m_iXNumer)/float(m_iXDenom);
rkMeshVertex.y = float(m_iYNumer)/float(m_iYDenom);
rkMeshVertex.z = float(m_iZNumer)/float(m_iZDenom);
}
//----------------------------------------------------------------------------
//----------------------------------------------------------------------------
// Edge
//----------------------------------------------------------------------------
ImageInterp3D::Edge::Edge (int i0, int i1)
{
if ( i0 < i1 )
{
m_i0 = i0;
m_i1 = i1;
}
else
{
m_i0 = i1;
m_i1 = i0;
}
}
//----------------------------------------------------------------------------
bool ImageInterp3D::Edge::operator< (const Edge& rkEdge) const
{
unsigned int auiValue0[2] =
{
*(unsigned int*)&m_i0,
*(unsigned int*)&m_i1
};
unsigned int auiValue1[2] =
{
*(unsigned int*)&rkEdge.m_i0,
*(unsigned int*)&rkEdge.m_i1
};
for (int i = 0; i < 2; i++)
{
if ( auiValue0[i] < auiValue1[i] )
return true;
if ( auiValue0[i] > auiValue1[i] )
return false;
}
return false;
}
//----------------------------------------------------------------------------
//----------------------------------------------------------------------------
// Triangle
//----------------------------------------------------------------------------
ImageInterp3D::Triangle::Triangle (int i0, int i1, int i2)
{
if ( i0 < i1 )
{
if ( i0 < i2 )
{
m_i0 = i0;
m_i1 = i1;
m_i2 = i2;
}
else
{
m_i0 = i2;
m_i1 = i0;
m_i2 = i1;
}
}
else
{
if ( i1 < i2 )
{
m_i0 = i1;
m_i1 = i2;
m_i2 = i0;
}
else
{
m_i0 = i2;
m_i1 = i0;
m_i2 = i1;
}
}
}
//----------------------------------------------------------------------------
bool ImageInterp3D::Triangle::operator< (const Triangle& rkTriangle) const
{
unsigned int auiValue0[3] =
{
*(unsigned int*)&m_i0,
*(unsigned int*)&m_i1,
*(unsigned int*)&m_i2
};
unsigned int auiValue1[3] =
{
*(unsigned int*)&rkTriangle.m_i0,
*(unsigned int*)&rkTriangle.m_i1,
*(unsigned int*)&rkTriangle.m_i2
};
for (int i = 0; i < 3; i++)
{
if ( auiValue0[i] < auiValue1[i] )
return true;
if ( auiValue0[i] > auiValue1[i] )
return false;
}
return false;
}
//----------------------------------------------------------------------------
|
// GetImageStreamNameAndTag gives the image stream name and tag given the image name.
func (*ImageStream) GetImageStreamNameAndTag(fullImageName string) (string, string) {
imageName, tag := common.GetImageNameAndTag(fullImageName)
imageStreamName := fmt.Sprintf("%s-%s", imageName, tag)
imageStreamName = common.MakeStringDNSSubdomainNameCompliant(imageStreamName)
return imageStreamName, tag
}
|
Fast Forward to the Past: NASA Technologists Test ‘Game-Changing’ Data-Processing Technology
Goddard technologist Jonathan Pellish holds a Goddard-developed digital test board (larger) and the IRAD-developed daughter card containing the analog-based data-processing integrated circuit. The daughter card snaps into the digital test board and will be used to test a number of spaceflight processing applications. Credit: NASA/Goddard/Pat Izzo
It’s a digital world. Or is it? NASA technologist Jonathan Pellish isn’t convinced. In fact, he believes a computing technology of yesteryear could potentially revolutionize everything from autonomous rendezvous and docking to remotely correcting wavefront errors on large, deployable space telescope mirrors like those to fly on the James Webb Space Telescope.
“It’s fast forward to the past,” Pellish said, referring to an emerging processing technology developed by a Cambridge, Mass.-based company, Analog Devices Lyric Labs.
So convinced is he of its potential that Pellish is meeting with scientists and engineers to explain the technology’s capabilities and is using fiscal year 2013 NASA Center Innovation Fund resources to build printed circuit boards that researchers can use to test the technology’s performance for a range of scientific applications. Pellish works at NASA's Goddard Space Flight Center in Greenbelt, Md. He also has carried out preliminary radiation-effects studies to see how the technology’s architecture holds up under the extreme environment encountered in space.
“I wouldn’t do it unless I really believed in it,” Pellish added. “This is one of the few things I’ve seen that is really different than what others are trying to do. I think this technology could fundamentally change the way we carry out onboard processing.”
The new technology is an analog-based microchip developed with significant support from the Defense Advanced Research Projects Agency (DARPA). Instead of relying on tiny switches or transistors that turn on and off, producing streams of ones and zeroes that computing systems then translate into something meaningful to users, the company’s new microchip is more like a dimmer switch. It can accept inputs and calculate outputs that are between zero and one, directly representing probabilities, or levels of certainty.
“The technology is fundamentally different from standard digital-signal processing, recognizing values between zero and one to accomplish what would otherwise be cost prohibitive or impossible with traditional digital circuits,” Pellish said.
The processor’s enhanced performance is due to the way the technology works, he explained. While digital systems use processors that step through calculations one at a time, in a serial fashion, the new processor uses electronic signals to represent probabilities rather than binary ones and zeros. It then effectively runs the calculations in parallel. Where it might take 500 transistors for a digital computer to calculate a probability, the new technology would take just a few. In other words, the microchip can perform a calculation more efficiently, with fewer circuits and less power than a digital processor — attributes important for space- and power-constrained spacecraft instruments, Pellish said.
Although “there has been an overwhelming amount of positive support for the technology within Goddard” since Pellish began introducing colleagues to its capabilities, he is the first to concede that the technology isn’t appropriate for all space applications. Because of its efficiency and inherent design, however, it’s especially well suited to computing fast Fourier transforms (FFTs), and more particularly the discrete Fourier transform, a ubiquitously used mathematical algorithm in digital-signal processing. Among other things, Fourier transforms decompose signals into their constituent frequencies and are used to generate and filter cell-phone and Wi-Fi transmissions as well as compress audio, image and video files so that they take up less bandwidth.
Among other products, the company has developed an analog-based integrated circuit geared specifically for computing Fourier transforms. The team will use the technology, which the company donated, to assemble several custom circuit boards. “We’ll take the hardware and see what it can do with our data and applications,” Pellish explained.
One of the first applications the group plans to target with a version of the FFT integrated circuit is wavefront sensing and control, the computational technique for aligning multiple mirror segments, like those that are flying on the Webb telescope, so that they operate as a single mirror system.
In addition, Jeffrey Klenzing, who works with Goddard’s Space Weather Laboratory, wants to evaluate the technology’s use for on-board data processing, particularly for studies of the sun. “For a typical sounding rocket application, we send all data down and perform fast Fourier transforms on the ground. However, for satellite missions, this is not feasible given limited telemetry,” Klenzing said. “A chip for performing rapid, reliable FFTs would be very useful for such heliophysics missions, particularly with the push toward smaller, low-power satellites such as CubeSats and nanosats.”
Pellish also believes autonomous rendezvous and docking and other applications requiring precise locational information would benefit from the analog-based technology. “We’re trying to create a new market at NASA for analog processing. I believe it will give us a competitive edge. If we can push this, it could revolutionize how we do onboard data processing.”
|
// IsTransit reports whether the target has no actual work to do
func (t *Target) IsTransit() bool {
return t.ExecDriver == "" &&
!t.Exec &&
len(t.Ext) == 0 &&
len(t.Watches) == 0 &&
len(t.Artifacts) == 0
}
|
I've noticed a common bias that shows up in some founders: they believe that their competitors are stupid or uncreative. They'll look at other businesses and identify inefficiencies or bad systems, and decide that those conditions exist because of dumb decisions on the part of founders or employees.
This is a bad belief to hold. In truth, competitors in the market are usually founded and run by intelligent people making smart and logical decisions. That doesn't mean that all the decisions they make are necessarily the right ones, but they're rarely a function of outright stupidity.
Where companies do things that diverge from what seems smart from the outside, it's a much better idea to ask why those companies are doing things from the presumption of intelligence and logic rather than the presumption of stupidity. If you don't ask these questions, you might find yourself making the same decisions, or ending up in the same place with your own set of rationalizations. I see this all the time.
In fact, we made this mistake when we started Tutorspree. We looked at all the local agencies and the way that they acquired customers and charged for packages of lessons. We assumed they asked for so much money up front because they were greedy and not smart enough to figure out a better system. It turned out that packages of lessons were a logical outgrowth of high upfront acquisition costs and the long term dynamic of tutor/student relationships. A large enough subset of customers appreciated the breaks on pricing and commitment created by booking multiple lessons up front that it made sense to model the business that way. It took longer than it should have to realize this because of our bias.
If, instead, you presume intelligence and analyze the reasons a business looks the way it does, you will often see the challenges you might face ahead of time and, as a result, design a solution that is actually better, as opposed to simply looking new. It is a lot harder to think this way because it means that you can't just dismiss the things other people do and assume you'll be better. You actually have to prove that you know how to be better. That can be really scary because, much of the time, you might not be able to figure out how to be better. Everything you think of might lead you to the same place you see your competitors.
That, though, is no reason to stop working on your company. I think it's actually a reason to keep going, and to keep gathering information and generating new ideas. This is part of what's so cool about starting a company: you get to make up new rules as you go along, and toss out old ones, too. Two founders looking at the same problem can easily come up with multiple solutions. Each solution might look similar from far away, but the small differences add up. Importantly, if you know that other smart people started in a similar place and ended up with the wrong answer, you'll think a lot more critically about each of your decisions and never get lazy about challenging your own assumptions.
Of course, just because you presume intelligence doesn't mean that every decision made was smart. People and organizations make bad decisions for all kinds of reasons. The thing is, you don't learn much by understanding that a call was bad, you learn by understanding the inputs and the organization that enabled the bad decisions.
Even with this framework, there's no guarantee that you'll end up in the right place, no matter how much you analyze those whose decisions have left you with an opportunity. At the end of the day, there's only so much you can learn from looking at competitors. Truly great businesses aren't built as counterpoints to existing companies. They become great because they meet a deep need that isn't being satisfied. That usually requires the kinds of creative and cognitive leaps that no amount of market analysis could possibly give you.
|
def _get_person_url(self, url):
    """Normalize a person URL onto BASE_URL; return None if it doesn't match."""
if url is None:
return None
matched = re.match(self.PERSON_URL_PATTERN, url)
if matched:
sub_url = matched.group(1)
return '{base}{sub_url}'.format(base=self.BASE_URL, sub_url=sub_url)
return None
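A minimal, self-contained sketch of how this helper might be exercised; the BASE_URL and PERSON_URL_PATTERN values below are invented stand-ins for whatever the real scraper class defines.

import re

class PersonScraper:
    # Hypothetical values; the real class defines its own pattern and base URL.
    BASE_URL = 'https://example.com'
    PERSON_URL_PATTERN = r'^https?://[^/]+(/person/\d+)'

    def _get_person_url(self, url):
        # Same body as above, reproduced to keep the sketch runnable.
        if url is None:
            return None
        matched = re.match(self.PERSON_URL_PATTERN, url)
        if matched:
            return '{base}{sub_url}'.format(base=self.BASE_URL, sub_url=matched.group(1))
        return None

print(PersonScraper()._get_person_url('http://old.example.com/person/42'))
# -> https://example.com/person/42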
|
def chop_map(self, chop_edges, ftimes, pixel=3):
    # Convert the edge margins from pixels to position units.
    x_l, x_r, y_t, y_b = np.array(chop_edges) * pixel
    x_r = max(self._pos_x) - x_r
    y_t = max(self._pos_y) - y_t
    in_range_x = np.logical_and(self._pos_x >= x_l, self._pos_x <= x_r)
    in_range_y = np.logical_and(self._pos_y >= y_b, self._pos_y <= y_t)
    spikeLoc = self.get_event_loc(ftimes)[1]
    spike_idxs = spikeLoc[0]
    spike_idxs_to_use = []
    # np.where returns a tuple of index arrays; take the first element so the
    # membership test below compares against an array, not a tuple (the
    # original comparison against the tuple was always False).
    sample_spatial_idx = np.where(np.logical_and(in_range_y, in_range_x))[0]
    # Keep the spikes whose position sample falls inside the chopped map.
    for i, val in enumerate(spike_idxs):
        if np.any(sample_spatial_idx == val):
            spike_idxs_to_use.append(i)
    ftimes = ftimes[np.array(spike_idxs_to_use)]
    self._set_time(self._time[sample_spatial_idx])
    self._set_pos_x(self._pos_x[sample_spatial_idx] - x_l)
    self._set_pos_y(self._pos_y[sample_spatial_idx] - y_b)
    self._set_direction(self._direction[sample_spatial_idx])
    self._set_speed(self._speed[sample_spatial_idx])
    self.set_ang_vel(self._ang_vel[sample_spatial_idx])
    return ftimes
|
package io.binac.leetcode;
import io.binac.leetcode.util.LinkedLists;
import io.binac.leetcode.util.ListNode;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
class ReverseLinkedListIITest {
private final ReverseLinkedListII.Solution1 solution1 = new ReverseLinkedListII.Solution1();
@Test
void test1() {
ListNode head = LinkedLists.asLinkedList(1, 2, 3, 4, 5);
ListNode actual = solution1.reverseBetween(head, 2, 4);
ListNode expected = LinkedLists.asLinkedList(1, 4, 3, 2, 5);
assertTrue(LinkedLists.equals(expected, actual));
}
}
|
package main
import (
"fmt"
"log"
"os"
"github.com/bitfinexcom/bitfinex-api-go/pkg/models/fundingoffer"
"github.com/bitfinexcom/bitfinex-api-go/v2/rest"
"github.com/davecgh/go-spew/spew"
)
// Set BFX_API_KEY and BFX_API_SECRET as:
//
// export BFX_API_KEY=YOUR_API_KEY
// export BFX_API_SECRET=YOUR_API_SECRET
//
// You can obtain them from https://www.bitfinex.com/api
func main() {
key := os.Getenv("BFX_API_KEY")
secret := os.Getenv("BFX_API_SECRET")
c := rest.NewClient().Credentials(key, secret)
offers(c)
offerHistory(c)
loans(c)
loansHistory(c)
activeCredits(c)
creditsHistory(c)
fundingTrades(c)
keepFunding(c)
/********* submit a new funding offer ***********/
fo, err := c.Funding.SubmitOffer(&fundingoffer.SubmitRequest{
Type: "LIMIT",
Symbol: "fUSD",
Amount: 1000,
Rate: 0.012,
Period: 7,
Hidden: true,
})
if err != nil {
panic(err)
}
newOffer := fo.NotifyInfo.(*fundingoffer.New)
/********* cancel funding offer ***********/
fc, err := c.Funding.CancelOffer(&fundingoffer.CancelRequest{
ID: newOffer.ID,
})
if err != nil {
panic(err)
}
fmt.Println(fc)
}
func offers(c *rest.Client) {
// active funding offers
snap, err := c.Funding.Offers("fUSD")
if err != nil {
panic(err)
}
for _, item := range snap.Snapshot {
fmt.Println(item)
}
}
func offerHistory(c *rest.Client) {
// funding offer history
snapHist, err := c.Funding.OfferHistory("fUSD")
if err != nil {
panic(err)
}
for _, item := range snapHist.Snapshot {
fmt.Println(item)
}
}
func loans(c *rest.Client) {
// active loans
snapLoans, err := c.Funding.Loans("fUSD")
if err != nil {
panic(err)
}
for _, item := range snapLoans.Snapshot {
fmt.Println(item)
}
}
func loansHistory(c *rest.Client) {
	// loans history
	snapLoansHist, err := c.Funding.LoansHistory("fUSD")
	if err != nil {
		panic(err)
	}
	for _, item := range snapLoansHist.Snapshot {
		fmt.Println(item)
	}
}
func activeCredits(c *rest.Client) {
// active credits
snapCredits, err := c.Funding.Credits("fUSD")
if err != nil {
panic(err)
}
for _, item := range snapCredits.Snapshot {
fmt.Println(item)
}
}
func creditsHistory(c *rest.Client) {
	// credits history
	snapCreditsHist, err := c.Funding.CreditsHistory("fUSD")
	if err != nil {
		panic(err)
	}
	for _, item := range snapCreditsHist.Snapshot {
		fmt.Println(item)
	}
}
func fundingTrades(c *rest.Client) {
	// funding trades history
	snapTradesHist, err := c.Funding.Trades("fUSD")
	if err != nil {
		panic(err)
	}
	for _, item := range snapTradesHist.Snapshot {
		fmt.Println(item)
	}
}
func keepFunding(c *rest.Client) {
// keep funding
resp, err := c.Funding.KeepFunding(rest.KeepFundingRequest{
Type: "credit",
ID: 12345, // Insert correct ID
})
if err != nil {
log.Fatalf("KeepFunding error: %s", err)
}
spew.Dump(resp)
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package system;
/**
*
* @author marcelo
*/
public class EscolaExceptions extends Exception {
private String messageError = "";
EscolaExceptions(String msg) {
if ("Entrada invalida".equals(msg)) {
messageError = "Entrada inválida ou inexistente!";
} else if ("Mat inv".equals(msg)) {
messageError = "Matrícula inválida! Tente novamente.";
} else if ("err_cad".equals(msg)) {
messageError = "Falha na entrada de dados. Favor verificar os dados digitados.\nO salário só pode conter números e um ponto para casas decimais.";
} else {
messageError = "Erro genérico, não identificado.";
}
}
EscolaExceptions() {
messageError = "Erro genérico.";
}
@Override
public String getMessage() {
return messageError;
}
}
|
def _parse_status(self, s):
    # Match against the module-level `_status` regex; on success, record the
    # upper-cased status, parse any payload data, and keep the trailing info.
    match = _status.match(s)
if match:
status, data, self.info = match.groups()
self.status = status.upper()
if data:
self._parse_data(data)
return True
return False
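The method relies on a module-level `_status` pattern that isn't shown here. Purely as an assumption for illustration, a plausible shape for it would be a status word, optional parenthesised data, and trailing info text:

import re

# Hypothetical pattern: "ok (a=1 b=2) ready" -> ('ok', 'a=1 b=2', 'ready').
_status = re.compile(r'(\w+)(?:\s+\(([^)]*)\))?\s*(.*)')

print(_status.match('ok (a=1 b=2) ready').groups())
# -> ('ok', 'a=1 b=2', 'ready')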
|
/**
* Common error code class.<br>
*
* @author
* @version SDNO 0.5 August 2, 2016
*/
public class CommonErrorCode {
public static final String APP = "wanvpncommon";
public static final String SRC_ENUM = "enum";
public static final String SRC_FIELD = "field";
public static final String SRC_STRING = "string";
public static final String SRC_IP = "ip";
public static final String APP_NAME = "singlevpnsvc";
/**
* The value of the enumeration is out of range.
*/
public static final String CHECKER_ENUM_OUT_OF_RANGE = ErrorCodeUtils.getErrorCode(APP, SRC_ENUM, "out_of_range");
/**
* The field value should not be null.
*/
public static final String CHECKER_FILED_IS_NULL = ErrorCodeUtils.getErrorCode(APP, SRC_FIELD, "field_is_null");
/**
* The value of IP is not a valid IP address without mask.
*/
public static final String CHECKER_IP_INVALID = ErrorCodeUtils.getErrorCode(APP, SRC_IP, "ip_invalid");
/**
* The value of CIDR is not a valid IP address with mask.
*/
public static final String CHECKER_CIDR_INVALID = ErrorCodeUtils.getErrorCode(APP, SRC_IP, "cidr_invalid");
/**
* String can not be blank.
*/
public static final String CHECKER_STRING_IS_BLANK =
ErrorCodeUtils.getErrorCode(APP, SRC_STRING, "string_is_blank");
/**
* String can not be empty.
*/
public static final String CHECKER_STRING_IS_EMPTY =
ErrorCodeUtils.getErrorCode(APP, SRC_STRING, "string_is_empty");
/**
* The value of the string does not match the pattern.
*/
public static final String CHECKER_STRING_IS_INVALID =
ErrorCodeUtils.getErrorCode(APP, "scopechecker", "string_is_invalid");
/**
* The length of the string is out of range.
*/
public static final String CHECKER_STRING_OVER_LENGTH =
ErrorCodeUtils.getErrorCode(APP, SRC_STRING, "string_over_length");
/**
* The field in Scope is unsupported.
*/
public static final String CHECKER_UNSUPPORT_FIELD_TYPE =
ErrorCodeUtils.getErrorCode(APP, "scopechecker", "field_type_unsupport");
/**
* The VLAN scope is not valid.
*/
public static final String CHECKER_VLAN_SCOPE_INVALID =
ErrorCodeUtils.getErrorCode(APP, "vlanscope", "vlanscope_invalid");
/**
* UUID is invalid.
*/
public static final String UUID_INVALID = ErrorCodeUtils.getErrorCode(APP, "uuid", "uuid_invalid");
/**
* It is not a class A IP address.
*/
public static final String CHECK_IP_NOT_A_CLASS = ErrorCodeUtils.getErrorCode(APP, SRC_IP, "ip_not_a_class");
/**
* It is not a class B IP address.
*/
public static final String CHECK_IP_NOT_B_CLASS = ErrorCodeUtils.getErrorCode(APP, SRC_IP, "ip_not_b_class");
/**
* It is not a class C IP address.
*/
public static final String CHECK_IP_NOT_C_CLASS = ErrorCodeUtils.getErrorCode(APP, SRC_IP, "ip_not_c_class");
/**
* It is not a class D IP address.
*/
public static final String CHECK_IP_NOT_D_CLASS = ErrorCodeUtils.getErrorCode(APP, SRC_IP, "ip_not_d_class");
/**
* It is not a class E IP address.
*/
public static final String CHECK_IP_NOT_E_CLASS = ErrorCodeUtils.getErrorCode(APP, SRC_IP, "ip_not_e_class");
/**
* It is not a class A, B, or C IP address.
*/
public static final String CHECK_IP_NOT_ABC_CLASS = ErrorCodeUtils.getErrorCode(APP, SRC_IP, "ip_not_abc_class");
public static final String CHECK_HTTP_CONTEXT_IS_NULL =
ErrorCodeUtils.getErrorCode(APP, "http_context", "http_context_is_null");
public static final String SITE_NOT_BIND_WITH_NE = ErrorCodeUtils.getErrorCode(APP, "site", "not_bind_with_ne");
/**
* The page number is invalid.
*/
public static final String CHECKER_PAGENUMBER_IS_INVALID =
ErrorCodeUtils.getErrorCode(APP, "pageNumber", "in_valid");
public static final String CHECKER_PAGESZIE_IS_INVALID = ErrorCodeUtils.getErrorCode(APP, "pageSize", "in_valid");
/**
* The page capacity is invalid.
*/
public static final String CHECKER_PAGECAPACITY_IS_INVALID =
ErrorCodeUtils.getErrorCode(APP, "pageCapacity", "is_invalid");
public static final String JSON_FORMAT_ERROR = ErrorCodeUtils.getErrorCode(APP, "json", "format_error");
public static final String VPN_NOT_EXIST =
ErrorCodeUtils.getErrorCode(CommonErrorCode.APP_NAME, "vpn", "vpn_not_exist");
private CommonErrorCode() {
}
}
|
def statements_from(self, policy, allow=None, patches=None):
make_statements = lambda: sorted(list(self.statements.make_permission_statements(policy, allow=allow)))
if not patches:
return make_statements()
else:
with mock.patch.multiple(self.statements, **patches):
return make_statements()
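Since the method is only a few lines, a self-contained sketch can show both paths; everything here apart from the method body (the fake statements class, the policy dict, the mocked return value) is invented for illustration.

from unittest import mock

class FakeStatements:
    def make_permission_statements(self, policy, allow=None):
        return iter(['deny:*'])

class Helper:
    def __init__(self):
        self.statements = FakeStatements()

    def statements_from(self, policy, allow=None, patches=None):
        # Same body as above, reproduced to keep the sketch runnable.
        make_statements = lambda: sorted(list(self.statements.make_permission_statements(policy, allow=allow)))
        if not patches:
            return make_statements()
        else:
            with mock.patch.multiple(self.statements, **patches):
                return make_statements()

helper = Helper()
print(helper.statements_from({'Sid': 'x'}))  # -> ['deny:*']
print(helper.statements_from(
    {'Sid': 'x'},
    patches={'make_permission_statements': mock.Mock(return_value=iter(['s3:GetObject']))},
))  # -> ['s3:GetObject']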
|
Striking employees stage protests at all circles; Home Minister appeals to them to discharge duties
Hyderabad: The strike by the Greater Hyderabad Municipal Employees union JAC seeking hike in wages and permanent employment for the outsourced employees has started to affect the sanitation works in the city. The municipal workers registered their protest in all ward offices by raising slogans against the government. Heaps of garbage were spread across the GHMC limits as workers on Monday boycotted the sanitation works in all parts of the city. Protests were held in all ward offices in the GHMC limits on Monday.
On Tuesday the union would hold protests at all circles offices and on Wednesday at all zonal offices, the union leaders said. The union has been demanding minimum wage for sanitary workers be raised from Rs 8,500 to Rs 14,170. And for the skilled workers, they want a hike in minimum wage from Rs 9,500 to Rs 17,380.
A total of nine municipal corporations and 53 municipalities with 40,000 employees are participating in the stir. Apparently annoyed by the stir of the employees, Chief Minister K Chandrashekar Rao directed Home Minister Naini Narsimha Reddy and Finance Minister Eatala Rajender to talk to them and resolve the issue.
The Home Minister appealed to the GHMC JAC to withdraw the strike and discharge their duties. Addressing a meeting after holding a discussion along with Minister E Rajender and JAC leaders, the Home Minister said the government would take a decision on their demands. The demands would be put before the Chief Minister for a final call on Tuesday, he said. Keeping the Ramzan festival in view, he appealed to GHMC staff to attend their duties and remove garbage before it piled up.
Chandrashekar Rao will take a final call on the demands, including the regularisation of outsourced employees. When asked, Reddy said that the Cabinet had to take a policy decision on the regularisation demand of the outsourced staff. But the municipal workers refused to call off the strike till they get a clear, time-bound assurance from the Chief Minister. The two Ministers will hold another round of meetings on Tuesday afternoon.
Meanwhile, the AITUC leaders demanded hike in their salaries on par with the permanent employees according to the 10th PRC fitment. They also reminded the Chief Minister that Hyderabad could not be made a world class city without the help of sanitation workers. They further said the GHMC was not even providing proper tools to work.
BMS leader Shanker said that the government was discriminating against the municipal workers and looking down on them. “Telangana Chief Minister K Chandrashekar Rao, who compared us with gods and saluted us, is now ignoring us. We are ready to call off the strike once the government passes a GO on the salary hike and permanent employment,” he said. Labour Minister Naini Narasimha Reddy held discussions with them on June 20.
The Municipal Administration Department officials also held talks twice, on June 22 and June 23, and sought 10 days’ time to address their demands. However, there has been no response from them so far, he pointed out. The AITUC Greater Hyderabad unit extended its support by burning an effigy of Chandrashekar Rao at Narayanguda junction. Seven unions, which include CITU, FTU, BMS, HMS, IFTU, AITUC and TSTU, served the strike notice on July 7.
|
def animate(self, geodesic, interval=50):
    # Trajectory columns are (t, r, theta, phi); convert the polar pair
    # (r, phi) to Cartesian coordinates for a 2-D plot.
    vals = geodesic.trajectory
    time = vals[:, 0]
    r = vals[:, 1]
    phi = vals[:, 3]
    pos_x = r * np.cos(phi)
    pos_y = r * np.sin(phi)
    frames = pos_x.shape[0]
    # Pad the axis limits by 10% so the trajectory does not touch the frame.
    x_max, x_min = max(pos_x), min(pos_x)
    y_max, y_min = max(pos_y), min(pos_y)
    margin_x = (x_max - x_min) * 0.1
    margin_y = (y_max - y_min) * 0.1
    fig = plt.figure()
    plt.xlim(x_min - margin_x, x_max + margin_x)
    plt.ylim(y_min - margin_y, y_max + margin_y)
    pic = plt.scatter([], [], s=1, c=[])
    plt.scatter(0, 0, color="black")  # mark the central body at the origin

    def _update(frame):
        # Reveal the trajectory one point per frame, coloured by time.
        pic.set_offsets(np.vstack((pos_x[: frame + 1], pos_y[: frame + 1])).T)
        pic.set_array(time[: frame + 1])
        return (pic,)

    self.animated = FuncAnimation(fig, _update, frames=frames, interval=interval)
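Assuming the usual matplotlib/numpy imports in the class's module, here is a hedged usage sketch; the trajectory data and the `plotter` name are invented placeholders.

import numpy as np
import matplotlib.pyplot as plt

class FakeGeodesic:
    # Invented stand-in: columns are (t, r, theta, phi), here a slow inward spiral.
    trajectory = np.column_stack([
        np.linspace(0.0, 10.0, 200),         # t
        np.linspace(10.0, 2.0, 200),         # r
        np.zeros(200),                       # theta (ignored by animate)
        np.linspace(0.0, 4.0 * np.pi, 200),  # phi
    ])

# 'plotter' stands for an instance of the plotting class that defines animate().
plotter.animate(FakeGeodesic(), interval=30)
plt.show()  # or: plotter.animated.save("orbit.mp4"), which requires ffmpeg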
|
Liars lie. The more they get away with it, the more they do it. Trump lies as he breathes, bombarding us with untruth upon untruth until we stumble in disbelief. This is all part of what we call fake news, but fake news is enabled by two things: individuals who lie and an equivocating media culture that is cautious about calling certain people liars.
Thus we have Boris Johnson – a known liar, somehow still considered a possible prime minister – doing what exactly? “Misleading” us? Though the evidence is presented to show that his figure of £350m a week that will come back to us once we leave the EU is false, no one quite wants to say that his claims are a deliberate lie. That this sum would go to the NHS was the leavers’ magic promise, emblazoned on the side of a campaign bus. It was, we are required to say, not a downright lie but a distortion. David Norgrove, head of the UK Statistics Authority, said it was “a clear misuse of official statistics”. So this figure is in dispute. Maybe we can call it “an inverted pyramid of piffle”, as Johnson once said when lying about a four-year affair that he denied.
Q&A: What was wrong with the claim that the UK sends the EU £350m a week? The claim that Britain “sends the EU £350m a week” is wrong because: the rebate negotiated by Margaret Thatcher is removed before anything is paid to Brussels. In 2014, this meant Britain actually “sent” £276m a week to Brussels; in 2016, the figure was £252m.
Slightly less than half that sum – the money that Britain does send to the EU – either comes back to the UK to be spent mainly on agriculture, regional aid, research and community projects, or gets counted towards the country’s international aid target. Regardless of how much the UK “saves” by leaving the EU, the claim that a future government would be able to spend it on the NHS is highly misleading because: It assumes the government would choose to spend on the NHS the money it currently gets back from the EU (£115m a week in 2014), thus cutting funding for agriculture, regional development and research by that amount.
It assumes the UK economy will not be adversely affected by Brexit, which many economists doubt.
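Laid out as quick arithmetic with only the figures quoted above (a back-of-envelope sketch in Python, not an official estimate):

headline = 350         # £m a week, the claim on the bus
sent_2014 = 276        # what was actually sent after the rebate (2014)
rebate = headline - sent_2014   # 74: never leaves the UK
returned = 115         # £m a week flowing back to UK programmes (2014)
net = sent_2014 - returned
print(f"net contribution of about £{net}m a week, not £{headline}m")  # about £161m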
Johnson was sacked by Michael Howard for lying. As a journalist he was known for “embroidering” stories. He was sacked from the Times for lying. Making up quotes. Or, as he explained it to Eddie Mair: “I mildly sandpapered something somebody said.”
Still, he has risen to the top because this lying (he backs Theresa May, and I am a straight banana) is all part of his chaotic and colourful life. The media stand by, seemingly afraid to confront his lies with evidence. The £350m claim is a lie and even if he is half as clever as his mates say he is, he knows it. But why wouldn’t he lie? So far he has reaped significant rewards for it. His appalling behaviour is consequence-free. Indeed, he can now look across the Atlantic and see that lying is not a bar to power but an attribute of it.
The hapless Sean Spicer, Trump’s former spokesman, who lied not just about the crowds at his boss’s inauguration, but about the Holocaust, turned up at the Emmys. Hilarious, right? At least he can laugh at himself!
He lied for money and those who encourage others to laugh at him invite him right back into the inner circle. Satire is not dead, it is merely comatose because it no longer has anything to tell other than a vague truth (that Trump is stupid). Nor does it have any idea who it is telling it to – people who already think Trump is an idiot?
To watch the rehabilitation of these liars is galling, but it happens with media consent. It is a disgusting spectacle. If we stop demanding truth or even the semblance of it, we bypass any possibility of integrity in the name of entertainment. We shrug off the very notion that there can be anything other than fake news.
Boris Johnson is in the business of creating fake news. It is his modus operandi, as it was Sean Spicer’s. One of these men will do a round of chat shows and write some god-awful memoir signifying nothing. The other is spoken of still as a possible prime minister. This is frankly unbelievable. The lies that we know these men have told are not funny, or bombast, or “sand-papering” or “truth–stretching”, or mere quirks of character or ego. They cannot be normalised by showbiz or fellow politicians. Their lies are a violation of decency. Call them out every time.
• Suzanne Moore is a Guardian columnist
|
/**
* Confirm that explicit EC params aren't accepted in certificates.
*/
@Test
public void testExplicitEcParams() throws Exception {
ServiceTester.test("CertificateFactory")
.withAlgorithm("X509")
.skipProvider("BC")
.run(new ServiceTester.Test() {
@Override
public void test(Provider p, String algorithm) throws Exception {
try {
X509Certificate c = certificateFromPEM(p, EC_EXPLICIT_KEY_CERT);
c.verify(c.getPublicKey());
fail();
} catch (InvalidKeyException expected) {
} catch (CertificateParsingException expected) {
}
}
});
}
|
While streaming services and digital rentals make it easy to watch all the movies you want without actually buying anything, there are still film buffs who prefer to own their favorite movies, whether via digital download or physical media like Blu-ray. With so many different services that let you buy films, it can be tough to keep track of what you bought, and where you can play it.
Movies Anywhere aims to solve this problem by sharing films purchased via multiple services. Now, Comcast has joined the digital movie party as the service’s first pay-TV provider. It joins a comprehensive list of sellers, including Microsoft, which partnered with the service in August.
The Comcast partnership means that Comcast Xfinity TV customers will be able to sync all of their movies purchased from that platform with the titles they may have previously bought from Movies Anywhere’s other partners. Once the Comcast and Movies Anywhere accounts have been successfully paired up, Comcast subscribers will have access to the merged collection via the Xfinity X1, the Xfinity Stream app, and other supported Xfinity TV platforms, like Roku.
“Buying” digital movies has always been a bit problematic. Once purchased, you need to make sure you can play a movie on your desired device, which is not a given. Apple’s iTunes purchases, for instance, can only be played on Apple devices (though workarounds do exist). This makes the Movies Anywhere partnership a good deal for Comcast subscribers, as it not only allows their purchased movies to be played back on any device that Movies Anywhere supports, but also brings the entire Movies Anywhere catalog to Xfinity devices.
It might also be pretty lucrative for Movies Anywhere, as the Disney-owned service does its best to entice valuable Comcast subscribers into the fold by offering up a copy of Happy Feet as a reward for syncing. If you sync with two or more sellers on the Movies Anywhere platform, you’ll be treated to The Martian and The Fate of the Furious too.
Movies Anywhere allows purchases from select studios made through Microsoft’s Movies & TV store, Amazon Video, Google Play, iTunes, Vudu, and FandangoNow to be watched via the Movies Anywhere app, including movies you have previously purchased via these services. Studios that support the service include Sony Pictures Entertainment, The Walt Disney Studios (encompassing Disney, Pixar, Marvel Studios, and Lucasfilm), 20th Century Fox Film, Universal Pictures (including DreamWorks and Illumination Entertainment), and Warner Bros.
By connecting your accounts for each of these services, you will be able to watch your purchased films from any of the aforementioned studios on the Movies Anywhere app at any time. This also applies to physical purchases: Blu-ray and UHD Blu-rays from the involved studios often include a digital download code, and while you could just download the file, registering it with one of the above services makes for a more portable experience. As more studios join Movies Anywhere, you may even find the codes included with older purchases are now compatible with the service.
If this sounds familiar, it’s because the concept has been done before with Ultraviolet, which offers similar functionality. Many major studios support Ultraviolet, but one longtime holdout was Disney, which spurned the service in favor of developing its own service in Movies Anywhere. While the service did well enough when it was Disney-only, the addition of other studios is seeing more customers using it.
Movies Anywhere is supported on Amazon Fire devices, Android and Android TV devices, Chromecast, iOS, Roku devices, Windows 10 PC, Xbox game consoles, and popular browsers. For more information, see the Movies Anywhere website.
Updated on December 6, 2018: Added information on Comcast joining the Movies Anywhere service.
|
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2014 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.ascanrulesBeta;
import java.util.Collections;
import java.util.Map;
import org.apache.commons.collections.map.LRUMap;
import org.apache.commons.httpclient.URI;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.parosproxy.paros.core.scanner.HostProcess;
import org.parosproxy.paros.network.HttpMessage;
import org.zaproxy.addon.commonlib.http.HttpFieldsNames;
/**
* MessageCache caches HTTP messages.
*
* @author [email protected]
*/
public class MessageCache {
private static MessageCache instance;
private HostProcess parent = null;
@SuppressWarnings("unchecked")
private Map<URI, HttpMessage> messagecache =
Collections.synchronizedMap(new LRUMap(100)); // a map of 100 objects, synchronized
private static final Logger LOGGER = LogManager.getLogger(MessageCache.class);
private MessageCache(HostProcess hostprocess) {
LOGGER.debug("Initialising");
parent = hostprocess;
}
public static synchronized MessageCache getSingleton(HostProcess hostprocess) {
if (instance == null) createSingleton(hostprocess);
return instance;
}
private static synchronized void createSingleton(HostProcess hostprocess) {
if (instance == null) {
instance = new MessageCache(hostprocess);
}
}
/**
     * Is a message cached for the given URI?
     *
     * @param uri the URI to check
     * @return true if a message is cached for the given URI, false otherwise
     */
public synchronized boolean isMessageCached(URI uri) {
return messagecache.containsKey(uri);
}
/**
     * Gets an HttpMessage for the requested URI, using basemsg as the base message. If the message
     * is available in the cache, return it. If not, retrieve it.
     *
     * @param uri the URI for which an HttpMessage is being requested
     * @param basemsg the base message which will be used to construct new messages
     * @param followRedirects whether redirects should be followed when retrieving the message
     * @return an HttpMessage for the requested URI, using basemsg as the base message
     * @throws Exception if the message could not be retrieved
     */
public synchronized HttpMessage getMessage(
URI uri, HttpMessage basemsg, boolean followRedirects) throws Exception {
if (!isMessageCached(uri)) {
LOGGER.debug("URI '{}' is not in the message cache. Retrieving it.", uri);
// request the file, then add the file to the cache
// use the cookies from an original request, in case authorisation is required
HttpMessage requestmsg = new HttpMessage(uri);
requestmsg.getRequestHeader().setVersion(basemsg.getRequestHeader().getVersion());
try {
requestmsg.setCookieParams(basemsg.getCookieParams());
} catch (Exception e) {
LOGGER.debug("Could not set the cookies from the base request: ", e);
}
requestmsg.getRequestHeader().setHeader(HttpFieldsNames.IF_MODIFIED_SINCE, null);
requestmsg.getRequestHeader().setHeader(HttpFieldsNames.IF_NONE_MATCH, null);
requestmsg.getRequestHeader().setContentLength(requestmsg.getRequestBody().length());
parent.getHttpSender().sendAndReceive(requestmsg, followRedirects);
parent.notifyNewMessage(requestmsg);
// put the message in the cache
messagecache.put(uri, requestmsg);
LOGGER.debug("Put URI '{}' in the message cache.", uri);
} else {
LOGGER.debug("URI '{}' is cached in the message cache.", uri);
}
// and return the cached message.
return messagecache.get(uri);
}
}
|
def bootstrap():
base_packages = [
"curl",
"git",
"rsync",
]
run("/usr/bin/localedef -i en_US -f UTF-8 en_US.UTF-8")
run("export LC_ALL=en_US.UTF-8 && export LANG=en_US.UTF-8")
append("/etc/sysconfig/i18n", 'LC_ALL="en_US.UTF-8"')
run("yum update --assumeyes")
run("yum groupinstall --assumeyes 'Development Tools'")
run("yum install --assumeyes {pkgs}".format(pkgs=" ".join(base_packages)))
append("/etc/hosts", "{0} saltmaster-private".format(env.master_server.private_ip))
uncomment("/etc/sudoers", "wheel.*NOPASSWD")
with fabric_settings(warn_only=True):
reboot()
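One caveat worth flagging: each fabric run() spawns a fresh shell, so the export line above does not carry over to later commands. Two standard workarounds, sketched against the fabric 1.x API the script already uses:

from fabric.api import run, shell_env

# Option 1: chain the export with the command that needs it, in one shell.
run("export LC_ALL=en_US.UTF-8 LANG=en_US.UTF-8 && yum update --assumeyes")

# Option 2: have fabric inject the variables around every command in the block.
with shell_env(LC_ALL="en_US.UTF-8", LANG="en_US.UTF-8"):
    run("yum update --assumeyes")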
|
// Copyright (c) 2017 The Ustore Authors.
// The include guard must precede the includes; the original opened it after them.
#ifndef USTORE_HASH_MURMURHASH_H_
#define USTORE_HASH_MURMURHASH_H_

#include <cstddef>

#include "hash/murmurhash3.h"

namespace ustore {

static const uint32_t kMurmurHashSeed = 0xbc9f1d34;  // from LevelDB

inline uint32_t MurmurHash32(const void* key, const int& len) {
  uint32_t hash;
  MurmurHash3_x86_32(key, len, kMurmurHashSeed, &hash);
  return hash;
}

inline size_t MurmurHash(const void* key, const int& len) {
  return static_cast<size_t>(MurmurHash32(key, len));
}

}  // namespace ustore
#endif  // USTORE_HASH_MURMURHASH_H_
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -Wno-type-defaults #-}
module Main where
import Algebra.Prelude.Core (sNat, Polynomial, SingI (sing), ordToNatural, toIdeal, var)
import qualified AlgebraicPrelude as AP
import Control.Exception (evaluate)
import Data.Maybe (fromJust)
import Data.Proxy
import Data.Reflection
import qualified Data.Sized as SV
import qualified Data.Vector as V
import GHC.TypeNats (KnownNat)
import Gauge
import Numeric.Algebra.Smooth.Weil
main :: IO ()
main =
defaultMain
[ benchFor @(DOrder 2) "DOrder 2"
, benchFor @(DOrder 3) "DOrder 3"
, benchFor @(DOrder 4) "DOrder 4"
, benchFor @(DOrder 3 |*| DOrder 4) "DOrder 3 |*| DOrder 4"
, fromJust $
reifyWeil
(toIdeal [var 0 ^ 3 - var 1 ^ 2, var 1 ^ 3 :: Polynomial AP.Rational 2])
$ \(_ :: Proxy w) ->
benchFor @w "R[x,y]/(x^3 - y^2, y^3)"
]
benchFor ::
forall w n m.
(Reifies w (WeilSettings n m), KnownNat n, KnownNat m) =>
String ->
Benchmark
benchFor title =
bgroup
title
[ env (evaluate $ SV.singleton (Weil @w input)) $ \inp ->
bgroup
lab
[ bgroup
"identity"
[ bench "liftSmoothSeries" $ nf (liftSmoothSeries SV.head) inp
, bench "liftSmoothSerisAD" $ nf (liftSmoothSeriesAD SV.head) inp
, bench "liftSmoothSuccinctTower" $ nf (liftSmoothSuccinctTower SV.head) inp
]
, bgroup
"exp x"
[ bench "liftSmoothSeries" $ nf (liftSmoothSeries (exp . SV.head)) inp
, bench "liftSmoothSerisAD" $ nf (liftSmoothSeriesAD (exp . SV.head)) inp
, bench "liftSmoothSuccinctTower" $ nf (liftSmoothSuccinctTower (exp . SV.head)) inp
]
, let f :: forall x. Floating x => x -> x
f = \x -> sin x * exp (x ^ 2 + x)
in bgroup
"sin x * exp (x^2 + x)"
[ bench "liftSmoothSeries" $ nf (liftSmoothSeries (f . SV.head)) inp
, bench "liftSmoothSerisAD" $ nf (liftSmoothSeriesAD (f . SV.head)) inp
, bench "liftSmoothSuccinctTower" $ nf (liftSmoothSuccinctTower (f . SV.head)) inp
]
, env (evaluate $ SV.replicate' $ SV.head inp) $ \inp3 ->
let f :: forall x. Floating x => SV.Sized V.Vector 3 x -> x
f = \(x SV.:< y SV.:< z SV.:< SV.Nil) -> sin x * exp (y ^ 2 + z)
in bgroup
"sin x * exp (y^2 + z)"
[ bench "liftSmoothSeries" $
nf
(liftSmoothSeries f)
inp3
, bench "liftSmoothSerisAD" $
nf
(liftSmoothSeriesAD f)
inp3
, bench "liftSmoothSuccinctTower" $
nf
(liftSmoothSuccinctTower f)
inp3
]
]
| (lab, input) <-
[ ("sparse", SV.generate sNat $ \o -> if o == 0 then 1 else 0.0 :: Double)
, ("x + d", SV.generate sNat $ \o -> if o == 0 || o == 1 then 1 else 0.0 :: Double)
, ("dense", SV.generate sNat $ \o -> fromIntegral (ordToNatural o) + 1)
]
]
|
Tom Baker
Thomas Stewart Baker (born 20 January 1934), known as Tom Baker, is an English actor. He is best known for playing the Fourth Doctor in the long-running science fiction television series Doctor Who.
Early life
Baker was born in Liverpool. His father was a sailor, and was Jewish. His mother was Roman Catholic. Baker was not very academic and struggled at school. He failed the eleven plus exam. Baker became a monk when he was 15.[1] When he decided to change his life, he first worked in the Royal Army Medical Corps, and then as a construction worker. He took up acting first as a hobby, then professionally.
His first big part was as Grigori Rasputin in the 1971 movie, Nicholas and Alexandra. Baker is best known for playing the Fourth Doctor in the long-running science fiction television series Doctor Who. He played this role from 1974 to 1981. When he got the job, he was working on a building site, because it was hard to find work as an actor.[2]
He is famous for playing the Doctor longer than any other actor. His version of the character is probably the best remembered by many people. As he thought of himself as a role model for children, he would always pretend to be the Doctor in real life, and sign autographs for them. He did not like to see too much violence in Doctor Who. He once changed the script so the Doctor threatened a character with a jelly baby instead of a knife.[3]
He was a narrator for the comedy sketch show Little Britain.
Baker has had many jobs as a voiceover artist. He is very recognized for his voice. In a 2005 survey of British adults, Baker's voice was the fourth most recognisable after the Queen, Tony Blair and Margaret Thatcher.[4]
As Doctor Who
Video games
|
// removeItemsWithNamePrefix iterates through the collection stored at 'key' in 'unstructuredObj'
// and removes any item that has a name that starts with 'prefix'.
func removeItemsWithNamePrefix(unstructuredObj map[string]interface{}, key, prefix string, log logrus.FieldLogger) error {
var preservedItems []interface{}
if err := collections.ForEach(unstructuredObj, key, func(item map[string]interface{}) error {
name, err := collections.GetString(item, "name")
if err != nil {
return err
}
singularKey := strings.TrimSuffix(key, "s")
log := log.WithField(singularKey, name)
log.Debug("Checking " + singularKey)
switch {
case strings.HasPrefix(name, prefix):
log.Debug("Excluding ", singularKey)
default:
log.Debug("Preserving ", singularKey)
preservedItems = append(preservedItems, item)
}
return nil
}); err != nil {
return err
}
unstructuredObj[key] = preservedItems
return nil
}
|
from sklearn.decomposition import TruncatedSVD
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import Normalizer
from sklearn import metrics
from sklearn.cluster import KMeans
import numpy as np
from txt_process_util import processTxtRemoveStopWordTokenized

dataFileTrueTxt = "data/search_snippets/search_snippets_true_text"
file1 = open(dataFileTrueTxt, "r", encoding="utf8")
lines = file1.readlines()
file1.close()

# Load the custom stop-word list.
file1 = open("stopWords.txt", "r", encoding="utf8")
stopWords1 = file1.readlines()
file1.close()
stopWs = []
for stopWord1 in stopWords1:
    stopWord1 = stopWord1.strip().lower()
    if len(stopWord1) == 0:
        continue
    stopWs.append(stopWord1)
stopWs = set(stopWs)

# Each line is "<true_label>\t<text>".
data = []
true_labels = []
for line in lines:
    arr = line.split('\t')
    true_label = arr[0]
    text = arr[1]
    # Note: the custom stop-word filtering is computed but not used below; the
    # raw text is vectorized and TfidfVectorizer's English stop list filters it.
    filtered_sentence = processTxtRemoveStopWordTokenized(text, stopWs)
    data.append(text)
    true_labels.append(true_label)

true_k = np.unique(true_labels).shape[0]

# TF-IDF features, reduced to 20 dimensions with LSA (truncated SVD)
# and re-normalized so k-means works on cosine-like distances.
vectorizer = TfidfVectorizer(max_df=1.0, min_df=1, stop_words='english', use_idf=True, smooth_idf=True, norm='l2')
X = vectorizer.fit_transform(data)
svd = TruncatedSVD(20)
normalizer = Normalizer(copy=False)
lsa = make_pipeline(svd, normalizer)
X = lsa.fit_transform(X)

km = KMeans(n_clusters=true_k, init='k-means++', max_iter=100, n_init=5)
km.fit(X)
# NMI of the clustering against the ground-truth labels (kept for inspection).
score = metrics.normalized_mutual_info_score(true_labels, km.labels_)
for label in km.labels_:
    print(label)
|
Tacked on at the last minute to the $700 billion bailout is a provision making bicycle commuters across the nation eligible for a tax benefit that is already available to both train & bus commuters.
After $700 billion of our American tax dollars just went to bail out private banks, it’s a small relief that some laws were squeezed into that deal, with the sole purpose of benefiting people and the planet.
Starting January 1st, those who commute via bicycle to work are eligible to receive $20.00 per month in tax-free reimbursements from their employer. The reimbursement is meant to defray the costs of owning and operating a bike. Employers can deduct this expense from their federal taxes.
“What this does is to legitimize commuting by bicycle,” said Robert Rayburn, director of the East Bay Bicycle Coalition. “We’ve already seen a swelling of bicycling commuters on account of the rising gas prices. Until this change in the tax code, bike commuters have been denied a benefit that public transit riders get.”
The bicycle tax break was championed by Oregonians. Advocates of the provision had been trying for seven years to get something similar passed in Washington. At the last minute of negotiating the $700 billion bailout, lawmakers were able to add in their “pet projects,” and thus the bicycle commuter tax break is now a reality.
According to the League of American Bicyclists, it’s estimated that $1 million a year might be eliminated from the federal tax roll due to this new employer write off.
Source: SFGate & MercuryNews.com
Photo: Flickr under a Creative Commons License
|
use crate::networking::{RemoteSessionApi, RemoteSessionApiImpl, Session};
// use openssl::rsa::Rsa;
use log::*;
use ploc_common::errors::ServicesError;
use ploc_common::model_types::PublicKey;
#[derive(Debug, Clone)]
pub struct KeyPair {
pub private: Vec<u8>,
pub public: Vec<u8>,
}
// TODO: rename this to ClientParticipant
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ClientSessionKey {
pub session_id: String,
pub key: PublicKey,
}
pub fn start_session(session_id: String, key: String) -> Result<Session, ServicesError> {
// TODO check if id already exists in db?
let res = join_session_with_id(session_id, key);
debug!("Start session res: {:?}", res);
res
}
pub fn join_session_with_id(id: String, key: String) -> Result<Session, ServicesError> {
debug!("Joining session with id: {}, key: {}", id, key);
let api = RemoteSessionApiImpl {};
let res = api
.join_session(ClientSessionKey {
session_id: id,
key: PublicKey { str: key },
})
.map_err(ServicesError::from);
debug!("Join session res: {:?}", res);
res
}
pub fn create_key_pair() -> Result<KeyPair, ServicesError> {
// it was not possible to get rust-openssl working:
// https://stackoverflow.com/questions/63513401/how-to-use-rust-openssl-on-ios
// https://github.com/sfackler/rust-openssl/issues/1331
// for now generating keys in the apps
// note also that we should use EC instead of RSA (doing this now on iOS)
// let rsa = Rsa::generate(4096)?;
// let private_key = rsa.private_key_to_pem()?;
// let public_key = rsa.public_key_to_pem()?;
// Ok(KeyPair {
// private: private_key,
// public: public_key,
// })
Ok(KeyPair {
private: vec![],
public: vec![],
})
}
pub fn ack(uuid: String, stored_participants: i32) -> Result<bool, ServicesError> {
let api = RemoteSessionApiImpl {};
let res = api
.ack(uuid, stored_participants)
.map_err(ServicesError::from);
debug!("Ack res: {:?}", res);
res
}
pub fn participants(session_id: String) -> Result<Session, ServicesError> {
let api = RemoteSessionApiImpl {};
let res = api.participants(session_id).map_err(ServicesError::from);
debug!("Participants res: {:?}", res);
res
}
pub fn delete(peer_id: String) -> Result<(), ServicesError> {
let api = RemoteSessionApiImpl {};
let res = api.delete(peer_id).map_err(ServicesError::from);
debug!("Mark as deleted res: {:?}", res);
res
}
|
#include<bits/stdc++.h>
using namespace std;
#define st first
#define nd second
#define mp make_pair
#define pb push_back
#define sol (root+root)
#define sag (root+root+1)
#define orta ((bas+son)/2)
#define ll long long
#define pii pair<int,int>
const int N=1e5+5;
const int mod=1e9+7;
const int inf=1e9+7;
int n,i,x,y,t1,t2,b,k,t3,t4,ans,j;
pii a[N];
vector<int> sat[N],sut[N];
int main(){
scanf("%d",&n);
for(i=1 ; i<=n ; i++){
scanf("%d %d",&x,&y);
sat[y].pb(x);
sut[x].pb(y);
a[i]=mp(x,y);
}
for(i=0 ; i<=N-5 ; i++)
sort(sat[i].begin(),sat[i].end()),sort(sut[i].begin(),sut[i].end());
for(i=1 ; i<=n ; i++){
x=a[i].st;
y=a[i].nd;
t1=lower_bound(sat[y].begin(),sat[y].end(),x)-sat[y].begin();
t2=lower_bound(sut[x].begin(),sut[x].end(),y)-sut[x].begin();
if(sat[y].size()-t1<=sut[x].size()-t2){
for(j=t1+1 ; j<sat[y].size() ; j++){
b=sat[y][j];
k=b-x;
t3=lower_bound(sut[x].begin(),sut[x].end(),y+k)-sut[x].begin();
t4=lower_bound(sut[b].begin(),sut[b].end(),y+k)-sut[b].begin();
if(t3<sut[x].size() and t4<sut[b].size() and sut[x][t3]==y+k and sut[b][t4]==y+k)
ans++;
}
}
else {
for(j=t2+1 ; j<sut[x].size() ; j++){
b=sut[x][j];
k=b-y;
t3=lower_bound(sat[y].begin(),sat[y].end(),x+k)-sat[y].begin();
t4=lower_bound(sat[b].begin(),sat[b].end(),x+k)-sat[b].begin();
if(t3<sat[y].size() and t4<sat[b].size() and sat[y][t3]==x+k and sat[b][t4]==x+k)
ans++;
}
}
}
cout<<ans<<endl;
}
|
// Plugins returns a list of plugins.
func (r *Registry) Plugins() (plugin.Plugins, error) {
cfg, err := r.Config()
if err != nil {
return nil, err
}
return cfg.Plugins, nil
}
|
# Copyright (c) 2017 <NAME>, All rights reserved.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import sys
import requests.exceptions
import json
import logging
from typing import Optional
class Docker:
docker_socket = '/var/run/docker.sock'
docker_url_base = 'http+unix://%2Fvar%2Frun%2Fdocker.sock'
session = None
@staticmethod
def description(docker_image_id: str, *, conn: Optional['Connection']=None) -> dict:
"""Describe a docker image.
:param docker_image_id: Docker image id.
:param conn: An optional connection to the location.
:return: A dict representation of image metadata."""
# try locally
can_connect_local = True
try:
r = Docker._session().get('%s/images/%s/json' % (Docker.docker_url_base, docker_image_id))
# local docker works but doesn't have it
if r.status_code == 404:
logging.info("Local docker doesn't have image, trying for remote")
else:
# presumably worked, cache it and return
descr = json.loads(r.text)
# strip some stuff we don't need
removes = ('Container', 'Comment', 'ContainerConfig', 'GraphDriver')
for remove in removes:
if remove in descr:
del descr[remove]
# all good
return descr
except requests.exceptions.ConnectionError:
can_connect_local = False
# no go locally, try remotely
if conn is not None:
logging.info("Retrieving description: " + docker_image_id)
msg = conn.send_blocking_cmd(b'retrieve_description', {'image_id': docker_image_id})
if 'description' in msg.params:
return msg.params['description']
# image is in neither location
if can_connect_local:
if conn is not None:
raise RuntimeError("Cannot find image in either local docker or remote image cache: " + docker_image_id)
else:
raise RuntimeError("Cannot find image in local docker: " + docker_image_id)
else:
# local's dead, too
Docker._docker_warning()
@staticmethod
def tarball(docker_image_id: str) -> bytes:
"""Retrieve the tarball of a docker image.
:param docker_image_id: Docker image id.
:return: A stream of bytes that would be the contents of the tar archive."""
try:
r = Docker._session().get('%s/images/%s/get' % (Docker.docker_url_base, docker_image_id))
return r.content
except requests.exceptions.ConnectionError:
Docker._docker_warning()
@staticmethod
def last_image() -> str:
"""Finding the most recent docker image on this machine.
:return: Docker image id of the most recently built docker image"""
r = None
try:
r = Docker._session().get('%s/images/json' % Docker.docker_url_base)
except requests.exceptions.ConnectionError:
Docker._docker_warning()
if len(r.text) == 0:
raise ValueError("Docker has no local images.")
obj = json.loads(r.text)
return obj[0]['Id'][7:19]
@staticmethod
def _docker_warning():
print("""
Cannot (and need to) connect to the docker socket
-------------------------------------------------
The remote cache does not have a description for the combination of this image and this user.
If you think it should, have you changed which user account you're using?
Is docker running on this machine?
You may need to run sudo chmod 666 /var/run/docker.sock
""", file=sys.stderr)
raise RuntimeError("Need a functioning local Docker")
@staticmethod
def _session():
# when we need unix sockets (not deployed on server, hence late binding)
if Docker.session is None:
import requests_unixsocket
Docker.session = requests_unixsocket.Session()
return Docker.session
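A minimal usage sketch for the helper above; it assumes a reachable local Docker daemon with at least one image, and 'Architecture' is just one example key from the metadata dict.

# Hypothetical session against a local Docker daemon.
image_id = Docker.last_image()        # id of the most recently built image
meta = Docker.description(image_id)   # dict of image metadata (local lookup only)
print(image_id, meta.get('Architecture'))
tar_bytes = Docker.tarball(image_id)  # the full image as a tar archive
print(len(tar_bytes), "bytes")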
|
The screaming man with a gun who terrorized fleeing customers in a Lenexa Costco on Sunday morning was a tractor-trailer driver who, like many long-haul truckers, apparently lived in the cab of his truck.
Ronald O. Hunt, 58, left his truck for the last time in the Costco parking lot when — for reasons still unknown to investigators — he walked menacingly into the store where he eventually was gunned down inside by an off-duty lawman.
“It does not appear that he had any specific targets,” said Lenexa Police Public Relations Officer Danny Chavez.
Although many witnesses said Hunt threatened them with his gun, there is not yet any indication that Hunt ever fired his weapon, Chavez said.
“His actual motive — why that Costco, why that time, what he was intending to do — we’re still trying to learn that,” Chavez said.
Hunt has proven to be a mystery so far for investigators, who struggled to locate any relatives to let any family know that Hunt had been killed.
Late Monday police finally located a relative in California, but the person had not had any contact with Hunt for a long time, Chavez said. “They were not close.”
The only local address police could find for Hunt was of the Swift Transportation Kansas Terminal in Edwardsville, Kan.
That was the address court records had for Hunt when he was cited in 2015 and pleaded guilty to a traffic charge for carrying too much weight on his truck.
Repeated phone calls to the Edwardsville Swift terminal since Monday afternoon have not been answered.
About three hours after the shooting Sunday afternoon, several investigators searched Hunt’s truck in the lot. He had arrived in the cab hauling no trailer.
Witnesses said he was shouting loudly and threatening people as he neared the front of the store around 11 a.m. Employees attempted to pull down metal security doors but did not make it in time before Hunt stood inside the front door.
“Multiple people have said they were threatened with the gun,” Chavez said.
Employees shouted for customers to flee through and out the back of the store. An off-duty police captain with the Kansas City, Kan., Police Department — Michael Howell — happened to be shopping at that moment. He identified himself to some Costco employees and he eventually engaged Hunt deep inside the store, police said.
Witnesses reported hearing five or six gunshots. Hunt was killed and no one else was injured.
A team of investigators with the Johnson County Officer Involved Shooting Investigation Team is continuing its investigation Tuesday, Chavez said.
They are working through a list of some 80 to 100 witnesses, he said. Many were people leaving the parking lot whose contact information was gathered by troopers with the Kansas Highway Patrol. Many witnesses are employees and customers who were inside the store.
The investigators will be looking for any surveillance camera footage as well as reviewing 911 calls. Ballistics investigators will be piecing together evidence of spent shell casings, bullet holes and other physical evidence.
|
#include<stdio.h>

int main()
{
	int i,j,totalProcess,time,temp,t=0,TQ,c,c1;
int sum_wait=0,sum_turnaround=0,at[10],bt[10],rt[10];
printf( "\n---------------- Process Scheduling Algorithm ----------------") ;
printf( "\n----------------- Round Robin Scheduling ----------------") ;
printf("Enter total Processes : ");
scanf("%d",&totalProcess);
temp=totalProcess;
for(i=0;i<totalProcess;i++){
printf("\nEnter Arrival time and Burst Time for process %d (Comma Separated): ",i+1);
scanf("%d,%d",&at[i],&bt[i]);
}
//Swapping if user has not entered arrival time in ascending order
for(i=0;i<totalProcess;i++){
for(j=i+1; j<totalProcess; j++){
if(at[i] > at[j]){
c = at[i];
c1 = bt[i];
at[i] = at[j];
bt[i] = bt[j];
at[j] = c;
bt[j] = c1;
}
}
}
for(i=0;i<totalProcess;i++){
rt[i]=bt[i];
}
	printf("Enter time quantum: ");
	scanf("%d",&TQ);
	printf("\n\n\n\t\t\tAnswers\n\n");
	printf("\n\nProcess\t\t\tArrival Time\t\t\tBurst Time\t\tCompletion time\t\tTurnaround time\t\tWaiting time\n\n");
	/* Start the clock at the first arrival; the original started at 0 and
	   compensated with sum_wait = at[0], which skewed the per-process rows. */
	for(time=at[0],i=0;temp!=0;){
if(rt[i]<=TQ && rt[i]>0){
time+=rt[i];
rt[i]=0;
t=1;
}
else if(rt[i]>0){
rt[i]-=TQ;
time+=TQ;
}
if(rt[i]==0 && t==1)
{
temp--;
printf("P[%d]\t\t\t%d\t\t\t\t%d\t\t\t%d\t\t\t%d\t\t\t\t%d\n",i+1,at[i],bt[i],time,time-at[i],time-at[i]-bt[i]);
sum_wait+=time-at[i]-bt[i];
sum_turnaround+=time-at[i];
t=0;
}
if(i==totalProcess-1)
i=0;
else if(at[i+1]<=time)
i++;
else
i=0;
}
printf("\nAvg Waiting Time = %f\n",sum_wait*1.0/totalProcess);
printf("Avg Turn Around Time = %f",sum_turnaround*1.0/totalProcess);
return 0;
}
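To make the bookkeeping above easier to verify, here is a compact round-robin simulation in Python that computes the same completion, turnaround and waiting times (a cross-check sketch, not a line-for-line translation of the C program):

from collections import deque

def round_robin(procs, tq):
    """procs: list of (arrival, burst). Returns {index: (completion, turnaround, waiting)}."""
    order = sorted(range(len(procs)), key=lambda i: procs[i][0])
    arrival = {i: procs[i][0] for i in order}
    burst = {i: procs[i][1] for i in order}
    remaining = dict(burst)
    time, queue, seen, done = 0, deque(), set(), {}

    def admit(now):
        # Move every process that has arrived by 'now' into the ready queue once.
        for i in order:
            if arrival[i] <= now and i not in seen:
                queue.append(i)
                seen.add(i)

    while remaining:
        admit(time)
        if not queue:          # CPU idle until the next arrival
            time += 1
            continue
        i = queue.popleft()
        used = min(tq, remaining[i])
        time += used
        remaining[i] -= used
        admit(time)            # arrivals during the slice go ahead of the requeue
        if remaining[i] == 0:
            del remaining[i]
            done[i] = (time, time - arrival[i], time - arrival[i] - burst[i])
        else:
            queue.append(i)
    return done

print(round_robin([(0, 5), (1, 3), (2, 8)], tq=2))
# -> {1: (9, 8, 5), 0: (12, 12, 7), 2: (16, 14, 6)}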
|
/*
 * Creates a set of num processes. Each child process calls the specified callback.
 * Each created process' identifier is inserted sequentially into the passed array.
 * Note: the callback is expected not to return (e.g. it exits or execs); if it
 * returned, the child would fall through the loop and keep forking.
 */
int createProcesses(pid_t *array, int num, void (*callback) (void))
{
    int created = 0;
    while (created < num)
    {
        array[created] = fork();
        if (!array[created])    /* fork() returns 0 in the child */
        {
            clearMemory();
            callback();
        }
        created++;
    }
    return created;
}
|
import sys
import gtfparse
if not len(sys.argv) == 4:
    print('python ' + sys.argv[0] + ' gtf.file tr.blast.annotate gene.blast.annotate')
    sys.exit(0)
gtf_file = sys.argv[1]
tr_blast = sys.argv[2]
gene_blast = sys.argv[3]
gene_dict = {}
ko_dict = {}
out_dict = {}
out_inf_list = []
def tr_gene_map(gtf):
gtf_df = gtfparse.read_gtf(gtf)
tr_df = gtf_df[gtf_df.feature == 'transcript']
tr_gene_map = tr_df.loc[:, ['transcript_id', 'gene_id']]
tr_gene_map = tr_gene_map.set_index('transcript_id')
return tr_gene_map
tr_gene_map = tr_gene_map(gtf_file)
# Collapse transcript-level BLAST hits to gene-level reciprocal best hits: each
# gene keeps only its highest-scoring KO, and each KO its highest-scoring gene.
with open(tr_blast) as tr_blast_inf:
for eachline in tr_blast_inf:
eachline_inf = eachline.strip().split('\t')
tr_id = eachline_inf[0]
gene_id = tr_gene_map.loc[tr_id, 'gene_id']
ko_id = eachline_inf[1]
bitscore = float(eachline_inf[-1])
eachline_out = '%s\t%s\n' % (gene_id, '\t'.join(eachline_inf[1:]))
if gene_id not in gene_dict and ko_id not in ko_dict:
gene_dict[gene_id] = [ko_id, bitscore]
ko_dict[ko_id] = [gene_id, bitscore]
out_dict.setdefault(gene_id, {})[ko_id] = eachline_out
elif gene_id in gene_dict:
if bitscore > gene_dict[gene_id][-1]:
old_ko = gene_dict[gene_id][0]
del ko_dict[old_ko]
del gene_dict[gene_id]
del out_dict[gene_id][old_ko]
if ko_id in ko_dict:
if bitscore > ko_dict[ko_id][-1]:
old_gene = ko_dict[ko_id][0]
del ko_dict[ko_id]
del gene_dict[old_gene]
del out_dict[old_gene][ko_id]
ko_dict[ko_id] = [gene_id, bitscore]
gene_dict[gene_id] = [ko_id, bitscore]
out_dict.setdefault(gene_id, {})[ko_id] = eachline_out
else:
ko_dict[ko_id] = [gene_id, bitscore]
gene_dict[gene_id] = [ko_id, bitscore]
out_dict.setdefault(gene_id, {})[ko_id] = eachline_out
else:
if bitscore > ko_dict[ko_id][-1]:
old_gene = ko_dict[ko_id][0]
del out_dict[old_gene][ko_id]
del gene_dict[old_gene]
ko_dict[ko_id] = [gene_id, bitscore]
gene_dict[gene_id] = [ko_id, bitscore]
out_dict.setdefault(gene_id, {})[ko_id] = eachline_out
with open(gene_blast, 'w') as gene_blast_inf:
for each_gene in out_dict:
for each_ko in out_dict[each_gene]:
gene_blast_inf.write(out_dict[each_gene][each_ko])
|
/*!
* Copyright 2022 XGBoost contributors
*/
#pragma once
#include <federated.grpc.pb.h>
#include <federated.pb.h>
#include <grpcpp/grpcpp.h>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <iostream>
#include <limits>
#include <memory>
#include <stdexcept>
#include <string>
namespace xgboost {
namespace federated {
/**
* @brief A wrapper around the gRPC client.
*/
class FederatedClient {
public:
FederatedClient(std::string const &server_address, int rank, std::string const &server_cert,
std::string const &client_key, std::string const &client_cert)
: stub_{[&] {
grpc::SslCredentialsOptions options;
options.pem_root_certs = server_cert;
options.pem_private_key = client_key;
options.pem_cert_chain = client_cert;
grpc::ChannelArguments args;
args.SetMaxReceiveMessageSize(std::numeric_limits<int>::max());
auto channel =
grpc::CreateCustomChannel(server_address, grpc::SslCredentials(options), args);
channel->WaitForConnected(
gpr_time_add(gpr_now(GPR_CLOCK_REALTIME), gpr_time_from_seconds(60, GPR_TIMESPAN)));
return Federated::NewStub(channel);
}()},
rank_{rank} {}
/** @brief Insecure client for connecting to localhost only. */
FederatedClient(std::string const &server_address, int rank)
: stub_{[&] {
grpc::ChannelArguments args;
args.SetMaxReceiveMessageSize(std::numeric_limits<int>::max());
return Federated::NewStub(
grpc::CreateCustomChannel(server_address, grpc::InsecureChannelCredentials(), args));
}()},
rank_{rank} {}
std::string Allgather(std::string const &send_buffer) {
AllgatherRequest request;
request.set_sequence_number(sequence_number_++);
request.set_rank(rank_);
request.set_send_buffer(send_buffer);
AllgatherReply reply;
grpc::ClientContext context;
context.set_wait_for_ready(true);
grpc::Status status = stub_->Allgather(&context, request, &reply);
if (status.ok()) {
return reply.receive_buffer();
} else {
std::cout << status.error_code() << ": " << status.error_message() << '\n';
throw std::runtime_error("Allgather RPC failed");
}
}
std::string Allreduce(std::string const &send_buffer, DataType data_type,
ReduceOperation reduce_operation) {
AllreduceRequest request;
request.set_sequence_number(sequence_number_++);
request.set_rank(rank_);
request.set_send_buffer(send_buffer);
request.set_data_type(data_type);
request.set_reduce_operation(reduce_operation);
AllreduceReply reply;
grpc::ClientContext context;
context.set_wait_for_ready(true);
grpc::Status status = stub_->Allreduce(&context, request, &reply);
if (status.ok()) {
return reply.receive_buffer();
} else {
std::cout << status.error_code() << ": " << status.error_message() << '\n';
throw std::runtime_error("Allreduce RPC failed");
}
}
std::string Broadcast(std::string const &send_buffer, int root) {
BroadcastRequest request;
request.set_sequence_number(sequence_number_++);
request.set_rank(rank_);
request.set_send_buffer(send_buffer);
request.set_root(root);
BroadcastReply reply;
grpc::ClientContext context;
context.set_wait_for_ready(true);
grpc::Status status = stub_->Broadcast(&context, request, &reply);
if (status.ok()) {
return reply.receive_buffer();
} else {
std::cout << status.error_code() << ": " << status.error_message() << '\n';
throw std::runtime_error("Broadcast RPC failed");
}
}
private:
std::unique_ptr<Federated::Stub> const stub_;
int const rank_;
uint64_t sequence_number_{};
};
} // namespace federated
} // namespace xgboost
|
def evaluate(
self,
state: MdpState,
actions: List[Action]
) -> np.ndarray:
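        """Evaluate each action at the given state, returning one value per
        action (zeros when the feature matrix has no columns)."""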
log_with_border(logging.DEBUG, f'Evaluating {len(actions)} action(s)')
X = self.get_X([state] * len(actions), actions, False)
if X.shape[1] == 0:
return np.repeat(0.0, len(actions))
action_values = self.model.evaluate(X)
log_with_border(logging.DEBUG, 'Evaluation complete')
return action_values
|
module Level4.Problem84
( problem
) where
import Control.Monad
import Data.List
import System.Random
import Problem
-- I couldn't get the simulation quite right for 6 sided dice but it worked
-- out ok for 4 sided dice. Guess the margins are less forgiving in that case.
-- The play turn is a mess and the performance isn't ideal, but it gets the desired result
problem :: Problem Integer
problem =
Problem
84
"Monopoly odds"
(read $
modal $ map (fst . fst) $ reverse $ statisticsForGame (1, 4) 100000 0)
data Tiles
= Go
| GoToJail
| CommunityChest
| Chance
| Jail
| Normal String
deriving (Show, Eq)
data Card
= AdvanceToGoCard
| GoToJailCard
| GoTo String
| GoToNext Char
| GoBack3
| Ignored
deriving (Show)
data GameState = GameState
{ position :: Int
, chanceDeck :: [Card]
, communityChestDeck :: [Card]
, rolls :: [(Int, Int)]
, prevDoubles :: Int
} deriving (Show)
initGame :: (Eq t, Num t) => (Int, Int) -> t -> Int -> GameState
initGame dRange turns seed =
GameState
{ position = 0
, chanceDeck = chDeck
, communityChestDeck = ccDeck
, prevDoubles = 0
, rolls = diceRolls
}
where
gen = mkStdGen seed
(diceRolls, genAfterRolls) = generateNDiceRolls turns dRange gen
(ccDeck, genAftercc) = shuffleCards genAfterRolls communityChestCards
(chDeck, _) = shuffleCards genAftercc chanceCards
playTurn :: GameState -> Maybe GameState
playTurn gs =
case getDiceRolls $ rolls gs of
Nothing -> Nothing
Just (roll, double, remaining) ->
if newDoubles == 3
then Just
GameState
{ position = 10
, chanceDeck = chanceDeck gs
, communityChestDeck = communityChestDeck gs
, rolls = remaining
, prevDoubles = 0
}
else Just $ handlePos' gs
where handlePos' gs =
case board !! position updated of
(_, Chance) -> handlePos updated
_ -> updated
where
updated = handlePos gs
newDoubles =
if double
then prevDoubles gs + 1
else 0
newPos = (position gs + roll) `mod` length board
handlePos gs =
case board !! newPos of
(_, GoToJail) ->
GameState
{ position = 10
, chanceDeck = chanceDeck gs
, communityChestDeck = communityChestDeck gs
, rolls = remaining
, prevDoubles = newDoubles
}
(_, Chance) ->
let (c, deck) = drawCard $ chanceDeck gs
in GameState
{ position = nextPosition c newPos
, chanceDeck = deck
, communityChestDeck = communityChestDeck gs
, rolls = remaining
, prevDoubles = newDoubles
}
(_, CommunityChest) ->
let (c, deck) = drawCard $ communityChestDeck gs
in GameState
{ position = nextPosition c newPos
, chanceDeck = chanceDeck gs
, communityChestDeck = deck
, rolls = remaining
, prevDoubles = newDoubles
}
_ ->
GameState
{ position = newPos
, chanceDeck = chanceDeck gs
, communityChestDeck = communityChestDeck gs
, rolls = remaining
, prevDoubles = newDoubles
}
playGame :: (Num t, Eq t) => (Int, Int) -> t -> Int -> [(Int, Tiles)]
playGame dRange turns s =
map ((board !!) . position) $
unfoldr (playTurn >=> (\gs' -> return (gs', gs'))) (initGame dRange turns s)
modal :: Show a => [a] -> String
modal l =
concatMap
((\s ->
if length s < 2
then '0' : s
else s) .
show) $
take 3 l
statisticsForGame ::
(Eq t, Num t) => (Int, Int) -> t -> Int -> [((Int, Tiles), Int)]
statisticsForGame dRange turns s =
sortBy (\a b -> compare (snd a) (snd b)) $
map (\l -> (head l, length l)) $
group $ sortBy (\a b -> compare (fst a) (fst b)) $ playGame dRange turns s
getDiceRolls :: (Eq a, Num a) => [(a, a)] -> Maybe (a, Bool, [(a, a)])
getDiceRolls [] = Nothing
getDiceRolls (r:rs) = Just (sumDice r, sameDice r, rs)
where
sameDice (d1, d2) = d1 == d2
sumDice (d1, d2) = d1 + d2
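-- Draw the top card and cycle it to the bottom of the deck.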
drawCard :: [a] -> (a, [a])
drawCard (c:cs) = (c, cs ++ [c])
communityChestCards :: [Card]
communityChestCards = [AdvanceToGoCard, GoToJailCard] ++ replicate 14 Ignored
chanceCards :: [Card]
chanceCards =
[ AdvanceToGoCard
, GoToJailCard
, GoTo "C1"
, GoTo "E3"
, GoTo "H2"
, GoTo "R1"
, GoToNext 'R'
, GoToNext 'R'
, GoToNext 'U'
, GoBack3
] ++
replicate 6 Ignored
board :: [(Int, Tiles)]
board =
zip
[0 ..]
[ Go
, Normal "A1"
, CommunityChest
, Normal "A2"
, Normal "T1"
, Normal "R1"
, Normal "B1"
, Chance
, Normal "B2"
, Normal "B3"
, Jail
, Normal "C1"
, Normal "U1"
, Normal "C2"
, Normal "C3"
, Normal "R2"
, Normal "D1"
, CommunityChest
, Normal "D2"
, Normal "D3"
, Normal "FP"
, Normal "E1"
, Chance
, Normal "E2"
, Normal "E3"
, Normal "R3"
, Normal "F1"
, Normal "F2"
, Normal "U2"
, Normal "F3"
, GoToJail
, Normal "G1"
, Normal "G2"
, CommunityChest
, Normal "G3"
, Normal "R4"
, Chance
, Normal "H1"
, Normal "T2"
, Normal "H2"
]
cardName :: Tiles -> String
cardName (Normal s) = s
cardName _ = ""
getTileByName :: String -> (Int, Tiles)
getTileByName n = head res
where
res = filter ((n ==) . cardName . snd) $ cycle board
getTileByType :: Char -> Int -> (Int, Tiles)
getTileByType t p = head res
where
res =
filter
(\tile -> cardName (snd tile) /= "" && t == head (cardName (snd tile))) $
drop p $ cycle board
nextPosition :: Card -> Int -> Int
nextPosition card curPos =
case card of
Ignored -> curPos
AdvanceToGoCard -> 0
GoToJailCard -> 10
GoTo s -> fst $ getTileByName s
GoToNext c -> fst $ getTileByType c curPos
GoBack3 -> mod (curPos - 3) (length board)
rollDice :: RandomGen t => (Int, Int) -> t -> (Int, t)
rollDice (lo, hi) g = (rolled, nextG)
where
(v, nextG) = next g
rolled = lo + (v `mod` (1 + hi - lo))
generateRandNumbers ::
(RandomGen t, Num t1, Eq t1) => t1 -> (Int, Int) -> t -> ([Int], t)
generateRandNumbers n range g =
( map fst res
, if not (null res)
then snd $ head res
else g)
where
res =
unfoldr
(\(n', g') ->
let (roll, nextG) = rollDice range g'
in if n' == n
then Nothing
else Just ((roll, nextG), (n' + 1, nextG)))
(0, g)
generateNDiceRolls ::
(RandomGen t, Num t1, Eq t1) => t1 -> (Int, Int) -> t -> ([(Int, Int)], t)
generateNDiceRolls turns dRange gen = (zip dice1Rolls dice2Rolls, gen2)
where
(dice1Rolls, gen1) = generateRandNumbers turns dRange gen
(dice2Rolls, gen2) = generateRandNumbers turns dRange gen1
shuffleCards :: RandomGen t => t -> [b] -> ([b], t)
shuffleCards g cards =
(map snd $ sortBy (\a b -> fst a `compare` fst b) $ zip nums cards, g2)
where
(nums, g2) = generateRandNumbers (length cards) (1, length cards) g
|
#include <iostream>
#include <algorithm>
#include <string>
#include <cstdio>
#include <set>
#include <queue>
#include <map>
#include <iomanip>
#include <cstring>
#include <cmath>
using namespace std;
const long double eps = 1e-12;
typedef long long ll;
typedef pair<int , int> pt;
#define sz(a) ((int) a.size() )
#define LL(x) (x << 1)
#define RR(x) ((x << 1) | 1)
#define For(i , a , b) for (int i = a ; i <= b ; i++)
#define Ford(i , a , b) for (int i = a ; i >= b ; i--)
#define Rep(i , n) for (int i = 0 ; i < n ; i++)
long long t , a , b;
void ReadData() {
cin >> t >> a >> b;
}
int Solve() { // if P(t) > all of coeff
vector<long long> coeff;
// transfer from b to base a
long long cur = b;
while (cur) {
coeff.push_back(cur % a);
cur /= a;
}
// we have coeff in base a , now we check
long long collect = 0;
long long pwr = 1;
// Rep(i , sz(coeff)) cout << coeff[i] << " "; cout << endl;
Rep(i , sz(coeff)) {
collect += coeff[i] * pwr;
pwr *= t;
}
if (collect != a) return 0;
return 1;
}
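// Case analysis: the answer is immediate when t > a, t > b, or a > b;
// the degenerate bases t == 1 and a == 1 are handled separately.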
void Process() {
if (t > a || t > b || a > b) cout << (a == b) << endl; else
if (t > 1) cout << Solve() + (b == a) << endl; else {
if (a == 1) {
if (b == 1) {
cout << "inf" << endl;
} else cout << 0 << endl;
return;
}
// t == 1 and a > 1
{
bool ok = true;
long long cur = b;
while (cur != 1) {
if (cur % a != 0) {
ok = false;
break;
}
cur /= a;
}
int res = 0;
if (ok) res++;
res += Solve();
cout << res << endl;
}
}
}
int main() {
ios_base::sync_with_stdio(false);
cin.tie(NULL);
// freopen("/users/youaremysky/documents/workspace/input.inp" , "r" , stdin);
ReadData();
Process();
}
|
import {ConnectionClientParameters} from "../../parameters/event/ConnectionClientParameters";
import {EmitFlags} from "../../components/EmitFlags";
export abstract class SocketClient {
protected constructor(readonly parameters: ConnectionClientParameters) {}
abstract connect(connData: string): Promise<void>;
abstract disconnect(): Promise<void>;
abstract emit(event: string, data: string, callback?: (data?: string, err?: any) => void, flags?: EmitFlags): Promise<void>;
abstract on(event: string, callback: (data: string, cb?: (data: string) => void) => void): void;
    abstract offEvent(event: string): void;
    abstract offAll(): void;
abstract get reservedEvents(): string[];
}
|
The Effectiveness of an Integrated Treatment Approach for Clients With Dual Diagnoses
Objective: A randomized experiment tested the effectiveness of adding a psychoeducationally oriented group therapy intervention, Good Chemistry Groups, to standard inpatient chemical dependency services for clients dually diagnosed with mental and substance dependence disorders. Method: Ninety-seven clients were randomly assigned to an experimental group (n = 48) and a control group (n = 49). Outcome variables included drug and alcohol use, participation in self-help support group meetings, incarceration days, psychiatric symptoms, psychiatric inpatient admissions, compliance with prescribed psychotropic medication plans, and composite scores on the Addiction Severity Index. Results: No significant treatment effects were found on any of the outcome variables. The findings were generally consistent with those of prior controlled studies. Conclusion: Good Chemistry Groups did not add to the effects of standard treatments for dually diagnosed clients. Practitioners should continue to develop and evaluate alternative integrated treatment approaches that might prove to be more effective than this one.
|
package main
import (
"bytes"
"encoding/base64"
"encoding/binary"
"fmt"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"net/http"
"net/http/httptest"
"strings"
"testing"
)
type ntlmServer struct {
t *testing.T
delegate http.Handler
}
func (s ntlmServer) ServeHTTP(w http.ResponseWriter, req *http.Request) {
hdr := req.Header.Get("Proxy-Authorization")
if !strings.HasPrefix(hdr, "NTLM ") {
sendProxyAuthRequired(w)
return
}
msg, err := base64.StdEncoding.DecodeString(strings.TrimPrefix(hdr, "NTLM "))
require.Nil(s.t, err)
require.True(s.t, bytes.Equal(msg[0:8], []byte("NTLMSSP\x00")), "Missing NTLMSSP signature")
msgType := binary.LittleEndian.Uint32(msg[8:12])
switch msgType {
case 1:
sendChallengeResponse(w)
case 3:
req.Header.Del("Proxy-Authenticate")
s.delegate.ServeHTTP(w, req)
default:
s.t.Fatalf("Unexpected NTLM message type: %x", msgType)
}
}
func sendProxyAuthRequired(w http.ResponseWriter) {
w.Header().Set("Proxy-Authenticate", "NTLM")
w.Header().Set("Connection", "close")
w.WriteHeader(http.StatusProxyAuthRequired)
fmt.Fprintf(w, "<html><body>oh noes!</body></html>")
}
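// sendChallengeResponse replies with a canned NTLM Type 2 (challenge) message.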
func sendChallengeResponse(w http.ResponseWriter) {
w.Header().Set("Proxy-Authenticate", "NTLM TlRMTVNTUAACAAAADAAMADgAAAAFgomi+Rp9UDbAycMAAAAAAAAAAKIAogBEAAAABgEAAAAAAA9HAEwATwBCAEEATAACAAwARwBMAE8AQgBBAEwAAQAeAFAAWABZAEEAVQAwADAAMgBNAEUATAAwADEAMAAzAAQAHABnAGwAbwBiAGEAbAAuAGEAbgB6AC4AYwBvAG0AAwA8AHAAeAB5AGEAdQAwADAAMgBtAGUAbAAwADEAMAAzAC4AZwBsAG8AYgBhAGwALgBhAG4AegAuAGMAbwBtAAcACABQ7ZOkOQbVAQAAAAA=")
w.WriteHeader(http.StatusProxyAuthRequired)
}
func TestNtlmAuth(t *testing.T) {
requests := make(chan string, 3)
server := httptest.NewServer(testServer{requests})
defer server.Close()
parent := httptest.NewServer(
ntlmServer{t, testProxy{requests, "parent proxy", newDirectProxy()}})
defer parent.Close()
handler := newChildProxy(parent)
handler.auth = &authenticator{"isis", "malory", "guest"}
child := httptest.NewServer(testProxy{requests, "child proxy", handler})
defer child.Close()
tr := &http.Transport{Proxy: proxyServer(t, child)}
testGetRequest(t, tr, server.URL)
require.Len(t, requests, 3)
assert.Equal(t, "GET to child proxy", <-requests)
assert.Equal(t, "GET to parent proxy", <-requests)
assert.Equal(t, "GET to server", <-requests)
}
func TestNtlmAuthOverTls(t *testing.T) {
requests := make(chan string, 3)
server := httptest.NewTLSServer(testServer{requests})
defer server.Close()
parent := httptest.NewServer(
ntlmServer{t, testProxy{requests, "parent proxy", newDirectProxy()}})
defer parent.Close()
handler := newChildProxy(parent)
handler.auth = &authenticator{"isis", "malory", "guest"}
child := httptest.NewServer(testProxy{requests, "child proxy", handler})
defer child.Close()
tr := &http.Transport{Proxy: proxyServer(t, child), TLSClientConfig: tlsConfig(server)}
testGetRequest(t, tr, server.URL)
require.Len(t, requests, 3)
assert.Equal(t, "CONNECT to child proxy", <-requests)
assert.Equal(t, "CONNECT to parent proxy", <-requests)
assert.Equal(t, "GET to server", <-requests)
}
|
// Copyright 2019 NetApp, Inc. All Rights Reserved.
package ontap
import (
"fmt"
"testing"
log "github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
tridentconfig "github.com/netapp/trident/config"
drivers "github.com/netapp/trident/storage_drivers"
)
// ToStringPointer takes a string and returns a string pointer
func ToStringPointer(s string) *string {
return &s
}
func NewTestLUNHelper(storagePrefix string, context tridentconfig.DriverContext) *LUNHelper {
commonConfigJSON := fmt.Sprintf(`
{
"managementLIF": "10.0.207.8",
"dataLIF": "10.0.207.7",
"svm": "iscsi_vs",
"aggregate": "aggr1",
"username": "admin",
"password": "password",
"storageDriverName": "ontap-san-economy",
"storagePrefix": "%v",
"debugTraceFlags": {"method": true, "api": true},
"version":1
}
`, storagePrefix)
// parse commonConfigJSON into a CommonStorageDriverConfig object
commonConfig, err := drivers.ValidateCommonSettings(commonConfigJSON)
if err != nil {
log.Errorf("could not decode JSON configuration: %v", err)
return nil
}
config := &drivers.OntapStorageDriverConfig{}
config.CommonStorageDriverConfig = commonConfig
helper := NewLUNHelper(*config, context)
return helper
}
func TestSnapshotNames_DockerContext(t *testing.T) {
helper := NewTestLUNHelper("storagePrefix_", tridentconfig.ContextDocker)
snapName1 := helper.getInternalSnapshotName("snapshot-123")
assert.Equal(t, "_snapshot_snapshot_123", snapName1, "Strings not equal")
snapName2 := helper.getInternalSnapshotName("snapshot")
assert.Equal(t, "_snapshot_snapshot", snapName2, "Strings not equal")
snapName3 := helper.getInternalSnapshotName("_snapshot")
assert.Equal(t, "_snapshot__snapshot", snapName3, "Strings not equal")
snapName4 := helper.getInternalSnapshotName("_____snapshot")
assert.Equal(t, "_snapshot______snapshot", snapName4, "Strings not equal")
k8sSnapName1 := helper.getInternalSnapshotName("snapshot-0bf1ec69_da4b_11e9_bd10_000c29e763d8")
assert.Equal(t, "_snapshot_snapshot_0bf1ec69_da4b_11e9_bd10_000c29e763d8", k8sSnapName1, "Strings not equal")
}
func TestSnapshotNames_KubernetesContext(t *testing.T) {
helper := NewTestLUNHelper("storagePrefix_", tridentconfig.ContextKubernetes)
k8sSnapName1 := helper.getInternalSnapshotName("snapshot-0bf1ec69_da4b_11e9_bd10_000c29e763d8")
assert.Equal(t, "_snapshot_snapshot_0bf1ec69_da4b_11e9_bd10_000c29e763d8", k8sSnapName1, "Strings not equal")
k8sSnapName2 := helper.getInternalSnapshotName("mySnap")
assert.Equal(t, "_snapshot_mySnap", k8sSnapName2, "Strings not equal")
}
func TestHelperGetters(t *testing.T) {
helper := NewTestLUNHelper("storagePrefix_", tridentconfig.ContextDocker)
snapPathPattern := helper.GetSnapPathPattern("my-Bucket")
assert.Equal(t, "/vol/my_Bucket/storagePrefix_*_snapshot_*", snapPathPattern, "Strings not equal")
snapPathPatternForVolume := helper.GetSnapPathPatternForVolume("my-Vol")
assert.Equal(t, "/vol/*/storagePrefix_my_Vol_snapshot_*", snapPathPatternForVolume, "Strings not equal")
snapPath := helper.GetSnapPath("my-Bucket", "storagePrefix_my-Lun", "snap-1")
assert.Equal(t, "/vol/my_Bucket/storagePrefix_my_Lun_snapshot_snap_1", snapPath, "Strings not equal")
snapName1 := helper.GetSnapshotName("my-Lun", "my-Snapshot")
assert.Equal(t, "storagePrefix_my_Lun_snapshot_my_Snapshot", snapName1, "Strings not equal")
snapName2 := helper.GetSnapshotName("my-Lun", "snapshot-123")
assert.Equal(t, "storagePrefix_my_Lun_snapshot_snapshot_123", snapName2, "Strings not equal")
internalSnapName := helper.GetInternalSnapshotName("storagePrefix_my-Lun", "my-Snapshot")
assert.Equal(t, "storagePrefix_my_Lun_snapshot_my_Snapshot", internalSnapName, "Strings not equal")
internalVolName := helper.GetInternalVolumeName("my-Lun")
assert.Equal(t, "storagePrefix_my_Lun", internalVolName, "Strings not equal")
lunPath := helper.GetLUNPath("my-Bucket", "my-Lun")
assert.Equal(t, "/vol/my_Bucket/storagePrefix_my_Lun", lunPath, "Strings not equal")
lunName := helper.GetInternalVolumeNameFromPath(lunPath)
assert.Equal(t, "storagePrefix_my_Lun", lunName, "Strings not equal")
lunPathPatternForVolume := helper.GetLUNPathPattern("my-Vol")
assert.Equal(t, "/vol/*/storagePrefix_my_Vol", lunPathPatternForVolume, "Strings not equal")
}
func TestValidateLUN(t *testing.T) {
helper := NewTestLUNHelper("storagePrefix_", tridentconfig.ContextDocker)
isValid := helper.IsValidSnapLUNPath("/vol/myBucket/storagePrefix_myLun_snapshot_mysnap")
assert.True(t, isValid, "boolean not true")
}
func TestGetComponents_DockerContext(t *testing.T) {
helper := NewTestLUNHelper("storagePrefix_", tridentconfig.ContextDocker)
snapName := helper.GetSnapshotNameFromSnapLUNPath("/vol/myBucket/storagePrefix_myLun_snapshot_mysnap")
assert.Equal(t, "mysnap", snapName, "Strings not equal")
volName := helper.GetExternalVolumeNameFromPath("/vol/myBucket/storagePrefix_myLun_snapshot_mysnap")
assert.Equal(t, "myLun", volName, "Strings not equal")
bucketName := helper.GetBucketName("/vol/myBucket/storagePrefix_myLun_snapshot_mysnap")
assert.Equal(t, "myBucket", bucketName, "Strings not equal")
}
func TestGetComponents_KubernetesContext(t *testing.T) {
helper := NewTestLUNHelper("storagePrefix_", tridentconfig.ContextKubernetes)
snapName := helper.GetSnapshotNameFromSnapLUNPath("/vol/myBucket/storagePrefix_myLun_snapshot_snapshot_123")
assert.Equal(t, "snapshot_123", snapName, "Strings not equal")
snapName2 := helper.GetSnapshotNameFromSnapLUNPath("/vol/myBucket/storagePrefix_myLun_snapshot_mysnap")
assert.Equal(t, "mysnap", snapName2, "Strings not equal")
volName := helper.GetExternalVolumeNameFromPath("/vol/myBucket/storagePrefix_myLun_snapshot_mysnap")
assert.Equal(t, "myLun", volName, "Strings not equal")
bucketName := helper.GetBucketName("/vol/myBucket/storagePrefix_myLun_snapshot_mysnap")
assert.Equal(t, "myBucket", bucketName, "Strings not equal")
}
func TestGetComponentsNoSnapshot(t *testing.T) {
helper := NewTestLUNHelper("storagePrefix_", tridentconfig.ContextDocker)
snapName := helper.GetSnapshotNameFromSnapLUNPath("/vol/myBucket/storagePrefix_myLun")
assert.Equal(t, "", snapName, "Strings not equal")
volName := helper.GetExternalVolumeNameFromPath("/vol/myBucket/storagePrefix_myLun")
assert.Equal(t, "myLun", volName, "Strings not equal")
bucketName := helper.GetBucketName("/vol/myBucket/storagePrefix_myLun")
assert.Equal(t, "myBucket", bucketName, "Strings not equal")
snapName2 := helper.GetSnapshotNameFromSnapLUNPath("/vol/myBucket/storagePrefix_myLun")
assert.Equal(t, "", snapName2, "Strings not equal")
volName2 := helper.GetExternalVolumeNameFromPath("myBucket/storagePrefix_myLun")
assert.NotEqual(t, "myLun", volName2, "Strings are equal")
assert.Equal(t, "", volName2, "Strings are NOT equal")
}
|
Procedure Guideline for Diuretic Renography in Children 3.0*
Hydronephrosis (distension of the pelvicalyceal system) is one of the most common indications for radionuclide evaluation of the kidneys in pediatric patients. The etiology of the hydronephrosis can be an obstructed renal pelvis, an obstructed ureter, vesicoureteral reflux, the bladder itself or the bladder outlet, infection, or congenital in nature. Contrast intravenous urography, ultrasonography and conventional radionuclide renography cannot reliably differentiate obstructive from nonobstructive causes of hydronephrosis and hydroureteronephrosis (distension of the pelvicalyceal system and ureter). The pressure perfusion study (Whitaker test), which measures collecting system pressure under conditions of increased pelvic infusion, is relatively invasive. The evaluation of function in the presence of obstruction does not give a reliable indication of the potential for recovery following surgical correction. High pressure in the collecting system results in reduction of renal blood flow and function. The most common cause of unilateral obstruction is the presence of a ureteropelvic obstruction. Obstructions can also occur more distally at the ureterovesical junction. Bilateral hydronephrosis can be produced by posterior urethral valves, bilateral ureteropelvic obstructions or even a full bladder. The purpose of diuretic renography is to differentiate a true obstruction from a dilated nonobstructed system (stasis) by serial imaging after intravenous administration of furosemide (Lasix).
|
import {CryptoUtils, LocalAddress} from "loom-js";
export class Crypto {
public static generatePrivateKey(): Uint8Array {
return CryptoUtils.generatePrivateKey();
}
public static getPublicKey(privateKey: Uint8Array): Uint8Array {
return CryptoUtils.publicKeyFromPrivateKey(privateKey);
}
public static getUserAddress(publicKey: Uint8Array): string {
return LocalAddress.fromPublicKey(publicKey).toString();
}
}
|
#include "ModuleInput.h"
#include "ModuleAudio.h"
ModuleInput::ModuleInput(bool start_enabled) : Module(start_enabled)
{
name = "input";
keyboard = new KEY_STATE[MAX_KEYS];
memset(keyboard, KEY_IDLE, sizeof(KEY_STATE) * MAX_KEYS);
memset(mouse_buttons, KEY_IDLE, sizeof(KEY_STATE) * MAX_MOUSE_BUTTONS);
}
// Destructor
ModuleInput::~ModuleInput()
{
delete[] keyboard;
}
// Called before render is available
bool ModuleInput::Init(pugi::xml_node& config)
{
LOG("Init SDL input event system");
bool ret = true;
SDL_Init(0);
this->config = config;
if (SDL_InitSubSystem(SDL_INIT_EVENTS) < 0)
{
LOG("SDL_EVENTS could not initialize! SDL_Error: %s\n", SDL_GetError());
ret = false;
}
if (SDL_InitSubSystem(SDL_INIT_GAMECONTROLLER) < 0)
{
LOG("SDL_INIT_GAMECONTROLLER could not initialize! SDL_Error: %s\n", SDL_GetError());
ret = false;
}
else
{
OpenController();
SDL_JoystickEventState(SDL_ENABLE);
}
return ret;
}
// Called every draw update
UpdateStatus ModuleInput::PreUpdate()
{
SDL_PumpEvents();
const Uint8* keys = SDL_GetKeyboardState(NULL);
for (int i = 0; i < MAX_KEYS; ++i)
{
if (keys[i] == 1)
{
usingGameController = false;
if (keyboard[i] == KEY_IDLE)
keyboard[i] = KEY_DOWN;
else
keyboard[i] = KEY_REPEAT;
}
else
{
if (keyboard[i] == KEY_REPEAT || keyboard[i] == KEY_DOWN)
keyboard[i] = KEY_UP;
else
keyboard[i] = KEY_IDLE;
}
}
Uint32 buttons = SDL_GetMouseState(&mouse_x, &mouse_y);
mouse_x /= SCREEN_SIZE;
mouse_y /= SCREEN_SIZE;
for (int i = 0; i < MAX_MOUSE_BUTTONS; ++i)
{
if (buttons & SDL_BUTTON(i))
{
usingGameController = false;
if (mouse_buttons[i] == KEY_IDLE)
mouse_buttons[i] = KEY_DOWN;
else
mouse_buttons[i] = KEY_REPEAT;
}
else
{
if (mouse_buttons[i] == KEY_REPEAT || mouse_buttons[i] == KEY_DOWN)
mouse_buttons[i] = KEY_UP;
else
mouse_buttons[i] = KEY_IDLE;
}
}
if (controllerHandles[controllerIndex] != nullptr)
{
UpdateControllerInput();
if (GetControllerAxis(SDL_CONTROLLER_AXIS_LEFTX) > 10000 || GetControllerAxis(SDL_CONTROLLER_AXIS_LEFTY) > 10000 ||
GetControllerAxis(SDL_CONTROLLER_AXIS_RIGHTX) > 10000 || GetControllerAxis(SDL_CONTROLLER_AXIS_RIGHTY) > 10000) usingGameController = true;
}
if (usingGameController)
{
SDL_ShowCursor(0);
}
else
{
SDL_ShowCursor(1);
}
// Handle X button on window
SDL_Event event;
while (SDL_PollEvent(&event) != 0)
{
switch (event.type)
{
case(SDL_QUIT):// X(Close) Button event handler
{
app->ExitGame();
break;
}
case(SDL_CONTROLLERDEVICEADDED):
OpenController();
break;
}
}
// Toggle Global Debug
//if (GetKey(SDL_SCANCODE_F1) == KEY_DOWN)
//{
// app->isDebug = !app->isDebug;
//}
return UPDATE_CONTINUE;
}
// Called before quitting
bool ModuleInput::CleanUp()
{
LOG("Quitting SDL input event subsystem");
SDL_QuitSubSystem(SDL_INIT_EVENTS);
return true;
}
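// Scan the attached joysticks and open the first one that is a game controller.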
void ModuleInput::OpenController()
{
maxJoysticks = SDL_NumJoysticks();
controllerIndex = 0;
for (int JoystickIndex = 0; JoystickIndex < maxJoysticks; ++JoystickIndex)
{
if (!SDL_IsGameController(JoystickIndex))
{
continue;
}
if (controllerIndex >= MAX_CONTROLLERS)
{
break;
}
controllerHandles[controllerIndex] = SDL_GameControllerOpen(JoystickIndex);
usingGameController = true;
break;
}
}
void ModuleInput::UpdateControllerInput()
{
if (controllerHandles[controllerIndex] == nullptr) return;
// Temporary button state (only gives if it's pressed or not)
bool buttons[MAX_CONTROLLER_BUTTONS];
for (int i = 0; i < MAX_CONTROLLER_BUTTONS; i++)
{
// Get Button State
buttons[i] = SDL_GameControllerGetButton(controllerHandles[controllerIndex], sdlJoystickButtons[i]);
if (buttons[i] && i == 9)
{
printf("R2\n");
}
// If button is pressed
if (buttons[i] == true)
{
usingGameController = true;
if (joystickButtons[i] == KEY_IDLE)
joystickButtons[i] = KEY_DOWN; // KEY_DOWN if not pressed on previous frame
else
joystickButtons[i] = KEY_REPEAT;// KEY_REPEAT if pressed on previous frame
}
else
{
if (joystickButtons[i] == KEY_REPEAT || joystickButtons[i] == KEY_DOWN)
joystickButtons[i] = KEY_UP; // KEY_UP if pressed on previous frame
else
joystickButtons[i] = KEY_IDLE; // KEY_IDLE if not pressed on previous frame
}
}
}
|
def _check_symbol_format_simple(self, data):
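        """
        Heuristically check that `data` looks like a raw VxWorks 5/6 symbol
        table entry: a known symbol type byte, zeroed padding bytes, and
        non-null address fields.
        """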
if self._vx_version == 5:
sym_type = ord(data[14])
if sym_type not in vx_5_sym_types:
return False
if data[15] != '\x00':
return False
if data[12:14] != '\x00\x00':
return False
if data[4:8] == '\x00\x00\x00\x00':
return False
if data[8:12] == '\x00\x00\x00\x00':
return False
return True
elif self._vx_version == 6:
sym_type = ord(data[18])
if sym_type not in vx_6_sym_types:
return False
if data[19] != '\x00':
return False
if data[16:18] != '\x00\x00':
return False
if data[4:8] == '\x00\x00\x00\x00':
return False
return True
return False
|
#include "buttons.hpp"
const uint8_t PumpRelay = 14;
const uint8_t MotionSensor = 21;
Buttons::Buttons()
{
}
Buttons::~Buttons()
{
}
void Buttons::SetupButtons()
{
pinMode(TouchOnOff, INPUT);
pinMode(TouchManAut, INPUT);
pinMode(TouchPlus, INPUT);
pinMode(TouchMinus, INPUT);
}
void Buttons::ButtonLoop()
{
// Class variables
// Button Check
if (digitalRead(TouchOnOff) == 1)
{
Serial.println("ON ");
}
if (digitalRead(TouchManAut) == 1)
{
Serial.println("MANAUT ");
}
if (digitalRead(TouchPlus) == 1)
{
Serial.println("PLUS ");
}
if (digitalRead(TouchMinus) == 1)
{
Serial.println("MINUS ");
}
if (digitalRead(MotionSensor) == 1)
{
Serial.println("MOTION ");
}
    Serial.println(S_Menu ? "Menu ON" : "Menu OFF");
// On Off
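    // Falling-edge detection: fire once when the pin goes from HIGH to LOW.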
if ((digitalRead(TouchOnOff) == 0) && (TouchOnOffOld == 1))
{
TouchOnOffPN = 1;
}
else
{
TouchOnOffPN = 0;
}
TouchOnOffOld = digitalRead(TouchOnOff);
if (S_OnOff == true)
{
// Manual
if ((digitalRead(TouchManAut) == 0) && (TouchManAutOld == 1))
{
TouchManAutPN = 1;
}
else
{
TouchManAutPN = 0;
}
TouchManAutOld = digitalRead(TouchManAut);
// Plus
if ((digitalRead(TouchPlus) == 0) && (TouchPlusOld == 1))
{
TouchPlusPN = 1;
}
else
{
TouchPlusPN = 0;
}
TouchPlusOld = digitalRead(TouchPlus);
// Minus
if ((digitalRead(TouchMinus) == 0) && (TouchMinusOld == 1))
{
TouchMinusPN = 1;
}
else
{
TouchMinusPN = 0;
}
TouchMinusOld = digitalRead(TouchMinus);
}
else
{
// Powerled
strip.setPixelColor(0, strip.Color(255, 225, 255));
strip.show();
}
// BUTTONS
if (TouchOnOffPN == 1)
{
OnOff();
}
if ((S_Menu == false) && (TouchManAutPN == 1))
{
pump.Man_Aut();
}
if ((S_Menu == false) && (TouchPlusPN == 1))
{
neopixel.Settings();
}
if ((S_Menu == true) && (TouchPlusPN == 1))
{
neopixel.Plus();
}
if ((S_Menu == false) && (TouchMinusPN == 1))
{
neopixel.Settings();
}
if ((S_Menu == true) && (TouchMinusPN == 1))
{
neopixel.Minus();
}
}
// All Off
void Buttons::All_Off()
{
digitalWrite(PumpRelay, false);
S_ManAut = false;
Step_Manual = 10;
Step_Automatic = 20;
S_Menu = false;
Step_Menu = 40;
neopixel.colorWipeAll(strip.Color(0, 0, 0), 50); // Off
}
// OnOff Button Code
void Buttons::OnOff()
{
    if (S_OnOff == false)
    {
        S_OnOff = true;
neopixel.colorWipeAll(strip.Color(0, 0, 127), 50); // Soft Blue
S_ManAut = true;
}
else
{
S_OnOff = false;
All_Off();
}
}
Buttons buttons;
|
// QueueEntityWithAutoForward configures the queue to automatically forward messages to the specified target.
//
// The ability to AutoForward to a target requires the connection have management authorization. If the connection
// string or Azure Active Directory identity used does not have management authorization, an unauthorized error will be
// returned on the PUT.
func QueueEntityWithAutoForward(target Targetable) QueueManagementOption {
return func(q *QueueDescription) error {
uri := target.TargetURI()
q.ForwardTo = &uri
return nil
}
}
|
import { Struct } from '@polkadot/types/codec';
import { getTypeRegistry, u32, u128, GenericAccountId } from '@polkadot/types';
import { BlockNumber, AccountId, Balance } from '@polkadot/types/interfaces';
import { MemberId, Role } from './members';
// re-export Role
export { Role } from './members';
export class Actor extends Struct {
constructor (value?: any) {
super({
member_id: MemberId,
role: Role,
account: GenericAccountId,
joined_at: u32, // BlockNumber
}, value);
}
get member_id (): MemberId {
return this.get('member_id') as MemberId;
}
get role (): Role {
return this.get('role') as Role;
}
get account (): AccountId {
return this.get('account') as AccountId;
}
get joined_at (): BlockNumber {
return this.get('joined_at') as BlockNumber;
}
}
export type Request = [AccountId, MemberId, Role, BlockNumber];
export type Requests = Array<Request>;
export class RoleParameters extends Struct {
constructor (value?: any) {
super({
min_stake: u128, // Balance,
min_actors: u32,
max_actors: u32,
reward: u128, // Balance,
reward_period: u32, // BlockNumber,
bonding_period: u32, // BlockNumber,
unbonding_period: u32, // BlockNumber,
min_service_period: u32, // BlockNumber,
startup_grace_period: u32, // BlockNumber,
entry_request_fee: u128, // Balance
}, value);
}
get min_stake (): Balance {
return this.get('min_stake') as Balance;
}
get max_actors (): u32 {
return this.get('max_actors') as u32;
}
get min_actors (): u32 {
return this.get('min_actors') as u32;
}
get reward (): Balance {
return this.get('reward') as Balance;
}
get reward_period (): BlockNumber {
return this.get('reward_period') as BlockNumber;
}
get unbonding_period (): BlockNumber {
return this.get('unbonding_period') as BlockNumber;
}
get bonding_period (): BlockNumber {
return this.get('bonding_period') as BlockNumber;
}
get min_service_period (): BlockNumber {
return this.get('min_service_period') as BlockNumber;
}
get startup_grace_period (): BlockNumber {
return this.get('startup_grace_period') as BlockNumber;
}
get entry_request_fee (): Balance {
return this.get('entry_request_fee') as Balance;
}
}
export function registerRolesTypes () {
try {
const typeRegistry = getTypeRegistry();
typeRegistry.register({
RoleParameters,
Request: '(AccountId, MemberId, Role, BlockNumber)',
Requests: 'Vec<Request>',
Actor
});
} catch (err) {
console.error('Failed to register custom types of roles module', err);
}
}
|
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <algorithm>
using namespace std;
__int64 jc(__int64 m)
{
__int64 i;
__int64 s=0;
for(i=m; i>0; i--)
s+=i;
if(m<0)
s=0;
return s;
}
int main()
{
__int64 c[2200]= {0},s[2200]= {0};
__int64 t;
__int64 i;
__int64 a,b;
scanf("%I64d",&t);
__int64 sum=0;
for(i=0; i<t; i++)
{
scanf("%I64d%I64d",&a,&b);
c[a-b+1000]++;
s[a+b]++;
}
for(i=0; i<2000; i++)
{
if(c[i]!=0||s[i]!=0)
{
sum=sum+jc(c[i]-1)+jc(s[i]-1);
}
}
printf("%I64d\n",sum);
}
|
#pragma once
#include "SearchFieldBinding.h"
#include "ui/widget/button/Button.h"
#include "ui/widget/text/TextField.h"
namespace Ghurund::Editor {
using namespace Ghurund::UI;
class SearchFieldLayout:public SearchFieldBinding {
private:
EventHandler<Control> stateHandler = [this](Control& control) {
Hint->Visible = QueryField->Focused;
return true;
};
public:
SearchFieldLayout(Control* layout):SearchFieldBinding(layout) {
QueryField->StateChanged.add(stateHandler);
}
~SearchFieldLayout() {
QueryField->StateChanged.remove(stateHandler);
}
};
}
|
{-# LANGUAGE BangPatterns, DataKinds, FlexibleContexts, FlexibleInstances #-}
{-# LANGUAGE GADTs, MultiParamTypeClasses, NoImplicitPrelude #-}
{-# LANGUAGE NoMonomorphismRestriction, OverloadedStrings, PolyKinds #-}
{-# LANGUAGE TypeFamilies, UndecidableInstances, ViewPatterns #-}
{-# OPTIONS_GHC -fno-warn-type-defaults -fno-warn-orphans -Wall #-}
module Main where
import Algebra.Algorithms.Groebner
import Algebra.Algorithms.Groebner.Signature
import Algebra.Internal
import Algebra.Prelude.Core
import Control.Foldl (Fold)
import qualified Control.Foldl as Fl
import Control.Lens (_1, _2)
import Data.Monoid (Dual (..))
import Data.Sequence (Seq ((:<|), (:|>)))
import qualified Data.Sequence as Seq
import Data.Sized.Builtin (unsized)
import Gauge.Main
type Comparer = Fold (Int, Int) Ordering
gradeF :: Comparer
gradeF = compare <$> Fl.handles _1 Fl.sum <*> Fl.handles _2 Fl.sum
{-# INLINE gradeF #-}
lexF :: Comparer
lexF = Fl.foldMap (uncurry compare) id
revlexF :: Comparer
revlexF = Fl.foldMap (Dual . uncurry (flip compare)) getDual
{-# INLINE revlexF #-}
data FoldlGrevlex = FoldlGrevlex
instance SingI n => IsOrder n FoldlGrevlex where
cmpMonomial _ (unsized -> m) (unsized -> n) =
Fl.fold (gradeF <> revlexF) $ Seq.zip m n
{-# INLINE cmpMonomial #-}
instance SingI n => IsMonomialOrder n FoldlGrevlex
newtype Seq' a = Seq' { runSeq' :: Seq a }
instance Foldable Seq' where
foldl f z (Seq' (as :|> a)) = f (foldl f z (Seq' as)) a
foldl _ z _ = z
foldr f z (Seq' (a :<| as)) = f a (foldr f z (Seq' as))
foldr _ z _ = z
foldMap f (Seq' (as :|> a)) = foldMap f (Seq' as) <> f a
foldMap _ _ = mempty
data Seqs a where
Seqs :: Seq a -> Seq b -> Seqs (a, b)
instance Foldable Seqs where
foldl f z (Seqs (as :|> a) (bs :|> b)) = f (foldl f z (Seqs as bs)) (a, b)
foldl _ z _ = z
foldr f z (Seqs (a :<| as) (b :<| bs)) = f (a, b) (foldr f z (Seqs as bs))
foldr _ z _ = z
foldlSeq :: Fold a b -> Seq a -> b
foldlSeq (Fl.Fold step begin done) s = done $! loop s
where
loop (as :|> a) = (step $ loop as) $! a
loop _ = begin
{-# INLINE foldlSeq #-}
data FoldlSeq'Grevlex = FoldlSeq'Grevlex
instance SingI n => IsOrder n FoldlSeq'Grevlex where
cmpMonomial _ (unsized -> m) (unsized -> n) =
foldlSeq (gradeF <> revlexF) $ Seq.zip m n
{-# INLINE cmpMonomial #-}
instance SingI n => IsMonomialOrder n FoldlSeq'Grevlex
data FoldlSeqsGrevlex = FoldlSeqsGrevlex
foldlSeqs :: Fold (a, b) c -> Seqs (a, b) -> c
foldlSeqs (Fl.Fold step begin done) (Seqs as bs) =
done $! loop as bs
where
loop (xs :|> x) (ys :|> y) = step (loop xs ys) (x, y)
loop _ _ = begin
{-# INLINE foldlSeqs #-}
instance SingI n => IsOrder n FoldlSeqsGrevlex where
cmpMonomial _ (unsized -> m) (unsized -> n) =
foldlSeqs (gradeF <> revlexF) $ Seqs m n
{-# INLINE cmpMonomial #-}
instance SingI n => IsMonomialOrder n FoldlSeqsGrevlex
grevlexHW :: Seq Int -> Seq Int -> Int -> Int -> Ordering -> Ordering
grevlexHW (as :|> a) (bs :|> b) !accl !accr EQ =
grevlexHW as bs (accl + a) (accr + b) $ compare b a
grevlexHW as bs !accl !accr cmp = compare (sum as + accl) (sum bs + accr) <> cmp
data HandWrittenGrevlex = HandWrittenGrevlex
instance SingI n => IsOrder n HandWrittenGrevlex where
cmpMonomial _ (unsized -> m) (unsized -> n) = grevlexHW m n 0 0 EQ
{-# INLINE cmpMonomial #-}
instance SingI n => IsMonomialOrder n HandWrittenGrevlex
i2 :: [OrderedPolynomial (Fraction Integer) Grevlex 5]
i2 = [35 * y^4 - 30*x*y^2 - 210*y^2*z + 3*x^2 + 30*x*z - 105*z^2 +140*y*t - 21*u
,5*x*y^3 - 140*y^3*z - 3*x^2*y + 45*x*y*z - 420*y*z^2 + 210*y^2*t -25*x*t + 70*z*t + 126*y*u
]
where [t,u,x,y,z] = vars
mkTC :: (IsMonomialOrder 5 ord) => String -> ord -> Benchmark
mkTC name ord =
env (return $ toIdeal $ map (changeOrder ord) i2) $ \ideal ->
bgroup name [ bench "calcGroebnerBasis" $ nf calcGroebnerBasis ideal
, bench "F5" $ nf f5 ideal
]
main :: IO ()
main =
defaultMainWith defaultConfig
[ mkTC "default" Grevlex
, mkTC "foldl" FoldlGrevlex
, mkTC "foldl-custom" FoldlSeq'Grevlex
, mkTC "foldl-seqs" FoldlSeqsGrevlex
]
{-# ANN module ("HLint: ignore Use camelCase" :: String) #-}
|
package com.senderman.lastkatkabot.config;
public interface BotConfig {
String token();
String username();
String timezone();
long mainAdminId();
long feedbackChannelId();
long notificationChannelId();
String bncHelpPictureId();
String helloGifId();
String leaveStickerId();
}
|
""" Version information for Phil """
MAJOR = 0
MINOR = 0
PATCH = 0
|
def ble_connection(self, addr, service, characteristic):
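        """Keep a BLE connection alive: connect to the peripheral, enable
        notifications on the given characteristic, and retry on any failure."""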
while True:
try:
self.helpers.logger.info(
"Attempting BLE connection to "+addr)
peripheral = btle.Peripheral(addr)
peripheral.setMTU(512)
delegate = BtAgentDelegate()
peripheral.withDelegate(delegate)
serv = peripheral.getServiceByUUID(service)
charac = serv.getCharacteristics(characteristic)[0]
peripheral.writeCharacteristic(charac.valHandle + 1, b"\x01\x00")
self.helpers.logger.info(
"BLE connection to " + addr + " established")
if addr in self.ble_tracker:
self.ble_tracker[addr]["last_seen"] = datetime.now()
self.helpers.logger.info(
addr + " connection timestamp updated")
self.notification_loop(peripheral)
            except Exception as e:
                self.helpers.logger.info(
                    "BLE connection to " + addr + " failed: " + str(e))
time.sleep(1.0)
continue
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import csv
from pathlib import Path
import typing as ty
import attr
from attr.validators import optional
from attr.validators import instance_of as is_a
from rnacentral_pipeline import writers
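# Ontology terms dropped or remapped before export; see correct_go_term below.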
EXCLUDED_TERMS = {
"GO:0008049",
"GO:0042981",
"GO:0042749",
"GO:0050789",
"GO:0006810",
"GO:0001263",
"SO:0004725",
"SO:0010039",
}
EXCLUDED_MIRNA = {"GO:0035068", "GO:0006396"}
GO_REPLACEMENTS = {
"GO:0044005": "GO:0051819",
}
def empty_to_none(raw):
"""
    Convert an empty string to None; otherwise return the string unchanged.
"""
if not raw:
return None
return raw
@attr.s(frozen=True)
class RfamDatabaseLink(object):
"""
This class represents the entries in the database_link in Rfam.
"""
rfam_family = attr.ib(validator=is_a(str))
database = attr.ib(validator=is_a(str))
comment = attr.ib(
converter=empty_to_none,
validator=optional(is_a(str)),
)
external_id = attr.ib(validator=is_a(str))
other = attr.ib(
converter=empty_to_none,
validator=optional(is_a(str)),
)
family_type = attr.ib(validator=is_a(str))
@classmethod
def from_row(cls, row):
"""
Build an object from a dictionary that has the same names as the
database columns.
"""
database = row["db_id"]
external_id = row["db_link"]
if database in {"SO", "GO"}:
external_id = "%s:%s" % (database, row["db_link"])
return cls(
rfam_family=row["rfam_acc"],
database=database,
comment=row["comment"],
external_id=external_id,
other=row["other_params"],
family_type=row["type"],
)
def from_ontology(self):
"""
Check if this instance comes from a known ontology (SO or GO).
"""
return self.database in {"SO", "GO"}
def writeable_go_mappings(self):
if self.database != "GO":
return
yield [
self.rfam_family,
self.external_id,
]
def writeable_ontology_terms(self):
if not self.from_ontology():
return
yield [self.external_id]
@attr.s()
class Writer:
terms = attr.ib()
rfam_ontology_mappings = attr.ib()
def write(self, references):
for reference in references:
self.terms.writerows(reference.writeable_ontology_terms())
self.rfam_ontology_mappings.writerows(reference.writeable_go_mappings())
def parse(handle: ty.IO) -> ty.Iterable[RfamDatabaseLink]:
"""
Parse the given filehandle to produce all database link objects in the
file.
"""
reader = csv.DictReader(handle, delimiter="\t")
return map(RfamDatabaseLink.from_row, reader)
def correct_go_term(reference):
"""
This will correct the reference to GO if required. Basically, this will
exclude some terms and replace others.
"""
go_term_id = reference.external_id
if go_term_id in EXCLUDED_TERMS:
return None
if reference.family_type == "Gene; miRNA;" and go_term_id in EXCLUDED_MIRNA:
return None
go_term_id = GO_REPLACEMENTS.get(go_term_id, go_term_id)
return attr.evolve(reference, external_id=go_term_id)
def ontology_references(handle) -> ty.Iterable[RfamDatabaseLink]:
"""
Produce an iterable of all ontology terms from Rfam.
"""
for reference in parse(handle):
if not reference.from_ontology():
continue
if reference.database in {"SO", "GO"}:
reference = correct_go_term(reference)
if not reference:
continue
yield reference
def from_file(handle: ty.IO, output: Path):
with writers.build(Writer, output) as writer:
writer.write(ontology_references(handle))
|
The NCAA basketball season is coming to a close. Only sixteen teams remain. Each team only needs four more wins to claim the championship. There is plenty of talent remaining in the tournament for those who enjoy watching NBA prospects. But I decided to list the players for each team that are the most important to their quest for a championship, as opposed to simply the most talented.
Aaron Harrison – Kentucky
Harrison’s game winning shot in last year’s Final Four (Kevin Jairaj/USA TODAY Sports)
Kentucky might be the hardest team when it comes to identifying which individual player is most important to their success. How do you pick one player out of a team made up of nine future NBA draft picks? You could make a good argument for just about any of them. Karl-Anthony Towns is probably the most talented of the group. Willie Cauley-Stein is the most important player for the Wildcats when they're on defense. The offense runs through Tyler Ulis and Andrew Harrison. However, I would pick Andrew's twin, Aaron, as Kentucky's most important player. He led the team in scoring and minutes played this season. He just about single-handedly carried them to the championship game a year ago. If the team is struggling through a cold shooting night, Aaron almost always seems to hit a clutch shot to get his teammates going. I think they would struggle more without Aaron Harrison than without any of their other stars.
Devin Williams – West Virginia
Coach Bob Huggins adopted the full-court press strategy this season when he saw the roster he had to work with. Out of the 13 players that saw substantial time for West Virginia, only four of them were taller than 6'7". Huggins knew that with a team this small, he had to use it to his advantage. Implementing the press meant constant energy and effort out of everyone on the court. Therefore, only three players for West Virginia saw more than 22 minutes per game. One of those players was 6'9" Devin Williams. For a player of Williams' size to play more than 24 minutes per game at an energy level that high is insane. Williams is even more important to the Mountaineers because he leads the small team in rebounding. With a team whose average height is a little more than 6'3", Williams' big frame is extremely important. He may never have been as important as he will be on Thursday when they play Kentucky, the tallest team in the nation.
Jerian Grant – Notre Dame
How did Notre Dame go from a 15-17 team last year to a 31-5 record and ACC tournament champions this season? There was one key piece missing last season. Jerian Grant only played 12 games before being suspended for an academic issue. Grant came back this year with a vengeance. If it wasn’t for the consensus number one NBA draft pick being in the same conference, Grant would’ve won ACC player of the year with ease. Grant played nearly 37 minutes per game, led the team in scoring and finished with the seventh most assists in the entire nation. Notre Dame goes through Grant and they probably wouldn’t have made the tournament without him.
Fred Van Vleet – Wichita State
Out of the players remaining, Fred Van Vleet has the fourth highest points per game in the tournament with 22 a game. This is unexpected because Van Vleet only averaged 12.7 points per game during the regular season, which didn’t lead his team. He tied his career high in points with 27 in their first round game against Indiana. On top of his recent scoring streak, Van Vleet finished the season with a top ten assist-to-turnover ratio in the nation. This offense runs through Van Vleet, and when leading scorer Ron Baker has an off night, the Shockers turn to Van Vleet to provide a spark. He’s been that and then some this postseason.
Frank Kaminsky – Wisconsin
Photo: Todd Rosenburg for Sports Illustrated
The Naismith player of the year award should go to Kaminsky. Frank the Tank has been a force all season long for the Badgers. He had an 18.2/8.0/2.7 split (points/rebounds/assists) and a 59.6/39.5/75.9 percentage split (FG/3PT/FT). For those keeping score at home, that is ridiculous. Everything Wisconsin does goes through Kaminsky. Defenses' entire game plans are built around stopping him, and they still fail. Other Badgers like Sam Dekker and Nigel Hayes get so many open looks because teams are so focused on Kaminsky. Frank missed only one game this season, and Wisconsin lost to the worst team in their conference in that game. No player is more important to their team than The Tank.
Brice Johnson – North Carolina
For any other game of the season, I would’ve picked Marcus Paige as the most important player for the Tar Heels. Paige leads the team in scoring and minutes, and UNC is a much better team when Paige is hitting his shots. This is because the Tar Heels get a combined 24.9 points out of their front court combination of Brice Johnson and Kennedy Meeks. However, Meeks is highly questionable for Thursday’s game against Wisconsin with a knee injury. Even if Meeks responds well in practice and suits up, he will more than likely be less than 100%. This makes Brice Johnson extremely important for North Carolina. Johnson now has less help to stop Kaminsky on defense and must make up for the 11.7 points per game and 7.5 rebounds that the team would lose without Meeks.
Dee Davis – Xavier
Xavier is a well balanced team. They have six players that average more than eight points per game. This makes it difficult to pick one as most important. I decided to go with Dee Davis, the team’s assist leader. When you have a team with so many scoring options, it is important to have a point guard that can distribute the ball to all of the scorers equally. Davis is the only Musketeer that plays more than 30 minutes a game. He is their primary ball-handler and the key to their success.
T.J. McConnell – Arizona
Photo: rushthecourt.net
McConnell is the backbone for this Arizona team. He is the lone senior in the Wildcats’ rotation and his leadership is vital to their success. He finished the season in the top ten in assists and assist-to-turnover ratio. There are many talented mouths to feed on Arizona’s roster and McConnell makes sure they all get their opportunities. McConnell might be the only starter for Arizona that doesn’t see the NBA, but he is also the key reason they have an excellent shot at a championship.
Trevor Lacey – NC State
NC State might be the closest thing this Sweet Sixteen has to a Cinderella team. UCLA is the highest seeded team remaining, but NC State registered the biggest upset when they defeated top seeded Villanova. The Wolfpack had a very up and down season, but wins against top teams like Duke and North Carolina got them into the tournament. The team is full of talent, but it has been difficult at times for them to all mesh. Lacey, a transfer from Alabama, is the leading scorer and the team looks to him when they need a big shot. For the Wolfpack to have a chance at seeing the Elite Eight, Lacey needs to hit jump shots against the strong defense of Louisville.
Wayne Blackshear – Louisville
Blackshear is nowhere near the most talented player on Louisville. If you asked Louisville fans who the most frustrating player on the team is, they would probably yell Blackshear’s name before you could finish your question. In a way, this makes him the most important piece. When Blackshear’s game is on, Louisville can be as dangerous as any team in the nation. Since Chris Jones’ dismissal, the team has heavily relied on Montrezl Harrell and Terry Rozier, both of whom are NBA prospects. Blackshear is a senior that will never wear an NBA jersey, but if he steps his game up when defenses are keyed in on other players, then the Cardinals can get to the Final Four.
TaShawn Thomas – Oklahoma
Oklahoma is the home to the Big 12 player of the year, Buddy Hield. You would think the guy who was awarded as the best player in his conference would be the most important to his team, but I disagree. Hield is no doubt the most talented player on the team, but I think the Sooners have enough talent to make up for the scoring lost if Hield is shut down by an opposing defense. Last season, the Sooners were ousted in the first round. The difference this season is the addition of TaShawn Thomas. Thomas gives them another big body down low to offset the plethora of shooting Oklahoma has. Going against Michigan State and the beast that is Branden Dawson, Oklahoma needs all the size they can get.
Travis Trice and Branden Dawson- Michigan State
Photo: http://msutoday.msu.edu/
OK, I cheated. Instead of picking the one most important player for Sparty, I had to pick the two seniors that are playing in their fourth career Sweet Sixteen. Trice is the main ball-handler, leads the team in assists and is a deadly shooter that can spark the entire team. Dawson only stands at 6’6″, the average size for a small forward, but he’s an animal who plays like he’s 6’10”. He snags more than nine rebounds a game and brings an energy that nobody can imitate. If Michigan State makes the Final Four, it will be behind their senior leadership.
Quinn Cook – Duke
Jahlil Okafor is the consensus number one NBA draft pick and probably the most talented player in the nation. Tyus Jones runs the offense and led the team in assists. Justise Winslow is likely a top ten draft pick, plays excellent defense and brings energy to the entire team. So why is Cook my most important player for Duke? There’s one thing that sets him apart from Okafor, Jones and Winslow: he’s a senior. For as talented as those three guys are, Cook brings leadership and hits big shots to settle down his young teammates. He has made a three-point shot in a school record 45 straight games. To set a record at Duke for anything is crazy impressive. Cook plays the most minutes for Duke and their freshmen will look to him if they are in a tough spot during this tournament.
Delon Wright – Utah
Delon Wright is one of my favorite players in the tournament. He’s a senior leader that leads Utah in minutes, points, assists and he’s second in rebounds. Utah wouldn’t be close to where they are today without Wright. The only knock on Wright has been that he is too passive. Wright has boatloads of talent, but is constantly trying to set his teammates up. He has struggled to score so far in this tournament, but his ability to make the players around him better have kept the Utes alive. I expect him to have a big game against Duke. If not, Utah is in trouble.
Bryce Alford – UCLA
When Alford went to UCLA, nobody turned their head. He is the head coach’s son and many thought he only got the scholarship for that reason. He wasn’t highly ranked or recruited, but got to attend one of the most historic basketball schools because his father was the coach. Now in his sophomore season, Alford is the most important Bruin. In their first round game against SMU, Alford went 9-11(!!!) from the 3-point line and carried UCLA to the second round. Going into the second round, Alford knew the defense’s mission would be to stop him from shooting. So he let his teammates do the work and recorded five assists. Alford can beat teams single-handedly with his shooting and simultaneously opens up opportunities for his teammates.
Kevin Pangos – Gonzaga
Kyle Wiltjer had a dominant season for the Zags, but Pangos runs the show. Pangos is a senior that feels like he's been at Gonzaga forever. He has started every game of his college career except one, which was the very first game of his freshman season. Pangos had the second-best assist-to-turnover ratio in the nation while shooting 44.4% from 3. Pangos is the backbone of Gonzaga and needs to play exceptionally well in order for Gonzaga to shake the overrated title they've carried all season long.
I think this would be good for Bleacher Report or Grantland.
|
// Trace returns the source code line and function name (of the calling function)
func Trace() (line string) {
pc := make([]uintptr, 15)
n := runtime.Callers(2, pc)
frames := runtime.CallersFrames(pc[:n])
frame, _ := frames.Next()
	return fmt.Sprintf("%s:%d %s\n", frame.File, frame.Line, frame.Function)
}
|
/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_GATHER_H_
#define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_GATHER_H_
#include <cstring>

#include "ruy/profiler/instrumentation.h"  // provides ruy::profiler::ScopeLabel
#include "tensorflow/lite/kernels/internal/common.h"
namespace tflite {
namespace reference_ops {
template <typename T, typename CoordsT = int32>
inline void Gather(const tflite::GatherParams& op_params,
const RuntimeShape& input_shape, const T* input_data,
const RuntimeShape& coords_shape, const CoordsT* coords_data,
const RuntimeShape& output_shape, T* output_data) {
ruy::profiler::ScopeLabel label("Gather");
int axis = op_params.axis;
if (axis < 0) {
axis += input_shape.DimensionsCount();
}
TFLITE_DCHECK_GE(axis, 0);
TFLITE_DCHECK_LT(axis, input_shape.DimensionsCount());
int batch_dims = op_params.batch_dims;
if (batch_dims < 0) {
batch_dims += coords_shape.DimensionsCount();
}
TFLITE_DCHECK_GE(batch_dims, 0);
TFLITE_DCHECK_LT(batch_dims, input_shape.DimensionsCount());
TFLITE_DCHECK_LE(batch_dims, coords_shape.DimensionsCount());
TFLITE_DCHECK_GE(axis, batch_dims);
for (int i = 0; i < batch_dims; ++i) {
TFLITE_DCHECK_EQ(input_shape.Dims(i), coords_shape.Dims(i));
}
const int axis_size = input_shape.Dims(axis);
int batch_size = 1;
for (int i = 0; i < batch_dims; ++i) {
batch_size *= input_shape.Dims(i);
}
int outer_size = 1;
for (int i = batch_dims; i < axis; ++i) {
outer_size *= input_shape.Dims(i);
}
int inner_size = 1;
for (int i = axis + 1; i < input_shape.DimensionsCount(); ++i) {
inner_size *= input_shape.Dims(i);
}
int coord_size = 1;
for (int i = batch_dims; i < coords_shape.DimensionsCount(); ++i) {
coord_size *= coords_shape.Dims(i);
}
for (int batch = 0; batch < batch_size; ++batch) {
for (int outer = 0; outer < outer_size; ++outer) {
for (int i = 0; i < coord_size; ++i) {
        // Check the coordinate actually read below (batch * coord_size + i),
        // so out-of-range indices are caught for every batch, not just the first.
        TFLITE_DCHECK_GE(coords_data[batch * coord_size + i], 0);
        TFLITE_DCHECK_LT(coords_data[batch * coord_size + i], axis_size);
// TODO(rsun): replace memcpy with a for loop
std::memcpy(
output_data +
(((batch * outer_size) + outer) * coord_size + i) * inner_size,
input_data + (((batch * outer_size) + outer) * axis_size +
coords_data[batch * coord_size + i]) *
inner_size,
sizeof(T) * inner_size);
}
}
}
}
} // namespace reference_ops
} // namespace tflite
#endif // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_GATHER_H_
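A minimal call sketch for the reference op (shapes and data are illustrative; RuntimeShape and GatherParams come from the TFLite internal headers):

// Gather rows 2 and 0 from a 3x2 input along axis 0 (batch_dims = 0).
tflite::GatherParams params;
params.axis = 0;
params.batch_dims = 0;
const tflite::RuntimeShape input_shape({3, 2});
const float input_data[] = {1, 2, 3, 4, 5, 6};
const tflite::RuntimeShape coords_shape({2});
const int32_t coords_data[] = {2, 0};
const tflite::RuntimeShape output_shape({2, 2});
float output_data[4];  // expected contents: {5, 6, 1, 2}
tflite::reference_ops::Gather(params, input_shape, input_data, coords_shape,
                              coords_data, output_shape, output_data);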
|
/**
* Iterates through all edges and removes the edge if a match is found with
* the MouseEvent.target()
* @param me - MouseEvent
*/
private void removeEdgeIfFound(MouseEvent me) {
Iterator<Edge> edgeIter = edgeList.iterator();
while (edgeIter.hasNext()) {
Edge element = edgeIter.next();
if (element.equals(me.getTarget())) {
log.appendText("Edge " + element.getEdgeID() + " removed!\n");
Node nodeOne = element.getConnectedNodes().get(0);
Node nodeTwo = element.getConnectedNodes().get(1);
nodeOne.removeConnectedNode(nodeTwo);
nodeOne.removeConnectedEdge(element);
nodeTwo.removeConnectedNode(nodeOne);
nodeTwo.removeConnectedEdge(element);
simulationArea.getChildren().remove(element);
if (bestRouteEdgeList.contains(element)) {
for (Edge edge : edgeList) {
edge.setStroke(Color.BLACK);
edge.setStrokeWidth(1.0);
}
bestRouteEdgeList = new ArrayList<>();
}
edgeIter.remove();
}
}
}
|
import { prefixLogger } from '@graphql-portal/logger';
import { ApiDef } from '@graphql-portal/types';
import { NextFunction, Request, RequestHandler, Response } from 'express';
import {
DocumentNode,
GraphQLError,
parse,
TypeInfo,
ValidationContext,
visit,
Visitor,
visitWithTypeInfo,
} from 'graphql';
import CostAnalysis from 'graphql-cost-analysis/dist/costAnalysis';
import depthLimit from 'graphql-depth-limit';
import { tracer } from '../../tracer';
import { apiSchema } from '../../server/router';
import { isIntrospectionRequest, throwError } from '../utils';
import RequestCostTool from './request-cost.tool';
const logger = prefixLogger('cost-analysis');
export class CustomCostAnalysis extends CostAnalysis {
public cost = 0;
// eslint-disable-next-line no-useless-constructor
public constructor(context: ValidationContext, options: any) {
super(context, options);
}
}
const rateLimitMiddleware = (apiDef: ApiDef): RequestHandler => {
const maxDepth = apiDef.depth_limit || 10;
const maxCost = apiDef.request_complexity_limit || 1000;
const costRate = apiDef.rate_limit?.complexity || Infinity;
const costRatePer = apiDef.rate_limit?.per || 3600;
logger.debug(`max depth: ${maxDepth}`);
logger.debug(`max cost: ${maxCost}`);
logger.debug(`rate: ${costRate} per ${costRatePer} second(s)`);
const requestCostTool = new RequestCostTool(costRatePer);
return async function rateLimitMiddleware(req: Request, res: Response, next: NextFunction): Promise<void> {
const { body } = req;
if (isIntrospectionRequest(req)) {
return next();
}
const span = tracer?.startSpan(`rateLimit middleware`, { childOf: req.context?.tracerSpan });
const schema = apiSchema[apiDef.name];
let query: DocumentNode;
try {
query = parse(body.query);
} catch (error) {
logger.error(error.message);
span?.log({ error });
span?.finish();
return next();
}
const typeInfo = new TypeInfo(schema);
const validationContext = new ValidationContext(schema, query, typeInfo, () => {});
let depth = 0;
const setDepth = (depths: { [key: string]: number }): void => {
depth = Math.max(...Object.values(depths));
};
depthLimit(maxDepth, {}, setDepth)(validationContext);
if (Number.isNaN(depth)) {
return throwError(new GraphQLError(`The query exceeds maximum operation depth of ${maxDepth}`), undefined, span);
}
logger.debug(`request ${req.id} from ${req.ip}: depth ${depth}`);
const visitor = new CustomCostAnalysis(validationContext, {
maximumCost: maxCost,
defaultCost: 1,
});
visit(query, visitWithTypeInfo(typeInfo, visitor as Visitor<any>));
const { cost } = visitor;
if (cost > maxCost) {
return throwError(new GraphQLError(`The query exceeds maximum complexity of ${maxCost}`), undefined, span);
}
logger.debug(`request ${req.id} from ${req.ip}: cost ${cost}`);
if (!Number.isFinite(costRate)) {
return next();
}
const totalCost = (await requestCostTool.getTotalCost(req)) + cost;
if (totalCost > costRate) {
return throwError(
new GraphQLError(`Too many requests. Exceeded complexity limit of ${costRate} per ${costRatePer} seconds`),
429,
span
);
}
logger.debug(`request ${req.id} from ${req.ip}: totalCost ${totalCost}`);
await requestCostTool.saveCost(req, cost);
span?.log({ cost });
span?.finish();
return next();
};
};
export default rateLimitMiddleware;
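A hypothetical mounting sketch (the apiDef literal and import path are illustrative, not the package's documented setup):

import express from 'express';
import rateLimitMiddleware from './rate-limit.middleware'; // illustrative path

const app = express();
const apiDef = {
  name: 'my-api',
  depth_limit: 8,
  request_complexity_limit: 500,
  rate_limit: { complexity: 5000, per: 60 },
} as any; // stand-in for a full ApiDef

app.use('/graphql', express.json(), rateLimitMiddleware(apiDef));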
|
From the Mayan calendar and a runaway planet called "Nibiru" to killer asteroids and theories about galactic alignments, the internet is full of talk about the world ending on December 21.
NASA scientists recently addressed some of the most pervasive of these rumors around the dubious date. Take a look:
Mayan calendar
The Mayan Long Count calendar began around 3114 B.C.E. and is set to end on December 21 or 23 (depending on the translation). NASA scientist Mitzi Adams describes what the Mayans would have done had their civilization lasted and why there is no cause for alarm.
Planet Nibiru
Is an ancient Sumerian planet called "Nibiru" on a collision course with Earth? NASA scientist David Morrison explains how you can dispel this rumor all by yourself.
Galactic alignment
Will the Earth, Sun and center of the Milky Way line up in December? Yes! But as three astronomers explain, it happens every December and is not a sign of doomsday.
Solar apocalypse
Like any other star, the sun will eventually use up its hydrogen core, expand and engulf the Earth. But don't rush out to buy Red Giant insurance for your home by December 21 - you have about 5 BILLION years to prepare. Two NASA scientists explain.
Killer asteroid
There is a real, long-term concern about possible asteroid impacts, and that's why NASA has been scanning the heavens, cataloging them for nearly 20 years. And as two NASA scientists will tell you, not a single object has been found that's on a collision course with Earth.
Polar shift
One internet rumor about the world ending on December 21 is that the Earth will shift on its axis. Astronomer Andrew Fraknoi explains how it's just not possible to change the way our planet rotates in a single day.
|
import type { ElementContent } from "hast";
export const el: <T extends ElementContent>(t: T) => T = (t) => t;
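A short usage note: because the generic is bounded by ElementContent, the wrapper validates the literal while preserving its narrow inferred type instead of widening it to the union:

// Inferred as the literal text-node type, not the full ElementContent union.
const node = el({ type: "text", value: "hello" });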
|
// createtable takes two integer arguments, the number of initial array slots
// and the initial number of hash slots - both default to 0.
static int l_createtable (lua_State *L) {
int narr = luaL_optint(L,1,0);
int nrec = luaL_optint(L,2,0);
lua_createtable(L,narr,nrec);
return 1;
}
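A hedged registration sketch (the global name and the Lua-side call are illustrative):

/* C side: expose l_createtable to scripts as "createtable". */
lua_pushcfunction(L, l_createtable);
lua_setglobal(L, "createtable");

/* Lua side:
 *   local t = createtable(16, 4)  -- preallocate 16 array slots and 4 hash slots
 */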
|
#ifndef CRC32_H
#define CRC32_H
#include <stddef.h>
#include <stdint.h>
class Crc32
{
typedef struct {
uint32_t init;
uint32_t polynomial;
} Config;
public:
Crc32();
Crc32(uint32_t init, uint32_t polynomial);
void addData(const void* data, size_t length);
uint32_t getHash(){return crc_;}
void reset();
private:
uint32_t crc_;
uint32_t unaligned_;
uint32_t byteCount_;
uint32_t lookup_[16][256];
Config cfg_;
void generateLookup();
};
#endif // CRC32_H
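A usage sketch, assuming the out-of-line definitions behave as their names suggest:

Crc32 crc;                         // default init value and polynomial
const char msg[] = "123456789";
crc.addData(msg, sizeof(msg) - 1); // feed 9 bytes
uint32_t hash = crc.getHash();     // CRC-32 of the message so far
crc.reset();                       // ready for the next message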
|
import { render, screen } from '@shared/tests';
import { createCustomPropsTest } from '@shared/tests/utils';
import React from 'react';
import { AnalyticalCardHeader } from '../AnalyticalCardHeader';
import { Text } from '../Text';
import { AnalyticalCard } from './index';
const Header = <AnalyticalCardHeader titleText="Header Title" />;
describe('Analytical Card', () => {
test('Render without Crashing', () => {
const { asFragment } = render(
<AnalyticalCard>
<Text>I'm a content!</Text>
</AnalyticalCard>
);
expect(screen.getByText("I'm a content!")).toBeInTheDocument();
expect(asFragment()).toMatchSnapshot();
});
test('with custom header', () => {
const { asFragment } = render(
<AnalyticalCard header={<div>I'm a header!</div>}>
<Text>I'm a content!</Text>
</AnalyticalCard>
);
expect(screen.getByText("I'm a content!")).toBeInTheDocument();
expect(screen.getByText("I'm a header!")).toBeInTheDocument();
expect(asFragment()).toMatchSnapshot();
});
test('with AnalyticalCardHeader', () => {
const { asFragment } = render(
<AnalyticalCard header={Header}>
<Text>I'm a content!</Text>
</AnalyticalCard>
);
expect(screen.getByText('Header Title')).toBeInTheDocument();
expect(screen.getByText("I'm a content!")).toBeInTheDocument();
expect(asFragment()).toMatchSnapshot();
});
createCustomPropsTest(AnalyticalCard);
});
|
from scipy.stats import binom

def decision_threshold(overall=0.99, accuracy=0.95, samples=1000):
    if samples == 0:
        return 0
    # Smallest k with P(X <= k) >= overall for X ~ Binomial(samples, 1 - accuracy):
    # an upper bound on the error count that holds with probability `overall`.
    k = binom.ppf(overall, samples, 1 - accuracy)
    return int(k)
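A worked call under the default parameters (the exact value depends on SciPy's quantile rounding):

# Mean error count is 1000 * 0.05 = 50; the 99th percentile lands near
# 50 + 2.33 * sqrt(47.5), i.e. roughly 66-67.
print(decision_threshold())           # -> about 66
print(decision_threshold(samples=0))  # -> 0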
|
// Destroy will destroy the container and all associated resources. Custom
// private keys or repositories will not be cleaned up.
func (g *Gerrit) Destroy() error {
defer g.cancel()
errs := errset.ErrSet{}
if g.Config.CleanupContainer && g.Container != nil {
errs = append(errs, g.Container.Terminate())
}
if g.SSH != nil {
errs = append(errs, g.SSH.Close())
}
for _, key := range g.Config.SSHKeys {
errs = append(errs, key.Remove())
}
return errs.ReturnValue()
}
|
#include<iostream> //the c++ standard library for stream input output
#include<cstdio> //the c standard library for standard input output
#include<cstdlib> //for the exit function
using namespace std;
class arr //arr class that holds each stack
{
public:
int a[100],b[100],c[100];
int topa,topb,topc;
}hanoi;
int moves=1; //counts the no. of moves
void tower(int n,int src[],int dest[],int aux[],int *ts,int *td,int *ta) //the tower function receives
{ //the arrays along with their top pointers
void show (int);
if(n==1) //if one element is there in source arr ,
{ //then it is moved to the destination arr,
dest[++(*td)]=src[(*ts)];
src[*ts]=-1;
(*ts)--;
int max;
max=((*ts)>(*td)?(*ts):(*td));
max=(max>(*ta)?max:(*ta));
cout<<" Move "<<moves++<<" ";
cout<<"\nrod A =>";
for(int i=max;i>-1;i--) //show the status of each arr
{
show(hanoi.a[i]);
}
cout<<"\nrod B =>";
for(int i=max;i>-1;i--) //show the status of each arr
{
show(hanoi.b[i]);
}
cout<<"\nrod C =>";
for(int i=max;i>-1;i--) //show the status of each arr
{
show(hanoi.c[i]);
}
cout<<"\n";
return;
}
tower(n-1,src,aux,dest,ts,ta,td); //else the
tower(1,src,dest,aux,ts,td,ta); // problem is solved by
tower(n-1,aux,dest,src,ta,td,ts); // recursive calls
}
void show(int a) //the show function shows the current status of the arrs
{
if(a==-1)
cout<<"- ";
else
cout<<a<<" ";
}
int main()
{
void tower(int ,int *,int *,int *,int *,int *,int *); //function prototype
void show(int); //function prototype
int n;
cout<<"Enter the number of disks: ";
cin>>n;
for(int j=0;j<n;j++) //feeds the elements in the arrs;
{
hanoi.a[j]=j+1;
hanoi.b[j]=-1;
hanoi.c[j]=-1;
}
hanoi.topa=n-1; //topa, topb and topc track the top of each array
hanoi.topb=-1;
hanoi.topc=-1;
cout<<"Initially "<<"\n";
cout<<"rod A =>";
for(int j=n-1;j>-1;j--) //show the status of each arr
{
show(hanoi.a[j]);
}
cout<<"\nrod B =>";
for(int j=n-1;j>-1;j--) //show the status of each arr
{
show(hanoi.b[j]);
}
cout<<"\nrod C =>";
for(int j=n-1;j>-1;j--) //show the status of each arr
{
show(hanoi.c[j]);
}
cout<<"\n";
tower(n,hanoi.a,hanoi.c,hanoi.b,&(hanoi.topa),&(hanoi.topc),&(hanoi.topb));
}
|
# tests/test_db_hybrid/test_where_not_equal.py
import pytest
import uvicore
import sqlalchemy as sa
from uvicore.support.dumper import dump
# DB Hybrid
@pytest.fixture(scope="module")
def Posts():
from app1.database.tables.posts import Posts
yield Posts
@pytest.fixture(scope="module")
def post(Posts):
yield Posts.table.c
@pytest.mark.asyncio
async def test_single(app1, Posts, post):
# Single NOT where
query = uvicore.db.query().table(Posts.table).where(post.creator_id, '!=', 2)
posts = await query.get()
#print(query.sql());dump(posts); dump(posts[0].keys())
assert [1, 2, 6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_single_bexp(app1, Posts, post):
# Single NOT where - binary expression
posts = await uvicore.db.query().table(Posts.table).where(post.creator_id != 2).get()
assert [1, 2, 6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_and(app1, Posts, post):
# Multiple where NOT AND
posts = await uvicore.db.query().table(Posts.table).where(post.creator_id, '!=', 2).where(post.owner_id, '!=', 2).get()
assert [6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_and_bexp(app1, Posts, post):
# Multiple where NOT AND - binary expression
posts = await uvicore.db.query().table(Posts.table).where(post.creator_id != 2).where(post.owner_id != 2).get()
assert [6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_and_list(app1, Posts, post):
# Multiple where NOT AND using a LIST
posts = await uvicore.db.query().table(Posts.table).where([
(post.creator_id, '!=', 2),
(post.owner_id, '!=', 2),
]).get()
assert [6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_and_list_bexp(app1, Posts, post):
# Multiple where NOT AND using a LIST - binary expression
posts = await uvicore.db.query().table(Posts.table).where([
post.creator_id != 2,
post.owner_id != 2,
]).get()
assert [6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_or(app1, Posts, post):
# Where NOT OR
posts = await uvicore.db.query().table(Posts.table).or_where([
(post.creator_id, '!=', 1),
(post.owner_id, '!=', 2)
]).get()
assert [3, 4, 5, 6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_or_bexp(app1, Posts, post):
# Where NOT OR - binary expression
posts = await uvicore.db.query().table(Posts.table).or_where([
post.creator_id != 1,
post.owner_id != 2
]).get()
assert [3, 4, 5, 6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_and_or(app1, Posts, post):
# Where NOT AND with where OR
posts = await uvicore.db.query().table(Posts.table).where(post.unique_slug, '!=', 'test-post5').or_where([
(post.creator_id, '!=', 1),
(post.owner_id, '!=', 2)
]).get()
assert [3, 4, 6, 7] == [x.id for x in posts]
@pytest.mark.asyncio
async def test_and_or_bexp(app1, Posts, post):
# Where NOT AND with where OR - binary expression
posts = await uvicore.db.query().table(Posts.table).where(post.unique_slug != 'test-post5').or_where([
post.creator_id != 1,
post.owner_id != 2
]).get()
assert [3, 4, 6, 7] == [x.id for x in posts]
|
def balance(self):
    """Pad the shapefile so record and shape counts match, adding
    null shapes or empty records until the two counts are equal."""
    while self.recNum > self.shpNum:
        self.null()
    while self.recNum < self.shpNum:
        self.record()
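A hypothetical call sketch, assuming this is the pyshp-style Writer.balance:

import shapefile  # pyshp; illustrative

w = shapefile.Writer('out')
w.field('NAME', 'C')
w.record('first')  # a record with no matching shape yet
w.balance()        # appends a null shape so the counts line up
w.close()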
|
/**
* Sets the current layout style.
*/
void
ALMLayout::SetLayoutStyle(ALMLayoutStyleType style)
{
fLayoutStyle = style;
}
|
/**
* Created by leekyoungil ([email protected]) on 12/19/2019.
*
* - It should be made of the Spring Bean or Singleton.
* example)
* * Singleton : HDFSDataBroker.getInstance({HDFSConnectionInfo.class});
* * Spring Bean :
* #@Bean
* public HDFSDataBroker hdfsDataBroker(HDFSConnectionInfo hdfsConnectionInfo) {
* return new HDFSDataBroker(hdfsConnectionInfo);
* }
*/
public class HDFSDataBroker implements DataBroker {
private final static Logger HDFS_PROCESSOR_LOGGER = LoggerFactory.getLogger(HDFSDataBroker.class);
private static final class HDFSDataProcessorHolder {
private static volatile HDFSDataBroker INSTANCE_HOLDER;
private static HDFSDataBroker getInstance(HDFSConnectionInfo hdfsConnectionInfo) {
    if (INSTANCE_HOLDER == null) {
        // Synchronize on the holder class: locking on INSTANCE_HOLDER itself
        // would throw a NullPointerException while the field is still null.
        synchronized (HDFSDataProcessorHolder.class) {
            if (INSTANCE_HOLDER == null) {
                INSTANCE_HOLDER = new HDFSDataBroker(hdfsConnectionInfo);
            }
        }
    }
    return INSTANCE_HOLDER;
}
}
private final Configuration configuration = new Configuration();
public static HDFSDataBroker getInstance(HDFSConnectionInfo hdfsConnectionInfo) {
return HDFSDataProcessorHolder.getInstance(hdfsConnectionInfo);
}
public HDFSDataBroker(HDFSConnectionInfo hdfsConnectionInfo) {
this.init(hdfsConnectionInfo);
}
private final String uriKey = "fs.defaultFS";
private final String hdfsImplKey = "fs.hdfs.impl";
private final String fileImplKey = "fs.file.impl";
private final String securityAuthenticationKey = "hadoop.security.authentication";
private final String securityAuthorizationKey = "hadoop.security.authorization";
private final String rpcTimeout = "fs.mapr.rpc.timeout";
private final String dfsSupportAppendKey = "dfs.support.append";
private final String userNameKey = "HADOOP_USER_NAME";
private final String homeDirKey = "hadoop.home.dir";
private void init(HDFSConnectionInfo hdfsConnectionInfo) {
System.setProperty(this.userNameKey, hdfsConnectionInfo.getHDFSUser());
System.setProperty(this.homeDirKey, hdfsConnectionInfo.getHomeDir());
this.configuration.set(this.hdfsImplKey, org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
this.configuration.set(this.fileImplKey, org.apache.hadoop.fs.LocalFileSystem.class.getName());
this.configuration.set(this.uriKey, hdfsConnectionInfo.getHDFSUriAddress());
this.configuration.set(this.securityAuthenticationKey, hdfsConnectionInfo.getHDFSSecurityAuthenticationType());
this.configuration.set(this.securityAuthorizationKey, hdfsConnectionInfo.getHDFSSecurityAuthorizationValue());
this.configuration.set(this.rpcTimeout, hdfsConnectionInfo.getRpcTimeout());
this.configuration.set(this.dfsSupportAppendKey, hdfsConnectionInfo.isDfsSupportAppend());
}
@Override
public boolean addFile(final String source, final String dest, final boolean overwrite, final boolean withNewLine) {
try (FileSystem fileSystem = FileSystem.get(URI.create(this.configuration.get(this.uriKey)), this.configuration)) {
PathInfo pathInfo = this.checkPathAndGet(dest, fileSystem);
Path path = pathInfo.getPath();
FSDataOutputStream out;
if (!pathInfo.isExists()) {
out = fileSystem.create(path);
} else {
if (overwrite) {
fileSystem.delete(path, true);
out = fileSystem.create(path);
} else {
out = fileSystem.append(path);
if (withNewLine) {
out.writeBytes(System.lineSeparator());
}
}
}
File sourceFile = new File(source);
try (FileInputStream fileInputStream = new FileInputStream(sourceFile)) {
return this.writeFileSystem(fileInputStream, out);
} catch (Exception ex) {
HDFS_PROCESSOR_LOGGER.error("An error occurred checking of file input stream. ({})", ex.toString());
} finally {
out.close();
}
} catch (Exception ex) {
HDFS_PROCESSOR_LOGGER.error("An error occurred checking of file system. ({})", ex.toString());
}
return false;
}
private final int bytePerOnce = 1024;
private boolean writeFileSystem(final FileInputStream fileInputStream, FSDataOutputStream out) {
try (InputStream inputStream = new BufferedInputStream(fileInputStream)) {
byte[] byteData = new byte[this.bytePerOnce];
int numBytes = 0;
while ((numBytes = inputStream.read(byteData)) > 0) {
out.write(byteData, 0, numBytes);
}
final int wroteSize = out.size();
return wroteSize > 0;
} catch (Exception ex) {
HDFS_PROCESSOR_LOGGER.error("An error occurred writing to file system. ({})", ex.toString());
}
return false;
}
@Override
public String readFile(final String source) throws Exception {
try (FileSystem fileSystem = FileSystem.get(URI.create(this.configuration.get(this.uriKey)), this.configuration)) {
PathInfo pathInfo = this.checkPathAndGet(source, fileSystem);
if (pathInfo.isNotExists()) {
throw new Exception("File is not exists. check this("+source+") location.");
}
return this.readFileSystem(fileSystem, pathInfo.getPath());
} catch (Exception ex) {
final String errorMessage = "An error occurred reading of file system. ("+ex.toString()+")";
HDFS_PROCESSOR_LOGGER.error(errorMessage);
throw new Exception(errorMessage);
}
}
private String readFileSystem(final FileSystem fileSystem, final Path path) throws Exception {
try (FSDataInputStream in = fileSystem.open(path)) {
return this.getStringFromFSDataInputStream(in);
} catch (Exception ex) {
final String errorMessage = "An error occurred reading from file system. ("+ex.toString()+")";
HDFS_PROCESSOR_LOGGER.error(errorMessage);
throw new Exception(errorMessage);
}
}
private final String utf8CharsetString = StandardCharsets.UTF_8.toString();
private String getStringFromFSDataInputStream(FSDataInputStream in) throws Exception {
try (StringWriter stringWriter = new StringWriter()) {
IOUtils.copy(in, stringWriter, this.utf8CharsetString);
return stringWriter.toString();
} catch (Exception ex) {
final String errorMessage = "An error occurred processing of StringWriter. ("+ex.toString()+")";
HDFS_PROCESSOR_LOGGER.error(errorMessage);
throw new Exception(errorMessage);
}
}
@Override
public boolean deleteFile(final String source, boolean forceDelete) {
try (FileSystem fileSystem = FileSystem.get(URI.create(this.configuration.get(this.uriKey)), this.configuration)) {
PathInfo pathInfo = this.checkPathAndGet(source, fileSystem);
if (pathInfo.isNotExists()) {
return false;
}
return fileSystem.delete(pathInfo.getPath(), forceDelete);
} catch (Exception ex) {
HDFS_PROCESSOR_LOGGER.error("An error occurred deleting of file. ({})", ex.toString());
}
return false;
}
@Override
public boolean mkdir(final String source) {
try (FileSystem fileSystem = FileSystem.get(URI.create(this.configuration.get(this.uriKey)), this.configuration)) {
PathInfo pathInfo = this.checkPathAndGet(source, fileSystem);
if (pathInfo.isExists()) {
return false;
}
return fileSystem.mkdirs(pathInfo.getPath());
} catch (Exception ex) {
HDFS_PROCESSOR_LOGGER.error("An error occurred make dir. ({})", ex.toString());
}
return false;
}
@Override
public Configuration getConfiguration() {
return this.configuration;
}
private PathInfo checkPathAndGet(final String source, final FileSystem fileSystem) throws IOException {
final Path path = new Path(source);
final boolean fileExists = fileSystem.exists(path);
if (!fileExists) {
HDFS_PROCESSOR_LOGGER.debug("Target {} does not exists.", source);
}
return new PathInfo(path, fileExists);
}
}
|
Fundamental shifts in society are upending the current nature of work. With automation and artificial intelligence already permeating nearly every sector of the economy, disruption is happening at an accelerated pace.
Our recent presidential election made clear that workforce shifts are felt by a broad swath of the American public. People are looking to elected officials at every level of government for a new response to these changes.
We have to move the policy discussion away from job retraining towards job rethinking.
The National League of Cities' newest report, The Future of Work in Cities, examines the rapid changes happening in the workforce. Here are 8 suggestions from that report on how city leaders — the most responsive level of government — can approach the rapidly shifting future of work.
Rethink education and workforce training programs.
The strength of cities comes from the people that live in them. As cities prepare for the future of work, they must address talent development by collaborating with business leaders, educational institutions, and community-based organizations to ensure education and training programs match workforce needs.
Update policies to reflect the changing composition of the workforce.
Tomorrow's workforce will be significantly more diverse. Women will continue to make up a larger portion of the workforce, and its racial and ethnic makeup will change. The workforce is also getting older as many delay retirement, while younger people delay entering work. These changes shift the fundamental needs of employees and, subsequently, the way employers should respond. Flexibility will be critical.
Support entrepreneurs and startups as a core workforce development strategy.
Innovation is the lifeblood of city economic growth. Local leaders need to create a strong startup culture through low tax and regulatory barriers, and strong regional networks with access to capital that allow startups to scale. As cities continue to lower barriers of entry for small businesses and support local startups, innovation will flourish.
Build equitable business development programs.
Equity is critical to building a strong workforce. Policies that promote equity in areas such as health and education often have positive effects on economic growth. Likewise, policies that address marginalized groups reduce political conflict and strengthen public institutions and social organizations, feeding into a virtuous cycle of growth.
Invest in digital and physical infrastructure that supports the workforce of tomorrow.
Investment in reliable, high-speed internet and expanded broadband services is critical to supporting a competitive workforce. In addition to digital infrastructure, cities must also invest in roads, bridges and transit systems.
In cities, people like to walk, bike, and take public transit, while single occupancy vehicle use continues to decline. This preference, combined with a move toward autonomous vehicles, means that cities will need to rethink investment priorities while considering new uses for car-oriented infrastructure like parking garages.
Ensure access to paid leave for families.
The United States is one of few developed countries that doesn't offer some type of guaranteed paid leave for new parents. Yet, companies that offer these policies retain more employees and avoid lengthy talent searches. Cities are leading in this space. The San Francisco Board of Supervisors, for example, mandates six weeks of paid parental leave for workers. This long overdue policy benefits everyone, giving parents the opportunity to maintain their careers, helping organizations retain employees, and bringing stability to the city's workforce and economy.
Consider offering portable benefit systems.
As workers change jobs more frequently and contract work becomes more common, the policy environment around benefits needs to shift. Benefits that once accompanied most employment situations are becoming more elusive, and portable benefits, which are tied to individuals rather than employers, represent one potential solution.
These typically wrap together some form of paid leave, health insurance, worker's compensation/unemployment, and retirement fund matching. Proposals for this type of system vary.
Some suggest that it should be universal and administered by government or a public/private institution created for such a purpose. Others think it should be administered by non-governmental community-based groups. Either way, portable benefits have the potential to support those who work outside the realm of the traditional 9-5 economy.
Explore basic income and other broad-based social support systems.
Basic income, which guarantees every citizen a regular, unconditional sum of money, is gaining support in policy conversations. This is intended to serve the same function as a living wage by bringing all individuals up to an economic baseline. In some ways, this proposal resembles existing welfare systems, with the major exception that the benefit goes to everyone, regardless of age, ability, class status, or participation in the workforce.
Advocates from the tech world tout it as a way to counteract the economic blow of automation replacing jobs currently occupied by humans. Other supporters argue that basic income is more streamlined, efficient, and transparent than current social welfare systems. Finally, there are others who argue that a basic income might allow individuals to pursue more creative, enjoyable interests. A full-scale of examination of the cultural and financial implications of basic income will be key to implementing such a system.
We know that automation and artificial intelligence will have a great impact on the future of work, play, and life. However, we shouldn't jump to the assumption that this will be a net negative.
|
import { Row, Col } from "antd";
import styled from "styled-components";
import { RowProps } from "antd/es/grid";
import React from "react";
const PaddingCol = styled(Col as any)`
padding: 4px !important;
//&:first-child {
// padding-left: 0 !important;
//}
//
//&:last-child {
//padding-right: 0 !important;
//}
`;
export default PaddingCol;
|
/**
* Runs action with unparseable OIDC token.
*/
@SuppressWarnings("unchecked")
@Test
public void testUnparseable() throws Exception {
final AccessToken accessToken = new BearerAccessToken();
final RefreshToken refreshToken = new RefreshToken();
final JWT jwt = Mockito.mock(JWT.class);
Mockito.when(jwt.getJWTClaimsSet()).thenThrow(java.text.ParseException.class);
final OIDCTokens oidcTokens = new OIDCTokens(jwt, accessToken, refreshToken);
final OIDCTokenResponse oidcTokenResponse = new OIDCTokenResponse(oidcTokens);
final AbstractProfileAction<?, ?> action = getAction();
action.initialize();
final SocialUserOpenIdConnectContext suCtx = new SocialUserOpenIdConnectContext();
suCtx.setOidcTokenResponse(oidcTokenResponse);
if (nullifyIdToken){
suCtx.setIDToken(null);
}
suCtx.setoIDCProviderMetadata(buildOidcMetadata(DEFAULT_ISSUER));
prc.getSubcontext(AuthenticationContext.class, false).addSubcontext(suCtx);
final Event event = action.execute(src);
ActionTestingSupport.assertEvent(event, AuthnEventIds.NO_CREDENTIALS);
}
|
/* Licensed under GPL-3.0 */
package util;
import frontEnd.Interface.outputRouting.ExceptionHandler;
import frontEnd.Interface.outputRouting.ExceptionId;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
/**
* MvnPomFileParser class.
*
* @author CryptoguardTeam
* @version 03.07.01
* @since V01.00.00
*/
public class MvnPomFileParser implements BuildFileParser {
private static final Logger log =
org.apache.logging.log4j.LogManager.getLogger(MvnPomFileParser.class);
Map<String, String> moduleVsPath = new HashMap<>();
String projectName;
String projectVersion;
/**
* Constructor for MvnPomFileParser.
*
* @param fileName a {@link java.lang.String} object.
* @throws frontEnd.Interface.outputRouting.ExceptionHandler if any.
*/
public MvnPomFileParser(String fileName) throws ExceptionHandler {
try {
File xmlFile = new File(fileName);
DocumentBuilderFactory docbuildFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docbuildFactory.newDocumentBuilder();
Document document = docBuilder.parse(xmlFile);
NodeList nodeList = document.getElementsByTagName("module");
String[] splits = fileName.split("/");
// Use a distinct name for the directory-derived fallback so it does not
// shadow the projectName field assigned below.
String projectDirName = splits[splits.length - 2];
String projectRoot = fileName.substring(0, fileName.lastIndexOf('/'));
if (nodeList.getLength() == 0) {
    moduleVsPath.put(projectDirName, projectRoot);
} else {
    for (int i = 0; i < nodeList.getLength(); i++) {
        String moduleName = nodeList.item(i).getTextContent();
        moduleVsPath.put(moduleName, projectRoot + "/" + moduleName);
    }
}
// getTextContent() is required here; getNodeValue() returns null for element nodes.
String groupId = document.getElementsByTagName("groupId").item(0).getTextContent();
String artifactId = document.getElementsByTagName("artifactId").item(0).getTextContent();
projectName = StringUtils.trimToNull(groupId) + ":" + StringUtils.trimToNull(artifactId);
projectVersion =
    StringUtils.trimToNull(document.getElementsByTagName("version").item(0).getTextContent());
} catch (ParserConfigurationException e) {
log.fatal("Error creating file parser");
throw new ExceptionHandler("Error creating file parser", ExceptionId.FILE_CON);
} catch (org.xml.sax.SAXException | java.io.IOException e) {
log.fatal("Error parsing " + fileName);
throw new ExceptionHandler("Error parsing " + fileName, ExceptionId.FILE_O);
}
}
/**
* isGradle.
*
* @return a {@link java.lang.Boolean} object.
*/
public Boolean isGradle() {
return false;
}
/** {@inheritDoc} */
@Override
public Map<String, List<String>> getDependencyList() throws ExceptionHandler {
String currentModule = "";
try {
Map<String, List<String>> moduleVsDependencies = new HashMap<>();
for (String module : moduleVsPath.keySet()) {
currentModule = module;
File xmlFile = new File(moduleVsPath.get(module) + "/pom.xml");
DocumentBuilderFactory docbuildFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder docBuilder = docbuildFactory.newDocumentBuilder();
Document document = docBuilder.parse(xmlFile);
XPath xPath = XPathFactory.newInstance().newXPath();
NodeList nodeList =
(NodeList)
xPath
.compile("/project/dependencies/dependency/artifactId")
.evaluate(document, XPathConstants.NODESET);
List<String> dependencies = new ArrayList<>();
for (int i = 0; i < nodeList.getLength(); i++) {
String dependency = nodeList.item(i).getTextContent();
if (moduleVsPath.containsKey(dependency)) {
dependencies.add(dependency);
}
}
moduleVsDependencies.put(module, dependencies);
}
Map<String, List<String>> moduleVsDependencyPaths = new HashMap<>();
for (String module : moduleVsDependencies.keySet()) {
List<String> dependencyPaths = new ArrayList<>();
calcAlldependenciesForModule(module, moduleVsDependencies, dependencyPaths);
dependencyPaths.add(moduleVsPath.get(module) + "/src/main/java");
moduleVsDependencyPaths.put(module, dependencyPaths);
}
return moduleVsDependencyPaths;
} catch (ParserConfigurationException e) {
throw new ExceptionHandler("Error creating file parser", ExceptionId.FILE_CON);
} catch (javax.xml.xpath.XPathExpressionException e) {
throw new ExceptionHandler(
"Error parsing artifacts from" + currentModule + "/pom.xml", ExceptionId.FILE_READ);
} catch (org.xml.sax.SAXException | java.io.IOException e) {
throw new ExceptionHandler(
"Error parsing " + currentModule + "/pom.xml", ExceptionId.FILE_READ);
}
}
private void calcAlldependenciesForModule(
String module, Map<String, List<String>> mVsds, List<String> dependencyPaths) {
for (String dependency : mVsds.get(module)) {
dependencyPaths.add(moduleVsPath.get(dependency) + "/src/main/java");
calcAlldependenciesForModule(dependency, mVsds, dependencyPaths);
}
}
/**
* Getter for the field <code>projectName</code>.
*
* @return a {@link java.lang.String} object.
*/
public String getProjectName() {
return this.projectName;
}
/**
* Getter for the field <code>projectVersion</code>.
*
* @return a {@link java.lang.String} object.
*/
public String getProjectVersion() {
return this.projectVersion;
}
/**
* Setter for the field <code>projectName</code>.
*
* @param projectName a {@link java.lang.String} object.
*/
public void setProjectName(String projectName) {
this.projectName = projectName;
}
/**
* Setter for the field <code>projectVersion</code>.
*
* @param projectVersion a {@link java.lang.String} object.
*/
public void setProjectVersion(String projectVersion) {
this.projectVersion = projectVersion;
}
}
|
/*!
Kills all the threads that have been created with multithreadedTrain.
This function is automatically called in the destructor, and is equivalent
to calling multithreadedTrain(1).
\sa multithreadedTrain()
*/
void Perceptron::killThreads()
{
#ifdef __unix__
if (!threads)
return;
pthread_mutex_lock(&cond_mutex);
t_exit = true;
pthread_cond_broadcast(&cond);
pthread_mutex_unlock(&cond_mutex);
for (int i = t_count - 1; i >= 0; --i)
pthread_join(threads[i], NULL);
pthread_mutex_destroy(&cond_mutex);
pthread_cond_destroy(&cond);
pthread_cond_destroy(&nextc);
pthread_mutex_destroy(&grad_protect);
pthread_cond_destroy(&wait_end);
delete[] threads;
threads = NULL;
#endif
}
|
def PrintFragmens(self):
self.ps = MyReportService()
for FragID, Fragment in self.cfg.Fragments.items():
FileName = Fragment[CFG_FRG_GEF]
self.ps.PrintSec(CFG_SEC_FRG+str(FragID+1)+" "+CFG_FRG_NAM+" = "+Fragment[CFG_FRG_NAM]+" "+CFG_FRG_ID+" = "+str(Fragment[CFG_FRG_ID]))
print(CFG_FRG_GEF+" = "+FileName)
if ( self.cfg.Methods[CFG_MET_APX] != CFG_MET_ARX_LFT):
for AtomID, Atom in Fragment[CFG_EXS_ATOMS].items():
Atom.MyPrint(AtomID+1)
self.ps.PrintDiv()
print("Coordinates of origin:")
print(self.cfg.Fragments[FragID][CFG_EXS_CENTER].MyPrint())
self.ps.PrintDiv()
print("Excited states read from the input file:")
INTX_LEN = 5
if ( self.cfg.Methods[CFG_MET_APX] == CFG_MET_ARX_LFT):
print("{}\t{}\t{}\t{}\t{}".format("State".ljust(INTX_LEN),
"Abs.Max. nm".ljust(STR_LEN_FLOAT),
"Ems.Max nm".ljust(STR_LEN_FLOAT),
"Eps M-1cm-1".ljust(STR_LEN_FLOAT),
"Q".ljust(STR_LEN_FLOAT)))
else:
print("{} {} ({} {} {}) {} {} {}".format("State".ljust(INTX_LEN),
"Abs. Max, nm".ljust(STR_LEN_FLOAT),
"x ea0".ljust(STR_LEN_FLOAT),
"y ea0".ljust(STR_LEN_FLOAT),
"z ea0".ljust(STR_LEN_FLOAT),
"Norm ea0".ljust(STR_LEN_FLOAT),
"Debye".ljust(STR_LEN_FLOAT),
"Ems. Max, nm".ljust(STR_LEN_FLOAT)))
for ExcitedStateID, ExcitedState in Fragment[CFG_EXS_EXSTATE].items():
ExcitedState.MyPrint(self.cfg.Methods[CFG_MET_APX])
self.ps.PrintDiv()
if (self.cfg.Methods[CFG_MET_QD] != CFG_MET_QD_NONE ):
print("Quantum Dynamics Parameters of Fragments:")
print("El. Ex. State, Vib. Mode, Vib. cm-1, El.-Vib. Coupl, cm-1, Vib. Decay ps-1")
QDVibs = self.cfg.Fragments[FragID][CFG_EXS_QDVID]
for QDVibID, QDVib in QDVibs.items():
QDVib.MyPrint()
self.ps.PrintDiv()
return
|
/**
* @author <a href="mailto:[email protected]">Marek Posolda</a>
*/
public class JpaUserSessionPersisterProvider implements UserSessionPersisterProvider {
private static final Logger LOG = LoggerFactory.getLogger(JpaUserSessionPersisterProvider.class);
@Autowired
private PersistentUserSessionRepository persistentUserSessionRepository;
@Autowired
private PersistentClientSessionRepository persistentClientSessionRepository;
@Autowired
private RealmProvider realmProvider;
@Override
public void createUserSession(UserSessionModel userSession, boolean offline) {
PersistentUserSessionAdapter adapter = new PersistentUserSessionAdapter(userSession);
PersistentUserSessionModel model = adapter.getUpdatedModel();
PersistentUserSession entity = new PersistentUserSession();
entity.setUserSessionId(model.getUserSessionId());
entity.setCreatedOn(model.getStarted());
entity.setRealmId(adapter.getRealm().getId());
entity.setUserId(adapter.getUser().getId());
String offlineStr = offlineToString(offline);
entity.setOffline(offlineStr);
entity.setLastSessionRefresh(model.getLastSessionRefresh());
entity.setData(model.getData());
persistentUserSessionRepository.save(entity);
}
@Override
public void createClientSession(AuthenticatedClientSessionModel clientSession, boolean offline) {
PersistentAuthenticatedClientSessionAdapter adapter = new PersistentAuthenticatedClientSessionAdapter(clientSession);
PersistentClientSessionModel model = adapter.getUpdatedModel();
PersistentClientSession entity = new PersistentClientSession();
StorageId clientStorageId = new StorageId(clientSession.getClient().getId());
if (clientStorageId.isLocal()) {
entity.setClientId(clientStorageId.getId());
entity.setClientStorageProvider(PersistentClientSession.LOCAL);
entity.setExternalClientId(PersistentClientSession.LOCAL);
} else {
entity.setClientId(PersistentClientSession.EXTERNAL);
entity.setClientStorageProvider(clientStorageId.getProviderId());
entity.setExternalClientId(clientStorageId.getExternalId());
}
entity.setTimestamp(clientSession.getTimestamp());
String offlineStr = offlineToString(offline);
entity.setOffline(offlineStr);
entity.setUserSessionId(clientSession.getUserSession().getId());
entity.setData(model.getData());
persistentClientSessionRepository.save(entity);
}
@Override
public void removeUserSession(String userSessionId, boolean offline) {
String offlineStr = offlineToString(offline);
persistentClientSessionRepository.deleteClientSessionsByUserSession(userSessionId, offlineStr);
persistentUserSessionRepository.deleteByUserSessionIdAndOffline(userSessionId, offlineStr);
}
@Override
public void removeClientSession(String userSessionId, String clientUUID, boolean offline) {
String offlineStr = offlineToString(offline);
StorageId clientStorageId = new StorageId(clientUUID);
String clientId = PersistentClientSession.EXTERNAL;
String clientStorageProvider = PersistentClientSession.LOCAL;
String externalId = PersistentClientSession.LOCAL;
if (clientStorageId.isLocal()) {
clientId = clientUUID;
} else {
clientStorageProvider = clientStorageId.getProviderId();
externalId = clientStorageId.getExternalId();
}
PersistentClientSession sessionEntity = persistentClientSessionRepository.findByKey(userSessionId, clientId, clientStorageProvider, externalId, offlineStr);
if (sessionEntity != null) {
persistentClientSessionRepository.delete(sessionEntity);
// Remove userSession if it was last clientSession
List<PersistentClientSession> clientSessions = getClientSessionsByUserSession(sessionEntity.getUserSessionId(), offline);
if (clientSessions.size() == 0) {
offlineStr = offlineToString(offline);
PersistentUserSession userSessionEntity = persistentUserSessionRepository.findByKey(sessionEntity.getUserSessionId(), offlineStr);
if (userSessionEntity != null) {
persistentUserSessionRepository.delete(userSessionEntity);
}
}
}
}
private List<PersistentClientSession> getClientSessionsByUserSession(String userSessionId, boolean offline) {
String offlineStr = offlineToString(offline);
return persistentClientSessionRepository.findClientSessionsByUserSession(userSessionId, offlineStr);
}
@Override
public void onRealmRemoved(RealmModel realm) {
persistentClientSessionRepository.deleteClientSessionsByRealm(realm.getId());
persistentUserSessionRepository.deleteUserSessionsByRealm(realm.getId());
}
@Override
public void onClientRemoved(RealmModel realm, ClientModel client) {
onClientRemoved(client.getId());
}
private void onClientRemoved(String clientUUID) {
StorageId clientStorageId = new StorageId(clientUUID);
if (clientStorageId.isLocal()) {
persistentClientSessionRepository.deleteClientSessionsByClient(clientUUID);
} else {
persistentClientSessionRepository.deleteClientSessionsByExternalClient(clientStorageId.getProviderId(), clientStorageId.getExternalId());
}
}
@Override
public void onUserRemoved(RealmModel realm, UserModel user) {
onUserRemoved(user.getId());
}
private void onUserRemoved(String userId) {
persistentClientSessionRepository.deleteClientSessionsByUser(userId);
persistentUserSessionRepository.deleteUserSessionsByUser(userId);
}
@Override
public void updateLastSessionRefreshes(RealmModel realm, int lastSessionRefresh, Collection<String> userSessionIds, boolean offline) {
String offlineStr = offlineToString(offline);
int us = persistentUserSessionRepository.updateUserSessionLastSessionRefresh(realm.getId(), lastSessionRefresh, offlineStr, userSessionIds);
LOG.debug("Updated lastSessionRefresh of {} user sessions in realm '{}'", us, realm.getName());
}
@Override
public void removeExpired(RealmModel realm) {
int expiredOffline = Time.currentTime() - realm.getOfflineSessionIdleTimeout() - SessionTimeoutHelper.PERIODIC_CLEANER_IDLE_TIMEOUT_WINDOW_SECONDS;
String offlineStr = offlineToString(true);
LOG.trace("Trigger removing expired user sessions for realm '{}'", realm.getName());
int cs = persistentClientSessionRepository.deleteExpiredClientSessions(realm.getId(), expiredOffline, offlineStr);
int us = persistentUserSessionRepository.deleteExpiredUserSessions(realm.getId(), expiredOffline, offlineStr);
LOG.debug("Removed {} expired user sessions and {} expired client sessions in realm '{}'", us, cs, realm.getName());
}
@Override
public List<? extends UserSessionModel> loadUserSessions(int firstResult, int maxResults, boolean offline, int lastCreatedOn, String lastUserSessionId) {
String offlineStr = offlineToString(offline);
List<PersistentUserSessionAdapter> result = persistentUserSessionRepository.findUserSessions(offlineStr, lastCreatedOn, lastUserSessionId)
.map(this::toAdapter)
.collect(Collectors.toList());
Map<String, PersistentUserSessionAdapter> sessionsById = result.stream()
.collect(Collectors.toMap(UserSessionModel::getId, Function.identity()));
Set<String> userSessionIds = sessionsById.keySet();
Set<String> removedClientUUIDs = new HashSet<>();
if (!userSessionIds.isEmpty()) {
List<PersistentClientSession> clientSessions = persistentClientSessionRepository.findClientSessionsByUserSessions(userSessionIds, offlineStr);
for (PersistentClientSession clientSession : clientSessions) {
PersistentUserSessionAdapter userSession = sessionsById.get(clientSession.getUserSessionId());
PersistentAuthenticatedClientSessionAdapter clientSessAdapter = toAdapter(userSession.getRealm(), userSession, clientSession);
Map<String, AuthenticatedClientSessionModel> currentClientSessions = userSession.getAuthenticatedClientSessions();
// Case when client was removed in the meantime
if (clientSessAdapter.getClient() == null) {
removedClientUUIDs.add(clientSession.getClientId());
} else {
currentClientSessions.put(clientSession.getClientId(), clientSessAdapter);
}
}
}
for (String clientUUID : removedClientUUIDs) {
onClientRemoved(clientUUID);
}
return result;
}
private PersistentUserSessionAdapter toAdapter(PersistentUserSession entity) {
RealmModel realm = realmProvider.getRealm(entity.getRealmId());
return toAdapter(realm, entity);
}
private PersistentUserSessionAdapter toAdapter(RealmModel realm, PersistentUserSession entity) {
PersistentUserSessionModel model = new PersistentUserSessionModel();
model.setUserSessionId(entity.getUserSessionId());
model.setStarted(entity.getCreatedOn());
model.setLastSessionRefresh(entity.getLastSessionRefresh());
model.setData(entity.getData());
model.setOffline(offlineFromString(entity.getOffline()));
Map<String, AuthenticatedClientSessionModel> clientSessions = new HashMap<>();
return new PersistentUserSessionAdapter(model, realm, entity.getUserId(), clientSessions);
}
private PersistentAuthenticatedClientSessionAdapter toAdapter(RealmModel realm, PersistentUserSessionAdapter userSession, PersistentClientSession entity) {
String clientId = entity.getClientId();
if (!entity.getExternalClientId().equals(PersistentClientSession.LOCAL)) {
clientId = new StorageId(entity.getClientId(), entity.getExternalClientId()).getId();
}
ClientModel client = realm.getClientById(clientId);
PersistentClientSessionModel model = new PersistentClientSessionModel();
model.setClientId(clientId);
model.setUserSessionId(userSession.getId());
model.setUserId(userSession.getUserId());
model.setTimestamp(entity.getTimestamp());
model.setData(entity.getData());
return new PersistentAuthenticatedClientSessionAdapter(model, realm, client, userSession);
}
@Override
public int getUserSessionsCount(boolean offline) {
String offlineStr = offlineToString(offline);
return persistentUserSessionRepository.findUserSessionsCount(offlineStr);
}
@Override
public void close() {
}
private String offlineToString(boolean offline) {
return offline ? "1" : "0";
}
private boolean offlineFromString(String offlineStr) {
return "1".equals(offlineStr);
}
}
|
def post_import(self, pyramid):
transform = pyramid.dataset.GetCoordinateTransformation(
dst_ref=SpatialReference.FromEPSG(4326)
)
lower_left, upper_right = pyramid.dataset.GetTiledExtents(
transform=transform
)
self.mbtiles.metadata['bounds'] = (lower_left.x, lower_left.y,
upper_right.x, upper_right.y)
|
def swap_pass_manager_creator(
backend,
swap_strategy: Optional[SwapStrategy] = None,
swap_strategy_qubits: Optional[List[int]] = None,
use_initial_mapping: bool = False,
) -> PassManager:
if swap_strategy is not None and swap_strategy_qubits is None:
warn("swap_strategy will be ignored since swap_strategy_qubits is None.")
basis_gates = backend.configuration().basis_gates
swap_pm = PassManager()
swap_pm.append(
SwapStrategyCreator(
backend,
swap_strategy=swap_strategy,
swap_strategy_qubits=swap_strategy_qubits,
)
)
if use_initial_mapping:
swap_pm.append(InitialQubitMapper())
coupling_map = CouplingMap(backend.configuration().coupling_map)
swap_pm.append(
[
QAOASwapPass(),
FullAncillaAllocation(coupling_map),
EnlargeWithAncilla(),
ApplyLayout(),
UnrollCustomDefinitions(std_eqlib, basis_gates),
BasisTranslator(std_eqlib, basis_gates),
Optimize1qGatesDecomposition(basis_gates),
]
)
return swap_pm
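A hypothetical invocation, assuming a Qiskit backend and a circuit are already in scope:

# `backend` and `qaoa_circuit` are placeholders for real objects.
pm = swap_pass_manager_creator(backend, use_initial_mapping=True)
transpiled = pm.run(qaoa_circuit)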
|
import React from 'react';
import { Box, CardMedia, Typography } from '@mui/material/';
import { makeStyles } from '@mui/styles';
import Link from 'next/link';
import Carousel from 'react-multi-carousel';
import 'react-multi-carousel/lib/styles.css';
import int1 from '@src/static/img/int1.png';
import int2 from '@src/static/img/int2.png';
import int3 from '@src/static/img/int3.png';
const responsive = {
desktop: {
breakpoint: { max: 3000, min: 1024 },
items: 3,
},
tablet: {
breakpoint: { max: 1024, min: 464 },
items: 2,
partialVisibilityGutter: 40,
},
mobile: {
breakpoint: { max: 464, min: 0 },
items: 1,
partialVisibilityGutter: 40,
},
};
const useStyles = makeStyles(() => ({
imgCover: {
objectFit: 'cover',
borderRadius: 5,
pointerEvents: 'none',
cursor: 'pointer',
},
textTitle: { fontSize: '1.1rem', fontWeight: 500, marginTop: 3, color: '#222222' },
}));
const InterestCarousel: React.FC = () => {
const classStyle = useStyles();
return (
<Carousel responsive={responsive} draggable arrows={false} partialVisible>
<Box px={1} height="100%">
<Link href="/interest">
<CardMedia component="img" image={int3.src} className={classStyle.imgCover} />
</Link>
<Typography className={classStyle.textTitle}>กิจกรรมในกรุงเทพ</Typography>
</Box>
<Box px={1} height="100%">
<Link href="/interest">
<CardMedia component="img" image={int1.src} className={classStyle.imgCover} />
</Link>
<Typography className={classStyle.textTitle}>แอดเวนเจอร์</Typography>
</Box>
<Box px={1} height="100%">
<Link href="/interest">
<CardMedia component="img" image={int2.src} className={classStyle.imgCover} />
</Link>
<Typography className={classStyle.textTitle}>ใกล้ชิดสัตว์</Typography>
</Box>
</Carousel>
);
};
export default InterestCarousel;
|
import * as d3 from 'd3';
import { DiagramLayout } from '@core/diagram/diagram-layout';
import { DiagramLinkableContext } from '@core/diagram-element-linkable';
import { forceNodes, forceLinks } from './force-data';
import { d3Element } from '@core/svg/d3-def-types';
import { ArchConfig } from '@core/diagram-impls/element/diagram-element.config';
import { AnalysisElementType } from '@core/models/analysis-element';
import { forceInBox } from '@core/diagram-impls/libs/force-in-box';
import { rectCollide } from '@core/diagram-impls/libs/force-rect-collide';
import { boundedBox } from '@core/diagram-impls/libs/force-bounded-box';
interface NodeType {
name: string;
elementType?: AnalysisElementType;
textWidth?: number;
textHeight?: number;
rectWidth?: number;
rectHeight?: number;
fullWidth?: number;
fullHeight?: number;
}
const nodePadding = { width: 14, height: 6 };
// TODO: not used, not finished
export class StarForceLayout extends DiagramLayout {
private linksGroup: d3Element;
private nodesGroup: d3Element;
constructor() {
super();
}
drawLayout(elementContext: DiagramLinkableContext) {
this.updateStatusNotReady();
console.log('star-force', elementContext);
this.initLayout();
this.drawForceLayout(forceNodes, forceLinks);
}
private initLayout() {
const rootGroup = this.rootGroup;
// const diagramElements: DiagramElement[] = elementContext.elements;
this.linksGroup = rootGroup.append('g').attr('class', 'links');
this.nodesGroup = rootGroup.append('g').attr('class', 'nodes');
// this.nodesLinks = convertToForceData(diagramElements);
}
private drawForceLayout(nodes: any, links: any) {
const time1 = new Date();
const size = this.board.getBoardSize();
const { width, height } = size;
const center = { x: width / 3, y: height / 2 };
nodes[0].fx = center.x;
nodes[0].fy = center.y;
const linkElement = createLinkSvgElement(this.linksGroup, links);
const nodeElement = createNodeSvgElement(this.nodesGroup, nodes);
nodeElement
.call(
d3.drag()
.on('start', dragstarted)
.on('drag', dragged)
.on('end', dragended)
);
const rectSize = (node: NodeType) => ([node.fullWidth, node.fullHeight]);
const linkForce = this.createForceLinkFn(links);
const collision = this.createCollisionFn();
const rectCollision = rectCollide()
.size(function (d) { return [ d.fullWidth + 20, d.fullHeight + 20 ]; });
const boxForce = boundedBox()
.bounds([[0, 0], [width, height]])
.size(rectSize);
// Instantiate the forceInABox force
const groupingForce = forceInBox(1)
.strength(0.1) // Strength to foci
.template('treemap') // Either treemap or force
.groupBy('group') // Node attribute to group
.links(links) // The graph links. Must be called after setting the grouping attribute
.enableGrouping(true)
.nodeSize(5) // How big are the nodes to compute the force template
// .forceCharge(-200) // Separation between nodes on the force template
.size([width, height]); // Size of the chart
const simulation = d3.forceSimulation()
.nodes(nodes)
.force('charge', d3.forceManyBody().strength(-120).distanceMax(200).distanceMin(120))
// .force('charge', d3.forceManyBody())
.force('center', d3.forceCenter(center.x, center.y))
// .force('collision', collision)
.force('collision', rectCollision)
// .force('box', boxForce)
// .force('group', groupingForce)
.force('link', linkForce)
// .alphaDecay(0)
.on('tick', tick);
simulation
.on('end', () => {
this.updateStatusReady();
});
function tick() {
const alpha = this.alpha();
const offsetX = 40 * alpha;
const offsetY = 20 * alpha;
nodes.forEach(function(tickedNode, nodeIndex) {
const type = tickedNode.elementType;
if (type !== AnalysisElementType.Module) {
tickedNode.x += type === AnalysisElementType.Service ? offsetX : -offsetX;
//
} else {
tickedNode.y += tickedNode.y > center.y ? offsetY : -offsetY;
}
});
linkElement
.attr('x1', (d) => d.source.x)
.attr('y1', (d) => d.source.y)
.attr('x2', (d) => d.target.x)
.attr('y2', (d) => d.target.y);
nodeElement.attr('transform', (d) => `translate(${d.x}, ${d.y})`);
}
function dragstarted(d) {
if (!d3.event.active) {
simulation.alphaTarget(0.3).restart();
}
d.fx = d.x;
d.fy = d.y;
}
function dragged(d) {
d.fx = d3.event.x;
d.fy = d3.event.y;
}
function dragended(d) {
if (!d3.event.active) {
simulation.alphaTarget(0);
}
d.fx = null;
d.fy = null;
}
}
private createCollisionFn(): d3.ForceCollide<any> {
const collision = d3.forceCollide().radius(function(d: any) {
return d.fullWidth / 3 * 2;
});
return collision;
}
private createForceLinkFn(links: { source: any, target: any }[]): d3.ForceLink<any, any> {
const forceLink = d3.forceLink<any, any>()
.links(links)
.id((d: any) => d.name )
;
forceLink
.distance(function(link) {
const { source, target } = link;
if (source.elementType === target.elementType && source.elementType === AnalysisElementType.Module) {
return 150;
}
return 150;
});
return forceLink;
}
}
function createLinkSvgElement(linksGroup: d3Element, links: { source: any, target: any }[]): d3Element {
const link = linksGroup
.selectAll('line')
.data(links)
.enter()
.append('line')
.style('stroke', '#999')
.attr('stroke-width', function(d) { return 1; });
return link;
}
function createNodeSvgElement(nodesGroup: d3Element, nodes: NodeType[]): d3Element {
const node = nodesGroup.selectAll('g')
.data(nodes)
.enter()
.append('g');
  node.append('text')
    .text((d) => d.name)
    .attr('font-size', 11)
    .style('fill', 'black')
    .each(function(d) {
      // Measure the rendered label to size the surrounding rect.
      const $this = this as SVGTextElement;
      const size = $this.getBoundingClientRect();
      const { width, height } = size;
      d.textWidth = width;
      d.textHeight = height;
      d.rectWidth = d.textWidth + nodePadding.width * 2;
      d.rectHeight = d.textHeight + nodePadding.height * 2;
      d.fullWidth = d.rectWidth + 2;
      d.fullHeight = d.rectHeight + 2;
    })
    .attr('dx', (d) => -d.textWidth / 2)
    .attr('dy', (d) => d.textHeight / 2 - 4);
  node.append('rect')
    .attr('x', (d) => -d.rectWidth / 2)
    .attr('y', (d) => -d.rectHeight / 2)
    .attr('width', (d) => d.rectWidth)
    .attr('height', (d) => d.rectHeight)
    .attr('fill', function(d) {
      const [elementColor] = ArchConfig.ElementColors[d.elementType];
      return elementColor;
    })
    .attr('stroke', '#595959')
    .lower(); // draw the rect behind the text
return node;
}
|
// GG transform for "Round 2": add modulo 2^32, then rotate left by s.
pub fn gg(a: u32, b: u32, c: u32, d: u32, x: u32, s: u32) -> u32 {
    // wrapping_add is equivalent to the original widen-to-u64, mask and
    // truncate dance, but stays in u32 arithmetic.
    a.wrapping_add(Self::g(b, c, d))
        .wrapping_add(x)
        .wrapping_add(ROOT_2)
        .rotate_left(s)
}
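// Context sketch (assumptions, not taken from this file): `gg` presumably
// sits in an impl block next to the round-2 mixing function `g`, with
// ROOT_2 = 0x5A82_7999 (derived from sqrt(2)), as in MD4. A minimal
// illustration of those missing pieces:
//
//     pub const ROOT_2: u32 = 0x5A82_7999;
//
//     impl Hasher {
//         // Majority function: each output bit is set iff it is set in
//         // at least two of b, c, d.
//         pub fn g(b: u32, c: u32, d: u32) -> u32 {
//             (b & c) | (b & d) | (c & d)
//         }
//     }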
|
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/viz/client/frame_eviction_manager.h"
#include <algorithm>
#include <vector>
#include "testing/gtest/include/gtest/gtest.h"
namespace viz {
namespace {
class TestFrameEvictionManagerClient : public FrameEvictionManagerClient {
public:
TestFrameEvictionManagerClient() = default;
~TestFrameEvictionManagerClient() override = default;
// FrameEvictionManagerClient:
void EvictCurrentFrame() override {
FrameEvictionManager::GetInstance()->RemoveFrame(this);
has_frame_ = false;
}
bool has_frame() const { return has_frame_; }
private:
bool has_frame_ = true;
DISALLOW_COPY_AND_ASSIGN(TestFrameEvictionManagerClient);
};
} // namespace
using FrameEvictionManagerTest = testing::Test;
TEST_F(FrameEvictionManagerTest, ScopedPause) {
constexpr int kMaxSavedFrames = 1;
constexpr int kFrames = 2;
FrameEvictionManager* manager = FrameEvictionManager::GetInstance();
manager->set_max_number_of_saved_frames(kMaxSavedFrames);
std::vector<TestFrameEvictionManagerClient> frames(kFrames);
{
FrameEvictionManager::ScopedPause scoped_pause;
for (auto& frame : frames)
manager->AddFrame(&frame, /*locked=*/false);
    // All frames stay because |scoped_pause| holds off frame eviction.
EXPECT_EQ(kFrames,
std::count_if(frames.begin(), frames.end(),
[](const TestFrameEvictionManagerClient& frame) {
return frame.has_frame();
}));
}
// Frame eviction happens when |scoped_pause| goes out of scope.
EXPECT_EQ(kMaxSavedFrames,
std::count_if(frames.begin(), frames.end(),
[](const TestFrameEvictionManagerClient& frame) {
return frame.has_frame();
}));
}
} // namespace viz
|
/**
 * Adapter that backs the main menu grid with {@link RoomObject} items.
 * @author Talal Abou Haiba
 */
class MainMenuAdapter extends BaseAdapter {
private final List<RoomObject> mItems = RoomManagerFactory.getInstance().generateListOfRoomObjects();
private final LayoutInflater mInflater;
public MainMenuAdapter(Context context) {
mInflater = LayoutInflater.from(context);
}
@Override
public int getCount() {
return mItems.size();
}
@Override
public RoomObject getItem(int i) {
return mItems.get(i);
}
@Override
public long getItemId(int i) {
return mItems.get(i).getImageId();
}
@Override
public View getView(int i, View view, ViewGroup viewGroup) {
View v = view;
ImageView picture;
TextView name;
        if (v == null) {
            // Inflate once, then cache child lookups in view tags
            // (a lightweight alternative to the ViewHolder pattern).
            v = mInflater.inflate(R.layout.grid_item, viewGroup, false);
            v.setTag(R.id.picture, v.findViewById(R.id.picture));
            v.setTag(R.id.text, v.findViewById(R.id.text));
        }
picture = (ImageView) v.getTag(R.id.picture);
name = (TextView) v.getTag(R.id.text);
RoomObject item = getItem(i);
picture.setImageResource(item.getImageId());
name.setText(item.toString());
return v;
}
}
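// A minimal usage sketch (the grid view id and calling activity are
// assumptions, not taken from this file):
//
//     GridView grid = activity.findViewById(R.id.main_menu_grid);
//     grid.setAdapter(new MainMenuAdapter(activity));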
|
def FROM_STRING(value):
    """Map a case-insensitive unit name to an Angle member, or None if unknown."""
    if value.upper() == "RADIANS":
        return Angle.RADIANS
    elif value.upper() == "DEGREES":
        return Angle.DEGREES
    return None
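# Context sketch: FROM_STRING appears to expect an Angle enum in scope.
# The enum below is an assumption for illustration, not taken from this file.
from enum import Enum


class Angle(Enum):
    RADIANS = "RADIANS"
    DEGREES = "DEGREES"


assert FROM_STRING("degrees") is Angle.DEGREES
assert FROM_STRING("gradians") is None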
|
// default config
const path = require('path');
const DEFAULT_PROJECT_FILE = 'myst.json';
const DEFAULT_OUT_EXT = '.html';
export interface BuildOptions{
// cwd
cwd?: string;
// filename of myst.json
project?: string;
// ======== mode of work
// enable building
build?: boolean;
// enable watching
watch?: boolean;
// enable server
server?: boolean;
// port
port?: number;
// ========== overwriting ProjectSettings ==========
// force all files to be re-rendered
force?: boolean;
// root directory of page files
rootDir?: string;
// output directory
outDir?: string;
// extension of html files
outExt?: string;
// build target file
target?: Array<string>;
}
// Contents of myst.json
export interface ProjectSettings{
rootDir: string;
outDir: string;
outExt: string;
// force rerendering
force: boolean;
    // location of the data directory
    data?: string;
    // location of the cache file
    cache?: string;
    // list of dependency files
    dependency?: string | Array<string>;
    // files to render
    target?: Array<string>;
    // extensions to add to the Context
    extension?: string | Array<string>;
// options for server
server?: {
port?: number;
contentRoot?: string;
};
}
export interface FoundProject{
projdir: string;
options: BuildOptions;
settings: ProjectSettings;
}
// Fill BuildOptions with default settings
export function defaultBuildOptions(options: BuildOptions): void{
if (!options.cwd){
options.cwd = process.cwd();
}
if (!options.project){
options.project = DEFAULT_PROJECT_FILE;
}
if (options.build == null){
// build defaults to true
options.build = true;
}
}
// BuildOptions may overwrite ProjectSettings
export function overwriteSettings(options: BuildOptions, settings: ProjectSettings): ProjectSettings{
if (options.rootDir){
settings.rootDir = options.rootDir;
}
if (options.outDir){
settings.outDir = options.outDir;
}
if (options.outExt){
settings.outExt = options.outExt;
}else if (!settings.outExt){
settings.outExt = DEFAULT_OUT_EXT;
}
if (options.force != null){
settings.force = options.force;
}
if (Array.isArray(options.target)){
        // build targets are given relative to cwd, so resolve them to absolute paths
        settings.target = options.target.map(f => path.resolve(options.cwd, f));
}
    if (options.server && options.port){
        if (settings.server == null || typeof settings.server !== 'object'){
            settings.server = {};
        }
        settings.server.port = options.port;
    }
return settings;
}
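// A minimal usage sketch (the literals are illustrative; real settings come
// from parsing myst.json, and the function name is hypothetical):
function exampleMerge(): ProjectSettings {
    const options: BuildOptions = { outDir: 'out', server: true, port: 8080 };
    defaultBuildOptions(options);
    // After merging: outDir becomes 'out', outExt stays '.htm',
    // and server.port becomes 8080.
    return overwriteSettings(options, {
        rootDir: 'pages',
        outDir: 'dist',
        outExt: '.htm',
        force: false,
    });
}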
|
/**
 * <p>Creates a VCardError object, sets the specified error information
 * on it, and adds it to the VCard currently being parsed.</p>
 *
 * @see VCardError
 * @see ErrorSeverity
 *
 * @param vcard the vCard being built, which collects the error
 * @param errorMessage a human-readable description of the problem
 * @param exception the underlying cause, or null if there is none
 * @param severity how serious the error is
 */
private void handleError(VCardImpl vcard, String errorMessage, Throwable exception, ErrorSeverity severity) {
VCardError vError = new VCardError();
vError.setErrorMessage(errorMessage);
vError.setSeverity(severity);
if(exception != null) {
vError.setError(exception);
}
vcard.addError(vError);
}
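// Hedged usage sketch (the property name, exception and severity member are
// illustrative assumptions, not taken from this fragment):
//
//     handleError(vcard, "Could not parse the N property", ex, ErrorSeverity.WARNING);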
|
// packages/panda-spinner/demo/index.ts
import { LitElement, html, css } from "lit";
import "../src/panda-spinner";
class DemoPage extends LitElement {
// css styles
static get styles() {
return css`
.spinner-cont {
display: inline-block;
padding: 30px;
background-color: var(--panda-primary-color);
border-radius: 10px;
box-shadow: 0px 1px 2px var(--panda-shadow-50opc);
}
`;
}
protected render() {
return html`
<div class="spinner-cont">
<panda-spinner spinner="dots"></panda-spinner>
</div>
<div class="spinner-cont">
<panda-spinner spinner="circle"></panda-spinner>
</div>
<div class="spinner-cont">
<panda-spinner spinner="video"></panda-spinner>
</div>
<div class="spinner-cont">
<panda-spinner spinner="google"></panda-spinner>
</div>
`;
}
}
window.customElements.define("demo-page", DemoPage);
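// Usage note: the element registered above would typically be mounted from
// the demo's HTML entry point (file name is an assumption) as
// <demo-page></demo-page>.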
|
Property taxpayers in Hennepin County will almost certainly see increases in 2016 nearly triple what they have grown accustomed to in recent years.
County Administrator David Hough on Tuesday proposed an increase of 4.5 percent, $31.2 million, over the current year’s property tax collections. In contrast, for the past six years, taxpayers in the county have seen increases that averaged 1.3 percent.
“The proposed property tax levy reflects significant demands for quality services we provide to our residents,” Hough said. “It is not insignificant, but necessary to maintain the current level of services.”
While Hough presented the number during his annual budget speech to the County Board, he and his staff have worked behind the scenes with the seven board members so the proposal didn’t hit them unexpectedly.
His pitch signaled the beginning of the tax-setting sessions.
The county’s departments will make their own cases to the board in a series of public meetings in the coming months. The first is Wednesday. The budget will be adopted in December.
While the board will make adjustments, the ultimate budget tends to hew closely to the administrator’s recommendation. What Hough’s proposal would mean: The owner of a median-valued suburban home would see a property tax increase of $41 in the county portion of their bill, taking into account an anticipated market-rate increase.
In Minneapolis, the median-valued home would see an increase of $37.
County Budget Director Dave Lawless said the budget reflected the need for catching up in some areas where spending was deferred during the recession that struck in 2008. Notable among those is an increase in Human Services staffing, especially in child protective services.
County Board Chair Jan Callison said the higher percentage increase is a concern because “it’s going to have an impact on residents and business,” but she said the demands for services are “pretty strong.”
Among those demands will be county employee salary increases. “We need to pay them their value,” she said.
Hough proposed increasing the county workforce to the full-time equivalent of 7,966 employees, an increase of 198 from the current year.
He drew broad outlines for the budget, not specific departmental numbers, but said “changing legislative requirements, including changes to the child protection system, critical staffing shortages to address increased service demands, and support for new strategic county initiatives all require more staff in the coming year.”
The budget reflects the county’s mission to “enhance the health, safety and quality of life of our communities,” he said.
The county’s proposed budget is $1.9 billion for next year. The number includes an operating budget of $1.6 billion, an increase of $86 million from the current year.
The capital (brick-and-mortar) portion was proposed at $286 million, an increase of $23 million over the current year.
At the same time, Hough said the county expects to see a decrease in federal and state funding next year, from $428 million to $400 million.
Twitter: @rochelleolson
|
import re
from typing import List


class Solution:
    def spellchecker(self, wordlist: List[str], queries: List[str]) -> List[str]:
"""Hash table.
Running time: O(n + m) where n is the length of queries and m is the length of wordlist.
"""
words = set(wordlist)
lower = {}
devowel = {}
res = []
        # Iterate in reverse so that, for duplicate keys, the earliest word
        # in wordlist wins (later assignments overwrite earlier ones).
        for w in wordlist[::-1]:
            l = w.lower()
            lower[l] = w
            devowel[re.sub('[aeiou]', '#', l)] = w
        for q in queries:
            lq = q.lower()
            dq = re.sub('[aeiou]', '#', lq)
            if q in words:
                res.append(q)
            elif lq in lower:
                res.append(lower[lq])
            elif dq in devowel:
                res.append(devowel[dq])
            else:
                res.append('')
        return res
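# Hedged usage sketch, mirroring LeetCode 966's example data:
#
#     Solution().spellchecker(
#         ["KiTe", "kite", "hare", "Hare"],
#         ["kite", "Kite", "HARE", "Hear", "keti"],
#     )  # -> ["kite", "KiTe", "hare", "", "KiTe"]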
|
import Char (digitToInt)
import List ((\\), nub, sort)
import Data.Map (Map, assocs, empty, fromList, insert, keys)
import Maybe (listToMaybe)
--------------------------------------------------------------------------------
{-
import HUnit
testInput :: Test
testInput = TestList "TestInput"
[
Test "#1" $ assertEq (Just (6,0)) $ solve input1,
Test "#2" $ assertEq Nothing $ solve input2
]
where
input1 = ["AERLAND DERLAND 2:1", "DERLAND CERLAND 0:3", "CERLAND AERLAND 0:1", "AERLAND BERLAND 2:0", "DERLAND BERLAND 4:0"]
input2 = ["AERLAND DERLAND 2:2", "DERLAND CERLAND 2:3", "CERLAND AERLAND 1:3", "AERLAND BERLAND 2:1", "DERLAND BERLAND 4:1"]
testOthers :: Test
testOthers = TestList "TestOthers"
[
Test "#3" $ assertEq (Just (1,0)) $ solve input3,
Test "#4" $ assertEq Nothing $ solve input4,
Test "#5" $ assertEq (Just (15,0)) $ solve input5,
Test "#6" $ assertEq (Just (4,0)) $ solve input6
]
where
input3 = ["EIYLBZCLPBGXJRT BERLAND 7:9", "MWVSPZD BERLAND 4:7", "MWVSPZD EIYLBZCLPBGXJRT 4:8", "VRGN EIYLBZCLPBGXJRT 3:6", "VRGN MWVSPZD 6:0"]
input4 = ["BERLAND ACTKRNTOOHZLAXGQM 2:3", "NAPPIFV ACTKRNTOOHZLAXGQM 4:1", "O ACTKRNTOOHZLAXGQM 6:9", "O BERLAND 4:3", "O NAPPIFV 7:6"]
input5 = ["QHA BERLAND 7:2", "VROOBFARVCFK QHA 5:7", "ZLRZXLRDUKGQM BERLAND 9:3", "ZLRZXLRDUKGQM QHA 7:8", "ZLRZXLRDUKGQM VROOBFARVCFK 0:1"]
input6 = ["PC BERLAND 8:8", "TIEFPKCKZWBWN PC 8:8", "UCU BERLAND 7:6", "UCU PC 8:3", "UCU TIEFPKCKZWBWN 3:9"]
test :: IO ()
test = mapM_ run
[
testInput,
testOthers
]
-}
--------------------------------------------------------------------------------
type Map = Data.Map.Map (String, String) (Int, Int)
data Result = Result String Int Int Int deriving (Eq, Show)
instance Ord Result
where
compare (Result n1 s1 zp1 z1) (Result n2 s2 zp2 z2)
| s1 > s2 = LT
| s1 < s2 = GT
| zp1 > zp2 = LT
| zp1 < zp2 = GT
| z1 > z2 = LT
| z1 < z2 = GT
| otherwise = compare n1 n2
-- Partial Num instance: only (+) and fromInteger 0 are defined, which is
-- all that `sum` needs here; note (+) keeps the name of its second operand.
instance Num Result
    where
        (+) (Result n1 s1 zp1 z1) (Result n2 s2 zp2 z2)
            = Result n2 (s1+s2) (zp1+zp2) (z1+z2)
        fromInteger 0 = Result "" 0 0 0
getName :: Result -> String
getName (Result name _ _ _) = name
--------------------------------------------------------------------------------
berland = "BERLAND"
impossible = "IMPOSSIBLE"
-- Sample data for manual testing in GHCi (the map' parameters below shadow it).
map' :: Main.Map
map' = fromList
[
(("AERLAND", "DERLAND"), (2,1)),
(("DERLAND", "CERLAND"), (0,3)),
(("CERLAND", "AERLAND"), (0,1)),
(("AERLAND", "BERLAND"), (2,0)),
(("DERLAND", "BERLAND"), (4,0))
]
commands :: Main.Map -> [String]
commands map' = nub (map fst keys' ++ (map snd keys'))
where
keys' = keys map'
findNotGameCommand :: Main.Map -> String
findNotGameCommand map' = head $ commands map' \\
(concatMap (\(s1, s2) -> [s1, s2]) $
filter (\(s1, s2) -> s1 == berland || s2 == berland) $ keys map')
result :: Main.Map -> String -> Result
result map' name = sum $ map (flip result' name) $ assocs map'
where
result' :: ((String, String), (Int, Int)) -> String -> Result
result' (names, (n1,n2)) name
| name == fst names = Result name (scope n1 n2) (n1 - n2) n1
| name == snd names = Result name (scope n2 n1) (n2 - n1) n2
| otherwise = Result name 0 0 0
where
scope n1 n2 = case compare n1 n2 of
GT -> 3
LT -> 0
EQ -> 1
results :: Main.Map -> [Result]
results map' = map (result map') (commands map')
berlandFirstOrSecond :: Main.Map -> Bool
berlandFirstOrSecond map' =
((== berland) . getName . head) res
|| ((== berland) . getName . head . tail) res
where
res = sort $ results map'
solve :: [String] -> Maybe (Int, Int)
solve = listToMaybe . solve' . parseLines
where
solve' :: Main.Map -> [(Int, Int)]
solve' map' = [(z, z - zp) | zp <- [1..36], z <- [zp..36],
berlandFirstOrSecond $ insert (berland, findNotGameCommand map') (z, z - zp) map']
parseLines :: [String] -> Main.Map
parseLines = foldl (flip (uncurry insert)) empty . map parseLine
where
parseLine :: String -> ((String, String), (Int, Int))
parseLine line = ((s1, s2), (n1, n2))
where
[s1, s2, s3] = words line
n1 = digitToInt $ s3 !! 0
n2 = digitToInt $ s3 !! 2
printAns :: Maybe (Int, Int) -> IO ()
printAns Nothing = putStrLn impossible
printAns (Just (n1,n2)) = putStrLn $ concat [show n1, ":", show n2]
main :: IO ()
main = getContents >>= printAns . solve . take 5 . lines
|