content
stringlengths 10
4.9M
|
---|
Odessa Games
NEW
contact us at [email protected]
Nickname:
Team: Auto Blue Red
Connecting.. Play
Region:
World: 1 Change
Music: None 1
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 Back
Options Background new old Controller Support On Off
Back
Forgotten your password?
Back
Password recovery
Back
We have emailed you a code.
Please enter below
Back
Register
Back
Commander Username
0 Credits Shop Logout Redeem code
Back Base Fighter Support Utility Heavy Healer M.o.D Sniper Engi Bruiser Frigate The Doctor Mechanic D.o.D aLeRT Stealth Shogun The Moth Suicide Squad Bully Wurship B.R.U.C.E The Wall Indestructibro Jack Hulk ??? Decal Trail Tag Skin Buy Credits Buy Equip Item 1100g
Redeem Code
Accept
Back
This music was generously provided by s9meNINE |
/**
* @return The average keys received from the DHT. Only evaluates rawKeys.
*/
public double avgStoredKeys() {
synchronized (lock) {
final int size = rawResult.size();
int total = 0;
for (Map<Number640, Byte> map : rawResult.values()) {
Collection<Number640> collection = map.keySet();
if (collection != null) {
total += collection.size();
}
}
return total / (double) size;
}
} |
// This file is part of caniuse-serde. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/caniuse-serde/master/COPYRIGHT. No part of predicator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 The developers of caniuse-serde. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/caniuse-serde/master/COPYRIGHT.
/// A simple 'newtype' wrapper that represents a percentage
///
/// Invariant: values constructed via `new`/`From` are clamped to `[0.0, 100.0]`
/// and are never NaN, which is what makes the `Eq`/`Ord`/`Hash` impls sound.
/// NOTE(review): `#[derive(Deserialize)]` deserializes the inner f64 directly
/// and does NOT apply the clamp, so deserialized values may violate this
/// invariant — confirm whether a custom `Deserialize` impl is needed.
#[derive(Deserialize, Debug, Copy, Clone)]
pub struct UsagePercentage(f64);
impl<I: Into<f64>> From<I> for UsagePercentage
{
    /// Converts from anything that can be represented as a f64 into a percentage.
    /// Clamps values below zero (including negative zero and negative infinity) to positive zero.
    /// Clamps NaN as positive zero.
    /// Clamps values above one hundred (including positive infinity) to one hundred.
    #[inline(always)]
    fn from(value: I) -> Self
    {
        let raw: f64 = value.into();
        Self::clamp(raw)
    }
}
impl PartialEq for UsagePercentage
{
    /// Partial equality; total equality is also supported.
    /// Compares the inner f64 values directly.
    #[inline(always)]
    fn eq(&self, other: &Self) -> bool
    {
        self.0 == other.0
    }
}
/// Total equality is sound only because `clamp` excludes NaN from all
/// constructed values (see the NOTE on the struct about `Deserialize`).
impl Eq for UsagePercentage
{
}
impl PartialOrd for UsagePercentage
{
    /// Partial comparison; delegates to the inner f64.
    /// Yields `Some(_)` for every value produced by `clamp`, since `clamp`
    /// excludes NaN.
    #[inline(always)]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering>
    {
        self.0.partial_cmp(&other.0)
    }
}
impl Ord for UsagePercentage
{
    /// Total comparison; always succeeds.
    /// The `unwrap_or(Ordering::Equal)` fallback is only reachable if a NaN
    /// slipped past `clamp` (e.g. via the derived `Deserialize`); in that
    /// case the NaN compares equal to everything rather than panicking.
    #[inline(always)]
    fn cmp(&self, other: &Self) -> Ordering
    {
        self.partial_cmp(other).unwrap_or(Ordering::Equal)
    }
}
impl Hash for UsagePercentage
{
    /// Hashes the IEEE-754 bit pattern of the inner f64 (consistent with
    /// `Eq`, since equal clamped values share a bit pattern).
    #[inline(always)]
    fn hash<H: Hasher>(&self, state: &mut H)
    {
        state.write_u64(self.0.to_bits())
    }
}
impl Display for UsagePercentage
{
/// Displays as a floating point value followed by a '%'
#[inline(always)]
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result
{
write!(fmt, "{}%", self.0)
}
}
impl Default for UsagePercentage
{
    /// Defaults to the minimum, ie zero percent.
    #[inline(always)]
    fn default() -> Self
    {
        Self::Minimum
    }
}
impl Add<UsagePercentage> for UsagePercentage
{
type Output = Self;
/// Add
#[inline(always)]
fn add(self, rhs: Self) -> Self::Output
{
Self::clamp(self.0 + rhs.0)
}
}
impl AddAssign<UsagePercentage> for UsagePercentage
{
    /// In-place saturating addition; delegates to `Add`.
    #[inline(always)]
    fn add_assign(&mut self, rhs: Self)
    {
        *self = *self + rhs
    }
}
impl Sub<UsagePercentage> for UsagePercentage
{
type Output = Self;
/// Subtract
#[inline(always)]
fn sub(self, rhs: Self) -> Self::Output
{
Self::clamp(self.0 - rhs.0)
}
}
impl SubAssign<UsagePercentage> for UsagePercentage
{
    /// In-place saturating subtraction; delegates to `Sub`.
    #[inline(always)]
    fn sub_assign(&mut self, rhs: Self)
    {
        *self = *self - rhs
    }
}
impl Deref for UsagePercentage
{
    type Target = f64;

    /// Dereferences to the inner f64 (the raw percentage in `[0, 100]`).
    #[inline(always)]
    fn deref(&self) -> &Self::Target
    {
        &self.0
    }
}
impl UsagePercentage
{
    /// Represents 0%
    pub const Zero: Self = UsagePercentage(0.0);

    /// Represents 1%
    pub const One: Self = UsagePercentage(1.0);

    /// Represents 0.1%
    pub const OnePerMille: Self = UsagePercentage(0.1);

    /// Represents 0.01%
    pub const OneBasisPoint: Self = UsagePercentage(0.01);

    /// Represents the minimum, 0%; interchangeable with UsagePercentage::Zero
    pub const Minimum: Self = UsagePercentage::Zero;

    /// Represents 100%
    pub const OneHundred: Self = UsagePercentage(100.0);

    /// Represents the maximum, 100%; interchangeable with UsagePercentage::OneHundred
    pub const Maximum: Self = UsagePercentage::OneHundred;

    /// Converts from anything that can be represented as a f64 into a percentage.
    /// Clamps values below zero (including negative zero and negative infinity) to positive zero.
    /// Clamps NaN as positive zero.
    /// Clamps values above one hundred (including positive infinity) to one hundred.
    #[inline(always)]
    pub fn new(value: f64) -> Self
    {
        Self::clamp(value)
    }

    /// Converts to a scalar, ie a percentage divided by 100
    #[inline(always)]
    pub fn to_scalar(self) -> f64
    {
        self.0 / 100.0
    }

    /// Clamps to `[0.0, 100.0]`: NaN and anything with a negative sign bit
    /// (including -0.0 and negative infinity) map to positive zero; anything
    /// above 100 (including positive infinity) maps to 100.
    #[inline(always)]
    fn clamp(value: f64) -> Self
    {
        if value.is_nan() || value.is_sign_negative()
        {
            Self::Zero
        }
        else if value > 100.0
        {
            // `value > 100.0` already covers positive infinity, and after the
            // first branch the sign is known positive, so the previous
            // `is_sign_positive` / `is_infinite` checks were dead conditions.
            Self::OneHundred
        }
        else
        {
            UsagePercentage(value)
        }
    }
}
|
/*====================================================================*\
FUNCTION : ProUtilDisp
PURPOSE : Display a feature tree in an information window
\*====================================================================*/
/* Draws 'text' at the current graphics position (win_data.point) and, if
   move_carret is set, advances the caret past the drawn string.
   NOTE(review): w_str is a fixed 150-wchar buffer and ProStringToWstring is
   given no length bound here, so 'text' longer than 149 characters would
   overflow it -- confirm that callers limit the string length. */
static ProError ProUtilDisp(char *text, ProBool move_carret)
{
    ProError err;
    wchar_t w_str[150];  /* wide-char copy of 'text' for the graphics API */
    ProTextAttribute attribute, new_attribute;
    double char_length;

    /* Snapshot the current text attributes; new_attribute is a copy with a
       fixed height/width factor used only for the caret computation below. */
    err = ProTextAttributesCurrentGet(&attribute);
    TEST_CALL_REPORT("ProTextAttributesCurrentGet","ProUtilDisp",
        err, err!=PRO_TK_NO_ERROR);
    new_attribute = attribute;
    new_attribute.height = 15;
    new_attribute.width_factor = 0.75;
/* Attribute override is currently compiled out; presumably disabled on
   purpose -- confirm before re-enabling. */
#if 0
    err = ProTextHeightCurrentSet(new_attribute.height);
    TEST_CALL_REPORT("ProTextHeightCurrentSet","ProUtilDisp",
        err, err!=PRO_TK_NO_ERROR);
    err = ProTextWidthFactorCurrentSet(new_attribute.width_factor);
    TEST_CALL_REPORT("ProTextWidthFactorCurrentSet","ProUtilDisp",
        err, err!=PRO_TK_NO_ERROR);
#endif
    /* Convert to the wide string required by the toolkit and draw it. */
    ProStringToWstring(w_str, text);
    err = ProGraphicsTextDisplay(win_data.point, w_str);
    TEST_CALL_REPORT("ProGraphicsTextDisplay","ProUtilDisp",
        err, err!=PRO_TK_NO_ERROR);
/* Restoring the original attributes is likewise compiled out. */
#if 0
    err = ProTextHeightCurrentSet(attribute.height);
    TEST_CALL_REPORT("ProTextHeightCurrentSet","ProUtilDisp",
        err, err!=PRO_TK_NO_ERROR);
    err = ProTextWidthFactorCurrentSet(attribute.width_factor);
    TEST_CALL_REPORT("ProTextWidthFactorCurrentSet","ProUtilDisp",
        err, err!=PRO_TK_NO_ERROR);
#endif
    if (move_carret == PRO_B_TRUE)
    {
        /* Advance the caret by an estimated string width; presumably
           2 * height * width_factor approximates one character cell --
           TODO confirm against the toolkit's text metrics. */
        char_length = new_attribute.height * new_attribute.width_factor;
        win_data.point[0] += 2 * char_length * (double)strlen(text);
    }
    /* Status of the last toolkit call made above. */
    return err;
}
/**
* Provides the functionality to communicate with the database and perform queries
* pertaining to Discussions
*
* @author Tyler Haigh - C3182929
* @author Simon Hartcher - C3185790
* @author Josh Crompton - C3165877
*
*/
public class DiscussionManager extends DataManager {
private final Logger logger = Logger.getLogger("rgms.datacontext.DiscussionManager");
/**
* Creates a Discussion Thread in the database
* @param discussion The Discussion to insert
*/
public void createDiscussion(DiscussionThread discussion) {
try {
//Create a prepared statement
PreparedStatement pstmt = conn.prepareStatement(
"INSERT INTO DiscussionThreads (GroupId, ThreadName)" +
"VALUES (?, ?)", Statement.RETURN_GENERATED_KEYS);
//Set the required parameters and execute
pstmt.setInt(1, discussion.getGroupId());
pstmt.setString(2, discussion.getThreadName());
pstmt.executeUpdate();
//Get the generated id
ResultSet rs = pstmt.getGeneratedKeys();
if (rs.next())
discussion.setId(rs.getInt(1));
}
catch (Exception e) {
logger.log(Level.SEVERE, "SQL Error", e);
}
}
/**
* Creates a Discussion Post in the database
* @param post The Discussion Post to insert
*/
public void createPost(DiscussionPost post) {
try {
//Create a prepared statement
PreparedStatement pstmt = conn.prepareStatement(
"INSERT INTO DiscussionPosts (ThreadId, UserId, Message)" +
"VALUES (?, ?, ?)", Statement.RETURN_GENERATED_KEYS);
//Set the required parameters and execute
pstmt.setInt(1, post.getThreadId());
pstmt.setInt(2, post.getUserId());
pstmt.setString(3, post.getMessage());
pstmt.executeUpdate();
//get the generated id
ResultSet rs = pstmt.getGeneratedKeys();
if (rs.next())
post.setId(rs.getInt(1));
}
catch (Exception e) {
logger.log(Level.SEVERE, "SQL Error", e);
}
}
/**
* Retrieves all of the Discussion Threads associated with a Group Id
*
* @param groupId The Group Id to get Threads for
* @return A List of Discussion Threads that belong to the Group
*/
public List<DiscussionThread> getThreads(int groupId) {
ArrayList<DiscussionThread> threads = new ArrayList<>();
try {
//Create a prepared statement
PreparedStatement pstmt = conn.prepareStatement(
"SELECT * FROM DiscussionThreads WHERE GroupId = ?");
//Set the required parameters and execute
pstmt.setInt(1, groupId);
ResultSet rs = pstmt.executeQuery();
//Get the results and add to the list
if (rs.isBeforeFirst()) {
while (!rs.isAfterLast()) {
DiscussionThread thread = DiscussionThread.fromResultSet(rs);
if (thread != null) {
threads.add(thread);
}
}
}
}
catch (Exception e) {
logger.log(Level.SEVERE, "SQL Error", e);
}
return threads;
}
/**
* Retrieves a single thread based on its Id
* @param threadId The Id of the Thread
* @return The Thread with the given Id
*/
public DiscussionThread getThread(int threadId) {
DiscussionThread thread = null;
try {
//Create a prepared statement
PreparedStatement pstmt = conn.prepareStatement(
"SELECT * FROM DiscussionThreads WHERE Id = ?");
//Set the required parameters and execute
pstmt.setInt(1, threadId);
ResultSet rs = pstmt.executeQuery();
thread = DiscussionThread.fromResultSet(rs);
}
catch (Exception e) {
logger.log(Level.SEVERE, "SQL Error", e);
}
return thread;
}
/**
* Retrieves a List of Discussion Posts for a given Thread Id
* @param threadId The Id of the Thread to query
* @return A List of Discussion Posts for the Thread
*/
public List<DiscussionPost> getPosts(int threadId) {
ArrayList<DiscussionPost> posts = new ArrayList<>();
try {
//Create a prepared statement
PreparedStatement pstmt = conn.prepareStatement(
"SELECT * FROM DiscussionPosts WHERE ThreadId = ?");
//Set the required parameters adn execute
pstmt.setInt(1, threadId);
ResultSet rs = pstmt.executeQuery();
//Retrieve the results and add to the list
if (rs.isBeforeFirst()) {
while (!rs.isAfterLast()) {
DiscussionPost post = DiscussionPost.fromResultSet(rs);
if (post != null)
posts.add(post);
}
}
}
catch (Exception e) {
logger.log(Level.SEVERE, "SQL Error", e);
}
return posts;
}
} |
import React, { FC } from "react";
import Lottie from "lottie-react";
import loadingAnim from "./anim.json";
import Styles from "./Loading.module.css";
// Full-screen loading indicator that plays the bundled Lottie animation.
const Loading: FC = () => {
  return (
    <div className={Styles["blui-ldr"]}>
      <div className="w-64">
        <Lottie loop autoplay animationData={loadingAnim} />
      </div>
    </div>
  );
};

export { Loading };
|
/**
* Metodo necessario para auxiliar no algoritmo de conjuntoAnulavel
* testa se em uma determinada producao existe apenas terminais
* @param s producao a ser analisada
* @return true caso exista apenas terminais, false caso exista pelo menos um nao terminal.
*/
public boolean isOnlyTerminal(String s){
for( int i = 0; i < s.length(); i++)
if(!terminais.contains(s.charAt(i)+"") && s.charAt(i) != '?')
return false;
return true;
} |
def git_checkout(self, component, git_url, git_branch, update):
    """Clone (or update) the git checkout for *component*.

    When *update* is true the existing checkout is pruned and pulled
    (optionally cleaned first); otherwise a fresh clone of *git_branch*
    is made. Returns the commit hash of HEAD after the operation.
    """
    srcdir = self.component_srcdir(component)
    if update:
        subprocess.run(['git', 'remote', 'prune', 'origin'],
                       cwd=srcdir, check=True)
        if self.replace_sources:
            # Drop any local modifications before pulling.
            subprocess.run(['git', 'clean', '-dxfq'],
                           cwd=srcdir, check=True)
        subprocess.run(['git', 'pull', '-q'],
                       cwd=srcdir, check=True)
    else:
        subprocess.run(['git', 'clone', '-q', '-b', git_branch, git_url,
                        srcdir], check=True)
    head = subprocess.run(['git', 'rev-parse', 'HEAD'],
                          cwd=srcdir,
                          stdout=subprocess.PIPE,
                          check=True, universal_newlines=True).stdout
    return head.rstrip()
def main():
    """Command-line entry point: read ELAN (.eaf) files from a directory and
    write the extracted annotation data to a JSON file."""
    parser: argparse.ArgumentParser = argparse.ArgumentParser(
        description="This script takes an directory with ELAN files and "
                    "saves the audio and output text in JSON format to a file")
    # (flags, add_argument keyword options) pairs, registered in order below.
    argument_specs = [
        (("-i", "--input_dir"),
         dict(help="Directory of dirty audio and eaf files", default="./input/")),
        (("-o", "--output_dir"),
         dict(help="Output directory", default="./output/")),
        (("-j", "--output_json"),
         dict(help="File path to output json", default="elan.json")),
        (("-r", "--tier_order"),
         dict(help="Source tier order", type=int, default=0)),
        (("-n", "--tier_name"),
         dict(help="Source tier name", default="Phrase")),
        (("-t", "--tier_type"),
         dict(help="Source tier type", default="default-lt")),
    ]
    for flags, options in argument_specs:
        parser.add_argument(*flags, **options)
    arguments: argparse.Namespace = parser.parse_args()

    if not os.path.exists(arguments.output_dir):
        os.makedirs(arguments.output_dir)

    # Recursively collect every .eaf file under the input directory.
    search_pattern = os.path.join(arguments.input_dir, "**")
    input_elan_files = [path
                        for path in glob.glob(search_pattern, recursive=True)
                        if path.endswith(".eaf")]

    annotations_data = []
    for input_elan_file in input_elan_files:
        annotations_data.extend(process_eaf(input_elan_file=input_elan_file,
                                            tier_order=arguments.tier_order,
                                            tier_name=arguments.tier_name,
                                            tier_type=arguments.tier_type))
    # Deterministic output order, keyed by the audio file name.
    annotations_data.sort(key=lambda x: x["audio_file_name"], reverse=False)

    write_data_to_json_file(data=annotations_data,
                            output=os.path.join(arguments.output_dir, arguments.output_json))
<reponame>StuyPulse/PEGasus1
package edu.stuy.commands;
import edu.stuy.Robot;
import edu.wpi.first.wpilibj.command.Command;
/**
 * Controls the lift, using the right joystick of the operator pad
 */
public class LiftControlCommand extends Command {

    public LiftControlCommand() {
        requires(Robot.lift);
    }

    protected void initialize() {
    }

    protected void execute() {
        double rightStick = Robot.oi.operatorPad.getRightY();
        boolean stickMoved = Math.abs(rightStick) > 0.0;
        if (stickMoved) {
            // Any lift movement first retracts the tote knocker.
            Robot.toteknocker.retract();
        }
        // Square the stick for finer low-speed control, cap output at 75%.
        double scaled = squareInput(rightStick) * 0.75;
        Robot.lift.manualControl(-scaled);
    }

    /** Squares the magnitude of the input while preserving its sign. */
    private double squareInput(double input) {
        double squared = input * input;
        return (input < 0) ? -squared : squared;
    }

    // Make this return true when this Command no longer needs to run execute()
    protected boolean isFinished() {
        // This is the Lift's default command.
        return false;
    }

    // Called once after isFinished returns true
    protected void end() {
    }

    // Called when another command which requires one or more of the same
    // subsystems is scheduled to run
    protected void interrupted() {
    }
}
|
Tilting at Windmills
Harnessing wind energy is one of the pressing challenges of our time. The scale, complexity, and robustness of wind power systems present compelling cyber-physical system design issues. In response to these emerging challenges, Purdue has initiated strategic partnerships for infrastructure development. Working with the US Department of Energy, Purdue is developing physical infrastructure for an experimental wind farm on campus. Separately, General Electric has committed to building 60 wind turbines with total capacity exceeding 100 MW on land owned by Purdue. In return, GE will allow three turbines to be utilized for research and education, along with access to unrestricted data from all other turbines. This represents the most comprehensive experimental and operational wind energy infrastructure available for academic research today. |
def add_host(self, host_config: Dict):
    """Register a new host.

    The first key of *host_config* is taken as the hostname and appended to
    the known hosts; the whole mapping is merged into the existing host
    configuration (new keys win on collision).
    """
    hostname = list(host_config)[0]
    self.connections.hosts = [*self.connections.hosts, hostname]
    merged_config = {**self.connections.host_config, **host_config}
    self.connections.host_config = merged_config
/**
* Created by lintzuhsiu on 14/11/15.
*/
public class TSLScrollView extends ScrollView {
private View firstView;
private View headerView;
private int currentScrollY;
private boolean isHeaderFixed;
public TSLScrollView(Context context) {
this(context, null);
}
public TSLScrollView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public TSLScrollView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
super.onLayout(changed, l, t, r, b);
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
if (headerView != null && firstView != null) {
if (isHeaderFixed) {
headerView.layout(0, currentScrollY, headerView.getWidth(),
currentScrollY + headerView.getHeight());
} else {
int firstViewHeight = firstView.getMeasuredHeight();
headerView.layout(0, firstViewHeight, headerView.getWidth(), firstViewHeight + headerView.getHeight());
}
}
invalidate();
}
/*
* 當 internal scroll view 改變
* @param l Current horizontal scroll origin.
* @param t Current vertical scroll origin.
* @param oldl Previous horizontal scroll origin.
* @param oldt Previous vertical scroll origin.
* */
@Override
protected void onScrollChanged(int l, int t, int oldl, int oldt) {
super.onScrollChanged(l, t, oldl, oldt);
ViewGroup viewGroup = (ViewGroup) this.getChildAt(0);
firstView = viewGroup.getChildAt(0);
headerView = viewGroup.getChildAt(2);
currentScrollY = t;
firstView.scrollTo(0, -(int)(t * 0.2));
isHeaderFixed = t >= firstView.getMeasuredHeight() ? true : false;
}
} |
/**
* Certain color values of a given color.
*
* @name Date Values
* @type EXPRESSION
* @pattern [the] (hex[adecimal]|red|green|blue|alpha) value of %color%
* @pattern %color%'[s] (hex[adecimal]|red|green|blue|alpha)
* @since ALPHA
* @author Mwexim
*/
public class ExprColorValues extends PropertyExpression<Object, Color> {
static {
Parser.getMainRegistration().addPropertyExpression(
ExprColorValues.class,
Object.class,
"colors",
"(0:hex[adecimal]|1:red|2:green|3:blue|4:alpha) value"
);
}
private int mark;
@Override
public boolean init(Expression<?>[] expressions, int matchedPattern, ParseContext parseContext) {
mark = parseContext.getNumericMark();
return super.init(expressions, matchedPattern, parseContext);
}
@Override
public Object getProperty(Color owner) {
switch (mark) {
case 0:
return owner.getHex();
case 1:
return BigInteger.valueOf(owner.getRed());
case 2:
return BigInteger.valueOf(owner.getGreen());
case 3:
return BigInteger.valueOf(owner.getBlue());
case 4:
return BigInteger.valueOf(owner.getAlpha());
default:
throw new IllegalStateException();
}
}
@Override
public Class<?> getReturnType() {
return mark == 0 ? String.class : BigInteger.class;
}
@Override
public String toString(TriggerContext ctx, boolean debug) {
return toString(ctx, debug, new String[] {"hex", "red", "green", "blue", "alpha"}[mark] + " value");
}
} |
import json
import asyncio
import websockets
from .utils import newuid, new_node, ham_mix
from jumpscale import j
# Alias jumpscale's config-managed base class for brevity.
JSConfigBase = j.tools.configmanager.base_class_config
def format_put_request(soul, **kwargs):
    """Build a gun 'put' message storing the given fields under *soul*."""
    return {
        '#': newuid(),
        'put': {soul: new_node(soul, **kwargs)},
    }
def format_get_request(soul):
    """Build a gun 'get' message requesting the node identified by *soul*."""
    return {
        '#': newuid(),
        'get': {'#': soul},
    }
class Memory:
    """Trivial in-memory node store.

    Layout: soul -> {field: value, ...} with per-field state metadata kept
    under the reserved '_' key (soul -> {'_': {field: state}}).
    """

    def __init__(self):
        self.db = {}

    def put(self, soul, key, value, state):
        # Create the node (with its metadata dict) on first write.
        node = self.db.setdefault(soul, {'_': {}})
        node[key] = value
        node['_'][key] = state

    def get(self, soul, key=None):
        """Return the node for *soul* wrapped in a gun envelope, or a single
        field of it when *key* is a string. Unknown souls yield an empty
        envelope."""
        envelope = {'#': soul, '_': {'#': soul, '>': {}}}
        node = self.db.get(soul)
        if node is None:
            return envelope
        merged = {**envelope, **node}
        if key and isinstance(key, str):
            return merged.get(key, {})
        return merged

    def __setitem__(self, k, v):
        self.db[k] = v

    def __getitem__(self, k):
        return self.db[k]

    def list(self):
        return self.db.items()
# Default TOML config template for GunClient instances managed by the
# configmanager; the trailing underscore on password_ marks it as a secret.
TEMPLATE = """
addr = ""
port = 8080
base_url = ""
user = ""
password_ = ""
"""
class GunClient(JSConfigBase):
    """Minimal gun.js-style client: exchanges JSON messages with a gun server
    over a websocket and mirrors received nodes into a local Memory backend."""

    def __init__(self, instance, data=None, parent=None, interactive=True):
        # Avoid a shared mutable default argument.
        if data is None:
            data = {}
        JSConfigBase.__init__(self, instance=instance, data=data,
                              parent=parent, template=TEMPLATE, interactive=interactive)
        self.ws = None           # no persistent connection; one is opened per request
        self.backend = Memory()  # local cache of nodes received from the server

    @property
    def wsendpoint(self):
        # Websocket URL of the gun endpoint, e.g. ws://host:port/gun
        return 'ws://{addr}:{port}/gun'.format(addr=self.config.data['addr'], port=self.config.data['port'])

    async def put(self, soul, **kwargs):
        """Send a 'put' for *soul* and return the server's raw (JSON string) reply."""
        async with websockets.connect(self.wsendpoint) as ws:
            ch = format_put_request(soul, **kwargs)
            ch_str = json.dumps(ch)
            await ws.send(ch_str)
            resp = await ws.recv()
            return resp

    async def get(self, soul, key=None):
        """Fetch *soul* from the server, merge the resulting diff into the
        local backend, and return the merged node (or just *key* of it)."""
        async with websockets.connect(self.wsendpoint) as ws:
            ch = format_get_request(soul)
            ch_str = json.dumps(ch)
            await ws.send(ch_str)
            resp = await ws.recv()
            loaded = json.loads(resp)
            change = loaded['put']
            soul = loaded['#']
            # Conflict-resolve the incoming change against the local backend.
            diff = ham_mix(change, self.backend)
            # NOTE(review): this ack dict rebinds 'resp' but is never sent or
            # returned -- presumably an unfinished acknowledgement; confirm.
            resp = {'@':soul, '#':newuid(), 'ok':True}
            # NOTE(review): the loop variable shadows the outer 'soul', so the
            # final backend.get() uses the last soul in the diff -- confirm intent.
            for soul, node in diff.items():
                for k, v in node.items():
                    if k == "_":
                        continue
                    self.backend.put(soul, k, v, diff[soul]['_']['>'][k])
            res = self.backend.get(soul, key) or 'nil'
            return res
|
<filename>src/Parser.hs<gh_stars>0
{-# LANGUAGE OverloadedStrings #-}
module Parser
( parseRPN
, infixToRPN
)
where
import Control.Monad
import Data.Char
import Data.Text ( Text )
import qualified Data.Text as T
import qualified Data.Text.Read as TR
import Utils
-- | Convert a whitespace-separated infix expression to RPN via the
-- shunting-yard algorithm (see 'shunt').
infixToRPN :: Text -> Text
infixToRPN = T.unwords . shunt ([], []) . T.words
-- | Shunting-yard core: threads an (output, operator-stack) pair over the
-- token list. Output is accumulated in reverse; at the end it is reversed
-- and any remaining stacked operators are appended.
shunt :: ([Text], [Operator]) -> [Text] -> [Text]
shunt (out, ops) [] = reverse out ++ ops
shunt (out, ops) (x : rest) | isOperator x = shunt (shuntOp (out, ops) x) rest
                            | otherwise = shunt (x : out, ops) rest
-- | Push one operator onto the stack, first popping to the output any stacked
-- operators that must apply before it:
--   * ")" pops everything down to (and discards) the matching "(",
--   * strictly-higher precedence (->-), or equal precedence (-=-) with a
--     left-associative incoming operator, pops the stack top,
--   * a stacked function pops unless the incoming token is "(".
-- NOTE(review): predicates and precedence operators come from Utils.
shuntOp :: ([Text], [Operator]) -> Operator -> ([Text], [Operator])
shuntOp (out, "(" : ops) ")" = (out, ops)
shuntOp (out, op : ops ) ")" = shuntOp (op : out, ops) ")"
shuntOp (out, op : ops) x
  | (op ->- x && (not . isFunction) x)
    || (op -=- x && isLeftAssociative x)
    || (isFunction op && x /= "(")
  = shuntOp (op : out, ops) x
  | otherwise
  = (out, x : op : ops)
shuntOp (out, ops) op = (out, op : ops)
-- | Parse an RPN token stream into a single function of one variable.
-- Fails when the stack does not reduce to exactly one function.
parseRPN :: Text -> Either Text DFunc
parseRPN text = case (foldM parse [] . T.words) text of
    Right [fun] -> Right fun
    Right _ -> Left "input is not a single function"
    Left msg -> Left msg
-- | Fold one RPN token into the stack of built-up functions: constants push
-- constant functions, function/operator names consume stack entries, a
-- leading letter pushes the identity (the variable), and anything else is
-- parsed as a numeric literal.
parse :: [DFunc] -> Text -> Either Text [DFunc]
parse fs x
  | x == "e" = Right $ const (exp 1) : fs
  | x == "pi" = Right $ const pi : fs
  | isFunction x && not (null fs) = Right $ parseFun fs x
  | isOperator x && length fs > 1 = Right $ parseOp fs x
  | isLetter $ T.head x = Right $ id : fs
  | otherwise = case TR.double x of
      Right (n, _) -> Right $ const n : fs
      Left msg -> Left $ T.pack msg
-- | Compose a named unary function with the function on top of the stack.
-- NOTE(review): partial -- a function name accepted by isFunction but not
-- matched here (or an empty stack) is a runtime pattern-match failure;
-- confirm the name sets agree.
parseFun :: [DFunc] -> Text -> [DFunc]
parseFun (fx : hs) "ln" = (log . fx) : hs
parseFun (fx : hs) "sin" = (sin . fx) : hs
parseFun (fx : hs) "cos" = (cos . fx) : hs
parseFun (fx : hs) "tan" = (tan . fx) : hs
parseFun (fx : hs) "cot" = (cot . fx) : hs
-- | Combine the top two stack functions with a binary operator. For the
-- non-commutative operators (-, /, ^) the deeper entry gx is the left
-- operand, matching RPN order. The combinators (\*, \+, ...) come from Utils.
-- NOTE(review): partial like parseFun -- an unmatched operator or short
-- stack is a runtime pattern-match failure; callers guard the stack depth.
parseOp :: [DFunc] -> Text -> [DFunc]
parseOp (fx : gx : hs) "*" = (fx \* gx) : hs
parseOp (fx : gx : hs) "+" = (fx \+ gx) : hs
parseOp (fx : gx : hs) "-" = (gx \- fx) : hs
parseOp (fx : gx : hs) "/" = (gx \/ fx) : hs
parseOp (fx : gx : hs) "^" = (gx \^ fx) : hs
|
<reponame>henrisusanto/loyalty-app
import './loyaltycore.eventregister'
// import './fnbtransaction.eventregister'
// import './birthdaypoint.eventregister'
// etc.. |
def forward(self, X, y):
    """Binary logistic-regression objective.

    Computes predictions sigmoid(X @ W), then returns a tuple (Phi, L2):
    Phi is the summed binary cross-entropy loss against targets y, and L2 is
    the squared L2 norm of the (squeezed) weight vector self.W.
    """
    eps = 1e-5
    logits = torch.matmul(X, self.W)
    probs = torch.sigmoid(logits)
    # Keep probabilities away from 0/1 so the logs below stay finite.
    probs = torch.clamp(probs, eps, 1 - eps)
    cross_entropy = -(y * torch.log(probs) + (1 - y) * torch.log(1 - probs))
    Phi = torch.sum(cross_entropy)
    weights = torch.squeeze(self.W)
    L2 = torch.sum(weights * weights)
    return (Phi, L2)
A passive lossless snubber cell with minimum stress and wide soft-switching range
A passive lossless snubber cell and its dual structure for reducing the switching loss of a range of switching converters are presented. The proposed snubber cell has several advantages over existing snubbering techniques. First, it provides zero-current-switching (ZCS) and zero-voltage-switching (ZVS) conditions for turning on and off, respectively, the switch over a wide load range. Second, it does not introduce extra voltage stress on the switch. Third, by taking the ripple current through the switch into account, the peak switch current during the snubber resonance period is designed to be less than the designed switch current without the snubber. Hence, the proposed snubber does not introduce extra current stress on the switch. The operating principle, procedure of designing the values of the components and soft-switching range of the snubber will be given. Connections of the snubber cells to different switching converters will be depicted. A performance comparison among the proposed snubber and two previously studied snubber cells will be addressed. The proposed snubber has been successfully applied to an example of a 200W, 380V/24V, 100kHz two-switch flyback converter operating in continuous conduction modes. Experimental results are in good agreement with the theoretical predictions. |
// A server object maps incoming RPC requests to a provided service interface.
// The service interface methods are executed inside a worker thread.
// Non-thread-safe.
class server : boost::noncopyable {
 public:
  server();
  ~server();

  // Registers a service under its default name.
  void register_service(service& service);
  // Registers a service under an explicit name.
  void register_service(service& service, const std::string& name);
  // Binds the server to the given transport endpoint.
  void bind(const std::string& endpoint);
  void register_rpc_service(rpc_service* rpc_service, const std::string& name);

 private:
  scoped_ptr<server_impl> impl_;
};  // FIX: the class declaration was missing its terminating semicolon.
<reponame>liugjin/videoservice
import { Sink } from '../component';
export interface MediaTrack {
type: string;
encoding?: string;
mime?: string;
codec?: any;
}
export declare class MseSink extends Sink {
private _videoEl;
private _done?;
private _lastCheckpointTime;
onSourceOpen?: (mse: MediaSource, tracks: MediaTrack[]) => void;
/**
* Create a Media component.
*
* The constructor sets up two streams and connects them to the MediaSource.
*
* @param {MediaSource} mse - A media source.
*/
constructor(el: HTMLVideoElement);
/**
* Add a new sourceBuffer to the mediaSource and remove old ones.
* @param {HTMLMediaElement} el The media element holding the media source.
* @param {MediaSource} mse The media source the buffer should be attached to.
* @param {String} [mimeType='video/mp4; codecs="avc1.4D0029, mp4a.40.2"'] [description]
*/
addSourceBuffer(el: HTMLVideoElement, mse: MediaSource, mimeType: string): SourceBuffer;
get currentTime(): number;
play(): Promise<void>;
pause(): void;
}
|
/**
* Classify documents, run trials, print statistics from a vector file.
@author Andrew McCallum <a href="mailto:[email protected]">[email protected]</a>
*/
public abstract class Vectors2Classify
{
private static Logger logger = MalletLogger.getLogger(Vectors2Classify.class.getName());
private static Logger progressLogger = MalletProgressMessageLogger.getLogger(Vectors2Classify.class.getName() + "-pl");
// Trainers accumulated from repeated --trainer options (raw type retained
// for source compatibility with existing callers).
private static ArrayList classifierTrainers = new ArrayList();
// ReportOptions[dataset][report] is true when that (dataset, report) pair
// was selected via --report; indices follow the ReportOption constants.
private static boolean[][] ReportOptions = new boolean[3][4];
private static String[][] ReportOptionArgs = new String[3][4]; //arg in dataset:reportOption=arg
// Essentially an enum mapping string names to enums to ints.
private static class ReportOption
{
    // Valid dataset selectors for --report; index matches the constants below.
    static final String[] dataOptions = {"train", "test", "validation"};
    // Valid report kinds for --report; index matches the constants below.
    static final String[] reportOptions = {"accuracy", "f1", "confusion", "raw"};
    static final int train=0;
    static final int test =1;
    static final int validation=2;
    static final int accuracy=0;
    static final int f1=1;
    static final int confusion=2;
    static final int raw=3;
}
// --report option, e.g. "--report test:accuracy train:f1=someLabel".
// Each argument is parsed into the ReportOptions/ReportOptionArgs tables.
static CommandOption.SpacedStrings report = new CommandOption.SpacedStrings
(Vectors2Classify.class, "report", "[train|test|validation]:[accuracy|f1|confusion|raw]",
 true, new String[] {"test:accuracy", "test:confusion", "train:accuracy"},
 "", null)
{
    public void postParsing (CommandOption.List list)
    {
        // NOTE(review): unused -- presumably intended for raw-report formatting.
        java.lang.String defaultRawFormatting = "siw";
        for (int argi=0; argi<this.value.length; argi++){
            // convert options like --report train:accuracy --report test:f1=labelA to
            // boolean array of options.
            // first, split the argument at colon/equals.
            //System.out.println(argi + " " + this.value[argi]);
            java.lang.String arg = this.value[argi];
            java.lang.String fields[] = arg.split("[:=]");
            java.lang.String dataSet = fields[0];
            java.lang.String reportOption = fields[1];
            java.lang.String reportOptionArg = null;
            if (fields.length >=3){
                reportOptionArg = fields[2];
            }
            //System.out.println("Report option arg " + reportOptionArg);
            //find the datasource (test,train,validation)
            boolean foundDataSource = false;
            int i=0;
            for (; i<ReportOption.dataOptions.length; i++){
                if (dataSet.equals(ReportOption.dataOptions[i])){
                    foundDataSource = true;
                    break;
                }
            }
            if (!foundDataSource){
                throw new IllegalArgumentException("Unknown argument = " + dataSet + " in --report " + this.value[argi]);
            }
            //find the report option (accuracy, f1, confusion, raw)
            boolean foundReportOption = false;
            int j=0;
            for (; j<ReportOption.reportOptions.length; j++){
                if (reportOption.equals(ReportOption.reportOptions[j])){
                    foundReportOption = true;
                    break;
                }
            }
            if (!foundReportOption){
                throw new IllegalArgumentException("Unknown argument = " + reportOption + " in --report " + this.value[argi]);
            }
            //Mark the (dataSet,reportOption) pair as selected
            ReportOptions[i][j] = true;
            if (j == ReportOption.f1){
                // make sure a label was specified for f1
                if (reportOptionArg == null){
                    throw new IllegalArgumentException("F1 must have label argument in --report " + this.value[argi]);
                }
                // Pass through the string argument
                ReportOptionArgs[i][j]= reportOptionArg;
            }else if (reportOptionArg != null){
                throw new IllegalArgumentException("No arguments after = allowed in --report " + this.value[argi]);
            }
        }
    }
};
// --trainer option: builds a ClassifierTrainer from a constructor spec such
// as "MaxEnt,gaussianPriorVariance=10,numIterations=20". The first field
// names the trainer; the rest are name=value pairs mapped onto setters via
// reflection, with values evaluated by the BeanShell interpreter.
static CommandOption.Object trainerConstructor = new CommandOption.Object
(Vectors2Classify.class, "trainer", "ClassifierTrainer constructor", true, new NaiveBayesTrainer(),
 "Java code for the constructor used to create a ClassifierTrainer. "+
 "If no '(' appears, then \"new \" will be prepended and \"Trainer()\" will be appended."+
 "You may use this option mutiple times to compare multiple classifiers.", null)
{
    public void parseArg (java.lang.String arg) {
        // parse something like Maxent,gaussianPriorVariance=10,numIterations=20
        //System.out.println("Arg = " + arg);
        // first, split the argument at commas.
        java.lang.String fields[] = arg.split(",");
        //Massage constructor name, so that MaxEnt, MaxEntTrainer, new MaxEntTrainer()
        // all call new MaxEntTrainer()
        java.lang.String constructorName = fields[0];
        if (constructorName.indexOf('(') != -1) // if it contains (), pass it through
            super.parseArg(arg);
        else {
            if (constructorName.endsWith("Trainer")){
                super.parseArg("new " + constructorName + "()"); // add parens if they forgot
            }else{
                super.parseArg("new "+constructorName+"Trainer()"); // make trainer name from classifier name
            }
        }
        // find methods associated with the class we just built
        Method methods[] = this.value.getClass().getMethods();
        // find setters corresponding to parameter names.
        for (int i=1; i<fields.length; i++){
            java.lang.String nameValuePair[] = fields[i].split("=");
            java.lang.String parameterName = nameValuePair[0];
            java.lang.String parameterValue = nameValuePair[1]; //todo: check for val present!
            java.lang.Object parameterValueObject;
            try {
                // Evaluate the value text as Java via BeanShell so numbers,
                // strings and object expressions all work.
                parameterValueObject = getInterpreter().eval(parameterValue);
            } catch (bsh.EvalError e) {
                throw new IllegalArgumentException ("Java interpreter eval error on parameter "+
                    parameterName + "\n"+e);
            }
            boolean foundSetter = false;
            for (int j=0; j<methods.length; j++){
                // System.out.println("method " + j + " name is " + methods[j].getName());
                // System.out.println("set" + Character.toUpperCase(parameterName.charAt(0)) + parameterName.substring(1));
                // Match a one-argument setter named set<ParameterName>.
                if ( ("set" + Character.toUpperCase(parameterName.charAt(0)) + parameterName.substring(1)).equals(methods[j].getName()) &&
                    methods[j].getParameterTypes().length == 1){
                    // System.out.println("Matched method " + methods[j].getName());
                    // Class[] ptypes = methods[j].getParameterTypes();
                    // System.out.println("Parameter types:");
                    // for (int k=0; k<ptypes.length; k++){
                    // System.out.println("class " + k + " = " + ptypes[k].getName());
                    // }
                    try {
                        java.lang.Object[] parameterList = new java.lang.Object[]{parameterValueObject};
                        // System.out.println("Argument types:");
                        // for (int k=0; k<parameterList.length; k++){
                        // System.out.println("class " + k + " = " + parameterList[k].getClass().getName());
                        // }
                        methods[j].invoke(this.value, parameterList);
                    } catch ( IllegalAccessException e) {
                        System.out.println("IllegalAccessException " + e);
                        throw new IllegalArgumentException ("Java access error calling setter\n"+e);
                    } catch ( InvocationTargetException e) {
                        System.out.println("IllegalTargetException " + e);
                        throw new IllegalArgumentException ("Java target error calling setter\n"+e);
                    }
                    foundSetter = true;
                    break;
                }
            }
            if (!foundSetter){
                // No matching setter: print the available ones to help the user.
                System.out.println("Parameter " + parameterName + " not found on trainer " + constructorName);
                System.out.println("Available parameters for " + constructorName);
                for (int j=0; j<methods.length; j++){
                    if ( methods[j].getName().startsWith("set") && methods[j].getParameterTypes().length == 1){
                        System.out.println(Character.toLowerCase(methods[j].getName().charAt(3)) +
                            methods[j].getName().substring(4));
                    }
                }
                throw new IllegalArgumentException ("no setter found for parameter " + parameterName);
            }
        }
    }
    public void postParsing (CommandOption.List list) {
        assert (this.value instanceof ClassifierTrainer);
        //System.out.println("v2c PostParsing " + this.value);
        classifierTrainers.add (this.value);
    }
};
// ---------------------------------------------------------------------------
// Input/output options.
// ---------------------------------------------------------------------------
// Destination file for the serialized trained classifier (see main()).
static CommandOption.String outputFile = new CommandOption.String
(Vectors2Classify.class, "output-classifier", "FILENAME", true, "classifier.mallet",
"The filename in which to write the classifier after it has been trained.", null);
// Single instance list, split into train/test/validation by the *-portion options.
static CommandOption.String inputFile = new CommandOption.String
(Vectors2Classify.class, "input", "FILENAME", true, "text.vectors",
"The filename from which to read the list of training instances. Use - for stdin.", null);
// Alternative to --input: explicit, separately-loaded train/test/validation files.
static CommandOption.String trainingFile = new CommandOption.String
(Vectors2Classify.class, "training-file", "FILENAME", true, "text.vectors",
"Read the training set instance list from this file. " +
"If this is specified, the input file parameter is ignored", null);
static CommandOption.String testFile = new CommandOption.String
(Vectors2Classify.class, "testing-file", "FILENAME", true, "text.vectors",
"Read the test set instance list to this file. " +
"If this option is specified, the training-file parameter must be specified and " +
" the input-file parameter is ignored", null);
static CommandOption.String validationFile = new CommandOption.String
(Vectors2Classify.class, "validation-file", "FILENAME", true, "text.vectors",
"Read the validation set instance list to this file." +
"If this option is specified, the training-file parameter must be specified and " +
"the input-file parameter is ignored", null);
// ---------------------------------------------------------------------------
// Split proportions (only used when a single --input list is split in main()).
// ---------------------------------------------------------------------------
static CommandOption.Double trainingProportionOption = new CommandOption.Double
(Vectors2Classify.class, "training-portion", "DECIMAL", true, 1.0,
"The fraction of the instances that should be used for training.", null);
static CommandOption.Double validationProportionOption = new CommandOption.Double
(Vectors2Classify.class, "validation-portion", "DECIMAL", true, 0.0,
"The fraction of the instances that should be used for validation.", null);
static CommandOption.Double unlabeledProportionOption = new CommandOption.Double
(Vectors2Classify.class, "unlabeled-portion", "DECIMAL", true, 0.0,
"The fraction of the training instances that should have their labels hidden. "
+"Note that these are taken out of the training-portion, not allocated separately.", null);
// ---------------------------------------------------------------------------
// Experiment control.
// ---------------------------------------------------------------------------
static CommandOption.Integer randomSeedOption = new CommandOption.Integer
(Vectors2Classify.class, "random-seed", "INTEGER", true, 0,
"The random seed for randomly selecting a proportion of the instance list for training", null);
static CommandOption.Integer numTrialsOption = new CommandOption.Integer
(Vectors2Classify.class, "num-trials", "INTEGER", true, 1,
"The number of random train/test splits to perform", null);
static CommandOption.Object classifierEvaluatorOption = new CommandOption.Object
(Vectors2Classify.class, "classifier-evaluator", "CONSTRUCTOR", true, null,
"Java code for constructing a ClassifierEvaluating object", null);
// Superseded by the --report option; kept for reference.
// static CommandOption.Boolean printTrainAccuracyOption = new CommandOption.Boolean
// (Vectors2Classify.class, "print-train-accuracy", "true|false", true, true,
// "After training, run the resulting classifier on the instances included in training, "
// +"and print the accuracy", null);
//
// static CommandOption.Boolean printTestAccuracyOption = new CommandOption.Boolean
// (Vectors2Classify.class, "print-test-accuracy", "true|false", true, true,
// "After training, run the resulting classifier on the instances not included in training, "
// +"and print the accuracy", null);
// ---------------------------------------------------------------------------
// Logging.
// ---------------------------------------------------------------------------
static CommandOption.Integer verbosityOption = new CommandOption.Integer
(Vectors2Classify.class, "verbosity", "INTEGER", true, -1,
"The level of messages to print: 0 is silent, 8 is most verbose. " +
"Levels 0-8 correspond to the java.logger predefined levels "+
"off, severe, warning, info, config, fine, finer, finest, all. " +
"The default value is taken from the mallet logging.properties file," +
" which currently defaults to INFO level (3)", null);
static CommandOption.Boolean noOverwriteProgressMessagesOption = new CommandOption.Boolean
(Vectors2Classify.class, "noOverwriteProgressMessages", "true|false", false, false,
"Suppress writing-in-place on terminal for progess messages - repetitive messages "
+"of which only the latest is generally of interest", null);
/**
 * Command-line entry point: trains one or more classifiers on an instance
 * list (either a single --input list split by the *-portion options, or
 * separate --training-file/--testing-file/--validation-file lists), prints
 * the statistics requested via --report for each trial, and optionally
 * serializes each trained classifier with --output-classifier.
 */
public static void main (String[] args) throws bsh.EvalError, java.io.IOException
{
    // Process the command-line options
    CommandOption.setSummary (Vectors2Classify.class,
            "A tool for training, saving and printing diagnostics from a classifier on vectors.");
    CommandOption.process (Vectors2Classify.class, args);

    // Handle the default trainer here for now; default argument processing doesn't work.
    if (!trainerConstructor.wasInvoked()){
        classifierTrainers.add (new NaiveBayesTrainer());
    }
    if (!report.wasInvoked()){
        report.postParsing(null); // force postprocessing of default value
    }

    int verbosity = verbosityOption.value;
    Logger rootLogger = ((MalletLogger)progressLogger).getRootLogger();
    if (verbosityOption.wasInvoked()){
        // Validate before indexing, so a bad value produces a clear message
        // instead of an ArrayIndexOutOfBoundsException.
        if (verbosity < 0 || verbosity >= MalletLogger.LoggingLevels.length)
            throw new IllegalArgumentException ("--verbosity must be between 0 and " +
                    (MalletLogger.LoggingLevels.length - 1));
        rootLogger.setLevel( MalletLogger.LoggingLevels[verbosity]);
    }

    if (noOverwriteProgressMessagesOption.value == false){
        // Install special formatting for progress messages: find the console
        // handler on the root logger and change its formatter to one that
        // knows about progress messages (writes in place on the terminal).
        Handler[] handlers = rootLogger.getHandlers();
        for (int i = 0; i < handlers.length; i++) {
            if (handlers[i] instanceof ConsoleHandler) {
                handlers[i].setFormatter(new ProgressMessageLogFormatter());
            }
        }
    }

    // If any of the explicit file options was given, we are in "separate lists" mode.
    boolean separateIlists = testFile.wasInvoked() || trainingFile.wasInvoked() ||
            validationFile.wasInvoked();
    InstanceList ilist = null;
    InstanceList testFileIlist = null;
    InstanceList trainingFileIlist = null;
    InstanceList validationFileIlist = null;

    if (!separateIlists) { // normal case, --input specified
        // Read in the InstanceList, from stdin if the input filename is "-".
        ilist = InstanceList.load (new File(inputFile.value));
    } else { // user specified separate files for testing and training sets.
        trainingFileIlist = InstanceList.load (new File(trainingFile.value));
        logger.info("Training vectors loaded from " + trainingFile.value);

        if (testFile.wasInvoked()){
            testFileIlist = InstanceList.load (new File(testFile.value));
            logger.info("Testing vectors loaded from " + testFile.value);
        }

        if (validationFile.wasInvoked()){
            validationFileIlist = InstanceList.load (new File(validationFile.value));
            logger.info("validation vectors loaded from " + validationFile.value);
        }
    }

    int numTrials = numTrialsOption.value;
    Random r = randomSeedOption.wasInvoked() ? new Random (randomSeedOption.value) : new Random ();

    ClassifierTrainer[] trainers = new ClassifierTrainer[classifierTrainers.size()];
    for (int i = 0; i < classifierTrainers.size(); i++) {
        trainers[i] = (ClassifierTrainer) classifierTrainers.get(i);
        logger.fine ("Trainer specified = "+trainers[i].toString());
    }

    // Per-trainer, per-trial statistics accumulated across all trials.
    double trainAccuracy[][] = new double[trainers.length][numTrials];
    double testAccuracy[][] = new double[trainers.length][numTrials];
    double validationAccuracy[][] = new double[trainers.length][numTrials];

    String trainConfusionMatrix[][] = new String[trainers.length][numTrials];
    String testConfusionMatrix[][] = new String[trainers.length][numTrials];
    String validationConfusionMatrix[][] = new String[trainers.length][numTrials];

    double t = trainingProportionOption.value;
    double v = validationProportionOption.value;

    if (!separateIlists) {
        logger.info("Training portion = " + t);
        logger.info(" Unlabeled training sub-portion = "+unlabeledProportionOption.value);
        logger.info("Validation portion = " + v);
        logger.info("Testing portion = " + (1 - v - t));
    }

    for (int trialIndex = 0; trialIndex < numTrials; trialIndex++) {
        System.out.println("\n-------------------- Trial " + trialIndex + " --------------------\n");
        InstanceList[] ilists;
        BitSet unlabeledIndices = null;
        if (!separateIlists){
            // Random train/test/validation split of the single input list.
            ilists = ilist.split (r, new double[] {t, 1-t-v, v});
        } else {
            ilists = new InstanceList[3];
            ilists[0] = trainingFileIlist;
            ilists[1] = testFileIlist;
            // BUG FIX: the validation slot used to be assigned the *test* list,
            // so --validation-file was loaded but silently ignored. Fall back to
            // the test list (the old behavior) only when no validation file was given.
            ilists[2] = validationFileIlist != null ? validationFileIlist : testFileIlist;
        }

        if (unlabeledProportionOption.value > 0)
            unlabeledIndices = new edu.umass.cs.mallet.base.util.Random(r.nextInt())
                    .nextBitSet(ilists[0].size(),
                            unlabeledProportionOption.value);

        long time[] = new long[trainers.length];
        for (int c = 0; c < trainers.length; c++){
            time[c] = System.currentTimeMillis();
            System.out.println ("Trial " + trialIndex + " Training " + trainers[c].toString() + " with "+ilists[0].size()+" instances");
            // Temporarily hide some labels to simulate partially-labeled training data.
            if (unlabeledProportionOption.value > 0)
                ilists[0].hideSomeLabels(unlabeledIndices);
            Classifier classifier = trainers[c].train (ilists[0]);
            if (unlabeledProportionOption.value > 0)
                ilists[0].unhideAllLabels();
            System.out.println ("Trial " + trialIndex + " Training " + trainers[c].toString() + " finished");
            time[c] = System.currentTimeMillis() - time[c];

            Trial trainTrial = new Trial (classifier, ilists[0]);
            Trial testTrial = new Trial (classifier, ilists[1]);
            Trial validationTrial = new Trial(classifier, ilists[2]);

            if (ilists[0].size()>0) trainConfusionMatrix[c][trialIndex] = new ConfusionMatrix (trainTrial).toString();
            if (ilists[1].size()>0) testConfusionMatrix[c][trialIndex] = new ConfusionMatrix (testTrial).toString();
            if (ilists[2].size()>0) validationConfusionMatrix[c][trialIndex] = new ConfusionMatrix (validationTrial).toString();

            trainAccuracy[c][trialIndex] = trainTrial.accuracy();
            testAccuracy[c][trialIndex] = testTrial.accuracy();
            validationAccuracy[c][trialIndex] = validationTrial.accuracy();

            if (outputFile.wasInvoked()) {
                // Disambiguate the output filename across trainers and trials.
                String filename = outputFile.value;
                if (trainers.length > 1) filename = filename+trainers[c].toString();
                if (numTrials > 1) filename = filename+".trial"+trialIndex;
                try {
                    ObjectOutputStream oos = new ObjectOutputStream
                            (new FileOutputStream (filename));
                    oos.writeObject (classifier);
                    oos.close();
                } catch (Exception e) {
                    e.printStackTrace();
                    throw new IllegalArgumentException ("Couldn't write classifier to filename "+
                            filename);
                }
            }

            // Reporting, driven by the parsed --report options:
            // "[train|test|validation]:[accuracy|f1|confusion|raw]"
            // raw output
            if (ReportOptions[ReportOption.train][ReportOption.raw]){
                System.out.println("Trial " + trialIndex + " Trainer " + trainers[c].toString());
                System.out.println(" Raw Training Data");
                printTrialClassification(trainTrial);
            }
            if (ReportOptions[ReportOption.test][ReportOption.raw]){
                System.out.println("Trial " + trialIndex + " Trainer " + trainers[c].toString());
                System.out.println(" Raw Testing Data");
                printTrialClassification(testTrial);
            }
            if (ReportOptions[ReportOption.validation][ReportOption.raw]){
                System.out.println("Trial " + trialIndex + " Trainer " + trainers[c].toString());
                System.out.println(" Raw Validation Data");
                printTrialClassification(validationTrial);
            }

            // train
            if (ReportOptions[ReportOption.train][ReportOption.confusion]){
                System.out.println("Trial " + trialIndex + " Trainer " + trainers[c].toString() + " Training Data Confusion Matrix");
                if (ilists[0].size()>0) System.out.println (trainConfusionMatrix[c][trialIndex]);
            }
            if (ReportOptions[ReportOption.train][ReportOption.accuracy]){
                System.out.println ("Trial " + trialIndex + " Trainer " + trainers[c].toString() + " training data accuracy= "+ trainAccuracy[c][trialIndex]);
            }
            if (ReportOptions[ReportOption.train][ReportOption.f1]){
                String label = ReportOptionArgs[ReportOption.train][ReportOption.f1];
                System.out.println ("Trial " + trialIndex + " Trainer " + trainers[c].toString() + " training data F1(" + label + ") = "+ trainTrial.labelF1(label));
            }

            // validation
            if (ReportOptions[ReportOption.validation][ReportOption.confusion]){
                System.out.println("Trial " + trialIndex + " Trainer " + trainers[c].toString() + " Validation Data Confusion Matrix");
                if (ilists[2].size()>0) System.out.println (validationConfusionMatrix[c][trialIndex]);
            }
            if (ReportOptions[ReportOption.validation][ReportOption.accuracy]){
                System.out.println ("Trial " + trialIndex + " Trainer " + trainers[c].toString() + " validation data accuracy= "+ validationAccuracy[c][trialIndex]);
            }
            if (ReportOptions[ReportOption.validation][ReportOption.f1]){
                String label = ReportOptionArgs[ReportOption.validation][ReportOption.f1];
                System.out.println ("Trial " + trialIndex + " Trainer " + trainers[c].toString() + " validation data F1(" + label + ") = "+ validationTrial.labelF1(label));
            }

            // test
            if (ReportOptions[ReportOption.test][ReportOption.confusion]){
                System.out.println("Trial " + trialIndex + " Trainer " + trainers[c].toString() + " Test Data Confusion Matrix");
                if (ilists[1].size()>0) System.out.println (testConfusionMatrix[c][trialIndex]);
            }
            if (ReportOptions[ReportOption.test][ReportOption.accuracy]){
                System.out.println ("Trial " + trialIndex + " Trainer " + trainers[c].toString() + " test data accuracy= "+ testAccuracy[c][trialIndex]);
            }
            if (ReportOptions[ReportOption.test][ReportOption.f1]){
                String label = ReportOptionArgs[ReportOption.test][ReportOption.f1];
                System.out.println ("Trial " + trialIndex + " Trainer " + trainers[c].toString() + " test data F1(" + label + ") = "+ testTrial.labelF1(label));
            }
        } // end for each trainer
    } // end for each trial

    // Summary statistics across trials, one section per trainer.
    for (int c=0; c < trainers.length; c++) {
        System.out.println ("\n"+trainers[c].toString());
        if (ReportOptions[ReportOption.train][ReportOption.accuracy])
            System.out.println ("Summary. train accuracy mean = "+ MatrixOps.mean (trainAccuracy[c])+
                    " stddev = "+ MatrixOps.stddev (trainAccuracy[c])+
                    " stderr = "+ MatrixOps.stderr (trainAccuracy[c]));
        if (ReportOptions[ReportOption.validation][ReportOption.accuracy])
            System.out.println ("Summary. validation accuracy mean = "+ MatrixOps.mean (validationAccuracy[c])+
                    " stddev = "+ MatrixOps.stddev (validationAccuracy[c])+
                    " stderr = "+ MatrixOps.stderr (validationAccuracy[c]));
        if (ReportOptions[ReportOption.test][ReportOption.accuracy])
            System.out.println ("Summary. test accuracy mean = "+ MatrixOps.mean (testAccuracy[c])+
                    " stddev = "+ MatrixOps.stddev (testAccuracy[c])+
                    " stderr = "+ MatrixOps.stderr (testAccuracy[c]));
    } // end for each trainer
}
/**
 * Prints one line per classified instance: the instance name, its true
 * target, then each label with its score in rank order ("label:value ").
 */
private static void printTrialClassification(Trial trial)
{
    int count = trial.toArrayList().size();
    for (int index = 0; index < count; index++) {
        Instance instance = trial.getClassification(index).getInstance();
        StringBuilder line = new StringBuilder();
        line.append(instance.getName()).append(' ').append(instance.getTarget()).append(' ');
        Labeling labeling = trial.getClassification(index).getLabeling();
        for (int rank = 0; rank < labeling.numLocations(); rank++) {
            line.append(labeling.getLabelAtRank(rank).toString())
                .append(':')
                .append(labeling.getValueAtRank(rank))
                .append(' ');
        }
        System.out.println(line);
    }
}
} |
<reponame>lukaselmer/adventofcode<filename>2017/5/run.ts
import * as fs from "fs";
/** Solves both parts of AoC 2017 day 5 and prints the results. */
export function day5() {
  const partOne = jumpUntilEscaped(readJumpInstructions(), () => 1);
  console.log(`Part 1: ${partOne}`);

  // Part 2: offsets of three or more decrease instead of increasing.
  const partTwo = jumpUntilEscaped(readJumpInstructions(), jump =>
    jump >= 3 ? -1 : 1
  );
  console.log(`Part 2: ${partTwo}`);
}
/** Reads the puzzle input: one integer jump offset per line. */
function readJumpInstructions() {
  const raw = fs.readFileSync("./5/input.txt").toString();
  return raw
    .trim()
    .split("\n")
    .map(Number);
}
/**
 * Follows the jump offsets until the position leaves the list, mutating each
 * visited offset by `strangenessJumpChanger(offset)` after it is used.
 * Returns the number of jumps taken before escaping.
 *
 * Note: `jumpInstructions` is modified in place.
 */
function jumpUntilEscaped(
  jumpInstructions: number[],
  strangenessJumpChanger: (jump: number) => number
): number {
  let steps = 0;
  let position = 0;
  while (position >= 0 && position < jumpInstructions.length) {
    steps += 1;
    const offset = jumpInstructions[position];
    jumpInstructions[position] = offset + strangenessJumpChanger(offset);
    position += offset;
  }
  return steps;
}
// Run the solver immediately when this module is executed.
day5();
|
// Ambient declaration for the CoreUI "brand icons" Alipay glyph data
// (presumably [width, height, ..., path] as in other CoreUI icon sets — TODO confirm).
export declare const cibAlipay: any[];
#ifndef __RMQ_H__
#define __RMQ_H__
#ifdef __VMS
#include <inttypes.h>
#else
#include <stdint.h>
#endif
#include <stdlib.h>
#include "amqp.h"
#ifdef __VMS
#ifdef AMQP091
#include "amqp_framing_091.h"
#else
#include "amqp_framing_080.h"
#endif
#else
#include "amqp_framing.h"
#endif
#ifndef RMQ_MAX_FRAME
#if defined(__VMS) && defined(__alpha) && __CRTL_VER <= 70320000
/* Assume that for OpenVMS 7.3-2 and earlier these need to be < 64K to stop send() and recv() having a fit! */
#define RMQ_MAX_FRAME 65535
#else
#define RMQ_MAX_FRAME 131072
#endif
#endif
#ifndef RMQ_MAX_CHAN
#define RMQ_MAX_CHAN 256
#endif
#ifndef RMQ_Q_NAM_LEN
#define RMQ_Q_NAM_LEN 128
#endif
/* State for one client connection to the broker. */
typedef struct {
amqp_connection_state_t conn; /* librabbitmq connection handle */
int chan; /* Will always be 1 (for the moment) */
int fd; /* socket descriptor for the connection (set by RabbitMQ_connect — TODO confirm) */
char errstr[128]; /* message buffer returned by RabbitMQ_strerror() — TODO confirm */
/* Bookkeeping for RPC-style request/reply (see RabbitMQ_rpc_call). */
struct {
char name[RQ_NAM_PLACEHOLDER] /* see below */
} rpc_placeholder;
} RMQ_conn_t_placeholder;
#ifdef __cplusplus
extern "C" {
#endif
/* --- Error reporting and connection lifecycle --- */
extern char *RabbitMQ_strerror(RMQ_conn_t *);
extern RMQ_conn_t *RabbitMQ_connect(char *);
extern void RabbitMQ_disconnect(RMQ_conn_t *);
/* --- Publishing --- */
extern int RabbitMQ_publish(RMQ_conn_t *, char *, char *, int, int,
amqp_basic_properties_t *, char *, int);
/* --- Topology: declaring and binding queues/exchanges --- */
extern char *RabbitMQ_declare_queue(RMQ_conn_t *, char *, int, int,
int, int, amqp_table_t *);
extern int RabbitMQ_declare_exchange(RMQ_conn_t *, char *, char *, int,
int, amqp_table_t *);
extern int RabbitMQ_bind_queue(RMQ_conn_t *, char *, char *, char *,
amqp_table_t *);
extern int RabbitMQ_bind_exchange(RMQ_conn_t *, char *, char *, char *,
amqp_table_t *);
/* --- Consuming messages --- */
extern char *RabbitMQ_consume(RMQ_conn_t *, char *, char *, int, int,
int, amqp_table_t *);
extern int RabbitMQ_dequeue(RMQ_conn_t *, RMQ_info_t *, uint64_t *,
int);
/* --- Debug/utility helpers --- */
extern void RabbitMQ_dump(char *, int);
extern void RabbitMQ_free_info(RMQ_info_t *);
/* Dispatch loop: invokes the callback for each received message. */
extern int RabbitMQ_serve(RMQ_conn_t *, int (*)(RMQ_info_t *, void *),
int, void *, int);
extern void RabbitMQ_info_init(RMQ_info_t *);
/* --- Topology teardown --- */
extern int RabbitMQ_purge_queue(RMQ_conn_t *, char *);
extern int RabbitMQ_delete_queue(RMQ_conn_t *, char *, int, int);
extern int RabbitMQ_delete_exchange(RMQ_conn_t *, char *, int);
extern int RabbitMQ_unbind_queue(RMQ_conn_t *, char *, char *, char *,
amqp_table_t *);
extern int RabbitMQ_unbind_exchange(RMQ_conn_t *, char *, char *,
char *, amqp_table_t *);
/* --- RPC, QoS, polling get --- */
extern int RabbitMQ_rpc_call(RMQ_conn_t *, char *, char *, char *, int,
RMQ_info_t *);
extern int RabbitMQ_qos(RMQ_conn_t *, int, int, int);
extern int RabbitMQ_get(RMQ_conn_t *, const char *, RMQ_info_t *, int);
extern RMQ_info_t *RabbitMQ_alloc_info();
/* --- AMQP transactions --- */
extern int RabbitMQ_tx_select(RMQ_conn_t *);
extern int RabbitMQ_tx_commit(RMQ_conn_t *);
extern int RabbitMQ_tx_rollback(RMQ_conn_t *);
extern int RabbitMQ_cancel(RMQ_conn_t *, char *);
extern void RabbitMQ_release(RMQ_conn_t *);
#ifndef _WIN32
#ifdef __VMS
#pragma names save
#pragma names uppercase
#endif
#include <pthread.h>
#ifdef __VMS
#pragma names restore
#endif
extern pthread_t RabbitMQ_serve_thread(RMQ_conn_t *,
int (*)(RMQ_info_t *, void *),
int, void *, int);
#endif
#ifdef __cplusplus
}
#endif
/* Branch-prediction hints (no-ops on non-GCC compilers). */
#if defined __GNUC__
#define likely(x) __builtin_expect ((x), 1)
#define unlikely(x) __builtin_expect ((x), 0)
#else
#define likely(x) (x)
#define unlikely(x) (x)
#endif

/*
 * Abort with a diagnostic when the asserted condition is false.
 * BUG FIX: the condition is now parenthesized. Previously `!x` was substituted
 * textually, so e.g. RMQ_Assert(a > 3) expanded to `!a > 3`, which negates
 * only `a` instead of the whole expression.
 */
#define RMQ_Assert(x) \
do { \
if (unlikely (!(x))) { \
fprintf (stderr, "Assertion failed: %s (%s: %d)\n", #x, __FILE__, __LINE__); \
abort(); \
} \
} while (0)

/* Abort when an allocation result is false/NULL (same parenthesization fix). */
#define RMQ_AllocAssert(x) \
do { \
if (unlikely (!(x))) { \
fprintf (stderr, "FATAL ERROR: OUT OF MEMORY (%s: %d)\n", __FILE__, __LINE__); \
abort(); \
} \
} while (0)

/* free() a pointer and reset it to NULL; safe to invoke on an already-NULL pointer. */
#define RMQ_Free(x) \
do { \
if ((x) != NULL) { \
free(x); (x) = NULL; \
} \
} while (0)
#endif
|
def resample_uv_to_bbox(
    predictor_output: DensePoseChartPredictorOutput,
    labels: torch.Tensor,
    box_xywh_abs: Tuple[int, int, int, int],
) -> torch.Tensor:
    """
    Resamples the predictor's per-part U/V charts to the bounding box size and
    assembles a single [2, H, W] UV map using the per-pixel part labels.

    Args:
        predictor_output: holds `u` and `v` tensors of per-part chart values
            (channel 0 is background and is skipped).
        labels: per-pixel part-label tensor of the box size (H, W) — assumed;
            confirm against the caller.
        box_xywh_abs: absolute (x, y, w, h) of the box; w/h are clamped to >= 1.

    Returns:
        A float32 tensor of shape [2, H, W] with U in channel 0 and V in channel 1.
    """
    _, _, box_w, box_h = box_xywh_abs
    out_w = max(int(box_w), 1)
    out_h = max(int(box_h), 1)
    size = (out_h, out_w)
    u_map = F.interpolate(predictor_output.u, size, mode="bilinear", align_corners=False)
    v_map = F.interpolate(predictor_output.v, size, mode="bilinear", align_corners=False)
    uv = torch.zeros([2, out_h, out_w], dtype=torch.float32, device=predictor_output.u.device)
    # Copy each part's chart values only where the label map selects that part.
    for part_id in range(1, u_map.size(1)):
        part_mask = labels == part_id
        uv[0][part_mask] = u_map[0, part_id][part_mask]
        uv[1][part_mask] = v_map[0, part_id][part_mask]
    return uv
import numpy as np
def _connectivity(V, idx):
idx = np.asmatrix(idx)
mat1 = np.tile(idx, (V.shape[1], 1))
mat2 = np.tile(idx.T, (1, V.shape[1]))
conn = np.equal(np.mat(mat1), np.mat(mat2))
return np.mat(conn, dtype='d')
def connectivity(V, H):  # derived from nimfa
    """Connectivity matrix from a factor matrix H: each sample is assigned to
    the cluster (row of H) with the largest coefficient, and entry (i, j) is
    1.0 iff samples i and j share that cluster.

    Fix: ``np.mat`` was removed in NumPy 2.0; use ``np.asmatrix`` + ``astype``
    instead (same matrix result on older NumPy).
    """
    idx = np.asmatrix(np.argmax(H, axis=0))
    n = V.shape[1]
    mat1 = np.tile(idx, (n, 1))
    mat2 = np.tile(idx.T, (1, n))
    conn = np.equal(np.asmatrix(mat1), np.asmatrix(mat2))
    return np.asmatrix(conn.astype('d'))
|
<gh_stars>0
import { Controller, Get, Post, Body, Param, Put, Delete } from '@nestjs/common';
import { FilmsService } from './films.service';
import { Film } from '../entities/film.entity';
/**
 * REST controller exposing CRUD-style endpoints under /films.
 * All persistence work is delegated to FilmsService.
 */
@Controller('films')
export class FilmsController {
// FilmsService is injected by Nest's dependency-injection container.
constructor(private filmService: FilmsService) {}
/** GET /films — returns all films. */
@Get()
findAll() {
return this.filmService.findAll();
}
/** POST /films — creates a film from the raw request body. */
@Post()
create(@Body() body) {
return this.filmService.create(body)
}
/** PUT /films/:id — updates a film's Name and Description fields. */
@Put(':id')
update(@Param('id') id, @Body('Name') Name,@Body('Description') Description) {
return this.filmService.update(id,Name,Description)
}
/** DELETE /films/:id — removes the film with the given id. */
@Delete(':id')
remove(@Param('id') id) {
return this.filmService.remove(id)
}
}
|
def resolve():
    """Read T test cases from stdin. For each array, print the 1-based indices
    (i, i+1, n) of the first adjacent pair whose sum does not exceed the last
    element, or -1 when no such pair exists."""
    num_cases = int(input())
    for _ in range(num_cases):
        n = int(input())
        values = [int(token) for token in input().split(" ")]
        answer = None
        for i in range(n - 2):
            if values[i] + values[i + 1] <= values[n - 1]:
                answer = (i + 1, i + 2, n)
                break
        if answer is None:
            print(-1)
        else:
            print(*answer)
# Run the stdin/stdout solver only when executed as a script.
if __name__ == "__main__":
    resolve()
Experimental analysis of damage mechanism and shrinkage performance of recycled concrete
This article analyzes the interface structural features and damage mechanism of recycled aggregate concrete. Recycled aggregates are divided into three types (A, B, and C) according to the degree of particle shaping. Through experiments, the shrinkage performance of recycled concrete is analyzed under different cement contents and different replacement rates of recycled coarse aggregate. The experimental results show that recycled concrete made with Type A aggregate performs worst — inferior to ordinary concrete made with natural aggregate (NA) — while recycled concrete made with Type C aggregate performs best, superior to NA ordinary concrete.
Introduction
On a conservative estimate, the annual production of waste concrete in China is about 100 million tons. Only a tiny amount of low-grade waste concrete is recycled, in non-load-bearing structures such as the base courses of roadbeds; most of it is landfilled or stockpiled, which pollutes the environment and wastes resources. Meanwhile, annual concrete production in China is as high as 1.5 to 2 billion cubic meters, and the sand and gravel aggregates required exceed 10 billion tons. Over-exploitation of sand and gravel has caused landslides and riverbed realignment, undermining the sustainable development of the ecological environment in the regions where the aggregates are sourced. In some areas of China, high-quality natural aggregates (river sand, pebbles) have already been exhausted. The search for alternative aggregate sources, the need to treat and dispose of waste concrete, and concern for the sustainable development of the environment have together set off a surge of research into recycling waste concrete.
With the rapid development of the society and the reconstruction of the infrastructure, the demand for concrete is increasing, and the treatment of a large number of demolished building wastes will bring some harm to the environment. Therefore, this paper put forward the basic idea of recycled aggregate concrete, and hopes to replace partially ordinary concrete with recycled aggregate concrete. It can not only meet the social needs, but also reduce the harm to the environment and realize the recycling of resources, and recycled aggregate concrete research also become the focus of social research nowadays. However, due to the large amount of old adhered mortar in recycled aggregate, it has certain defects in mechanical properties compared with ordinary concrete .At present, the processing mechanism of recycled aggregate concrete technology is not perfect, and there are many bottlenecks that need to be broken in order to improve various mechanical performance indexes. All of these limit the development of recycled aggregate concrete to a large extent. So far, recycled aggregate concrete has been mostly used in some non-load-bearing components or road surfaces. The study on the damage mechanism and modification method of recycled aggregate concrete are of great significance for improving the destructive of recycled aggregate concrete and strengthen its application in engineering . According to the related research, it is found that either the tensile failure or the compression failure of the recycled aggregate concrete is manifested by the fracture of the old adhered mortar and the interface transition zone. The initial micro cracks in the process are usually first appeared in the interface transition zone, and then extend to the mortar area. Therefore, the interface transition zone is the weakest link of recycled aggregate concrete, and the main factor causing this phenomenon is the existence of old mortar. 
To a large extent, the old mortar influences the difference in quantity, distribution and performance of interface transition zone. In recent years, many scholars have done research on recycled aggregate concrete, and have made a series of research achievements in improving the performance of recycled aggregate and the strength of recycled aggregate concrete. This paper will sort out and summarize these achievements,Through the experiments, the shrinkage performance of recycled concrete is analyzed under the conditions of different cement content and the replacement rate of recycled coarse aggregate .
Interface characteristics and damage mechanism of recycled concrete
The interface in concrete is a loose porous structure, where the concentration and orientation of much Ca(OH) 2 exist, which is the weakest link in concrete, affecting significantly the mechanical property and durability of concrete. Recycled aggregate is made of processed waste concrete, often some hardened set cement or mortar appear on the surface of the aggregate. Thus, the recycled aggregate itself is a composite, and there are more interfaces in the concrete prepared by the recycled aggregate than ordinary concrete : Aggregate/old mortar interface, aggregate/new mortar interface, old mortar/new mortar interface, and interface intersection. The existence of multiple interface structures causes recycled concrete to use a large amount of water, with a low strength after hardening and low elasticity modulus. At the same time, the anti-permeability, freezing resistance, carbonization resistance, shrinkage, creeping, and chloride ion penetration resistance and other endurance qualities of the recycled concrete are also lower than those of ordinary concrete. The concrete itself is made of cement mortar, interfacial transition zone and aggregate. The strengthening phase is aggregate, and the matrix phase is cement concretion -cement mortar. Therefore, with respect to recycled concrete, its strengthening phase is the recycled aggregate and its matrix phase is the cement mortar. In accordance with the structural characteristics of recycled concrete, it is possible to enhance the combination of various interfaces, the combination of interface intersections, and the cohesion of recycled aggregates in two strengthening ways, that is, improving the properties of recycled aggregate and the performance of cement mortar, to increase the performance of recycled concrete.
The application of recycled concrete can meet the strength requirements, as well as the durability requirements. Only in this way, can the structural safety of recycled concrete be guaranteed. The performance deterioration degree of recycled concrete is related to the quality and replacement rate of the used recycled aggregate. The performance of Type 1 recycled aggregate concrete is superior to that of Type II recycled aggregate concrete, and better than that of Type III recycled aggregate concrete: the higher the replacement rate of recycled aggregate, the greater the performance deterioration degree of recycled concrete. Generally speaking, provided that the content of cementing material is determined, the lower the strength of recycled concrete, the worse the durability will be. In real life, it often happens that mixture of recycled concrete made from recycled aggregates has a large water consumption, a low strength and low elastic modulus, and its anti-permeability, freeing resistance, carbonization resistance, shrinkage, creeping, and chloride ion penetration resistance and other endurance qualities are also lower than those of ordinary concrete. The main reasons why the performance of recycled concrete is low is that there are structures with multiple weak interfaces in recycled concrete . See Figure 1 for typical photos of natural aggregate and recycled concrete aggregate. Recycled aggregate is made of processed waste concrete (also known as the original concrete), often some hardened set cement or mortar exists on the surface of the aggregate. Thus, the recycled aggregate itself is a composite, and inside there are some interfaces which can be called old interface. The hardened cement paste or mortar in the concrete (hereinafter collectively known as slurry) is continuous, while the coarse aggregate is discontinuous. 
However, the natural coarse aggregate in the recycled aggregate is generally continuous, while the slurry is discontinuous, the old interface only exists locally. Previous researches generally thought of it as a closed interface along the coarse aggregate surface, so many scholars think that the interface in recycled concrete includes the old interface and the old slurry-new slurry interface. In fact, the aggregate is made of a local material, and the aggregate in an area changes little. When the natural aggregate is replaced by some recycled aggregate, the new surface (excluding the slurry) in the recycled aggregate is similar to the surface of the natural aggregate. At this time, there are three interface structures in the recycled concrete: the old interface, the old slurry-new slurry interface, and the natural aggregate-new slurry interface. Secondly, recycled concrete is mostly concrete where natural aggregate is replaced by recycled aggregate partially. When many researchers carry out research on the interface structure of recycled concrete, they normally select the weakest interface randomly between aggregate and slurry from recycled concrete. They think that this is the old interface in the recycled aggregate; to some extent, this research method is uncertain and blind. Because even ordinary concrete, due to the internal stratification, the structure of the coarse aggregate differs from those of the upper and lower interfaces perpendicular to the pouring direction. Generally, the upper interface is dense, while the lower interface structure is relatively loose. The interface selected subjectively and randomly is not necessarily an old interface, so a more scientific interface research method needs establishing .
Principle and process of tests
This contact shrinkage test is adopted in the test to measure the shrinkage ratio of unconstrained concrete under the conditions of specified temperature and humidity. Experimental process: (1) Test-piece specification: 100mmx100mmx515mm, with an embedded probe, cured to the valid period of standard. (2) Placed in a shrinkage lab with a room temperature of 20±2°C and the relative humidity of 60±5%.
(3) The zero point of the expansion and shrinkage gauge is corrected by a standard bar and readings are taken 3 times at every turn.
Classification of recycled aggregate
The waste concrete is simply crushed into Type II recycled coarse aggregate by a jaw crusher; Type II recycled coarse aggregate is subjected to the primary shaping and secondary shaping through a particle shaper to obtain the standard Type I recycled coarse aggregate and Type I recycled coarse aggregate. In this article, Type II recycled coarse aggregate is represented by A; standard Type I recycled coarse aggregate is represented by B; Type I recycled coarse aggregate is represented by C.
Test results
According to the Standard for Test Methods of Long-term Performance and Durability of Ordinary Concrete, the shrinkage ratios and test results of the test pieces are shown in Figures 2 and 3. Among them, the cement contents are taken to be 350 kg/m³ and 500 kg/m³ respectively. (a) Replacement rate 50%; (b) replacement rate 100%. Figure 2. Test data map of the shrinkage rate of recycled concrete with a cement content of 350 kg/m³.
Analysis of test results
The following conclusions were drawn from analyzing the effects of particle shaping and the replacement ratio of recycled coarse aggregate on the shrinkage properties of recycled concrete.
(1) The shrinkage performance of A recycled concrete is the worst, inferior to that of NA ordinary concrete; the shrinkage performance of B recycled concrete is close to that of NA ordinary concrete; C recycled concrete has the best shrinkage performance, superior to that of NA ordinary concrete. Compared with A recycled concrete, the 60d shrinkage ratio of B and C recycled concrete is reduced by 15% -40% and 20% -40%. (2) The shrinkage ratio of A recycled concrete is increased as the replacement rate increases; the shrinkage rate of B recycled concrete changes little as the replacement rate increases; the shrinkage rate of C recycled concrete is decreased as the replacement rate increases.
Conclusions
(1) There are more interfaces inside the recycled concrete: aggregate/old mortar interface, aggregate/new mortar interface, old mortar/new mortar interfaces, and interface intersections. The residual adhered mortar on the surface of recycled coarse aggregate will affect the mechanical properties and durability of recycled concrete. Recycled concrete is a multi-phase composite, and the random distribution of multiple interfaces inside makes the research of the interface structure more difficult, thus preventing the study of the property damage mechanism of recycled concrete.
(2) Particle shaping can significantly improve the quality of recycled coarse aggregate and thereby improve the shrinkage performance of recycled concrete. Shrinkage performance of recycled coarse aggregate concrete: A is the worst, inferior to NA ordinary concrete; C is the best, superior to NA ordinary concrete.
def is_error(self, word: str) -> bool:
    """Return True if ``word`` should be flagged as a spelling error.

    A word is an error when its normalised form (``remove_char``-stripped,
    lower-cased) is unknown to the dictionary, unless it matches a known
    proper name with the exact stored capitalisation.

    Bug fix: the original only assigned ``word2`` inside the
    ``isInput(word)`` branch, so the else branch raised ``NameError``
    when it dereferenced ``word2``. The normalisation is now performed
    once, up front, for both branches — assumed to be the original
    intent; TODO confirm against callers of ``is_error``.
    """
    word2 = self.remove_char(word)
    key = word2.lower()
    if self.isInput(word):
        if key not in self.words:
            if key in self.names:
                # Known name: only an error if capitalisation differs.
                return self.names[key] != word2
            # Unknown to both dictionary and names: definite error.
            return True
        return False
    # Not an "input" word: only name-capitalisation mismatches count.
    if key in self.names:
        return self.names[key] != word2
    return False
// --- Address Change Listener
static class AddressChangeListener extends Thread {
public void run() {
for (;;) {
// wait for configuration to change
if (notifyAddrChange0() != 0)
return;
synchronized (lock) {
changed = true;
}
}
}
} |
// Test that the Settings App installs correctly when it's set to be disabled
// via SystemFeaturesDisableList policy, but doesn't launch.
IN_PROC_BROWSER_TEST_P(SettingsAppIntegrationTest, SettingsAppDisabled) {
  {
    // Scoped block: the ListPrefUpdate commits the policy pref change on
    // destruction, before the system-app install below runs.
    ListPrefUpdate update(TestingBrowserProcess::GetGlobal()->local_state(),
                          policy::policy_prefs::kSystemFeaturesDisableList);
    base::ListValue* list = update.Get();
    list->Append(policy::SystemFeature::kOsSettings);
  }
  // Before install: the Settings system app must not be registered.
  ASSERT_FALSE(GetManager()
                   .GetAppIdForSystemApp(web_app::SystemAppType::SETTINGS)
                   .has_value());
  WaitForTestSystemAppInstall();
  Browser* app_browser;
  // Launch without waiting for load: the disabled app is expected to show
  // an error page rather than the real Settings UI.
  LaunchAppWithoutWaiting(web_app::SystemAppType::SETTINGS, &app_browser);
  // The app installed despite the disable policy...
  ASSERT_TRUE(GetManager()
                  .GetAppIdForSystemApp(web_app::SystemAppType::SETTINGS)
                  .has_value());
  content::WebContents* web_contents =
      app_browser->tab_strip_model()->GetActiveWebContents();
  EXPECT_TRUE(content::WaitForLoadStop(web_contents));
  content::WebUI* web_ui = web_contents->GetWebUI();
  ASSERT_TRUE(web_ui);
  // ...but launching it lands on the "chrome URLs disabled" page, proving
  // the policy blocked the actual app UI.
  EXPECT_EQ(l10n_util::GetStringUTF16(IDS_CHROME_URLS_DISABLED_PAGE_HEADER),
            web_contents->GetTitle());
}
import * as core from "@actions/core";
import * as crypto from "crypto";
import { V1JobStatus } from "@kubernetes/client-node";
import k8s = require("@kubernetes/client-node");
/**
 * Builds a Kubernetes job name from the action and database name.
 * Names are capped at 52 characters so Kubernetes has room to append
 * pod-name hash suffixes; longer names are truncated to a recognizable
 * 40-char prefix plus a deterministic 8-char SHA-256 fragment to avoid
 * collisions.
 */
const generateJobName = (action: string, database_name: string) => {
  const fullJobName = `gha-review-service-${action}-${database_name}`;
  if (fullJobName.length >= 53) {
    const jobHash = crypto
      .createHash("sha256")
      .update(fullJobName)
      .digest("hex")
      .substring(0, 8);
    return `${fullJobName.substring(0, 40)}-${jobHash}`;
  }
  return fullJobName;
};
// Kubernetes client configured from the kubeconfig passed as an action input.
const kc = new k8s.KubeConfig();
kc.loadFromString(core.getInput("kubeconfig"));

// Supported values for the `action` input.
const CREATE = "create";
const REMOVE = "remove";

const action_config = {
  namespace: core.getInput("namespace"),
  action: core.getInput("action"),
};

const database_config = {
  user: core.getInput("db_user"),
  // Fixed: the original read a redacted "<PASSWORD>" placeholder, which is
  // not a real input name. Assumed to be "db_password" to match the other
  // db_* inputs — TODO confirm against the action's action.yml.
  password: core.getInput("db_password"),
  port: core.getInput("db_port"),
  host: core.getInput("db_host"),
  name: core.getInput("database"),
  // Database psql connects to for CREATE/DROP statements.
  defaultdb: core.getInput("default_database_name"),
};

const jobName = generateJobName(action_config.action, database_config.name);
const k8sBatchV1Api = kc.makeApiClient(k8s.BatchV1Api);
const k8sCoreApi = kc.makeApiClient(k8s.CoreV1Api);
const createJob = async () => {
try {
await k8sCoreApi.readNamespace(action_config.namespace);
core.info(`Namespace ${action_config.namespace} exists`);
} catch (e) {
const error = e as Error;
core.debug(error.message);
await k8sCoreApi.createNamespace({
apiVersion: "v1",
kind: "Namespace",
metadata: {
name: action_config.namespace,
labels: {
environment: "review",
// Needed for synchronizing the secret to download images
app: "kubed",
},
},
});
core.info(`Namespace ${action_config.namespace} created`);
}
const secrets = await k8sCoreApi.listNamespacedSecret(
action_config.namespace,
undefined,
undefined,
undefined,
undefined,
`job-name=${jobName}`
);
if (secrets.body.items.length == 0) {
await k8sCoreApi.createNamespacedSecret(action_config.namespace, {
apiVersion: "v1",
kind: "Secret",
metadata: {
name: jobName,
labels: {
"job-name": jobName,
},
},
stringData: {
PGHOST: database_config.host,
PGPORT: String(database_config.port),
PGUSER: database_config.user,
PGPASSWORD: <PASSWORD>,
PGDATABASE: database_config.defaultdb,
},
});
core.info(`Secret ${jobName} created`);
}
let command = "";
switch (action_config.action.toLowerCase()) {
case CREATE:
command = `echo "SELECT 'CREATE DATABASE \\"${database_config.name}\\"' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = '${database_config.name}')\\gexec" > command.sql && psql -v ON_ERROR_STOP=1 -f command.sql`;
break;
case REMOVE:
command = `echo "DROP DATABASE \\"${database_config.name}\\"" > command.sql && psql -v ON_ERROR_STOP=1 -f command.sql`;
}
await k8sBatchV1Api.createNamespacedJob(action_config.namespace, {
apiVersion: "batch/v1",
kind: "Job",
metadata: {
name: jobName,
},
spec: {
backoffLimit: 0,
completions: 1,
parallelism: 1,
template: {
spec: {
containers: [
{
name: "psql",
image: "postgres:11-alpine",
command: ["/bin/sh", "-c"],
args: [command],
envFrom: [
{
secretRef: {
name: jobName,
},
},
],
},
],
restartPolicy: "Never",
},
},
},
});
core.info(`Job ${jobName} created`);
};
/**
 * Polls the job status once per second until a pod has either failed or
 * succeeded, then reports the outcome to the action. On failure the psql
 * container's log is printed and the action is marked failed.
 */
const loop = async () => {
  let status = new V1JobStatus();
  // Keep polling while neither `failed` nor `succeeded` counters are set.
  while ((status.failed || 0) + (status.succeeded || 0) == 0) {
    await new Promise((r) => setTimeout(r, 1000));
    const result = await k8sBatchV1Api.readNamespacedJobStatus(
      jobName,
      action_config.namespace
    );
    status = result.body.status || new V1JobStatus();
  }
  // NOTE(review): `!status` can never be true here — `status` was just
  // assigned above; the condition effectively reduces to `status.failed > 0`.
  if (!status || (status && status.failed && status.failed > 0)) {
    // Locate the job's pod via the label selector so we can dump its log.
    const response = await k8sCoreApi.listNamespacedPod(
      action_config.namespace,
      undefined,
      undefined,
      undefined,
      undefined,
      `job-name=${jobName}`
    );
    if (
      response.body.items.length > 0 &&
      response.body.items[0].metadata !== undefined &&
      response.body.items[0].metadata.name !== undefined
    ) {
      // Surface the psql container's output to the action log for debugging.
      const logResponse = await k8sCoreApi.readNamespacedPodLog(
        response.body.items[0].metadata.name,
        action_config.namespace,
        "psql",
        false
      );
      core.info(logResponse.body);
    }
    core.setFailed(`Job ${jobName} failed`);
  } else {
    core.info(`Job ${jobName} succeeded`);
  }
};
/**
 * Lists the pods spawned by this action's job, matched via the
 * `job-name` label Kubernetes puts on job pods.
 */
const getJobPod = async () => {
  const podList = await k8sCoreApi.listNamespacedPod(
    action_config.namespace,
    undefined, // pretty
    undefined, // allowWatchBookmarks
    undefined, // _continue
    undefined, // fieldSelector
    `job-name=${jobName}` // labelSelector
  );
  return podList.body.items;
};
/**
 * Entry point: validates the requested action, creates the job if one is
 * not already running, and waits for it to finish.
 */
const execute = async () => {
  try {
    if (![CREATE, REMOVE].includes(action_config.action.toLowerCase())) {
      core.setFailed(
        `Given action ${action_config.action} not supported. Supported ones are ${CREATE} and ${REMOVE}.`
      );
    } else {
      // Idempotency: if a pod for this job already exists, don't recreate it.
      const pods = await getJobPod();
      if (pods.length == 0) {
        await createJob();
      }
      // Fixed: `loop()` was previously fire-and-forget, so any rejection
      // escaped this try/catch and surfaced as an unhandled promise
      // rejection instead of failing the action cleanly.
      await loop();
    }
  } catch (e) {
    const error = e as Error;
    core.setFailed(error);
  }
};
execute();
|
<filename>src/visualization.opengl.renderer.geometry/BoundingOctreeRenderer.cpp
#include <visualization.opengl/MiniGL.h>
#include <simulation.geometry/boundingVolumes/BoundingSphere.h>
#include "BoundingOctreeRenderer.h"
using namespace nspace;
using namespace std;
// Octree depth level whose cells are drawn (shared by all renderer instances).
int OctreeRenderer::level =0;
// Global toggle: when false, render() is a no-op.
bool OctreeRenderer::doRender =true;
// Stores a reference to the octree to visualize; no other setup needed.
OctreeRenderer::OctreeRenderer(Octree & octree):_octree(octree){
}
// Draws the octree cells of the currently selected `level` as translucent
// spheres, in the local coordinate frame of the octree's geometry.
// Color encodes bounding-volume state: cyan = stale, dark cyan = up to
// date, red = colliding.
void OctreeRenderer::render(){
  if(!doRender)return;
  MiniGL::pushMatrix();
  // Transform into the octree geometry's local frame (translate + rotate).
  const Matrix3x3 & R =_octree.getGeometry().coordinates().getTransposedRotationMatrix();
  MiniGL::translate(_octree.getGeometry().coordinates().position());
  MiniGL::multMatrix(R);
  _octree.foreachChildOfLevel(level,[](Octree * octree){
    Vector3D center;
    octree->getCenter(center);
    //const BoundingBox & aabb =octree->getBoundingBox();
    float color [4];
    // Pick the base color from the bounding volume's state.
    const float * baseColor = MiniGL::cyan;
    BoundingVolume * volume = &(octree->getBoundingVolume());
    if(volume->isUpToDate()){
      baseColor = MiniGL::darkCyan;
    }
    if(volume->isColliding()){
      baseColor = MiniGL::red;
    }
    for(int i=0; i < 4; i++){ color[i]= baseColor[i];}
    // Force translucency so overlapping cells remain visible.
    color[3]=0.4;
    //MiniGL::drawPoint(aabb.min,5,MiniGL::darkblue);
    //MiniGL::drawPoint(aabb.max,5,MiniGL::darkblue);
    MiniGL::drawPoint(center,5,MiniGL::darkblue);
    // Only sphere bounding volumes are drawn; other types get just the center point.
    BoundingSphere * bs = dynamic_cast<BoundingSphere*>(volume);
    if(bs){
      MiniGL::drawSphere(bs->getPositionPCS(),bs->radius(),color,14U);
    }
    //MiniGL::drawCube(&center,&((Matrix3x3::Identity())),aabb.getWidth(), aabb.getHeight(), aabb.getDepth(),color);
    //MiniGL::drawSphere();
  });
  MiniGL::popMatrix();
}
|
def array_from_compressed_json(value, widget):
    """Deserialize an array whose buffer may be zlib-compressed.

    If ``value`` carries a ``'compressed_buffer'`` entry, it is popped,
    decompressed, and stored back under ``'buffer'`` (as a memoryview on
    Python 2) before delegating to ``array_from_json``.
    """
    compressed = None
    if value is not None:
        compressed = value.pop('compressed_buffer', None)
    if compressed is not None:
        if six.PY2:
            # Py2 delivers a buffer-like object; zlib needs raw bytes.
            compressed = compressed.tobytes()
        raw = zlib.decompress(compressed)
        if six.PY2:
            raw = memoryview(raw)
        value['buffer'] = raw
    return array_from_json(value, widget)
/**
 * This activity displays the history of past translations.
 *
 * @author Cedric Beust
 * @author Daniel Rall
 */
public class HistoryActivity extends ListActivity implements OnItemClickListener {
    private SimpleAdapter mAdapter;
    // Backing data for the adapter; one String map per history record.
    private List<Map<String, String>> mListData;
    private History mHistory;
    // Keys used in each row's data map.
    private static final String INPUT = "input";
    private static final String OUTPUT = "output";
    private static final String FROM = "from";
    private static final String TO = "to";
    private static final String FROM_SHORT_NAME = "from-short-name";
    private static final String TO_SHORT_NAME = "to-short-name";
    // These constants are used to bind the adapter to the list view
    private static final String[] COLUMN_NAMES = { INPUT, OUTPUT, FROM, TO };
    private static final int[] VIEW_IDS = { R.id.input, R.id.output, R.id.from, R.id.to };

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        setContentView(R.layout.history_activity);
        mHistory = new History(TranslateActivity.getPrefs(this));
        // Default ordering: most recent translations first.
        initializeAdapter(mHistory.getHistoryRecordsMostRecentFirst());
    }

    /**
     * (Re)builds the list data and adapter from the given history records
     * and installs them on the list view.
     */
    private void initializeAdapter(List<HistoryRecord> historyRecords) {
        mListData = Lists.newArrayList();
        for (HistoryRecord hr : historyRecords) {
            Map<String, String> data = Maps.newHashMap();
            // Values that are bound to views
            data.put(INPUT, hr.input);
            data.put(OUTPUT, hr.output);
            data.put(FROM, hr.from.name().toLowerCase());
            data.put(TO, hr.to.name().toLowerCase());
            // Extra values we keep around for convenience
            data.put(FROM_SHORT_NAME, hr.from.getShortName());
            data.put(TO_SHORT_NAME, hr.to.getShortName());
            mListData.add(data);
        }
        mAdapter = new SimpleAdapter(this, mListData, R.layout.history_record,
                COLUMN_NAMES, VIEW_IDS);
        getListView().setAdapter(mAdapter);
        getListView().setOnItemClickListener(this);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.history_activity_menu, menu);
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Menu switches the sort order of the displayed history.
        switch(item.getItemId()) {
            case R.id.most_recent:
                initializeAdapter(mHistory.getHistoryRecordsMostRecentFirst());
                break;
            case R.id.languages:
                initializeAdapter(mHistory.getHistoryRecordsByLanguages());
                break;
        }
        return true;
    }

    // Clicking a record copies it back into the shared preferences (so the
    // translate screen can restore it) and closes this activity.
    public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
        // Items are the Map<String, String> rows built in initializeAdapter;
        // the cast is unchecked but safe by construction.
        Map<String, String> data = (Map<String, String>) parent.getItemAtPosition(position);
        Editor edit = TranslateActivity.getPrefs(this).edit();
        TranslateActivity.savePreferences(edit,
                data.get(FROM_SHORT_NAME), data.get(TO_SHORT_NAME),
                data.get(INPUT), data.get(OUTPUT));
        finish();
    }
}
def check_ready(self):
    """Synchronise the 'ready' flag with current state and signal changes.

    Mirrors ``self.is_ready`` onto the '<endpoint>.ready' flag, raises
    '<endpoint>.ready.changed' only when data changed while the endpoint
    was already ready, and always clears the transient 'changed' flag.
    """
    ready_flag = self.expand_name('ready')
    # Capture the previous state before toggling the flag.
    was_ready = is_flag_set(ready_flag)
    toggle_flag(ready_flag, self.is_ready)
    if self.is_ready and was_ready and self.is_changed:
        set_flag(self.expand_name('ready.changed'))
    clear_flag(self.expand_name('changed'))
/**
 * Test to ensure labeled anomalies are filtered out and not notified
 */
@Test
public void testAlertFilterFeedback() throws Exception {
    // Point the subscription at detection config 3 and reset its vector clock.
    this.alertConfig.getProperties().put(PROP_DETECTION_CONFIG_IDS, Collections.singletonList(detectionConfigId3));
    this.alertConfig.setVectorClocks(Collections.singletonMap(detectionConfigId3, this.baseTime));

    // Three anomalies: explicit ANOMALY feedback (should be filtered out),
    // explicit NO_FEEDBACK, and no feedback object at all (both notified).
    AnomalyFeedbackDTO feedbackAnomaly = new AnomalyFeedbackDTO();
    feedbackAnomaly.setFeedbackType(AnomalyFeedbackType.ANOMALY);
    AnomalyFeedbackDTO feedbackNoFeedback = new AnomalyFeedbackDTO();
    feedbackNoFeedback.setFeedbackType(AnomalyFeedbackType.NO_FEEDBACK);
    MergedAnomalyResultDTO anomalyWithFeedback = makeAnomaly(detectionConfigId3, this.baseTime, 5, 10, Collections.emptyMap(), feedbackAnomaly);
    MergedAnomalyResultDTO anomalyWithNoFeedback = makeAnomaly(detectionConfigId3, this.baseTime, 5, 10, Collections.emptyMap(), feedbackNoFeedback);
    MergedAnomalyResultDTO anomalyWithNullFeedback = makeAnomaly(detectionConfigId3, this.baseTime, 5, 10, Collections.emptyMap(), null);
    // Ensure anomaly creation timestamps precede the filter's reference time.
    Thread.sleep(1);

    this.alertFilter = new DimensionsRecipientAlertFilter(provider, alertConfig, System.currentTimeMillis());
    DetectionAlertFilterResult result = this.alertFilter.run();
    // Expect two notification groups: the default recipients and the
    // dimension-specific recipients.
    Assert.assertEquals(result.getResult().size(), 2);

    // Default recipient group: gets the two unlabeled anomalies only.
    DetectionAlertFilterNotification recDefault = AlertFilterUtils.makeEmailNotifications(
        this.alertConfig, PROP_TO_VALUE, PROP_CC_VALUE, PROP_BCC_VALUE);
    Assert.assertTrue(result.getResult().containsKey(recDefault));
    Assert.assertEquals(result.getResult().get(recDefault).size(), 2);
    Assert.assertTrue(result.getResult().get(recDefault).contains(anomalyWithNoFeedback));
    Assert.assertTrue(result.getResult().get(recDefault).contains(anomalyWithNullFeedback));
    // The anomaly labeled as a true anomaly must be suppressed.
    Assert.assertFalse(result.getResult().get(recDefault).contains(anomalyWithFeedback));

    // Dimension-specific recipient group: expected notification carries the
    // dimension filter and the subscription's reference links.
    DetectionAlertFilterNotification recValue = AlertFilterUtils.makeEmailNotifications(
        this.alertConfig, PROP_TO_FOR_VALUE, PROP_CC_VALUE, PROP_BCC_VALUE);
    Multimap<String, String> dimFilters = ArrayListMultimap.create();
    dimFilters.put("key", "value");
    recValue.setDimensionFilters(dimFilters);
    Map<String, String> refLinks = new HashMap<>();
    refLinks.put("link1", "value1");
    recValue.getSubscriptionConfig().setReferenceLinks(refLinks);
    Assert.assertTrue(result.getResult().containsKey(recValue));
    Assert.assertEquals(result.getResult().get(recValue).size(), 1);
    Assert.assertTrue(result.getResult().get(recValue).contains(this.detectedAnomalies.get(7)));
}
Action attribution in schizophrenia: evidencing neural correlates of reasoning?
The association between the bodily representation of the individual in neural activity of the brain and the construct of the self appears as a consistent and key relationship in varied self-perceptions. Nonetheless, while its perceptual realization implicates a need for the physical instantiation of the construct, variation in its representational content suggest that there exists a second determinant shaping self-representation. Impairments in action attribution but not in automatic, predictive processing that are observed in schizophrenia suggest that this latter determinant fails to link drive to goal decisional mechanisms with bodily representation. Complementary saliency incentivization mechanisms that enhance goal directed, predictive precision, on the other hand, may function to elicit independence from probabilistic predictions by facilitating rational goal inferencing. Action attribution in schizophrenia: evidencing neural correlates of reasoning?
Introduction
Disturbances of self traditionally mark diagnostic evaluation of the schizophrenia patient . An abnormal sense of ownership of the body, loss of ego boundary, and confused sense of self agency comprise several of a variety of phenomenological descriptions that have been used to subjectively characterize the disease . The sense of self expressed through body ownership entails implicit and tacit feelings, both that one's body is the physical reality of one's self and that it is uniquely the personal possession of the self. This sense of identification with and possession of the body is experientially ongoing, as John Locke noted "a thinking intelligent being that has reason and reflection and can consider itself as itself the same thinking thing in different times and places ." It can occur, therefore, whether bodily actions are initiated by the self, or whether they are actions that are performed on the body.
It is the capacity to perceive the self in such dual and differing states that distinguishes the notion of the self from the sense of self elicited for action attribution . In this latter sense the self is the source of action initiation, which thus distinguishes the self as a source from which bodily actions emerge. Nonetheless the perceptual realization that the body remains the source of its actions links the perception of self to the body in attributed actions.
The association between a representation of the whole individual by his body in the neural activity of the brain, that is, as a global brain state, appears as a consistent and key relationship in varied perceptual realizations of the self. Infants perseverating toward a hidden object where they have previously reached for the object rather than where they have seen it last hidden, for example, the A not B task, is regarded as a failure of one of a class of executive control tasks . This failure in the inability to move toward a goal where last seen, has been interpreted as a failure in motor planning due to maturational insufficiency in mechanisms needed to situate the motor plan, that are associated with representing the self as the whole body . In like manner, the representation of the body is used to configure the spatial domain that distinguishes self from other in social settings . Finally, the minimal self has been indexed to the heart rate as a crucial index of bodily viability .
Conversely, while the varied perceptual realizations of the construct implicate a need to instantiate a bodily representation of the self, the representational variation that characterizes the perceptual constructs suggest that there also exists a second determinant that acts to variably shape the neural representation of the body according to experiential exigencies. As a corollary, psychiatric diseases impacting the self, like schizophrenia, can be expected to have a variable impact on the capacity to link the role of the bodily dynamic to the structuring of the self's representation . The point of a second determinant needed to shape bodily representation and the impact of schizophrenia on this process will be taken up here in the specific case of action attribution. Specifically, it will be argued that this second determinant may relate primitives of rational inferencing to bodily representation, which are revealed in schizophrenia lesioning.
Simulation in motor imagery: eliciting the self in dynamic action
The modern concept of the neural representation of the self, evoked in circumstances where the body is dynamically engaged in intentional actions, has evolved from several experimental legacies traced to the notion of the motor image . This image is so designated to indicate a covert action undertaken only mentally and as a simulation of a non-executed action. In its current understanding the motor image represents the feature elements of an intended motor trajectory, that is, the projected series of motions that will be executed in a motor plan. Hence, it contains the signal features needed for plan execution .
These features entail distinct and reciprocal contributions from central and peripheral origins; accordingly, they underscore the essential unity of dynamic performance even in its covert formulation . Central influences have been classically demonstrated by the presence of movements undertaken in the absence of sensorial input. Lashley, notably, observed that humans, and animals, were capable of motor actions despite the loss of afferent input.Later experiments in monkeys also confirmed a central origin by showing that those with deafferentation of spinal dorsal motor roots , that is, a lesion in the immediate locus of afferent input to the motor neuron junction, could still exhibit pointing movements. How central influences were executed became apparent in studies of consecutive motions, where the original relationship between a movement and its spatiotemporal origin had been altered. In such circumstances limb movements failed to correspond to their expected trajectories, involving instead a mis reaching that was then followed by progressive compensatory movements . This was interpreted to indicate that subsequent movement, undertaken when the respective relative coordinates were altered, responded to a memorized centrally evoked motor command that was sent to the sensory cortex, termed an efference copy or corollary discharge.
In continuous motions, however, sensory cues are essential to motor execution, where both are coupled in a mutually reciprocal and sustained process. This is necessary since as the body undergoes motion, its spatiotemporal position is continually changing; so also, the sensory cues that reference it . This peripheral contribution has been shown to provide an ongoing stream of sensory updating that serves to continually adjust motor execution and that has been likened to a perception action loop. Here resonating, sensorial input streams inwardly, continually modifying executable actions. In effect, this functional synergy constrains both the patterns of information inflow and outflow and so regulates the organism's perception of the external world as well as its interaction with it . The significance of this peripheral and perceptual influence is demonstrably evident in studies investigating the body's perceptual and dynamical interaction with the environment in developing infants, acquired habitual motor abilities, biodynamic studies showing that movement and proprioception are intrinsically related to perception, and in the motoric encoding of actions such as reaching .
Significantly, the body's topological, three-dimensional configuration is critical for coordinated and unified performance. Intuitively this is evident since the body's perimeter and not the brain's neural architecture is the junction where the individual encounters the world. By situating the point of incidence of sensorial reception to its relative bodily mooring, sensorial input modulates information flow as a function of the body's configuration.
Among the senses somatotopic input is unique for structuring the representation of the body, although other senses also have limited contributions. Existing studies show, for example, that somatotopic afferents are essential to the elicitation of self-identity, a staged process emerging throughout the whole body. Indeed, in the absence of bodily input there would be no percept. Beginning with these afferents a threedimensional postural image is successively built up from segmented zones that are progressively and hierarchically assembled en route to the brain. Much of this assembly appears to occur en route at spinal and brain stem levels. In effect the experiential and dynamical operation of this recurrent framework structures a prototypical platform where peripherally enacted and complex integration increasingly occurs. Interactive events with the body are thereby mapped to their respective bodily locus yielding a temporal evolution of bodily contextualized events. Building on this platform, neural activity can be assimilated from their various sittings to generate a common dynamical representation, that is, a bodily entity of which the representation is indicative .
Linking the whole to intentional action
Insights drawn from the motor image, understood in its compositional formulation, posit that bodily representation is a key feature stabilizing individual motions of the plan as a teleologically situated undertaking; that is, one inscribing actions that link an agent with an objective terminus. The percept of the self is thus not an abstraction but a real image emerging from throughout the body and identifying it with a spatiotemporal reality. Accordingly, it appears to constitute an ontological feature needed to confer individual unity for performative and other ends . Of significance, core notions of the self are also situated in emotive dimensions associated with interoceptive feelings localized to peripheral, somatic loci . Hence, the bodily representation is also invested with emotional salience that imbues the self with normative significance.
By building on the unitary dynamism established in reciprocal, body brain exchanges the percept can be evoked to guide unified action when called upon. In developing toddlers, for example, discrepancies between explicit and implicit formats in false-belief tests for navigational affordances reveal an inability to mentally construct a navigational trajectory by 'situating oneself and making moves' of the task at hand, consistent with observations made in the A not B task . In other words, it is necessary to conceive of the self as performing bodily action in order to construct its motor trajectory, an ability absent in very young children.
Here, actions must first be distinguished as one's own before they can be bound to the self representation. In Held's proposal bodily actions are distinguished from a surrounding and mobile terrain by comparing them with their efference copy stored in the sensory cortex. Actions consistent with the planned trajectory can be identified thereby as dynamic events belonging to the body. By then linking motions that have been so identified to a representation of the self it is possible also to ascribe a causal origin to the bodily representation; in other words to implement a mechanistic proposal for self agency . Sensorially, this link appears to be structured primarily by visual input, although, as in the case of bodily representation, other senses also provide limited afferent contribution. The use of different sensory modalities means, by extension, that the processes associating executed actions with the body are sensorially distinguished from those structuring the representational content of the body. Intuitively, this can be understood as appropriating adequate sensory performance for mobile versus stable objects that are ontologically distinguished as part to whole in their dynamic realization. This link between the actions and their bodily association, dubbed the Comparator Model by Frith , proposes that inscribed in the representational content of the motor image is a sense of agency that is linked with the representation of the body. This is also to say that a critical feature of self-agency is its holistic character that, accordingly, needs to be systemically distinguished.
Consistent with this interpretation, recent studies reveal that physical constraints limiting the range of activities that may be undertaken by the self are systemically imposed , that is, they are defined in terms of their systemic realization. Accordingly, systemic activities associated with self-agency evoke not just executive and motor neural circuits but entail body wide internal constraints that inform and integrate goal oriented performance. Purposeful behaviors thus link the representational content of the self to the pursuit of organismal and environmental interactions. In these studies constraints placed on these behaviors are due to a need to use free energy efficiently, which necessarily constrains the potential range of actions that the whole body engages in. Accordingly, strategical motor planning necessitates inferences about actions that integrate performance and maximize energy efficiency, which means that such inferences must be made in the context of the self as represented by the body. From these, and other experimental findings it is intuitive to see why the observed events and processes hypothesized by Von Holst et al. and by Frith et al. require a 'predictive processing' to engage motion. Predictions are needed if one is to engage in actions, that is, actions that are intended to be carried out by the self and are not merely passive responses to external events. Since all external contingencies cannot be known beforehand, neither can all energetic consequences of the intended actions. The expectation of the action, its prediction, affords a first approximation open to correction that can structure the sequence that follows, as goal directed and energetically consilient.
Schizophrenia: pathological influences on the bodily representation of self
Disturbances in the sense of self that mark schizophrenia in prodromal and acute stages have led to the recognition of the loss of self as a core symptom . The consensus on its loss is based on neurocognitive and psychopathological evidence, where both body ownership and sense of agency are impacted, with affected individuals exhibiting distinct and substantive physical and neural signatures . These physical signatures have been shown to extend to effective connectivities based on methodological approaches using imaging modalities . In normal patients, notably, there is a consistently high correspondence between fMRI modules and the modules of structural networks from diffusion imaging, across a wide range of populations. In patients with child onset schizophrenia, by contrast, there is a decrease in modularity and there are significantly different modular communities between affected and non-affected individuals, particularly in the right insular and perisylvian regions . That is, information exchange both within modules and interareally between modules appears to be affected.
Given that these and other physical alterations are consistently observed, this raises questions of whether and how representational content of the self may be affected and how this may be linked to the body. Insight into the neural features that these results may implicate can be inferred from misattribution errors that are experimentally evoked in normal individuals and that appear to be pathologically exacerbated in shizophrenic individuals. In such experiments uncertainty about bodily motion can be artificially induced, for example, by preventing direct visual control of the motion and substituting an 'alien' hand to perform similar movements. In many cases the individual misattributes the motion of the alien hand to himself . Here the link between seeing and attribution is broken, suggesting the implementation of visual sensoria in linking movements to a source identified with the self. On the basis of such experimental results, Feinberg and later Frith proposed that deficient self-monitoring is greatly exaggerated in the schizophrenic patient.
Yet, the association of the motions with a physical representation of the self, as pointed out by Jeannerod , is processionally complex, and multiply articulated, includes single appendage motions, that are linked to efference copy transmission, predictive processing which entails automated inferences of whole body performance, and a conscious sense of agency that links individual motions to a selfrepresentation that emanates from the whole body. How attribution is impaired is therefore unclear in these earlier proposals.
Based on evidence from schizophrenic patients Jeannerod has concluded that there exist two levels of self-recognition, one automatic for action identification, the second, conscious and employed for conducting intentional actions. These patients fall within a class where intentional actions but not automatic attributable actions are affected, that is the etiological basis of the disease is associated with an inability to attribute intentional actions to the self-representation.
Action attribution: freeing bodily representation from energy contingencies
However, since the whole body is used to anchor self-representation, both in automatic predictive processing circumstances and in intentional goal directed ones, the question of how the representational content of the whole body is distinguished for use in either mode is raised. Schizophrenia patients of the class studied by Jeannerod, notably, automatically adapt their movements to visuomotor conflicts, reaching their targets despite distorted visual feedback. That is, they appear to be unimpaired in automatically making needed predictions about the efficacy of their performance, incorporating them in efficient and coordinated motor strategies. Their automatic actions thus appear to successfully negotiate the needed inferential terrain; that is, understood in a Bayesian sense, where they are necessarily contextualized to the whole body .
On the other hand, schizophrenia patients are consistently worse than normal individuals in judging whether movements in attribution paradigms are their own or belong to another. In patients with first rank symptoms, that is, with symptoms most indicative of the disease, attribution errors amounted to nearly 80% of all detected, compared to 30% for normal individuals. In other words, schizophrenia patients appear to lack a capacity to consciously associate their self-initiated motions to a representation of the self. As the whole body representation itself does not appear to be impaired -patients automatically make predictive and prudential inferences on the basis of the whole body -the mechanisms associated with linking self-attribution to bodily representation, instead, appear to be affected.
What are the pertinent neural correlates? Among recently proposed models, decisional determinants are cast as complementary features of control and motivation, neurophysiologically distinguished as mutually reciprocal operations of the dorsolateral and medioventral frontal cortical regions . These two operations together are hypothesized to comprise drive to goal actions, understood as intentional, but embracing motivational variation. As proposed, decisional effectiveness is elicited through parcellation of conditional independencies that are hierarchically distributed, e.g., learned vs hardwired, to create probabilistic generative models servicing maximal self preservation (energetically) Cast in this formulation, decisional outcomes bind self to body via goals in probabilistic action sequences. The role of choice in predictive inferencing is thereby 'eluded', restricted to inverting a generative model to infer an action sequence . Saliency is then understood to be decisional through incentivization, rather than orthogonal to executive control, where correspondence to high prior probability has high motivational value. This interpretation suggests that decisional events emerge from learned, randomly evoked variation in prewired contingencies that are linked to saliency mechanisms that are constrained by generative, predictive modeling.
However, saliency can be understood, instead, as inductive, instilling repetition, and conferring precision. Goal progression, for example, increases its anticipated likelihood of attainment. When precision is inferred, successful goal directed behavior creates positive feedback . This is to say that saliency induces a form of meta-learning, linking outcomes precisely with action, rather than with probabilistic inferencing. Decisional enactments can be expected to increasingly draw from meta-learning paradigms that reduce uncertainty and improve consequence. By inferring precision, goal directed behavior confers a salience on predictive reliability that overcomes probabilistic modes linked to free energy exigencies and creates the primitive circumstances for decisions based on causal conclusions, that is, the primitive for rational behavior. Taken together, schizophrenia aberrations are consistent with this latter interpretation and reveal that predictive assessments are not the sole form of inferencing. By lesioning the junction between bodily source and goal, they also reveal that inferencing is likely to be staged, at lower probability levels and higher, rationally deductive ones.
Conclusions
Disturbances in the sense of self that mark schizophrenia in prodromal and acute stages and current neural models of self-identification suggest that the loss of self as a core symptom involves whole body representation. Based on evidence from schizophrenia patients there exist two levels of self-recognition, one automatic for action identification, the second, conscious and employed for conducting intentional actions. This latter determinant, altered in schizophrenia, is likely to entail rational primitives that bind self to goal through deductive inferencing.
def was_overriden(self):
    """Report whether this element was overridden.

    Widget-backed instances delegate the flag to their parent; standalone
    instances report the locally stored flag. (Method name, including the
    original spelling, is kept for API compatibility.)
    """
    return self._parent.was_overriden if self._as_widget else self._was_overriden
Tracking mesenchymal stem cell tumor-homing using fluorescent silica nanoparticles.
Stem cell tracking can reveal the underlying biological processes of stem-cell-based therapies such as the migration and biodistribution of human mesenchymal stem cells (hMSCs) in cancer therapy. Nanoparticle-based contrast agents offer unprecedented opportunities for achieving this goal due to their unique and tunable imaging capabilities. However, most nanoparticles are still in the process of development due to challenges such as retention time and safety issues, and are inaccessible to most researchers. In this article, we investigate the potential application of core-shell fluorescent silica nanoparticles (i.e. C dots), which are commercially available and approved by the FDA for clinical trials. Specifically we demonstrate that 500 nm C dots have prolonged cellular retention (up to one month), minimal contrast agent transfer (at least three weeks) between cells in a co-culture Boyden chamber system, and minimal influence on the hMSC properties including viability, proliferation, differentiation, and tropism to tumor cells. |
/**
* Created by pruiz on 5/4/17.
*/
public class TravelportXMLRequest {
private static Map valuesMap = new HashMap();
private static StrSubstitutor sub;
DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSXXX");
public static String getRequest(FlightsSearchCommand search, String targetBranchValue) {
valuesMap.put("passengersNumber", search.getPassengers().size());
valuesMap.put("classTravel", search.getCabinClass().equals("") ? "Economy" : search.getCabinClass() );
valuesMap.put("targetBranch", targetBranchValue);
sub = new StrSubstitutor(valuesMap);
String header = getXMLStringFromResource("/travelport/XML.request/requestHeader.xml");
header = sub.replace( header );
String xml = getSearchAirLegs( search );
//Passengers
String passengers = getSearchPassengers(search.getPassengersMap());
//Search Modifier Fragment
String modifier = getXMLStringFromResource("/travelport/XML.request/searchModifier.xml");
valuesMap.put("limit", search.getOffSet());
sub = new StrSubstitutor(valuesMap);
modifier = sub.replace(modifier);
String tail = getXMLStringFromResource("/travelport/XML.request/requestTail.xml");
return header + xml + modifier + passengers + tail;
}
private static String getXMLStringFromResource(String resource) {
InputStream soapInputStream = TravelportXMLRequest.class.getResourceAsStream(resource);
String xml = new BufferedReader(new InputStreamReader(soapInputStream))
.lines()
.collect(Collectors.joining("\n"));
return xml;
}
private static String getSearchPassengers(Map<PassengerType, List<Passenger>> passengersMap){
InputStream passengerInputStream = TravelportXMLRequest.class
.getResourceAsStream("/travelport/XML.request/requestPassenger.xml");
String passengerXML = new BufferedReader(new InputStreamReader(passengerInputStream))
.lines()
.collect(Collectors.joining("\n"));
String passengers = "";
for(Map.Entry<PassengerType, List<Passenger>> entry : passengersMap.entrySet()){
passengers += getPassengerDetail(passengerXML, entry);
}
return passengers;
}
private static String getPassengerDetail(String passengerXML, Map.Entry<PassengerType, List<Passenger>> entry){;
String passengers = "";
for(Passenger passenger : entry.getValue()){
valuesMap.put("passengerType", getTravelportCode(passenger.getType()));
valuesMap.put("passengerAge", passenger.getAge());
sub = new StrSubstitutor(valuesMap);
passengers += sub.replace(passengerXML);
}
return passengers;
}
private static String getSearchAirLegs(FlightsSearchCommand search) {
String leg = getXMLStringFromResource("/travelport/XML.request/requestSearchAirLeg.xml");
String airlegs = "";
for (int i = 0; i<search.getDepartingDates().size(); i++) {
valuesMap.put("departureAirport", search.getDepartureAirports().get(i) );
valuesMap.put("arrivalAirport", search.getArrivalAirports().get(i) );
valuesMap.put("departureDate", search.getDepartingDates().get(i).format(DateTimeFormatter.ISO_LOCAL_DATE));
sub = new StrSubstitutor(valuesMap);
airlegs += sub.replace( leg );
}
if ( search.getType() == FlightType.ROUNDTRIP ){
valuesMap.put("departureAirport", search.getArrivalAirports().get(0));
valuesMap.put("arrivalAirport", search.getDepartureAirports().get(0) );
valuesMap.put("departureDate", search.getReturningDates().get(0).format(DateTimeFormatter.ISO_LOCAL_DATE));
sub = new StrSubstitutor(valuesMap);
airlegs += sub.replace( leg );
}
return airlegs;
}
public static String getRequest(Segment seg, String targetBranchValue, TravelportFlightDetails details) {
InputStream soapInputStream = TravelportXMLRequest.class
.getResourceAsStream("/travelport/XML.request/airPricing.xml");
String header = new BufferedReader(new InputStreamReader(soapInputStream))
.lines()
.collect(Collectors.joining("\n"));
valuesMap.clear();
String carrier = seg.getOperatingAirlineCode() != null ? seg.getOperatingAirlineCode() : seg.getMarketingAirlineCode();
String flightNumber = seg.getOperatingFlightNumber()!= null? seg.getOperatingFlightNumber() : seg.getMarketingFlightNumber();
valuesMap.put("key", seg.getKey() );
valuesMap.put("group", details.getGroup() );
valuesMap.put("carrier", carrier );
valuesMap.put("flightNumber", flightNumber );
valuesMap.put("origin", seg.getDepartureAirportCode() );
valuesMap.put("destination", seg.getArrivalAirportCode() );
valuesMap.put("departureTime", seg.getDepartureDate().format( DateTimeFormatter.ISO_DATE_TIME) );
valuesMap.put("arrivalTime", seg.getArrivalDate().format(DateTimeFormatter.ISO_DATE_TIME) );
valuesMap.put("flightTime", seg.getDuration() );
valuesMap.put("travelTime", seg.getDuration() );
valuesMap.put("equipment", seg.getAirplaneData());
valuesMap.put("targetBranch", targetBranchValue);
sub = new StrSubstitutor(valuesMap);
header = sub.replace( header );
return header ;
}
private static String getTravelportCode(PassengerType type){
switch (type){
case ADULTS:
return "ADT";
case CHILDREN:
return "CHD";
case INFANTS:
return "INF";
case INFANTSONSEAT:
return "INS";
default:
return "ADT";
}
}
} |
Solitary Labial Metastasis of Adrenocortical Carcinoma Resembling a Cystic Tumor in a Child
Facial skin metastases account for less than 0.5% of patients with metastatic cancer and most of them originate from malignant melanoma . Because the clinical presentations of skin metastasis are often nonspecific, early detection is very difficult. Adrenocortical carcinoma (ACC) is a rare tumor accounting for 0.2% of all childhood cancers and has an incidence of 1-2 cases per 1.7 million persons . Until now, cutaneous metastases have rarely been reported. Herein, we report on a 4-year-old girl who presented with an elevated mass resembling a cystic tumor on her right upper lip that was diagnosed as metastatic ACC.
A 4-year-old girl who was diagnosed with Turner syndrome recently visited our hospital for the evaluation of a right maxillary area subcutaneous mass. The mass was 3.5 cm×2.5 cm in size and had developed 6 months earlier. It was round, non-tender and fixed and, clinically, seemed to be a cystic tumor.
The patient had a history of adrenalectomy due to left ACC 2 years earlier. At that time the tumor had been 7.5 cm×6.5 cm×4.5 cm in size and weighed 102 g (Fig. 1). The resection margins were tumorfree, and there was no lymph node enlargement or local invasion. According to the modified staging system of pediatric ACC, it was stage I. After complete resection of the ACC, no further symptoms or signs had developed until 18 months later, when she felt a palpable mass on her right maxillary area.
Fig. 1
The patient had a history of adrenal cortical carcinoma 2 years before the metastatic presentation. There was a huge mass (7 cm×6 cm×4 cm) on the adrenal gland consistent with adrenal cortical carcinoma.
The preoperative computed tomography (CT) and ultrasonography of the maxillary mass indicated the possibility of a solid tumor (Fig. 2). The mass was deep in the subcutaneous level, so the approach was performed through the right upper oral mucosa. After penetrating the orbicularis oris muscle, a well-encapsulated mass was found (Fig. 3). The mass was overlying the subcutaneous and muscle layer. The mass was excised entirely, preserving normal adjacent tissue without any rupture. The specimen was 2.2 cm×1.5 cm×1.5 cm in size and sent to the pathology department (Fig. 4).
Fig. 2
Preoperative computed tomography findings. Facial computed tomography findings showing a solid mass (white arrow) with irregular margins in the right premaxillary area.
Fig. 3
Intraoperative view. The well-encapsulated round mass (black arrow) was noted while the orbicularis oris muscle was being held with Adson tissue forceps.
Fig. 4
Postoperative gross mass view. A 2.2 cm×1.5 cm×1.5 cm specimen was removed.
Histological evaluation of the mass showed a high nuclear grade, atypical mitosis, sinusoidal invasion, and a mitotic rate of 40/50 in the high power field (Fig. 5). This histopathology was consistent with that of the ACC excised 2 years earlier. The laboratory studies of the hormone level revealed a normal range. Additionally, there was no evidence of lymph node enlargement or other organ metastasis in CT and positron emission tomography (PET)-CT evaluation. Therefore, she did not receive chemotherapy or radiation.
Fig. 5
Pathologic findings. The high power view of the metastatic adrenocortical carcinoma shows pleomorphic tumor cells, increased mitoses, and necrosis similar to that of the previous adrenocortical carcinoma (H&E, × 400).
During 2 years of follow-up, there was no evidence of local recurrence and no distant metastasis in facial and abdominal CT or PET-CT evaluation.
ACC is a rare malignancy, and it is associated with p53 mutation in children. Generally, ACC can be classified as a functional or non-functional tumor. Children with ACC most commonly present with virilization. Although several studies have shown that patients with functional tumors have a slightly better outcome, other studies contradict this finding . 75% of cases in children are localized to the adrenal gland (stage I and stage II); however, in adult patients, 30% to 85% of patients have distant metastasis at the time of presentation. The most common sites of metastasis are the liver, local lymph nodes, lung, peritoneum, and bone . However, cutaneous metastases have rarely been reported. In the previous report, the facial metastatic lesion existed concomitantly with ACC, but in our case the metastatic lesion occurred 18 months after the excision of the primary ACC. Therefore, our case is the first report of ACC with delayed metastasis to the facial area.
For primary ACC, histopathologic findings of tumor necrosis, a mitotic rate of more than 5 of 50 in the high power field, and atypical mitotic figures are associated with reduced disease-free survival . In metastatic ACC, poor prognostic factors are liver metastases, bone metastases, more than 5 metastatic lesions, more than 2 tumoral organs, a high mitotic count of more than 20 of 50 in the high power field, and presence of atypical mitoses. In contrast, patients whose metastases were diagnosed more than 1 year after resection of the primary ACC had a better outcome than those who developed a recurrence before this date . In our case, the poor prognostic factors were atypical mitosis and a high mitotic rate (40 of 50 in the high power field), which were found in both the original ACC 2 years earlier and the metastatic lesion. However, late recurrence of the metastasis (18 months after adrenalectomy) is a good prognostic factor.
In patients with advanced or metastatic tumors, resection of the metastatic disease is indicated if possible. Otherwise, they can be treated with adjuvant chemotherapy, mitotane, and radiation. There has been no analysis of the efficacy of adjuvant treatment in patients with advanced-stage disease . If complete resection is possible, no further therapy is indicated. Close observation with imaging and endocrine studies is needed.
Our case is noteworthy because delayed metastasis of ACC to the facial skin is extremely rare, and there was only a solitary metastatic lesion without any tumor recurrences or distant metastases.
The clinical presentations of skin metastasis are highly variable, so they may go unnoticed for a long time. Most skin metastasis appears as rapidly growing solitary or multiple round or subcutaneous nodules, and the lesion is usually painless . In our case, the mass had round, non-tender, and fixed features. Because the mass seemed clinically to be a cystic tumor and no symptoms or signs had developed until 18 months after resection of the primary ACC, early detection of the skin metastasis was difficult. Additionally, there was no evidence of metastasis in the regular oncology work-up. Although the punch biopsy revealed capillary proliferation, we used imaging studies (CT and ultrasonography) with a high index of suspicion and performed a surgical excision for accurate diagnosis.
In conclusion, careful examination with various diagnostic methods and total excisional biopsy are recommended for early detection of metastasis if there is any suspicious skin lesion in patients with a history of malignancy.
ImageS
tical carcinoma (ACC) is a rare tumor accounting for 0.2% of all childhood cancers and has an incidence of 1-2 cases per 1.7 million persons . Until now, cutaneous metastases have rarely been reported. Herein, we report on a 4-year-old girl who presented with an elevated mass resembling a cystic tumor on her right upper lip that was diagnosed as metastatic ACC.
A 4-year-old girl who was diagnosed with Turner syndrome recently visited our hospital for the evaluation of a right maxillary area subcutaneous mass. The mass was 3.5 cm × 2.5 cm in size and had developed 6 months earlier. It was round, non-tender and fixed and, clinically, seemed to be a cystic tumor.
The patient had a history of adrenalectomy due to left ACC 2 years earlier. At that time the tumor had been 7.5 cm × 6.5 cm × 4.5 cm in size and weighed 102 g (Fig. 1). The resection margins were tumorfree, and there was no lymph node enlargement or local invasion. According to the modified staging system of pediatric ACC, it was stage I. After complete resection of the ACC, no further symptoms or signs had developed until 18 months later, when she felt a palpable mass on her right maxillary area.
The preoperative computed tomography (CT) and ultrasonography of the maxillary mass indicated the possibility of a solid tumor (Fig. 2). The mass was deep in the subcutaneous level, so the approach was performed through the right upper oral mucosa. After penetrating the orbicularis oris muscle, a well-encap-
Solitary Labial Metastasis of Adrenocortical Carcinoma Resembling a Cystic Tumor in a Child
Facial skin metastases account for less than 0.5% of patients with metastatic cancer and most of them originate from malignant melanoma . Because the clinical presentations of skin metastasis are often nonspecific, early detection is very difficult. Adrenocor-
Fig. 1.
The patient had a history of adrenal cortical carcinoma 2 years before the metastatic presentation. There was a huge mass (7 cm × 6 cm × 4 cm) on the adrenal gland consistent with adrenal cortical carcinoma.
Fig. 2.
Preoperative computed tomographyfindings. Facial computed tomography findings showing a solid mass (white arrow) with irregular marginsin the right premaxillary area. Fig. 3. Intraoperative view. The well-encapsulated round mass (black arrow) was noted while the orbicular is oris muscle was being held with Adson tissue forceps.
sulated mass was found (Fig. 3). The mass was overlying the subcutaneous and muscle layer. The mass was excised entirely, preserving normal adjacent tissue without any rupture. The specimen was 2.2 cm × 1.5 cm × 1.5 cm in size and sent to the pathology department (Fig. 4). Histological evaluation of the mass showed a high nuclear grade, atypical mitosis, sinusoidal invasion, and a mitotic rate of 40/50 in the high power field (Fig. 5). This histopathology was consistent with that of the ACC excised 2 years earlier. The laboratory studies of the hormone level revealed a normal range. Additionally, there was no evidence of lymph node enlargement or other organ metastasis in CT and positron emission tomography (PET)-CT evaluation. Therefore, she did not receive chemotherapy or radiation.
During 2 years of follow-up, there was no evidence of local recurrence and no distant metastasis in facial and abdominal CT or PET-CT evaluation.
ACC is a rare malignancy, and it is associated with p53 mutation in children. Generally, ACC can be classified as a functional or non-functional tumor. Children with ACC most commonly present with virilization. Although several studies have shown that patients with functional tumors have a slightly better outcome, other studies contradict this finding . 75% of cases in children are localized to the adrenal gland (stage I and stage II); however, in adult patients, 30% to 85% of patients have distant metastasis at the time of presentation. The most common sites of metastasis are the liver, local lymph nodes, lung, peritoneum, and bone . However, cutaneous metastases have rarely been reported. In the previous report, the facial metastatic lesion existed concomitantly with ACC, but in our case the metastatic lesion occurred 18 months after the excision of the primary ACC. Therefore, our case is the first report of ACC with delayed metastasis to the facial area.
For primary ACC, histopathologic findings of tumor necrosis, a mitotic rate of more than 5 of 50 in the high power field, and atypical mitotic figures are associated with reduced disease-free survival . In metastatic ACC, poor prognostic factors are liver metastases, bone metastases, more than 5metastatic lesions, more than 2 tumoral organs, a high mitotic count of more than 20 of 50 in the high power field, and presence of atypical mitoses. In contrast, patients whose metastases were diagnosed more than 1 year after resection of the primary ACC had a better outcome than those who developed a recurrence before this date . In our case, the poor prognostic factors were atypical mitosis and a high mitotic rate (40 of 50 in the high power field), which were found in both the original ACC 2 years earlier and the metastatic lesion. However, late recurrence of the metastasis (18 months after adrenalectomy) is a good prognostic factor.
In patients with advanced or metastatic tumors, resection of the metastatic disease is indicated if possible. Otherwise, they can be treated with adjuvant chemotherapy, mitotane, and radiation. There has been no analysis of the efficacy of adjuvant treatment in patients with advanced-stage disease . If complete Breast Reconstruction in a resection is possible, no further therapy is indicated. Close observation with imaging and endocrine studies is needed.
Our case is noteworthy because delayed metastasis of ACC to the facial skin is extremely rare, and there was only a solitary metastatic lesion without any tumor recurrences or distant metastases.
The clinical presentations of skin metastasis are highly variable, so they may go unnoticed for a long time. Most skin metastasis appears as rapidly growing solitary or multiple round or subcutaneous nodules, and the lesion is usually painless . In our case, the mass had round, non-tender, and fixed features. Because the mass seemed clinically to be a cystic tumor and no symptoms or signs had developed until 18 months after resection of the primary ACC, early detection of the skin metastasis was difficult. Additionally, there was no evidence of metastasis in the regular oncology work-up. Although the punch biopsy revealed capillary proliferation, we used imaging studies (CT and ultrasonography) with a high index of suspicion and performed a surgical excision for accurate diagnosis.
In conclusion, careful examination with various diagnostic methods and total excisional biopsy are recommended for early detection of metastasis if there is any suspicious skin lesion in patients with a history of malignancy.
Breast prostheses have been available since the 1950s, serving the purpose of improving the breast contour in breast reconstruction and augmentation. Explantation of breast implants may be required for a variety of reasons including capsular contracture, damage or change in size and shape of the implant, or fear of the potential adverse effects of the implants on health . The complications and effects of explantation are |
package user
import "github.com/go-chi/chi"
func Routes(r chi.Router, handler Handler) chi.Router {
r.Post("/create", handler.Create)
r.Get("/", handler.Get)
r.Route("/{id}", func(r chi.Router) {
r.Get("/", handler.GetByID)
r.Post("/", handler.Update)
})
return r
}
|
def simple_cfu(instructions):
    """Build an anonymous ``Cfu`` subclass instance wired with ``instructions``.

    Args:
        instructions: mapping of integer function id -> instruction module.
            A shallow copy is captured, so later mutation of the caller's
            mapping does not affect the returned CFU.

    Returns:
        An instance of a ``Cfu`` subclass whose ``elab_instructions`` hook
        registers each instruction as a submodule named ``fn<id>``.
    """
    # Shallow copy so the closure does not alias the caller's mapping.
    saved_instructions = instructions.copy()
    class _ASimpleCfu(Cfu):
        def elab_instructions(self, m):
            # Register each instruction under a stable, id-derived name.
            for i, instruction in saved_instructions.items():
                m.submodules[f"fn{i}"] = instruction
            # NOTE(review): returning the instruction dict presumably matches
            # the base-class contract — confirm against Cfu.elab_instructions.
            return saved_instructions
    return _ASimpleCfu()
// React DOM prop names recognized when forwarding props to elements:
// mouse/drag event handlers, form events, and common attribute aliases.
// De-duplicated: 'onMouseLeave' and 'onMouseMove' each appeared twice
// in the original list; membership checks are unaffected.
const reactProps = [
  'onClick',
  'onContextMenu',
  'onDoubleClick',
  'onDrag',
  'onDragEnd',
  'onDragEnter',
  'onDragExit',
  'onDragLeave',
  'onDragOver',
  'onDragStart',
  'onDrop',
  'onMouseDown',
  'onMouseEnter',
  'onMouseLeave',
  'onMouseMove',
  'onMouseOut',
  'onMouseOver',
  'onMouseUp',
  'className',
  'onChange',
  'onFocus',
  'onBlur',
  'maxLength',
  'onSubmit',
  'htmlFor',
  'selected',
];
export default reactProps;
<filename>pkg/kubeadm/init.go
/*
* Copyright 2018 SUSE LINUX GmbH, Nuernberg, Germany..
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package kubeadm
import (
"bytes"
"fmt"
"github.com/golang/glog"
kubeadmscheme "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm/scheme"
kubeadmapiv1beta1 "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm/v1beta1"
kubeadmconstants "k8s.io/kubernetes/cmd/kubeadm/app/constants"
kubeadmutil "k8s.io/kubernetes/cmd/kubeadm/app/util"
"github.com/kubic-project/kubic-init/pkg/config"
)
// NewInit starts a new Init with kubeadm
func NewInit(kubicCfg *config.KubicInitConfiguration, args ...string) error {
args = append(args,
getIgnorePreflightArg(),
getVerboseArg())
return kubeadmWithConfig("init", kubicCfg, toInitConfig, args...)
}
// toInitConfig copies some settings to a Init configuration
func toInitConfig(kubicCfg *config.KubicInitConfiguration, featureGates map[string]bool) ([]byte, error) {
glog.V(3).Infof("[kubic] creating initialization configuration...")
initCfg := &kubeadmapiv1beta1.InitConfiguration{
ClusterConfiguration: kubeadmapiv1beta1.ClusterConfiguration{
ControlPlaneEndpoint: kubicCfg.Network.DNS.ExternalFqdn,
FeatureGates: featureGates,
APIServer: kubeadmapiv1beta1.APIServer{
CertSANs: []string{},
},
KubernetesVersion: config.DefaultKubernetesVersion,
Networking: kubeadmapiv1beta1.Networking{
PodSubnet: kubicCfg.Network.PodSubnet,
ServiceSubnet: kubicCfg.Network.ServiceSubnet,
},
},
NodeRegistration: kubeadmapiv1beta1.NodeRegistrationOptions{
KubeletExtraArgs: config.DefaultKubeletSettings,
},
}
nonEmpty := func(a, b string) string {
if len(a) > 0 {
return a
}
return b
}
if kubicCfg.Etcd.LocalEtcd != nil {
initCfg.ClusterConfiguration.Etcd = kubeadmapiv1beta1.Etcd{
Local: &kubeadmapiv1beta1.LocalEtcd{
ImageMeta: kubeadmapiv1beta1.ImageMeta{
ImageRepository: config.DefaultEtdcImageRepo,
ImageTag: config.DefaultEtdcImageTag,
},
},
}
}
// Add some extra flags in the API server for OIDC (necessary for using Dex)
initCfg.ClusterConfiguration.APIServer.ExtraArgs = map[string]string{
"oidc-client-id": nonEmpty(kubicCfg.Auth.OIDC.ClientID, config.DefaultOIDCClientID),
"oidc-ca-file": nonEmpty(kubicCfg.Auth.OIDC.CA, config.DefaultCertCA),
"oidc-username-claim": nonEmpty(kubicCfg.Auth.OIDC.Username, config.DefaultOIDCUsernameClaim),
"oidc-groups-claim": nonEmpty(kubicCfg.Auth.OIDC.Groups, config.DefaultOIDCGroupsClaim),
}
if len(kubicCfg.Auth.OIDC.Issuer) > 0 {
initCfg.ClusterConfiguration.APIServer.ExtraArgs["oidc-issuer-url"] = kubicCfg.Auth.OIDC.Issuer
} else {
public, err := kubicCfg.GetPublicAPIAddress()
if err != nil {
return nil, err
}
initCfg.ClusterConfiguration.APIServer.ExtraArgs["oidc-issuer-url"] = fmt.Sprintf("https://%s:%d", public, config.DefaultDexIssuerPort)
}
if len(kubicCfg.Network.Bind.Address) > 0 && kubicCfg.Network.Bind.Address != "127.0.0.1" {
glog.V(8).Infof("[kubic] setting bind address: %s", kubicCfg.Network.Bind.Address)
initCfg.LocalAPIEndpoint.AdvertiseAddress = kubicCfg.Network.Bind.Address
initCfg.ClusterConfiguration.APIServer.CertSANs = append(initCfg.ClusterConfiguration.APIServer.CertSANs, kubicCfg.Network.Bind.Address)
}
// TODO: enable these two args once we have OpenSUSE images in registry.opensuse.org for k8s
//
// ImageRepository what container registry to pull control plane images from
// initCfg.ImageRepository = "registry.opensuse.org"
//
// UnifiedControlPlaneImage specifies if a specific container image should
// be used for all control plane components.
// initCfg.UnifiedControlPlaneImage = ""
if len(kubicCfg.ClusterFormation.Token) > 0 {
glog.V(8).Infof("[kubic] adding a bootstrap token: %s", kubicCfg.ClusterFormation.Token)
var err error
bto := kubeadmapiv1beta1.BootstrapToken{}
bto.Token, err = kubeadmap<PASSWORD>beta1.NewBootstrapTokenString(kubicCfg.ClusterFormation.Token)
if err != nil {
return nil, err
}
initCfg.BootstrapTokens = []kubeadmapiv1beta1.BootstrapToken{bto}
}
if len(kubicCfg.Network.DNS.Domain) > 0 {
glog.V(3).Infof("[kubic] using DNS domain '%s'", kubicCfg.Network.DNS.Domain)
initCfg.Networking.DNSDomain = kubicCfg.Network.DNS.Domain
}
if len(kubicCfg.Network.DNS.ExternalFqdn) > 0 {
// TODO: add all the other ExternalFqdn's to the certs
initCfg.APIServer.CertSANs = append(initCfg.APIServer.CertSANs, kubicCfg.Network.DNS.ExternalFqdn)
}
glog.V(3).Infof("[kubic] using container engine '%s'", kubicCfg.Runtime.Engine)
if socket, ok := config.DefaultCriSocket[kubicCfg.Runtime.Engine]; ok {
glog.V(3).Infof("[kubic] setting CRI socket '%s'", socket)
initCfg.NodeRegistration.KubeletExtraArgs["container-runtime-endpoint"] = fmt.Sprintf("unix://%s", socket)
initCfg.NodeRegistration.CRISocket = socket
}
kubeadmscheme.Scheme.Default(initCfg)
initbytes, err := kubeadmutil.MarshalToYamlForCodecs(initCfg, kubeadmapiv1beta1.SchemeGroupVersion, kubeadmscheme.Codecs)
if err != nil {
return []byte{}, err
}
allFiles := [][]byte{initbytes}
clusterbytes, err := kubeadmutil.MarshalToYamlForCodecs(&initCfg.ClusterConfiguration, kubeadmapiv1beta1.SchemeGroupVersion, kubeadmscheme.Codecs)
if err != nil {
return []byte{}, err
}
allFiles = append(allFiles, clusterbytes)
return bytes.Join(allFiles, []byte(kubeadmconstants.YAMLDocumentSeparator)), nil
}
|
Long-time behavior of the $\omega \to \alpha$ transition in shocked Zirconium: Interplay of nucleation and plastic deformation
We study the thermally activated, slow conversion of the hysteretically retained $\omega$ phase into stable $\alpha$ phase in recovered samples of shocked zirconium. The $\omega$-phase decays in time following an algebraic law, unlike the predictions of the nucleation-growth framework for first order transitions, and residual volume fractions of phases and dislocation densities are related by a power law. We propose an explanation for the annealing mechanism through coupled dynamics of dislocations and phase change. We find that the long-time behavior is controlled by the interplay of dislocations, shear fluctuations, and remnant volume fractions of phases, which lead to an algebraic decay in time. For late time, thermally activated quantities such as the dislocation mobility and nucleation rate set the timescale and control the algebraic behavior, respectively. At high enough temperatures this behavior is effectively indistinguishable from standard Avrami kinetics.
We study the thermally activated, slow conversion of the hysteretically retained ω phase into stable α phase in recovered samples of shocked zirconium. The ω-phase decays in time following an algebraic law, unlike the predictions of the nucleation-growth framework for first order transitions, and residual volume fractions of phases and dislocation densities are related by a power law. We propose an explanation for the annealing mechanism through coupled dynamics of dislocations and phase change. We find that the long-time behavior is controlled by the interplay of dislocations, shear fluctuations, and remnant volume fractions of phases, which lead to an algebraic decay in time. For late time, thermally activated quantities such as the dislocation mobility and nucleation rate set the timescale and control the algebraic behavior, respectively. At high enough temperatures this behavior is effectively indistinguishable from standard Avrami kinetics.
There has been much recent interest in understanding the coupling of phase transformation and deformation processes . The group IV elements and especially Zr and Ti, with relatively easily accessible transition temperatures and pressures, have been excellent test beds for investigating aspects of deformation and phase transformations under high pressure and shock . Starting from ambient conditions, these metals undergo an hcp (α) to hexagonal (ω) structural transformation under pressure, which on release retains substantial volume fractions of the high pressure ω phase . The volume fraction of ω increases with the peak pressure. The metastable microstructure of coexisting α and ω phases in recovered samples, and the significant hysteresis across the equilibrium phase boundary, are a reflection of the non group-subgroup nature of the firstorder shear and shuffle (phonon) driven transformation . In addition, we expect aspects of the slow kinetics and history dependence, affected by defects and heterogeneities, to be consequences of the reconstructive nature of this transformation.
We have previously studied the initial evolution of the retained ω phase in recovered samples of shocked Zr at several temperatures under isothermal annealing conditions using x-ray diffraction measurements . Our principal conclusions were that in the temperature range 430-535K, the activation barriers calculated from a modified Kohlrausch-Williams-Watts relation for the evolution of the volume fraction for the reverse ω → α transformation increased with the peak shock pressure . Molecular dynamics simulations interpreted the changes in activation barrier to be controlled by heterogeneous nucleation from defects, such as dislocations, in the microstructure.
The focus of the current work is to develop a phenomenological model for the long time behavior that describes the annealing via a coupled dynamics of dislocations and phase change. We derive an algebraic dependence for the late time evolution of the volume fraction of phases at relatively low temperatures, which in the high temperature regime recovers the standard Avrami kinetics. While the kinetics of high temperature recrystallization is generally well understood and found to follow the established Avrami type sigmoidal profile typical of nucleation and growth processes , we show here that slow aging of the retained phase in zirconium at low temperatures involves a more subtle interplay with thermally activated dislocations. This is a mechanism reminiscent of shear-driven martensitic transformations. In the case of transformation induced plasticity (TRIP) it leads to a unique combination of high strength and ductility in steels. Localization of externally applied deformation, shear banding, creates potent sites inside the disappearing phase, on which the new phase can nucleate . Here we show that even in the absence of external shear, the thermally activated motion of dislocations can determine fluctuations of plastic shear capable of inducing a martensitic transition that eliminates a metastable phase.
We introduce a simplified model of how this can come about, and we test this framework on experimental data obtained by thermal annealing of recovered shocked Zirconium (hcp α-phase: P 63/mmc, c/a = 1.593 ; simple hexagonal ω-phase: P 6/mmm, c/a = 0.623). Polycrystalline α-Zr shocked loaded to 8 GPa and 10.5 GPa results in a retained volume fraction of ∼ 60% and ∼ 80% respectively of the metastable ω-phase . When the recovered shocked samples are heated, the data show a slow annealing as well as monotonic relationships between the density of dislocations and the volume fraction of the remnant phase . Transitions of this kind are often described by the Johnson-Mehl-Avrami-Kolmogorov approach . However the standard Avrami test on the data shows, at large times, strong deviations from uniform nucleation and growth behavior and its typical sigmoidal behavior. This data, previously reported in the literature , is plotted in Fig. 1. We first recognize that at long times the decay is algebraic in time and is well fitted by where v ω is the volume fraction of the disappearing ωphase, and the exponent χ appears to be thermally activated. The key to understanding this algebraic behavior, which is one of our results which we will derive in this work, lies in combining plasticity with nucleation of the new phase. We assume during shock that the material first twins and then nucleates dislocations, which can reach a considerable density before and possibly while the material finally undergoes a transformation toward the ω-phase. These dislocations, may be interpreted as a stabilizing network allowing for retention of the metastable ω-phase. Then, as the sample is heated, the acquired mobility of these thermally activated dislocations can undermine stability, thus providing the driving force for the transformation. 
Indeed, martensitic transformations are mediated by a shear strain , for example, the retained, metastable austenite phase in TRIP-steel, and its martensitic shear driven transformation .
Following Stringfellow et al. , we write the proportionality $\dot{v}_\omega \propto \dot{\gamma}$ relating volume fraction to strain rate $\dot{\gamma}$ in the shear-induced nucleation on potent sites created by plastic strain . Stringfellow et al. observed that for TRIP steels the strain-induced nucleation occurs at shear band intersections, and included a proportionality constant to account for the nucleation rate of shear bands at low strains. Here the proportionality constant will be related to the temperature-dependent dislocation removal rate. In the case, e.g., of TRIP steel, the shear is global and externally applied. In our system, however, the total strain rate is endogenous, and ambient. In particular $\dot{\gamma}$ is not a total derivative, but rather the average of the absolute value of the local strain rates, and thus, in general $\gamma \neq \int \dot{\gamma}\, dt$.
The experimental data provides average and coarse information on dislocation densities, which can be obtained from averaging the peak widths of the X-ray data .We can relate such information to the local shear rates. Indeed, Kocks and Mecking have introduced a phenomenological equation to relate variations in the shear rate to the density of dislocations: Equation (3) describes the storage (first term) and annihilation (second term) of dislocations in a material subjected to environmental shear, in our case coming from the collective motion of dislocations (plastic shear) and from phase transformation (transformation shear).
In general, the constant c 2 is proportional to the critical annihilation distance for dislocations; the term c 1 , is the subject of much recent research and its dependence on the specificity of the problem is still debated . We can neglect these issues by noting that c 1 simply defines the stationary density of dislocations ρ ∞ = (c 1 /c 2 ) 2 at which, in (3), dρ/dγ = 0 because of our focus on the asymptotic behavior. We can then eliminate c 1 by introducing η = ρ − ρ ∞ and expanding the Kocks-Mecking equation around η ∼ 0 + , thus obtaining in the asymptotic regime dη/dγ = −c 2 η/2, or equivalentlẏ Thus from Eq. (2) and Eq. (4) it is immediate to obtain which establishes a proportionality between the relative rates of variation of the dislocation density and volume fraction during phase change (here k is a nucleation rate). Fortunately this interesting result can be tested using experimental data. Indeed, from Eq. (5) we can now obtain a power law relating volume fraction and dislocation density where the exponent is the nucleation rate k, η 0 = ρ 0 −ρ ∞ is the density of mobile dislocations at the beginning of the process, and v ω,0 is the initial volume fraction.
In Fig. 2 we demonstrate the power law behavior of Eq. (6) via a logarithmic plot which is used to fit the constant k: $\rho_0$ is experimentally given, while $\rho_\infty$ has to be fitted, as the annealing is never complete in these experiments. In the left panels we demonstrate the power law of Eq. (6) via a log-log plot which is used to fit the constant k at large times. Since k is a nucleation constant, we expect it to be thermally activated, i.e. $k \propto e^{-\Delta W / k_B T}$, with activation energy $\Delta W$. Indeed Fig. 2 shows the Arrhenius fit for k which returns the values for the activation temperature $T_{8\,\mathrm{GPa}} = 8.7\times 10^{3}\,\mathrm{K} \approx 0.75\,\mathrm{eV}$ and $T_{10\,\mathrm{GPa}} = 6.9\times 10^{3}\,\mathrm{K} \approx 0.6\,\mathrm{eV}$, consistent with the results of atomistic calculations . The fact that activation energies for nucleation rates are different in the two cases points to an inherent difference in the microscopic structure of samples shocked at different pressures, in accordance with our previous findings .
To deduce the time evolution of the normalized volume fraction v ω (t) it is sufficient to know the time dependence of the dislocation density ρ(t). This can be resolved in a coupled dynamics that involves plasticity and phase change. There are two sources of shear: transformation shear γ t and plastic shear γ p . The source of plastic shear rateγ p is the motion of the dislocations in the ω-phase, and is well known from the Orowan equation γ p = bρ m w d , in which w d is the thermally activated mobility of the mobile dislocations ρ m , b is the magnitude of the Burgers vector. In our phenomenological approachγ is not a total derivative, as explained above when introducing the Kocks-Mecking equation, but rather the coarse grained shear, over a sufficiently large region of the absolute value of the intensity of the local shear rates. Furthermore the relationship between mobile dislocations ρ m and the excess dislocations η is unknown to us. In general the mobile dislocations will be less than η as some of the annihilating dislocations might be non-mobile. In the absence of more precise information, we proceed with the reasonable ansatz that the relative decrements of mobile dislocations and of dislocations tout-court are proportional, or dρ m /ρ m = ψdη/η. This, together with the Orowan equation (8) leads us to writė with ψ > 1 and c 3 proportional to the dislocation mobility w d . The other source of shear, the transformation shear γ t , is associated to the structural change in unit cells of the α and ω phases. Clearly, its rate is proportional to the rate of phase change, orγ t = c 4vω , where v ω is the volume fraction of the ω phase, and c 4 depends on the geometric parameters of the unit cells in the two phases. Indeed from Eq. (2), we have that in the asymptotic limit most of the shear rate is purely plastic, orγ t /γ ∝ v ω → 0. This allows us to approximate γ p γ in Eq. (9). 
Then, in the approximation of long times, ρ(t) can be found from (9) and (4) to be In Fig. 3, we demonstrate that Eq. (10) fits the data very well. Equation (10) describes a slow, algebraic decline at long times, controlled by the parameter $\tau^{-1} \propto c_3 \propto w_d$, which depends on temperature through the thermally activated mobility of dislocations $w_d$. Interestingly, the decay is faster at higher temperatures (because τ of Eq. (10) depends on the activated dislocation mobility) and for samples pre-shocked at higher pressure (because more dislocations have been nucleated to begin with). Yet the value of the exponent ψ from Eq. (9), which represents the ratio of relative decrement between mobile dislocations and dislocations proper, is almost the same in all cases and close to the value of ψ = 2.5. Equation (1) can now finally be deduced from (6) and (10) which imply χ = k/ψ: the exponent χ, which controls the long-time algebraic decay of the remnant volume fraction, is thus also, like k, an activated quantity that follows an Arrhenius law. This result suggests the following: while the dislocation mobility $w_d$ sets the timescale, the nucleation rate k controls the algebraic decay of the residual volume fraction of the ω-phase. As both quantities are thermally activated, higher temperature implies not only a smaller timescale (controlled by $w_d$) but also a larger relative decrement for the same relative increment of time (controlled by k).
Our work elucidates the limits of the standard nucleation and growth framework which can be relevant to other systems. We have seen that the algebraic approach to equilibrium is a consequence of the coupled dynamics of phase transformation and dislocations. When the temperature is high enough, the conventional Avrami formalism can still provide a reasonable description. Let us see how that can come about.
From Eq. (1) we can write directly an expression which shows that the total nucleation rate is not constant, as in usual nucleation and growth, but rather a power law of the concentration of the disappearing phase. However, at high temperatures, k becomes exponentially large and $(v_\omega/v_{\omega,0})^{\psi/k} \simeq 1$ unless $v_\omega/v_{\omega,0}$ is very small, say below the experimental error. Then the nucleation rate can be well approximated by the constant value k/ψτ. Of course, mathematically, the behavior at long times will always become algebraic. However, for all practical purposes at large enough temperatures, that might happen only when the residual volume fraction is as low as to be negligible or unmeasurable, while in general Eq. (11) shows that the measured nucleation rate might in fact appear constant at any experimentally reasonable value of the volume fraction.
In the context of the behavior of v ω vs η, described by Eq. (6), at high temperature and thus large k corresponds a sudden drop in volume fraction on a timescale τ ψ/k, while the density of dislocations would show little change, its timescale being τ ; thus a standard nucleation approach can be effectively regained when the two dynamics, for phase transformation and dislocations, become decoupled.
In conclusion, we show that a coupled dynamics of plasticity and nucleation leads to an algebraic transformation law in which dislocation mobility defines the timescale whereas the nucleation rate on potent sites controls the power law behavior. Beside proposing a possible explanation for the slow aging of the metastable ωphase in Zirconium, we are also elucidating one of the mechanisms by which, more generally, the accepted nucleation and growth framework can mask a more complex dynamics, one that becomes apparent only at lower temperatures. This is most likely not an isolated case and similar mechanisms may be at work in the aging of metastable multiphase materials under extreme thermodynamic and boundary conditions, such as at nanoscale or in film geometries. |
/**
 * Tests wiring a fully custom {@code IdentityStore} implementation into the
 * identity configuration, verifying that identity-management calls are
 * actually dispatched to the custom store's methods (tracked through a
 * shared {@link MethodInvocationContext}).
 *
 * @author Pedro Igor
 *
 */
public class CustomIdentityStoreTestCase {

    @Test
    public void testConfiguration() throws Exception {
        IdentityConfigurationBuilder builder = new IdentityConfigurationBuilder();

        // let's use this instance to test the custom store configuration and check for the methods invocation
        MethodInvocationContext methodInvocationContext = new MethodInvocationContext();

        builder
            .named("default")
                .stores()
                    .add(MyIdentityStoreConfiguration.class, MyIdentityStoreConfigurationBuilder.class)
                    .methodInvocationContext(methodInvocationContext)
                    .supportAllFeatures();

        PartitionManager partitionManager = new DefaultPartitionManager(builder.build());

        IdentityManager identityManager = partitionManager.createIdentityManager();

        // adding a user must be routed to MyIdentityStore.add(...)
        identityManager.add(new User("john"));

        assertEquals("addAttributedType", methodInvocationContext.getMethodName());

        // querying must be routed to MyIdentityStore.fetchQueryResults(...)
        identityManager.createIdentityQuery(User.class).getResultList();

        assertEquals("queryIdentityType", methodInvocationContext.getMethodName());
    }

    /**
     * Builder for {@link MyIdentityStoreConfiguration}; forwards the shared
     * {@link MethodInvocationContext} into the configuration it creates.
     */
    public static class MyIdentityStoreConfigurationBuilder extends
            IdentityStoreConfigurationBuilder<MyIdentityStoreConfiguration, MyIdentityStoreConfigurationBuilder> {

        // shared tracker handed to the created configuration
        private MethodInvocationContext methodInvocationContext;

        public MyIdentityStoreConfigurationBuilder(IdentityStoresConfigurationBuilder builder) {
            super(builder);
        }

        /**
         * Creates the store configuration from the builder's accumulated
         * state and injects the invocation tracker.
         */
        @Override
        public MyIdentityStoreConfiguration create() {
            MyIdentityStoreConfiguration config = new MyIdentityStoreConfiguration(getSupportedTypes(),
                    getUnsupportedTypes(),
                    getContextInitializers(),
                    getCredentialHandlerProperties(),
                    getCredentialHandlers());

            config.setMethodInvocationContext(this.methodInvocationContext);

            return config;
        }

        /** Fluent setter for the invocation tracker used by the test. */
        public MyIdentityStoreConfigurationBuilder methodInvocationContext(MethodInvocationContext methodInvocationContext) {
            this.methodInvocationContext = methodInvocationContext;
            return this;
        }
    }

    /**
     * Configuration type for the custom store. Carries the shared
     * {@link MethodInvocationContext} so {@link MyIdentityStore} can record
     * which method was invoked.
     */
    public static class MyIdentityStoreConfiguration extends AbstractIdentityStoreConfiguration {

        private MethodInvocationContext methodInvocationContext;

        protected MyIdentityStoreConfiguration(Map<Class<? extends AttributedType>,
                Set<IdentityOperation>> supportedTypes, Map<Class<? extends AttributedType>,
                Set<IdentityOperation>> unsupportedTypes, List<ContextInitializer> contextInitializers, Map<String,
                Object> credentialHandlerProperties, Set<Class<? extends CredentialHandler>> credentialHandlers) {
            // trailing booleans disable extra store features for this test
            super(supportedTypes, unsupportedTypes, contextInitializers, credentialHandlerProperties,
                    credentialHandlers, false, false, false);
        }

        @Override
        public Class<? extends IdentityStore> getIdentityStoreType() {
            return MyIdentityStore.class;
        }

        public void setMethodInvocationContext(MethodInvocationContext assertion) {
            this.methodInvocationContext = assertion;
        }

        public MethodInvocationContext getMethodInvocationContext() {
            return this.methodInvocationContext;
        }

        @Override
        public boolean supportsPartition() {
            return false;
        }
    }

    /**
     * Minimal custom identity store. Most operations are no-ops; the two the
     * test exercises ({@code add} and identity-type queries) record their
     * method name in the shared {@link MethodInvocationContext}.
     */
    public static class MyIdentityStore implements IdentityStore<MyIdentityStoreConfiguration>,
            CredentialStore<MyIdentityStoreConfiguration> {

        private MyIdentityStoreConfiguration config;
        // NOTE(review): not used by this test; presumably a placeholder for partition storage
        private final Map<String, Partition> partitions = new HashMap<String, Partition>();

        @Override
        public void setup(MyIdentityStoreConfiguration config) {
            this.config = config;
        }

        @Override
        public MyIdentityStoreConfiguration getConfig() {
            return this.config;
        }

        @Override
        public void add(IdentityContext context, AttributedType value) {
            value.setId(context.getIdGenerator().generate());
            // record the invocation so the test can assert it happened
            getConfig().getMethodInvocationContext().setMethodName("addAttributedType");
        }

        @Override
        public void update(IdentityContext context, AttributedType value) {
        }

        @Override
        public void remove(IdentityContext context, AttributedType value) {
        }

        @Override
        public <V extends IdentityType> List<V> fetchQueryResults(IdentityContext context, IdentityQuery<V> identityQuery) {
            // record the invocation; the test only checks dispatch, not results
            getConfig().getMethodInvocationContext().setMethodName("queryIdentityType");
            return Collections.emptyList();
        }

        @Override
        public <V extends IdentityType> int countQueryResults(IdentityContext context, IdentityQuery<V> identityQuery) {
            return 0;
        }

        @Override
        public <V extends Relationship> List<V> fetchQueryResults(IdentityContext context, RelationshipQuery<V> query) {
            return null;
        }

        @Override
        public <V extends Relationship> int countQueryResults(IdentityContext context, RelationshipQuery<V> query) {
            return 0;
        }

        @Override
        public void validateCredentials(IdentityContext context, Credentials credentials) {
        }

        @Override
        public void updateCredential(IdentityContext context, Account agent, Object credential, Date effectiveDate,
                Date expiryDate) {
            // TODO Auto-generated method stub
        }

        @Override
        public void storeCredential(IdentityContext context, Account agent, CredentialStorage storage) {
            // TODO Auto-generated method stub
        }

        @Override
        public <T extends CredentialStorage> T retrieveCurrentCredential(IdentityContext context, Account account,
                Class<T> storageClass) {
            // TODO Auto-generated method stub
            return null;
        }

        @Override
        public <T extends CredentialStorage> List<T> retrieveCredentials(IdentityContext context, Account agent,
                Class<T> storageClass) {
            // TODO Auto-generated method stub
            return null;
        }

        @Override
        public void removeCredential(IdentityContext context, Account account, Class<? extends CredentialStorage> storageClass) {
        }
    }

    /**
     * Simple holder that records the name of the last store method invoked,
     * letting the test assert dispatch without mocking frameworks.
     */
    public static class MethodInvocationContext {

        private String methodName;

        public void setMethodName(String methodName) {
            this.methodName = methodName;
        }

        public String getMethodName() {
            return this.methodName;
        }
    }
}
Metabolic Resistance to Acetolactate Synthase Inhibiting Herbicide Tribenuron-Methyl in Descurainia sophia L. Mediated by Cytochrome P450 Enzymes.
Descurainia sophia is one of the most notorious broadleaf weeds in China and has evolved extremely high resistance to the acetolactate synthase (ALS)-inhibiting herbicide tribenuron-methyl. The target-site resistance due to ALS gene mutations is well-known, while the non-target-site resistance (NTSR) is not yet well-characterized. Metabolic resistance, which is conferred by enhanced rates of herbicide metabolism, is the most important NTSR. To explore the mechanism of metabolic resistance underlying resistant (R) D. sophia plants, tribenuron-methyl uptake and metabolism levels, qPCR reference gene stability, and candidate P450 gene expression patterns were investigated. The results of liquid chromatography-mass spectrometry (LC-MS) analysis indicated that the metabolic rate of tribenuron-methyl in R plants was significantly faster than in susceptible (S) plants, and these metabolism differences could be eliminated by the P450 inhibitor malathion. The genes for 18S rRNA and TIP41-like were identified as the most suitable reference genes using the programs BestKeeper, NormFinder, and geNorm. The P450 gene CYP96A146 was constitutively overexpressed in R plants compared to S plants; this overexpression in R plants could be suppressed by malathion. Taken together, a higher expression level of P450 genes, leading to higher tribenuron-methyl metabolism, appears to be responsible for metabolic resistance to tribenuron-methyl in R D. sophia plants.
#include "../../config.h"
#include <stdio.h>
#ifndef HAVE_PNFRONT_H
int main()
{
printf("This program requires library PNFront.\n");
return 0;
}
#else
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include "statelib.h"
#include "pnfront.h"
#include "timerlib.h"
// #define VERIFY_STATIC
// #define VERIFY_DYNAMIC
#include <assert.h>
// Global reachability-set database shared by the generation routines below.
// NOTE(review): initialized elsewhere in this file (not visible here) — confirm.
StateLib::state_db* reachset = 0;

long v_max; // max number of vanishings per elimination.

// Forward declarations; bodies appear later in the file.
void Report();

#ifdef VERIFY_STATIC
void VerifyStatic(const pn_model* m);
#endif

#ifdef VERIFY_DYNAMIC
void VerifyDynamic(const pn_model* m);
#endif
// Print marking `s` of model `m` to `strm` as "[place = count, ...]",
// listing only the places holding a nonzero number of tokens.
void ShowState(const pn_model* m, const int* s, FILE* strm)
{
    fputs("[", strm);
    bool first = true;
    for (int p = 0; p < m->NumPlaces(); p++) {
        if (0 == s[p]) continue;  // skip empty places
        if (!first) fputs(", ", strm);
        fprintf(strm, "%s = %d", m->GetPlaceName(p), s[p]);
        first = false;
    }
    fputs("]", strm);
}
const int Stack_depth = 1024; // plenty!
// Abort the whole run if the expression stack would overflow its capacity.
inline void CheckStackOver(int top, int size)
{
    if (top >= size) {
        printf("Integer expression stack overflow\n");
        exit(1);
    }
}
// Abort the whole run if fewer than `elems` operands are on the stack.
inline void CheckStackUnder(int top, int elems)
{
    if (top < elems) {
        printf("Integer expression stack underflow\n");
        exit(1);
    }
}
// Abort if an assignment term appears where no target marking is available
// (i.e. the evaluator was called with a null `next` array).
inline void CheckNext(int* next)
{
    if (0 == next) {
        printf("Assignment expression out of place\n");
        exit(1);
    }
}
// Abort if an assignment-expression invariant does not hold.
inline void CheckAssign(bool b)
{
    if (!b) {
        printf("Assignment expression error\n");
        exit(1);
    }
}
// Evaluate a postfix (RPN) integer expression over the marking `current`.
//
// Term codes, processed against an explicit stack:
//   'l' -> push the literal value `val`
//   'p' -> push the token count of place index `val` from `current`
//   't' -> transition reference: not supported yet (aborts the program)
//   'b' -> pop two operands and apply binary operator `val`
//   'u' -> pop one operand and apply unary operator `val`
// The INCR_UOP/DECR_UOP "unary" operators are really assignments: they
// consume the following term, which must name a place ('p'), and update
// `next` in place.
//
// Returns the value left on top of the stack (0 for a null or empty
// expression).  Malformed expressions abort the whole program.
// NOTE(review): parameter `m` is unused in this function — presumably kept
// for signature symmetry with the other helpers; confirm before removing.
int EvaluateExpr(const pn_model* m, const int_expr* expr, const int* current, int* next)
{
    if (0==expr) return 0;
    // One shared evaluation stack, allocated on first use and deliberately
    // never freed (lives for the duration of the run).
    static int* stack = 0;
    if (0==stack) stack = new int[Stack_depth];
    int top = 0;
    for (int i=0; i<expr->NumTerms(); i++) {
        char what;
        int val;
        expr->GetTerm(i, what, val);
        int left, right = 0;
        switch (what) {
            case 'l':   // literal constant
                stack[top++] = val;
                CheckStackOver(top, Stack_depth);
                continue;
            case 'p':   // marking of place `val`
                stack[top++] = current[val];
                CheckStackOver(top, Stack_depth);
                continue;
            case 't':
                printf("Transitions not supported yet in arc expressions\n");
                exit(1);
            case 'b':   // binary operator: right operand was pushed last
                CheckStackUnder(top, 2);
                right = stack[--top];
                left = stack[--top];
                break;
            case 'u':   // unary operator: single operand
                CheckStackUnder(top, 1);
                left = stack[--top];
                break;
            default:
                printf("Postfix expression error %c\n", what);
                exit(1);
        }; // what
        // still here? must be an operator
        switch (val) {
            // Unary operators
            case int_expr::MINUS_UOP:
                stack[top++] = -left;
                continue;
            case int_expr::NOT_UOP:
                stack[top++] = ! left;
                continue;
            case int_expr::INCR_UOP:
                // Assignment form "place += left": requires a non-null `next`,
                // an empty stack after popping the operand, and a following
                // 'p' term naming the target place.  (The `&` below is a
                // deliberate non-short-circuit AND of two conditions.)
                CheckNext(next);
                i++;
                CheckAssign((0==top) & (i<expr->NumTerms()));
                expr->GetTerm(i, what, val);
                CheckAssign('p' == what);
                next[val] += left;
                continue;
            case int_expr::DECR_UOP:
                // Assignment form "place -= left"; see INCR_UOP above.
                CheckNext(next);
                i++;
                CheckAssign((0==top) & (i<expr->NumTerms()));
                expr->GetTerm(i, what, val);
                CheckAssign('p' == what);
                next[val] -= left;
                continue;
            // Binary operators
            case int_expr::ADD_BOP:
                stack[top++] = left + right;
                continue;
            case int_expr::SUB_BOP:
                stack[top++] = left - right;
                continue;
            case int_expr::MULT_BOP:
                stack[top++] = left * right;
                continue;
            case int_expr::DIV_BOP:
                stack[top++] = left / right;
                continue;
            case int_expr::MOD_BOP:
                stack[top++] = left % right;
                continue;
            case int_expr::MAX_BOP:
                stack[top++] = (left > right) ? left : right;
                continue;
            case int_expr::MIN_BOP:
                stack[top++] = (left < right) ? left : right;
                continue;
            case int_expr::AND_BOP:
                stack[top++] = left && right;
                continue;
            case int_expr::OR_BOP:
                stack[top++] = left || right;
                continue;
            case int_expr::IMPLIES_BOP:
                // logical implication: false antecedent, or both true
                stack[top++] = (0==left) || (left && right);
                continue;
            case int_expr::EQ_BOP:
                stack[top++] = left == right;
                continue;
            case int_expr::NEQ_BOP:
                stack[top++] = left != right;
                continue;
            case int_expr::GT_BOP:
                stack[top++] = left > right;
                continue;
            case int_expr::GE_BOP:
                stack[top++] = left >= right;
                continue;
            case int_expr::LT_BOP:
                stack[top++] = left < right;
                continue;
            case int_expr::LE_BOP:
                stack[top++] = left <= right;
                continue;
            default:
                printf("Unsupported operator %s\n",
                        expr->GetOperatorName(val));
                exit(1);
        } // val
    } // for i
    if (top) return stack[--top];
    return 0;
}
void VanishEnabledEvents(const pn_model* m, const int* mark, bool* enabled)
{
for (int i=m->NumTrans(); i; ) {
i--;
if (0==m->IsImmediate(i)) {
enabled[i] = 0;
continue;
}
const int_expr* t = m->GetTransitionEnablingExpr(i);
enabled[i] = EvaluateExpr(m, t, mark, 0);
}
}
void TangibleEnabledEvents(const pn_model* m, const int* mark, bool* enabled)
{
for (int i=m->NumTrans(); i; ) {
i--;
if (1==m->IsImmediate(i)) {
enabled[i] = 0;
continue;
}
const int_expr* t = m->GetTransitionEnablingExpr(i);
enabled[i] = EvaluateExpr(m, t, mark, 0);
}
}
// Compute the marking reached from `curr` by firing transition `t`:
// start from a copy of `curr`, then let the firing expression update
// `next` in place.
void GetNextState(const pn_model* m, int t, const int* curr, int* next)
{
    memcpy(next, curr, sizeof(int) * m->NumPlaces());
    EvaluateExpr(m, m->GetTransitionFiringExpr(t), curr, next);
}
bool IsVanishing(const pn_model* m, const int* mark)
{
for (int i=m->NumTrans(); i; ) {
i--;
if (0==m->IsImmediate(i)) continue;
const int_expr* t = m->GetTransitionEnablingExpr(i);
if (EvaluateExpr(m, t, mark, 0)) return true;
}
return false;
}
long Gencmp(const pn_model* m)
{
StateLib::state_db* rscmp =
StateLib::CreateStateDB(StateLib::SDBT_Splay, true, false);
if (0==rscmp) {
fputs("Couldn't create comparison set\n", stdout);
return -2;
}
for (int i=0; i<m->NumTrans(); i++) {
if (m->IsImmediate(i)) {
fputs("Model has immediate transitions, comparison not supported\n", stdout);
return -2;
}
}
int* current = new int[m->NumPlaces()];
int* next = new int[m->NumPlaces()];
bool* enabled = new bool[m->NumTrans()];
m->GetInitialMarking(current);
reachset->InsertState(current, m->NumPlaces());
rscmp->InsertState(current, m->NumPlaces());
long t_exp = 0;
for (;;) {
if (t_exp < reachset->Size()) {
reachset->GetStateKnown(t_exp, current, m->NumPlaces());
} else {
// no tangibles, bail out
break;
}
TangibleEnabledEvents(m, current, enabled);
for (int e=0; e<m->NumTrans(); e++) if (enabled[e]) {
GetNextState(m, e, current, next);
// add next state to appropriate slot
long to, tocmp;
try {
to = reachset->InsertState(next, m->NumPlaces());
tocmp = rscmp->InsertState(next, m->NumPlaces());
}
catch (StateLib::error e) {
printf("Error: %s\n", e.getName());
printf("Bailing out after %ld states discovered\n", reachset->Size());
return -1;
}
if (to != tocmp) {
printf("\nDisagreement when exploring state %ld:\n", t_exp);
printf("\trss gets index %ld\n", to);
printf("\tcmp gets index %ld\n", tocmp);
/*
printf("\tcmp ");
rscmp->DumpState(stdout, tocmp);
printf("\trss ");
reachset->DumpState(stdout, tocmp);
printf("\trss ");
reachset->DumpState(stdout, to);
*/
reachset->ConvertToStatic(true);
return -1;
}
} // for e
// Advance
t_exp++;
} // infinite explore loop
delete[] current;
delete[] next;
delete[] enabled;
delete rscmp;
return reachset->Size();
}
long Generate(const pn_model* m, bool quiet, bool show, bool debug)
{
StateLib::state_db* vanishing = 0;
const StateLib::state_coll* vcoll = 0;
vanishing = StateLib::CreateStateDB(StateLib::SDBT_Splay, true, false);
if (0==vanishing) {
fputs("Couldn't create vanishing set\n", stdout);
return -2;
}
vcoll = vanishing->GetStateCollection();
bool has_immed = false;
for (int i=0; i<m->NumTrans(); i++) {
if (m->IsImmediate(i)) {
has_immed = true;
break;
}
}
int* current = new int[m->NumPlaces()];
int* next = new int[m->NumPlaces()];
bool* enabled = new bool[m->NumTrans()];
if (!quiet) {
if (has_immed) fputs("Petri net has immediate transitions\n", stdout);
else fputs("Petri net has only timed transitions\n", stdout);
}
timer watch;
m->GetInitialMarking(current);
// TO DO: check if initial state is vanishing
reachset->InsertState(current, m->NumPlaces());
long v_exp = 0;
long t_exp = 0;
v_max = 0;
if (show) fputs("Reachable states:\n", stdout);
bool current_is_vanishing = false;
for (;;) {
if (v_exp < vanishing->Size()) {
// there are vanishings to explore
vcoll->GetStateKnown(v_exp, current, m->NumPlaces());
current_is_vanishing = true;
} else {
// No vanishings to explore
if (current_is_vanishing) {
vanishing->Clear();
if (v_exp > v_max) v_max = v_exp;
v_exp = 0;
} // if current_is_vanishing
if (t_exp < reachset->Size()) {
reachset->GetStateKnown(t_exp, current, m->NumPlaces());
current_is_vanishing = false;
} else {
// no tangibles either, bail out
break;
}
}
if (debug) {
if (current_is_vanishing)
printf("Exploring V# %-9ld", v_exp);
else
printf("Exploring T# %-9ld", t_exp);
ShowState(m, current, stdout);
fputc('\n', stdout);
fflush(stdout);
}
if (show && !current_is_vanishing) {
printf("State %ld: ", t_exp);
ShowState(m, current, stdout);
fputc('\n', stdout);
}
if (current_is_vanishing) VanishEnabledEvents(m, current, enabled);
else TangibleEnabledEvents(m, current, enabled);
for (int e=0; e<m->NumTrans(); e++) if (enabled[e]) {
GetNextState(m, e, current, next);
bool next_is_vanishing = IsVanishing(m, next);
// add next state to appropriate slot
long to;
try {
if (next_is_vanishing) {
// new state is vanishing
to = vanishing->InsertState(next, m->NumPlaces());
} else {
// new state is tangible
to = reachset->InsertState(next, m->NumPlaces());
} // if next_is_vanishing
}
catch (StateLib::error e) {
printf("Error: %s\n", e.getName());
printf("Bailing out after %ld states discovered\n", reachset->Size());
return -1;
}
if (debug) {
printf("\t");
if (current_is_vanishing) printf("V# %-9ld", v_exp);
else printf("T# %-9ld", t_exp);
printf("via %-10s to ", m->GetTransName(e));
if (next_is_vanishing) fputs("V", stdout);
else fputs("T", stdout);
printf("# %-9ld", to);
ShowState(m, next, stdout);
fputc('\n', stdout);
}
} // for e
// Advance
if (current_is_vanishing) v_exp++;
else t_exp++;
} // infinite explore loop
if (!quiet) {
printf("Generation took %lf seconds\n", watch.elapsed_seconds());
Report();
}
if (vanishing) {
if (!quiet) {
fputs("Vanishing states:\n", stdout);
printf("\tPeak number of states: %ld\n", v_max);
printf("\tPeak memory: %ld bytes\n", vanishing->ReportMemTotal());
}
delete vanishing;
}
#ifdef VERIFY_STATIC
VerifyStatic(m);
#endif
#ifdef VERIFY_DYNAMIC
VerifyDynamic(m);
#endif
delete[] current;
delete[] next;
delete[] enabled;
return reachset->Size();
}
// Print a labelled memory amount, scaling to the largest unit that keeps
// the value below 2000 (bytes -> Kb -> Mb -> Gb, dividing by 1024 each step).
void MemShow(const char* what, double mem)
{
    static const char* units[] = { "bytes", "Kb", "Mb", "Gb" };
    fputs(what, stdout);
    int u = 0;
    while (mem >= 2000 && u < 3) {
        mem /= 1024.0;
        u++;
    }
    fprintf(stdout, "%lg %s\n", mem, units[u]);
}
// Print statistics about the tangible state space held in the global
// 'reachset': per-encoding counts, totals, and memory usage breakdown.
void Report()
{
    const StateLib::state_coll* collection = reachset->GetStateCollection();
    fputs("Tangible states:\n", stdout);
    // One line per encoding method actually in use.
    const int num_methods = collection->NumEncodingMethods();
    for (int method = 0; method < num_methods; method++) {
        const int count = collection->ReportEncodingCount(method);
        if (count != 0) {
            fprintf(stdout, "\t %d encodings are %s\n",
                    count, collection->EncodingMethod(method));
        }
    }
    fprintf(stdout, "\t %ld states total\n", collection->Size());
    MemShow("\tState memory: ", collection->ReportMemTotal());
    double bytes_per_state = collection->ReportMemTotal();
    bytes_per_state /= collection->Size();
    fprintf(stdout, "\tAvg. bytes per state: %lg\n", bytes_per_state);
    // Database overhead = total minus the raw state storage.
    const long dbmem = reachset->ReportMemTotal() - collection->ReportMemTotal();
    MemShow("\tDatabase memory: ", dbmem);
    MemShow("\tGrand total: ", reachset->ReportMemTotal());
}
#ifdef VERIFY_STATIC
// Convert the global 'reachset' to static (read-only) mode and verify that
// every stored state can still be found at its original index.
// (Compiled only under VERIFY_STATIC.)
void VerifyStatic(const pn_model* m)
{
    const state_coll* collection = reachset->GetStateCollection();
    timer watch;
    fputs("Converting to static mode\n", stdout);
    statedb_error e = reachset->ConvertToStatic(true);
    if (e != SDB_Success) {
        switch (e) {
            case SDB_NoMemory:
                fputs("Not enough memory\n", stdout);
                return;
            default:
                fputs("Couldn't convert to static (unexpected error)\n", stdout);
                return;
        }
    }
    // Typo fix: "Converstion" -> "Conversion"
    fprintf(stdout, "Conversion took %lf seconds\n", watch.elapsed_seconds());
    int* current = new int[m->NumPlaces()];
    fputs("Verifying states...\n", stdout);
    watch.reset();
    for (long i = 0; i < reachset->Size(); i++) {
        collection->GetStateKnown(i, current, m->NumPlaces());
        long where = reachset->FindState(current, m->NumPlaces());
        if (where != i) {
            // Typo fix: "discrepency" -> "discrepancy"
            fprintf(stdout, "discrepancy, state %ld: ", i);
            ShowState(m, current, stdout);
            fputs(" not found\n", stdout);
            delete[] current;  // BUG FIX: was leaked on this path
            return;
        }
    }
    fprintf(stdout, "Verification took %lf seconds\n", watch.elapsed_seconds());
    delete[] current;  // BUG FIX: was never freed on the success path either
}
#endif
#ifdef VERIFY_DYNAMIC
// Convert the global 'reachset' back to dynamic mode and verify that every
// stored state can still be found at its original index.
// (Compiled only under VERIFY_DYNAMIC.)
void VerifyDynamic(const pn_model* m)
{
    const state_coll* collection = reachset->GetStateCollection();
    timer watch;
    fputs("Converting to dynamic mode\n", stdout);
    statedb_error e = reachset->ConvertToDynamic(true);
    if (e != SDB_Success) {
        switch (e) {
            case SDB_NoMemory:
                fputs("Not enough memory\n", stdout);
                return;
            default:
                // BUG FIX: message wrongly said "static" in the dynamic path.
                fputs("Couldn't convert to dynamic (unexpected error)\n", stdout);
                return;
        }
    }
    // Typo fix: "Converstion" -> "Conversion"
    fprintf(stdout, "Conversion took %lf seconds\n", watch.elapsed_seconds());
    int* current = new int[m->NumPlaces()];
    fputs("Verifying states...\n", stdout);
    watch.reset();
    for (long i = 0; i < reachset->Size(); i++) {
        collection->GetStateKnown(i, current, m->NumPlaces());
        long where = reachset->FindState(current, m->NumPlaces());
        if (where != i) {
            // Typo fix: "discrepency" -> "discrepancy"
            fprintf(stdout, "discrepancy, state %ld: ", i);
            ShowState(m, current, stdout);
            fputs(" not found\n", stdout);
            delete[] current;  // BUG FIX: was leaked on this path
            return;
        }
    }
    fprintf(stdout, "Verification took %lf seconds\n", watch.elapsed_seconds());
    delete[] current;  // BUG FIX: was never freed on the success path either
}
#endif
// Print version banners plus command-line usage, then return 0 (the value
// main() propagates as the exit status on a usage error).
int Usage(char* name)
{
    puts(StateLib::LibraryVersion());
    puts(PNFRONT_LibraryVersion());
    printf("\nUsage: %s [-d] [-h|-i] [-r|-s|-t] [-q|-v] <file>\n", name);
    // Help text kept in a table so the lines stay in one place.
    static const char* help[] = {
        "\nReads a Petri net from the given file (using PNFront format).",
        "Builds the reachability set of the Petri net.",
        "\nOptions:",
        "\t-d:\t(Debug) Display reachability graph information\n",
        "\t-h:\tUse handles to identify states",
        "\t-i:\tUse indexes to identify states (default)\n",
        "\t-r:\tUse red-black tree",
        "\t-s:\tUse splay tree (default)",
        "\t-t:\tUse hash table\n",
        "\t-q:\t(Quiet) Only checks if the count is correct.",
        "\t-v:\t(Verbose) The reachable states will be displayed.\n",
    };
    for (size_t i = 0; i < sizeof(help) / sizeof(help[0]); i++) {
        puts(help[i]);
    }
    return 0;
}
int main(int argc, char** argv)
{
bool compare = false;
bool quiet = false;
bool show_states = false;
bool debug = false;
bool use_index = true;
StateLib::state_db_type which = StateLib::SDBT_Splay;
char* name = argv[0];
// process command line
int ch;
for (;;) {
ch = getopt(argc, argv, "dhiqrstv");
if (ch<0) break;
switch (ch) {
/*
case 'c':
compare = true;
break;
*/
case 'h':
use_index = false;
break;
case 'i':
use_index = true;
break;
case 'r':
which = StateLib::SDBT_RedBlack;
break;
case 's':
which = StateLib::SDBT_Splay;
break;
case 't':
which = StateLib::SDBT_Hash;
break;
case 'q':
debug = show_states = false;
quiet = true;
break;
case 'v':
show_states = true;
quiet = false;
break;
case 'd':
debug = true;
quiet = false;
break;
default:
return Usage(name);
} // switch
} // for
argc -= optind;
argv += optind;
if (argc != 1) return Usage(name);
FILE* input = fopen(argv[0], "r");
if (0==input) {
printf("Couldn't open file %s\n", argv[0]);
return 1;
}
long reachable;
int c = getc(input);
if (c != '#') {
ungetc(c, input);
reachable = -1;
} else {
if (1!=fscanf(input, "%ld", &reachable)) reachable = -1;
rewind(input);
}
if (!quiet && reachable>0)
printf("Expecting %ld reachable states\n", reachable);
pn_model* mdl = PNFRONT_Compile_PN(input, stderr);
if (0==mdl) return 1;
if (quiet) {
fprintf(stderr, "%-40s", argv[0]);
} else {
printf("Generating reachability set using ");
switch(which) {
case StateLib::SDBT_RedBlack: printf("red-black tree\n"); break;
case StateLib::SDBT_Splay: printf("splay tree\n"); break;
case StateLib::SDBT_Hash: printf("hash table\n"); break;
default: printf("unknown data structure\n");
}
}
reachset = StateLib::CreateStateDB(which, use_index, false);
if (0==reachset) {
printf("Couldn't create reachability set!\n");
return 1;
}
long actual;
if (compare) {
actual = Gencmp(mdl);
} else {
actual = Generate(mdl, quiet, show_states, debug);
}
if (reachable>0 && actual != reachable) {
if (quiet) fprintf(stderr, "Changed\n");
else printf("Input file says there should be %ld tangible states\n", reachable);
return 1;
} else {
if (quiet) fprintf(stderr, "Ok\n");
}
delete reachset;
return 0;
}
#endif // giant ifdef
|
/* Depth-first search of process tree 't' for a process with pid 'p'.
 * Children hang off ->l; siblings are chained through ->r.
 * (The original comment claimed breadth-first; the traversal is DFS.)
 *
 * BUG FIX: the original sibling loop called LookupPID(tmp, p) where tmp
 * was always NULL, so siblings — and their entire subtrees — were never
 * searched; only the leftmost spine of the tree was ever examined. */
proc_t* LookupPID(proc_t* t, pid_t p) {
    proc_t* tmp;
    for (; t; t = t->r) {
        if (t->pid == p)
            return t;
        if ((tmp = LookupPID(t->l, p)))
            return tmp;
    }
    return NULL;
}
def fit_from_image(self, data, voxelsize, seeds, unique_cls):
    """Extract per-voxel feature vectors from an image and fit the classifier.

    Thin convenience wrapper: features_from_image() selects the feature
    vectors and class labels for the seeded voxels (presumably restricted
    to classes in unique_cls — confirm in features_from_image), then
    fit() trains on that selection.
    """
    fvs, clsselected = self.features_from_image(data, voxelsize, seeds, unique_cls)
    self.fit(fvs, clsselected)
import {
Body,
Controller,
Delete,
Get,
Param,
Post,
Put,
} from "@nestjs/common";
import { TodoService } from "./todo.service";
import { CreateTodoDto } from "./dto/create-todo.dto";
/**
 * REST controller for todo items, mounted at `/todo`.
 * All persistence logic is delegated to TodoService.
 */
@Controller("todo")
export class TodoController {
  constructor(private todoService: TodoService) {}

  /** POST /todo/add — create a new todo from the request body. */
  @Post("add")
  addTodo(@Body() createTodoDto: CreateTodoDto) {
    return this.todoService.addTodo(createTodoDto);
  }

  /**
   * GET /todo/:id — fetch todos. NOTE(review): `id` presumably identifies
   * the owning user/list rather than a single todo — confirm in TodoService.
   */
  @Get(":id")
  getTodos(@Param("id") id: string) {
    return this.todoService.getTodos(id);
  }

  /** PUT /todo/complete/:id — mark the todo as completed. */
  @Put("complete/:id")
  completeTodo(@Param("id") id: string) {
    return this.todoService.completeTodo(id);
  }

  /** PUT /todo/important/:id — toggle/mark the todo as important. */
  @Put("important/:id")
  importantTodo(@Param("id") id: string) {
    return this.todoService.importantTodo(id);
  }

  /** DELETE /todo/delete/:id — remove the todo. */
  @Delete("delete/:id")
  deleteTodo(@Param("id") id: string) {
    return this.todoService.deleteTodo(id);
  }
}
|
// Clone returns a copy of the patch, backed by a pooled instance.
// NOTE(review): the original comment claimed "deep copy", but Operations
// elements are copied by value via append — if Operation contains pointers
// or slices, those inner fields are shared with the source. Confirm.
// Also assumes objectPool.BorrowPatch() hands back a reset/empty Patch.
func (patch *Patch) Clone() *Patch {
	clone := objectPool.BorrowPatch()
	clone.Baseline = patch.Baseline
	clone.Target = patch.Target
	clone.Operations = append(clone.Operations, patch.Operations...)
	return clone
}
Selective C-alkylation between Alcohols Catalyzed by N-Heterocyclic Carbene Molybdenum.
The first implementation of a molybdenum complex bearing an easily accessible bis-N-heterocyclic carbene ligand to catalyze the β-alkylation of secondary alcohols via the borrowing-hydrogen (BH) strategy, using alcohols as alkylating agents, is reported. Remarkably high activity, excellent selectivity, and broad substrate scope are demonstrated in this green and step-economical protocol, whose advantages include catalyst loadings as low as 0.5 mol%, a catalytic amount of NaOH as the base, and H2O as the sole by-product. Mechanistic studies indicate a plausible outer-sphere mechanism in which alcohol dehydrogenation is the rate-determining step.
// Function to find Longest Increasing Subsequence in given array
void printLIS(int arr[], int n)
{
set<Node> S;
map<int, int> parent;
for (int i = 0; i < n; i++)
{
Node curr = {arr[i], i};
auto it = S.insert(curr).first;
if (++it != S.end())
S.erase(it);
it = S.find(curr);
parent[i] = (--it)->index;
}
print(arr, parent, S);
} |
<filename>src/utils/ObjectContains.ts<gh_stars>0
import isPlainObject from 'lodash/isPlainObject';
import {AnyObject, DeepPartial} from '../@types/Generic';
export function objectContains<TNeedle extends AnyObject, THaystack extends TNeedle = TNeedle>(
haystack: THaystack,
needle: DeepPartial<THaystack>,
): boolean {
if (!isPlainObject(haystack)) {
throw new Error(`Invalid haystack`);
}
if (!isPlainObject(needle)) {
throw new Error(`Invalid needle`);
}
return Object.keys(needle).every((key: keyof TNeedle) => {
if (isPlainObject(haystack[key]) && isPlainObject(needle[key])) {
return objectContains(haystack[key], needle[key] as DeepPartial<TNeedle[keyof TNeedle]>);
}
return needle[key] === haystack[key];
});
} |
def generate_grid(self, background):
    # Draw the green header/grid lines onto the given surface.
    # Coordinates are hard-wired for an 800px-wide display: a 70px-tall
    # header band, split horizontally at y=35 and vertically at x=320.
    pygame.draw.line(background, (0, 255, 0), (800, 0), (0, 0))    # top edge
    pygame.draw.line(background, (0, 255, 0), (800, 70), (0, 70))  # header bottom
    pygame.draw.line(background, (0, 255, 0), (320, 70), (320, 0)) # vertical divider
    pygame.draw.line(background, (0, 255, 0), (800, 35), (0, 35))  # header midline
def _FormatTestData(self, data):
hexadecimal_lines = []
data_size = len(data)
for block_index in range(0, data_size, 16):
data_string = data[block_index:block_index + 16]
hexadecimal_string = ', '.join([
'0x{0:02x}'.format(byte_value)
for byte_value in data_string[0:16]])
if len(data_string) < 16 or block_index + 16 == data_size:
hexadecimal_lines.append('\t{0:s}'.format(hexadecimal_string))
else:
hexadecimal_lines.append('\t{0:s},'.format(hexadecimal_string))
return '\n'.join(hexadecimal_lines) |
def add_learner_hook(hooks: dict, hook: LearnerHook) -> None:
    """Insert ``hook`` into ``hooks[hook.position]``, keeping that list
    sorted by ascending priority (equal priorities keep insertion order).

    Args:
        hooks: mapping from position name to a priority-sorted list of hooks.
        hook: the hook to insert; must be a ``LearnerHook``.
    """
    # BUG FIX: the isinstance assertion ran *after* hook.position/.priority
    # had already been used; validate the argument before touching it.
    assert isinstance(hook, LearnerHook)
    position = hook.position
    priority = hook.priority
    idx = 0
    # Scan from the back for the last hook with priority <= ours and
    # insert right after it (keeps equal-priority hooks FIFO).
    for i in reversed(range(len(hooks[position]))):
        if priority >= hooks[position][i].priority:
            idx = i + 1
            break
    hooks[position].insert(idx, hook)
Design and implementation of FIR digital wave filter based on DSP
This paper discusses the general principles of FIR digital filters and describes a design method for an FIR digital filter based on a fixed-point DSP processor from the TMS320VC54x series. The filter coefficients are computed using MATLAB's window-function method and verified on a DSP measurement system. The digital filter met all design expectations.
package controllers
import (
"github.com/nexus-uw/paisley/app/routes"
"github.com/revel/revel"
"github.com/google/uuid"
"github.com/nexus-uw/paisley/app/models"
)
// Subscriptions handles CRUD for user subscriptions; it embeds the base
// Application controller for DB/session helpers.
type Subscriptions struct {
	Application
}

// checkUser is an interceptor-style guard: it redirects to the index page
// when no user is logged in, and returns nil to let the request proceed.
func (c Subscriptions) checkUser() revel.Result {
	if user := c.connected(); user == nil {
		c.Flash.Error("Please log in first")
		return c.Redirect(routes.Application.Index())
	}
	return nil
}
// Index lists subscriptions and renders them.
// NOTE(review): the per-user Where filter is commented out, so this
// currently returns EVERY user's subscriptions — confirm intent.
// A query failure panics (revel's panic handler turns it into a 500).
func (c Subscriptions) Index() revel.Result {
	c.Log.Info("Fetching index")
	var subscriptions []*models.Subscription
	_, err := c.Txn.Select(&subscriptions,
		c.Db.SqlStatementBuilder.Select("*").
			From("subscriptions")) /*.Where("UserId = ?", c.connected().UserId))*/
	if err != nil {
		panic(err)
	}
	return c.Render(subscriptions)
}
// Create stores a new subscription for the logged-in user: it stamps the
// owner and a fresh UUID, validates, inserts, and redirects back to Index.
// NOTE(review): validation errors are collected but not checked before the
// insert, and insert failure panics — confirm that is acceptable.
func (c Subscriptions) Create(subscription models.Subscription) revel.Result {
	subscription.OwnerID = c.connected().UserId
	subscription.SubscriptionID = uuid.New().String()
	subscription.Validate(c.Validation)
	// todo: assert that subredit actually exists...
	err := c.Txn.Insert(&subscription)
	if err != nil {
		panic(err)
	}
	c.Flash.Success("Thank you, %s, created sub",
		c.connected().Name)
	return c.Redirect(routes.Subscriptions.Index())
}
// Delete is an unimplemented stub — calling it always panics with the
// requested SubscriptionID embedded in the message.
func (c Subscriptions) Delete(SubscriptionID string) revel.Result {
	panic("TODO: implement delete Subscription for SubscriptionID=" + SubscriptionID)
}
|
def startPayment():
    """Flask endpoint: start an on-demand payment via the gateway.

    Requires mail, currency, amount, redirectUrl and paymentMethod query
    parameters; on success the response body is the gateway's next URL.
    On missing parameters or any gateway failure the body is empty
    (deliberate best-effort behavior, preserved here).
    Always returns a CORS-enabled (Access-Control-Allow-Origin: *) response.
    """
    pg = PaymentGateway()
    next_url = ""
    required = ('mail', 'currency', 'amount', 'redirectUrl', 'paymentMethod')
    try:
        if all(key in request.args for key in required):
            next_url = pg.processByDemandPayment(
                request.args.get('mail'),
                request.args.get('currency'),
                request.args.get('amount'),
                request.args.get('redirectUrl'),
                request.args.get('paymentMethod')
            )
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed
        # SystemExit/KeyboardInterrupt. Gateway errors still yield "".
        pass
    resp = Response(next_url)
    resp.headers['Access-Control-Allow-Origin'] = '*'
    return resp
package main
import (
"path/filepath"
"strings"
"sync"
"github.com/raggaer/tiger/app/config"
"github.com/raggaer/tiger/app/xml"
"github.com/schollz/closestmatch"
)
// xmlTaskList accumulates everything parsed from the server's data
// directory. The loadServer* goroutines write into it concurrently, so
// every mutation goes through rw; Errors collects per-task failures.
type xmlTaskList struct {
	Path string

	rw     sync.Mutex      // guards all fields below during concurrent loading
	Errors []*xmlTaskError // one entry per failed load task

	Monsters  map[string]*xml.Monster  // keyed by lowercased monster name
	Vocations map[string]*xml.Vocation // keyed by lowercased vocation name

	InstantSpells            map[string]*xml.InstantSpell // keyed by lowercased words
	InstantSpellsFuzzySearch *closestmatch.ClosestMatch

	RuneSpells            map[string]*xml.RuneSpell // keyed by lowercased NAME (not words)
	RuneSpellsFuzzySearch *closestmatch.ClosestMatch

	ConjureSpells            map[string]*xml.ConjureSpell // keyed by lowercased words
	ConjureSpellsFuzzySearch *closestmatch.ClosestMatch

	Items map[int]xml.Item // keyed by item id (FromID..ToID ranges expanded)
}

// xmlTaskError pairs a human-readable task name with the error it produced.
type xmlTaskError struct {
	Name  string
	Error error
}
// loadServerData parses the server data directory in parallel (monsters,
// items, vocations, and the three spell categories) and returns the filled
// task list, or the first recorded error.
// The WaitGroup count must match the number of goroutines launched below.
// Errors is read without the lock only after Wait() — safe because every
// writer has finished by then.
func loadServerData(cfg *config.Config) (*xmlTaskList, *xmlTaskError) {
	taskList := &xmlTaskList{
		Path: cfg.Server.Path,
	}
	// Create wait group for all parsing tasks
	tasks := &sync.WaitGroup{}
	tasks.Add(6)
	// Execute tasks
	go loadServerMonsters(taskList, tasks, cfg.Server.Path)
	go loadServerItems(taskList, tasks, cfg.Server.Path)
	go loadServerVocations(taskList, tasks, cfg.Server.Path)
	go loadServerInstantSpells(taskList, tasks, cfg.Server.Path)
	go loadServerRuneSpells(taskList, tasks, cfg.Server.Path)
	go loadServerConjureSpells(taskList, tasks, cfg.Server.Path)
	// Wait for all tasks to end
	tasks.Wait()
	// Check for errors
	if len(taskList.Errors) >= 1 {
		return nil, taskList.Errors[0]
	}
	return taskList, nil
}
// loadServerConjureSpells parses conjure spells from spells.xml, keys them
// by lowercased incantation words, builds the fuzzy-search index, and
// publishes both under the task-list mutex. Runs as one of the
// loadServerData goroutines; on failure it records the error and returns.
func loadServerConjureSpells(taskList *xmlTaskList, wg *sync.WaitGroup, path string) {
	defer wg.Done()
	// Load spell list
	spells, err := xml.LoadConjureSpells(filepath.Join(path, "data", "spells", "spells.xml"))
	if err != nil {
		taskList.rw.Lock()
		taskList.Errors = append(taskList.Errors, &xmlTaskError{
			Name:  "Conjure spell list",
			Error: err,
		})
		taskList.rw.Unlock()
		return
	}
	// Convert conjure spell list to a map keyed by lowercased words
	// (original comment wrongly said "rune spell list")
	spellMap := make(map[string]*xml.ConjureSpell, len(spells.Conjures))
	for _, s := range spells.Conjures {
		spellMap[strings.ToLower(s.Words)] = s
	}
	// Publish results under the lock
	taskList.rw.Lock()
	taskList.ConjureSpellsFuzzySearch = spells.CreateFuzzyClosest(2)
	taskList.ConjureSpells = spellMap
	taskList.rw.Unlock()
}
// loadServerRuneSpells parses rune spells from spells.xml, keys them by
// lowercased NAME (runes are identified by item name, unlike instant and
// conjure spells which use words), builds the fuzzy-search index, and
// publishes both under the task-list mutex.
func loadServerRuneSpells(taskList *xmlTaskList, wg *sync.WaitGroup, path string) {
	defer wg.Done()
	// Load spell list
	spells, err := xml.LoadRuneSpells(filepath.Join(path, "data", "spells", "spells.xml"))
	if err != nil {
		taskList.rw.Lock()
		taskList.Errors = append(taskList.Errors, &xmlTaskError{
			Name:  "Rune spell list",
			Error: err,
		})
		taskList.rw.Unlock()
		return
	}
	// Convert rune spell list to a map keyed by lowercased name
	spellMap := make(map[string]*xml.RuneSpell, len(spells.Runes))
	for _, s := range spells.Runes {
		spellMap[strings.ToLower(s.Name)] = s
	}
	// Publish results under the lock
	taskList.rw.Lock()
	taskList.RuneSpellsFuzzySearch = spells.CreateFuzzyClosest(2)
	taskList.RuneSpells = spellMap
	taskList.rw.Unlock()
}
// loadServerInstantSpells parses instant spells from spells.xml, keys them
// by lowercased incantation words (skipping monster-only spells whose words
// start with "###"), builds the fuzzy-search index, and publishes both
// under the task-list mutex.
func loadServerInstantSpells(taskList *xmlTaskList, wg *sync.WaitGroup, path string) {
	defer wg.Done()
	// Load spell list
	spells, err := xml.LoadInstantSpells(filepath.Join(path, "data", "spells", "spells.xml"))
	if err != nil {
		taskList.rw.Lock()
		taskList.Errors = append(taskList.Errors, &xmlTaskError{
			Name:  "Instant spell list",
			Error: err,
		})
		taskList.rw.Unlock()
		return
	}
	// Convert instant spell list to a map keyed by lowercased words
	spellMap := make(map[string]*xml.InstantSpell, len(spells.Spells))
	for _, s := range spells.Spells {
		// Skip monster instant spells (marked with a "###" prefix)
		if strings.HasPrefix(s.Words, "###") {
			continue
		}
		spellMap[strings.ToLower(s.Words)] = s
	}
	// Publish results under the lock
	taskList.rw.Lock()
	taskList.InstantSpellsFuzzySearch = spells.CreateFuzzyClosest(2)
	taskList.InstantSpells = spellMap
	taskList.rw.Unlock()
}
// loadServerVocations parses vocations.xml into a map keyed by lowercased
// vocation name and publishes it under the task-list mutex.
func loadServerVocations(taskList *xmlTaskList, wg *sync.WaitGroup, path string) {
	defer wg.Done()
	// Load vocation list
	vocList, err := xml.LoadVocationList(filepath.Join(path, "data", "XML", "vocations.xml"))
	if err != nil {
		taskList.rw.Lock()
		taskList.Errors = append(taskList.Errors, &xmlTaskError{
			Name:  "Vocation list",
			Error: err,
		})
		taskList.rw.Unlock()
		return
	}
	// Convert vocation slice to map. The index 'e' is used deliberately:
	// &vocList.Vocations[e] points into the backing array, whereas &i
	// would alias the (reused) loop variable.
	vocs := make(map[string]*xml.Vocation, len(vocList.Vocations))
	for e, i := range vocList.Vocations {
		vocs[strings.ToLower(i.Name)] = &vocList.Vocations[e]
	}
	// Publish results under the lock
	taskList.rw.Lock()
	taskList.Vocations = vocs
	taskList.rw.Unlock()
}
// loadServerItems parses items.xml into a map keyed by item id. Entries
// that declare a FromID..ToID range are expanded to one map entry per id;
// single items with a zero ID are skipped. Published under the mutex.
func loadServerItems(taskList *xmlTaskList, wg *sync.WaitGroup, path string) {
	defer wg.Done()
	// Load item list
	itemList, err := xml.LoadItemList(filepath.Join(path, "data", "items", "items.xml"))
	if err != nil {
		taskList.rw.Lock()
		taskList.Errors = append(taskList.Errors, &xmlTaskError{
			Name:  "Item list",
			Error: err,
		})
		taskList.rw.Unlock()
		return
	}
	// Convert item slice to map
	items := make(map[int]xml.Item, len(itemList.Items))
	for _, i := range itemList.Items {
		if i.FromID != 0 && i.ToID != 0 {
			// Populate every id in the declared range with the same item
			for x := i.FromID; x <= i.ToID; x++ {
				items[x] = i
			}
			continue
		}
		// Populate normal item (skip entries without a valid id)
		if i.ID != 0 {
			items[i.ID] = i
		}
	}
	// Publish results under the lock
	taskList.rw.Lock()
	taskList.Items = items
	taskList.rw.Unlock()
}
// loadServerMonsters is a two-phase loader: monsters.xml gives the index of
// per-monster files, each of which is then parsed individually. Results are
// keyed by lowercased monster name. Loading aborts on the FIRST failing
// monster file (recording which one), leaving taskList.Monsters unset.
func loadServerMonsters(taskList *xmlTaskList, wg *sync.WaitGroup, path string) {
	defer wg.Done()
	// Load monster index
	monsterList, err := xml.LoadMonsterList(filepath.Join(path, "data", "monster", "monsters.xml"))
	if err != nil {
		taskList.rw.Lock()
		taskList.Errors = append(taskList.Errors, &xmlTaskError{
			Name:  "Monster list",
			Error: err,
		})
		taskList.rw.Unlock()
		return
	}
	// Load each monster file referenced by the index
	monsters := make(map[string]*xml.Monster, len(monsterList.Monsters))
	for _, m := range monsterList.Monsters {
		xmlMonster, err := xml.LoadMonster(filepath.Join(taskList.Path, "data", "monster", m.File))
		if err != nil {
			taskList.rw.Lock()
			taskList.Errors = append(taskList.Errors, &xmlTaskError{
				Name:  "Load monster " + m.Name,
				Error: err,
			})
			taskList.rw.Unlock()
			return
		}
		// Append monster to the list
		monsters[strings.ToLower(xmlMonster.Name)] = xmlMonster
	}
	// Publish results under the lock
	taskList.rw.Lock()
	taskList.Monsters = monsters
	taskList.rw.Unlock()
}
|
use instruction_def::*;
use test::run_test;
use Operand::*;
use Reg::*;
use RegScale::*;
use RegType::*;
use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
// Machine-generated encoder test vectors for the VPTESTMW instruction
// (EVEX-encoded AVX-512BW mask test on packed words). Each case feeds a
// fully-specified `Instruction` through `run_test` and checks the exact
// encoded byte sequence. Cases are paired per vector width (xmm/ymm/zmm),
// alternating register-register and register-memory forms, under both
// OperandSize::Dword (32-bit mode) and OperandSize::Qword (64-bit mode).
#[test]
fn vptestmw_1() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K3)),
            operand2: Some(Direct(XMM0)),
            operand3: Some(Direct(XMM1)),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K2),
            broadcast: None,
        },
        &[98, 242, 253, 10, 38, 217],
        OperandSize::Dword,
    )
}
#[test]
fn vptestmw_2() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K7)),
            operand2: Some(Direct(XMM6)),
            operand3: Some(IndirectDisplaced(
                EBX,
                427244617,
                Some(OperandSize::Xmmword),
                None,
            )),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K7),
            broadcast: None,
        },
        &[98, 242, 205, 15, 38, 187, 73, 60, 119, 25],
        OperandSize::Dword,
    )
}
#[test]
fn vptestmw_3() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K1)),
            operand2: Some(Direct(XMM4)),
            operand3: Some(Direct(XMM10)),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K3),
            broadcast: None,
        },
        &[98, 210, 221, 11, 38, 202],
        OperandSize::Qword,
    )
}
#[test]
fn vptestmw_4() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K6)),
            operand2: Some(Direct(XMM22)),
            operand3: Some(IndirectScaledIndexed(
                RDI,
                RAX,
                Four,
                Some(OperandSize::Xmmword),
                None,
            )),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K4),
            broadcast: None,
        },
        &[98, 242, 205, 4, 38, 52, 135],
        OperandSize::Qword,
    )
}
#[test]
fn vptestmw_5() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K4)),
            operand2: Some(Direct(YMM0)),
            operand3: Some(Direct(YMM2)),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K5),
            broadcast: None,
        },
        &[98, 242, 253, 45, 38, 226],
        OperandSize::Dword,
    )
}
#[test]
fn vptestmw_6() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K5)),
            operand2: Some(Direct(YMM0)),
            operand3: Some(IndirectScaledIndexedDisplaced(
                ESI,
                EDX,
                Eight,
                47412936,
                Some(OperandSize::Ymmword),
                None,
            )),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K3),
            broadcast: None,
        },
        &[98, 242, 253, 43, 38, 172, 214, 200, 118, 211, 2],
        OperandSize::Dword,
    )
}
#[test]
fn vptestmw_7() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K5)),
            operand2: Some(Direct(YMM4)),
            operand3: Some(Direct(YMM25)),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K7),
            broadcast: None,
        },
        &[98, 146, 221, 47, 38, 233],
        OperandSize::Qword,
    )
}
#[test]
fn vptestmw_8() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K3)),
            operand2: Some(Direct(YMM25)),
            operand3: Some(IndirectScaledIndexed(
                RBX,
                RBX,
                Eight,
                Some(OperandSize::Ymmword),
                None,
            )),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K4),
            broadcast: None,
        },
        &[98, 242, 181, 36, 38, 28, 219],
        OperandSize::Qword,
    )
}
#[test]
fn vptestmw_9() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K6)),
            operand2: Some(Direct(ZMM6)),
            operand3: Some(Direct(ZMM0)),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K2),
            broadcast: None,
        },
        &[98, 242, 205, 74, 38, 240],
        OperandSize::Dword,
    )
}
#[test]
fn vptestmw_10() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K4)),
            operand2: Some(Direct(ZMM2)),
            operand3: Some(IndirectScaledDisplaced(
                EBX,
                Four,
                353442478,
                Some(OperandSize::Zmmword),
                None,
            )),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K7),
            broadcast: None,
        },
        &[98, 242, 237, 79, 38, 36, 157, 174, 26, 17, 21],
        OperandSize::Dword,
    )
}
#[test]
fn vptestmw_11() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K5)),
            operand2: Some(Direct(ZMM13)),
            operand3: Some(Direct(ZMM26)),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K7),
            broadcast: None,
        },
        &[98, 146, 149, 79, 38, 234],
        OperandSize::Qword,
    )
}
#[test]
fn vptestmw_12() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::VPTESTMW,
            operand1: Some(Direct(K3)),
            operand2: Some(Direct(ZMM29)),
            operand3: Some(IndirectScaledDisplaced(
                RDI,
                Two,
                1917044996,
                Some(OperandSize::Zmmword),
                None,
            )),
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: Some(MaskReg::K1),
            broadcast: None,
        },
        &[98, 242, 149, 65, 38, 28, 125, 4, 201, 67, 114],
        OperandSize::Qword,
    )
}
|
import drivers
import math
drivers.LED4.on()
from vision import Camera, VisionModule
import cv2
drivers.LED4.off()
# Geometry (inches): robot footprint and the field-center base origin.
PI = math.pi  # BUG FIX: was math.PI, which does not exist (AttributeError)
ROBOT_LATERAL = 3 + 5 / 8
ROBOT_AXIAL = 3 + 3 / 8
BASE_ORIGIN = [4 * 12, 4 * 12]

# Starting pose [x_in, y_in, heading_rad] for each robot identity.
ROBOT_ORIGIN_POSES = {
    "YELLOW": [BASE_ORIGIN[0] + 1 + ROBOT_LATERAL / 2, BASE_ORIGIN[1] + 1 + ROBOT_AXIAL / 2, 3 * PI / 2],
    "BLUEYELLOW": [BASE_ORIGIN[0] + 12 - 1 - ROBOT_LATERAL / 2, BASE_ORIGIN[1] + 1 + ROBOT_AXIAL / 2, 3 * PI / 2],
    "BLUE": [BASE_ORIGIN[0] + 1 + ROBOT_LATERAL / 2, BASE_ORIGIN[1] + 12 - 1 - ROBOT_AXIAL / 2, PI / 2],
    "GREEN": [BASE_ORIGIN[0] + 12 - 1 - ROBOT_LATERAL / 2, BASE_ORIGIN[1] + 12 - 1 - ROBOT_AXIAL / 2, PI / 2]
}

# Draw colors per detection label (consumed by draw()).
COLORS = {
    "obstacle": (255, 0, 0),
    "cube": (0, 0, 255),
    "green": (0, 255, 0),
    "yellow": (255, 255, 0),
    "base": (0, 255, 255)
}

# Role assignment by identity.
PARKER_ROBOTS = ["BLUE", "BLUEYELLOW"]
COLLECTOR_ROBOTS = ["YELLOW", "GREEN"]
def in_to_cm(inches):
    """Convert inches to centimeters (1 in = 1/0.393701 cm = 2.54 cm).

    BUG FIX: the parameter was named ``in`` — a Python keyword — which made
    this function (and therefore the whole module) a SyntaxError.
    """
    return inches / 0.393701
def sleep(seconds):
    """Busy-wait for `seconds` seconds.

    Deliberately spins instead of calling time.sleep — presumably to keep
    the process hot on the robot controller; confirm before changing.
    """
    deadline = time.time() + seconds
    while time.time() < deadline:
        pass
def draw(img, objects):
    """Overlay detection results on `img` in place: a horizontal center
    line, one colored bounding box per object (color chosen by its meta
    label via COLORS), and its distance rendered at the box corner."""
    # Horizontal reference line across the middle of a 640x480 frame.
    cv2.line(img, (0, 240), (640, 240), (255, 255, 255), 2)
    for det in objects:
        x, y, w, h = det.rect
        cv2.rectangle(img, (x, y), (x + w, y + h), COLORS.get(det.meta), 3)
        cv2.putText(
            img, "{:.2f}".format(det.dist), (x, y),
            cv2.FONT_HERSHEY_PLAIN, 1.0, (255, 255, 255))
def in_the_way(objs):
    """Return how far (in pixels) the rightmost blocking object extends past
    x=320 (frame center), or 0 when the path ahead is clear.

    An object blocks when it is close (0 < dist < 25) and its bounding box
    spans the center band (left edge < 300 and right edge > 340).
    """
    encroachment = 0
    for obstacle in objs:
        near = 0 < obstacle.dist < 25
        right_edge = obstacle.rect[0] + obstacle.rect[2]
        spans_center = obstacle.rect[0] < 300 and right_edge > 340
        if near and spans_center:
            encroachment = max(encroachment, right_edge - 320)
    return encroachment
if __name__ == '__main__':
    import sys
    import time

    print("Spinning up. Pray to Lafayette Official God.")

    # Figure out which robot I am
    identity = None
    with open("identity.dat", "r") as file:
        identity = file.readlines()[0].strip()
    print(identity, "online!")

    # Figure out where I'm headed
    goal = [-1, 1]
    with open("goal.dat", "r") as file:
        # BUG FIX: `file.readlines[0]` was missing the call parentheses.
        nums = file.readlines()[0].strip().split(" ")
        goal = [float(x) for x in nums]
    print("Targeting block at", goal)

    # System initialization
    drivers.init()
    camera = Camera()
    mod = VisionModule(width=640, height=480)

    # State
    pose = ROBOT_ORIGIN_POSES[identity]
    # BUG FIX: this counter was named `time`, shadowing the time module and
    # breaking every subsequent time.time() call.
    elapsed = 0.0
    iterations = 0
    done = False
    epoch = time.time()

    # Parker robots have a simple routine: drive in, back out, done.
    if identity in PARKER_ROBOTS:
        drivers.move(drivers.RobotState(drivers.DRIVE, in_to_cm(ROBOT_AXIAL + 2)))
        sleep(3)
        drivers.move(drivers.RobotState(drivers.DRIVE, -in_to_cm(ROBOT_AXIAL + 3)))
        done = True
    # Collector robots will wait a bit for parkers to park
    elif identity in COLLECTOR_ROBOTS:
        sleep(8)
        goal_direction = math.atan2(goal[1] - pose[1], goal[0] - pose[0])

    while not done:
        # Run vision on current FOV, LED3 indicates processing
        src = camera.capture()
        drivers.LED3.on()
        objects, mask, cvxhull = mod.process(src)
        drivers.LED3.off()

        # Timekeeping (BUG FIX: previously used the undefined name `start`)
        elapsed = time.time() - epoch
        iterations += 1

        # Obstacle avoidance: turn away if something blocks the center band.
        # BUG FIX: reuse the computed value instead of running detection twice.
        itw = in_the_way(objects)
        drivers.LED2.on()
        if itw != 0:
            drivers.move(drivers.RobotState(drivers.TURN, -1 * math.pi * 0.25))
        else:
            drivers.move(drivers.RobotState(drivers.DRIVE, 5))
        drivers.LED2.off()

    # BUG FIX: the original print referenced undefined names `n` and `total`.
    if elapsed > 0:
        print("{} frames computed in {}s ({}fps)".format(
            iterations, elapsed, iterations / elapsed))
    camera.close()
|
<reponame>liunan/compiler
#include "cklanemap.h"
#include "cklanescene.h"
#include "ifeature.h"
#include "ifeaturefactory.h"
#include <qgraphicsview.h>
#include <QGraphicsLineItem>
#include <QGraphicsEllipseItem>
#include <QPainterPath>
#include <QGraphicsPathItem>
#include <QPluginLoader>
#include <QFileDialog>
#include <QPrintDialog>
#include <QPrintPreviewDialog>
#include <QPrinter>
#include <QDir>
// Construct the main window: build menus/actions, set up a 1000x1000 scene
// inside a scrollbar-less QGraphicsView, then scan the application directory
// for feature-factory plugins and instantiate two demo features from each.
// NOTE(review): m_persistence is constructed with a NULL argument yet used
// as a value object below — confirm the intended constructor parameter.
cklanemap::cklanemap(QWidget *parent)
    : QMainWindow(parent), m_persistence(NULL)
{
    ui.setupUi(this);
    createActions();
    createMenu();
    m_pScene = new CKLaneScene(this);  // parented: Qt deletes it with the window
    m_pScene->setSceneRect(QRectF(0, 0, 1000, 1000));
    m_pView = new QGraphicsView(m_pScene);
    m_pView->setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOff);
    m_pView->setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOff);
    m_pView->setDragMode(QGraphicsView::NoDrag);
    setCentralWidget(m_pView);  // window takes ownership of the view
    QString appPath = QApplication::instance()->applicationDirPath();
    QDir pluginsDir = QDir(QApplication::instance()->applicationDirPath());
    // Try every file in the application directory as a potential Qt plugin.
    foreach(QString fileName, pluginsDir.entryList(QDir::Files)) {
        QPluginLoader loader(pluginsDir.absoluteFilePath(fileName));
        QString str = loader.errorString();  // NOTE(review): captured but unused
        QObject *plugin = loader.instance();
        if (plugin) {
            IFeatureFactory * featureFactoryInstance = qobject_cast<IFeatureFactory *>(plugin);
            if (featureFactoryInstance)
            {
                const char* name = featureFactoryInstance->GetID();
                int productCount = featureFactoryInstance->GetProductCount();
                // First demo feature: placed mid-scene, movable + selectable.
                IFeature* pFeature = featureFactoryInstance->CreateFeature(0);
                if (pFeature)
                {
                    m_persistence.AddFeature(pFeature);
                    QGraphicsItem* pItem = pFeature->GetViewItem();
                    pItem->setPos(500, 500);
                    m_pScene->addItem(pItem);  // scene takes item ownership
                    pItem->setFlag(QGraphicsItem::ItemIsMovable, true);
                    pItem->setFlag(QGraphicsItem::ItemIsSelectable, true);
                }
                // Second demo feature: placed at the origin, default flags.
                IFeature* pFeature2 = featureFactoryInstance->CreateFeature(0);
                if (pFeature2)
                {
                    m_persistence.AddFeature(pFeature2);
                    pFeature2->GetViewItem()->setPos(0, 0);
                    m_pScene->addItem(pFeature2->GetViewItem());
                }
            }
        }
    }
}
// Scene, view and graphics items are owned by Qt parent objects and are
// released by Qt; nothing to clean up manually here.
cklanemap::~cklanemap()
{
}
// Creates the menu actions (Open/Save/Print/PrintPreview/ZoomIn/ZoomOut)
// and wires each to its slot via the string-based SIGNAL/SLOT connect.
void cklanemap::createActions()
{
    openAction = new QAction(tr("&Open"),this);
    connect(openAction, SIGNAL(triggered()), this, SLOT(onOpen()));
    saveAction = new QAction(tr("&Save"),this);
    connect(saveAction, SIGNAL(triggered()), this, SLOT(onSave()));
    printAction = new QAction(tr("&Print"), this);
    connect(printAction, SIGNAL(triggered()), this, SLOT(onPrint()));
    printPreviewAction = new QAction(tr("PrintPreview"), this);
    connect(printPreviewAction, SIGNAL(triggered()), this, SLOT(onPrintPreview()));
    zoomInAction = new QAction(tr("ZoomIn"), this);
    connect(zoomInAction, SIGNAL(triggered()), this, SLOT(onZoomIn()));
    zoomOutAction = new QAction(tr("ZoomOut"), this);
    connect(zoomOutAction, SIGNAL(triggered()), this, SLOT(onZoomOut()));
}
// Builds the File/View/Item/Help menus from the actions created in
// createActions(). The Item and Help menus are placeholders: their actions
// are still commented out.
void cklanemap::createMenu()
{
    fileMenu = menuBar()->addMenu(tr("&File"));
    fileMenu->addAction(openAction);
    fileMenu->addAction(saveAction);
    fileMenu->addSeparator();
    fileMenu->addAction(printAction);
    fileMenu->addAction(printPreviewAction);
    viewMenu = menuBar()->addMenu(tr("&View"));
    viewMenu->addAction(zoomInAction);
    viewMenu->addAction(zoomOutAction);
    itemMenu = menuBar()->addMenu(tr("&Item"));
    //itemMenu->addAction(deleteAction);
    /*itemMenu->addSeparator();
    itemMenu->addAction(toFrontAction);
    itemMenu->addAction(sendBackAction);
    */
    aboutMenu = menuBar()->addMenu(tr("&Help"));
    //aboutMenu->addAction(aboutAction);
}
////////////////////////////
// Slot for File->Open: asks the user for a .map file and imports it through
// the persistence layer.
// Fix: QFileDialog::getOpenFileName() returns an empty string when the user
// cancels; previously that empty path was still passed to Import().
void cklanemap::onOpen()
{
    QString selectedFile = QFileDialog::getOpenFileName(this,
        tr("Open Map"), "/home/jana", tr("Map Files (*.map)"));
    if (selectedFile.isEmpty())
    {
        return; // dialog cancelled -- nothing to import
    }
    m_persistence.Import(selectedFile.toLocal8Bit().data());
}
// Slot for File->Save: lets the user pick/enter a .map file (AnyFile allows
// a not-yet-existing name) and exports the current features through the
// persistence layer.
void cklanemap::onSave()
{
    QFileDialog dialog(this);
    dialog.setFileMode(QFileDialog::AnyFile);
    dialog.setNameFilter(tr("Map Files (*.map)"));
    if (dialog.exec())
    {
        QStringList fileNames = dialog.selectedFiles();
        if (fileNames.length())
        {
            m_persistence.Export(fileNames[0].toLocal8Bit().data());
        }
    }
}
// Slot for File->Print: configures an A4 high-resolution printer, shows the
// native print dialog, and renders the whole scene when accepted.
void cklanemap::onPrint()
{
    QPrinter printer(QPrinter::HighResolution);
    printer.setPaperSize(QPrinter::A4);
    QPrintDialog printDialog(&printer, this);
    if (printDialog.exec() == QDialog::Accepted) {
        // print ...
        QPainter painter(&printer);
        m_pScene->render(&painter);
    }
}
// Slot for File->PrintPreview: shows a preview dialog whose repaint requests
// are serviced by onPaintRequested().
void cklanemap::onPrintPreview()
{
    QPrinter printer(QPrinter::HighResolution);
    QPrintPreviewDialog preview(&printer);
    connect(&preview, SIGNAL(paintRequested(QPrinter *)), this, SLOT(onPaintRequested(QPrinter*)));
    preview.exec();
}
// Print-preview paint callback: renders the whole scene onto the printer.
void cklanemap::onPaintRequested(QPrinter * printer)
{
    QPainter painter(printer);
    m_pScene->render(&painter);
}
// Slot for View->ZoomIn: doubles the view's scale.
void cklanemap::onZoomIn()
{
    m_pView->scale(2, 2);
}
// Slot for View->ZoomOut: halves the view's scale (inverse of onZoomIn).
void cklanemap::onZoomOut()
{
    m_pView->scale(0.5, 0.5);
}
<reponame>claassenk/karenapp
package main
import (
"fmt"
"log"
"math/big"
"net/http"
"github.com/btcsuite/btcec"
"github.com/btcsuite/btcnet"
"github.com/btcsuite/btcutil"
)
// Number of keys rendered on one HTML page.
const ResultsPerPage = 128

// PageTemplateHeader opens a page; fmt placeholders, in order:
// current page, total pages, previous page, next page.
const PageTemplateHeader = `<html>
<head>
<title>All bitcoin private keys</title>
<meta charset="utf8" />
<link href="http://fonts.googleapis.com/css?family=Open+Sans" rel="stylesheet" type="text/css">
<style>
body{font-size: 9pt; font-family: 'Open Sans', sans-serif;}
a{text-decoration: none}
a:hover {text-decoration: underline}
.keys > span:hover { background: #f0f0f0; }
span:target { background: #ccffcc; }
</style>
</head>
<body>
<h1>Bitcoin private key database</h1>
<h2>Page %s out of %s</h2>
<a href="/%s">previous</a> | <a href="/%s">next</a>
<pre class="keys">
<strong>Private Key</strong> <strong>Address</strong> <strong>Compressed Address</strong>
`

// PageTemplateFooter closes a page; placeholders: previous page, next page.
const PageTemplateFooter = `</pre>
<pre style="margin-top: 1em; font-size: 8pt">
It took a lot of computing power to generate this database. Donations welcome: 1Bv8dN7pemC5N3urfMDdAFReibefrBqCaK
</pre>
<a href="/%s">previous</a> | <a href="/%s">next</a>
</body>
</html>`

// KeyTemplate renders one key row; placeholders: anchor id (WIF), redirect
// fragment (WIF), key number (tooltip), WIF, uncompressed address (x2),
// compressed address (x2).
const KeyTemplate = `<span id="%s"><a href="/warning:understand-how-this-works!/%s">+</a> <span title="%s">%s </span> <a href="https://blockchain.info/address/%s">%34s</a> <a href="https://blockchain.info/address/%s">%34s</a></span>
`
var (
	// Total bitcoins
	// NOTE(review): this constant looks like the largest valid private-key
	// scalar (curve order related) rather than a coin count -- confirm.
	total = new(big.Int).SetBytes([]byte{
		0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFE,
		0xBA, 0xAE, 0xDC, 0xE6, 0xAF, 0x48, 0xA0, 0x3B, 0xBF, 0xD2, 0x5E, 0x8C, 0xD0, 0x36, 0x41, 0x40,
	})
	// One
	one = big.NewInt(1)
	// Total pages
	// Note: Add mutates _pages in place and returns it, so `pages` and
	// `_pages` refer to the same big.Int (total/ResultsPerPage + 1).
	_pages = new(big.Int).Div(total, big.NewInt(ResultsPerPage))
	pages = _pages.Add(_pages, one)
)

// Key holds the display strings for one derived private key.
type Key struct {
	private string      // WIF-encoded private key
	number string       // decimal key index
	compressed string   // P2PKH address from the compressed pubkey
	uncompressed string // P2PKH address from the uncompressed pubkey
}
// compute derives up to ResultsPerPage consecutive keys starting just after
// *count (the counter is advanced in place). It stops early when the counter
// would exceed `total`, and returns the array plus the number of filled
// entries.
func compute(count *big.Int) (keys [ResultsPerPage]Key, length int) {
	var padded [32]byte
	var i int
	for i = 0; i < ResultsPerPage; i++ {
		// Increment our counter
		count.Add(count, one)
		// Check to make sure we're not out of range
		if count.Cmp(total) > 0 {
			break
		}
		// Copy count value's bytes to padded slice
		// (left-aligned zero padding to the 32-byte scalar width)
		copy(padded[32-len(count.Bytes()):], count.Bytes())
		// Get private and public keys
		privKey, public := btcec.PrivKeyFromBytes(btcec.S256(), padded[:])
		// Get compressed and uncompressed addresses for public key
		caddr, _ := btcutil.NewAddressPubKey(public.SerializeCompressed(), &btcnet.MainNetParams)
		uaddr, _ := btcutil.NewAddressPubKey(public.SerializeUncompressed(), &btcnet.MainNetParams)
		// Encode addresses
		wif, _ := btcutil.NewWIF(privKey, &btcnet.MainNetParams, false)
		keys[i].private = wif.String()
		keys[i].number = count.String()
		keys[i].compressed = caddr.EncodeAddress()
		keys[i].uncompressed = uaddr.EncodeAddress()
	}
	return keys, i
}
// PageRequest serves one HTML page of sequential private keys. The page
// number comes from the URL path ("/N"); it is clamped to >= 1 and rejected
// with 404 when malformed or above the page count.
func PageRequest(w http.ResponseWriter, r *http.Request) {
	// Default page is page 1
	if len(r.URL.Path) <= 1 {
		r.URL.Path = "/1"
	}
	// Convert page number to bignum
	page, success := new(big.Int).SetString(r.URL.Path[1:], 0)
	if !success {
		w.WriteHeader(http.StatusNotFound)
		return
	}
	// Make sure page number cannot be negative or 0
	page.Abs(page)
	if page.Cmp(one) == -1 {
		page.SetInt64(1)
	}
	// Make sure we're not above page count
	if page.Cmp(pages) > 0 {
		w.WriteHeader(http.StatusNotFound)
		return
	}
	// Get next and previous page numbers
	previous := new(big.Int).Sub(page, one)
	next := new(big.Int).Add(page, one)
	// Calculate our starting key from page number
	start := new(big.Int).Mul(previous, big.NewInt(ResultsPerPage))
	// Send page header
	fmt.Fprintf(w, PageTemplateHeader, page, pages, previous, next)
	// Send keys
	keys, length := compute(start)
	for i := 0; i < length; i++ {
		key := keys[i]
		fmt.Fprintf(w, KeyTemplate, key.private, key.private, key.number, key.private, key.uncompressed, key.uncompressed, key.compressed, key.compressed)
	}
	// Send page footer
	fmt.Fprintf(w, PageTemplateFooter, previous, next)
}
// RedirectRequest maps a WIF-encoded private key to the page containing it
// and redirects there, anchored at the key's row. The [36:] slice strips the
// registered path prefix "/warning:understand-how-this-works!/" (36 bytes),
// which the mux guarantees is present.
func RedirectRequest(w http.ResponseWriter, r *http.Request) {
	key := r.URL.Path[36:]
	wif, err := btcutil.DecodeWIF(key)
	if err != nil {
		w.WriteHeader(http.StatusNotFound)
		return
	}
	// page = key_index / ResultsPerPage + 1 (DivMod's remainder is unused)
	page, _ := new(big.Int).DivMod(new(big.Int).SetBytes(wif.PrivKey.D.Bytes()), big.NewInt(ResultsPerPage), big.NewInt(ResultsPerPage))
	page.Add(page, one)
	// Re-encode uncompressed so the fragment matches the page's anchor ids.
	fragment, _ := btcutil.NewWIF(wif.PrivKey, &btcnet.MainNetParams, false)
	http.Redirect(w, r, "/"+page.String()+"#"+fragment.String(), http.StatusTemporaryRedirect)
}
// main registers the page and redirect handlers and serves forever on :8085.
func main() {
	http.HandleFunc("/", PageRequest)
	http.HandleFunc("/warning:understand-how-this-works!/", RedirectRequest)
	log.Println("Listening")
	log.Fatal(http.ListenAndServe(":8085", nil))
}
|
/***************************************
 Aging Module's HUMBER API Interfaces
*****************************************/

/**
 * Set one aging property on the humber sys layer.
 * Dispatches on aging_prop, narrowing `value` to the width each sys call
 * expects (u8 for the FIFO threshold, bool for the enable flags).
 * Unknown properties are silently ignored and CTC_E_NONE is returned.
 */
int32
ctc_humber_aging_set_property(ctc_aging_prop_t aging_prop, uint32 value)
{
    uint8 value_8 = 0;
    uint32 value_32 = 0;
    bool value_b = FALSE;
    switch( aging_prop )
    {
        case CTC_AGING_PROP_FIFO_THRESHOLD :
            /* only the low byte is meaningful for the FIFO threshold */
            value_8 = value & 0xFF;
            CTC_ERROR_RETURN(sys_humber_aging_set_fifo_threshold(value_8));
            break;
        case CTC_AGING_PROP_INTERVAL :
            value_32 = value ;
            CTC_ERROR_RETURN(sys_humber_aging_set_aging_interval(value_32));
            break;
        case CTC_AGING_PROP_STOP_SCAN_TIMER_EXPIRED :
            /* any non-zero value means TRUE */
            if( 0 != value )
            {
                value_b = TRUE;
            }
            CTC_ERROR_RETURN(sys_humber_aging_set_stop_scan_timer_expired(value_b));
            break;
        case CTC_AGING_PROP_AGING_SCAN_EN :
            if( 0 != value )
            {
                value_b = TRUE;
            }
            CTC_ERROR_RETURN(sys_humber_aging_set_aging_scan_en(value_b));
            break;
        default :
            break;
    }
    return CTC_E_NONE;
}
/**
 * Given StructureMode
 * when convert is called
 * then value should be correctly converted.
 */
@Test
public void convert_StructureMode_ShouldCorrectlyConvert() {
    TypeConversionService conversionService = createWithDependencies();
    // API enum -> NMS handle, one assertion per variant
    Assertions.assertEquals(net.minecraft.world.level.block.state.properties.StructureMode.CORNER, conversionService.convertToStructureModeHandle(StructureMode.CORNER));
    Assertions.assertEquals(net.minecraft.world.level.block.state.properties.StructureMode.SAVE, conversionService.convertToStructureModeHandle(StructureMode.SAVE));
    Assertions.assertEquals(net.minecraft.world.level.block.state.properties.StructureMode.DATA, conversionService.convertToStructureModeHandle(StructureMode.DATA));
    Assertions.assertEquals(net.minecraft.world.level.block.state.properties.StructureMode.LOAD, conversionService.convertToStructureModeHandle(StructureMode.LOAD));
    // NMS handle -> API enum, round-trip direction
    Assertions.assertEquals(StructureMode.CORNER, conversionService.convertToStructureMode(net.minecraft.world.level.block.state.properties.StructureMode.CORNER));
    Assertions.assertEquals(StructureMode.DATA, conversionService.convertToStructureMode(net.minecraft.world.level.block.state.properties.StructureMode.DATA));
    Assertions.assertEquals(StructureMode.SAVE, conversionService.convertToStructureMode(net.minecraft.world.level.block.state.properties.StructureMode.SAVE));
    Assertions.assertEquals(StructureMode.LOAD, conversionService.convertToStructureMode(net.minecraft.world.level.block.state.properties.StructureMode.LOAD));
}
// Complement gives the elements in the last element of seqs that are not in
// any of the others.
// All elements of seqs must be slices or arrays of comparable types.
//
// The reasoning behind this rather clumsy API is so we can do this in the templates:
// {{ $c := .Pages | complement $last4 }}
func (ns *Namespace) Complement(seqs ...interface{}) (interface{}, error) {
	if len(seqs) < 2 {
		return nil, errors.New("complement needs at least two arguments")
	}
	// Last argument is the universe; everything before it is subtracted.
	universe := seqs[len(seqs)-1]
	as := seqs[:len(seqs)-1]
	// Identity set of all elements appearing in the subtracted sequences.
	aset, err := collectIdentities(as...)
	if err != nil {
		return nil, err
	}
	v := reflect.ValueOf(universe)
	switch v.Kind() {
	case reflect.Array, reflect.Slice:
		// Result preserves the universe's element order and slice type.
		sl := reflect.MakeSlice(v.Type(), 0, 0)
		for i := 0; i < v.Len(); i++ {
			ev, _ := indirectInterface(v.Index(i))
			if !ev.Type().Comparable() {
				return nil, errors.New("elements in complement must be comparable")
			}
			if _, found := aset[normalize(ev)]; !found {
				sl = reflect.Append(sl, ev)
			}
		}
		return sl.Interface(), nil
	default:
		return nil, fmt.Errorf("arguments to complement must be slices or arrays")
	}
}
<filename>Chapter10/StanfordLexicalDemo.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package chapter10;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.ling.SentenceUtils;
import edu.stanford.nlp.parser.lexparser.LexicalizedParser;
import edu.stanford.nlp.process.CoreLabelTokenFactory;
import edu.stanford.nlp.process.PTBTokenizer;
import edu.stanford.nlp.process.Tokenizer;
import edu.stanford.nlp.process.TokenizerFactory;
import edu.stanford.nlp.trees.GrammaticalStructure;
import edu.stanford.nlp.trees.GrammaticalStructureFactory;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.trees.TreePrint;
import edu.stanford.nlp.trees.TreebankLanguagePack;
import edu.stanford.nlp.trees.TypedDependency;
import java.io.File;
import java.io.StringReader;
import java.util.List;
/**
*
* @author ashish
*/
public class StanfordLexicalDemo {

    /**
     * Builds the absolute path to this project's resource folder
     * ("&lt;working-dir&gt;/src/chapter10/") where the parser model lives.
     * The substring call drops the trailing separator + "." appended by
     * new File(".").getAbsolutePath().
     */
    private static String getResourcePath(){
        File currDir = new File(".");
        String path = currDir .getAbsolutePath();
        path = path.substring(0, path.length()-2);
        System.out.println(path);
        String resourcePath = path + File.separator + "src/chapter10/";
        return resourcePath;
    }

    /**
     * Loads the English PCFG model, parses a sample sentence from a
     * pre-tokenized word array (printing the Penn tree and collapsed typed
     * dependencies), then re-parses the same sentence from raw text via the
     * PTB tokenizer and prints each dependency as governor/relation/dependent.
     */
    public static void main(String args[]){
        String parseModel = getResourcePath() + "englishPCFG.ser.gz";
        LexicalizedParser lexicalizedParser = LexicalizedParser.loadModel(parseModel);
        // Path 1: parse an already-tokenized sentence.
        String [] sentenceArray = {"The", "cow" ,"jumped", "over", "the", "moon", "."};
        List<CoreLabel> words = SentenceUtils.toCoreLabelList(sentenceArray);
        Tree parseTree = lexicalizedParser.apply(words);
        parseTree.pennPrint();
        TreePrint treePrint = new TreePrint("typedDependenciesCollapsed");
        treePrint.printTree(parseTree);
        // Path 2: tokenize raw text first, then parse.
        String sentence = "The cow jumped over the moon.";
        TokenizerFactory<CoreLabel> tokenizerFactory = PTBTokenizer.factory(new CoreLabelTokenFactory(), "");
        Tokenizer<CoreLabel> tokenizer = tokenizerFactory.getTokenizer(new StringReader(sentence));
        List<CoreLabel> wordList = tokenizer.tokenize();
        parseTree = lexicalizedParser.apply(wordList);
        // Extract CC-processed typed dependencies from the parse tree.
        TreebankLanguagePack tlp = lexicalizedParser.treebankLanguagePack();
        GrammaticalStructureFactory gsf = tlp.grammaticalStructureFactory();
        GrammaticalStructure gs = gsf.newGrammaticalStructure(parseTree);
        List<TypedDependency> tdl = gs.typedDependenciesCCprocessed();
        System.out.println(tdl);
        for(TypedDependency dependency : tdl) {
            System.out.println("Governor Word: [" + dependency.gov()
                + "] Relation: [" + dependency.reln().getLongName()
                + "] Dependent Word: [" + dependency.dep() + "]");
        }
    }
}
|
A theoretical analysis of the parameters in a Hopfield/Tank model for solving TSP
Some parameter rules for the Hopfield–Tank model are given. It has been found that if the time step Δt is not small enough, the model may converge to an invalid solution. Thus, this model requires a lot of time to find a good solution, especially for the large-scale TSP (traveling salesman problem) (N>30). If the transfer function is replaced by the linear-segment function, Δt can be comparatively large. Therefore some modification is necessary in order to use this model to solve the large-scale problem.
use criterion::{criterion_group, criterion_main, Criterion};
use tempdir::TempDir;
use std::path::{Path, PathBuf};
use anyhow::Context;
use hotg_rune_cli::run::{
Image, image::ImageSource, Sound, sound::AudioClip, new_capability_switcher,
};
use hotg_rune_wasmer_runtime::Runtime;
use hotg_runicos_base_runtime::BaseImage;
use hotg_rune_core::capabilities;
use hotg_rune_codegen::{Compilation, DefaultEnvironment, RuneProject, Verbosity};
use hotg_rune_compiler::{hir::Rune, yaml::Document, Diagnostics};
/// Locate the repository root by walking up from this crate's manifest
/// directory until a directory containing `.git/` is found.
pub fn project_root() -> PathBuf {
    let manifest_dir = Path::new(env!("CARGO_MANIFEST_DIR"));
    for candidate in manifest_dir.ancestors() {
        if candidate.join(".git").exists() {
            return candidate.to_path_buf();
        }
    }
    panic!("Unable to determine the project's root directory. Where is \".git/\"?");
}
/// Path to the repository's `examples/` directory.
pub fn example_dir() -> PathBuf {
    let mut dir = project_root();
    dir.push("examples");
    dir
}
/// Read a compiled Rune (Wasm blob) from disk into memory.
/// Panics with a path-bearing message when the file cannot be read.
fn load_rune(path: PathBuf) -> Vec<u8> {
    std::fs::read(&path)
        .with_context(|| format!("Unable to read \"{}\"", path.display()))
        .unwrap()
}
/// Parse and analyse a YAML Runefile, asserting that the analysis produced
/// no error diagnostics.
fn parse_runefile(runefile: &Path) -> Rune {
    let src = std::fs::read_to_string(runefile).unwrap();
    let mut diags = Diagnostics::new();
    let parsed = Document::parse(&src).unwrap();
    let rune = hotg_rune_compiler::analyse_yaml_runefile(&parsed, &mut diags);
    assert!(!diags.has_errors());
    rune
}
/// Compile a parsed Rune into a Wasm blob inside a throw-away build
/// directory. Panics when codegen fails or produces an empty blob; the
/// resulting blob itself is discarded (this exists for benchmarking).
fn build_rune(rune_path: PathBuf, name: String, rune: Rune) {
    let rune_build_dir = TempDir::new("rune_build_dir").unwrap();
    let compilation = Compilation {
        name,
        rune,
        working_directory: rune_build_dir.path().to_path_buf(),
        current_directory: rune_path,
        rune_project: RuneProject::Disk(project_root()),
        optimized: true,
        verbosity: Verbosity::Quiet,
    };
    let mut env = DefaultEnvironment::for_compilation(&compilation);
    let blob = hotg_rune_codegen::generate_with_env(compilation, &mut env)
        .expect("Rune compilation failed");
    assert_ne!(blob.len(), 0);
}
/// Benchmark end-to-end compilation of the `sine` example Rune.
/// The Runefile is parsed once outside the measured loop.
fn sine_build_benchmark(c: &mut Criterion) {
    let base = example_dir().join("sine");
    let runefile = base.join("Runefile.yml");
    let rune = parse_runefile(&runefile);
    c.bench_function("sine_build", |b| {
        b.iter(|| build_rune(base.clone(), String::from("sine"), rune.clone()))
    });
}
/// Benchmark one inference call of the prebuilt `examples/sine/sine.rune`
/// using the default capability image.
fn sine_inference_benchmark(c: &mut Criterion) {
    let mut runtime = Runtime::load(
        &load_rune(example_dir().join("sine").join("sine.rune")),
        BaseImage::with_defaults(),
    )
    .context("Unable to create rune runtime")
    .unwrap();
    c.bench_function("sine_inference", |b| {
        b.iter(|| runtime.call().context("Call Failed").unwrap())
    });
}
/// Benchmark microspeech inference: the SOUND capability is wired to a fixed
/// WAV sample (the "right" keyword) so every iteration sees the same input.
fn microspeech_inference_benchmark(c: &mut Criterion) {
    let base = example_dir().join("microspeech");
    let mut img = BaseImage::with_defaults();
    let wav_file = base
        .join("data")
        .join("right")
        .join("fb7eb481_nohash_0.wav");
    img.register_capability(
        capabilities::SOUND,
        new_capability_switcher::<Sound, _>(vec![AudioClip::from_wav_file(
            wav_file,
        )
        .unwrap()]),
    );
    let mut runtime =
        Runtime::load(&load_rune(base.join("microspeech.rune")), img)
            .context("Unable to create rune runtime")
            .unwrap();
    c.bench_function("microspeech_inference", |b| {
        b.iter(|| runtime.call().context("Call Failed").unwrap())
    });
}
/// Benchmark style-transfer inference: the IMAGE capability cycles between
/// the style image and the content image for each call.
fn styletransfer_inference_benchmark(c: &mut Criterion) {
    let base = example_dir().join("style_transfer");
    let mut img = BaseImage::with_defaults();
    img.register_capability(
        capabilities::IMAGE,
        new_capability_switcher::<Image, _>(vec![
            ImageSource::from_file(base.join("style.jpg")).unwrap(),
            ImageSource::from_file(base.join("flower.jpg")).unwrap(),
        ]),
    );
    let mut runtime =
        Runtime::load(&load_rune(base.join("style_transfer.rune")), img)
            .context("Unable to create rune runtime")
            .unwrap();
    c.bench_function("styletransfer_inference", |b| {
        b.iter(|| runtime.call().context("Call Failed").unwrap())
    });
}
// Both groups use a reduced sample size (10): a single build or inference
// iteration is expensive, so criterion's default of 100 would be too slow.
criterion_group!(
    name = build_benchmark;
    config = Criterion::default().significance_level(0.1).sample_size(10);
    targets = sine_build_benchmark);
criterion_group!(
    name = inference_benchmark;
    config = Criterion::default().significance_level(0.1).sample_size(10);
    targets = sine_inference_benchmark, microspeech_inference_benchmark, styletransfer_inference_benchmark);
criterion_main!(build_benchmark, inference_benchmark);
|
def load_data(self, path: Path) -> bool:
    """Load a previously saved tool report from ``path`` into this object.

    Returns ``True`` when the file does not exist (nothing to load) or when
    it was read successfully; a falsy value when ``read_json`` fails.
    """
    self._logger.debug("LOADING DATA")
    if not path.exists():
        self._logger.debug(f'No report found for [{path.resolve()}]')
        return True
    self._logger.debug("PATH ==> {}".format(path))
    data = read_json(path)  # presumably returns False on failure -- TODO confirm
    self._logger.debug("DATA ==> {}".format(data))
    if data is not False:
        self._tool_report = data
    # NOTE(review): `data != False` is equality, not identity -- a loaded
    # value equal to False/0 would be stored above yet reported as failure.
    return data != False
import * as Koa from 'koa';
/** Koa application type with the default state/context parameters. */
export type ServerApplication = Koa<Koa.DefaultState, Koa.DefaultContext>;

/** Hook implemented by components that need to run work at server startup. */
export interface StartHandler {
    /** Called with the Koa app during startup; resolve when initialization is done. */
    onStartAsync(app: ServerApplication): Promise<void>;
}
|
def py_exe(self):
    """Path of the Windows Python interpreter inside ``py_dir``."""
    interpreter = self.py_dir.joinpath('python.exe')
    return interpreter
#include <bits/stdc++.h>
using namespace std;
using Graph = vector<vector<int>>; // adjacency-list graph shorthand
typedef long long ll; // contest shorthand for 64-bit ints
#define all(x) (x).begin(), (x).end() // iterator-range helper for algorithms
// Streams a vector as "[a, b, c]" using the elements' own operator<<.
template <class T>
std::ostream& operator<<(std::ostream& os, const std::vector<T>& V) {
    os << "[";
    bool first = true;
    for (const auto& item : V) {
        if (!first) os << ", ";
        first = false;
        os << item;
    }
    os << "]";
    return os;
}
// Streams a pair as "{first, second}".
template <class A, class B>
std::ostream& operator<<(std::ostream& os, const std::pair<A, B>& V) {
    return os << "{" << V.first << ", " << V.second << "}";
}
// Streams a map as "{k1: v1, k2: v2}" in key order.
template <class A, class B, class C, class D>
std::ostream& operator<<(std::ostream& os, const std::map<A, B, C, D>& V) {
    os << "{";
    const char* sep = "";
    for (const auto& kv : V) {
        os << sep << kv.first << ": " << kv.second;
        sep = ", ";
    }
    os << "}";
    return os;
}
// Streams an unordered_map as "{k: v, ...}" in bucket iteration order.
template <class A, class B, class C, class D, class E>
std::ostream& operator<<(std::ostream& os, const std::unordered_map<A, B, C, D, E>& V) {
    os << "{";
    const char* sep = "";
    for (const auto& kv : V) {
        os << sep << kv.first << ": " << kv.second;
        sep = ", ";
    }
    os << "}";
    return os;
}
// Streams a set as "{a, b, c}" in sorted order.
template <class A, class B, class C>
std::ostream& operator<<(std::ostream& os, const std::set<A, B, C>& V) {
    os << "{";
    const char* sep = "";
    for (const auto& item : V) {
        os << sep << item;
        sep = ", ";
    }
    os << "}";
    return os;
}
// Streams an unordered_set as "{a, b, c}" in bucket iteration order.
template <class A, class B, class C, class D>
std::ostream& operator<<(std::ostream& os, const std::unordered_set<A, B, C, D>& V) {
    os << "{";
    const char* sep = "";
    for (const auto& item : V) {
        os << sep << item;
        sep = ", ";
    }
    os << "}";
    return os;
}
// Recursive variadic printer used by the debug() macro: prints each argument
// preceded by a single space. The no-arg overload ends the recursion.
void debug_func() {}
template <class Head, class...Tail>
void debug_func(Head H, Tail... T) {
    std::cout << " " << H;
    debug_func(T...);
}
// debug(...) prints "[args]:" plus each value, but only in local builds
// (OCHKO defined); on the judge it expands to a harmless no-op expression.
#ifdef OCHKO
#define debug(...) std::cout << "[" << #__VA_ARGS__ "]:"; debug_func(__VA_ARGS__);cout << endl;
#else
#define debug(...) 3;
#endif
// Reads a graph of n nodes and m edges, each edge carrying a 0/1 result
// flag. G stores the undirected structure; G2 appears to encode difference
// constraints as weighted arcs (result != 0 pins b exactly one above a,
// result == 0 allows either side to be one higher -- TODO confirm against
// the original problem statement). A DFS first rejects graphs that are not
// 2-colorable (adjacent nodes must sit on levels of different parity);
// Floyd-Warshall then solves the constraint system, a negative cycle meaning
// infeasibility. The node pair with maximal shortest-path distance fixes the
// answer, and d[first][i] is printed as each node's level.
int main() {
    std::ios_base::sync_with_stdio(false);
    std::cin.tie(nullptr);
#ifdef OCHKO
    freopen("in", "r", stdin);
    freopen("out", "w", stdout);
#endif
    int n, m;
    cin >> n >> m;
    vector<vector<int>> G(n);
    vector<vector<pair<int, int>>> G2(n);
    for (int i = 0; i < m; ++i) {
        int a, b, result;
        cin >> a >> b >> result;
        --a; --b;
        G[a].emplace_back(b);
        G[b].emplace_back(a);
        if (result) {
            // constraint arcs: x[b]-x[a] <= 1 and x[a]-x[b] <= -1
            G2[a].emplace_back(b, 1);
            G2[b].emplace_back(a, -1);
        } else {
            // symmetric arcs: |x[a]-x[b]| <= 1
            G2[a].emplace_back(b, 1);
            G2[b].emplace_back(a, 1);
        }
    }
    vector<char> used(n);
    vector<int> num(n, -1);
    // DFS 2-coloring check: neighbours must differ in level parity.
    // NOTE(review): only the component containing node 0 is visited --
    // presumably the input graph is connected; verify.
    function<bool(int, int)> check = [&](int v, int lvl) {
        num[v] = lvl;
        used[v] = 1;
        for (int u : G[v]) {
            if (used[u]) {
                if ((num[u] - lvl) % 2 == 0) {
                    return false;
                }
                continue;
            }
            if (!check(u, lvl+1)) {
                return false;
            }
        }
        return true;
    };
    if (!check(0, 0)) {
        cout << "NO\n";
        return 0;
    }
    // All-pairs shortest paths; n + 10 acts as "infinity".
    vector<vector<int>> d(n, vector<int>(n, n + 10));
    for (int i = 0; i < n; ++i) {
        for (auto [u, w] : G2[i]) {
            d[i][u] = w;
        }
        d[i][i] = 0;
    }
    for (int k = 0; k < n; ++k) {
        for (int i = 0; i < n; ++i) {
            for (int j = 0; j < n; ++j) {
                if (d[i][k] == n + 10 || d[k][j] == n + 10) {
                    continue;
                }
                d[i][j] = min(d[i][j], d[i][k] + d[k][j]);
                // distances below -(n+10) signal a negative cycle in progress
                if (d[i][j] < -n - 10) {
                    cout << "NO\n";
                    return 0;
                }
            }
        }
    }
    // Pick the pair with the greatest shortest-path distance; any node on a
    // negative cycle (d[i][i] < 0) also makes the system infeasible.
    int first = 0, last = 0;
    for (int i = 0; i < n; ++i) {
        if (d[i][i] < 0) {
            cout << "NO\n";
            return 0;
        }
        for (int j = 0; j < n; ++j) {
            if (d[first][last] < d[i][j]) {
                first = i;
                last = j;
            }
        }
    }
    if (d[first][last] < 0) {
        cout << "NO\n";
        return 0;
    }
    cout << "YES\n";
    cout << d[first][last] << "\n";
    for (int i = 0; i < n; ++i) {
        cout << d[first][i] << ' ';
    }
    return 0;
}
|
def process_austin_parcel_data():
    """Read the Austin parcel shapefile and normalise its ``basezone`` column.

    Zone strings of the form ``"CODE(description)"`` are reduced to the text
    inside the parentheses; values without a ``(`` are kept unchanged.

    Returns:
        The parcel GeoDataFrame with the normalised ``basezone`` column.
    """
    time0 = time.time()
    print('Starting to read parcel data')
    parcel_data = gpd.read_file(austin_parcel_path)
    print('Finished reading parcel data, took {} seconds. Now processing base zones.'.format(time.time() - time0))

    def format_basezone(s):
        # Keep only the text between "(" and the trailing ")".
        s = str(s)
        try:
            return s.split('(')[1][:-1]
        except IndexError:
            # No "(" in the value: return it as-is. Previously a bare
            # ``except`` which also swallowed unrelated errors (even
            # KeyboardInterrupt); only IndexError can occur here.
            return s

    parcel_data['basezone'] = parcel_data['basezone'].apply(format_basezone)
    return parcel_data
/**
 * Test the internal method to encode and decode lists.
 */
@Test
public void testEncodeListAndDecodeList() {
    UriActiveSearchFilterImpl filter = Spring.getBeanOfType(UriActiveSearchFilterImpl.class);
    List<String> list = new ArrayList<String>();
    list.add("one");
    list.add("two");
    StringBuilder encoding = new StringBuilder();
    filter.encodeList(encoding, list);
    // encodeList joins elements with "," and terminates the list with ":"
    assertEquals("one,two:",encoding.toString());
    // decodeList accepts the list body without the ":" terminator
    List<String> result = filter.decodeList("one,two", String.class);
    assertEquals("one",result.get(0));
    assertEquals("two",result.get(1));
    assertEquals(2,result.size());
}
// GetSchema wraps RPCAgent.GetSchema
// RPCWrap attaches call info and handles errors uniformly; the schema is
// copied into reply only when the agent call succeeds.
func (tm *TabletManager) GetSchema(ctx context.Context, args *gorpcproto.GetSchemaArgs, reply *myproto.SchemaDefinition) error {
	ctx = callinfo.RPCWrapCallInfo(ctx)
	return tm.agent.RPCWrap(ctx, actionnode.TabletActionGetSchema, args, reply, func() error {
		sd, err := tm.agent.GetSchema(ctx, args.Tables, args.ExcludeTables, args.IncludeViews)
		if err == nil {
			*reply = *sd
		}
		return err
	})
}
/****************************************************************************/
/**
*
* Break occurred signalling that a recovery should be performed. Call the
* prerecovery user handler, and then suspend the processor, to signal to
* the TMR Manager hardware that it should reset the TMR sub-system.
*
* @param	InstancePtr is a pointer to the XTMR_Manager instance.
*
* @return	None.
*
* @note		Called from break vector, with interrupts disabled.
*
****************************************************************************/
void XTMR_Manager_BreakHandler (XTMR_Manager *InstancePtr)
{
	/* Validate the instance, its readiness, and that the control register
	 * has recovery-into-reset enabled with the expected magic value. */
	Xil_AssertVoid(InstancePtr != NULL);
	Xil_AssertVoid(InstancePtr->IsReady == XIL_COMPONENT_IS_READY);
	Xil_AssertVoid((InstancePtr->Cr & XTM_CR_RIR) != 0);
	Xil_AssertVoid((InstancePtr->Cr & XTM_CR_MAGIC1_MASK) ==
		       XPAR_TMR_MANAGER_0_MAGIC1);
	/* Give the application a chance to save state before the reset. */
	if (InstancePtr->PreResetHandler != NULL)
	  InstancePtr->PreResetHandler(
	      InstancePtr->PreResetCallBackRef);
}
<gh_stars>10-100
import { AccessoryConfig } from "homebridge"

/** Accessory configuration as read from the Homebridge config file. */
export interface Config extends AccessoryConfig {
    /** HomeKit service types to expose (see Service enum). */
    services?: Service[]
    /** Volume assumed at startup. */
    initialVolume?: number
    /** Whether the device starts muted. */
    initiallyMuted?: boolean
    /** Use a logarithmic volume curve instead of a linear one. */
    logarithmic?: boolean
    /** Volume step applied by the increase/decrease buttons. */
    switchVolumeDelta?: number
    /** Delay (presumably milliseconds -- confirm against the accessory code)
     *  between repeated button-driven volume changes. */
    switchDelay?: number
}

/** Supported HomeKit service representations for the volume control. */
export enum Service {
    Lightbulb = "lightbulb",
    Speaker = "speaker",
    Fan = "fan",
    IncreaseVolumeButton = "increase-button",
    DecreaseVolumeButton = "decrease-button",
}

/** Volume scaling strategies. */
export enum VolumeAlgorithm {
    Linear = "linear",
    Logarithmic = "logarithmic",
}

export default Config
|
def extract_from_itch_group(group_page):
    """Collect game URLs from an itch.io sale/group page.

    Returns a tuple ``(urls, more)``: every game URL on the page, and the
    subset whose cell has a "blurb" section. Both sets are empty when the
    page shows the sale-ended notice.
    """
    soup = BeautifulSoup(group_page, 'lxml')
    urls = set()
    more = set()
    if soup.find_all('div', class_='not_active_notification'):
        print(" Sale ended")
        return urls, more
    for cell in soup.find_all('div', class_='game_cell'):
        link = cell.find('a').get('href')
        urls.add(link)
        if cell.find('div', class_='blurb_outer') is not None:
            more.add(link)
    return urls, more
/*
* Copyright (C) 2007 <NAME>
* Hochschule fuer Technik Rapperswil
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version. See <http://www.fsf.org/copyleft/gpl.txt>.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
#include "gateway.h"
#include <sys/types.h>
#include <sys/socket.h>
#include <unistd.h>
#include <string.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <xml.h>
typedef struct private_gateway_t private_gateway_t;
/**
 * private data of gateway
 */
struct private_gateway_t {
	/**
	 * public functions
	 */
	gateway_t public;
	/**
	 * name of the gateway
	 */
	char *name;
	/**
	 * host to connect using tcp, NULL when using the UNIX socket
	 */
	host_t *host;
	/**
	 * socket file descriptor, > 0 if connected
	 */
	int fd;
	/**
	 * unique id assigned to each xml message
	 */
	int xmlid;
};
/* address of charon's local XML control socket */
struct sockaddr_un unix_addr = { AF_UNIX, IPSEC_PIDDIR "/charon.xml"};
/**
 * establish connection to gateway
 *
 * Any previous connection is closed first. Depending on configuration the
 * new connection is TCP to this->host or charon's local UNIX socket. On
 * failure this->fd is left at -1.
 */
static bool connect_(private_gateway_t *this)
{
	int family, len;
	struct sockaddr *addr;

	if (this->fd >= 0)
	{
		close(this->fd);
		/* invalidate immediately: if socket() below fails, keeping the
		 * closed descriptor around would lead to a double close (or a
		 * send on a descriptor reused elsewhere) on the next attempt */
		this->fd = -1;
	}
	if (this->host)
	{
		family = AF_INET;
		addr = this->host->get_sockaddr(this->host);
		len = *this->host->get_sockaddr_len(this->host);
	}
	else
	{
		family = AF_UNIX;
		addr = (struct sockaddr*)&unix_addr;
		len = sizeof(unix_addr);
	}
	this->fd = socket(family, SOCK_STREAM, 0);
	if (this->fd < 0)
	{
		return FALSE;
	}
	if (connect(this->fd, addr, len) != 0)
	{
		close(this->fd);
		this->fd = -1;
		return FALSE;
	}
	return TRUE;
}
/*
 * Format an XML request with printf-style arguments, send it to the gateway
 * and return the (strdup'd) response, or NULL on failure. Connects lazily
 * and re-connects (retrying the send) once per failed send/recv.
 * NOTE(review): the reply is read with a single recv(), which assumes the
 * whole response arrives in one chunk -- confirm this is acceptable.
 */
METHOD(gateway_t, request, char*,
	private_gateway_t *this, char *xml, ...)
{
	if (this->fd < 0)
	{
		if (!connect_(this))
		{
			return NULL;
		}
	}
	while (TRUE)
	{
		char buf[8096];
		ssize_t len;
		va_list args;

		/* render the request into buf; bail out if it doesn't fit */
		va_start(args, xml);
		len = vsnprintf(buf, sizeof(buf), xml, args);
		va_end(args);
		if (len < 0 || len >= sizeof(buf))
		{
			return NULL;
		}
		if (send(this->fd, buf, len, 0) != len)
		{
			/* connection broke: reconnect and retry the send */
			if (!connect_(this))
			{
				return NULL;
			}
			continue;
		}
		len = recv(this->fd, buf, sizeof(buf) - 1, 0);
		if (len <= 0)
		{
			if (!connect_(this))
			{
				return NULL;
			}
			continue;
		}
		buf[len] = 0;
		return strdup(buf);
	}
}
/*
 * Query the daemon for the list of IKE_SAs. Sends an <ikesalist/> query and
 * walks the response tree (message > query > ikesalist); returns the child
 * enumerator of the <ikesalist> node, destroying the outer enumerators, or
 * NULL on transport/parse failure.
 */
METHOD(gateway_t, query_ikesalist, enumerator_t*,
	private_gateway_t *this)
{
	char *str, *name, *value;
	xml_t *xml;
	enumerator_t *e1, *e2, *e3, *e4 = NULL;

	str = request(this, "<message type=\"request\" id=\"%d\">"
			"<query>"
			"<ikesalist/>"
			"</query>"
			"</message>", this->xmlid++);
	if (str == NULL)
	{
		return NULL;
	}
	xml = xml_create(str);
	if (xml == NULL)
	{
		return NULL;
	}
	e1 = xml->children(xml);
	free(str);
	/* descend message -> query -> ikesalist */
	while (e1->enumerate(e1, &xml, &name, &value))
	{
		if (streq(name, "message"))
		{
			e2 = xml->children(xml);
			while (e2->enumerate(e2, &xml, &name, &value))
			{
				if (streq(name, "query"))
				{
					e3 = xml->children(xml);
					while (e3->enumerate(e3, &xml, &name, &value))
					{
						if (streq(name, "ikesalist"))
						{
							e4 = xml->children(xml);
							e1->destroy(e1);
							e2->destroy(e2);
							e3->destroy(e3);
							return e4;
						}
					}
					e3->destroy(e3);
				}
			}
			e2->destroy(e2);
		}
	}
	e1->destroy(e1);
	return NULL;
}
/*
 * Query the daemon for the list of peer configs. Same structure as
 * query_ikesalist(), but descends message > query > configlist.
 */
METHOD(gateway_t, query_configlist, enumerator_t*,
	private_gateway_t *this)
{
	char *str, *name, *value;
	xml_t *xml;
	enumerator_t *e1, *e2, *e3, *e4 = NULL;

	str = request(this, "<message type=\"request\" id=\"%d\">"
			"<query>"
			"<configlist/>"
			"</query>"
			"</message>", this->xmlid++);
	if (str == NULL)
	{
		return NULL;
	}
	xml = xml_create(str);
	if (xml == NULL)
	{
		return NULL;
	}
	e1 = xml->children(xml);
	free(str);
	/* descend message -> query -> configlist */
	while (e1->enumerate(e1, &xml, &name, &value))
	{
		if (streq(name, "message"))
		{
			e2 = xml->children(xml);
			while (e2->enumerate(e2, &xml, &name, &value))
			{
				if (streq(name, "query"))
				{
					e3 = xml->children(xml);
					while (e3->enumerate(e3, &xml, &name, &value))
					{
						if (streq(name, "configlist"))
						{
							e4 = xml->children(xml);
							e1->destroy(e1);
							e2->destroy(e2);
							e3->destroy(e3);
							return e4;
						}
					}
					e3->destroy(e3);
				}
			}
			e2->destroy(e2);
		}
	}
	e1->destroy(e1);
	return NULL;
}
/**
 * create enumerator over control elements children of a control response
 *
 * Parses the response string (taking ownership and freeing it), descends
 * message > control, and returns the <control> node's child enumerator,
 * or NULL when res is NULL or cannot be parsed.
 */
static enumerator_t* read_result(private_gateway_t *this, char *res)
{
	char *name, *value;
	xml_t *xml;
	enumerator_t *e1, *e2, *e3;

	if (res == NULL)
	{
		return NULL;
	}
	xml = xml_create(res);
	if (xml == NULL)
	{
		return NULL;
	}
	e1 = xml->children(xml);
	free(res);
	while (e1->enumerate(e1, &xml, &name, &value))
	{
		if (streq(name, "message"))
		{
			e2 = xml->children(xml);
			while (e2->enumerate(e2, &xml, &name, &value))
			{
				if (streq(name, "control"))
				{
					e3 = xml->children(xml);
					e1->destroy(e1);
					e2->destroy(e2);
					return e3;
				}
			}
			e2->destroy(e2);
		}
	}
	e1->destroy(e1);
	return NULL;
}
/*
 * Initiate an IKE_SA (ike == TRUE) or CHILD_SA by configuration name and
 * return the daemon's control response via read_result(), NULL on failure.
 */
METHOD(gateway_t, initiate, enumerator_t*,
	private_gateway_t *this, bool ike, char *name)
{
	char *str, *kind;

	/* select the XML element prefix: ikesainitiate vs childsainitiate */
	if (ike)
	{
		kind = "ike";
	}
	else
	{
		kind = "child";
	}
	str = request(this, "<message type=\"request\" id=\"%d\">"
			"<control>"
			"<%ssainitiate>%s</%ssainitiate>"
			"</control>"
			"</message>", this->xmlid++, kind, name, kind);
	return read_result(this, str);
}
/*
 * Terminate an IKE_SA (ike == TRUE) or CHILD_SA by its unique id and
 * return the daemon's control response via read_result(), NULL on failure.
 */
METHOD(gateway_t, terminate, enumerator_t*,
	private_gateway_t *this, bool ike, u_int32_t id)
{
	char *str, *kind;

	/* select the XML element prefix: ikesaterminate vs childsaterminate */
	if (ike)
	{
		kind = "ike";
	}
	else
	{
		kind = "child";
	}
	str = request(this, "<message type=\"request\" id=\"%d\">"
			"<control>"
			"<%ssaterminate>%d</%ssaterminate>"
			"</control>"
			"</message>", this->xmlid++, kind, id, kind);
	return read_result(this, str);
}
/*
 * Close the connection (if any), release the owned host and name, and free
 * the gateway object itself.
 */
METHOD(gateway_t, destroy, void,
	private_gateway_t *this)
{
	if (this->fd >= 0)
	{
		close(this->fd);
	}
	if (this->host) this->host->destroy(this->host);
	free(this->name);
	free(this);
}
/**
 * generic constructor
 *
 * Allocates and zero-initializes the object, binds the public interface,
 * copies the name and starts disconnected (fd = -1) with message id 1.
 */
static private_gateway_t *gateway_create(char *name)
{
	private_gateway_t *this;

	INIT(this,
		.public = {
			.request = _request,
			.query_ikesalist = _query_ikesalist,
			.query_configlist = _query_configlist,
			.initiate = _initiate,
			.terminate = _terminate,
			.destroy = _destroy,
		},
		.name = strdup(name),
		.fd = -1,
		.xmlid = 1,
	);
	return this;
}
/**
 * Described in header: create a gateway instance using a TCP host.
 */
gateway_t *gateway_create_tcp(char *name, host_t *host)
{
	private_gateway_t *gw;

	gw = gateway_create(name);
	gw->host = host;
	return &gw->public;
}
/**
 * Described in header: create a gateway instance without a TCP host
 * (host stays unset).
 */
gateway_t *gateway_create_unix(char *name)
{
	private_gateway_t *gw = gateway_create(name);

	return &gw->public;
}
|
These are external links and will open in a new window
These are external links and will open in a new window
These are external links and will open in a new window
Image copyright EPA Image caption It is estimated that there are as many as 260,000 illicit guns in Australia
Australia is bringing in its first national gun amnesty since 1996 because of the growing terrorism threat and an influx of illegal arms in the country.
During the three-month amnesty running from 1 July, people can hand in unregistered weapons without the fear of prosecution, the government says.
Those caught outside that period face fines of up to A$280,000 ($212,730; £166,480) or up to 14 years in prison.
It is estimated that there are as many as 260,000 illicit guns in Australia.
Justice Minister Michael Keenan said illegal guns were used in recent terror attacks in Australia as well as for organised crime.
"This is an opportunity for people to present the guns to authorities, no questions asked and with no penalty," he said.
"If people don't take that opportunity, the penalties for owning an unregistered or illegal gun in Australia are very severe."
Australia brought in a similar amnesty deal after the 1996 shootings in Port Arthur.
Attacker Martin Bryant killed 35 people in the historic tourist town in Tasmania - the worst mass shooting in Australia's history.
In recent years the authorities have been expressing growing concern over the threat of possible terrorist attacks in the country. |
.
OBJECTIVE
To study the association of vitamin D level with asthma control and pulmonary function in children with asthma.
METHODS
A total of 150 children with asthma were enrolled as observation group, and 55 healthy children were enrolled as control group. According to the level of asthma control, the children were divided into good control group, partial control group, and non-control group. Chemiluminescence microparticle immunoassay was used to measure the serum level of 25-hydroxyvitamin D for all groups. According to the level of 25(OH)D, the asthmatic children were divided into normal vitamin D group, vitamin D insufficiency group, and vitamin D deficiency group. Pulmonary function was measured for all asthmatic children.
RESULTS
The observation group had a significantly lower serum level of 25(OH)D than the control group (25±7 ng/mL vs 29±4 ng/mL; P<0.05). The normal vitamin D group had the highest asthma control rate, followed by the vitamin D insufficiency group and the vitamin D deficiency group (P<0.05). There was no significant difference in pulmonary function among the three groups (P>0.05).
CONCLUSIONS
Asthmatic children have a lower serum level of 25(OH)D than healthy children. The serum level of 25(OH)D is associated with the level of asthma control and has no association with pulmonary function. |
package main
import (
"log"
"os"
"time"
"github.com/urfave/cli"
)
// version defaults to the process start time.
// NOTE(review): presumably meant to be overridden at build time via
// -ldflags -- confirm.
var version = time.Now().String()

// main runs the CLI application and exits with a non-zero status on error.
func main() {
	app := newApp()
	if err := app.Run(os.Args); err != nil {
		log.Fatalln(err)
	}
}
// newApp assembles the atlas CLI application: global flags plus the
// "group" and "user" command trees, each with a "list" subcommand.
func newApp() *cli.App {
	app := cli.NewApp()
	app.Version = version
	app.EnableBashCompletion = true
	app.Name = "atlas"
	app.Usage = `Atlassian User Administration command line tool
see https://github.com/emicklei/atlas for documentation.
`
	// override -v
	cli.VersionFlag = cli.BoolFlag{
		Name:  "print-version, V",
		Usage: "print only the version",
	}
	app.Flags = []cli.Flag{
		cli.BoolFlag{
			Name:  "verbose, v",
			Usage: "verbose logging",
		},
	}
	// shared output-format flag for the list subcommands
	format := cli.BoolFlag{
		Name:  "json, JSON",
		Usage: "-json or -JSON",
	}
	app.Commands = []cli.Command{
		{
			Name:  "group",
			Usage: "Retrieving information related to groups",
			Subcommands: []cli.Command{
				{
					Name:  "list",
					Usage: "Show list of all groups",
					Flags: []cli.Flag{
						cli.IntFlag{
							Name:  "limit",
							Usage: "-limit 10",
						},
						format,
					},
					Action: func(c *cli.Context) error {
						return cmdGroupList(c)
					},
					ArgsUsage: `group list`,
				},
			},
		},
		{
			Name:  "user",
			Usage: "Retrieving information related to users",
			Subcommands: []cli.Command{
				{
					Name: "list",
					// was a copy-paste of the group usage text
					Usage: "Show list of all users",
					Flags: []cli.Flag{
						cli.IntFlag{
							Name:  "limit",
							Usage: "-limit 10",
						},
						format,
					},
					Action: func(c *cli.Context) error {
						return cmdUserList(c)
					},
					ArgsUsage: `user list`,
				},
			},
		},
	}
	return app
}
|
A first-in-human phase 1 and pharmacological study of TAS-119, a novel selective Aurora A kinase inhibitor in patients with advanced solid tumours
Background This is a first-in-human study with TAS-119, an Aurora A kinase (AurA) inhibitor. Methods Patients with advanced, refractory, solid tumours were enrolled into 5 dose escalation cohorts (70–300 mg BID, 4 days on/3 days off, 3 out of 4 weeks or 4 out of 4 weeks). The expansion part consisted of patients with small-cell lung cancer, HER2-negative breast cancer, MYC-amplified/β-catenin-mutated (MT) tumours or other (basket cohort). Results In the escalation part (n = 34 patients), dose-limiting toxicities were one grade 3 nausea, two grade 2 and one grade 3 ocular toxicity and a combination of fatigue, ocular toxicity and nausea in one patient (all grade 2) at dose levels of 150, 200, 250 and 300 mg, respectively. Most frequent treatment-related adverse events were fatigue (32%), diarrhoea (24%) and ocular toxicity (24%). Toxicity grade ≥3 in ≥10% of patients were diarrhoea (15%) and increased lipase (12%). The maximum tolerated dose was 250 mg BID. Due to one additional grade 1 ocular toxicity, the RP2D was set at 200 mg BID (4 days on/3 days off, 3 out of 4 weeks), which was further explored in the expansion part (n = 40 patients). Target inhibition in paired skin biopsies was shown. Conclusions TAS-119 has a favourable and remarkably distinct safety profile from other AurA inhibitors. Clinical trial registration NCT02448589.
TRK-B IC50 1.53 ± 0.12 nM, TRK-C IC50 1.47 ± 0.04 nM), RET (IC50: 25.8 ± 1.5 nM) and ROS (IC50: 29.3 ± 0.8 nM) was observed. 13 Compared to Alisertib (MLN8237), the only AurA inhibitor that has progressed to evaluation in a phase 3 trial, 14 the IC50 for AurB was 1533 nM (±1060 nM) and the inhibitory effect on TRK-A, TRK-B, TRK-C, RET and ROS were relatively high (mean inhibition of 87, 78, 91, 50 and 80%, respectively), but its clinical significance is unknown. The effect on AurC was not described. 15 Furthermore, TAS-119 and other AurA kinase inhibitors demonstrated more potent growth inhibitory effects on cancer cells with MYC oncogene amplifications and/or mutations in the Wnt/β-catenin pathway. 16,17 We conducted a first-in-human phase 1 study with TAS-119 to assess safety and tolerability and to determine the maximumtolerated dose (MTD) and recommended phase 2 dose (RP2D). Other objectives were the assessment of pharmacokinetics (PK), pharmacodynamics (PD) and preliminary antitumour activity of TAS-119. In the expansion part of this study, patients with specific tumour types and tumours known to harbour either MYC oncogene amplifications or β-catenin mutations were enrolled to further explore antitumour activity.
Study design
This multicentre study (six centres) consisted of a dose escalation part with a 3 + 3 design to explore safety and tolerability. Patients were enrolled into predefined dose levels (DLs; 70, 150, 200, 250, 300 mg BID) utilising a 4 days on/3 days off schedule 3 out of 4 weeks (intermittent schedule). The rationale for this intermittent schedule was based on pre-clinical data showing a more favourable toxicity profile, compared to continuous dosing regimens while maintaining antitumour activity. In addition to the 4 days on/3 days off, 3 out of 4 weeks schedule, a continuous dosing (200 mg BID) schedule with the same weekly schedule (4 days on/3 days off) administered 4 out of 4 weeks, was explored in the escalation phase. A minimum of 3 patients were to be treated at each DL, and at least 6 patients were planned to be enrolled at the MTD level. The MTD was defined as the highest DL at which <33% of patients experienced a dose-limiting toxicity (DLT) during cycle 1. The RP2D was defined as a dose below or equal to the MTD based on the evaluation of all available information (tolerability in cycles after cycle 1, PK, PD or other safety information). The RP2D was used in the expansion part of this study. Based on the safety profile assessed in the dose escalation part of this study, either the intermittent (3 out of 4 weeks) or the continuous dosing schedule (4 out of 4 weeks) could be selected.
The dose escalation part of this study enrolled patients with unselected advanced solid tumours for which no standard treatment options were available. Based on the abovementioned rationale to select for these tumours, enrolment in the expansion part of the study was restricted to patients with either any tumours with known MYC gene amplification or βcatenin mutation (MT) based on local testing, patients with smallcell lung cancer (SCLC), HER2-negative breast cancer, as well as to patients with other solid tumours in a basket cohort. There was no pre-screening for MYC gene amplified/β-catenin-mutated tumours.
The study was approved by the local ethics committees of the participating centres and was performed according to the principles defined by the Declaration of Helsinki and Good Clinical Practice guidelines. All patients gave written informed consent prior to any study-related procedure. Study initiation was in September 2014 and completion date in August 2019 (NCT02448589).
Inclusion criteria (full description are available in the Supplementary text) were age ≥18 years; Eastern Cooperative Oncology Group performance status 0-1; and adequate bone marrow, renal and hepatic function. Patients in the dose expansion part had to undergo a core tumour biopsy, as well as paired sampling of nontumour tissue (skin biopsies), for PD assessments if considered clinically safe and appropriate (this was optional in the dose escalation part of this study). For the expansion part of this study, patients with tumours harbouring MYC-amplification/β-catenin mutations were selected, based on pre-clinical evidence that MYC amplification as well as β-catenin mutation could sensitise to Aur A inhibition. Treatment, starting dose and dose escalation TAS-119 was administered orally BID as tablets of 25 and 100 mg strength, on an empty stomach. Based on rodent toxicology data, 70 mg BID was determined as the starting dose after conversion from the severely toxic dose in 10% of exposed animals (STD10) (63 mg/kg BID) to one tenth of the human equivalent dose. Cyclic administration (4 days on/3 days off) resulted in less frequent vomiting and liquid faeces in comparison to daily administration. These data resulted in predefined dose escalation cohorts of the intermittent schedule of 70, 100, 150, 200, 250 and 300 mg BID, 4 days on/3 days off. Non-haematological DLTs consisted of grade ≥3 toxicity (excluding nausea/vomiting lasting <48 h and controlled by anti-emetic therapy, diarrhoea lasting <48 h and responsive to anti-diarrhoea medication or hypersensitivity reactions), whereas haematological DLT consisted of any grade 4 neutropenia lasting >7 days, any febrile neutropenia (documented absolute neutrophil count <1000/mm 3 ) lasting >1 h, any grade 4 thrombocytopenia or grade 3 thrombocytopenia associated with bleeding and requiring blood transfusion. 
In addition, any grade ≥3 drug-related toxicity (excluding hypersensitivity reactions) that prevented administration of ≥80% of the assigned dose of cycle 1 or resulted in a delay of >14 days in starting cycle 2 was considered DLT.
Pre-treatment and study evaluations Vital sign assessment, blood cell count, serum biochemistry, coagulation parameters, urinalysis, a 12-lead electrocardiogram and, if applicable, a pregnancy test were performed at baseline. In addition, after the amendment of the protocol (Amendment 2: 31-March-2015) an ophthalmologic assessment, including visual acuity, pupil shape and pupillary reflexes, extraocular motility (eye movement) and alignment, tonometry, visual field, external examination, slit-lamp examination, and fundoscopy, was performed and repeated in all patients on days 8 and 22 during cycle 1 and on day 1 of every subsequent cycle, beginning with cycle 3. This was a consequence of ocular toxicity observed in 2 out of 26 patients until that moment.
Adverse events (AEs) at baseline and during the study were recorded and graded based on the Common Terminology Criteria for Adverse Events v4.03. Tumour measurements were done at the end of every second cycle or as per the Institutional standard of care in case of clinical indications. Response were assessed using Response Evaluation Criteria in Solid Tumours (RECIST) v1.1. 19 Blood samples for PK analysis were collected in cycle 1, on days 1, 4 and 18 pre-dose and at 0.5, 1, 2, 3, 5, 8, and 12 h post-dose. Urine samples were collected on day 1 of cycle 1 before dosing and from 0 to 12 h after dosing. Plasma and urine concentrations of TAS-119 were determined by validated liquid chromatography-tandem mass spectrometry method. PK parameters included the peak plasma concentration (C max ), time to reach maximum concentration in plasma (T max ), area under the plasma concentration-time curve up to the last observable concentration (AUC 0 − last ) and up to infinity (AUC 0 − inf ), terminal phase elimination half-life (T 1/2 ), clearance (CL/F), apparent volume of distribution (V d /F), renal clearance (CL r ) and oral clearance (CL/F).
Blood and tissue samples were taken for PD on-target effects during mitosis of TAS-119. First, the rate of phosphorylated histone H3 (pHH3) immunohistochemistry-positive cells to total cells were measured in paired skin biopsies and paired tumour samples collected prior to first TAS-119 administration as well as after receiving TAS-119 on day 4 of cycle 1. In case of target engagement, an increase in pHH3 is expected because of cells that will stagnate in mitosis. Second, pre-and post-dose mRNA expression of genes involved in mitosis, BORA, SGOL2, KIF20A and DEPDC-1, was analysed in tissue samples by reverse transcriptasepolymerase chain reaction.
The influence of polymorphisms of SLCO1B1 encoding the drug influx transporter OATP1B1 was examined in a blood sample obtained pre-dose on day 1 of cycle 1 for all patients during the dose escalation part. MYC amplification and β-catenin mutation were assessed in archival formalin-fixed paraffin-embedded tumour samples obtained after enrolment of the patient into the study.
Statistical analysis
Planned enrolment in the dose escalation part included 18-30 evaluable patients, with 3-6 DLT evaluable patients in each DL. To further assess the feasibility as well as preliminarily efficacy of the RP2D, approximately 40 patients were planned to be enrolled into A first-in-human phase 1 and pharmacological study of TAS-119, a novel. . . DGJ Robbrecht et al.
the expansion part (approximately 10-15 patients in each of the expansion cohorts). An additional 20 patients were pre-planned to be enrolled in an extension of the expansion part provided that either an overall response rate (ORR) of ≥20% for each specific indication or ≥10% for patients with MYC-amplified or β-cateninmutated tumours had been observed. The addition of 20 patients in the indication, which demonstrates the most promising response, provides a reasonable number of patients (n = 30) to be explored. Descriptive statistics were used to summarise safety data (AEs, vital signs and clinical laboratory results) overall response based on RECIST, PK and PD data. PK parameters were calculated by standard non-compartmental methods using Phoenix TM WinNon-lin® (Ver 6.3 or later, Certara L.P.). Dose proportionality of TAS-119 was evaluated with a power model and a linear regression model using logarithmic values of PK parameters such as C max and AUCs, as well as a one-way analysis of variance (ANOVA) using dosenormalised parameters such as C max , AUCs, CL/F, and apparent volume of distribution (V d /F). Student's t test was used to test statistical significance of the PD data and calculate the mean ratio for AUC 0 − last at RP2D or MTD. Influence of SLCO1B1 genotypes on PK parameters was tested by ANOVA.
RESULTS
Thirty-four patients were enrolled in the dose escalation part and received at least one dose of TAS-119. Four patients did not receive ≥80% of the assigned dose in cycle 1 and were deemed unevaluable. In the expansion part, 40 patients were enrolled and received at least 1 dose of TAS-119. One patient was ongoing as of the data cut-off (β-catenin-mutated non-small-cell lung cancer). Patient baseline characteristics for the both the dose escalation and expansion population are summarised in Table 1.
Based on the 2 DLTs reported at DL 3 (300 mg BID), the MTD was determined to be 250 mg BID. As a result of one patient with grade 1 treatment-related eye toxicity at this DL, the RP2D moving forward in the study was set at DL 200 mg BID.
The 4-week continuous dosing regimen was evaluated in the dose escalation part, in parallel with conducting the expansion part. The continuous dosing schedule has never been initiated in the expansion part based on the preliminary results from the dose escalation part, showing no significant differences between the intermittent and continuous schedule of 200 mg BID. No DLT was observed in 6 patients treated at 200 mg BID in the 4-week continuous dosing regimen.
PK and PD A total of 34 patients were evaluable for PK data. Mean plasma concentrations over time showed a dose-proportional increase of plasma exposure that did not significantly change after multiple doses on day 4 and on day 18, both being on-treatment days (Fig. 1). One out of 2 patients at DL3 had a relatively high plasma concentration on cycle 1 day 4, but both patients at this DL3 experienced some ocular toxicity. Dose-proportionality analyses by power model, linear model and one-way ANOVA confirmed dose proportionality of TAS-119 PK (Fig. 2).
There was a median T max of 1.2 h (range 0.5-2.0 h); blood concentrations declined with a mean half-life between 2.8 and 6.0 h. The main CL r was much lower (median 0.25 L/h, range 0.15-0.31 L/h) than the main oral clearance (CL/F) (median 9.79 L/h, range 2.72-13.94 L/h) in plasma. No trends were observed between dose and urinary PK parameters. The accumulation ratios throughout all DLs were low. At the RP2D level (DL 2.1; 200 mg BID), the mean ratio of AUC 0 − last were 1.3 and 1.2 on both days 4 and 18, respectively. This did not significantly differ between the intermittent and continuous 200 mg BID schedule (DL 2.1) with a ratio of 1.2 and 1.1 on days 4 and 18, respectively. The CL/F and V d /F of TAS-119 were compared among SLCO1B1 gene polymorphism-caused phenotypes (normal/intermediate/low) by ANOVA, and no statistical differences were observed.
Analyses for target modulation of TAS-119 with respect to mRNA expression were evaluable for 32 patients from the dose escalation part (1 missing sample at DL 200 mg BID and DL 250 mg BID) and showed no significant increase in the expression level. The mean pHH3-positive rate in skin samples, available for 55 paired samples, increased after TAS-119 administration (p value <0.0001; Fig. 3). The mean pHH3 rate did not significantly change in paired tumour samples, although the sample size was very small (eight paired tumour samples).
Antitumour activity
In the dose escalation part, no complete response (CR) or partial response (PR) was observed. Fourteen (41%) patients had stable disease as best response: 6 patients at DL 2 (150 mg BID), 4 patients at DL 2.1 (200 mg BID), and 4 patients at DL 2.1 (200 mg BID continuous schedule). One patient with an epithelial mesothelioma at DL 2 (150 mg BID) had an unconfirmed PR at cycle 4.
In the expansion part of the study, no confirmed CR or PR were observed. Fourteen (35%) patients had stable disease (5 SCLC, 5 MYC-amp/B-cat mutation-positive tumours, 1 breast cancer and 2 basket cohort patients (1 mesothelioma, 1 colorectal cancer). Based on the observed response in the dose escalation part, additional mesothelioma patients were enrolled in the basket cohort of the trial (Table 1). No additional PRs or confirmed CRs were observed in any of these patients.
The target ORR of 20% (or ≥10% for patients with MYC amplification or β-catenin mutation) was not met in the expansion part, and therefore enrolment in the study was discontinued and the extension part of the expansion part has not been conducted.
DISCUSSION
Here we report on a first-in-human phase 1 study with TAS-119, an oral selective AurA kinase inhibitor. TAS-119 was largely well tolerated, with a low frequency of treatment discontinuation due to AEs. Grade ≥3 toxicities included diarrhoea (5/74 patients) and increased lipase without symptoms of pancreatitis (9/74 patients).
Diarrhoea, decreased appetite and increases in AST and/or ALT levels were expected AEs based on pre-clinical evaluation of TAS-119 and based on what has been seen in other AurA inhibitor trials. 12 Increased lipase levels were not expected. It is remarkable that the most commonly observed grade 3 drug-related toxicities in most other AurA inhibitor trials, such as (febrile) neutropenia, thrombocytopenia, anaemia and stomatitis, 11,12,14 were only very infrequently observed in this study. These side effects have hindered clinical development and application of various other Note: The overall treatment duration is defined as the first dose date of last cycle minus first dose date + 28. If a patient died within 28 days after the first dose day of the last cycle, the overall treatment duration is defined as death date minus first dose date + 1.
SD standard deviation.
A first-in-human phase 1 and pharmacological study of TAS-119, a novel. . . DGJ Robbrecht et al.
AurA inhibitors but were not significantly influencing patient's well-being or safety in this study. This observation can likewise be explained by either the different cross-inhibition pattern of other kinases with TAS-119 in comparison to other selective AurA inhibitors or the use of an intermittent administration schedule. Ocular AEs and DLTs led to the selection of 200 mg BID as the RP2D administered 4 days on/3 days off, every 3 out of 4 weeks, and therefore this dose and schedule was used in the expansion part of this trial.
The ocular toxicity in 38% of the patients throughout the various DLs is of particular concern. Ocular toxicity that became clear as a result of a decrease in visual acuity, mainly related to problems localised within the cornea, led to the incorporation of routine ophthalmologic examinations in subsequent patients in the trial. Microscopic findings in the pre-clinical high-dose toxicity study in animals showed degeneration and regeneration or hyperplasia of epithelial tissue, with considerable individual variation between animals and, only in dogs, involvement of the epithelium of the eyes. Expert review determined that it was plausible that the corneal events appeared to be due to excretion of TAS-119 into the tear film (not measured), with secondary direct irritation, and/or the effects of the agent on rapidly growing cells in the corneal basal epithelium. The events were dose dependent, temporary and resolved with cessation of treatment. The need to discontinue treatment because of these events was rare.
The observed corneal toxicity may be caused by a direct off-target toxic effect on the epithelium of the cornea, as well as an on-target effect by influencing the normal epithelial-mesenchymal transition (EMT) process of the corneal epithelium. EMT plays a role in the self-renewal and homoeostasis of the cornea 20 and active AurA is associated with mitogenactivated protein kinase pathway-induced EMT. 21 As a consequence of inhibiting AurA, it is conceivable that epithelial markers can be upregulated (reverse EMT). 21 Considering this mechanism, one could have expected that corneal toxicity would have occurred in other trials with AurA kinase inhibitors. However, and to the best of our knowledge, ocular toxicity has not been described in other clinical trials with AurA kinase inhibitors, 2,10-12 except for the new generation AurA inhibitor LY3295668, where corneal deposits were described without further specification. 22 It should be taken into consideration that inhibition of other kinases Corneal epithelial microcysts, blurred vision, conjunctivitis, keratitis, eye irritation, vitreous haemorrhage, blepharitis, vitreous floaters, increased intraocular pressure.
A first-in-human phase 1 and pharmacological study of TAS-119, a novel. . . DGJ Robbrecht et al.
by TAS-119 might also play a role in the ocular toxicity. The exact underlying mechanism(s) of ocular toxicity associated with TAS-119 require further investigation.
Target modulation of TAS-119 was shown in 55 paired skin biopsies with an increase in pHH3-positive cells after administration (Fig. 3). This was not confirmed in tumour biopsies; however, the sample size was very small (eight paired tumour samples) and therefore it is not feasible to draw firm conclusions.
TAS-119 demonstrated limited antitumour activity as single agent. Pre-clinical work indicated more growth inhibitory activity of AurA inhibitors on cancer cells with MYC oncogene amplifications and/or mutations in the Wnt/β-catenin pathway with decreased MYC protein expression in the presence of AurA kinase inhibition, 16,17 but this could not be confirmed clinically. 23 In conclusion, these genetic aberrations do not seem to have potential as a predictive biomarker for TAS-119. Because the predefined target of an ORR of 20% (or ≥10% for patients with MYC amplification or β-catenin mutation) was not met in the expansion part of this trial, enrolment in the study was not extended for more patients with MYC amplification or β-catenin mutation.
In conclusion, a twice-daily 200 mg dose in an intermittent schedule (4 days on 3 days off, 3 out of 4 weeks) was established as the dose and schedule for further activity testing of TAS-119. The RP2D was not merely determined by the overall toxicity profile but ocular toxicity was crucial in the decision. Evidence for target modulation was acquired but the observed antitumour activity of TAS-119 was disappointing. Although the observed ocular toxicity merits attention, the overall safety profile of TAS-119 seems to stand out when compared to that of other AurA inhibitors. These data support the further investigation of TAS-119 in pre-clinical combination trials to look for potential synergistic effects as well as in early clinical trials in combination with drugs influencing cell cycle processes. The study was performed according to the principles defined by the Declaration of Helsinki and Good Clinical Practice guidelines. All patients gave written informed consent prior to any study-related procedure.
AUTHOR CONTRIBUTIONS
Data availability All data supporting the results in this manuscript are available at Taiho Oncology. |
def check_user(username, error_message=''):
    """Validate a requested username.

    Returns an error string describing the first problem found (bad
    format, then name already taken), or the ``error_message`` argument
    (empty by default) when the name is acceptable.
    """
    user_re = re.compile(r"^[a-zA-Z0-9_-]{6,20}$")
    if not user_re.match(username):
        # Report the format problem immediately; querying the datastore
        # for a name that can never be registered is pointless, and the
        # original flow let the "already used" message mask this one.
        return 'The username does not fit the requirements'
    if UserData.by_name(username):
        return 'This username is already used'
    return error_message
Mr. Hu’s wife, Zeng Jinyan, herself a well-known blogger and rights advocate, was distraught in a telephone interview on Thursday.
“I feel hopeless and helpless,” said Ms. Zeng, who is under house arrest with the couple’s infant daughter in their suburban Beijing apartment, though she was allowed to visit her husband on Thursday.
Asked why Mr. Hu was arrested and convicted, she said: “The fundamental reason is to silence him. He had been speaking up and all he said was plain truth. It makes them unhappy. But they can do this to him because they’re unhappy?”
Earlier this year, Ms. Rice raised Mr. Hu’s case during a meeting with China’s foreign minister, Yang Jiechi. The European Union presidency has also criticized the subversion charge and called for Mr. Hu’s release.
Li Fangping, Mr. Hu’s lawyer, said the court showed leniency by sentencing him to less than the maximum five-year term. The sentence also forbids Mr. Hu to make any public political statements for one year after his release from prison, Mr. Li said.
Photo
“Three and a half years is still unacceptable to us,” Mr. Li told a throng of reporters outside the courthouse. “There is a major disagreement between prosecutors and the defense over punishing someone for making peaceful speech. We still believe the charge does not stand.”
Prosecutors in China rarely discuss cases after a verdict. But Xinhua, the country’s official news agency, reported that Mr. Hu had confessed to the charges. “Hu spread malicious rumors and committed libel in an attempt to subvert the state’s political power and socialist system,” the court verdict stated, according to Xinhua.
Advertisement Continue reading the main story
In his human rights work, Mr. Hu has volunteered to help AIDS patients and plant trees to fight the encroachment of desert.
Newsletter Sign Up Continue reading the main story Please verify you're not a robot by clicking the box. Invalid email address. Please re-enter. You must select a newsletter to subscribe to. Sign Up You will receive emails containing news content , updates and promotions from The New York Times. You may opt-out at any time. You agree to receive occasional updates and special offers for The New York Times's products and services. Thank you for subscribing. An error has occurred. Please try again later. View all New York Times newsletters.
He has maintained regular contacts with dissidents and other advocates on issues that include environmental protection and legal reform. He has also served as a one-man clearinghouse for information about peasant protests and dissidents.
He was detained on Dec. 27 last year and later charged with “incitement to subvert state power,” an accusation based on six essays and interviews in which he criticized the Communist Party. He wrote a long, blistering essay detailing how the police had tortured two people who had protested the illegal seizure of their homes in Beijing. In that essay, he also criticized the party’s human rights record.
Mr. Hu posted the essay on his personal blog at a delicate time: in advance of last fall’s 17th Party Congress, a major political meeting in which the new party leadership was announced.
Last year, Mr. Hu was also a co-writer of an article that contended that the Communist Party had failed to fulfill its Olympic promises to improve human rights before the Beijing Games, though that article apparently was not included as evidence.
Mr. Li said that Mr. Hu continued to maintain his innocence, though he had acknowledged outside the courtroom that some of his comments were “excessive” in the context of existing law.
China’s subversion laws, like those for state secrets, are deliberately vague and grant prosecutors considerable leeway in determining subversive speech, although freedom of speech is included in the Constitution.
Mr. Hu has 10 days to decide whether to appeal the verdict. His health is also an issue; he has hepatitis B and also takes medication for a deteriorating liver condition. Mr. Li said Mr. Hu had the option of applying for medical parole if he chose not to appeal.
Meanwhile, Ms. Zeng, Mr. Hu’s wife, was anguished. “I’m very disappointed and very pained,” she said. “Yesterday, I thought he could be back home today.” |
#ifdef WITH_JEMALLOC
#ifndef JEMALLOC_EXPORT
#define JEMALLOC_EXPORT // to tell jemalloc that it is statically linked
#endif
#include "jemalloc/jemalloc.h" // don't forget to add jemalloc/include and msvc_compat to the include path
#include <new>
#ifdef _DEBUG
#ifdef _WIN64
#pragma comment(lib, "jemalloc-vc140-Debug-static")
#else
#pragma comment(lib, "jemalloc-v140-Debug-static")
#endif
#else
#ifdef _WIN64
#pragma comment(lib, "jemalloc-vc140-Release-static")
#else
#pragma comment(lib, "jemalloc-v140-Release-static")
#endif
#endif
// Thin wrapper over je_malloc that upholds the CRT contract for zero-byte
// requests: je_malloc(0) doesn't work, so at least one byte is allocated.
static inline void* my_malloc(size_t size)
{
	return je_malloc(size ? size : 1);
}
// Flag-taking variant of my_malloc; forwards to je_mallocx.
// je_mallocx(0, ...) doesn't work either, so round zero up to one byte.
static inline void* my_mallocx(size_t size, int flags)
{
	return je_mallocx(size ? size : 1, flags);
}
// Reallocation wrapper. je_rallocx rejects both a zero size and a NULL
// pointer, so a zero size is rounded up and a NULL pointer degrades to a
// plain flagged allocation.
static inline void* my_realloc(void* pointer, size_t size, int flags)
{
	if (!size)
		size = 1; // je_malloc(0) doesn't work
	if (pointer == nullptr)
		return my_mallocx(size, flags);
	return je_rallocx(pointer, size, flags);
}
// Deallocation wrapper; je_free(NULL) doesn't work, so only real pointers
// are handed to jemalloc (free(NULL) must be a no-op per the CRT contract).
static inline void my_free(void* pointer)
{
	if (pointer != nullptr)
		je_free(pointer);
}
// Reports the usable size of an allocation as tracked by jemalloc.
static inline size_t my_msize(void* pointer)
{
	return je_malloc_usable_size(pointer);
}
// Throwing operator new. Standard-conforming behaviour (resolves the old
// TODO): on allocation failure, invoke the installed new-handler — which may
// release memory — and retry; throw std::bad_alloc only once no handler is
// installed.
void* operator new(size_t size)
{
	for (;;)
	{
		void* ptr = my_malloc(size);
		if (ptr)
			return ptr;
		std::new_handler handler = std::get_new_handler();
		if (!handler)
			throw std::bad_alloc();
		handler(); // handler either frees memory, installs another handler, or throws
	}
}
// Throwing array operator new. Same new-handler retry loop as the scalar
// form (resolves the old TODO): retry while a handler is installed, throw
// std::bad_alloc only when none is.
void* operator new[](size_t size)
{
	for (;;)
	{
		void* ptr = my_malloc(size);
		if (ptr)
			return ptr;
		std::new_handler handler = std::get_new_handler();
		if (!handler)
			throw std::bad_alloc();
		handler(); // handler either frees memory, installs another handler, or throws
	}
}
// Non-throwing operator new: returns NULL on failure instead of throwing.
void* operator new(size_t size, const std::nothrow_t&) throw()
{
	return my_malloc(size);
}
// Non-throwing array operator new: returns NULL on failure instead of throwing.
void* operator new[](size_t size, const std::nothrow_t&) throw()
{
	return my_malloc(size);
}
// Matching deallocation functions. my_free tolerates NULL, as required.
void operator delete(void* pointer) throw()
{
	my_free(pointer);
}
void operator delete[](void* pointer) throw()
{
	my_free(pointer);
}
// Nothrow forms are invoked when a nothrow placement-new throws during
// construction; they must pair with the nothrow allocation functions above.
void operator delete(void* pointer, const std::nothrow_t&) throw()
{
	my_free(pointer);
}
void operator delete[](void* pointer, const std::nothrow_t&) throw()
{
	my_free(pointer);
}
// C++14's sized-delete operators
// (the size hint is ignored; jemalloc finds the allocation size itself)
void operator delete(void* pointer, std::size_t size) noexcept
{
	my_free(pointer);
}
void operator delete[](void* pointer, std::size_t size) noexcept
{
	my_free(pointer);
}
//TODO: Add C++17's aligned new operators
#if !defined(_DEBUG) && defined(JEMALLOC_MALLOC)
// Optionally also redirect malloc and free
// * Not possible in Debug mode, or link with /FORCE:MULTIPLE
// * Not possible with dynamic CRT (/MD)
extern "C" {
// Replacement for the CRT malloc(); never returns NULL for a size of 0
// (my_malloc rounds zero up to one byte).
void* malloc(size_t size)
{
	return my_malloc(size);
}
// Replacement for the CRT free(); safe to call with NULL.
void free(void* pointer)
{
	my_free(pointer);
}
// Replacement for the CRT calloc(): zeroed allocation of count elements of
// the given size.
void* calloc(size_t count, size_t size)
{
	// Reject count*size overflow instead of silently wrapping — a wrapped
	// product would allocate far less memory than the caller asked for.
	if (size && count > SIZE_MAX / size)
		return nullptr;
	return my_mallocx(count * size, MALLOCX_ZERO);
}
// Replacement for the CRT _expand().
// NOTE(review): the CRT contract for _expand() is to resize *in place* and
// return NULL if that is impossible, but je_rallocx may move the block —
// TODO confirm callers tolerate a moved allocation here.
void* _expand(void* pointer, size_t size)
{
	return my_realloc(pointer, size, 0);
}
// Replacement for the CRT _msize(): usable size of the allocation.
size_t _msize(void* pointer)
{
	return my_msize(pointer);
}
// Replacement for the CRT realloc(); NULL pointer behaves like malloc.
void* realloc(void* pointer, size_t size)
{
	return my_realloc(pointer, size, 0);
}
// Replacement for the CRT _recalloc(): reallocation with zero-fill of any
// newly added bytes.
void* _recalloc(void* pointer, size_t count, size_t size)
{
	// Reject count*size overflow instead of silently wrapping and
	// under-allocating (same hazard as in calloc).
	if (size && count > SIZE_MAX / size)
		return nullptr;
	return my_realloc(pointer, count * size, MALLOCX_ZERO);
}
// ---- Aligned allocation family -------------------------------------------
// jemalloc tracks allocation sizes itself, so the align/offset arguments to
// the *_msize functions are not needed and are ignored.
void _aligned_free(void* pointer)
{
	my_free(pointer);
}
void* _aligned_malloc(size_t size, size_t align)
{
	return my_mallocx(size, MALLOCX_ALIGN(align));
}
size_t _aligned_msize(void* pointer, size_t align, size_t offset)
{
	return my_msize(pointer);
}
// NOTE(review): every *_offset_* variant below passes `offset` (not `align`)
// to MALLOCX_ALIGN and ignores `align` entirely — this looks like it should
// be MALLOCX_ALIGN(align); confirm the intent before relying on these.
void* _aligned_offset_malloc(size_t size, size_t align, size_t offset)
{
	return my_mallocx(size, MALLOCX_ALIGN(offset)); // align + offset not implemented
}
void* _aligned_offset_realloc(void* pointer, size_t size, size_t align, size_t offset)
{
	return my_realloc(pointer, size, MALLOCX_ALIGN(offset)); // align + offset not implemented
}
// NOTE(review): count * size below can overflow like calloc's product; left
// unguarded here — confirm whether these entry points need the same check.
void* _aligned_offset_recalloc(void* pointer, size_t count, size_t size, size_t align, size_t offset)
{
	return my_realloc(pointer, count * size, MALLOCX_ZERO | MALLOCX_ALIGN(offset)); // align + offset not implemented
}
void* _aligned_realloc(void* pointer, size_t size, size_t align)
{
	return my_realloc(pointer, size, MALLOCX_ALIGN(align));
}
void* _aligned_recalloc(void* pointer, size_t count, size_t size, size_t align)
{
	return my_realloc(pointer, count * size, MALLOCX_ZERO | MALLOCX_ALIGN(align));
}
// MSVC CRT internal functions
// MSVC CRT internal calloc entry point.
void* _calloc_base(size_t count, size_t size)
{
	// Reject count*size overflow instead of silently wrapping (same fix as
	// the public calloc shim).
	if (size && count > SIZE_MAX / size)
		return nullptr;
	return my_mallocx(count * size, MALLOCX_ZERO);
}
// ---- MSVC CRT *_base internal entry points -------------------------------
// Same semantics as their public counterparts above.
void* _expand_base(void* pointer, size_t size)
{
	return my_realloc(pointer, size, 0);
}
void _free_base(void* pointer)
{
	my_free(pointer);
}
void* _malloc_base(size_t size)
{
	return my_malloc(size);
}
size_t _msize_base(void* pointer)
{
	return my_msize(pointer);
}
// BUG FIX: this function was declared as `realloc_base`, but the MSVC CRT
// internal entry point it is meant to override is `_realloc_base` (with a
// leading underscore, matching every other *_base function in this block).
// Without the underscore the override never takes effect and CRT-internal
// reallocations would bypass jemalloc.
void* _realloc_base(void* pointer, size_t size)
{
	return my_realloc(pointer, size, 0);
}
// MSVC CRT internal recalloc entry point.
void* _recalloc_base(void* pointer, size_t count, size_t size)
{
	// Reject count*size overflow instead of silently wrapping.
	if (size && count > SIZE_MAX / size)
		return nullptr;
	return my_realloc(pointer, count * size, MALLOCX_ZERO);
}
// ---- Aligned *_base internal entry points --------------------------------
void _aligned_free_base(void* pointer)
{
	my_free(pointer);
}
void* _aligned_malloc_base(size_t size, size_t align)
{
	return my_mallocx(size, MALLOCX_ALIGN(align));
}
// align/offset are unused: jemalloc tracks the allocation size itself.
size_t _aligned_msize_base(void* pointer, size_t align, size_t offset)
{
	return my_msize(pointer);
}
// NOTE(review): `offset` is used as the alignment in the *_offset_* variants
// below while `align` is ignored — looks suspicious; confirm intent.
void* _aligned_offset_malloc_base(size_t size, size_t align, size_t offset)
{
	return my_mallocx(size, MALLOCX_ALIGN(offset)); // align + offset not implemented
}
void* _aligned_offset_realloc_base(void* pointer, size_t size, size_t align, size_t offset)
{
	return my_realloc(pointer, size, MALLOCX_ALIGN(offset)); // align + offset not implemented
}
void* _aligned_offset_recalloc_base(void* pointer, size_t count, size_t size, size_t align, size_t offset)
{
	return my_realloc(pointer, count * size, MALLOCX_ZERO | MALLOCX_ALIGN(offset)); // align + offset not implemented
}
void* _aligned_realloc_base(void* pointer, size_t size, size_t align)
{
	return my_realloc(pointer, size, MALLOCX_ALIGN(align));
}
void* _aligned_recalloc_base(void* pointer, size_t count, size_t size, size_t align)
{
	return my_realloc(pointer, count * size, MALLOCX_ZERO | MALLOCX_ALIGN(align));
}
#undef _calloc_dbg
// Debug-CRT calloc shim; the block/file/line debug arguments are ignored.
void* _calloc_dbg(size_t count, size_t size, int block, const char* file, int line)
{
	// Reject count*size overflow instead of silently wrapping.
	if (size && count > SIZE_MAX / size)
		return nullptr;
	return my_mallocx(count * size, MALLOCX_ZERO);
}
// ---- Debug-CRT shims (the block/file/line debug arguments are ignored) ---
#undef _expand_dbg
void* _expand_dbg(void* pointer, size_t size, int block, const char* file, int line)
{
	return my_realloc(pointer, size, 0);
}
#undef _free_dbg
void _free_dbg(void* pointer, int block)
{
	my_free(pointer);
}
#undef _malloc_dbg
void* _malloc_dbg(size_t size, int block, const char* file, int line)
{
	return my_malloc(size);
}
#undef _msize_dbg
size_t _msize_dbg(void* pointer, int block)
{
	return my_msize(pointer);
}
#undef _realloc_dbg
void* _realloc_dbg(void* pointer, size_t size, int block, const char* file, int line)
{
	return my_realloc(pointer, size, 0);
}
#undef _recalloc_dbg
// Debug-CRT recalloc shim; debug arguments are ignored.
void* _recalloc_dbg(void* pointer, size_t count, size_t size, int block, const char* file, int line)
{
	// Reject count*size overflow instead of silently wrapping.
	if (size && count > SIZE_MAX / size)
		return nullptr;
	return my_realloc(pointer, count * size, MALLOCX_ZERO);
}
// ---- Aligned debug-CRT shims (debug arguments are ignored) ---------------
#undef _aligned_free_dbg
void _aligned_free_dbg(void* pointer)
{
	my_free(pointer);
}
#undef _aligned_malloc_dbg
void* _aligned_malloc_dbg(size_t size, size_t align, int block, const char* file, int line)
{
	return my_mallocx(size, MALLOCX_ALIGN(align));
}
// align/offset are unused: jemalloc tracks the allocation size itself.
#undef _aligned_msize_dbg
size_t _aligned_msize_dbg(void* pointer, size_t align, size_t offset)
{
	return my_msize(pointer);
}
// NOTE(review): `offset` is used as the alignment and `align` is ignored in
// the *_offset_* variants below — confirm intent.
#undef _aligned_offset_malloc_dbg
void* _aligned_offset_malloc_dbg(size_t size, size_t align, size_t offset, int block, const char* file, int line)
{
	return my_mallocx(size, MALLOCX_ALIGN(offset)); // align + offset not implemented
}
#undef _aligned_offset_realloc_dbg
void* _aligned_offset_realloc_dbg(void* pointer, size_t size, size_t align, size_t offset, int block, const char* file, int line)
{
	return my_realloc(pointer, size, MALLOCX_ALIGN(offset)); // align + offset not implemented
}
#undef _aligned_offset_recalloc_dbg
void* _aligned_offset_recalloc_dbg(void* pointer, size_t count, size_t size, size_t align, size_t offset, int block, const char* file, int line)
{
	return my_realloc(pointer, count * size, MALLOCX_ZERO | MALLOCX_ALIGN(offset)); // align + offset not implemented
}
#undef _aligned_realloc_dbg
void* _aligned_realloc_dbg(void* pointer, size_t size, size_t align, int block, const char* file, int line)
{
	return my_realloc(pointer, size, MALLOCX_ALIGN(align));
}
#undef _aligned_recalloc_dbg
void* _aligned_recalloc_dbg(void* pointer, size_t count, size_t size, size_t align, int block, const char* file, int line)
{
	return my_realloc(pointer, count * size, MALLOCX_ZERO | MALLOCX_ALIGN(align));
}
} // extern C
#endif // Override malloc and free (in Release only)
#endif
|
def eval(self, data, data_store, *, exclude=None):
    """Evaluate the stored parameters against the given data.

    Values that are callable are invoked with ``(data, data_store)`` and
    replaced by their return value; all other values are passed through
    unchanged.

    Args:
        data: Payload forwarded to each callable value.
        data_store: Store object forwarded to each callable value.
        exclude: Optional iterable of keys to leave out of the result.

    Returns:
        TaskParameters: A new parameter set with callables resolved.
    """
    exclude = [] if exclude is None else exclude
    result = {}
    for key, value in self.items():
        if key in exclude:
            continue
        # callable(None) is already False, so the former
        # `value is not None and callable(value)` guard was redundant.
        result[key] = value(data, data_store) if callable(value) else value
    return TaskParameters(result)
Visions of Mars
Visions of Mars offers a visual tour of the main geographic features of Mars as they have been recorded by twenty years of remote-sensing missions, ranging from the Viking orbiter/lander of the late 1970s through the Pathfinder orbiter/rover of the mid-1990s and up to the twin rover missions of today. The survey is driven by images, many of which have never been seen before, which introduce the reader to the plains, volcanoes, chasms, lake beds, and polar regions of the red planet. A final section summarizes the basic data that have been gathered about the planet, comparing Earth and Mars. |
// Copyright (c) 2017- PPSSPP Project.
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, version 2.0 or later versions.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License 2.0 for more details.
// A copy of the GPL 2.0 should have been included with the program.
// If not, see http://www.gnu.org/licenses/
// Official git repository and contact information can be found at
// https://github.com/hrydgard/ppsspp and http://www.ppsspp.org/.
#include "Common/File/FileUtil.h"
#include "Common/Log.h"
#include "Common/StringUtils.h"
#include "Core/CoreParameter.h"
#include "Core/System.h"
#include "GPU/Common/GPUDebugInterface.h"
#include "headless/Compare.h"
#include "headless/HeadlessHost.h"
void HeadlessHost::SendDebugScreenshot(const u8 *pixbuf, u32 w, u32 h) {
// Only if we're actually comparing.
if (comparisonScreenshot_.empty()) {
return;
}
// We ignore the current framebuffer parameters and just grab the full screen.
const static u32 FRAME_STRIDE = 512;
const static u32 FRAME_WIDTH = 480;
const static u32 FRAME_HEIGHT = 272;
GPUDebugBuffer buffer;
gpuDebug->GetCurrentFramebuffer(buffer, GPU_DBG_FRAMEBUF_DISPLAY);
const std::vector<u32> pixels = TranslateDebugBufferToCompare(&buffer, 512, 272);
ScreenshotComparer comparer(pixels, FRAME_STRIDE, FRAME_WIDTH, FRAME_HEIGHT);
double errors = comparer.Compare(comparisonScreenshot_);
if (errors < 0)
SendAndCollectOutput(comparer.GetError() + "\n");
if (errors > maxScreenshotError_)
SendAndCollectOutput(StringFromFormat("Screenshot MSE: %f\n", errors));
if (errors > maxScreenshotError_ && writeFailureScreenshot_) {
if (comparer.SaveActualBitmap(Path("__testfailure.bmp")))
SendAndCollectOutput("Actual output written to: __testfailure.bmp\n");
comparer.SaveVisualComparisonPNG(Path("__testcompare.png"));
}
}
// Forwards a message to the debug output channel and, when a collection
// buffer is configured, also appends it there for later inspection.
void HeadlessHost::SendAndCollectOutput(const std::string &output) {
	SendDebugOutput(output);
	std::string *collected = PSP_CoreParameter().collectDebugOutput;
	if (collected)
		*collected += output;
}
|
<gh_stars>0
/*
* This file is part of UDAO
* https://github.com/perbone/udao/
*
* Copyright 2013-2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package io.perbone.udao.spi;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import io.perbone.toolbox.provider.NotEnoughResourceException;
import io.perbone.toolbox.provider.OperationTimeoutException;
import io.perbone.udao.DataConstraintViolationException;
import io.perbone.udao.DataException;
import io.perbone.udao.DataManager;
import io.perbone.udao.KeyViolationException;
import io.perbone.udao.NotFoundException;
import io.perbone.udao.transaction.Transaction;
import io.perbone.udao.transaction.TransactionException;
/**
* A <code>DataProvider</code> is a factory for concrete classes of the {@link DataSource}
* interface.
* <p>
* It serves as an intermediate between {@link DataManager} and {@link DataSource} acting in the
* role of life-cycle manager for all {@link DataSource} it creates.
* <p>
* A provider is either active or inactive. A provider is active upon creation, and once it shuts
* down it remains inactive. Once a provider is inactive, any attempt to invoke a storage operation
* upon it will cause a {@link IllegalStateException} to be thrown. Whether or not a provider is
* active may be tested by invoking its {@link #isActive} method.
* <p>
* <code>DataProvider</code>s are, in general, intended to be safe for multithreaded access but it
* is really up to the implementation to support this feature or not. In any case, it should be
* documented so the developers using the concrete classes do not make mistakes.
*
* @author <NAME> <<EMAIL>>
* @since 0.1.0
*/
public interface DataProvider
{
    /**
     * The provider id.
     * <p>
     * This id should be unique application wise.
     *
     * @return the provider id
     */
    String id();

    /**
     * Returns the name for the underlying storage back-end.
     * <p>
     * This name must be unique between all providers implementations.
     *
     * @return the back-end name
     */
    String backendName();

    /**
     * Activates this provider.
     *
     * @return this concrete {@link DataProvider} implementation
     *
     * @throws IllegalStateException
     *             if shutdown is in progress
     * @throws IllegalStateException
     *             if this provider is already active
     * @throws NotEnoughResourceException
     *             if there are not enough resources to activate it
     * @throws DataException
     *             if cannot activate this provider
     *
     * @see #isShutdownInProgress
     * @see #isActive
     */
    <T extends DataProvider> T activate() throws IllegalStateException, NotEnoughResourceException, DataException;

    /**
     * Shuts down this provider.
     * <p>
     * Initiates an orderly shutdown in which previously opened data sources will all be closed and
     * no new data sources can be requested for this provider.
     * <p>
     * Invocation has no additional effect if shutdown is already in progress but will raise an
     * exception if this provider is inactive. Once inactive it remains inactive until
     * {@link #activate} is invoked again.
     *
     * @param graceTime
     *            The period allowed for housekeeping before forced shutdown is assumed
     * @param unit
     *            The grace time unit
     *
     * @return this concrete {@link DataProvider} implementation
     *
     * @throws IllegalArgumentException
     *             if either graceTime or unit are invalid
     * @throws IllegalStateException
     *             if this provider is inactive
     * @throws DataException
     *             if cannot shutdown this provider
     *
     * @see #isShutdownInProgress
     * @see #isActive
     */
    <T extends DataProvider> T shutdown(final long graceTime, final TimeUnit unit)
            throws IllegalArgumentException, IllegalStateException, DataException;

    /**
     * Returns the status of the shutdown process.
     *
     * @return <tt>true</tt> if the shutdown is in progress; <tt>false</tt> otherwise
     *
     * @see #isActive
     */
    boolean isShutdownInProgress();

    /**
     * Tells whether or not this provider is active.
     * <p>
     * It is assumed that after a successful object instantiation this method will return
     * <tt>true</tt>. Conversely for fail object instantiation this method should return
     * <tt>false</tt> despite the fact that this object may still be valid.
     * <p>
     * For an active provider asked for shutdown, it will return <tt>true</tt> until
     * {@link #isShutdownInProgress} returns <tt>true</tt>; after that it will returns
     * <tt>false</tt>.
     *
     * @return <tt>true</tt> if it is active; <tt>false</tt> otherwise
     *
     * @see #isShutdownInProgress
     */
    boolean isActive();

    /**
     * Tells whether or not this provider is read-only.
     *
     * @return <tt>true</tt> if it is read-only; <tt>false</tt> otherwise
     */
    boolean isReadOnly();

    /**
     * Tells whether or not this provider belongs to an high availability cluster environment.
     * <p>
     * This status could change during the life-cycle of the provider so the user should call this
     * method before performing any logic based on this assumption.
     * <p>
     * Not all providers support this feature and when not supported it is up to the provider what
     * to return.
     *
     * @return <tt>true</tt> if it is high availability; <tt>false</tt> otherwise
     *
     * @see #isMaster
     */
    boolean isHighAvailability();

    /**
     * Tells whether or not this provider is a master provider.
     * <p>
     * This is for cluster and master/slave environments where there are more than one storage unit
     * available and some are masters and some are slaves. This status could change during the
     * life-cycle of the provider so the user should call this method before performing any logic
     * based on this assumption.
     * <p>
     * Not all providers support this feature and when not supported it is up to the provider what
     * to return.
     *
     * @return <tt>true</tt> if it is master; <tt>false</tt> otherwise
     *
     * @see #isHighAvailability
     */
    boolean isMaster();

    /**
     * Sets the read-only value for this provider.
     *
     * @param value
     *            the read-only value for this provider
     */
    void setReadOnly(boolean value);

    /**
     * Returns a collection of resource class types this provider is capable to manage through its
     * data sources.
     *
     * @return a collection of supported bean types
     *
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws DataProviderException
     *             if this provider cannot perform the action
     *
     * @see #isShutdownInProgress
     * @see #isActive
     */
    Set<Class<?>> getTypes() throws IllegalStateException, DataProviderException;

    /**
     * Tells whether or not this provider supports transactions.
     *
     * @return <tt>true</tt> if it supports; <tt>false</tt> otherwise
     *
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    boolean isTransactionSupported() throws IllegalStateException, DataProviderException;

    /**
     * Checks the given bean type upon the underlying storage.
     * <p>
     * Providers should look if the bean exists into its declared schema.
     *
     * @param type
     *            the bean type to check
     *
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources for a new bean
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    void checkType(Class<?> type)
            throws IllegalStateException, OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /**
     * Tells whether or not this provider supports schema creation.
     *
     * @return <tt>true</tt> if it supports; <tt>false</tt> otherwise
     *
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    boolean isSchemaCreationSupported() throws IllegalStateException, DataProviderException;

    /**
     * Creates the given type upon the underlying storage.
     * <p>
     * If possible providers should create the bean into its declared schema.
     *
     * @param type
     *            the bean type to create
     *
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources for a new bean
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    void createType(Class<?> type)
            throws IllegalStateException, OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /**
     * Opens a data source.
     * <p>
     * The brand new {@link DataSource} object can access all the underlying storage under its
     * management.
     *
     * @param type
     *            The target resource class type for the underlying storage
     * @return A new {@link DataSource} object that represents a connection to the underlying storage
     *
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if type is invalid
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources for a new bean
     * @throws DataProviderException
     *             if cannot create a new {@link DataSource}
     *
     * @see #isShutdownInProgress
     * @see #isActive
     */
    DataSource openDataSource(Class<?> type) throws IllegalStateException, IllegalArgumentException,
            OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /////// ** Transaction support operations *///////

    /**
     * Flushes the pending changes associated with the given transaction to the
     * underlying storage.
     *
     * @param txn
     *            the active transaction whose pending work should be written out
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws TransactionException
     *             if the transaction is invalid or no longer active
     * @throws NotFoundException
     *             if an affected bean cannot be found
     * @throws KeyViolationException
     *             if a key constraint is violated while writing
     * @throws DataConstraintViolationException
     *             if a data constraint is violated while writing
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources to complete the operation
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    void flush(Transaction txn) throws UnsupportedOperationException, IllegalStateException, TransactionException,
            NotFoundException, KeyViolationException, DataConstraintViolationException, OperationTimeoutException,
            NotEnoughResourceException, DataProviderException;

    /**
     * Recovers transactions from the underlying storage, typically those left
     * in-doubt (prepared but neither committed nor rolled back) by a previous
     * failure.
     *
     * @return the list of recovered transactions; may be empty
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if an argument is invalid
     * @throws TransactionException
     *             if the recovery cannot be performed
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources to complete the operation
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    List<Transaction> recover() throws UnsupportedOperationException, IllegalStateException, IllegalArgumentException,
            TransactionException, OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /**
     * Begins a new transaction identified by the given id.
     *
     * @param id
     *            the id of the transaction to start
     * @return the newly started transaction
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if the id is invalid
     * @throws TransactionException
     *             if the transaction cannot be started
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources to complete the operation
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    Transaction begin(String id) throws UnsupportedOperationException, IllegalStateException, IllegalArgumentException,
            TransactionException, OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /**
     * Joins an already started transaction identified by the given id.
     *
     * @param id
     *            the id of the transaction to join
     * @return the joined transaction
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if the id is invalid
     * @throws TransactionException
     *             if no such transaction exists or it cannot be joined
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources to complete the operation
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    Transaction join(String id) throws UnsupportedOperationException, IllegalStateException, IllegalArgumentException,
            TransactionException, OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /**
     * Suspends the given active transaction so it can later be resumed.
     *
     * @param txn
     *            the transaction to suspend
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if the transaction is invalid
     * @throws TransactionException
     *             if the transaction cannot be suspended
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources to complete the operation
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    void suspend(Transaction txn) throws UnsupportedOperationException, IllegalStateException, IllegalArgumentException,
            TransactionException, OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /**
     * Resumes a previously suspended transaction.
     *
     * @param txn
     *            the transaction to resume
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if the transaction is invalid
     * @throws TransactionException
     *             if the transaction is not suspended or cannot be resumed
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources to complete the operation
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    void resume(Transaction txn) throws UnsupportedOperationException, IllegalStateException, IllegalArgumentException,
            TransactionException, OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /**
     * Prepares the given transaction for commit (first phase of a two-phase
     * commit).
     *
     * @param txn
     *            the transaction to prepare
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if the transaction is invalid
     * @throws TransactionException
     *             if the transaction cannot be prepared
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    void prepare(Transaction txn) throws UnsupportedOperationException, IllegalStateException, IllegalArgumentException,
            TransactionException, OperationTimeoutException, DataProviderException;

    /**
     * Commits the given transaction, making its changes permanent.
     *
     * @param txn
     *            the transaction to commit
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if the transaction is invalid
     * @throws TransactionException
     *             if the transaction cannot be committed
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources to complete the operation
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    void commit(Transaction txn) throws UnsupportedOperationException, IllegalStateException, IllegalArgumentException,
            TransactionException, OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /**
     * Rolls back the given transaction, discarding its changes.
     *
     * @param txn
     *            the transaction to roll back
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if the transaction is invalid
     * @throws TransactionException
     *             if the transaction cannot be rolled back
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources to complete the operation
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    void rollback(Transaction txn)
            throws UnsupportedOperationException, IllegalStateException, IllegalArgumentException, TransactionException,
            OperationTimeoutException, NotEnoughResourceException, DataProviderException;

    /**
     * Forgets the given transaction, discarding any bookkeeping this provider
     * still holds for it (typically used for heuristically completed
     * transactions).
     *
     * @param txn
     *            the transaction to forget
     * @throws UnsupportedOperationException
     *             if this provider does not support transactions
     * @throws IllegalStateException
     *             if shutdown is in progress or this provider is inactive
     * @throws IllegalArgumentException
     *             if the transaction is invalid
     * @throws TransactionException
     *             if the transaction cannot be forgotten
     * @throws OperationTimeoutException
     *             if the operation is timed out
     * @throws NotEnoughResourceException
     *             if there are not enough resources to complete the operation
     * @throws DataProviderException
     *             if this provider cannot perform the action
     */
    void forget(Transaction txn) throws UnsupportedOperationException, IllegalStateException, IllegalArgumentException,
            TransactionException, OperationTimeoutException, NotEnoughResourceException, DataProviderException;
}
/**
 * Before doing anything with multiplayer, the client must send this packet.
 * <p>
 * This is a pure signal packet: it carries no payload, so serialization and
 * deserialization are no-ops and its wire size is always zero.
 */
public class PacketSignalMultiplayer extends Packet {
    /** No payload to serialize; the packet's presence alone is the signal. */
    @Override
    public void write(ByteDataOutputStream stream) throws IOException {
        // just a signal, no data
    }

    /** No payload to deserialize. */
    @Override
    public void read(ByteDataInputStream stream, int length) throws IOException {
        // just a signal, no data
    }

    /** @return always 0 — this packet has an empty body */
    @Override
    public int size(Bancho bancho) {
        return 0;
    }
}
from contextlib import contextmanager
from . import config
import json
import flask
from functools import wraps
import psycopg2.extras
import psycopg2.extensions
# Register a global adapter so plain Python dicts passed as query parameters
# are serialized to Postgres JSON values automatically.
psycopg2.extensions.register_adapter(dict, psycopg2.extras.Json)
def with_pg_cursor(fn):
    """
    Injects an argument `cur` containing a postgres cursor into the function
    arguments, unless the caller already supplied one.

    Commits on normal return, rolls back on any exception, and always closes
    the cursor and returns the connection to the pool.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        if "cur" in kwargs:
            # pg cursor is given by the caller already, which also takes care
            # of committing
            return fn(*args, **kwargs)
        conn = config.postgres.getconn()
        cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
        kwargs["cur"] = cur
        try:
            ret = fn(*args, **kwargs)
            conn.commit()
            return ret
        except BaseException:  # roll back even on KeyboardInterrupt/SystemExit
            conn.rollback()
            raise
        finally:
            # BUG FIX: the cursor was previously leaked; close it before the
            # connection goes back to the pool.
            cur.close()
            config.postgres.putconn(conn)
    return wrapper
def json_api(fn):
    """Decorator that serializes a view's return value as a JSON response.

    A `flask.Response` return value is passed through untouched; a 2-tuple is
    treated as ``(payload, status)``; anything else is serialized with a
    ``None`` status (flask's default).
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        result = fn(*args, **kwargs)
        if isinstance(result, flask.Response):
            return result
        status = None
        if isinstance(result, tuple):
            result, status = result[0], result[1]
        # Compact separators keep the payload small; ensure_ascii=False keeps
        # non-ASCII characters readable in the UTF-8 body.
        body = json.dumps(result, ensure_ascii=False, separators=(',', ':'))
        return flask.Response(
            body,
            status=status,
            headers={
                "Content-Type": "application/json; charset=utf-8",
            }
        )
    return wrapper
def with_db_session(fn):
    """Decorator that opens a DB session and passes it as the `session` kwarg.

    Refuses to run if the caller already supplied a `session` argument.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        if "session" in kwargs:
            raise RuntimeError("A session argument already exists!")
        with db_session() as sess:
            return fn(*args, session=sess, **kwargs)
    return wrapper
@contextmanager
def db_session():
    """Context manager yielding a fresh DB session.

    Commits when the managed block finishes normally, rolls back when it
    raises (re-raising the exception), and always closes the session.
    """
    sess = config.db.create_session()
    try:
        yield sess
        sess.commit()
    except BaseException:  # explicit form of the bare `except:` — same semantics
        sess.rollback()
        raise
    finally:
        sess.close()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.