content
stringlengths
10
4.9M
/**
 * An error occurred while executing a fix.
 */
class FixError extends Error {

    /** The fix whose execution failed. */
    private final Fix origin;

    /**
     * Creates an error wrapping the underlying exception raised while the fix ran.
     *
     * @param f   the fix that was being executed
     * @param e   the underlying cause
     * @param msg a human-readable description of the failure
     */
    public FixError(Fix f, Exception e, String msg) {
        // BUG FIX: Error has no (Throwable, String) constructor; the arguments
        // were reversed. Error(String message, Throwable cause) is the correct form.
        super(msg, e);
        origin = f;
    }

    /** Returns the fix that caused this error. */
    public Fix getOrigin() {
        return origin;
    }

    @Override
    public String toString() {
        return String.format("%s - %s", super.toString(), origin.getHRName());
    }
}
/* * FSExceptionHandler.cpp * Transform SWF * * Created by smackay on Tue Feb 18 2003. * Copyright (c) 2001-2003 Flagstone Software Ltd. All rights reserved. * * This file contains Original Code and/or Modifications of Original Code as defined in * and that are subject to the Flagstone Software Source License Version 1.0 (the * 'License'). You may not use this file except in compliance with the License. Please * obtain a copy of the License at http://www.flagstonesoftware.com/licenses/source.html * and read it before using this file. * * The Original Code and all software distributed under the License are distributed on an * 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESS OR IMPLIED, AND Flagstone * HEREBY DISCLAIMS ALL SUCH WARRANTIES, INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND NONINFRINGEMENT OF THIRD PARTY * RIGHTS. Please see the License for the specific language governing rights and limitations * under the License. 
*/ #include "FSExceptionHandler.h" #include "FSMovie.h" #include "FSActionObject.h" #include "FSInputStream.h" #include "FSOutputStream.h" using namespace transform; namespace transform { FSExceptionHandler::FSExceptionHandler(FSInputStream* aStream) : FSActionObject(ExceptionHandler), registerNumber(0), variable(), tryLength(0), catchLength(0), finalLength(0), tryActions(), catchActions(), finalActions() { decodeFromStream(aStream); } FSExceptionHandler::FSExceptionHandler(const FSExceptionHandler& rhs) : FSActionObject(rhs), registerNumber(rhs.registerNumber), variable(rhs.variable), tryLength(rhs.tryLength), catchLength(rhs.catchLength), finalLength(rhs.finalLength), tryActions(rhs.tryActions.size()), catchActions(rhs.tryActions.size()), finalActions(rhs.tryActions.size()) { int index = 0; for (FSVector<FSActionObject*>::const_iterator i = rhs.tryActions.begin(); i != rhs.tryActions.end(); i++, index++) tryActions[index] = (*i)->clone(); index = 0; for (FSVector<FSActionObject*>::const_iterator i = rhs.catchActions.begin(); i != rhs.catchActions.end(); i++, index++) catchActions[index] = (*i)->clone(); index = 0; for (FSVector<FSActionObject*>::const_iterator i = rhs.finalActions.begin(); i != rhs.finalActions.end(); i++, index++) finalActions[index] = (*i)->clone(); } FSExceptionHandler::~FSExceptionHandler() { for (FSVector<FSActionObject*>::iterator i = tryActions.begin(); i != tryActions.end(); i++) { delete *i; *i = 0; } for (FSVector<FSActionObject*>::iterator i = catchActions.begin(); i != catchActions.end(); i++) { delete *i; *i = 0; } for (FSVector<FSActionObject*>::iterator i = finalActions.begin(); i != finalActions.end(); i++) { delete *i; *i = 0; } } const char* FSExceptionHandler::className() const { const static char _name[] = "FSExceptionHandler"; return _name; } FSExceptionHandler* FSExceptionHandler::clone() const { return new FSExceptionHandler(*this); } FSExceptionHandler& FSExceptionHandler::operator= (const FSExceptionHandler& rhs) { if 
(this != &rhs) { int index = 0; this->FSActionObject::operator=(rhs); registerNumber = rhs.registerNumber; variable = rhs.variable; for (FSVector<FSActionObject*>::iterator i = tryActions.begin(); i != tryActions.end(); i++) { delete *i; *i = 0; } tryActions = FSVector<FSActionObject*>(rhs.tryActions.size()); index = 0; for (FSVector<FSActionObject*>::const_iterator i = rhs.tryActions.begin(); i != rhs.tryActions.end(); i++, index++) tryActions[index] = (*i)->clone(); // Actions for the catch block for (FSVector<FSActionObject*>::iterator i = catchActions.begin(); i != catchActions.end(); i++) { delete *i; *i = 0; } catchActions = FSVector<FSActionObject*>(rhs.catchActions.size()); index = 0; for (FSVector<FSActionObject*>::const_iterator i = rhs.catchActions.begin(); i != rhs.catchActions.end(); i++, index++) catchActions[index] = (*i)->clone(); // Actions for the final block for (FSVector<FSActionObject*>::iterator i = finalActions.begin(); i != finalActions.end(); i++) { delete *i; *i = 0; } finalActions = FSVector<FSActionObject*>(rhs.finalActions.size()); index = 0; for (FSVector<FSActionObject*>::const_iterator i = rhs.finalActions.begin(); i != rhs.finalActions.end(); i++, index++) finalActions[index] = (*i)->clone(); } return *this; } void FSExceptionHandler::addToTry(const FSVector<FSActionObject*>& anArray) { for (FSVector<FSActionObject*>::const_iterator i = anArray.begin(); i != anArray.end(); ++i) tryActions.push_back(*i); } void FSExceptionHandler::addToCatch(const FSVector<FSActionObject*>& anArray) { for (FSVector<FSActionObject*>::const_iterator i = anArray.begin(); i != anArray.end(); ++i) catchActions.push_back(*i); } void FSExceptionHandler::addToFinal(const FSVector<FSActionObject*>& anArray) { for (FSVector<FSActionObject*>::const_iterator i = anArray.begin(); i != anArray.end(); ++i) finalActions.push_back(*i); } void FSExceptionHandler::setTryActions(const FSVector<FSActionObject*>& anArray) { for (FSVector<FSActionObject*>::iterator i = 
tryActions.begin(); i != tryActions.end(); i++) { delete *i; *i = 0; } tryActions = anArray; } void FSExceptionHandler::setCatchActions(const FSVector<FSActionObject*>& anArray) { for (FSVector<FSActionObject*>::iterator i = catchActions.begin(); i != catchActions.end(); i++) { delete *i; *i = 0; } catchActions = anArray; } void FSExceptionHandler::setFinalActions(const FSVector<FSActionObject*>& anArray) { for (FSVector<FSActionObject*>::iterator i = finalActions.begin(); i != finalActions.end(); i++) { delete *i; *i = 0; } finalActions = anArray; } int FSExceptionHandler::lengthInStream(FSOutputStream* aStream) { FSActionObject::lengthInStream(aStream); length += 7; length += variable.length() + 1; tryLength = 0; for (FSVector<FSActionObject*>::const_iterator i = tryActions.begin(); i != tryActions.end(); ++i) { tryLength += (*i)->lengthInStream(aStream); tryLength += ((*i)->getType() > 128) ? 3 : 1; } length += tryLength; catchLength = 0; for (FSVector<FSActionObject*>::const_iterator i = catchActions.begin(); i != catchActions.end(); ++i) { catchLength += (*i)->lengthInStream(aStream); catchLength += ((*i)->getType() > 128) ? 3 : 1; } length += catchLength; finalLength = 0; for (FSVector<FSActionObject*>::const_iterator i = finalActions.begin(); i != finalActions.end(); ++i) { finalLength += (*i)->lengthInStream(aStream); finalLength += ((*i)->getType() > 128) ? 3 : 1; } length += finalLength; return length; } void FSExceptionHandler::encodeToStream(FSOutputStream* aStream) { aStream->startEncoding(className()); FSActionObject::encodeToStream(aStream); aStream->write(0, FSStream::UnsignedBit, 5); aStream->write(variable.length() == 0 ? 1:0, FSStream::UnsignedBit, 1); aStream->write(catchActions.size() > 0 ? 1:0, FSStream::UnsignedBit, 1); aStream->write(finalActions.size() > 0 ? 
1:0, FSStream::UnsignedBit, 1); aStream->write(tryLength, FSStream::UnsignedWord, 16); aStream->write(catchLength, FSStream::UnsignedWord, 16); aStream->write(finalLength, FSStream::UnsignedWord, 16); if (variable.length() > 0) { aStream->write((byte*)variable.c_str(), variable.length()); aStream->write(0, FSStream::UnsignedWord, 8); } else { aStream->write(registerNumber, FSStream::UnsignedWord, 8); } #ifdef _DEBUG aStream->startEncoding("array"); #endif for (FSVector<FSActionObject*>::iterator i = tryActions.begin(); i != tryActions.end(); ++i) #ifdef _DEBUG FSMovie::encodeToStream(*i, aStream); #else (*i)->encodeToStream(aStream); #endif #ifdef _DEBUG aStream->endEncoding("array"); #endif // Catch Actions #ifdef _DEBUG aStream->startEncoding("array"); #endif for (FSVector<FSActionObject*>::iterator i = catchActions.begin(); i != catchActions.end(); ++i) #ifdef _DEBUG FSMovie::encodeToStream(*i, aStream); #else (*i)->encodeToStream(aStream); #endif #ifdef _DEBUG aStream->endEncoding("array"); #endif // Final actions #ifdef _DEBUG aStream->startEncoding("array"); #endif for (FSVector<FSActionObject*>::iterator i = finalActions.begin(); i != finalActions.end(); ++i) #ifdef _DEBUG FSMovie::encodeToStream(*i, aStream); #else (*i)->encodeToStream(aStream); #endif #ifdef _DEBUG aStream->endEncoding("array"); #endif aStream->endEncoding(className()); } void FSExceptionHandler::decodeFromStream(FSInputStream* aStream) { aStream->startDecoding(className()); FSActionObject::decodeFromStream(aStream); aStream->read(FSStream::UnsignedBit, 5); int containsVariable = aStream->read(FSStream::UnsignedBit, 1); int containsCatch = aStream->read(FSStream::UnsignedBit, 1); int containsFinal = aStream->read(FSStream::UnsignedBit, 1); int tryLength = aStream->read(FSStream::UnsignedWord, 16); int catchLength = aStream->read(FSStream::UnsignedWord, 16); int finalLength = aStream->read(FSStream::UnsignedWord, 16); if (containsVariable != 0) { const char* str = aStream->readString(); 
variable = str; delete [] str; } else { registerNumber = aStream->read(FSStream::UnsignedWord, 8); } #ifdef _DEBUG aStream->startDecoding("array"); #endif while (tryLength > 0) { FSActionObject* anAction = FSMovie::actionFromStream(aStream); if (anAction != 0) { tryLength -= anAction->getLength() + ((anAction->getType() >= 128) ? 3 : 1); tryActions.push_back(anAction); } } #ifdef _DEBUG aStream->endDecoding("array"); #endif #ifdef _DEBUG aStream->startDecoding("array"); #endif while (catchLength > 0) { FSActionObject* anAction = FSMovie::actionFromStream(aStream); if (anAction != 0) { catchLength -= anAction->getLength() + ((anAction->getType() >= 128) ? 3 : 1); catchActions.push_back(anAction); } } #ifdef _DEBUG aStream->endDecoding("array"); #endif #ifdef _DEBUG aStream->startDecoding("array"); #endif while (finalLength > 0) { FSActionObject* anAction = FSMovie::actionFromStream(aStream); if (anAction != 0) { finalLength -= anAction->getLength() + ((anAction->getType() >= 128) ? 3 : 1); finalActions.push_back(anAction); } } #ifdef _DEBUG aStream->endDecoding("array"); #endif aStream->endDecoding(className()); } }
<reponame>chandu1988/https-github.com-cdowney-sf-messaging-client<filename>src/main/java/io/cdsoft/sf/messaging/api/consumer/MapEventConsumer.java package io.cdsoft.sf.messaging.api.consumer; import java.util.Map; public interface MapEventConsumer extends EventConsumer<Map<String, Object>> { }
#![deny(unused_must_use)] //! Integration tests of the Subset protocol. extern crate env_logger; extern crate hbbft; #[macro_use] extern crate log; extern crate rand; #[macro_use] extern crate serde_derive; #[macro_use] extern crate rand_derive; extern crate threshold_crypto as crypto; mod network; use std::collections::{BTreeMap, BTreeSet}; use std::iter::once; use std::sync::Arc; use hbbft::subset::{Subset, SubsetOutput}; use hbbft::NetworkInfo; use network::{Adversary, MessageScheduler, NodeId, SilentAdversary, TestNetwork, TestNode}; type ProposedValue = Vec<u8>; fn test_subset<A: Adversary<Subset<NodeId>>>( mut network: TestNetwork<A, Subset<NodeId>>, inputs: &BTreeMap<NodeId, ProposedValue>, ) { let ids: Vec<NodeId> = network.nodes.keys().cloned().collect(); for id in ids { if let Some(value) = inputs.get(&id) { network.input(id, value.to_owned()); } } // Terminate when all good nodes do. while !network.nodes.values().all(TestNode::terminated) { network.step(); } // Verify that all instances output the same set. let observer: BTreeSet<_> = network.observer.outputs().iter().cloned().collect(); for node in network.nodes.values() { let mut outputs = node.outputs(); let mut actual = BTreeMap::default(); let mut has_seen_done = false; for i in outputs { assert!(!has_seen_done); match i { SubsetOutput::Contribution(k, v) => { assert!(actual.insert(k, v).is_none()); } SubsetOutput::Done => has_seen_done = true, } } assert_eq!(outputs.len(), actual.len() + 1); // The Subset algorithm guarantees that more than two thirds of the proposed elements // are in the set. 
assert!(actual.len() * 3 > inputs.len() * 2); for (id, value) in actual { assert_eq!(&inputs[id], value); } assert_eq!(outputs.iter().cloned().collect::<BTreeSet<_>>(), observer); } } fn new_network<A, F>( good_num: usize, bad_num: usize, adversary: F, ) -> TestNetwork<A, Subset<NodeId>> where A: Adversary<Subset<NodeId>>, F: Fn(BTreeMap<NodeId, Arc<NetworkInfo<NodeId>>>) -> A, { // This returns an error in all but the first test. let _ = env_logger::try_init(); let new_subset = |netinfo: Arc<NetworkInfo<NodeId>>| Subset::new(netinfo, 0).expect("new Subset instance"); TestNetwork::new(good_num, bad_num, adversary, new_subset) } #[test] fn test_subset_3_out_of_4_nodes_propose() { let proposed_value = Vec::from("Fake news"); let proposing_ids: BTreeSet<NodeId> = (0..3).map(NodeId).collect(); let proposals: BTreeMap<NodeId, ProposedValue> = proposing_ids .iter() .map(|id| (*id, proposed_value.clone())) .collect(); let adversary = |_| SilentAdversary::new(MessageScheduler::First); let network = new_network(3, 1, adversary); test_subset(network, &proposals); } #[test] fn test_subset_5_nodes_different_proposed_values() { let proposed_values = vec![ Vec::from("Alpha"), Vec::from("Bravo"), Vec::from("Charlie"), Vec::from("Delta"), Vec::from("Echo"), ]; let proposals: BTreeMap<NodeId, ProposedValue> = (0..5) .into_iter() .map(NodeId) .zip(proposed_values) .collect(); let adversary = |_| SilentAdversary::new(MessageScheduler::Random); let network = new_network(5, 0, adversary); test_subset(network, &proposals); } #[test] fn test_subset_1_node() { let proposals: BTreeMap<NodeId, ProposedValue> = once((NodeId(0), Vec::from("Node 0 is the greatest!"))).collect(); let adversary = |_| SilentAdversary::new(MessageScheduler::Random); let network = new_network(1, 0, adversary); test_subset(network, &proposals); }
def decode(self, inputs):
    """Run the decoder stack: hidden layer -> dropout -> output layer -> dropout."""
    hidden = self.decoder_hidden1(inputs)
    hidden = self.decoder_hidden1_dropout(hidden)
    out = self.decoder_output(hidden)
    return self.decoder_output_dropout(out)
// Copyright 2015-present 650 Industries. All rights reserved.

#import <Foundation/Foundation.h>

#import <ABI32_0_0EXFileSystem/ABI32_0_0EXFileSystem.h>
#import <ABI32_0_0EXCore/ABI32_0_0EXInternalModule.h>
#import <ABI32_0_0EXFileSystemInterface/ABI32_0_0EXFileSystemManagerInterface.h>

NS_ASSUME_NONNULL_BEGIN

// Internal module conforming to the ABI32_0_0EXFileSystemManager interface;
// looks up bundle locations and bundled assets for a given experience id.
@interface ABI32_0_0EXFileSystemManagerService : NSObject <ABI32_0_0EXInternalModule, ABI32_0_0EXFileSystemManager>

// Returns the bundle directory for the experience with the given id.
// NOTE(review): behaviour for unknown ids is defined in the implementation,
// which is not visible here — confirm before relying on a non-nil result.
- (NSString *)bundleDirectoryForExperienceId:(NSString *)experienceId;

// Returns the names of the assets bundled for the experience with the given id.
- (NSArray<NSString *> *)bundledAssetsForExperienceId:(NSString *)experienceId;

@end

NS_ASSUME_NONNULL_END
The answer is no. In fact, the Bible says that it is right for people to be anxious to please the ones they love.​—⁠1 Corinthians 7:​32-34; 2 Corinthians 11:28. Also, let’s face it​—⁠anxiety can be a powerful motivator. For example, suppose you will be taking a test at school next week. Anxiety might compel you to study this week​—⁠and that might help you get a better grade! A degree of anxiety can also alert you to danger. “You might feel anxious because you know that you’re taking a wrong course of action and that you need to make changes for your conscience to be at rest,” says a teenager named Serena.​—⁠Compare James 5:14. Fact of life: Anxiety can work for you​—⁠as long as it moves you to the right kind of action. But what if anxiety traps you in a maze of negative thinking? Anxiety might make you feel as if you were trapped in a maze, but someone with a different perspective can help you find a way out Example: “My mind races when I think about the different ways a stressful situation could turn out,” says 19-year-old Richard. “I play the situation over and over in my mind to the point that it makes me very anxious.” The Bible says that “a calm heart gives life to the body.” (Proverbs 14:30) On the other hand, anxiety can bring on a number of unpleasant physical symptoms, including headaches, dizziness, upset stomach, and heart palpitations. What can you do if anxiety seems to be working against you rather than for you?
Yanomami shaman publishes unique book 'The Falling Sky' November 2, 2013 The Falling Sky is a unique book by Yanomami shaman Davi Kopenawa. © Harvard University Press In the first book ever written by a Yanomami Indian, Davi Kopenawa – shaman and leading spokesman for his people – describes the rich culture, history and ways of life of the Yanomami of the Amazon rainforest. ‘The Falling Sky: Words of a Yanomami Shaman’ is a unique account of the life story of Davi Kopenawa, who heads the Yanomami Association Hutukara and who continues to defend the rights of the tribe around the world. In his book, Davi recounts his initiation as a shaman and his first encounters with outsiders – including the gold miners who flooded Yanomami land during the 1980s and caused the death of 1 in 5 Yanomami through disease and violence. He vividly describes his impressions of western culture on trips abroad, such as his first journey outside Brazil when he visited Europe at the invitation of Survival International. ‘The Falling Sky’ is an impassioned plea to respect his people’s rights and preserve the Amazon rainforest. Watch Davi speaking about ‘The Falling Sky’: Davi said, ‘This book is a message for the non-Indians. We want to teach people about our shamanic dreams … I hope that the non-Indians will learn from the book and make them think about our history.’ ‘The Falling Sky’ also challenges widespread perceptions that the Yanomami are ‘fierce’ and violent, as promulgated by the highly controversial anthropologist Napoleon Chagnon. Davi Kopenawa’s book is an impassioned plea to respect the Yanomami’s rights and preserve the Amazon rainforest. © Fiona Watson/Survival The book was written in collaboration with French anthropologist Bruce Albert, Research Director at France’s Institut de Recherche pour le Développement (IRD) and Vice President of Survival France, who has worked with Brazil’s Yanomami since 1975. 
Notes to editors: - ‘The Falling Sky’ was originally published in French by PLON. The English translation is published by Harvard University Press ($39.95/ £25.00/ €30.00) and is available to buy in Survival’s shop - Survival has supported the Yanomami for decades and led the international campaign for the demarcation of Yanomami territory, along with the Brazilian NGO Pro Yanomami Commission (CCPY). The ‘Yanomami Park’ was created in 1992 but the Yanomami territory continues to be invaded by illegal miners.
def _split_rules_into_passes(self): first_pass = [] second_pass = [] for rule in self.rules: if self._is_first_pass_rule(rule): first_pass.append(rule) else: second_pass.append(rule) return first_pass, second_pass
<gh_stars>1-10 package mxml import ( "encoding/xml" "strings" ) // Root is the structure for root MusicXML element. type Root struct { XMLName xml.Name `xml:"root"` RootAlter []RootAlter `xml:"root-alter,omitempty"` RootStep []RootStep `xml:"root-step,omitempty"` IValue string `xml:",chardata"` } // ToMXML creates a MXML. func (r *Root) ToMXML() *MXML { attributes := make(map[string]string) children := make(map[string][]*MXML) children["root-alter"] = make([]*MXML, len(r.RootAlter)) for i, c := range r.RootAlter { children["root-alter"][i] = c.ToMXML() } children["root-step"] = make([]*MXML, len(r.RootStep)) for i, c := range r.RootStep { children["root-step"][i] = c.ToMXML() } return newMXML("root", strings.TrimSpace(r.IValue), attributes, children) }
/** * A simple {@link Fragment} subclass. */ public class WebViewSimpleFragment extends StickHeaderWebViewFragment { public static WebViewSimpleFragment newInstance() { WebViewSimpleFragment fragment = new WebViewSimpleFragment(); return fragment; } public static WebViewSimpleFragment newInstance(String title) { WebViewSimpleFragment fragment = new WebViewSimpleFragment(); fragment.setTitle(title); return fragment; } @Override public WebView createWebView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return (WebView)inflater.inflate(R.layout.fragment_webview, container, false); } @Override public void bindData() { WebView wv_content = getWebView(); wv_content.loadUrl("http://www.github.com/w446108264"); wv_content.setWebViewClient(new WebViewClient() { @Override public boolean shouldOverrideUrlLoading(WebView view, String url) { view.loadUrl(url); return true; } }); } }
<filename>src/subtopics/subtopics.controller.ts import { Controller, Get, Param, Post, Delete, Put, Body, } from '@nestjs/common'; import { CreateSubtopicDto } from './dto/create-subtopic.dto'; import { UpdateSubtopicDto } from './dto/update-subtopic.dto'; import { Subtopic } from './subtopics.entity'; import { SubtopicsService } from './subtopics.service'; @Controller('subtopics') export class SubtopicsController { constructor(private readonly subtopicsService: SubtopicsService) {} @Get() getAll(): Promise<Subtopic[]> { return this.subtopicsService.getAll(); } @Get(':id') getOne(@Param('id') id: string): Promise<Subtopic> { return this.subtopicsService.getById(id); } @Post() create(@Body() createSubtopicDto: CreateSubtopicDto): Promise<Subtopic> { return this.subtopicsService.create(createSubtopicDto); } @Delete(':id') remove(@Param('id') id: string): Promise<Subtopic> { return this.subtopicsService.remove(id); } @Put(':id') update( @Body() updateSubtopicDto: UpdateSubtopicDto, @Param('id') id: string, ): Promise<Subtopic> { return this.subtopicsService.update(id, updateSubtopicDto); } }
# Read the colour codes from one line of input.
color_codes = [int(token) for token in input().split()]

# Count the distinct colours that appear.
distinct_colors = len(set(color_codes))

# Report how many of the 4 colours are missing.
print(4 - distinct_colors)
class Testing {
    public static void main(String[] args) {
        // BUG FIX: the loop counter was never incremented, so this loop ran
        // forever; i++ restores the intended ten iterations.
        for (int i = 0; i < 10; i++) {
            System.out.println("testing");
        }
    }
}
import os, sys, torch, random, copy, json, time, numpy as np
import os.path as osp
from shutil import copyfile
from collections import namedtuple


def prepare_seed(rand_seed):
    """Seed Python, NumPy and PyTorch (CPU and every GPU) RNGs with rand_seed."""
    random.seed(rand_seed)
    np.random.seed(rand_seed)
    torch.manual_seed(rand_seed)
    torch.cuda.manual_seed(rand_seed)
    torch.cuda.manual_seed_all(rand_seed)


def prepare_logger(xargs):
    """Create a Logger under xargs.save_dir and log the arguments plus environment info.

    Returns the created Logger.
    """
    # Imported lazily: Pillow is only needed here for version logging, and this
    # keeps the module importable when Pillow is absent.
    import PIL
    args = copy.deepcopy(xargs)
    from logger import Logger
    logger = Logger(args.save_dir, args.rand_seed)
    logger.log('Main Function with logger : {:}'.format(logger))
    logger.log('Arguments : -------------------------------')
    for name, value in args._get_kwargs():
        logger.log('{:16} : {:}'.format(name, value))
    logger.log("Python Version : {:}".format(sys.version.replace('\n', ' ')))
    logger.log("Pillow Version : {:}".format(PIL.__version__))
    logger.log("PyTorch Version : {:}".format(torch.__version__))
    logger.log("cuDNN Version : {:}".format(torch.backends.cudnn.version()))
    logger.log("CUDA available : {:}".format(torch.cuda.is_available()))
    logger.log("CUDA GPU numbers : {:}".format(torch.cuda.device_count()))
    logger.log("CUDA_VISIBLE_DEVICES : {:}".format(os.environ['CUDA_VISIBLE_DEVICES'] if 'CUDA_VISIBLE_DEVICES' in os.environ else 'None'))
    return logger


def get_machine_info():
    """Return a multi-line string describing the Python/Torch/CUDA environment."""
    import PIL  # lazy import: only needed for version reporting
    info = "Python Version : {:}".format(sys.version.replace('\n', ' '))
    info += "\nPillow Version : {:}".format(PIL.__version__)
    info += "\nPyTorch Version : {:}".format(torch.__version__)
    info += "\ncuDNN Version : {:}".format(torch.backends.cudnn.version())
    info += "\nCUDA available : {:}".format(torch.cuda.is_available())
    info += "\nCUDA GPU numbers : {:}".format(torch.cuda.device_count())
    if 'CUDA_VISIBLE_DEVICES' in os.environ:
        info += "\nCUDA_VISIBLE_DEVICES={:}".format(os.environ['CUDA_VISIBLE_DEVICES'])
    else:
        info += "\nDoes not set CUDA_VISIBLE_DEVICES"
    return info


# Type tags accepted by convert_param below.
support_types = ('str', 'int', 'bool', 'float', 'none')


def convert_param(original_lists):
    """Convert a ``[ctype, value]`` pair from a JSON config into a Python value.

    ``ctype`` is one of ``support_types``; ``value`` may be a scalar or a list,
    and the result mirrors that shape.
    """
    assert isinstance(original_lists, list), 'The type is not right : {:}'.format(original_lists)
    ctype, value = original_lists[0], original_lists[1]
    assert ctype in support_types, 'Ctype={:}, support={:}'.format(ctype, support_types)
    is_list = isinstance(value, list)
    if not is_list:
        value = [value]
    outs = []
    for x in value:
        if ctype == 'int':
            x = int(x)
        elif ctype == 'str':
            x = str(x)
        elif ctype == 'bool':
            x = bool(int(x))
        elif ctype == 'float':
            x = float(x)
        elif ctype == 'none':
            if x.lower() != 'none':
                raise ValueError('For the none type, the value must be none instead of {:}'.format(x))
            x = None
        else:
            raise TypeError('Does not know this type : {:}'.format(ctype))
        outs.append(x)
    if not is_list:
        outs = outs[0]
    return outs


def load_config(path, extra, logger):
    """Load a JSON config file, convert its typed values, merge ``extra`` and
    return the result as a namedtuple."""
    path = str(path)
    if hasattr(logger, 'log'):
        logger.log(path)
    assert os.path.exists(path), 'Can not find {:}'.format(path)
    # Reading data back
    with open(path, 'r') as f:
        data = json.load(f)
    content = {k: convert_param(v) for k, v in data.items()}
    assert extra is None or isinstance(extra, dict), 'invalid type of extra : {:}'.format(extra)
    if isinstance(extra, dict):
        content = {**content, **extra}
    Arguments = namedtuple('Configure', ' '.join(content.keys()))
    content = Arguments(**content)
    if hasattr(logger, 'log'):
        logger.log('{:}'.format(content))
    return content


def dict2config(xdict, logger):
    """Wrap a plain dict into a namedtuple config, logging it when possible."""
    assert isinstance(xdict, dict), 'invalid type : {:}'.format(type(xdict))
    Arguments = namedtuple('Configure', ' '.join(xdict.keys()))
    content = Arguments(**xdict)
    if hasattr(logger, 'log'):
        logger.log('{:}'.format(content))
    return content


def get_optim_scheduler(parameters, config):
    """Build (optimizer, scheduler, criterion, criterion_smooth) from a config.

    Supports SGD/RMSprop optimizers, cos/multistep/exponential/linear schedulers
    and Softmax/SmoothSoftmax criteria; raises ValueError for anything else.
    """
    assert hasattr(config, 'optim') and hasattr(config, 'scheduler') and hasattr(config, 'criterion'), 'config must have optim / scheduler / criterion keys instead of {:}'.format(config)
    if config.optim == 'SGD':
        optim = torch.optim.SGD(parameters, config.LR, momentum=config.momentum, weight_decay=config.decay, nesterov=config.nesterov)
    elif config.optim == 'RMSprop':
        optim = torch.optim.RMSprop(parameters, config.LR, momentum=config.momentum, weight_decay=config.decay)
    else:
        raise ValueError('invalid optim : {:}'.format(config.optim))

    if config.scheduler == 'cos':
        from optimizers import CosineAnnealingLR
        T_max = getattr(config, 'T_max', config.epochs)
        scheduler = CosineAnnealingLR(optim, config.warmup, config.warmup + config.epochs, T_max, config.eta_min)
    elif config.scheduler == 'multistep':
        from optimizers import MultiStepLR
        scheduler = MultiStepLR(optim, config.warmup, config.warmup + config.epochs, config.milestones, config.gammas)
    elif config.scheduler == 'exponential':
        from optimizers import ExponentialLR
        scheduler = ExponentialLR(optim, config.warmup, config.warmup + config.epochs, config.gamma)
    elif config.scheduler == 'linear':
        from optimizers import LinearLR
        T_max = getattr(config, 'T_max', config.epochs)
        scheduler = LinearLR(optim, config.warmup, config.warmup + config.epochs, T_max, config.LR, config.LR_min)
    else:
        raise ValueError('invalid scheduler : {:}'.format(config.scheduler))

    if config.criterion == 'Softmax':
        criterion = torch.nn.CrossEntropyLoss()
        criterion_smooth = None
    elif config.criterion == 'SmoothSoftmax':
        criterion = torch.nn.CrossEntropyLoss()
        from optimizers import CrossEntropyLabelSmooth
        criterion_smooth = CrossEntropyLabelSmooth(config.class_num, config.label_smooth)
    else:
        raise ValueError('invalid criterion : {:}'.format(config.criterion))
    return optim, scheduler, criterion, criterion_smooth


class AverageMeter(object):
    """Computes and stores the average and current value"""

    def __init__(self):
        self.reset()

    def reset(self):
        # val: most recent value; sum/count/avg: weighted running aggregates.
        self.val = 0.0
        self.avg = 0.0
        self.sum = 0.0
        self.count = 0.0

    def update(self, val, n=1):
        """Record value ``val`` with weight ``n`` and refresh the average."""
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count

    def __repr__(self):
        return ('{name}(val={val}, avg={avg}, count={count})'.format(name=self.__class__.__name__, **self.__dict__))


def convert_secs2time(epoch_time, return_str=False):
    """Convert seconds into (hours, minutes, seconds), or '[HH:MM:SS]' if return_str."""
    need_hour = int(epoch_time / 3600)
    need_mins = int((epoch_time - 3600 * need_hour) / 60)
    need_secs = int(epoch_time - 3600 * need_hour - 60 * need_mins)
    if return_str:
        # BUG FIX (idiom): the formatted result was stored in a local named
        # `str`, shadowing the builtin; renamed to time_str.
        time_str = '[{:02d}:{:02d}:{:02d}]'.format(need_hour, need_mins, need_secs)
        return time_str
    else:
        return need_hour, need_mins, need_secs


def time_string():
    """Return the current UTC time as a bracketed '[YYYY-MM-DD HH:MM:SS]' string."""
    ISOTIMEFORMAT = '%Y-%m-%d %X'
    string = '[{:}]'.format(time.strftime(ISOTIMEFORMAT, time.gmtime(time.time())))
    return string


def obtain_accuracy(output, target, topk=(1,)):
    """Computes the precision@k for the specified values of k"""
    maxk = max(topk)
    batch_size = target.size(0)

    _, pred = output.topk(maxk, 1, True, True)
    pred = pred.t()
    correct = pred.eq(target.view(1, -1).expand_as(pred))

    res = []
    for k in topk:
        # BUG FIX: correct[:k] slices a transposed tensor and can be
        # non-contiguous; .view(-1) raises on non-contiguous tensors, so use
        # .reshape(-1), which copies only when necessary.
        correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True)
        res.append(correct_k.mul_(100.0 / batch_size))
    return res


def save_checkpoint(state, filename, logger):
    """torch.save ``state`` to ``filename``, removing any existing file first.

    Returns the filename after verifying the file exists.
    """
    if osp.isfile(filename):
        if hasattr(logger, 'log'):
            logger.log('Find {:} exist, delete is at first before saving'.format(filename))
        os.remove(filename)
    torch.save(state, filename)
    assert osp.isfile(filename), 'save filename : {:} failed, which is not found.'.format(filename)
    if hasattr(logger, 'log'):
        logger.log('save checkpoint into {:}'.format(filename))
    return filename


def copy_checkpoint(src, dst, logger):
    """Copy a checkpoint file from ``src`` to ``dst``, replacing any existing ``dst``."""
    if osp.isfile(dst):
        if hasattr(logger, 'log'):
            logger.log('Find {:} exist, delete is at first before saving'.format(dst))
        os.remove(dst)
    copyfile(src, dst)
    if hasattr(logger, 'log'):
        logger.log('copy the file from {:} into {:}'.format(src, dst))


def compute_num_unpruned_edges(genos):
    """Count edges of the form (None, -1) across all steps of all genotypes.

    Each step is a pair of (op, index) edges; an edge counts when its op is
    None and its index is -1.
    """
    ret = 0
    for geno in genos:
        for step in geno:
            l, r = step[0], step[1]
            if l[0] is None and l[1] == -1:
                ret += 1
            if r[0] is None and r[1] == -1:
                ret += 1
    return ret
#include <stdio.h>

#define SIZE 10

/* prototype declaration */
void init();
void operation();
void push(int data);
int pop();

/* Fixed-capacity stack storage; sp is the index of the next free slot. */
int s[SIZE];
int sp;

int main(int argc, const char *argv[])
{
    init();
    operation();
    return 0;
}

/* Reset the stack: empty it and zero the backing array. */
void init()
{
    int i;

    sp = 0;
    for (i = 0; i < SIZE; i++)
        s[i] = 0;
}

/*
 * Read integers until EOF: a non-zero value is pushed, a zero pops the
 * top value and prints it on its own line.
 */
void operation()
{
    int car;

    while (scanf("%d", &car) != EOF) {
        if (car)
            push(car);
        else
            printf("%d\n", pop());
    }
}

/*
 * Push data onto the stack.
 * BUG FIX: overflow was previously unchecked and wrote past the end of
 * s[]; a full stack now drops the value with a diagnostic instead of
 * invoking undefined behaviour.
 */
void push(int data)
{
    if (sp >= SIZE) {
        fprintf(stderr, "stack overflow: %d dropped\n", data);
        return;
    }
    s[sp++] = data;
}

/*
 * Pop and return the top value.
 * BUG FIX: underflow was previously unchecked and read s[-1]; an empty
 * stack now reports the error and returns 0.
 */
int pop()
{
    if (sp <= 0) {
        fprintf(stderr, "stack underflow\n");
        return 0;
    }
    return s[--sp];
}
// addToValues adds the HostRecord fields to values. Ignores read only fields. func addToValues(host HostRecord, hostNumber int, values *url.Values) { setValueIfPresent := func(key, value string) { if value != "" && value != "0" { keyWithNumber := fmt.Sprintf("%s%d", key, hostNumber) values.Set(keyWithNumber, value) } } setValueIfPresent("HostName", host.Name) setValueIfPresent("RecordType", string(host.RecordType)) setValueIfPresent("Address", string(host.Address)) setValueIfPresent("MXPref", host.MXPref) setValueIfPresent("TTL", strconv.Itoa(int(host.TTL))) }
/* Convert ethernet clock ticks to microseconds */
static unsigned int gfar_ticks2usecs(struct gfar_private *priv,
				     unsigned int ticks)
{
	unsigned int factor;

	/* Pick the per-link-speed conversion factor; unrecognised speeds
	 * fall back to the 10 Mbit value, as before. */
	if (priv->phydev->speed == SPEED_1000)
		factor = GFAR_GBIT_TIME;
	else if (priv->phydev->speed == SPEED_100)
		factor = GFAR_100_TIME;
	else
		factor = GFAR_10_TIME;

	return (ticks * factor) / 1000;
}
package rds import ( "encoding/json" "fmt" "strings" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/cloudwatchlogs" "github.com/aws/aws-sdk-go/service/rds" "github.com/pganalyze/collector/config" "github.com/pganalyze/collector/state" "github.com/pganalyze/collector/util" "github.com/pganalyze/collector/util/awsutil" ) // GetSystemState - Gets system information about an Amazon RDS instance func GetSystemState(config config.ServerConfig, logger *util.Logger) (system state.SystemState) { system.Info.Type = state.AmazonRdsSystem sess := awsutil.GetAwsSession(config) rdsSvc := rds.New(sess) instance, err := awsutil.FindRdsInstance(config, sess) if err != nil { logger.PrintError("Rds/System: Encountered error when looking for instance: %v\n", err) return } if instance == nil { logger.PrintWarning("Could not find RDS instance in AWS, skipping system data") return } system.Info.AmazonRds = &state.SystemInfoAmazonRds{ Region: config.AwsRegion, InstanceClass: util.StringPtrToString(instance.DBInstanceClass), InstanceID: util.StringPtrToString(instance.DBInstanceIdentifier), Status: util.StringPtrToString(instance.DBInstanceStatus), AvailabilityZone: util.StringPtrToString(instance.AvailabilityZone), PubliclyAccessible: util.BoolPtrToBool(instance.PubliclyAccessible), MultiAz: util.BoolPtrToBool(instance.MultiAZ), SecondaryAvailabilityZone: util.StringPtrToString(instance.SecondaryAvailabilityZone), CaCertificate: util.StringPtrToString(instance.CACertificateIdentifier), AutoMinorVersionUpgrade: util.BoolPtrToBool(instance.AutoMinorVersionUpgrade), PreferredMaintenanceWindow: util.StringPtrToString(instance.PreferredMaintenanceWindow), PreferredBackupWindow: util.StringPtrToString(instance.PreferredBackupWindow), LatestRestorableTime: util.TimePtrToTime(instance.LatestRestorableTime), BackupRetentionPeriodDays: int32(util.IntPtrToInt(instance.BackupRetentionPeriod)), MasterUsername: util.StringPtrToString(instance.MasterUsername), InitialDbName: 
util.StringPtrToString(instance.DBName), CreatedAt: util.TimePtrToTime(instance.InstanceCreateTime), } group := instance.DBParameterGroups[0] pgssParam, _ := awsutil.GetRdsParameter(group, "shared_preload_libraries", rdsSvc) if pgssParam != nil && pgssParam.ParameterValue != nil { system.Info.AmazonRds.ParameterPgssEnabled = strings.Contains(*pgssParam.ParameterValue, "pg_stat_statements") } else { system.Info.AmazonRds.ParameterPgssEnabled = false } system.Info.AmazonRds.ParameterApplyStatus = *group.ParameterApplyStatus dbInstanceID := *instance.DBInstanceIdentifier cloudWatchReader := awsutil.NewRdsCloudWatchReader(sess, logger, dbInstanceID) system.Disks = make(state.DiskMap) system.Disks["default"] = state.Disk{ DiskType: util.StringPtrToString(instance.StorageType), ProvisionedIOPS: uint32(util.IntPtrToInt(instance.Iops)), Encrypted: util.BoolPtrToBool(instance.StorageEncrypted), } system.DiskStats = make(state.DiskStatsMap) system.DiskStats["default"] = state.DiskStats{ DiffedOnInput: true, DiffedValues: &state.DiffedDiskStats{ ReadOperationsPerSecond: float64(cloudWatchReader.GetRdsIntMetric("ReadIOPS", "Count/Second")), WriteOperationsPerSecond: float64(cloudWatchReader.GetRdsIntMetric("WriteIOPS", "Count/Second")), BytesReadPerSecond: float64(cloudWatchReader.GetRdsIntMetric("ReadThroughput", "Bytes/Second")), BytesWrittenPerSecond: float64(cloudWatchReader.GetRdsIntMetric("WriteThroughput", "Bytes/Second")), AvgQueueSize: int32(cloudWatchReader.GetRdsIntMetric("DiskQueueDepth", "Count")), AvgReadLatency: cloudWatchReader.GetRdsFloatMetric("ReadLatency", "Seconds") * 1000, AvgWriteLatency: cloudWatchReader.GetRdsFloatMetric("WriteLatency", "Seconds") * 1000, }, } system.XlogUsedBytes = uint64(cloudWatchReader.GetRdsIntMetric("TransactionLogsDiskUsage", "Bytes")) if instance.EnhancedMonitoringResourceArn != nil { system.Info.AmazonRds.EnhancedMonitoring = true svc := cloudwatchlogs.New(sess) params := &cloudwatchlogs.GetLogEventsInput{ LogGroupName: 
aws.String("RDSOSMetrics"), LogStreamName: instance.DbiResourceId, Limit: aws.Int64(1), } resp, err := svc.GetLogEvents(params) if err != nil { fmt.Printf("Error: %v\n", err) return } if len(resp.Events) > 0 { event := resp.Events[0] str := event.Message if str != nil { var osSnapshot RdsOsSnapshot err = json.Unmarshal([]byte(*str), &osSnapshot) if err != nil { fmt.Printf("Error: %v\n", err) return } system.CPUStats = make(state.CPUStatisticMap) system.CPUStats["all"] = state.CPUStatistic{ DiffedOnInput: true, DiffedValues: &state.DiffedSystemCPUStats{ GuestPercent: float64(osSnapshot.CPUUtilization.Guest), IdlePercent: float64(osSnapshot.CPUUtilization.Idle), IrqPercent: float64(osSnapshot.CPUUtilization.Irq), IowaitPercent: float64(osSnapshot.CPUUtilization.Wait), SystemPercent: float64(osSnapshot.CPUUtilization.System), UserPercent: float64(osSnapshot.CPUUtilization.User), StealPercent: float64(osSnapshot.CPUUtilization.Steal), NicePercent: float64(osSnapshot.CPUUtilization.Nice), }, } system.CPUInfo.SocketCount = 1 system.CPUInfo.LogicalCoreCount = int32(osSnapshot.NumVCPUs) system.CPUInfo.PhysicalCoreCount = int32(osSnapshot.NumVCPUs) system.Scheduler.Loadavg1min = float64(osSnapshot.LoadAverageMinute.One) system.Scheduler.Loadavg5min = float64(osSnapshot.LoadAverageMinute.Five) system.Scheduler.Loadavg15min = float64(osSnapshot.LoadAverageMinute.Fifteen) system.Memory.ActiveBytes = uint64(osSnapshot.Memory.Active * 1024) system.Memory.BuffersBytes = uint64(osSnapshot.Memory.Buffers * 1024) system.Memory.CachedBytes = uint64(osSnapshot.Memory.Cached * 1024) system.Memory.DirtyBytes = uint64(osSnapshot.Memory.Dirty * 1024) system.Memory.FreeBytes = uint64(osSnapshot.Memory.Free * 1024) system.Memory.HugePagesFree = uint64(osSnapshot.Memory.HugePagesFree) system.Memory.HugePagesReserved = uint64(osSnapshot.Memory.HugePagesRsvd) system.Memory.HugePagesSizeBytes = uint64(osSnapshot.Memory.HugePagesSize * 1024) system.Memory.HugePagesSurplus = 
uint64(osSnapshot.Memory.HugePagesSurp) system.Memory.HugePagesTotal = uint64(osSnapshot.Memory.HugePagesTotal) system.Memory.InactiveBytes = uint64(osSnapshot.Memory.Inactive * 1024) system.Memory.MappedBytes = uint64(osSnapshot.Memory.Mapped * 1024) system.Memory.PageTablesBytes = uint64(osSnapshot.Memory.PageTables * 1024) system.Memory.SlabBytes = uint64(osSnapshot.Memory.Slab * 1024) system.Memory.SwapTotalBytes = uint64(osSnapshot.Swap.Total) * 1024 system.Memory.SwapUsedBytes = uint64(osSnapshot.Swap.Total-osSnapshot.Swap.Free) * 1024 system.Memory.TotalBytes = uint64(osSnapshot.Memory.Total * 1024) system.Memory.WritebackBytes = uint64(osSnapshot.Memory.Writeback * 1024) system.NetworkStats = make(state.NetworkStatsMap) for _, networkIf := range osSnapshot.Network { // Practically this always has one entry, and oddly enough we don't have // the throughput numbers on a per interface basis... system.NetworkStats[networkIf.Interface] = state.NetworkStats{ DiffedOnInput: true, DiffedValues: &state.DiffedNetworkStats{ ReceiveThroughputBytesPerSecond: uint64(cloudWatchReader.GetRdsIntMetric("NetworkReceiveThroughput", "Bytes/Second")), TransmitThroughputBytesPerSecond: uint64(cloudWatchReader.GetRdsIntMetric("NetworkTransmitThroughput", "Bytes/Second")), }, } } for _, disk := range osSnapshot.DiskIO { system.DiskStats["default"].DiffedValues.UtilizationPercent = float64(disk.Util) } system.DiskPartitions = make(state.DiskPartitionMap) for _, diskPartition := range osSnapshot.FileSystems { system.DiskPartitions[diskPartition.MountPoint] = state.DiskPartition{ DiskName: "default", PartitionName: diskPartition.Name, UsedBytes: uint64(diskPartition.Used * 1024), TotalBytes: uint64(diskPartition.Total * 1024), } } } } } else { system.CPUStats = make(state.CPUStatisticMap) system.CPUStats["all"] = state.CPUStatistic{ DiffedOnInput: true, DiffedValues: &state.DiffedSystemCPUStats{ UserPercent: cloudWatchReader.GetRdsFloatMetric("CPUUtilization", "Percent"), }, } 
system.NetworkStats = make(state.NetworkStatsMap) system.NetworkStats["default"] = state.NetworkStats{ DiffedOnInput: true, DiffedValues: &state.DiffedNetworkStats{ ReceiveThroughputBytesPerSecond: uint64(cloudWatchReader.GetRdsIntMetric("NetworkReceiveThroughput", "Bytes/Second")), TransmitThroughputBytesPerSecond: uint64(cloudWatchReader.GetRdsIntMetric("NetworkTransmitThroughput", "Bytes/Second")), }, } system.Memory.FreeBytes = uint64(cloudWatchReader.GetRdsIntMetric("FreeableMemory", "Bytes")) system.Memory.SwapUsedBytes = uint64(cloudWatchReader.GetRdsIntMetric("SwapUsage", "Bytes")) var bytesTotal, bytesFree int64 if instance.AllocatedStorage != nil { bytesTotal = *instance.AllocatedStorage * 1024 * 1024 * 1024 bytesFree = cloudWatchReader.GetRdsIntMetric("FreeStorageSpace", "Bytes") system.DiskPartitions = make(state.DiskPartitionMap) system.DiskPartitions["/"] = state.DiskPartition{ DiskName: "default", UsedBytes: uint64(bytesTotal - bytesFree), TotalBytes: uint64(bytesTotal), } } } return }
def genKernelMapSupport(innerKernelMap, outerKernelMap, supportThreshold=-1):
    """Return a boolean support mask over the inner kernel map.

    Per-position absolute energy (summed over axis 2) is normalized by its
    mean, scaled by the analogously normalized total energy of the outer
    kernel map, and compared against ``supportThreshold``.
    """
    inner_energy = np.sum(np.abs(innerKernelMap), 2)
    inner_ratio = inner_energy / np.mean(inner_energy)
    outer_energy = np.sum(np.abs(outerKernelMap))
    outer_ratio = outer_energy / np.mean(outer_energy)
    return (inner_ratio * outer_ratio) > supportThreshold
/**
 * simulates the motion of the ball over one time step, reflecting it off the
 * arena walls in x/y and off the paddles in z
 * @param dt number of seconds
 * @param p1Paddle p1 paddle
 * @param p2Paddle p2 paddle
 * @throws PlayerScoreException if the ball goes out of bounds in a way that a player scores a point
 */
public void tick(double dt, Paddle p1Paddle, Paddle p2Paddle) throws PlayerScoreException {
    // X axis: reflect the projected position about the wall it would cross.
    double nextX = x + dt * vx;
    if (nextX + radius > bounds.XMax()) {
        x = 2 * bounds.XMax() - nextX;
        vx = -vx;
    } else if (nextX - radius < bounds.XMin()) {
        x = 2 * bounds.XMin() - nextX;
        vx = -vx;
    } else {
        x = nextX;
    }

    // Y axis: same reflection rule as X.
    double nextY = y + dt * vy;
    if (nextY + radius > bounds.YMax()) {
        y = 2 * bounds.YMax() - nextY;
        vy = -vy;
    } else if (nextY - radius < bounds.YMin()) {
        y = 2 * bounds.YMin() - nextY;
        vy = -vy;
    } else {
        y = nextY;
    }

    // Z axis: leaving the bounds means a score; otherwise a paddle in range
    // bounces the ball back.
    double nextZ = z + dt * vz;
    if (nextZ + radius > bounds.ZMax()) {
        throw new PlayerScoreException(PlayerPosition.P2);
    } else if (nextZ - radius < bounds.ZMin()) {
        throw new PlayerScoreException(PlayerPosition.P1);
    } else {
        if (p1Paddle.ballInRange(this)) {
            z = 2 * p1Paddle.zMin() - nextZ;
            vz = -vz;
        } else if (p2Paddle.ballInRange(this)) {
            z = 2 * p2Paddle.zMax() - nextZ;
            vz = -vz;
        } else {
            z = nextZ;
        }
    }
}
<reponame>paiuolo/django-sso-app
"""
django envs: SESSION_ENGINE, SESSION_COOKIE_NAME, SESSION_COOKIE_PATH, \
SESSION_COOKIE_DOMAIN, SESSION_SAVE_EVERY_REQUEST, SESSION_COOKIE_SECURE, SESSION_COOKIE_HTTPONLY, \
SESSION_COOKIE_SAMESITE
"""
import environ

env = environ.Env()

# common
# DEBUG defaults to True here — production deployments must set DJANGO_DEBUG=False.
DEBUG = env.bool("DJANGO_DEBUG", default=True)

APP_DOMAIN = env("APP_DOMAIN", default="localhost:8000")
# Cookie domain defaults to the app domain with any port suffix stripped.
COOKIE_DOMAIN = env("COOKIE_DOMAIN", default=APP_DOMAIN.split(':')[0])
# Plain http is only the default while debugging; https otherwise.
ACCOUNT_DEFAULT_HTTP_PROTOCOL = env("ACCOUNT_DEFAULT_HTTP_PROTOCOL",
                                    default='http' if DEBUG else 'https')

I18N_PATH_ENABLED = env.bool('I18N_PATH_ENABLED', default=False)
REDIS_ENABLED = env.bool('REDIS_ENABLED', default=False)

# django-sso-app
# Names of enabled allauth social providers, e.g. ['google'].
DJANGO_SSO_APP_SOCIALACCOUNT_PROVIDERS = env.list('DJANGO_SSO_APP_SOCIALACCOUNT_PROVIDERS',
                                                  default=[])

# Deployment "shapes" this package supports; DJANGO_SSO_APP_SHAPE must be one
# of AVAILABLE_SHAPES.
BACKEND_SHAPES = ('backend_standalone',
                  'backend_only', 'backend_only_apigateway',
                  'backend_app', 'backend_app_apigateway')
APP_SHAPES = ('app', 'app_persistence',
              'app_apigateway', 'app_persistence_apigateway')

AVAILABLE_SHAPES = BACKEND_SHAPES + APP_SHAPES

DJANGO_SSO_APP_SHAPE = env('DJANGO_SSO_APP_SHAPE', default='backend_only')

DJANGO_SSO_APP_BASE_DJANGO_APPS = [
    'django_sso_app',
]

# Apps to append to INSTALLED_APPS; provider-specific allauth apps are added
# conditionally below.
DJANGO_SSO_APP_DJANGO_APPS = DJANGO_SSO_APP_BASE_DJANGO_APPS + [
    'allauth',
    'allauth.account',
    'allauth.socialaccount',

    'rest_framework.authtoken',

    'django_countries',
    'treebeard',
]

if 'google' in DJANGO_SSO_APP_SOCIALACCOUNT_PROVIDERS:
    DJANGO_SSO_APP_DJANGO_APPS += [
        'allauth.socialaccount.providers.google',
    ]

# django
# SESSION_ENGINE = 'django.contrib.sessions.backends.cache'

# Countries pinned to the top of django_countries selects.
COUNTRIES_FIRST = [
    'IT',
]
def __loadStaticData(self):
    """Populate the generic crawler info and version number exactly once.

    No-op when the data has already been loaded or when there are no
    crawlers to read from.
    """
    if self.__loadedStaticData or not self.crawlers():
        return

    self.__loadedStaticData = True

    # Only the first crawler is inspected — presumably the static info is
    # identical across crawlers; TODO confirm with the caller's contract.
    crawler = self.crawlers()[0]
    for info in self.__genericCrawlerInfo:
        if info in crawler.varNames():
            self.addInfo(info, crawler.var(info))

    # Assumes the version path's basename looks like "v<digits>"; the
    # leading character is stripped before the int conversion.
    self.__version = int(os.path.basename(self.versionPath())[1:])
<reponame>letuananh/chirptext # -*- coding: utf-8 -*- # This code is a part of chirptext library: https://github.com/letuananh/chirptext # :copyright: (c) 2012 <NAME> <<EMAIL>> # :license: MIT, see LICENSE for more details. from . import mecab from .. import texttaglib as ttl __JANOME_AVAILABLE = False try: from janome.tokenizer import Tokenizer __JANOME_AVAILABLE = True except Exception as e: pass __JANOME_TOKENIZER = None def _get_tokenizer(): global __JANOME_TOKENIZER if __JANOME_TOKENIZER is None and __JANOME_AVAILABLE: __JANOME_TOKENIZER = Tokenizer() return __JANOME_TOKENIZER def janome_available(): ''' Check if janome package is installed ''' return __JANOME_AVAILABLE def _janome_parse_token_dicts(content, *args, **kwargs): """ Parse a sentence using janome and return a mecab-compatible list of token dicts """ _tokenizer = _get_tokenizer() tokens = _tokenizer.tokenize(content) # format: same as mecab # 表層形,品詞,品詞細分類1,品詞細分類2,品詞細分類3,活用形,活用型,原形,読み,発音 # extra[0] is pos with 4 parts token_dicts = [] for token in tokens: if token.extra is None: if token.surface in ('\r', '\n', '\r\n'): continue elif token.surface == ',': features = (',', '記号', '読点' , '*', '*', '*', '*', ',', ',', ',') else: features = [token.surface] + [''] * 9 else: features = (token.surface, *token.extra[0].split(','), *token.extra[1:]) if len(features) < 10: features += [''] * (10 - len(features)) token_dicts.append({k: v for k, v in zip(mecab._MECAB_FIELDS, features)}) return token_dicts def parse(text, doc=None, sent_id=None, **kwargs): token_dicts = _janome_parse_token_dicts(text) return mecab._make_sent(text, token_dicts, doc=doc, sent_id=sent_id, **kwargs) def parse_doc(text, splitlines=True, auto_strip=True, doc_name='', **kwargs): """ Parse a Japanese document with multiple sentences using Mecab """ doc = ttl.Document(name=doc_name) if not splitlines: token_dicts = _janome_parse_token_dicts(text) return mecab._tokenize_token_dicts(token_dicts, text, auto_strip, doc=doc) else: for line in 
text.splitlines(): parse(line.strip() if auto_strip else line, doc=doc, **kwargs) return doc def tokenize(text, **kwargs): """ Sentence to a list of tokens (string) """ return list(Tokenizer(wakati=True).tokenize(text, wakati=True)) def tokenize_sent(content, **kwargs): """ Tokenize a Japanese text into sentences """ doc = parse_doc(content, splitlines=False, **kwargs) return [s.text for s in doc]
Near-surface turbulence effects on electro-optical propagation in an arid environment In the framework of a NATO research group, Fraunhofer IOSB and partners conducted a field trial in an arid shrub-land environment in southern New Mexico (USA). The group investigates environmental limitations of fielded EO-TDAs (Electro-Optical Tactical Decision Aids). The main objective of the trial was to study the impact of the atmosphere on imaging sensor performance, with a focus on the effects of atmospheric extinction and near-surface turbulence. An overview of the trial will be given, as well as an overview of EO-TDA development. Results of efforts to forecast the refractive index structure parameter using numerical weather prediction (NWP) models will be described, as well as the results of a perception study on the influence of turbulence on target acquisition ranges using MWIR imagery.
// WrapETH will handle all necessary works to convert ETH->WETH(possible BNB->WBNB on bsc), include send speedup tx // currently this will convert all ETH balance in wallet func (w *WrapETHHandler) WrapETH(refID string, amount *big.Int) string { l := w.l.With("ref", refID) lastTx, err := w.additionTxStorage.GetLastTransaction(refID) if err == exchange.ErrNotFound { return w.sendWrapETHTx(refID, amount) } if err != nil { l.Errorw("cannot get addition tx", "err", err) return common.MiningStatusNA } txs, err := w.additionTxStorage.ListTransactions(refID) if err != nil { l.Errorw("list transactions failed", "err", err) return common.MiningStatusNA } for _, tx := range txs { if tx.Status == common.MiningStatusFailed || tx.Status == common.MiningStatusMined { return tx.Status } } if err = w.sendSpeedupTx(lastTx); err != nil { l.Errorw("send speed up wrap eth failed", "err", err) } for _, a := range txs { miningStatus, _, err := w.bc.TxStatus(common2.HexToHash(a.TxHash)) if err != nil { l.Errorw("get tx status failed", "err", err, "tx", a.TxHash) continue } l.Debugw("tx_status", "tx", a.TxHash, "status", miningStatus) switch miningStatus { case common.MiningStatusMined, common.MiningStatusFailed: if errS := w.additionTxStorage.UpdateStatus(a.RefID, a.TxHash, miningStatus); errS != nil { l.Errorw("update tx status failed", "err", err) } return miningStatus } } return common.ExchangeStatusNA }
def update_time(self, time: int, state: TimeState) -> None:
    """Refresh the time display and its state label.

    Shows `time` (seconds) as MM:SS plus the lowercase state name, colored
    red during a break and black otherwise.
    """
    minutes, seconds = divmod(time, 60)
    time_label = self.information["time"]
    state_label = self.information["time-state"]
    time_label.setText(f"{minutes:02d}:{seconds:02d}")
    state_label.setText(state.name.lower())
    color = TextColor.RED.value if state is TimeState.BREAK else TextColor.BLACK.value
    time_label.set_color(color)
    state_label.set_color(color)
// Project: Advanced Locomotion System V4 on C++ // Copyright: Copyright (C) 2021 <NAME> // License: MIT License (http://www.opensource.org/licenses/mit-license.php) // Source Code: https://github.com/dyanikoglu/ALSV4_CPP // Original Author: <NAME> // Contributors: #pragma once #include "CoreMinimal.h" #include "Runtime/Engine/Classes/Animation/AnimSequenceBase.h" #include "ALSCharacterEnumLibrary.h" #include "ALSAnimationStructLibrary.generated.h" USTRUCT(BlueprintType) struct FALSDynamicMontageParams { GENERATED_BODY() UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ALS|Dynamic Transition") UAnimSequenceBase* Animation = nullptr; UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ALS|Dynamic Transition") float BlendInTime = 0.0f; UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ALS|Dynamic Transition") float BlendOutTime = 0.0f; UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ALS|Dynamic Transition") float PlayRate = 0.0f; UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "ALS|Dynamic Transition") float StartTime = 0.0f; }; USTRUCT(BlueprintType) struct FALSLeanAmount { GENERATED_BODY() UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float LR = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float FB = 0.0f; }; USTRUCT(BlueprintType) struct FALSVelocityBlend { GENERATED_BODY() UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float F = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float B = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float L = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float R = 0.0f; }; USTRUCT(BlueprintType) struct FALSTurnInPlaceAsset { GENERATED_BODY() UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") 
UAnimSequenceBase* Animation = nullptr; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") float AnimatedAngle = 0.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") FName SlotName; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") float PlayRate = 1.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") bool ScaleTurnAngle = true; }; USTRUCT(BlueprintType) struct FALSAnimCharacterInformation { GENERATED_BODY() UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") FRotator AimingRotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") FRotator CharacterActorRotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") FVector Velocity; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") FVector RelativeVelocityDirection; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") FVector Acceleration; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") FVector MovementInput; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") bool bIsMoving = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") bool bHasMovementInput = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") float Speed = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") float MovementInputAmount = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") float AimYawRate = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") float ZoomAmount = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character 
Information") EALSMovementState PrevMovementState = EALSMovementState::None; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") EALSViewMode ViewMode = EALSViewMode::ThirdPerson; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") bool bIsAICharacter = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") bool bIsCharacterVisible = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") bool bIsWeaponCollidingWithWall = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") float SpineRotationAlpha = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") bool bDistanceQualityDrop = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Character Information") bool bIsDead = false; }; USTRUCT(BlueprintType) struct FALSAnimGraphGrounded { GENERATED_BODY() UPROPERTY(VisibleDefaultsOnly, BlueprintReadWrite, Category = "ALS|Anim Graph - Grounded") EALSHipsDirection TrackedHipsDirection = EALSHipsDirection::F; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") bool bShouldMove = false; // Should be false initially UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") bool bRotateL = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") bool bRotateR = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadWrite, Category = "ALS|Anim Graph - Grounded") bool bPivot = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float RotateRate = 1.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float RotationScale = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float 
DiagonalScaleAmount = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float WalkRunBlend = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float StandingPlayRate = 1.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float CrouchingPlayRate = 1.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float StrideBlend = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float FYaw = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float BYaw = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float LYaw = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Grounded") float RYaw = 0.0f; }; USTRUCT(BlueprintType) struct FALSAnimGraphInAir { GENERATED_BODY() UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - In Air") bool bJumped = false; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - In Air") float JumpPlayRate = 1.2f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - In Air") float FallSpeed = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - In Air") float LandPrediction = 1.0f; }; USTRUCT(BlueprintType) struct FALSAnimGraphAimingValues { GENERATED_BODY() UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Aiming Values") FRotator SmoothedAimingRotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Aiming Values") FRotator SpineRotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Aiming Values") FVector2D AimingAngle; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Aiming 
Values") float AimSweepTime = 0.5f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Aiming Values") float InputYawOffsetTime = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Aiming Values") float ForwardYawTime = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Aiming Values") float LeftYawTime = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Aiming Values") float RightYawTime = 0.0f; }; USTRUCT(BlueprintType) struct FALSAnimGraphLayerBlending { GENERATED_BODY() UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") int32 OverlayOverrideState = 0; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float EnableAimOffset = 1.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float BasePose_N = 1.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float BasePose_CLF = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Arm_L = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Arm_L_Add = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Arm_L_LS = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Arm_L_MS = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Arm_R = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Arm_R_Add = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Arm_R_LS = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = 
"ALS|Anim Graph - Layer Blending") float Arm_R_MS = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Hand_L = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Hand_R = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Legs = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Legs_Add = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Pelvis = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Pelvis_Add = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Spine = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Spine_Add = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Head = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float Head_Add = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float EnableHandIK_L = 1.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Layer Blending") float EnableHandIK_R = 1.0f; }; USTRUCT(BlueprintType) struct FALSAnimGraphFootIK { GENERATED_BODY() UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") float FootLock_L_Alpha = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") float FootLock_R_Alpha = 0.0f; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") bool UseFootLockCurve_L; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") 
bool UseFootLockCurve_R; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FVector FootLock_L_Location; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FVector TargetFootLock_R_Location; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FVector FootLock_R_Location; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FRotator TargetFootLock_L_Rotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FRotator FootLock_L_Rotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FRotator TargetFootLock_R_Rotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FRotator FootLock_R_Rotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FVector FootOffset_L_Location; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FVector FootOffset_R_Location; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FRotator FootOffset_L_Rotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FRotator FootOffset_R_Rotation; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") FVector PelvisOffset; UPROPERTY(VisibleDefaultsOnly, BlueprintReadOnly, Category = "ALS|Anim Graph - Foot IK") float PelvisAlpha = 0.0f; }; USTRUCT(BlueprintType) struct FALSAnimTurnInPlace { GENERATED_BODY() UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") float TurnCheckMinAngle = 45.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") float Turn180Threshold = 130.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") float AimYawRateLimit = 50.0f; UPROPERTY(EditAnywhere, 
BlueprintReadOnly, Category = "ALS|Turn In Place") float ElapsedDelayTime = 0.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") float MinAngleDelay = 0.f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") float MaxAngleDelay = 0.75f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") FALSTurnInPlaceAsset N_TurnIP_L90; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") FALSTurnInPlaceAsset N_TurnIP_R90; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") FALSTurnInPlaceAsset N_TurnIP_L180; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") FALSTurnInPlaceAsset N_TurnIP_R180; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") FALSTurnInPlaceAsset CLF_TurnIP_L90; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") FALSTurnInPlaceAsset CLF_TurnIP_R90; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") FALSTurnInPlaceAsset CLF_TurnIP_L180; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Turn In Place") FALSTurnInPlaceAsset CLF_TurnIP_R180; }; USTRUCT(BlueprintType) struct FALSAnimRotateInPlace { GENERATED_BODY() UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Rotate In Place") float RotateMinThreshold = -50.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Rotate In Place") float RotateMaxThreshold = 50.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Rotate In Place") float AimYawRateMinRange = 90.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Rotate In Place") float AimYawRateMaxRange = 270.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Rotate In Place") float MinPlayRate = 1.15f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Rotate In Place") float MaxPlayRate = 3.0f; }; USTRUCT(BlueprintType) struct FALSAnimConfiguration { GENERATED_BODY() 
UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float AnimatedWalkSpeed = 150.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float AnimatedRunSpeed = 350.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float AnimatedSprintSpeed = 600.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float AnimatedCrouchSpeed = 150.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float VelocityBlendInterpSpeed = 12.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float GroundedLeanInterpSpeed = 4.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float InAirLeanInterpSpeed = 4.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float SmoothedAimingRotationInterpSpeed = 10.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float InputYawOffsetInterpSpeed = 8.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float TriggerPivotSpeedLimit = 200.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float FootHeight = 13.5f; /** Threshold value for activating dynamic transition on various animations */ UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float DynamicTransitionThreshold = 8.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float IK_TraceDistanceAboveFoot = 50.0f; UPROPERTY(EditAnywhere, BlueprintReadOnly, Category = "ALS|Main Configuration") float IK_TraceDistanceBelowFoot = 45.0f; };
Evaluation of Resistance to Fescue Toxicosis in Purebred Angus Cattle Utilizing Animal Performance and Cytokine Response Fescue toxicosis is a multifaceted syndrome common in cattle grazing endophyte-infected tall fescue; however, varying symptomatic responses potentially imply genetic tolerance to the syndrome. It was hypothesized that a subpopulation of animals within a herd would develop tolerance to ergot alkaloid toxicity. Therefore, the goals of this study were to develop selection criteria to identify tolerant and susceptible animals within a herd based on animal performance, and then examine responsive phenotypic and cytokine profiles to fescue toxicosis. Angus cows grazed endophyte-infected tall fescue at two locations for 13 weeks starting in mid-April 2016. Forage measurements were collected to evaluate ergot alkaloid exposure during the study. A post hoc analysis of animal performance was utilized to designate cattle into either tolerant or susceptible groups, and weekly physiological measurements and blood samples were collected to evaluate responses to chronic exposure to endophyte-infected tall fescue. Findings from this study support that the proposed fescue toxicosis selection method formulated herein could accurately distinguish between tolerant and susceptible animals based on performance parameters in cattle chronically exposed to ergot alkaloids, and provide evidence to warrant additional analysis to examine the impact of ergot alkaloids on immune responsiveness in cattle experiencing fescue toxicosis. Introduction Fescue toxicosis, resulting from consumption of ergot alkaloids commonly found in endophyte (Epichloë coenophiala)-infected tall fescue (Lolium arundinaceum Schreb. Darbysh), significantly impacts livestock health and production globally. 
Direct exposure to ergot alkaloids from tall fescue and other grass species occurs in production systems that heavily rely on grazing, such as in cow calf and stocker programs in the United States, New Zealand, and Australia; whereas importation of feedstuff has led to ergot alkaloid-induced effects in several Asian countries, including Japan and Korea. The expected proportion of T-Snip genotypes between the two FTSM groups can be seen in Table 2. There is a tendency for a different distribution of T-Snip genotypes between EI-TOL and EI-SUS animals (p = 0.089). There was a statistical difference (p < 0.05) for the proportions of T-Snip genotypes 2 and 3 between EI-TOL and EI-SUS animals. EI-TOL animals had a lower proportion of genotype 2 than EI-SUS animals (0.1 ± 0.07 versus 0.368 ± 0.11), whereas the opposite was found for T-Snip genotype 3 (0.6 ± 0.11 versus 0.316 ± 0.11). There were no differences in the proportion of T-Snip genotypes 1 and 4 between these animals (p > 0.05). These results indicated that EI-TOL animals had an overall greater frequency of greater T-Snip genotypes than EI-SUS animals. Table 2. Expected proportion of T-Snip genotypes 2 between tolerant (TOL) and susceptible (SUS) cows in the endophyte-infected (EI) fescue locations. Phenotypic Variables of Animal Performance Animal performance is negatively impacted during ergot alkaloid exposure, thus, the focus of this study was to identify tolerant and susceptible animals within an exposed population; however, direct comparisons of performance parameters to animals not exposed to ergot alkaloids demonstrated the severity of exposure throughout the trial. Cattle grazing novel endophyte fescue pastures (EN, non-toxic) had greater body weights compared to animals grazing endophyte-infected (EI, toxic) fescue pastures (p < 0.0001; Figure 1). 
By comparison, animals deemed tolerant (EI-TOL) to ergot alkaloid exposure displayed a positive weight gain compared to animals deemed susceptible (EI-SUS) to ergot alkaloid exposure (Figure 1; Table 3; p < 0.05). Additionally, cattle grazing novel endophyte fescue pastures had a greater average daily gain (ADG; 0.703 kg/d) and body condition score (BCS; 6.1) compared to EI-TOL and EI-SUS animals (0.497 and −0.003 kg/d, respectively, for ADG and 5.5 and 5.6, respectively, for BCS; Table 3; p < 0.0001). Throughout the course of the study, EN and EI-TOL animals displayed similar hair coat scores (HCS), which were significantly less dense compared to those of EI-SUS animals (Table 3; p < 0.0001). Furthermore, EN animals were better able to shed their hair throughout the trial period compared to EI-TOL and EI-SUS animals (1.99 versus 2.36 and 2.64, respectively; Table 3; p < 0.0001). Although within the normal body temperature range for cattle, EN animals were cooler compared to EI-TOL and EI-SUS animals as indicated by rectal temperatures (Table 3; p < 0.0001). The average daily gain was significantly greater in EI-TOL animals during P-1 and over the entire study (ES) compared to EI-SUS animals (Table 4); however, no difference in ADG was observed during P-2 (Table 4), which could be reflective of the change in ergovaline concentrations observed in the fescue pastures between P-1 and P-2 of the trial. In addition to ADG, EI-TOL animals displayed a greater positive change in BCS during P-1, P-2, and throughout the entire study compared to the EI-SUS animals (Table 4; p < 0.05). Throughout the course of the study, EI-TOL animals displayed greater change in HCS and HSS in P-1 and P-2 compared to EI-SUS animals (Table 4; p < 0.05), with the greatest change occurring in P-1 as the temperature and humidity increased. 
Thus, EI-TOL animals were able to alter their hair coat density and length earlier in the year when ergot alkaloid concentrations were elevated (P-1), reducing the negative impact of fescue toxicosis on animal performance. Interestingly, no differences were observed when examining the change in HCS or HSS between EI-TOL and EI-SUS animals over the entire study (Table 4; p > 0.05). When examining differences in HCS and HSS between P-1 and P-2, there was an increase in hair shedding in the EI-SUS animals in P-2, which could be attributed to the decrease in ergot alkaloid concentration in the fescue pastures, in combination with the fact that elevated temperatures and humidity permitted these animals to display similar hair characteristics to that of the EI-TOL animals. However, no differences were observed in the change in body temperatures during P-1, P-2, or over the entire study (Table 4; p > 0.05). Within the row, means without a common superscript significantly differ (p ≤ 0.05); * p-values 0.05 < p ≤ 0.10 determined a statistical tendency; 1 P-1: Period 1, using data collected from weeks one, three, and five; 2 P-2: Period 2, using data collected from weeks 7, 9, and 11; 3 ES: Entire study, using data collected from weeks 1, 3, 5, 7, 9, and 11; 4 Values are reported as least square means for the experiment; 5 EI-TOL: cattle grazing endophyte-infected fescue deemed tolerant through FTSM (n = 20); EI-SUS: cattle grazing endophyte-infected fescue deemed susceptible through FTSM (n = 20); 6 ∆BCS = change in body condition score (1-9 scale) over the given time frame; 7 ∆HCS = change in hair coat score (1-5 scale) over the given time frame; 8 ∆HSS = change in hair shedding score (1-5 scale) over the given time frame; 9 ∆RT = change in rectal temperature (°C) over the given time frame. 
Of the growth factors, vascular endothelial growth factor A (VEGFA) concentrations were greater in EI-TOL animals compared to EI-SUS animals (6.3 ± 1.1 versus 3.2 ± 1.1 ng/mL, respectively; Figure 2B; p = 0.0444). No differences were observed in VEGFA concentrations between locations or over the course of the trial (Table 5; p > 0.05). No differences were observed in FGF1, FGF2, or IGF1 concentrations between EI-TOL and EI-SUS animals over the course of the trial (Table 5; p > 0.05). Fibroblast growth factor 1, but not FGF2 or IGF1, concentrations tended to be greater in animals at UPRS compared to animals at BBCFL (328.6 ± 81.6 versus 62.4 ± 81.6 ng/mL, respectively; Table 5; p = 0.0793). Discussion Due to the extensive use of endophyte-infected tall fescue across the globe, the goals of this study were to develop selection criteria to identify tolerant and susceptible animals based on phenotypic animal performance traits, and then examine responsive cytokine profiles to identify beef cattle displaying tolerance to fescue toxicosis. Several studies described breed differences in response to ergot alkaloids, in which breeds such as Senepol and Brahman, which are better able to handle the heat and humidity, outperformed British breeds such as Angus and Hereford when exposed to toxic tall fescue. These breeds have been used in crossbreeding programs to address fescue toxicosis, but it is unclear if their apparent tolerance is to fescue or adaptation to elevated heat and humidity. 
However, genetic progress is obtained within the breed, and the identification of genetic variation in animals from the same breed should be exploited to allow for selection for improved response (e.g., tolerance) to fescue toxicosis. Gray et al. examined Angus cattle performance in North Carolina and Mississippi on endophyte-infected fescue pasture. These researchers observed genetic variation for hair coat shedding, indicating that Angus cattle that shed their winter hair coat earlier in the year may be more heat tolerant. At the genomic level, genes regulating prolactin production have been targeted to identify genetic markers for tolerance to fescue toxicosis. Campbell et al. recently identified a single nucleotide polymorphism (SNP) within the dopamine receptor D2 gene that was associated with variation in calving rates when grazing endophyte-infected tall fescue. In addition, using part of the same data in this study, Galliou et al. showed that a commercial genetic test for fescue toxicosis, T-Snip (AgBotanica, LCC, Columbia, MO, USA), is associated with growth, hair shedding, and calf weaning weight in pregnant Angus cows. With the available T-Snip data, we tested whether the distribution of T-Snip genotypes changed between EI-TOL and EI-SUS animals in our study. According to the company's instructions, T-Snip genotypes range from 1 to 5, with levels of tolerance to fescue toxicosis increasing with the value of the genotype. Although both groups of animals showed T-Snip genotypes 1 to 4, there was a greater proportion of genotype 3 in EI-TOL than in EI-SUS. Similarly, there was a greater proportion of genotype 2 in EI-SUS than in EI-TOL. Therefore, the expected genetic values based on this commercial genetic test was greater in EI-TOL than in EI-SUS. 
Hence, although the selection of animals in this study was fully based on their phenotypic performance, results from T-Snip genotypes further support that this selection method was able to identify animals with contrasting tolerance to fescue toxicosis. It is important to note that this commercial genetic test does not have a perfect accuracy to predict with phenotypic performance in animals during fescue toxicosis . Therefore, the presence of animals with lower and greater genotype values in EI-TOL and EI-SUS groups, respectively, was expected. Considering these factors, developing a selection method based on the phenotypic performance parameter in cattle from a single breed source in controlled experiments would aid in identifying animals that are potentially tolerant or susceptible to ergot alkaloids. A major challenge with studying this multifaceted syndrome is that endophyte production of ergot alkaloids varies with season and plant maturity, and contributes to variation in ergot alkaloid intake by grazing cattle. Total ergot alkaloids or ergovaline, the most abundant alkaloid, has been measured in fescue to describe its potential toxicity. Concentrations of ergovaline increase from 250 to 500 µg/Kg in leaf blades and from 500 to 1300 µg/Kg in leaf sheaths from April to May. Seed heads contain the greatest concentration of toxins, and reach concentrations as high as 5000 µg/Kg in June. Ergovaline concentrations decline in August and increase again during fall regrowth . Total ergot alkaloid concentration displays the same seasonal changes as ergovaline . The concentration of total ergot alkaloids and ergovaline declines by 81% to 85%, respectively, from December to March in stockpiled fescue (accumulated during the growing season for grazing during dormancy; ). 
There were differences in the percentage of fescue and endophyte infection rate between locations used in this study, which then led to differences in ergovaline concentrations between locations, as well as variation in ergovaline concentration throughout the trial. The increase in ergot alkaloid consumption during period 1 resulted in greater differences in the phenotypic performance parameters, including average daily gain (ADG), body condition scores (BCS), body weight (BW), and rectal temperatures that were evaluated in this study. The difference in ADG and BCS in response to increased consumption of ergot alkaloids is similar to data reported by Thompson et al., which identified a direct negative correlation to an animal's ADG when the pasture infection rate increased. Moreover, comparison of phenotypic performance parameters of animals exposed to ergot alkaloids (EI) to animals not exposed to ergot alkaloids (EN) demonstrated the severity of exposure throughout the trial. Additionally, sufficient body condition has been shown to be one of the most important traits to cattle reproductive efficiency in a cow-calf herd, and decreases as a result of endophyte-infected tall fescue consumption. Aside from animal growth performance, issues with thermotolerance have also frequently been seen in animals suffering from severe cases of fescue toxicosis. Many of the symptoms of fescue toxicosis (ergot alkaloid exposure) are amplified during a period of heat stress, and the greatest loss in animal performance and consequently increased production losses occur during the summer months when grazing endophyte-infected tall fescue. It has been speculated that heat tolerant (i.e., Bos indicus) breeds of cattle have improved resistance to fescue toxicosis. Several studies, including Poole et al., have demonstrated that cattle with adaptations to manage heat stress perform better when exposed to ergot alkaloids and increased environmental temperatures. 
However, there are conflicting data that report no difference in growth rate, hormone concentration, or milk production between Bos indicus and Bos taurus cattle grazing endophyte-infected tall fescue. Therefore, it remains unknown if the traits for heat tolerance and tolerance to ergot alkaloids are synonymous. Correlations have been previously made in which cattle that shed their winter hair coats earlier in the fescue season suffer less from the negative effects of fescue toxicosis. In this study, weekly hair shedding scores (HSS) and hair coat scores (HCS) were taken to evaluate differences based on tolerance designation week to week, as well as initial rate of shedding. Both measures were significantly different between tolerant and susceptible animals grazing the endophyte-infected tall fescue. Both HCS and HSS were similar at the start of the study, and deviation between EI-TOL and EI-SUS animals occurred at week three of the study and continued through week seven (P-1). Interestingly, HSS and HCS re-converged to similar numeric values when ergot alkaloid concentrations decreased, yet ambient temperatures and humidity remained elevated, coincident with increased prolactin concentrations. Decreases in serum prolactin have been associated with fescue toxicity, and are often used as an indicator of ergot alkaloid exposure in cattle. However, increased prolactin concentrations are associated with increasing day lengths and ambient temperatures, and initiate hair shedding. Prolactin secretion has been linked with changes in environmental temperature, such that prolactin concentrations are elevated during warmer versus cooler months, and has been shown to regulate hair shedding. It has been speculated that fescue toxicosis-induced hypoprolactinemia prevents shedding of the winter hair coat, therefore, cattle have an elevated body temperature and increased vulnerability to heat stress. 
In the current study, EI-TOL animals had a tendency for lower serum prolactin concentrations when compared to EI-SUS animals as determined by the FTSM, thus, it can be assumed that those animals deemed tolerant had sufficient concentrations of serum prolactin to initiate hair shedding. Based on the other physiological measurements, the cattle in this study displayed many other symptoms of fescue toxicosis, and the lack of hypoprolactinemia was unexpected. The interaction of increased THI and exposure to ergot alkaloids impacts the severity of fescue toxicosis (reviewed by one), and the relatively low inclusion rate of the ergot alkaloids in the infected pastures may have altered serum prolactin concentrations. With EI-TOL animals having lower average serum prolactin concentrations than EI-SUS animals, it is hypothesized that these cattle were genetically predisposed to eliciting a shedding response at lower thresholds of prolactin. Using the same animals from this study, Koester et al. showed that EI-TOL and EI-SUS animals have distinct fecal bacterial and fungal communities, further supporting that the proposed FTSM also results in differences in the microbiome of animals under FT stress. This classification using FTSM is further supported by the differences in T-Snip genotypes between EI-TOL and EI-SUS presented in the current study. These hypotheses build from the speculation of Aiken et al. that genetic predispositions exist for necessary prolactin levels to initiate shedding. More recent studies in various livestock species have reported that immune parameters have moderate to high heritability, as well as high genetic correlation with reproductive performance (lowly heritable traits), indicating that immune parameters can be used as a genetic trait for selecting animals that are resistant to specific diseases . 
Using the protein array approach, numerous growth factors, chemokines, cytokine receptor antagonists, and anti-inflammatory and pro-inflammatory cytokines were examined during ergot alkaloid exposure. One of the major outcomes of this analysis is that many of the factors examined were significantly different or showed a statistical tendency to be higher at UPRS compared to BBCFL (Tables 5 and 6). While cytokine differences between locations was not the primary focus of this study, these data demonstrate the immune system's responsiveness to varying ergot alkaloid exposure, and warrants further investigation with known concentrations of ergovaline to better understand this interaction. Based on these data, it is hypothesized that cattle at the UPRS location experienced a greater immune response in response to higher ergot alkaloid exposure. A similar report described a hyperactive innate immune response, which may lead to an immuno-compromised animal in stocker steers when chronically exposed to ergovaline . In the current study, vascular endothelial growth factor A (VEGFA) and G Protein-Coupled Receptor Associated Sorting Protein 1 (GPRASP-1) concentrations were greater in EI-TOL animals compared to EI-SUS animals. G Protein-Coupled Receptor Associated Sorting Protein 1 has been associated with downregulation of a variety of G Protein-Coupled receptors, including the D2-dopamine receptor, through lysosomal degradation. The D2-dopamine receptor has been shown to play a direct effect in cattle prolactin secretion, and is also involved in ergot alkaloids exposure's decrease in prolactin secretion . Prolactin is decreased when dopamine binds to the D2-dopamine receptor, and the cyclic ring structure of ergovaline closely mimics the ring structure of dopamine, allowing ergovaline to bind the D2-dopamine receptor and thus inhibit prolactin secretion . 
In the current study, EI-TOL had greater GPRASP-1 concentrations, potentially causing the D2-dopamine receptor to be downregulated at a greater rate. This decrease in the prevalence of the D2-dopamine receptor decreased the opportunity for ergovaline to bind and decrease prolactin secretion, giving EI-TOL animals an advantage when grazing endophyte-infected tall fescue. Furthermore, this could contribute to EI-TOL animals having greater sensitivity to prolactin through downregulation of the entire prolactin secretion pathway, resulting in lower biological thresholds needed to initiate the biological roles of the hormone, which was previously discussed. Vascular endothelial growth factor A is a known vasodilator, and has been shown to increase micro-vascular permeability. In addition, it is known to play a particularly important role in vascular endothelial cells, where higher concentrations increase vascular permeability and could provide protective effects against ergot alkaloid exposure. Vasoconstriction caused by exposure to ergot alkaloids further compounds the issues associated with fescue toxicosis, such as its ability to decrease the animal's capacity for evaporative cooling . Increased VEGFA concentrations and lower rectal temperature observed in the EI-TOL animals provide evidence that the EI-TOL animals selected using the FTSM have a greater ability to avoid heat stress and the vasoconstrictions effects of fescue toxicosis compared to EI-SUS animals. Several studies have examined changes in VEGFA to heat stress, and have focused on tissue and/or cellular responsiveness as opposed to whole animal changes. Vascular endothelial growth factor concentrations in the blood of heat tolerant (Bos Indicus) cattle compared to cattle that are more sensitive to heat stress (Bos Taurus) has not been directly investigated; however, Jyotiranjan et al. and Iqbal et al. 
reported increases in VEGF gene expression in response to thermal stress in goats and Bos indicus cattle breeds, respectively. Interestingly, of the eight Bos indicus breeds examined, only two breeds (Bhagnari and Lohani) displayed increased VEGF expression, but the authors indicate that this increase in VEGF is associated with adaptation to high altitude as opposed to heat tolerance. Additionally, Jones et al. and Aiken et al. have speculated that decreases in serum progesterone concentrations could be caused by vasoconstriction of blood flow to the ovary in cattle consuming ergot alkaloids. Poole and colleagues confirmed that ergovaline exposure reduced the diameter of the ovarian artery, leading to the functional corpus luteum, thus decreasing circulating progesterone concentrations. In the current study, EI-TOL animals tended to have higher progesterone concentrations compared to EI-SUS animals, which may be linked to greater ovarian artery diameter due to greater concentrations of circulating VEGFA in these animals. Thus, the EI-TOL animal's ability to synthesize higher concentrations of VEGFA may provide the capacity to increase vessel diameter, which would result in substantial mitigation of the vasoconstriction seen with fescue toxicosis. Ultimately, further investigation is needed to evaluate these cytokines and their role in fescue toxicosis, as well as the tolerant animal's ability to mount a stronger immune response to mitigate some of the negative effects of fescue toxicosis. Taken together, the selection strategy used in this study (i.e., FTSM) was shown to be effective in identifying groups of individuals expressing contrasting responses to ergot alkaloid exposure, i.e., fescue toxicosis. Nonetheless, three items must be further discussed: first, the selection of these animals was based solely on their (adjusted) phenotypic performance. 
Although the T-Snip data presented in this study support that TOL animals have greater genetic tolerance to ergot alkaloid exposure than SUS animals, the accuracy of identifying individual differences from ergot alkaloid exposure using this test was not 100%. Hence, there is still genetic variation in the animal's response to ergot alkaloid exposure that could be explored. One way of capturing individual genetic variation is through using, for example, the expected progeny differences (EPDs) for the growth rate of these animals as part of the selection criteria. However, there are some limitations on using the current EPDs for this purpose. These EPDs are based on a nationwide genetic evaluation (American Angus Association; http://www.angus.org/), which does not take into consideration the presence of genotype-by-environment interaction (GxE), which seems to be the case for ergot alkaloid exposure and for other stress-related traits. Thus, the use of EPDs for this purpose may bias the selection of animals with greater tolerance to ergot alkaloid exposure. Although no source of genetic information was used in our selection process, it was expected that our approach captured part of the genetic potential of these animals. Assuming a) unbiased estimates for the fixed effects and b) independence between fixed and random effects included in the statistical model used for selection of TOL and SUS animals (see details in Koester et al.), the estimated residuals that were used for classification of TOL and SUS animals should include both genotypic effects (additive and non-additive) and the true residual effects. Thus, assuming at least a moderate narrow-sense heritability for growth rate under ergot alkaloid exposure, it is expected that the estimated residuals include a substantial contribution of additive genetic values. 
Hence, although the selection process used in this study did not include prior genetic information from these animals, the data presented in this study demonstrate that TOL and SUS animals differed, at some level, in their genetic potential when exposed to ergot alkaloids. Additionally, another consideration is the potential confounding between the growth rate in the presence or absence of ergot alkaloids. In this study, it was assumed that animals expressed their growth as a function of response to ergot alkaloid exposure. In fact, the difference in results obtained between the two locations with endophyte-infected (toxic) fescue suggests that some minimum concentrations of ergot alkaloids are needed for animals to express a tolerant-related phenotype. In order to obtain accurate information on the relationship between the growth rate in the presence or absence of ergot alkaloids at the genetic level, a much larger sample size, with animals from a substantial number of sires represented across locations with varying concentrations of ergot alkaloids, would be needed to estimate the genetic correlation between the growth rate in the presence or absence of ergot alkaloids. Albeit needed, such a scenario would be difficult to achieve in a timely manner. At the phenotypic level, the use of biomarkers for ergot alkaloid exposure, such as prolactin or those identified in our study (e.g., VEGFA), could provide additional information on the level of ergot alkaloid exposure on animals. This information could be used to evaluate whether a significant physiological response to ergot alkaloid exposure was observed in each animal, potentially allowing for better separation between animals showing a growth rate in the presence or absence of ergot alkaloids. Lastly, our method was unidimensional, using only growth rate data for the selection of TOL and SUS animals. 
Hence, the evaluation of a fescue toxicosis selection index using multiple parameters and including other sources of information could be beneficial for more accurate selection of animals for response to ergot alkaloid exposure. Additional sources of information include the use of T-Snip, the biomarkers identified in this population (this study and Koester et al. ), genotypes at the dopamine receptor D2 gene locus , and more. Opportunities exist for the identification of additional biomarkers based on response-related traits other than growth rate, as used in this study. There is also a need for large-scale genomic studies for the identification of SNPs associated with variation in ergot alkaloid exposure related traits, which could be used to better identify animals with different genetic potential for fescue toxicosis. Conclusions Taken together, these data provide support to validate the fescue toxicosis selection method (FTSM) proposed in this study through accurate collection of phenotypic performance parameters in a population of cattle chronically exposed to ergot alkaloids, and provide evidence to warrant additional analysis on the impact of ergot alkaloids on immune responsiveness in cattle that experience fescue toxicosis. Given the increased performance and hair shedding ability, greater hormone regulation and efficiency, and stronger cytokine responses, EI-TOL animals have clear mechanisms available to provide these advantages that can and should be selected for in animals raised in a fescue-dominant environment. Animal Management Cow performance and forage data were collected from late April to late July 2016 (Figure 3) when ergot alkaloid concentrations are greatest. Purebred Angus cows (n = 148) that ranged from two to four years of age all grazed endophyte-infected tall fescue during the entire experimental period. 
Cattle selected for this study were confirmed pregnant at 30 days post artificial insemination via ultrasonography, and were approximately 85 days post-conception at the start of the study. Additionally, cows selected for this study were weaned two weeks prior to the start of the study to remove the effect of lactation on animal performance. Cattle were offered ad libitum water and free choice minerals throughout the duration of the study, in addition to natural shade structures within the pastures. Additionally, a subset of cows (n = 27) were maintained under the same conditions on novel endophyte fescue pastures (non-toxic, EN) at the BBCFL to serve as a representative control group. Body weight (BW), body condition scores (BCS, as adapted from Richards et al., ), hair shedding score (HSS, as adapted from ), hair coat score (HCS, adapted from ), rectal temperatures, and jugular blood samples were collected weekly to evaluate the animal's physiological response to ergot alkaloid exposure. Objective scores of BCS, HSS, and HCS were all collected by two trained evaluators and composited for an average score for each animal. Blood samples were collected via jugular venipuncture using 20-gauge needles and sterile 10.0 mL vacutainer tubes that contained no additive (Becton Dickinson, Franklin Lakes, NJ, USA). All blood samples were immediately placed on ice and then transported to the laboratory for processing. Whole blood samples were centrifuged the afternoon following collection each week for 25 min at 1500× g at 4 °C, then serum was drawn from vacutainer tubes and aliquoted into a glass dram vial and a plastic micro-centrifuge tube and stored at −80 °C until progesterone, prolactin, and cytokine analyses were conducted. Fescue Toxicosis Selection Method (FTSM) The classification of animals into tolerant (TOL) and susceptible (SUS) using the proposed Fescue Toxicosis Selection Method (FTSM) is fully described in Koester et al. . 
In summary, growth data on each animal (Figure 3) was used to estimate the slope of the regression analysis of BW on weeks (average weekly gain; AWG, Figure 4A). This was performed based on three window periods: weeks 1 through 13 (ES), weeks 1 through 7 (P-1), and weeks 7 through 13 (P-2) to assess the effect of the increase in temperature from April to July, availability of forage (see Supplementary Table S1), and exposure of infected tall fescue ( Table 1). The residuals from the analysis of AWG in a model including the fixed effects of location, parity, and initial body weight (covariate) were used to identify the window period in which the variation on the data was the largest, indicating the impact of the syndrome . Results from this analysis indicated that data from P-1 resulted in the greatest residual variance . Thus, the forty selected animals, those with the 10 most positive and 10 most negative residuals at each location, representing the TOL and SUS groups, respectively, from P-1 were used to evaluate performance and resistance to fescue toxicosis ( Figure 4B). 
T-Snip Genotyping A detailed description of the genotyping of animals in this study for T-Snip (AgBotanica, LLC, Columbia, MO, USA) can be found in Galliou et al. . In summary, blood cards were collected on each animal and shipped to GeneSeek (Neogen Genomics, Lincoln, NE, USA) for T-Snip genotyping. There were two, one, six, and one animals from EI-TOL at BBCFL for genotypes 1 to 4, respectively. The distribution of these T-Snip genotypes was one, five, three, and one, respectively, for EI-SUS at BBCFL; one, one, six, and two, respectively, for EI-TOL UPRS; and four, two, three, and zero, respectively, for EI-SUS UPRS cows. Forage Management Cattle at both locations grazed endophyte-infected tall fescue (toxic) pastures throughout the 13-week study. 
Cattle were rotationally grazed every two weeks at each location to ensure sufficient forage utilization and availability. Composite forage samples were taken from each pasture every two weeks to evaluate the nutrient quality and percentage of forage species available. Forage samples were clipped from approximately 20 locations in each pasture and composited. Fescue tiller samples were collected in November of 2016 to evaluate the pasture endophyte infection rate of the fescue. Nutrient quality samples were submitted within 24 h of collection for nutrient content, and then the results were averaged by experimental period (North Carolina Department of Agriculture Forage Laboratory, Raleigh, NC; USA, see Supplementary Table S1). From the same composite samples, forage was hand-separated in house by trained technicians to determine the percentage of fescue in relation to other various forage species (Table 3) making up forage dry matter. Collected fescue tiller samples were collected, rinsed, and shipped on ice to determine the pasture infection rate, and the average infection rate is reported by experimental period (Agrinostics Ltd. Co., Watkinsville, GA; USA, Table 3). Subsequent HPLC analysis of the forage samples for ergot alkaloid concentrations (MU Veterinary Medical Diagnostic Lab; ) demonstrated the change in ergovaline concentrations between locations and over the course of the grazing period (Table 3). Serum Assays and Analysis Serum progesterone concentrations (P4) were analyzed on the FTSM, EI-TOL (n = 20), and EI-SUS (n = 20) animals for both locations at weeks 1, 3, 5, 7, 9, 11 and 13, which represent a bi-weekly measure during the entirety of the collection period. Concentrations were determined by a commercially available radioimmunoassay, the Immuchem Coated Tube Progesterone I125 RIA assay (ICN Pharmaceuticals, Inc., Costa Mesa, CA), as previously described by Lyons et al. . 
Concentrations are reported in ng per mL, and the interassay and intraassay variation was 7.4% and 4.3%, respectively. Serum prolactin concentrations (PRLs) were analyzed on the FTSM, EI-TOL (n = 20), and EI-SUS (n = 20) animals for both locations at weeks 1, 7, and 13, which represent the beginning, midpoint, and end of the collection period. Concentrations were determined by a commercially available Bovine Prolactin ELISA assay (MyBioSource, San Diego, CA, USA), as previously described by Poole et al. . Concentrations were reported in ng per mL, and the interassay and intraassay variation was 11.7% and 4.2%, respectively. Statistical Analysis Performance and cytokine data were analyzed using the MIXED procedure of SAS 9.3 with repeated measures. The individual animal was utilized as the experimental unit, and the model for body condition scores, positive change in body condition scores, body weight, average daily gain, rectal temperature, hair coat score, hair shedding score, and hormone concentrations included Fescue Toxicosis Selection Method outcome (EI-TOL vs. EI-SUS), location (BBCFL vs. UPRS), trial period (P-1, P-2, and ES), and all respective interactions. For cytokine response data analysis, the individual animal was utilized as the experimental unit and the model for each cytokine response included treatment (EI-TOL vs. EI-SUS), location (BBCFL vs. UPRS), and time (Weeks 1, 7, and 13). The distribution of T-Snip genotypes between EI-TOL and EI-SUS was analyzed using a logistic multinomial model using the T-Snip genotype of animals as the response variable and the FTSM group (EI-TOL and EI-SUS) as a fixed effect in the model. This analysis was carried out in R . Results were recorded as least squares means ± SEM, where statistical significance was reported at p ≤ 0.05 and a statistical tendency at 0.05 < p ≤ 0.10.
/**
 * Builds one PhysiologyValueGenerator for each VitalSign output declared in
 * the generator configuration. A single SimRunner is created for the given
 * person and shared by all of the returned generators.
 *
 * @param generatorConfig generator configuration object
 * @param person Person to generate VitalSigns for
 * @return List of PhysiologyValueGenerator instances
 */
public static List<PhysiologyValueGenerator> fromConfig(
    PhysiologyGeneratorConfig generatorConfig,
    Person person) {
  SimRunner sharedRunner = new SimRunner(generatorConfig, person);
  List<PhysiologyValueGenerator> result = new ArrayList<PhysiologyValueGenerator>();
  for (IoMapper output : generatorConfig.getOutputs()) {
    if (output.getType() != IoMapper.IoType.VITAL_SIGN) {
      // only vital-sign outputs get a value generator
      continue;
    }
    result.add(new PhysiologyValueGenerator(
        generatorConfig,
        sharedRunner,
        VitalSign.fromString(output.getTo()),
        person,
        output.getVariance()));
  }
  return result;
}
def send_keyword_to_productdriver(driver_name, plugin_name, keyword, data_repository, args_repository):
    """Dispatch a keyword to the matching product or plugin driver module.

    Imports the driver module dynamically and forwards the keyword to its
    ``main`` entry point. On import failure, marks the current step as ERROR
    in ``data_repository`` and returns it instead of raising.

    :param driver_name: name of the product driver module under ProductDrivers
    :param plugin_name: plugin name, or None to use a product driver
    :param keyword: keyword to execute
    :param data_repository: shared test data dictionary (must contain "step_num")
    :param args_repository: arguments for the keyword
    :return: result of the driver's main(), or data_repository on import error
    """
    step_num = data_repository["step_num"]
    try:
        if plugin_name is not None:
            # plugin drivers live at plugins/<name>/bin/<base>_driver, where
            # <base> is the plugin name minus its 7-character suffix
            import_name = ".".join(["plugins", plugin_name, "bin", plugin_name[:-7] + '_driver'])
        else:
            import_name = "ProductDrivers.{0}".format(driver_name)
        driver_call = __import__(import_name, fromlist=[driver_name])
    except Exception as exc:
        # bug fix: pass the caught exception instance (previously the bare
        # Exception class was passed, so the real traceback was never reported)
        trcback = print_exception(exc)
        data_repository['step-%s_status' % step_num] = 'ERROR'
        data_repository['step-%s_exception' % step_num] = trcback
        Utils.testcase_Utils.pStep()
        return data_repository
    else:
        return driver_call.main(keyword, data_repository, args_repository)
/**
 * @author Samuel Githengi created on 10/09/19
 */
public class LocationUtils {

	/**
	 * Determines the root location identifiers from a child-to-parent mapping.
	 * A root is either a parent id that never appears as a child id, or a
	 * child id that is mapped to a null parent.
	 *
	 * @param locations map of location id to its parent location id (may be null)
	 * @return set of root location identifiers
	 */
	public static Set<String> getRootLocation(Map<String, String> locations) {
		Set<String> roots = new HashSet<>();
		for (Entry<String, String> mapping : locations.entrySet()) {
			String parent = mapping.getValue();
			if (parent == null) {
				// a location with no parent is itself a root
				roots.add(mapping.getKey());
			} else if (!locations.containsKey(parent)) {
				// a parent that is not also a child is a root
				roots.add(parent);
			}
		}
		return roots;
	}
}
/**
 * Shape of a logged-in user record.
 */
export default interface ILoginUser {
  // credential hash — presumably a password or session hash; TODO confirm against producer
  hash: string;
  name: string;
  // numeric role code — NOTE(review): role semantics not visible here; verify against role enum/table
  role: number;
  // language preference — presumably a locale/language code; TODO confirm format
  lang: string;
}
Zika in Puerto Rico, 2016-2017: II Perspectives on Epidemic Surveillance and Control, Health Communication, Outcomes and Lessons. The social reaction to the Zika epidemic in Puerto Rico reached a confrontational climax regarding aerial fumigation with an organophosphate insecticide. The public drama has obscured multiple simultaneous controversies. This and a companion paper, based mostly on print and digital news reports, provide a context and description of the major controversies and examine the outcomes and their lessons for the protection of the public's health. Part II covers the questions on disease surveillance (what is going on?); health communication and epidemic control (what is an epidemic? is there a way to control an epidemic transmitted by Aedes aegypti?), and the outcomes and lessons from the debates.
/* Automatically generated from OpenCL registry files; DO NOT EDIT! */ #include "opencl_private.h" #include "opencl_types.h" #include "unixlib.h" WINE_DEFAULT_DEBUG_CHANNEL(opencl);
/* NOTE(review): every function below is a generated thin thunk with the same
 * shape: it packs its arguments into a matching *_params struct, TRACEs the
 * call, and forwards it through OPENCL_CALL to the Unix-side implementation.
 * Functions that return a handle (cl_mem, cl_context, ...) pass &__retval in
 * the params struct and return it afterwards. Per the header above, do not
 * hand-edit; regenerate from the OpenCL registry files instead. */
cl_int WINAPI clBuildProgram( cl_program program, cl_uint num_devices, const cl_device_id* device_list, const char* options, void (WINAPI* pfn_notify)(cl_program program, void* user_data), void* user_data ) { struct clBuildProgram_params params = { program, num_devices, device_list, options, pfn_notify, user_data }; TRACE( "(%p, %u, %p, %p, %p, %p)\n", program, num_devices, device_list, options, pfn_notify, user_data ); return OPENCL_CALL( clBuildProgram, &params ); } cl_int WINAPI clCompileProgram( cl_program program, cl_uint num_devices, const cl_device_id* device_list, const char* options, cl_uint num_input_headers, const cl_program* input_headers, const char** header_include_names, void (WINAPI* pfn_notify)(cl_program program, void* user_data), void* user_data ) { struct clCompileProgram_params params = { program, num_devices, device_list, options, num_input_headers, input_headers, header_include_names, pfn_notify, user_data }; TRACE( "(%p, %u, %p, %p, %u, %p, %p, %p, %p)\n", program, num_devices, device_list, options, num_input_headers, input_headers, header_include_names, pfn_notify, user_data ); return OPENCL_CALL( clCompileProgram, &params ); } cl_mem WINAPI clCreateBuffer( cl_context context, cl_mem_flags flags, size_t size, void* host_ptr, cl_int* errcode_ret ) { cl_mem __retval; struct clCreateBuffer_params params = { &__retval, context, flags, size, host_ptr, errcode_ret }; TRACE( "(%p, %s, %Iu, %p, %p)\n", context, wine_dbgstr_longlong(flags), size, host_ptr, errcode_ret ); OPENCL_CALL( clCreateBuffer, &params ); return __retval; } cl_command_queue WINAPI clCreateCommandQueue( cl_context context, cl_device_id device, cl_command_queue_properties properties, cl_int* errcode_ret ) { cl_command_queue __retval; struct clCreateCommandQueue_params params = { &__retval, context, 
device, properties, errcode_ret }; TRACE( "(%p, %p, %s, %p)\n", context, device, wine_dbgstr_longlong(properties), errcode_ret ); OPENCL_CALL( clCreateCommandQueue, &params ); return __retval; } cl_context WINAPI clCreateContext( const cl_context_properties* properties, cl_uint num_devices, const cl_device_id* devices, void (WINAPI* pfn_notify)(const char* errinfo, const void* private_info, size_t cb, void* user_data), void* user_data, cl_int* errcode_ret ) { cl_context __retval; struct clCreateContext_params params = { &__retval, properties, num_devices, devices, pfn_notify, user_data, errcode_ret }; TRACE( "(%p, %u, %p, %p, %p, %p)\n", properties, num_devices, devices, pfn_notify, user_data, errcode_ret ); OPENCL_CALL( clCreateContext, &params ); return __retval; } cl_context WINAPI clCreateContextFromType( const cl_context_properties* properties, cl_device_type device_type, void (WINAPI* pfn_notify)(const char* errinfo, const void* private_info, size_t cb, void* user_data), void* user_data, cl_int* errcode_ret ) { cl_context __retval; struct clCreateContextFromType_params params = { &__retval, properties, device_type, pfn_notify, user_data, errcode_ret }; TRACE( "(%p, %s, %p, %p, %p)\n", properties, wine_dbgstr_longlong(device_type), pfn_notify, user_data, errcode_ret ); OPENCL_CALL( clCreateContextFromType, &params ); return __retval; } cl_mem WINAPI clCreateImage( cl_context context, cl_mem_flags flags, const cl_image_format* image_format, const cl_image_desc* image_desc, void* host_ptr, cl_int* errcode_ret ) { cl_mem __retval; struct clCreateImage_params params = { &__retval, context, flags, image_format, image_desc, host_ptr, errcode_ret }; TRACE( "(%p, %s, %p, %p, %p, %p)\n", context, wine_dbgstr_longlong(flags), image_format, image_desc, host_ptr, errcode_ret ); OPENCL_CALL( clCreateImage, &params ); return __retval; } cl_mem WINAPI clCreateImage2D( cl_context context, cl_mem_flags flags, const cl_image_format* image_format, size_t image_width, size_t 
image_height, size_t image_row_pitch, void* host_ptr, cl_int* errcode_ret ) { cl_mem __retval; struct clCreateImage2D_params params = { &__retval, context, flags, image_format, image_width, image_height, image_row_pitch, host_ptr, errcode_ret }; TRACE( "(%p, %s, %p, %Iu, %Iu, %Iu, %p, %p)\n", context, wine_dbgstr_longlong(flags), image_format, image_width, image_height, image_row_pitch, host_ptr, errcode_ret ); OPENCL_CALL( clCreateImage2D, &params ); return __retval; } cl_mem WINAPI clCreateImage3D( cl_context context, cl_mem_flags flags, const cl_image_format* image_format, size_t image_width, size_t image_height, size_t image_depth, size_t image_row_pitch, size_t image_slice_pitch, void* host_ptr, cl_int* errcode_ret ) { cl_mem __retval; struct clCreateImage3D_params params = { &__retval, context, flags, image_format, image_width, image_height, image_depth, image_row_pitch, image_slice_pitch, host_ptr, errcode_ret }; TRACE( "(%p, %s, %p, %Iu, %Iu, %Iu, %Iu, %Iu, %p, %p)\n", context, wine_dbgstr_longlong(flags), image_format, image_width, image_height, image_depth, image_row_pitch, image_slice_pitch, host_ptr, errcode_ret ); OPENCL_CALL( clCreateImage3D, &params ); return __retval; } cl_kernel WINAPI clCreateKernel( cl_program program, const char* kernel_name, cl_int* errcode_ret ) { cl_kernel __retval; struct clCreateKernel_params params = { &__retval, program, kernel_name, errcode_ret }; TRACE( "(%p, %p, %p)\n", program, kernel_name, errcode_ret ); OPENCL_CALL( clCreateKernel, &params ); return __retval; } cl_int WINAPI clCreateKernelsInProgram( cl_program program, cl_uint num_kernels, cl_kernel* kernels, cl_uint* num_kernels_ret ) { struct clCreateKernelsInProgram_params params = { program, num_kernels, kernels, num_kernels_ret }; TRACE( "(%p, %u, %p, %p)\n", program, num_kernels, kernels, num_kernels_ret ); return OPENCL_CALL( clCreateKernelsInProgram, &params ); } cl_program WINAPI clCreateProgramWithBinary( cl_context context, cl_uint num_devices, const 
cl_device_id* device_list, const size_t* lengths, const unsigned char** binaries, cl_int* binary_status, cl_int* errcode_ret ) { cl_program __retval; struct clCreateProgramWithBinary_params params = { &__retval, context, num_devices, device_list, lengths, binaries, binary_status, errcode_ret }; TRACE( "(%p, %u, %p, %p, %p, %p, %p)\n", context, num_devices, device_list, lengths, binaries, binary_status, errcode_ret ); OPENCL_CALL( clCreateProgramWithBinary, &params ); return __retval; } cl_program WINAPI clCreateProgramWithBuiltInKernels( cl_context context, cl_uint num_devices, const cl_device_id* device_list, const char* kernel_names, cl_int* errcode_ret ) { cl_program __retval; struct clCreateProgramWithBuiltInKernels_params params = { &__retval, context, num_devices, device_list, kernel_names, errcode_ret }; TRACE( "(%p, %u, %p, %p, %p)\n", context, num_devices, device_list, kernel_names, errcode_ret ); OPENCL_CALL( clCreateProgramWithBuiltInKernels, &params ); return __retval; } cl_program WINAPI clCreateProgramWithSource( cl_context context, cl_uint count, const char** strings, const size_t* lengths, cl_int* errcode_ret ) { cl_program __retval; struct clCreateProgramWithSource_params params = { &__retval, context, count, strings, lengths, errcode_ret }; TRACE( "(%p, %u, %p, %p, %p)\n", context, count, strings, lengths, errcode_ret ); OPENCL_CALL( clCreateProgramWithSource, &params ); return __retval; } cl_sampler WINAPI clCreateSampler( cl_context context, cl_bool normalized_coords, cl_addressing_mode addressing_mode, cl_filter_mode filter_mode, cl_int* errcode_ret ) { cl_sampler __retval; struct clCreateSampler_params params = { &__retval, context, normalized_coords, addressing_mode, filter_mode, errcode_ret }; TRACE( "(%p, %u, %u, %u, %p)\n", context, normalized_coords, addressing_mode, filter_mode, errcode_ret ); OPENCL_CALL( clCreateSampler, &params ); return __retval; } cl_mem WINAPI clCreateSubBuffer( cl_mem buffer, cl_mem_flags flags, 
cl_buffer_create_type buffer_create_type, const void* buffer_create_info, cl_int* errcode_ret ) { cl_mem __retval; struct clCreateSubBuffer_params params = { &__retval, buffer, flags, buffer_create_type, buffer_create_info, errcode_ret }; TRACE( "(%p, %s, %u, %p, %p)\n", buffer, wine_dbgstr_longlong(flags), buffer_create_type, buffer_create_info, errcode_ret ); OPENCL_CALL( clCreateSubBuffer, &params ); return __retval; } cl_int WINAPI clCreateSubDevices( cl_device_id in_device, const cl_device_partition_property* properties, cl_uint num_devices, cl_device_id* out_devices, cl_uint* num_devices_ret ) { struct clCreateSubDevices_params params = { in_device, properties, num_devices, out_devices, num_devices_ret }; TRACE( "(%p, %p, %u, %p, %p)\n", in_device, properties, num_devices, out_devices, num_devices_ret ); return OPENCL_CALL( clCreateSubDevices, &params ); } cl_event WINAPI clCreateUserEvent( cl_context context, cl_int* errcode_ret ) { cl_event __retval; struct clCreateUserEvent_params params = { &__retval, context, errcode_ret }; TRACE( "(%p, %p)\n", context, errcode_ret ); OPENCL_CALL( clCreateUserEvent, &params ); return __retval; } cl_int WINAPI clEnqueueBarrier( cl_command_queue command_queue ) { struct clEnqueueBarrier_params params = { command_queue }; TRACE( "(%p)\n", command_queue ); return OPENCL_CALL( clEnqueueBarrier, &params ); } cl_int WINAPI clEnqueueBarrierWithWaitList( cl_command_queue command_queue, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueBarrierWithWaitList_params params = { command_queue, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %u, %p, %p)\n", command_queue, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueBarrierWithWaitList, &params ); } cl_int WINAPI clEnqueueCopyBuffer( cl_command_queue command_queue, cl_mem src_buffer, cl_mem dst_buffer, size_t src_offset, size_t dst_offset, size_t size, cl_uint num_events_in_wait_list, 
const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueCopyBuffer_params params = { command_queue, src_buffer, dst_buffer, src_offset, dst_offset, size, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %p, %Iu, %Iu, %Iu, %u, %p, %p)\n", command_queue, src_buffer, dst_buffer, src_offset, dst_offset, size, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueCopyBuffer, &params ); } cl_int WINAPI clEnqueueCopyBufferRect( cl_command_queue command_queue, cl_mem src_buffer, cl_mem dst_buffer, const size_t* src_origin, const size_t* dst_origin, const size_t* region, size_t src_row_pitch, size_t src_slice_pitch, size_t dst_row_pitch, size_t dst_slice_pitch, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueCopyBufferRect_params params = { command_queue, src_buffer, dst_buffer, src_origin, dst_origin, region, src_row_pitch, src_slice_pitch, dst_row_pitch, dst_slice_pitch, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %p, %p, %p, %p, %Iu, %Iu, %Iu, %Iu, %u, %p, %p)\n", command_queue, src_buffer, dst_buffer, src_origin, dst_origin, region, src_row_pitch, src_slice_pitch, dst_row_pitch, dst_slice_pitch, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueCopyBufferRect, &params ); } cl_int WINAPI clEnqueueCopyBufferToImage( cl_command_queue command_queue, cl_mem src_buffer, cl_mem dst_image, size_t src_offset, const size_t* dst_origin, const size_t* region, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueCopyBufferToImage_params params = { command_queue, src_buffer, dst_image, src_offset, dst_origin, region, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %p, %Iu, %p, %p, %u, %p, %p)\n", command_queue, src_buffer, dst_image, src_offset, dst_origin, region, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( 
clEnqueueCopyBufferToImage, &params ); } cl_int WINAPI clEnqueueCopyImage( cl_command_queue command_queue, cl_mem src_image, cl_mem dst_image, const size_t* src_origin, const size_t* dst_origin, const size_t* region, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueCopyImage_params params = { command_queue, src_image, dst_image, src_origin, dst_origin, region, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %p, %p, %p, %p, %u, %p, %p)\n", command_queue, src_image, dst_image, src_origin, dst_origin, region, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueCopyImage, &params ); } cl_int WINAPI clEnqueueCopyImageToBuffer( cl_command_queue command_queue, cl_mem src_image, cl_mem dst_buffer, const size_t* src_origin, const size_t* region, size_t dst_offset, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueCopyImageToBuffer_params params = { command_queue, src_image, dst_buffer, src_origin, region, dst_offset, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %p, %p, %p, %Iu, %u, %p, %p)\n", command_queue, src_image, dst_buffer, src_origin, region, dst_offset, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueCopyImageToBuffer, &params ); } cl_int WINAPI clEnqueueFillBuffer( cl_command_queue command_queue, cl_mem buffer, const void* pattern, size_t pattern_size, size_t offset, size_t size, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueFillBuffer_params params = { command_queue, buffer, pattern, pattern_size, offset, size, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %p, %Iu, %Iu, %Iu, %u, %p, %p)\n", command_queue, buffer, pattern, pattern_size, offset, size, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueFillBuffer, &params ); } cl_int WINAPI 
clEnqueueFillImage( cl_command_queue command_queue, cl_mem image, const void* fill_color, const size_t* origin, const size_t* region, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueFillImage_params params = { command_queue, image, fill_color, origin, region, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %p, %p, %p, %u, %p, %p)\n", command_queue, image, fill_color, origin, region, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueFillImage, &params ); } void* WINAPI clEnqueueMapBuffer( cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_map, cl_map_flags map_flags, size_t offset, size_t size, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event, cl_int* errcode_ret ) { void* __retval; struct clEnqueueMapBuffer_params params = { &__retval, command_queue, buffer, blocking_map, map_flags, offset, size, num_events_in_wait_list, event_wait_list, event, errcode_ret }; TRACE( "(%p, %p, %u, %s, %Iu, %Iu, %u, %p, %p, %p)\n", command_queue, buffer, blocking_map, wine_dbgstr_longlong(map_flags), offset, size, num_events_in_wait_list, event_wait_list, event, errcode_ret ); OPENCL_CALL( clEnqueueMapBuffer, &params ); return __retval; } void* WINAPI clEnqueueMapImage( cl_command_queue command_queue, cl_mem image, cl_bool blocking_map, cl_map_flags map_flags, const size_t* origin, const size_t* region, size_t* image_row_pitch, size_t* image_slice_pitch, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event, cl_int* errcode_ret ) { void* __retval; struct clEnqueueMapImage_params params = { &__retval, command_queue, image, blocking_map, map_flags, origin, region, image_row_pitch, image_slice_pitch, num_events_in_wait_list, event_wait_list, event, errcode_ret }; TRACE( "(%p, %p, %u, %s, %p, %p, %p, %p, %u, %p, %p, %p)\n", command_queue, image, blocking_map, wine_dbgstr_longlong(map_flags), origin, region, 
image_row_pitch, image_slice_pitch, num_events_in_wait_list, event_wait_list, event, errcode_ret ); OPENCL_CALL( clEnqueueMapImage, &params ); return __retval; } cl_int WINAPI clEnqueueMarker( cl_command_queue command_queue, cl_event* event ) { struct clEnqueueMarker_params params = { command_queue, event }; TRACE( "(%p, %p)\n", command_queue, event ); return OPENCL_CALL( clEnqueueMarker, &params ); } cl_int WINAPI clEnqueueMarkerWithWaitList( cl_command_queue command_queue, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueMarkerWithWaitList_params params = { command_queue, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %u, %p, %p)\n", command_queue, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueMarkerWithWaitList, &params ); } cl_int WINAPI clEnqueueMigrateMemObjects( cl_command_queue command_queue, cl_uint num_mem_objects, const cl_mem* mem_objects, cl_mem_migration_flags flags, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueMigrateMemObjects_params params = { command_queue, num_mem_objects, mem_objects, flags, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %u, %p, %s, %u, %p, %p)\n", command_queue, num_mem_objects, mem_objects, wine_dbgstr_longlong(flags), num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueMigrateMemObjects, &params ); } cl_int WINAPI clEnqueueNDRangeKernel( cl_command_queue command_queue, cl_kernel kernel, cl_uint work_dim, const size_t* global_work_offset, const size_t* global_work_size, const size_t* local_work_size, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueNDRangeKernel_params params = { command_queue, kernel, work_dim, global_work_offset, global_work_size, local_work_size, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %u, %p, %p, %p, %u, %p, %p)\n", 
command_queue, kernel, work_dim, global_work_offset, global_work_size, local_work_size, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueNDRangeKernel, &params ); } cl_int WINAPI clEnqueueNativeKernel( cl_command_queue command_queue, void (WINAPI* user_func)(void*), void* args, size_t cb_args, cl_uint num_mem_objects, const cl_mem* mem_list, const void** args_mem_loc, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueNativeKernel_params params = { command_queue, user_func, args, cb_args, num_mem_objects, mem_list, args_mem_loc, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %p, %Iu, %u, %p, %p, %u, %p, %p)\n", command_queue, user_func, args, cb_args, num_mem_objects, mem_list, args_mem_loc, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueNativeKernel, &params ); } cl_int WINAPI clEnqueueReadBuffer( cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_read, size_t offset, size_t size, void* ptr, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueReadBuffer_params params = { command_queue, buffer, blocking_read, offset, size, ptr, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %u, %Iu, %Iu, %p, %u, %p, %p)\n", command_queue, buffer, blocking_read, offset, size, ptr, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueReadBuffer, &params ); } cl_int WINAPI clEnqueueReadBufferRect( cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_read, const size_t* buffer_origin, const size_t* host_origin, const size_t* region, size_t buffer_row_pitch, size_t buffer_slice_pitch, size_t host_row_pitch, size_t host_slice_pitch, void* ptr, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueReadBufferRect_params params = { command_queue, buffer, blocking_read, buffer_origin, 
host_origin, region, buffer_row_pitch, buffer_slice_pitch, host_row_pitch, host_slice_pitch, ptr, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %u, %p, %p, %p, %Iu, %Iu, %Iu, %Iu, %p, %u, %p, %p)\n", command_queue, buffer, blocking_read, buffer_origin, host_origin, region, buffer_row_pitch, buffer_slice_pitch, host_row_pitch, host_slice_pitch, ptr, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueReadBufferRect, &params ); } cl_int WINAPI clEnqueueReadImage( cl_command_queue command_queue, cl_mem image, cl_bool blocking_read, const size_t* origin, const size_t* region, size_t row_pitch, size_t slice_pitch, void* ptr, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueReadImage_params params = { command_queue, image, blocking_read, origin, region, row_pitch, slice_pitch, ptr, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %u, %p, %p, %Iu, %Iu, %p, %u, %p, %p)\n", command_queue, image, blocking_read, origin, region, row_pitch, slice_pitch, ptr, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueReadImage, &params ); } cl_int WINAPI clEnqueueTask( cl_command_queue command_queue, cl_kernel kernel, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueTask_params params = { command_queue, kernel, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %u, %p, %p)\n", command_queue, kernel, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueTask, &params ); } cl_int WINAPI clEnqueueUnmapMemObject( cl_command_queue command_queue, cl_mem memobj, void* mapped_ptr, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueUnmapMemObject_params params = { command_queue, memobj, mapped_ptr, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %p, %u, %p, %p)\n", command_queue, 
memobj, mapped_ptr, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueUnmapMemObject, &params ); } cl_int WINAPI clEnqueueWaitForEvents( cl_command_queue command_queue, cl_uint num_events, const cl_event* event_list ) { struct clEnqueueWaitForEvents_params params = { command_queue, num_events, event_list }; TRACE( "(%p, %u, %p)\n", command_queue, num_events, event_list ); return OPENCL_CALL( clEnqueueWaitForEvents, &params ); } cl_int WINAPI clEnqueueWriteBuffer( cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_write, size_t offset, size_t size, const void* ptr, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueWriteBuffer_params params = { command_queue, buffer, blocking_write, offset, size, ptr, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %u, %Iu, %Iu, %p, %u, %p, %p)\n", command_queue, buffer, blocking_write, offset, size, ptr, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueWriteBuffer, &params ); } cl_int WINAPI clEnqueueWriteBufferRect( cl_command_queue command_queue, cl_mem buffer, cl_bool blocking_write, const size_t* buffer_origin, const size_t* host_origin, const size_t* region, size_t buffer_row_pitch, size_t buffer_slice_pitch, size_t host_row_pitch, size_t host_slice_pitch, const void* ptr, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueWriteBufferRect_params params = { command_queue, buffer, blocking_write, buffer_origin, host_origin, region, buffer_row_pitch, buffer_slice_pitch, host_row_pitch, host_slice_pitch, ptr, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %u, %p, %p, %p, %Iu, %Iu, %Iu, %Iu, %p, %u, %p, %p)\n", command_queue, buffer, blocking_write, buffer_origin, host_origin, region, buffer_row_pitch, buffer_slice_pitch, host_row_pitch, host_slice_pitch, ptr, num_events_in_wait_list, event_wait_list, event ); return 
OPENCL_CALL( clEnqueueWriteBufferRect, &params ); } cl_int WINAPI clEnqueueWriteImage( cl_command_queue command_queue, cl_mem image, cl_bool blocking_write, const size_t* origin, const size_t* region, size_t input_row_pitch, size_t input_slice_pitch, const void* ptr, cl_uint num_events_in_wait_list, const cl_event* event_wait_list, cl_event* event ) { struct clEnqueueWriteImage_params params = { command_queue, image, blocking_write, origin, region, input_row_pitch, input_slice_pitch, ptr, num_events_in_wait_list, event_wait_list, event }; TRACE( "(%p, %p, %u, %p, %p, %Iu, %Iu, %p, %u, %p, %p)\n", command_queue, image, blocking_write, origin, region, input_row_pitch, input_slice_pitch, ptr, num_events_in_wait_list, event_wait_list, event ); return OPENCL_CALL( clEnqueueWriteImage, &params ); } cl_int WINAPI clFinish( cl_command_queue command_queue ) { struct clFinish_params params = { command_queue }; TRACE( "(%p)\n", command_queue ); return OPENCL_CALL( clFinish, &params ); } cl_int WINAPI clFlush( cl_command_queue command_queue ) { struct clFlush_params params = { command_queue }; TRACE( "(%p)\n", command_queue ); return OPENCL_CALL( clFlush, &params ); } cl_int WINAPI clGetCommandQueueInfo( cl_command_queue command_queue, cl_command_queue_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetCommandQueueInfo_params params = { command_queue, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %Iu, %p, %p)\n", command_queue, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetCommandQueueInfo, &params ); } cl_int WINAPI clGetContextInfo( cl_context context, cl_context_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetContextInfo_params params = { context, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %Iu, %p, %p)\n", context, param_name, 
param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetContextInfo, &params ); } cl_int WINAPI clGetDeviceIDs( cl_platform_id platform, cl_device_type device_type, cl_uint num_entries, cl_device_id* devices, cl_uint* num_devices ) { struct clGetDeviceIDs_params params = { platform, device_type, num_entries, devices, num_devices }; TRACE( "(%p, %s, %u, %p, %p)\n", platform, wine_dbgstr_longlong(device_type), num_entries, devices, num_devices ); return OPENCL_CALL( clGetDeviceIDs, &params ); } cl_int WINAPI clGetEventInfo( cl_event event, cl_event_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetEventInfo_params params = { event, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %Iu, %p, %p)\n", event, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetEventInfo, &params ); } cl_int WINAPI clGetEventProfilingInfo( cl_event event, cl_profiling_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetEventProfilingInfo_params params = { event, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %Iu, %p, %p)\n", event, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetEventProfilingInfo, &params ); } cl_int WINAPI clGetImageInfo( cl_mem image, cl_image_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetImageInfo_params params = { image, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %Iu, %p, %p)\n", image, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetImageInfo, &params ); } cl_int WINAPI clGetKernelArgInfo( cl_kernel kernel, cl_uint arg_index, cl_kernel_arg_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct 
clGetKernelArgInfo_params params = { kernel, arg_index, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %u, %Iu, %p, %p)\n", kernel, arg_index, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetKernelArgInfo, &params ); } cl_int WINAPI clGetKernelInfo( cl_kernel kernel, cl_kernel_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetKernelInfo_params params = { kernel, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %Iu, %p, %p)\n", kernel, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetKernelInfo, &params ); } cl_int WINAPI clGetKernelWorkGroupInfo( cl_kernel kernel, cl_device_id device, cl_kernel_work_group_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetKernelWorkGroupInfo_params params = { kernel, device, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %p, %u, %Iu, %p, %p)\n", kernel, device, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetKernelWorkGroupInfo, &params ); } cl_int WINAPI clGetMemObjectInfo( cl_mem memobj, cl_mem_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetMemObjectInfo_params params = { memobj, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %Iu, %p, %p)\n", memobj, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetMemObjectInfo, &params ); } cl_int WINAPI clGetPlatformIDs( cl_uint num_entries, cl_platform_id* platforms, cl_uint* num_platforms ) { struct clGetPlatformIDs_params params = { num_entries, platforms, num_platforms }; TRACE( "(%u, %p, %p)\n", num_entries, platforms, num_platforms ); return OPENCL_CALL( clGetPlatformIDs, &params ); } cl_int WINAPI 
clGetProgramBuildInfo( cl_program program, cl_device_id device, cl_program_build_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetProgramBuildInfo_params params = { program, device, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %p, %u, %Iu, %p, %p)\n", program, device, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetProgramBuildInfo, &params ); } cl_int WINAPI clGetProgramInfo( cl_program program, cl_program_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetProgramInfo_params params = { program, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %Iu, %p, %p)\n", program, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetProgramInfo, &params ); } cl_int WINAPI clGetSamplerInfo( cl_sampler sampler, cl_sampler_info param_name, size_t param_value_size, void* param_value, size_t* param_value_size_ret ) { struct clGetSamplerInfo_params params = { sampler, param_name, param_value_size, param_value, param_value_size_ret }; TRACE( "(%p, %u, %Iu, %p, %p)\n", sampler, param_name, param_value_size, param_value, param_value_size_ret ); return OPENCL_CALL( clGetSamplerInfo, &params ); } cl_int WINAPI clGetSupportedImageFormats( cl_context context, cl_mem_flags flags, cl_mem_object_type image_type, cl_uint num_entries, cl_image_format* image_formats, cl_uint* num_image_formats ) { struct clGetSupportedImageFormats_params params = { context, flags, image_type, num_entries, image_formats, num_image_formats }; TRACE( "(%p, %s, %u, %u, %p, %p)\n", context, wine_dbgstr_longlong(flags), image_type, num_entries, image_formats, num_image_formats ); return OPENCL_CALL( clGetSupportedImageFormats, &params ); } cl_program WINAPI clLinkProgram( cl_context context, cl_uint num_devices, const cl_device_id* device_list, const char* 
options, cl_uint num_input_programs, const cl_program* input_programs, void (WINAPI* pfn_notify)(cl_program program, void* user_data), void* user_data, cl_int* errcode_ret ) { cl_program __retval; struct clLinkProgram_params params = { &__retval, context, num_devices, device_list, options, num_input_programs, input_programs, pfn_notify, user_data, errcode_ret }; TRACE( "(%p, %u, %p, %p, %u, %p, %p, %p, %p)\n", context, num_devices, device_list, options, num_input_programs, input_programs, pfn_notify, user_data, errcode_ret ); OPENCL_CALL( clLinkProgram, &params ); return __retval; } cl_int WINAPI clReleaseCommandQueue( cl_command_queue command_queue ) { struct clReleaseCommandQueue_params params = { command_queue }; TRACE( "(%p)\n", command_queue ); return OPENCL_CALL( clReleaseCommandQueue, &params ); } cl_int WINAPI clReleaseContext( cl_context context ) { struct clReleaseContext_params params = { context }; TRACE( "(%p)\n", context ); return OPENCL_CALL( clReleaseContext, &params ); } cl_int WINAPI clReleaseDevice( cl_device_id device ) { struct clReleaseDevice_params params = { device }; TRACE( "(%p)\n", device ); return OPENCL_CALL( clReleaseDevice, &params ); } cl_int WINAPI clReleaseEvent( cl_event event ) { struct clReleaseEvent_params params = { event }; TRACE( "(%p)\n", event ); return OPENCL_CALL( clReleaseEvent, &params ); } cl_int WINAPI clReleaseKernel( cl_kernel kernel ) { struct clReleaseKernel_params params = { kernel }; TRACE( "(%p)\n", kernel ); return OPENCL_CALL( clReleaseKernel, &params ); } cl_int WINAPI clReleaseMemObject( cl_mem memobj ) { struct clReleaseMemObject_params params = { memobj }; TRACE( "(%p)\n", memobj ); return OPENCL_CALL( clReleaseMemObject, &params ); } cl_int WINAPI clReleaseProgram( cl_program program ) { struct clReleaseProgram_params params = { program }; TRACE( "(%p)\n", program ); return OPENCL_CALL( clReleaseProgram, &params ); } cl_int WINAPI clReleaseSampler( cl_sampler sampler ) { struct clReleaseSampler_params 
params = { sampler }; TRACE( "(%p)\n", sampler ); return OPENCL_CALL( clReleaseSampler, &params ); } cl_int WINAPI clRetainCommandQueue( cl_command_queue command_queue ) { struct clRetainCommandQueue_params params = { command_queue }; TRACE( "(%p)\n", command_queue ); return OPENCL_CALL( clRetainCommandQueue, &params ); } cl_int WINAPI clRetainContext( cl_context context ) { struct clRetainContext_params params = { context }; TRACE( "(%p)\n", context ); return OPENCL_CALL( clRetainContext, &params ); } cl_int WINAPI clRetainDevice( cl_device_id device ) { struct clRetainDevice_params params = { device }; TRACE( "(%p)\n", device ); return OPENCL_CALL( clRetainDevice, &params ); } cl_int WINAPI clRetainEvent( cl_event event ) { struct clRetainEvent_params params = { event }; TRACE( "(%p)\n", event ); return OPENCL_CALL( clRetainEvent, &params ); } cl_int WINAPI clRetainKernel( cl_kernel kernel ) { struct clRetainKernel_params params = { kernel }; TRACE( "(%p)\n", kernel ); return OPENCL_CALL( clRetainKernel, &params ); } cl_int WINAPI clRetainMemObject( cl_mem memobj ) { struct clRetainMemObject_params params = { memobj }; TRACE( "(%p)\n", memobj ); return OPENCL_CALL( clRetainMemObject, &params ); } cl_int WINAPI clRetainProgram( cl_program program ) { struct clRetainProgram_params params = { program }; TRACE( "(%p)\n", program ); return OPENCL_CALL( clRetainProgram, &params ); } cl_int WINAPI clRetainSampler( cl_sampler sampler ) { struct clRetainSampler_params params = { sampler }; TRACE( "(%p)\n", sampler ); return OPENCL_CALL( clRetainSampler, &params ); } cl_int WINAPI clSetEventCallback( cl_event event, cl_int command_exec_callback_type, void (WINAPI* pfn_notify)(cl_event event, cl_int event_command_status, void *user_data), void* user_data ) { struct clSetEventCallback_params params = { event, command_exec_callback_type, pfn_notify, user_data }; TRACE( "(%p, %d, %p, %p)\n", event, command_exec_callback_type, pfn_notify, user_data ); return OPENCL_CALL( 
clSetEventCallback, &params ); } cl_int WINAPI clSetKernelArg( cl_kernel kernel, cl_uint arg_index, size_t arg_size, const void* arg_value ) { struct clSetKernelArg_params params = { kernel, arg_index, arg_size, arg_value }; TRACE( "(%p, %u, %Iu, %p)\n", kernel, arg_index, arg_size, arg_value ); return OPENCL_CALL( clSetKernelArg, &params ); } cl_int WINAPI clSetMemObjectDestructorCallback( cl_mem memobj, void (WINAPI* pfn_notify)(cl_mem memobj, void* user_data), void* user_data ) { struct clSetMemObjectDestructorCallback_params params = { memobj, pfn_notify, user_data }; TRACE( "(%p, %p, %p)\n", memobj, pfn_notify, user_data ); return OPENCL_CALL( clSetMemObjectDestructorCallback, &params ); } cl_int WINAPI clSetUserEventStatus( cl_event event, cl_int execution_status ) { struct clSetUserEventStatus_params params = { event, execution_status }; TRACE( "(%p, %d)\n", event, execution_status ); return OPENCL_CALL( clSetUserEventStatus, &params ); } cl_int WINAPI clUnloadCompiler( void ) { struct clUnloadCompiler_params params = {}; TRACE( "()\n" ); return OPENCL_CALL( clUnloadCompiler, &params ); } cl_int WINAPI clUnloadPlatformCompiler( cl_platform_id platform ) { struct clUnloadPlatformCompiler_params params = { platform }; TRACE( "(%p)\n", platform ); return OPENCL_CALL( clUnloadPlatformCompiler, &params ); } cl_int WINAPI clWaitForEvents( cl_uint num_events, const cl_event* event_list ) { struct clWaitForEvents_params params = { num_events, event_list }; TRACE( "(%u, %p)\n", num_events, event_list ); return OPENCL_CALL( clWaitForEvents, &params ); } BOOL extension_is_supported( const char *name, size_t len ) { unsigned int i; static const char *const unsupported[] = { "cl_apple_contextloggingfunctions", "cl_apple_setmemobjectdestructor", "cl_arm_import_memory", "cl_arm_shared_virtual_memory", "cl_ext_device_fission", "cl_ext_migrate_memobject", "cl_img_generate_mipmap", "cl_img_use_gralloc_ptr", "cl_intel_accelerator", "cl_intel_create_buffer_with_properties", 
"cl_intel_d3d11_nv12_media_sharing", "cl_intel_dx9_media_sharing", "cl_intel_unified_shared_memory", "cl_intel_va_api_media_sharing", "cl_khr_create_command_queue", "cl_khr_d3d10_sharing", "cl_khr_d3d11_sharing", "cl_khr_dx9_media_sharing", "cl_khr_egl_event", "cl_khr_egl_image", "cl_khr_gl_event", "cl_khr_gl_sharing", "cl_khr_icd", "cl_khr_il_program", "cl_khr_subgroups", "cl_khr_terminate_context", "cl_loader_layers", "cl_nv_d3d10_sharing", "cl_nv_d3d11_sharing", "cl_nv_d3d9_sharing", "cl_qcom_ext_host_ptr", }; for (i = 0; i < ARRAY_SIZE(unsupported); ++i) { if (!strncasecmp( name, unsupported[i], len )) return FALSE; } return TRUE; }
/** * Created by android_ls on 16/11/11. */ public class ImageBindingAdapter { @BindingAdapter({"url"}) public static void loadImage(SimpleDraweeView simpleDraweeView, String url) { ImageLoader.loadImage(simpleDraweeView, url); } @BindingAdapter({"url_small"}) public static void loadImageSmall(SimpleDraweeView simpleDraweeView, String url) { ImageLoader.loadImageSmall(simpleDraweeView, url); } @BindingAdapter({"url", "iconWidth", "iconHeight"}) public static void loadTextDrawable(final TextView view, String url, final int iconWidth, final int iconHeight) { ImageLoader.loadImage(view.getContext(), url, new IResult<Bitmap>() { @Override public void onResult(Bitmap bitmap) { Drawable drawable = new BitmapDrawable(view.getContext().getResources(), bitmap); final int width = DensityUtil.dipToPixels(view.getContext(), iconWidth); final int height = DensityUtil.dipToPixels(view.getContext(), iconHeight); drawable.setBounds(0, 0, width, height); view.setCompoundDrawables(drawable, null, null, null); } }); } @BindingAdapter({"url", "direction", "iconWidth", "iconHeight"}) public static void loadTextDrawable(final TextView view, String url, final int direction, final int iconWidth, final int iconHeight) { ImageLoader.loadImage(view.getContext(), url, new IResult<Bitmap>() { @Override public void onResult(Bitmap bitmap) { Drawable drawable = new BitmapDrawable(view.getContext().getResources(), bitmap); final int width = DensityUtil.dipToPixels(view.getContext(), iconWidth); final int height = DensityUtil.dipToPixels(view.getContext(), iconHeight); drawable.setBounds(0, 0, width, height); switch (direction) { case 0: view.setCompoundDrawables(drawable, null, null, null); break; case 1: view.setCompoundDrawables(null, drawable, null, null); break; case 2: view.setCompoundDrawables(null, null, drawable, null); break; case 3: view.setCompoundDrawables(null, null, null, drawable); break; } } }); } }
// CoNLL document format reader for dependency annotated corpora.
// The expected format is described e.g. at http://ilk.uvt.nl/conll/#dataformat
//
// Data should adhere to the following rules:
// - Data files contain sentences separated by a blank line.
// - A sentence consists of one or more tokens, each one starting on a new line.
// - A token consists of ten fields described in the table below.
// - Fields are separated by a single tab character.
// - All data files will contain these ten fields, although only the ID
//   column is required to contain non-dummy (i.e. non-underscore) values.
// Data files should be UTF-8 encoded (Unicode).
//
// Fields:
// 1  ID:      Token counter, starting at 1 for each new sentence and increasing
//             by 1 for every new token.
// 2  FORM:    Word form or punctuation symbol.
// 3  LEMMA:   Lemma or stem.
// 4  CPOSTAG: Coarse-grained part-of-speech tag or category.
// 5  POSTAG:  Fine-grained part-of-speech tag. Note that the same POS tag
//             cannot appear with multiple coarse-grained POS tags.
// 6  FEATS:   Unordered set of syntactic and/or morphological features.
// 7  HEAD:    Head of the current token, which is either a value of ID or '0'.
// 8  DEPREL:  Dependency relation to the HEAD.
// 9  PHEAD:   Projective head of current token.
// 10 PDEPREL: Dependency relation to the PHEAD.
//
// This CoNLL reader is compatible with the CoNLL-U format described at
// http://universaldependencies.org/format.html
// Note that this reader skips CoNLL-U multiword tokens and empty nodes.
//
// Note on reconstructing the raw text of a sentence: the raw text is
// constructed by concatenating all words (field 2) with an intervening space
// between consecutive words. If the last field of a token is "SpaceAfter=No",
// there is no space between the current word and the next one.
class CoNLLSyntaxFormat : public DocumentFormat { public: CoNLLSyntaxFormat() {} void Setup(TaskContext *context) override { join_category_to_pos_ = context->GetBoolParameter("join_category_to_pos"); add_pos_as_attribute_ = context->GetBoolParameter("add_pos_as_attribute"); serialize_morph_to_pos_ = context->GetBoolParameter("serialize_morph_to_pos"); } bool ReadRecord(tensorflow::io::BufferedInputStream *buffer, string *record) override { return DoubleNewlineReadRecord(buffer, record); } void ConvertFromString(const string &key, const string &value, std::vector<Sentence *> *sentences) override { Sentence *sentence = new Sentence(); string text; bool add_space_to_text = true; std::vector<string> lines = utils::Split(value, '\n'); std::vector<string> fields; int expected_id = 1; for (size_t i = 0; i < lines.size(); ++i) { fields.clear(); fields = utils::Split(lines[i], '\t'); if (fields.empty()) continue; if (fields[0][0] == '#') continue; if (RE2::FullMatch(fields[0], "[0-9]+-[0-9]+")) continue; if (RE2::FullMatch(fields[0], "[0-9]+\\.[0-9]+")) continue; for (size_t j = 2; j < fields.size(); ++j) { if (fields[j].length() == 1 && fields[j][0] == '_') fields[j].clear(); } CHECK_GE(fields.size(), 8) << "Every line has to have at least 8 tab separated fields."; const int id = utils::ParseUsing<int>(fields[0], 0, utils::ParseInt32); CHECK_EQ(expected_id++, id) << "Token ids start at 1 for each new sentence and increase by 1 " << "on each new token. 
Sentences are separated by an empty line."; const string &word = fields[1]; const string &cpostag = fields[3]; const string &tag = fields[4]; const string &attributes = fields[5]; const int head = utils::ParseUsing<int>(fields[6], 0, utils::ParseInt32); const string &label = fields[7]; if (!text.empty() && add_space_to_text) text.append(" "); const int start = text.size(); const int end = start + word.size() - 1; text.append(word); std::vector<string> sub_fields = utils::Split(fields[9], '|'); auto no_space = [](const string &str) { return str == "SpaceAfter=No"; }; add_space_to_text = !std::any_of(sub_fields.begin(), sub_fields.end(), no_space); Token *token = sentence->add_token(); token->set_word(word); token->set_start(start); token->set_end(end); if (head > 0) token->set_head(head - 1); if (!tag.empty()) token->set_tag(tag); if (!cpostag.empty()) token->set_category(cpostag); if (!label.empty()) token->set_label(label); if (!attributes.empty()) AddMorphAttributes(attributes, token); if (join_category_to_pos_) JoinCategoryToPos(token); if (add_pos_as_attribute_) AddPosAsAttribute(token); if (serialize_morph_to_pos_) SerializeMorphToPos(token); } if (sentence->token_size() > 0) { sentence->set_docid(key); sentence->set_text(text); sentences->push_back(sentence); } else { delete sentence; } } void ConvertToString(const Sentence &sentence, string *key, string *value) override { *key = sentence.docid(); std::vector<string> lines; for (int i = 0; i < sentence.token_size(); ++i) { Token token = sentence.token(i); if (join_category_to_pos_) SplitCategoryFromPos(&token); if (add_pos_as_attribute_) RemovePosFromAttributes(&token); std::vector<string> fields(10); fields[0] = tensorflow::strings::Printf("%d", i + 1); fields[1] = UnderscoreIfEmpty(token.word()); fields[2] = "_"; fields[3] = UnderscoreIfEmpty(token.category()); fields[4] = UnderscoreIfEmpty(token.tag()); fields[5] = GetMorphAttributes(token); fields[6] = tensorflow::strings::Printf("%d", token.head() + 1); 
fields[7] = UnderscoreIfEmpty(token.label()); fields[8] = "_"; fields[9] = "_"; lines.push_back(utils::Join(fields, "\t")); } *value = tensorflow::strings::StrCat(utils::Join(lines, "\n"), "\n\n"); } private: string UnderscoreIfEmpty(const string &field) { return field.empty() ? "_" : field; } void AddMorphAttributes(const string &attributes, Token *token) { TokenMorphology *morph = token->MutableExtension(TokenMorphology::morphology); std::vector<string> att_vals = utils::Split(attributes, '|'); for (int i = 0; i < att_vals.size(); ++i) { std::vector<string> att_val = utils::SplitOne(att_vals[i], '='); const std::pair<string, string> name_value = att_val.size() == 2 ? std::make_pair(att_val[0], att_val[1]) : std::make_pair(att_val[0], "on"); if (name_value.second.empty()) { LOG(WARNING) << "Invalid attributes string: " << attributes << " for token: " << token->ShortDebugString(); continue; } if (!name_value.first.empty()) { TokenMorphology::Attribute *attribute = morph->add_attribute(); attribute->set_name(name_value.first); attribute->set_value(name_value.second); } } } string GetMorphAttributes(const Token &token) { const TokenMorphology &morph = token.GetExtension(TokenMorphology::morphology); if (morph.attribute_size() == 0) return "_"; string attributes; for (const TokenMorphology::Attribute &attribute : morph.attribute()) { if (!attributes.empty()) tensorflow::strings::StrAppend(&attributes, "|"); tensorflow::strings::StrAppend(&attributes, attribute.name()); if (attribute.value() != "on") { tensorflow::strings::StrAppend(&attributes, "=", attribute.value()); } } return attributes; } void JoinCategoryToPos(Token *token) { token->set_tag( tensorflow::strings::StrCat(token->category(), "++", token->tag())); token->clear_category(); } void SplitCategoryFromPos(Token *token) { const string &tag = token->tag(); const size_t pos = tag.find("++"); if (pos != string::npos) { token->set_category(tag.substr(0, pos)); token->set_tag(tag.substr(pos + 2)); } } void 
AddPosAsAttribute(Token *token) { if (!token->tag().empty()) { TokenMorphology *morph = token->MutableExtension(TokenMorphology::morphology); TokenMorphology::Attribute *attribute = morph->add_attribute(); attribute->set_name("fPOS"); attribute->set_value(token->tag()); } } void RemovePosFromAttributes(Token *token) { TokenMorphology *morph = token->MutableExtension(TokenMorphology::morphology); if (morph->attribute_size() > 0 && morph->attribute().rbegin()->name() == "fPOS") { morph->mutable_attribute()->RemoveLast(); } } void SerializeMorphToPos(Token *token) { const TokenMorphology &morph = token->GetExtension(TokenMorphology::morphology); TextFormat::Printer printer; printer.SetSingleLineMode(true); string morph_str; printer.PrintToString(morph, &morph_str); token->set_tag(morph_str); } bool join_category_to_pos_ = false; bool add_pos_as_attribute_ = false; bool serialize_morph_to_pos_ = false; TF_DISALLOW_COPY_AND_ASSIGN(CoNLLSyntaxFormat); }
<gh_stars>100-1000 using namespace System; using namespace System::Runtime::InteropServices; // <Snippet1> [BestFitMapping(false, ThrowOnUnmappableChar = true)] interface class IMyInterface1 { //Insert code here. }; // </Snippet1> public ref class InteropBFMA : IMyInterface1 { }; int main() { InteropBFMA^ bfma = gcnew InteropBFMA(); Console::WriteLine(bfma->GetType()->GetInterfaces()[0]->Name); }
<reponame>AIoTES/SIL-Bridge-SensiNact /** * /** * INTER-IoT. Interoperability of IoT Platforms. * INTER-IoT is a R&D project which has received funding from the European * Union's Horizon 2020 research and innovation programme under grant * agreement No 687283. * <p> * Copyright (C) 2017-2018, by : - Università degli Studi della Calabria * <p> * <p> * For more information, contact: - @author * <a href="mailto:<EMAIL>"><NAME></a> * - Project coordinator: <a href="mailto:<EMAIL>"></a> * <p> * <p> * This code is licensed under the EPL license, available at the root * application directory. */ package eu.interiot.intermw.bridge.sensinact.ontology; import eu.interiot.intermw.bridge.sensinact.wrapper.SNAResource; import org.apache.jena.ontology.*; import org.apache.jena.rdf.model.Model; import org.apache.jena.rdf.model.ModelFactory; import org.apache.jena.rdf.model.Property; import org.apache.jena.rdf.model.Statement; import java.io.*; import java.time.DateTimeException; import java.time.Instant; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeParseException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.TimeZone; import org.apache.jena.riot.Lang; import org.apache.jena.riot.RDFDataMgr; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This classes loads Sensinact Ontology and create individuals on that ontology * according to information sent to updateOntology */ public class SNAOntologyAggregator { private static final Logger LOG = LoggerFactory.getLogger(SNAOntologyAggregator.class); private static final DateTimeFormatter DATE_TIMESTAMP_FORMATTER = DateTimeFormatter.ISO_OFFSET_DATE_TIME; private static final OntModel EMPTY_MODEL = ModelFactory.createOntologyModel(); public enum JenaWriterType { rdf("RDF/XML"), n3("N3"), rdfjson("RDF/JSON"), 
ttl("TTL"), jsonld("JSON-LD"),; JenaWriterType(String name) { this.name = name; } String name; } private OntModel model; private static final String SNA_ONTOLOGY_PREFIX = "http://sensinact.com#"; private static final String SNA_ONTOLOGY_FILE_PATH = "/ontology/SNAOntology.owl"; private static final String SNA_ONTOLOGY_FILE_PATH_PATTERN = "/ontology/%s/SNAOntology.owl"; private static final Properties SNA2AIOTES_KPI = new Properties(); static { try { SNA2AIOTES_KPI.load(SNAOntologyAggregator.class.getResourceAsStream("sna2aiotes-kpi.properties")); } catch (IOException ex) { LOG.error("failed to load sensiNact to AIOTES mapping sna2aiotes-kpi.properties"); } }; public SNAOntologyAggregator(Model rdfmodel) { model = ModelFactory.createOntologyModel(); model.add(rdfmodel); } public SNAOntologyAggregator(String filepath, String format) throws FileNotFoundException { model = ModelFactory.createOntologyModel(); InputStream is = new FileInputStream(filepath); model.read(is, null, format); } public SNAOntologyAggregator(JenaWriterType lang) { model = ModelFactory.createOntologyModel(); InputStream is = this.getClass().getResourceAsStream(SNA_ONTOLOGY_FILE_PATH); model.read(is, null, lang.name); } public SNAOntologyAggregator() { this(JenaWriterType.rdf); } public OntClass getOntologyClass(String clazz) { return model.getOntClass(SNA_ONTOLOGY_PREFIX + clazz); } public Property getObjectProperty(String property) { return model.getProperty(SNA_ONTOLOGY_PREFIX + property); } public DatatypeProperty getDataProperty(String property) { return model.getDatatypeProperty(SNA_ONTOLOGY_PREFIX + property); } public Model transformOntology(String provider, String service, String resource, String type, String value, String timestamp) { final OntModel isolatedModel = ModelFactory.createOntologyModel(); String ontologyFilePath; InputStream is; ontologyFilePath = SNA_ONTOLOGY_FILE_PATH; is = this.getClass().getResourceAsStream(ontologyFilePath); RDFDataMgr.read(isolatedModel, is, 
Lang.RDFXML); // ontologyFilePath = String.format(SNA_ONTOLOGY_FILE_PATH_PATTERN, type); // is = this.getClass().getResourceAsStream(ontologyFilePath); // if (is != null) { // RDFDataMgr..read(isolatedModel, is, Lang.RDFXML); // } updateOntologyWith(provider, service, resource, type, value, timestamp, isolatedModel); this.model = isolatedModel; final Model minimalModel = isolatedModel.difference(EMPTY_MODEL); return minimalModel; } public Model transformOntology(String provider, String service, String resource, String type, String value, String timestamp, Map<String, String> metadata) { final OntModel isolatedModel = ModelFactory.createOntologyModel(); String ontologyFilePath; InputStream is; ontologyFilePath = SNA_ONTOLOGY_FILE_PATH; is = this.getClass().getResourceAsStream(ontologyFilePath); RDFDataMgr.read(isolatedModel, is, Lang.RDFXML); // ontologyFilePath = String.format(SNA_ONTOLOGY_FILE_PATH_PATTERN, type); // is = this.getClass().getResourceAsStream(ontologyFilePath); // if (is != null) { // RDFDataMgr.read(isolatedModel, is, Lang.RDFXML); // } updateOntologyWith(provider, service, resource, type, value, timestamp, metadata, isolatedModel); this.model = isolatedModel; final Model minimalModel = isolatedModel.difference(EMPTY_MODEL); return minimalModel; } public Model createModel(String provider, String service, String resource, String type, String value, String timestamp) { final Model isolatedModel = transformOntology(provider, service, resource, type, value, timestamp); return isolatedModel; } public Model createModel(String provider, String service, String resource, String type, String value, String timestamp, Map<String, String> metadata) { final Model isolatedModel = transformOntology(provider, service, resource, type, value, timestamp, metadata); return isolatedModel; } public void updateOntologyWith(String provider, String service, String resource, String type, String value, String timestamp) { updateOntologyWith(provider, service, resource, type, 
value, timestamp, getOntModel()); } public void updateOntologyWith(String provider, String service, String resource, String type, String value, String timestamp, Map<String, String> metadata) { updateOntologyWith(provider, service, resource, type, value, timestamp, metadata, getOntModel()); } public void updateOntologyWith(SNAResource snaResource) { final long timestamp = System.currentTimeMillis(); updateOntologyWith( snaResource.getProvider(), snaResource.getService(), snaResource.getResource(), snaResource.getType(), snaResource.getValue(), String.valueOf(timestamp), snaResource.getMetadata() ); } /** * Create individuals necessary to receive the information passed to this * method and link them in the ontology instance * * @param provider device name in sensinact * @param service group of particular information in sensinact * @param resource the leaf node that contains the metadata of the * information * @param value the current data stored by the 'resource' * @param model the ontology model */ public void updateOntologyWith(String provider, String service, String resource, String type, String value, String timestamp, OntModel model) { updateOntologyWith( provider, service, resource, type, value, timestamp, Collections.EMPTY_MAP, model); } /** * Create individuals necessary to receive the information passed to this * method and link them in the ontology instance * * @param provider device name in sensinact * @param service group of particular information in sensinact * @param resource the leaf node that contains the metadata of the * information * @param value the current data stored by the 'resource' * @param model the ontology model */ public void updateOntologyWith(String provider, String service, String resource, String type, String value, String timestamp, Map<String, String> metadata, OntModel model) { final SNAAHAOntologyType snaOntologyType = SNAAHAOntologyType.getSNAAHAOntologyType(type, service, resource); final String ontologyClassName = 
snaOntologyType.getOntologyClassName(); OntClass ontologyClass = getOntologyClass(ontologyClassName); Individual individualResource = model.createIndividual(ontologyClass); Property providerDataProperty = getObjectProperty("provider"); Property serviceDataProperty = getObjectProperty("service"); Property nameDataProperty = getObjectProperty("name"); Property typeDataProperty = getObjectProperty("type"); Property timestampDataProperty = getObjectProperty("timestamp"); Property dateTimestampDataProperty = getObjectProperty("dateTimestamp"); if (value != null) { String correctedValue = snaOntologyType.computeValue(value); String correctedName = snaOntologyType.toAIOTESName(resource); Property valueProperty = getObjectProperty("value"); individualResource.addLiteral(providerDataProperty, provider); individualResource.addLiteral(serviceDataProperty, service); individualResource.addLiteral(nameDataProperty, correctedName); individualResource.addLiteral(typeDataProperty, type); individualResource.addLiteral(valueProperty, correctedValue); individualResource.addLiteral(timestampDataProperty, timestamp); String dateTimestamp = toLocaDateTime(timestamp); individualResource.addLiteral(dateTimestampDataProperty, dateTimestamp); Property metadataProperty; for (Entry<String, String> entry : metadata.entrySet()) { metadataProperty = getObjectProperty(entry.getKey()); individualResource.addLiteral(metadataProperty, entry.getValue()); } } } private static String toLocaDateTime(String timestamp$) { String date = timestamp$; ZonedDateTime dateTime = null; try { final long timestamp = Long.parseLong(timestamp$); dateTime = ZonedDateTime.ofInstant( Instant.ofEpochMilli(timestamp), TimeZone.getDefault().toZoneId() ); date = dateTime.format(DATE_TIMESTAMP_FORMATTER); } catch (NumberFormatException e) { //nothing to do } catch (DateTimeException e) { date = e.getMessage(); // if (localDateTime != null) { // date = localDateTime.toString(); // } } return date; } private static long 
toTimeStamp(String dateTime$) { long timeStamp = System.currentTimeMillis(); try { ZonedDateTime dateTime = ZonedDateTime.parse(dateTime$); timeStamp = dateTime.toEpochSecond(); } catch (DateTimeParseException e) { } return timeStamp; } private static interface ValueComputer { String computeValue(String value); } private static interface NameMapper { String toAIOTESName(String snaName); String toSNAName(String aiotesName); } private static enum SNAAHAOntologyType implements ValueComputer, NameMapper { DAY_LAYING("state", "BedOccupancyResource"), KPI("KPIResource") { @Override public String toAIOTESName(final String snaKpiName) { String aiotesKpiName = SNA2AIOTES_KPI.getProperty(snaKpiName, snaKpiName); if (aiotesKpiName == null) { aiotesKpiName = snaKpiName; } return aiotesKpiName; } @Override public String toSNAName(final String aiotesKpiName) { String snaName = aiotesKpiName; String propertyValue; for (Entry entry : SNA2AIOTES_KPI.entrySet()) { propertyValue = entry.getValue().toString(); if (propertyValue.equals(aiotesKpiName)) { snaName = entry.getKey().toString(); break; } }; return snaName; } }, NIGHT_RISING("state", "BedOccupancyResource") { @Override public String computeValue(final String value) { boolean isOutOfBed = Boolean.valueOf(value); boolean isInBed = !isOutOfBed; return String.valueOf(isInBed); } }, PEDOMETER_MONITOR("last-day-step-counter", "StepNumberResource"), TEMPERATURE_ALERT("last-temperature", "TemperatureMonitorResource"), WEIGHT_MONITOR("last-weight", "WeightMonitorResource"), DEFAULT; private static final String DEFAULT_ONTOLOGY_CLASS_NAME = "Resource"; private static final String DEFAULT_SERVICE = "monitor"; private static final String ANY_RESOURCE = "*"; private static final String DEFAULT_RESOURCE = ANY_RESOURCE; private final String ontologyClassName; private final String service; private final String resource; SNAAHAOntologyType() { this.ontologyClassName = DEFAULT_ONTOLOGY_CLASS_NAME; this.service = DEFAULT_SERVICE; this.resource 
= DEFAULT_RESOURCE; } SNAAHAOntologyType(final String ontologyClassName) { this.ontologyClassName = ontologyClassName; this.service = DEFAULT_SERVICE; this.resource = DEFAULT_RESOURCE; } SNAAHAOntologyType(final String resource, final String ontologyClassName) { this.ontologyClassName = ontologyClassName; this.service = DEFAULT_SERVICE; this.resource = resource; } private String getOntologyClassName() { return ontologyClassName; } private boolean isForService(final String service) { boolean isForService = this.service.equals(service); return isForService; } private boolean isForResource(final String resource) { boolean isForResource = this.resource.equals(ANY_RESOURCE) || this.resource.equals(resource); return isForResource; } @Override public String computeValue(String value) { return value; } @Override public String toAIOTESName(String snaName) { return snaName; } @Override public String toSNAName(String aiotesName) { return aiotesName; } private static SNAAHAOntologyType getSNAAHAOntologyType(final String ahaType, final String serviceId, final String resourceId) { SNAAHAOntologyType ontologyType; try { ontologyType = SNAAHAOntologyType.valueOf(ahaType); if (!ontologyType.isForService(serviceId) || !ontologyType.isForResource(resourceId)) { ontologyType = DEFAULT; } } catch (Exception e) { ontologyType = DEFAULT; } return ontologyType; } } public OntModel getOntModel() { return this.model; } public void printOntology(JenaWriterType type) { getOntModel().write(System.out, type.name); } public String getStringOntology() { ByteArrayOutputStream bos = new ByteArrayOutputStream(); getOntModel().write(bos, "TURTLE"); try { return new String(bos.toByteArray(), "utf-8"); } catch (UnsupportedEncodingException e) { LOG.error("unsupported encoding {}", e.getMessage()); LOG.debug("unsupported encoding", e); return null; } } public void saveOntology(String filepath, JenaWriterType writerType) throws IOException { new File(filepath).createNewFile(); FileOutputStream fos = new 
FileOutputStream(filepath); ByteArrayOutputStream bos = new ByteArrayOutputStream(); getOntModel().write(fos, writerType.name); } public List<SNAResource> getResourceList() { return getResourceList(model); } public List<SNAResource> getResourceList(final OntModel model) { final List<SNAResource> resources = new ArrayList<>(); final Property providerDataProperty = getObjectProperty("provider"); final Property serviceDataProperty = getObjectProperty("service"); final Property nameDataProperty = getObjectProperty("name"); final Property typeDataProperty = getObjectProperty("type"); final Property timestampDataProperty = getObjectProperty("timestamp"); final Property dateTimeDataProperty = getObjectProperty("dateTimestamp"); final Property valueDataProperty = getObjectProperty("value"); for (Iterator<Individual> it = model.listIndividuals(); it.hasNext();) { Individual resource = it.next(); try { final String providerName = resource.getProperty(providerDataProperty).getString(); final String serviceName = resource.getProperty(serviceDataProperty).getString(); final String resourceName = resource.getProperty(nameDataProperty).getString(); final String resourceType = resource.getProperty(typeDataProperty).getString(); final String resouceValue = resource.getProperty(valueDataProperty).getString(); final Statement timeStampStatement = resource.getProperty(timestampDataProperty); final SNAResource snaResource = new SNAResource(providerName, serviceName, resourceName, resourceType, resouceValue); if (timeStampStatement != null) { final String timeStampValue = timeStampStatement.getString(); snaResource.putMetadata("timestamp", timeStampValue); } else { final Statement dateTimeStatement = resource.getProperty(dateTimeDataProperty); if (dateTimeStatement != null) { final String dateTimeValue = dateTimeStatement.getString(); final String timeStampValue = String.valueOf(toTimeStamp(dateTimeValue)); snaResource.putMetadata("timestamp", timeStampValue); } } if 
(resourceType.equals("KPI")) { final String snaKpiName = SNAAHAOntologyType.KPI.toSNAName(resourceName); snaResource.setResource(snaKpiName); final Property targetDataProperty = getObjectProperty("target"); final String resourceTarget = resource.getProperty(targetDataProperty).getString(); snaResource.putMetadata("target", resourceTarget); } LOG.info("found update for resource {}", snaResource); resources.add(snaResource); } catch (Exception e) { LOG.error("not a complete resource description for individual {}: {}", resource, e.getMessage()); } } return resources; } }
#!/usr/bin/python
"""Echo stdin lines whose first whitespace-separated token appears in a word list.

Usage: script.py WORDLIST < input
WORDLIST contains one word per line; surrounding whitespace is ignored.
"""
import sys

# Build the filter vocabulary.  Iterating the file object directly streams it
# instead of materializing every line with readlines().
words = set()
with open(sys.argv[1], 'r') as fp:
    for line in fp:
        words.add(line.strip())

# Stream stdin the same way.  The `fields` guard skips blank lines, which
# previously raised IndexError on fields[0].  print(...) with a single
# argument behaves identically under Python 2 and 3.
for line in sys.stdin:
    fields = line.strip().split()
    if fields and fields[0] in words:
        print(line.strip())
<filename>src/PRPPaddleWheel.h // // PRPPaddleWheel.h // primaryp // // Created by stefan on 5/15/14. // // #ifndef __primaryp__PRPPaddleWheel__ #define __primaryp__PRPPaddleWheel__ #include "PRPPaddle.h" NS_PRP_BEGIN static const unsigned int kPaddleCount = 3; struct PaddlePauseAction { Paddle_t paddle; long timer; }; struct PaddleQuery { Paddle_t paddle; bool alive; }; class PaddleWheel : public cocos2d::Node { public: typedef cocos2d::Node super; /** Creates a paddle wheel with a given size */ static PaddleWheel* createWithSize(const cocos2d::Size size); PaddleWheel(); virtual bool initWithSize(const cocos2d::Size size); /** Applies a given rotation to the wheel */ void applyRotation(const float r); /** * Determines whether the point touches a paddle, a paddle's center, or nothing * @return the Paddle_t that has been touched */ Paddle_t doesPointTouchPaddle(const cocos2d::Point& p); /** Determines if the particle is beyond reach */ bool particleIsBeyondPaddleReach(const cocos2d::Point& p); /** * @brief Updates the level of the given paddle * * @param paddle The paddle * @param levelUpdate The type of update */ void updatePaddleLevel(const Paddle_t& paddle, const PaddleSprite::LevelUpdates_t levelUpdate); /** Shrinks the paddle because the player made a mistake */ void shrinkPaddleBecauseOfMistake(const Paddle_t paddle); /** Kills a given paddle */ void killPaddle(const Paddle_t& paddle); /** * Returns the outer radius of the paddle wheel ring * @note: the value represents a fraction of the contentWidth */ GLfloat getRadiusOuter() {return _radiusOuter;}; /** * Returns the inner radius of the paddle wheel ring * @note: the value represents a fraction of the contentWidth */ GLfloat getRadiusInner() {return _radiusInner;}; /** * Revives paddles in the following order: red, blue, yellow * @return true if a paddle has been revived */ bool revivePaddle(); /** Makes the paddle grow to the max amount */ void growPaddleToMax(const Paddle_t paddle); /** Fills the passed in 
array with PaddleQueries indicating whether the paddles are alive or dead */ void queryPaddles(PaddleQuery* paddleArray); /** Checks whether the particle's paddle is alive */ bool isPaddleForParticleAlive(const Particle_t p); /** Returns a random alive paddle */ Paddle_t getRandomAlivePaddle(); /** Checks to see if there are any paddles alive */ bool areAnyPaddlesAlive(); /** Returns the number of paddles that are alive */ unsigned int numberOfPaddlesAlive(); /** Returns the center of the wheel in the parent layer's coordinates */ cocos2d::Point getCenterPointOnParentCoordinates(); /** Returns the radius in points */ float getRadiusInPoints() {return _contentSize.width*0.5f;} /** * @brief Sets the appropriate colors for the given difficulty * * @param d The difficulty */ void setColorsForDifficulty(const Difficulty_t d); private: PaddleSprite* _paddleA; PaddleSprite* _paddleB; PaddleSprite* _paddleC; float _radiusOuterAndParticleDiameter; GLfloat _radiusOuter; // radius as a fraction of the texture size GLfloat _radiusInner; // radius as a fraction of the texture size /** Creates the background */ void setupBackground(); /** Creates the paddles */ void setupPaddles(); /** Returns the corresponding paddle sprite */ PaddleSprite* getPaddleSpriteForType(const Paddle_t p) const; /** returns the paddle for a given index (0-based) from the _angleRanges array */ PaddleSprite* getPaddleForIndex(const int index) const; /** * Determines whether the given angle is within a paddles range * @return PADDLE_NONE if a is outside any range */ Paddle_t getPaddleThatIsAtAngle(const float a); /** Handles the event when the player lost the game */ void playerLostTheGame(); /** * @brief Checks to see if all paddles are level 3 and alive * * @return true if all paddles are level 3 and alive */ bool areAllPaddlesAliveLevel3(); /** * @brief Resets the level of all paddles */ void resetLevelOfAllPaddles(float delta); /** * @brief Makes the old paddles disappear beyond the edges of the 
screen and makes a new * set of paddles emerge from the core. */ void zoomOutNewSetOfPaddles(); }; NS_PRP_END #endif /* defined(__primaryp__PRPPaddleWheel__) */
# Package initializer: exposes the low-level `wrapper` module and a shared
# `config` object as the package's public API.
__all__ = ['wrapper', 'config']

from . import wrapper
from .config import Config

# Package-wide configuration singleton.  Optionally overridden at import time
# by the file named in the CARPY_CONFIG_FILE environment variable; `silent`
# makes a missing/unset variable a no-op instead of an error.
config = Config()
config.from_envvar('CARPY_CONFIG_FILE', silent=True)
// USAGE EXAMPLES: // // cargo run rome // cargo run rome -f 0 -t 1 // cargo run rome --from 0,1,2,4,5 --to 2,3,3,3,2 // FROM=2,3,3,4 TO=1,1,0,0 cargo run rome // // PROBLEM: // // You are given a map of the Roman Empire. There are ROADS + 1 cities // (numbered from 0 to ROADS) and ROADS directed roads between them. // The road network is connected; that is, ignoring the directions of // roads, there is a route between each pair of cities. // // The capital of the Roman Empire is Rome. We know that all roads lead // to Rome. This means that there is a route from each city to Rome. // Your task is to find Rome on the map, or decide that it is not // there. // // The roads are described by two vectors FROM and TO of ROADS integers // each. For each integer I (0 ≤ I < ROADS), there exists a road from // city FROM[I] to city TO[I]. // // Write a function: // // fn solve(from: &Vec<i64>, to: &Vec<i64>) -> i64 // // that, given two arrays FROM and TO, returns the number of the city // which is Rome (the city that can be reached from all other cities). // If no such city exists, your function should return −1. // // EXAMPLES: // // 1. Given FROM = vec!(1, 2, 3) and TO = vec!(0, 0, 0), the function // should return 0. Rome has the number 0 on the map. // // 2 // \ // > // 0 <- 3 // > // / // 1 // // 2. Given FROM = vec!(0, 1, 2, 4, 5) and TO = vec!(2, 3, 3, 3, 2), // the function should return 3. Rome has the number 3 on the map. // From cities 1, 2 and 4, there is a direct road to city 3. From // cities 0 and 5, the roads to city 3 go through city 2. // // 5 4 // \ / // > < // 2 -> 3 // > < // / \ // 0 1 // // 3. Given FROM = vec!(2, 3, 3, 4) and TO = vec!(1, 1, 0, 0), the // function should return −1. There is no Rome on the map. 
//
//   2 --> 1 <-- 3 --> 0 <-- 4
//
// Write an efficient algorithm for the following assumptions:
//
// - ROADS is an integer within the range [1..200,000];
// - each element of vectors FROM, TO is an integer within the range
//   [0..ROADS];
// - the road network is connected.

/// Prints the road lists, validates them, and reports which city is Rome
/// (or "None" when no such city exists).
pub fn run(from: &Vec<i64>, to: &Vec<i64>) {
    println!("roads from: {:?}", from);
    println!("roads to: {:?}", to);

    if !input_ok(from, to) {
        return;
    }

    let rome = solve(from, to);
    match rome {
        -1 => println!("Rome: None"),
        _ => println!("Rome: City {}", rome),
    }
}

// Not part of the solution, but...
// time complexity, O(N), single pass over the paired vectors
// space complexity, O(1), scalar variables not based on input size
fn input_ok(from: &Vec<i64>, to: &Vec<i64>) -> bool {
    if from.len() != to.len() {
        println!("ERROR: Road vectors are not the same length.");
        return false;
    }
    let max = from.len() as i64;
    // Iterate both endpoints of each road together instead of indexing.
    for (&src, &dst) in from.iter().zip(to.iter()) {
        if src < 0 || dst < 0 {
            println!("ERROR: Negative city value.");
            return false;
        }
        // Fixed typo in the error message ("then" -> "than").
        if src > max || dst > max {
            println!("ERROR: City value greater than number of roads.");
            return false;
        }
    }
    true
}

/// Returns the city reachable from all others, or -1 if there is none.
///
/// With ROADS edges and ROADS+1 cities on a connected network, Rome exists
/// exactly when a single city has no outgoing road: mark every road target,
/// unmark every road source, and require exactly one marked city to remain.
///
/// time complexity, O(N), one pass per vector
/// space complexity, O(N), boolean flag per city
fn solve(from: &Vec<i64>, to: &Vec<i64>) -> i64 {
    let mut candidates: Vec<bool> = vec![false; from.len() + 1];
    for &city in to.iter() {
        candidates[city as usize] = true;
    }
    for &city in from.iter() {
        candidates[city as usize] = false;
    }

    let mut result: i64 = -1;
    for (city, &is_rome) in candidates.iter().enumerate() {
        if is_rome {
            if result >= 0 {
                // More than one candidate means no single Rome exists.
                return -1;
            }
            result = city as i64;
        }
    }
    result
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn run_ok() {
        run(&vec![0], &vec![0]);
    }

    #[test]
    fn returns_expected() {
        assert_eq!(1, solve(&vec![0], &vec![1]));
        assert_eq!(-1, solve(&vec![0], &vec![0]));
        assert_eq!(0, solve(&vec![1, 2, 3], &vec![0, 0, 0]));
        assert_eq!(3, solve(&vec![0, 1, 2, 4, 5], &vec![2, 3, 3, 3, 2]));
        assert_eq!(-1, solve(&vec![2, 3, 3, 4], &vec![1, 1, 0, 0]));
    }
}
import moment from 'dayjs'; import { observer } from 'mobx-react-lite'; import React, { FunctionComponent } from 'react'; import { TableBodyRow, TableData, TableHeadRow } from 'src/components/Tables'; import { SubTitleText, Text } from 'src/components/Texts'; import { useStore } from 'src/stores'; import useWindowSize from 'src/hooks/useWindowSize'; const tableWidths = ['50%', '50%']; export const UnpoolingTable = observer(() => { const { chainStore, accountStore, queriesStore } = useStore(); const { isMobileView } = useWindowSize(); const account = accountStore.getAccount(chainStore.current.chainId); const queries = queriesStore.get(chainStore.current.chainId); const unlockingTokensExceptLPShares = queries.osmosis.queryAccountLocked .get(account.bech32Address) .unlockingCoins.filter(unlocking => !unlocking.amount.currency.coinMinimalDenom.startsWith('gamm/pool/')); return ( <div className="mt-10"> <div className="px-5 md:px-0"> <SubTitleText isMobileView={isMobileView}>Depoolings</SubTitleText> </div> <div className="text-white-mid mt-2 w-full px-4 py-1.5 border-2 border-solid border-secondary-50 border-opacity-60 rounded-lg"> Note: Depooling asset balance shown is a total across all pools, not on a per-pool basis </div> <table className="w-full"> <UnpoolingTableHeader isMobileView={isMobileView} /> <tbody className="w-full"> {unlockingTokensExceptLPShares.map((unlocking, i) => { return ( <UnpoolingTableRow key={i.toString()} amount={unlocking.amount .maxDecimals(6) .trim(true) .toString()} lockIds={unlocking.lockIds} endTime={unlocking.endTime} isMobileView={isMobileView} /> ); })} </tbody> </table> </div> ); }); interface UnlockingTableHeaderProps { isMobileView: boolean; } const UnpoolingTableHeader = observer(({ isMobileView }: UnlockingTableHeaderProps) => { return ( <thead> <TableHeadRow> <TableData width={tableWidths[0]}> <Text isMobileView={isMobileView}>Amount</Text> </TableData> <TableData width={tableWidths[1]}> <Text isMobileView={isMobileView}>Unlock 
Complete</Text> </TableData> </TableHeadRow> </thead> ); }); interface UnlockingTableRowProps { amount: string; lockIds: string[]; endTime: Date; isMobileView: boolean; } const UnpoolingTableRow: FunctionComponent<UnlockingTableRowProps> = ({ endTime, isMobileView, amount }) => { const endTimeMoment = moment(endTime); return ( <TableBodyRow height={64}> <TableData width={tableWidths[0]}> <Text emphasis="medium" isMobileView={isMobileView}> {amount} </Text> </TableData> <TableData width={tableWidths[1]}> <Text isMobileView={isMobileView}>{endTimeMoment.fromNow()}</Text> </TableData> </TableBodyRow> ); };
// This is the next phase after null protection is applied. void LuaConverter::convertAstSkipNullChecks(Analyser::AnalyserContext & context, const AstNode & node, DefaultIfMissing defaultIfMissing, LuaOutputter & output) { if (node.function().functionType != Function::CONSTANT && node.coercedType != node.type) { if(node.coercedType == PMMLDocument::TYPE_NUMBER) { output.keyword("tonumber"); } else if (node.coercedType == PMMLDocument::TYPE_STRING) { output.keyword("tostring"); } LuaOutputter::OperatorScopeHelper arguments(output, LuaOutputter::PRECEDENCE_PARENTHESIS); convertAstToLuaInner(context, node, defaultIfMissing, output); } else { convertAstToLuaInner(context, node, defaultIfMissing, output); } }
Post–Chikungunya Virus Infection Musculoskeletal Disorders: Syndromic Sequelae after an Outbreak The Chikungunya virus is a re-emerging mosquito-borne alphavirus. Outbreaks are unpredictable and explosive in nature. Fever, arthralgia, and rash are common symptoms during the acute phase. Diagnostic tests are required to differentiate chikungunya virus from other co-circulating arboviruses, as symptoms can overlap, causing a dilemma for clinicians. Arthritis is observed during the sub-acute and chronic phases, which can flare up, resulting in increased morbidity that adversely affects the activities of daily living. During the 2019 chikungunya epidemic in Thailand, cases surged in Bangkok in the last quarter of the year. Here, we demonstrate the chronic sequelae of post-chikungunya arthritis in one of our patients one year after the initial infection. An inflammatory process involving edema, erythema, and tenderness to palpation of her fingers’ flexor surfaces was observed, with positive chikungunya IgG and negative IgM tests and antigen. The condition produced stiffness in the patient’s fingers and limited their range of motion, adversely affecting daily living activities. Resolution of symptoms was observed with a short course of an anti-inflammatory agent. More research is required to determine whether sanctuaries enable chikungunya virus to evade the host immune response and remain latent, flaring up months later and triggering an inflammatory response that causes post-chikungunya arthritis. Introduction Arboviral infections exhibit many etiologies and cause acute febrile illnesses in tropical and subtropical regions. These infections are considered "neglected tropical diseases" that place over one billion people worldwide at risk of illness . The co-existence of these pathogens in endemic regions poses a conundrum in clinical practice . Due to similarities in their presentation, these viruses can be easily misidentified . 
The use of definite laboratory diagnostics, either molecular techniques or serology, is therefore crucial in narrowing down the differential diagnosis of acute febrile illnesses or, more precisely, acute undifferentiated febrile illnesses. Infections involving the majority of arboviral etiologic agents such as the dengue viruses, Zika, and chikungunya virus (CHIKV), frequently manifest as an abrupt-onset illness with non-localizing signs and symptoms; the infections are self-limited, and patients typically recuperate without the use of any particular therapeutics . Conversely, of the three sympatric pathogens described, CHIKV causes more morbidity and arthritis during chronic infection. CHIKV is a mosquito-borne Alphavirus of the Togaviridae family, and it was first detected along the forest fringes of modern-day Tanzania . Since its discovery, the virus has been more commonly known for its re-emergence and resurgence in tropical and subtropical regions. In addition, CHIKV is characterized by unpredictable outbreak patterns that are explosive in nature, short-lived, and sporadically limited during interepidemic periods . The virus has three specific lineages, and polymorphisms that have arisen in the viral genome over the last decade have produced sub-lineages that are detected with increasing frequency across the globe . The sylvatic transmission cycle has been well characterized, but details regarding the urban cycle and identity of the reservoir maintaining the virus remain unclear. However, the virus is amplified and sustained in humans during outbreaks. After an incubation period of 2-10 days, over 70% of patients become symptomatic with a febrile arthritogenic infection that resolves naturally, providing life-long immunity . Fatalities from CHIKV infection are rare, but the virus can cause high morbidity. Arthralgia and arthritis can affect large joints and proximal joints and is polyarticular in nature, with symmetric or asymmetric polymorphic involvement. 
Arthralgia is the predominant feature, but atypical infections can involve other organs. Individuals of extreme age or with underlying comorbidities develop a more severe infection, leading to multi-system involvement. Common laboratory features include leukopenia and thrombocytopenia, with lymphopenia and neutrophilia varying with the degree of viremia. In terms of therapeutic management of the arthritides caused by CHIKV, infections are classified as either acute, sub-acute, or chronic . The acute phase lasts for three weeks, whereas the sub-acute phase begins at three weeks of illness and lasts to the end of three months, and protracted chronic infection is diagnosed when the illness persists beyond three months with symptomatic sequelae. This syndrome can be recurrent, with flare-ups during the sub-acute phase and chronic period. These flare-ups can involve episodic relapses and periods of remission of musculoskeletal symptoms reminiscent of rheumatism, causing debilitating and restricted ambulation that adversely impacts an affected individual's quality of life. As observed previously, the median number of relapses is two, with a range of 1-20, and the median delay between relapses is four weeks (range of 1-99 weeks) . Over 60% of individuals affected by the virus can develop chronicity . Those most at risk of developing chronic infection include the elderly, women, and individuals with an underlying musculoskeletal disorder . Moreover, increasing evidence suggests that severity and chronicity vary with the viral genotype . Details of the mechanism leading to persistent and recurrent inflammatory arthritis post-CHIKV infection are poorly understood. Historically, many viruses have been described as causing arthritis or suspected of triggering an autoimmune response after infection . The first description of a post-CHIKV rheumatic disorder was in South Africa after an outbreak in the 1970s . 
The host responses in post-CHIKV inflammatory arthritis and rheumatoid arthritis involve the expression of the same pro-inflammatory cytokines and chemokines, with comparable clinical findings . The majority of cases fail to meet the criteria for rheumatoid arthritis during articular inflammatory flare-ups, with seronegative status for rheumatism . This makes it difficult to recognize the early onset of rheumatoid arthritis and initiate appropriate targeted treatment in a timely manner. However, cases of erosive rheumatoid arthritis were reported following a large Indian Ocean CHIKV epidemic . Here, we report the chronic sequelae of post-CHIKV arthritis in a patient one year after the initial infection. Materials and Methods The patient was a participant of one of our previous studies conducted at the Bangkok Hospital for Tropical Diseases during the CHIKV outbreak in 2019 . After becoming symptomatic again in 2020, the patient returned to the Fever Clinic. The Mahidol-Osaka Center for Infectious Diseases at the Faculty of Tropical Medicine, Mahidol University, provided diagnostic support for the CHIKV infection. The patient's medical chart was reviewed retrospectively to extract the clinical data, and the patient was closely followed up until resolution of her symptoms. We used a prototype lateral-flow immunochromatography rapid point-of-care test kit to detect the CHIKV envelope protein 1 (E1) antigen, which was subsequently confirmed via real-time reverse transcription polymerase chain reaction (RT-PCR) analysis . Other serologic tests included anti-CHIKV immunoglobulin M (IgM) and immunoglobulin G (IgG) (SD Biosensor, Inc. Gyeonggi-do, Korea), dengue non-structural protein 1 (NS1) antigen (Biosynex, Swiss S.A, Fribourg, Switzerland), and anti-DENV IgM and IgG (S, Bioline, Sankt Ingbert, Germany). To exclude other possible co-circulating arboviruses in Thailand, we also performed real-time RT-PCR for both dengue and Zika virus.
Case Report A case of post-CHIKV arthritis in a patient from our CHIKV patient cohort is presented. A Thai woman in her mid-forties presented to the Fever Clinic at Bangkok Hospital for Tropical Diseases following surges of CHIKV cases during the 2019 outbreak in Thailand. She had no underlying past medical history of diabetes, hypertension, chronic lung disease, chronic kidney disease, ischemic heart disease, or any known musculoskeletal disorder and was not on any regular treatment or consuming any supplements. She denied any recent travel history outside of Bangkok, coming into contact with animals (rodents), or exposure to floods, and she had not received any blood transfusion over the past four weeks. She presented on her third day of illness with complaints of an abrupt onset of high-grade fever (39 °C) associated with chills, arthralgia, myalgia, rash, finger stiffness, and difficulty walking. On examination, she was conscious and coherent with time, place, and person. There was no noticeable pallor or jaundice and no evidence of dehydration or any palpable lymphadenopathy, but injection of the conjunctiva, erythema over the cheeks, and erysipelas of the pinnae were noted (Figure S1). Physical examination of the cardiovascular system was normal except for tachycardia. Examination of the respiratory and central nervous systems was unremarkable. The patient's abdomen was soft, with no evidence of hepato-splenomegaly. A maculopapular rash was observed predominantly over the trunk, which was described as pruritic and suggestive of a centrifugal distribution. The arthralgia involved large joints (knees), without swelling or joint effusion, as well as peripheral joints (wrists and phalanges). There was swelling of the proximal phalangeal and metacarpophalangeal joints, with restricted range of motion. A pain severity score of 8/10 was reported. The myalgia was reported as generalized, not predominantly affecting the lower or upper extremities.
Laboratory investigation results at the time of presentation are shown in Table 1. CHIKV infection was suspected, and the patient was screened using a novel antigen test that detects the E1 protein; CHIKV infection was later confirmed via real-time RT-PCR analysis. With possible co-infections ruled out and without prior known allergies to nonsteroidal anti-inflammatory drugs (NSAIDs), naproxen was prescribed at a daily dose of 1000 mg, along with an antihistamine (hydroxyzine 25 mg), for three days, to which she responded well, reporting much relief at the follow-up consultation on the fifth day of illness. Complete resolution of symptoms was noted on follow-up nine days after onset. After almost a year without further flare-ups of symptoms characteristic of CHIKV, the patient returned to the Fever Clinic with new onset of febrile illness. She presented on the second day of illness with complaints of fever, chills, arthralgia, and unilateral swelling and stiffness of her left hand. On examination of the affected limb, there was swelling of the proximal interphalangeal joints extending to the metacarpophalangeal joints, with a limited range of motion ( Figure 1). Discussion We describe a case involving typical musculoskeletal manifestations of acute CHIKV infection including cutaneous manifestations. The Milians ear sign is increasingly recognized during the acute phase of CHIKV infection . Nevertheless, non-specific symptoms such as non-purulent conjunctivitis can resemble those associated with other endemic virus infections . Clinicians working in fever clinics or travel clinics caring for patients residing in endemic regions or travelers returning from tropical regions should carefully consider infection with all possible tropical pathogens that could present as an acute undifferentiated febrile illness. 
It is essential to inquire about possible exposures, which can help narrow the differential diagnosis of tropical viruses or bacteria such as Leptospira or Rickettsia, for which effective treatments exist. For most arboviruses, rapid point-of-care diagnostics are helpful for prompt identification of suspected cases of dengue, CHIKV, or Zika. As alluded to in the Introduction, details of the mechanism underlying the chronicity and flare-up of symptoms with CHIKV are unclear. However, it has been demonstrated that factors such as viremia, advanced age at the onset of illness, female gender, comorbidities, pre-existing rheumatism or arthropathy, and genetic predisposition may contribute to the persistence of symptoms in CHIKV infection . Furthermore, the East Central South African (ECSA) lineage and Asian lineage reportedly cause long-lasting musculoskeletal disorders . Similarly, post-CHIKV arthritis can occur during infection with the ECSA Indian Ocean sub-lineage, as observed in the present case . Inflammatory polyarthritis is the most common long-term sequelae to occur with chikungunya infection . However, there are reports of alopecia, skin hyper-pigmentation, chronic fatigue, and depression to occur as persisting sequelae . Other long-term sequelae after a viral insult to the central nervous system by the CHIKV include persistent neurological sequelae manifesting as epilepsy or post-infectious dementia. Encephalitis or encephalopathy in neonates and children have a worsened neurocognitive function with severe development deficits . Albeit in small numbers, ocular complications leading to loss of visual acuity and permanent neurological disability after acute disseminated encephalomyelitis have previously been reported . 
Unlike animal models in which pathologies and persistence of viral RNA have been demonstrated in joint tissue in vivo during acute CHIKV infection, no viable viruses or viral genetic material have been found within the joint articular spaces in humans, except in a limited number of cases in which the CHIKV antigen was identified in perivascular macrophages . This lack of detection might be due to a robust innate type 1 immune response that directs macrophages to clear up the viruses or to a decreased threshold of detection for extremely low viremia levels . Interferons are known for their antiviral properties; any interferon deficiencies observed with increasing age could contribute to the severity of the infection . In addition, interferons function poorly at lower temperatures, which promotes arthritis in infections with other alphaviruses . Similarly, interferon dysfunction could play a role in the peripheral articular joints predominantly involved during CHIKV infection. As described previously in patients with CHIKV, levels of circulating anti-inflammatory cytokines and cytotoxic T-cell activity are low during post-CHIKV arthritis . Individuals with a compromised immune system due to diabetes or immunosuppressive therapy are vulnerable to development of chronic sequelae of inflammatory arthritis post-CHIKV infection. Although IgM can be detected as early as the fourth day after the onset of symptoms and aids in viral clearance, IgM persistence is also reportedly associated with destructive arthropathies during CHIKV infection . In general, the IgM level decreases to below the detection limit within three to four months, and IgG persistence ensures life-long immunity. Some researchers have described a slightly extended IgM depletion trend lasting until the end of 18 months . In our case, IgM and IgG were detected during the second week of the acute phase; IgM became undetectable by the end of 12 months, while IgG remained positive, facilitating immunologic memory.
After an acute CHIKV infection, the common observation is the resolution of symptoms during the acute phase, but protraction of symptoms into the chronic phase with relapse can occur within three months. In contrast, in our case, although the patient's symptoms resolved after the acute CHIKV infection, after approximately one year, the patient presented again with symptoms consistent with post-CHIKV rheumatic and musculoskeletal disorders (pCHIKV-RMSD). An inflammatory process involving edema, erythema, and tenderness to palpation of the flexor surfaces of her fingers was noted. This inflammatory response produced stiffness in her fingers and limited their range of motion, thus adversely affecting her daily living activities. The term pCHIKV-RMSD was coined by rheumatologists. For specific targeted therapeutic management, pCHIKV-RMSD was partitioned into two additional categories. These include post-CHIKV musculoskeletal disorders (pCHIKV-MSD), which respond to anti-inflammatory agents, and post-CHIKV de novo chronic inflammatory rheumatism (pCHIKV-CIR), which is characterized by the presence of rheumatism without evidence of articular disorders prior to CHIKV infection . Limitations in this case report include the unavailability of inflammatory biomarker data (rheumatoid factor, anti-citrullinated protein antibodies, antinuclear antibodies, C-reactive protein, erythrocyte sedimentation rate, uric acid levels, or human leukocyte antigen antibodies) and imaging (x-rays or magnetic resonance imaging of the articular joints) to determine the presence of any degenerative or erosive arthritis. It is important to consider other Alphaviruses that can exhibit persistent arthritides after an acute infection. The Mayaro virus and Ross River virus (RRV) are still geographically restricted to South America and Australia, including the Southwestern Pacific islands .
The Barmah Forest virus is found only on the Australian mainland, and outbreaks of the O'nyong-nyong virus have occurred in East and West Africa . As the patient declared no recent travel history, we did not consider these Alphaviruses capable of manifesting as arthritides. The Sindbis virus group is widely distributed in Africa, Asia, and Australia, with an increased endemicity in Northern Europe. The clinical presentation was not suggestive of other cosmopolitan viruses such as rubella, cytomegalovirus, or hepatitis. More research is thus required to determine whether there are sanctuaries for CHIKV in which the virus can evade the host immune response and remain latent, flaring up months later and triggering inflammatory responses leading to pCHIKV-MSD or pCHIKV-CIR. The new onset of inflammation in the patient's fingers was consistent with pCHIKV-MSD, with rapid response to a short course of NSAIDs. Treatment strategies should take a combined approach involving the primary care physician and a rheumatologist to optimize the management of such cases. Informed Consent Statement: Written informed consent to publish this paper was obtained from the patient. Data Availability Statement: The data presented in this study are available on request from the corresponding author. The data are not publicly available to ensure the privacy of the study participant.
Stub content: Drivers/STM32F1xx_HAL_Driver/Src/stm32f1xx_hal_spi_ex.c
/**
 * Immutable pairing of a MIME type and the textual body returned by an
 * HTTP request.
 */
public class text_content {
  /** MIME type of the response body. */
  public final string mime_type;

  /** The textual payload itself. */
  public final string content;

  /**
   * @param mime_type MIME type describing the payload
   * @param content the text payload
   */
  public text_content(string mime_type, string content) {
    this.mime_type = mime_type;
    this.content = content;
  }

  /**
   * Builds an HTTP Content-Type header value: the MIME type followed by a
   * UTF-8 charset parameter.
   */
  public String to_content_type() {
    StringBuilder header = new StringBuilder();
    header.append(utilities.s(mime_type));
    header.append(";charset=");
    header.append(utilities.s(resource_util.UTF_8));
    return header.toString();
  }
}
def read_3D_array(name):
    """Read a 3-D momenta array from a whitespace-separated text file.

    Expected layout (inferred from the parsing below -- TODO confirm against
    the code that writes these files): a header line containing three
    integers "nbSubjects nbControlPoints dimension", one separator line,
    then per subject a block of nbControlPoints rows of `dimension` floats,
    with one separator line between consecutive blocks.

    Returns a numpy array of shape (nbSubjects, nbControlPoints, dimension),
    squeezed to 2-D when there is exactly one subject. If the header does not
    parse as integers (ValueError), falls back to read_2D_array.
    """
    try:
        with open(name, "r") as f:
            lines = f.readlines()
            # Header: number of subjects, control points, space dimension.
            line0 = [int(elt) for elt in lines[0].split()]
            nbSubjects, nbControlPoints, dimension = line0[0], line0[1], line0[2]
            momenta = np.zeros((nbSubjects, nbControlPoints, dimension))
            # Skip the header plus the separator line that follows it.
            lines = lines[2:]
            for i in range(nbSubjects):
                for c in range(nbControlPoints):
                    foo = lines[c].split()
                    # Each row must carry exactly `dimension` values.
                    assert (len(foo) == dimension)
                    foo = [float(elt) for elt in foo]
                    momenta[i, c, :] = foo
                # Advance past this subject's block and one separator line.
                lines = lines[1 + nbControlPoints:]
            if momenta.shape[0] == 1:
                # Single subject: return the 2-D slice rather than 3-D array.
                return momenta[0]
            else:
                return momenta
    except ValueError:
        # Header was not three integers: assume the simpler 2-D format.
        return read_2D_array(name)
import os
import onnx  # noqa
import torch
import numpy as np
import brevitas.onnx as bo
from brevitas.nn import QuantAvgPool2d
from brevitas.quant_tensor import pack_quant_tensor
from brevitas.core.quant import QuantType
from finn.core.modelwrapper import ModelWrapper
from finn.core.datatype import DataType
from finn.transformation.infer_shapes import InferShapes
from finn.transformation.infer_datatypes import InferDataTypes
from finn.util.basic import gen_finn_dt_tensor
import finn.core.onnx_exec as oxe

import pytest

export_onnx_path = "test_brevitas_avg_pool_export.onnx"


@pytest.mark.parametrize("kernel_size", [2, 3])
@pytest.mark.parametrize("stride", [1, 2])
@pytest.mark.parametrize("signed", [False, True])
@pytest.mark.parametrize("bit_width", [2, 4])
@pytest.mark.parametrize("input_bit_width", [4, 8, 16])
@pytest.mark.parametrize("channels", [2, 4])
@pytest.mark.parametrize("idim", [7, 8])
def test_brevitas_avg_pool_export(
    kernel_size, stride, signed, bit_width, input_bit_width, channels, idim
):
    """Export a brevitas QuantAvgPool2d to FINN-ONNX and check that FINN's
    execution matches the brevitas forward pass, both with integer inputs at
    scale 1 and with float inputs at a random per-channel scale."""
    ishape = (1, channels, idim, idim)
    ibw_tensor = torch.Tensor([input_bit_width])
    b_avgpool = QuantAvgPool2d(
        kernel_size=kernel_size,
        stride=stride,
        bit_width=bit_width,
        quant_type=QuantType.INT,
    )
    # Build a zero-valued quant tensor purely so the export below sees a
    # concrete scale factor and bit width on its input.
    input_tensor = torch.from_numpy(np.zeros(ishape)).float()
    scale = np.ones((1, channels, 1, 1))
    output_scale = torch.from_numpy(scale).float()
    input_quant_tensor = pack_quant_tensor(
        tensor=input_tensor, scale=output_scale, bit_width=ibw_tensor, signed=signed
    )
    bo.export_finn_onnx(b_avgpool, ishape, export_onnx_path, input_t=input_quant_tensor)
    model = ModelWrapper(export_onnx_path)

    # determine input FINN datatype (idiom: direct truthiness instead of
    # the previous `if signed is True:` comparison)
    prefix = "INT" if signed else "UINT"
    dt_name = prefix + str(input_bit_width)
    dtype = DataType[dt_name]
    model = model.transform(InferShapes())
    model = model.transform(InferDataTypes())

    # execution with input tensor using integers and scale = 1
    # calculate golden output
    inp = gen_finn_dt_tensor(dtype, ishape)
    input_tensor = torch.from_numpy(inp).float()
    input_quant_tensor = pack_quant_tensor(
        tensor=input_tensor, scale=output_scale, bit_width=ibw_tensor, signed=signed
    )
    b_avgpool.eval()
    expected = b_avgpool.forward(input_quant_tensor).tensor.detach().numpy()

    # finn execution
    idict = {model.graph.input[0].name: inp}
    odict = oxe.execute_onnx(model, idict, True)
    produced = odict[model.graph.output[0].name]
    assert (expected == produced).all()

    # execution with input tensor using float and scale != 1
    scale = np.random.uniform(low=0, high=1, size=(1, channels, 1, 1)).astype(
        np.float32
    )
    inp_tensor = inp * scale
    input_tensor = torch.from_numpy(inp_tensor).float()
    input_scale = torch.from_numpy(scale).float()
    input_quant_tensor = pack_quant_tensor(
        tensor=input_tensor, scale=input_scale, bit_width=ibw_tensor, signed=signed
    )
    # export again to set the scale values correctly
    bo.export_finn_onnx(b_avgpool, ishape, export_onnx_path, input_t=input_quant_tensor)
    model = ModelWrapper(export_onnx_path)
    model = model.transform(InferShapes())
    model = model.transform(InferDataTypes())
    b_avgpool.eval()
    expected = b_avgpool.forward(input_quant_tensor).tensor.detach().numpy()
    # finn execution
    idict = {model.graph.input[0].name: inp_tensor}
    odict = oxe.execute_onnx(model, idict, True)
    produced = odict[model.graph.output[0].name]
    assert np.isclose(expected, produced).all()

    os.remove(export_onnx_path)
use super::LatexRenderer;
use mediawiki_parser::*;
use preamble::*;

impl<'e, 's: 'e, 't: 'e> LatexRenderer<'e, 't> {
    /// Renders a parsed HTML tag to LaTeX output.
    ///
    /// `dfn` is emitted as italic text, `ref` as a reference, and `section`
    /// is dropped silently; any other tag writes an error marker instead.
    /// Always returns `Ok(false)` so children are not traversed again.
    pub fn htmltag(
        &mut self,
        root: &'e HtmlTag,
        settings: &'s Settings,
        out: &mut io::Write,
    ) -> io::Result<bool> {
        let tag_name = root.name.to_lowercase();
        match tag_name.trim() {
            "dfn" => {
                let rendered = root.content.render(self, settings)?;
                write!(out, HTML_ITALIC!(), &rendered)?;
            }
            "ref" => {
                let rendered = root.content.render(self, settings)?;
                write!(out, HTML_REF!(), &rendered)?;
            }
            // Sections have no direct LaTeX representation here.
            "section" => (),
            _ => {
                let msg = format!("no export function defined for html tag `{}`!", root.name);
                self.write_error(&msg, out)?;
            }
        }
        Ok(false)
    }
}
def _update_scroll(self, cursor_cell, win_rows, win_cols):
    """Scroll the viewport the minimal amount needed to keep ``cursor_cell``
    inside a ``win_rows`` x ``win_cols`` window, then clamp the offset to the
    document extent. Updates ``self.scroll`` in place.

    The row and column axes followed duplicated, parallel logic; both now go
    through the shared ``_clamp_scroll_axis`` helper.
    """
    sr, sc = self.scroll
    sr = self._clamp_scroll_axis(sr, cursor_cell.row, win_rows, self.document.max_row)
    sc = self._clamp_scroll_axis(sc, cursor_cell.col, win_cols, self.document.max_col)
    self.scroll = CellLocation(sr, sc)

@staticmethod
def _clamp_scroll_axis(offset, cursor_pos, win_size, max_pos):
    """Return the scroll offset for one axis.

    Follows the cursor when it falls outside [offset, offset + win_size),
    then clamps so the window never scrolls past the document's last
    row/column. A degenerate window (win_size < 1) pins the offset to 0,
    matching the original behavior (0 followed by a min with a non-negative
    clamp is always 0).
    """
    if win_size < 1:
        return 0
    if cursor_pos < offset:
        offset = cursor_pos
    elif cursor_pos >= offset + win_size:
        offset = max(0, cursor_pos - win_size + 1)
    return min(offset, max(0, max_pos - win_size + 1))
def create_array_with_hounsfield_units(image_data, mu_water, mu_air):
    """Convert every voxel of ``image_data`` to Hounsfield units in place.

    Applies ``calculate_hounsfield_unit`` element-wise over the 3-D volume
    (assumes image_data is indexable as [x][y][slice] -- TODO confirm shape
    convention with callers) using the given water/air attenuation
    coefficients, printing progress roughly every 10% of voxels.

    Returns the (mutated) ``image_data`` array.

    Bug fixed: the previous progress check used a float modulus,
    ``count % (0.1 * iterations) == 0``, which is almost never exactly zero
    when ``0.1 * iterations`` is not an integer, so progress was rarely (or
    never) reported. An integer step is used instead.
    """
    dim_x = np.size(image_data, 0)
    dim_y = np.size(image_data, 1)
    dim_slice = np.size(image_data, 2)
    total = dim_x * dim_y * dim_slice
    # Integer step of ~10% of voxels; max(1, ...) guards tiny volumes.
    step = max(1, total // 10)
    count = 0
    for i in range(dim_x):
        for j in range(dim_y):
            for k in range(dim_slice):
                image_data[i][j][k] = calculate_hounsfield_unit(
                    image_data[i][j][k], mu_water, mu_air
                )
                count += 1
                if count % step == 0:
                    print(round(100.0 * count / total), "% progress")
    return image_data
// TimeMillisAfter waits for the duration between min and max // to elapse and then sends the current time // on the returned channel. func TimeMillisAfter(min, max uint32) <-chan time.Time { if min >= max { return time.After(time.Duration(min) * time.Millisecond) } return time.After(time.Duration(Uint32Range(min, max)) * time.Millisecond) }
/** * An Vector contains a vector of 'dimension' values. It serves as the main data * structure that is stored and retrieved. It also has an identifier (key). * * @author Joren Six */ public class Vector implements Serializable { private static final long serialVersionUID = 5169504339456492327L; /** * Values are stored here. */ public double[] values; /** * An optional key, identifier for the vector. */ private String key; /** * Creates a new vector with the requested number of dimensions. * @param dimensions The number of dimensions. */ public Vector(int dimensions) { this(null,new double[dimensions]); } /** * Copy constructor. * @param other The other vector. */ public Vector(Vector other){ //copy the values this(other.getKey(),Arrays.copyOf(other.values, other.values.length)); } /** * Creates a vector with the values and a key * @param key The key of the vector. * @param values The values of the vector. */ public Vector(String key,double[] values){ this.values = values; this.key = key; } /** * Moves the vector slightly, adds a value selected from -radius to +radius to each element. * @param radius The radius determines the amount to change the vector. */ public void moveSlightly(double radius){ Random rand = new Random(); for (int d = 0; d < getDimensions(); d++) { //copy the point but add or subtract a value between -radius and +radius double diff = radius + (-radius - radius) * rand.nextDouble(); double point = get(d) + diff; set(d, point); } } /** * Set a value at a certain dimension d. * @param dimension The dimension, index for the value. * @param value The value to set. */ public void set(int dimension, double value) { values[dimension] = value; } /** * Returns the value at the requested dimension. * @param dimension The dimension, index for the value. * @return Returns the value at the requested dimension. */ public double get(int dimension) { return values[dimension]; } /** * @return The number of dimensions this vector has. 
*/ public int getDimensions(){ return values.length; } /** * Calculates the dot product, or scalar product, of this vector with the * other vector. * * @param other * The other vector, should have the same number of dimensions. * @return The dot product of this vector with the other vector. * @exception ArrayIndexOutOfBoundsException * when the two vectors do not have the same dimensions. */ public double dot(Vector other) { double sum = 0.0; for(int i=0; i < getDimensions(); i++) { sum += values[i] * other.values[i]; } return sum; } public void setKey(String key) { this.key = key; } public String getKey() { return key; } public String toString(){ StringBuilder sb= new StringBuilder(); sb.append("values:["); for(int d=0; d < getDimensions() - 1; d++) { sb.append(values[d]).append(","); } sb.append(values[getDimensions()-1]).append("]"); return sb.toString(); } }
def mul_church(m, n): return lambda f : lambda x : m(n(f))(x)
<reponame>czen/open-ops<filename>unittests/Reprise/Reprise.cpp<gh_stars>10-100 #include "Reprise/Reprise.h" #include "Frontend/Frontend.h" #include "Shared/RepriseClone.h" #include "Backends/RepriseXml/RepriseXml.h" #include "Backends/OutToC/OutToC.h" #include <string> #include "GTestIncludeWrapper.h" #include "../FrontendHelper.h" using namespace std; using namespace OPS; using namespace OPS::Reprise; //TEST(FactorialTest, HandlesZeroInput) //{ // EXPECT_EQ(1, 1); //} TEST(Frontend, ParseNegativeFile) { OPS::Frontend::Frontend frontend; const std::string input_filepath = IO::osPathToPosixPath(IO::combinePath(sourceRoot, L"tests/Reprise/UnitTests/negative_test.c")); const CompileResult& result = frontend.compileSingleFile(input_filepath); EXPECT_NE(0, result.errorCount()); } TEST(Frontend, CheckGoto) { COMPILE_FILE("tests/Reprise/UnitTests/goto_test.c"); ASSERT_EQ(1, frontend.getProgramUnit().getUnitCount()); TranslationUnit& unit = frontend.getProgramUnit().getUnit(0); EXPECT_EQ(0, unit.getGlobals().getVariableCount()); EXPECT_EQ(1, unit.getGlobals().getSubroutineCount()); Declarations::SubrIterator firstSubr = unit.getGlobals().getFirstSubr(); if (firstSubr.isValid()) { SubroutineDeclaration& main = *firstSubr; EXPECT_EQ(true, main.hasImplementation()); BlockStatement& body = main.getBodyBlock(); EXPECT_FALSE( body.isEmpty()); BlockStatement::Iterator stmt = body.getFirst(); int stmtCount = 0; while (stmt.isValid()) { stmtCount += 1; stmt.goNext(); } EXPECT_EQ(2, stmtCount); } } TEST(Reprise, DumpLabels) { COMPILE_FILE("tests/Reprise/UnitTests/dump_labels.c"); EXPECT_GE(1, frontend.getProgramUnit().getUnitCount()); //TranslationUnit& unit = frontend.getProgramUnit().getUnit(0); // std::cout << unit.dumpState(); } TEST(Reprise, GetAssociatedVariable) { COMPILE_FILE("tests/Reprise/UnitTests/get_associated_variable.c"); EXPECT_GE(1, frontend.getProgramUnit().getUnitCount()); for (int unitIndex = 0; unitIndex < frontend.getProgramUnit().getUnitCount(); ++unitIndex) { 
TranslationUnit& unit = frontend.getProgramUnit().getUnit(unitIndex); for (Declarations::SubrIterator subIter = unit.getGlobals().getFirstSubr(); subIter.isValid(); ++subIter) { SubroutineType& subroutineF = subIter->getType(); for (int paramIndex = 0; paramIndex < subroutineF.getParameterCount(); ++paramIndex) { EXPECT_TRUE(subroutineF.getParameter(paramIndex).hasAssociatedVariable()); VariableDeclaration& varDecl = subroutineF.getParameter(paramIndex).getAssociatedVariable(); EXPECT_TRUE(varDecl.hasParameterReference()); EXPECT_EQ(&varDecl.getParameterReference(), &subroutineF.getParameter(paramIndex)); } } } } TEST(Reprise, GotoClone) { using namespace OPS::Shared; COMPILE_FILE("tests/Reprise/UnitTests/goto_test.c"); ASSERT_GE(1, frontend.getProgramUnit().getUnitCount()); TranslationUnit& unit = frontend.getProgramUnit().getUnit(0); ReprisePtr<TranslationUnit> newUnit(deepCloneTranslationUnit(unit)); StatementBase& oldGoto = *unit.getGlobals().getFirstSubr()->getBodyBlock().getLast(); StatementBase* oldPointed = oldGoto.cast_to<GotoStatement>().getPointedStatement(); StatementBase& newGoto = *newUnit->getGlobals().getFirstSubr()->getBodyBlock().getLast(); StatementBase* newPointed = newGoto.cast_to<GotoStatement>().getPointedStatement(); EXPECT_NE(oldPointed, newPointed); /* OPS::XmlBuilder xml; OPS::Backends::RepriseXml::Options options; options.writeNCIDofParent = true; OPS::Backends::RepriseXml repXml(xml, options); repXml.visit(*newUnit); cout << xml.dump(); */ } TEST(Reprise, StructAccess) { using namespace OPS::Shared; COMPILE_FILE("tests/Reprise/UnitTests/struct_access.c"); ASSERT_GE(1, frontend.getProgramUnit().getUnitCount()); //TranslationUnit& unit = frontend.getProgramUnit().getUnit(0); /* OPS::XmlBuilder xml; OPS::Backends::RepriseXml::Options options; options.writeNCIDofParent = true; OPS::Backends::RepriseXml repXml(xml, options); repXml.visit(*newUnit); cout << xml.dump(); */ } TEST(Reprise, GetResultTypeForArrays) { 
COMPILE_FILE("tests/Reprise/UnitTests/get_result_type.c"); ASSERT_GE(1, frontend.getProgramUnit().getUnitCount()); TranslationUnit& unit = frontend.getProgramUnit().getUnit(0); BlockStatement& body = unit.getGlobals().getFirstSubr()->getBodyBlock(); for (BlockStatement::Iterator stIter = body.getFirst(); stIter.isValid(); ++stIter) { ExpressionStatement& expr = stIter->cast_to<ExpressionStatement>(); BasicCallExpression& arrayAccess = expr.get().cast_to<BasicCallExpression>().getArgument(0).cast_to<BasicCallExpression>(); ReprisePtr<TypeBase> resultType; ASSERT_NO_THROW(resultType = arrayAccess.getResultType()); ASSERT_TRUE(0 != resultType.get()); EXPECT_TRUE(resultType->is_a<BasicType>()); } } TEST(Reprise, GetResultTypeForVectors) { COMPILE_FILE("tests/Reprise/UnitTests/vec_get_result_type.c"); BlockStatement& body = frontend.getProgramUnit().getUnit(0).getGlobals().findSubroutine("main")->getBodyBlock(); EXPECT_TRUE(body.getChild(0).cast_to<ExpressionStatement>().get().getResultType()->is_a<VectorType>()); EXPECT_TRUE(body.getChild(1).cast_to<ExpressionStatement>().get().getResultType()->is_a<BasicType>()); EXPECT_TRUE(body.getChild(2).cast_to<ExpressionStatement>().get().getResultType()->is_a<VectorType>()); EXPECT_TRUE(body.getChild(3).cast_to<ExpressionStatement>().get().getResultType()->is_a<VectorType>()); } TEST(Reprise, GetResultTypeForFuncPtr) { COMPILE_FILE("tests/Reprise/UnitTests/func_ptr_result_type.c"); BlockStatement& body = frontend.getProgramUnit().getUnit(0).getGlobals().findSubroutine("main")->getBodyBlock(); ASSERT_EQ(4, body.getChildCount()); // Explicit function call ASSERT_TRUE(body.getChild(0).is_a<ExpressionStatement>()); ReprisePtr<TypeBase> type0 = body.getChild(0).cast_to<ExpressionStatement>().get().getResultType(); EXPECT_TRUE(type0->is_a<BasicType>()); // Function call by pointer ASSERT_TRUE(body.getChild(1).is_a<ExpressionStatement>()); ReprisePtr<TypeBase> type1 = 
body.getChild(1).cast_to<ExpressionStatement>().get().getResultType(); EXPECT_TRUE(type1->is_a<BasicType>()); // Explicit function reference ASSERT_TRUE(body.getChild(2).is_a<ExpressionStatement>()); ReprisePtr<TypeBase> type2 = body.getChild(2).cast_to<ExpressionStatement>().get().getResultType(); EXPECT_TRUE(type2->is_a<SubroutineType>()); // Dereference pointer to function ASSERT_TRUE(body.getChild(3).is_a<ExpressionStatement>()); ReprisePtr<TypeBase> type3 = body.getChild(3).cast_to<ExpressionStatement>().get().getResultType(); EXPECT_TRUE(type3->is_a<SubroutineType>()); } TEST(Reprise, GetResultTypeForConditional) { COMPILE_FILE("tests/Reprise/UnitTests/conditional_result_type.c"); BlockStatement& main = frontend.getProgramUnit().getUnit(0).getGlobals().findSubroutine("main")->getBodyBlock(); ASSERT_EQ(9, main.getChildCount()); for(int i = 0; i < 9; ++i) { ASSERT_TRUE(main.getChild(i).is_a<ExpressionStatement>()); ReprisePtr<TypeBase> resType = main.getChild(i).cast_to<ExpressionStatement>().get().getResultType(); ASSERT_TRUE(resType->is_a<BasicType>()); EXPECT_EQ(BasicType::BT_INT32, resType->cast_to<BasicType>().getKind()); } } TEST(Reprise, GetResultTypeForArithmetic) { COMPILE_FILE("tests/Reprise/UnitTests/arithmetic_result_type.c"); BlockStatement& main = frontend.getProgramUnit().getUnit(0).getGlobals().findSubroutine("main")->getBodyBlock(); ASSERT_EQ(16, main.getChildCount()); for(int i = 0; i < 10; ++i) { EXPECT_EQ(BasicType::BT_INT32, main.getChild(i).cast_to<ExpressionStatement>().get().getResultType()->cast_to<BasicType>().getKind()); } EXPECT_EQ(BasicType::BT_FLOAT32, main.getChild(10).cast_to<ExpressionStatement>().get().getResultType()->cast_to<BasicType>().getKind()); EXPECT_EQ(BasicType::BT_FLOAT64, main.getChild(11).cast_to<ExpressionStatement>().get().getResultType()->cast_to<BasicType>().getKind()); EXPECT_TRUE(main.getChild(12).cast_to<ExpressionStatement>().get().getResultType()->is_a<PtrType>()); 
EXPECT_TRUE(main.getChild(13).cast_to<ExpressionStatement>().get().getResultType()->is_a<PtrType>()); EXPECT_EQ(BasicType::BT_UINT32, main.getChild(14).cast_to<ExpressionStatement>().get().getResultType()->cast_to<BasicType>().getKind()); EXPECT_EQ(BasicType::BT_INT32, main.getChild(15).cast_to<ExpressionStatement>().get().getResultType()->cast_to<BasicType>().getKind()); } TEST(Reprise, RecursiveStructs) { COMPILE_FILE("tests/Reprise/UnitTests/recursive_structs.c"); ASSERT_GE(1, frontend.getProgramUnit().getUnitCount()); TranslationUnit& unit = frontend.getProgramUnit().getUnit(0); std::stringstream ss; OPS::Backends::OutToC outc(ss); unit.accept(outc); } TEST(Reprise, ArrayEqual) { COMPILE_FILE("tests/Reprise/UnitTests/array_equal.c"); TypeBase& Atype = frontend.getProgramUnit().getUnit(0).getGlobals().findVariable("A")->getType(); TypeBase& Btype = frontend.getProgramUnit().getUnit(0).getGlobals().findVariable("B")->getType(); EXPECT_TRUE(Atype.isEqual(Btype)); }
package cn.chaboshi.test; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Color; import android.support.annotation.NonNull; import android.support.design.widget.BottomSheetDialog; import android.support.v7.widget.GridLayoutManager; import android.support.v7.widget.RecyclerView; import android.util.TypedValue; import android.view.Gravity; import android.view.View; import android.view.ViewGroup; import android.widget.RelativeLayout; import android.widget.TextView; import java.util.ArrayList; import java.util.List; /** * ${车牌号汉字选择弹框} * * @author Su * @create 2018-07-24-11:36 */ public class PlateNumberDialog extends BottomSheetDialog { private Context mContext; private OnItemSelectedListener mSelectedListener; private static List<String> sStringList = new ArrayList<>(); static { sStringList.add("京"); sStringList.add("津"); sStringList.add("冀"); sStringList.add("晋"); sStringList.add("蒙"); sStringList.add("辽"); sStringList.add("吉"); sStringList.add("黑"); sStringList.add("沪"); sStringList.add("苏"); sStringList.add("浙"); sStringList.add("皖"); sStringList.add("闽"); sStringList.add("赣"); sStringList.add("鲁"); sStringList.add("豫"); sStringList.add("鄂"); sStringList.add("湘"); sStringList.add("粤"); sStringList.add("桂"); sStringList.add("琼"); sStringList.add("渝"); sStringList.add("川"); sStringList.add("贵"); sStringList.add("云"); sStringList.add("藏"); sStringList.add("陕"); sStringList.add("甘"); sStringList.add("青"); sStringList.add("宁"); sStringList.add("新"); } public PlateNumberDialog(@NonNull Context context) { super(context); mContext = context; init(); } private void init() { //初始化整体布局 RecyclerView rcv = new RecyclerView(mContext); RelativeLayout.LayoutParams lp = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT); lp.setMargins(dip2px(mContext, 5), dip2px(mContext, 5), dip2px(mContext, 5), dip2px(mContext, 5)); rcv.setLayoutParams(lp); rcv.setLayoutManager(new 
GridLayoutManager(mContext, 5)); rcv.setAdapter(new PlateAdapter()); setContentView(rcv); } public void show() { super.show(); } public void show(OnItemSelectedListener listener) { super.show(); mSelectedListener = listener; } //省适配器 class PlateAdapter extends RecyclerView.Adapter<PlateAdapter.PlateHolder> { @NonNull @Override public PlateHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) { RelativeLayout rl = new RelativeLayout(mContext); RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT); layoutParams.setMargins(dip2px(mContext, 5), dip2px(mContext, 5), dip2px(mContext, 5), dip2px(mContext, 5)); rl.setLayoutParams(layoutParams); TextView textView = new TextView(mContext); textView.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT)); textView.setTextColor(Color.parseColor("#FF2C241F")); //代码设置?android:attr/selectableItemBackground TypedValue typedValue = new TypedValue(); int[] attribute = new int[]{android.R.attr.selectableItemBackground}; TypedArray typedArray = mContext.getTheme().obtainStyledAttributes(typedValue.resourceId, attribute); textView.setBackground(typedArray.getDrawable(0)); textView.setPadding(0, dip2px(mContext, 5), 0, dip2px(mContext, 5)); textView.getPaint().setFakeBoldText(true); textView.setGravity(Gravity.CENTER); rl.setClickable(true); textView.setClickable(true); rl.addView(textView); rl.setBackgroundColor(Color.parseColor("#f2f2f2")); return new PlateHolder(rl); } @Override public void onBindViewHolder(@NonNull PlateHolder holder, final int position) { holder.mTextView.setText(sStringList.get(position)); if (mSelectedListener != null) { holder.mTextView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { mSelectedListener.onSelectedListener(sStringList.get(position)); PlateNumberDialog.super.dismiss(); } }); } } @Override 
public int getItemCount() { return sStringList.size(); } class PlateHolder extends RecyclerView.ViewHolder { TextView mTextView; public PlateHolder(View itemView) { super(itemView); RelativeLayout rl = (RelativeLayout) itemView; mTextView = (TextView) rl.getChildAt(0); } } } public void setOnItemSelectedListener(OnItemSelectedListener listener) { mSelectedListener = listener; } interface OnItemSelectedListener { void onSelectedListener(String province); } public static int dip2px(Context context, float dpValue) { final float scale = context.getResources().getDisplayMetrics().density; return (int) (dpValue * scale + 0.5f); } }
/** * shifting - right rotation just one unit * * @param array the source array. * @param startIndex starting position * @param endIndex finishing position */ @TimeComplexity("O(n)") @SpaceComplexity("O(1)") public static void rightRotate(int[] array, int startIndex, int endIndex) { int temp = array[endIndex]; rightShift(array, startIndex, endIndex); array[startIndex] = temp; }
<reponame>quantfamily/foreverbull<filename>tests/helper/socket_test.go<gh_stars>1-10 package helper import ( "testing" "github.com/stretchr/testify/assert" ) func TestLocalSocket(t *testing.T) { ls := LocalSocket(t) assert.NotNil(t, ls.ToRead) assert.NotNil(t, ls.Written) t.Run("TestRead", func(t *testing.T) { input := []byte("hello world") output := make([]byte, len(input)) ls.ToRead = input if _, err := ls.Read(output); err != nil { t.Error("Error reading") } assert.Equal(t, input, output) }) t.Run("TestWrite", func(t *testing.T) { input := []byte("world, hello") if _, err := ls.Write(input); err != nil { t.Error("Error writing") } assert.Equal(t, 1, len(ls.Written)) assert.Equal(t, input, ls.Written[0]) }) t.Run("TestClear", func(t *testing.T) { if err := ls.Clear(); err != nil { t.Error("Error clearing") } assert.Equal(t, 0, len(ls.Written)) }) if err := ls.Close(); err != nil { t.Error("Error closing") } }
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "WebrtcMediaDataDecoderCodec.h"
#include "ImageContainer.h"
#include "MediaDataDecoderProxy.h"
#include "PDMFactory.h"
#include "VideoUtils.h"
#include "mozilla/layers/ImageBridgeChild.h"
#include "mozilla/media/MediaUtils.h"
#include "mozilla/StaticPrefs_media.h"

namespace mozilla {

// Bridges webrtc's VideoDecoder interface onto Gecko's MediaDataDecoder
// machinery. Decoding work is dispatched to a dedicated task queue and
// awaited synchronously via media::Await.
WebrtcMediaDataDecoder::WebrtcMediaDataDecoder(nsACString& aCodecMimeType,
                                               TrackingId aTrackingId)
    : mThreadPool(GetMediaThreadPool(MediaThreadType::SUPERVISOR)),
      mTaskQueue(TaskQueue::Create(do_AddRef(mThreadPool),
                                   "WebrtcMediaDataDecoder::mTaskQueue")),
      mImageContainer(MakeAndAddRef<layers::ImageContainer>(
          layers::ImageContainer::ASYNCHRONOUS)),
      mFactory(new PDMFactory()),
      mTrackType(TrackInfo::kUndefinedTrack),
      mCodecType(aCodecMimeType),
      mTrackingId(std::move(aTrackingId)) {}

WebrtcMediaDataDecoder::~WebrtcMediaDataDecoder() {}

// Records the video track info from webrtc's settings and creates the
// underlying platform decoder. Returns true on success.
bool WebrtcMediaDataDecoder::Configure(
    const webrtc::VideoDecoder::Settings& settings) {
  // NOTE(review): `codec` is declared but never used in this method.
  nsCString codec;
  mTrackType = TrackInfo::kVideoTrack;
  mInfo = VideoInfo(settings.max_render_resolution().Width(),
                    settings.max_render_resolution().Height());
  mInfo.mMimeType = mCodecType;

#ifdef MOZ_WIDGET_GTK
  // On GTK, honor the pref that disables hardware VP8 decoding.
  if (mInfo.mMimeType.EqualsLiteral("video/vp8") &&
      !StaticPrefs::media_navigator_mediadatadecoder_vp8_hardware_enabled()) {
    mDisabledHardwareAcceleration = true;
  }
#endif

  return WEBRTC_VIDEO_CODEC_OK == CreateDecoder();
}

// Decodes one encoded frame synchronously and forwards any produced video
// frames to the registered callback. On decode failure the decoder is
// re-created (with hardware acceleration disabled via the scope-exit below)
// and an error is returned so webrtc requests a new key frame.
int32_t WebrtcMediaDataDecoder::Decode(const webrtc::EncodedImage& aInputImage,
                                       bool aMissingFrames,
                                       int64_t aRenderTimeMs) {
  if (!mCallback || !mDecoder) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (!aInputImage.data() || !aInputImage.size()) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  // Always start with a complete key frame.
  if (mNeedKeyframe) {
    if (aInputImage._frameType != webrtc::VideoFrameType::kVideoFrameKey)
      return WEBRTC_VIDEO_CODEC_ERROR;
    // We have a key frame - is it complete?
    mNeedKeyframe = false;
  }

  // If any early return below fires, mark hardware acceleration as disabled
  // so the re-created decoder falls back to software. release() at the end
  // of the method cancels this on the success path.
  auto disabledHardwareAcceleration =
      MakeScopeExit([&] { mDisabledHardwareAcceleration = true; });

  RefPtr<MediaRawData> compressedFrame =
      new MediaRawData(aInputImage.data(), aInputImage.size());
  if (!compressedFrame->Data()) {
    return WEBRTC_VIDEO_CODEC_MEMORY;
  }

  compressedFrame->mTime =
      media::TimeUnit::FromMicroseconds(aInputImage.Timestamp());
  compressedFrame->mTimecode =
      media::TimeUnit::FromMicroseconds(aRenderTimeMs * 1000);
  compressedFrame->mKeyframe =
      aInputImage._frameType == webrtc::VideoFrameType::kVideoFrameKey;
  {
    // Block this thread until the decode promise settles; results land in
    // mResults, failures in mError.
    media::Await(
        do_AddRef(mThreadPool), mDecoder->Decode(compressedFrame),
        [&](const MediaDataDecoder::DecodedData& aResults) {
          mResults = aResults.Clone();
          mError = NS_OK;
        },
        [&](const MediaResult& aError) { mError = aError; });

    // Hand every decoded image to webrtc, reusing the input frame's RTP
    // timestamp and rotation.
    for (auto& frame : mResults) {
      MOZ_ASSERT(frame->mType == MediaData::Type::VIDEO_DATA);
      RefPtr<VideoData> video = frame->As<VideoData>();
      MOZ_ASSERT(video);
      if (!video->mImage) {
        // Nothing to display.
        continue;
      }
      rtc::scoped_refptr<ImageBuffer> image(
          new rtc::RefCountedObject<ImageBuffer>(std::move(video->mImage)));

      auto videoFrame = webrtc::VideoFrame::Builder()
                            .set_video_frame_buffer(image)
                            .set_timestamp_rtp(aInputImage.Timestamp())
                            .set_rotation(aInputImage.rotation_)
                            .build();
      mCallback->Decoded(videoFrame);
    }
    mResults.Clear();
  }

  // A real decode error (not a cancellation): rebuild the decoder before
  // reporting failure so the next frame uses a fresh instance.
  if (NS_FAILED(mError) && mError != NS_ERROR_DOM_MEDIA_CANCELED) {
    CreateDecoder();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (NS_FAILED(mError)) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  // Success: keep hardware acceleration as-is.
  disabledHardwareAcceleration.release();
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t WebrtcMediaDataDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* aCallback) {
  mCallback = aCallback;
  return WEBRTC_VIDEO_CODEC_OK;
}

// Flushes and shuts down the current decoder asynchronously (the lambda keeps
// it alive until shutdown completes) and resets per-stream state.
int32_t WebrtcMediaDataDecoder::Release() {
  if (mDecoder) {
    RefPtr<MediaDataDecoder> decoder = std::move(mDecoder);
    decoder->Flush()->Then(mTaskQueue, __func__,
                           [decoder]() { decoder->Shutdown(); });
  }

  mNeedKeyframe = true;
  mError = NS_OK;

  return WEBRTC_VIDEO_CODEC_OK;
}

bool WebrtcMediaDataDecoder::OnTaskQueue() const {
  return mTaskQueue->IsOnCurrentThread();
}

// Creates (or re-creates) the platform decoder synchronously, wraps it in a
// MediaDataDecoderProxy, and initializes it. Returns a WEBRTC_VIDEO_CODEC_*
// status.
int32_t WebrtcMediaDataDecoder::CreateDecoder() {
  RefPtr<layers::KnowsCompositor> knowsCompositor =
      layers::ImageBridgeChild::GetSingleton();

  if (mDecoder) {
    Release();
  }

  RefPtr<TaskQueue> tq =
      TaskQueue::Create(GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER),
                        "webrtc decode TaskQueue");
  RefPtr<MediaDataDecoder> decoder;

  // Block until decoder creation resolves on the decode task queue. When
  // hardware acceleration was previously disabled, request software-only.
  media::Await(
      do_AddRef(mThreadPool), InvokeAsync(tq, __func__, [&] {
        RefPtr<GenericPromise> p =
            mFactory
                ->CreateDecoder(
                    {mInfo,
                     CreateDecoderParams::OptionSet(
                         CreateDecoderParams::Option::LowLatency,
                         CreateDecoderParams::Option::FullH264Parsing,
                         CreateDecoderParams::Option::
                             ErrorIfNoInitializationData,
                         mDisabledHardwareAcceleration
                             ? CreateDecoderParams::Option::
                                   HardwareDecoderNotAllowed
                             : CreateDecoderParams::Option::Default),
                     mTrackType, mImageContainer, knowsCompositor,
                     Some(mTrackingId)})
                ->Then(
                    tq, __func__,
                    [&](RefPtr<MediaDataDecoder>&& aDecoder) {
                      decoder = std::move(aDecoder);
                      return GenericPromise::CreateAndResolve(true, __func__);
                    },
                    [](const MediaResult& aResult) {
                      return GenericPromise::CreateAndReject(
                          NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
                    });
        return p;
      }));

  if (!decoder) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // We need to wrap our decoder in a MediaDataDecoderProxy so that it always
  // run on an nsISerialEventTarget (which the webrtc code doesn't do)
  mDecoder = new MediaDataDecoderProxy(decoder.forget(), tq.forget());

  // Block until Init resolves; the resulting status becomes the return code.
  media::Await(
      do_AddRef(mThreadPool), mDecoder->Init(),
      [&](TrackInfo::TrackType) { mError = NS_OK; },
      [&](const MediaResult& aError) { mError = aError; });

  return NS_SUCCEEDED(mError) ? WEBRTC_VIDEO_CODEC_OK
                              : WEBRTC_VIDEO_CODEC_ERROR;
}

}  // namespace mozilla
import sys li = list(map(int,input().split())) li.sort() if(str(li[0]) == str(li[1]) == str(li[2])): print("No") sys.exit() if(str(li[0]) != str(li[1]) != str(li[2])): print("No") sys.exit() for i in range(3): if(str(li[i]) == str(li[i+1])): print("Yes") break
def update_bindings(item_table: ItemTable, region: dict = None, excel_filepath: str = None, sheet_name: str = None) -> None: if region: bindings["$left"] = region['left'] bindings["$right"] = region['right'] bindings["$top"] = region['top'] bindings["$bottom"] = region['bottom'] if excel_filepath: add_excel_file_to_bindings(excel_filepath, sheet_name) bindings["item_table"] = item_table
/**
 * @license
 * Copyright Google Inc. All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://bangular.io/license
 */

import {DebugContext, NodeFlags, QueryValueType, Services, asElementData, asTextData, directiveDef, elementDef, textDef} from '@bangular/core/src/view/index';

import {compViewDef, createAndGetRootNodes} from './helper';

{
  describe('View Services', () => {
    describe('DebugContext', () => {
      class AComp {}

      class AService {}

      // Builds a root view containing a <div> host element with an AComp
      // component whose view holds a <span> (with a 'ref' element-ref
      // query), an AService directive, and a text node 'a'.
      function createViewWithData() {
        const {view} = createAndGetRootNodes(compViewDef([
          elementDef(
              0, NodeFlags.None, null, null, 1, 'div', null, null, null, null,
              () => compViewDef([
                elementDef(
                    0, NodeFlags.None, [['ref', QueryValueType.ElementRef]],
                    null, 2, 'span'),
                directiveDef(1, NodeFlags.None, null, 0, AService, []),
                textDef(2, null, ['a'])
              ])),
          directiveDef(1, NodeFlags.Component, null, 0, AComp, []),
        ]));
        return view;
      }

      it('should provide data for elements', () => {
        const view = createViewWithData();
        const compView = asElementData(view, 0).componentView;

        // Debug context for node 0 (the <span>) inside the component view.
        const debugCtx = Services.createDebugContext(compView, 0);

        expect(debugCtx.componentRenderElement).toBe(asElementData(view, 0).renderElement);
        expect(debugCtx.renderNode).toBe(asElementData(compView, 0).renderElement);
        expect(debugCtx.injector.get(AComp)).toBe(compView.component);
        expect(debugCtx.component).toBe(compView.component);
        expect(debugCtx.context).toBe(compView.context);
        expect(debugCtx.providerTokens).toEqual([AService]);
        expect(debugCtx.references['ref'].nativeElement)
            .toBe(asElementData(compView, 0).renderElement);
      });

      it('should provide data for text nodes', () => {
        const view = createViewWithData();
        const compView = asElementData(view, 0).componentView;

        // Debug context for node 2 (the text node 'a').
        const debugCtx = Services.createDebugContext(compView, 2);

        expect(debugCtx.componentRenderElement).toBe(asElementData(view, 0).renderElement);
        expect(debugCtx.renderNode).toBe(asTextData(compView, 2).renderText);
        expect(debugCtx.injector.get(AComp)).toBe(compView.component);
        expect(debugCtx.component).toBe(compView.component);
        expect(debugCtx.context).toBe(compView.context);
      });

      it('should provide data for other nodes based on the nearest element parent', () => {
        const view = createViewWithData();
        const compView = asElementData(view, 0).componentView;

        // Node 1 is the AService directive: its renderNode falls back to
        // its parent element (node 0).
        const debugCtx = Services.createDebugContext(compView, 1);

        expect(debugCtx.renderNode).toBe(asElementData(compView, 0).renderElement);
      });
    });
  });
}
package api import ( "crypto/ecdsa" "crypto/sha256" "crypto/sha512" "crypto/x509" "encoding/asn1" "encoding/hex" "errors" "fmt" "github.com/duo-labs/webauthn/config" "github.com/duo-labs/webauthn/models" "github.com/ugorji/go/codec" "math/big" "net/url" "strings" b64 "encoding/base64" req "github.com/nimblehq/webauthn-growth-demo/request" ) func DecodeAttestationObject(rawAttObj string) (req.EncodedAuthData, error) { b64Decoder := b64.URLEncoding.Strict() attObjBytes, err := b64Decoder.DecodeString(rawAttObj) if err != nil { fmt.Println("b64 Decode error:", err) return req.EncodedAuthData{}, err } var handler codec.Handle = new(codec.CborHandle) var decoder = codec.NewDecoderBytes(attObjBytes, handler) var ead req.EncodedAuthData err = decoder.Decode(&ead) if err != nil { fmt.Println("CBOR Decode error:", err) return req.EncodedAuthData{}, err } return ead, err } // ParseAuthData - Parses the AuthData returned from the authenticator from a byte array func ParseAuthData(ead req.EncodedAuthData) (req.DecodedAuthData, error) { decodedAuthData := req.DecodedAuthData{} rpID := ead.AuthData[:32] rpIDHash := hex.EncodeToString(rpID) intFlags := ead.AuthData[32] flags := fmt.Sprintf("%08b", intFlags) counter := ead.AuthData[33:38] if len(ead.AuthData) < 38 { err := errors.New("AuthData byte array is not long enough") return decodedAuthData, err } aaguid := ead.AuthData[38:54] credIDLen := ead.AuthData[53] + ead.AuthData[54] credID := ead.AuthData[55 : 55+credIDLen] cborPubKey := ead.AuthData[55+credIDLen:] var handler codec.Handle = new(codec.CborHandle) decoder := codec.NewDecoderBytes(cborPubKey, handler) var pubKey models.PublicKey err := decoder.Decode(&pubKey) if err != nil { fmt.Println("Error decoding the Public Key in Authentication Data") return decodedAuthData, err } decodedAuthData = req.DecodedAuthData{ // Flags are used to determine user presence, user verification, and if attData is present Flags: []byte(flags), // Counter is used to prevent replay attacks 
Counter: counter, // RPIDHash is used to verify the Auth Request RPIDHash: rpIDHash, // AAGUID is the ID of the Authenticator device line AAGUID: aaguid, // CredID is the ID of the credential we are creating CredID: credID, // Public Key of the credential key pair PubKey: pubKey, // Format of the attestation statement (ex, "u2f", "safety-net"), currently defaults to "none" Format: ead.Format, } // If the format is one that contains an authenticator attestation certificate then parse it if ead.Format == "fido-u2f" { das, err := ParseAttestationStatement(ead.AttStatement) if err != nil { fmt.Println("Error parsing Attestation Statement from Authentication Data") return decodedAuthData, err } // The authenticator's attestation statement decodedAuthData.AttStatement = das } return decodedAuthData, err } // ParseAssertionData - Parses assertion data from byte array to a struct func ParseAssertionData(assertionData []byte, hexSig string) (req.DecodedAssertionData, error) { decodedAssertionData := req.DecodedAssertionData{} rpID := assertionData[:32] rpIDHash := hex.EncodeToString(rpID) intFlags := assertionData[32] counter := assertionData[33:] if len(assertionData) > 38 { err := errors.New("assertionData byte array is too long") return decodedAssertionData, err } rawSig, err := hex.DecodeString(hexSig) if err != nil { return decodedAssertionData, err } decodedAssertionData = req.DecodedAssertionData{ Flags: intFlags, RPIDHash: rpIDHash, Counter: counter, RawAssertionData: assertionData, Signature: rawSig, } return decodedAssertionData, err } // ParseAttestationStatement - parse the Attestation Certificate returned by the // the authenticator func ParseAttestationStatement( ead req.EncodedAttestationStatement) (req.DecodedAttestationStatement, error) { das := req.DecodedAttestationStatement{} // Currently, for fido-u2f formatted attStatements, we only support one x509 cert // but it is returned to us as an array cert, err := x509.ParseCertificate(ead.X509Cert[0]) if err 
!= nil { return das, err } das = req.DecodedAttestationStatement{ Certificate: cert, Signature: ead.Signature, } return das, nil } // VerifyAssertionData - Verifies that the Assertion data provided is correct and valid func VerifyAssertionData( clientData *req.DecodedClientData, authData *req.DecodedAssertionData, sessionData *models.SessionData, credentialID string) (bool, models.Credential, error) { // Step 1. Using credential’s id attribute (or the corresponding rawId, // if base64url encoding is inappropriate for your use case), look up the // corresponding credential public key. fmt.Printf("Auth data is %+v\n", authData) // var credential models.Credential credential, err := models.GetCredentialForUser(&sessionData.User, credentialID) if err != nil { fmt.Println("Issue getting credential during Assertion") err := errors.New("Issue getting credential during Assertion") return false, credential, err } // Step 2. Let cData, aData and sig denote the value of credential’s // response's clientDataJSON, authenticatorData, and signature respectively. // Okeydoke // Step 3. Perform JSON deserialization on cData to extract the client data // C used for the signature. // Already done above fmt.Printf("Decoded Client Data: %+v\n", clientData) fmt.Printf("Auth Data: %+v\n", authData) credential.Counter = authData.Counter err = CheckCredentialCounter(&credential) if err != nil { fmt.Println("Error updating the the counter") err := errors.New("Error updating the the counter") return false, credential, err } // Step 4. Verify that the type in C is the string webauthn.create if clientData.ActionType != "webauthn.get" { fmt.Println("Client Request type is: ", string(clientData.ActionType)) err := errors.New("The webauthn action type is incorrect") return false, credential, err } // Step 5. Verify that the challenge member of C matches the challenge that // was sent to the authenticator in the PublicKeyCredentialRequestOptions // passed to the get() call. 
sessionDataChallenge := strings.Trim(b64.URLEncoding.EncodeToString(sessionData.Challenge), "=") if sessionDataChallenge != clientData.Challenge { fmt.Println("Stored Challenge is: ", string(sessionDataChallenge)) fmt.Println("Client Challenge is: ", string(clientData.Challenge)) err := errors.New("Stored and Given Sessions do not match") return false, credential, err } // Step 6. Verify that the origin member of C matches the Relying Party's origin. cdo, err := url.Parse(clientData.Origin) if err != nil { fmt.Println("Error Parsing Client Data Origin: ", string(clientData.Origin)) err := errors.New("Error Parsing the Client Data Origin") return false, credential, err } if sessionData.RelyingPartyID != cdo.Hostname() { fmt.Println("Stored Origin is: ", string(sessionData.RelyingPartyID)) fmt.Println("Client Origin is: ", string(clientData.Origin)) err := errors.New("Stored and Client Origin do not match") return false, credential, err } // Step 7. Verify that the tokenBindingId member of C (if present) matches the // Token Binding ID for the TLS connection over which the signature was obtained. // No Token Binding ID exists in this example. Sorry bruv // Step 8. Verify that the clientExtensions member of C is a subset of the extensions // requested by the Relying Party and that the authenticatorExtensions in C is also a // subset of the extensions requested by the Relying Party. // We don't have any clientExtensions // Step 9. Verify that the RP ID hash in aData is the SHA-256 hash of the RP ID expected // by the Relying Party. hasher := sha256.New() hasher.Write([]byte(config.Conf.HostAddress)) // We use our default RP ID - Host RPIDHash := hasher.Sum(nil) hexRPIDHash := hex.EncodeToString(RPIDHash) if hexRPIDHash != (authData.RPIDHash) { fmt.Println("Stored RP Hash is: ", hexRPIDHash) fmt.Println("Client RP Hash is: ", string(authData.RPIDHash)) err := errors.New("Stored and Client RP ID Hash do not match") return false, credential, err } // Step 10. 
Let hash be the result of computing a hash over the cData using the // algorithm represented by the hashAlgorithm member of C. var clientDataHash []byte switch clientData.HashAlgorithm { case "SHA-512": h := sha512.New() h.Write([]byte(clientData.RawClientData)) clientDataHash = h.Sum(nil) fmt.Printf("Client data hash is %x\n", clientDataHash) case "SHA-256": h := sha256.New() h.Write([]byte(clientData.RawClientData)) clientDataHash = h.Sum(nil) fmt.Printf("Client data hash is %x\n", clientDataHash) default: // Currently, the Editor's Draft makes no mention of hashAlgorithm // in the client data, but we can default to SHA256. h := sha256.New() h.Write([]byte(clientData.RawClientData)) clientDataHash = h.Sum(nil) fmt.Printf("Client data hash is %x\n", clientDataHash) } // Step 11. Using the credential public key looked up in step 1, verify that sig // is a valid signature over the binary concatenation of aData and hash. binCat := append(authData.RawAssertionData, clientDataHash...) pubKey, err := models.GetPublicKeyForCredential(&credential) if err != nil { fmt.Println("Error retreiving Public Key for Credential") err := errors.New("Error retrieving public key for credential") return false, credential, err } var ecsdaSig struct { R, S *big.Int } sig := authData.Signature _, err = asn1.Unmarshal(sig, &ecsdaSig) if err != nil { return false, credential, errors.New("Error unmarshalling signature") } h := sha256.New() h.Write(binCat) return ecdsa.Verify(&pubKey, h.Sum(nil), ecsdaSig.R, ecsdaSig.S), credential, nil }
/**
 * Default diffraction instrument model.
 * <p/>
 * Registers itself under the "Diffraction Instrument" identifier and, when
 * older analysis files are loaded, migrates legacy theta-displacement
 * parameters into the angular calibration object
 * (see {@link #checkConsistencyForVersion(double)}).
 *
 * @author Luca Lutterotti
 * @version $Revision: 1.3 $, $Date: 2006/11/10 09:33:01 $
 * @since JDK1.1
 */
public class DefaultInstrument extends Instrument {

  // Model identifier; also used for identifier, IDlabel and description.
  public static String modelID = "Diffraction Instrument";

  /**
   * Creates the instrument as a child of the given object with a custom label.
   *
   * @param obj    the parent object
   * @param alabel the label for this instrument
   */
  public DefaultInstrument(XRDcat obj, String alabel) {
    super(obj, alabel);
    identifier = modelID;
    IDlabel = modelID;
    description = modelID;
  }

  /**
   * Creates the instrument as a child of the given object using the default
   * model label.
   *
   * @param afile the parent object
   */
  public DefaultInstrument(XRDcat afile) {
    this(afile, modelID);
  }

  /**
   * No-argument constructor; only initializes the identity strings.
   */
  public DefaultInstrument() {
    identifier = modelID;
    IDlabel = modelID;
    description = modelID;
  }

  /**
   * Migrates data loaded from older file versions into the current model.
   *
   * @param version the file format version being loaded
   */
  public void checkConsistencyForVersion(double version) {
    // thetaDisplacement = getParameterLoopVector(thetaDisplacementID);
    int n2theta = numberOfLoopParameters[thetaDisplacementID];
    if (getAngularCalibrationMethod().equalsIgnoreCase("no ang")) {
      // Legacy file with no angular calibration: convert each stored
      // theta-displacement parameter into an "Instrument disalignment"
      // calibration parameter, then clear the legacy loop.
      // NOTE(review): the value's sign is flipped here — presumably
      // displacement and disalignment use opposite sign conventions;
      // confirm against the angular calibration model.
      setAngularCalibration("Instrument disalignment");
      getAngularCalibration().parameterloopField[0].removeAllItems();
      for (int i = 0; i < n2theta; i++)
        getAngularCalibration().addparameterloopField(0,
            new Parameter(getAngularCalibration(),
                getAngularCalibration().getParameterString(0, i),
                -((Parameter) parameterloopField[thetaDisplacementID].elementAt(i)).getValueD()));
      parameterloopField[thetaDisplacementID].removeAllItems();
    } else if (getAngularCalibrationMethod().equalsIgnoreCase("Instrument disalignment")) {
      // Calibration already present: ensure it has at least one parameter.
      if (getAngularCalibration().numberofelementPL(0) == 0)
        getAngularCalibration().addparameterloopField(0,
            new Parameter(getAngularCalibration(),
                getAngularCalibration().getParameterString(0, 0), 0));
    }
  }

}
/**
 * Creates a key pair from private key and public key files.
 *
 * @param privateKeyFile The private key file; must not be {@code null}.
 * @param publicKeyFile  The public key file, or {@code null} if none.
 * @param passphrase     The passphrase for the private key.
 * @return The created key pair.
 */
public static Identity fromFiles(final File privateKeyFile, final File publicKeyFile, final byte[] passphrase) {
    Objects.requireNonNull(privateKeyFile);
    final String publicKeyLocation;
    if (publicKeyFile != null) {
        publicKeyLocation = publicKeyFile.getAbsolutePath();
    } else {
        publicKeyLocation = null;
    }
    return new Identity(jsch -> jsch.addIdentity(privateKeyFile.getAbsolutePath(), publicKeyLocation, passphrase));
}
Raman spectroscopic analysis of Mo/Si multilayers. Raman spectra are reported from MoSi2 polycrystalline powder and soft x-ray Mo/Si multilayers. The sharp lines at 323 and 438 cm-1 are all due to crystalline MoSi2. These lines in the powder sample intensify with annealing. The Raman spectra of as-deposited multilayers show a broad asymmetric peak, highest at about 480 cm-1. We attribute this to α-Si, which is highly disordered. In contrast to α-Si in semiconductor/semiconductor and semiconductor/dielectric multilayers, in the Mo/Si samples the Raman signal can vanish after modest heating. This provides evidence that the composition of the silicon component of the multilayer changes even with 200°C annealing. Further annealing also produces the signature for crystalline MoSi2 in the multilayer samples. This is the first report of the characterization of Mo/Si soft x-ray multilayers by Raman spectroscopy, and it indicates that Raman spectroscopy may be an effective technique for characterizing these soft x-ray multilayers and may be useful in studying their interfaces.
#!/usr/bin/env python from __future__ import print_function from sklearn.feature_extraction import FeatureHasher from sklearn.ensemble import RandomForestClassifier from sklearn.pipeline import make_pipeline from sklearn.metrics import log_loss import ctr learner = RandomForestClassifier(verbose = False, n_jobs = -1) for ID,x,y in ctr.data(ctr.train, batchsize = 1): pass
Smithtown High School West teacher Veronica Welsh said her students were racist if they support Donald Trump. Welsh was reportedly disciplined after her slanderous Facebook post. CBS Local reported: A Long Island teacher faces backlash over a Facebook post calling some of her students “racist.” The halls of Smithtown High School West echoed with debate and heated conversation Friday as students reacted to the world language teacher’s social media post. A screenshot of Veronica Welsh’s post was circulated, which read, “This week is spirit week at Smithtown HS West. It’s easy to spot which students are racist by the Trump gear they’re sporting for USA Day.” Students, dressed in school colors, had vehement reactions. “She made a mistake by putting the school’s name in it,” one student told CBS2’s Jennifer McLogan. “But she has a right to her opinion.” “It’s her personal account too, not like she went into school and wrote it or anything. She has the right to do it,” another student said. “It’s not a good idea because then like she influences the students with the same ideas,” another said. The school’s superintendent called the posting an extremely unfortunate incident and highly inappropriate. Welsh was not in school Friday, and CBS2 was not able to reach her at her home nor on her phone. The original Facebook post was taken down, but parents continued to share their opinions. “There’s no reason for her to pass judgement on those kids on a social platform,” one parent told McLogan.
Chicago Mayor and former Chief of Staff for President Obama Rahm Emanuel (D) has asked the city council to pass a law decriminalizing simple possession of marijuana. From the Chicago Tribune: Mayor Rahm Emanuel is throwing his support behind a plan to decriminalize small amounts of marijuana. Under the proposed ordinance, police officers will have the discretion to issue tickets with fines ranging from $100 to $500 for people carrying 15 grams or less of pot. […] Currently people caught in possession face a misdemeanor charge punishable by up to six months in jail and a $1,500 fine. This would be an important step for marijuana reform if it succeeds. Chicago is the United States’ third largest city, containing over two and a half million people. On a numerical basis, Chicago decriminalizing minor possession of marijuana should impact even more people than the new law recently signed in Rhode Island decriminalizing marijuana. By comparison, Rhode Island has just over a million residents. Recently we have seen a significant wave of top politicians coming out in support of reducing penalties for marijuana possession. At the beginning of this month New York Governor Andrew Cuomo (D) proposed reducing the penalties for marijuana in “public view.” Only a few days ago Rhode Island Governor Lincoln Chafee (I) signed a new marijuana decriminalization law. And less than a year ago Connecticut Governor Dannel Malloy (D) helped push through a marijuana decriminalization law in his state.
Working with resistance to diversity issues in the classroom: Lessons from teacher training and multicultural education Abstract Social work programs are mandated by the Council on Social Work Education to address issues of difference, privilege, oppression, and discrimination at both the baccalaureate and graduate levels. Teaching courses that address these issues can be difficult, especially when students are resistant to the kinds of analysis and self‐reflection that diversity education requires. While the idea of resistance is discussed at length in the clinical social work and social work education literatures, ideas about addressing and using student resistance in multicultural social work education are not well‐developed. This article suggests that social work educators might benefit from an examination of perspectives on student resistance found in the field of education and, specifically, in the multicultural higher education and teacher training literatures. Four areas that are addressed at length in these literatures are described in this paper: Resistance as a source of information about group process; resistance as a measure of student readiness; resistance as a reflection of the larger sociopolitical context; and resistance as a resource for facilitating student learning and engagement. The paper concludes with a discussion of strategies that social work faculty might use to prevent, address, and use student resistance in multicultural social work education.
/**CFile****************************************************************

  FileName    [lpk.h]

  SystemName  [ABC: Logic synthesis and verification system.]

  PackageName [Fast Boolean matching for LUT structures.]

  Synopsis    [External declarations.]

  Author      [Alan Mishchenko]

  Affiliation [UC Berkeley]

  Date        [Ver. 1.0. Started - April 28, 2007.]

  Revision    [$Id: lpk.h,v 1.00 2007/04/28 00:00:00 alanmi Exp $]

***********************************************************************/

#ifndef ABC__opt__lpk__lpk_h
#define ABC__opt__lpk__lpk_h

////////////////////////////////////////////////////////////////////////
///                          INCLUDES                                ///
////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////
///                         PARAMETERS                               ///
////////////////////////////////////////////////////////////////////////

ABC_NAMESPACE_HEADER_START

////////////////////////////////////////////////////////////////////////
///                         BASIC TYPES                              ///
////////////////////////////////////////////////////////////////////////

/* User-tunable parameters for LUT-structure resynthesis; filled in by the
   caller and passed to Lpk_Resynthesize below. */
typedef struct Lpk_Par_t_ Lpk_Par_t;
struct Lpk_Par_t_
{
    // user-controlled parameters
    int               nLutsMax;      // (N) the maximum number of LUTs in the structure
    int               nLutsOver;     // (Q) the maximum number of LUTs not in the MFFC
    int               nVarsShared;   // (S) the maximum number of shared variables (crossbars)
    int               nGrowthLevel;  // (L) the maximum increase in the node level after resynthesis
    int               fSatur;        // iterate till saturation
    int               fZeroCost;     // accept zero-cost replacements
    int               fFirst;        // use root node and first cut only
    int               fOldAlgo;      // use old algorithm
    int               fVerbose;      // the verbosiness flag
    int               fVeryVerbose;  // additional verbose info printout
    // internal parameters
    int               nLutSize;      // (K) the LUT size (determined by the input network)
    int               nVarsMax;      // (V) the largest number of variables: V = N * (K-1) + 1
};

////////////////////////////////////////////////////////////////////////
///                      MACRO DEFINITIONS                           ///
////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////
///                         ITERATORS                                ///
////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////
///                    FUNCTION DECLARATIONS                         ///
////////////////////////////////////////////////////////////////////////

/*=== lpkCore.c ========================================================*/
/* Entry point of the LUT-structure resynthesis engine (implemented in
   lpkCore.c); operates on the network pNtk with the parameters in pPars. */
extern int          Lpk_Resynthesize( Abc_Ntk_t * pNtk, Lpk_Par_t * pPars );

ABC_NAMESPACE_HEADER_END

#endif

////////////////////////////////////////////////////////////////////////
///                       END OF FILE                                ///
////////////////////////////////////////////////////////////////////////
<gh_stars>0
// Copyright 2015 <NAME>. All rights reserved.
// Use of this source code is governed by the MIT
// license, which can be found in the LICENSE file.

package jobs

import (
	"fmt"
	"github.com/garyburd/redigo/redis"
	"time"
	"strings"
)

// Job represents a discrete piece of work to be done by a worker.
type Job struct {
	id       string // unique identifier; empty until the job is first saved
	data     []byte // gob-encoded payload (see Data)
	typ      *Type  // the registered job type; nil until set or scanned
	status   Status // lifecycle status (e.g. StatusSaved, StatusQueued)
	time     int64  // scheduled execution time, UTC UNIX nanoseconds (see Time)
	freq     int64  // nanoseconds between executions; 0 means not recurring
	priority int    // job priority (see Priority)
	err      error  // last execution error, if any (see Error)
	retries  uint   // number of remaining retries (see Retries)
	started  int64  // execution start, UNIX nanoseconds; 0 if never started
	finished int64  // execution finish, UNIX nanoseconds; 0 if never finished
	poolId   string // id of the pool the job was assigned to, if any
}

// ErrorJobNotFound is returned whenever a specific job is not found,
// e.g. from the FindById function.
type ErrorJobNotFound struct {
	id string // the id searched for; may be empty
}

func (e ErrorJobNotFound) Error() string {
	if e.id == "" {
		return fmt.Sprintf("jobs: Could not find job with the given criteria.")
	}
	return fmt.Sprintf("jobs: Could not find job with id: %s", e.id)
}

// Id returns the unique identifier used for the job. If the job has not yet
// been saved to the database, it may return an empty string.
func (j *Job) Id() string {
	return j.id
}

// Data returns the gob-encoded data of the job
func (j *Job) Data() []byte {
	return j.data
}

// Status returns the status of the job.
func (j *Job) Status() Status {
	return j.status
}

// Time returns the time at which the job should be executed in UTC UNIX
// format with nanosecond precision.
func (j *Job) Time() int64 {
	return j.time
}

// Freq returns the frequency at which the job should be executed. Specifically
// it returns the number of nanoseconds between each scheduled execution.
func (j *Job) Freq() int64 {
	return j.freq
}

// Priority returns the job's priority.
func (j *Job) Priority() int {
	return j.priority
}

// Error returns the last error that arose during execution of the job. It is
// only non-nil if the job has failed at some point.
func (j *Job) Error() error {
	return j.err
}

// Retries returns the number of remaining retries for the job.
func (j *Job) Retries() uint {
	return j.retries
}

// Started returns the time that the job started executing (in local time
// with nanosecond precision) or the zero time if the job has not started
// executing yet.
func (j *Job) Started() time.Time {
	return time.Unix(0, j.started).Local()
}

// Finished returns the time that the job finished executing (in local
// time with nanosecond precision) or the zero time if the job has not
// finished executing yet.
func (j *Job) Finished() time.Time {
	return time.Unix(0, j.finished).Local()
}

// PoolId returns the pool id of the job if it is currently being executed
// or has been executed and at some point has been assigned to a specific pool.
// Otherwise, it returns an empty string.
func (j *Job) PoolId() string {
	return j.poolId
}

// TypeName returns the name of the job's type, or an empty string if the
// type has not been set.
func (j *Job) TypeName() string {
	if j.typ == nil {
		return ""
	}
	return j.typ.name
}

// Duration returns how long the job took to execute with nanosecond
// precision. I.e. the difference between j.Finished() and j.Started().
// It returns a duration of zero if the job has not finished yet.
func (j *Job) Duration() time.Duration {
	if j.Finished().IsZero() {
		return 0 * time.Second
	}
	return j.Finished().Sub(j.Started())
}

// Key returns the key used for the hash in redis which stores all the
// fields for this job.
func (j *Job) Key() string {
	return "jobs:" + j.id
}

// IsRecurring returns true iff the job is recurring
func (j *Job) IsRecurring() bool {
	return j.freq != 0
}

// NextTime returns the time (unix UTC with nanosecond precision) that the
// job should execute next, if it is a recurring job, and 0 if it is not.
func (j *Job) NextTime() int64 {
	if !j.IsRecurring() {
		return 0
	}
	// NOTE: is this the proper way to handle rescheduling?
	// What if we schedule jobs faster than they can be executed?
	// Should we just let them build up and expect the end user to
	// allocate more workers? Or should we schedule for time.Now at
	// the earliest to prevent buildup?
	return j.time + j.freq
}

// save writes the job to the database and adds it to the appropriate indexes and status
// sets, but does not enqueue it. If you want to add it to the queue, use the enqueue method
// after save.
func (j *Job) save() error {
	t := newTransaction()
	t.saveJob(j)
	if err := t.exec(); err != nil {
		return err
	}
	return nil
}

// saveJob adds commands to the transaction to set all the fields for the main hash for the job,
// add the job to the time index, move the job to the appropriate status set. It will
// also mutate the job by 1) generating an id if the id is empty and 2) setting the status to
// StatusSaved if the status is empty.
func (t *transaction) saveJob(job *Job) {
	// Generate id if needed
	if job.id == "" {
		job.id = generateRandomId()
	}
	// Set status to saved if needed
	if job.status == "" {
		job.status = StatusSaved
	}
	// Add the job attributes to a hash
	t.command("HMSET", job.mainHashArgs(), nil)
	// Add the job to the appropriate status set
	t.setStatus(job, job.status)
	// Add the job to the time index
	t.addJobToTimeIndex(job)
}

// addJobToTimeIndex adds commands to the transaction which will, when executed,
// add the job id to the time index with a score equal to the job's time field.
// If the job has been destroyed, addJobToTimeIndex will have no effect.
func (t *transaction) addJobToTimeIndex(job *Job) {
	t.addJobToSet(job, Keys.JobsTimeIndex, float64(job.time))
}

// Refresh mutates the job by setting its fields to the most recent data
// found in the database. It returns an error if there was a problem connecting
// to the database or if the job was destroyed.
func (j *Job) Refresh() error {
	t := newTransaction()
	t.scanJobById(j.id, j)
	if err := t.exec(); err != nil {
		return err
	}
	return nil
}

// enqueue adds the job to the queue and sets its status to StatusQueued. Queued jobs will
// be completed by workers in order of priority.
Attempting to enqueue a destroyed job
// will have no effect.
func (j *Job) enqueue() error {
	if err := j.setStatus(StatusQueued); err != nil {
		return err
	}
	return nil
}

// Reschedule reschedules the job with the given time. It can be used to reschedule
// cancelled jobs. It may also be used to reschedule finished or failed jobs, however,
// in most cases if you want to reschedule finished jobs you should use the ScheduleRecurring
// method and if you want to reschedule failed jobs, you should set the number of retries > 0
// when registering the job type. Attempting to reschedule a destroyed job will have no effect.
// Reschedule returns an error if there was a problem connecting to the database.
func (j *Job) Reschedule(time time.Time) error {
	// NOTE: the parameter name shadows the time package inside this method;
	// only the argument's own methods are used here, so this is harmless.
	t := newTransaction()
	unixNanoTime := time.UTC().UnixNano()
	t.setJobField(j, "time", unixNanoTime)
	t.setStatus(j, StatusQueued)
	j.time = unixNanoTime
	t.addJobToTimeIndex(j)
	if err := t.exec(); err != nil {
		return err
	}
	// Only update the in-memory status once the transaction has succeeded.
	j.status = StatusQueued
	return nil
}

// Cancel cancels the job, but does not remove it from the database. It will be
// added to a list of cancelled jobs. If you wish to remove it from the database,
// use the Destroy method. Attempting to cancel a destroyed job will have no effect.
func (j *Job) Cancel() error {
	if err := j.setStatus(StatusCancelled); err != nil {
		return err
	}
	return nil
}

// setError sets the err property of j and adds it to the set of jobs which had errors.
// If the job has been destroyed, setError will have no effect.
// Only the error's message string is persisted to the "error" field in redis.
func (j *Job) setError(err error) error {
	j.err = err
	t := newTransaction()
	t.setJobField(j, "error", j.err.Error())
	if err := t.exec(); err != nil {
		return err
	}
	return nil
}

// Destroy removes all traces of the job from the database. If the job is currently
// being executed by a worker, the worker may still finish the job. Attempting to
// destroy a job that has already been destroyed will have no effect, so it is safe
// to call Destroy multiple times.
func (j *Job) Destroy() error {
	if j.id == "" {
		return fmt.Errorf("jobs: Cannot destroy job that doesn't have an id.")
	}
	// Start a new transaction
	t := newTransaction()
	// Call the script to destroy the job
	t.destroyJob(j)
	// Execute the transaction
	if err := t.exec(); err != nil {
		return err
	}
	j.status = StatusDestroyed
	return nil
}

// setStatus updates the job's status in the database and moves it to the appropriate
// status set. Attempting to set the status of a job which has been destroyed will have
// no effect.
func (j *Job) setStatus(status Status) error {
	if j.id == "" {
		return fmt.Errorf("jobs: Cannot set status to %s because job doesn't have an id.", status)
	}
	if j.status == StatusDestroyed {
		return fmt.Errorf("jobs: Cannot set job:%s status to %s because it was destroyed.", j.id, status)
	}
	// Use a transaction to move the job to the appropriate status set and set its status
	t := newTransaction()
	t.setStatus(j, status)
	if err := t.exec(); err != nil {
		return err
	}
	// Only mutate the in-memory status after the transaction succeeds.
	j.status = status
	return nil
}

// mainHashArgs returns the args for the hash which will store the job data
func (j *Job) mainHashArgs() []interface{} {
	// NOTE(review): "totalRetries" is written with the current *remaining*
	// retries; this looks correct only on first save, before any retries
	// have been consumed — confirm against the retry bookkeeping.
	hashArgs := []interface{}{j.Key(),
		"data", string(j.data),
		"type", j.typ.name,
		"time", j.time,
		"freq", j.freq,
		"priority", j.priority,
		"retries", j.retries,
		"totalRetries", j.retries,
		"status", j.status,
		"started", j.started,
		"finished", j.finished,
		"poolId", j.poolId,
		"poolKey", j.typ.PoolKey,
	}
	// The "error" field is only present if an error has occurred.
	if j.err != nil {
		hashArgs = append(hashArgs, "error", j.err.Error())
	}
	return hashArgs
}

// scanJob scans the values of reply into job. reply should be the
// response of an HMGET or HGETALL query.
func scanJob(reply interface{}, job *Job) error { fields, err := redis.Values(reply, nil) if err != nil { return err } else if len(fields) == 0 { return ErrorJobNotFound{} } else if len(fields)%2 != 0 { return fmt.Errorf("jobs: In scanJob: Expected length of fields to be even but got: %d", len(fields)) } for i := 0; i < len(fields)-1; i += 2 { fieldName, err := redis.String(fields[i], nil) if err != nil { return fmt.Errorf("jobs: In scanJob: Could not convert fieldName (fields[%d] = %v) of type %T to string.", i, fields[i], fields[i]) } fieldValue := fields[i+1] switch fieldName { case "id": if err := scanString(fieldValue, &(job.id)); err != nil { return err } case "data": if err := scanBytes(fieldValue, &(job.data)); err != nil { return err } case "type": typeName := "" if err := scanString(fieldValue, &typeName); err != nil { return err } Type, found := Types[typeName] if !found { // workaround for integram // trying to trim func path pos := strings.LastIndex(typeName, "/") if pos > -1 { typeName = typeName[pos+1:] } Type, found = Types[typeName] if !found { pos = strings.LastIndex(typeName, ".") if pos > -1 { typeSuffix := typeName[pos:] for tn, t := range Types{ if strings.HasSuffix(tn, typeSuffix){ Type = t found = true break } } } if !found { return fmt.Errorf("jobs: In scanJob: Could not find Type with name = %s", typeName) } } } job.typ = Type case "time": if err := scanInt64(fieldValue, &(job.time)); err != nil { return err } case "freq": if err := scanInt64(fieldValue, &(job.freq)); err != nil { return err } case "priority": if err := scanInt(fieldValue, &(job.priority)); err != nil { return err } case "retries": if err := scanUint(fieldValue, &(job.retries)); err != nil { return err } case "status": status := "" if err := scanString(fieldValue, &status); err != nil { return err } job.status = Status(status) case "started": if err := scanInt64(fieldValue, &(job.started)); err != nil { return err } case "finished": if err := scanInt64(fieldValue, 
&(job.finished)); err != nil { return err } case "poolId": if err := scanString(fieldValue, &(job.poolId)); err != nil { return err } } } return nil } // scanInt converts a reply from redis into an int and scans the value into v. func scanInt(reply interface{}, v *int) error { if v == nil { return fmt.Errorf("jobs: In scanInt: argument v was nil") } val, err := redis.Int(reply, nil) if err != nil { return fmt.Errorf("jobs: In scanInt: Could not convert %v of type %T to int.", reply, reply) } (*v) = val return nil } // scanUint converts a reply from redis into a uint and scans the value into v. func scanUint(reply interface{}, v *uint) error { if v == nil { return fmt.Errorf("jobs: In scanUint: argument v was nil") } val, err := redis.Uint64(reply, nil) if err != nil { return fmt.Errorf("jobs: In scanUint: Could not convert %v of type %T to uint.", reply, reply) } (*v) = uint(val) return nil } // scanInt64 converts a reply from redis into an int64 and scans the value into v. func scanInt64(reply interface{}, v *int64) error { if v == nil { return fmt.Errorf("jobs: In scanInt64: argument v was nil") } val, err := redis.Int64(reply, nil) if err != nil { return fmt.Errorf("jobs: In scanInt64: Could not convert %v of type %T to int64.", reply, reply) } (*v) = val return nil } // scanString converts a reply from redis into a string and scans the value into v. func scanString(reply interface{}, v *string) error { if v == nil { return fmt.Errorf("jobs: In String: argument v was nil") } val, err := redis.String(reply, nil) if err != nil { return fmt.Errorf("jobs: In String: Could not convert %v of type %T to string.", reply, reply) } (*v) = val return nil } // scanBytes converts a reply from redis into a slice of bytes and scans the value into v. 
func scanBytes(reply interface{}, v *[]byte) error { if v == nil { return fmt.Errorf("jobs: In scanBytes: argument v was nil") } val, err := redis.Bytes(reply, nil) if err != nil { return fmt.Errorf("jobs: In scanBytes: Could not convert %v of type %T to []byte.", reply, reply) } (*v) = val return nil } // scanJobById adds commands and a reply handler to the transaction which, when run, // will scan the values of the job corresponding to id into job. It does not execute // the transaction. func (t *transaction) scanJobById(id string, job *Job) { job.id = id t.command("HGETALL", redis.Args{job.Key()}, newScanJobHandler(job)) } // FindById returns the job with the given id or an error if the job cannot be found // (in which case the error will have type ErrorJobNotFound) or there was a problem // connecting to the database. func FindById(id string) (*Job, error) { job := &Job{} t := newTransaction() t.scanJobById(id, job) if err := t.exec(); err != nil { switch e := err.(type) { case ErrorJobNotFound: // If the job was not found, add the id to the error // so that the caller can get a more useful error message. e.id = id return nil, e default: return nil, err } } return job, nil }
/*
 * Return the multiplicative inverse of u modulo v, where v must be odd.
 *
 * Binary (shift-based) extended-gcd: maintains the invariant
 * t1 * u == t3 (mod v) while repeatedly halving t3, and folds the result
 * into (u1, u3) / (v1, v3) until the remainder t3 reaches zero.
 *
 * FIX: the original implementation used "goto Y4" to jump into the middle
 * of an inner do-while.  Since v is odd, that jump landed on the loop
 * condition with t3 = -v (odd), i.e. it simply skipped the halving loop on
 * the first pass.  The structured version below encodes exactly that:
 * the odd-u branch seeds t3 = -v and the while-loop condition is false,
 * while on every other entry t3 is even (u even initially; thereafter
 * t3 = u3 - v3 with both odd), so while and do-while agree.
 */
int inv_mod2(int u, int v)
{
    int t1, t3;
    int u1 = 1;
    int u3 = u;
    int v1 = v;
    int v3 = v;

    if ((u & 1) != 0) {
        /* u odd: t3 = -v is odd, so no halving is needed on this pass. */
        t1 = 0;
        t3 = -v;
    } else {
        t1 = 1;
        t3 = u;
    }

    do {
        /* Strip factors of two from t3, keeping t1 * u == t3 (mod v). */
        while ((t3 & 1) == 0) {
            if ((t1 & 1) == 0) {
                t1 = t1 >> 1;
            } else {
                t1 = (t1 + v) >> 1;
            }
            t3 = t3 >> 1;
        }
        /* Replace the larger of the two accumulated pairs. */
        if (t3 >= 0) {
            u1 = t1;
            u3 = t3;
        } else {
            v1 = v - t1;
            v3 = -t3;
        }
        t1 = u1 - v1;
        t3 = u3 - v3;
        if (t1 < 0)
            t1 = t1 + v;
    } while (t3 != 0);

    return u1;
}
/* Copyright 2002-2020 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ public class PirateTalk extends StdLanguage { @Override public String ID() { return "PirateTalk"; } private final static String localizedName = CMLib.lang().L("Pirate"); @Override public String name() { return localizedName; } public static List<String[]> wordLists=null; public PirateTalk() { super(); } private static final Hashtable<String,String> hashWords=new Hashtable<String,String>(); @Override public String translate(final String language, final String word) { final String res = super.translate(language, word); if(res.endsWith("ING") && (!res.equalsIgnoreCase("sing"))) return res.substring(0,res.length()-3)+"IN'"; if(res.endsWith("ing") && (!res.equalsIgnoreCase("sing"))) return res.substring(0,res.length()-3)+"in'"; return res; } @Override public Map<String, String> translationHash(final String language) { if((hashWords!=null)&&(hashWords.size()>0)) return hashWords; // *** Capitalized keys are below. 
final Map<String,String> hashwords = new TreeMap<String,String>(); hashwords.put("are","be"); hashwords.put("is","be"); hashwords.put("am","be"); hashwords.put("I'm","I be"); hashwords.put("Im","I be"); hashwords.put("you've","ye be"); hashwords.put("youve","ye be"); hashwords.put("we've","we be"); hashwords.put("weve","we be"); hashwords.put("you","ye"); hashwords.put("your","yer"); hashwords.put("my","me"); hashwords.put("smart","witty"); hashwords.put("steal","purloin"); hashwords.put("take","plunder"); hashwords.put("mercy","quarter"); hashwords.put("give","giv'"); hashwords.put("heaven","heav'n"); hashwords.put("eating","eat'n"); hashwords.put("chat","natter"); hashwords.put("chatting","nattering"); hashwords.put("stab","skewer"); hashwords.put("hello","ahoy"); hashwords.put("wow","avast"); hashwords.put("neat","smart"); hashwords.put("yes","aye"); hashwords.put("coin","doubloon"); hashwords.put("umm","arr"); hashwords.put("ummm","arrr"); hashwords.put("ummmm","arrrr"); hashwords.put("uhm","arh"); hashwords.put("uhmm","argh"); hashwords.put("uhh","arr"); hashwords.put("uhhh ","arrr"); hashwords.put("cheat","hornswaggle"); hashwords.put("cheating","hornswaggling"); hashwords.put("rob","pillage"); hashwords.put("citizen","landlubber"); hashwords.put("idiot","landlubber"); hashwords.put("ship","manowar"); hashwords.put("perfect","shipshape"); hashwords.put("shopkeeper","chandler"); hashwords.put("shopkeep","chandler"); hashwords.put("friend","hearty"); hashwords.put("friends","hearties"); hashwords.put("girl","lass"); hashwords.put("girls","lassies"); hashwords.put("boy","lad"); hashwords.put("boys","laddies"); hashwords.put("lady","wench"); hashwords.put("woman","wench"); hashwords.put("queen","grand strumpet"); hashwords.put("stealing","thievin'"); hashwords.put("needing","needin'"); hashwords.put("taking","haulin'"); hashwords.put("lying","lyin'"); hashwords.put("eating","eat'n"); hashwords.put("captain","cap'n"); hashwords.put("reading","readin'"); 
hashwords.put("writing","writin'"); hashwords.put("rotting","festerin'"); hashwords.put("stopping","stoppin'"); hashwords.put("swimming","swimmin'"); hashwords.put("with","wit'"); hashwords.put("because","coz"); hashwords.put("cuz","coz"); hashwords.put("cousin","son of a biscuit eater"); hashwords.put("quickly","smartly"); hashwords.put("bastard","knave"); hashwords.put("villain","scallywag"); hashwords.put("toilet","head"); hashwords.put("potty","head"); hashwords.put("bathroom","jardin"); hashwords.put("restroom","jardin"); hashwords.put("them","'em"); hashwords.put("him","'im"); hashwords.put("her","'er"); hashwords.put("there","thar"); hashwords.put("criminal","scalawag"); hashwords.put("thief","scallywag"); hashwords.put("villain ","scalallalloololowag"); hashwords.put("stomach","gizzard"); hashwords.put("dumb","daft"); hashwords.put("stupid","daft"); hashwords.put("almost","nigh-on"); hashwords.put("over ","o'er"); hashwords.put("before","afore"); hashwords.put("little","wee"); hashwords.put("small","wee"); hashwords.put("tiny","wee"); hashwords.put("wee","wee"); hashwords.put("myself","meself"); hashwords.put("expect","'spect"); hashwords.put("punish","keelhaul"); hashwords.put("punishment","keelhauling"); hashwords.put("drunk","three sheets to the wind"); hashwords.put("ouch","shiver me timbers"); hashwords.put("ow","Blow me down!"); hashwords.put("oof","blimey!"); hashwords.put("noose","hempen halter"); hashwords.put("chest","coffer"); hashwords.put("peaceful","becalmed"); hashwords.put("recruit","crimp"); hashwords.put("hell","Davy Jones' locker"); hashwords.put("eyes","deadlights"); hashwords.put("lean","list"); hashwords.put("wake","show a leg"); hashwords.put("damn","sink me!"); hashwords.put("nap","caulk"); hashwords.put("sleep","caulk"); hashwords.put("coffin","dead men's chest"); hashwords.put("food","grub"); hashwords.put("coward","lily-liver"); hashwords.put("cowardly","lily-livered"); hashwords.put("rebellion","mutiny"); 
hashwords.put("no","nay"); hashwords.put("reward","bounty"); hashwords.put("song","chantey"); //hashwords.put("feet","fathoms"); hashwords.put("stop","heave to"); hashwords.put("understand","savvy"); hashwords.put("telescope","spyglass"); hashwords.put("binoculars","spyglasses"); hashwords.put("tipsy","squiffy"); hashwords.put("surrender","strike colors"); hashwords.put("mop","swab"); hashwords.put("ignore","belay"); hashwords.put("tie","belay"); hashwords.put("butt","dungbie"); hashwords.put("ass","dungbie"); //hashwords.put("become a pirate go on account hashwords.put("backpack","duffle"); hashwords.put("nerd","drivelswigger"); hashwords.put("rascal","picaroon"); hashwords.put("cask","hogshead"); hashwords.put("afraid","afeard"); hashwords.put("insane","addled"); hashwords.put("eggs","cackle fruit"); hashwords.put("ghost","duffy"); hashwords.put("revenant","dredgie"); hashwords.put("hey","ho"); hashwords.put("excution","Jack Ketch"); hashwords.put("executed","Jack Ketch-ed"); hashwords.put("executing","Jack Ketch-ing"); hashwords.put("child","nipper"); hashwords.put("move", "step to"); hashwords.put("nice","Aaaaaaaaaaaarh"); hashwords.put("impressive","begad"); hashwords.put("heaven","Fiddler's Green"); hashwords.put("up","aloft"); hashwords.put("above","aloft"); hashwords.put("Ha","Harr"); hashwords.put("Haha","har-har"); hashwords.put("Sailor","Jack Tar"); hashwords.put("attention","a weather eye open"); hashwords.put("unprepared","under bare poles"); hashwords.put("gossip","scuttlebutt"); hashwords.put("coat","reefer"); hashwords.put("lie","spin yarn"); hashwords.put("overwhelmed","awash"); hashwords.put("progress","headway"); hashwords.put("assignment","berth"); hashwords.put("lodging","quarters"); hashwords.put("home","quarters"); hashwords.put("property","quarters"); hashwords.put("put","stow"); hashwords.put("swamped","awash"); hashwords.put("aftermath","wake"); hashwords.put("lost","adrift"); hashwords.put("everyone","all hands"); 
hashwords.put("everybody","all hands"); hashwords.put("weapons","armamament"); hashwords.put("pull","bowse"); hashwords.put("demote","disrate"); hashwords.put("full speed","flank"); hashwords.put("beat","flog"); hashwords.put("bottom","foot"); hashwords.put("kitchen","galley"); hashwords.put("Steamship","Hand Bomber"); hashwords.put("Steamboat","Hand Bomber"); hashwords.put("soap","holystone"); hashwords.put("police","jollies"); hashwords.put("cityguard","bluejack"); hashwords.put("constable","jollies"); hashwords.put("cop","bluejack"); hashwords.put("stair","ladder"); hashwords.put("dining","mess"); hashwords.put("kitchen","mess"); hashwords.put("navigator","pilot"); hashwords.put("group","crew"); hashwords.put("wind","windage"); hashwords.put("land","ashore"); hashwords.put("cane","stonnacky"); hashwords.put("whip","cat"); hashwords.put("I","oi"); hashwords.put("want","wants"); hashwords.put("ya'all","you alls"); hashwords.put("yall","you alls"); //hashwords.put("cat o' nine tails captain's daughter //hashwords.put("cat of nine tails captain's duaghter hashwords.put("go","lay"); hashwords.put("sit","lie"); hashwords.put("crowded","no room to swing a cat"); hashwords.put("quiet","pipe down"); hashwords.put("manipulate","run a rig"); hashwords.put("manipulating","running a rig"); hashwords.put("manipulation","rig running"); hashwords.put("go downwind","haul wind"); for(final Iterator<String> i=hashwords.keySet().iterator();i.hasNext();) { final String key = i.next(); final String value = hashwords.get(key); hashWords.put(key.toUpperCase().trim(), value.toLowerCase()); } return hashWords; } }
Increased melanizing activity in Anopheles gambiae does not affect development of Plasmodium falciparum Serpins are central to the modulation of various innate immune responses in insects and are suspected to influence the outcome of malaria parasite infection in mosquito vectors. Three Anopheles gambiae serpins (SRPN1, -2, and -3) were tested for their ability to inhibit the prophenoloxidase cascade, a key regulatory process in the melanization response. Recombinant SRPN1 and -2 can bind and inhibit a heterologous phenoloxidase-activating protease and inhibit phenoloxidase activation in vitro. Using a reverse genetics approach, we studied the effect of SRPN2 on melanization in An. gambiae adult females in vivo. Depletion of SRPN2 from the mosquito hemolymph increases melanin deposition on foreign surfaces such as negatively charged Sephadex beads. As reported, the knockdown of SRPN2 adversely affects the ability of the rodent malaria parasite Plasmodium berghei to invade the midgut epithelium and develop into oocysts. Importantly, we tested whether the absence of SRPN2 from the hemolymph influences Plasmodium falciparum development. RNAi silencing of SRPN2 in an An. gambiae strain originally established from local populations in Yaoundé, Cameroon, did not influence the development of autochthonous field isolates of P. falciparum. This study suggests immune evasion strategies of the human malaria parasite and emphasizes the need to study mosquito innate immune responses toward the pathogens they transmit in natural vector–parasite combinations.
+ Show + in 32.53% 44.12% 40.24% 55.88% 26.44% 99.99% 55.88% 99.99% 44.12% 99.97% in 78.96% 67.29% 83.53% 32.71% 69.53% 13.81% 32.71% 21.19% 67.29% 10.22% in 37.09% 56.99% 43.01% 43.01% 29.24% 30.55% 43.01% 39.12% 56.99% 24.09% in 45.26% 43.06% 54.61% 56.94% 38.2% 99.97% 56.94% 99.99% 43.06% 99.94% 19.73% 78.96% 97.76% 82.78% 17.16% 99.97% 100% 99.99% 15.26% 99.99% 100% 100% 14.49% 37.09% 64.78% 39.95% 9.65% 32.53% 66.92% 38.03% 8.68% 45.26% 85.28% 54.03% 8.57% 30.55% 70.43% 37.3% 6.45% 13.81% 47.73% 18.43% in IEM Katowice quarterfinals. herO has the #3 Headband after 2 defenses! Bbyong is atBlizzcon Chances.of the time they win this match and their Blizzcon Chances go toof the time they lose this match and their Blizzcon Chances go to herO is atBlizzcon Chances.of the time they win this match and their Blizzcon Chances go toof the time they lose this match and their Blizzcon Chances go toin IEM Katowice quarterfinals. INnoVation is atBlizzcon Chances.of the time they win this match and their Blizzcon Chances go toof the time they lose this match and their Blizzcon Chances go to Zest is atBlizzcon Chances.of the time they win this match and their Blizzcon Chances go toof the time they lose this match and their Blizzcon Chances go toin IEM Katowice quarterfinals. Trap is atBlizzcon Chances.of the time they win this match and their Blizzcon Chances go toof the time they lose this match and their Blizzcon Chances go to FanTaSy is atBlizzcon Chances.of the time they win this match and their Blizzcon Chances go toof the time they lose this match and their Blizzcon Chances go toin IEM Katowice quarterfinals. 
Dark is atBlizzcon Chances.of the time they win this match and their Blizzcon Chances go toof the time they lose this match and their Blizzcon Chances go to Maru is atBlizzcon Chances.of the time they win this match and their Blizzcon Chances go toof the time they lose this match and their Blizzcon Chances go to INnoVation has achance to win----going fromtoif they get 1st, orif they get 2nd. Maru has achance to win----going fromtoif they get 1st, orif they get 2nd. herO has achance to win----going fromtoif they get 1st, orif they get 2nd. Trap has achance to win----going fromtoif they get 1st, orif they get 2nd. Bbyong has achance to win----going fromtoif they get 1st, orif they get 2nd. Dark has achance to win----going fromtoif they get 1st, orif they get 2nd. FanTaSy has achance to win----going fromtoif they get 1st, orif they get 2nd. Zest has achance to win----going fromtoif they get 1st, orif they get 2nd.
<gh_stars>1-10
#ifndef ASM_GENERIC_TYPES32_H_
#define ASM_GENERIC_TYPES32_H_

/* Native machine word size, in bits, for 32-bit targets. */
#define __WORDSIZE 32

/* The typedefs below are only valid C; hide them from assembler
 * translation units that include this header. */
#ifndef __ASSEMBLER__

//#define __SWORD_TYPE int

/* Exact-width integer types. */
typedef signed char __s8;
typedef unsigned char __u8;
typedef signed short __s16;
typedef unsigned short __u16;
typedef signed int __s32;
typedef unsigned int __u32;
typedef long long __s64;
typedef unsigned long long __u64;

/* libc-style size/pointer-width types: all 32-bit on this architecture. */
typedef unsigned int __size_t;
typedef signed int __ssize_t;
typedef signed int __ptrdiff_t;
typedef unsigned int __uintptr_t;
typedef int __intptr_t;

/* "Fast" integer types; defined as exactly 32 bits here. */
typedef __s32 __s_fast;
typedef __u32 __u_fast;

/* Integer type backing atomic counters (32-bit on this target). */
typedef __s32 __atomic_t;

#endif /* __ASSEMBLER__ */

#endif /* ASM_GENERIC_TYPES32_H_ */
Dermal graft repair of Peyronie's disease: survey of 50 patients. Peyronie's disease is characterized by localized fibrosis in the tunica albuginea of the corpus cavernosum. This inelastic segment causes bending of the erect penis and sexual incapacity in advanced cases. We reviewed 52 cases in which excision of the Peyronie's plaque and replacement of the defect with a dermal graft have been done. An operation is an acceptable method to treat patients who are anatomic and sexual cripples. More than 70 per cent of our postoperative patients have been satisfied with sexual performance. Specific aspects of the postoperative course and surgical results are reviewed.
<reponame>MohitSethi99/IlluminoEngine<filename>IlluminoEngine/src/Illumino/Renderer/SceneRenderer.cpp<gh_stars>0 #include "ipch.h" #include "SceneRenderer.h" #include <glm/glm.hpp> #include <glm/gtx/transform.hpp> #include "RenderCommand.h" #include "Shader.h" namespace IlluminoEngine { static Ref<Shader> s_Shader; static glm::mat4 s_Projection; std::vector<MeshData> SceneRenderer::s_Meshes; void SceneRenderer::Init() { OPTICK_EVENT(); s_Shader = Shader::Create("Assets/Shaders/TestShader.hlsl", { {"POSITION", ShaderDataType::Float3}, {"TEXCOORD", ShaderDataType::Float2} }); } void SceneRenderer::Shutdown() { OPTICK_EVENT(); } void SceneRenderer::BeginScene() { OPTICK_EVENT(); // TODO: setup camera, lights, etc data s_Projection = glm::perspective(glm::radians(45.0f), 1920.0f / 1080.0f, 0.001f, 1000.0f); } void SceneRenderer::EndScene() { OPTICK_EVENT(); RenderPass(); } void SceneRenderer::SubmitMesh(const Ref<MeshBuffer>& mesh, glm::mat4& transform) { OPTICK_EVENT(); MeshData meshData = { transform, mesh }; s_Meshes.push_back(meshData); } void SceneRenderer::RenderPass() { OPTICK_EVENT(); RenderCommand::ClearColor({ 0.042f, 0.042f, 0.042f, 1.0f }); if (s_Meshes.empty()) return; s_Shader->Bind(); struct CB { glm::mat4 u_MVP; glm::vec4 u_Color = { 1.0f, 0.0f, 0.0f, 1.0f }; }; const size_t alignedSize = ALIGN(256, sizeof(CB)); const uint32_t meshCount = s_Meshes.size(); uint64_t gpuHandle = s_Shader->CreateBuffer("Properties", alignedSize * meshCount); size_t bufferSize = alignedSize * meshCount; char* buffer = new char[bufferSize]; for (size_t i = 0; i < meshCount; ++i) { auto& meshData = s_Meshes[i]; CB cb; cb.u_MVP = s_Projection * meshData.Transform; memcpy(buffer + alignedSize * i, &cb, sizeof(CB)); } s_Shader->UploadBuffer("Properties", buffer, bufferSize, 0); delete[] buffer; for (size_t i = 0; i < s_Meshes.size(); ++i) { auto& meshData = s_Meshes[i]; meshData.Mesh->Bind(); RenderCommand::DrawIndexed(meshData.Mesh, gpuHandle + alignedSize * i); } 
s_Meshes.clear(); } }
/// Reserve enough space in the vector for at least `additional` extra elements.
///
/// Does nothing if the current capacity already suffices; otherwise grows the
/// capacity geometrically (doubling, starting from 4) until it fits.
pub fn reserve(&mut self, additional: usize) {
    let ptr = self.slice.ptr;
    // Total capacity required: current length plus the requested headroom.
    let size = self.slice.len + additional;
    if self.capacity >= size {
        return;
    }
    // Double the capacity until it covers `size`; seed with 4 when empty.
    let mut new_capacity = if self.capacity > 0 { self.capacity } else { 4 };
    while new_capacity < size {
        new_capacity *= 2;
    }
    // NOTE(review): allocate_or_extend presumably tries to grow the block in
    // place and returns the (possibly moved) base pointer — confirm its contract.
    let new_ptr: NonNull<T> = self.slice.handle.allocate_or_extend(ptr, self.capacity, new_capacity);
    if ptr != new_ptr {
        // The allocation moved: relocate the existing elements to the new block.
        unsafe {
            ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), self.slice.len());
        }
        self.slice.ptr = new_ptr;
    }
    self.capacity = new_capacity;
}
The calcar femorale. An anatomic, radiologic, and surgical correlative study. In order to define the anatomy of the calcar femorale, a radiologic and surgical study was done on ten paired cadaver femurs. After radiography and computed tomographic (CT) scans, the specimens were subjected to medullary reaming by an experienced orthopedist, simulating total hip arthroplasty procedures. The imaging studies were repeated and compared with the prereaming studies. The calcar femorale was dissected from surrounding medullary bone, and sections of this structure were examined histologically. The calcar femorale is a condensation of cancellous bone. It is not affected by the reaming procedure but may play a role in guiding the reamer. This structure is separate from the calcar area described in relation to bone resorption after hip arthroplasty.
package com.essane.partimejob.mapper;

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.essane.partimejob.domain.Admin;

/**
 * MyBatis-Plus data-access mapper for the {@link Admin} entity.
 * <p>
 * All CRUD operations are inherited from {@link BaseMapper}; no custom
 * queries are declared here.
 *
 * @author Essane
 */
public interface AdminMapper extends BaseMapper<Admin> {
}
import useApplications from "applications/hooks/useApplications"; import ManageAssessmentStatus from "assessments/views/ManageAssessmentStatus"; import ActionButton from "components/Buttons/ActionButton"; import PageLoading from "components/Spinner/PageLoading"; import Table from "components/Table/Table"; import { IRow } from "components/Table/types"; import { useFirestoreConnect } from "react-redux-firebase"; import { Link, useHistory, useParams } from "react-router-dom"; import { AssessmentHelper } from "../../assessments/helper/AssessmentHelper"; import useAssessments from "../../assessments/hooks/useAssessments"; import { IAssessmentRecord } from "../../assessments/types"; import useJudges from "../hooks/useJudges"; import { Pencil } from "heroicons-react"; import PasswordReset from "./passwordReset"; interface ParamTypes { id: string; } export default function Application() { let { id } = useParams<ParamTypes>(); const history = useHistory(); useFirestoreConnect([{ collection: "judges", doc: id }]); const { judges } = useJudges(); const { data } = useApplications(); const { judgeAverages } = useAssessments(); const judge = judges && judges[id]; const assessments = judge && judge.assessments ? 
(Object.values(judge.assessments) as IAssessmentRecord[]) : []; const columns = [ { field: "application_name", use: "Name", use_in_search: true, render: (row: IRow) => ( <div> {data && data[row.application_id] && data[row.application_id].startupName} </div> ), }, ...AssessmentHelper.getQuestions().map((field) => ({ field: field.source, use: field.shortLabel, width: "w-20", })), { field: "id", use: "Status", render: (row: any) => { return ( <div onClick={(e) => e.stopPropagation()}> <ManageAssessmentStatus active={row.status !== "hidden"} application_id={row.application_id} judge_id={row.judge_id} /> </div> ); }, width: "w-14", }, { field: "judge_id", use: "", render: (row: any) => { return ( <div onClick={(e) => e.stopPropagation()}> <Link to={`/assessment/${row.id}/${row.judge_id}/update`}> <Pencil className="text-actionColor-300 hover:text-actionColor-700" /> </Link> </div> ); }, width: "w-14", }, ]; if (!judge && !assessments) { return <PageLoading />; } return ( <> <ActionButton path={`/judges/${id}/update`} type="update" /> {judge && ( <div className="flex"> <h5 className={`flex justify-center items-center text-xl w-44 rounded-full p-3 mb-5 ${judge.color} `} > {judge.name} </h5> {judgeAverages[id] && ( <h5 className={`flex justify-center items-center text-xl w-20 rounded-full p-3 mb-5 ml-4 ${judge.color} `} > {judgeAverages[id]} </h5> )} </div> )} <Table rowStyle={(row) => { return row.status === "hidden" ? "bg-red-400" : ""; }} onRowClick={(row) => history.push(`/applications/${row.application_id}`) } columns={columns} rows={assessments.map((item) => ({ ...item, id: item.application_id, }))} ></Table> </> ); }
Safety and efficacy of 3D-printed templates assisted CT-guided radioactive iodine-125 seed implantation for the treatment of recurrent cervical carcinoma after external beam radiotherapy Objective To investigate the safety and efficacy of 3-dimensional (3D) printing non-coplanar templates (PNCT) assisted computer tomography (CT) guided radioactive 125I seed implantation (RISI) for the treatment of recurrent cervical carcinoma (RCC) after external beam radiotherapy (EBRT). Methods A total of 103 patients with inoperable post-EBRT RCC were included in this retrospective study. A total of 111 lesions received RISI. Eight lesions were at the pelvic center, 75 lesions were at the pelvic lateral, and 28 lesions were extra-pelvic metastasis. The median prescription dose was 120 Gy. The primary end points were adverse events and local control (LC), and the secondary end points were overall survival (OS) and progression-free survival. Results Grade 2 adverse events of acute nausea, diarrhea, and pollakiuria occurred in 1, 2, and 1 patient, respectively. One patient suffered from grade 3 acute proctitis. Late toxicity was observed in 2 patients with rectovaginal fistula. No grade 5 toxicity occurred. The 3-year LC and OS rates were 75.1% and 20.8%, respectively. The median OS was 17 months. The multivariate analysis showed that the minimum dose received by the “hottest” 90% of the gross tumor volume (D90) ≥130 Gy, squamous cell carcinoma, hemoglobin ≥80 g/L and good short-term efficacy (complete response or partial response) were independent predictors of LC and OS (all p<0.05). Conclusions 3D-PNCT assisted CT-guided RISI is a safe, effective, and minimally invasive modality for RCC. The hemoglobin level, pathological type, dose distribution and short-term efficacy are considered as independent factors for clinical outcomes. INTRODUCTION Surgery and radiotherapy are currently the main approaches for the treatment of cervical carcinoma. 
The recurrence rates of patients of International Federation of Gynecology and Obstetrics (FIGO) stages Ib, IIa, IIb, III and IVa are 10%, 17%, 23%, 42% and 74%, respectively, and 80% of cervical cancer recurrences occur within 2 years after initial treatment . The treatment options for recurrent cervical carcinoma (RCC) have always been challenging due to the fact that nearly 70% RCC patients have received previous pelvic external beam radiotherapy (EBRT) and re-irradiation will increase the risk of damage to normal tissues . As a standard treatment for early low-risk prostate cancer, radioactive 125 I seed implantation (RISI) could overcome this problem. RISI can deliver extremely high dose to tumor while sparing normal tissue. RISI has showed its good efficacy and been recognized as a salvage or palliative (pain-relief ) therapy for various recurrent cancers after multiple therapies, including rectal cancer, cervical cancer, head and neck cancer, pancreatic cancer and so on . Moreover, RISI has been referenced in the National Comprehensive Cancer Network guidelines for the management of locally recurrent rectal cancer . With computer tomography (CT)-guidance and 3-dimensional (3D)-printing non-coplanar templates (PNCT) assistance, the accuracy and efficacy of RISI have been greatly increased . The purpose of this non-randomized multicenter retrospective study was to further clarify the safety and efficacy of 3D-PNCT assisted CT-guided RISI as a salvage treatment for patients with post-EBRT RCC. Moreover, we wished to explore the relationship between dose and outcome, and to help determine the appropriate prescription dose. Patient eligibility This retrospective study enrolled totally 103 patients with post-EBRT RCC from December 2015 to September 2019 in 2 hospitals with approval by the Institutional Review Boards (IRB00006761-M2019118). The written informed consent was signed by all patients. 
The inclusion criteria of RISI: ① a Karnofsky performance status (KPS) of ≥70; ② an expected survival of ≥3 months; ③ pathologically or radiologically confirmed RCC; ④ a tumor diameter of less than 7 cm, with no metastasis or no more than 2 unstable metastatic lesions; and ⑤ patients who refused surgery and/or EBRT or were unfit for surgery and/or EBRT. The exclusion criteria: ① severe disturbance to coagulation functions; ② tumor bleeding, necrosis and fistula formation; ③ unable to design a suitable needle path. All the patients received 3D-PNCT assisted CT-guided RISI. Procedure All patients selected for RISI received CT simulation (Brilliance, Philips Inc., Netherlands) with contrast and 5-mm slice thickness 2 days prior to RISI (Supplementary Data 1 and Supplementary Fig. 1). The CT simulation image dataset was imported into a brachytherapy treatment planning system (BT-TPS, KLSIRPS-3D; Beijing Tianhang Kelin Technology Development Inc., Beijing, China) for pre-plan (Figs. 1A, 1B, and 2A), target volume and organ at risk (OAR) delineation. We set the prescribed dose to the gross tumor volume (GTV) and the activity of 125 I seeds. The median prescription dose was 120 Gy (range, 100-180 Gy). The BT-TPS simulated the distribution of needles and seeds, and calculated the dose distribution. The pre-plan dataset was used for digital modeling and printing of individualized 3D-PNCTs, which included the biologic surface characteristics of the seed implantation area, the X-axis and Y-axis laser lines, a registration mark, and information of the simulated needle path. RISI was carried out under local infiltration anesthesia or spinal anesthesia. After patient and 3D-PNCT re-set up, single-use needles were inserted into the target lesion under CT guidance (Figs. 1C, 2C, and Supplementary Data 2). A Mick applicator was used to implant seeds. 
After seed implantation, CT scan was performed again to check the distribution of actual 125 I seeds in the targets, and additional seeds would be implanted if the distribution of the 125 I seeds in the target volume was not satisfactory. The CT image dataset was transferred to the BT-TPS for post-planning dose evaluation (Fig. 2C). The patients were discharged 1-2 days after RSI. All procedures were performed by qualified . The prescription dose was 120 Gy, the GTV was 21.2 cm 3 , the number of seeds was 45, and the D 90 in pre-plan, real-time and post-plan was 160, 158, and 155 Gy, respectively. D 90 , the minimum dose received by the "hottest" 90% of the GTV; GTV, gross tumor volume. and well-trained personnel, and the safety measures by the International Commission on Radiological Protection were strictly followed. The end-points and follow-up The following dosimetry parameters were defined and recorded: the minimum dose received by the "hottest" 90% of the GTV (D 90 ); the minimum dose received by the GTV (D 100 ); percentage of the GTV receiving 100% (V 100 ), 150% (V 150 ), and 200% (V 200 ) of prescription dose; and the external index, conformal index, and homogeneity index of the target area. External index described the volume exceeding the prescription dose outside GTV, and the greater the value of external index was, the greater the prescription dose received outside GTV. Conformal index described the conformity of dose distribution; the ideal conformal index was 1, which indicated that GTV was properly covered by the prescription dose, and the dose outside GTV was lower than prescription dose. Homogeneity index described the uniformity of dose distribution; the closer the homogeneity index was to 100%, the more uniform the dose distribution of GTV. Follow-up assessments were performed at 3, 6, 9, and 12 months after RISI and every 6 months after one year. The assessments involved regular outpatient visits and telephone interviews. 
Diagnostic imaging with CT scans or magnetic resonance imaging (MRI) examinations was used to evaluate the tumor response for each post-operative visit. The primary endpoints were adverse events and local control (LC), and the secondary endpoints were overall survival (OS) and progression-free survival (PFS). Local tumor response was evaluated by Response Evaluation Criteria in Solid Tumors (RECIST) after RISI. Pain intensity was assessed using a numerical rating scale categorized into 5 grades: 0, no pain; 1-3, mild pain; 4-6, moderate pain; 7-9, severe pain; and 10, unbearable pain. The pre-RISI and post-RISI pain scores were compared. Adverse events were graded according to the Radiation Therapy Oncology Group (RTOG) and European Organization for Research and Treatment of Cancer (EORTC) criteria. Statistical analysis Statistical analysis was performed using SPSS version 25.0 (SPSS, Chicago, IL, USA). If a patient underwent RISI for 2 different sites, each site was considered separately when the LC was analyzed. Receiver operating characteristic (ROC) analysis was used to identify the optimal cutoff values to divide patients into high-risk and low-risk groups. Kaplan-Meier survival analysis was used to estimate LC, OS and PFS; log-rank test was used for inter-group comparisons. Cox proportional hazards regression analysis was used to identify the factors independently influencing LC, OS and PFS. The factors identified in the multivariate analysis were used to plot nomograms by R-3.6.2 (Lucent Technologies Inc., New Providence, NJ, USA) for result visualization. The nomogram showed the importance of risk factors and could be used to predict LC. For a risk factor, the greater its point, the greater the impact on LC. Total point obtained by adding the points of all risk factors of a patient could be used to predict 1-year or 3-year LC. The results were expressed as a concordance index (C-index). 
The principle of C-index was to randomly pair all research samples in the study and evaluate the difference between the model predicted value and the true value. The range of C-index was 0.5-1, and the closer the value to 1, the higher the accuracy. The p-value ≤0.05 was considered statistically significant. RESULTS During the last follow-up carried out in February 2020, the median follow-up time was 12 months (range, 2-43 months), while 48 patients were alive. The median age was 52 years (range, 29-72 years) ( Table 1). Ten patients with early cervical cancer (stages IA1 to IB1) at first visit received radical surgery with/without EBRT, brachytherapy (BT) or paclitaxel plus platinum chemotherapy (Supplementary Fig. 2). Ninety-one patients had locally advanced cervical cancer (stages IB2 to IVA) and 63 patients of them received an initial standard treatment of EBRT with concurrent paclitaxel plus platinum chemotherapy followed by intracavitary BT. Fourteen out of the 63 patients received salvage surgery after recurrence. The other 28 out of the 91 patients with locally advanced cervical cancer received surgery and EBRT, with/without paclitaxel plus platinum chemotherapy and BT. Two patients had late cervical cancer (stage IVB) received EBRT with concurrent paclitaxel plus platinum chemotherapy followed by BT. The median interval from the initial treatment to the recurrence was 11 months (range, 2-70 months). After initial treatment, 42 patients had multiple cervical cancer recurrences and had undergone surgery, radiation, or platinum-based chemotherapy. Before RISI, all patients had undergone pelvic EBRT, 9 patients had undergone re-EBRT, 52 patients had undergone surgery, and 95 patients had been given chemotherapy. Even if the total dose is the same during radiotherapy, different dose segmentation will lead to different biological effects. Therefore, we converted the doses that received by targets into equivalent dose (EQD). 
The median cumulative equivalent dose in 2 Gy/f (EQD2) at the implantation sites before RISI was 64 Gy (range, . The median KPS was 80 (range, 70-100). In 103 patients, a total of 111 lesions were successfully treated by RISI. The lesions were at the pelvic center in 8 patients, and at the pelvic lateral region in 75 patients, and 28 patients had pelvic lesions with extra-pelvic metastasis. The median lesion volume was 37.7 cm 3 (range, 2.6-237.8 cm 3 ). The median activity of 125 I seeds was 0.6 mCi (range, 0.4-0.8 mCi), and the median number of 125 I seeds was 63 (range, 8-186). Adverse events Two of the 103 patients (1.9%) suffered from intensified pain and recovered 1 week later. Seed migration occurred in one patient. Four patients suffered from grade 2 adverse events: 1 of acute nausea, 2 of diarrhea, and 1 of pollakiuria. One patient suffered from grade 3 acute proctitis. Late toxicity in this study was rare and only two patients suffered from rectovaginal fistula, and no grade 5 late toxicity occurred ( Table 2). The toxicity prevalence was low in this study, so the factors that might be related to toxicity could not be evaluated. Prognostic factors In the univariate analysis, the factors of KPS ≥90, squamous cell carcinoma, hemoglobin ≥80 g/L, GTV <55 cm 3 , D 90 ≥130 Gy and good short-term efficacy (STE; CR or PR was defined as good STE, SD or PD was defined as bad STE) were significantly associated with higher LC ( Table 3) and OS (Supplementary Table 1) (all p<0.05). Compared to extra-pelvic recurrence, pelvic recurrence was associated with a better OS. In the multivariate analysis, the factors independently associated with the LC were pathologic type, hemoglobin levels, D 90 , and STE (all p<0.05). The factors independently associated with the OS were pathologic type, hemoglobin levels, implantation sites, D 90 , and STE (all p<0.05). 
For both LC and OS rates, the ROC analysis showed that D 90 of 130 Gy was the optimum cutoff value for identifying patients with a high risk of local failure and short survival. The Kaplan-Meier analysis showed that the patients with D 90 ≥130 Gy had better LC and OS than others ( Fig. 3B and C). A nomogram was created using the above 4 factors independently associated with LC ( Fig. 3D) for visualization. The C-index (per internal validation) was 0.945 (95% CI=0.912-0.978), indicating that the predicted value was consistent with the actual value. Significant differences in the prognosis were observed between patients with risk factors of 0 or 1 and patients with risk factors of 2 to 4 (p<0.001). The 3-year LC and OS rates of the patients with risk factors of 0 or 1 were 92.7% and 28.1%, respectively, whereas both the 3-year LC and OS rates of patients with risk factors of 2 to 4 were zero (Fig. 3E). DISCUSSION The standard management for patients with early-stage cervical cancer is surgery and/or EBRT with or without chemotherapy . EBRT alone and/or combined with concurrent cisplatin-based chemotherapy with BT is the first line option for patients with locally advanced cervical cancer . Locoregional recurrence or local control failure occurs in 15%-61% of cervical cancer patients after radical surgery or EBRT plus chemotherapy . Surgery or EBRT may be curative for some patients with locally recurrence or limited metastatic diseases, whereas most patients cannot be cured by surgery or EBRT . The 5-year OS rate of patients with recurrent cervical carcinoma is only 3.2%-16.5% . For patients with post-EBRT RCC, re-EBRT is associated with a high risk of toxicity to the intestine, rectum and bladder, and the morbidity of grade 3 or 4 side effects is up to 15.6%-56% . Other possible salvage treatment modalities are surgery, High-dose rate (HDR)-BT, stereotactic body radiotherapy (SBRT), low-dose rate (LDR)-BT and chemotherapy. 
The management of RCC depends mainly on previous therapeutic approaches as well as the site and extent of recurrence . RCC is classified into pelvic central, pelvic lateral or/and extrapelvic RCC . Patients with pelvic central recurrence without pelvic wall invasion or distant metastasis after EBRT were suitable candidates for pelvic exenteration. The 5-year OS rate and operative mortality were 20%-60% and 0%-12%, respectively . Meanwhile, positive margin was indicated in half of the patients after surgery. The recurrence rate of these patients is up to 13%-64% . Therefore, the use of pelvic exenteration has been declining due to its limited indications. The HDR-BT technique was introduced to treat pelvic central RCC due to its advantages of good conformity and short treatment. All published reports used small sample sizes, while the rate of >grade 2 adverse events was 25%-55% . Therefore, the efficacy and safety of HDR-BT need further investigations to evaluate and clarify the indications and dose restriction for OARs. The prognosis of patients with pelvic lateral recurrence after EBRT was poor due to lack of standard treatments . Laterally extended endopelvic resection (LEER) with or without intraoperative RT was reported to treat post-EBRT RCC. The 5-year OS rate was 42%-58%, and the morbidity of severe intestinal or neurological side effects was up to 25%-50%. The life quality of patients after LEER was very unfavorable . As an emerging technique, SBRT had the advantages of good conformity and short treatment, which is suitable to treat tumors with a diameter of less than 3 cm. SBRT was reported to treat pelvic lateral RCC with the 2-year LC rate of 43-57.5%. The morbidity of severe effects was 8.5%-17.6% . All published reports were retrospective studies of small sample sizes. Therefore, to clarify the efficacy and safety of SBRT, it remains to further improve the patient selection and dose restriction in OARs. 
Chemotherapy of cis-platinum combined with other drugs was an option for RCC; the response rate was 36%-59.9%, with a median OS of 9.6-12.9 months. Some reports suggested that platinum-based chemotherapy combined with bevacizumab significantly extended the median OS to 17 months. RISI is a kind of LDR-BT using a sealed radiation source directly placed into the tumor or around the tumor. The implantation of radioactive seeds is permanent, and only one operation is needed. The advantages of RISI: ① it is a minimally invasive procedure and can continuously deliver ablation doses to tumor targets at a LDR; ② the implantation is precise and efficient under image-guidance as well as template assistance; and ③ the operation often lasts one hour and the patients can return to their normal life within one day. RISI has been used for the treatment of various solid tumors, especially as a salvage treatment for recurrent cancers after EBRT. Compared with surgery, RISI has more indications, with the advantages of protecting normal tissues and fewer side effects. Compared with other RT techniques, RISI is able to deliver a higher dose to the tumor site while sparing normal tissues. Therefore, RISI is suitable for treating post-EBRT RCC. The CT guidance for RISI was put into clinical practice in 2002 in China. The indications of RISI have been expanded from prostate carcinoma to head and neck, thoracic, abdomen, retroperitoneal, and spinal cord carcinomas. The effect of CT-guided RISI depends on the experience and expertise of physicians performing the procedure. Therefore, it is hard to repeat and evaluate the efficacy and safety of CT-guided RISI. Qu et al. used CT-guided RISI to treat 36 patients with post-EBRT RCC; the 1-year OS rate was 52% and the median OS was 11.5 months. The optimal D 90 should be more than 105 Gy.
A disadvantage of CT-guided RISI is that, under the free-hand procedure, the delivered dose distribution of the implanted seeds cannot be guaranteed to match the pre-plan design, owing to interference from the OARs, such as blood vessels, bones, and nerves. The 3D-PNCT assisted CT-guided technique was integrated into RISI in 2015, and the accuracy and efficacy of seed implantation have been greatly increased. This not only significantly improved the safety and effect of RISI, but also made the treatment less dependent on the skills and experience of individual operators. Therefore, the combination of CT guidance and 3D-PNCT made RISI more evaluable and repeatable. 3D-PNCT assisted CT-guided RISI was easy to perform and popularize, and its cost was lower than that of EBRT. With the assistance of 3D-PNCT, physicians can accomplish RISI treatment independently after a 3- to 6-month standardized training. Previously, Ji et al. reported that 3D-PNCT assisted CT-guided RISI could enable the post-plan dose to meet the requirements of the pre-plan. Based on such quality assurance, we could deliver a specific dose to tumors to analyze the safety and efficacy of RISI in the treatment of cervical cancer. The patients included in this study mainly had pelvic lateral RCC. The preliminary results indicated that RISI was more suitable for treating pelvic lateral recurrence, while surgery or HDR-BT was the first choice for pelvic central recurrence. Therefore, in this study, the patients with pelvic central recurrence were all deemed unsuitable for further surgery and HDR-BT. No dose-escalation clinical trial of RISI for pelvic RCC had been conducted. Thus, we determined the prescription dose by referring to the dose selection of RISI for prostate cancer. The acceptable range for post-implant D90 in RISI for prostate cancer may be 130-180 Gy.
Since prostate cancer grows slowly and is less sensitive to radiotherapy than cervical cancer, we set the prescribed dose at 120 Gy for most patients. There were two special circumstances: 1) the lesions of some patients invaded the OARs, making it difficult to limit the dose received by the OARs; thus, we would reduce the prescribed dose to 100 or 110 Gy to protect the OARs; 2) the lesions of some patients were small and far away from the OARs, making it possible to deliver relatively high doses to targets; thus, we would increase the prescribed dose to 140-180 Gy in order to achieve better tumor control efficacy. We limited the doses received by the OARs by referring to the OAR dose limitations of RISI for prostate cancer. The OARs in RISI for prostate cancer mainly include the rectum and urethra. The American Brachytherapy Society recommended a peripheral distribution of sources, frequently referred to as "modified peripheral or modified uniform loading," so that the portion of the urethra receiving 150% of the dose or greater can be limited. The volume of the rectum receiving the prescription dose ideally should be <1 mL. Wallner et al. suggested that the volume of the rectum receiving 100 Gy ideally should be <1 mL. The OARs in RISI for pelvic RCC mainly included the small intestine, rectum, and bladder. All the patients in this study had undergone pelvic EBRT before RISI, and most of them had undergone intracavitary brachytherapy. Therefore, the OAR dose limitation in this study was stricter than that in RISI for prostate cancer. In this study, the OAR dose limitation was defined as the maximum dose received by the small intestine, rectum, and bladder, which should be <50, 70, and 80 Gy, respectively. In this study, most of the lesions were lateral pelvic recurrences located far away from the OARs. Thus, in most cases, it was easy to deliver a high dose to targets while meeting the OAR dose limitation.
For some lesions that were close to the OARs, we referred to the urethral protection method used in RISI for prostate cancer. The distances between the seeds and the OARs should be >1 cm, and the targets were covered by the edges of the seeds' influence areas. This method might result in a high V150 and V200, but it could effectively reduce the doses received by the OARs. If the doses received by the OARs still exceeded the limitation, the treatment plans would be fully discussed and evaluated according to the patients' specific situations on a case-by-case basis. In general, the OAR dose limitation might be appropriately relaxed if a radical cure was possible; however, if only palliative care could be achieved, the dose delivered to the target might be reduced to protect the OARs. In this study, the LC rates were 87.4% at 1 year and 75.1% at 3 years, which were higher than those reported in previous studies. However, the OS and PFS rates in this study were lower because the failure pattern in most patients was distant metastasis. Twenty-eight patients with extra-pelvic recurrence had a worse median OS than the others (10 vs. 18 months). Isolated lesions were identified and targeted in these patients, but undetected potential distant metastases might still have existed. The indication selection, the optimal prescription dose, and the target regions of the CTV for 3D-PNCT assisted CT-guided RISI remain ambiguous. Based on safety and efficacy considerations, some patients with advanced or late-stage RCC were included in this study, which influenced the outcomes. 3D-PNCT assisted CT-guided RISI is safe for treating patients with post-EBRT RCC. Two patients suffered from intensified pain, one patient had seed migration, and four patients suffered from grade 2 acute toxicity. One patient, whose D90 was 133.8 Gy, suffered from grade 3 acute proctitis and soon recovered with no treatment.
Only two patients, whose D90 was 146.2 and 175.9 Gy, respectively, suffered from the late toxicity of rectovaginal fistula and received salvage surgery. One died 14 months after RISI due to disease progression, and the other is still alive. No grade 5 late toxicities occurred. In this study, the LC rate was higher in patients with squamous cell carcinoma than in patients with non-squamous cell carcinoma, although further investigations are needed to clarify the mechanisms. High doses of RISI were associated with a better LC and favorable outcomes. The LC rate was higher in patients with a D90 of ≥130 Gy than in patients with a D90 of <130 Gy, and the result was consistent with the cutoff value identified by the ROC analysis, providing an important reference for selecting the prescription dose and designing future dose-escalation regimens. Squamous cell pathology, D90 values of ≥130 Gy, and CR/PR were all associated with a better OS, because a good LC was more likely to translate into a survival benefit. Patients with pelvic recurrence had a longer OS than those with extra-pelvic recurrence, which was consistent with previous clinical reports. A hemoglobin level of <80 g/L was associated with an unfavorable prognosis, which might be due to the poor general condition of these patients. Age was not associated with the prognosis in this study, which might be attributed to the fact that most of the patients had experienced multiple recurrences and their life expectancy was short.
GTV was significantly associated with LC in the univariate analysis but was not significantly associated with LC in the multivariate analysis, which was different from the results of other reports and might be due to the fact that 3D-PNCT assisted CT-guided RISI is more accurate than other RT techniques and is able to deliver a higher dose to the tumor, thereby improving the outcomes of patients with a large tumor. There was no significant difference in the prognosis between patients with central and lateral pelvic recurrence, which was different from the results of previous studies. This might be attributed to the small sample size of patients with central pelvic recurrence in this study and the selection bias, because only patients who were unsuitable for surgery and HDR-BT were treated by RISI. In conclusion, 3D-PNCT assisted CT-guided RISI is a safe, effective, and minimally invasive option for RCC. The hemoglobin level, pathological type and dose distribution are considered as independent factors for clinical outcomes. Large cohort prospective studies are needed to further clarify the efficacy, safety, and technical standards.
def check_for_existing_package(toolchain_root, pkg_name, pkg_version, compiler):
    """Check whether a matching package build already exists.

    Reads the package's version marker file (if any) and compares its
    recorded "name-version-compiler-platform" string against the one
    expected for this build.

    Returns True when the marker file exists and records exactly the
    expected package string, False otherwise.
    """
    version_file = version_file_path(toolchain_root, pkg_name, pkg_version)
    if not os.path.exists(version_file):
        return False
    # The marker records the full identity of the build that produced it.
    expected = "{}-{}-{}-{}".format(
        pkg_name, pkg_version, compiler, get_platform_release_label())
    with open(version_file) as marker:
        recorded = marker.read().strip()
    return recorded == expected
def types(self):
    """Return the 'types' declared in extra_headers, each converted to a
    Python type via helpers.to_python_type."""
    return [helpers.to_python_type(raw) for raw in self.extra_headers['types']]
-- | Example use of the @Extensible@ desugaring.
--
-- A small lambda-calculus AST (@Exp@, @Pat@, @Dec@, @Lit@, @Typ@) is
-- declared extensible inside a Template Haskell declaration quote, and an
-- @Hs*@ family extends it with type annotations and source locations.
-- The pattern synonyms at the bottom present the generated extension
-- constructors (@LitX@, @VarX@, @AbsX@, ... -- produced by
-- 'desugarExtensible') under friendlier names.
{-# OPTIONS_GHC -Wall -fno-warn-orphans #-}
{-# LANGUAGE TypeOperators, PatternSynonyms #-}
{-# LANGUAGE GADTs,TypeFamilies,DataKinds, RankNTypes #-}
{-# LANGUAGE TemplateHaskell, QuasiQuotes,KindSignatures #-}
module Example where

import Extensible (desugarExtensible,Extensible(..),Extends(..))
import GHC.TypeLits

-- | Line/column source position attached by the Hs* extensions.
data SrcLoc = SrcLoc Int Int

desugarExtensible "Ext" [d|
  -- Base (extensible) syntax ----------------------------------------------
  {-# ANN type Exp Extensible #-}
  data Exp id
    = Lit Lit
    | Var id
    | Abs (Pat id) (Exp id)
    | App (Exp id) (Exp id)
    | Tup [Exp id]
    | Let [Dec id] (Exp id)

  {-# ANN type Pat Extensible #-}
  data Pat id
    = VarP id
    | TupP [Pat id]
    | VieP (Exp id) (Pat id)

  {-# ANN type Dec Extensible #-}
  data Dec id
    = Fun {name :: id, patterns :: [Pat id], body :: Exp id}
    | Pat (Pat id) (Exp id)
    | Sig id Typ

  {-# ANN type Lit Extensible #-}
  data Lit
    = Int Integer
    | Rat Rational
    | Str String

  {-# ANN type Typ Extensible #-}
  data Typ where
    INT :: Typ
    ARR :: {argument :: Typ, result :: Typ} -> Typ

  -- Haskell-flavoured extensions of the base syntax -----------------------
  {-# ANN type HsExp (Extends "Exp") #-}
  data HsExp id
    = HsAbs (Extends "Abs") (HsTyp id) SrcLoc
      -- (Extends ...) is a dummy field
      -- that I used for simulating syntax.
    | HsApp (Extends "App") (HsTyp id)
    | HsTup (Extends "Tup") (HsExp id)
    | HsOut (HsTyp id) (HsExp id)
    | HsLoc SrcLoc (HsExp id)

  {-# ANN type HsPat (Extends "Pat") #-}
  data HsPat id
    = HsVarP (Extends "VarP") (HsTyp id)

  {-# ANN type HsDec (Extends "Dec") #-}
  data HsDec id

  {-# ANN type HsLit (Extends "Lit") #-}
  data HsLit id

  {-# ANN type HsTyp (Extends "Typ") #-}
  data HsTyp id

  data Foo id = Bar (HsTyp id)
  |]

-- Friendly views over the generated extension constructors.
pattern HsLit :: HsLit id -> HsExp id
pattern HsVar :: id -> HsExp id
pattern HsAbs :: HsPat id -> HsExp id -> HsTyp id -> SrcLoc -> HsExp id
pattern HsApp :: HsExp id -> HsExp id -> HsTyp id -> HsExp id
pattern HsTup :: [HsExp id] -> HsExp id -> HsExp id
pattern HsLet :: [HsDec id] -> HsExp id -> HsExp id

pattern HsLit i = Lit LitX i
pattern HsVar x = Var VarX x
pattern HsAbs p n a l = Abs (AbsX a l) p n
pattern HsApp l m a = App (AppX a) l m
pattern HsTup ms m = Tup (TupX m) ms
pattern HsLet ms n = Let LetX ms n
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "cc/metrics/lcd_text_metrics_reporter.h"

#include "base/lazy_instance.h"
#include "base/metrics/histogram_macros.h"
#include "cc/base/histograms.h"
#include "cc/layers/picture_layer_impl.h"
#include "cc/paint/display_item_list.h"
#include "cc/trees/layer_tree_host_impl.h"
#include "cc/trees/layer_tree_impl.h"

namespace cc {
namespace {

// Minimum wall-clock time and minimum number of submitted frames between
// two consecutive histogram reports (both must be satisfied).
constexpr auto kMinimumTimeInterval = base::TimeDelta::FromMinutes(1);
constexpr unsigned kMinimumFrameInterval = 500;

// This must be the same as that used in DeviceScaleEnsuresTextQuality() in
// content/renderer/render_widget.cc.
constexpr float kHighDPIDeviceScaleFactorThreshold = 1.5f;

// Histogram names, split by pixel-weighted vs. per-layer counts and by DPI.
constexpr char kMetricNameLCDTextKPixelsHighDPI[] =
    "Compositing.Renderer.LCDTextDisallowedReasonKPixels.HighDPI";
constexpr char kMetricNameLCDTextKPixelsLowDPI[] =
    "Compositing.Renderer.LCDTextDisallowedReasonKPixels.LowDPI";
constexpr char kMetricNameLCDTextLayersHighDPI[] =
    "Compositing.Renderer.LCDTextDisallowedReasonLayers.HighDPI";
constexpr char kMetricNameLCDTextLayersLowDPI[] =
    "Compositing.Renderer.LCDTextDisallowedReasonLayers.LowDPI";

}  // anonymous namespace

// Returns a reporter only when running in a renderer process (client name
// "Renderer"); otherwise returns nullptr so no metrics are collected.
std::unique_ptr<LCDTextMetricsReporter> LCDTextMetricsReporter::CreateIfNeeded(
    const LayerTreeHostImpl* layer_tree_host_impl) {
  const char* client_name = GetClientNameForMetrics();
  // The metrics are for the renderer only.
  if (!client_name || strcmp(client_name, "Renderer") != 0)
    return nullptr;
  return base::WrapUnique(new LCDTextMetricsReporter(layer_tree_host_impl));
}

LCDTextMetricsReporter::LCDTextMetricsReporter(
    const LayerTreeHostImpl* layer_tree_host_impl)
    : layer_tree_host_impl_(layer_tree_host_impl) {}

LCDTextMetricsReporter::~LCDTextMetricsReporter() = default;

// Records the latest frame time and counts submitted frames; the first
// submission after a report (re)starts the reporting interval.
void LCDTextMetricsReporter::NotifySubmitFrame(
    const viz::BeginFrameArgs& args) {
  current_frame_time_ = args.frame_time;
  frame_count_since_last_report_++;
  if (last_report_frame_time_.is_null())
    last_report_frame_time_ = current_frame_time_;
}

// Emits the LCD-text histograms, throttled to at most once per
// kMinimumTimeInterval and only after at least kMinimumFrameInterval frames.
void LCDTextMetricsReporter::NotifyPauseFrameProduction() {
  if (current_frame_time_.is_null() ||
      current_frame_time_ - last_report_frame_time_ < kMinimumTimeInterval ||
      frame_count_since_last_report_ < kMinimumFrameInterval) {
    return;
  }
  // Reset the throttling window before doing the (potentially slow) scan.
  last_report_frame_time_ = current_frame_time_;
  frame_count_since_last_report_ = 0;

  float device_scale_factor =
      layer_tree_host_impl_->settings().use_painted_device_scale_factor ?
      layer_tree_host_impl_->active_tree()->painted_device_scale_factor() :
      layer_tree_host_impl_->active_tree()->device_scale_factor();
  bool is_high_dpi = device_scale_factor >= kHighDPIDeviceScaleFactorThreshold;

  // For every picture layer that actually draws text, record its
  // lcd_text_disallowed_reason, once per layer and once weighted by the
  // (approximate) number of text pixels in its visible rect.
  for (const auto* layer :
       layer_tree_host_impl_->active_tree()->picture_layers()) {
    if (!layer->DrawsContent() || !layer->GetRasterSource())
      continue;
    const scoped_refptr<DisplayItemList>& display_item_list =
        layer->GetRasterSource()->GetDisplayItemList();
    if (!display_item_list)
      continue;

    int text_pixels = static_cast<int>(
        display_item_list->AreaOfDrawText(layer->visible_layer_rect()));
    if (!text_pixels)
      continue;

    auto reason = layer->lcd_text_disallowed_reason();
    if (is_high_dpi) {
      // Scaled by 1000 so buckets count kilopixels rather than pixels.
      UMA_HISTOGRAM_SCALED_ENUMERATION(kMetricNameLCDTextKPixelsHighDPI,
                                       reason, text_pixels, 1000);
      UMA_HISTOGRAM_ENUMERATION(kMetricNameLCDTextLayersHighDPI, reason);
    } else {
      UMA_HISTOGRAM_SCALED_ENUMERATION(kMetricNameLCDTextKPixelsLowDPI,
                                       reason, text_pixels, 1000);
      UMA_HISTOGRAM_ENUMERATION(kMetricNameLCDTextLayersLowDPI, reason);
    }
  }
}

}  // namespace cc
<filename>src/schema/types/RuleStatType.ts import { GraphQLObjectType, GraphQLID, GraphQLNonNull, GraphQLInt } from "graphql"; import { UserType } from "./UserType"; import { getUserByID } from "../../services/UserService"; import { RuleType } from "./RuleType"; import { getRuleByID } from "../../services/RuleService"; const RuleStatType = new GraphQLObjectType({ name: "RuleStatType", fields: { ruleID: { type: new GraphQLNonNull(GraphQLID) }, userID: { type: new GraphQLNonNull(GraphQLID) }, count: { type: new GraphQLNonNull(GraphQLInt) }, rule: { type: new GraphQLNonNull(RuleType), resolve(parentValue: IRuleStat) { return getRuleByID(parentValue.ruleID); } }, user: { type: new GraphQLNonNull(UserType), resolve(parentValue: IRuleStat) { return getUserByID(parentValue.userID); } } } }); export { RuleStatType };
from ConfigParser import ConfigParser import sys from ant.core import log from BtAtsPowerCalculator import BtAtsPowerCalculator from KurtKineticPowerCalculator import KurtKineticPowerCalculator from TacxBlueMotionPowerCalculator import TacxBlueMotionPowerCalculator from XBikePowerCalculator import XBikePowerCalculator from constants import * import hashlib VPOWER_DEBUG = True CONFIG = ConfigParser() _CONFIG_FILENAME = 'vpower.cfg' # If there's a command-line argument, it's the location of the config file if len(sys.argv) > 1: _CONFIG_FILENAME = sys.argv[1] SECTION = 'vpower' try: if VPOWER_DEBUG: print 'Open file ' + _CONFIG_FILENAME file = open(_CONFIG_FILENAME, 'rb') if VPOWER_DEBUG: print 'Parse config' CONFIG.readfp(file) except Exception as e: print "Error: "+repr(e.__class__) if VPOWER_DEBUG: print 'Get config items' # Type of sensor connected to the trainer SENSOR_TYPE = CONFIG.getint(SECTION, 'speed_sensor_type') # ANT+ ID of the above sensor SPEED_SENSOR_ID = CONFIG.getint(SECTION, 'speed_sensor_id') # Calculator for the model of turbo pc_class = globals()[CONFIG.get(SECTION, 'power_calculator')] POWER_CALCULATOR = pc_class() # For wind/air trainers, current air density in kg/m3 (if not using a BME280 weather sensor) POWER_CALCULATOR.air_density = CONFIG.getfloat(SECTION, 'air_density') # For wind/air trainers, how often (secs) to update the air density if there *is* a BME280 present POWER_CALCULATOR.air_density_update_secs = CONFIG.getfloat(SECTION, 'air_density_update_secs') # For tyre-driven trainers, the wheel circumference in meters (2.122 for Continental Home trainer tyre) POWER_CALCULATOR.wheel_circumference = CONFIG.getfloat(SECTION, 'wheel_circumference') # Overall correction factor, e.g. 
to match a user's power meter on another bike POWER_CALCULATOR.set_correction_factor(CONFIG.getfloat(SECTION, 'correction_factor')) # ANT+ ID of the virtual power sensor # The expression below will choose a fixed ID based on the CPU's serial number POWER_SENSOR_ID = int(int(hashlib.md5(getserial()).hexdigest(), 16) & 0xfffe) + 1 # If set to True, the stick's driver will dump everything it reads/writes from/to the stick. DEBUG = CONFIG.getboolean(SECTION, 'debug') POWER_CALCULATOR.set_debug(DEBUG or VPOWER_DEBUG) # Set to None to disable ANT+ message logging LOG = None # LOG = log.LogWriter(filename="vpower.log") # ANT+ network key NETKEY = <KEY>' if LOG: print "Using log file:", LOG.filename print ""
# repo: tylersiemers/securecrt-tools
# $language = "python"
# $interface = "1.0"

import os
import sys
import logging

# Add script directory to the PYTHONPATH so we can import our modules (only if run from SecureCRT)
if 'crt' in globals():
    script_dir, script_name = os.path.split(crt.ScriptFullName)
    if script_dir not in sys.path:
        sys.path.insert(0, script_dir)
else:
    script_dir, script_name = os.path.split(os.path.realpath(__file__))

# Now we can import our custom modules
from securecrt_tools import scripts
from securecrt_tools import utilities

# Import message box constants as names to simplify modifying the message box looks
from securecrt_tools.message_box_const import *

# Create global logger so we can write debug messages from any function (if debug mode setting is enabled in settings).
logger = logging.getLogger("securecrt")
logger.debug("Starting execution of {0}".format(script_name))


# ################################################   SCRIPT LOGIC   ###################################################

def script_main(session):
    """
    | SINGLE device script
    | Author: <NAME>
    | Email: <EMAIL>

    This script will grab the detailed CDP information from a Cisco IOS or NX-OS device and create SecureCRT sessions
    based on the information. By default all sessions will be created as SSH2, so you may have to manually change some
    sessions to make them work, depending on the device capabilities/configuration.

    Only devices that contain "Router" or "Switch" in their capabilities field of the CDP information will have sessions
    created for them. This skips phones, hosts like VMware or Server modules, and other devices that we don't usually
    log into directly).

    **NOTE ON DEFAULTS**: This script uses the SecureCRT Default Session settings as a base for any sessions that are
    created. The folder where the sessions are saved is specified in the 'settings.ini' file, and the hostname and IP
    are extracted from the CDP information. All other setting defaults are configured within SecureCRT.

    **Script Settings** (found in settings/settings.ini):

    * | **folder** - The path starting from the <SecureCRT Config>/Sessions/ directory where
      | the sessions will be created.
    * | **strip_domains** - A list of domain names that will be stripped away if found in the CDP remote device name.

    :param session: A subclass of the sessions.Session object that represents this particular script session (either
                    SecureCRTSession or DirectSession)
    :type session: sessions.Session
    """
    # Get script object that owns this session, so we can check settings, get textfsm templates, etc
    script = session.script

    # Start session with device, i.e. modify term parameters for better interaction (assuming already connected)
    session.start_cisco_session()

    # Validate device is running a supported OS
    session.validate_os(["IOS", "NXOS"])

    raw_cdp = session.get_command_output("show cdp neighbors detail")

    template_file = script.get_template("cisco_os_show_cdp_neigh_det.template")

    cdp_table = utilities.textfsm_parse_to_list(raw_cdp, template_file)

    # Since "System Name" is a newer NXOS feature -- try to extract it from the device ID when its empty.
    strip_list = script.settings.getlist("create_sessions_from_cdp", "strip_domains")
    for entry in cdp_table:
        # entry[2] is system name, entry[1] is device ID
        if entry[2] == "":
            entry[2] = utilities.extract_system_name(entry[1], strip_list=strip_list)

    session_list = create_session_list(cdp_table)

    # Get the destination directory from settings
    dest_folder = script.settings.get("create_sessions_from_cdp", "folder")

    for device in session_list:
        system_name = device[0]
        mgmt_ip = device[1]
        script.create_new_saved_session(system_name, mgmt_ip, folder=dest_folder)
        # Track the names of the hosts we've made already
        logger.debug("Created session for {0}.".format(system_name))

    # Calculate statistics
    num_created = len(session_list)
    num_skipped = len(cdp_table) - len(session_list)

    # BUG FIX: the skipped count must use field {2} (num_skipped); the original
    # message reused {0} so it displayed the created count twice and never
    # showed num_skipped at all.
    setting_msg = "{0} sessions created in the Sessions sub-directory '{1}'\n" \
                  "\n" \
                  "{2} sessions skipped (no IP, duplicate, or not Router/Switch)".format(num_created, dest_folder,
                                                                                         num_skipped)
    script.message_box(setting_msg, "Sessions Created", ICON_INFO)

    # Return terminal parameters back to the original state.
    session.end_cisco_session()


def create_session_list(cdp_list):
    """
    This function takes the TextFSM output of the CDP information and uses it to create a list of new SecureCRT
    sessions to create (system name and IP address).

    :param cdp_list: The TextFSM output after processing the "show cdp neighbor detail" output
    :type cdp_list: list

    :return: A list (system name and IP address) of the sessions that need to be created.
    :rtype: list
    """
    accepted_capabilities = {"Router", "Switch"}
    created = set()
    session_list = []
    for device in cdp_list:
        # Get capabilties field of CDP and parse into a set
        capabilities_string = device[9]
        capabilities = set(capabilities_string.strip().split(' '))

        # Determine if items in "accepted_capabilities" are also in the capabilties of this device.  If so, we'll get
        # a set of common items.  If not, we'll get an empty set.
        accepted = capabilities.intersection(accepted_capabilities)

        # Check for any items in our "accepted" set - If so, add it to the list to build a session, otherwise skip.
        if accepted:
            # Extract hostname and IP to create session
            system_name = device[2]

            # If we couldn't get a System name, use the device ID
            if system_name == "":
                system_name = device[1]

            if system_name in created:
                logger.debug("Skipping {0} because it is a duplicate.".format(system_name))
                # Go directly to the next device (skip this one)
                continue

            mgmt_ip = device[7]
            if mgmt_ip == "":
                if device[4] == "":
                    # If no mgmt IP or interface IP, skip device.
                    logger.debug("Skipping {0} because cannot find IP in CDP data.".format(system_name))
                    # Go directly to the next device (skip this one)
                    continue
                else:
                    mgmt_ip = device[4]
                    logger.debug("Using interface IP ({0}) for {1}.".format(mgmt_ip, system_name))
            else:
                logger.debug("Using management IP ({0}) for {1}.".format(mgmt_ip, system_name))

            # Add device to session_list
            session_list.append((system_name, mgmt_ip,))
            # Create a new session from the default information.
            created.add(system_name)
        else:
            logger.debug("Skipping {0} because capabilties are {1}, which does not contain any of {2}."
                         .format(device[1], capabilities, accepted_capabilities))

    return session_list


# ################################################  SCRIPT LAUNCH   ###################################################

# If this script is run from SecureCRT directly, use the SecureCRT specific class
if __name__ == "__builtin__":
    # Initialize script object
    crt_script = scripts.CRTScript(crt)
    # Get session object for the SecureCRT tab that the script was launched from.
    crt_session = crt_script.get_main_session()
    # Run script's main logic against our session
    script_main(crt_session)
    # Shutdown logging after
    logging.shutdown()

# If the script is being run directly, use the simulation class
elif __name__ == "__main__":
    # Initialize script object
    direct_script = scripts.DebugScript(os.path.realpath(__file__))
    # Get a simulated session object to pass into the script.
    sim_session = direct_script.get_main_session()
    # Run script's main logic against our session
    script_main(sim_session)
    # Shutdown logging after
    logging.shutdown()
/// <reference path="quic.abstracts.ts" /> namespace Quic{ export class Fieldset implements IFieldset,FieldsetOpts{ quic:IQuic; fields:{[index:string]:IField}; langs?:{[index:string]:string}; opts:FieldsetOpts; defs:FieldsetDefs; //数据访问器 accessFactory:IAccessFactory; constructor(quic:IQuic,opts:FieldsetOpts){ this.defs = this.defs = opts; this.quic = quic; this.accessFactory = quic.accessFactory; let fields:{[index:string]:IField} = this.fields = {}; for(var n in opts.fields){ let fieldDefs= opts.fields[n]; if(!fieldDefs.name)fieldDefs.name = n; fields[fieldDefs.name] = new Field(this,fieldDefs); } } //多语言文本处理 _T(text:string,mustReturn?:boolean):string{return; /* let txt = this.langs[text]; if(txt===undefined) { if(this.localization) txt = this.localization._T(text,mustReturn); } if(txt===undefined && this.langs!==langs) txt = langs[text]; return (txt===null || txt===undefined) && mustReturn===true?"":(text===null|| text===undefined?"":text.toString()); */ } fieldValue(fieldOpts:FieldOpts,fieldElement:HTMLElement,data:any,value?:any):any{ /* let field :Field; let accessor :(data:{[index:string]:any},value?:any)=>any;; if(fieldOpts.mappath&& fieldOpts.mappath!==field.name){ accessor = this.accessorFactory.cached(fieldOpts.mappath); } if(value===undefined){ if(!fieldElement){ //从data取值 return data?(accessor?accessor(data):data[fieldOpts.name]):undefined; }else { //从element中获取 value = field.viewRenderer.getValue(field); if(data){ if(accessor) accessor(data,value); else data[fieldOpts.name] = value; } return value; } }else { if(fieldElement) field.viewRenderer.setValue(field,value); if(data) {if(accessor) accessor(data,value); else data[fieldOpts.name] = value;} return this; }*/ } } }
/** * Handles the "Project->Properties..." command. * Opens the project's properties dialog, which allows the user to change * some attributes of the current project. * source files. */ void KScope::slotProjectProps() { ProjectBase* pProj; ProjectBase::Options opt; pProj = m_pProjMgr->curProject(); if (!pProj) return; if (pProj->isTemporary()) { KMessageBox::error(0, i18n("The Project Properties dialogue is not " "available for temporary projects.")); return; } NewProjectDlg dlg(false, this); pProj->getOptions(opt); dlg.setProperties(pProj->getName(), pProj->getPath(), opt); if (dlg.exec() != QDialog::Accepted) return; dlg.getOptions(opt); pProj->setOptions(opt); initCscope(); SymbolCompletion::initAutoCompletion(opt.bACEnabled, opt.nACMinChars, opt.nACDelay, opt.nACMaxEntries); m_pFileView->setRoot(pProj->getSourceRoot()); }
// handleError will attempt to handle an error. // If there is an error value there, then it will Notify subscribers about the error, and return with a true. // In case there is no error, the function returns and "isErrorHandled" as false. func (sm *ListenNotifySubscriptionManager) handleError(ctx context.Context, err error) (isErrorHandled bool) { if err == nil { return false } sm.subs.lock.RLock() defer sm.subs.lock.RUnlock() for _, sub := range sm.subs.creator { _ = sub.HandleError(ctx, err) } for _, sub := range sm.subs.updater { _ = sub.HandleError(ctx, err) } for _, sub := range sm.subs.deleter { _ = sub.HandleError(ctx, err) } return true }
# Example: inspect the point-data attributes of a pyvista example mesh
# that carries a per-point vector array.
from pyvista import examples

# Sphere mesh whose points each carry a 3-component "vectors" array.
mesh = examples.load_sphere_vectors()

# Display the DataSetAttributes collection for the points; the "vectors"
# array is both the active scalars and the active vectors (see below).
mesh.point_data
# Expected:
## pyvista DataSetAttributes
## Association     : POINT
## Active Scalars  : vectors
## Active Vectors  : vectors
## Active Texture  : None
## Active Normals  : Normals
## Contains arrays :
##     Normals                 float32    (842, 3)             NORMALS
##     vectors                 float32    (842, 3)             VECTORS
# Read a 5x5 grid of integers, one row per input line.
matrix = []
for _ in range(5):
    matrix.append(list(map(int, input().split())))

# Find the single cell containing 1 and print its Manhattan distance from
# the center cell (2, 2).  Using enumerate avoids the fragile and O(n)
# matrix.index(row) re-lookup, and `break` stops once the answer is found.
for row_index, row in enumerate(matrix):
    if 1 in row:
        print(abs(2 - row_index) + abs(2 - row.index(1)))
        break
News in Science

Elephants smart as chimps, dolphins

Working together

A study of elephants has found that they not only aced a test of their intelligence and ability to cooperate, but also found new ways to complete it. The study, published in the latest Proceedings of the National Academy of Sciences, highlights not only the intelligence of individual elephants, but also the ability of these animals to cooperate and understand the value of teamwork. Scientists now believe elephants are in the same league as chimpanzees and dolphins as being among the world's most cognitively advanced animals. "Elephant sociality is very complex," says lead author Joshua Plotnik. "Social groups are made up of matriarchal herds (an older female is in charge), and varying levels of relatedness among members. Cooperation in elephants was most likely necessary in a context of communal care for, and protection of, young." "In the wild, there are fascinating anecdotes of elephants working together to lift or help fallen members, and forming clusters to protect younger elephants," added Plotnik, a Cambridge University researcher who is also head of research at Thailand's Golden Triangle Asian Elephant Foundation.

Elephants tackle classic experiment

Tests of elephant intelligence and their other abilities are rare, simply because working with these large and potentially dangerous animals poses risks. To meet the challenge, Plotnik and colleagues Richard Lair, Wirot Suphachoksahakun, and Frans de Waal reworked a classic 1930s experiment used on primates. The researchers positioned a sliding table, holding enticing red bowls full of corn, some distance away from a volleyball net. A rope was tied around the table such that the table would only move if two elephants working together pulled on the dangling rope ends. If just one elephant pulled, the rope would unravel. To get to the front of the volleyball net, the elephants had to walk down two separate, roped-off lanes.
A total of 12 male and female elephants from the Thai Elephant Conservation Center in Lampang, Thailand, participated. It's estimated that fewer than 2500 of these animals are left in the Thai jungle, so conservation efforts now are critical. After quickly learning that the corn-on-the-table task could not be successfully completed solo, elephants would wait up to 45 seconds for the second "partner" elephant to show up. If the researchers did not release this second elephant, the first one basically looked around as if to say: "You've got to be kidding. It takes two to do this." In most cases, the elephants got the corn. Thinking outside the box Two elephants, named Neua Un and JoJo, even figured out how to outwit the researchers. "We were pleasantly surprised to see the youngest elephant, Neua Un, use her foot to hold the rope so that her partner had to do all the work," Plotnik said. "I hadn't thought about this beforehand, and Neua Un seemed to figure it out by chance, but it speaks volumes to the flexibility of elephant behavior that she was able to figure this out and stick to it." The other "cheater," JoJo, didn't even bother to walk up to the volleyball net unless his partner, Wanalee, was released. "Perhaps he had learned that if he approached the rope without her, he'd fail," says Plotnik, adding that such advanced learning, problem-solving, and cooperation are rare in the animal kingdom. Other animals clearly engage in teamwork, but he thinks they are "pre-programmed for it," unlike elephants that seem to understand the full process. Animal experts from around the world are praising the new research. "This is the first experimental evidence for learned cooperative behavior in this socially sophisticated species," says Professor Diana Reiss of Hunter College in New York. Clayton says the findings support the theory "that cognitive abilities evolved independently in animals that are as very distantly related from us as elephants and crows." 
Associate Professor Satoshi Hirata of Japan's Great Ape Research Institute was "amazed" when he first saw the videos of the elephant experiments. "We tend to think that elephants and humans are greatly different," says Hirata, "but the study results show that we share some social mind skills with elephants."
const DEFAULT_ISSUERS = [ { name: 'Solid Community', uri: 'https://solidcommunity.net' }, { name: 'Solid Web', uri: 'https://solidweb.org' }, { name: 'Inrupt.net', uri: 'https://inrupt.net' }, { name: 'pod.Inrupt.com', uri: 'https://broker.pod.inrupt.com' } ] /** * @returns - A list of suggested OIDC issuers */ export function getSuggestedIssuers (): { name: string, uri: string }[] { // Suggest a default list of OIDC issuers const issuers = [...DEFAULT_ISSUERS] // Suggest the current host if not already included const { host, origin } = new URL(location.href) const hosts = issuers.map(({ uri }) => new URL(uri).host) if (!hosts.includes(host) && !hosts.some(existing => isSubdomainOf(host, existing))) { issuers.unshift({ name: host, uri: origin }) } return issuers } function isSubdomainOf (subdomain: string, domain: string): boolean { const dot = subdomain.length - domain.length - 1 return dot > 0 && subdomain[dot] === '.' && subdomain.endsWith(domain) }