/*
 * Create iterator to get SID to UID/GID mappings
 *
 * Output:
 * iter - iterator
 */
idmap_stat
idmap_iter_mappings(idmap_iter_t **iter, int flag)
{
	idmap_iter_t			*tmpiter;
	idmap_list_mappings_1_argument	*arg = NULL;

	__ITER_CREATE(tmpiter, arg, IDMAP_LIST_MAPPINGS);
	arg->flag = flag;
	*iter = tmpiter;
	return (IDMAP_SUCCESS);
}
package com.mmall.beans;

import lombok.Getter;
import lombok.Setter;

import javax.validation.constraints.Min;

/**
 * @author liliang
 * @date 2017/11/20.
 */
public class PageQuery {

    @Getter
    @Setter
    @Min(value = 1, message = "Invalid current page number")
    private int pageNo = 1;

    @Getter
    @Setter
    @Min(value = 1, message = "Invalid page size")
    private int pageSize = 10;

    // offset is derived from pageNo and pageSize; see getOffset()
    private int offset;

    public int getOffset() {
        return (pageNo - 1) * pageSize;
    }
}
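For context, a minimal usage sketch of this bean: it only illustrates the (pageNo - 1) * pageSize arithmetic performed by getOffset(); the demo class and the LIMIT/OFFSET comment are hypothetical and not part of the original repository.

// Hypothetical usage sketch (not part of the original repository).
public class PageQueryDemo {
    public static void main(String[] args) {
        PageQuery pageQuery = new PageQuery();
        pageQuery.setPageNo(3);    // third page
        pageQuery.setPageSize(10); // ten rows per page

        // (pageNo - 1) * pageSize = (3 - 1) * 10 = 20 rows to skip
        System.out.println("offset = " + pageQuery.getOffset());   // prints 20
        System.out.println("limit  = " + pageQuery.getPageSize()); // prints 10
        // e.g. a paging SQL clause would then be "LIMIT 10 OFFSET 20" (dialect-dependent, assumed)
    }
}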
The Final Fantasy franchise is one of the most successful video game series in history. It’s been 30 years since the first game in the series was released, and to celebrate, ‘Distant Worlds: Music From Final Fantasy’ is coming to Australia for the very first time.

The concert event will feature legendary Japanese video game composer Nobuo Uematsu, a full symphony orchestra conducted by Grammy award-winner Arnie Roth, and a slew of multimedia elements such as screen images created especially for the event.

Uematsu is a composer of note who wrote – among other scores – the Final Fantasy VIII theme song ‘Eyes On Me’, which sold over 400,000 copies and took out “Song of the Year (Western Music)” at the Japan Gold Disc Awards in 1999 – the first time a video game theme had won the prize.

The music featured in Distant Worlds has appeared in numerous Final Fantasy games throughout the decades, with new scores and arrangements also created to bolster the experience. Tickets for the event are on sale from Thursday February 16.
use std::{fs, path::Path};

fn mul_sum_directions(dir: &Vec<&str>, amount: &Vec<i64>) -> i64 {
    let mut horizontal = 0;
    let mut depth = 0;

    assert_eq!(dir.len(), amount.len());

    for i in 0..dir.len() {
        if dir[i] == "forward" {
            horizontal += amount[i];
        } else if dir[i] == "up" {
            depth -= amount[i];
        } else if dir[i] == "down" {
            depth += amount[i];
        } else {
            dbg!("OH BOI");
        }
    }

    horizontal * depth
}

fn mul_sum_aim(dir: &Vec<&str>, amount: &Vec<i64>) -> i64 {
    let mut horizontal = 0;
    let mut depth = 0;
    let mut aim = 0;

    assert_eq!(dir.len(), amount.len());

    for i in 0..dir.len() {
        if dir[i] == "forward" {
            horizontal += amount[i];
            depth += aim * amount[i];
        } else if dir[i] == "up" {
            aim -= amount[i];
        } else if dir[i] == "down" {
            aim += amount[i];
        } else {
            dbg!("OH BOI");
        }
    }

    horizontal * depth
}

fn main() {
    let filename = Path::new("./dat/input.dat");
    let contents = fs::read_to_string(filename).unwrap().trim().to_string();
    let split = contents.split("\n");

    let mut dir = Vec::new();
    let mut amount = Vec::new();

    for s in split {
        let mut lines = s.split_whitespace();
        dir.push(lines.next().unwrap());
        amount.push(lines.next().unwrap().parse::<i64>().unwrap());
    }

    // Part 1
    dbg!(mul_sum_directions(&dir, &amount));

    // Part 2
    dbg!(mul_sum_aim(&dir, &amount));
}
// Basic types

// Boolean
let isBoolean: boolean = true;
// Special: null/undefined are only assignable to boolean with strictNullChecks disabled
let isBoolean1: boolean = null;
let isBoolean2: boolean = undefined;
console.log(isBoolean);

// Number
let num: number = 1
let num1: number = 0x123
console.log(num, num1)

// String
let str: string = '11'

// Arrays
let arr: number[] = [1, 2];
let arr1: Array<any> = [1, 2, '3'];

// Tuple (undefined is only assignable here with strictNullChecks disabled)
let tup: [string, number, boolean] = ['1', 1, undefined];
// tup[3] = undefined; // removed: index 3 is out of range for a 3-element tuple

// Enums (two declarations with the same name are merged)
enum Color { red, green, black };
console.log(Color)
enum Color { Red = 1, Green = 2, Blue = 4 }
let c: Color = Color.Green;
console.log(c);

// any: can be assigned any value
let zzy: any = undefined;
zzy = 1;
let anyArr: any[] = [1, "2", {}]

// void
function zzya(): void {
    alert(1);
}
let aa: void = null;
let aa1: void = undefined;

// null and undefined
let u: null = null;
let n: undefined = undefined;

// never
// A function returning never must not have a reachable end point
function error(message: string): never {
    throw new Error(message);
}
// Inferred return type is never
function fail() {
    return error("Something failed");
}
// A function returning never must not have a reachable end point
function infiniteLoop(): never {
    while (true) {
    }
}

// Type assertions
let stre: any = 'wangcongcong';
let as1: number = (<string>stre).length;
let as2: number = (stre as string).length;
import java.awt.BorderLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.util.ArrayList;

import javax.swing.AbstractButton;
import javax.swing.JComboBox;
import javax.swing.JPanel;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.JTextComponent;

/**
 * Editor is the base class for things which load and save
 * an Obj and provide a changed callback.
 *
 * @author Brian Frank
 * @creation 26 Sept 05
 * @version $Revision$ $Date$
 */
public abstract class Editor extends JPanel {

////////////////////////////////////////////////////////////////
// Constructor
////////////////////////////////////////////////////////////////

  public Editor() {
    super(new BorderLayout());
  }

////////////////////////////////////////////////////////////////
// Methods
////////////////////////////////////////////////////////////////

  public final boolean isEditable() {
    return editable;
  }

  public final void setEditable(boolean editable) {
    if (this.editable == editable) return;
    this.editable = editable;
    doSetEditable(editable);
  }

  public final void load(Obj obj) {
    suppressChanged = true;
    try {
      doLoad(obj);
    } finally {
      suppressChanged = false;
    }
  }

  public final void save(Obj obj) throws Exception {
    doSave(obj);
  }

////////////////////////////////////////////////////////////////
// Overrides
////////////////////////////////////////////////////////////////

  protected abstract void doSetEditable(boolean editable);
  protected abstract void doLoad(Obj obj);
  protected abstract void doSave(Obj obj) throws Exception;

////////////////////////////////////////////////////////////////
// Listener
////////////////////////////////////////////////////////////////

  public void addListener(Listener listener) {
    listeners.add(listener);
  }

  public void removeListener(Listener listener) {
    listeners.remove(listener);
  }

  public void fireChanged() {
    if (suppressChanged) return;
    Listener[] listeners = (Listener[]) this.listeners.toArray(new Listener[this.listeners.size()]);
    for (int i = 0; i < listeners.length; ++i)
      listeners[i].changed(this);
  }

  public static interface Listener {
    public void changed(Editor editor);
  }

////////////////////////////////////////////////////////////////
// Change Adaptors
////////////////////////////////////////////////////////////////

  public void registerForChanged(AbstractButton button) {
    button.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) { fireChanged(); }
    });
  }

  public void registerForChanged(JTextComponent text) {
    text.getDocument().addDocumentListener(new DocumentListener() {
      public void changedUpdate(DocumentEvent e) { fireChanged(); }
      public void insertUpdate(DocumentEvent e) { fireChanged(); }
      public void removeUpdate(DocumentEvent e) { fireChanged(); }
    });
  }

  public void registerForChanged(JComboBox combo) {
    registerForChanged((JTextComponent) combo.getEditor().getEditorComponent());
    combo.addItemListener(new ItemListener() {
      public void itemStateChanged(ItemEvent e) { fireChanged(); }
    });
  }

////////////////////////////////////////////////////////////////
// Fields
////////////////////////////////////////////////////////////////

  ArrayList listeners = new ArrayList();
  boolean editable = true;
  boolean suppressChanged = false;
}
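The template-method and listener pattern above is easiest to see with a concrete subclass. The sketch below is illustrative only: it assumes a hypothetical Obj type with getStr/setStr accessors (not shown in this file) and wires a single text field through registerForChanged so user edits raise the changed callback while load() stays silent.

// Hypothetical subclass sketch; Obj.getStr/Obj.setStr are assumed accessors,
// not part of the Editor class shown above.
public class NameEditor extends Editor {
  private final javax.swing.JTextField field = new javax.swing.JTextField(20);

  public NameEditor() {
    add(field, java.awt.BorderLayout.CENTER);
    registerForChanged(field);            // edits now call fireChanged()
  }

  protected void doSetEditable(boolean editable) {
    field.setEditable(editable);
  }

  protected void doLoad(Obj obj) {
    // suppressChanged is true while load() runs, so setting the text does not fire changed()
    field.setText(obj.getStr("name"));    // assumed Obj accessor
  }

  protected void doSave(Obj obj) throws Exception {
    obj.setStr("name", field.getText());  // assumed Obj accessor
  }
}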
/**
 * \brief Set undefined length burst type of the specified master.
 *
 * \param ul_id Master index.
 * \param burst_type Undefined length burst type.
 */
void matrix_set_master_burst_type(uint32_t ul_id, burst_type_t burst_type)
{
#if (SAMV70 || SAMS70 || SAME70)
	Matrix *p_matrix = MATRIX;
	volatile uint32_t *p_MCFG;
	volatile uint32_t ul_reg;
	uint32_t ul_dlt;

	ul_dlt = (uint32_t)&(p_matrix->MATRIX_MCFG1);
	ul_dlt = ul_dlt - (uint32_t)&(p_matrix->MATRIX_MCFG0);
	p_MCFG = (volatile uint32_t *)((uint32_t)&(p_matrix->MATRIX_MCFG0) + ul_id * ul_dlt);
	ul_reg = *p_MCFG & (~MATRIX_MCFG0_ULBT_Msk);
	*p_MCFG = ul_reg | (uint32_t)burst_type;
#else
	Matrix *p_matrix = MATRIX;
	volatile uint32_t ul_reg;

	ul_reg = p_matrix->MATRIX_MCFG[ul_id] & (~MATRIX_MCFG_ULBT_Msk);
	p_matrix->MATRIX_MCFG[ul_id] = ul_reg | (uint32_t)burst_type;
#endif
}
/**
 * Produces a variable-order predictor by first-order learning, by tricking the
 * modelling into producing multiple first-order representations of states.
 *
 * @author David Rawlinson
 * @copyright David Rawlinson
 */
public class VariableOrderMM extends FirstOrderMM {

    public Volume _vif0; // unadulterated copy of vif
    public Volume _vof0; // unadulterated copy of vof
    public Volume _vli;  // unpredicted bias
    public Volume _vui;  // unpredicted bias
    public Volume _less;
    public Volume _more;
    public Volume _lost;
    public Volume _promotion;

    public VariableOrderMM( VolumeMap vm, String name, Schedule s, Dimensions input ) {
        super( vm, name, s, input );

        _vli = new Volume( input );
        _vui = new Volume( input );
        _vif0 = new Volume( input );
        _vof0 = new Volume( input );
        _less = new Volume( input );
        _more = new Volume( input );
        _lost = new Volume( input );
        _promotion = new Volume( input );

        _p.set( "inhibition-sigma", 1.7 );
        _p.set( "inhibition-delta-sigma", 0.5 );
    }

    @Override
    public int order() {
        return ORDER_N;
    }

    @Override
    public void ff() {
        // 1. modify input with DoG.
        // 2. normal/superclass
        // 3. modify output with unpredicted inhibition
        // 4. renormalize output
        preprocess();
        super.ff();
    }

    @Override
    public void predict() {
        super.predict();
        postprocess();
    }

    public void postprocess() {
        _vof0.copy( _vof );

        inhibitUnpredicted( _vif, _vui ); // give it the inhibited winner-take-all input

        _vof.mul( _vui ); // some are inhibited
        _vof.scaleVolume( 1.0f );

        // new - old = mass gained
        // old - new = mass lost
        _lost.subLessThan( _vof0, _vof, 0.0f, 0.0f ); // this is the distribution of lost mass.

        double lostMass = _lost.sum();
        //System.out.println( "lost mass="+lostMass );

        locallyPromote( _lost, _promotion );

        _promotion.scaleVolume( (float)lostMass ); // this is the increase weighted by how much it deserves it

        _vof.add( _promotion );
        _vof.scaleVolume( 1.0f ); // this is the increase weighted by how much it deserves it
    }

    public void preprocess() {
        _vif0.copy( _vif );

        locallyInhibit( _vif, _vli ); // mask those reduced/increased

        // old - new = decreased. eg 5-2 = 3
        // new - old = increased eg
        _vif.mul( _vli );
        _vif.scaleVolume( 1.0f );

        _less.subLessThan( _vif0, _vif, 0.0f, 0.0f );
        _more.subLessThan( _vif, _vif0, 0.0f, 0.0f ); // pos. increase

        double massLost = _less.sum();
        double massGained = _more.sum();
        double netMass = massGained - massLost;

        // how to distribute the extra?
        // mask the ones that decreased to zero
        // _more is 0 where it decreased otherwise the increase.
        // hence the extra should be distributed proportionally, ie most extra around the centre of the fn
        // If I add the extra mass and renormalize then I've ensured the mode has the same mass.
        //System.out.println( "net mass="+netMass );
        _more.scaleVolume( (float)netMass ); // this is the increase weighted by how much it deserves it

        _vif.add( _more );
        _vif.scaleVolume( 1.0f ); // this is the increase weighted by how much it deserves it
    }

    //Dave 2 Gids:
    //Shopping List - comment this out to make it compile.
    //1. Sigma,dSigma tuning. I don't want it to be a param. How can we relate it to SOM sigma?
    //   Currently, sigma=0.25 SOM sigma and dSigma is constant. I think dSigma should
    //   be related to sigma?
    //2. SOM bias weight coeff. In NeocorticalUnit line 163 I add a constant value to dilute
    //   the effect of the FB pass to the SOM. This param is quite sensitive...
    //3. Idea - instead of only inhibiting the unpredicted model, I should inhibit it AND
    //   promote its neighbours, under the assumption they're similar. This could be
    //   achieved by applying a Laplacian of Gaussian function around anything inhibited
    //   similar to the DoG inhibition.. in fact the sigmas might be related.
    //4. Normalization / correctness of unprediction. There are a number of notes in the
    //   code below RE inhibitUnpredicted() function because I'm not sure it's
    //   correct yet. Should I normalize the unprediction influence by scaling by
    //   the strongest weight inbound to the current model? But the inbound weights
    //   are not normalized, only the outbound ones!? Should I use the nonlinear
    //   Sigmoid weight influence (it seems better from a handful of runs)? Should
    //   I add some noise to the inhibition for more robust solution given it's a
    //   gradient descent search for the set of models to use to represent an unknown
    //   number of sequences...

    //Gids 2 Dave:
    //1. Sigma,dSigma tuning. I don't want it to be a param. How can we relate it to SOM sigma?
    //   Currently, sigma=0.25 SOM sigma and dSigma is constant. I think dSigma should
    //   be related to sigma?
    //
    //   -> dsigma defines the width of the inhibitory region (95% of the mass within 2*stddev),
    //      i.e. what is the radius about which we want to inhibit.
    //      It should be 3 x sigma (accentuate one model with sigma, and inhibit around it with radius of 1-2 models)
    //
    //2. SOM bias weight coeff. In NeocorticalUnit line 163 I add a constant value to dilute
    //   the effect of the FB pass to the SOM. This param is quite sensitive...
    //
    //   -> is there a metric that can be measured as this parameter is modified, that must be maximised/minimised,
    //      such as sparseness of the 1mm transition weights (should be minimised to increase the number of states being used)
    //      ---> then could do some online adaption
    //
    //3. Idea - instead of only inhibiting the unpredicted model, I should inhibit it AND
    //   promote its neighbours, under the assumption they're similar. This could be
    //   achieved by applying a Laplacian of Gaussian function around anything inhibited
    //   similar to the DoG inhibition.. in fact the sigmas might be related.
    //
    //   -> i understood it to already maximally bias models that have an existing edge (i.e. multiply by 1.0)
    //      there may be many models that don't have an edge, so you don't really want to bias toward all of their neighbours
    //
    //4. Normalization / correctness of unprediction. There are a number of notes in the
    //   code below RE inhibitUnpredicted() function because I'm not sure it's
    //   correct yet. Should I normalize the unprediction influence by scaling by
    //   the strongest weight inbound to the current model? But the inbound weights
    //   are not normalized, only the outbound ones!? Should I use the nonlinear
    //   Sigmoid weight influence (it seems better from a handful of runs)? Should
    //   I add some noise to the inhibition for more robust solution given it's a
    //   gradient descent search for the set of models to use to represent an unknown
    //   number of sequences...

    //Dave 2 Gids
    //VOMM not perfect but I could play with Sigma, Im happy enough.
    // 2 problems:
    // a) RSOM is a POS. Unless its totally orthogonal it confuses sequences due
    //    to the low influence of historical values. But it's good that it handles
    //    sequences of unknown length? Does it?
    // b) It consistently predicts blanks for some reason - perhaps due to the
    //    low-activation weights .. this is not a problem when the actual input
    //    comes along, but it is a problem for the accuracy of prediction alone!
    //    Often the blanks are the strongest prediction!!? I really dont understand this.

//    @Override public void normalizeWeights() {
//        _vw.scaleVolume( 1.0f );
//    }

    protected void locallyInhibit( Volume activation, Volume inhibition ) {
        double sigma = _p.get( "inhibition-sigma" );
        double dSigma = _p.get( "inhibition-delta-sigma" );
//        System.out.println( "inh.sigma="+sigma+" dSigma="+dSigma );

        double sigmaWide = sigma + dSigma;
        double sigmaNarrow = sigma;// - dSigma;

        Coordinate cMax = activation.maxAt();
        double max = activation.get( cMax );

        Coordinate c = activation.start();

        do {
            // subtract wide (+d) from narrow (-d) gaussians
            double distance = c.euclidean( cMax );

            // DAVE TO GIDS: Should this be here? (or more exact form(discretized))
//            if( distance < 1.0 ) continue;

            double DoG = Maths.gaussian( distance, sigmaNarrow )
                       - Maths.gaussian( distance, sigmaWide );

            DoG *= max; // so no effect if zero

            inhibition.set( c, (float)DoG );
        } while( c.next() );

        inhibition.scaleRange( 0.0f, 1.0f );
    }

    protected void locallyPromote( Volume lostMass, Volume promotion ) {
        double sigma = _p.get( "inhibition-sigma" );
        double dSigma = _p.get( "inhibition-delta-sigma" );
//        System.out.println( "inh.sigma="+sigma+" dSigma="+dSigma );

        double sigmaWide = sigma + dSigma;
        double sigmaNarrow = sigma;// - dSigma;

        double verySmall = 0.0001;

        Coordinate c1 = lostMass.start();

        do {
            double weight = lostMass.get( c1 );

            if( weight < verySmall ) continue;

            Coordinate c2 = promotion.start();

            do {
                double distance = c2.euclidean( c1 );

                if( distance < 1.0 ) continue;

                // subtract narrow (-d) gaussians from wide (+d)
                double dog = Maths.gaussian( distance, sigmaWide )
                           - Maths.gaussian( distance, sigmaNarrow );

                dog *= weight; // so no effect if zero

                promotion.add( c2, (float)dog );
            } while( c2.next() );
        } while( c1.next() );
    }

    // inhibit at t+1, things that are highly active @ time t and ???
    // ??? how to detect the MIS-predictions?
    public void inhibitUnpredicted( Volume activation, Volume inhibition ) {
        // if models that precede models in v were NOT found, inhibit v
        float max = activation.max();
        float reciprocalMax = 1.0f / max;

        if( max <= 0.0f ) {
            inhibition.set( 1.0f );
            //System.out.println( "nothing to inhibit" );
            return;
        }

        int sizeW = _dw.size( "w.i" );

        for( int w2 = 0; w2 < sizeW; ++w2 ) {

            float sum = 0.0f;

            // why does the normalization below produce shitty results (locked into fixed cycle)
//            float sumWeights = 0.0f;
//            float wMax = 0.0f;
//
//            for( int w1 = 0; w1 < sizeW; ++w1 ) {
//                int offsetW = w1 * sizeW + w2; // P( transition i=w1,j=w2 | i=w1 )
//
//                float w = _vw._model[ offsetW ];
//
//                if( w > wMax ) wMax = w; // max weight inbound to w2 - is this normalized?
//            }
//
//            float reciprocalWMax = 1.0f / wMax;

            for( int w1 = 0; w1 < sizeW; ++w1 ) {
                int offsetW = w1 * sizeW + w2; // P( transition i=w1,j=w2 | i=w1 )

                float a = activation._model[ w1 ]; // before

                // ?? not sure about this feature below:
                a *= reciprocalMax; // so if A is max, no inhibition

                float w = _vw._model[ offsetW ];

                //?scale by highest weight inbound?
//                w *= reciprocalWMax;

                // ?? the line below - is it better? Maybe...
                w = (float)Maths.logSigmoid1( w );

                float hw = (1.0f-a) * w;

                hw = 1.0f - hw;

                sum += hw; // assoc w1->w2
//                sumWeights += w;

                // w1 inhibited @ t+1 when there IS a relationship w2->w1, and
                // w2 is NOT active. In all other cases, w1 is NOT inhibited.
                // The intent is to force other models to form other relationships.
                // w1 = 1   1-w1 = 0   w=0.8   0.8*(1.0-1.0)=0.0   Strong rel, expected
                // w1 = 0   1-w1 = 1   w=0.8   0.8*(1.0-0.0)=0.8   Strong rel, NOT expected
                // w1 = 1   1-w1 = 0   w=0.0   0.0*(1.0-1.0)=0.0   No rel, expected
                // w1 = 0   1-w1 = 1   w=0.0   0.0*(1.0-1.0)=0.0   No rel, NOT expected
                // Then invert, cos we want to mul * 1 to keep it, * 0 to filter it.
            }

            inhibition._model[ w2 ] = sum;// + 0.02f; // not entirely removed
        }

        inhibition.scaleRange( 0.0f, 1.0f );
    }
}
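To make the inhibition profile above easier to inspect, here is a small, standalone Java sketch that evaluates the same narrow-minus-wide difference of Gaussians used in locallyInhibit() for the default parameters (inhibition-sigma = 1.7, inhibition-delta-sigma = 0.5). It assumes Maths.gaussian is a normalised Gaussian density; the library's actual definition is not shown here, so absolute values (and whether the profile goes negative in the surround) depend on that assumption.

// Standalone sketch; the gaussian() helper below is an assumption, not the library's Maths.gaussian.
public class DoGProfileSketch {

    static double gaussian(double distance, double sigma) {
        double z = distance / sigma;
        return Math.exp(-0.5 * z * z) / (sigma * Math.sqrt(2.0 * Math.PI));
    }

    public static void main(String[] args) {
        double sigma = 1.7;          // "inhibition-sigma"
        double dSigma = 0.5;         // "inhibition-delta-sigma"
        double sigmaNarrow = sigma;
        double sigmaWide = sigma + dSigma;

        // Print the DoG value at increasing distances from the winning model.
        for (double distance = 0.0; distance <= 6.0; distance += 1.0) {
            double dog = gaussian(distance, sigmaNarrow) - gaussian(distance, sigmaWide);
            System.out.printf("d=%.1f  DoG=%+.4f%n", distance, dog);
        }
    }
}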
Ocular Cyclorotation and Corneal Axial Misalignment in Femtosecond Laser-Assisted Cataract Surgery

ABSTRACT

Purpose: To explore ocular cyclorotation and the source of corneal axial misalignment during femtosecond laser-assisted cataract surgery (FLACS).

Methods: Forty-five consecutive patients (50 eyes) who had undergone FLACS (LenSx Laser System, Alcon Inc) were recruited. We took screenshots from videos of FLACS to analyze ocular cyclorotation and the real angle between the primary incision and secondary incision (RAPS). In addition, crystalline lens tilt and the theoretic angle between the primary and secondary incisions (TAPS) were also calculated.

Results: The mean absolute value of ocular cyclorotation was 8.03 ± 4.48 degrees (0–19.1 degrees). The crystalline lens tilt was 3.30 ± 1.44 degrees (0.93–6.44 degrees), and the mean preoperative uncorrected visual acuity was 0.89 ± 0.50 LogMAR units. Pearson bivariate correlation analysis showed a significant positive correlation between ocular cyclorotation and crystalline lens tilt (r = 0.37, p = .008), while ocular cyclorotation correlated negatively with axial length (r = −0.29, p = .038). In addition, the TAPS was 89.78 ± 1.45 degrees and the RAPS was 85.68 ± 2.04 degrees; the angle error was 4.11 ± 1.28 degrees (p < .001).

Conclusions: Ocular cyclorotation commonly occurred during FLACS. Increased axial length was associated with less ocular cyclorotation, and increased crystalline lens tilt was related to more ocular cyclorotation. Importantly, systematic machine-related errors during corneal astigmatism correction by arcuate incision in FLACS should be taken into consideration.
// frontend/src/chains/eth/helper.ts

import {Wallet} from 'ethers';

export const HELPER_MSG_ETH_PUBLIC_KEY =
    "An Eth Public Key must begin with 0x04 followed by 128 hex characters";

// This function assumes privateKey is validated before the call.
export const signTokenForEthChain = async (privateKey: string, token: string) => {
    try {
        const wallet = new Wallet(privateKey);
        return await wallet.signMessage(token);
    } catch (e) {
        console.log(e);
        throw e;
    }
}

export const isEthPublicKeyFormatValid = (publicKey: string): { isValid: boolean, error: string } => {
    let error = HELPER_MSG_ETH_PUBLIC_KEY;
    const isValid = /^0x04([A-Fa-f0-9]{128})$/.test(publicKey);
    if (isValid) {
        error = "";
    }
    return {isValid, error};
}
/** * Kinesis Video Producer Callbacks Provider */ #define LOG_CLASS "CallbacksProvider" #include "Include_i.h" ////////////////////////////////////////////////////////// // Public functions ////////////////////////////////////////////////////////// /** * Create the default callbacks provider */ STATUS createDefaultCallbacksProvider(UINT32 callbackChainCount, PCHAR accessKeyId, PCHAR secretKey, PCHAR sessionToken, UINT64 expiration, PCHAR region, PCHAR controlPlaneUrl, PCHAR certPath, PCHAR userAgentPostfix, PCHAR customUserAgent, BOOL cachingEndpoint, UINT64 endpointCachingPeriod, BOOL continuousRetry, PClientCallbacks* ppClientCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = NULL; PAuthCallbacks pAuthCallbacks = NULL; PStreamCallbacks pStreamCallbacks = NULL; CHK_STATUS(createAbstractDefaultCallbacksProvider(callbackChainCount, cachingEndpoint, endpointCachingPeriod, region, controlPlaneUrl, certPath, userAgentPostfix, customUserAgent, ppClientCallbacks)); pCallbacksProvider = (PCallbacksProvider) *ppClientCallbacks; CHK_STATUS(createStaticAuthCallbacks((PClientCallbacks) pCallbacksProvider, accessKeyId, secretKey, sessionToken, expiration, &pAuthCallbacks)); if (continuousRetry) { CHK_STATUS(createContinuousRetryStreamCallbacks((PClientCallbacks) pCallbacksProvider, &pStreamCallbacks)); } CleanUp: if (STATUS_FAILED(retStatus)) { if (pCallbacksProvider != NULL) { freeCallbacksProvider((PClientCallbacks *) &pCallbacksProvider); } if (pAuthCallbacks != NULL) { freeStaticAuthCallbacks(&pAuthCallbacks); } if (pStreamCallbacks != NULL) { freeContinuousRetryStreamCallbacks(&pStreamCallbacks); } pCallbacksProvider = NULL; } // Set the return value if it's not NULL if (ppClientCallbacks != NULL) { *ppClientCallbacks = (PClientCallbacks) pCallbacksProvider; } LEAVES(); return retStatus; } /** * Create the default callbacks provider with AWS credentials and defaults */ STATUS createDefaultCallbacksProviderWithAwsCredentials(PCHAR accessKeyId, PCHAR secretKey, PCHAR sessionToken, UINT64 expiration, PCHAR region, PCHAR caCertPath, PCHAR userAgentPostfix, PCHAR customUserAgent, BOOL cachingEndpoint, PClientCallbacks* ppClientCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = NULL; PAuthCallbacks pAuthCallbacks = NULL; PStreamCallbacks pStreamCallbacks = NULL; CHK_STATUS(createAbstractDefaultCallbacksProvider(DEFAULT_CALLBACK_CHAIN_COUNT, cachingEndpoint, 0, region, EMPTY_STRING, caCertPath, userAgentPostfix, customUserAgent, ppClientCallbacks)); pCallbacksProvider = (PCallbacksProvider) *ppClientCallbacks; CHK_STATUS(createStaticAuthCallbacks((PClientCallbacks) pCallbacksProvider, accessKeyId, secretKey, sessionToken, expiration, &pAuthCallbacks)); CHK_STATUS(createContinuousRetryStreamCallbacks((PClientCallbacks) pCallbacksProvider, &pStreamCallbacks)); CleanUp: if (STATUS_FAILED(retStatus)) { if (pCallbacksProvider != NULL) { freeCallbacksProvider((PClientCallbacks *) &pCallbacksProvider); } if (pAuthCallbacks != NULL) { freeStaticAuthCallbacks(&pAuthCallbacks); } if (pStreamCallbacks != NULL) { freeContinuousRetryStreamCallbacks(&pStreamCallbacks); } pCallbacksProvider = NULL; } // Set the return value if it's not NULL if (ppClientCallbacks != NULL) { *ppClientCallbacks = (PClientCallbacks) pCallbacksProvider; } LEAVES(); return retStatus; } STATUS createDefaultCallbacksProviderWithIotCertificate(PCHAR endpoint, PCHAR iotCertPath, PCHAR privateKeyPath, PCHAR caCertPath, PCHAR roleAlias, PCHAR 
streamName, PCHAR region, PCHAR userAgentPostfix, PCHAR customUserAgent, BOOL cachingEndpoint, PClientCallbacks* ppClientCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = NULL; PAuthCallbacks pAuthCallbacks = NULL; PStreamCallbacks pStreamCallbacks = NULL; CHK_STATUS(createAbstractDefaultCallbacksProvider(DEFAULT_CALLBACK_CHAIN_COUNT, cachingEndpoint, 0, region, EMPTY_STRING, caCertPath, userAgentPostfix, customUserAgent, ppClientCallbacks)); pCallbacksProvider = (PCallbacksProvider) *ppClientCallbacks; CHK_STATUS(createIotAuthCallbacks((PClientCallbacks) pCallbacksProvider, endpoint, iotCertPath, privateKeyPath, caCertPath, roleAlias, streamName, &pAuthCallbacks)); CHK_STATUS(createContinuousRetryStreamCallbacks((PClientCallbacks) pCallbacksProvider, &pStreamCallbacks)); CleanUp: if (STATUS_FAILED(retStatus)) { if (pCallbacksProvider != NULL) { freeCallbacksProvider((PClientCallbacks *) &pCallbacksProvider); } if (pAuthCallbacks != NULL) { freeIotAuthCallbacks(&pAuthCallbacks); } if (pStreamCallbacks != NULL) { freeContinuousRetryStreamCallbacks(&pStreamCallbacks); } pCallbacksProvider = NULL; } // Set the return value if it's not NULL if (ppClientCallbacks != NULL) { *ppClientCallbacks = (PClientCallbacks) pCallbacksProvider; } LEAVES(); return retStatus; } STATUS createDefaultCallbacksProviderWithFileAuth(PCHAR credentialsFilePath, PCHAR region, PCHAR caCertPath, PCHAR userAgentPostfix, PCHAR customUserAgent, BOOL cachingEndpoint, PClientCallbacks* ppClientCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = NULL; PAuthCallbacks pAuthCallbacks = NULL; PStreamCallbacks pStreamCallbacks = NULL; CHK_STATUS(createAbstractDefaultCallbacksProvider(DEFAULT_CALLBACK_CHAIN_COUNT, cachingEndpoint, 0, region, EMPTY_STRING, caCertPath, userAgentPostfix, customUserAgent, ppClientCallbacks)); pCallbacksProvider = (PCallbacksProvider) *ppClientCallbacks; CHK_STATUS(createFileAuthCallbacks((PClientCallbacks) pCallbacksProvider, credentialsFilePath, &pAuthCallbacks)); CHK_STATUS(createContinuousRetryStreamCallbacks((PClientCallbacks) pCallbacksProvider, &pStreamCallbacks)); CleanUp: if (STATUS_FAILED(retStatus)) { if (pCallbacksProvider != NULL) { freeCallbacksProvider((PClientCallbacks *) &pCallbacksProvider); } if (pAuthCallbacks != NULL) { freeIotAuthCallbacks(&pAuthCallbacks); } if (pStreamCallbacks != NULL) { freeContinuousRetryStreamCallbacks(&pStreamCallbacks); } pCallbacksProvider = NULL; } // Set the return value if it's not NULL if (ppClientCallbacks != NULL) { *ppClientCallbacks = (PClientCallbacks) pCallbacksProvider; } LEAVES(); return retStatus; } STATUS createDefaultCallbacksProviderWithAuthCallbacks(PAuthCallbacks pAuthCallbacks, PCHAR region, PCHAR caCertPath, PCHAR userAgentPostfix, PCHAR customUserAgent, BOOL cachingEndpoint, BOOL continuousRetry, UINT64 endpointCachingPeriod, PClientCallbacks* ppClientCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = NULL; PStreamCallbacks pStreamCallbacks = NULL; CHK_STATUS(createAbstractDefaultCallbacksProvider(DEFAULT_CALLBACK_CHAIN_COUNT, cachingEndpoint, endpointCachingPeriod, region, EMPTY_STRING, caCertPath, userAgentPostfix, customUserAgent, ppClientCallbacks)); pCallbacksProvider = (PCallbacksProvider) *ppClientCallbacks; CHK_STATUS(addAuthCallbacks(*ppClientCallbacks, pAuthCallbacks)); if (continuousRetry) { CHK_STATUS(createContinuousRetryStreamCallbacks((PClientCallbacks) pCallbacksProvider, 
&pStreamCallbacks)); } CleanUp: if (STATUS_FAILED(retStatus)) { if (pCallbacksProvider != NULL) { freeCallbacksProvider((PClientCallbacks *) &pCallbacksProvider); } if (pStreamCallbacks != NULL) { freeContinuousRetryStreamCallbacks(&pStreamCallbacks); } pCallbacksProvider = NULL; } // Set the return value if it's not NULL if (ppClientCallbacks != NULL) { *ppClientCallbacks = (PClientCallbacks) pCallbacksProvider; } LEAVES(); return retStatus; } STATUS createAbstractDefaultCallbacksProvider(UINT32 callbackChainCount, BOOL cachingEndpoint, UINT64 endpointCachingPeriod, PCHAR region, PCHAR controlPlaneUrl, PCHAR certPath, PCHAR userAgentName, PCHAR customUserAgent, PClientCallbacks* ppClientCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = NULL; PCurlApiCallbacks pCurlApiCallbacks = NULL; PStreamCallbacks pStreamCallbacks = NULL; UINT32 size; CHK(ppClientCallbacks != NULL, STATUS_NULL_ARG); CHK(callbackChainCount < MAX_CALLBACK_CHAIN_COUNT, STATUS_INVALID_ARG); // Calculate the size size = SIZEOF(CallbacksProvider) + callbackChainCount * (SIZEOF(ProducerCallbacks) + SIZEOF(StreamCallbacks) + SIZEOF(AuthCallbacks) + SIZEOF(ApiCallbacks)); // Allocate the entire structure pCallbacksProvider = (PCallbacksProvider) MEMCALLOC(1, size); CHK(pCallbacksProvider != NULL, STATUS_NOT_ENOUGH_MEMORY); // Set the correct values. // NOTE: Fields are zero-ed by MEMCALLOC pCallbacksProvider->clientCallbacks.version = CALLBACKS_CURRENT_VERSION; pCallbacksProvider->callbackChainCount = callbackChainCount; // Set the custom data field to self pCallbacksProvider->clientCallbacks.customData = (UINT64) pCallbacksProvider; // Set callback chain pointers pCallbacksProvider->pProducerCallbacks = (PProducerCallbacks) (pCallbacksProvider + 1); pCallbacksProvider->pStreamCallbacks = (PStreamCallbacks)(pCallbacksProvider->pProducerCallbacks + callbackChainCount); pCallbacksProvider->pAuthCallbacks = (PAuthCallbacks)(pCallbacksProvider->pStreamCallbacks + callbackChainCount); pCallbacksProvider->pApiCallbacks = (PApiCallbacks)(pCallbacksProvider->pAuthCallbacks + callbackChainCount); // Set the default Platform callbacks CHK_STATUS(setDefaultPlatformCallbacks(pCallbacksProvider)); // Create the default Curl API callbacks CHK_STATUS(createCurlApiCallbacks(pCallbacksProvider, region, cachingEndpoint, endpointCachingPeriod, controlPlaneUrl, certPath, userAgentName, customUserAgent, &pCurlApiCallbacks)); CHK_STATUS(createStreamCallbacks(&pStreamCallbacks)); CHK_STATUS(addStreamCallbacks((PClientCallbacks) pCallbacksProvider, pStreamCallbacks)); CleanUp: if (STATUS_FAILED(retStatus)) { freeCurlApiCallbacks(&pCurlApiCallbacks); freeStreamCallbacks(&pStreamCallbacks); freeCallbacksProvider((PClientCallbacks*) &pCallbacksProvider); pCallbacksProvider = NULL; } // Set the return value if it's not NULL if (ppClientCallbacks != NULL) { *ppClientCallbacks = (PClientCallbacks) pCallbacksProvider; } LEAVES(); return retStatus; } /** * Frees the callbacks provider * * NOTE: The caller should have passed a pointer which was previously created by the corresponding function * NOTE: The call is idempotent */ STATUS freeCallbacksProvider(PClientCallbacks* ppClientCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbackProvider = NULL; UINT32 i; CHK(ppClientCallbacks != NULL, STATUS_NULL_ARG); pCallbackProvider = (PCallbacksProvider) *ppClientCallbacks; // Call is idempotent CHK(pCallbackProvider != NULL, retStatus); // Iterate and free any callbacks if 
(pCallbackProvider->platformCallbacks.freePlatformCallbacksFn != NULL) { pCallbackProvider->platformCallbacks.freePlatformCallbacksFn(&pCallbackProvider->platformCallbacks.customData); } for (i = 0; i < pCallbackProvider->producerCallbacksCount; i++) { if (pCallbackProvider->pProducerCallbacks[i].freeProducerCallbacksFn != NULL) { pCallbackProvider->pProducerCallbacks[i].freeProducerCallbacksFn(&pCallbackProvider->pProducerCallbacks[i].customData); } } for (i = 0; i < pCallbackProvider->streamCallbacksCount; i++) { if (pCallbackProvider->pStreamCallbacks[i].freeStreamCallbacksFn != NULL) { pCallbackProvider->pStreamCallbacks[i].freeStreamCallbacksFn(&pCallbackProvider->pStreamCallbacks[i].customData); } } for (i = 0; i < pCallbackProvider->authCallbacksCount; i++) { if (pCallbackProvider->pAuthCallbacks[i].freeAuthCallbacksFn != NULL) { pCallbackProvider->pAuthCallbacks[i].freeAuthCallbacksFn(&pCallbackProvider->pAuthCallbacks[i].customData); } } for (i = 0; i < pCallbackProvider->apiCallbacksCount; i++) { if (pCallbackProvider->pApiCallbacks[i].freeApiCallbacksFn != NULL) { pCallbackProvider->pApiCallbacks[i].freeApiCallbacksFn(&pCallbackProvider->pApiCallbacks[i].customData); } } // Release the object MEMFREE(pCallbackProvider); // Set the pointer to NULL *ppClientCallbacks = NULL; CleanUp: LEAVES(); return retStatus; } STATUS setDefaultPlatformCallbacks(PCallbacksProvider pClientCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbackProvider = (PCallbacksProvider) pClientCallbacks; CHK(pCallbackProvider != NULL, STATUS_NULL_ARG); // Set the default implementations pCallbackProvider->clientCallbacks.getCurrentTimeFn = kinesisVideoStreamDefaultGetCurrentTime; pCallbackProvider->clientCallbacks.getRandomNumberFn = kinesisVideoStreamDefaultGetRandomNumber; pCallbackProvider->clientCallbacks.createMutexFn = kinesisVideoStreamDefaultCreateMutex; pCallbackProvider->clientCallbacks.lockMutexFn = kinesisVideoStreamDefaultLockMutex; pCallbackProvider->clientCallbacks.unlockMutexFn = kinesisVideoStreamDefaultUnlockMutex; pCallbackProvider->clientCallbacks.tryLockMutexFn = kinesisVideoStreamDefaultTryLockMutex; pCallbackProvider->clientCallbacks.freeMutexFn = kinesisVideoStreamDefaultFreeMutex; pCallbackProvider->clientCallbacks.createConditionVariableFn = kinesisVideoStreamDefaultCreateConditionVariable; pCallbackProvider->clientCallbacks.signalConditionVariableFn = kinesisVideoStreamDefaultSignalConditionVariable; pCallbackProvider->clientCallbacks.broadcastConditionVariableFn = kinesisVideoStreamDefaultBroadcastConditionVariable; pCallbackProvider->clientCallbacks.waitConditionVariableFn = kinesisVideoStreamDefaultWaitConditionVariable; pCallbackProvider->clientCallbacks.freeConditionVariableFn = kinesisVideoStreamDefaultFreeConditionVariable; pCallbackProvider->clientCallbacks.logPrintFn = defaultLogPrint; CleanUp: LEAVES(); return retStatus; } STATUS setPlatformCallbacks(PClientCallbacks pClientCallbacks, PPlatformCallbacks pPlatformCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbackProvider = (PCallbacksProvider) pClientCallbacks; CHK(pCallbackProvider != NULL && pPlatformCallbacks != NULL, STATUS_NULL_ARG); // Validate the version first CHK(pPlatformCallbacks->version <= PLATFORM_CALLBACKS_CURRENT_VERSION, STATUS_INVALID_PLATFORM_CALLBACKS_VERSION); // Struct-copy the values pCallbackProvider->platformCallbacks = *pPlatformCallbacks; // Set the aggregates if (pCallbackProvider->platformCallbacks.getCurrentTimeFn != NULL) { 
pCallbackProvider->clientCallbacks.getCurrentTimeFn = getCurrentTimeAggregate; } if (pCallbackProvider->platformCallbacks.getRandomNumberFn != NULL) { pCallbackProvider->clientCallbacks.getRandomNumberFn = getRandomNumberAggregate; } if (pCallbackProvider->platformCallbacks.createMutexFn != NULL) { pCallbackProvider->clientCallbacks.createMutexFn = createMutexAggregate; } if (pCallbackProvider->platformCallbacks.lockMutexFn != NULL) { pCallbackProvider->clientCallbacks.lockMutexFn = lockMutexAggregate; } if (pCallbackProvider->platformCallbacks.unlockMutexFn != NULL) { pCallbackProvider->clientCallbacks.unlockMutexFn = unlockMutexAggregate; } if (pCallbackProvider->platformCallbacks.tryLockMutexFn != NULL) { pCallbackProvider->clientCallbacks.tryLockMutexFn = tryLockMutexAggregate; } if (pCallbackProvider->platformCallbacks.freeMutexFn != NULL) { pCallbackProvider->clientCallbacks.freeMutexFn = freeMutexAggregate; } if (pCallbackProvider->platformCallbacks.createConditionVariableFn != NULL) { pCallbackProvider->clientCallbacks.createConditionVariableFn = createConditionVariableAggregate; } if (pCallbackProvider->platformCallbacks.signalConditionVariableFn != NULL) { pCallbackProvider->clientCallbacks.signalConditionVariableFn = signalConditionVariableAggregate; } if (pCallbackProvider->platformCallbacks.broadcastConditionVariableFn != NULL) { pCallbackProvider->clientCallbacks.broadcastConditionVariableFn = broadcastConditionVariableAggregate; } if (pCallbackProvider->platformCallbacks.waitConditionVariableFn != NULL) { pCallbackProvider->clientCallbacks.waitConditionVariableFn = waitConditionVariableAggregate; } if (pCallbackProvider->platformCallbacks.freeConditionVariableFn != NULL) { pCallbackProvider->clientCallbacks.freeConditionVariableFn = freeConditionVariableAggregate; } // Special handling for the logging function if (pCallbackProvider->platformCallbacks.logPrintFn != NULL) { pCallbackProvider->clientCallbacks.logPrintFn = pCallbackProvider->platformCallbacks.logPrintFn; } CleanUp: LEAVES(); return retStatus; } STATUS addProducerCallbacks(PClientCallbacks pClientCallbacks, PProducerCallbacks pProducerCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; UINT32 i; PCallbacksProvider pCallbackProvider = (PCallbacksProvider) pClientCallbacks; CHK(pCallbackProvider != NULL && pProducerCallbacks != NULL, STATUS_NULL_ARG); // Validate the version first CHK(pProducerCallbacks->version <= PRODUCER_CALLBACKS_CURRENT_VERSION, STATUS_INVALID_PRODUCER_CALLBACKS_VERSION); // Check if we have place to put it CHK(pCallbackProvider->producerCallbacksCount < pCallbackProvider->callbackChainCount, STATUS_MAX_CALLBACK_CHAIN); // Guard against adding same callbacks multiple times (duplicate) - This prevents freeing memory twice for (i = 0; i < pCallbackProvider->producerCallbacksCount; i++) { CHK(pProducerCallbacks->freeProducerCallbacksFn == NULL || pCallbackProvider->pProducerCallbacks[i].customData != pProducerCallbacks->customData || pCallbackProvider->pProducerCallbacks[i].freeProducerCallbacksFn != pProducerCallbacks->freeProducerCallbacksFn, STATUS_DUPLICATE_PRODUCER_CALLBACK_FREE_FUNC); } // Struct-copy the values and increment the current counter pCallbackProvider->pProducerCallbacks[pCallbackProvider->producerCallbacksCount++] = *pProducerCallbacks; // Set the aggregates if (pProducerCallbacks->storageOverflowPressureFn != NULL) { pCallbackProvider->clientCallbacks.storageOverflowPressureFn = storageOverflowPressureAggregate; } if (pProducerCallbacks->clientReadyFn != NULL) { 
pCallbackProvider->clientCallbacks.clientReadyFn = clientReadyAggregate; } if (pProducerCallbacks->clientShutdownFn != NULL) { pCallbackProvider->clientCallbacks.clientShutdownFn = clientShutdownAggregate; } CleanUp: LEAVES(); return retStatus; } STATUS addStreamCallbacks(PClientCallbacks pClientCallbacks, PStreamCallbacks pStreamCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; UINT32 i; PCallbacksProvider pCallbackProvider = (PCallbacksProvider) pClientCallbacks; CHK(pCallbackProvider != NULL && pStreamCallbacks != NULL, STATUS_NULL_ARG); // Validate the version first CHK(pStreamCallbacks->version <= STREAM_CALLBACKS_CURRENT_VERSION, STATUS_INVALID_STREAM_CALLBACKS_VERSION); // Check if we have place to put it CHK(pCallbackProvider->streamCallbacksCount < pCallbackProvider->callbackChainCount, STATUS_MAX_CALLBACK_CHAIN); // Guard against adding same callbacks multiple times (duplicate) - This prevents freeing memory twice for (i = 0; i < pCallbackProvider->streamCallbacksCount; i++) { CHK(pStreamCallbacks->freeStreamCallbacksFn == NULL || pCallbackProvider->pStreamCallbacks[i].customData != pStreamCallbacks->customData || pCallbackProvider->pStreamCallbacks[i].freeStreamCallbacksFn != pStreamCallbacks->freeStreamCallbacksFn, STATUS_DUPLICATE_STREAM_CALLBACK_FREE_FUNC); } // Struct-copy the values and increment the current counter pCallbackProvider->pStreamCallbacks[pCallbackProvider->streamCallbacksCount++] = *pStreamCallbacks; // Set the aggregates if (pStreamCallbacks->streamUnderflowReportFn != NULL) { pCallbackProvider->clientCallbacks.streamUnderflowReportFn = streamUnderflowReportAggregate; } if (pStreamCallbacks->bufferDurationOverflowPressureFn != NULL) { pCallbackProvider->clientCallbacks.bufferDurationOverflowPressureFn = bufferDurationOverflowPressureAggregate; } if (pStreamCallbacks->streamLatencyPressureFn != NULL) { pCallbackProvider->clientCallbacks.streamLatencyPressureFn = streamLatencyPressureAggregate; } if (pStreamCallbacks->streamConnectionStaleFn != NULL) { pCallbackProvider->clientCallbacks.streamConnectionStaleFn = streamConnectionStaleAggregate; } if (pStreamCallbacks->droppedFrameReportFn != NULL) { pCallbackProvider->clientCallbacks.droppedFrameReportFn = droppedFrameReportAggregate; } if (pStreamCallbacks->droppedFragmentReportFn != NULL) { pCallbackProvider->clientCallbacks.droppedFragmentReportFn = droppedFragmentReportAggregate; } if (pStreamCallbacks->streamErrorReportFn != NULL) { pCallbackProvider->clientCallbacks.streamErrorReportFn = streamErrorReportAggregate; } if (pStreamCallbacks->fragmentAckReceivedFn != NULL) { pCallbackProvider->clientCallbacks.fragmentAckReceivedFn = fragmentAckReceivedAggregate; } if (pStreamCallbacks->streamDataAvailableFn != NULL) { pCallbackProvider->clientCallbacks.streamDataAvailableFn = streamDataAvailableAggregate; } if (pStreamCallbacks->streamReadyFn != NULL) { pCallbackProvider->clientCallbacks.streamReadyFn = streamReadyAggregate; } if (pStreamCallbacks->streamClosedFn != NULL) { pCallbackProvider->clientCallbacks.streamClosedFn = streamClosedAggregate; } if (pStreamCallbacks->streamShutdownFn != NULL) { pCallbackProvider->clientCallbacks.streamShutdownFn = streamShutdownAggregate; } CleanUp: LEAVES(); return retStatus; } STATUS addAuthCallbacks(PClientCallbacks pClientCallbacks, PAuthCallbacks pAuthCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; UINT32 i; PCallbacksProvider pCallbackProvider = (PCallbacksProvider) pClientCallbacks; CHK(pCallbackProvider != NULL && pAuthCallbacks != NULL, 
STATUS_NULL_ARG); // Validate the version first CHK(pAuthCallbacks->version <= AUTH_CALLBACKS_CURRENT_VERSION, STATUS_INVALID_AUTH_CALLBACKS_VERSION); // Check if we have place to put it CHK(pCallbackProvider->authCallbacksCount < pCallbackProvider->callbackChainCount, STATUS_MAX_CALLBACK_CHAIN); // Guard against adding same callbacks multiple times (duplicate) - This prevents freeing memory twice for (i = 0; i < pCallbackProvider->authCallbacksCount; i++) { CHK(pAuthCallbacks->freeAuthCallbacksFn == NULL || pCallbackProvider->pAuthCallbacks[i].customData != pAuthCallbacks->customData || pCallbackProvider->pAuthCallbacks[i].freeAuthCallbacksFn != pAuthCallbacks->freeAuthCallbacksFn, STATUS_DUPLICATE_AUTH_CALLBACK_FREE_FUNC); } // Struct-copy the values and increment the current counter pCallbackProvider->pAuthCallbacks[pCallbackProvider->authCallbacksCount++] = *pAuthCallbacks; // Set the aggregates if (pAuthCallbacks->getSecurityTokenFn != NULL) { pCallbackProvider->clientCallbacks.getSecurityTokenFn = getSecurityTokenAggregate; } if (pAuthCallbacks->getDeviceCertificateFn != NULL) { pCallbackProvider->clientCallbacks.getDeviceCertificateFn = getDeviceCertificateAggregate; } if (pAuthCallbacks->deviceCertToTokenFn != NULL) { pCallbackProvider->clientCallbacks.deviceCertToTokenFn = deviceCertToTokenAggregate; } if (pAuthCallbacks->getDeviceFingerprintFn != NULL) { pCallbackProvider->clientCallbacks.getDeviceFingerprintFn = getDeviceFingerprintAggregate; } if (pAuthCallbacks->getStreamingTokenFn != NULL) { pCallbackProvider->clientCallbacks.getStreamingTokenFn = getStreamingTokenAggregate; } CleanUp: LEAVES(); return retStatus; } STATUS addApiCallbacks(PClientCallbacks pClientCallbacks, PApiCallbacks pApiCallbacks) { ENTERS(); STATUS retStatus = STATUS_SUCCESS; UINT32 i; PCallbacksProvider pCallbackProvider = (PCallbacksProvider) pClientCallbacks; CHK(pCallbackProvider != NULL && pApiCallbacks != NULL, STATUS_NULL_ARG); // Validate the version first CHK(pApiCallbacks->version <= API_CALLBACKS_CURRENT_VERSION, STATUS_INVALID_API_CALLBACKS_VERSION); // Check if we have place to put it CHK(pCallbackProvider->apiCallbacksCount < pCallbackProvider->callbackChainCount, STATUS_MAX_CALLBACK_CHAIN); // Guard against adding same callbacks multiple times (duplicate) - This prevents freeing memory twice for (i = 0; i < pCallbackProvider->apiCallbacksCount; i++) { CHK(pApiCallbacks->freeApiCallbacksFn == NULL || pCallbackProvider->pApiCallbacks[i].customData != pApiCallbacks->customData || pCallbackProvider->pApiCallbacks[i].freeApiCallbacksFn != pApiCallbacks->freeApiCallbacksFn, STATUS_DUPLICATE_API_CALLBACK_FREE_FUNC); } // Struct-copy the values and increment the current counter pCallbackProvider->pApiCallbacks[pCallbackProvider->apiCallbacksCount++] = *pApiCallbacks; // Set the aggregates if (pApiCallbacks->createStreamFn != NULL) { pCallbackProvider->clientCallbacks.createStreamFn = createStreamAggregate; } if (pApiCallbacks->describeStreamFn != NULL) { pCallbackProvider->clientCallbacks.describeStreamFn = describeStreamAggregate; } if (pApiCallbacks->getStreamingEndpointFn != NULL) { pCallbackProvider->clientCallbacks.getStreamingEndpointFn = getStreamingEndpointAggregate; } if (pApiCallbacks->putStreamFn != NULL) { pCallbackProvider->clientCallbacks.putStreamFn = putStreamAggregate; } if (pApiCallbacks->tagResourceFn != NULL) { pCallbackProvider->clientCallbacks.tagResourceFn = tagResourceAggregate; } if (pApiCallbacks->createDeviceFn != NULL) { pCallbackProvider->clientCallbacks.createDeviceFn 
= createDeviceAggregate; } CleanUp: LEAVES(); return retStatus; } /////////////////////////////////////////////////////////////////////////////////////////////////////////// // Auth callback aggregates /////////////////////////////////////////////////////////////////////////////////////////////////////////// STATUS getDeviceCertificateAggregate(UINT64 customData, PBYTE* buffer, PUINT32 size, PUINT64 expiration) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->authCallbacksCount; i++) { if (pCallbacksProvider->pAuthCallbacks[i].getDeviceCertificateFn != NULL) { retStatus = pCallbacksProvider->pAuthCallbacks[i].getDeviceCertificateFn(pCallbacksProvider->pAuthCallbacks[i].customData, buffer, size, expiration); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS getSecurityTokenAggregate(UINT64 customData, PBYTE* buffer, PUINT32 size, PUINT64 expiration) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->authCallbacksCount; i++) { if (pCallbacksProvider->pAuthCallbacks[i].getSecurityTokenFn != NULL) { retStatus = pCallbacksProvider->pAuthCallbacks[i].getSecurityTokenFn(pCallbacksProvider->pAuthCallbacks[i].customData, buffer, size, expiration); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS getDeviceFingerprintAggregate(UINT64 customData, PCHAR* fingerprint) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->authCallbacksCount; i++) { if (pCallbacksProvider->pAuthCallbacks[i].getDeviceFingerprintFn != NULL) { retStatus = pCallbacksProvider->pAuthCallbacks[i].getDeviceFingerprintFn(pCallbacksProvider->pAuthCallbacks[i].customData, fingerprint); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS deviceCertToTokenAggregate(UINT64 customData, PCHAR deviceName, PServiceCallContext pServiceCallContext) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->authCallbacksCount; i++) { if (pCallbacksProvider->pAuthCallbacks[i].deviceCertToTokenFn != NULL) { retStatus = pCallbacksProvider->pAuthCallbacks[i].deviceCertToTokenFn(pCallbacksProvider->pAuthCallbacks[i].customData, deviceName, pServiceCallContext); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS getStreamingTokenAggregate(UINT64 customData, PCHAR streamName, STREAM_ACCESS_MODE accessMode, PServiceCallContext pServiceCallContext) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->authCallbacksCount; i++) { if 
(pCallbacksProvider->pAuthCallbacks[i].getStreamingTokenFn != NULL) { retStatus = pCallbacksProvider->pAuthCallbacks[i].getStreamingTokenFn(pCallbacksProvider->pAuthCallbacks[i].customData, streamName, accessMode, pServiceCallContext); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } /////////////////////////////////////////////////////////////////////////////////////////////////////////// // Producer callback aggregates /////////////////////////////////////////////////////////////////////////////////////////////////////////// STATUS storageOverflowPressureAggregate(UINT64 customData, UINT64 remainingBytes) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->producerCallbacksCount; i++) { if (pCallbacksProvider->pProducerCallbacks[i].storageOverflowPressureFn != NULL) { retStatus = pCallbacksProvider->pProducerCallbacks[i].storageOverflowPressureFn(pCallbacksProvider->pProducerCallbacks[i].customData, remainingBytes); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS clientReadyAggregate(UINT64 customData, CLIENT_HANDLE clientHandle) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->producerCallbacksCount; i++) { if (pCallbacksProvider->pProducerCallbacks[i].clientReadyFn != NULL) { retStatus = pCallbacksProvider->pProducerCallbacks[i].clientReadyFn(pCallbacksProvider->pProducerCallbacks[i].customData, clientHandle); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS clientShutdownAggregate(UINT64 customData, CLIENT_HANDLE clientHandle) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->producerCallbacksCount; i++) { if (pCallbacksProvider->pProducerCallbacks[i].clientShutdownFn != NULL) { retStatus = pCallbacksProvider->pProducerCallbacks[i].clientShutdownFn(pCallbacksProvider->pProducerCallbacks[i].customData, clientHandle); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } /////////////////////////////////////////////////////////////////////////////////////////////////////////// // API callback aggregates /////////////////////////////////////////////////////////////////////////////////////////////////////////// STATUS createStreamAggregate(UINT64 customData, PCHAR deviceName, PCHAR streamName, PCHAR contentType, PCHAR kmsKeyId, UINT64 retentionPeriod, PServiceCallContext pServiceCallContext) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->apiCallbacksCount; i++) { if (pCallbacksProvider->pApiCallbacks[i].createStreamFn != NULL) { retStatus = pCallbacksProvider->pApiCallbacks[i].createStreamFn( pCallbacksProvider->pApiCallbacks[i].customData, deviceName, streamName, 
contentType, kmsKeyId, retentionPeriod, pServiceCallContext); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS describeStreamAggregate(UINT64 customData, PCHAR streamName, PServiceCallContext pServiceCallContext) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->apiCallbacksCount; i++) { if (pCallbacksProvider->pApiCallbacks[i].describeStreamFn!= NULL) { retStatus = pCallbacksProvider->pApiCallbacks[i].describeStreamFn( pCallbacksProvider->pApiCallbacks[i].customData, streamName, pServiceCallContext); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS getStreamingEndpointAggregate(UINT64 customData, PCHAR streamName, PCHAR apiName, PServiceCallContext pServiceCallContext) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->apiCallbacksCount; i++) { if (pCallbacksProvider->pApiCallbacks[i].getStreamingEndpointFn != NULL) { retStatus = pCallbacksProvider->pApiCallbacks[i].getStreamingEndpointFn( pCallbacksProvider->pApiCallbacks[i].customData, streamName, apiName, pServiceCallContext); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS putStreamAggregate(UINT64 customData, PCHAR streamName, PCHAR containerType, UINT64 streamStart, BOOL isAbsolute, BOOL fragmentAcks, PCHAR streamingEndpoint, PServiceCallContext pServiceCallContext) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->apiCallbacksCount; i++) { if (pCallbacksProvider->pApiCallbacks[i].putStreamFn != NULL) { retStatus = pCallbacksProvider->pApiCallbacks[i].putStreamFn( pCallbacksProvider->pApiCallbacks[i].customData, streamName, containerType, streamStart, isAbsolute, fragmentAcks, streamingEndpoint, pServiceCallContext); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS tagResourceAggregate(UINT64 customData, PCHAR resourceArn, UINT32 tagCount, PTag tags, PServiceCallContext pServiceCallContext) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->apiCallbacksCount; i++) { if (pCallbacksProvider->pApiCallbacks[i].tagResourceFn != NULL) { retStatus = pCallbacksProvider->pApiCallbacks[i].tagResourceFn( pCallbacksProvider->pApiCallbacks[i].customData, resourceArn, tagCount, tags, pServiceCallContext); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS createDeviceAggregate(UINT64 customData, PCHAR deviceName, PServiceCallContext pServiceCallContext) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, 
STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->apiCallbacksCount; i++) { if (pCallbacksProvider->pApiCallbacks[i].createDeviceFn != NULL) { retStatus = pCallbacksProvider->pApiCallbacks[i].createDeviceFn( pCallbacksProvider->pApiCallbacks[i].customData, deviceName, pServiceCallContext); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } /////////////////////////////////////////////////////////////////////////////////////////////////////////// // Stream callback aggregates /////////////////////////////////////////////////////////////////////////////////////////////////////////// STATUS streamUnderflowReportAggregate(UINT64 customData, STREAM_HANDLE streamHandle) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].streamUnderflowReportFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].streamUnderflowReportFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS bufferDurationOverflowPressureAggregate(UINT64 customData, STREAM_HANDLE streamHandle, UINT64 remainingDuration) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].bufferDurationOverflowPressureFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].bufferDurationOverflowPressureFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, remainingDuration); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS streamLatencyPressureAggregate(UINT64 customData, STREAM_HANDLE streamHandle, UINT64 bufferDuration) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].streamLatencyPressureFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].streamLatencyPressureFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, bufferDuration); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS streamConnectionStaleAggregate(UINT64 customData, STREAM_HANDLE streamHandle, UINT64 stalenessDuration) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].streamConnectionStaleFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].streamConnectionStaleFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, stalenessDuration); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, 
STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS droppedFrameReportAggregate(UINT64 customData, STREAM_HANDLE streamHandle, UINT64 frameTimestamp) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].droppedFrameReportFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].droppedFrameReportFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, frameTimestamp); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS droppedFragmentReportAggregate(UINT64 customData, STREAM_HANDLE streamHandle, UINT64 fragmentTimestamp) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].droppedFragmentReportFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].droppedFragmentReportFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, fragmentTimestamp); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS streamErrorReportAggregate(UINT64 customData, STREAM_HANDLE streamHandle, UPLOAD_HANDLE uploadHandle, UINT64 errorTimestamp, STATUS errorStatus) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].streamErrorReportFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].streamErrorReportFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, uploadHandle, errorTimestamp, errorStatus); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS fragmentAckReceivedAggregate(UINT64 customData, STREAM_HANDLE streamHandle, UPLOAD_HANDLE uploadHandle, PFragmentAck pFragmentAck) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].fragmentAckReceivedFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].fragmentAckReceivedFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, uploadHandle, pFragmentAck); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS streamDataAvailableAggregate(UINT64 customData, STREAM_HANDLE streamHandle, PCHAR streamName, UPLOAD_HANDLE uploadHandle, UINT64 availableDuration, UINT64 availableSize) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if 
(pCallbacksProvider->pStreamCallbacks[i].streamDataAvailableFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].streamDataAvailableFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, streamName, uploadHandle, availableDuration, availableSize); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS streamReadyAggregate(UINT64 customData, STREAM_HANDLE streamHandle) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].streamReadyFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].streamReadyFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS streamShutdownAggregate(UINT64 customData, STREAM_HANDLE streamHandle, BOOL resetStream) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].streamShutdownFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].streamShutdownFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, resetStream); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } STATUS streamClosedAggregate(UINT64 customData, STREAM_HANDLE streamHandle, UPLOAD_HANDLE uploadHandle) { STATUS retStatus = STATUS_SUCCESS; PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; UINT32 i; CHK(pCallbacksProvider != NULL, STATUS_INVALID_ARG); for (i = 0; i < pCallbacksProvider->streamCallbacksCount; i++) { if (pCallbacksProvider->pStreamCallbacks[i].streamClosedFn != NULL) { retStatus = pCallbacksProvider->pStreamCallbacks[i].streamClosedFn( pCallbacksProvider->pStreamCallbacks[i].customData, streamHandle, uploadHandle); // Break on stop processing CHK(retStatus != STATUS_STOP_CALLBACK_CHAIN, STATUS_SUCCESS); CHK_STATUS(retStatus); } } CleanUp: return retStatus; } /////////////////////////////////////////////////////////////////////////////////////////////////////////// // Platform callback aggregates /////////////////////////////////////////////////////////////////////////////////////////////////////////// UINT64 getCurrentTimeAggregate(UINT64 customData) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.getCurrentTimeFn != NULL, "Called an aggregate for a NULL function"); return pCallbacksProvider->platformCallbacks.getCurrentTimeFn(pCallbacksProvider->platformCallbacks.customData); } UINT32 getRandomNumberAggregate(UINT64 customData) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.getRandomNumberFn != NULL, "Called an aggregate for a NULL function"); return 
pCallbacksProvider->platformCallbacks.getRandomNumberFn(pCallbacksProvider->platformCallbacks.customData); } MUTEX createMutexAggregate(UINT64 customData, BOOL reentrant) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.createMutexFn != NULL, "Called an aggregate for a NULL function"); return pCallbacksProvider->platformCallbacks.createMutexFn(pCallbacksProvider->platformCallbacks.customData, reentrant); } VOID lockMutexAggregate(UINT64 customData, MUTEX mutex) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.lockMutexFn != NULL, "Called an aggregate for a NULL function"); pCallbacksProvider->platformCallbacks.lockMutexFn(pCallbacksProvider->platformCallbacks.customData, mutex); } VOID unlockMutexAggregate(UINT64 customData, MUTEX mutex) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.unlockMutexFn != NULL, "Called an aggregate for a NULL function"); pCallbacksProvider->platformCallbacks.unlockMutexFn(pCallbacksProvider->platformCallbacks.customData, mutex); } BOOL tryLockMutexAggregate(UINT64 customData, MUTEX mutex) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.tryLockMutexFn != NULL, "Called an aggregate for a NULL function"); return pCallbacksProvider->platformCallbacks.tryLockMutexFn(pCallbacksProvider->platformCallbacks.customData, mutex); } VOID freeMutexAggregate(UINT64 customData, MUTEX mutex) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.freeMutexFn != NULL, "Called an aggregate for a NULL function"); pCallbacksProvider->platformCallbacks.freeMutexFn(pCallbacksProvider->platformCallbacks.customData, mutex); } CVAR createConditionVariableAggregate(UINT64 customData) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.createConditionVariableFn != NULL, "Called an aggregate for a NULL function"); return pCallbacksProvider->platformCallbacks.createConditionVariableFn(pCallbacksProvider->platformCallbacks.customData); } STATUS signalConditionVariableAggregate(UINT64 customData, CVAR cvar) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.signalConditionVariableFn != NULL, "Called an aggregate for a NULL function"); return pCallbacksProvider->platformCallbacks.signalConditionVariableFn(pCallbacksProvider->platformCallbacks.customData, cvar); } STATUS broadcastConditionVariableAggregate(UINT64 customData, CVAR cvar) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.broadcastConditionVariableFn != NULL, "Called an aggregate for a NULL function"); return 
pCallbacksProvider->platformCallbacks.broadcastConditionVariableFn(pCallbacksProvider->platformCallbacks.customData, cvar); } STATUS waitConditionVariableAggregate(UINT64 customData, CVAR cvar, MUTEX mutex, UINT64 timeout) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.waitConditionVariableFn != NULL, "Called an aggregate for a NULL function"); return pCallbacksProvider->platformCallbacks.waitConditionVariableFn(pCallbacksProvider->platformCallbacks.customData, cvar, mutex, timeout); } VOID freeConditionVariableAggregate(UINT64 customData, CVAR cvar) { PCallbacksProvider pCallbacksProvider = (PCallbacksProvider) customData; CHECK_EXT(pCallbacksProvider != NULL, "NULL callback provider."); CHECK_EXT(pCallbacksProvider->platformCallbacks.freeConditionVariableFn != NULL, "Called an aggregate for a NULL function"); pCallbacksProvider->platformCallbacks.freeConditionVariableFn(pCallbacksProvider->platformCallbacks.customData, cvar); }
DEFECTS AND DAMAGES OF METAL COLUMNS OF INDUSTRIAL BUILDINGS This article continues the authors' series on the characteristic defects and damage to the building structures of industrial buildings. Earlier articles described defects in ground foundations, damage to foundations, and damage to reinforced concrete columns and facades. This article presents a systematization of defects and damage to metal columns of industrial buildings, based on an analysis of survey results from many facilities. The possible consequences of the development of the identified defects and damage are described, and recommendations are given both for assessing the technical condition of damaged structures and for eliminating the identified defects and damage. Damage unique to the operation of lifting and transport equipment and of road and rail transport is also covered. The article concludes with an analysis of the characteristic zones of defect formation in the metal columns of buildings, intended for the automated system being created for monitoring the state of industrial facilities.
package in.dreamlab.wicm.conf;

import org.apache.giraph.conf.BooleanConfOption;
import org.apache.giraph.conf.IntConfOption;

@SuppressWarnings("rawtypes")
public interface WICMConstants {
    IntConfOption BUFFER_SIZE = new IntConfOption("wicm.localBufferSize", 500,
            "Local Buffer size to use in block based warp");

    IntConfOption MIN_MESSAGES = new IntConfOption("wicm.minMessages", 50,
            "minimum messages to use in block based warp");

    BooleanConfOption ENABLE_BLOCK = new BooleanConfOption("icm.blockWarp", false,
            "Enable Block Based Warp");
}
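A minimal sketch of how these options might be read at runtime. It assumes Giraph's standard ConfOption accessors (the get(Configuration) methods) and a plain Hadoop Configuration; the option keys and defaults come from the interface above, and everything else here is illustrative only.

import org.apache.hadoop.conf.Configuration;
import in.dreamlab.wicm.conf.WICMConstants;

public class WICMConstantsUsage {
    public static void main(String[] args) {
        // Hypothetical configuration; in a real Giraph job this would come from the job context.
        Configuration conf = new Configuration();
        conf.setInt("wicm.localBufferSize", 1000); // override the 500 default

        // Read the options back through the ConfOption helpers (assumed Giraph API).
        int bufferSize = WICMConstants.BUFFER_SIZE.get(conf);     // 1000 (overridden)
        int minMessages = WICMConstants.MIN_MESSAGES.get(conf);   // 50 (default)
        boolean blockWarp = WICMConstants.ENABLE_BLOCK.get(conf); // false (default)

        System.out.printf("bufferSize=%d minMessages=%d blockWarp=%b%n",
                bufferSize, minMessages, blockWarp);
    }
}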
<gh_stars>1-10 #include "ut.h" #include <API.hpp> #include "paths.h" #include <string> #include <fstream> #include <filesystem> #include <experimental/array> #include <chrono> #define make_arr std::experimental::make_array void print(std::string str){ std::cout << str << std::endl; } template<typename T> bool listValuesEqual(std::vector<T> &l1, std::vector<T>& l2) { std::sort(l1.begin(), l1.end()); std::sort(l2.begin(), l2.end()); bool good = l1.size() == l2.size(); for (int i = 0; i < l1.size() && i < l2.size(); ++i) { auto eq = l1[i] == l2[i]; if (!eq) { std::cout << "Not eq" << i << " " << l1[i] << "!=" << l2[i] << std::endl; } good = eq && good; } return good; } bool verbose = true; void test_1_rational_numbers() { if (verbose) { print("Rational numbers tests for 1-0 knapsack1."); } std::vector<double> A = { 0.2, 1.200001, 2.9000001, 3.30000009, 4.3, 5.5, 6.6, 7.7, 8.8, 9.8}; std::sort(A.begin(), A.end(), std::greater()); double s = 10.5; double expectedValue = 10.20000109; std::vector<int> indexes(A.size(), 0); std::iota(indexes.begin(), indexes.end(), 0); auto result = kb_knapsack::knapsack1(s, A, A, indexes); auto opt1 = std::get<0>(result); auto optSize = std::get<1>(result); boost::ut::expect(opt1 == expectedValue) << "Not equal "; boost::ut::expect(optSize <= s) << "Greater than size "; } void testSilvano(std::vector<int> W, std::vector<int> V, std::vector<int> R, int c){ std::vector<int> ws(W); std::vector<int> vs(V); std::vector<int> indexes(W.size(), 0); std::iota(indexes.begin(), indexes.end(), 0); kb_knapsack::tools::sortReverse(ws, vs, indexes); int expectedSV = 0; int expectedSW = 0; int ind = 0; for(int i : R) { if (i == 1) { expectedSV += V[ind]; expectedSW += W[ind]; } ind += 1; } auto result = kb_knapsack::knapsack1(c, ws, vs, indexes); auto opt1 = std::get<0>(result); boost::ut::expect(opt1 == expectedSV) << "Not equal "; } template<typename T, int N, int FIELD> struct fieldFunctor : public std::binary_function<T, std::array<T, N> , T> { T operator()(T total, const std::array<T, N>& elem) const { return total + elem[FIELD]; } }; template<typename T, int N> bool allLessOrEqual(std::array<T, N> &arr1, std::array<T, N> &arr2){ for(int i = 0; i < N; ++i) { if (arr1[i] > arr1[i]) { return false; } } return true; } template<typename T, int N> std::string printArr(std::array<T, N> &arr){ std::string s = "{ "; for(int i = 0; i < N; ++i) { s += arr[i]; } s += " }"; return s; } void test_3_search_index() { if (verbose) { print("test building and using max profit point index"); } std::vector<std::array<int, 2>> mixDimData = { make_arr(2000, 100), make_arr(1976, 100), make_arr(1702, 100), make_arr(1702, 100), make_arr(1638, 100), make_arr(1633, 100), make_arr(1633, 100), make_arr(1144, 100), make_arr(1143, 100), make_arr(1086, 100), make_arr(976, 100), make_arr(822, 100), make_arr(821, 100), make_arr(718, 100), make_arr(702, 100), make_arr(701, 100), make_arr(701, 100), make_arr(640, 100), make_arr(634, 100), make_arr(291, 100), make_arr(291, 100), make_arr(124, 100), make_arr(100, 100), }; std::vector<int> values(mixDimData.size()); for (int i = 0; i < mixDimData.size(); ++i) { values[i] = std::get<1>(mixDimData[i]); } std::vector<int> dimensions(mixDimData.size()); for (int i = 0; i < mixDimData.size(); ++i) { dimensions[i] = std::get<0>(mixDimData[i]); } std::vector<int> indexes(mixDimData.size(), 0); std::iota(indexes.begin(), indexes.end(), 0); auto sumOfAll = std::accumulate(mixDimData.begin(), mixDimData.end(), 0, fieldFunctor<int, 2, 0>()); auto lambda = [](auto a, auto 
b) { return a[0] < b[0]; }; auto m = std::min_element(mixDimData.begin(), mixDimData.end(), lambda); int minItem = m[0][0] - 1; std::vector<int> indexConstraints = {sumOfAll, sumOfAll / 2}; for (auto indexConstr : indexConstraints) { for (auto s = 1; s < 3; ++s) { auto testDescValues = std::vector<int>(values); auto sameProfit = s % 2 == 0; if (not sameProfit) { testDescValues[0] -= 1; } for (auto j = 1; j < 3; ++j) { auto forceUsePareto = j % 2 == 0; auto dims = std::vector<kb_knapsack::w_point_dim1<int, int, 1>>(); for (auto i = 0; i < dimensions.size(); ++i) { dims.emplace_back(kb_knapsack::w_point_dim1<int, int, 1>(dimensions[i])); } kb_knapsack::knapsack_solver<int, int, 1, kb_knapsack::w_point_dim1> binSearchSolver( dims, values,indexes); binSearchSolver.PrepareSearchIndex = true; binSearchSolver.Constraints = kb_knapsack::w_point_dim1<int, int, 1>(indexConstr - 1); binSearchSolver.ForceUsePareto = forceUsePareto; binSearchSolver.EmptyDimension = kb_knapsack::w_point_dim1<int, int, 1>(0); binSearchSolver.EmptyValue = 0; binSearchSolver.MinValue = -999999999; binSearchSolver.Solve(); for (auto constraint = minItem; constraint < indexConstr; constraint = constraint + minItem - 1) { auto constraintPoint = kb_knapsack::w_point_dim1<int, int, 1>(constraint); auto fullResult = kb_knapsack::knapsack1(constraint, dimensions, values, indexes); auto testResult = binSearchSolver.Solve(constraintPoint); auto opt = std::get<0>(fullResult); auto testOpt = std::get<0>(testResult); auto testOptSize = std::get<1>(testResult); auto good = opt == testOpt and testOptSize <= constraint; boost::ut::expect(good) << "test_3_search_index: indexConstr=" << indexConstr << "; constraint=" << constraint << "; forceUsePareto=" << forceUsePareto << "; sameProfit=" << sameProfit << "; expected - optimized: " << opt - testOpt; } } } } } void test_8_T_partition_grouping_operator() { if (verbose){ print("MKS N partition 2d matching results with limits turned off"); } std::vector<std::array<int, 2>> mixDimData = { make_arr(1702, 1), make_arr(1633, 1), make_arr(1438, 1), make_arr(1144, 1), make_arr(1086, 1), make_arr(976, 1), make_arr(821, 1), make_arr(718, 1), make_arr(701, 1), make_arr(634, 1), make_arr(291, 1), make_arr(124, 1), }; std::vector<int> values(mixDimData.size()); for (int i = 0; i < mixDimData.size(); ++i) { values[i] = std::get<0>(mixDimData[i]); } std::vector<int> indexes(mixDimData.size(), 0); std::iota(indexes.begin(), indexes.end(), 0); auto sumOfAll = std::accumulate(mixDimData.begin(), mixDimData.end(), 0, fieldFunctor<int, 2, 0>()); auto lambda = [](auto a, auto b) { return a[0] < b[0]; }; auto m = std::min_element(mixDimData.begin(), mixDimData.end(), lambda); int minItem = m[0][0] - 1; for(auto i = 1; i < 3; ++i){ auto ascOrder = i % 2 == 0; for(auto constraint1 = minItem; constraint1 < sumOfAll; constraint1 += int(minItem / 2)){ for(auto constraint2 = 1; constraint2 < mixDimData.size(); ++constraint2){ std::vector<std::array<int, 2>> testDescDims(mixDimData); std::vector<int> testDescValues(values); std::vector<int> testDescIndex(indexes); if (ascOrder){ std::reverse(testDescDims.begin(), testDescDims.end()); std::reverse(testDescValues.begin(), testDescValues.end()); std::reverse(testDescIndex.begin(), testDescIndex.end()); } std::array<int, 2> constraint = make_arr(constraint1, constraint2); auto noLimResult = kb_knapsack::knapsackN<int, int, 2>(constraint, testDescDims, testDescValues, testDescIndex, true, false); auto testResult = kb_knapsack::knapsackN<int, int, 2>(constraint, 
testDescDims, testDescValues, testDescIndex); auto optValueExpected = std::get<0>(noLimResult); auto optValueTest = std::get<0>(testResult); auto optSizeExpected = std::get<1>(noLimResult); auto optSizeTest = std::get<1>(testResult); auto goodVal = optValueTest >= optValueExpected; auto goodSize = allLessOrEqual<int, 2>(optSizeTest, constraint) && allLessOrEqual<int, 2>(optSizeExpected, constraint); boost::ut::expect(goodVal) << "Not equal val. Expected: " << optValueExpected << ", but was: " << optValueTest << "; at case: ascOrder=" << ascOrder << "; constraint1=" << constraint1 << "; constraint2=" << constraint2 << ";"; boost::ut::expect(goodSize) << "Not equal size. Expected: " << printArr<int, 2>(optSizeExpected) << ", but was: " << printArr<int, 2>(optSizeTest) << "; at case: ascOrder=" << ascOrder << "; constraint1=" << constraint1 << "; constraint2=" << constraint2 <<";"; } } } } void test_8_multidimensional_100() { if (verbose) { print("multidimensional knapsack test"); } auto lConstraint = 20789; auto wConstraint = 23681; auto greedyOptimumValue = 121105212; auto actualOptima = 121147356; auto dimensionMultiplier = 1000; std::vector<std::array<int, 3>> lwData100 = { make_arr(436, 1490, 649640), make_arr(232, 1320, 306240), make_arr(236, 932, 219952), make_arr(822, 638, 524436), make_arr(1004, 1092, 1096368), make_arr(266, 1220, 324520), make_arr(632, 892, 563744), make_arr(1110, 344, 381840), make_arr(598, 532, 318136), make_arr(658, 921, 606018), make_arr(732, 1830, 1339560), make_arr(822, 1740, 1430280), make_arr(932, 1106, 1030792), make_arr(598, 732, 437736), make_arr(568, 1322, 750896), make_arr(1792, 1006, 1802752), make_arr(1248, 746, 931008), make_arr(932, 892, 831344), make_arr(562, 1030, 578860), make_arr(722, 1720, 1241840), make_arr(1526, 1448, 2209648), make_arr(1858, 2644, 4912552), make_arr(1726, 464, 800864), make_arr(928, 1672, 1551616), make_arr(2028, 932, 1890096), make_arr(1028, 1636, 1681808), make_arr(756, 748, 565488), make_arr(926, 916, 848216), make_arr(2006, 564, 1131384), make_arr(1028, 1894, 1947032), make_arr(1376, 1932, 2658432), make_arr(726, 1750, 1270500), make_arr(2098, 946, 1984708), make_arr(1238, 1208, 1495504), make_arr(1026, 768, 787968), make_arr(1734, 932, 1616088), make_arr(994, 2532, 2516808), make_arr(1966, 2422, 4761652), make_arr(2828, 1946, 5503288), make_arr(1536, 1788, 2746368), make_arr(436, 732, 319152), make_arr(732, 822, 601704), make_arr(636, 932, 592752), make_arr(822, 598, 491556), make_arr(1004, 568, 570272), make_arr(464, 794, 368416), make_arr(932, 648, 603936), make_arr(2110, 934, 1970740), make_arr(598, 562, 336076), make_arr(656, 726, 476256), make_arr(926, 3726, 3450276), make_arr(1490, 1830, 2726700), make_arr(1320, 1740, 2296800), make_arr(932, 2100, 1957200), make_arr(636, 732, 465552), make_arr(1094, 1324, 1448456), make_arr(1222, 2408, 2942576), make_arr(894, 748, 668712), make_arr(548, 894, 489912), make_arr(532, 2138, 1137416), make_arr(452, 642, 290184), make_arr(722, 1264, 912608), make_arr(924, 674, 622776), make_arr(824, 632, 520768), make_arr(724, 936, 677664), make_arr(754, 446, 336284), make_arr(922, 316, 291352), make_arr(2002, 892, 1785784), make_arr(576, 1932, 1112832), make_arr(726, 1750, 1270500), make_arr(1974, 944, 1863456), make_arr(1234, 1206, 1488204), make_arr(1224, 766, 937584), make_arr(1734, 932, 1616088), make_arr(994, 2532, 2516808), make_arr(564, 2422, 1366008), make_arr(722, 1944, 1403568), make_arr(1536, 788, 1210368), make_arr(648, 1232, 798336), make_arr(1024, 894, 915456), 
make_arr(236, 248, 58528), make_arr(542, 126, 68292), make_arr(236, 542, 127912), make_arr(128, 128, 16384), make_arr(1026, 2788, 2860488), make_arr(9098, 8726, 79389148), make_arr(5468, 3524, 19269232), make_arr(1264, 4524, 5718336), make_arr(2354, 1298, 3055492), make_arr(1698, 2542, 4316316), make_arr(2542, 5004, 12720168), make_arr(582, 894, 520308), make_arr(566, 894, 506004), make_arr(564, 1022, 576408), make_arr(1254, 2014, 2525556), make_arr(2012, 1254, 2523048), make_arr(1256, 1298, 1630288), make_arr(2350, 2366, 5560100), make_arr(2502, 2502, 6260004), make_arr(1296, 2366, 3066336)}; std::vector<std::array<int, 2>> lwDataPoints(lwData100.size()); std::vector<int> values(lwData100.size()); for (int i = 0; i < lwData100.size(); ++i) { values[i] = std::get<2>(lwData100[i]); } for (int i = 0; i < lwData100.size(); ++i) { lwDataPoints[i] = make_arr(std::get<0>(lwData100[i]), std::get<1>(lwData100[i])); } std::vector<int> indexes(lwData100.size(), 0); std::iota(indexes.begin(), indexes.end(), 0); std::array<int, 2> constraint = make_arr(lConstraint, wConstraint); auto testResult = kb_knapsack::greedyKnapsackN<int, int, 2>(constraint, lwDataPoints, values, indexes); auto greedyResult = std::get<0>(testResult); auto greedySize = std::get<1>(testResult); auto goodVal = greedyResult >= greedyOptimumValue; boost::ut::expect(goodVal) << "Not equal val. Expected: " << greedyOptimumValue << ", but was: " << greedyResult; for (int i = 0; i < 2; ++i) { boost::ut::expect(greedySize[i] <= constraint[i]) << "Result size at " << i << " dimension " << greedySize[i] << " greater than " << constraint[i]; boost::ut::expect( greedySize[i] > 0) << "Result size at " << i << " dimension " << greedySize[i] << " is not greater than 0"; } } void test_6_Silvano_Paolo_1_0_knapsack(){ if (verbose){ print("1-0 knapsack1 solver for <NAME> and <NAME> 1990 tests."); } // page 42. Example 2.3 std::vector<int> V = {50, 50, 64, 46, 50, 5}; std::vector<int> W = {56, 59, 80, 64, 75, 17}; std::vector<int> R = {1, 1, 0, 0, 1, 0}; int c = 190; testSilvano(W, V, R, c); // page 47. 
Example 2.7 V = {70, 20, 39, 37, 7, 5, 10}; W = {31, 10, 20, 19, 4, 3, 6}; R = {1, 0, 0, 1, 0, 0, 0}; c = 50; testSilvano(W, V, R, c); } void test_2_superincreasing() { if (verbose) { print("Superincreasing integer numbers tests."); } std::vector<int> A = {1, 2, 5, 21, 69, 189, 376, 919}; for (int i = 1; i < 3; ++i) { std::vector<int> test(A); if (i % 2 == 1) { std::reverse(test.begin(), test.end()); } int sumA = std::accumulate(test.begin(), test.end(), 0); std::vector<int> indexes(test.size(), 0); std::iota(indexes.begin(), indexes.end(), 0); for(int s = 0; s < sumA; s++) { auto expectedResult = kb_knapsack::knapsack1(s, test, test, indexes, false); auto opt1 = std::get<0>(expectedResult); auto expected = std::get<2>(expectedResult); auto testResult = kb_knapsack::knapsack1(s, test, test, indexes); auto optTest = std::get<0>(expectedResult); auto optValues = std::get<2>(expectedResult); boost::ut::expect(listValuesEqual(expected, optValues)) << "Lists are not equal "; } } } bool startsWith(std::string s, std::string prefix){ if (s.rfind(prefix, 0) == 0) { return true; } return false; } std::vector<std::string> split(const std::string& s, char delimiter) { std::vector<std::string> tokens; std::string token; std::istringstream tokenStream(s); while (std::getline(tokenStream, token, delimiter)) { tokens.emplace_back(token); } return tokens; } void test_8_equal_subset_sum_files(std::filesystem::path testDir) { if (verbose) { print("Run equal-subset-sum knapsack1 for hardinstances_pisinger subset sum test dataset."); } std::vector<std::string> files = {"knapPI_16_20_1000.csv", "knapPI_16_50_1000.csv", "knapPI_16_100_1000.csv", "knapPI_16_200_1000.csv", "knapPI_16_500_1000.csv"}; int fi = 0; bool allGood = true; for (auto f : files) { fi += 1; int caseNumber = 1; std::filesystem::path file(f); std::filesystem::path full_path = testDir / file; std::fstream fin(full_path, std::fstream::in); std::vector<std::string> row; std::string line, word; std::vector<int> testCase; std::vector<int> testExpected; int testKnapsack = 0; int rowToSkip = 0; std::string temp; while (std::getline(fin, temp)) { row.clear(); std::stringstream s(temp); if(temp.empty()){ continue; } while (std::getline(s, word, ',')) { row.emplace_back(word); } if (row[0] == "-----") { std::sort(testCase.begin(), testCase.end()); if (verbose) { std::cout << f << " case " << caseNumber << std::endl; } std::vector<int> indexes(testCase.size(), 0); std::iota(indexes.begin(), indexes.end(), 0); auto testResult = kb_knapsack::knapsack1(testKnapsack, testCase, testCase, indexes, true, true, true); auto optVal = std::get<0>(testResult); auto optItems = std::get<2>(testResult); boost::ut::expect(optVal <= testKnapsack) << " Opt size greater than expected "; auto expSum = std::accumulate(testExpected.begin(), testExpected.end(), 0); auto testSum = std::accumulate(optItems.begin(), optItems.end(), 0); boost::ut::expect(testSum >= expSum) << "File:" << f << ", case: " << caseNumber << ". 
Test values sum less than expected: " << expSum << " but was :" << testSum; allGood = allGood && optVal <= testKnapsack && testSum >= expSum; testCase.clear(); testExpected.clear(); testCase = {}; testExpected = {}; testKnapsack = 0; caseNumber++; continue; } std::string row0 = row[0]; if (startsWith(row0, "knapPI")) { rowToSkip = 6; } if (startsWith(row0, "z ")) { std::string r = split(row[0], ' ')[1]; testKnapsack = stoi(r); } rowToSkip -= 1; if (rowToSkip <= 0) { testCase.emplace_back(stoi(row[1])); if (row[3] == "1") { testExpected.emplace_back(stoi(row[1])); } } } fin.close(); } boost::ut::expect(allGood) << "Some tests failed"; } void test_8_knapsack_1_0_files(std::filesystem::path testDir) { if (verbose) { print("Run 1-0 knapsack1 for hardinstances_pisinger test dataset."); } std::vector<std::string> files = { "knapPI_11_20_1000.csv", "knapPI_11_50_1000.csv", "knapPI_11_100_1000.csv", "knapPI_11_200_1000.csv" }; int fi = 0; bool allGood = true; for (auto f : files) { fi += 1; int caseNumber = 1; std::filesystem::path file(f); std::filesystem::path full_path = testDir / file; std::fstream fin(full_path, std::fstream::in); std::vector<std::string> row; std::string line, word; std::vector<int> testCaseW; std::vector<int> testCaseV; std::vector<int> testExpected; int testKnapsack = 0; int rowToSkip = 0; std::string temp; while (std::getline(fin, temp)) { row.clear(); std::stringstream s(temp); if(temp.empty()){ continue; } while (std::getline(s, word, ',')) { row.emplace_back(word); } if (row[0] == "-----") { std::vector<int> indexes(testCaseW.size(), 0); std::iota(indexes.begin(), indexes.end(), 0); kb_knapsack::tools::sortReverse(testCaseW, testCaseV, indexes); if (verbose) { std::cout << f << " case " << caseNumber << std::endl; } auto testResult = kb_knapsack::knapsack1(testKnapsack, testCaseW, testCaseV, indexes); auto optVal = std::get<0>(testResult); auto optSize = std::get<1>(testResult); auto optItems = std::get<2>(testResult); auto optValues = std::get<3>(testResult); boost::ut::expect(optSize <= testKnapsack) << " Opt size greater than expected "; auto expSum = std::accumulate(testExpected.begin(), testExpected.end(), 0); auto testSum = std::accumulate(optValues.begin(), optValues.end(), 0); boost::ut::expect(testSum >= expSum) << "File:" << f << ", case: " << caseNumber << ". 
Test values sum less than expected: " << expSum << " but was :" << testSum; allGood = allGood && optSize <= testKnapsack && testSum >= expSum; testCaseW.clear(); testCaseV.clear(); testExpected.clear(); testCaseW = {}; testCaseV = {}; testExpected = {}; testKnapsack = 0; caseNumber++; continue; } std::string row0 = row[0]; if (startsWith(row0, "knapPI")) { rowToSkip = 6; } if (startsWith(row0, "c ")) { std::string r = split(row[0], ' ')[1]; testKnapsack = stoi(r); } rowToSkip -= 1; if (rowToSkip <= 0) { testCaseW.emplace_back(stoi(row[2])); testCaseV.emplace_back(stoi(row[1])); if (row[3] == "1") { testExpected.emplace_back(stoi(row[1])); } } } fin.close(); } boost::ut::expect(allGood) << "Some tests failed"; } int main() { auto execDir = MyPaths::getExecutableDir(); std::filesystem::path script_dir (execDir); std::filesystem::path testData_dir ("testData/hardinstances_pisinger"); auto testDir = script_dir.parent_path().parent_path().parent_path().parent_path() / testData_dir; test_8_multidimensional_100(); test_3_search_index(); test_2_superincreasing(); test_1_rational_numbers(); test_6_Silvano_Paolo_1_0_knapsack(); const auto start = std::chrono::high_resolution_clock::now(); test_8_knapsack_1_0_files(testDir); test_8_equal_subset_sum_files(testDir); const auto stop = std::chrono::high_resolution_clock::now(); const auto s = std::chrono::duration_cast<std::chrono::seconds>(stop - start); std::cout << "File tests were finished using " << s.count() << " seconds." << std::endl; test_8_T_partition_grouping_operator(); }
AFL CEO Gillon McLachlan earned $1.74 million in 2016, an increase of $20,000 on his 2015 wage package. The AFL released key details from its 2016 financial accounts on Friday, revealing the League suffered a net loss of $17.8 million last year after grants and distributions. According to the AFL's media release, the loss was in large part due to the early purchase of Etihad Stadium and the launch of the AFL Women's competition, with the League's expenditure increasing by $18.6 million to $186.9 million.

The poor on-field performances of powerhouse clubs Collingwood, Richmond and Essendon last season and their failure to make the finals also hurt the AFL's bottom line. The AFL's 11 executives, including McLachlan, earned a combined $8.08 million last year, up $80,000 on 2015's figure of $7.99 million. Excluding McLachlan's wage, the remaining executives earned a combined $6.34 million, at an average salary of $634,000 a year.

The AFL's figures also revealed:

- The League's revenue increased by $10.9 million to $517 million.
- The AFL's operating surplus before grants and distributions decreased by two per cent to $330.4 million.
- Clubs received $255.9 million compared with $245.2 million in 2015.
- Game development grants totalled $41.6 million.

Commission chairman Mike Fitzpatrick said the AFL had achieved impressive results in each of its key indicators on the health of the game.
//========= Copyright Valve Corporation, All rights reserved. ============// // // Purpose: Player for HL1. // // $NoKeywords: $ //=============================================================================// #ifndef TFC_PLAYER_H #define TFC_PLAYER_H #pragma once #include "player.h" #include "server_class.h" #include "tfc_playeranimstate.h" #include "tfc_shareddefs.h" #include "tfc_player_shared.h" class CTFCPlayer; class CTFGoal; class CTFGoalItem; // Function table for each player state. class CPlayerStateInfo { public: TFCPlayerState m_iPlayerState; const char *m_pStateName; void (CTFCPlayer::*pfnEnterState)(); // Init and deinit the state. void (CTFCPlayer::*pfnLeaveState)(); void (CTFCPlayer::*pfnThink)(); // Called every frame. }; //============================================================================= // >> CounterStrike player //============================================================================= class CTFCPlayer : public CBasePlayer { public: DECLARE_CLASS( CTFCPlayer, CBasePlayer ); DECLARE_SERVERCLASS(); CTFCPlayer(); ~CTFCPlayer(); static CTFCPlayer *CreatePlayer( const char *className, edict_t *ed ); static CTFCPlayer* Instance( int iEnt ); // This passes the event to the client's and server's CPlayerAnimState. void DoAnimationEvent( PlayerAnimEvent_t event ); virtual void PostThink(); virtual void InitialSpawn(); virtual void Spawn(); virtual void Precache(); virtual bool ClientCommand( const CCommand &args ); virtual void ChangeTeam( int iTeamNum ) OVERRIDE; virtual int TakeHealth( float flHealth, int bitsDamageType ); virtual void Event_Killed( const CTakeDamageInfo &info ); void ClientHearVox( const char *pSentence ); void DisplayLocalItemStatus( CTFGoal *pGoal ); public: // Is this entity an ally (on our team)? bool IsAlly( CBaseEntity *pEnt ) const; TFCPlayerState State_Get() const; // Get the current state. void TF_AddFrags( int nFrags ); void ResetMenu(); // On fire.. int GetNumFlames() const; void SetNumFlames( int nFlames ); void ForceRespawn(); void TeamFortress_SetSpeed(); void TeamFortress_CheckClassStats(); void TeamFortress_SetSkin(); void TeamFortress_RemoveLiveGrenades(); void TeamFortress_RemoveRockets(); void TeamFortress_DetpackStop( void ); BOOL TeamFortress_RemoveDetpacks( void ); void RemovePipebombs( void ); void RemoveOwnedEnt( char *pEntName ); // SPY STUFF public: void Spy_RemoveDisguise(); void TeamFortress_SpyCalcName(); void Spy_ResetExternalWeaponModel( void ); // ENGINEER STUFF public: void Engineer_RemoveBuildings(); // Building BOOL is_building; // TRUE for an ENGINEER if they're building something EHANDLE building; // The building the ENGINEER is using float building_wait; // Used to prevent using a building again immediately EHANDLE real_owner; float has_dispenser; // TRUE if engineer has a dispenser float has_sentry; // TRUE if engineer has a sentry float has_entry_teleporter; // TRUE if engineer has an entry teleporter float has_exit_teleporter; // TRUE if engineer has an exit teleporter // DEMO STUFF public: int m_iPipebombCount; public: // Get the class info associated with us. const CTFCPlayerClassInfo* GetClassInfo() const; // Helpers to ease porting... 
int tp_grenades_1() const { return GetClassInfo()->m_iGrenadeType1; } int tp_grenades_2() const { return GetClassInfo()->m_iGrenadeType2; } int no_grenades_1() const { return GetAmmoCount( TFC_AMMO_GRENADES1 ); } int no_grenades_2() const { return GetAmmoCount( TFC_AMMO_GRENADES2 ); } public: CTFCPlayerShared m_Shared; int item_list; // Used to keep track of which goalitems are // affecting the player at any time. // GoalItems use it to keep track of their own // mask to apply to a player's item_list float armortype; //float armorvalue; // Use CBasePlayer::m_ArmorValue. int armorclass; // Type of armor being worn float armor_allowed; float invincible_finished; float invisible_finished; float super_damage_finished; float radsuit_finished; int lives; // The number of lives you have left int is_unableto_spy_or_teleport; BOOL bRemoveGrenade; // removes the primed grenade if set // Replacement_Model Stuff string_t replacement_model; int replacement_model_body; int replacement_model_skin; int replacement_model_flags; // Spy int undercover_team; // The team the Spy is pretending to be in int undercover_skin; // The skin the Spy is pretending to have EHANDLE undercover_target; // The player the Spy is pretending to be BOOL is_feigning; // TRUE for a SPY if they're feigning death float immune_to_check; BOOL is_undercover; // TRUE for a SPY if they're undercover // TEAMFORTRESS VARIABLES int no_sentry_message; int no_dispenser_message; // teleporter variables int no_entry_teleporter_message; int no_exit_teleporter_message; BOOL is_detpacking; // TRUE for a DEMOMAN if they're setting a detpack float current_menu; // is set to the number of the current menu, is 0 if they are not in a menu // State management. private: void State_Transition( TFCPlayerState newState ); void State_Enter( TFCPlayerState newState ); void State_Leave(); CPlayerStateInfo* State_LookupInfo( TFCPlayerState state ); CPlayerStateInfo *m_pCurStateInfo; void State_Enter_WELCOME(); void State_Enter_PICKINGTEAM(); void State_Enter_PICKINGCLASS(); void State_Enter_ACTIVE(); void State_Enter_OBSERVER_MODE(); void State_Enter_DYING(); private: friend void Bot_Think( CTFCPlayer *pBot ); void HandleCommand_JoinTeam( const char *pTeamName ); void HandleCommand_JoinClass( const char *pClassName ); void GiveDefaultItems(); void TFCPlayerThink(); void PhysObjectSleep(); void PhysObjectWake(); void GetIntoGame(); private: // Copyed from EyeAngles() so we can send it to the client. CNetworkQAngle( m_angEyeAngles ); ITFCPlayerAnimState *m_PlayerAnimState; int m_iLegDamage; }; inline CTFCPlayer *ToTFCPlayer( CBaseEntity *pEntity ) { if ( !pEntity || !pEntity->IsPlayer() ) return NULL; #ifdef _DEBUG Assert( dynamic_cast<CTFCPlayer*>( pEntity ) != 0 ); #endif return static_cast< CTFCPlayer* >( pEntity ); } inline const CTFCPlayerClassInfo* CTFCPlayer::GetClassInfo() const { return GetTFCClassInfo( m_Shared.GetPlayerClass() ); } #endif // TFC_PLAYER_H
/** * It is POJO for the SQLiteUriMatcher. It holds mapping data between uri mPath and SQL. */ public class SQLiteMatcherEntry { private static final String TAG = SQLiteMatcherEntry.class.getSimpleName(); protected static final String TYPE_PREFIX = "vnd."; private String mAuthority; private String mPath; private Type mBaseType; private String mSubType; private String mTablesSQL; private String mRawSQL; private SQLBuilderCallback mCallback; /** * Create entry based on Uri mPath. The mPath can contain wildcards (* and #). See {@link android.content.UriMatcher} for details. * BaseType will be guessed based on mPath. SubType will be guessed based on BaseType. * @param authority authority of the provider * @param path Uri mPath */ public SQLiteMatcherEntry(String authority, String path) { this(authority, path, null, null); } /** * Create entry based on Uri mPath. The mPath can contain wildcards (* and #). See {@link android.content.UriMatcher} for details. * @param authority authority of the provider * @param path uri mPath * @param baseType base mime type * @param subType sub mime type */ public SQLiteMatcherEntry(String authority, String path, Type baseType, String subType) { if (TextUtils.isEmpty(authority)) { throw new IllegalArgumentException("authority cannot be empty"); } if (TextUtils.isEmpty(path)) { throw new IllegalArgumentException("path cannot be empty"); } this.mAuthority = authority; this.mPath = path; if (baseType == null) { if (path.endsWith("*") || path.endsWith("#")) { this.mBaseType = Type.ITEM; } else { this.mBaseType = Type.DIR; } } else { this.mBaseType = baseType; } if (TextUtils.isEmpty(subType)) { this.mSubType = makeSubType(); } else { this.mSubType = subType; } } public String getAuthority() { return mAuthority; } public String getPath() { return mPath; } public Type getBaseType() { return mBaseType; } public String getSubType() { return mSubType; } public String getTablesSQL() { return mTablesSQL; } public void setTablesSQL(String tablesSQL) { if (!TextUtils.isEmpty(mRawSQL) || mCallback != null) { throw new IllegalStateException("Only one SQL could be setup: either tablesSQL or rawSQL or callback"); } this.mTablesSQL = tablesSQL; } public String getRawSQL() { return mRawSQL; } public void setRawSQL(String rawSQL) { if (!TextUtils.isEmpty(mTablesSQL) || mCallback != null) { throw new IllegalStateException("Only one SQL could be setup: either tablesSQL or rawSQL or callback"); } this.mRawSQL = rawSQL; } public SQLBuilderCallback getCallback() { return mCallback; } public void setCallback(SQLBuilderCallback callback) { if (!TextUtils.isEmpty(mTablesSQL) || !TextUtils.isEmpty(mRawSQL)) { throw new IllegalStateException("Only one SQL could be setup: either tablesSQL or rawSQL or callback"); } this.mCallback = callback; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SQLiteMatcherEntry that = (SQLiteMatcherEntry) o; if (!mPath.equals(that.mPath)) return false; return true; } @Override public int hashCode() { return mPath.hashCode(); } protected String makeSubType() { String cleanPath = mPath.replaceAll("\\*", ".").replaceAll("#", ".").replaceAll("\\/", "."); return new StringBuilder(TYPE_PREFIX).append(mAuthority).append(".").append(cleanPath).toString(); } /** * Base MIME type. See {@link ContentResolver#CURSOR_ITEM_BASE_TYPE} and {@link ContentResolver#CURSOR_DIR_BASE_TYPE}. 
*/ public enum Type { ITEM, DIR; public String getType() { if (this.equals(ITEM)) { return ContentResolver.CURSOR_ITEM_BASE_TYPE; } else if (this.equals(DIR)) { return ContentResolver.CURSOR_DIR_BASE_TYPE; } return null; } } /** * Callback to the SQL builder. It allows you to build your SQL in flexible way based on uri and other params */ public interface SQLBuilderCallback { public String getRawSQL(Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder); } }
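A small usage sketch for the class above. The authority and path strings are made up for illustration, but the calls (constructor, getBaseType, getSubType, setTablesSQL) are exactly those defined in the class, including the rule that only one of tablesSQL, rawSQL, or callback may be set.

public class MatcherEntryExample {
    public static void main(String[] args) {
        // Hypothetical authority and path, for illustration only.
        SQLiteMatcherEntry entry = new SQLiteMatcherEntry("com.example.provider", "books/#");

        // The path ends with '#', so the base type is guessed as ITEM, and makeSubType()
        // derives the sub type "vnd.com.example.provider.books..".
        System.out.println(entry.getBaseType() + " / " + entry.getSubType());

        // Exactly one SQL source may be configured; calling a second setter afterwards
        // (setRawSQL or setCallback) would throw IllegalStateException.
        entry.setTablesSQL("books LEFT JOIN authors ON books.author_id = authors._id");
    }
}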
CJASN: Turning the Page.

I am privileged to have been given the opportunity to serve as the third Editor-in-Chief for the Clinical Journal of the American Society of Nephrology (CJASN), a leading peer-reviewed journal in the field of nephrology. The journal owes its success to the visionary leadership and tireless efforts of the first two Editors-in-Chief, Bill Bennett and Gary Curhan. The current standing of the journal is even more remarkable if one considers that CJASN started as a bimonthly journal only 11 years ago and became a monthly publication in 2008. CJASN now receives six times as many original research submissions as it did in 2006, over 80% of which are from outside the United States. As a result, the journal is now one of the most selective publications in the field, with an acceptance rate of only 13%. Under Gary's leadership, CJASN has strengthened its reputation as a journal that provides a rapid, fair, transparent, and rigorous review of all submissions, which, in turn, assures that all of the papers published in the journal meet the highest standards of reporting for clinical research in nephrology. The value provided by CJASN has been further enhanced by the introduction of high-quality series, such as those focused on renal physiology, renal immunology, and glomerular diseases for the clinicians. It is not surprising, then, to note that CJASN is the most read journal in nephrology, and the content is now available not only in print form and online but also through mobile applications. In 2015, the CJASN website received 1.3 million unique visits, and the mobile application was accessed over 330,000 times. I know that I speak for Bill Bennett and Gary Curhan in saying that they could not have accomplished as much as they did without the support and hard work of their Deputy Editors (Harold Feldman and Mohammed Sayegh with Bill Bennett and Kirsten Johansen and Paul Palevsky with Gary Curhan), Associate Editors, and Managing Editors (Bonnie O'Brien and Shari Leventhal). My team is fortunate to inherit the journal with strong foundations, and herein, I provide our vision for moving forward.

I am grateful to the group of 17 highly talented individuals who make up the incoming CJASN team. I am delighted to have two nationally recognized physician scientists, Michel Chonchol and Ian de Boer, as Deputy Editors for the journal. Michel Chonchol is Professor of Medicine at the University of Colorado and a highly productive investigator with a broad range of expertise, including mineral metabolism, polycystic kidney disease, CKD, and hypertension. Ian de Boer is Associate Professor of Medicine and Associate Director of the Kidney Research Institute at the University of Washington and an established investigator with particular interest in diabetic kidney disease and vitamin D metabolism. Each of the ten Associate Editors is also a highly accomplished physician scientist, and their cumulative expertise spans virtually the entire breadth of clinical research in nephrology. They include David Charytan, Harvard University; Richard Formica, Yale University; Orlando Gutierrez, University of Alabama; Manjula Kurella-Tamura, Stanford University; Rulan Parekh, University of Toronto; Charles Pusey, Imperial College; Mahboob Rahman, Case Western Reserve; Stephen Seliger, University of Maryland; Eddie Siew, Vanderbilt University; and Christoph Wanner, University of Wurzburg.
We are fortunate to have Ronit Katz and Leila Zelnick, both from the University of Washington, as Statistical Editors to maintain the rigor of the peer review process. The team also includes two Editors-in-Large, Mitchell Rosner at the University of Virginia for Clinical Practice and Barry Straube for Public Policy. They will facilitate CJASN in becoming a platform for ideas on any and every issue that affects the clinical practice of nephrology. We have the support of a large pool of very talented individuals from around the world as members of the Editorial Board, which mirrors the diversity of the membership of the American Society of Nephrology. The smooth operations of the journal are most dependent on the Managing Editor, and we are thankful to have Shari Leventhal continue in that role. Shari has been a part of the Communications Team at the American Society of Nephrology for over a decade and for the last 5 years, has become the public face of CJASN. She has been critical in ensuring a smooth transition, and we anticipate leaning on her considerably for accomplishing our goals for the journal. Our vision for CJASN is for us to cement the journal as the leading voice in communicating and influencing advances in clinical nephrology. To achieve this vision, we have set three goals: (1) rapidly and effectively communicate the most important advances in clinical and translational research in nephrology, including innovations in research methods and care delivery; (2) put these advances in context for future research directions and patient care; and (3) become an important voice on every issue that potentially affects the clinical practice of nephrology, particularly in the United States. Kidney Research Institute and Harborview Medical Center, Division of Nephrology, University of Washington, Seattle, Washington
/**
 * Compress the content of the message
 */
public class LZ4ContentCompressionStrategy implements ContentCompressionStrategy {

    /**
     * Used to get configuration values related to compression and used to compress message content
     */
    LZ4CompressionHelper lz4CompressionHelper;

    public LZ4ContentCompressionStrategy(LZ4CompressionHelper lz4CompressionHelper) {
        this.lz4CompressionHelper = lz4CompressionHelper;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public ContentPartHolder ContentChunkStrategy(AndesMessage message) {
        return compressChunks(message);
    }

    /**
     * Compress the content of the message. Returns content chunk pair which contains compressed andes part list and
     * compressed content length.
     *
     * @param message Original AndesMessage before compress
     * @return Content chunk pair which contains andes message part list and content length.
     */
    ContentPartHolder compressChunks(AndesMessage message) {
        List<AndesMessagePart> partList = message.getContentChunkList();
        AndesMessageMetadata metadata = message.getMetadata();

        int contentLength = metadata.getMessageContentLength();
        int originalContentLength = contentLength;

        if (originalContentLength > lz4CompressionHelper.getContentCompressionThreshold()) {
            // Compress message
            AndesMessagePart compressedMessagePart = lz4CompressionHelper.getCompressedMessage(partList,
                    originalContentLength);

            // Update metadata to indicate the message is a compressed one
            metadata.updateMetadata(true);
            message.setMetadata(metadata);

            contentLength = compressedMessagePart.getDataLength();

            partList.clear();
            partList.add(compressedMessagePart);
        }

        ContentPartHolder partHolder = new ContentPartHolder(partList, contentLength);
        return partHolder;
    }
}
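A brief call-site sketch for the strategy above. It assumes an already-constructed LZ4CompressionHelper and AndesMessage (both come from the surrounding Andes broker code and their construction is not shown here), and it only exercises the methods visible in this class.

// Hypothetical wiring inside the broker; helper and message are assumed to be
// provided by the surrounding Andes code.
ContentPartHolder compress(LZ4CompressionHelper helper, AndesMessage message) {
    ContentCompressionStrategy strategy = new LZ4ContentCompressionStrategy(helper);

    // The strategy replaces the chunk list with a single compressed part only when
    // the original content length exceeds helper.getContentCompressionThreshold();
    // otherwise the original parts and length come back unchanged.
    return strategy.ContentChunkStrategy(message);
}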
/**
 * Returns set based on the given array of {@link MetricTarget}.
 * Set objects are returned from a preliminary warmed up cache, so this method has no memory overhead.
 *
 * @param targets input array
 * @return set containing all items from the input array
 */
public static Set<MetricTarget> asSet(MetricTarget... targets) {
    if (targets.length == 0) {
        return emptySet();
    }
    return BITSET_TO_SET_CACHE.get(bitset(targets));
}
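A hypothetical call site for the method above, assuming it sits in (or is statically imported from) the class that declares asSet. The enum constants used here (JMX, MANAGEMENT_CENTER) are illustrative only; the point is simply that repeated calls with the same targets resolve to the same cached Set rather than allocating a new one.

// Illustrative constants; the actual values depend on the MetricTarget enum definition.
Set<MetricTarget> forJmx = asSet(MetricTarget.JMX);
Set<MetricTarget> forBoth = asSet(MetricTarget.JMX, MetricTarget.MANAGEMENT_CENTER);

// Because the sets come from a pre-warmed cache keyed by bitset, a second call with
// the same targets should return the same cached instance - no extra allocation.
Set<MetricTarget> forJmxAgain = asSet(MetricTarget.JMX);

// The empty case short-circuits to Collections.emptySet().
Set<MetricTarget> none = asSet();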
/**************************************************************************** Author:<NAME> Project: Computer Vision for Interactive Applications Version: 1.1 Date:22/02/14 details: Creation of Gestures class Version: 1.2 Date:26/02/14 details: Creation of Read from XML function Version: 1.3 Date:26/03/14 details: Creation of multiple gesture types Version: 1.4 Date:29/03/14 details: Creation of gesture Log array and functions Version: 1.5 Date:31/03/14 details: Creation of update gesture functions ****************************************************************************/ #include <iostream> #include <vector> #include <string> #include <sstream> #include <fstream> #include "Matrix3D.h" #include "tinyxml.h" #ifndef _GESTURES_H_ #define _GESTURES_H_ using namespace std; using namespace Matrix; enum gesture_type { ONE_HAND, TWO_HAND }; struct gesture { string GestureName; Matrix2D A; Matrix2D centroids; Matrix2D B; Matrix1D Pi; double gestureThreshold; }; class Gestures { public: Gestures(); ~Gestures(); vector<gesture> getGestures(); void setCurrentGesture(gesture); gesture getCurrentGesture(); vector<string> getGestureLog(); void addToGestureLog(string); void setGestureType(gesture_type); void updateGestures(); protected: void initializeGestures(); void read_data_from_XML(string, gesture&); private: vector<gesture> gestures; gesture_type GESTURE_TYPE; vector<string> gestureLog; gesture currentGesture; }; Gestures::Gestures() { GESTURE_TYPE = ONE_HAND; initializeGestures(); } Gestures::~Gestures() { } void Gestures::initializeGestures() { gesture newG; switch(GESTURE_TYPE) { case ONE_HAND: newG.GestureName = "swipe_left"; read_data_from_XML(newG.GestureName, newG); gestures.push_back(newG); newG = gesture(); newG.GestureName = "swipe_right"; read_data_from_XML(newG.GestureName, newG); gestures.push_back(newG); newG = gesture(); newG.GestureName = "wave"; read_data_from_XML(newG.GestureName, newG); gestures.push_back(newG); newG = gesture(); newG.GestureName = "scroll_up"; read_data_from_XML(newG.GestureName, newG); gestures.push_back(newG); break; case TWO_HAND: break; default: break; } //newG = gesture(); //newG.GestureName = "swipe_down"; //read_data_from_XML(newG.GestureName, newG); //gestures.push_back(newG); //newG = gesture(); //newG.GestureName = "swipe_left"; //read_data_from_XML(newG.GestureName, newG); //gestures.push_back(newG); //newG = gesture(); //newG.GestureName = "swipe_right"; //read_data_from_XML(newG.GestureName, newG); //gestures.push_back(newG); //newG = gesture(); //newG.GestureName = "put_down"; //read_data_from_XML(newG.GestureName, newG); //gestures.push_back(newG); } void Gestures::updateGestures() { gestures.clear(); gesture newG; switch(GESTURE_TYPE) { case ONE_HAND: newG.GestureName = "swipe_left"; read_data_from_XML(newG.GestureName, newG); gestures.push_back(newG); newG = gesture(); newG.GestureName = "swipe_right"; read_data_from_XML(newG.GestureName, newG); gestures.push_back(newG); newG = gesture(); newG.GestureName = "wave"; read_data_from_XML(newG.GestureName, newG); gestures.push_back(newG); newG = gesture(); newG.GestureName = "scroll_up"; read_data_from_XML(newG.GestureName, newG); gestures.push_back(newG); break; case TWO_HAND: break; default: break; } //newG = gesture(); //newG.GestureName = "swipe_down"; //read_data_from_XML(newG.GestureName, newG); //gestures.push_back(newG); //newG = gesture(); //newG.GestureName = "swipe_left"; //read_data_from_XML(newG.GestureName, newG); //gestures.push_back(newG); //newG = gesture(); //newG.GestureName = 
"swipe_right"; //read_data_from_XML(newG.GestureName, newG); //gestures.push_back(newG); //newG = gesture(); //newG.GestureName = "put_down"; //read_data_from_XML(newG.GestureName, newG); //gestures.push_back(newG); } vector<gesture> Gestures::getGestures() { return gestures; } void Gestures::setCurrentGesture(gesture gest) { currentGesture = gest; } gesture Gestures::getCurrentGesture() { return currentGesture; } void Gestures::read_data_from_XML(string gesturename, gesture &newGesture) { string filename = "gestures/" + gesturename + ".xml"; stringstream temp; int width, height; TiXmlDocument doc(filename.c_str()); if(!doc.LoadFile()) return; TiXmlHandle hDoc(&doc); TiXmlElement *pElem; TiXmlHandle hRoot(0); TiXmlNode *pNode; //gesture node pElem = hDoc.Child(1).Element(); hRoot = TiXmlHandle(pElem); //threshold pElem = hRoot.FirstChild("Threshold").Element(); double g; temp << pElem->GetText(); temp >> g; newGesture.gestureThreshold = g; //Centroids pElem = hRoot.FirstChild("Centroids").Element(); pElem->QueryIntAttribute("wSize",&width); pElem->QueryIntAttribute("hSize", &height); Matrix2D Readcentroids = Matrix2D(); pNode = hRoot.FirstChild("Centroids").FirstChild().Node(); temp = stringstream(); temp << pNode->Value(); for(int i = 0; i < height; i++) { Matrix1D mat = Matrix1D(width); for(int j = 0; j < width; j++) { string val; double dval; if(!getline(temp,val,',')) break; istringstream ival(val); ival >> dval; mat.data.at(j) = dval; } Readcentroids.data.push_back(mat); } newGesture.centroids = Readcentroids; //A pElem = hRoot.FirstChild("A").Element(); pElem->QueryIntAttribute("wSize",&width); pElem->QueryIntAttribute("hSize", &height); Matrix2D ReadA = Matrix2D(); pNode = hRoot.FirstChild("A").FirstChild().Node(); temp = stringstream(); temp << pNode->Value(); for(int i = 0; i < height; i++) { Matrix1D mat = Matrix1D(width); for(int j = 0; j < width; j++) { string val; double dval; if(!getline(temp,val,',')) break; istringstream ival(val); ival >> dval; mat.data.at(j) = dval; } ReadA.data.push_back(mat); } newGesture.A = ReadA; //B pElem = hRoot.FirstChild("B").Element(); pElem->QueryIntAttribute("wSize",&width); pElem->QueryIntAttribute("hSize", &height); Matrix2D ReadB = Matrix2D(); pNode = hRoot.FirstChild("B").FirstChild().Node(); temp = stringstream(); temp << pNode->Value(); for(int i = 0; i < height; i++) { Matrix1D mat = Matrix1D(width); for(int j = 0; j < width; j++) { string val; double dval; if(!getline(temp,val,',')) break; istringstream ival(val); ival >> dval; mat.data.at(j) = dval; } ReadB.data.push_back(mat); } newGesture.B = ReadB; //A pElem = hRoot.FirstChild("Pi").Element(); int size; pElem->QueryIntAttribute("size",&size); Matrix1D ReadPi = Matrix1D(); pNode = hRoot.FirstChild("Pi").FirstChild().Node(); temp = stringstream(); temp << pNode->Value(); for(int i = 0; i < size; i++) { string val; double dval; if(!getline(temp,val,',')) break; istringstream ival(val); ival >> dval; ReadPi.data.push_back(dval); } newGesture.Pi = ReadPi; } void Gestures::setGestureType(gesture_type type) { GESTURE_TYPE = type; } vector<string> Gestures::getGestureLog() { return gestureLog; } void Gestures::addToGestureLog(string entry) { if(gestureLog.size() >= 5) gestureLog.erase(gestureLog.begin()); gestureLog.push_back(entry); } #endif
Design A Mini Operating System Using Open Source System
Abstract: COSMOS (C# Open Source Managed Operating System) provides a framework and tool-chain to develop an OS purely in managed code (C# and VB.NET). Cosmos supports integration with Visual Studio and is therefore much easier to use. However, the framework is not rich, so a great deal of work is still left to the OS developer (for example, it does not support a mouse, a file system, etc.). In this paper, a microkernel-based OS is developed using the COSMOS framework for x86-based computer systems. The paper demonstrates a device driver for a PS/2 pointing device and a GUI system. Since Cosmos does not provide support for a file system, mouse, or keyboard, we have designed appropriate drivers for the screen, mouse, and keyboard, and have developed event handlers and drivers for each device. In this part of the work, we have also developed drivers that detect the hard disk and RAM, in partial fulfilment of the project. Since the development of general-purpose computers, the software industry has also evolved. However, OS development has not improved as much as other fields of the software world: while those fields enjoy the power of garbage collection, OS developers still do their job in classic languages such as C, C++, and assembly. Although there has been some research work on this problem, none of these attempts was successful. This project is an attempt to develop a mini OS in pure managed code, specifically C# and the .NET Framework 4.0.
# example/scale_example.py
import z5py

from heimdall import view, to_source


# FIXME this fails with out of range
def example():
    path = '/home/pape/Work/data/cremi/example/sampleA.n5'
    with z5py.File(path) as f:
        raw = to_source(f['volumes/raw/s1'], scale=(1, 2, 2))
        seg = f['volumes/segmentation/groundtruth']
        seg.n_threads = 8
        view(raw, seg)


if __name__ == '__main__':
    example()
// removeExtraLocked removes all the requests not in ids. func (r *stmtDiagnosticsRequestRegistry) removeExtraLocked(ids []int) { valid := func(req stmtDiagRequest) bool { for _, id := range ids { if req.id == id { return true } } return false } i := 0 for _, req := range r.mu.requests { if valid(req) { r.mu.requests[i] = req i++ } } r.mu.requests = r.mu.requests[:i] }
import json
import logging

from flask import Flask, Response

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('flask-app')

app = Flask(__name__)


@app.route('/', methods=['GET', 'POST'])
def main_route():
    try:
        payload = {'output': 'Hello World from Flask'}
        return Response(
            response=json.dumps(payload),
            status=200,
            mimetype='application/json'
        )
    except Exception as error:
        logger.error(error)
        return Response(
            response=json.dumps({'error': 'Application error'}),
            status=500,
            mimetype='application/json'
        )


if __name__ == '__main__':
    app.run(host='0.0.0.0')
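A quick way to exercise the route above without starting a server is Flask's built-in test client; the following is a minimal sketch, assuming the file is importable as a module named app (that module name is an assumption, not part of the original file):

# Minimal smoke test for the '/' route, using Flask's test client.
# The module name 'app' is assumed; adjust the import to match the real file name.
from app import app


def test_main_route():
    client = app.test_client()
    resp = client.get('/')
    assert resp.status_code == 200
    assert resp.get_json() == {'output': 'Hello World from Flask'}


if __name__ == '__main__':
    test_main_route()
    print('ok')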
/** * AppointmentChangeRecord class to capture the changes in the status of the appointment */ @Entity public class AppointmentChangeRecord { // external Id of the patient @Field private String externalId; // Appointment id being changed @Field private String appointmentId; // date of the Appointment creation/change @Field private DateTime appointmentDate; // Previous appointment status being changed @Field private AppointmentStatus fromStatus; // Current appointment status to change to @Field private AppointmentStatus toStatus; public AppointmentChangeRecord(String externalId, String appointmentId, DateTime appointmentDate, AppointmentStatus fromStatus, AppointmentStatus toStatus) { this.externalId = externalId; this.appointmentId = appointmentId; this.appointmentDate = appointmentDate; this.fromStatus = fromStatus; this.toStatus = toStatus; } // Getters & Setters public String getExternalId() { return this.externalId; } public void setExternalId(String externalId) { this.externalId = externalId; } public String getAppointmentId() { return this.appointmentId; } public void setAppointmentId(String appointmentId) { this.appointmentId = appointmentId; } public DateTime getAppointmentDate() { return this.appointmentDate; } public void setAppointmentDate(DateTime appointmentDate) { this.appointmentDate = appointmentDate; } public AppointmentStatus getFromStatus() { return this.fromStatus; } public void setFromStatus(AppointmentStatus fromStatus) { this.fromStatus = fromStatus; } public AppointmentStatus getToStatus() { return this.toStatus; } public void setToStatus(AppointmentStatus toStatus) { this.toStatus = toStatus; } }
# backend/Backendapi/l_lib/function.py
def get_reply(code, msg):
    reply = dict()
    reply['error_code'] = code
    reply['msg'] = msg
    return reply
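For clarity, a short usage example of the helper above (the values are illustrative only):

# get_reply simply packs an error code and a message into a dict.
payload = get_reply(404, 'team not found')
print(payload)  # {'error_code': 404, 'msg': 'team not found'}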
/** * SCAN Cache object * * <pre> * SOFTWARE HISTORY * Date Ticket# Engineer Description * ------------ ---------- ----------- -------------------------- * 06/02/2011 dhladky Initial Creation. * * </pre> * * @author dhladky * @version 1.0 */ public class ScanDataCache { /** Singleton instance of this class */ private static ScanDataCache instance = null; /** cache that holds grib records used by SCAN **/ private ModelData md = new ModelData(); /** cache that holds lightning records used by SCAN **/ private LightningData ld = new LightningData(); /** keeper of Sounding info used for scan **/ private HashMap<String, SoundingData> sd = new HashMap<String, SoundingData>(); /** keeper of Radar info used for scan **/ private HashMap<String, RadarData> rd = new HashMap<String, RadarData>(); /** * Get an instance of this singleton. * * @return Instance of this class */ public static ScanDataCache getInstance() { if (instance == null) { instance = new ScanDataCache(); } return instance; } public ModelData getModelData() { return md; } public synchronized RadarData getRadarData(String icao) { RadarData data = null; if (rd.containsKey(icao)) { // System.out.println("Accessing Radar cache:-----------" + icao); data = rd.get(icao); } else { // System.out.println("Creating Radar cache:-----------" + icao); data = new RadarData(); rd.put(icao, data); } return data; } public void setModelData(ModelData md) { this.md = md; } public synchronized void setRadarData(String icao, RadarData data) { rd.put(icao, data); } public LightningData getLigtningData() { return ld; } public void setLigtningData(LightningData ld) { this.ld = ld; } public synchronized void setSoundingData(String icao, SoundingData data) { sd.put(icao, data); } public synchronized SoundingData getSoundingData(String icao) { SoundingData sounding = null; if (sd.containsKey(icao)) { // System.out.println("Accessing Sounding cache..." + icao); sounding = sd.get(icao); } else { // System.out.println("Creating Sounding cache..." + icao); sounding = new SoundingData(); sd.put(icao, sounding); } return sounding; } }
// src/main/java/com/biubiu/Flyweight/FlyweightFactory.java
package com.biubiu.Flyweight;

import java.util.HashMap;
import java.util.Map;

/**
 * Flyweight factory: caches shared string flyweights by key.
 *
 * @author 张海彪
 * @create 2017-10-03 2:53 PM
 */
public class FlyweightFactory {

    private Map<String, Object> strMap = new HashMap<>();

    public IFlyweight getInstance(String str) {
        IFlyweight fly = (IFlyweight) strMap.get(str);
        if (fly == null) {
            fly = new ShareStr(str);
            strMap.put(str, fly);
        }
        return fly;
    }
}
<filename>library/tests/test_edb.c #include <stdlib.h> #include "eaarlio/edb.h" #include "greatest.h" #include "assert_error.h" #include "mock_memory.h" /* Helper to initialize an EDB with data */ TEST init_edb(char *msg, int records, int files, int files_0, int files_1, struct eaarlio_memory *memory, struct eaarlio_edb *edb) { edb->record_count = 5; edb->file_count = 2; if(records) { edb->records = memory->calloc(memory, 5, sizeof(struct eaarlio_memory)); ASSERTm(msg, edb->records); edb->records[0].time_seconds = 1; edb->records[1].time_seconds = 2; edb->records[2].time_seconds = 3; edb->records[3].time_seconds = 4; edb->records[4].time_seconds = 5; } if(files) { edb->files = memory->calloc(memory, 2, sizeof(char *)); ASSERTm(msg, edb->files); edb->files[0] = edb->files[1] = NULL; if(files_0) { edb->files[0] = memory->calloc(memory, 10, sizeof(char)); ASSERTm(msg, edb->files[0]); strcpy(edb->files[0], "file0.tld"); } if(files_1) { edb->files[1] = memory->calloc(memory, 10, sizeof(char)); ASSERTm(msg, edb->files[1]); strcpy(edb->files[1], "file1.tld"); } } PASS(); } /* Checks an EDB to ensure it matches its expected initialization */ TEST check_edb(char *msg, int records, int files, int files_0, int files_1, struct eaarlio_edb *edb) { ASSERTm(msg, edb); ASSERT_EQ_FMTm(msg, 5, edb->record_count, "%d"); ASSERT_EQ_FMTm(msg, 2, edb->file_count, "%d"); if(records) { ASSERTm(msg, edb->records); ASSERT_EQ_FMTm(msg, 1, edb->records[0].time_seconds, "%d"); ASSERT_EQ_FMTm(msg, 2, edb->records[1].time_seconds, "%d"); ASSERT_EQ_FMTm(msg, 3, edb->records[2].time_seconds, "%d"); ASSERT_EQ_FMTm(msg, 4, edb->records[3].time_seconds, "%d"); ASSERT_EQ_FMTm(msg, 5, edb->records[4].time_seconds, "%d"); } else { ASSERT_FALSEm(msg, edb->records); } if(files) { ASSERTm(msg, edb->files); if(files_0) { ASSERTm(msg, edb->files[0]); ASSERT_STRN_EQm(msg, "file0.tld", edb->files[0], 10); } else { ASSERT_FALSEm(msg, edb->files[0]); } if(files_1) { ASSERTm(msg, edb->files[1]); ASSERT_STRN_EQm(msg, "file1.tld", edb->files[1], 10); } else { ASSERT_FALSEm(msg, edb->files[1]); } } else { ASSERT_FALSEm(msg, edb->files); } PASS(); } /******************************************************************************* * eaarlio_edb_free ******************************************************************************* */ TEST test_free_sanity() { eaarlio_edb_free(NULL, NULL); PASS(); } TEST test_free_null_edb() { ASSERT_EAARLIO_ERR(EAARLIO_NULL, eaarlio_edb_free(NULL, NULL)); PASS(); } TEST test_free_mem_invalid(struct eaarlio_memory *memory, struct mock_memory *mock) { struct eaarlio_memory invalid = *memory; struct eaarlio_edb edb = eaarlio_edb_empty(); CHECK_CALL(init_edb("mem_invalid", 1, 1, 1, 1, memory, &edb)); ASSERT_EQ_FMT(4, mock->ptrs_used, "%d"); ASSERT_EQ_FMT(4, mock_memory_count_in_use(mock), "%d"); // Make an invalid memory manager by setting free to null. 
invalid.free = NULL; ASSERT_EAARLIO_ERR( EAARLIO_MEMORY_INVALID, eaarlio_edb_free(&edb, &invalid)); // Everything should be as it was initialized CHECK_CALL(check_edb("mem_invalid", 1, 1, 1, 1, &edb)); // No memory should have been allocated or released ASSERT_EQ_FMT(4, mock->ptrs_used, "%d"); ASSERT_EQ_FMT(4, mock_memory_count_in_use(mock), "%d"); PASS(); } TEST test_free_alloc(char *msg, int records, int files, int files_0, int files_1, struct eaarlio_memory *memory, struct mock_memory *mock) { struct eaarlio_edb edb = eaarlio_edb_empty(); int allocs = records + files + files_0 + files_1; CHECK_CALL(init_edb(msg, records, files, files_0, files_1, memory, &edb)); ASSERT_EQ_FMTm(msg, allocs, mock->ptrs_used, "%d"); ASSERT_EQ_FMTm(msg, allocs, mock_memory_count_in_use(mock), "%d"); ASSERT_EAARLIO_SUCCESSm(msg, eaarlio_edb_free(&edb, memory)); ASSERT_FALSEm(msg, edb.records); ASSERT_FALSEm(msg, edb.files); ASSERT_EQ_FMTm(msg, 5, edb.record_count, "%d"); ASSERT_EQ_FMTm(msg, 2, edb.file_count, "%d"); ASSERT_EQ_FMTm(msg, 0, mock_memory_count_in_use(mock), "%d"); PASS(); } SUITE(suite_free) { struct mock_memory mock; struct eaarlio_memory memory; RUN_TEST(test_free_sanity); RUN_TEST(test_free_null_edb); mock_memory_new(&memory, &mock, 4); RUN_TESTp(test_free_mem_invalid, &memory, &mock); mock_memory_reset(&mock, 0); RUN_TESTp(test_free_alloc, "no alloc", 0, 0, 0, 0, &memory, &mock); mock_memory_reset(&mock, 1); RUN_TESTp(test_free_alloc, "alloc records", 1, 0, 0, 0, &memory, &mock); mock_memory_reset(&mock, 1); RUN_TESTp( test_free_alloc, "alloc files, neither", 0, 1, 0, 0, &memory, &mock); mock_memory_reset(&mock, 2); RUN_TESTp( test_free_alloc, "alloc files, first", 0, 1, 1, 0, &memory, &mock); mock_memory_reset(&mock, 2); RUN_TESTp( test_free_alloc, "alloc files, second", 0, 1, 0, 1, &memory, &mock); mock_memory_reset(&mock, 3); RUN_TESTp(test_free_alloc, "alloc files, both", 0, 1, 1, 1, &memory, &mock); mock_memory_reset(&mock, 4); RUN_TESTp(test_free_alloc, "alloc all", 1, 1, 1, 1, &memory, &mock); mock_memory_destroy(&memory); } /******************************************************************************* * Run the tests ******************************************************************************* */ GREATEST_MAIN_DEFS(); int main(int argc, char **argv) { GREATEST_MAIN_BEGIN(); RUN_SUITE(suite_free); GREATEST_MAIN_END(); }
inp = input()
counts = {}
for ch in inp:
    counts[ch] = counts.get(ch, 0) + 1
    if counts.get("4", 0) + counts.get("7", 0) > 7:
        break
if counts.get("4", 0) + counts.get("7", 0) in [4, 7]:
    print("YES")
else:
    print("NO")
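The snippet above checks whether the count of lucky digits (4s and 7s) in the input is itself a lucky number (4 or 7); the same check written as a small self-contained function, with a couple of illustrative inputs:

def has_lucky_digit_count(s: str) -> bool:
    # Count the lucky digits 4 and 7, then test whether that count is 4 or 7.
    lucky_count = sum(1 for ch in s if ch in "47")
    return lucky_count in (4, 7)


print(has_lucky_digit_count("4477477"))  # True: seven lucky digits
print(has_lucky_digit_count("1234567"))  # False: only two lucky digits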
/** * Created by Guest on 1/25/18. */ public class Sql2oTourDaoTest { private Connection fred; private Sql2oCauseDao causeDao; private Sql2oBusinessDao businessDao; private Sql2oAddressDao addressDao; private Sql2oTourDao tourDao; public Address setupAddress() { return new Address("street", "city", "state", "zip"); } public Address setupAddress1() { return new Address("fdasfda", "afsdsadtg", "gard", "adfg"); } public Cause setupCause() { return new Cause("name", "type", "description", "phone"); } public Cause setupCause1() { return new Cause("indigo", "charity", "we do stufF", "callme"); } public Business setupBusiness() { return new Business("poop", "pee", "12345", "[email protected]"); } public Business setupBusiness1() { return new Business("Evil Corp", "Evil", "666-666-6666", "[email protected]"); } public Tour setupTour() { return new Tour(1, 2);} public Tour setupTour1() { return new Tour (2, 3);} @Before public void setUp() throws Exception { String connectionString = "jdbc:h2:mem:testing;INIT=RUNSCRIPT from 'classpath:db/create.sql'"; Sql2o sql2o = new Sql2o(connectionString, "", ""); causeDao = new Sql2oCauseDao(sql2o); businessDao = new Sql2oBusinessDao(sql2o); addressDao = new Sql2oAddressDao(sql2o); tourDao = new Sql2oTourDao(sql2o); fred = sql2o.open(); } @After public void tearDown() throws Exception { fred.close(); } @Test public void addAdds() throws Exception { Tour tour = setupTour(); int originalId = tour.getId(); tourDao.add(tour); assertNotEquals(originalId, tour.getId()); } @Test public void getAllGetsAll() throws Exception { Tour tour = setupTour(); Tour tour1 = setupTour1(); List<String> addressIds = new ArrayList<>(); addressIds.add("2"); addressIds.add("4"); Tour tour2 = new Tour(1,3, addressIds); tourDao.add(tour); tourDao.add(tour1); tourDao.add(tour2); assertEquals(3, tourDao.getAllTours().size()); } @Test public void findById_returnsCorrectInstance_true() throws Exception { Tour testTour = setupTour(); Tour controlTour = setupTour1(); tourDao.add(testTour); tourDao.add(controlTour); assertEquals(testTour, tourDao.findById(1)); assertNotEquals(controlTour, tourDao.findById(1)); } @Test public void update_does() throws Exception { Tour testTour = setupTour(); Tour controlTour = setupTour1(); tourDao.add(testTour); tourDao.add(controlTour); assertEquals(null, testTour.getWaypoints()); List<String> addressIds = new ArrayList<>(); addressIds.add("2"); addressIds.add("4"); tourDao.update(1, 4, 5, addressIds); int testId = testTour.getId(); Integer expectedStart = 4; Integer expectedEnd = 5; assertEquals(expectedStart, tourDao.findById(testId).getStartPoint()); assertEquals(expectedEnd, tourDao.findById(testId).getEndPoint()); assertEquals(addressIds.toString(), tourDao.findById(testId).getWaypoints()); assertNotEquals(addressIds.toString(), tourDao.findById(2).getWaypoints()); } @Test public void deleteById_deletesCorrectly() throws Exception { Tour testTour = setupTour(); Tour controlTour = setupTour1(); tourDao.add(testTour); tourDao.add(controlTour); assertEquals(null, testTour.getWaypoints()); List<String> addressIds = new ArrayList<>(); addressIds.add("2"); addressIds.add("4"); Tour testTour1 = new Tour(1,3, addressIds); tourDao.add(testTour1); assertEquals(3, tourDao.getAllTours().size()); tourDao.deleteById(1); assertEquals(2, tourDao.getAllTours().size()); assertFalse(tourDao.getAllTours().contains(testTour)); } @Test public void deleteAll_removesAllInstancesOfTour_true() throws Exception { Tour testTour = setupTour(); Tour testTour1 = setupTour1(); 
tourDao.add(testTour); tourDao.add(testTour1); assertEquals(2, tourDao.getAllTours().size()); tourDao.deleteAll(); assertEquals(0, tourDao.getAllTours().size()); } }
import React from 'react'; import { IconAlt } from '@veupathdb/wdk-client/lib/Components'; import { cx } from './Utils'; import { Tooltip } from '@material-ui/core'; interface Props { iconClassName: string; hoverText: string; action: () => void; } export function ActionIconButton(props: Props) { const { action, hoverText, iconClassName } = props; return ( <div className={cx('-ActionIconButton')}> <Tooltip title={hoverText}> <button type="button" className="link" onClick={action}> <IconAlt fa={iconClassName} /> </button> </Tooltip> </div> ); }
Extending the sequences of HLA class I alleles without full‐length genomic coverage using single molecule real‐time DNA sequencing The assignment of an HLA allele name to a sequence requires a comparison between the generated target sequence and a reference sequence on the IPD‐IMGT/HLA database. Absence of a full‐length reference sequence can result in the inability of HLA typing software to accurately compare and assign the sequence. We sequenced the most frequently seen HLA class I alleles on the Anthony Nolan register present in the database with only a partial genomic sequence, with the aim of increasing the number of complete reference sequences. We successfully extended 95 full‐length HLA class I sequences and identified 13 novel variants. Increasing the number of full‐length HLA class I reference sequences in the database has aided accuracy of HLA analysis tools for all histocompatibility and immunogenetics laboratories.
// jgraef/vox-format: conversions between the crate's Vector type and nalgebra's Vector3.
use nalgebra::{
    Scalar,
    Vector3,
};

use crate::types::Vector;

impl<T: Scalar> From<Vector<T>> for Vector3<T> {
    fn from(v: Vector<T>) -> Self {
        <[T; 3]>::from(v).into()
    }
}

impl<T: Scalar> From<Vector3<T>> for Vector<T> {
    fn from(v: Vector3<T>) -> Self {
        Into::<[T; 3]>::into(v).into()
    }
}
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from auto_nag.bzcleaner import BzCleaner


class OneTwoWordSummary(BzCleaner):
    def description(self):
        return "Bugs with only one or two words in the summary"

    def get_bz_params(self, date):
        days_lookup = self.get_config("days_lookup", default=7)
        skiplist = self.get_config("regex_skiplist", [])
        params = {
            "bug_type": "defect",
            "resolution": "---",
            "f1": "days_elapsed",
            "o1": "lessthan",
            "v1": days_lookup,
            "f2": "short_desc",
            "o2": "regexp",
            "v2": "^([a-zA-Z0-9_]+ [a-zA-Z0-9_]+|[a-zA-Z0-9_]+)$",
        }
        if skiplist:
            for i, regex in enumerate(skiplist):
                j = str(i + 3)
                params["f" + j] = "short_desc"
                params["o" + j] = "notregexp"
                params["v" + j] = regex
        return params


if __name__ == "__main__":
    OneTwoWordSummary().run()
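The "v2" value is a Bugzilla-side regular expression; its intent can be sanity-checked locally with Python's re module (this is only an approximation, since Bugzilla evaluates the pattern server-side):

import re

# Same pattern as the "v2" query parameter above: summaries made of exactly
# one or two words built from letters, digits, or underscores.
ONE_TWO_WORDS = re.compile(r"^([a-zA-Z0-9_]+ [a-zA-Z0-9_]+|[a-zA-Z0-9_]+)$")

for summary in ["crash", "crash onload", "crash on load", "crash!"]:
    print(summary, "->", bool(ONE_TWO_WORDS.match(summary)))
# crash         -> True   (one word)
# crash onload  -> True   (two words)
# crash on load -> False  (three words)
# crash!        -> False  ('!' is outside the allowed character class)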
<filename>validator-core/src/main/java/com/github/microtweak/validator/conditional/core/internal/AnnotationHelper.java<gh_stars>1-10 package com.github.microtweak.validator.conditional.core.internal; import java.io.Serializable; import java.lang.annotation.Annotation; import java.lang.annotation.Repeatable; import java.lang.reflect.*; import java.util.HashMap; import java.util.Map; import java.util.Optional; import java.util.stream.Stream; import static org.apache.commons.lang3.ArrayUtils.*; public final class AnnotationHelper { public static Map<String, Object> readAllAtributeExcept(Annotation annotation, String... exceptAttributes) { final Map<String, Object> attributes = new HashMap<>(); for (Method method : annotation.getClass().getDeclaredMethods()) { if (method.getParameterCount() > 0 || contains(exceptAttributes, method.getName())) { continue; } Object value = readAttribute(annotation, method); attributes.put(method.getName(), value); } return attributes; } public static Map<String, Object> readAllAttributes(Annotation annotation) { final Map<String, Object> attributes = new HashMap<>(); for (Method method : annotation.getClass().getDeclaredMethods()) { Object value = readAttribute(annotation, method); attributes.put(method.getName(), value); } return attributes; } public static Object readAttribute(Annotation annotation, Method method) { try { return method.invoke(annotation); } catch (IllegalAccessException | InvocationTargetException e) { throw new IllegalStateException(e); } } public static <R extends Serializable> R readAttribute(Annotation annotation, String attributeName, Class<R> returnType) throws NoSuchMethodException { Method method = annotation.annotationType().getDeclaredMethod(attributeName); Object v = readAttribute(annotation, method); return returnType.cast( v ); } public static <A extends Annotation> A createAnnotation(Class<A> annotationType, Map<String, Object> attributes) { InvocationHandler h = new FakeAnnotationInvocationHandler(annotationType, attributes); return (A) Proxy.newProxyInstance(annotationType.getClassLoader(), toArray(annotationType), h); } public static Annotation[] unwrapRepeatableAnnotations(Annotation annotation) { try { Method attr = annotation.annotationType().getDeclaredMethod("value"); return Optional.of( attr.getReturnType() ) .filter(r -> r.isArray()) .filter(r -> r.getComponentType().isAnnotationPresent(Repeatable.class)) .map(r -> (Annotation[]) readAttribute(annotation, attr)) .orElse( new Annotation[0] ); } catch (NoSuchMethodException e) { return new Annotation[0]; } } public static <A extends Annotation> Annotation[] getAnnotationsWithAnnotation(AnnotatedElement element, Class<A> annotationType) { return Stream.of( element.getAnnotations() ) .flatMap(a -> { Annotation[] repeatables = unwrapRepeatableAnnotations(a); return isNotEmpty(repeatables) ? Stream.of(repeatables) : Stream.of(a); }) .filter(a -> a.annotationType().isAnnotationPresent(annotationType)) .toArray(Annotation[]::new); } }
If I were a teacher…: Concepts of People on the Autism Spectrum About the Education System Assumptions: This article is part of a wider study on adults with autism. The main assumption of the series of studies is concerned with listening to and hearing the voices of those most affected – those with a diagnosis of autism. The material, which is based on the experiences of people with autism, allows us to present views on the role of the teacher – the creator of the educational space. Thus far, we have presented the experiences and comments of teachers and therapists; now we can listen to the voices of not only students and parents, but people with autism. The research problem was to present the educational experiences described by autistic people in a paper on pedeutology. The aim of the study was to investigate the opinions on education held by people with autism. Method. This study employed the netnographic method and involved a group of people with a diagnosis of autism who met online and agreed to participate. Results. Reaching out to a large group made it possible to analyze their statements by ranking and creating meaning bundles. Conclusions. The first conclusion is that there is a difference in the approach to the teaching/learning process. This is due to symptoms that are specific to autism, as well as to the different educational and therapeutic approaches. The second conclusion is related to the use of punishment that, in addition to forbidden forms (such as physical punishment), often evolve into psychological and social punishment, such as isolation, marginalization, or a lack of interest. It seems to be equally important to draw attention to the ineffective attempts to impose recognized normative forms of functioning (often referred to as a violation), as well as treating these people as not fully intellectually capable, which is not a condition (intellectual disability can co-occur in a few cases ).
<filename>testdata/k-and-r/Chapter 4 - Functions and Program Structure/exercises/4-6.c #include <ctype.h> #include <stdio.h> #include <stdlib.h> #define BUFSIZE 100 #define MAXOP 100 #define MAXVAL 100 // maximum depth of val stack #define NUMBER '0' #define VARIABLE 'a' double pop(void); int getch(void); int getop(char[]); void push(double); void ungetch(int); double varval[27]; // list of variable values char buf[BUFSIZE]; // buffer for ungetch int bufp = 0; // next free position in buf int sp = 0; // next free stack position double val[MAXVAL]; // value stack int main() { int type, i; double op2, last; char s[MAXOP]; // initialise 'variable values to zero' array to 0 for (i = 0; i < 27; i++) varval[i] = 0; while ((type = getop(s)) != EOF) { switch (type) { case NUMBER: push(atof(s)); break; case '+': push(pop() + pop ()); break; case '*': push(pop() * pop()); break; case '-': op2 = pop(); push(pop() - op2); break; case '/': op2 = pop(); if (op2 != 0.0) { push(pop() / op2); } else { printf("error: division by zero\n"); } break; case '%': op2 = pop(); push((double) ((int) pop() % (int) op2)); break; case VARIABLE: push(varval[s[0] - 'a']); break; case '=': push(last); break; case '\n': printf("\t%.8g\n", last = pop()); break; default: printf("error: unknown command - %c\n", type); break; } } return 0; } // push: push f onto value stack void push(double f) { if (sp < MAXVAL) { val[sp++] = f; } else { printf("error: stack full, can't push %g\n", f); } } // pop: pop and return top value from stack double pop(void) { if (sp > 0) { return val[--sp]; } else { printf("error: stack empty\n"); return 0.0; } } //getop: get next operator or numeric operand int getop(char s[]) { int i, c; while ((s[0] = c = getch()) == ' ' || c == '\t') ; s[1] = '\0'; i = 0; // collect an integer if (isdigit(c) || c == '.') { // collect integer part while (isdigit(s[++i] = c = getch())) ; if (c == '.') // collect fraction part while (isdigit(s[++i] = c = getch())) ; s[i] = '\0'; if (c != EOF) ungetch(c); return NUMBER; } else if (islower(c)) // collect a lowercase single letter variable { return VARIABLE; } else if (isupper(c)) { s[0] = c - ('A' - 'a'); return VARIABLE; } else { return c; // not a number } } // gets a (possible pushed back) character int getch(void) { return (bufp > 0) ? buf[--bufp] : getchar(); } //pushes a character back onto the input void ungetch(int c) { if (bufp >= BUFSIZE) { printf("ungetch: too many characters"); } else { buf[bufp++] = c; } }
export interface FeatureToggleRules {
  features: Feature[];
}

interface Feature {
  name: string;
  treatments: Treatment[];
}

interface Treatment {
  name: string;
  includedDifferentiators: string[];
}
// NewOptions creates server options. func (c Configuration) NewOptions(iOpts instrument.Options) Options { opts := NewOptions(). SetRetryOptions(c.Retry.NewOptions(iOpts.MetricsScope())). SetInstrumentOptions(iOpts) if c.KeepAliveEnabled != nil { opts = opts.SetTCPConnectionKeepAlive(*c.KeepAliveEnabled) } if c.KeepAlivePeriod != nil { opts = opts.SetTCPConnectionKeepAlivePeriod(*c.KeepAlivePeriod) } return opts }
// user returns the user object from the session. func (a *Auth) user(r *http.Request) *gogithub.User { s, err := a.sessionStore.Get(r, sessionName) if err != nil { logrus.Errorf("Failed getting user: %s", err) return nil } jsonData, ok := s.Values[sessionUserKey].(string) if !ok { logrus.Errorf("Failed converting user key: %s", s.Values[sessionUserKey]) return nil } var u gogithub.User err = json.Unmarshal([]byte(jsonData), &u) if err != nil { logrus.Errorf("Failed marhsalling user data %s: %s", jsonData, err) return nil } return &u }
/* SPROP -- Format the value of a parameter into the output string. */ void sprop (register char *outstr, register struct operand *op) { register int type; char *index(); if (opundef (op)) cl_error (E_IERR, "can not print an undefined operand"); if (opindef (op)) { strcpy (outstr, indefstr); return; } type = op->o_type & OT_BASIC; switch (type) { case OT_BOOL: sprintf (outstr, op->o_val.v_i == NO ? falsestr : truestr); break; case OT_INT: sprintf (outstr, "%d", op->o_val.v_i); break; case OT_REAL: sprintf (outstr, "%g", op->o_val.v_r); if (index (outstr, '.') == NULL) strcat (outstr, "."); break; case OT_STRING: strcpy (outstr, op->o_val.v_s); break; default: ; } }
#!/usr/bin/python3
sep = ", "
for x in range(0, 100):
    if x == 99:
        sep = ""
    print("{:02d}{}".format(x, sep), end='')
print('')
#include <iostream> using lint = long long; void solve() { lint x; std::cin >> x; lint ans = x / 500 * 1000; x %= 500; ans += x / 5 * 5; std::cout << ans << std::endl; } int main() { std::cin.tie(nullptr); std::cout.tie(nullptr); std::ios::sync_with_stdio(false); solve(); return 0; }
// CreateTrieFromProto creates a Sparse Merkle Trie from its corresponding merkle trie. func CreateTrieFromProto(trieObj *protodb.SparseMerkleTrie) *SparseMerkleTrie { trie := &SparseMerkleTrie{ depth: uint(trieObj.Depth), originalItems: trieObj.OriginalItems, } branches := make([][][]byte, len(trieObj.Layers)) for i, layer := range trieObj.Layers { branches[i] = layer.Layer } trie.branches = branches return trie }
<gh_stars>1-10 /* * Software Name : ATK * * Copyright (C) 2007 - 2012 France Télécom * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ------------------------------------------------------------------ * File Name : CommandLineParser.java * * Created : 16/02/2007 * Author(s) : <NAME> */ package com.orange.atk.atkUI.corecli; import java.util.ArrayList; import java.util.Collection; /** * The CommandLineParser parses command line options. * * @author <NAME> * @since JDK5.0 */ public class CommandLineParser { /** * parser comming from registered extensions */ private Collection<ICommandLineParser> cmdLineParsers = new ArrayList<ICommandLineParser>(); private String[] args; public CommandLineParser(String[] arg1) { args = arg1; } /** * Register a new parser. If the given parser is null, nothing is done. * * @param parser * parser to add */ public void registerParser(ICommandLineParser parser) { if (parser != null) cmdLineParsers.add(parser); } /** * Parses the command line arguments * * @args the arguments */ public CommandLine parse() { CommandLine globalOptions = new CommandLine(); // general treatment of options here.. if ((args.length == 0) || ((args.length == 1) && (!args[0].startsWith("-")))) { globalOptions.setMode(CommandLine.Modes.GUI); } for (int i = 0; i < args.length; i++) { String arg = args[i].trim(); if (arg.contains("-help")) { System.out.println(); System.out.println(getSynopsis()); } } // conciders extension's options for (ICommandLineParser p : cmdLineParsers) { CommandLine op = p.parse(args); if (op != null) { globalOptions.agregate(op); } } return globalOptions; } /** * Gets back the synopsis of the command line * * @return */ public String getSynopsis() { StringBuffer synopsis = new StringBuffer(); synopsis.append("Mobile Code Analysis TOols release " + Configuration.getVersion() + " (rev." + Configuration.getRevision() + ") help:\n"); return synopsis.toString(); } }
def find_team(name):
    global session, teams
    try:
        team = teams.get(name, None)
        if team is None:
            team = session.find('teams', name)
            if team is None:
                print("WARNING: team not found: " + name)
                teams[name] = False
            else:
                teams[name] = team
        return teams[name]
    except pdpyras.PDClientError as e:
        handle_exception(e)
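A sketch of how this helper might be wired up; the session and teams globals and handle_exception are referenced but not defined in the snippet, so the setup below is an assumption for illustration only:

import pdpyras

# Hypothetical setup for the globals that find_team relies on.
session = pdpyras.APISession('YOUR_PAGERDUTY_API_TOKEN')
teams = {}


def handle_exception(e):
    print('PagerDuty API error:', e)


team = find_team('Platform')
if team:
    print(team['id'], team['summary'])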
def verify_and_renew_license(driver, license_num, valid_from_date, valid_to_date):
    verify_driver_from_license_number(driver, license_num)
    renew_drivers_license(driver, valid_from_date, valid_to_date, license_num)
/// Constructs a new, empty `BinaryHeap<T>`, with a custom comparator function, /// this can be used to create a priority queue or other bespoke sorting logic /// /// # Examples /// ```rust /// use data_structures::binary_heap::{BinaryHeap, BinaryHeapType}; /// /// let cmp = |a: &i32, b: &i32| a.cmp(b); /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_custom_comparator_fn(BinaryHeapType::Min, &cmp); /// /// assert_eq!(heap.len(), 0); /// assert!(heap.is_empty()); /// ``` pub fn with_custom_comparator_fn<F: 'a>(heap_type: BinaryHeapType, cmp: &'a F) -> Self where F: Fn(&T, &T) -> Ordering, { Self { heap: Vec::new(), cmp: Box::new(cmp), heap_type, comparator: match heap_type { BinaryHeapType::Max => Ordering::Greater, BinaryHeapType::Min => Ordering::Less, }, } }
// // Created by conor on 31/03/18. // #include "Train.h"
//! Automatically delete a file when dropped use std::path::{Path, PathBuf}; use std::fs::remove_file; /// Removes a path when dropped pub struct RemoveOnDrop(PathBuf); impl RemoveOnDrop { pub fn new<P: AsRef<Path>>(path: P) -> RemoveOnDrop { RemoveOnDrop(path.as_ref().to_path_buf()) } } impl Drop for RemoveOnDrop { fn drop(&mut self) { println!("removing {}", self.0.display()); remove_file(&self.0).ok(); // (ignores errors) } }
<gh_stars>1000+ // License: Apache 2.0. See LICENSE file in root directory. // Copyright(c) 2019 Intel Corporation. All Rights Reserved. #include "fw-logs-xml-helper.h" #include <string.h> #include <fstream> #include <iostream> #include <memory> using namespace std; namespace fw_logger { fw_logs_xml_helper::fw_logs_xml_helper(string xml_full_file_path) : _init_done(false), _xml_full_file_path(xml_full_file_path) {} fw_logs_xml_helper::~fw_logs_xml_helper(void) { // TODO: Add cleanup code } bool fw_logs_xml_helper::get_root_node(xml_node<> **node) { if (_init_done) { *node = _xml_doc.first_node(); return true; } return false; } bool fw_logs_xml_helper::try_load_external_xml() { try { if (_xml_full_file_path.empty()) return false; rapidxml::file<> xml_file(_xml_full_file_path.c_str()); _document_buffer.resize(xml_file.size() + 2); memcpy(_document_buffer.data(), xml_file.data(), xml_file.size()); _document_buffer[xml_file.size()] = '\0'; _document_buffer[xml_file.size() + 1] = '\0'; _xml_doc.parse<0>(_document_buffer.data()); return true; } catch (...) { _document_buffer.clear(); throw; } return false; } bool fw_logs_xml_helper::init() { _init_done = try_load_external_xml(); return _init_done; } bool fw_logs_xml_helper::build_log_meta_data(fw_logs_formating_options* log_meta_data) { xml_node<> *xml_root_node_list; if (!init()) return false; if (!get_root_node(&xml_root_node_list)) { return false; } string root_name(xml_root_node_list->name(), xml_root_node_list->name() + xml_root_node_list->name_size()); // check if Format is the first root name. if (root_name.compare("Format") != 0) return false; xml_node<>* events_node = xml_root_node_list->first_node(); if (!build_meta_data_structure(events_node, log_meta_data)) return false; return true; } bool fw_logs_xml_helper::build_meta_data_structure(xml_node<> *xml_node_list_of_events, fw_logs_formating_options* logs_formating_options) { node_type res = none; int id{}; int num_of_params{}; string line; // loop through all elements in the Format. 
for (xml_node<>* node = xml_node_list_of_events; node; node = node->next_sibling()) { line.clear(); res = get_next_node(node, &id, &num_of_params, &line); if (res == event) { fw_log_event log_event(num_of_params, line); logs_formating_options->_fw_logs_event_list.insert(pair<int, fw_log_event>(id, log_event)); } else if (res == file) { logs_formating_options->_fw_logs_file_names_list.insert(kvp(id, line)); } else if (res == thread) { logs_formating_options->_fw_logs_thread_names_list.insert(kvp(id, line)); } else if (res == enums) { for (xml_node<>* enum_node = node->first_node(); enum_node; enum_node = enum_node->next_sibling()) { for (xml_attribute<>* attribute = enum_node->first_attribute(); attribute; attribute = attribute->next_attribute()) { string attr(attribute->name(), attribute->name() + attribute->name_size()); if (attr.compare("Name") == 0) { string name_attr_str(attribute->value(), attribute->value() + attribute->value_size()); vector<kvp> xml_kvp; for (xml_node<>* enum_value_node = enum_node->first_node(); enum_value_node; enum_value_node = enum_value_node->next_sibling()) { int key = 0; string value_str; for (xml_attribute<>* attribute = enum_value_node->first_attribute(); attribute; attribute = attribute->next_attribute()) { string attr(attribute->name(), attribute->name() + attribute->name_size()); if (attr.compare("Value") == 0) { value_str = std::string(attribute->value(), attribute->value() + attribute->value_size()); } if (attr.compare("Key") == 0) { try { auto key_str = std::string(attribute->value()); key = std::stoi(key_str, nullptr); } catch (...) {} } } xml_kvp.push_back(std::make_pair(key, value_str)); } logs_formating_options->_fw_logs_enum_names_list.insert(pair<string, vector<kvp>>(name_attr_str, xml_kvp)); } } } } else return false; } return true; } fw_logs_xml_helper::node_type fw_logs_xml_helper::get_next_node(xml_node<> *node, int* id, int* num_of_params, string* line) { string tag(node->name(), node->name() + node->name_size()); if (tag.compare("Event") == 0) { if (get_event_node(node, id, num_of_params, line)) return event; } else if (tag.compare("File") == 0) { if (get_file_node(node, id, line)) return file; } else if (tag.compare("Thread") == 0) { if (get_thread_node(node, id, line)) return thread; } else if (tag.compare("Enums") == 0) { return enums; } return none; } bool fw_logs_xml_helper::get_enum_name_node(xml_node<>* node_file, int* thread_id, string* enum_name) { for (xml_attribute<>* attribute = node_file->first_attribute(); attribute; attribute = attribute->next_attribute()) { string attr(attribute->name(), attribute->name() + attribute->name_size()); if (attr.compare("Name") == 0) { string name_attr_str(attribute->value(), attribute->value() + attribute->value_size()); *enum_name = name_attr_str; continue; } else return false; } return true; } bool fw_logs_xml_helper::get_enum_value_node(xml_node<>* node_file, int* thread_id, string* enum_name) { for (xml_attribute<>* attribute = node_file->first_attribute(); attribute; attribute = attribute->next_attribute()) { string attr(attribute->name(), attribute->name() + attribute->name_size()); if (attr.compare("Value") == 0) { string name_attr_str(attribute->value(), attribute->value() + attribute->value_size()); *enum_name = name_attr_str; continue; } else return false; } return true; } bool fw_logs_xml_helper::get_thread_node(xml_node<>* node_file, int* thread_id, string* thread_name) { for (xml_attribute<>* attribute = node_file->first_attribute(); attribute; attribute = 
attribute->next_attribute()) { string attr(attribute->name(), attribute->name() + attribute->name_size()); if (attr.compare("id") == 0) { string id_attr_str(attribute->value(), attribute->value() + attribute->value_size()); *thread_id = stoi(id_attr_str); continue; } else if (attr.compare("Name") == 0) { string name_attr_str(attribute->value(), attribute->value() + attribute->value_size()); *thread_name = name_attr_str; continue; } else return false; } return true; } bool fw_logs_xml_helper::get_file_node(xml_node<>* node_file, int* file_id, string* file_name) { for (xml_attribute<>* attribute = node_file->first_attribute(); attribute; attribute = attribute->next_attribute()) { string attr(attribute->name(), attribute->name() + attribute->name_size()); if (attr.compare("id") == 0) { string id_attr_str(attribute->value(), attribute->value() + attribute->value_size()); *file_id = stoi(id_attr_str); continue; } else if (attr.compare("Name") == 0) { string name_attr_str(attribute->value(), attribute->value() + attribute->value_size()); *file_name = name_attr_str; continue; } else return false; } return true; } bool fw_logs_xml_helper::get_event_node(xml_node<>* node_event, int* event_id, int* num_of_params, string* line) { for (xml_attribute<>* attribute = node_event->first_attribute(); attribute; attribute = attribute->next_attribute()) { string attr(attribute->name(), attribute->name() + attribute->name_size()); if (attr.compare("id") == 0) { string id_attr_str(attribute->value(), attribute->value() + attribute->value_size()); *event_id = stoi(id_attr_str); continue; } else if (attr.compare("numberOfArguments") == 0) { string num_of_args_attr_str(attribute->value(), attribute->value() + attribute->value_size()); *num_of_params = stoi(num_of_args_attr_str); continue; } else if (attr.compare("format") == 0) { string format_attr_str(attribute->value(), attribute->value() + attribute->value_size()); *line = format_attr_str; continue; } else return false; } return true; } }
// Validate that if a message that creates an FT is received the behavior is correct class TestStateMachine_NullFT { public: template<MessageType::Enum T> void TestHelperForFMMessage( wstring const & ftShortName, wstring const & message, wstring const & ftExpected) { TestHelper<T>(ftShortName, message, ftExpected, true); } template<MessageType::Enum T> void TestHelperForRAMessage( wstring const & ftShortName, wstring const & message, wstring const & ftExpected) { TestHelper<T>(ftShortName, message, ftExpected, false); } template<MessageType::Enum T> void TestHelper( wstring const & ftShortName, wstring const & message, wstring const & ftExpected, bool isFM) { ScenarioTestHolder holder; ScenarioTest & scenarioTest = holder.ScenarioTestObj; if (isFM) { scenarioTest.ProcessFMMessageAndDrain<T>(ftShortName, message); } else { scenarioTest.ProcessRemoteRAMessageAndDrain<T>(2, ftShortName, message); } if (!ftExpected.empty()) { scenarioTest.ValidateFT(ftShortName, ftExpected); } else { scenarioTest.ValidateFTIsNull(ftShortName); } } }
Dietary fish-oil supplementation in experimental gram-negative infection and in cerebral malaria in mice. Dietary fish-oil supplementation interferes with eicosanoid production and appears to decrease production of interleukin-1 (IL-1) and tumor necrosis factor (TNF). The effect of fish oil was investigated in an intramuscular Klebsiella pneumoniae infection in Swiss mice and in cerebral malaria induced by Plasmodium berghei in C57B1/6 mice. After a low inoculum of K. pneumoniae, 90% of fish oil-fed mice survived; survival in control mice fed equal amounts of corn or palm oil or normal chow was 30%, 40%, and 0, respectively. Cerebral malaria occurred in only 23% of fish oil-fed mice; in the controls, cerebral malaria developed in 61%, 81%, and 78%, respectively. Contrary to what was expected, lipopolysaccharide-induced ex vivo production of IL-1 alpha and TNF alpha by peritoneal cells was significantly enhanced in fish oil-fed mice compared with controls. Indomethacin treatment did not alter the outcome in these two infections, thus arguing against reduced prostaglandin synthesis as an explanation for the increase in resistance to infection.
/** * @author Ryan Heaton * @author Dave Syer */ public class BaseOAuth2ProtectedResourceDetails implements OAuth2ProtectedResourceDetails { private String id; private String grantType = "unsupported"; private String clientId; private String accessTokenUri; private List<String> scope; private String clientSecret; private AuthenticationScheme clientAuthenticationScheme = AuthenticationScheme.header; private AuthenticationScheme authorizationScheme = AuthenticationScheme.header; private String tokenName = OAuth2AccessToken.ACCESS_TOKEN; public String getId() { return id; } public void setId(String id) { this.id = id; } public String getClientId() { return clientId; } public void setClientId(String clientId) { this.clientId = clientId; } public String getAccessTokenUri() { return accessTokenUri; } public void setAccessTokenUri(String accessTokenUri) { this.accessTokenUri = accessTokenUri; } public boolean isScoped() { return scope != null && !scope.isEmpty(); } public List<String> getScope() { return scope; } public void setScope(List<String> scope) { this.scope = scope; } public boolean isAuthenticationRequired() { return StringUtils.hasText(clientId) && clientAuthenticationScheme != AuthenticationScheme.none; } public String getClientSecret() { return clientSecret; } public void setClientSecret(String clientSecret) { this.clientSecret = clientSecret; } public AuthenticationScheme getClientAuthenticationScheme() { return clientAuthenticationScheme; } public void setClientAuthenticationScheme(AuthenticationScheme clientAuthenticationScheme) { this.clientAuthenticationScheme = clientAuthenticationScheme; } public boolean isClientOnly() { return false; } public AuthenticationScheme getAuthenticationScheme() { return authorizationScheme; } public void setAuthenticationScheme(AuthenticationScheme authorizationScheme) { this.authorizationScheme = authorizationScheme; } public String getTokenName() { return tokenName; } public void setTokenName(String tokenName) { this.tokenName = tokenName; } public String getGrantType() { return grantType; } public void setGrantType(String grantType) { this.grantType = grantType; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof BaseOAuth2ProtectedResourceDetails)) { return false; } BaseOAuth2ProtectedResourceDetails that = (BaseOAuth2ProtectedResourceDetails) o; return !(id != null ? !id.equals(that.id) : that.id != null); } @Override public int hashCode() { return id != null ? id.hashCode() : 0; } }
package ca.mcgill.ecse211.lab3; import static ca.mcgill.ecse211.lab3.Resources.*; import static java.lang.Math.*; public class Navigation extends Thread { /** * Navigation class for EV3 * Allows robot to navigate from waypoint to waypoint */ /* * Maps for navigation */ private int[][] waypoints = {{2,1},{1,1},{1,2},{2,0}}; //MAP1 //private int[][] waypoints = {{0, 2}, {1 ,1}, {2, 2}, {2, 1}, {1, 0}}; //MAP2 //private int[][] waypoints = {{1, 1}, {0 ,2}, {2, 2}, {2, 1}, {1, 0}}; //MAP3 //private int[][] waypoints = {{1, 0}, {2 ,1}, {2, 2}, {0, 2}, {1, 1}}; //MAP4 //private int[][] waypoints = {{0, 1}, {1 ,2}, {1, 0}, {2, 1}, {2, 2}}; //MAP5 //private int[][] waypoints = {{0, 2}, {2 ,2}, {2, 0}, {0, 0}}; //TESTMAP private Odometer odo; private double[] pos = new double[3]; //To store current position from odometer //Variables for travelTo() private double dX; private double dY; private double turnAngle; private double hDistance; private double heading; //Initial constructor public Navigation(Odometer odo) { this.odo = odo; } public void run() { //Loops through the waypoints sequentially for(int[] point : waypoints) { travelTo(point[0] * TILE_SIZE, point[1] * TILE_SIZE); } } public void travelTo( double x, double y) { //Load current position from odometer pos = odo.getXYT(); //Calculate distance to waypoint from current position dX = x - pos[0]; dY = y - pos[1]; //Calculate heading angle to next waypoint heading = Math.toDegrees(Math.atan2(dX, dY)); turnAngle = heading - pos[2]; //Rotate robot to calculated angle turnTo(turnAngle); //Calculate the min distance (hypotenuse) needed to reach next waypoint hDistance = Math.sqrt(dX * dX + dY * dY); //Calls method that moves the robot to waypoint from forward distance toWaypoint(hDistance); } //Computes small-angle turn required for robot to head to next waypoint, and turns the robot public void turnTo(double theta) { int wheelRotation; // leftMotor.setSpeed(SLOW); rightMotor.setSpeed(SLOW); //Adjusts theta for minimal turn angle, max angle is +-180 if(theta < -180) { theta += 360; } else if(theta > 180 ){ theta -= 360; } try { Thread.sleep(500); } catch (InterruptedException e) { } wheelRotation = (int) ((TRACK * theta / 2) / (WHEEL_RADIUS)); //Calculates rotation needed based on Track and Wheel radius leftMotor.rotate(wheelRotation, true); rightMotor.rotate(-wheelRotation, false); } //Sets robot to move forward the distance to waypoint public void toWaypoint(double dis) { int wheelArc; leftMotor.setSpeed(FAST); rightMotor.setSpeed(FAST); wheelArc = (int)((dis * 180.0) / (WHEEL_RADIUS * Math.PI)); //Calculates arc angle needed for the distance to waypoint leftMotor.rotate(wheelArc, true); rightMotor.rotate(wheelArc, false); } //Checks robot motors, if moving, returns true boolean public boolean isNavigating() { return leftMotor.isMoving() && rightMotor.isMoving(); } }
/* Create multiple threads that compile and execute a query */ bool multithread_example_2(Zorba* aZorba) { unsigned int i; pthread_t* pthreads; data_2* pdata; try { pthreads=(pthread_t *)malloc(NR_THREADS*sizeof(*pthreads)); pdata=(data_2 *)malloc(NR_THREADS*sizeof(data_2)); for(i=0; i<NR_THREADS; i++) { pdata[i].lZorba = aZorba; pdata[i].index = i; pthread_create(&pthreads[i], NULL, query_thread_2, (void*)(pdata+i)); } for(i=0; i<NR_THREADS; i++) pthread_join(pthreads[i], NULL); free(pthreads); free(pdata); return true; } catch (ZorbaException &e) { std::cerr << "some exception " << e << std::endl; return false; } }
import pickle
import zlib


def zdumps(value):
    # Pickle with the highest protocol (-1), then compress at level 9.
    return zlib.compress(pickle.dumps(value, -1), 9)
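The matching deserializer is not shown in the snippet; a minimal sketch of the inverse (the name zloads is an assumption):

def zloads(blob):
    # Inverse of zdumps: decompress, then unpickle.
    # Only use this on trusted data -- unpickling can execute arbitrary code.
    return pickle.loads(zlib.decompress(blob))


# Round-trip example:
data = {'a': [1, 2, 3], 'b': 'hello'}
assert zloads(zdumps(data)) == data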
Never Give Up: Lesson learned from a severe COVID-19 patient This is a PDF file of an article that has undergone enhancements after acceptance, such as the addition of a cover page and metadata, and formatting for readability, but it is not yet the definitive version of record. This version will undergo additional copyediting, typesetting and review before it is published in its final form, but we are providing this version to give early visibility of the article. Please note that, during the production process, errors may be discovered which could affect the content, and all legal disclaimers that apply to the journal pertain. Never Give Up: Lesson learned from a severe COVID-19 patient To the Editor, COVID-19 pandemic is causing major health consequences in affected persons needing hospital admission. Since the first epidemic outbreak in China we have learned that several factors including older age, comorbidities and individual immunological responses to infection may differently address the risk of disease progression and outcome. 1 Moreover, there is now quite a unanimous consensus that mortality rate of critically ill patients with SARS-CoV2 pneumonia admitted to Intensive Care Unit (ICU) is really considerable 2 and even a slightly higher than that recorded in patients with moderate to severe Acute Respiratory Distress Syndrome (ARDS). 3 Different attempts using experimental anti-viral 4 and/or systemic anti-inflammatory drugs 5,6 have been made to counteract both disease progression and fatal prognosis. Meanwhile, an expert opinion-based document was launched in order to address the early management-related actions for the individual patient, including the choice of an appropriate setting of care and the timing for non-invasive (NIV) or invasive mechanical ventilation (MV). 7 In light of this, we here report the clinical course of a 72-year old Caucasian male (M.A.) admitted for SARS-CoV2 pneumonia at our University Hospital in Modena on last March 5th. A multidisciplinary medical staff composed of different specialists (infectious diseases, pulmonology, Figure 1 Chest X-ray on admission shows diffuse interstitial abnormalities alongside scattered bilateral infiltrates. Arrows indicate the local ultrasound patterns, in particular: irregular vertical artifacts (B-lines) with impaired pleural sliding next to subpleural small consolidations in the upper anterior sites and thick and confluent B lines in the low posterior site. intensive care) was in charge of care and assuming shared clinical decisions. Past medical history was characterized by limited atherosclerosis, systemic arterial hypertension, and stable chronic B-cell lymphocytic leukemia (LLC) only requiring periodic follow-up. Onset of symptoms was reported 6 days before admission and infection by SARS-CoV-2 was confirmed by RT-PCR swab nasal/throat samples on the day of admission. Patient presented with fever (38 • C), dry cough, tachypnea (respiratory rate-RR = 24 bpm) and mild respiratory failure (PaO2/FiO2 242 breathing room air). Fig. 1 shows the chest-X-ray and the lung ultrasound pattern on admission (day 1). Given the neurological and hemodynamic stability (Subsequent Organ Failure Assessment-SOFA score was 1) he was admitted to the Infectious Disease Unit where oxygen supply through nasal cannula at the flow of 2 L/min and antiviral therapy with darunavir/cobicistat was started in addition to hydroxychloroquine and azithromycin according to our local recommendations. 
Blood C-reactive Protein (CRP 7.8 mg/dl) was high whereas white blood cell-WBC count (62.510 ml ---1 ) was abnormally increased also due to his LLC condition. On day 4, patient experienced a major respiratory worsening (PaO2/FiO2 105), with RR > 40 bpm) despite oxygen supply at FiO2 60%). A multidisciplinary staff evaluation was conducted to balance the need for immediate endotracheal intubation (ETI) and MV with the potential risks derived from his hematological co-morbidity. Decision was taken for a non-ICU care approach thus NIV in pressure support mode was started with the aim to target a pulse oximetry level >90% and a RR <30 bpm. At the same time, other anti- inflammatory drugs for off-label therapeutic indication were not available in the hospital pharmacy. From day 5 to 9, patient continued on supported breathing. Finally, he presented uncontrollable respiratory distress despite NIV and maximal oxygen supply (PaO2/FiO2 < 100) on day 10. There was a further multidisciplinary evaluation and, having become available, off-label i.v. Tocilizumab (8 mg/kg of body weight with 2 infusions 12-hour apart) 6 and admission to ICU for endo-tracheal intubation and mechanical ventilation were adopted as rescue therapies. On day 13 patient's condition and gas exchange improved (PaO2/FiO2 121) and tracheostomy was performed, while patient progressively returned to an assisted breathing modality. The time course of the main inflammatory serum markers over the admission period is shown in Fig. 2. On March 18th (day 15), M.A. was transferred to a different hospital in our provincial area (Ospedale di Sassuolo) for weaning purpose. Respiratory condition progressively improved reaching spontaneous breathing on day 23, while a supervised physiotherapy protocol was started due to the consequences of prolonged immobility. Tests to assess the vocal cord and swallow integrity were then performed, according to the recommended procedures, leading on April 6th (day 31) to removal of both the naso-gastric tube and the tracheal cannula. Oxygen supplementation was also withdrawn on the same day. Two consecutive nasal/throat RT-PCR swab samples were confirmed negative for RNA virus and specific immunoglobulin-G were dosed in serum, so patient was discharge as cured and transferred to a rehabilitation unit to complete his recovery (day 33). The story of M.A. confirms the heavy risk and the long and difficult clinical process behind any severe pneumonia due to SARS-CoV2 infection and prompts several considerations. First, it is quite clear that the clinical course in patients hospitalized due to pneumonia and worsening respiratory failure is unforeseeable, thus requiring great attention and prompt action. The complexity, comorbid status and age of the patient only partially explain this variability. Indeed, it has been recently hypothesized that least 3 different grades of increasing severity may be recognized, which correspond to, distinct clinical findings, response to therapy and clinical outcome, and are likely to depend on the balance between the viral phase and the host inflammatory response. 8 Second, the history of M.A. at least partially proposes the unprecedented decisions to be taken, whether by hospital, physicians and/or nurses, i.e. withhold a ventilator and/or access to ICU when faced with an anticipated shortage of ventilators and ICU-beds during an uncontrolled epidemic outbreak 9 or with a clinical discussion on a proportionate treatment. 
This might in turn overexpose clinicians to the risk of civil or criminal liability in the absence of clear Government assurance when facing extraordinary events. 10 Given the particular condition to which M.A. rushed after admission, the decision taken to proportion intensity of respiratory support by NIV could have led to an unwanted liability. Third, it should be noted that, as tocilizumab was not available to prompt a potentially anti-inflammatory drug effect, even in a later stage as was the case of our patient (see Fig. 2), the choice to use NIV was indeed the only means to ''buy time'' before any upgraded decision of care could be taken. This highlights the role that noninvasive respiratory support may have during COVID-19 epidemic, which is not only to manage the advanced hypoxic respiratory failure, 7 but also to help clinicians to assist a very severely ill patient even if at high risk. Ongoing data on this epidemic are showing that the decision to prevent endotracheal intubation by NIV might be a safe option for patients. While we are waiting for more convincing data on the role of anti-inflammatory agents to early brake systemic inflammation and progression of the disease, 6,11 it has been discussed whether or not early ETI and MV would be the best option for the COVID-19 patient's outcome. 12 One of the hypotheses behind this is that COVID-19 is not a typical ARDS, so MV in many cases is not the right treatment. COVID-19 patients show significant vasoconstriction of small blood vessels compared to the healthy and ARDS patients. It is known that ventilation reduces the small blood vessel size, meaning it is probably making things worse for most COVID-19 patients. Notably, a very recent audit in UK in over 6000 patients shows that death rate in COVID-19 patients using advanced respiratory support (ETI, MV, ECMO) is significantly higher as compared with patients on basic support (oxygen with inspiratory fraction >50%, CPAP/BIPAP) (66.3 vs. 51.6%, respectively). This in turn expresses a much higher risk rate when compared to that (22%) occurring in a retrospective series of hospitalized non-COVID viral pneumonia. 13 Finally, present experience has led clinicians to the evidence that such a very severe situation with difficult and risky decisions unavoidably lead the patient to a long clinical course with associated disability. 14 Since beating the virus is just the beginning, any lack of access to early physiotherapy and to a long-term strategy for survivors of severe COVID-19 is a problem to be overcome urgently. 15 From our personal experience during COVID-19 epidemic we encourage colleagues not to give up in all circumstances due to this happily ending story. Funding The authors declare that no funding was received for this paper. Consent to publish data Informed consent to publish data was obtained from the patient. Conflicts of interest The authors have no conflicts of interest to declare. Prevention measures for COVID-19 in retail food stores in Braga, Portugal COVID-19 is the designation of the World Health Organization for the infectious disease caused by the new coronavirus, which can cause severe respiratory infection, such as pneumonia. 1 The main modes of transmission of COVID-19 are: 1) close contact with people infected by the virus or 2) contact with contaminated surfaces or objects. 
Transmission takes place through droplets that are expelled when a person with COVID-19 coughs, sneezes or speaks, or through contact with contaminated hands that touch the eyes, nose or mouth of a person (hands are easily contaminated by contact with objects or surfaces where droplets from an infected person have landed). 2 Knowing that close contacts can contribute to increasing the spread of the infection, breaking these transmission chains is essential. In this context, the Portuguese Directorate-General of Health (DGS) has been issuing guidelines on the public health measures to be adopted by public service establishments to prevent the spread of the virus. 3 Briefly, the best practices recommended by DGS include the following: guarantee that checkout counters maintain a physical distance of 1 m; post signage on the floor to help customers maintain adequate social distancing; and create physical barriers between employees and customers, in order to avoid "excessive approximation between individuals". As measures for cleaning and sanitizing, DGS recommends disinfecting the whole store area at least once a day and frequently cleaning (every hour) high-touch surfaces, such as ticket dispenser machines or ATMs. Moreover, alcohol-based solutions should be provided in strategic locations. 3 This observational study aims to describe the measures adopted to prevent the transmission of COVID-19 by retail food stores in the municipality of Braga, Portugal. To this end, a researcher from the Institute of Education of the University of Minho designed and implemented a descriptive observational study, carried out between 5th and 8th April 2020, with a convenience sample of eight retail food stores in the municipality of Braga (Portugal). Observations and records were made by three researchers in different types of stores (mini markets, supermarkets, and hypermarkets). Each store was observed once. Researchers spent, on average, 10 min in each outlet. Data were collected by filling out a previously prepared form, which included the following variables: I. Measures adopted outside the store (signs of disinfection of the floors; disinfection of shopping carts/baskets; measures to prevent overcrowding; existence of signage that encourages employees and customers to practice health safety procedures such as social distancing and wearing face masks); II. Measures adopted inside the store (indication of the necessary physical distance between people; provision of hand sanitizer, gloves, or other personal protective equipment); III. Employees (cashier: use of a visor or a mask/glasses, use of gloves, existence of acrylic separator walls at counters; store operators: use of a visor or a mask, use of gloves); IV. Customers (gender; age group; use of a surgical or handmade face mask, or a scarf; use of a visor/other; use of gloves). Six of the eight establishments observed showed signs of disinfection of the floors. Four of them disinfected shopping carts/baskets; almost all employed measures to prevent overcrowding (n = 7) and all of them displayed signage and warnings about COVID-19. As to measures adopted inside the store, most had indications of the necessary physical
package de.ifa.arznei.mobil.entities; import java.util.List; import javax.persistence.EntityManager; import org.junit.Ignore; import org.junit.Test; public class ArtikelTest extends AbstractDatabaseTest { @Test @Ignore public void testGetId() { EntityManager em = getEntityManager(); List<Artikel> artikelList = em.createNamedQuery("Artikel.findAll", Artikel.class).getResultList(); for (Artikel artikel : artikelList) { System.out.println(artikel); } } }
Highly Conductive Nitrogen-Doped Graphene Grown on Glass toward Electrochromic Applications. The direct synthesis of low sheet resistance graphene on glass can promote the applications of such intriguing hybrid materials in transparent electronics and energy-related fields. Chemical doping is efficient for tailoring the carrier concentration and the electronic properties of graphene that previously derived from metal substrates. Herein, we report the direct synthesis of 5 in. uniform nitrogen-doped (N-doped) graphene on the quartz glass through a designed low-pressure chemical vapor deposition (LPCVD) route. Ethanol and methylamine were selected respectively as precursor and dopant for acquiring predominantly graphitic-N-doped graphene. We reveal that by a precise control of growth temperature and thus the doping level the sheet resistance of graphene on glass can be as low as one-half that of nondoped graphene, accompanied by relative high crystal quality and transparency. Significantly, we demonstrate that this scalable, 5 in. uniform N-doped graphene glass can serve as excellent electrode materials for fabricating high performance electrochromic smart windows, featured with a much simplified device structure. This work should pave ways for the direct synthesis and application of the new type graphene-based hybrid material.
<gh_stars>1-10 {-# LANGUAGE DerivingStrategies, GeneralizedNewtypeDeriving, TypeFamilies, TypeOperators, FlexibleContexts, DataKinds #-} module Godot.Core.Material (Godot.Core.Material._RENDER_PRIORITY_MAX, Godot.Core.Material._RENDER_PRIORITY_MIN, Godot.Core.Material.get_next_pass, Godot.Core.Material.get_render_priority, Godot.Core.Material.set_next_pass, Godot.Core.Material.set_render_priority) where import Data.Coerce import Foreign.C import Godot.Internal.Dispatch import System.IO.Unsafe import Godot.Gdnative.Internal import Godot.Api.Types _RENDER_PRIORITY_MAX :: Int _RENDER_PRIORITY_MAX = 127 _RENDER_PRIORITY_MIN :: Int _RENDER_PRIORITY_MIN = -128 {-# NOINLINE bindMaterial_get_next_pass #-} bindMaterial_get_next_pass :: MethodBind bindMaterial_get_next_pass = unsafePerformIO $ withCString "Material" $ \ clsNamePtr -> withCString "get_next_pass" $ \ methodNamePtr -> godot_method_bind_get_method clsNamePtr methodNamePtr get_next_pass :: (Material :< cls, Object :< cls) => cls -> IO Material get_next_pass cls = withVariantArray [] (\ (arrPtr, len) -> godot_method_bind_call bindMaterial_get_next_pass (upcast cls) arrPtr len >>= \ (err, res) -> throwIfErr err >> fromGodotVariant res) {-# NOINLINE bindMaterial_get_render_priority #-} bindMaterial_get_render_priority :: MethodBind bindMaterial_get_render_priority = unsafePerformIO $ withCString "Material" $ \ clsNamePtr -> withCString "get_render_priority" $ \ methodNamePtr -> godot_method_bind_get_method clsNamePtr methodNamePtr get_render_priority :: (Material :< cls, Object :< cls) => cls -> IO Int get_render_priority cls = withVariantArray [] (\ (arrPtr, len) -> godot_method_bind_call bindMaterial_get_render_priority (upcast cls) arrPtr len >>= \ (err, res) -> throwIfErr err >> fromGodotVariant res) {-# NOINLINE bindMaterial_set_next_pass #-} bindMaterial_set_next_pass :: MethodBind bindMaterial_set_next_pass = unsafePerformIO $ withCString "Material" $ \ clsNamePtr -> withCString "set_next_pass" $ \ methodNamePtr -> godot_method_bind_get_method clsNamePtr methodNamePtr set_next_pass :: (Material :< cls, Object :< cls) => cls -> Material -> IO () set_next_pass cls arg1 = withVariantArray [toVariant arg1] (\ (arrPtr, len) -> godot_method_bind_call bindMaterial_set_next_pass (upcast cls) arrPtr len >>= \ (err, res) -> throwIfErr err >> fromGodotVariant res) {-# NOINLINE bindMaterial_set_render_priority #-} bindMaterial_set_render_priority :: MethodBind bindMaterial_set_render_priority = unsafePerformIO $ withCString "Material" $ \ clsNamePtr -> withCString "set_render_priority" $ \ methodNamePtr -> godot_method_bind_get_method clsNamePtr methodNamePtr set_render_priority :: (Material :< cls, Object :< cls) => cls -> Int -> IO () set_render_priority cls arg1 = withVariantArray [toVariant arg1] (\ (arrPtr, len) -> godot_method_bind_call bindMaterial_set_render_priority (upcast cls) arrPtr len >>= \ (err, res) -> throwIfErr err >> fromGodotVariant res)
/** * Author: Kartik Sharma * Email Id: [email protected] * Created: 1/23/2017 5:33 PM * Description: Unavailable */ public class MediaPagerAdapter extends PagerAdapter { private List<Media> mMedia; private OnImageClickListener mOnImageClickListener; public MediaPagerAdapter(List<Media> media, OnImageClickListener onImageClickListener) { mMedia = media; mOnImageClickListener = onImageClickListener; } @Override public int getCount() { return mMedia != null ? mMedia.size() : 0; } @Override public Object instantiateItem(ViewGroup container, int position) { View view = LayoutInflater.from(container.getContext()) .inflate(R.layout.item_media_image, container, false); container.addView(view); final MediaImageViewHolder mediaImageViewHolder = new MediaImageViewHolder(view); String mediaImageUrl = mMedia.get(position).getImageUrl(); PipelineDraweeControllerBuilder controller = Fresco.newDraweeControllerBuilder(); controller.setUri(mediaImageUrl); controller.setAutoPlayAnimations(true); controller.setOldController(mediaImageViewHolder.imgMedia.getController()); controller.setControllerListener(new BaseControllerListener<ImageInfo>() { @Override public void onFinalImageSet(String id, ImageInfo imageInfo, Animatable animatable) { super.onFinalImageSet(id, imageInfo, animatable); mediaImageViewHolder.progressBarLoading.setVisibility(View.GONE); if (imageInfo == null || mediaImageViewHolder.imgMedia == null) { return; } mediaImageViewHolder.imgMedia.update(imageInfo.getWidth(), imageInfo.getHeight()); } }); mediaImageViewHolder.imgMedia.setController(controller.build()); mediaImageViewHolder.imgMedia.setOnViewTapListener(new OnViewTapListener() { @Override public void onViewTap(View view, float v, float v1) { mOnImageClickListener.onImageClicked(); } }); return view; } @Override public boolean isViewFromObject(View view, Object object) { return view == object; } @Override public void destroyItem(ViewGroup container, int position, Object object) { container.removeView((View) object); } public interface OnImageClickListener { void onImageClicked(); } public static class MediaImageViewHolder { View itemView; @BindView(R.id.image_view_media) PhotoDraweeView imgMedia; @BindView(R.id.progress_bar_loading) ProgressBar progressBarLoading; public MediaImageViewHolder(View itemView) { this.itemView = itemView; ButterKnife.bind(this, itemView); } } }
/* Copyright (C) 1992-2001, 2003-2007, 2009-2016 Free Software Foundation, Inc. This file is part of the GNU C Library. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see <http://www.gnu.org/licenses/>. */ /*#ifndef _LIBC # include <config.h> #endifi*/ #include "getpass.h" #include <stdio.h> #if !((defined _WIN32 || defined __WIN32__) && !defined __CYGWIN__) # include <stdbool.h> # if HAVE_DECL___FSETLOCKING && HAVE___FSETLOCKING # if HAVE_STDIO_EXT_H # include <stdio_ext.h> # endif # else # define __fsetlocking(stream, type) /* empty */ # endif # if HAVE_TERMIOS_H # include <termios.h> # endif # if USE_UNLOCKED_IO # include "unlocked-io.h" # else # if !HAVE_DECL_FFLUSH_UNLOCKED # undef fflush_unlocked # define fflush_unlocked(x) fflush (x) # endif # if !HAVE_DECL_FLOCKFILE # undef flockfile # define flockfile(x) ((void) 0) # endif # if !HAVE_DECL_FUNLOCKFILE # undef funlockfile # define funlockfile(x) ((void) 0) # endif # if !HAVE_DECL_FPUTS_UNLOCKED # undef fputs_unlocked # define fputs_unlocked(str,stream) fputs (str, stream) # endif # if !HAVE_DECL_PUTC_UNLOCKED # undef putc_unlocked # define putc_unlocked(c,stream) putc (c, stream) # endif # endif /* It is desirable to use this bit on systems that have it. The only bit of terminal state we want to twiddle is echoing, which is done in software; there is no need to change the state of the terminal hardware. */ # ifndef TCSASOFT # define TCSASOFT 0 # endif static void call_fclose (void *arg) { if (arg != NULL) fclose (arg); } char * getpass (const char *prompt) { FILE *tty; FILE *in, *out; # if HAVE_TCGETATTR struct termios s, t; # endif bool tty_changed = false; static char *buf; static size_t bufsize; ssize_t nread; /* Try to write to and read from the terminal if we can. If we can't open the terminal, use stderr and stdin. */ tty = fopen ("/dev/tty", "w+"); if (tty == NULL) { in = stdin; out = stderr; } else { /* We do the locking ourselves. */ __fsetlocking (tty, FSETLOCKING_BYCALLER); out = in = tty; } flockfile (out); /* Turn echoing off if it is on now. */ # if HAVE_TCGETATTR if (tcgetattr (fileno (in), &t) == 0) { /* Save the old one. */ s = t; /* Tricky, tricky. */ t.c_lflag &= ~(ECHO | ISIG); tty_changed = (tcsetattr (fileno (in), TCSAFLUSH | TCSASOFT, &t) == 0); } # endif /* Write the prompt. */ fputs_unlocked (prompt, out); fflush_unlocked (out); /* Read the password. */ nread = getline (&buf, &bufsize, in); /* According to the C standard, input may not be followed by output on the same stream without an intervening call to a file positioning function. Suppose in == out; then without this fseek call, on Solaris, HP-UX, AIX, OSF/1, the previous input gets echoed, whereas on IRIX, the following newline is not output as it should be. POSIX imposes similar restrictions if fileno (in) == fileno (out). The POSIX restrictions are tricky and change from POSIX version to POSIX version, so play it safe and invoke fseek even if in != out. 
*/ fseeko (out, 0, SEEK_CUR); if (buf != NULL) { if (nread < 0) buf[0] = '\0'; else if (buf[nread - 1] == '\n') { /* Remove the newline. */ buf[nread - 1] = '\0'; if (tty_changed) { /* Write the newline that was not echoed. */ putc_unlocked ('\n', out); } } } /* Restore the original setting. */ # if HAVE_TCSETATTR if (tty_changed) tcsetattr (fileno (in), TCSAFLUSH | TCSASOFT, &s); # endif funlockfile (out); call_fclose (tty); return buf; } #else /* W32 native */ /* Windows implementation by <NAME> <<EMAIL>>, improved by <NAME>. */ /* For PASS_MAX. */ # include <limits.h> /* For _getch(). */ # include <conio.h> /* For strdup(). */ # include <string.h> # ifndef PASS_MAX # define PASS_MAX 512 # endif char * getpass (const char *prompt) { char getpassbuf[PASS_MAX + 1]; size_t i = 0; int c; if (prompt) { fputs (prompt, stderr); fflush (stderr); } for (;;) { c = _getch (); if (c == '\r') { getpassbuf[i] = '\0'; break; } else if (i < PASS_MAX) { getpassbuf[i++] = c; } if (i >= PASS_MAX) { getpassbuf[i] = '\0'; break; } } if (prompt) { fputs ("\r\n", stderr); fflush (stderr); } return strdup (getpassbuf); } #endif
/* * Copyright (c) 2017-2018 Globo.com * All rights reserved. * * This source is subject to the Apache License, Version 2.0. * Please see the LICENSE file for more information. * * Authors: See AUTHORS file * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.globo.grou.groot.generator.collector; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.globo.grou.groot.generator.listeners.CollectorInformations; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.api.ContentResponse; import org.eclipse.jetty.util.log.Log; import org.eclipse.jetty.util.log.Logger; /** * */ public class CollectorClient { private static final Logger LOGGER = Log.getLogger( CollectorClient.class ); /** * addresses to collect informations must in the form of 127.0.0.1:8080 (ip:port) */ private List<String> addresses; /** * will collect informations on remote server with this schedule */ private long scheduleDelayInMillis = 1; private ScheduledExecutorService scheduledExecutorService; private List<HttpClient> httpClients; private List<CollectorResultHandler> collectorResultHandlers; public CollectorClient( List<String> addresses, long scheduleDelayInMillis, List<CollectorResultHandler> collectorResultHandlers ) { this.addresses = addresses; this.scheduleDelayInMillis = scheduleDelayInMillis; this.httpClients = new CopyOnWriteArrayList<>( ); this.collectorResultHandlers = collectorResultHandlers == null ? 
Collections.emptyList() : collectorResultHandlers; } public CollectorClient start() throws Exception { // at least a default one if (this.collectorResultHandlers.isEmpty()) { this.collectorResultHandlers = Arrays.asList( new LoggerCollectorResultHandler() ); } this.scheduledExecutorService = Executors.newScheduledThreadPool( addresses.size() ); for ( String address : this.addresses ) { this.scheduledExecutorService.scheduleWithFixedDelay( () -> { try { HttpClient httpClient = new HttpClient(); httpClient.start(); httpClients.add( httpClient ); ObjectMapper objectMapper = new ObjectMapper(); // response time per path informations ContentResponse contentResponse = httpClient // .newRequest( "http://" + address + "/collector/response-times" ) // .send(); LOGGER.debug( "response time per path status: {}, response: {}", // contentResponse.getStatus(), // contentResponse.getContentAsString() ); TypeReference<Map<String, CollectorInformations>> typeRef = new TypeReference<Map<String, CollectorInformations>>() { }; Map<String, CollectorInformations> responseTimePerPath = objectMapper.readValue( contentResponse.getContentAsString(), typeRef ); for (CollectorResultHandler collectorResultHandler: collectorResultHandlers) { collectorResultHandler.handleResponseTime( responseTimePerPath ); } } catch ( Throwable e ) { LOGGER.warn( e ); } }, 1, this.scheduleDelayInMillis, TimeUnit.MILLISECONDS ); } return this; } public CollectorClient stop() throws Exception { for ( HttpClient httpClient : httpClients ) { httpClient.stop(); } this.scheduledExecutorService.shutdown(); return this; } //-------------------------------------------------------------- // Builder //-------------------------------------------------------------- public static class Builder { private List<String> addresses = new ArrayList<>(); private long scheduleDelayInMillis = 5000; private List<CollectorResultHandler> collectorResultHandlers; public Builder() { // no op } public Builder addAddress( String address ) { this.addresses.add( address ); return this; } public Builder addAddresses( String... addresses ) { this.addresses.addAll( Arrays.asList( addresses ) ); return this; } public Builder addAddresses( List<String> addresses ) { this.addresses.addAll( addresses ); return this; } public Builder scheduleDelayInMillis( long scheduleDelayInMillis ) { this.scheduleDelayInMillis = scheduleDelayInMillis; return this; } public Builder collectorResultHandlers( List<CollectorResultHandler> collectorResultHandlers ) { this.collectorResultHandlers = collectorResultHandlers; return this; } public CollectorClient build() { if ( this.addresses.isEmpty() ) { throw new IllegalArgumentException( "addresses are mandatory" ); } if ( this.scheduleDelayInMillis < 1 ) { throw new IllegalArgumentException( "scheduleDelayInMillis must be higher than 0" ); } return new CollectorClient( this.addresses, this.scheduleDelayInMillis, this.collectorResultHandlers ); } } }
pub mod app; pub mod framework;
David Shaw has a 54-14 record as Stanford's head coach. (Photo: Kirby Lee, USA TODAY Sports) Until 2012, Stanford University never had to disclose the compensation of an athletic coach. As a private school, it isn’t required to reveal its employment contracts — and it hadn’t paid a coach enough money to require that those figures be listed on its federal income tax returns. On Friday evening, Stanford’s new tax records showed that during the 2014 calendar year, football coach David Shaw was credited with nearly $4.1 million in total compensation — the highest for any Stanford employee not only in 2014, but also in any one of the seven years under the IRS’ current reporting system. The previous high single-year total for a Stanford employee was the nearly $3.6 million reported as the 2012 calendar-year compensation for John Powers, then president of the Stanford Management Co., which invests and manages the university’s endowment and other financial assets. And of the amount listed for Powers, more than $500,000 was compensation that had been reported as deferred pay in a prior year. Shaw’s total for the 2014 calendar year — $4,067,219 — is close to double the amount reported for him for 2013 (just over $2.2 million). It’s almost four times the amount listed as Jim Harbaugh’s total for 2010, the figure included on the return Stanford filed in 2012. According to the new return, which the university provided in response to a request from USA TODAY Sports, Shaw’s total for 2014 included just over $2.65 million in base salary. That represents an increase of more than $1 million over Shaw’s base amount for 2013. In addition, Shaw got $590,000 in bonuses in 2014 and he was credited with just over $700,000 in deferred compensation. It is difficult to compare the compensation of private-school coaches to that of public-school coaches because the amounts reported on school tax returns take into account bonuses paid and the taxable value of various perks and benefits, but the sum of Shaw’s base and deferred pay for 2014 indicates that he is currently one of the Pac-12 Conference’s highest-paid coaches, if not the highest-paid. For the 2015 season, Washington’s Chris Petersen was the conference’s highest-paid public school coach at just over $3.4 million, not including bonuses, according to USA TODAY Sports’ annual compensation survey. Among private-school football coaches whose schools have made their tax returns public so far this year, Shaw’s total for 2014 vaults him into a range occupied that year by only TCU’s Gary Patterson (just over $4 million, including nearly $3.5 million in base pay) and former Baylor coach Art Briles (nearly $6 million, including more than $5.3 million in base pay). Beginning with 2011, Shaw’s first season as Harbaugh’s successor, Stanford has had records of 11-2, 12-2, 11-3, 8-5 and 12-2. The Cardinal have won the Pac-12 championship in three of the past four seasons and played in either the Rose or Fiesta bowl in four of the past five seasons. It is likely that Shaw’s bonus total for 2014 — which was $490,000 greater than his bonus total for 2013 — reflects amounts tied to the team’s achievements during the 2013-14 school year. That was the year in which Stanford went 11-3, lost to Michigan State in the Rose Bowl and was No. 10 in the final USA TODAY Coaches Poll.
According to notes included on Stanford’s new tax return, Shaw’s bonuses were based on the team’s academic performance, its athletic performance, Shaw’s attendance at athletic department events “and/or leadership.” The notes also state that Shaw was one of five Stanford employees listed on the return to receive taxable housing benefits in 2014. The other four were either university vice presidents or deans of a school within the university. Like other private colleges, Stanford is set up as a non-profit organization. That means it must annually file a federal tax return that includes information about the pay of its officers, directors and other key university-wide leaders. It also must disclose pay information for its five most highly compensated employees who do not fall into one of those three categories. Although most of the information on a non-profit’s tax return covers a fiscal year that usually involves parts of two calendar years, the IRS requires that the compensation reporting cover the most recently completed calendar year. Due to the complexity of their returns, large colleges and universities routinely take filing extensions that result in a significant time lag between the period covered by their most recent return and the date they file. In Stanford's case, the return it filed Friday covers a fiscal year that ended Aug. 31, 2015. That makes 2014 the most recently completed calendar year and, thus, the one used for reporting Shaw's compensation.
/** * An expression which stands for a NULL in SQL. */ public class DDlogENull extends DDlogExpression { public DDlogENull(@Nullable Node node) { super(node, DDlogTUnknown.instance); } @Override public boolean compare(DDlogExpression val, IComparePolicy policy) { if (!super.compare(val, policy)) return false; return val.is(DDlogENull.class); } public DDlogENull(@Nullable Node node, DDlogType type) { super(node, type); } @Override public String toString() { if (this.getType().is(DDlogTUnknown.class)) return "None{}"; return "None{}: " + this.type; } }
package com.example.command; import static com.example.service.UserService.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import com.example.business.Inventory; import com.example.business.InventoryManager; import com.example.service.InventoryService; import com.example.service.UserService; public class ResetPasswordCommand implements Command { @Override public String execute(HttpServletRequest request, HttpServletResponse response) { UserService userService = new UserService(); // get inventory InventoryService InventoryService inventoryService = new InventoryService(); String forwardToJsp = ""; String username = request.getParameter("username"); String password = request.getParameter("password"); //Check we have a username and password... if (username != null && password != null) { //first check if the username is already in the system if (userService.checkUsername(username)) { //Make call to the 'Model' using the UserServive class to signup... boolean userResetting= userService.updatePassword(username, password); //login the user if (userResetting) { //Make call to the 'Model' using the UserServive class to login... InventoryManager userLoggingIn = userService.login(username, password); //If login successful, store the session id for this client... HttpSession session = request.getSession(); String clientSessionId = session.getId(); session.setAttribute("loggedSessionId", clientSessionId); session.setAttribute("user", username); //get the inventory from the database Inventory inventory = inventoryService.DisplayInventory(); session.setAttribute("inventory", inventory); int maxItemId = inventoryService.getMaxItemId(); session.setAttribute("maxItemId", maxItemId); forwardToJsp = "/inventory.jsp"; } else { forwardToJsp = "/loginFailure.jsp"; } } else { forwardToJsp = "/signupFailure.jsp"; } } return forwardToJsp; } }
A bill to deal with legal cannabis will be tabled next spring, said PEI Lieutenant Governor Antoinette Perry on Thursday in her first speech from the throne. The throne speech opened the fall session of the PEI legislature. While no specific details were offered, Perry said the province will be guided by the principles of good public health and safety, and they will review public input and engage with municipalities, police, public health and education experts and businesses. The province’s approach will focus on taking profits away from the black market, keeping cannabis out of the hands of young Canadians, and following a ‘consistent approach’ across the region. Ensuring similar rules like age limits with neighbouring provinces and territories will help ensure less cross-border shopping for products, similar to provincial rules for alcohol and tobacco. In neighbouring Quebec, reports have suggested that the distribution and sale of legal cannabis will be overseen by the Société des alcools du Québec, the provincial liquor board, with an age limit of 18. The province is expected to table legislation Thursday for legalization. Nearby, New Brunswick has said they intend to sell cannabis in stand alone stores and have announced some aspects of their expected legislation, like storing cannabis under lock and key, and an age limit of 19. Nova Scotia has said they expect to introduce legislation by the end of the year. Perry did not give any more information on the date or contents of the legislation. Ontario has introduced their legislation, Bill 174, and Alberta is expected to introduce a bill this week to address legalization. Manitoba recently announced their plans for managing distribution and retail sales. Bill to deal with legal pot on #PEI will be tabled in the spring. Not sure what info, if any, we'll get on this before then. #peipoli — Kerry Campbell (@kerrywcampbell) November 14, 2017 Featured image via Wiki Commons.
"""Module for creating ids""" import hashlib import time import uuid def create_id( prefix: str, salt: str = None, ) -> str: """ Create an id string. :param prefix: The prefix with which to prefix the generated id. This should be used to identify what the id is being applied to. :param salt: A salt value to use when generating the checksum. Providing a salt will make it so the checksum is not strickly determined by the id, making forged ids non-trivial. """ if '-' in prefix: raise ValueError('prefix cannot contain the "-" character.') now = int(time.time()) hex_time = '{0:#012x}'.format(now)[-10:] identifier = prefix + '-' + hex_time + '-' + str(uuid.uuid4()) return _append_hash(identifier, salt=salt) def is_valid_id(id_value: str, salt: str = None) -> bool: """ Validate that the given id has a valid checksum. :param id_value: The id value being tested. :param salt: A random string value that was used as the `salt` when calling `create_id` to generate the id. :return: True if the id is valid, False if it is either not a valid id or has an invalid checksum. """ try: parsed = parse_id(id_value) except ValueError: # It isn't even a valid nuggan id. return False expected = _append_hash(parsed['prefixed_id'], salt=salt) return id_value == expected def _append_hash(identifier: str, salt: str = None) -> str: """Append the hash of the identifier to the identifier.""" salt = salt or '' salted = salt + identifier hashed = hashlib.sha1(salted.encode()).hexdigest() return identifier + '-' + hashed[:12] def parse_id(id_value: str) -> dict: """ Parse the id_value into its component parts. :param id_value: The id value to parse. :return: A dictionary containing the parts of the id including the prefix, uuid, and checksum. """ if len(id_value.split('-')) != 8: template = '"{value}" is not a nuggan id.' raise ValueError(template.format(value=id_value)) prefix, hex_time, remaining = id_value.split('-', maxsplit=2) base_id, checksum = remaining.rsplit('-', maxsplit=1) prefixed_id, _ = id_value.rsplit('-', maxsplit=1) return { 'prefix': prefix, 'hex_time': hex_time, 'prefixed_id': prefixed_id, 'base_id': base_id, 'checksum': checksum } class IdMaker: """Class for generating and validating ids.""" def __init__(self, salt: str = None): self._salt = salt def create_id(self, prefix: str) -> str: """ Create an id with the given prefix. :param prefix: The prefix to prepend to the id. :return: An id string with the given prefix. """ return create_id(prefix, salt=self._salt) def is_valid_id(self, id_value: str) -> bool: """ Validate that the given id is valid. :param id_value: The id value being tested. :param salt: A random string value that was used as the `salt` when calling `create_id` to generate the id. :return: True if the id is valid, False if it is either not a valid id or has an invalid checksum. """ return is_valid_id(id_value, salt=self._salt)
import subprocess import time import os import os.path as osp from rlpyt.utils.launching.affinity import get_n_run_slots, prepend_run_slot, affinity_from_code from rlpyt.utils.logging.context import get_log_dir from rlpyt.utils.launching.variant import save_variant def log_exps_tree(exp_dir, log_dirs, runs_per_setting): os.makedirs(exp_dir, exist_ok=True) with open(osp.join(exp_dir, "experiments_tree.txt"), "w") as f: f.write(f"Experiment manager process ID: {os.getpid()}.\n") f.write("Number of settings (experiments) to run: " f"{len(log_dirs)} ({runs_per_setting * len(log_dirs)}).\n\n") [f.write(log_dir + "\n") for log_dir in log_dirs] def log_num_launched(exp_dir, n, total): with open(osp.join(exp_dir, "num_launched.txt"), "w") as f: f.write(f"Experiments launched so far: {n} out of {total}.\n") def launch_experiment(script, run_slot, affinity_code, log_dir, variant, run_ID, args): slot_affinity_code = prepend_run_slot(run_slot, affinity_code) affinity = affinity_from_code(slot_affinity_code) call_list = list() if isinstance(affinity, dict) and affinity.get("all_cpus", False): cpus = ",".join(str(c) for c in affinity["all_cpus"]) elif isinstance(affinity, list) and affinity[0].get("all_cpus", False): cpus = ",".join(str(c) for aff in affinity for c in aff["all_cpus"]) else: cpus = () if cpus: call_list += ["taskset", "-c", cpus] # PyTorch obeys better than just psutil. call_list += ["python", script, slot_affinity_code, log_dir, str(run_ID)] call_list += [str(a) for a in args] save_variant(variant, log_dir) print("\ncall string:\n", " ".join(call_list)) p = subprocess.Popen(call_list) return p def run_experiments(script, affinity_code, experiment_title, runs_per_setting, variants, log_dirs, common_args=None, runs_args=None): n_run_slots = get_n_run_slots(affinity_code) exp_dir = get_log_dir(experiment_title) procs = [None] * n_run_slots common_args = () if common_args is None else common_args assert len(variants) == len(log_dirs) if runs_args is None: runs_args = [()] * len(variants) assert len(runs_args) == len(variants) log_exps_tree(exp_dir, log_dirs, runs_per_setting) num_launched, total = 0, runs_per_setting * len(variants) for run_ID in range(runs_per_setting): for variant, log_dir, run_args in zip(variants, log_dirs, runs_args): launched = False log_dir = osp.join(exp_dir, log_dir) os.makedirs(log_dir, exist_ok=True) while not launched: for run_slot, p in enumerate(procs): if p is None or p.poll() is not None: procs[run_slot] = launch_experiment( script=script, run_slot=run_slot, affinity_code=affinity_code, log_dir=log_dir, variant=variant, run_ID=run_ID, args=common_args + run_args, ) launched = True num_launched += 1 log_num_launched(exp_dir, num_launched, total) break if not launched: time.sleep(10) for p in procs: if p is not None: p.wait() # Don't return until they are all done.
//! Structures and traits related to pagination. #![allow(clippy::expect_used)] #[cfg(not(feature = "std"))] use alloc::{ collections::btree_map, string::{String, ToString as _}, vec, vec::Vec, }; use core::fmt; #[cfg(feature = "std")] use std::collections::btree_map; use iroha_schema::IntoSchema; use serde::{Deserialize, Serialize}; #[cfg(feature = "warp")] use warp::{ http::StatusCode, reply::{self, Response}, Filter, Rejection, Reply, }; const PAGINATION_START: &str = "start"; const PAGINATION_LIMIT: &str = "limit"; /// Describes a collection to which pagination can be applied. /// Implemented for the [`Iterator`] implementors. pub trait Paginate: Iterator + Sized { /// Returns a paginated [`Iterator`]. fn paginate(self, pagination: Pagination) -> Paginated<Self>; } impl<I: Iterator + Sized> Paginate for I { fn paginate(self, pagination: Pagination) -> Paginated<Self> { Paginated { pagination, iter: self, } } } /// Paginated [`Iterator`]. /// Not recommended to use directly, only use in iterator chains. #[derive(Debug)] pub struct Paginated<I: Iterator> { pagination: Pagination, iter: I, } impl<I: Iterator> Iterator for Paginated<I> { type Item = I::Item; fn next(&mut self) -> Option<Self::Item> { if let Some(limit) = self.pagination.limit.as_mut() { if *limit == 0 { return None; } *limit -= 1 } #[allow(clippy::option_if_let_else)] // Required because of E0524. 2 closures with unique refs to self if let Some(start) = self.pagination.start.take() { self.iter .nth(start.try_into().expect("u32 should always fit in usize")) } else { self.iter.next() } } } /// Structure for pagination requests #[derive(Clone, Eq, PartialEq, Debug, Default, Copy, Deserialize, Serialize, IntoSchema)] pub struct Pagination { /// start of indexing pub start: Option<u32>, /// limit of indexing pub limit: Option<u32>, } impl Pagination { /// Constructs [`Pagination`]. pub const fn new(start: Option<u32>, limit: Option<u32>) -> Self { Self { start, limit } } } /// Error for pagination #[derive(Debug, Clone, Eq, PartialEq)] pub struct PaginateError(pub core::num::ParseIntError); impl fmt::Display for PaginateError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "Failed to decode pagination. 
Error occurred in one of numbers: {}", self.0 ) } } #[cfg(feature = "std")] impl std::error::Error for PaginateError {} #[cfg(feature = "warp")] impl Reply for PaginateError { fn into_response(self) -> Response { reply::with_status(self.to_string(), StatusCode::BAD_REQUEST).into_response() } } #[cfg(feature = "warp")] /// Filter for warp which extracts pagination pub fn paginate() -> impl Filter<Extract = (Pagination,), Error = Rejection> + Copy { warp::query() } impl From<Pagination> for btree_map::BTreeMap<String, String> { fn from(pagination: Pagination) -> Self { let mut query_params = Self::new(); if let Some(start) = pagination.start { query_params.insert(String::from(PAGINATION_START), start.to_string()); } if let Some(limit) = pagination.limit { query_params.insert(String::from(PAGINATION_LIMIT), limit.to_string()); } query_params } } impl From<Pagination> for Vec<(&'static str, usize)> { fn from(pagination: Pagination) -> Self { match (pagination.start, pagination.limit) { (Some(start), Some(limit)) => { vec![ ( PAGINATION_START, start.try_into().expect("u32 should always fit in usize"), ), ( PAGINATION_LIMIT, limit.try_into().expect("u32 should always fit in usize"), ), ] } (Some(start), None) => vec![( PAGINATION_START, start.try_into().expect("u32 should always fit in usize"), )], (None, Some(limit)) => vec![( PAGINATION_LIMIT, limit.try_into().expect("u32 should always fit in usize"), )], (None, None) => Vec::new(), } } } pub mod prelude { //! Prelude: re-export most commonly used traits, structs and macros from this module. pub use super::*; } #[cfg(test)] mod tests { use super::*; #[test] fn empty() { assert_eq!( vec![1_i32, 2_i32, 3_i32] .into_iter() .paginate(Pagination::new(None, None)) .collect::<Vec<_>>(), vec![1_i32, 2_i32, 3_i32] ) } #[test] fn start() { assert_eq!( vec![1_i32, 2_i32, 3_i32] .into_iter() .paginate(Pagination::new(Some(0), None)) .collect::<Vec<_>>(), vec![1_i32, 2_i32, 3_i32] ); assert_eq!( vec![1_i32, 2_i32, 3_i32] .into_iter() .paginate(Pagination::new(Some(1), None)) .collect::<Vec<_>>(), vec![2_i32, 3_i32] ); assert_eq!( vec![1_i32, 2_i32, 3_i32] .into_iter() .paginate(Pagination::new(Some(3), None)) .collect::<Vec<_>>(), Vec::<i32>::new() ); } #[test] fn limit() { assert_eq!( vec![1_i32, 2_i32, 3_i32] .into_iter() .paginate(Pagination::new(None, Some(0))) .collect::<Vec<_>>(), Vec::<i32>::new() ); assert_eq!( vec![1_i32, 2_i32, 3_i32] .into_iter() .paginate(Pagination::new(None, Some(2))) .collect::<Vec<_>>(), vec![1_i32, 2_i32] ); assert_eq!( vec![1_i32, 2_i32, 3_i32] .into_iter() .paginate(Pagination::new(None, Some(4))) .collect::<Vec<_>>(), vec![1_i32, 2_i32, 3_i32] ); } #[test] fn start_and_limit() { assert_eq!( vec![1_i32, 2_i32, 3_i32] .into_iter() .paginate(Pagination::new(Some(1), Some(1))) .collect::<Vec<_>>(), vec![2_i32] ) } }
On This Day Thursday 16th October 1947 71 years ago The name "Landrover" was officially approved by Rovers Board of Directors. The first Land Rover made its debut in 1948. It was designed with brilliant simplicity for extraordinary ability and unrivalled strength and durability. In fact, six decades on it is estimated that two-thirds of all these incredible vehicles are still at work – many of them in some of the most extreme conditions and inhospitable places on earth. The original 1948 Land Rover was ingeniously designed and engineered for extreme capability and strength. With extremely robust construction and characteristics such as short front and rear overhangs, it drove off the production line ready to take on some of the world’s toughest terrain. Today these qualities are as significant a part of what makes a Land Rover vehicle unique as they were 60 years ago. The Land Rover was the product of continuous evolution and refinement throughout the 1950s and 1960s with improved stability and a tighter turning circle. It was a period in which Land Rover took the lead in the emerging market for four-wheel drive vehicles. As a tough, reliable mobility platform, countless organisations came to depend on Land Rover vehicles to get personnel and equipment into the most challenging situations…and then safely out again. From organisations such as Born Free Foundation to The Royal Geographical Society and Biosphere Expeditions - we enter the second decade of the 21st century with them still relying on Land Rover In keeping with the forward-thinking philosophy that founded Land Rover, a radical, entirely new product was introduced in 1970 and created its very own vehicle category. This overnight sensation was the original Range Rover. It had all the capability of a Land Rover with the comfort and performance of an on-road car. This culture of innovation has developed ever since with both Land Rover and Range Rover vehicles: new models, more refinement, more innovative technology, more efficiency and fewer emissions. And it continues with initiatives such as e_Terrain Technologies (which improves the environmental performance of vehicles by reducing CO2 emissions), Sustainable Manufacturing and CO2 Offsetting. Land Rover will remain at the forefront of advanced design – the new small Range Rover is a testament to the vision that takes the company forward and keeps it at the cutting edge of technology and engineering.
from eventbrite import Eventbrite def eb_fetch(latitude, longitude, radius): eventbrite = Eventbrite('LMYQEYROBA7SA3GQA2DR') user = eventbrite.get_user() my_id = eventbrite.get_user()['id'] events = eventbrite.event_search(**{'location.latitude': str(latitude), 'location.longitude': str(longitude), 'location.within': str(radius)+"mi", 'start_date.keyword': "this_week"}) events = events['events'] events_output = [] for event in events: eventDate = event['start']['local'] events_output.append({'eventName': event['name']['text'], 'eventLocation': 'null', 'eventType': event['description']['text'], 'eventLink': event['resource_uri'], 'eventDate': eventDate[:eventDate.index('T')]}) return events_output
Local remodeling of synthetic extracellular matrix microenvironments by co-cultured endometrial epithelial and stromal cells enables long-term dynamic physiological function. Mucosal barrier tissues, comprising a layer of tightly-bonded epithelial cells in intimate molecular communication with an underlying matrix-rich stroma containing fibroblasts and immune cells, are prominent targets for drugs against infection, chronic inflammation, and other disease processes. Although human in vitro models of such barriers are needed for mechanistic studies and drug development, differences in extracellular matrix (ECM) needs of epithelial and stromal cells hinder efforts to create such models. Here, using the endometrium as an example mucosal barrier, we describe a synthetic, modular ECM hydrogel suitable for 3D functional co-culture, featuring components that can be remodeled by cells and that respond dynamically to sequester local cell-secreted ECM characteristic of each cell type. The synthetic hydrogel combines peptides with off-the-shelf reagents and is thus accessible to cell biology labs. Specifically, we first identified a single peptide as suitable for initial attachment of both endometrial epithelial and stromal cells using a 2D semi-empirical screen. Then, using a co-culture system of epithelial cells cultured on top of gel-encapsulated stromal cells, we show that inclusion of ECM-binding peptides in the hydrogel, along with the integrin-binding peptide, leads to enhanced accumulation of basement membrane beneath the epithelial layer and more fibrillar collagen matrix assembly by stromal cells over two weeks in culture. Importantly, endometrial co-cultures composed of either cell lines or primary cells displayed hormone-mediated differentiation as assessed by morphological changes and secretory protein production. A multiplex analysis of apical cytokine and growth factor secretion comparing cell lines and primary cells revealed strikingly different patterns, underscoring the importance of using primary cell models in analysis of cell-cell communication networks. In summary, we define a "one-size-fits-all" synthetic ECM that enables long-term, physiologically responsive co-cultures of epithelial and stromal cells in a mucosal barrier format.
#include "rpc_controller.hpp" #include "rpc_channel.hpp" #include "rpc_server.hpp" #include "rpc_client.hpp" namespace NanoRpc { void RpcController::Send(const RpcMessage &message) { channel_->Send(message); } void RpcController::Receive(const RpcMessage &message) { // TODO: Implement client handling if (message.has_result() && message.result().status() != RpcSucceeded) { if (server_ != NULL) server_->Receive(message); } else { if (message.has_call()) { if (server_ != NULL) server_->Receive(message); } } } } // namespace
/** * @author Xuewei Huang * @created 2022-05-03 */ class BinarySearchLowerTest { private final int[] nums = new int[]{1, 1, 3, 3, 5, 5}; @Test void lower() { assertEquals(-1, BinarySearchLower.lower(nums, 0)); assertEquals(-1, BinarySearchLower.lower(nums, 1)); assertEquals(1, BinarySearchLower.lower(nums, 2)); assertEquals(1, BinarySearchLower.lower(nums, 3)); assertEquals(3, BinarySearchLower.lower(nums, 4)); assertEquals(3, BinarySearchLower.lower(nums, 5)); assertEquals(5, BinarySearchLower.lower(nums, 6)); } }
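The BinarySearchLower class exercised by this test is not included above. The following is a minimal sketch inferred solely from the assertions: lower(nums, target) is assumed to return the index of the last element strictly smaller than target in an ascending sorted array, or -1 when no such element exists. The class name is taken from the test; everything else is an assumption.

public class BinarySearchLower {
    // Returns the index of the last element strictly smaller than target,
    // or -1 when every element is >= target. Assumes nums is sorted ascending.
    public static int lower(int[] nums, int target) {
        int lo = -1;                 // -1 encodes "no such element"
        int hi = nums.length - 1;
        while (lo < hi) {
            int mid = lo + (hi - lo + 1) / 2;  // round up so lo = mid always makes progress
            if (nums[mid] < target) {
                lo = mid;            // mid qualifies; a later index may qualify as well
            } else {
                hi = mid - 1;        // nums[mid] >= target, discard mid and everything after it
            }
        }
        return lo;
    }
}

Run against the test above, this sketch returns -1 for targets 0 and 1, index 1 for targets 2 and 3, index 3 for targets 4 and 5, and index 5 for target 6, matching every assertion.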
/** * The {@link AbstractIndividualFactory} creates {@link Individual}s using a * given {@link Provider} and sets the registered * {@link IndividualStateListener}s. The {@link Creator} is used to create the * problem specific {@link Genotype} with which the created {@link Individual} * is initialized. * * @author lukasiewycz * * @param <I> * the type of Individual */ public class AbstractIndividualFactory<I extends Individual> implements IndividualFactory { protected final Creator<Genotype> creator; protected final Provider<I> individualProvider; protected final Set<IndividualStateListener> individualStateListeners = new CopyOnWriteArraySet<>(); /** * Constructs an {@link AbstractIndividualFactory} with a {@link Provider} * for {@link Individual}s. * * @param individualProvider * the provider that creates new individuals * @param creator * the creator that creates random genotypes */ public AbstractIndividualFactory(Provider<I> individualProvider, Creator<Genotype> creator) { this.individualProvider = individualProvider; this.creator = creator; } /** * The {@link IndividualStateListener}s will be transmitted to each * {@link Individual} that is created by this class. The listeners are * invoked if the state of the {@link Individual} changes. * * @param listeners * the listeners */ @Inject protected void injectListeners(Set<IndividualStateListener> listeners) { individualStateListeners.addAll(listeners); } /* * (non-Javadoc) * * @see * org.opt4j.core.IndividualFactory#addIndividualStateListener(org.opt4j * .core.IndividualStateListener) */ @Override public void addIndividualStateListener(IndividualStateListener listener) { individualStateListeners.add(listener); } /* * (non-Javadoc) * * @see org.opt4j.core.IndividualFactory#create() */ @Override public Individual create() { Individual individual = individualProvider.get(); individual.setIndividualStatusListeners(individualStateListeners); Genotype genotype = creator.create(); individual.setGenotype(genotype); return individual; } /* * (non-Javadoc) * * @see * org.opt4j.core.IndividualFactory#create(org.opt4j.core.problem.Genotype) */ @Override public Individual create(Genotype genotype) { Individual individual = individualProvider.get(); individual.setIndividualStatusListeners(individualStateListeners); individual.setGenotype(genotype); return individual; } /* * (non-Javadoc) * * @see * org.opt4j.core.IndividualFactory#removeIndividualStateListener(org.opt4j * .core.IndividualStateListener) */ @Override public void removeIndividualStateListener(IndividualStateListener listener) { individualStateListeners.remove(listener); } }
mod doc; mod error; mod find; mod kmp; use std::fs; use std::string::ToString; fn main(){ //Parsing json files with kmp let pattern = "full_name"; let kmp = kmp::KMP::new(pattern); kmp.request_list("../../get_json/get_json/json.txt"); // introduces a list of Repository let list = fs::read_to_string("../list.txt").unwrap(); let mut cnt=1; let repositories_sum=8433; while &cnt<=&repositories_sum { //Get a list of mirror warehouse software based on system version //This is used to determine if the software is present in the system let sys_version=vec!["20.03-LTS","20.03-LTS-SP1","20.03-LTS-SP2","20.03-LTS-SP3","20.09","21.03","21.09","22.03-LTS"]; let mut V:Vec<String>=Vec::new(); for index in &sys_version{ let mut s:String="../version_package/openEuler-".to_string(); s.push_str(&index); s.push_str(".txt"); V.push(s); } let mut v:Vec<String>=Vec::new(); for index in V{ let s=fs::read_to_string(&index[..]).unwrap(); v.push(s) } //// Find the name of the repository based on the list let name:String=find::find_cname(&list,cnt); //Determine if you need to generate a purl let mut tag:bool=false; for index in &v { if find::find_exist(&name,&index) {tag=true;break;} } //If the tag is false, then do not download the package if tag==false {println!("{} no produce",cnt);cnt=cnt+1;continue;} //Download the software package println!("The {} repository is being downloaded and is named {}",cnt,&name); doc::git2_download(&name); //Build spec file path let name_str:&str=&name[..]; let mut path=String::from("/root/Cangku/"); path.push_str(&name_str); path.push_str("/"); path.push_str("*.spec"); println!("A purl is being generated for the {} repository, named {}",cnt,&name); let Path2:String=find::find_path(&path); //Get the contents of the spec file let contents = fs::read_to_string(&Path2[..]).unwrap(); let spec :&str =&contents[..]; //find soft package version let version :String="Version:".to_string(); let mut Version :String=Default::default(); Version=find::match_version(&contents,&version); //check verison println!("--{}--",Version); error::check_version(&Version); //write purl let mut idx:usize = 0; for index in &v { if find::find_exist(&name,&index) {doc::write_purl(&name,&Version,&sys_version[idx].to_string());} idx=idx+1; } //Delete soft package println!("The {} warehouse is being deleted, the repository name is {}",cnt,&name); doc::rmdir(&name); println!("Generation complete{}",cnt); cnt=cnt+1; } println!("purl has all been generated!"); }
package hxw.security.core.security; import java.io.IOException; import java.io.Serializable; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.springframework.security.core.AuthenticationException; import org.springframework.security.web.AuthenticationEntryPoint; import org.springframework.stereotype.Component; @Component public class JwtAuthenticationEntryPoint implements AuthenticationEntryPoint, Serializable { private static final long serialVersionUID = 5761702772397411972L; /** * When a user tries to access a secured REST resource without providing any credentials, this method is called to respond with HTTP status code 401. */ @Override public void commence( HttpServletRequest request, HttpServletResponse response, AuthenticationException authException) throws IOException, ServletException { response.sendError(HttpServletResponse.SC_UNAUTHORIZED, null == authException ? "Unauthorized" : authException.getMessage()); } }
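For context, a minimal sketch of how an entry point like this is typically registered with Spring Security follows. The configuration class, and the decision to disable CSRF and use a stateless session policy, are illustrative assumptions and not part of the original project; only JwtAuthenticationEntryPoint comes from the code above.

package hxw.security.core.security;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;

@Configuration
@EnableWebSecurity
public class SecurityConfig extends WebSecurityConfigurerAdapter {

    @Autowired
    private JwtAuthenticationEntryPoint jwtAuthenticationEntryPoint;

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.csrf().disable()
            // unauthenticated requests are answered by the entry point with a 401
            .exceptionHandling().authenticationEntryPoint(jwtAuthenticationEntryPoint)
            .and()
            // no server-side session: each request is expected to carry its own (JWT) credentials
            .sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS)
            .and()
            .authorizeRequests().anyRequest().authenticated();
    }
}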
import cn from 'classnames' import Link from 'next/link' type Props = { title: string src: string slug?: string } const CoverImage = ({ title, src, slug }: Props) => { const image = ( <img src={src} alt={`Cover Image for ${title}`} className={cn('shadow-sm', { 'hover:shadow-lg transition-shadow duration-200': slug, })} /> ) return ( <div className="sm:mx-0"> {slug ? ( <Link as={`/${slug}`} href="/[...slug]"> <a aria-label={title}>{image}</a> </Link> ) : ( image )} </div> ) } export default CoverImage
def parse_split_logs(filename, num_days=2): with tempfile.NamedTemporaryFile(mode='r+') as _temp: subprocess.call(['appcfg.py', '-n', str(num_days), '--include_all', '--severity=0', 'request_logs', '.', _temp.name]) _temp.seek(0) with open(filename, 'w') as _parsed: writer = csv.DictWriter(_parsed, fieldnames=['time', 'tag', 'reader'], extrasaction='ignore', quoting=csv.QUOTE_ALL) writer.writeheader() for line in _temp: match = re.search("Creating split(?:.*)(\[\{.*\}\])", line) if match is None: continue splits = ast.literal_eval(match.groups()[0]) for split in splits: writer.writerow(split)
# ------------------------------------------------------------
# krydort/character.py
#
# This is a Witcher 3 TRPG character.
#
# This file is part of krydort.
#
# (C) Copyright 2020
# <NAME>, <EMAIL>
# headcode.space e.U., https://www.headcode.space
# ------------------------------------------------------------

"""This module represents a character in Witcher 3 Tabletop RPG."""

import json


class Attributes(object):
    """The attributes of a character."""

    def __init__(self):
        self.set(INT=0, REF=0, DEX=0, BODY=0, SPD=0, EMP=0, CRA=0, WILL=0, LUCK=0)

    def __str__(self):
        """Stringify this object"""
        return json.dumps(self.debug_dict())

    def debug_dict(self):
        """Turn this object to a dictionary for debug purpose."""
        return {'INT': self.INT, 'REF': self.REF, 'DEX': self.DEX, 'BODY': self.BODY,
                'SPD': self.SPD, 'EMP': self.EMP, 'CRA': self.CRA, 'WILL': self.WILL,
                'LUCK': self.LUCK}

    @property
    def names(self) -> [str]:
        """Return list of valid attribute names."""
        return ['INT', 'REF', 'DEX', 'BODY', 'SPD', 'EMP', 'CRA', 'WILL', 'LUCK']

    def set(self, **kwargs) -> None:
        """Sets attributes at once."""
        for kw in kwargs:
            if kw in self.names and isinstance(kwargs[kw], int):
                self.__setattr__(kw, int(kwargs[kw]))


class Skills(object):
    """Skill set of a specific attribute."""

    skill_map = {
        'Awareness': 'INT',
        'Business': 'INT',
        'Deduction': 'INT',
        'Education': 'INT',
        'CommonSpeech': 'INT',
        'ElderSpeech': 'INT',
        'Dwarven': 'INT',
        'MonsterLore': 'INT',
        'SocialEtiquette': 'INT',
        'Streetwise': 'INT',
        'Tactics': 'INT',
        'Teaching': 'INT',
        'WildernessSurvival': 'INT',
        'Brawling': 'REF',
        'DodgingEscape': 'REF',
        'Melee': 'REF',
        'Riding': 'REF',
        'Sailing': 'REF',
        'SmallBlades': 'REF',
        'StaffSpear': 'REF',
        'Swordsmanship': 'REF',
        'Archery': 'DEX',
        'Athletics': 'DEX',
        'Crossbow': 'DEX',
        'SleightOfHand': 'DEX',
        'Stealth': 'DEX',
        'Physique': 'BODY',
        'Endurance': 'BODY',
        'Charisma': 'EMP',
        'Deceit': 'EMP',
        'FineArts': 'EMP',
        'Gambling': 'EMP',
        'GroomingAndStyle': 'EMP',
        'HumanPerception': 'EMP',
        'Leadership': 'EMP',
        'Persuasion': 'EMP',
        'Performance': 'EMP',
        'Seduction': 'EMP',
        'Alchemy': 'CRA',
        'Crafting': 'CRA',
        'Disguise': 'CRA',
        'FirstAid': 'CRA',
        'Forgery': 'CRA',
        'PickLock': 'CRA',
        'TrapCrafting': 'CRA',
        'Courage': 'WILL',
        'HexWeaving': 'WILL',
        'Intimidation': 'WILL',
        'SpellCasting': 'WILL',
        'ResistMagic': 'WILL',
        'ResistCoercion': 'WILL',
        'RitualCrafting': 'WILL'
    }

    def __init__(self, attribute: str, names: [str]):
        self._attribute = attribute
        self._names = names
        for n in self.names:
            self.__setattr__(n, 0)

    def __str__(self):
        """Stringify this object"""
        return json.dumps(self.debug_dict())

    def debug_dict(self):
        """Turn this object to a dictionary for debug purpose."""
        skills = {}
        for n in self.names:
            skills[n] = int(getattr(self, n))
        return {'attribute': self.attribute, 'skills': skills}

    @property
    def attribute(self) -> str:
        return self._attribute

    @property
    def names(self) -> [str]:
        return self._names

    @classmethod
    def skills_by_attribute(cls, attribute: str) -> [str]:
        """Returns all known skills by an attribute name."""
        return [skill for skill in cls.skill_map.keys() if cls.skill_map[skill] == attribute]

    def set(self, **kwargs) -> None:
        """Sets skills at once."""
        for kw in kwargs:
            if kw in self.names and isinstance(kwargs[kw], int):
                self.__setattr__(kw, int(kwargs[kw]))


class Character(object):
    """This is a Witcher 3 Tabletop RPG character."""

    def __init__(self, name: str = '<unnamed>'):
        """Create a character."""
        self._name = name
        self._attributes = Attributes()
        self._skills = {}
        for a in self._attributes.names:
            self._skills[a] = Skills(a, Skills.skills_by_attribute(a))

    def __str__(self):
        """Stringify this object."""
        return json.dumps(self.debug_dict())

    def debug_dict(self):
        """Turn this object to a dictionary for debug purpose."""
        skills_dict = {}
        for s in self.skills:
            skills_dict[s] = self.skills[s].debug_dict()
        return {'name': self.name, 'attributes': self.attributes.debug_dict(), 'skills': skills_dict}

    @property
    def name(self):
        return self._name

    @property
    def attributes(self):
        return self._attributes

    @property
    def skills(self):
        return self._skills

    def values(self, name: str) -> (int, int):
        """Returns first the attribute and then the skill value of the skill by the given name."""
        if name not in Skills.skill_map:
            raise ValueError(f'Unknown skill: "{name}".')
        a = Skills.skill_map[name]
        return self.attributes.__getattribute__(a), self._skills[a].__getattribute__(name)
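A small usage sketch for the character model above; the name, attribute scores, and skill values are made up for illustration:

# Illustrative only: create a character, assign a couple of attributes and
# skills via the keyword setters, then read back an (attribute, skill) pair.
krydort = Character('Krydort Wovlen')
krydort.attributes.set(INT=6, REF=4, WILL=5, LUCK=3)
krydort.skills['INT'].set(Education=3, Streetwise=2)
krydort.skills['WILL'].set(Courage=2)

print(krydort.values('Education'))   # -> (6, 3)
print(krydort)                       # JSON dump via debug_dict()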
// AO&IW() updates the offset and/or index width of a window after addition/removal of elements
// of memory in that window's variable. The offset is affected if the new/deleted memory is below
// the current window; the index width is affected if it is in the window.

void adjustOffsetAndIW(ccBool sameWindow, ccInt *offset, ccInt *indexWidth,
                       ccInt insertionOffset, ccInt newIndices)
{
    if ((sameWindow) && (insertionOffset >= *offset)) {
        *indexWidth += newIndices;
        if (*indexWidth < insertionOffset - (*offset)) {
            *indexWidth = insertionOffset - (*offset);
        }
    }
    else if (insertionOffset <= *offset) {
        *offset += newIndices;
        if (insertionOffset > *offset) {
            *indexWidth -= insertionOffset - (*offset);
            if (*indexWidth < 0) *indexWidth = 0;
            *offset = insertionOffset;
        }
    }
}
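A short trace of the function above, runnable alongside it. The ccInt/ccBool typedefs here are assumptions standing in for the project's real definitions:

#include <stdio.h>

/* Assumed stand-ins for the project's own typedefs -- illustration only. */
typedef int ccInt;
typedef unsigned char ccBool;

/* (adjustOffsetAndIW as defined above) */

int main(void)
{
    /* Window starting at offset 5 covering 10 indices; 3 indices inserted at
       offset 7, i.e. inside the window: the width grows, the offset stays put. */
    ccInt offset = 5, indexWidth = 10;
    adjustOffsetAndIW(1, &offset, &indexWidth, 7, 3);
    printf("offset=%d indexWidth=%d\n", offset, indexWidth);  /* offset=5 indexWidth=13 */
    return 0;
}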
#include "Engine/Actor/Actor.hpp"
#include "Engine/Scene/Scene.hpp"

#include <algorithm>

namespace creamyLib::engine
{
    Actor::Actor(const ActorConfiguration& config)
        : EngineObject(config), transform_(TransformComponent(math::Vector3(0, 0, 0), { {this}, false }))
    {
        getOwner()->addActor(this);
    }

    Actor::~Actor()
    {
        getOwner()->removeActor(this);
        components_.clear();
    }

    void Actor::internalUpdate(float deltaTime)
    {
        updateComponents(deltaTime);
        update(deltaTime);
    }

    void Actor::updateComponents(float deltaTime)
    {
        for (const auto& component : components_)
        {
            component->update(deltaTime);
        }
    }

    void Actor::addComponent(Component* component)
    {
        if (!component) return;
        components_.emplace_back(component);
    }

    void Actor::removeComponent(Component* component)
    {
        if (!component) return;

        const auto componentIterator = std::find(components_.begin(), components_.end(), component);
        if (componentIterator != components_.end())
        {
            components_.erase(componentIterator);
        }
    }

    TransformComponent& Actor::getTransform()
    {
        return transform_;
    }

    Scene* Actor::getOwner() const
    {
        return dynamic_cast<Scene*>(config_.owner);
    }

    Application* Actor::getApplication() const
    {
        return getOwner()->getApplication();
    }
}
package asatsuki256.germplasm.core.gene;

import static asatsuki256.germplasm.core.GermplasmCore.NBT_PREFIX;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;

import asatsuki256.germplasm.api.gene.TraitType;
import asatsuki256.germplasm.api.gene.unit.IChromosome;
import asatsuki256.germplasm.api.gene.unit.IGene;
import asatsuki256.germplasm.api.gene.unit.IGermplasmUnitBase;
import net.minecraft.nbt.NBTBase;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraftforge.common.util.Constants;

public class Chromosome extends GermplasmUnitBase implements IChromosome {

    public static final String GENES_NBT_KEY = NBT_PREFIX + "genes";

    private List<Gene> genes;

    public Chromosome(List<Gene> genes) {
        this.genes = genes;
        this.defaultName = "Chromosome";
    }

    public Chromosome(Gene... genes) {
        this.genes = new ArrayList<Gene>(Arrays.asList(genes));
        this.defaultName = "Chromosome";
    }

    public Chromosome() {
        this.defaultName = "Chromosome";
    }

    public List<IGene> getGenes() {
        List<IGene> iGenes = new ArrayList<IGene>();
        for (Gene gene : genes) {
            iGenes.add((IGene) gene);
        }
        return iGenes;
    }

    public void setGenes(List<IGene> genes) {
        List<Gene> genesTemp = new ArrayList<Gene>();
        for (IGene gene : genes) {
            genesTemp.add((Gene) gene);
        }
        this.genes = genesTemp;
    }

    @Override
    public int analyze(boolean simulated) {
        int analyzed = 0;
        if (!isAnalyzed) {
            if (!simulated) {
                setAnalyzed(true);
            }
            analyzed++;
        }
        for (Gene gene : genes) {
            analyzed += gene.analyze(simulated);
        }
        return analyzed;
    }

    @Override
    public List<IGermplasmUnitBase> getChilds() {
        List<IGermplasmUnitBase> units = new ArrayList<IGermplasmUnitBase>();
        for (Gene gene : this.genes) {
            units.add(gene);
        }
        return units;
    }

    @Override
    public int getTotalStrength(TraitType traitType) {
        int totalStrength = 0;
        for (IGene unit : genes) {
            if (unit.isDominant()) {
                totalStrength += unit.getTotalStrength(traitType);
            }
        }
        return totalStrength;
    }

    @Override
    public NBTTagCompound serializeNBT() {
        NBTTagCompound nbt = super.serializeNBT();

        NBTTagList genes = new NBTTagList();
        for (IGene gene : this.getGenes()) {
            NBTTagCompound geneTag = gene.serializeNBT();
            genes.appendTag(geneTag);
        }
        nbt.setTag(GENES_NBT_KEY, genes);

        return nbt;
    }

    @Override
    public void deserializeNBT(NBTTagCompound nbt) {
        List<Gene> genes = new ArrayList<Gene>();
        if (nbt != null && nbt.hasKey(GENES_NBT_KEY, Constants.NBT.TAG_LIST)) {
            NBTTagList genesTag = nbt.getTagList(GENES_NBT_KEY, Constants.NBT.TAG_COMPOUND);
            for (NBTBase geneBase : genesTag) {
                NBTTagCompound geneNBT = (NBTTagCompound) geneBase;
                Gene gene = new Gene();
                gene.deserializeNBT(geneNBT);
                genes.add(gene);
            }
        }
        this.genes = genes;
    }

    @Override
    public boolean insertGene(IGene gene, Random random) {
        List<IGene> childs = this.getGenes();
        if (childs.isEmpty()) {
            return false;
        }
        int index = random.nextInt(childs.size());
        IGene insertable = childs.get(index);
        List<IGene> newChilds = new ArrayList<>(childs);

        // Replace the gene at the randomly chosen index
        newChilds.set(index, gene);
        this.setGenes(newChilds);
        return true;
    }
}
/**
 * Adjust options for retrieval from parent. I.e. consider minVersion or creationTime as
 * appropriate. Note that there is some looseness to the semantics for versioning
 * that relies on clock synchronization.
 * @param options
 * @return
 */
private InternalRetrievalOptions makeOptionsForNestedRetrieve(InternalRetrievalOptions options) {
    VersionConstraint oldVC;

    options = options.waitMode(WaitMode.GET);
    oldVC = options.getVersionConstraint();
    switch (nsOptions.getVersionMode()) {
    case SEQUENTIAL:
        throw new RuntimeException("Panic: parent not supported/expected for version mode of SEQUENTIAL");
    case SINGLE_VERSION:
        if (debugParent) {
            Log.warning(oldVC);
            Log.warning(options.versionConstraint(oldVC.maxCreationTime(
                    Math.min(oldVC.getMaxCreationTime(), nsProperties.getCreationTime()))).getVersionConstraint());
        }
        return options.versionConstraint(oldVC.maxCreationTime(
                Math.min(oldVC.getMaxCreationTime(), nsProperties.getCreationTime())));
    case CLIENT_SPECIFIED:
    case SYSTEM_TIME_MILLIS:
    case SYSTEM_TIME_NANOS:
        return options.versionConstraint(oldVC.max(
                Math.min(oldVC.getMax(), nsProperties.getMinVersion() - 1)));
    default:
        throw new RuntimeException("Panic");
    }
}
Talking to yourself is often thought of as something only children or crazy people do, but research shows that engaging in a little self-conversation as an adult isn't only completely normal — it's good for you.

Talking to yourself, both in your head and out loud, is quite common, with many people reporting they talk to themselves on an almost-hourly basis. Such self-directed speech is common in children, who often narrate tasks as they perform them. For example, a child learning to tie his shoes may recite, "Over, under, pull it tight. Make a bow, pull it through to do it right."

This self-talk helps children stay on task and it guides their actions so they can master the job at hand. The same is true for adults.

Go ahead and talk to yourself

You may have engaged in a little instructional self-talk when you learned to drive a car, thinking or even saying aloud something like, "Foot on the brake, shift into gear, both hands on the wheel…" Studies show that by actually thinking out loud, we remain focused and are better able to perform the task at hand.

"What happens with self-talk is you stimulate your action, direct your action and evaluate your action," Antonis Hatzigeorgiadis, a University of Thessaly professor who studies self-talk and the psychology of sports performance, told The Wall Street Journal.

Gary Lupyan, a cognitive psychologist at the University of Wisconsin, demonstrated this in a study where volunteers were shown 20 pictures of various objects and then instructed to search for a specific one. Half the participants were told to repeatedly say the item they were looking for aloud, while the others were told to be silent. Those who spoke aloud were able to find the objects 50 to 100 milliseconds faster than the silent volunteers.

"The general take-home point is that language is not just a system of communication, but I'm arguing it can augment perception, augment thinking," Lupyan told Live Science.

Even thinking of cue words can have a profound effect. A study of elite sprinters concluded that runners who speak certain words to themselves, such as "push" during the acceleration phase of a sprint, run faster than those who don't.

Don't say 'I'

Studies show that the pronouns we use when we talk to ourselves also matter. Psychologist Ethan Kross asked two groups of volunteers to give a speech with only five minutes of mental preparation time. To make the task even more stressful, he informed participants that they'd be speaking in front of a panel of experts and their speeches would be videotaped.

Both groups were told that self-talk could help them prepare for stressful situations, but one group was instructed to address themselves as "I" while the other was told to use second- or third-person pronouns like "you" or their own names.

Kross found that the participants who used the second- and third-person pronouns were less stressed and performed significantly better than those who used "I" because they'd created psychological distance. By referring to themselves as an "other," they mentally distanced themselves from the stressful event, enabling themselves to feel less anxious and perform better.

"What we find is that a subtle linguistic shift — shifting from 'I' to your own name — can have really powerful self-regulatory effects," Kross told NPR. "It's almost like you are duping yourself into thinking about you as though you were another person," Kross said.

Go ahead, talk to yourself. (It's good for you!)
Engaging in self-talk can relieve stress and even help you perform better on certain tasks.
import {
  from,
  amount,
  fee,
  firstRound,
  lastRound,
  note,
  genesisID,
  genesisHash,
  group,
  type,
  to,
  closeRemainderTo
} from '../src/transactionFieldValidators';
import {
  VALID_ALGORAND_ADDRESS,
  MAINNET_GENESIS_ID,
  TESTNET_GENESIS_ID,
  BETANET_GENESIS_ID,
  MAINNET_GENESIS_HASH,
  TESTNET_GENESIS_HASH,
  BETANET_GENESIS_HASH
} from '../src/utils/constants';

it('Validates from', () => {
  expect(from(VALID_ALGORAND_ADDRESS)).toEqual(true);
  expect(from('test')).toEqual(false);
  expect(from(true)).toEqual(false);
  expect(from(12345)).toEqual(false);
  expect(from({})).toEqual(false);
});

it('Validates fee', () => {
  expect(fee(Number.MAX_SAFE_INTEGER)).toEqual(true);
  expect(fee(Number.MAX_SAFE_INTEGER + 1)).toEqual(false);
  expect(fee(0)).toEqual(false);
  expect(fee(1000)).toEqual(true);
  expect(fee(-1)).toEqual(false);
  expect(fee({})).toEqual(false);
  expect(fee('a')).toEqual(false);
  expect(fee('12345')).toEqual(false);
});

it('Validates first round', () => {
  expect(firstRound(Number.MAX_SAFE_INTEGER)).toEqual(true);
  expect(firstRound(Number.MAX_SAFE_INTEGER + 1)).toEqual(false);
  expect(firstRound(-1)).toEqual(false);
  expect(firstRound({})).toEqual(false);
  expect(firstRound('a')).toEqual(false);
  expect(firstRound('12345')).toEqual(false);
});

it('Validates last round', () => {
  expect(lastRound(Number.MAX_SAFE_INTEGER)).toEqual(true);
  expect(lastRound(Number.MAX_SAFE_INTEGER + 1)).toEqual(false);
  expect(lastRound(-1)).toEqual(false);
  expect(lastRound({})).toEqual(false);
  expect(lastRound('a')).toEqual(false);
  expect(lastRound('12345')).toEqual(false);
});

it('Validates txn note field', () => {
  expect(note('a'.repeat(1001))).toEqual(false);
  expect(note(12345)).toEqual(false);
});

it('Validates genesisID field', () => {
  expect(genesisID(MAINNET_GENESIS_ID)).toEqual(true);
  expect(genesisID(TESTNET_GENESIS_ID)).toEqual(true);
  expect(genesisID(BETANET_GENESIS_ID)).toEqual(true);
});

it('Validates genesis hash field', () => {
  expect(genesisHash(MAINNET_GENESIS_HASH)).toEqual(true);
  expect(genesisHash(TESTNET_GENESIS_HASH)).toEqual(true);
  expect(genesisHash(BETANET_GENESIS_HASH)).toEqual(true);
});

it('Validates group', () => {
  expect(group(MAINNET_GENESIS_HASH)).toEqual(true);
});

it('Validates transaction type', () => {
  const TYPE_SEND = 'pay';
  const TYPE_KEY_REG = 'keyreg';
  const TYPE_ASSET_CONFIG = 'acfg';
  const TYPE_TRANSFER = 'axfer';
  const TYPE_FREEZE = 'afrz';
  const TYPE_FALSE = 'foo';

  expect(type(TYPE_SEND)).toEqual(true);
  expect(type(TYPE_KEY_REG)).toEqual(true);
  expect(type(TYPE_ASSET_CONFIG)).toEqual(true);
  expect(type(TYPE_TRANSFER)).toEqual(true);
  expect(type(TYPE_FREEZE)).toEqual(true);
  expect(type(TYPE_FALSE)).toEqual(false);
});

it('Validates transaction to', () => {
  expect(to(VALID_ALGORAND_ADDRESS)).toEqual(true);
  expect(to('test')).toEqual(false);
  expect(to(true)).toEqual(false);
  expect(to(12345)).toEqual(false);
  expect(to({})).toEqual(false);
});

it('Validates transaction amt', () => {
  expect(amount(Number.MAX_SAFE_INTEGER)).toEqual(true);
  expect(amount(Number.MAX_SAFE_INTEGER + 1)).toEqual(false);
  expect(amount(0)).toEqual(true);
  expect(amount(-1)).toEqual(false);
  expect(amount({})).toEqual(false);
  expect(amount('a')).toEqual(false);
  expect(amount('12345')).toEqual(false);
});

it('Validates transaction closeRemainderTo', () => {
  expect(closeRemainderTo(VALID_ALGORAND_ADDRESS)).toEqual(true);
  expect(closeRemainderTo('test')).toEqual(false);
  expect(closeRemainderTo(true)).toEqual(false);
  expect(closeRemainderTo(12345)).toEqual(false);
  expect(closeRemainderTo({})).toEqual(false);
});
// Helper class that calls cplus_demangle when needed and takes care of freeing
// the result.

class Lazy_demangler
{
 public:
  Lazy_demangler(const char* symbol, int options)
    : symbol_(symbol), options_(options), demangled_(NULL), did_demangle_(false)
  { }

  ~Lazy_demangler()
  { free(this->demangled_); }

  inline char*
  get();

 private:
  // The symbol to demangle.
  const char* symbol_;
  // Demangling options.
  const int options_;
  // The cached demangled name, or NULL if demangling has not happened yet or failed.
  char* demangled_;
  // Whether cplus_demangle has already been called.
  bool did_demangle_;
};
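The class above only declares get(); a minimal sketch of how it is typically implemented (memoize a single cplus_demangle call) might look like the following. The exact body in the original source may differ:

// Sketch only: demangle on first use and cache the result. Assumes the
// libiberty declaration char* cplus_demangle(const char*, int) is in scope
// via the demangle.h header.
inline char*
Lazy_demangler::get()
{
  if (!this->did_demangle_)
    {
      this->demangled_ = cplus_demangle(this->symbol_, this->options_);
      this->did_demangle_ = true;
    }
  return this->demangled_;
}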
package com.medetzhakupov.githublisting.ui;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;

import com.medetzhakupov.githublisting.ui.detail.DetailActivity;

import javax.inject.Inject;

/**
 * Created by <NAME>.
 */
public class Navigator {

    private final Activity activity;

    @Inject
    public Navigator(Activity activity) {
        this.activity = activity;
    }

    public void navigateToDetails(String username) {
        Intent intent = new Intent(activity, DetailActivity.class);
        intent.putExtra(DetailActivity.USERNAME, username);
        activity.startActivity(intent);
    }
}
Concerned fans took to the internet when Howard Stern unexpectedly canceled his radio show to take a "personal day" Wednesday.

The SiriusXM host, who fans claim has never taken an unexpected day off, mysteriously went off the air this week without explanation.

"I have listened to the #HowardStern show since the 80s… NEVER canceled a show before! I am very concerned about the reason," one Twitter user wrote.

Another fan commented, "Peace and love to my hero @HowardStern, who has missed a live show for the first time ever this morning. I hope everything is ok."

Stern's absence, which was announced as a "personal day" on air Wednesday, sparked multiple Reddit threads and conspiracy theories about what could have caused the unexpected cancellation. The rumors ranged from marital troubles with wife Beth Stern to the passing of one of his parents to legal issues with Sirius after he was sued for airing a woman's confidential call with the IRS.

Many expressed concerns about 63-year-old Stern's health. One Reddit user pointed out, "Just this year Howard said the only way he'd cancel a show is if a parent died," but it was also noted by someone else that Stern poked fun at his 93-year-old father on Monday's show and wouldn't do that if "he was on his deathbed."

Multiple fans also mentioned that Stern was in a sour mood on Monday's show, suggesting the two things could be related. He is expected to return to the airwaves this Monday.

"The Howard Stern Show" has yet to make an announcement about the reasoning behind Stern's absence and did not respond to our request for comment.
Synthesis of small molecules with high scaffold diversity: exploitation of metathesis cascades in combination with inter- and intramolecular Diels-Alder reactions.

Our knowledge of the biological relevance of regions of chemical space is shaped, in large part, by the synthetic accessibility of small molecules. Historically, however, chemists have explored chemical space in an exceptionally uneven and unsystematic way. We have previously demonstrated that metathesis cascade chemistry may be harnessed to yield small molecule collections with high scaffold diversity. Here, we describe the extent to which inter- and intramolecular Diels-Alder reactions, when used in conjunction with metathesis cascades, can extend the range of molecular scaffolds that are accessible. A range of metathesis substrates was prepared from combinations of two or three building blocks. Metathesis cascades were exploited to "reprogram" the molecular scaffolds. In many cases, the metathesis products were 1,3-dienes, which were potential substrates for either inter- or intramolecular Diels-Alder reactions. The synthesis and functionalisation of the products were often facilitated by fluorous tagging, for example by using a "safety-catch" linker that we have developed. It was demonstrated that, in certain cases, Diels-Alder reactions could extend the range of molecular scaffolds that may be prepared by using metathesis cascade reactions.
class rotor_set(object):
    # Holds rotors and deals with how they rotate

    def __init__(self, rotors, reflector):
        from rotor import rotor

        # Save arguments
        self.rotors = rotors
        self.rotors_reversed = self.rotors[::-1]
        self.reflector = reflector

    def encode_char(self, character):
        # Input for rotor encoding
        # 'True': Always advance the first rotor
        input = (character, True)

        for i in range(len(self.rotors)):
            rotor = self.rotors[i]

            # If this is a middle rotor, do the double step
            # See http://users.telenet.be/d.rijmenants/en/enigmatech.htm#steppingmechanism
            if (i > 0 and i < len(self.rotors) - 1) and (rotor.position == rotor.advance_position):
                input = rotor.right_to_left((input[0], True))
            else:
                input = rotor.right_to_left(input)

        # Go through the reflector
        output = self.reflector.reflect(input[0])

        # Back through the rotors in reverse order
        # and reverse direction
        for rotor in self.rotors_reversed:
            output = rotor.left_to_right(output)

        return output

    # Wraps the encode_char function in a
    # loop to go through a string
    def encode(self, encode_string):
        output = ''
        for c in encode_string:
            output += self.encode_char(c)
        return output

    # Resets all the rotors
    def reset(self):
        for rotor in self.rotors:
            rotor.reset()

    def __repr__(self):
        return ('Rotor Set with rotors ' + self.rotors.__repr__() + ' ' +
                self.reflector.__repr__())

    def __str__(self):
        return self.rotors.__repr__()
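A usage sketch for the rotor set above. The rotor and reflector constructors are not shown in this file, so the objects below are stand-ins for whatever the accompanying rotor module provides:

# Illustration only: 'right_rotor', 'middle_rotor', 'left_rotor' and 'reflector_b'
# are assumed to be instances of the project's rotor and reflector classes,
# built elsewhere with their own wirings and start positions.
machine = rotor_set([right_rotor, middle_rotor, left_rotor], reflector_b)
ciphertext = machine.encode('HELLOWORLD')

machine.reset()                          # rewind the rotors to their start positions
plaintext = machine.encode(ciphertext)   # Enigma encryption with a reflector is reciprocal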
export function keyBy<T extends { [key: string]: any }>(
  objects: T[],
  field: keyof T
): { [key: string]: T } {
  return objects.reduce((keyedObjects, object) => {
    if (object[field]) keyedObjects[object[field]] = object;
    return keyedObjects;
  }, {} as { [key: string]: T });
}
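A quick usage example for keyBy; the user records here are invented for illustration:

// Illustrative data only.
const users = [
  { id: 'u1', name: 'Ada' },
  { id: 'u2', name: 'Grace' },
];

const usersById = keyBy(users, 'id');
// usersById.u1.name === 'Ada'; entries whose key field is falsy are silently dropped.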
/* HSV to RGB conversion function with only integer
 * math */
void hsvtorgb(unsigned char *r, unsigned char *g, unsigned char *b,
              unsigned char h, unsigned char s, unsigned char v)
{
    unsigned char region, fpart, p, q, t;

    if (s == 0) {
        *r = *g = *b = v;
        return;
    }

    region = h / 43;
    fpart = (h - (region * 43)) * 6;

    p = (v * (255 - s)) >> 8;
    q = (v * (255 - ((s * fpart) >> 8))) >> 8;
    t = (v * (255 - ((s * (255 - fpart)) >> 8))) >> 8;

    switch (region) {
    case 0:
        *r = v; *g = t; *b = p;
        break;
    case 1:
        *r = q; *g = v; *b = p;
        break;
    case 2:
        *r = p; *g = v; *b = t;
        break;
    case 3:
        *r = p; *g = q; *b = v;
        break;
    case 4:
        *r = t; *g = p; *b = v;
        break;
    default:
        *r = v; *g = p; *b = q;
        break;
    }

    return;
}
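A small check of the converter above. Note the hue scale is 0-255 (not 0-360), so h = 0 is pure red; the expected output below is worked out by hand from the integer formulas:

#include <stdio.h>

/* Illustration only: exercises hsvtorgb (defined above) on a fully
   saturated, full-value red. With h = 0 the region is 0, so r = v = 255,
   g = t = 0 and b = p = 0. */
int main(void)
{
    unsigned char r, g, b;
    hsvtorgb(&r, &g, &b, 0, 255, 255);
    printf("rgb = (%u, %u, %u)\n", r, g, b);   /* prints: rgb = (255, 0, 0) */
    return 0;
}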
// NewGitDiffer returns a Differ that determines differences using git.
func NewGitDiffer(opts ...GitDifferOption) Differ {
    g := &git{
        useMergeCommit: false,
        baseBranch:     "origin/master",
    }
    for _, opt := range opts {
        opt(g)
    }
    return &differ{
        diff: g.diff,
    }
}
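The option type itself is not shown in this excerpt. Assuming the usual functional-options shape, where a GitDifferOption is a function that mutates the git struct, an option and a call site might look like this; the names here are hypothetical, not necessarily the package's actual API:

// Sketch under the assumption that GitDifferOption is defined as func(*git).
// WithBaseBranch is a hypothetical option; the real package may name it differently.
func WithBaseBranch(branch string) GitDifferOption {
    return func(g *git) {
        g.baseBranch = branch
    }
}

// Usage: compare against origin/main instead of the default origin/master.
var d Differ = NewGitDiffer(WithBaseBranch("origin/main"))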
/**
 * @author Shogo Akiyama
 * Solved on 05/24/2020
 *
 * 48. Rotate Image
 * https://leetcode.com/problems/rotate-image/
 * Difficulty: Medium
 *
 * Approach: Two Reverses
 * Runtime: 0 ms, faster than 100.00% of Java online submissions for Rotate Image.
 * Memory Usage: 39.4 MB, less than 5.77% of Java online submissions for Rotate Image.
 *
 * Time Complexity: O(n^2)
 * Space Complexity: O(1)
 * Where n is the side length of the matrix
 *
 * @see MatrixTest#testRotateImage()
 */
public class RotateImage2 {

    public void rotate(int[][] matrix) {
        int n = matrix.length;

        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n / 2; j++) {
                int temp = matrix[i][j];
                matrix[i][j] = matrix[i][n - 1 - j];
                matrix[i][n - 1 - j] = temp;
            }
        }

        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n - i; j++) {
                int temp = matrix[i][j];
                matrix[i][j] = matrix[n - 1 - j][n - 1 - i];
                matrix[n - 1 - j][n - 1 - i] = temp;
            }
        }
    }
}
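A quick hand-checkable example of the two-reverse rotation above (first a horizontal flip of each row, then a reflection across the anti-diagonal), using a 3x3 matrix; the demo class name is just for illustration:

// Illustration only: rotating a 3x3 matrix 90 degrees clockwise.
public class RotateImage2Demo {
    public static void main(String[] args) {
        int[][] m = {
            {1, 2, 3},
            {4, 5, 6},
            {7, 8, 9}
        };
        new RotateImage2().rotate(m);
        // m is now:
        // {7, 4, 1}
        // {8, 5, 2}
        // {9, 6, 3}
        System.out.println(java.util.Arrays.deepToString(m));
    }
}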