import android.opengl.GLES20;

import java.util.ArrayList;
import java.util.Arrays;

/**
 * Notice: This class is written with the intention of giving the reader insight into how these
 * objects should be created. Thus, this implementation might not be suitable for some situations
 * where memory footprint or performance is crucial.
 * <p/>
 *
 * @author Andreas Nilsson
 */
public class GlObjectFactory {

    // Index in array for each coordinate
    private static final int X = 0;
    private static final int Y = 1;
    private static final int Z = 2;
    private static final int NX = 5;
    private static final int NY = 6;
    private static final int NZ = 7;

    public static GlObject createCube(float width, float height, float depth,
                                      boolean useUVs, boolean useNormals) {
        // CUBE
        //    v6----- v5
        //   /|      /|
        //  v1------v0|
        //  | |     | |
        //  | |v7---|-|v4
        //  |/      |/
        //  v2------v3

        // Layout per vertex: {x, y, z, u, v, nx, ny, nz}. The u/v stored here are
        // placeholders; the literal UVs below are used instead.
        final float[] v0 = {width * 0.5f, height * 0.5f, depth * 0.5f, 1, 1, 1 / 3f, 1 / 3f, 1 / 3f};
        final float[] v1 = {-width * 0.5f, height * 0.5f, depth * 0.5f, 0, 1, -1 / 3f, 1 / 3f, 1 / 3f};
        final float[] v2 = {-width * 0.5f, -height * 0.5f, depth * 0.5f, 0, 0, -1 / 3f, -1 / 3f, 1 / 3f};
        final float[] v3 = {width * 0.5f, -height * 0.5f, depth * 0.5f, 1, 0, 1 / 3f, -1 / 3f, 1 / 3f};
        final float[] v4 = {width * 0.5f, -height * 0.5f, -depth * 0.5f, 1, 0, 1 / 3f, -1 / 3f, -1 / 3f};
        final float[] v5 = {width * 0.5f, height * 0.5f, -depth * 0.5f, 1, 1, 1 / 3f, 1 / 3f, -1 / 3f};
        final float[] v6 = {-width * 0.5f, height * 0.5f, -depth * 0.5f, 0, 1, -1 / 3f, 1 / 3f, -1 / 3f};
        final float[] v7 = {-width * 0.5f, -height * 0.5f, -depth * 0.5f, 0, 0, -1 / 3f, -1 / 3f, -1 / 3f};

        float[] allVertices = {
                // FRONT
                // v0-v1-v2
                v0[X], v0[Y], v0[Z], 1f, 0f, v0[NX], v0[NY], v0[NZ],
                v1[X], v1[Y], v1[Z], 0f, 0f, v1[NX], v1[NY], v1[NZ],
                v2[X], v2[Y], v2[Z], 0f, 1f, v2[NX], v2[NY], v2[NZ],
                // v2-v3-v0
                v2[X], v2[Y], v2[Z], 0f, 1f, v2[NX], v2[NY], v2[NZ],
                v3[X], v3[Y], v3[Z], 1f, 1f, v3[NX], v3[NY], v3[NZ],
                v0[X], v0[Y], v0[Z], 1f, 0f, v0[NX], v0[NY], v0[NZ],
                // RIGHT
                // v0-v3-v4
                v0[X], v0[Y], v0[Z], 0f, 0f, v0[NX], v0[NY], v0[NZ],
                v3[X], v3[Y], v3[Z], 0f, 1f, v3[NX], v3[NY], v3[NZ],
                v4[X], v4[Y], v4[Z], 1f, 1f, v4[NX], v4[NY], v4[NZ],
                // v4-v5-v0
                v4[X], v4[Y], v4[Z], 1f, 1f, v4[NX], v4[NY], v4[NZ],
                v5[X], v5[Y], v5[Z], 1f, 0f, v5[NX], v5[NY], v5[NZ],
                v0[X], v0[Y], v0[Z], 0f, 0f, v0[NX], v0[NY], v0[NZ],
                // LEFT
                // v1-v6-v7
                v1[X], v1[Y], v1[Z], 1f, 0f, v1[NX], v1[NY], v1[NZ],
                v6[X], v6[Y], v6[Z], 0f, 0f, v6[NX], v6[NY], v6[NZ],
                v7[X], v7[Y], v7[Z], 0f, 1f, v7[NX], v7[NY], v7[NZ],
                // v7-v2-v1
                v7[X], v7[Y], v7[Z], 0f, 1f, v7[NX], v7[NY], v7[NZ],
                v2[X], v2[Y], v2[Z], 1f, 1f, v2[NX], v2[NY], v2[NZ],
                v1[X], v1[Y], v1[Z], 1f, 0f, v1[NX], v1[NY], v1[NZ],
                // TOP
                // v0-v5-v6
                v0[X], v0[Y], v0[Z], 1f, 1f, v0[NX], v0[NY], v0[NZ],
                v5[X], v5[Y], v5[Z], 1f, 0f, v5[NX], v5[NY], v5[NZ],
                v6[X], v6[Y], v6[Z], 0f, 0f, v6[NX], v6[NY], v6[NZ],
                // v6-v1-v0
                v6[X], v6[Y], v6[Z], 0f, 0f, v6[NX], v6[NY], v6[NZ],
                v1[X], v1[Y], v1[Z], 0f, 1f, v1[NX], v1[NY], v1[NZ],
                v0[X], v0[Y], v0[Z], 1f, 1f, v0[NX], v0[NY], v0[NZ],
                // BOTTOM
                // v7-v4-v3
                v7[X], v7[Y], v7[Z], 0f, 1f, v7[NX], v7[NY], v7[NZ],
                v4[X], v4[Y], v4[Z], 1f, 1f, v4[NX], v4[NY], v4[NZ],
                v3[X], v3[Y], v3[Z], 1f, 0f, v3[NX], v3[NY], v3[NZ],
                // v3-v2-v7
                v3[X], v3[Y], v3[Z], 1f, 0f, v3[NX], v3[NY], v3[NZ],
                v2[X], v2[Y], v2[Z], 0f, 0f, v2[NX], v2[NY], v2[NZ],
                v7[X], v7[Y], v7[Z], 0f, 1f, v7[NX], v7[NY], v7[NZ],
                // BACK
                // v4-v7-v6
                v4[X], v4[Y], v4[Z], 0f, 1f, v4[NX], v4[NY], v4[NZ],
                v7[X], v7[Y], v7[Z], 1f, 1f, v7[NX], v7[NY], v7[NZ],
                v6[X], v6[Y], v6[Z], 1f, 0f, v6[NX], v6[NY], v6[NZ],
                // v6-v5-v4
                v6[X], v6[Y], v6[Z], 1f, 0f, v6[NX], v6[NY], v6[NZ],
                v5[X], v5[Y], v5[Z], 0f, 0f, v5[NX], v5[NY], v5[NZ],
                v4[X], v4[Y], v4[Z], 0f, 1f, v4[NX], v4[NY], v4[NZ],
        };

        return new GlObject("Cube, soft shaded", getVertexType(useUVs, useNormals), allVertices);
    }

    public static GlObject createCubeWithFlatNormals(float width, float height, float depth,
                                                     boolean useUVs, boolean useNormals) {
        // CUBE
        //    v6----- v5
        //   /|      /|
        //  v1------v0|
        //  | |     | |
        //  | |v7---|-|v4
        //  |/      |/
        //  v2------v3
        final float[] v0 = {width * 0.5f, height * 0.5f, depth * 0.5f};
        final float[] v1 = {-width * 0.5f, height * 0.5f, depth * 0.5f};
        final float[] v2 = {-width * 0.5f, -height * 0.5f, depth * 0.5f};
        final float[] v3 = {width * 0.5f, -height * 0.5f, depth * 0.5f};
        final float[] v4 = {width * 0.5f, -height * 0.5f, -depth * 0.5f};
        final float[] v5 = {width * 0.5f, height * 0.5f, -depth * 0.5f};
        final float[] v6 = {-width * 0.5f, height * 0.5f, -depth * 0.5f};
        final float[] v7 = {-width * 0.5f, -height * 0.5f, -depth * 0.5f};

        float[] allVertices = {
                // FRONT
                // v0-v1-v2
                v0[X], v0[Y], v0[Z], 1f, 0f, 0, 0, 1,
                v1[X], v1[Y], v1[Z], 0f, 0f, 0, 0, 1,
                v2[X], v2[Y], v2[Z], 0f, 1f, 0, 0, 1,
                // v2-v3-v0
                v2[X], v2[Y], v2[Z], 0f, 1f, 0, 0, 1,
                v3[X], v3[Y], v3[Z], 1f, 1f, 0, 0, 1,
                v0[X], v0[Y], v0[Z], 1f, 0f, 0, 0, 1,
                // RIGHT
                // v0-v3-v4
                v0[X], v0[Y], v0[Z], 0f, 0f, 1, 0, 0,
                v3[X], v3[Y], v3[Z], 0f, 1f, 1, 0, 0,
                v4[X], v4[Y], v4[Z], 1f, 1f, 1, 0, 0,
                // v4-v5-v0
                v4[X], v4[Y], v4[Z], 1f, 1f, 1, 0, 0,
                v5[X], v5[Y], v5[Z], 1f, 0f, 1, 0, 0,
                v0[X], v0[Y], v0[Z], 0f, 0f, 1, 0, 0,
                // LEFT
                // v1-v6-v7
                v1[X], v1[Y], v1[Z], 1f, 0f, -1, 0, 0,
                v6[X], v6[Y], v6[Z], 0f, 0f, -1, 0, 0,
                v7[X], v7[Y], v7[Z], 0f, 1f, -1, 0, 0,
                // v7-v2-v1
                v7[X], v7[Y], v7[Z], 0f, 1f, -1, 0, 0,
                v2[X], v2[Y], v2[Z], 1f, 1f, -1, 0, 0,
                v1[X], v1[Y], v1[Z], 1f, 0f, -1, 0, 0,
                // TOP
                // v0-v5-v6
                v0[X], v0[Y], v0[Z], 1f, 1f, 0, 1, 0,
                v5[X], v5[Y], v5[Z], 1f, 0f, 0, 1, 0,
                v6[X], v6[Y], v6[Z], 0f, 0f, 0, 1, 0,
                // v6-v1-v0
                v6[X], v6[Y], v6[Z], 0f, 0f, 0, 1, 0,
                v1[X], v1[Y], v1[Z], 0f, 1f, 0, 1, 0,
                v0[X], v0[Y], v0[Z], 1f, 1f, 0, 1, 0,
                // BOTTOM
                // v7-v4-v3
                v7[X], v7[Y], v7[Z], 0f, 1f, 0, -1, 0,
                v4[X], v4[Y], v4[Z], 1f, 1f, 0, -1, 0,
                v3[X], v3[Y], v3[Z], 1f, 0f, 0, -1, 0,
                // v3-v2-v7
                v3[X], v3[Y], v3[Z], 1f, 0f, 0, -1, 0,
                v2[X], v2[Y], v2[Z], 0f, 0f, 0, -1, 0,
                v7[X], v7[Y], v7[Z], 0f, 1f, 0, -1, 0,
                // BACK
                // v4-v7-v6
                v4[X], v4[Y], v4[Z], 0f, 1f, 0, 0, -1,
                v7[X], v7[Y], v7[Z], 1f, 1f, 0, 0, -1,
                v6[X], v6[Y], v6[Z], 1f, 0f, 0, 0, -1,
                // v6-v5-v4
                v6[X], v6[Y], v6[Z], 1f, 0f, 0, 0, -1,
                v5[X], v5[Y], v5[Z], 0f, 0f, 0, 0, -1,
                v4[X], v4[Y], v4[Z], 0f, 1f, 0, 0, -1,
        };

        return new GlObject("Cube, flat shaded", getVertexType(useUVs, useNormals), allVertices);
    }

    public static GlObject createSimpleTriangle(final boolean useUVs, final boolean useNormals) {
        // top
        float[] v0 = {
                0.0f, 0.622f, 0.0f, // Position
                .5f, 0,             // UV
                0, 0, 1};           // Normal
        // left
        float[] v1 = {
                -0.5f, -0.311f, 0.0f, // Position
                0, 1,                 // UV
                0, 0, 1};             // Normal
        // right
        float[] v2 = {
                0.5f, -0.311f, 0.0f, // Position
                1, 1,                // UV
                0, 0, 1};            // Normal

        final VertexType vertexType = getVertexType(useUVs, useNormals);
        final float[] allVertices = concatVertices(vertexType, v0, v1, v2);
        return new GlObject("Triangle", vertexType, allVertices);
    }

    public static GlObject createSimpleQuad(final boolean useUVs, final boolean useNormals) {
        // The four vertices of the quad
        final float[] v0 = {-.5f, -.5f, 0, 0, 0, 0, 0, 1};
        final float[] v1 = {.5f, -.5f, 0, 1, 0, 0, 0, 1};
        final float[] v2 = {.5f, .5f, 0, 1, 1, 0, 0, 1};
        final float[] v3 = {-.5f, .5f, 0, 0, 1, 0, 0, 1};

        final VertexType vertexType = getVertexType(useUVs, useNormals);
        final float[] allVertices = concatVertices(vertexType, v0, v1, v2, v0, v2, v3);
        return new GlObject("Quad", vertexType, allVertices);
    }

    public static VertexType getVertexType(final boolean useUVs, final boolean useNormals) {
        if (useNormals) {
            return VertexType.VERTEX_TYPE_POS_UV_NORMAL;
        } else if (useUVs) {
            return VertexType.VERTEX_TYPE_POS_UV;
        } else {
            return VertexType.VERTEX_TYPE_POS;
        }
    }

    public static float[] concatVertices(final VertexType vertexType, float[]... vertexList) {
        // Resize each vertex to the dimension defined by the vertex type
        for (int i = 0; i < vertexList.length; i++) {
            float[] vertex = vertexList[i];
            vertexList[i] = Arrays.copyOfRange(vertex, 0, vertexType.getDimension());
        }
        int totalCoordinates = 0;
        for (float[] array : vertexList) {
            totalCoordinates += array.length;
        }
        float[] outArray = new float[totalCoordinates];
        int counter = 0;
        for (float[] array : vertexList) {
            for (float f : array) {
                outArray[counter++] = f;
            }
        }
        return outArray;
    }

    public static GlObject createTorus(float R, float r, int N, int n,
                                       boolean useUVs, boolean useNormals) {
        int maxn = 1000; // max precision
        n = Math.min(n, maxn - 1);
        N = Math.min(N, maxn - 1);
        float rr = 1.5f * r;
        double dv = 2 * Math.PI / n;
        double dw = 2 * Math.PI / N;
        double v;
        double w = 0;

        ArrayList<Float> vertices = new ArrayList<>();

        // outer loop
        while (w < 2 * Math.PI + dw) {
            v = 0.0f;
            // inner loop
            while (v < 2 * Math.PI + dv) {
                // Vertex
                vertices.add((float) ((R + r * Math.cos(v)) * Math.cos(w)));
                vertices.add((float) ((R + r * Math.cos(v)) * Math.sin(w)));
                vertices.add((float) (r * Math.sin(v)));
                // uv's
                if (useUVs) {
                    float tex_u = (float) (v / (2 * Math.PI));
                    float tex_v = (float) ((w + tex_u) / (2 * Math.PI));
                    vertices.add(tex_u);
                    vertices.add(tex_v);
                }
                // normal
                if (useNormals) {
                    vertices.add((float) ((R + rr * Math.cos(v)) * Math.cos(w) - (R + r * Math.cos(v)) * Math.cos(w)));
                    vertices.add((float) ((R + rr * Math.cos(v)) * Math.sin(w) - (R + r * Math.cos(v)) * Math.sin(w)));
                    vertices.add((float) (rr * Math.sin(v) - r * Math.sin(v)));
                }
                // Vertex
                vertices.add((float) ((R + r * Math.cos(v + dv)) * Math.cos(w + dw)));
                vertices.add((float) ((R + r * Math.cos(v + dv)) * Math.sin(w + dw)));
                vertices.add((float) (r * Math.sin(v + dv)));
                // uv's
                if (useUVs) {
                    float tex_u = (float) (v / (2 * Math.PI));
                    float tex_v = (float) ((w + tex_u) / (2 * Math.PI));
                    vertices.add(tex_u);
                    vertices.add(tex_v);
                }
                // normal
                if (useNormals) {
                    vertices.add((float) ((R + rr * Math.cos(v + dv)) * Math.cos(w + dw) - (R + r * Math.cos(v + dv)) * Math.cos(w + dw)));
                    vertices.add((float) ((R + rr * Math.cos(v + dv)) * Math.sin(w + dw) - (R + r * Math.cos(v + dv)) * Math.sin(w + dw)));
                    vertices.add((float) (rr * Math.sin(v + dv) - r * Math.sin(v + dv)));
                }
                v += dv;
            } // inner loop
            w += dw;
        } // outer loop

        // Copy into native array
        float[] vertexArray = new float[vertices.size()];
        for (int i = 0; i < vertices.size(); ++i) {
            vertexArray[i] = vertices.get(i);
        }

        return new GlObject("Torus", getVertexType(useUVs, useNormals), vertexArray, GLES20.GL_TRIANGLE_STRIP);
    }
}
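`createTorus` walks the angle `w` around the ring and `v` around the tube, emitting pairs of vertices from adjacent rings for a triangle strip. The position follows the standard torus parameterization, and the normal is obtained as the offset from the corresponding point on a fatter tube (`rr = 1.5f * r`), which is proportional to the analytic normal. A short NumPy sketch of the same math (an independent reimplementation with my own names, not the `GlObject` API):

```python
# NumPy sketch of the torus math used by createTorus above.
# R = ring radius, r = tube radius, w = ring angle, v = tube angle.
import numpy as np

def torus_point(R, r, w, v):
    """Surface point and unit normal at ring angle w, tube angle v."""
    p = np.array([(R + r * np.cos(v)) * np.cos(w),
                  (R + r * np.cos(v)) * np.sin(w),
                  r * np.sin(v)])
    # The Java code's "fatter tube" difference (rr = 1.5*r) works out to
    # 0.5*r times this analytic unit normal:
    n = np.array([np.cos(v) * np.cos(w),
                  np.cos(v) * np.sin(w),
                  np.sin(v)])
    return p, n
```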
""" Contains utility functions Copyright (c) 2014 <NAME> See LICENSE for details """ import numpy def round_solution(soln, decimals=4): """ Runs through a dict() and rounds the values. Arguments: soln: The values in this dict() will be rounded using numpy.round(). decimals: The number of decimals places to the right to be rounded. Returns: A new dict() that contains the rounded values. """ new_soln = dict() for key in soln.keys(): new_soln[key] = numpy.round(soln[key], decimals=decimals) return new_soln def is_aclose(prev, curr, atol=1e-4, rtol=1e-4): """ Determines if the values within two dicts() are close. Uses numpy.isclose() Arguments: prev: previous iteration dict() curr: current iteration dict() atol: absolute tolerance rtol: relative tolerance Returns: True if the values of the dict() are within the tolerances. False otherwise. """ aprev = numpy.array(prev) acurr = numpy.array(curr) return numpy.allclose(aprev, acurr, atol=atol, rtol=rtol) def is_close(prev, curr, atol=1e-4, rtol=1e-4): """ Determines if the values within two dicts() are close. Uses numpy.isclose() Arguments: prev: previous iteration dict() curr: current iteration dict() atol: absolute tolerance rtol: relative tolerance Returns: True if the values of the dict() are within the tolerances. False otherwise. """ for k in prev.keys(): if not numpy.isclose(prev[k], curr[k], atol=atol, rtol=rtol): return False return True def generate_html_table(header, adata): """ Generates an html table for use within iPython """ def _add_row(rowtype, rowdata): """ Adds HTML for a single row """ sdata = "<tr>" for data in rowdata: sdata += "<{0}>{1}</{0}>".format(rowtype, data) sdata += "</tr>" return sdata shtml = "<table>" if header is not None: shtml += _add_row("th", header) for row in adata: shtml += _add_row("td", row) shtml += "</table>" return shtml def SFCTable(model): """ Create a pandas DateFrame for the model. model = model class object already simulated """ import pandas as pd time = len(model.solutions) data_dict = { i: model.solutions[i] for i in range(time) } df = pd.DataFrame(data_dict).transpose() return df def SolveSFC(model, time=500, iterations=100, threshold=1e-5, table=True): """ Solves the model model: a Model class object time: time range to simulate iterations: the number of iterations threshold: threshold table: if True, returns a DataFrame using SFCTable. If so, it must be assigned to a variable """ for i in range(time): model.solve(iterations=iterations, threshold=1e-5) if table == True: df = SFCTable(model) return df else: pass def ShockModel(base_model, create_function, variable, increase, time=500, initial_time = 50, table=True): """ Shocks the model. base_model: The base scenario model create_function: the function used to create the model class variable: the variable to shock. increase: the value to be increassed to the variable time: the time to simulate using SolveSFC(). """ variable = str(variable) increase = float(increase) model = create_function lagged = [key for key in base_model.solutions[-1].keys()] lagged = [i for i in lagged if "__" in i] for i in lagged: del base_model.solutions[-1][i] model.set_values(base_model.solutions[-1]) SolveSFC(model, time=initial_time, table=False) model.parameters[variable].value += increase df = SolveSFC(model, time=time, table=table) return df def SummaryShock(dfShock, shock_time = 50): """ Returns a dataframe which the shock is summarized. 
Only some periods are returned and a additional column with the differente of the last and previous period dfShcok: DataFrame created with ShockModel() digits: the digits to be displayed """ import pandas as pd df = pd.DataFrame({"0":dfShock.iloc[shock_time], "Shock": dfShock.iloc[shock_time+1], "1": dfShock.iloc[shock_time+2], "2": dfShock.iloc[shock_time+3], "3": dfShock.iloc[shock_time+4], "t-1": dfShock.iloc[-2], "t": dfShock.iloc[-1]}) df['difference'] = df['t'] - df['t-1'] return df
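Taken together, these helpers form a solve → shock → summarize workflow. A hypothetical usage sketch (`create_model` stands in for whatever factory builds the SFC model in your own code, and `"alpha1"` is a made-up parameter name):

```python
# Hypothetical usage of the helpers above; create_model and "alpha1"
# are assumptions, not part of this module.
base = create_model()
df_base = SolveSFC(base, time=500)                   # baseline scenario
df_shock = ShockModel(base, create_model, "alpha1",  # re-create, warm up,
                      increase=0.05, time=500)       # then apply the shock
print(SummaryShock(df_shock, shock_time=50).round(4))
```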
class StatsTable:
    """
    The stats table can construct a table of stats about the players, based on
    many different scoring strategies.
    """

    def __init__(self, match_reports: Collection[MatchReport], scorers: Collection[ScorerStrategy]):
        self._match_reports = MatchReportCollection(match_reports)
        self._scorers = scorers

    def get_full_table(self) -> Mapping[Player, StatsRow]:
        table = defaultdict(dict)
        stats = {}
        for scorer in self._scorers:
            stat_name = scorer.stat_name
            stats[stat_name] = scorer.stat_explanation
            scores = self._match_reports.get_full_player_scores(scorer)
            for player, value in scores.items():
                table[player][stat_name] = value
        # Keep only players that have a value for every stat
        filtered_table = {player: row for player, row in table.items()
                          if len(row) == len(self._scorers)}
        return filtered_table

    def get_stats_explanations(self) -> Mapping[str, str]:
        return {scorer.stat_name: scorer.stat_explanation for scorer in self._scorers}
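The class only requires each scorer to expose `stat_name` and `stat_explanation` and to be accepted by `get_full_player_scores`. A minimal sketch of such a strategy (everything beyond those two attributes is an assumption about the surrounding codebase):

```python
# Minimal sketch of a scorer StatsTable could consume. Only stat_name,
# stat_explanation and compatibility with get_full_player_scores() are
# implied by the class above; the score() hook is assumed.
class WinRateScorer:
    stat_name = "win_rate"
    stat_explanation = "Share of matches the player won."

    def score(self, match_report):  # assumed per-report hook
        ...

table = StatsTable(match_reports, scorers=[WinRateScorer()])
rows = table.get_full_table()  # Mapping[Player, {stat_name: value}]
```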
// TODO NOKUBIT: header
#ifndef BITCOIN_NOKUBIT_TAGBLOCK_H
#define BITCOIN_NOKUBIT_TAGBLOCK_H

//#include <amount.h>
//#include <script/script.h>
#include <script/standard.h>
//#include <uint256.h>
//#include <hash.h>
#include <nokubit/asset.h>
#include <consensus/validation.h>
//#include <primitives/transaction.h>
//#include <clientversion.h>

//#include <vector>
#include <stdlib.h>
#include <stdint.h>
//#include <string>

namespace block_tag {

static const int BLOCK_REWARD_PERIOD = CONF_BLOCK_REWARD_PERIOD;

/**
 * Information about a fee-sharing transaction.
 */
struct FeeSharingInfo
{
    /** Hash of the transaction itself. */
    uint256 txHash;
    /** Block height of the transaction. */
    int nHeight;
    /** Fee destination address. */
    WitnessV0KeyHash feeAddress;
    /** The fee value. */
    int64_t nFee;
};

bool DumpFeeSharing(std::vector<FeeSharingInfo>& vinfo);

bool CheckTagBlockTransaction(const CTransaction& tx, CValidationState &state, bool fCheckDuplicateInputs);

} // namespace block_tag

#endif // BITCOIN_NOKUBIT_TAGBLOCK_H
/*
 * Automatically Generated from Mathematica.
 * Thu 4 Nov 2021 16:57:06 GMT-04:00
 */

#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include "H_right_hip_pitch_src.h"

#ifdef _MSC_VER
#define INLINE __forceinline /* use __forceinline (VC++ specific) */
#else
#define INLINE static __inline__ /* use standard inline */
#endif

/**
 * Copied from Wolfram Mathematica C Definitions file mdefs.hpp
 * Changed macros to inline functions (<NAME>)
 */
INLINE double Power(double x, double y) { return pow(x, y); }
INLINE double Sqrt(double x) { return sqrt(x); }
INLINE double Abs(double x) { return fabs(x); }
INLINE double Exp(double x) { return exp(x); }
INLINE double Log(double x) { return log(x); }
INLINE double Sin(double x) { return sin(x); }
INLINE double Cos(double x) { return cos(x); }
INLINE double Tan(double x) { return tan(x); }
INLINE double Csc(double x) { return 1.0/sin(x); }
INLINE double Sec(double x) { return 1.0/cos(x); }
INLINE double ArcSin(double x) { return asin(x); }
INLINE double ArcCos(double x) { return acos(x); }
/* update ArcTan function to use atan2 instead. */
INLINE double ArcTan(double x, double y) { return atan2(y,x); }
INLINE double Sinh(double x) { return sinh(x); }
INLINE double Cosh(double x) { return cosh(x); }
INLINE double Tanh(double x) { return tanh(x); }

#define E 2.71828182845904523536029
#define Pi 3.14159265358979323846264
#define Degree 0.01745329251994329576924

/*
 * Sub functions
 */
static void output1(double *p_output1, const double *var1)
{
  double t13252, t13271, t13253, t13266, t13284, t13289;
  double t13291, t13292, t13293, t13294, t13295, t13302;
  double t13270, t13298, t13299, t13247, t13303, t13304;
  double t13305, t405, t13317, t13318, t13320, t13309;
  double t13316, t13321, t13322, t13324, t13325, t13326;
  double t13335, t13336, t13337, t13339, t13340, t13342;
  double t13300, t13306, t13307, t13310, t13311, t13312;
  double t13323, t13327, t13328, t13330, t13331, t13332;
  double t13338, t13343, t13344, t13347, t13348, t13349;

  t13252 = Cos(var1[3]);
  t13271 = Cos(var1[21]);
  t13253 = Cos(var1[4]);
  t13266 = Sin(var1[21]);
  t13284 = Cos(var1[5]);
  t13289 = Sin(var1[3]);
  t13291 = -1.*t13284*t13289;
  t13292 = Sin(var1[4]);
  t13293 = Sin(var1[5]);
  t13294 = t13252*t13292*t13293;
  t13295 = t13291 + t13294;
  t13302 = Cos(var1[22]);
  t13270 = -1.*t13252*t13253*t13266;
  t13298 = t13271*t13295;
  t13299 = t13270 + t13298;
  t13247 = Sin(var1[22]);
  t13303 = t13271*t13252*t13253;
  t13304 = t13266*t13295;
  t13305 = t13303 + t13304;
  t405 = Cos(var1[23]);
  t13317 = t13252*t13284;
  t13318 = t13289*t13292*t13293;
  t13320 = t13317 + t13318;
  t13309 = Sin(var1[23]);
  t13316 = -1.*t13253*t13266*t13289;
  t13321 = t13271*t13320;
  t13322 = t13316 + t13321;
  t13324 = t13271*t13253*t13289;
  t13325 = t13266*t13320;
  t13326 = t13324 + t13325;
  t13335 = t13266*t13292;
  t13336 = t13271*t13253*t13293;
  t13337 = t13335 + t13336;
  t13339 = -1.*t13271*t13292;
  t13340 = t13253*t13266*t13293;
  t13342 = t13339 + t13340;
  t13300 = t13247*t13299;
  t13306 = t13302*t13305;
  t13307 = t13300 + t13306;
  t13310 = t13302*t13299;
  t13311 = -1.*t13247*t13305;
  t13312 = t13310 + t13311;
  t13323 = t13247*t13322;
  t13327 = t13302*t13326;
  t13328 = t13323 + t13327;
  t13330 = t13302*t13322;
  t13331 = -1.*t13247*t13326;
  t13332 = t13330 + t13331;
  t13338 = t13247*t13337;
  t13343 = t13302*t13342;
  t13344 = t13338 + t13343;
  t13347 = t13302*t13337;
  t13348 = -1.*t13247*t13342;
  t13349 = t13347 + t13348;

  p_output1[0]=-1.*t13309*t13312 + t13307*t405;
  p_output1[1]=-1.*t13309*t13332 + t13328*t405;
  p_output1[2]=-1.*t13309*t13349 + t13344*t405;
  p_output1[3]=0;
  p_output1[4]=t13307*t13309 + t13312*t405;
  p_output1[5]=t13309*t13328 + t13332*t405;
  p_output1[6]=t13309*t13344 + t13349*t405;
  p_output1[7]=0;
  p_output1[8]=t13252*t13284*t13292 + t13289*t13293;
  p_output1[9]=t13284*t13289*t13292 - 1.*t13252*t13293;
  p_output1[10]=t13253*t13284;
  p_output1[11]=0;
  p_output1[12]=var1[0];
  p_output1[13]=var1[1];
  p_output1[14]=var1[2];
  p_output1[15]=1.;
}

void H_right_hip_pitch_src(double *p_output1, const double *var1)
{
  /* Call Subroutines */
  output1(p_output1, var1);
}
# -*- coding: utf-8 -*-
# @Author: ZwEin
# @Date:   2016-06-30 11:29:35
# @Last Modified by:   ZwEin
# @Last Modified time: 2016-10-02 15:12:34

import re

from preprocessor import ZEPreprocessor
from extractor import ZEExtractor
from normalizer import ZENormalizer
from unit import *

PE_DICT_NAME_PRICE = 'price'
PE_DICT_NAME_PPH = 'price_per_hour'

PE_JSON_NAME_PRICE = 'price'
PE_JSON_NAME_PRICE_UNIT = 'price_unit'
PE_JSON_NAME_TIME_UNIT = 'time_unit'


class DIGPriceExtractor():

    def __init__(self):
        self.preprocessor = ZEPreprocessor()
        self.extractor = ZEExtractor()
        self.normalizer = ZENormalizer()

    # Class-level patterns, referenced as DIGPriceExtractor.re_digits below
    re_digits = re.compile(r'\d+')
    re_alphabet = re.compile(r'[a-z ]+')

    def extract(self, text):
        # preprocess -> extract -> normalize pipeline
        cleaned_text_list = self.preprocessor.preprocess(text)
        extracted_text_list = self.extractor.extract_from_list(cleaned_text_list)
        normalized_text_list = self.normalizer.normalize_from_list(extracted_text_list)

        ans = dict()
        ans.setdefault(PE_DICT_NAME_PRICE, [])
        # ans.setdefault(PE_DICT_NAME_PPH, [])
        # for normalized in normalized_text_list:
        #     if not normalized[PE_JSON_NAME_TIME_UNIT]:
        #         ans[PE_DICT_NAME_PPH].append(normalized[PE_JSON_NAME_PRICE])
        #     else:
        #         tunit = DIGPriceExtractor.re_alphabet.findall(normalized[PE_JSON_NAME_TIME_UNIT])
        #         if tunit and tunit[0].strip() in UNIT_TIME_HOUR:
        #             if tunit[0].strip() in UNIT_TIME_HOUR:
        #                 digits = DIGPriceExtractor.re_digits.findall(normalized[PE_JSON_NAME_TIME_UNIT])
        #                 if not digits or int(digits[0]) == 1:
        #                     # ans.append(normalized)
        #                     ans[PE_DICT_NAME_PPH].append(normalized[PE_JSON_NAME_PRICE])
        ans[PE_DICT_NAME_PRICE] = normalized_text_list
        return ans

    def extract_from_list(self, text_list):
        return [self.extract(text) for text in text_list]
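The extractor runs a three-stage preprocess → extract → normalize pipeline and returns its results under the `'price'` key. A hypothetical call (the input string is invented, and the shape of each result entry depends on `ZENormalizer`):

```python
# Hypothetical usage of DIGPriceExtractor; the input text is made up.
extractor = DIGPriceExtractor()
result = extractor.extract("special 150 per hour or 100 per half hour")
print(result[PE_DICT_NAME_PRICE])  # normalized entries from ZENormalizer
```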
# -*- coding: utf-8 -*-
# Copyright (c) 2020, mvit ise and contributors
# For license information, please see license.txt

from __future__ import unicode_literals

import frappe
from frappe.model.document import Document


class DepartmentPlacementReport(Document):

    def get_usn(self, department, company):
        recruited_student_usn = list(frappe.db.sql(
            """select usn from `tabRecruited Students list` where parent="{0}" """.format(company)))
        student_usn = list(frappe.db.sql(
            """select usn from `tabStudent` where department="{0}" """.format(department)))
        self.get_department_recruited_list(recruited_student_usn, student_usn)

    def get_department_recruited_list(self, recruited_student_usn, student_usn):
        # Two-pointer intersection of the two sorted USN lists
        recruited_student_usn.sort()
        student_usn.sort()
        count1 = len(recruited_student_usn)
        count2 = len(student_usn)
        i = j = 0
        department_recruited_student = list()
        while i < count1 and j < count2:
            if recruited_student_usn[i] < student_usn[j]:
                i += 1
            elif recruited_student_usn[i] > student_usn[j]:
                j += 1
            else:
                department_recruited_student.append(recruited_student_usn[i])
                i += 1
                j += 1
        self.update_data(department_recruited_student)

    def update_data(self, department_recruited_student):
        # frappe.db.sql returns row tuples; flatten them into a plain list of USNs
        department_recruited_student_list = list(sum(department_recruited_student, ()))
        for student in department_recruited_student_list:
            a = fill_student_data(get_student_name(student), student)
            self.append("department_placed_student_list", a)


def get_student_name(student):
    student_details = frappe.get_doc("Student", student)
    return " ".join(filter(None, [student_details.first_name,
                                  student_details.middle_name,
                                  student_details.last_name]))


def fill_student_data(student_detail, usn):
    student_entry = frappe.new_doc("Student_id")
    student_entry.usn = usn
    student_entry.name1 = student_detail
    return student_entry
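`get_department_recruited_list` is the classic two-pointer intersection of two sorted lists: advance whichever pointer holds the smaller value, and record a match when both agree. A self-contained sketch of the same idea (the example USNs are invented):

```python
# Standalone sketch of the two-pointer sorted-list intersection used above.
def sorted_intersection(a, b):
    a, b = sorted(a), sorted(b)
    i = j = 0
    out = []
    while i < len(a) and j < len(b):
        if a[i] < b[j]:
            i += 1          # a is behind; advance it
        elif a[i] > b[j]:
            j += 1          # b is behind; advance it
        else:
            out.append(a[i])  # match found in both lists
            i += 1
            j += 1
    return out

assert sorted_intersection(["1MV1", "1MV3"], ["1MV2", "1MV3"]) == ["1MV3"]
```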
// mgo - MongoDB driver for Go
//
// Copyright (c) 2010-2012 - <NAME> <<EMAIL>>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
//    list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
//    this list of conditions and the following disclaimer in the documentation
//    and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

package mgo

import (
	"sync"
)

var stats *Stats
var statsMutex sync.Mutex

func SetStats(enabled bool) {
	statsMutex.Lock()
	if enabled {
		if stats == nil {
			stats = &Stats{}
		}
	} else {
		stats = nil
	}
	statsMutex.Unlock()
}

func GetStats() (snapshot Stats) {
	statsMutex.Lock()
	snapshot = *stats
	statsMutex.Unlock()
	return
}

func ResetStats() {
	statsMutex.Lock()
	debug("Resetting stats")
	old := stats
	stats = &Stats{}
	// These are absolute values:
	stats.Clusters = old.Clusters
	stats.SocketsInUse = old.SocketsInUse
	stats.SocketsAlive = old.SocketsAlive
	stats.SocketRefs = old.SocketRefs
	statsMutex.Unlock()
	return
}

type Stats struct {
	Clusters     int
	MasterConns  int
	SlaveConns   int
	SentOps      int
	ReceivedOps  int
	ReceivedDocs int
	SocketsAlive int
	SocketsInUse int
	SocketRefs   int
}

func (stats *Stats) cluster(delta int) {
	if stats != nil {
		statsMutex.Lock()
		stats.Clusters += delta
		statsMutex.Unlock()
	}
}

func (stats *Stats) conn(delta int, master bool) {
	if stats != nil {
		statsMutex.Lock()
		if master {
			stats.MasterConns += delta
		} else {
			stats.SlaveConns += delta
		}
		statsMutex.Unlock()
	}
}

func (stats *Stats) sentOps(delta int) {
	if stats != nil {
		statsMutex.Lock()
		stats.SentOps += delta
		statsMutex.Unlock()
	}
}

func (stats *Stats) receivedOps(delta int) {
	if stats != nil {
		statsMutex.Lock()
		stats.ReceivedOps += delta
		statsMutex.Unlock()
	}
}

func (stats *Stats) receivedDocs(delta int) {
	if stats != nil {
		statsMutex.Lock()
		stats.ReceivedDocs += delta
		statsMutex.Unlock()
	}
}

func (stats *Stats) socketsInUse(delta int) {
	if stats != nil {
		statsMutex.Lock()
		stats.SocketsInUse += delta
		statsMutex.Unlock()
	}
}

func (stats *Stats) socketsAlive(delta int) {
	if stats != nil {
		statsMutex.Lock()
		stats.SocketsAlive += delta
		statsMutex.Unlock()
	}
}

func (stats *Stats) socketRefs(delta int) {
	if stats != nil {
		statsMutex.Lock()
		stats.SocketRefs += delta
		statsMutex.Unlock()
	}
}
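The pattern here is worth noting: one package-level mutex guards a nullable stats object, every delta method no-ops when stats are disabled (the nil receiver check), and a reset zeroes the counters while carrying over the absolute gauges. A minimal Python sketch of the same idea (my own analogue, not part of mgo):

```python
# Python analogue of mgo's stats pattern (not part of mgo): a single lock
# guards a snapshot that is None while disabled, so delta helpers are
# cheap no-ops until set_stats(True) is called.
import threading

_lock = threading.Lock()
_stats = None  # None plays the role of the nil *Stats in the Go code

def set_stats(enabled):
    global _stats
    with _lock:
        if enabled:
            if _stats is None:
                _stats = {"sent_ops": 0, "received_ops": 0}
        else:
            _stats = None

def sent_ops(delta):
    with _lock:
        if _stats is not None:
            _stats["sent_ops"] += delta

def get_stats():
    with _lock:
        return dict(_stats) if _stats is not None else None  # snapshot copy
```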
/*============================================================================

The Medical Imaging Interaction Toolkit (MITK)

Copyright (c) German Cancer Research Center (DKFZ)
All rights reserved.

Use of this source code is governed by a 3-clause BSD license that can be
found in the LICENSE file.

============================================================================*/

#include <mitkTestingMacros.h>
#include <mitkToFCameraPMDPlayerDevice.h>
#include <mitkToFConfig.h>
#include <mitkToFPMDConfig.h>

/**Documentation
 * test for the class "ToFCameraPMDPlayerDevice".
 */
int mitkToFCameraPMDPlayerDeviceTest(int /* argc */, char* /*argv*/[])
{
  MITK_TEST_BEGIN("ToFCameraPMDPlayerDevice");

  std::string fileName = MITK_TOF_DATA_DIR;
  fileName = fileName + "/TestSequence.pmd";
  mitk::ToFCameraPMDPlayerDevice::Pointer tofCameraPMDPlayerDevice = mitk::ToFCameraPMDPlayerDevice::New();
  tofCameraPMDPlayerDevice->SetProperty("PMDFileName", mitk::StringProperty::New(fileName));
  std::string platform = MITK_TOF_PLATFORM;
  // PMDPlayerController only available for W32 systems
  if (platform == "W32")
  {
    if (std::string(MITK_TOF_PMDFILE_SOURCE_PLUGIN) != "")
    {
      MITK_TEST_CONDITION_REQUIRED(tofCameraPMDPlayerDevice->ConnectCamera(), "Test ConnectCamera()");
    }
    tofCameraPMDPlayerDevice->StartCamera();
    tofCameraPMDPlayerDevice->UpdateCamera();
    float* distances = new float[40000];
    float* amplitudes = new float[40000];
    float* intensities = new float[40000];
    int imageSequence = 0;
    MITK_TEST_OUTPUT(<< "Call GetDistances()");
    tofCameraPMDPlayerDevice->GetDistances(distances, imageSequence);
    MITK_TEST_OUTPUT(<< "Call GetAmplitudes()");
    tofCameraPMDPlayerDevice->GetAmplitudes(amplitudes, imageSequence);
    MITK_TEST_OUTPUT(<< "Call GetIntensities()");
    tofCameraPMDPlayerDevice->GetIntensities(intensities, imageSequence);
    tofCameraPMDPlayerDevice->StopCamera();
    MITK_TEST_CONDITION_REQUIRED(tofCameraPMDPlayerDevice->DisconnectCamera(), "Test DisconnectCamera()");
    delete[] distances;
    delete[] amplitudes;
    delete[] intensities;
  }

  MITK_TEST_END();
}
Let's play a game: Imagine you run a baseball team; a good baseball team; one that's playing well and has aspirations of playing deep into October. Now imagine you have a prospect at a premier position, tearing up AAA, who's seen MLB service time in (small) parts of two seasons, with well above average defensive ability, superior on-base skills, and speed to steal upwards of 40 bases. Imagine your MLB starter at the same position is a former utility player who has hit well enough in the last couple weeks to pull his average up to .214, and whose single redeeming trait is the moderate power that's begotten 13 homers this season. What reason would you have for not promoting the minor leaguer and starting him over the glorified utility player?

This isn't a trick question; I'm genuinely curious. What possible reason would there be for not promoting the clearly superior player?

Ok, game over. You don't have to imagine this situation because it's playing out, in real life, in our nation's capital. The prospect's name is Trea Turner, the premium position is shortstop, and the Washington Nationals have still not promoted him to replace the forlorn relic that is Danny Espinosa. This defies all logic. (After the Nats did promote Turner a couple weeks ago, he went 3-3 (!) in his one start before being benched for 2 games and then sent back to the minors.)

Turner, a 23-year-old former first rounder, can't do much more to prove he's mastered AAA. He's batting .295 and has stolen 22 bases in 23 attempts. Espinosa, a 29-year-old who has been primarily a second baseman in his career and hasn't really been a full-time starter since 2012, is best suited as a high-quality utility infielder. He has pop and can play a variety of positions; he gets hot for stretches and can be a valuable contributor. He is also overmatched as an everyday starter, especially at shortstop. He strikes out in nearly a quarter of his plate appearances, and his 85 wRC+ positions him as a squarely below average hitter, even at shortstop, where the league average wRC+ is 92. There is no rational universe where Danny Espinosa should be starting and Trea Turner shouldn't even get a roster spot.

Dusty Baker, the Nationals' frustrating manager, usually answers the frequent "Where is Trea Turner?" questions he gets with some mumbly line about how you don't mess with success. The Nationals have a 5.5-game lead over the Mets and Marlins, and have the 3rd-best record in the National League. Fair, but since when is "eh, we're already pretty good" a justification for not playing your best players in pro sports?

I know what you're thinking: Super 2! But the answer to the Turner riddle has nothing to do with salary management and arbitration clocks, either. We're past all that. At this point, Turner cannot accrue enough playing time to qualify for arbitration a year early, so it's not like the Nats are saving any real money by letting him tear apart minor league pitching as opposed to the Phillies. It simply doesn't make any sense.

With Turner, the Nats would have one of the more formidable all-around infields in baseball. At third base, Anthony Rendon struggles with injuries and inconsistency, but has shown the potential to be one of the better No. 2 hitters in baseball. A former 6th overall pick, Rendon finished 5th in the MVP balloting while posting a 6.6 WAR in 2014. He's got power and low-double-digit steals speed, and he plays great defense. When he's rolling, he can carry a team. At second, Washington has Daniel Murphy. I panned the Murphy signing in the winter when the Nats made it, but Daniel was hitting .400 not too long ago and he still leads the NL with a .352 average and 93 hits. Sean actually mentioned on Opening Day just how Murphy could be a huge asset to the Nats, even with the slightly inflated paycheck. At first, Ryan Zimmerman isn't near what he once was, but is still a pretty good first baseman. He's got some pop and is solid defensively. Coupled with the Nationals' 4 solid outfield options in Bryce Harper, Ben Revere, Jayson Werth, and Michael A. Taylor, Washington has one of the deeper lineups in baseball.

That's why this is so frustrating. There's such a clear and easy upgrade to Washington's weakest position, yet he's sitting in AAA. Let's compare possible lineups.

With Danny Espinosa:

Ben Revere cf
Anthony Rendon 3b
Bryce Harper rf
Daniel Murphy 2b
Ryan Zimmerman 1b
Jayson Werth lf
Wilson Ramos c
Danny Espinosa ss

Bleh.

With Trea Turner:

Ben Revere cf
Anthony Rendon 3b
Bryce Harper rf
Daniel Murphy 2b
Ryan Zimmerman 1b
Jayson Werth lf
Wilson Ramos c
Trea Turner ss

WOW. Look at that lineup. Power, speed, it's got it all! The Nationals are a real contender with that second batting order.

Turner's much-deserved call-up won't turn Bryce Harper back into the most feared hitter in baseball. It won't make Jayson Werth consistent. It won't make Ryan Zimmerman strike out less. All of those developments would be more important to the Nats' World Series chances than turning a .200 average with power in the 8-hole into a .270 average with 25-30 steals. Turner isn't the panacea for all that ails Washington, but he's much better than what they have now. There's no reason he shouldn't get a shot to prove it.

-Max Frankel
// Copyright 2022 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef LIB_FIDL_DRIVER_TESTS_TRANSPORT_ASSERT_PEER_CLOSED_HELPER_H_
#define LIB_FIDL_DRIVER_TESTS_TRANSPORT_ASSERT_PEER_CLOSED_HELPER_H_

#include <lib/fdf/cpp/channel.h>
#include <lib/fidl_driver/cpp/transport.h>
#include <lib/zx/channel.h>
#include <zircon/syscalls.h>

namespace fidl_driver_testing {

// Generates a test failure if the peer of |channel| is not closed.
void AssertPeerClosed(const zx::channel& channel);

// Generates a test failure if the peer of |channel| is not closed.
void AssertPeerClosed(const fdf::Channel& channel);

}  // namespace fidl_driver_testing

#endif  // LIB_FIDL_DRIVER_TESTS_TRANSPORT_ASSERT_PEER_CLOSED_HELPER_H_
// Copyright <NAME> 2008. // Use, modification and distribution are subject to the // Boost Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) static const std::array<std::array<typename table_type<T>::type, 2>, 336> expinti_data = {{ {{ SC_(-49.689971923828125), SC_(-0.5189914452467706181911213069148082266893e-23) }}, {{ SC_(-49.490234375), SC_(-0.6362401135264284153452455085506782750647e-23) }}, {{ SC_(-49.47381591796875), SC_(-0.6469829627301154800340680350229540167762e-23) }}, {{ SC_(-49.109668731689453125), SC_(-0.937965617499186961883342318134168114907e-23) }}, {{ SC_(-48.69077301025390625), SC_(-0.1438002258908106326351299430868912629378e-22) }}, {{ SC_(-48.5106964111328125), SC_(-0.1728000132640339028589502655738823027867e-22) }}, {{ SC_(-48.044872283935546875), SC_(-0.2779439615740334951105570945539716295032e-22) }}, {{ SC_(-46.498386383056640625), SC_(-0.1347452831431197364179115494975704338441e-21) }}, {{ SC_(-46.225093841552734375), SC_(-0.1781193691784135039948913637371768811083e-21) }}, {{ SC_(-46.21092987060546875), SC_(-0.1807144449606719884758204680891841205212e-21) }}, {{ SC_(-46.07171630859375), SC_(-0.2083224907084183832424058026703771220474e-21) }}, {{ SC_(-45.99146270751953125), SC_(-0.2261161370639941533874318532661368660359e-21) }}, {{ SC_(-45.54817962646484375), SC_(-0.3556041033058530933762241390777221084271e-21) }}, {{ SC_(-45.04344940185546875), SC_(-0.5955350514558786142053309726857087311429e-21) }}, {{ SC_(-44.921146392822265625), SC_(-0.6748061149415831787816898069300771337011e-21) }}, {{ SC_(-44.366191864013671875), SC_(-0.1189810693892140146725035626514689534384e-20) }}, {{ SC_(-44.065486907958984375), SC_(-0.1617943260346693355541238773884456402131e-20) }}, {{ SC_(-42.564510345458984375), SC_(-0.7508521784292956555049663090063210339236e-20) }}, {{ SC_(-41.65602874755859375), SC_(-0.1902237115363634179207156972256911270036e-19) }}, {{ SC_(-41.643665313720703125), SC_(-0.1926460223216498626564145997341831122167e-19) }}, {{ SC_(-41.46872711181640625), SC_(-0.2304209021566303248261300380232548536606e-19) }}, {{ SC_(-41.400691986083984375), SC_(-0.2470393735589908561748312777657558449938e-19) }}, {{ SC_(-40.7796478271484375), SC_(-0.4665489090348590088892832574733877627252e-19) }}, {{ SC_(-40.712055206298828125), SC_(-0.4999841334502210157248794528480866083337e-19) }}, {{ SC_(-39.510929107666015625), SC_(-0.1711205236582734104034468188313993053133e-18) }}, {{ SC_(-39.3155059814453125), SC_(-0.2090623264737543134488804288570287187466e-18) }}, {{ SC_(-39.270557403564453125), SC_(-0.2189181301371969956898734656820283591514e-18) }}, {{ SC_(-38.26819610595703125), SC_(-0.6117321854640996737992349893398315143733e-18) }}, {{ SC_(-37.9152069091796875), SC_(-0.8785955246457078845733834215638759432102e-18) }}, {{ SC_(-37.628902435302734375), SC_(-0.1178529325141498446240403317681089430123e-17) }}, {{ SC_(-36.91025543212890625), SC_(-0.2463830668835851637664366500894910731401e-17) }}, {{ SC_(-36.849811553955078125), SC_(-0.26215321484857982843437445475200392826e-17) }}, {{ SC_(-36.434917449951171875), SC_(-0.4013604158054956102381643294616861600937e-17) }}, {{ SC_(-36.22989654541015625), SC_(-0.4954070919347881798661389820765284703293e-17) }}, {{ SC_(-36.151371002197265625), SC_(-0.5370115748619942846995056236200542722029e-17) }}, {{ SC_(-36.0314483642578125), SC_(-0.607395305918129149416531788551080247298e-17) }}, {{ SC_(-35.710315704345703125), SC_(-0.8447444058571321223562848170132272760574e-17) }}, 
{{ SC_(-35.3704376220703125), SC_(-0.1197781809600591211033914907221945799548e-16) }}, {{ SC_(-35.097530364990234375), SC_(-0.1585533750794002114788183801172421930711e-16) }}, {{ SC_(-35.032459259033203125), SC_(-0.1695197166162417180772352306691754347417e-16) }}, {{ SC_(-34.819854736328125), SC_(-0.2109244705480778173400075610148135850665e-16) }}, {{ SC_(-34.751312255859375), SC_(-0.2263224452346398759852695717206224969765e-16) }}, {{ SC_(-34.3925018310546875), SC_(-0.3272986079708841109973691119640729899672e-16) }}, {{ SC_(-34.054973602294921875), SC_(-0.4631254422748072673716271830348503655722e-16) }}, {{ SC_(-33.990711212158203125), SC_(-0.4947724616135280674694624593161655499417e-16) }}, {{ SC_(-33.597461700439453125), SC_(-0.7414940191228214696888223680604909022873e-16) }}, {{ SC_(-33.57765960693359375), SC_(-0.756757300887760805540854651208858682068e-16) }}, {{ SC_(-33.2384185791015625), SC_(-0.107294068831277583736449351876748813907e-15) }}, {{ SC_(-33.01177215576171875), SC_(-0.1354864993921957916546792647862260697468e-15) }}, {{ SC_(-32.66255950927734375), SC_(-0.1941093737430240934530108031230878545462e-15) }}, {{ SC_(-32.613666534423828125), SC_(-0.204132772872516639905834010047613657569e-15) }}, {{ SC_(-32.159946441650390625), SC_(-0.3257412136825485821320656994134128511202e-15) }}, {{ SC_(-32.11270904541015625), SC_(-0.3419855833378806479533913787891698469742e-15) }}, {{ SC_(-31.7786407470703125), SC_(-0.4825063941260257502369727213840286372208e-15) }}, {{ SC_(-31.169464111328125), SC_(-0.9041208285654553216232000743485732341508e-15) }}, {{ SC_(-30.8748321533203125), SC_(-0.1225142484842369432707212786481995255584e-14) }}, {{ SC_(-29.9968280792236328125), SC_(-0.3031462122352020681576569789998943438361e-14) }}, {{ SC_(-29.7701435089111328125), SC_(-0.3830838750986454571203218948391228454923e-14) }}, {{ SC_(-29.4440135955810546875), SC_(-0.5364965948171210691720366951042121092755e-14) }}, {{ SC_(-29.2779827117919921875), SC_(-0.6368731799234152625106787139014919307596e-14) }}, {{ SC_(-28.374523162841796875), SC_(-0.1620330664359454781224975910217836859007e-13) }}, {{ SC_(-28.049365997314453125), SC_(-0.2268099668304392203466560599584613615573e-13) }}, {{ SC_(-27.112514495849609375), SC_(-0.5981365620673597072402185048412869627752e-13) }}, {{ SC_(-26.877002716064453125), SC_(-0.7633855292124147317951832423188187154848e-13) }}, {{ SC_(-26.688323974609375), SC_(-0.9282018389970155561367076378137998442654e-13) }}, {{ SC_(-26.1038379669189453125), SC_(-0.1701256502901405065500817349298173344176e-12) }}, {{ SC_(-25.6862545013427734375), SC_(-0.2623518232936666267503379731452152909371e-12) }}, {{ SC_(-25.3806858062744140625), SC_(-0.3602510949927852845512603205232949953593e-12) }}, {{ SC_(-24.812534332275390625), SC_(-0.6498766587291270315525780335102007560594e-12) }}, {{ SC_(-24.6828327178955078125), SC_(-0.7436238775678827344102896240858028195429e-12) }}, {{ SC_(-23.609222412109375), SC_(-0.2270985496456140144232209569165697838238e-11) }}, {{ SC_(-23.6052227020263671875), SC_(-0.2280458712546087199917148252497757574451e-11) }}, {{ SC_(-23.212253570556640625), SC_(-0.3433205132674812376797712986415901957608e-11) }}, {{ SC_(-22.38077545166015625), SC_(-0.8166398479320291904581660909155149328026e-11) }}, {{ SC_(-22.17484283447265625), SC_(-0.1012326926235613489340088549168765113258e-10) }}, {{ SC_(-22.028961181640625), SC_(-0.1178767187524946251357575066537303513673e-10) }}, {{ SC_(-21.93953704833984375), SC_(-0.1294076260343429420577210021644224166493e-10) }}, {{ 
SC_(-21.6740932464599609375), SC_(-0.1707311450429202319800292418877066733455e-10) }}, {{ SC_(-21.4142551422119140625), SC_(-0.2239660671546579826122524352899183509804e-10) }}, {{ SC_(-21.07315826416015625), SC_(-0.31989140498942794678382607273845334638e-10) }}, {{ SC_(-20.9816799163818359375), SC_(-0.3519989253439093126729073010825611989496e-10) }}, {{ SC_(-19.637241363525390625), SC_(-0.1438600620159160308817479271652146400346e-9) }}, {{ SC_(-19.5384731292724609375), SC_(-0.1595610686595867225475531858139583145912e-9) }}, {{ SC_(-19.3651943206787109375), SC_(-0.191371001872496098463574662692381338326e-9) }}, {{ SC_(-18.90593719482421875), SC_(-0.30994031572235860524960759707349725788e-9) }}, {{ SC_(-18.5577068328857421875), SC_(-0.4469016992114872271994190582757178616282e-9) }}, {{ SC_(-17.3267612457275390625), SC_(-0.1633711779868352920783956469994975258958e-8) }}, {{ SC_(-17.0185108184814453125), SC_(-0.2261792263181589044734075605962911166714e-8) }}, {{ SC_(-16.78955841064453125), SC_(-0.2880510330832347302622379645025972070067e-8) }}, {{ SC_(-16.1016254425048828125), SC_(-0.596294305040922450573339753675973857866e-8) }}, {{ SC_(-15.76634502410888671875), SC_(-0.8505935450884228183463975203958420779551e-8) }}, {{ SC_(-15.17945194244384765625), SC_(-0.1585549103627994633875481186154489551696e-7) }}, {{ SC_(-15.11905670166015625), SC_(-0.1690611957957755240799660289020897272965e-7) }}, {{ SC_(-13.43068981170654296875), SC_(-0.1022584513231563674448689979341186149526e-6) }}, {{ SC_(-12.91650867462158203125), SC_(-0.1773761262859971060164899581342656864029e-6) }}, {{ SC_(-12.575832366943359375), SC_(-0.255687274145036875029313903922532225666e-6) }}, {{ SC_(-12.5575695037841796875), SC_(-0.2607537477649603990782583543108160730844e-6) }}, {{ SC_(-11.91432666778564453125), SC_(-0.5210749882067263449474065372193218810935e-6) }}, {{ SC_(-11.5017871856689453125), SC_(-0.8134101131665215640713025641914718088437e-6) }}, {{ SC_(-11.395236968994140625), SC_(-0.91272723045117483696197441691774343853e-6) }}, {{ SC_(-11.31745433807373046875), SC_(-0.9928537403218690168906979956279759632219e-6) }}, {{ SC_(-11.19613552093505859375), SC_(-0.1132191013156966403507410220532861997662e-5) }}, {{ SC_(-10.80203342437744140625), SC_(-0.1735824908097239014329439735951592857174e-5) }}, {{ SC_(-10.257659912109375), SC_(-0.3138251672181765729003432862510751688375e-5) }}, {{ SC_(-10.04949092864990234375), SC_(-0.3938286114079258742139622090231880229156e-5) }}, {{ SC_(-8.15095806121826171875), SC_(-0.318435308755980338774238163595618232826e-4) }}, {{ SC_(-8.02857112884521484375), SC_(-0.3648660193419818119566325214639226953631e-4) }}, {{ SC_(-7.94337558746337890625), SC_(-0.4011732556063931089384872957189354164538e-4) }}, {{ SC_(-7.397450923919677734375), SC_(-0.7385058450049667092943216183158760353767e-4) }}, {{ SC_(-7.257321834564208984375), SC_(-0.8643407707350383467623358242945344658293e-4) }}, {{ SC_(-6.85502719879150390625), SC_(-0.0001360233225396037741570065066289193367288) }}, {{ SC_(-6.844749927520751953125), SC_(-0.0001376130697072288547351049358728336878462) }}, {{ SC_(-6.56009578704833984375), SC_(-0.0001900005993505645179619309041336139866346) }}, {{ SC_(-6.1387615203857421875), SC_(-0.0003071516884020679259927871076238348137779) }}, {{ SC_(-6.02390766143798828125), SC_(-0.0003503420375785161458178656827678832632704) }}, {{ SC_(-5.801117420196533203125), SC_(-0.0004525783807737199298571674150953465181097) }}, {{ SC_(-5.5370635986328125), SC_(-0.0006139802161619321096305734620234110760157) }}, {{ 
SC_(-5.039572238922119140625), SC_(-0.001096214284474157003983248247338271233381) }}, {{ SC_(-4.60668182373046875), SC_(-0.00182646403775848961256251726233316613562) }}, {{ SC_(-3.606259822845458984375), SC_(-0.006113092244150501182759223098480467043372) }}, {{ SC_(-3.5703563690185546875), SC_(-0.006389725283693163413106729641577233598271) }}, {{ SC_(-3.4518375396728515625), SC_(-0.007398826328864287868299387508442295495418) }}, {{ SC_(-2.544478893280029296875), SC_(-0.02349896621151128636921912021857515353106) }}, {{ SC_(-2.0270683765411376953125), SC_(-0.04710549033253332417891459635198730441661) }}, {{ SC_(-1.74664974212646484375), SC_(-0.06982224065679792018629296005666623745408) }}, {{ SC_(-1.73811972141265869140625), SC_(-0.07067947912193223977231133182821733641521) }}, {{ SC_(-1.30539000034332275390625), SC_(-0.1343263664730872765772106823412265341571) }}, {{ SC_(-0.9855175018310546875), SC_(-0.224789849918404400892777170618030689004) }}, {{ SC_(-0.765717029571533203125), SC_(-0.3306204965891935899584519691640732251379) }}, {{ SC_(-0.440425574779510498046875), SC_(-0.6391103091578925546980910928718730805524) }}, {{ SC_(0.1690093176520690576580818742513656616211e-8), SC_(-19.62126650890371652595295530979865095775) }}, {{ SC_(0.2114990849122477811761200428009033203125e-8), SC_(-19.39699968409704501608143944417902979644) }}, {{ SC_(0.7099628440698779741069301962852478027344e-8), SC_(-18.18600771456330899529400229563313464444) }}, {{ SC_(0.136718796284185373224318027496337890625e-7), SC_(-17.5307090168393574784921913178898385982) }}, {{ SC_(0.1679341288252089725574478507041931152344e-7), SC_(-17.32506343605589965123585723923677953622) }}, {{ SC_(0.586768322818898013792932033538818359375e-7), SC_(-16.07400514447660812399260402134573702052) }}, {{ SC_(0.1140460881288163363933563232421875e-6), SC_(-15.40944740947244982567362347512857196391) }}, {{ SC_(0.1455586016163579188287258148193359375e-6), SC_(-15.16547126071097956584839542123919818808) }}, {{ SC_(0.38918477685001562349498271942138671875e-6), SC_(-14.18199554726483978034167263996676840479) }}, {{ SC_(0.623782625552848912775516510009765625e-6), SC_(-13.71024759706876242023883975750942755915) }}, {{ SC_(0.104669607026153244078159332275390625e-5), SC_(-13.1926552429023750061675349060182474459) }}, {{ SC_(0.2951089072666945867240428924560546875e-5), SC_(-12.15611766257365306019177885908916324814) }}, {{ SC_(0.4877083483734168112277984619140625e-5), SC_(-11.65374262154606280170522244483307812283) }}, {{ SC_(0.9066634447663091123104095458984375e-5), SC_(-11.03368469534678539610212344978815972353) }}, {{ SC_(0.2360353755648247897624969482421875e-4), SC_(-10.07687469212656789087127871765075953034) }}, {{ SC_(0.60817910707555711269378662109375e-4), SC_(-9.130349744630693613333257072114132620679) }}, {{ SC_(0.000119476739200763404369354248046875), SC_(-8.45505371136981670609925548596536076898) }}, {{ SC_(0.0002437086659483611583709716796875), SC_(-7.742077649645587806911765144483417757553) }}, {{ SC_(0.00047970912419259548187255859375), SC_(-7.064635197452612847888578571036009517596) }}, {{ SC_(0.000960788573138415813446044921875), SC_(-6.369579496036524745416341422949188673429) }}, {{ SC_(0.00113048148341476917266845703125), SC_(-6.206765179529281495180879269354516246991) }}, {{ SC_(0.0033707791008055210113525390625), SC_(-5.112022087485498560545602301598797005544) }}, {{ SC_(0.007697627879679203033447265625), SC_(-4.281914933633107042159905427812436577356) }}, {{ SC_(0.0154774188995361328125), SC_(-3.575619984475573663157136373699830467351) 
}}, {{ SC_(0.0305807329714298248291015625), SC_(-2.879353318645655463312010631432614581285) }}, {{ SC_(0.0346831791102886199951171875), SC_(-2.749298553683104560876527875763233907) }}, {{ SC_(0.09283597767353057861328125), SC_(-1.704669515752701009603290162653889150725) }}, {{ SC_(0.16360938549041748046875), SC_(-1.062505460428854260642631229015752036299) }}, {{ SC_(0.22476322948932647705078125), SC_(-0.6774408343494534677895972717532955476869) }}, {{ SC_(0.37880718708038330078125), SC_(0.02441666034110163309027907713654447093795) }}, {{ SC_(0.4500701129436492919921875), SC_(0.2850997481756199579143134375351571168498) }}, {{ SC_(0.64851474761962890625), SC_(0.9150076958911796894005017309253737142643) }}, {{ SC_(0.872161686420440673828125), SC_(1.546602463133721778520197846604188825167) }}, {{ SC_(1.14188635349273681640625), SC_(2.282017713054334977384465917433486481954) }}, {{ SC_(1.632969379425048828125), SC_(3.708022196201172208181850369303918535594) }}, {{ SC_(1.91524684429168701171875), SC_(4.647560468822983657335378269935274858252) }}, {{ SC_(2.0095670223236083984375), SC_(4.989664796356000084558864930434693816234) }}, {{ SC_(2.061771869659423828125), SC_(5.186049765719297790031932942365080982976) }}, {{ SC_(2.2234554290771484375), SC_(5.829455392510293560943895777670731465053) }}, {{ SC_(3.391320705413818359375), SC_(12.93581868697820390884888173217329392277) }}, {{ SC_(3.63258075714111328125), SC_(15.24140806012294293304307663205345607658) }}, {{ SC_(3.874083042144775390625), SC_(17.9905574004211871523525648227022806255) }}, {{ SC_(3.9647958278656005859375), SC_(19.15663585397278189871759282735891458668) }}, {{ SC_(4.820046901702880859375), SC_(35.20942043634765130040913234375925316531) }}, {{ SC_(4.839860439300537109375), SC_(35.7230453208944376116432625721437408942) }}, {{ SC_(5.3509044647216796875), SC_(52.22020521530953938550833436236144368121) }}, {{ SC_(5.40289783477783203125), SC_(54.31243085873860477879662300914122221072) }}, {{ SC_(5.524809360504150390625), SC_(59.58055115475630923207417956895030150463) }}, {{ SC_(5.60483837127685546875), SC_(63.3358979487388193548024404586957936219) }}, {{ SC_(5.655277252197265625), SC_(65.83272358924251841359208893617229420928) }}, {{ SC_(5.68704509735107421875), SC_(67.45933093217862071244254216727784222832) }}, {{ SC_(6.457447052001953125), SC_(123.4702296391656695506519753815867966569) }}, {{ SC_(8.01086139678955078125), SC_(444.4463643880764930507024304168884617662) }}, {{ SC_(8.13864803314208984375), SC_(495.3101709262324392110450657667108532755) }}, {{ SC_(8.28261566162109375), SC_(559.877848810511434189052045218975505771) }}, {{ SC_(8.387729644775390625), SC_(612.4581525328497943613664298891423815016) }}, {{ SC_(8.7326412200927734375), SC_(823.599445125107912460255698424832715259) }}, {{ SC_(8.89971637725830078125), SC_(951.4942058135830875156789370724711488375) }}, {{ SC_(9.21766567230224609375), SC_(1254.116584663167251806959698506292298412) }}, {{ SC_(10.12511348724365234375), SC_(2783.932169928304521372514424737213979338) }}, {{ SC_(10.15696430206298828125), SC_(2863.594026810931459246318070217071739042) }}, {{ SC_(10.19370746612548828125), SC_(2958.379138016360344099887177971538047866) }}, {{ SC_(10.19426441192626953125), SC_(2959.840173454726579315814039625058033301) }}, {{ SC_(10.4695987701416015625), SC_(3780.023943205871070531887511694330145346) }}, {{ SC_(10.48462009429931640625), SC_(3830.912593094958387366502243352980495425) }}, {{ SC_(11.4095668792724609375), SC_(8773.100258415416694826189142702383416451) }}, {{ 
SC_(11.7713222503662109375), SC_(12161.33897998932776399014133107556617305) }}, {{ SC_(12.4603786468505859375), SC_(22729.88062534874311051752965324633319716) }}, {{ SC_(12.57059955596923828125), SC_(25130.98763507991967701586529249834912407) }}, {{ SC_(13.11301326751708984375), SC_(41251.55674345891428427980010586930742943) }}, {{ SC_(13.7675037384033203125), SC_(75228.92803868848986122975671702709528184) }}, {{ SC_(13.9052371978759765625), SC_(85400.1662872705358542582248024368153919) }}, {{ SC_(14.3790493011474609375), SC_(132224.1847370047667808930523187693328728) }}, {{ SC_(14.3794498443603515625), SC_(132273.1335203298191314772732228745972998) }}, {{ SC_(15.21062469482421875), SC_(285682.5599867407331192380971834146771446) }}, {{ SC_(15.39539432525634765625), SC_(339189.3027111421620231430296710100618608) }}, {{ SC_(16.218013763427734375), SC_(729907.8872092321790234225457781718572718) }}, {{ SC_(16.46546173095703125), SC_(919706.2433779946775124389017819374115907) }}, {{ SC_(17.7649173736572265625), SC_(3108879.230687753723076805660417797528995) }}, {{ SC_(18.23960113525390625), SC_(4858737.11952045506522062926385708137311) }}, {{ SC_(18.4260616302490234375), SC_(5791494.941827564738522268163850830865472) }}, {{ SC_(19.012279510498046875), SC_(10066932.15321395806087810278623274756541) }}, {{ SC_(19.5595188140869140625), SC_(16883605.29408456961771231663180943490866) }}, {{ SC_(20.37397003173828125), SC_(36508755.03776723110298560035691974974617) }}, {{ SC_(20.835704803466796875), SC_(56575951.205609704704636343530031541064) }}, {{ SC_(21.0944309234619140625), SC_(72332387.61173328702549768015323808037801) }}, {{ SC_(21.9486980438232421875), SC_(162986366.6628523568138908187268486831292) }}, {{ SC_(22.0607814788818359375), SC_(181342019.5679948669381801941271758479043) }}, {{ SC_(22.1025676727294921875), SC_(188703736.1360623611074418193171278225304) }}, {{ SC_(22.1314754486083984375), SC_(193971208.5963560967150675933295824773472) }}, {{ SC_(22.6131420135498046875), SC_(306960930.053725589282112731051688195145) }}, {{ SC_(22.9966068267822265625), SC_(442531056.6008858180372506333673348776581) }}, {{ SC_(24.66086578369140625), SC_(2172219893.769243704085432245669500194727) }}, {{ SC_(24.7471675872802734375), SC_(2359372003.318650065162623018506525353893) }}, {{ SC_(24.7672977447509765625), SC_(2405300409.260463112199179923485274897776) }}, {{ SC_(24.78017425537109375), SC_(2435147794.700459281996413056218750490234) }}, {{ SC_(24.831577301025390625), SC_(2558042555.796109988805685451913765393216) }}, {{ SC_(24.9495487213134765625), SC_(2864104017.927884220619778105722242253745) }}, {{ SC_(25.8135986328125), SC_(6558254256.348564813851988511155677431286) }}, {{ SC_(26.431148529052734375), SC_(11865100527.11967913666447024180657659641) }}, {{ SC_(26.475616455078125), SC_(12382894374.47640125042051702147914482717) }}, {{ SC_(26.8984375), SC_(18589906285.97139709477775311556771661975) }}, {{ SC_(27.6351680755615234375), SC_(37758334087.66944768183509564350686344903) }}, {{ SC_(27.6650714874267578125), SC_(38860718906.05169447986846371460827946851) }}, {{ SC_(28.030132293701171875), SC_(55224345285.73183315482360024631420661039) }}, {{ SC_(28.1774234771728515625), SC_(63640104036.92648906241078382739179720716) }}, {{ SC_(28.771076202392578125), SC_(112755629580.1567007685754818929698465233) }}, {{ SC_(29.2542781829833984375), SC_(179670592771.6875712257676967047727294651) }}, {{ SC_(29.84228515625), SC_(316865442957.5609324434506204080682970604) }}, {{ SC_(29.9650173187255859375), 
SC_(356720167837.8059821256007389978942158479) }}, {{ SC_(30.60785675048828125), SC_(663668224048.0604218612696926330408705931) }}, {{ SC_(30.651767730712890625), SC_(692430279700.9637790480873609641395475303) }}, {{ SC_(31.47119903564453125), SC_(1528910755990.844994280853960653895757449) }}, {{ SC_(31.744571685791015625), SC_(1991673673982.885396571525113897855042645) }}, {{ SC_(32.2966766357421875), SC_(3398181411672.422126614376199484878709082) }}, {{ SC_(32.49350738525390625), SC_(4111498043870.037918808870011846450846812) }}, {{ SC_(32.639377593994140625), SC_(4735210781514.282917532917045901722362452) }}, {{ SC_(32.9102020263671875), SC_(6155269461572.004859718923506587585651898) }}, {{ SC_(33.015533447265625), SC_(6816439946334.964794674640603518324460658) }}, {{ SC_(33.292026519775390625), SC_(8910343016135.140885096036479635757392035) }}, {{ SC_(33.351413726806640625), SC_(9438137404966.366739297549042509103959184) }}, {{ SC_(33.790431976318359375), SC_(14443862666819.21046289715593905528182097) }}, {{ SC_(33.866344451904296875), SC_(15546956543575.59764405635272615745868862) }}, {{ SC_(34.012500762939453125), SC_(17913865016638.12842343852721911357435736) }}, {{ SC_(34.20684814453125), SC_(21629122436899.19912288305015863841369024) }}, {{ SC_(35.130886077880859375), SC_(53015183051332.74153203541356793785468028) }}, {{ SC_(35.24015045166015625), SC_(58947101093536.87223820738247177154640853) }}, {{ SC_(35.67874908447265625), SC_(90241306347642.26414493832961429071231867) }}, {{ SC_(35.70839691162109375), SC_(92877271582193.34830869185118876551411853) }}, {{ SC_(37.142803192138671875), SC_(374322570986670.3197355366942794241040052) }}, {{ SC_(37.295734405517578125), SC_(434336543053163.6784396158427358728630635) }}, {{ SC_(37.337249755859375), SC_(452229579706005.3198360961998657288335433) }}, {{ SC_(37.371295928955078125), SC_(467452657592235.4557742032788566543129601) }}, {{ SC_(37.47199249267578125), SC_(515545372809301.8307905963516087180550462) }}, {{ SC_(37.700786590576171875), SC_(644036634222999.9230954385992851671021201) }}, {{ SC_(37.77214813232421875), SC_(690331189582367.0242854882027008973993238) }}, {{ SC_(37.7916412353515625), SC_(703546341004539.6440063503740195080075798) }}, {{ SC_(38.007534027099609375), SC_(867977923035967.2387116719471097584367879) }}, {{ SC_(38.030849456787109375), SC_(887892682109427.8216437485269424411635003) }}, {{ SC_(38.232257843017578125), SC_(1080118946394155.640540236541909448482468) }}, {{ SC_(38.828411102294921875), SC_(1929604067639860.142261254221961073466133) }}, {{ SC_(38.8993072509765625), SC_(2067490868399521.448714428128264044795958) }}, {{ SC_(39.070796966552734375), SC_(2443188466949830.286454990607186034125934) }}, {{ SC_(39.57132720947265625), SC_(3977896133471904.309030036870273146195475) }}, {{ SC_(39.619602203369140625), SC_(4169413741026091.435196275993524526107061) }}, {{ SC_(39.903354644775390625), SC_(5496968390256242.123534199929484292088181) }}, {{ SC_(40.292484283447265625), SC_(8031404394724528.542925073441994364789595) }}, {{ SC_(40.337055206298828125), SC_(8387940643235804.328777240761144315540951) }}, {{ SC_(40.40936279296875), SC_(9000350988468569.862069911874242513693811) }}, {{ SC_(40.486545562744140625), SC_(9703503769049494.903270793598303590559637) }}, {{ SC_(40.58036041259765625), SC_(10632608216710854.46031088764839485644682) }}, {{ SC_(40.839870452880859375), SC_(13693091147862452.41874314869980290347395) }}, {{ SC_(41.391147613525390625), SC_(23439146134369609.98587928723498036090512) }}, {{ 
SC_(41.850940704345703125), SC_(36703294479064600.3832020868783140575162) }}, {{ SC_(42.4788970947265625), SC_(67731301681578920.49039567895447521625383) }}, {{ SC_(43.331455230712890625), SC_(155669602636333053.4786944018929963653098) }}, {{ SC_(43.404224395751953125), SC_(167132241928819035.0710856728527709620153) }}, {{ SC_(43.8334197998046875), SC_(254143262583491699.1980601913565298527589) }}, {{ SC_(43.9896087646484375), SC_(296024739641759806.6894125116837989457761) }}, {{ SC_(44.14626312255859375), SC_(344969417880314064.475422911828132537173) }}, {{ SC_(44.89459991455078125), SC_(716642754835341877.2986625908564001827266) }}, {{ SC_(45.55641937255859375), SC_(1368424686960668756.908114295234155840218) }}, {{ SC_(45.622142791748046875), SC_(1459228779377576203.085179194127347039233) }}, {{ SC_(45.96717071533203125), SC_(2044647179130784910.065675826506375503182) }}, {{ SC_(46.443317413330078125), SC_(3257069147351952822.406316784082908654148) }}, {{ SC_(46.62737274169921875), SC_(3899455212627022627.757193164190203742303) }}, {{ SC_(47.999355316162109375), SC_(14926880987843627550.42423425846631056877) }}, {{ SC_(49.637111663818359375), SC_(74190413685387077283.04485547837473387867) }}, {{ SC_(49.81011962890625), SC_(87890252389194795154.44680392696975525721) }}, {{ SC_(50.171234130859375), SC_(125189140534515913902.5259763391614688736) }}, {{ SC_(50.383525848388671875), SC_(154131408309808097686.7891755712917358669) }}, {{ SC_(50.46710968017578125), SC_(167284637436075321503.8911970981891929543) }}, {{ SC_(50.471343994140625), SC_(167980083475835196141.8258748379299905807) }}, {{ SC_(50.7309112548828125), SC_(216626578190231872367.3411388285205467896) }}, {{ SC_(50.891300201416015625), SC_(253494280682248147653.5031137670581769109) }}, {{ SC_(51.29622650146484375), SC_(376974396494639297221.5916323256511307768) }}, {{ SC_(52.0242156982421875), SC_(769533307813336085464.2719431240758770955) }}, {{ SC_(52.21900177001953125), SC_(931463445412117009569.997805462678793371) }}, {{ SC_(52.4724273681640625), SC_(1194213241460405145426.923208285103882948) }}, {{ SC_(52.739253997802734375), SC_(1551371845141512428417.754279321277408047) }}, {{ SC_(52.741176605224609375), SC_(1554299590225018307998.020329038078465699) }}, {{ SC_(53.40814208984375), SC_(2989698064310689472929.903511572929914131) }}, {{ SC_(54.5244293212890625), SC_(8938496865444328223663.418183347060066178) }}, {{ SC_(55.05193328857421875), SC_(15000102942849475726352.24634597020497441) }}, {{ SC_(55.28836822509765625), SC_(18918195096476420185424.79716369248547301) }}, {{ SC_(55.32575225830078125), SC_(19625295432594734639694.04077946718413513) }}, {{ SC_(55.522052764892578125), SC_(23795829974930383969706.74411638885122467) }}, {{ SC_(55.544170379638671875), SC_(24318128427099146738245.91342763630756375) }}, {{ SC_(55.57183837890625), SC_(24987672558936599326235.83821092811506408) }}, {{ SC_(55.808788299560546875), SC_(31531809737095643566486.71781327383155317) }}, {{ SC_(56.137737274169921875), SC_(43552095758129714470140.32600682101217921) }}, {{ SC_(56.146297454833984375), SC_(43919686479374955504528.98148221455056972) }}, {{ SC_(56.446441650390625), SC_(58972731376406161145451.87899710728216492) }}, {{ SC_(56.5754547119140625), SC_(66937712106290641632239.78290159424299056) }}, {{ SC_(56.765209197998046875), SC_(80648977731394081623112.8051042421861491) }}, {{ SC_(57.47022247314453125), SC_(161184177439288421438465.5484877575634735) }}, {{ SC_(57.791820526123046875), SC_(221066403776152634685122.7495432888528563) }}, {{ 
SC_(57.92206573486328125), SC_(251241864908534403966454.9934420615252414) }}, {{ SC_(58.059658050537109375), SC_(287606300821348197823323.3455461108172774) }}, {{ SC_(58.620555877685546875), SC_(499047878771232722946891.2015515727402121) }}, {{ SC_(58.721736907958984375), SC_(551216418687710616233343.712210604071037) }}, {{ SC_(58.73737335205078125), SC_(559751492456295202734141.0778013286664605) }}, {{ SC_(59.21694183349609375), SC_(896756596017748427943431.7704197691518272) }}, {{ SC_(59.3475341796875), SC_(1019568455497745443792447.961272554810421) }}, {{ SC_(59.574817657470703125), SC_(1274778904679711262832523.146694528881756) }}, {{ SC_(59.610748291015625), SC_(1320604762679106118330023.701104305685706) }} }};
An efficient optimal rate control scheme for JPEG2000 image coding Most of the computation and memory usage of the post-compression rate-distortion (PCRD) optimization scheme in JPEG2000 is redundant. In this paper, an efficient PCRD scheme based on priority scanning (PS) is proposed to alleviate this problem. By encoding the truncation points in a different order, determined by the priority information, the proposed method reduces the redundancy while preserving the same quality as the conventional PCRD scheme. At 0.25 bpp (1/32) compression, the proposed scheme reduces the computational cost and the working memory size of the entropy coding process by up to 52% and 71%, respectively.
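For readers unfamiliar with PCRD, here is a minimal sketch of the conventional scheme the paper optimizes: each code-block contributes candidate truncation points (rate, distortion), points not on the convex hull of the rate-distortion curve are pruned, and bytes are then allocated greedily to the hull segments with the steepest distortion decrease per byte. Names such as `blocks`, `prune_to_hull`, and `rate_budget` are illustrative, not from the paper.

# Minimal sketch of conventional PCRD truncation-point selection.
# Candidates are assumed sorted by strictly increasing rate, starting
# from the zero-rate point.

def prune_to_hull(candidates):
    """Keep only truncation points on the lower convex hull of the R-D
    curve, i.e. points with strictly decreasing distortion-rate slopes."""
    hull = [candidates[0]]
    for r, d in candidates[1:]:
        while len(hull) >= 2:
            (r1, d1), (r2, d2) = hull[-2], hull[-1]
            # If the middle point's slope does not exceed the new slope,
            # it lies above the chord and is not on the hull.
            if (d1 - d2) / (r2 - r1) <= (d2 - d) / (r - r2):
                hull.pop()
            else:
                break
        hull.append((r, d))
    return hull

def select_truncations(blocks, rate_budget):
    """Greedy Lagrangian allocation: spend bytes on the hull segments with
    the steepest distortion decrease per byte, across all code-blocks."""
    segments = []  # (slope, block_index, extra_rate, cumulative_rate)
    for b, cands in enumerate(blocks):
        hull = prune_to_hull(cands)
        for (r1, d1), (r2, d2) in zip(hull, hull[1:]):
            segments.append(((d1 - d2) / (r2 - r1), b, r2 - r1, r2))
    segments.sort(reverse=True)  # steepest distortion reduction first
    chosen, spent = {}, 0.0
    for slope, b, extra, cum in segments:
        if spent + extra > rate_budget:
            break  # stopping keeps each block's selection a hull prefix
        spent += extra
        chosen[b] = cum  # truncate block b at cumulative rate cum
    return chosen

# Example: two code-blocks, a budget of 10 bytes.
blocks = [
    [(0, 100.0), (4, 40.0), (8, 25.0), (12, 22.0)],
    [(0, 80.0), (3, 50.0), (6, 20.0)],
]
print(select_truncations(blocks, rate_budget=10))  # {0: 4, 1: 6}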
Cancer biomarkers: easier said than done. BACKGROUND Biological and technical advances have led to greatly increased research and development of cancer biomarkers. This overview lists some of the challenges and barriers to developing novel, effective cancer biomarkers, as well as enablers that could facilitate cancer biomarker development. METHODS Current scientific literature regarding development of biomarkers for cancer and other diseases was reviewed. RESULTS Challenges to developing cancer biomarkers include better understanding of biological heterogeneity, including host/tumor heterogeneity; analytical factors, such as interferences and analytical sensitivity; clinical pathologic factors, such as current histopathologic standards; and health service and market factors. More standardized biomarker definitions, standardization of cancer biology terminology, and high-quality reference materials (specimen and clinical data repositories) were identified as factors required to support advances in cancer biomarkers. CONCLUSIONS With the above enablers, novel cancer biomarkers may be useful, both for assessing early and established neoplasia more precisely and for contributing data toward development of novel practical concepts regarding cancer biology.
<reponame>nymtech/sphinx<gh_stars>10-100 // Copyright 2020 <NAME> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::constants::{HEADER_INTEGRITY_MAC_SIZE, MAX_PATH_LENGTH, NODE_META_INFO_SIZE}; use crate::header::delays::Delay; use crate::header::filler::Filler; use crate::header::keys::RoutingKeys; use crate::header::mac::HeaderIntegrityMac; use crate::header::routing::destination::FinalRoutingInformation; use crate::header::routing::nodes::{EncryptedRoutingInformation, RoutingInformation}; use crate::route::{Destination, Node, NodeAddressBytes}; use crate::{Error, ErrorKind, Result}; pub const TRUNCATED_ROUTING_INFO_SIZE: usize = ENCRYPTED_ROUTING_INFO_SIZE - (NODE_META_INFO_SIZE + HEADER_INTEGRITY_MAC_SIZE); pub const ENCRYPTED_ROUTING_INFO_SIZE: usize = (NODE_META_INFO_SIZE + HEADER_INTEGRITY_MAC_SIZE) * MAX_PATH_LENGTH; pub mod destination; pub mod nodes; pub const FORWARD_HOP: RoutingFlag = 1; pub const FINAL_HOP: RoutingFlag = 2; pub type RoutingFlag = u8; #[derive(Default)] pub struct Version { major: u8, minor: u8, patch: u8, } impl Version { pub fn new() -> Self { Self { major: env!("CARGO_PKG_VERSION_MAJOR").to_string().parse().unwrap(), minor: env!("CARGO_PKG_VERSION_MINOR").to_string().parse().unwrap(), patch: env!("CARGO_PKG_VERSION_PATCH").to_string().parse().unwrap(), } } pub fn to_bytes(&self) -> Vec<u8> { vec![self.major, self.minor, self.patch] } } // the derivation is only required for the tests. please remove it in production #[derive(Clone, Debug)] pub struct EncapsulatedRoutingInformation { pub(crate) enc_routing_information: EncryptedRoutingInformation, pub(crate) integrity_mac: HeaderIntegrityMac, } impl EncapsulatedRoutingInformation { pub fn encapsulate( enc_routing_information: EncryptedRoutingInformation, integrity_mac: HeaderIntegrityMac, ) -> Self { Self { enc_routing_information, integrity_mac, } } pub fn new( route: &[Node], destination: &Destination, delays: &[Delay], routing_keys: &[RoutingKeys], filler: Filler, ) -> Self { assert_eq!(route.len(), routing_keys.len()); assert_eq!(delays.len(), route.len()); let final_keys = match routing_keys.last() { Some(k) => k, None => panic!("empty keys"), }; let encapsulated_destination_routing_info = Self::for_final_hop(destination, final_keys, filler, route.len()); Self::for_forward_hops( encapsulated_destination_routing_info, delays, route, routing_keys, ) } fn for_final_hop( dest: &Destination, routing_keys: &RoutingKeys, filler: Filler, route_len: usize, ) -> Self { // personal note: I like how this looks so much. 
FinalRoutingInformation::new(dest, route_len) .add_padding(route_len) // add padding to obtain correct destination length .encrypt(routing_keys.stream_cipher_key, route_len) // encrypt with the key of final node (in our case service provider) .combine_with_filler(filler, route_len) // add filler to get header of correct length .encapsulate_with_mac(routing_keys.header_integrity_hmac_key) // combine the previous data with a MAC on the header (also calculated with the SPs key) } fn for_forward_hops( encapsulated_destination_routing_info: Self, delays: &[Delay], route: &[Node], // [Mix0, Mix1, Mix2, ..., Mix_{v-1}, Mix_v] routing_keys: &[RoutingKeys], // [Keys0, Keys1, Keys2, ..., Keys_{v-1}, Keys_v] ) -> Self { route .iter() .skip(1) // we don't want the first element as person creating the packet knows the address of the first hop .map(|node| node.address.as_bytes()) // we only care about the address field .zip( // we need both route (i.e. address field) and corresponding keys of the PREVIOUS hop routing_keys.iter().take(routing_keys.len() - 1), // we don't want last element - it was already used to encrypt the destination ) .zip(delays.iter().take(delays.len() - 1)) // no need for the delay for the final node .rev() // we are working from the 'inside' // we should be getting here // [(Mix_v, Keys_{v-1}, Delay_{v-1}), (Mix_{v-1}, Keys_{v-2}, Delay_{v-2}), ..., (Mix2, Keys1, Delay1), (Mix1, Keys0, Delay0)] .fold( // we start from the already created encrypted final routing info and mac for the destination // (encrypted with Keys_v) encapsulated_destination_routing_info, |next_hop_encapsulated_routing_information, ((current_node_address, previous_node_routing_keys), delay)| { RoutingInformation::new( NodeAddressBytes::from_bytes(current_node_address), delay.to_owned(), next_hop_encapsulated_routing_information, ) .encrypt(previous_node_routing_keys.stream_cipher_key) .encapsulate_with_mac(previous_node_routing_keys.header_integrity_hmac_key) }, ) } pub fn to_bytes(&self) -> Vec<u8> { self.integrity_mac .as_bytes() .iter() .cloned() .chain(self.enc_routing_information.get_value_ref().iter().cloned()) .collect() } pub fn from_bytes(bytes: &[u8]) -> Result<Self> { if bytes.len() != HEADER_INTEGRITY_MAC_SIZE + ENCRYPTED_ROUTING_INFO_SIZE { return Err(Error::new( ErrorKind::InvalidRouting, format!( "tried to recover routing information using {} bytes, expected {}", bytes.len(), HEADER_INTEGRITY_MAC_SIZE + ENCRYPTED_ROUTING_INFO_SIZE ), )); } let mut integrity_mac_bytes = [0u8; HEADER_INTEGRITY_MAC_SIZE]; let mut enc_routing_info_bytes = [0u8; ENCRYPTED_ROUTING_INFO_SIZE]; // first bytes represent the mac integrity_mac_bytes.copy_from_slice(&bytes[..HEADER_INTEGRITY_MAC_SIZE]); // the rest are for the routing info enc_routing_info_bytes.copy_from_slice( &bytes[HEADER_INTEGRITY_MAC_SIZE ..HEADER_INTEGRITY_MAC_SIZE + ENCRYPTED_ROUTING_INFO_SIZE], ); let integrity_mac = HeaderIntegrityMac::from_bytes(integrity_mac_bytes); let enc_routing_information = EncryptedRoutingInformation::from_bytes(enc_routing_info_bytes); Ok(EncapsulatedRoutingInformation { enc_routing_information, integrity_mac, }) } } #[cfg(test)] mod encapsulating_all_routing_information { use super::*; use crate::test_utils::{ fixtures::{destination_fixture, filler_fixture, routing_keys_fixture}, random_node, }; #[test] #[should_panic] fn it_panics_if_route_is_longer_than_keys() { let route = [random_node(), random_node(), random_node()]; let destination = destination_fixture(); let delays = [ Delay::new_from_nanos(10), 
Delay::new_from_nanos(20), Delay::new_from_nanos(30), ]; let keys = [routing_keys_fixture(), routing_keys_fixture()]; let filler = filler_fixture(route.len() - 1); EncapsulatedRoutingInformation::new(&route, &destination, &delays, &keys, filler); } #[test] #[should_panic] fn it_panics_if_keys_are_longer_than_route() { let route = [random_node(), random_node()]; let destination = destination_fixture(); let delays = [ Delay::new_from_nanos(10), Delay::new_from_nanos(20), Delay::new_from_nanos(30), ]; let keys = [ routing_keys_fixture(), routing_keys_fixture(), routing_keys_fixture(), ]; let filler = filler_fixture(route.len() - 1); EncapsulatedRoutingInformation::new(&route, &destination, &delays, &keys, filler); } #[test] #[should_panic] fn it_panics_if_empty_route_is_provided() { let route = vec![]; let destination = destination_fixture(); let delays = [ Delay::new_from_nanos(10), Delay::new_from_nanos(20), Delay::new_from_nanos(30), ]; let keys = [ routing_keys_fixture(), routing_keys_fixture(), routing_keys_fixture(), ]; let filler = filler_fixture(route.len() - 1); EncapsulatedRoutingInformation::new(&route, &destination, &delays, &keys, filler); } #[test] #[should_panic] fn it_panic_if_empty_keys_are_provided() { let route = [random_node(), random_node()]; let destination = destination_fixture(); let delays = [ Delay::new_from_nanos(10), Delay::new_from_nanos(20), Delay::new_from_nanos(30), ]; let keys = vec![]; let filler = filler_fixture(route.len() - 1); EncapsulatedRoutingInformation::new(&route, &destination, &delays, &keys, filler); } } #[cfg(test)] mod encapsulating_forward_routing_information { use super::*; use crate::test_utils::{ fixtures::{destination_fixture, filler_fixture, routing_keys_fixture}, random_node, }; #[test] fn it_correctly_generates_sphinx_routing_information_for_route_of_length_3() { // this is basically loop unwrapping, but considering the complex logic behind it, it's warranted let route = [random_node(), random_node(), random_node()]; let destination = destination_fixture(); let delay0 = Delay::new_from_nanos(10); let delay1 = Delay::new_from_nanos(20); let delay2 = Delay::new_from_nanos(30); let delays = [delay0.clone(), delay1.clone(), delay2].to_vec(); let routing_keys = [ routing_keys_fixture(), routing_keys_fixture(), routing_keys_fixture(), ]; let filler = filler_fixture(route.len() - 1); let filler_copy = filler_fixture(route.len() - 1); assert_eq!(filler, filler_copy); let destination_routing_info = EncapsulatedRoutingInformation::for_final_hop( &destination, &routing_keys.last().unwrap(), filler, route.len(), ); let destination_routing_info_copy = destination_routing_info.clone(); // sanity check to make sure our 'copy' worked assert_eq!( destination_routing_info .enc_routing_information .get_value_ref() .to_vec(), destination_routing_info_copy .enc_routing_information .get_value_ref() .to_vec() ); assert_eq!( destination_routing_info.integrity_mac.as_bytes().to_vec(), destination_routing_info_copy .integrity_mac .as_bytes() .to_vec() ); let routing_info = EncapsulatedRoutingInformation::for_forward_hops( destination_routing_info, &delays, &route, &routing_keys, ); let layer_1_routing = RoutingInformation::new(route[2].address, delay1, destination_routing_info_copy) .encrypt(routing_keys[1].stream_cipher_key) .encapsulate_with_mac(routing_keys[1].header_integrity_hmac_key); // this is what first mix should receive let layer_0_routing = RoutingInformation::new(route[1].address, delay0, layer_1_routing) .encrypt(routing_keys[0].stream_cipher_key) 
.encapsulate_with_mac(routing_keys[0].header_integrity_hmac_key); assert_eq!( routing_info .enc_routing_information .get_value_ref() .to_vec(), layer_0_routing .enc_routing_information .get_value_ref() .to_vec() ); assert_eq!( routing_info.integrity_mac.into_inner(), layer_0_routing.integrity_mac.into_inner() ); } #[test] fn it_correctly_generates_sphinx_routing_information_for_route_of_max_length() { // this is basically loop unwrapping, but considering the complex iterator, it's warranted assert_eq!(5, MAX_PATH_LENGTH); // make sure we catch it if we decided to change the constant /* since we're using max path length we expect literally: n4 || m4 || n3 || m3 || n2 || m2 || n1 || m1 || d || i || p // so literally no filler! where: {n1, n2, ...} are node addresses {m1, m2, ...} are macs on previous layers d is destination address i is destination identifier p is destination padding */ // TODO: IMPLEMENT SPHINX HEADER LAYER UNWRAPPING // HOWEVER! to test it, we need to first wrap function to unwrap header layer because each consecutive (ni, mi) pair is encrypted } } #[cfg(test)] mod converting_encapsulated_routing_info_to_bytes { use super::*; use crate::test_utils::fixtures::encapsulated_routing_information_fixture; #[test] fn it_is_possible_to_convert_back_and_forth() { let encapsulated_routing_info = encapsulated_routing_information_fixture(); let encapsulated_routing_info_bytes = encapsulated_routing_info.to_bytes(); let recovered_routing_info = EncapsulatedRoutingInformation::from_bytes(&encapsulated_routing_info_bytes).unwrap(); assert_eq!( encapsulated_routing_info .enc_routing_information .get_value_ref() .to_vec(), recovered_routing_info .enc_routing_information .get_value_ref() .to_vec() ); assert_eq!( encapsulated_routing_info.integrity_mac.into_inner(), recovered_routing_info.integrity_mac.into_inner() ); } }
<reponame>DandelionSprout/Stringlate package io.github.lonamiwebs.stringlate.cli; import io.github.lonamiwebs.stringlate.classes.resources.ResourceStringComparator; import io.github.lonamiwebs.stringlate.settings.AppSettings; public class Main { public static void main(String[] args) { AppSettings appSettings = new AppSettings(); System.out.println(appSettings.getPathToPropertiesFile()); appSettings.setStringSortMode(ResourceStringComparator.SORT_STRING_LENGTH); appSettings.setDownloadIconsAllowed(false); System.out.println("Icons allowed: " + appSettings.isDownloadIconsAllowed()); appSettings.setDownloadIconsAllowed(true); System.out.println("Icons allowed: " + appSettings.isDownloadIconsAllowed()); } }
import { Session } from "@inrupt/solid-client-authn-node"; export interface IAuthSession { session: ISession | Session } export interface ISession { isLoggedIn: boolean, info : { isLoggedIn?: boolean, webId?: string }, fetch : (url: RequestInfo, init?: RequestInit) => Promise<Response>, logout : () => void | Promise<void>, } export interface IEssSession extends Session { isLoggedIn?: boolean, webId?: string, WebID?: string } export interface INssSession extends ISession { loggedIn?: boolean, webId?: string, WebID?: string }
import { Meta, Story } from '@storybook/react'; import Cooldown, { Props } from './index'; export default { title: 'Components/Cooldown', component: Cooldown, argTypes: { progressPercent: { control: { type: 'range', min: 0, max: 100, step: 1, }, }, }, } as Meta; const Template: Story<Props> = (args) => <Cooldown {...args} />; export const Base = Template.bind({}); Base.args = { progressPercent: 12.5, }; export const Smaller = Template.bind({}); Smaller.args = { progressPercent: 12.5, width: 32, height: 32, }; export const Bigger = Template.bind({}); Bigger.args = { progressPercent: 12.5, width: 128, height: 128, };
<reponame>Virus288/Time-Tracking-API-V2
import * as mongoose from 'mongoose';

export const UserSchema = new mongoose.Schema(
    {
        name: {
            type: String,
            required: [true, 'Please enter username'],
            maxLength: [50, 'Max length of username is 50 characters'],
            minLength: [5, 'Min length of username is 5 characters'],
        },
        current_task: {
            type: mongoose.Schema.Types.ObjectId,
            ref: 'Task',
            required: false,
            default: null,
        }
    },
    { timestamps: false },
);
package com.hinkmond.hello; public class Greeting { private final long instanceID; private final String instanceContent; public Greeting(long id, String content) { this.instanceID = id; this.instanceContent = content; } public long getId() { return instanceID; } public String getContent() { return instanceContent; } }
/**
 * Data permission configuration for the system module.
 */
package cn.iocoder.yudao.module.system.framework.datapermission;
/* Fill_edge_structure:
 * Polygon helper function: initialises an edge structure for the 2d
 * rasteriser.
 */
void Fill_edge_structure(T_Polygon_edge *edge, short *i1, short *i2)
{
  short *it;

  /* Order the endpoints so that i1 is the upper one. */
  if (i2[1] < i1[1])
  {
    it = i1;
    i1 = i2;
    i2 = it;
  }

  edge->top = i1[1];
  edge->bottom = i2[1] - 1;
  /* Horizontal step per scanline. */
  edge->dx = ((float) i2[0] - (float) i1[0]) / ((float) i2[1] - (float) i1[1]);
  /* Start x, biased by just under 0.5 so truncation behaves like rounding. */
  edge->x = i1[0] + 0.4999999;
  edge->prev = NULL;
  edge->next = NULL;

  /* For left-leaning edges, pre-step x (equivalent to x += MIN(dx+1, 0)). */
  if (edge->dx+1 < 0.0)
    edge->x += edge->dx+1;

  /* Edge width per scanline: w = MAX(ABS(dx)-1, 0). */
  if (edge->dx >= 0.0)
    edge->w = edge->dx;
  else
    edge->w = -(edge->dx);

  if (edge->w-1.0 < 0.0)
    edge->w = 0.0;
  else
    edge->w = edge->w-1;
}
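To show where these fields come in, here is a minimal sketch, in Python for brevity, of how a scanline rasteriser typically consumes a pair of such edges. The dictionary keys mirror T_Polygon_edge; rasterize_edge_pair and draw_hspan are hypothetical helpers, not functions from this codebase.

def rasterize_edge_pair(left, right, draw_hspan):
    """Step a left/right edge pair one scanline at a time, from 'top' to
    'bottom', advancing each x by its per-scanline slope dx and widening
    the span by w so steep edges stay gap-free between scanlines."""
    for y in range(left["top"], left["bottom"] + 1):
        draw_hspan(int(left["x"]), int(right["x"] + right["w"]), y)
        left["x"] += left["dx"]
        right["x"] += right["dx"]

# Example with a trivial span painter:
left = {"top": 0, "bottom": 2, "x": 1.4999999, "dx": 0.5, "w": 0.0}
right = {"top": 0, "bottom": 2, "x": 5.4999999, "dx": -0.5, "w": 0.0}
rasterize_edge_pair(left, right,
                    lambda x0, x1, y: print(f"span y={y}: {x0}..{x1}"))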
package s3crypto import ( "bytes" "testing" "github.com/aws/aws-sdk-go/internal/sdkio" ) func TestBytesReadWriteSeeker_Read(t *testing.T) { b := &bytesReadWriteSeeker{[]byte{1, 2, 3}, 0} expected := []byte{1, 2, 3} buf := make([]byte, 3) n, err := b.Read(buf) if err != nil { t.Errorf("expected no error, but received %v", err) } if e, a := 3, n; e != a { t.Errorf("expected %d, but received %d", e, a) } if !bytes.Equal(expected, buf) { t.Error("expected equivalent byte slices, but received otherwise") } } func TestBytesReadWriteSeeker_Write(t *testing.T) { b := &bytesReadWriteSeeker{} expected := []byte{1, 2, 3} buf := make([]byte, 3) n, err := b.Write([]byte{1, 2, 3}) if err != nil { t.Errorf("expected no error, but received %v", err) } if e, a := 3, n; e != a { t.Errorf("expected %d, but received %d", e, a) } n, err = b.Read(buf) if err != nil { t.Errorf("expected no error, but received %v", err) } if e, a := 3, n; e != a { t.Errorf("expected %d, but received %d", e, a) } if !bytes.Equal(expected, buf) { t.Error("expected equivalent byte slices, but received otherwise") } } func TestBytesReadWriteSeeker_Seek(t *testing.T) { b := &bytesReadWriteSeeker{[]byte{1, 2, 3}, 0} expected := []byte{2, 3} m, err := b.Seek(1, sdkio.SeekStart) if err != nil { t.Errorf("expected no error, but received %v", err) } if e, a := 1, int(m); e != a { t.Errorf("expected %d, but received %d", e, a) } buf := make([]byte, 3) n, err := b.Read(buf) if err != nil { t.Errorf("expected no error, but received %v", err) } if e, a := 2, n; e != a { t.Errorf("expected %d, but received %d", e, a) } if !bytes.Equal(expected, buf[:n]) { t.Error("expected equivalent byte slices, but received otherwise") } }
/** * Broker management options for Artemis/AMQ7 brokers. */ public class BrokerArtemisManager extends AbstractArtemisManager implements BrokerManager { // TODO update this class if possible public BrokerArtemisManager(String url, Credentials credentials, String brokerName, String host) throws IOException { super(url, credentials, brokerName, host); } @Override protected Resolver<ActiveMQServerControl, QueueControl, QueueControl, AddressControl, DivertControl> initializeResolver() { return new ArtemisResolver(mBeanServerConnection, getBrokerName()); } @Override public void reload() throws MalformedObjectNameException { // TODO !? // ActiveMQServerControl serverControl = getResolver(ArtemisResolver.class).getBrokerView(); // serverControl.resetAllMessageCounters(); } @Override public void getTransportConnectors() throws Exception { ActiveMQServerControl serverControl = getResolver(ArtemisResolver.class).getBrokerView(); logger.info(formatter.convertJSON(serverControl.getConnectorsAsJSON())); logger.info(formatter.convertJSON(new JSONObject(serverControl.getConnectors()).toString())); logger.info(formatter.convertJSON(new JSONArray(serverControl.getAddressNames()).toString())); } @Override public void getNetworkTopology() throws Exception { ActiveMQServerControl serverControl = getResolver(ArtemisResolver.class).getBrokerView(); logger.info(formatter.convertJSON(serverControl.listNetworkTopology())); } @Override public void getSessions(String connectionId) throws Exception { ActiveMQServerControl serverControl = getResolver(ArtemisResolver.class).getBrokerView(); if (connectionId == null) { logger.info(formatter.convertJSON(serverControl.listAllSessionsAsJSON())); } else { logger.info(formatter.convertJSON(serverControl.listSessionsAsJSON(connectionId))); } } @Override public BrokerType getBrokerType() { return BrokerType.ARTEMIS; } public void getAllBrokerDestinations() throws Exception { Map<String, Object> allDestinationsMap = new LinkedHashMap<>(); allDestinationsMap.put("address", new JSONArray(getAddresses())); allDestinationsMap.put("queue", new JSONArray(getQueues().keySet())); allDestinationsMap.put("topic", new JSONArray(getTopics().keySet())); logger.info(formatter.convertJSON(new JSONObject(allDestinationsMap).toString())); } public void getAllBrokerDestinationsProperties() throws Exception { Map<String, Object> allDestinationsMap = new LinkedHashMap<>(); Map<String, String> queues = getQueues(); for (String queue : queues.keySet()) { allDestinationsMap.put(queue, getDestinationProperties(queues.get(queue), queue, NodeType.QUEUE)); } Map<String, String> topics = getTopics(); for (String topic : topics.keySet()) { allDestinationsMap.put(topic, getDestinationProperties(topics.get(topic), topic, NodeType.TOPIC)); } for (String address : getUnboundAddresses()) { allDestinationsMap.put(address, getDestinationProperties(address, null, NodeType.ADDRESS)); } logger.info(formatter.convertJSON(new JSONObject(allDestinationsMap).toString())); } }
/** * @author The eFaps Team */ public class SessionPanel extends Panel { /** * Reference to the style sheet. */ public static final EFapsContentReference CSS = new EFapsContentReference(AbstractSortableProvider.class, "BPM.css"); /** * Needed for serialization. */ private static final long serialVersionUID = 1L; /** * @param _wicketId wicketId of this component * @param _pageReference reference to the page * @throws EFapsException on error */ public SessionPanel(final String _wicketId, final PageReference _pageReference) throws EFapsException { super(_wicketId); final SessionTablePanel sessionTable = new SessionTablePanel("sessionTable", _pageReference, new SessionProvider()); add(sessionTable); } }
// Package libcontainer provides a native Go implementation for creating containers // with namespaces, cgroups, capabilities, and filesystem access controls. // It allows you to manage the lifecycle of the container performing additional operations // after the container is created. package libcontainer import ( "os" "time" "github.com/opencontainers/runc/libcontainer/configs" "github.com/opencontainers/runtime-spec/specs-go" ) // Status is the status of a container. type Status int const ( // Created is the status that denotes the container exists but has not been run yet. Created Status = iota // Running is the status that denotes the container exists and is running. Running // Pausing is the status that denotes the container exists, it is in the process of being paused. Pausing // Paused is the status that denotes the container exists, but all its processes are paused. Paused // Stopped is the status that denotes the container does not have a created or running process. Stopped ) func (s Status) String() string { switch s { case Created: return "created" case Running: return "running" case Pausing: return "pausing" case Paused: return "paused" case Stopped: return "stopped" default: return "unknown" } } // BaseState represents the platform agnostic pieces relating to a // running container's state type BaseState struct { // ID is the container ID. ID string `json:"id"` // InitProcessPid is the init process id in the parent namespace. InitProcessPid int `json:"init_process_pid"` // InitProcessStartTime is the init process start time in clock cycles since boot time. InitProcessStartTime uint64 `json:"init_process_start"` // Created is the unix timestamp for the creation time of the container in UTC Created time.Time `json:"created"` // Config is the container's configuration. Config configs.Config `json:"config"` } // BaseContainer is a libcontainer container object. // // Each container is thread-safe within the same process. Since a container can // be destroyed by a separate process, any function may return that the container // was not found. BaseContainer includes methods that are platform agnostic. type BaseContainer interface { // Returns the ID of the container ID() string // Returns the current status of the container. Status() (Status, error) // State returns the current container's state information. State() (*State, error) // OCIState returns the current container's state information. OCIState() (*specs.State, error) // Returns the current config of the container. Config() configs.Config // Returns the PIDs inside this container. The PIDs are in the namespace of the calling process. // // Some of the returned PIDs may no longer refer to processes in the Container, unless // the Container state is PAUSED in which case every PID in the slice is valid. Processes() ([]int, error) // Returns statistics for the container. Stats() (*Stats, error) // Set resources of container as configured // // We can use this to change resources when containers are running. // Set(config configs.Config) error // Start a process inside the container. Returns error if process fails to // start. You can track process lifecycle with passed Process structure. Start(process *Process) (err error) // Run immediately starts the process inside the container. Returns error if process // fails to start. It does not block waiting for the exec fifo after start returns but // opens the fifo after start returns. 
Run(process *Process) (err error) // Destroys the container, if its in a valid state, after killing any // remaining running processes. // // Any event registrations are removed before the container is destroyed. // No error is returned if the container is already destroyed. // // Running containers must first be stopped using Signal(..). // Paused containers must first be resumed using Resume(..). Destroy() error // Signal sends the provided signal code to the container's initial process. // // If all is specified the signal is sent to all processes in the container // including the initial process. Signal(s os.Signal, all bool) error // Exec signals the container to exec the users process at the end of the init. Exec() error }
/* Move window win as a result of pointer motion to coordinates rel_x,rel_y. */ void mouse_move(const int16_t rel_x, const int16_t rel_y) { if (current_window == NULL || current_window->ws != current_workspace) return; current_window->x = rel_x; current_window->y = rel_y; if (borders[2] > 0) snap_window(current_window); move_window_limit(current_window); }
package init import ( "net" "github.com/johnstarich/gomodtest/dns" ) func init() { net.DefaultResolver = dns.New() }
//**********************************************************************************
//Copyright 2015 Applied Research Associates, Inc.
//Licensed under the Apache License, Version 2.0 (the "License"); you may not use
//this file except in compliance with the License. You may obtain a copy of the License
//at:
//http://www.apache.org/licenses/LICENSE-2.0
//Unless required by applicable law or agreed to in writing, software distributed under
//the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
//CONDITIONS OF ANY KIND, either express or implied. See the License for the
//specific language governing permissions and limitations under the License.
//**************************************************************************************
#include <biogears/exports.h>

#include <fstream>
#include <iostream>
#include <string>
#include <thread>

#include "exec/Driver.h"
#include "data/CompoundGenerator.h"
#include "data/EnvironmentGenerator.h"
#include "data/NutritionGenerator.h"
#include "data/PatientGenerator.h"
#include "data/StabilizationGenerator.h"
#include "data/SubstanceGenerator.h"
#include "utils/Arguments.h"
#include "utils/Config.h"
#include "utils/ReportWriter.h"

#include <biogears/cdm/utils/ConfigParser.h>
#include <biogears/cdm/utils/FileUtils.h>
#include <biogears/version.h>

void print_help()
{
  std::cout << "Usage cmd_bio [HELP, GENDATA, GENSTATES, GENSEPSIS, VERIFY, VERSION]\n"
               "[JOBS N]\n"
               "[CONFIG [FILE]...], [SCENARIO [FILE]...], [TEST cdm|bge], [VALIDATE patient|drug|system|all]\n"
               "[GENTABLES html|md|xml|web|all]\n\n";

  std::cout << "Flags: \n";
  std::cout << "-v : Print Version\n";
  std::cout << "-j : Thread control -j N\n";
  std::cout << "-h : Print this message\n";
#if defined(BIOGEARS_SUBPROCESS_SUPPORT)
  std::cout << "-t : Use Threads instead of Subprocess\n";
#endif
  std::cout << std::endl;
  exit(0);
}

//!
//! \brief Reads command line arguments and executes the corresponding operation
//! \param argc : Number of command line arguments
//! \param argv : Pointers to command line arguments
//! \return int 0 on success, other numbers on failure
//!
int main(int argc, char** argv) { biogears::Arguments args( #if defined(BIOGEARS_SUBPROCESS_SUPPORT) { "H", "HELP", "GENDATA", "GENSEPSIS", "GENSTATES", "VERIFY", "V", "VERSION", "THREADED", "T" }, /*Options*/ #else { "H", "HELP", "GENDATA", "GENSEPSIS", "GENSTATES", "VERIFY", "V", "VERSION" }, /*Options*/ #endif { "J", "JOBS" }, /*Keywords*/ { "TEST", "CONFIG", "SCENARIO", "VALIDATE", "GENTABLES" } /*MultiWords*/ ); bool run_patient_validation = false; bool run_drug_validation = false; bool run_system_validation = false; bool run_verification = false; unsigned int thread_count = std::thread::hardware_concurrency(); if (!args.parse(argc, argv) || args.Option("HELP") || args.Option("H") || args.empty()) { std::cerr << args.error_msg() << "\n"; print_help(); } if (args.Option("VERSION") || args.Option("V")) { std::cout << "Using libbiogears_lite-" << biogears::full_version_string() << std::endl; exit(0); } if (args.KeywordFound("JOBS")) { try { thread_count = std::stoi(args.Keyword("JOBS")); } catch (std::exception) { std::cerr << "Error: JOBS given but " << args.Keyword("JOBS") << " is not a valid Integer.\n"; exit(1); } } if (args.KeywordFound("J")) { try { thread_count = std::stoi(args.Keyword("J")); } catch (std::exception) { std::cerr << "Error: J given but " << args.Keyword("J") << " is not a valid Integer.\n"; exit(1); } } biogears::Driver driver { argv[0], thread_count }; const biogears::Config conf { "Email.config", true }; driver.configure(conf); bool as_subprocess = false; #if defined(BIOGEARS_SUBPROCESS_SUPPORT) as_subprocess = true; as_subprocess = !(args.Option("THREADED") || args.Option("T")); #endif if (args.Option("GENSTATES")) { const biogears::Config runs { "GenStates.config" }; driver.queue(runs, as_subprocess); } if (args.Option("GENSEPSIS")) { const biogears::Config runs { "GenSepsisStates.config" }; driver.queue(runs, as_subprocess); } if (args.Option("GENDATA")) { // gen-data std::vector<std::unique_ptr<biogears::CSVToXMLConvertor>> generators; generators.push_back(std::make_unique<biogears::SubstanceGenerator>()); generators.push_back(std::make_unique<biogears::EnvironmentGenerator>()); generators.push_back(std::make_unique<biogears::PatientGenerator>()); generators.push_back(std::make_unique<biogears::StabilizationGenerator>()); generators.push_back(std::make_unique<biogears::NutritionGenerator>()); generators.push_back(std::make_unique<biogears::CompoundGenerator>()); for (auto& gen : generators) { std::cout << "Generating Data: " << gen->Path() << gen->Filename() << "\n"; gen->parse(); gen->save(); std::cout << "\n\n"; } } if (args.Option("VERIFY")) { run_verification = true; } #ifdef CMD_BIO_SUPPORT_CIRCUIT_TEST if (args.MultiWordFound("TEST")) { auto tests = args.MultiWord("TEST"); for (auto& test : tests) { std::transform(test.begin(), test.end(), test.begin(), ::tolower); if (test == "cdm") { // run-cdm-tests biogears::Config runs { "CDMUnitTests.config" }; driver.queue(runs, as_subprocess); } else if (test == "bge") { biogears::Config runs { "BGEUnitTests.config" }; driver.queue(runs, as_subprocess); } else { std::cout << "Warning: No Test known as " << test << " exists.\n"; } } } #endif if (args.MultiWordFound("VALIDATE")) { auto tests = args.MultiWord("VALIDATE"); for (auto& test : tests) { std::transform(test.begin(), test.end(), test.begin(), ::tolower); if (test == "patient") { run_patient_validation = true; } else if (test == "drug") { run_drug_validation = true; } else if (test == "system") { run_system_validation = true; } else if (test == "all") { 
run_patient_validation = run_drug_validation = run_system_validation = run_verification = true; } else { std::cout << "Warning: No Validation known as " << test << " exists.\n"; } } } if (run_system_validation) { // run-system-validation const auto runs = biogears::Config("ValidationSystems.config"); driver.queue(runs, as_subprocess); } if (run_patient_validation) { //run-patient-validation const auto runs = biogears::Config("ValidationPatients.config"); driver.queue(runs, as_subprocess); } if (run_drug_validation) { // run-drug-validation const auto runs = biogears::Config("ValidationDrugs.config"); driver.queue(runs, as_subprocess); } if (run_verification) { // run-verification const auto runs = biogears::Config("VerificationScenarios.config"); driver.queue(runs, as_subprocess); } if (args.MultiWordFound("CONFIG")) { auto configs = biogears::Config {}; for (auto& arg : args.MultiWord("CONFIG")) { const auto runs = biogears::Config(arg); driver.queue(runs, as_subprocess); } } if (args.MultiWordFound("SCENARIO")) { auto configs = biogears::Config {}; for (auto& arg : args.MultiWord("SCENARIO")) { auto ex = biogears::Executor { arg, biogears::EDriver::ScenarioTestDriver }; ex.Computed("Scenarios/"); ex.Scenario(arg); configs.push_back(ex); } driver.queue(configs, as_subprocess); } if (driver.total_work()) { driver.run(); driver.stop_when_empty(); driver.join(); } //We want Gentables to run after all other work has finished if (args.MultiWordFound("GENTABLES")) { biogears::ReportWriter report_writer; auto tables = args.MultiWord("GENTABLES"); for (auto& table : tables) { std::transform(table.begin(), table.end(), table.begin(), ::tolower); if (table == "html") { report_writer.gen_tables(biogears::ReportWriter::HTML); } else if (table == "md") { report_writer.gen_tables(biogears::ReportWriter::MD); } else if (table == "xml") { report_writer.gen_tables(biogears::ReportWriter::XML); } else if (table == "web") { report_writer.gen_tables(biogears::ReportWriter::WEB); } else if (table == "all") { report_writer.gen_tables(biogears::ReportWriter::HTML); report_writer.gen_tables(biogears::ReportWriter::MD); report_writer.gen_tables(biogears::ReportWriter::XML); } else { std::cout << "Warning: " << table << " is not a valid keyword.\n"; } } } return 0; }
/**
 * Holds a test coordinating conversation with the test clients. This should consist of assigning the test roles,
 * beginning the test, gathering the test reports from the participants, and checking for assertion failures against
 * the test reports.
 *
 * @param testCircuit    The test circuit.
 * @param assertions     The list of assertions to apply to the test circuit.
 * @param testProperties The test case definition.
 */
public void sequenceTest(Circuit testCircuit, List<Assertion> assertions, Properties testProperties)
{
    log.debug("public void sequenceTest(Circuit testCircuit, List<Assertion> assertions, Properties testProperties = "
        + testProperties + "): called");

    TestClientDetails sender = getSender();
    List<TestClientDetails> receivers = getReceivers();
    ConversationFactory conversationFactory = getConversationFactory();

    try
    {
        Session session = conversationFactory.getSession();
        Destination senderControlTopic = session.createTopic(sender.privateControlKey);
        Destination receiverControlTopic = session.createTopic(receivers.get(0).privateControlKey);

        ConversationFactory.Conversation senderConversation = conversationFactory.startConversation();
        ConversationFactory.Conversation receiverConversation = conversationFactory.startConversation();

        // Assign the sender role to the first client.
        Message assignSender = conversationFactory.getSession().createMessage();
        TestUtils.setPropertiesOnMessage(assignSender, testProperties);
        assignSender.setStringProperty("CONTROL_TYPE", "ASSIGN_ROLE");
        assignSender.setStringProperty("ROLE", "SENDER");
        senderConversation.send(senderControlTopic, assignSender);

        // Assign the receiver role to the second client.
        Message assignReceiver = session.createMessage();
        TestUtils.setPropertiesOnMessage(assignReceiver, testProperties);
        assignReceiver.setStringProperty("CONTROL_TYPE", "ASSIGN_ROLE");
        assignReceiver.setStringProperty("ROLE", "RECEIVER");
        receiverConversation.send(receiverControlTopic, assignReceiver);

        // Wait for both clients to acknowledge their role assignments.
        senderConversation.receive();
        receiverConversation.receive();

        // Start the test and collect the sender's report.
        Message start = session.createMessage();
        start.setStringProperty("CONTROL_TYPE", "START");
        senderConversation.send(senderControlTopic, start);
        Message senderReport = senderConversation.receive();
        TestUtils.pause(500);

        // Request and collect the receiver's report.
        Message statusRequest = session.createMessage();
        statusRequest.setStringProperty("CONTROL_TYPE", "STATUS_REQUEST");
        receiverConversation.send(receiverControlTopic, statusRequest);
        Message receiverReport = receiverConversation.receive();
    }
    catch (JMSException e)
    {
        throw new RuntimeException("JMSException not handled.", e);
    }
}
/** * {@link org.infinispan.xsite.BackupReceiver} implementation for clustered caches. * * @author Pedro Ruivo * @since 7.1 */ public class ClusteredCacheBackupReceiver extends BaseBackupReceiver { private static final Log log = LogFactory.getLog(ClusteredCacheBackupReceiver.class); private static final boolean trace = log.isDebugEnabled(); public ClusteredCacheBackupReceiver(Cache<Object, Object> cache) { super(cache); } @Override public void handleStateTransferControl(XSiteStateTransferControlCommand command) throws Exception { XSiteStateTransferControlCommand invokeCommand = command; if (!command.getCacheName().equals(cacheName)) { //copy if the cache name is different invokeCommand = command.copyForCache(cacheName); } invokeCommand.setSiteName(command.getOriginSite()); invokeRemotelyInLocalSite(invokeCommand); } @Override public void handleStateTransferState(XSiteStatePushCommand cmd) throws Exception { //split the state and forward it to the primary owners... assertAllowInvocation(); final long endTime = timeService.expectedEndTime(cmd.getTimeout(), TimeUnit.MILLISECONDS); final ClusteringDependentLogic clusteringDependentLogic = cache.getComponentRegistry() .getComponent(ClusteringDependentLogic.class); final Map<Address, List<XSiteState>> primaryOwnersChunks = new HashMap<>(); final Address localAddress = clusteringDependentLogic.getAddress(); if (trace) { log.tracef("Received X-Site state transfer '%s'. Splitting by primary owner.", cmd); } for (XSiteState state : cmd.getChunk()) { final Address primaryOwner = clusteringDependentLogic.getPrimaryOwner(state.key()); List<XSiteState> primaryOwnerList = primaryOwnersChunks.get(primaryOwner); if (primaryOwnerList == null) { primaryOwnerList = new LinkedList<>(); primaryOwnersChunks.put(primaryOwner, primaryOwnerList); } primaryOwnerList.add(state); } final List<XSiteState> localChunks = primaryOwnersChunks.remove(localAddress); final List<StatePushTask> tasks = new ArrayList<>(primaryOwnersChunks.size()); for (Map.Entry<Address, List<XSiteState>> entry : primaryOwnersChunks.entrySet()) { if (entry.getValue() == null || entry.getValue().isEmpty()) { continue; } if (trace) { log.tracef("Node '%s' will apply %s", entry.getKey(), entry.getValue()); } StatePushTask task = new StatePushTask(entry.getValue(), entry.getKey(), cache); tasks.add(task); task.executeRemote(); } //help gc. this is safe because the chunks was already sent primaryOwnersChunks.clear(); if (trace) { log.tracef("Local node '%s' will apply %s", localAddress, localChunks); } if (localChunks != null) { StatePushTask task = new StatePushTask(localChunks, localAddress, cache); tasks.add(task); task.executeLocal(); } if (trace) { log.tracef("Waiting for the remote tasks..."); } while (!tasks.isEmpty() && !timeService.isTimeExpired(endTime)) { for (Iterator<StatePushTask> iterator = tasks.iterator(); iterator.hasNext(); ) { if (awaitRemoteTask(iterator.next())) { iterator.remove(); } } } //the put operation can fail silently. check in the end and it is better to resend the chunk than to lose keys. assertAllowInvocation(); if (!tasks.isEmpty()) { throw new TimeoutException("Unable to apply state in the time limit."); } } private boolean awaitRemoteTask(StatePushTask task) throws Exception { try { if (trace) { log.tracef("Waiting reply from %s", task.address); } Response response = task.awaitResponse(); if (trace) { log.tracef("Response received is %s", response); } if (response == CacheNotFoundResponse.INSTANCE) { if (trace) { log.tracef("Cache not found in node '%s'. 
Retrying locally!", task.address); } assertAllowInvocation(); task.executeLocal(); } } catch (Exception e) { assertAllowInvocation(); RpcManager rpcManager = cache.getRpcManager(); if (rpcManager.getMembers().contains(task.address) && !rpcManager.getAddress().equals(task.address)) { if (trace) { log.tracef(e, "An exception was sent by %s. Retrying!", task.address); } task.executeRemote(); //retry! return false; } else { if (trace) { log.tracef(e, "An exception was sent by %s. Retrying locally!", task.address); } //if the node left the cluster, we apply the missing state. This avoids the site provider to re-send the //full chunk. task.executeLocal(); return false; } } return true; } private Map<Address, Response> invokeRemotelyInLocalSite(CacheRpcCommand command) throws Exception { final RpcManager rpcManager = cache.getRpcManager(); CompletableFuture<Map<Address, Response>> remoteFuture = rpcManager .invokeRemotelyAsync(null, command, rpcManager.getDefaultRpcOptions(true, DeliverOrder.NONE)); final Map<Address, Response> responseMap = new HashMap<>(); responseMap.put(rpcManager.getAddress(), LocalInvocation.newInstanceFromCache(cache, command).call()); responseMap.putAll(remoteFuture.get()); return responseMap; } private static class StatePushTask { private final List<XSiteState> chunk; private final Address address; private final AdvancedCache<?, ?> cache; private volatile Future<Map<Address, Response>> remoteFuture; private StatePushTask(List<XSiteState> chunk, Address address, AdvancedCache<?, ?> cache) { this.chunk = chunk; this.address = address; this.cache = cache; } public void executeRemote() { final RpcManager rpcManager = cache.getRpcManager(); remoteFuture = rpcManager.invokeRemotelyAsync(Collections.singletonList(address), newStatePushCommand(cache, chunk), rpcManager.getDefaultRpcOptions(true)); } public void executeLocal() { try { final Response response = LocalInvocation.newInstanceFromCache(cache, newStatePushCommand(cache, chunk)).call(); this.remoteFuture = CompletableFuture.completedFuture(Collections.singletonMap(address, response)); } catch (final Exception e) { this.remoteFuture = CompletableFutures.completedExceptionFuture(new ExecutionException(e)); } } public Response awaitResponse() throws Exception { Future<Map<Address, Response>> future = remoteFuture; if (future == null) { throw new NullPointerException("Should not happen!"); } Map<Address, Response> responseMap = future.get(); if (responseMap.size() != 1 || !responseMap.containsKey(address)) { throw new IllegalStateException("Shouldn't happen. Map is " + responseMap); } return responseMap.values().iterator().next(); } } }
UPDATE (Wednesday, Feb. 15): City attorney: Riverside pot business the first to be forced closed After months of legal battling, Riverside city officials shut down a medical marijuana dispensary that had been operating illegally in the Magnolia Center neighborhood. Riverside police, fire and code enforcement officials shut down the Re-up dispensary on Central Avenue between San Diego and Riverside avenues Thursday, according to a city news release. Officials said it was operating in violation of zoning laws. Riverside residents voted in 2015 to ban medical marijuana dispensaries. Riverside Ward 3 Councilman Mike Soubirous said 10 to 12 medical marijuana dispensaries could be operating illegally in the city. The legal process to shut them down could take months to a year, he said. Court records show that city attorneys filed for a permanent injunction against Re-up in September. Undercover police had purchased marijuana from the dispensary, according to the release, which also said the business violated codes related to the ability to enter and exit the facility. The city had issued cease and desist letters to the dispensary and later red-tagged it, making it illegal to enter the building, and disconnected its utilities, the release said. The dispensary’s operators started using a generator for power, the release said. Soubirous said neighbors had been complaining about the dispensary for months, then complained further about the generator. During the operation Thursday, the dispensary’s operators were removed from the building and it was boarded up. Court records show the dispensary’s owners filed a lawsuit against the city the day before the raid seeking declaratory and injunctive relief. Contact the writer: 951-368-9284 or [email protected] Twitter: @PE_alitadayon
<gh_stars>1-10 use std::str::FromStr; use super::{ParseError, ParseResult}; #[derive(Debug, Copy, Clone, PartialEq)] pub enum Instruction { Nop(isize), Acc(i64), Jmp(isize), } impl Instruction { pub fn parse(s: &str) -> ParseResult<Instruction> { let s = s.trim(); if s.len() == 0 { Err(ParseError::UnparseableLine(s.to_string())) } else { let instr_end = s.find(' ').unwrap_or(s.len()); match &s[..instr_end] { x @ "nop" => Ok(Self::Nop(Self::parse_param(x, &s[instr_end..])?)), x @ "acc" => Ok(Self::Acc(Self::parse_param(x, &s[instr_end..])?)), x @ "jmp" => Ok(Self::Jmp(Self::parse_param(x, &s[instr_end..])?)), x => Err(ParseError::UnknownInstruction(x.to_string())), } } } fn parse_param<F: FromStr>(instr: &str, s: &str) -> ParseResult<F> { let s = s.trim(); if s.len() == 0 { Err(ParseError::MissingParameter(instr.to_string())) } else { match s.parse() { Ok(i) => Ok(i), Err(_) => Err(ParseError::UnparseableParameter( instr.to_string(), s.to_string(), )), } } } } #[cfg(test)] mod tests { use super::*; #[test] fn parse_success() { assert_eq!(Ok(Instruction::Nop(-10)), Instruction::parse("nop -10")); assert_eq!(Ok(Instruction::Nop(10)), Instruction::parse("nop 10")); assert_eq!(Ok(Instruction::Acc(-10)), Instruction::parse("acc -10")); assert_eq!(Ok(Instruction::Acc(10)), Instruction::parse("acc 10")); assert_eq!(Ok(Instruction::Jmp(-10)), Instruction::parse("jmp -10")); assert_eq!(Ok(Instruction::Jmp(10)), Instruction::parse("jmp 10")); assert_eq!( Ok(Instruction::Nop(1)), Instruction::parse(" nop 1 ") ); } #[test] fn parse_failure() { assert_eq!( Err(ParseError::UnparseableLine("".to_string())), Instruction::parse("") ); assert_eq!( Err(ParseError::UnknownInstruction("p".to_string())), Instruction::parse("p") ); assert_eq!( Err(ParseError::UnknownInstruction("p".to_string())), Instruction::parse("p") ); assert_eq!( Err(ParseError::UnknownInstruction("pon".to_string())), Instruction::parse("pon") ); assert_eq!( Err(ParseError::UnknownInstruction("pon".to_string())), Instruction::parse("pon 1") ); assert_eq!( Err(ParseError::MissingParameter("nop".to_string())), Instruction::parse("nop") ); assert_eq!( Err(ParseError::UnparseableParameter( "nop".to_string(), "a".to_string() )), Instruction::parse("nop a") ); } }
S = input()
n = len(S)

# Try every way of inserting '+' or '-' between the digits,
# enumerated via the bits of `bit`.
for bit in range(2 ** (n - 1)):
    p = ['+' if (bit >> i) & 1 else '-' for i in range(n - 1)]
    result = int(S[0])
    for i in range(1, n):
        if p[i - 1] == '+':
            result += int(S[i])
        else:
            result -= int(S[i])
    if result == 7:
        print('{}{}{}{}{}{}{}=7'.format(S[0], p[0], S[1], p[1], S[2], p[2], S[3]))
        break
// https://projecteuler.net/problem=12
pub fn problem12() {
    fn prime_factorizer(mut n: i32, mut p: i32, factors: &mut Vec<i32>) {
        // Originally written recursively, but Rust doesn't handle deep
        // recursion well (it kept overflowing the stack), so iterate instead.
        // Note: the loop must run while n > 1, not n > 2; otherwise a final
        // factor of 2 is dropped (e.g. 4 would factor as [2] instead of [2, 2]).
        while n > 1 {
            if n % p == 0 {
                n = n / p;
                factors.push(p);
            } else {
                p += 1;
            }
        }
    }

    let mut n = 1;
    let mut triangle = 1;

    loop {
        n += 1;
        triangle += n;

        let mut prime_factors: Vec<i32> = vec![];
        prime_factorizer(triangle, 2, &mut prime_factors);

        // Factors are pushed in ascending order, so dedup yields the
        // distinct primes.
        let mut unique_factors = prime_factors.clone();
        unique_factors.dedup();

        // d(n) = prod(e_i + 1) over the factorization n = prod(p_i^e_i).
        let mut div_count = 1;
        for x in unique_factors.iter() {
            let mut freq = 0;
            for y in prime_factors.iter() {
                if x == y {
                    freq += 1;
                }
            }
            div_count *= freq + 1;
        }

        if div_count > 500 {
            println!("Problem 12: {}", triangle);
            break;
        }
    }
}
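The divisor count above rests on the identity d(n) = (e1+1)(e2+1)...(ek+1) for n = p1^e1 * p2^e2 * ... * pk^ek. A minimal Python sketch of the same computation using exponent counting (helper names are illustrative):

from collections import Counter

def prime_factors(n):
    """Trial-division factorization; returns the multiset of prime factors."""
    factors, p = [], 2
    while n > 1:
        while n % p == 0:
            factors.append(p)
            n //= p
        p += 1
    return factors

def divisor_count(n):
    """d(n) = product of (exponent + 1) over n's prime factorization."""
    count = 1
    for exponent in Counter(prime_factors(n)).values():
        count *= exponent + 1
    return count

assert divisor_count(28) == 6  # divisors: 1, 2, 4, 7, 14, 28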
#!/usr/bin/env python
# coding: utf-8

"""
This script has to be executed after hi_freq_data_to_csv.py and get_interval.py have successfully run.

This script should be called with 1 (or 2) arguments.
The 1st mandatory argument is the ABSOLUTE path of the top directory for the flight campaign.

/media/spectors/HDD320/lidar/20201218_fresh <<----- This is it!
----------------------------/20201218_fresh/p_00_joined_pcap_files
----------------------------/20201218_fresh/p_01_apx_csv_shapefile <<----- This must be present and will be used as input.
----------------------------/20201218_fresh/p_02_plt <<----- Not used. Just for reference.
----------------------------/20201218_fresh/p_03_pcap <<----- This must be present and will be used as input.
----------------------------/20201218_fresh/2_planned_mision
----------------------------/20201218_fresh/ .....
----------------------------/20201218_fresh/logging <<----- This is where the logs will be stored.
----------------------------/20201218_fresh/transl_table.txt <<----- This must be present and will be used as input.

The 2nd optional argument can be a boresight-calibration string. It must contain the boresight angles and be of the following form:
# RabcdefghPijklmnopYqrstuvwx
# Where abcdefgh is millionths of degree to ROLL. a is sign (p/n)
# ..... ijklmnop is millionths of degree to PITCH. i is sign (p/n)
# ..... qrstuvwx is millionths of degree to YAW. q is sign (p/n)
# In this order! ROLL -> PITCH -> YAW !
# Theoretically can encode up to 9.9° around each axis

This script combines .csv files with each of the .pcap flight lines and writes point clouds in .txt files.
It then calls a few LAStools to convert them to las, denoise and set the correct (georeference) metadata.

The script is run non-interactively.
The only exception is choosing the p_01_apx_csv_shapefile and p_03_pcap folders at the beginning if there are multiple of them.

TO DO: add support for different EPSG codes.
"""
import time
import os
import sys
import datetime
import platform
import logging
import shutil
import re
from collections import OrderedDict
from multiprocessing import Pool, cpu_count
from multiprocessing.managers import SharedMemoryManager
from multiprocessing.shared_memory import SharedMemory
from scipy.interpolate import interp1d
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scapy.all import rdpcap
#from vlp16_tables import *
import vlp16_tables

log_dir = 'p_logging'
txt_dir_in = 'p_01_apx_csv_shapefile'
txt_in_base_len = len(txt_dir_in)
pcap_dir_in = 'p_03_pcap'
pcap_in_base_len = len(pcap_dir_in)
out_dir_ascii = 'p_04_ascii'
out_ascii_base_len = len(out_dir_ascii)
out_dir_las = 'p_05_las'
out_las_base_len = len(out_dir_las)
transl_table_fn = 'p_transl_table.txt'
fn_keyword = 'hi_freq_apx'
nl = '\n'

def shorten_string(text_string):
    """
    Function to remove all duplicates from string and keep the order of characters same
    https://www.geeksforgeeks.org/remove-duplicates-given-string-python/
    """
    return "".join(OrderedDict.fromkeys(text_string))

def remove_min_sec(ts):
    return (int(ts) // 3600) * 3600

# ### Function to calculate the gaps between given azimuths. Needed to interpolate azimuths that are not given.

def get_azim_gap(azimuths, dual=True, preserve_shape=False):
    """
    Only works for dual returns now.
    preserve_shape is relevant for dual, where the azimuths repeat.
    if False: return only unique gaps.
    if True: return same shape as azimuths
    """
    if dual:
        azimuths_gap_flat = np.zeros_like(azimuths[:,0::2]).flatten()
        azimuths_gap_flat[:-1] = ((azimuths[:,0::2].flatten()[1:] -\
                                   azimuths[:,0::2].flatten()[:-1]) % 36000)
        azimuths_gap_flat[-1] = azimuths_gap_flat[-2]
        azimuths_gap = azimuths_gap_flat.reshape(azimuths[:,0::2].shape)
        if preserve_shape:
            azimuths_gap = np.tile(azimuths_gap,2)
        return azimuths_gap
    else:
        raise NotImplementedError


def get_micros_pulses(micros, dual=True, preserve_shape=False):
    """
    preserve_shape is relevant for dual, where the azimuths repeat.
    if False: return only unique gaps.
    if True: return same shape as azimuths
    """
    if dual:
        if preserve_shape:
            micros_pulses = np.expand_dims(micros, axis=1) +\
                vlp16_tables.TIMING_OFFSETS_DUAL.T.flatten() * 1e6
        else:
            micros_pulses = np.expand_dims(micros, axis=1) +\
                vlp16_tables.TIMING_OFFSETS_DUAL.T[0::2,:].flatten() * 1e6
    else:
        micros_pulses = np.expand_dims(micros, axis=1) +\
            vlp16_tables.TIMING_OFFSETS_SINGLE.T.flatten() * 1e6
    return micros_pulses


def get_precision_azimuth(az_simple, azimuths_gap, dual=True, minimal_shape=True):
    if dual:
        timing_offsets_within_block = vlp16_tables.TIMING_OFFSETS_DUAL[:,0]
        az_pulses = np.tile(az_simple,(vlp16_tables.LASERS_PER_DATA_BLOCK)).reshape(\
            az_simple.shape[0], vlp16_tables.LASERS_PER_DATA_BLOCK, az_simple.shape[1])
        az_pulses = az_pulses.transpose((0,2,1))
        precision_azimuth = az_pulses[:,:,:] +\
            timing_offsets_within_block / (2 * vlp16_tables.T_CYCLE) *\
            np.expand_dims(azimuths_gap, axis=2)
        precision_azimuth = precision_azimuth % 36000
        if not minimal_shape:
            precision_azimuth = np.tile(\
                precision_azimuth.transpose((0,2,1)), (1,2,1)).transpose((0,2,1))
        precision_azimuth = precision_azimuth.reshape(\
            (precision_azimuth.shape[0],
             precision_azimuth.shape[1] * precision_azimuth.shape[2]))
        return precision_azimuth
    else:
        raise NotImplementedError


def process_file(pcap_file_in, pcap_dir_in, out_dir_ascii, out_dir_las,
                 shm_name, shm_shp, shm_dtp, b_roll, b_pitch, b_yaw,
                 concat_cmd, wine_cmd):
    print(f"Processing {pcap_file_in}")
    logging.info(f"Processing {pcap_file_in}")
    loc_shm = SharedMemory(shm_name)
    loc_apx_arr = np.recarray(shape=shm_shp, dtype=shm_dtp, buf=loc_shm.buf)

    ### Temporary plug-in here.
    # This is not a proper solution, just a quick proof-of-concept.
    # Beforehand, the file yaw_correction.csv must be copied manually into the appropriate folder.
    if 'yaw_correction.csv' in os.listdir(pcap_dir_in):
        yaw_agisoft = pd.read_csv(os.path.join(pcap_dir_in, 'yaw_correction.csv'), index_col=0)
    else:
        # just have a dataframe that when interpolated will result in 0 everywhere
        idx = pd.Index([0, 1, 2597835528, 2597835529], name='utc_time')
        yaw_agisoft = pd.DataFrame(data = np.array([[0],[0],[0],[0]]),
                                   columns = ['smooth_yaw_err'], index = idx)

    # ### Read entire file only once (takes most time)
    start = time.time()
    packets = rdpcap(os.path.join(pcap_dir_in, pcap_file_in))
    packets_read = len(packets)
    end = time.time()
    print(F"{pcap_file_in}: Read {packets_read} packets in {end-start:.2f} seconds.")
    logging.info(F"{pcap_file_in}: Read {packets_read} packets in {end-start:.2f} seconds.")

    # ### Make sure all packets have length == 1206!
    start = time.time()
    wrong_lengths = 0
    for p in packets:
        if len(p.load) != vlp16_tables.DATA_PACKET_LENGTH:
            wrong_lengths += 1
    end = time.time()
    logging.info(F"{pcap_file_in}: Checked {packets_read} packets in {end-start:.2f} seconds.")
    logging.info('All have same length ('+str(vlp16_tables.DATA_PACKET_LENGTH)+').'
                 if wrong_lengths==0 else str(wrong_lengths)+' packets have a different length.')
    logging.info('This is GOOD!' if wrong_lengths==0 else 'This is BAD!')

    # ### Read all packets into 1 numpy array
    start = time.time()
    raw_pack_data = np.zeros((packets_read, vlp16_tables.DATA_PACKET_LENGTH), dtype = np.uint8)
    for i,p in enumerate(packets):
        raw_pack_data[i,:] = np.frombuffer(p.load, dtype = np.uint8)
        if i % 1e5 == 0:
            print(f"{pcap_file_in}: Packet {i} out of {packets_read} in {time.time()-start:.2f} seconds.")
    end = time.time()
    logging.info(F"{pcap_file_in}: Copied data from {packets_read} packets into a numpy array of shape {raw_pack_data.shape} in {end-start:.2f} seconds.")

    # ### Make sure all packets are captured in the same mode (last, strongest, dual)
    mode_hypothesis = raw_pack_data[0, vlp16_tables.RETURN_MODE_OFFSET]
    logging.info(f"First packet reports {vlp16_tables.RETURN_MODE_NAME[mode_hypothesis]} capture mode.")
    diff_ret_mode = (raw_pack_data[:, vlp16_tables.RETURN_MODE_OFFSET] != mode_hypothesis).sum()
    logging.info(f"{diff_ret_mode} packets disagree.")
    logging.info(f"{'This is GOOD!' if diff_ret_mode == 0 else 'This is BAD!'}")

    # ### Make sure all packets are captured with the same sensor (only VLP16 expected)
    sensor_hypothesis = raw_pack_data[0, vlp16_tables.PRODUCT_MODEL_OFFSET]
    logging.info(f"{pcap_file_in}: First packet reports {vlp16_tables.PRODUCT_MODEL_NAME[sensor_hypothesis]} sensor model.")
    diff_sensor = (raw_pack_data[:, vlp16_tables.PRODUCT_MODEL_OFFSET] != sensor_hypothesis).sum()
    logging.info(f"{pcap_file_in}: {diff_sensor} packets disagree.")
    logging.info(f"{pcap_file_in}: {'This is GOOD!' if diff_sensor == 0 else 'This is BAD!'}")

    # ### Get µs timestamp from packets and transform to UNIX timestamp
    #
    # I found that the Ethernet timestamp agrees with the GNSS timestamp very well.
    #
    # Can be problematic if very close to a full hour and I am not careful.
    #
    # Let's look at the 1st Ethernet timestamp.
    #
    # * if it is far enough from a full hour (>=1 minute), then we continue
    # * ~if it is too close (<1 minute), then we look at last one~ _not implemented_
    # * ~if last one is also too close (recorded for 1 entire hour, not likely),
    #   we find an optimal one in the middle~ _not implemented_
    ts_1st_pack = datetime.datetime.fromtimestamp(int(packets[0].time))
    if ts_1st_pack.minute > 1 and ts_1st_pack.minute < 59:
        logging.info(f"{pcap_file_in}: Far enough from full hour (~{ts_1st_pack.minute} minutes).")
        logging.info("This is GOOD!\nContinue!")
    else:
        logging.info(f"{pcap_file_in}: Too close to full hour (~{ts_1st_pack.minute} minutes).")
        logging.info("That is not great, but the code below should deal with it.")

    # #### Take the Ethernet timestamp of the (1st) packet,
    # discard the sub-hour info and replace it with that from the GNSS µs timestamp
    #
    # What happens when the capture rolls over a full hour?
    #
    # **Need to deal with this when such data is captured!**
    #
    # # Solution below!
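    # Illustrative aside (not part of the original pipeline): the byte-view
    # assignment below is equivalent to assembling the little-endian uint32
    # GNSS timestamp arithmetically, e.g.
    #
    #   off = vlp16_tables.DATA_PACK_TIMESTAMP_OFFSET
    #   micros = (raw_pack_data[:, off + 0].astype(np.int64)
    #             | raw_pack_data[:, off + 1].astype(np.int64) << 8
    #             | raw_pack_data[:, off + 2].astype(np.int64) << 16
    #             | raw_pack_data[:, off + 3].astype(np.int64) << 24)
    #
    # Writing the four bytes into a zeroed int64 view, as done here, avoids
    # the temporaries but relies on the platform being little-endian (which
    # the __main__ block checks via sys.byteorder).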
    start = time.time()
    micros = np.zeros((packets_read,), dtype = np.int64)
    micro_bytes = micros.view(dtype = np.uint8)
    micro_bytes[0::8] = raw_pack_data[:, vlp16_tables.DATA_PACK_TIMESTAMP_OFFSET + 0]
    micro_bytes[1::8] = raw_pack_data[:, vlp16_tables.DATA_PACK_TIMESTAMP_OFFSET + 1]
    micro_bytes[2::8] = raw_pack_data[:, vlp16_tables.DATA_PACK_TIMESTAMP_OFFSET + 2]
    micro_bytes[3::8] = raw_pack_data[:, vlp16_tables.DATA_PACK_TIMESTAMP_OFFSET + 3]
    plt.plot(micros)
    end = time.time()
    logging.info(f"{pcap_file_in}: Extracted time stamp from {packets_read} packets in {end-start:.2f} seconds.")
    logging.info(f"{pcap_file_in}: If the line jumps, a full hour occurs. Need to deal with it!")

    # #### Another problem could be that the UDP packets are not guaranteed to arrive in order.
    #
    # An assumption that is made for the following calculations is that this does not happen.
    #
    # **Need to deal with this when such data is captured!**
    while (micros[1:] < micros[:-1]).sum() > 0:
        jump_position = np.where((micros[1:] < micros[:-1]))[0][0] + 1
        micros[jump_position:] += int(3.6e9)
        logging.info(f"{pcap_file_in}: Added another hour to micros at position {jump_position}")
    plt.plot(micros)

    if (micros[1:] - micros[:-1]).min() > 0:  # all chronological
        logging.info(f"{pcap_file_in}: Packets seem to be in right order. Continue!")
    else:
        logging.info(f"{pcap_file_in}: Not all packets are in order. Handle somehow!")
        print(f"{pcap_file_in}: Not all packets are in order. Handle somehow!")
        sys.exit(0)

    eth_ts_hour = remove_min_sec(packets[0].time)
    puck_timestamps = micros / 1e6 + eth_ts_hour * 1.0

    # ### Get range and intensity info for all packets
    start = time.time()
    # the following contains only channel data (i.e. no timestamp, factory bytes or azimuth)
    channel_data = raw_pack_data[:,:-6].reshape(\
        (packets_read, vlp16_tables.DATA_BLOCKS, 100))[:,:,4:]
    channel_data = channel_data.reshape(\
        (packets_read, vlp16_tables.DATA_BLOCKS * vlp16_tables.LASERS_PER_DATA_BLOCK * 3))

    #puck ranges in mm
    puck_ranges = np.zeros(\
        (packets_read,\
         vlp16_tables.DATA_BLOCKS * vlp16_tables.LASERS_PER_DATA_BLOCK),\
        dtype = np.uint32)
    puck_range_bytes = puck_ranges.view(dtype = np.uint8)
    puck_range_bytes[:,0::4] = channel_data[:,0::3]
    puck_range_bytes[:,1::4] = channel_data[:,1::3]
    puck_ranges *= 2

    #intensities as 1 byte
    puck_intens = np.zeros(\
        (packets_read,\
         vlp16_tables.DATA_BLOCKS * vlp16_tables.LASERS_PER_DATA_BLOCK),\
        dtype = np.uint8)
    puck_intens[:,:] = channel_data[:,2::3]
    end = time.time()
    logging.info(f"{pcap_file_in}: Extracted range and intensity for {packets_read * vlp16_tables.DATA_BLOCKS * vlp16_tables.LASERS_PER_DATA_BLOCK} laser pulses in {end-start:.2f} seconds.")

    # ### Get all given azimuths
    #
    # Think how to treat them for dual / single cases later.
    #
    # For now assume it is always DUAL!
    # Changed azimuths data type to signed 32-bit integer to support in-place subtraction
    start = time.time()
    # the following contains only azimuth data (i.e. no timestamp, factory bytes or channel data)
    azimuths = np.zeros((packets_read, vlp16_tables.DATA_BLOCKS, 1), dtype = np.int32)
    azim_data = azimuths.view(dtype = np.uint8)
    azim_data[:,:,0:2] = raw_pack_data[:, :-6].reshape(\
        packets_read, vlp16_tables.DATA_BLOCKS, 100)[:, :, 2:4]
    azim_data = azim_data.reshape((packets_read, vlp16_tables.DATA_BLOCKS * 4))

    #azimuth
    azimuths = azim_data.view(dtype= np.int32)
    end = time.time()
    logging.info(f"{pcap_file_in}: Extracted azimuths for {packets_read * vlp16_tables.DATA_BLOCKS} firing sequences in {end-start:.2f} seconds.")

    # ### All packets are in dual return mode, so the azimuths are expected to repeat (VLP-16 User Manual, Figure 9-3)
    #
    # The following checks this assumption again:
    az_repeat = ((azimuths[:, 0::2] != azimuths[:, 1::2]).sum() == 0)
    if az_repeat:
        logging.info(f"{pcap_file_in}: All azimuths repeat. This is good.")
    else:
        logging.info(f"{pcap_file_in}: Not all azimuths repeat. Investigate before continuing.")

    azimuths_gap = get_azim_gap(azimuths)
    micros_pulses = get_micros_pulses(micros)

    # timestamp for each laser pulse
    puck_pulse_time = micros_pulses / 1e6 + eth_ts_hour * 1.0

    # ### Calculate the azimuths for each datapoint

    # Use the following simplified array if in dual mode.
    # Otherwise can still refer to it, but it's just the original array.
    if mode_hypothesis == vlp16_tables.RETURN_MODE_DUAL:
        az_simple = azimuths[:,0::2]
    else:
        az_simple = azimuths

    prec_az = get_precision_azimuth(az_simple, azimuths_gap, True, True)

    # ### Get the APX data

    # cut the big dataframe to only what the puck data covers
    interv = np.where(\
        np.logical_and(\
            loc_apx_arr.timestamp > puck_timestamps[0],
            loc_apx_arr.timestamp < puck_timestamps[-1]))[0]
    mid_apx_arr = loc_apx_arr[max(interv[0] - 1, 0):min(interv[-1] + 1, loc_apx_arr.shape[0])]

    # ### process puck data...
    concat_files = []
    MAXIMUM_POINTS_PER_RUN = 20000  # process puck_timestamps in slices of this size
    max_laps = int(np.ceil(puck_timestamps.size / MAXIMUM_POINTS_PER_RUN))
    for run_count in range(max_laps):
        print(f'{pcap_file_in}: Running slice {run_count} out of {max_laps}')
        current_range = np.arange(\
            0, min(MAXIMUM_POINTS_PER_RUN, puck_timestamps.size -\
                   MAXIMUM_POINTS_PER_RUN * run_count)) +\
            MAXIMUM_POINTS_PER_RUN * run_count  # a slice that hopefully fits in RAM

        # time in seconds
        min_time = puck_timestamps[current_range][0]
        max_time = puck_timestamps[current_range][-1]
        print(f"{pcap_file_in}: Processing {(max_time - min_time):.2f} seconds")

        interv = np.where(\
            np.logical_and(\
                mid_apx_arr.timestamp > min_time,
                mid_apx_arr.timestamp < max_time))[0]
        sml_apx_arr = mid_apx_arr[max(interv[0] - 1, 0):min(interv[-1] + 2, mid_apx_arr.shape[0])]

        relevant_times = puck_pulse_time[current_range,:]
        strongest_return_ranges = puck_ranges[current_range].reshape(\
            (-1, vlp16_tables.DATA_BLOCKS, vlp16_tables.LASERS_PER_DATA_BLOCK))\
            [:,1::2].flatten() / 1000
        strongest_return_intensities = puck_intens[current_range].reshape(\
            (-1, vlp16_tables.DATA_BLOCKS, vlp16_tables.LASERS_PER_DATA_BLOCK))\
            [:,1::2].flatten()
        last_return_ranges = puck_ranges[current_range].reshape(\
            (-1, vlp16_tables.DATA_BLOCKS, vlp16_tables.LASERS_PER_DATA_BLOCK))\
            [:,0::2].flatten() / 1000
        last_return_intensities = puck_intens[current_range].reshape(\
            (-1, vlp16_tables.DATA_BLOCKS, vlp16_tables.LASERS_PER_DATA_BLOCK))\
            [:,0::2].flatten()

        azimuth = prec_az[current_range]
        vert_elev_angle = np.tile(vlp16_tables.elevation_and_vert_corr_by_laser_id[:,0], (1,12))
        vert_elev_angle = np.tile(vert_elev_angle, (azimuth.shape[0],1))
        global_laser_id = np.tile(np.arange(16, dtype = np.uint8), (1,12))
        global_laser_id = np.tile(global_laser_id, (azimuth.shape[0],1))
        azimuth = np.deg2rad(azimuth / 100).flatten()
        vert_elev_angle = vert_elev_angle.flatten()

        f_lat = interp1d(sml_apx_arr.timestamp, sml_apx_arr["lat_EPSG32632"], kind='cubic', fill_value="extrapolate")
        f_lon = interp1d(sml_apx_arr.timestamp, sml_apx_arr["lon_EPSG32632"], kind='cubic', fill_value="extrapolate")
        f_ele = interp1d(sml_apx_arr.timestamp, sml_apx_arr["elevation"], kind='cubic', fill_value="extrapolate")
        f_yaw = interp1d(sml_apx_arr.timestamp, sml_apx_arr["heading_continuous"], kind='cubic', fill_value="extrapolate")
        f_rol = interp1d(sml_apx_arr.timestamp, sml_apx_arr["roll"], kind='cubic', fill_value="extrapolate")
        f_pit = interp1d(sml_apx_arr.timestamp, sml_apx_arr["pitch"], kind='cubic', fill_value="extrapolate")
        f_yaw_agisoft = interp1d(yaw_agisoft.index.values, yaw_agisoft['smooth_yaw_err'], kind='cubic', fill_value="extrapolate")

        MIN_RANGE = 2  # metres
        for return_counter in range(1, 3):
            if return_counter == 1:
                condition = strongest_return_ranges > MIN_RANGE
                condition_double = np.logical_and(\
                    last_return_ranges > MIN_RANGE,
                    last_return_ranges != strongest_return_ranges)
                return_ranges = strongest_return_ranges
                return_intensities = strongest_return_intensities
            elif return_counter == 2:
                condition = np.logical_and(\
                    last_return_ranges > MIN_RANGE,
                    last_return_ranges != strongest_return_ranges)
                condition_double = np.ones_like(last_return_ranges, dtype=np.bool8)
                return_ranges = last_return_ranges
                return_intensities = last_return_intensities

            lat = f_lat(relevant_times).flatten()
            lon = f_lon(relevant_times).flatten()
            ele = f_ele(relevant_times).flatten()
            # parenthesized so the modulo applies to the difference, not just
            # to the correction term; downstream sin/cos are 360°-periodic
            # either way, but this is the intended reading -- check the sign!
            yaw = (f_yaw(relevant_times).flatten() - f_yaw_agisoft(relevant_times).flatten()) % 360
            rol = f_rol(relevant_times).flatten()
            pit = f_pit(relevant_times).flatten()

            X_puck = np.ones_like(return_intensities) * np.nan
            Y_puck = np.ones_like(return_intensities) * np.nan
            Z_puck = np.ones_like(return_intensities) * np.nan

            #VLP manual p.53
            X_puck[condition] = return_ranges[condition] * np.cos(vert_elev_angle)[condition] *\
                np.sin(azimuth)[condition]
            Y_puck[condition] = return_ranges[condition] * np.cos(vert_elev_angle)[condition] *\
                np.cos(azimuth)[condition]
            Z_puck[condition] = return_ranges[condition] * np.sin(vert_elev_angle)[condition]

            # first rotate into XYZ of the drone!
            # x_roll = -90 #degrees
            # y_pitch = 0
            # z_yaw = -90
            #rotation from puck to uav coordinates:
            R_01 = np.array([[0., 1., 0.],
                             [0., 0., 1.],
                             [1., 0., 0.]])

            #get rid of invalid entries
            X_puck = X_puck[condition]
            Y_puck = Y_puck[condition]
            Z_puck = Z_puck[condition]

            XYZ_puck = np.vstack((X_puck, Y_puck, Z_puck)).T
            XYZ_puck = XYZ_puck[:, np.newaxis, :]
            XYZ_uav = np.matmul(XYZ_puck, R_01)

            #rotation for boresight roll (in UAV coord):
            bor_rl_s = np.sin(np.radians(b_roll))
            bor_rl_c = np.cos(np.radians(b_roll))
            R_02 = np.array([[1., 0., 0.],
                             [0., bor_rl_c, -bor_rl_s],
                             [0., bor_rl_s, bor_rl_c]])
            XYZ_uav = np.matmul(XYZ_uav, R_02)

            #rotation for boresight pitch (in UAV coord):
            bor_pt_s = np.sin(np.radians(b_pitch))
            bor_pt_c = np.cos(np.radians(b_pitch))
            R_03 = np.array([[ bor_pt_c, 0., bor_pt_s],
                             [ 0., 1., 0.],
                             [ -bor_pt_s, 0., bor_pt_c]])
            XYZ_uav = np.matmul(XYZ_uav, R_03)

            #rotation for boresight yaw (in UAV coord):
            bor_yw_s = np.sin(np.radians(b_yaw))
            bor_yw_c = np.cos(np.radians(b_yaw))
            R_04 = np.array([[ bor_yw_c, -bor_yw_s, 0.],
                             [ bor_yw_s, bor_yw_c, 0.],
                             [ 0., 0., 1.]])
            XYZ_uav = np.matmul(XYZ_uav, R_04)

            #now rotate to real world...
            yaw_correction, pit_correction, rol_correction = -np.radians(yaw[condition]),\
                -np.radians(pit[condition]),\
                -np.radians(rol[condition])
            cos_gamma = np.cos(rol_correction)
            sin_gamma = np.sin(rol_correction)
            cos_beta = np.cos(pit_correction)
            sin_beta = np.sin(pit_correction)
            cos_alpha = np.cos(yaw_correction)
            sin_alpha = np.sin(yaw_correction)

            R_gamma = np.array([[ np.ones_like(cos_gamma),np.zeros_like(cos_gamma),np.zeros_like(cos_gamma)],
                                [np.zeros_like(cos_gamma), cos_gamma, -sin_gamma ],
                                [np.zeros_like(cos_gamma), sin_gamma, cos_gamma ]])
            R_gamma = np.transpose(R_gamma, (2,0,1))
            R_beta = np.array([[ cos_beta, np.zeros_like(cos_beta), sin_beta ],
                               [np.zeros_like(cos_beta), np.ones_like(cos_beta) ,np.zeros_like(cos_beta)],
                               [ -sin_beta, np.zeros_like(cos_beta), cos_beta ]])
            R_beta = np.transpose(R_beta, (2,0,1))
            R_alpha = np.array([[ cos_alpha , -sin_alpha ,np.zeros_like(cos_alpha)],
                                [ sin_alpha , cos_alpha ,np.zeros_like(cos_alpha)],
                                [np.zeros_like(cos_alpha),np.zeros_like(cos_alpha), np.ones_like(cos_alpha)]])
            R_alpha = np.transpose(R_alpha, (2,0,1))

            XYZ_rotated = np.matmul(XYZ_uav, R_gamma)
            XYZ_rotated = np.matmul(XYZ_rotated, R_beta)
            XYZ_rotated = np.matmul(XYZ_rotated, R_alpha)

            #bring it into East, North, Up system (+90° around z, then +180° around new x)
            R_last = np.array([[ 0., 1., 0.],
                               [ 1., 0., 0.],
                               [ 0., 0.,-1.]])
            XYZ_rotated = np.matmul(XYZ_rotated, R_last)

            flight_line_id = np.ones_like(\
                vert_elev_angle[condition], dtype=np.uint16) * int(pcap_file_in.split(".")[0].split("_")[-1])
            flight_line_id = flight_line_id[:, np.newaxis, np.newaxis]
            XYZ_rotated = np.concatenate((XYZ_rotated, flight_line_id), axis = -1)

            return_id = np.ones_like(vert_elev_angle[condition], dtype=np.uint16) * return_counter
            return_id = return_id[:, np.newaxis, np.newaxis]
            XYZ_rotated = np.concatenate((XYZ_rotated, return_id), axis = -1)

            return_intensities = return_intensities[condition]
            return_intensities = return_intensities[:, np.newaxis, np.newaxis]
            XYZ_rotated = np.concatenate((XYZ_rotated, return_intensities), axis = -1)

            number_of_returns = np.ones_like(vert_elev_angle[condition], dtype=np.uint8) +\
                condition_double[condition]  #1 for single, 2 for double
            number_of_returns = number_of_returns[:, np.newaxis, np.newaxis]
            XYZ_rotated = np.concatenate((XYZ_rotated, number_of_returns), axis = -1)

            #subtract 1 billion (see here https://support.geocue.com/fixing-las-global-encoding/)
            laser_times = relevant_times.flatten()[condition] - 1e9
            laser_times = laser_times[:, np.newaxis, np.newaxis]
            XYZ_rotated = np.concatenate((XYZ_rotated, laser_times), axis = -1)

            #for angles
            delta_pos = np.copy(XYZ_rotated[:,:,0:3])
            delta_pos = np.matmul(delta_pos, R_alpha)

            #take delta_z as positive when looking down (normal scan)
            new_scan_angle = np.degrees(np.arctan2(delta_pos[:,0,0], - delta_pos[:,0,2]))
            #for some reason does not want to use short even though version 1.4
            new_scan_angle = np.clip(new_scan_angle, -128, +127)
            new_scan_angle = new_scan_angle[:, np.newaxis, np.newaxis]
            XYZ_rotated = np.concatenate((XYZ_rotated, new_scan_angle), axis = -1)

            #take delta_z as positive when looking down (normal scan)
            new_along_track_angle = np.degrees(np.arctan2(delta_pos[:,0,1], - delta_pos[:,0,2]))
            new_along_track_angle = new_along_track_angle[:, np.newaxis, np.newaxis]
            XYZ_rotated = np.concatenate((XYZ_rotated, new_along_track_angle), axis = -1)

            laser_id = global_laser_id.flatten()[condition]
            laser_id = laser_id[:, np.newaxis, np.newaxis]
            XYZ_rotated = np.concatenate((XYZ_rotated, laser_id), axis = -1)

            XYZ_rotated[:,0,0] += lon[condition]
            XYZ_rotated[:,0,1] += lat[condition]
            XYZ_rotated[:,0,2] += ele[condition]

            #to easily display height in cloudcompare
            extra_elevation_field = XYZ_rotated[:,:,2].flatten()
            extra_elevation_field = extra_elevation_field[:, np.newaxis, np.newaxis]
            XYZ_rotated = np.concatenate((XYZ_rotated, extra_elevation_field), axis = -1)

            if return_counter == 1:
                first_returns = np.copy(XYZ_rotated)
            elif return_counter == 2:
                first_returns = np.concatenate((first_returns, XYZ_rotated))

        fname = f'{pcap_file_in.split(".")[0]}'
        np.savetxt(os.path.join(out_dir_ascii, f"{fname}_r{run_count:03d}.xyz"),
                   np.squeeze(first_returns, axis = -2),
                   fmt=['%.3f', '%.3f', '%.3f', '%3d', '%1d', '%3d', '%1d', '%.9f', '%.3f', '%.3f', '%d', '%.3f'])
        current_ascii_file = f"{fname}_r{run_count:03d}.xyz"
        concat_files.append(os.path.join(out_dir_ascii, current_ascii_file))

    merged_ascii_file = os.path.join(out_dir_ascii, f"{fname}.xyz")
    print(f"{pcap_file_in}: Concatenating {len(concat_files)} ascii files")
    start = time.time()
    command = f"{concat_cmd} {' '.join(concat_files)} > {merged_ascii_file}"
    logging.info(command)
    os.system(command)
    print(f"{pcap_file_in}: Done in {(time.time() -start):.2f} seconds.")

    print(f"{pcap_file_in}: Removing {len(concat_files)} redundant ascii files")
    start = time.time()
    for f in concat_files:
        os.remove(f)
    print(f"{pcap_file_in}: Done in {(time.time() -start):.2f} seconds.")

    merged_las_file = merged_ascii_file.replace(out_dir_ascii, out_dir_las)[:-3]+'laz'
    print(f"{pcap_file_in}: Transforming the ascii file to las.")
    start = time.time()
    command = f'{wine_cmd}txt2las -i {merged_ascii_file} -parse xyzprinta012 -add_attribute 4 "AngleAlongTrack" "in degrees, positive forward, negative backward" 0.006 -add_attribute 1 "LaserID" "from 0 to 15" -add_attribute 9 "Elevation ASL" "metres above sea level" -set_point_type 6 -o {merged_las_file}'
sea level" -set_point_type 6 -o {merged_las_file}' logging.info(command) os.system(command) print(f"{pcap_file_in}: Done in {(time.time() -start):.2f} seconds.") basename = merged_las_file[:-4] print(f"{pcap_file_in}: Adding further information and denoising") start = time.time() command = f'{wine_cmd}lasinfo -i {merged_las_file} -set_global_encoding 1\n' logging.info(command) os.system(command) command = f'{wine_cmd}las2las -i {merged_las_file} -epsg 32632 -target_epsg 32632 -o {basename}_epsg.laz\n' logging.info(command) os.system(command) command = f'{wine_cmd}lasnoise -i {basename}_epsg.laz -remove_noise -o {basename}_denoised.laz\n' logging.info(command) os.system(command) print(f"{pcap_file_in}: Done in {(time.time() -start):.2f} seconds.") print(f"Finished processing file {os.path.join(pcap_dir_in, pcap_file_in)}") logging.info(f"Finished processing file {os.path.join(pcap_dir_in, pcap_file_in)}") if __name__=='__main__': #this_script = sys.argv[0].strip().split(os.path.sep)[-1] this_script = str(sys.argv[0]) print(this_script) # sys.exit(0) if len(sys.argv)<=1: print("ERROR\n\nPlease provide the absolute path of the campaign's top directory.\n\nEXITING") sys.exit(0) else: try: top_dir = sys.argv[1].rstrip(os.path.sep) except Exception as e: print(e); sys.exit(0) if sys.byteorder == 'little': print(f"System endianness: {sys.byteorder}.\nGood to go :)") else: print(f"System endianness: {sys.byteorder}.\nSTOP right now!") sys.exit(0) #can also modify the following instead of passing the argument boresight_str = "Rp1050000Pp0200000Yn1900000" if len(sys.argv)>2: try: boresight_str = sys.argv[2] except Exception as e: print(f"failed: {e}.....{e.args}") sys.exit(0) try: scale_factor = 1e-6 boresight_roll = scale_factor * int(boresight_str[2:9]) *\ (1 if boresight_str[1] == "p" else -1) boresight_pitch = scale_factor * int(boresight_str[11:18]) *\ (1 if boresight_str[10] == "p" else -1) boresight_yaw = scale_factor * int(boresight_str[20:27]) *\ (1 if boresight_str[19] == "p" else -1) print(f"Using the following boresight angles: Roll: {boresight_roll}, Pitch: {boresight_pitch}, Yaw: {boresight_yaw}") except Exception as e: print(f"failed: {e}.....{e.args}") sys.exit(0) os.chdir(top_dir) if os.getcwd() != top_dir: print("Something went wrong.") print(f"cwd: {os.getcwd}") sys.exit(0) if log_dir not in os.listdir(): os.mkdir(log_dir) while out_dir_ascii in os.listdir(): print(f"{out_dir_ascii} already existing.") time.sleep(0.1) out_dir_ascii = f"{out_dir_ascii[:out_ascii_base_len]}_{str(datetime.datetime.now()).replace(':', '.').replace(' ', '_')}" os.mkdir(out_dir_ascii) with open(os.path.join(out_dir_ascii, "00_boresight_angles.txt"), 'w') as fh: fh.write(boresight_str) fh.write(nl) fh.write(f"Using the following boresight angles: Roll: {boresight_roll}, Pitch: {boresight_pitch}, Yaw: {boresight_yaw}") while out_dir_las in os.listdir(): print(f"{out_dir_las} already existing.") time.sleep(0.1) out_dir_las = f"{out_dir_las[:out_las_base_len]}_{str(datetime.datetime.now()).replace(':', '.').replace(' ', '_')}" os.mkdir(out_dir_las) shutil.copy2(os.path.join(out_dir_ascii, "00_boresight_angles.txt"), out_dir_las) logging.basicConfig(format='<%(asctime)s> <%(levelname)-8s> <%(message)s>', level=logging.DEBUG, filename=os.path.join(top_dir, log_dir, f'{datetime.date.today().strftime("%Y.%m.%d")}_{os.path.split(this_script)[-1]}.log'), datefmt='%Y-%m-%d %H:%M:%S') logging.info(f"{this_script} has started and received a valid top directory name") logging.info(f"Using the following boresight 
    potential_dirs_in = len(re.findall(txt_dir_in, " ".join(os.listdir())))
    if potential_dirs_in == 0:
        print("No valid input directories found. Make sure to run script 00_join_pcap_.. first\nEXITING!")
    elif potential_dirs_in > 1:
        print("Multiple input directories found. Choose one:")
        valid_choices = dict()
        for i, d_name in enumerate(os.listdir()):
            if os.path.isfile(d_name):
                continue
            elif txt_dir_in not in d_name:
                continue
            else:
                print(f"{i}: {d_name}")
                valid_choices[i] = d_name
        dir_in_selected = False
        while not dir_in_selected:
            try:
                dir_in_id = int(input("Which directory do we use? Type a number and press enter.\n"))
                if dir_in_id in valid_choices.keys():
                    txt_dir_in = valid_choices[dir_in_id]
                    dir_in_selected = True
            except:
                print("Invalid choice!")

    potential_pcap_dirs_in = len(re.findall(pcap_dir_in, " ".join(os.listdir())))
    if potential_pcap_dirs_in == 0:
        print("No valid PCAP input directories found. Make sure to run the other scripts first\nEXITING!")
    elif potential_pcap_dirs_in > 1:
        print("Multiple input directories found. Choose one:")
        valid_choices = dict()
        for i, d_name in enumerate(os.listdir()):
            if os.path.isfile(d_name):
                continue
            elif pcap_dir_in not in d_name:
                continue
            else:
                print(f"{i}: {d_name}")
                valid_choices[i] = d_name
        dir_in_selected = False
        while not dir_in_selected:
            try:
                dir_in_id = int(input("Which directory do we use? Type a number and press enter.\n"))
                if dir_in_id in valid_choices.keys():
                    pcap_dir_in = valid_choices[dir_in_id]
                    dir_in_selected = True
            except:
                print("Invalid choice!")

    print(f"Using {txt_dir_in} and {pcap_dir_in} as input")
    logging.info(f"Using {txt_dir_in} and {pcap_dir_in} as input")

    this_os = platform.system()
    if this_os == "Linux":
        concatenate_command = "cat"
        wine_command = "wine "
    elif this_os == "Windows":
        concatenate_command = "type"
        wine_command = ""
    else:
        print("Unknown OS. Terminating.")
        sys.exit(0)
    print(f"Running on {this_os}. To concatenate we will use '{concatenate_command}'")
    print(f"Calling lastools with e.g. '{wine_command}lastool'")

    print("Making APX dataframe...")
    csv_files = [f for f in sorted(os.listdir(txt_dir_in)) if fn_keyword in f]
    print(f"Found {len(csv_files)} CSV files.")
    big_apx_df = pd.concat(\
        (pd.read_csv(os.path.join(txt_dir_in, f), sep=";", index_col=0, na_values="NAN") for f in csv_files))
    big_apx_df = big_apx_df[["lon_EPSG32632", "lat_EPSG32632", "elevation",
                             "heading_continuous", "roll", "pitch"]]
    glob_apx_arr = big_apx_df.to_records()
    del big_apx_df
    shm_shape, shm_dtype = glob_apx_arr.shape, glob_apx_arr.dtype
    print(f"Done concatenating {len(csv_files)} CSV files into dataframe.")
    print("\n\n\n")
    logging.info(f"Used {csv_files} from {txt_dir_in}")

    with SharedMemoryManager() as smm:
        shm = smm.SharedMemory(glob_apx_arr.nbytes)
        shm_apx_arr = np.recarray(shape=shm_shape, dtype=shm_dtype, buf=shm.buf)
        np.copyto(shm_apx_arr, glob_apx_arr)
        fnames = sorted([fn for fn in os.listdir(pcap_dir_in) if "line_" in fn\
                         and len(fn) == 13 and "pcap" in fn])
        func_args = zip(fnames,
                        [pcap_dir_in] * len(fnames),
                        [out_dir_ascii] * len(fnames),
                        [out_dir_las] * len(fnames),
                        [shm.name] * len(fnames),
                        [shm_shape] * len(fnames),
                        [shm_dtype] * len(fnames),
                        [boresight_roll] * len(fnames),
                        [boresight_pitch] * len(fnames),
                        [boresight_yaw] * len(fnames),
                        [concatenate_command] * len(fnames),
                        [wine_command] * len(fnames))
        with Pool(processes=cpu_count() * 9 // 10) as pool:
            results = pool.starmap(process_file, func_args)
        print(results)
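The boresight string parsed in the __main__ block above has a fixed layout. The following standalone helper is an illustrative sketch (not part of the script) that decodes it and documents the index arithmetic:

def parse_boresight(s: str, scale: float = 1e-6) -> tuple:
    """Decode 'R<sign><7 digits>P<sign><7 digits>Y<sign><7 digits>' into
    (roll, pitch, yaw) in degrees; sign is 'p' (plus) or 'n' (minus)."""
    assert s[0] == "R" and s[9] == "P" and s[18] == "Y"
    def field(sign: str, digits: str) -> float:
        return scale * int(digits) * (1 if sign == "p" else -1)
    return (field(s[1], s[2:9]), field(s[10], s[11:18]), field(s[19], s[20:27]))

# parse_boresight("Rp1050000Pp0200000Yn1900000") -> (1.05, 0.2, -1.9)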
Enhanced low-rank plus group sparse decomposition for speckle reduction in OCT images Suppression of speckle artifacts in optical coherence tomography (OCT) is necessary for high-quality quantitative assessment of ocular disorders associated with vision loss. However, because speckle plays a dual role, as a source of noise and as a carrier of information about tissue microstructure, its complete suppression is not desirable. This is what makes the development of speckle-suppression methods challenging. We propose a method for the additive decomposition of a matrix into a low-rank term and a group-sparsity-constrained term. The group sparsity constraint is a novelty relative to the state of the art in low-rank plus sparse additive matrix decompositions. Group sparsity forces more of the noise-related speckle to be absorbed by the sparse term of the decomposition; the low-rank term is therefore expected to yield a further enhanced OCT image. In particular, the proposed method uses the elastic net regularizer to induce the grouping effect. Its proximity operator is a shrunken version of the soft-thresholding operator, so the group sparsity regularization adds no extra computational complexity compared with the ℓ1-norm regularized problem. We derive an alternating direction method of multipliers (ADMM) based algorithm for the related optimization problem. The new speckle-suppression method is automatic and computationally efficient. The method is validated against the state of the art on ten 3D macular-centered OCT images of normal eyes. It yields OCT images with improved contrast-to-noise ratio, signal-to-noise ratio, contrast and edge fidelity (sharpness).
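The key computational claim, that the elastic net proximity operator is a shrunken soft-thresholding, is easy to state concretely. A minimal sketch follows (illustrative; parameter names are ours, not the paper's):

import numpy as np

def prox_elastic_net(v, lam1, lam2):
    # prox of lam1*||x||_1 + (lam2/2)*||x||_2^2: soft-threshold at lam1,
    # then shrink uniformly by 1/(1 + lam2) -- the same per-entry cost as
    # plain soft-thresholding, which is why the grouping effect is free.
    return np.sign(v) * np.maximum(np.abs(v) - lam1, 0.0) / (1.0 + lam2)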
IMPLEMENTING BI-TEMPORAL PROPERTIES INTO VARIOUS NOSQL DATABASE CATEGORIES NoSQL database systems have emerged and developed at an accelerating rate in recent years. Attractive properties such as scalability and performance, which many applications need today, have contributed to their increasing popularity. Time is a very important aspect of many applications. Many NoSQL database systems do not offer built-in management of temporal properties. In this paper, we discuss how temporal properties can be embedded in NoSQL databases. We review and differentiate between the most popular NoSQL stores. Moreover, we propose various solutions for modifying data models to embed bitemporal properties in two of the most popular categories of NoSQL databases (key-value stores and column stores). In addition, we give examples of how to represent bitemporal properties using the Redis key-value store and the Cassandra column-oriented store. This work can be used as a basis for designing and implementing temporal operators and temporal data management in NoSQL databases.
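To make the bitemporal idea concrete, here is a minimal key-value sketch (a hypothetical layout with names of our choosing, not taken from the paper): every stored version carries both a valid-time interval (when the fact holds in the real world) and a transaction-time interval (when the store recorded it), and reads filter on both.

import time
from collections import defaultdict

class BitemporalKV:
    """Toy in-memory stand-in for a bitemporal key-value layout."""
    def __init__(self):
        self.versions = defaultdict(list)

    def put(self, key, value, valid_from, valid_to=float("inf")):
        self.versions[key].append({
            "value": value,
            "valid_from": valid_from, "valid_to": valid_to,
            "tx_from": time.time(), "tx_to": float("inf"),
        })

    def get(self, key, valid_at, tx_at):
        # Newest transaction first, so corrections shadow older records.
        for v in reversed(self.versions[key]):
            if (v["valid_from"] <= valid_at < v["valid_to"]
                    and v["tx_from"] <= tx_at < v["tx_to"]):
                return v["value"]
        return None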
/** * Contains integration tests (interaction with the Model) for {@code SortMenuCommand}. */ public class SortMenuCommandTest { private RestaurantBook ab = new RestaurantBookBuilder().withItem(BEEF_BURGER).withItem(APPLE_JUICE).build(); private Model model = new ModelManager(ab, new UserPrefs()); private Model expectedModel = new ModelManager(ab, new UserPrefs()); private CommandHistory commandHistory = new CommandHistory(); @Test public void execute_sortMenuByName() { SortMethod sortMethod = SortMethod.NAME; String expectedMessage = String.format(MESSAGE_SORTED, sortMethod.name()); SortMenuCommand command = new SortMenuCommand(sortMethod); expectedModel.sortMenu(sortMethod); expectedModel.commitRestaurantBook(); assertCommandSuccess(command, model, commandHistory, expectedMessage, expectedModel); } @Test public void execute_sortMenuByPrice() { SortMethod sortMethod = SortMethod.PRICE; String expectedMessage = String.format(MESSAGE_SORTED, sortMethod.name()); SortMenuCommand command = new SortMenuCommand(sortMethod); expectedModel.sortMenu(sortMethod); expectedModel.commitRestaurantBook(); assertCommandSuccess(command, model, commandHistory, expectedMessage, expectedModel); } }
/** * Manager providing access to fonts installed to the current system. */ public class SystemFontManager { /** * Available system fonts as mapping from the font name to its description. */ private static FontFile[] SYSTEM_FONTS; /** * Get all available fonts in the system. * * @return available fonts * @throws CouldNotGetFontsException in case fonts could not be retrieved from the system */ public static FontFile[] getAvailableFonts() throws CouldNotGetFontsException { if (SYSTEM_FONTS == null) { List<FontFile> descriptorList = new ArrayList<>(); try { for (String location : SystemFontLocations.getLocations(OperatingSystem.current())) { File fontsFolder = new File(location); if (fontsFolder.isDirectory()) { findFontsInFolder(fontsFolder, descriptorList); } } } catch (CouldNotDetermineOperatingSystemException | FontFormatException | IOException e) { throw new CouldNotGetFontsException(e); } SYSTEM_FONTS = descriptorList.toArray(new FontFile[0]); } return SYSTEM_FONTS; } /** * Find fonts in the passed folder file. * * @param folder to search for fonts in * @param result the result list to add found fonts to * @throws IOException in case a font could not be read * @throws FontFormatException in case a font format is incorrect */ private static void findFontsInFolder(File folder, List<FontFile> result) throws IOException, FontFormatException { for (File file : folder.listFiles()) { if (file.isDirectory()) { findFontsInFolder(file, result); } else if (file.getName().toLowerCase().endsWith(".ttf")) { result.add(new SingleFontFile(file.getAbsolutePath())); } else if (file.getName().toLowerCase().endsWith(".ttc")) { result.add(new FontCollectionFile(file.getAbsolutePath())); } } } }
<filename>td-racing/core/src/com/mygdx/game/objects/Car.java
package com.mygdx.game.objects;

import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.physics.box2d.Body;
import com.badlogic.gdx.physics.box2d.BodyDef;
import com.badlogic.gdx.physics.box2d.FixtureDef;
import com.badlogic.gdx.physics.box2d.PolygonShape;
import com.badlogic.gdx.physics.box2d.World;
import com.badlogic.gdx.utils.Disposable;
import com.mygdx.game.gamestate.states.PlayState;

public class Car implements Disposable {

	private static final float SPEED_MAX = 15;
	private static final float ACCELERATION_FORWARD = 2000f;
	private static final float BRAKE_POWER = 5000f;
	private static final float ACCELERATION_BACK = 1000f;
	private static final float STEER_POWER = 1800;

	private final Body body;
	private final Sprite sprite;
	private float deltaTime;

	public Car(final World world, final Sprite sprite, final float xPosition, final float yPosition) {
		final BodyDef bodydef = new BodyDef();
		bodydef.type = BodyDef.BodyType.DynamicBody;
		bodydef.position.set(xPosition * PlayState.PIXEL_TO_METER, yPosition * PlayState.PIXEL_TO_METER);
		body = world.createBody(bodydef);
		final PolygonShape carBox = new PolygonShape();
		carBox.setAsBox(sprite.getWidth() * 0.5f, sprite.getHeight() * 0.5f);
		final FixtureDef fdef = new FixtureDef();
		fdef.shape = carBox;
		fdef.density = 1f;
		fdef.friction = 1f;
		fdef.filter.categoryBits = PlayState.ENEMY_BOX;
		body.createFixture(fdef);
		body.setUserData(this);
		body.setAngularDamping(2);
		this.sprite = sprite;
		deltaTime = 0;
		// turn the car at the beginning
		body.setTransform(body.getPosition(), (float) Math.toRadians(180));
	}

	public void accelarate() {
		final Vector2 acc = new Vector2(ACCELERATION_FORWARD * deltaTime, 0);
		acc.rotateRad(body.getAngle());
		body.applyForceToCenter(acc, true);
	}

	public void brake() {
		final Vector2 acc = new Vector2(
				((getForwardVelocity().x >= 0) ? BRAKE_POWER : ACCELERATION_BACK) * -1 * deltaTime, 0);
		acc.rotateRad(body.getAngle());
		body.applyForceToCenter(acc, true);
	}

	public void steerLeft() {
		body.applyTorque(STEER_POWER * deltaTime * getTurnFactor(), true);
	}

	public void steerRight() {
		body.applyTorque(STEER_POWER * -1 * deltaTime * getTurnFactor(), true);
	}

	private float getNormalizedSpeed() {
		final float mult = (getForwardVelocity().x < 0) ? -1 : 1;
		final float ns = getForwardVelocity().x / SPEED_MAX;
		return ns * mult;
	}

	private float getTurnFactor() {
		final float mult = (getForwardVelocity().x < 0) ? -1 : 1;
		final float x = Math.abs(getNormalizedSpeed() * 2);
		final float factor = (float) (1 - Math.exp(-3 * MathUtils.clamp(x, 0.05f, 1)));
		if (factor < -1 || factor > 1)
			System.out.println("Turn factor is out of range!");
		return factor * mult;
	}

	public void update(final float deltaTime) {
		this.deltaTime = deltaTime;
		reduceToMaxSpeed(SPEED_MAX);
		killLateral(0.95f);
		sprite.setPosition(getX(), getY());
		sprite.setRotation(body.getAngle() * MathUtils.radDeg);
	}

	private void reduceToMaxSpeed(float maxspeed) {
		float speed = getForwardVelocity().x;
		if (speed < maxspeed * -1)
			speed = maxspeed * -1;
		if (speed > maxspeed)
			speed = maxspeed;
		final Vector2 newSpeed = new Vector2(speed, getForwardVelocity().y);
		newSpeed.rotateRad(body.getAngle());
		body.setLinearVelocity(newSpeed);
	}

	private void killLateral(float drift) {
		float lat = getVelocityVector().dot(getOrthogonal());
		body.applyLinearImpulse(getOrthogonal().scl(drift).scl(lat).scl(-1), body.getPosition(), true);
	}

	private Vector2 getForwardVelocity() {
		final Vector2 velo = getVelocityVector();
		velo.rotateRad(body.getAngle() * -1);
		return velo;
	}

	public float getX() {
		return body.getPosition().x - sprite.getWidth() / 2;
	}

	public float getY() {
		return body.getPosition().y - sprite.getHeight() / 2;
	}

	public void draw(SpriteBatch spriteBatch) {
		sprite.draw(spriteBatch);
	}

	public Vector2 getForward() {
		return new Vector2(body.getAngle(), 0);
	}

	private Vector2 getVelocityVector() {
		return body.getLinearVelocity();
	}

	private Vector2 getOrthogonal() {
		final Vector2 ort = new Vector2(1, 0);
		ort.rotateRad(body.getAngle());
		ort.rotate90(1);
		return ort;
	}

	public float hitEnemy(final Enemy e) {
		float damage = Math.abs(getForwardVelocity().x * 2f);
		if (damage > 0.1f)
			e.takeDamage(damage);
		return e.health;
	}

	@Override
	public void dispose() {
		sprite.getTexture().dispose();
	}
}
<reponame>subinvelleringatt/A-December-of-Algorithms-2019<filename>December-14/haskell_atarv.hs<gh_stars>100-1000 {- December 14 - A Wordplay with Vowels and Consonants -} import Data.Set ( fromList , member , Set ) import Data.List data Player = A | B -- Vowels in English vowels :: Set Char vowels = fromList "aeiou" isVowel :: Char -> Bool isVowel c = c `member` vowels -- Generates substrings of string substrings :: String -> [String] substrings s = substrings' s [] where substrings' [] subs = subs substrings' (x : xs) subs = substrings' (xs) $ subs ++ (filter (not . null) $ inits (x : xs)) -- First is substrings starting with a vowel (player A), second starting with any other character (player B) substringsByPlayer :: String -> ([String], [String]) substringsByPlayer s = partition (isVowel . head) $ substrings s winnerAndScore :: String -> (Maybe Player, Int) winnerAndScore s = let (substringsA, substringsB) = substringsByPlayer s in case (length substringsA) `compare` (length substringsB) of LT -> (Just B, length substringsB) GT -> (Just A, length substringsA) EQ -> (Nothing, length substringsA) main = do putStrLn "Enter string:" s <- getLine case winnerAndScore s of (Nothing, score) -> putStrLn $ "It's a tie with score " ++ show score (Just A , score) -> putStrLn $ "Winner is A with score " ++ show score (Just B , score) -> putStrLn $ "Winner is B with score " ++ show score
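For reference, the Haskell enumeration above can be replaced by a closed-form count, since a substring's owner is decided by its first character alone: there are n - i substrings starting at index i. A Python sketch of that O(n) scoring (an aside, not a translation of the Haskell):

def scores(s: str):
    n = len(s)
    a = sum(n - i for i, c in enumerate(s) if c in "aeiou")
    b = n * (n + 1) // 2 - a   # total substrings minus A's share
    return a, b

# scores("banana") -> A owns substrings starting at the three 'a's.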
Multisensory enhancement of burst activity in an insect auditory neuron. Detecting predators is crucial for survival. In insects, a few sensory interneurons receiving sensory input from a distinct receptive organ extract specific features informing the animal about approaching predators and mediate avoidance behaviors. Although integration of multiple sensory cues relevant to the predator enhances sensitivity and precision, it has not been established whether the sensory interneurons that act as predator detectors integrate multiple modalities of sensory inputs elicited by predators. Using intracellular recording techniques, we found that the cricket auditory neuron AN2, which is sensitive to the ultrasound-like echolocation calls of bats, responds to airflow stimuli transduced by the cercal organ, a mechanoreceptor in the abdomen. AN2 enhanced spike outputs in response to cross-modal stimuli combining sound with airflow, and the linearity of the summation of multisensory integration depended on the magnitude of the evoked response. The enhanced AN2 activity contained bursts, triggering avoidance behavior. Moreover, cross-modal stimuli elicited larger and longer lasting excitatory postsynaptic potentials (EPSP) than unimodal stimuli, which would result from a sublinear summation of EPSPs evoked respectively by sound or airflow. The persistence of EPSPs was correlated with the occurrence and structure of burst activity. Our findings indicate that AN2 integrates bimodal signals and that multisensory integration rather than unimodal stimulation alone more reliably generates bursting activity. NEW & NOTEWORTHY Crickets detect ultrasound with their tympanum and airflow with their cercal organ and process them as alert signals of predators. These sensory signals are integrated by auditory neuron AN2 in the early stages of sensory processing. Multisensory inputs from different sensory channels enhanced excitatory postsynaptic potentials to facilitate burst firing, which could trigger avoidance steering in flying crickets. Our results highlight the cellular basis of multisensory integration in AN2 and possible effects on escape behavior.
// SOLUTION ==============================================
// Each person takes turns buying the highest-costing flower
// available first, increasing the factor by 1 each round.
// NOTE: COSTS is assumed to already be sorted in descending order,
// so the cheapest flowers end up with the largest multipliers.
func minCost(COSTS []int, N, K int) (total int) {
	buys, factor := 1, 1
	for _, cost := range COSTS {
		total += cost * factor
		buys++
		if buys > K {
			buys = 1
			factor++
		}
	}
	return
}
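For comparison, a self-contained Python sketch that includes the sort the Go snippet assumes has already happened, plus a worked example:

def min_cost(costs, k):
    """Greedy: k buyers round-robin over flowers from most to least
    expensive; the price multiplier grows by 1 after every k purchases."""
    total, factor = 0, 1
    for i, c in enumerate(sorted(costs, reverse=True)):
        total += c * factor
        if (i + 1) % k == 0:
            factor += 1
    return total

# min_cost([1, 3, 5, 7, 9], k=3) -> (9 + 7 + 5) * 1 + (3 + 1) * 2 = 29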
/** * @return A malloced string to be returned on read and configuration * errors. */ static char * policy_checking_failed (void) { return g_strdup ("Password policy checking failed (internal error)"); }
def undefined_names(sourcecode): import pyflakes.api import pyflakes.reporter class CaptureReporter(pyflakes.reporter.Reporter): def __init__(reporter, warningStream, errorStream): reporter.syntax_errors = [] reporter.messages = [] reporter.unexpected = [] def unexpectedError(reporter, filename, msg): reporter.unexpected.append(msg) def syntaxError(reporter, filename, msg, lineno, offset, text): reporter.syntax_errors.append(msg) def flake(reporter, message): reporter.messages.append(message) names = set() reporter = CaptureReporter(None, None) pyflakes.api.check(sourcecode, '_.py', reporter) for msg in reporter.messages: if msg.__class__.__name__.endswith('UndefinedName'): assert len(msg.message_args) == 1 names.add(msg.message_args[0]) return names
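A quick usage check of the helper above (pyflakes must be installed; the snippets are illustrative):

assert undefined_names("y = x + 1") == {"x"}
assert undefined_names("import os\nprint(os.sep)") == set()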
SEATTLE — A proposed pipeline-expansion project in Canada will put the fishing rights and cultural heritage of U.S. tribes at risk, a lawyer representing several Washington state tribes told Canadian energy regulators Friday.

Kinder Morgan’s Trans Mountain project would nearly triple pipeline capacity from 300,000 to 890,000 barrels of crude oil a day. It would carry oil from Alberta’s oil sands to the Vancouver area to be loaded onto barges and tankers for Asian and U.S. markets. The project would dramatically increase the number of oil tankers that ply Washington state waters.

“This project will harm the cultures of the US tribes,” said Kristen Boyles, an Earthjustice attorney who spoke against the project on behalf of the Swinomish, Tulalip, Suquamish and Lummi tribes. She made final arguments to Canada’s National Energy Board at a hearing in Burnaby, British Columbia, which was broadcast online. Boyles told the three-member panel that project officials didn’t consult with the U.S. tribes and didn’t consider the impacts to the tribes. “This project is all risk and no reward,” she added.

The U.S. tribes are among the municipalities, environmental groups, First Nations and residents along the pipeline route who are intervenors in the case. Many have raised concerns about the risk of pipeline leaks, increased vessel traffic and potential oil spills.

Last month, a lawyer for Kinder Morgan Inc. told the energy board that the $5.4 billion expansion can be done in a way that minimizes impact on the environment, addresses social impacts and provides many economic benefits. “There is no doubt as to the need for this project and the benefits that will flow from it,” Kinder Morgan lawyer Shawn Denstedt said in December, according to an NEB transcript. “Trans Mountain has mature operations, maintenance systems, and emergency response plans already in place,” he added.

Kinder Morgan said in an email Friday that it was actively engaged with the Washington maritime community, which included representatives from government, industry, environment and tribes. “Our engagement is focused on providing information and gathering feedback related to marine traffic and emergency preparation and response,” it said.

The Washington Department of Ecology, also an intervenor in the case, is scheduled to give arguments next month. In earlier written testimony, Ecology said it expects an additional 350 loaded oil tankers moving though state waters each year if the project is built. The state wants the project to ensure that vessels calling at the Vancouver terminal follow standards that are as protective as those in Washington. “Any spill is immediately damaging, even the smallest of spill,” Linda Pilkey-Jarvis, manager of Washington’s spill preparedness, said in an interview.

Ecology officials have said the project should require all vessels and barges to have tug escorts. It should also be required to help pay for and develop a response plan to address the risks of vessels carrying diluted bitumen, as well as show evidence it can pay for response and damage costs if a spill happens. Trans Mountain has agreed to make it a requirement that tankers accepted at the Vancouver terminal have enhanced tug escort from the terminal to the west entrance of the Strait of Juan de Fuca, according to Pilkey-Jarvis.

Northwest tribes say increased vessel traffic poses a safety risk to fishermen and could have disastrous impacts on tribal fishing areas that are protected by their treaties with the U.S.
They say the fisheries are important not only to jobs and livelihoods but also to their self-identity. “We are speaking directly to the Canadian regulators to highlight the risks of this pipeline to our lives, our culture, and the priceless waters of the Salish Sea,” Swinomish Chairman Brian Cladoosby said in a prepared statement.

Environmental groups also worry about the environmental impacts on the small endangered population of orcas that spends time in Washington state and Canadian waters.

The energy board is scheduled to make its recommendations on the pipeline expansion by May 20.
def handle_inbound_message(message_text, user_id, space_name, email):
    user = User.objects.get_or_create(
        space_name=space_name, user_id=user_id, email=email
    )[0]
    message = message_text.lower().split()
    # Guard against empty input before indexing into the token list.
    if not message:
        return COMMAND_ERROR
    if message[0] == 'start':
        if len(message) != 1 or get_active_loop(user):
            return COMMAND_ERROR
        return start_active_loop(user)
    if message[0] == 'stop':
        if len(message) != 1 or not get_active_loop(user):
            return COMMAND_ERROR
        return end_active_loop(user)
    if not get_active_loop(user):
        return COMMAND_ERROR
    return log_user_response(user, message)
Book review: The Eternal Criminal Record by James Jacobs forms. The question Occupy! Scenes from Occupied America raises then is: ‘Should we view the Occupy movements as carnival or carnivalesque?’ That question is unresolved, but that it is raised is no small feat. Taylor and Gessen are on the right track. This text, in including these voices, provides the preconditions for sustained reflection on protest in the US. With the tragic shooting of Michael Brown in Ferguson, Missouri, continued violence in Syria, and the potential for large-scale conflict in the Ukraine, this eye-opening look at protest deserves to be read. The Occupy movements may have faded from public memory as quickly as they began, but this should not dissuade readers from picking up this volume, because the lessons of the Occupy movements resonate today. The Occupy movements taught us about egalitarianism, collectivism, friendship, questions of space and place, and the impact of corporations on everyday life. These ideas matter now as we mobilize against police brutality and state violence. Readers should seek this text out. Flaws and flair combine to produce a question-raising reading experience that will reward those who stick with it. The illustrations don’t hurt either. Activists and scholars both have something to gain, and despite my concerns about the need for more sustained theoretical reflection, this text is not without merit, specifically its readability and multi-vocality. When a text provides readers with meaningful theoretical questions, it deserves a wide readership, even if it does not provide the theory to answer these questions. Whether Occupy! or the Occupy movements are carnival or carnivalesque is up to the reader to decide.
def _get_time_series_display_name_to_id_mapping(self) -> Dict[str, str]: time_series = TensorboardTimeSeries.list( tensorboard_run_name=self.resource_name, credentials=self.credentials ) return {ts.display_name: ts.name for ts in time_series}
import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.util.StringTokenizer; public class Main { static int n,k; static int right[]; static int winner[]; static int real_winner[]; static void make_set() { for(int i = 0; i < n; i++) { right[i] = i; winner[i] = i; real_winner[i] = i; } } static int find_real_winner(int node) { if (real_winner[node] == node) return node; return real_winner[node] =find_real_winner(real_winner[node]); } static int find_winner(int node) { if (winner[node] == node) return node; return find_winner(winner[node]); } static int find_right(int node) { if (node == right[node]) return node; return right[node] = find_right(right[node]); } static void union_right(int a, int b) { if (a > b) { right[b] = a; } else { right[a] = b; } } public static void main(String[] args) throws IOException { BufferedReader bf = new BufferedReader(new InputStreamReader(System.in)); StringTokenizer tk = new StringTokenizer(bf.readLine()); n = Integer.parseInt(tk.nextToken()); k = Integer.parseInt(tk.nextToken()); right = new int[n]; winner = new int[n]; real_winner = new int[n]; int wnr; make_set(); int R; int l,r,x; for (int i = 0; i < k; i++) { tk = new StringTokenizer(bf.readLine()); l = Integer.parseInt(tk.nextToken())-1; r = Integer.parseInt(tk.nextToken())-1; x = Integer.parseInt(tk.nextToken())-1; while (l <= r) { wnr = find_real_winner(l); winner[wnr] = x; real_winner[wnr] = x; R = find_right(l); union_right(r, R); l = R+1; } } for (int i = 0; i < n; i++) { System.out.print(winner[i] == i?0:(winner[i]+1)); if (i < n - 1) System.out.print(" "); } System.out.println(); } }
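The Java solution above combines two union-find structures: one records who eliminated each fighter, and a second "skip pointer" structure jumps over already-processed positions so that every cell is visited only O(1) amortized times across all range queries. A Python sketch of the core skip-pointer trick, shown on the simpler first-write-wins range-painting variant (illustrative, not a translation of the Java):

def paint_first_wins(n, queries):
    """queries: (l, r, x), 0-based inclusive; the first query to reach a
    cell colors it, and later queries skip colored cells cheaply."""
    color = [0] * n
    nxt = list(range(n + 1))      # nxt[i]: leftmost unpainted cell >= i

    def find(i):                  # union-find lookup with path compression
        root = i
        while nxt[root] != root:
            root = nxt[root]
        while nxt[i] != root:
            nxt[i], i = root, nxt[i]
        return root

    for l, r, x in queries:
        i = find(l)
        while i <= r:
            color[i] = x
            nxt[i] = i + 1        # permanently skip this cell from now on
            i = find(i + 1)
    return color

# paint_first_wins(5, [(0, 2, 7), (1, 4, 9)]) -> [7, 7, 7, 9, 9]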
n=int(input()) a=[int(i) for i in input().split()] ans=[] positive_flag=True if 0<=min(a): positive_flag=True elif max(a)<=0: positive_flag=False elif min(a)*(-1) <= max(a): positive_flag=True maxId=a.index(max(a)) for i in range(n): if i!=maxId: ans.append([maxId+1,i+1]) else: positive_flag=False minId=a.index(min(a)) for i in range(n): if i!=minId: ans.append([minId+1,i+1]) if positive_flag: for i in range(1,n): ans.append([i,i+1]) else: for i in range(n,1,-1): ans.append([i,i-1]) print(len(ans)) for num in ans: print(str(num[0])+" "+str(num[1]))
// RegisterInterfaces registers the commitment interfaces to protobuf Any. func RegisterInterfaces(registry codectypes.InterfaceRegistry) { registry.RegisterInterface( "ibc.core.commitment.v1.Root", (*exported.Root)(nil), ) registry.RegisterInterface( "ibc.core.commitment.v1.Prefix", (*exported.Prefix)(nil), ) registry.RegisterInterface( "ibc.core.commitment.v1.Path", (*exported.Path)(nil), ) registry.RegisterInterface( "ibc.core.commitment.v1.Proof", (*exported.Proof)(nil), ) registry.RegisterImplementations( (*exported.Root)(nil), &MerkleRoot{}, ) registry.RegisterImplementations( (*exported.Prefix)(nil), &MerklePrefix{}, ) registry.RegisterImplementations( (*exported.Path)(nil), &MerklePath{}, ) registry.RegisterImplementations( (*exported.Proof)(nil), &MerkleProof{}, ) }
package wb.t20190422; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.OutputStream; import charlotte.tools.BinTools; import charlotte.tools.FileTools; import charlotte.tools.LimitedInputStream; import charlotte.tools.RTError; import charlotte.tools.SecurityTools; public class Test0001 { public static void main(String[] args) { try { test01(); System.out.println("OK!"); } catch(Throwable e) { e.printStackTrace(); } System.exit(0); } private static void test01() throws Exception { test01_a("C:/var/A83Map_01.bmp"); } private static void test01_a(String file) throws Exception { String file2 = "C:/temp/2.tmp"; String file3 = "C:/temp/3.tmp"; FileTools.copyFile(file, file2); addHash(file2); checkHash(file2); unaddHash(file2, file3); System.out.println(BinTools.Hex.toString(SecurityTools.getMD5File(file))); System.out.println(BinTools.Hex.toString(SecurityTools.getMD5File(file2))); System.out.println(BinTools.Hex.toString(SecurityTools.getMD5File(file3))); } private static void addHash(String file) throws Exception { byte[] hash = SecurityTools.getSHA512File(file); try(OutputStream writer = new FileOutputStream(file, true)) { writer.write(hash); } } private static void checkHash(String file) throws Exception { long fileSize = new File(file).length(); if(fileSize < 64L) { throw new RTError(); } try( InputStream reader = new FileInputStream(file); InputStream reader2 = new LimitedInputStream(reader, fileSize - 64L); ) { byte[] hash = SecurityTools.getSHA512(reader2); byte[] hash2 = FileTools.read(reader, 64); if(BinTools.comp_array.compare(hash, hash2) != 0) { throw new RTError(); } } } private static void unaddHash(String rFile, String wFile) throws Exception { long fileSize = new File(rFile).length(); if(fileSize < 64L) { throw new RTError(); } try( InputStream reader = new FileInputStream(rFile); InputStream reader2 = new LimitedInputStream(reader, fileSize - 64L); OutputStream writer = new FileOutputStream(wFile); ) { FileTools.readToEnd(reader2, writer); } } }
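The same append-a-trailer scheme as the Java class above, written as a compact Python reference (a sketch that assumes the whole file fits in memory, unlike the streaming Java version):

import hashlib
import os

def add_hash(path):
    # Append the SHA-512 digest of the file's contents to the file itself.
    with open(path, "rb") as f:
        digest = hashlib.sha512(f.read()).digest()
    with open(path, "ab") as f:
        f.write(digest)

def check_hash(path):
    # The last 64 bytes must equal the SHA-512 of everything before them.
    size = os.path.getsize(path)
    if size < 64:
        raise ValueError("file too short to carry a SHA-512 trailer")
    with open(path, "rb") as f:
        body = f.read(size - 64)
        trailer = f.read(64)
    return hashlib.sha512(body).digest() == trailer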
package org.opencb.biodata.tools.variant; import org.junit.Test; import org.opencb.biodata.models.variant.Variant; import org.opencb.biodata.models.variant.VariantSource; import org.opencb.biodata.models.variant.VariantStudy; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import static org.junit.Assert.assertEquals; /** * Created on 16/05/16 * * @author <NAME> &lt;<EMAIL>&gt; */ public class VariantVcfHtsjdkReaderTest { @Test public void readFileTest() throws Exception { InputStream inputStream = getClass().getResourceAsStream("/ibs.vcf"); VariantSource source = new VariantSource("ibs.vcf", "2", "1", "myStudy", VariantStudy.StudyType.FAMILY, VariantSource.Aggregation.NONE); VariantVcfHtsjdkReader reader = new VariantVcfHtsjdkReader(inputStream, source); reader.open(); reader.pre(); assertEquals(Arrays.asList("s0", "s1", "s2", "s3", "s4", "s5"), reader.getSampleNames()); assertEquals("##fileformat=VCFv4.1\n" + "##INFO=<ID=AC,Number=1,Type=Integer,Description=\"Alele count in genotypes\">\n" + "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\ts0\ts1\ts2\ts3\ts4\ts5", reader.getHeader()); List<Variant> read; int i = 0; do { read = reader.read(); for (Variant variant : read) { i++; System.out.println("variant = " + variant.toJson()); } } while (!read.isEmpty()); assertEquals(3, i); reader.post(); reader.close(); } /** * Illumina produces variant calls, which are invalid for htsjdk. Make sure these are logged. * @throws Exception */ @Test public void readInvalidFormat() throws Exception { String malformatedLine = "1\t1000000\t.\tTTTCCA\tTTTCCA\t100\tPASS\tAC=1\tGT\t0/1"; String vcf = "##fileformat=VCFv4.1\n" + "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\ts0\n" + malformatedLine; VariantSource source = new VariantSource("test.vcf", "2", "1", "myStudy", VariantStudy.StudyType.CASE_CONTROL, VariantSource.Aggregation.NONE); VariantVcfHtsjdkReader reader = new VariantVcfHtsjdkReader(new ByteArrayInputStream(vcf.getBytes()), source); final List<String> malformated = new ArrayList<>(); reader.registerMalformatedVcfHandler((a,b) -> malformated.add(a)); reader.open(); reader.pre(); List<Variant> read = null; do{ read = reader.read(); System.out.println(read); } while(!read.isEmpty()); reader.post(); reader.close(); assertEquals(1, malformated.size()); assertEquals(malformatedLine, malformated.get(0)); } }
package response import "gin-vue-admin/model" type SysAPIResponse struct { Api model.SysApi `json:"api"` } type SysAPIListResponse struct { Apis []model.SysApi `json:"apis"` }
// // Created on 18-5-27. // #include "controlInfo.h" using namespace std; bool ControlInfo::get(JSin &res) { lock_guard<mutex> l(info_mutex); if (used) return false; else { res = this->in; used = true; return true; } } void ControlInfo::set(unsigned char *cin) { lock_guard<mutex> l(info_mutex); for (int i = 0; i < sizeof(this->in.data); ++i) { this->in.data[i] = cin[i]; } this->used = false; }
Childhood socioeconomic position, gender, adult body mass index, and incidence of type 2 diabetes mellitus over 34 years in the Alameda County Study. OBJECTIVES We examined the association between childhood socioeconomic position and incidence of type 2 diabetes and the effects of gender and adult body mass index (BMI). METHODS We studied 5913 participants in the Alameda County Study from 1965 to 1999 who were diabetes free at baseline (1965). Cox proportional hazards models estimated diabetes risk associated with childhood socioeconomic position and combined childhood socioeconomic position-adult BMI categories in pooled and gender-stratified samples. Demographic confounders and potential pathway components (physical inactivity, smoking, alcohol consumption, hypertension, depression, health care access) were included as covariates. RESULTS Low childhood socioeconomic position was associated with excess diabetes risk, especially among women. Race and body composition accounted for some of this excess risk. The association between childhood socioeconomic position and diabetes incidence differed by adult BMI category in the pooled and women-only groups. Adjustment for race and behaviors attenuated the risk attributable to low childhood socioeconomic position among the obese group only. CONCLUSIONS Childhood socioeconomic position was a robust predictor of incident diabetes, especially among women. A cumulative risk effect was observed for both childhood socioeconomic position and adult BMI, especially among women.
{-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} ----------------------------------------------------------------------------- -- | -- Module : TicketDispenser -- Copyright : (C) 2017, ATS Advanced Telematic Systems GmbH -- License : BSD-style (see the file LICENSE) -- -- Maintainer : <NAME> <<EMAIL>> -- Stability : provisional -- Portability : non-portable (GHC extensions) -- -- This module contains a specification of a simple ticket dispenser. -- ----------------------------------------------------------------------------- module TicketDispenser ( prop_ticketDispenser , prop_ticketDispenserParallel , prop_ticketDispenserParallelOK , prop_ticketDispenserParallelBad , withDbLock , setupLock , cleanupLock ) where import Control.Exception (IOException, catch) import Data.TreeDiff (ToExpr) import GHC.Generics (Generic, Generic1) import Prelude hiding (readFile) import System.Directory (createDirectoryIfMissing, getTemporaryDirectory, removeFile) import System.FileLock (SharedExclusive(..), lockFile, unlockFile) import System.FilePath ((</>)) import System.IO (hClose, openTempFile) import System.IO.Strict (readFile) import Test.QuickCheck (Gen, Property, frequency, (===)) import Test.QuickCheck.Monadic (monadicIO) import Test.StateMachine import qualified Test.StateMachine.Types.Rank2 as Rank2 ------------------------------------------------------------------------ -- The actions of the ticket dispenser are: data Action (r :: * -> *) = TakeTicket | Reset deriving (Show, Generic1, Rank2.Functor, Rank2.Foldable, Rank2.Traversable) data Response (r :: * -> *) = GotTicket Int | ResetOk deriving (Show, Generic1, Rank2.Foldable) -- Which correspond to taking a ticket and getting the next number, and -- resetting the number counter of the dispenser. ------------------------------------------------------------------------ -- The dispenser has to be reset before use, hence the maybe integer. newtype Model (r :: * -> *) = Model (Maybe Int) deriving (Eq, Show, Generic) deriving instance ToExpr (Model Concrete) initModel :: Model r initModel = Model Nothing preconditions :: Model Symbolic -> Action Symbolic -> Logic preconditions (Model Nothing) TakeTicket = Bot preconditions (Model (Just _)) TakeTicket = Top preconditions _ Reset = Top transitions :: Model r -> Action r -> Response r -> Model r transitions (Model m) cmd _ = case cmd of TakeTicket -> Model (succ <$> m) Reset -> Model (Just 0) postconditions :: Model Concrete -> Action Concrete -> Response Concrete -> Logic postconditions (Model m) TakeTicket (GotTicket n) = Just n .== (succ <$> m) postconditions _ Reset ResetOk = Top postconditions _ _ _ = error "postconditions" ------------------------------------------------------------------------ -- With stateful generation we ensure that the dispenser is reset before -- use. generator :: Model Symbolic -> Gen (Action Symbolic) generator _ = frequency [ (1, pure Reset) , (4, pure TakeTicket) ] shrinker :: Action Symbolic -> [Action Symbolic] shrinker _ = [] ------------------------------------------------------------------------ -- We will implement the dispenser using a simple database file which -- stores the next number. A file lock is used to allow concurrent use. 
semantics :: SharedExclusive -- ^ Indicates if the file -- lock should be shared -- between threads or if it -- should be exclusive. -- Sharing it could cause -- race conditions. -> (FilePath, FilePath) -- ^ File paths to the -- database storing the -- ticket counter and the -- file lock used for -- synchronisation. -> Action Concrete -> IO (Response Concrete) semantics se (tdb, tlock) cmd = case cmd of TakeTicket -> do lock <- lockFile tlock se i <- read <$> readFile tdb `catch` (\(_ :: IOException) -> return "-1") writeFile tdb (show (i + 1)) `catch` (\(_ :: IOException) -> return ()) unlockFile lock return (GotTicket (i + 1)) Reset -> do lock <- lockFile tlock se writeFile tdb (show (0 :: Integer)) `catch` (\(_ :: IOException) -> return ()) unlockFile lock return ResetOk mock :: Model Symbolic -> Action Symbolic -> GenSym (Response Symbolic) mock (Model Nothing) TakeTicket = error "mock: TakeTicket" mock (Model (Just n)) TakeTicket = GotTicket <$> pure n mock _ Reset = pure ResetOk ------------------------------------------------------------------------ type DbLock = (FilePath, FilePath) setupLock :: IO DbLock setupLock = do tmp <- getTemporaryDirectory let tmpTD = tmp </> "ticket-dispenser" createDirectoryIfMissing True tmpTD (tdb, dbh) <- openTempFile tmpTD "ticket-dispenser.db" hClose dbh (tlock, lockh) <- openTempFile tmpTD "ticket-dispenser.lock" hClose lockh return (tdb, tlock) cleanupLock :: DbLock -> IO () cleanupLock (tdb, tlock) = do removeFile tdb removeFile tlock withDbLock :: (DbLock -> IO ()) -> IO () withDbLock run = do lock <- setupLock run lock cleanupLock lock sm :: SharedExclusive -> DbLock -> StateMachine Model Action IO Response sm se files = StateMachine initModel transitions preconditions postconditions Nothing Nothing generator Nothing shrinker (semantics se files) id mock -- Sequentially the model is consistent (even though the lock is -- shared). prop_ticketDispenser :: DbLock -> Property prop_ticketDispenser files = forAllCommands sm' Nothing $ \cmds -> monadicIO $ do (hist, _, res) <- runCommands sm' cmds prettyCommands sm' hist $ checkCommandNames cmds (res === Ok) where sm' = sm Shared files prop_ticketDispenserParallel :: SharedExclusive -> DbLock -> Property prop_ticketDispenserParallel se files = forAllParallelCommands sm' $ \cmds -> monadicIO $ prettyParallelCommands cmds =<< runParallelCommandsNTimes 100 sm' cmds where sm' = sm se files -- So long as the file locks are exclusive, i.e. not shared, the -- parallel property passes. prop_ticketDispenserParallelOK :: DbLock -> Property prop_ticketDispenserParallelOK = prop_ticketDispenserParallel Exclusive prop_ticketDispenserParallelBad :: DbLock -> Property prop_ticketDispenserParallelBad = prop_ticketDispenserParallel Shared
package types import ( "helm.sh/helm/v3/pkg/release" v1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) // Release is a helm release with a form attached type Release struct { *release.Release *PorterRelease Form *FormYAML `json:"form,omitempty"` } type PorterRelease struct { ID uint `json:"id"` WebhookToken string `json:"webhook_token"` LatestVersion string `json:"latest_version"` GitActionConfig *GitActionConfig `json:"git_action_config,omitempty"` ImageRepoURI string `json:"image_repo_uri"` BuildConfig *BuildConfig `json:"build_config,omitempty"` } type GetReleaseResponse Release type UpdateNotificationConfigRequest struct { Payload struct { Enabled bool `json:"enabled"` Success bool `json:"success"` Failure bool `json:"failure"` } `json:"payload"` } type CreateReleaseBaseRequest struct { RepoURL string `schema:"repo_url"` TemplateName string `json:"template_name" form:"required"` TemplateVersion string `json:"template_version" form:"required"` Values map[string]interface{} `json:"values"` Name string `json:"name" form:"required"` } type CreateReleaseRequest struct { *CreateReleaseBaseRequest ImageURL string `json:"image_url" form:"required"` GithubActionConfig *CreateGitActionConfigRequest `json:"github_action_config,omitempty"` BuildConfig *CreateBuildConfigRequest `json:"build_config,omitempty"` } type CreateAddonRequest struct { *CreateReleaseBaseRequest HelmRepoID uint `json:"helm_repo_id"` } type RollbackReleaseRequest struct { Revision int `json:"revision" form:"required"` } type UpgradeReleaseRequest struct { Values string `json:"values" form:"required"` ChartVersion string `json:"version"` } type UpdateImageBatchRequest struct { ImageRepoURI string `json:"image_repo_uri" form:"required"` Tag string `json:"tag" form:"required"` } type GetJobsStatusResponse struct { Status string `json:"status,omitempty"` StartTime *metav1.Time `json:"start_time,omitempty"` } const URLParamToken URLParam = "token" type WebhookRequest struct { Commit string `schema:"commit"` // NOTICE: deprecated. This field should no longer be used; it is not supported // internally. Repository string `schema:"repository"` } type GetGHATemplateRequest struct { ReleaseName string `json:"release_name"` GithubActionConfig *CreateGitActionConfigRequest `json:"github_action_config" form:"required"` } type GetGHATemplateResponse string type GetReleaseStepsResponse []SubEvent type SubEvent struct { EventID string `json:"event_id"` Name string `json:"name"` Index int64 `json:"index"` Status EventStatus `json:"status"` Info string `json:"info"` Time int64 `json:"time"` } type EventStatus int64 const ( EventStatusSuccess EventStatus = 1 EventStatusInProgress = 2 EventStatusFailed = 3 ) type UpdateReleaseStepsRequest struct { Event SubEvent `json:"event" form:"required"` } type NotificationConfig struct { Enabled bool `json:"enabled"` Success bool `json:"success"` Failure bool `json:"failure"` NotifLimit string `json:"notif_limit"` } type GetNotificationConfigResponse struct { *NotificationConfig } type DNSRecord struct { ExternalURL string `json:"external_url"` Endpoint string `json:"endpoint"` Hostname string `json:"hostname"` ClusterID uint `json:"cluster_id"` } type GetReleaseAllPodsResponse []v1.Pod
KNO UNVEILS BETA TEXTBOOK APP FOR iPAD WITH WORLD'S LARGEST eTEXTBOOK CATALOG

App's Unique Features Make Learning More Engaging, Efficient and Social

Santa Clara, CA – June 6, 2011 – Today Kno, Inc., an education software company, unveiled a beta version of Textbooks, an innovative learning application that marries the world's largest catalog of digital textbooks with an industry-leading set of features for Apple's iPad.

Kno is pushing the boundaries of how a student interacts with textbooks and learning materials. The app combines the best textbook and PDF reader technology with a Course Manager that allows students to organize documents by class and term. The beta application also integrates academic social features by allowing students to ask questions, post comments, or share their location with a study buddy. Additionally, students can annotate by using one-touch highlighting and sticky notes or use the search feature to easily find topics, terms or subjects within a textbook.

"The first version of the Kno app ensures that we maintain the consistency and integrity of today's textbook, while making it more engaging, efficient and social," said Osman Rashid, CEO and Co-Founder, Kno, Inc. "This enhanced digital version gives both students and professors the confidence of knowing that the page numbers, chapters, diagrams and photographs will be exactly the same as the physical textbook, yet a lot more interactive."

Kno has partnered with the leading textbook publishers to offer the world's largest catalog of over 70,000 eTextbooks, at a 30-50% discount off the list price. All books purchased from the Kno website are backed by a 15-day Money Back Guarantee policy. The app is available for free to download at www.kno.com.

"The Kno app allows us to offer a huge catalog to meet the needs of many students today, while we continue to further develop better ways to interact with educational content," said Babur Habib, CTO and Co-Founder, Kno, Inc. "The beta version of Textbooks puts us on that path."

To kick off the app's launch, Kno is offering a free textbook to the first 200 students who participate in the Kno Early Bird Beta Program. U.S. college students participating will provide regular product feedback sessions in exchange for a full refund on a purchased textbook at the end of the program. For more details go to www.facebook.com/goodtokno.
import { SlashCommandBuilder } from '@discordjs/builders';
import { REST } from '@discordjs/rest';
import type { RESTPostAPIApplicationCommandsJSONBody } from 'discord-api-types/rest/v9';
import { Routes } from 'discord-api-types/rest/v9';
import dotenv from 'dotenv';
import { readdirSync } from 'fs';
import path from 'path';
import { novelSlash } from './novel/novelSlash';
import roles from './novel/roles';
import { db } from './prisma';
import { SlashCommand } from './types/discord';

dotenv.config();

const token = process.env.DISCORD_TOKEN;
const clientId = process.env.CLIENT_ID;
const guildId = process.env.GUILD_ID;

const deployCommands = async () => {
  const commands: RESTPostAPIApplicationCommandsJSONBody[] = [];
  const commandPath = path.join(__dirname, './commands/slash');
  // `'.js' || '.ts'` always evaluates to '.js'; test both extensions explicitly.
  const commandFiles = readdirSync(commandPath).filter(
    (file) => file.endsWith('.js') || file.endsWith('.ts'),
  );

  for (const file of commandFiles) {
    const command: SlashCommand = await import(`./commands/slash/${file}`);
    commands.push(command.data.toJSON());
  }

  const novels = await db.novel.findMany({
    include: {
      sources: true,
    },
  });

  novels.forEach((novel) => {
    const sources = novel.sources.map((source) => source.source);
    const options: [name: string, value: string][] = [];
    sources.forEach((source) => {
      options.push([source, source]);
    });

    const command = new SlashCommandBuilder()
      .setName(novel.slug)
      .setDescription(novel.name)
      .addStringOption((option) =>
        option
          .setName('source')
          .setDescription('Select the source you want to get updates for')
          .setRequired(false)
          .addChoices(options),
      );

    const singleCommand: SlashCommand = {
      name: novel.slug,
      data: command,
      execute: novelSlash.execute,
      permission: 0n,
    };
    commands.push(singleCommand.data.toJSON());
  });

  // Role command
  const roleOptions: [name: string, value: string][] = [];
  // name and slug
  novels.forEach((novel) => {
    novel.sources.forEach((source) => {
      const roleString = `${novel.slug}-${source.source}`;
      roleOptions.push([roleString, source.roleId || '']);
    });
  });

  const command = new SlashCommandBuilder()
    .setName('roles')
    .setDescription('Handle the notification role')
    .addStringOption((option) =>
      option
        .setName('get')
        .setDescription('Get the role')
        .setRequired(false)
        .addChoices(roleOptions),
    )
    .addStringOption((option) =>
      option
        .setName('remove')
        .setDescription('Remove the role')
        .setRequired(false)
        .addChoices(roleOptions),
    );

  const singleCommand: SlashCommand = {
    name: 'roles',
    data: command,
    execute: roles.execute,
    permission: 0n,
  };
  commands.push(singleCommand.data.toJSON());

  const rest = new REST({ version: '9' }).setToken(String(token));

  try {
    const bot = rest.put(
      Routes.applicationGuildCommands(clientId as string, guildId as string),
      {
        body: commands,
      },
    );
    await bot;
    console.log('Registered');
  } catch (error) {
    console.log(error);
  }
};

deployCommands().catch((error) => {
  console.log(error);
});
import numpy as np


def flattenedIndices(indices, numOfVerts):
    # Each vertex occupies two consecutive slots in a flat array (e.g. interleaved
    # x/y components); map the given vertex indices to their flat-array positions.
    return (np.arange(2 * numOfVerts).reshape((numOfVerts, 2))[indices]).flatten()
package bmstu.restfulcrud.service;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;

import bmstu.restfulcrud.DAO.EmployeeDAO;
import bmstu.restfulcrud.model.Employee;

@Path("/employees")
public class EmployeeService extends HttpServlet {

    // URI:
    // /contextPath/servletPath/employees
    @GET
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public void getEmployees_JSON(HttpServletRequest request, HttpServletResponse response)
            throws IOException, ServletException {
        List<Employee> listOfEmployees = EmployeeDAO.getAllEmployees();
        PrintWriter out = response.getWriter();
        response.setContentType("application/json");
        response.setCharacterEncoding("UTF-8");
        out.print(listOfEmployees);
        out.flush();
    }

    // URI:
    // /contextPath/servletPath/employees/{empNo}
    @GET
    @Path("/{empNo}")
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public void getEmployee(@PathParam("empNo") String empNo,
            HttpServletRequest request, HttpServletResponse response)
            throws IOException, ServletException {
        PrintWriter out = response.getWriter();
        response.setContentType("application/json");
        response.setCharacterEncoding("UTF-8");
        out.print(EmployeeDAO.getEmployee(empNo));
        out.flush();
    }

    // URI:
    // /contextPath/servletPath/employees
    @POST
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public void addEmployee(Employee emp, HttpServletRequest request,
            HttpServletResponse response) throws IOException, ServletException {
        EmployeeDAO.addEmployee(emp);
        response.setStatus(HttpServletResponse.SC_CREATED);
    }

    // URI:
    // /contextPath/servletPath/employees
    @PUT
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public void updateEmployee(Employee emp, HttpServletRequest request,
            HttpServletResponse response) throws IOException, ServletException {
        // The DAO's addEmployee is reused for updates as well.
        EmployeeDAO.addEmployee(emp);
        response.setStatus(HttpServletResponse.SC_OK);
    }

    // URI:
    // /contextPath/servletPath/employees/{empNo}
    @DELETE
    @Path("/{empNo}")
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public void deleteEmployee(@PathParam("empNo") String empNo,
            HttpServletRequest request, HttpServletResponse response)
            throws IOException, ServletException {
        EmployeeDAO.deleteEmployee(empNo);
        response.setStatus(HttpServletResponse.SC_NO_CONTENT);
    }
}
/* In the election process, one sends an election msg to all bigger nodes in the system. * If any of them responds, it will continue the election process. * If none of them respond, then the current node is the new leader. * The return statement is the negation of the bool because if no one has answered, then this node is the leader. */ func startElection(instance string, monitors []string) bool { log.Debugf("Starting election.") biggerNodeResponded := false for _, monitor := range monitors { if utils.MonitorName(instance) < monitor { log.Tracef("In election with '%s'.", monitor) url := electionUrl(monitor) log.Tracef("Sending election message to '%s'.", monitor) _, err := http.Get(url) if err != nil { errText := utils.NetworkErrorText(err) log.Warnf("Service '%s' detected as not running. %s", monitor, errText) } else { biggerNodeResponded = true break } } } return !biggerNodeResponded }
/** @jsx createElement */ import { Children, createElement, Element, Fragment, Raw, } from "@b9g/crank/crank.js"; import {renderer} from "@b9g/crank/html.js"; import fs from "fs-extra"; import type {Stats} from "fs"; import * as path from "path"; import frontmatter from "front-matter"; // TODO: lazily import these? import "prismjs"; import "prismjs/components/prism-javascript.js"; import "prismjs/components/prism-jsx.js"; import "prismjs/components/prism-typescript.js"; import "prismjs/components/prism-tsx.js"; import "prismjs/components/prism-diff.js"; import "prismjs/components/prism-bash.js"; import {createComponent} from "./marked.js"; import {CodeBlock} from "../shared/prism.js"; import {Page, Link, Script, Storage} from "./esbuild.js"; const rootDirname = new URL("..", import.meta.url).pathname; interface WalkInfo { filename: string; info: Stats; } async function* walk(dir: string): AsyncGenerator<WalkInfo> { const files = await fs.readdir(dir); for (let filename of files) { filename = path.join(dir, filename); const info = await fs.stat(filename); if (info.isDirectory()) { yield* walk(filename); } else if (info.isFile()) { yield {filename, info}; } } } interface DocInfo { attributes: { title: string; publish: boolean; publishDate?: Date; }; url: string; filename: string; body: string; } async function collectDocuments(name: string): Promise<Array<DocInfo>> { const root = path.join(rootDirname, "documents"); let docs: Array<DocInfo> = []; for await (const {filename} of walk(name)) { if (filename.endsWith(".md")) { const md = await fs.readFile(filename, {encoding: "utf8"}); let {attributes, body} = frontmatter(md) as unknown as DocInfo; attributes.publish = attributes.publish == null ? true : attributes.publish; if (attributes.publishDate != null) { attributes.publishDate = new Date(attributes.publishDate); } const url = path .join("/", path.relative(root, filename)) .replace(/\.md$/, "") .replace(/([0-9]+-)+/, ""); docs.push({url, filename, body, attributes}); } } return docs; } const storage = new Storage({ dirname: rootDirname, }); interface RootProps { title: string; children: Children; url: string; } // TODO: I wonder if we can do some kind of slot-based or includes API function Root({title, children, url}: RootProps): Element { return ( <Fragment> <Raw value="<!DOCTYPE html>" /> <Page storage={storage}> <html lang="en"> <head> <meta charset="UTF-8" /> <meta name="viewport" content="width=device-width" /> <title>{title}</title> <Link rel="stylesheet" type="text/css" href="client/index.css" /> <link rel="shortcut icon" href="/static/favicon.ico" /> <script async src="https://www.googletagmanager.com/gtag/js?id=UA-20910936-4" /> <script innerHTML={` window.dataLayer = window.dataLayer || []; function gtag(){dataLayer.push(arguments);} gtag('js', new Date()); gtag('config', 'UA-20910936-4'); `} /> </head> <body> <Navbar url={url} /> <div class="non-footer">{children}</div> <Script src="client/index.tsx" /> </body> </html> </Page> </Fragment> ); } interface NavbarProps { url: string; } function Navbar({url}: NavbarProps): Element { return ( <nav id="navbar" class="navbar"> <div class="navbar-group"> <div class="navbar-item"> <a class={`navbar-title-link ${url === "/" && "current"}`} href="/"> <img class="navbar-logo" src="/static/logo.svg" alt="" /> <span>Crank.js</span> </a> </div> <div class="navbar-item"> <a class={url.startsWith("/guides") && "current"} href="/guides/getting-started" > Docs </a> </div> <div class="navbar-item"> <a class={url.startsWith("/blog") && "current"} 
href="/blog/"> Blog </a> </div> </div> <div class="navbar-group"> <div class="navbar-item"> <a href="https://github.com/bikeshaving/crank">GitHub</a> </div> <div class="navbar-item"> <a href="http://npm.im/@bikeshaving/crank">NPM</a> </div> </div> </nav> ); } interface SidebarProps { docs: Array<DocInfo>; url: string; title: string; } function Sidebar({docs, title, url}: SidebarProps): Element { const links: Array<Element> = []; for (const doc of docs) { if (doc.attributes.publish) { links.push( <div class="sidebar-item"> <a href={doc.url} class={doc.url === url ? "current" : ""}> {doc.attributes.title} </a> </div>, ); } } return ( <div id="sidebar" class="sidebar"> <h3>{title}</h3> {links} </div> ); } function Home(): Element { // TODO: Move home content to a document. return ( <Root title="Crank.js" url="/"> <div class="home"> <header class="hero"> <div> <h1>Crank.js</h1> <h2> Write JSX-driven components with functions, promises and generators. </h2> <a href="/guides/getting-started">Get Started</a> </div> </header> <h2>Features</h2> <main class="features"> <div class="feature"> <h3>Declarative</h3> <p> Crank uses the same JSX syntax and diffing algorithm popularized by React, allowing you to write HTML-like code directly in JavaScript. </p> </div> <div class="feature"> <h3>Just Functions</h3> <p> All components in Crank are just functions or generator functions. No classes, hooks, proxies or template languages are needed. </p> </div> <div class="feature"> <h3>Promise-friendly</h3> <p> Crank provides first-class support for promises. You can define components as async functions and race renderings to display fallback UIs. </p> </div> <div class="feature"> <h3>Lightweight</h3> <p> Crank has no dependencies, and its core is a single file. It currently measures at{" "} <a href="https://bundlephobia.com/result?p=@bikeshaving/crank"> 5.1kB minified and gzipped </a> . </p> </div> <div class="feature"> <h3>Performant</h3> <p> <a href="https://github.com/krausest/js-framework-benchmark"> According to benchmarks </a> , Crank beats React in terms of speed and memory usage, and is currently comparable to Preact or Vue. </p> </div> <div class="feature"> <h3>Extensible</h3> <p> The core renderer can be extended to target alternative environments such as WebGL libraries, terminals, smartphones or smart TVs. 
</p> </div> </main> </div> </Root> ); } interface BlogContentProps { title: string; publishDate?: Date; children: Children; } function BlogContent({title, publishDate, children}: BlogContentProps) { const formattedDate = publishDate && publishDate.toLocaleString("en-US", { month: "long", year: "numeric", day: "numeric", timeZone: "UTC", }); return ( <Fragment> <h1>{title}</h1> {formattedDate && <p>{formattedDate}</p>} {children} </Fragment> ); } interface BlogPreviewProps { docs: Array<DocInfo>; } function BlogPreview({docs}: BlogPreviewProps): Array<Element> { return docs.map((doc) => { let {body} = doc; if (body.match("<!-- endpreview -->")) { body = body.split("<!-- endpreview -->")[0]; } else { const lines = body.split(/\r\n|\r|\n/); body = ""; let count = 0; for (const line of lines) { body += line + "\n"; if (line.trim()) { count++; } if (count > 2) { break; } } } const {title, publishDate} = doc.attributes; const Body = createComponent(body); return ( <div class="content"> <BlogContent title={title} publishDate={publishDate}> <Body components={components} /> </BlogContent> <div> <a href={doc.url}>Read more…</a> </div> </div> ); }); } interface BlogIndexPageProps { docs: Array<DocInfo>; url: string; } function BlogIndexPage({docs, url}: BlogIndexPageProps): Element { return ( <Root title="Crank.js | Blog" url={url}> <Sidebar docs={docs} url={url} title="Recent Posts" /> <main class="main"> <BlogPreview docs={docs} /> </main> </Root> ); } interface BlogPageProps { title: string; url: string; publishDate?: Date; docs: Array<DocInfo>; children: Children; } function BlogPage({ title, docs, children, publishDate, url, }: BlogPageProps): Element { return ( <Root title={`Crank.js | ${title}`} url={url}> <Sidebar docs={docs} url={url} title="Recent Posts" /> <main class="main"> <div class="content"> <BlogContent title={title} publishDate={publishDate}> {children} </BlogContent> </div> </main> </Root> ); } interface GuidePageProps { title: string; url: string; docs: Array<DocInfo>; children: Children; } function GuidePage({title, docs, url, children}: GuidePageProps): Element { return ( <Root title={`Crank.js | ${title}`} url={url}> <Sidebar docs={docs} url={url} title="Guides" /> <main class="main"> <div class="content"> <h1>{title}</h1> {children} </div> </main> </Root> ); } const components = { codespan({token}: any) { return <code class="inline">{token.text}</code>; }, code({token}: any) { const {text: code, lang} = token; return ( <div class="codeblock" data-code={code} data-lang={lang}> <CodeBlock value={code} lang={lang} /> </div> ); }, }; (async () => { const dist = path.join(rootDirname, "./dist"); await fs.ensureDir(dist); await fs.emptyDir(dist); await fs.copy(path.join(rootDirname, "static"), path.join(dist, "static")); // HOME await fs.writeFile( path.join(dist, "index.html"), await renderer.render(<Home />), ); // GUIDES { const docs = await collectDocuments( path.join(rootDirname, "documents/guides"), ); await Promise.all( docs.map(async ({attributes: {title, publish}, url, body}) => { if (!publish) { return; } const Body = createComponent(body); const filename = path.join(dist, url + ".html"); await fs.ensureDir(path.dirname(filename)); return fs.writeFile( filename, await renderer.render( <GuidePage title={title} docs={docs} url={url}> <Body components={components} /> </GuidePage>, ), ); }), ); } // BLOG { const posts = await collectDocuments( path.join(rootDirname, "documents/blog"), ); posts.reverse(); await fs.ensureDir(path.join(dist, "blog")); await fs.writeFile( 
path.join(dist, "blog/index.html"), await renderer.render(<BlogIndexPage docs={posts} url="/blog" />), ); await Promise.all( posts.map( async ({attributes: {title, publish, publishDate}, url, body}) => { if (!publish) { return; } const Body = createComponent(body); const filename = path.join(dist, url + ".html"); await fs.ensureDir(path.dirname(filename)); return fs.writeFile( filename, await renderer.render( <BlogPage title={title} docs={posts} url={url} publishDate={publishDate} > <Body components={components} /> </BlogPage>, ), ); }, ), ); } await storage.write(path.join(dist, "static/")); storage.clear(); })();
/** * HQLCoreQueryProcessorConfigUi * UI for configuring the HQL implementation of CQL against * the caCORE SDK v3.1 * * @author David Ervin * * @created Apr 23, 2007 3:49:15 PM * @version $Id: HQLCoreQueryProcessorConfigUi.java,v 1.3 2007/04/25 14:29:41 dervin Exp $ */ public class HQLCoreQueryProcessorConfigUi extends CQLQueryProcessorConfigUI { public static final String APPLICATION_SERVICE_URL = "appserviceUrl"; public static final String CASE_INSENSITIVE_QUERYING = "queryCaseInsensitive"; public static final String USE_CSM_FLAG = "useCsmSecurity"; public static final String CSM_CONTEXT_NAME = "csmContextName"; private JLabel urlLabel = null; private JTextField urlTextField = null; private JCheckBox caseInsensitiveCheckBox = null; private JCheckBox useCsmCheckBox = null; private JLabel csmContextLabel = null; private JTextField csmContextTextField = null; private JButton copyUrlButton = null; private JPanel optionsPanel = null; public HQLCoreQueryProcessorConfigUi() { super(); initialize(); } private void initialize() { // set up the interface layout GridBagConstraints gridBagConstraints21 = new GridBagConstraints(); gridBagConstraints21.gridx = 0; gridBagConstraints21.gridwidth = 3; gridBagConstraints21.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints21.gridy = 0; GridBagConstraints gridBagConstraints11 = new GridBagConstraints(); gridBagConstraints11.gridx = 2; gridBagConstraints11.insets = new Insets(2, 2, 2, 2); gridBagConstraints11.gridy = 2; GridBagConstraints gridBagConstraints3 = new GridBagConstraints(); gridBagConstraints3.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints3.gridy = 2; gridBagConstraints3.weightx = 1.0; gridBagConstraints3.insets = new Insets(2, 2, 2, 2); gridBagConstraints3.gridx = 1; GridBagConstraints gridBagConstraints2 = new GridBagConstraints(); gridBagConstraints2.gridx = 0; gridBagConstraints2.insets = new Insets(2, 2, 2, 2); gridBagConstraints2.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints2.gridy = 2; GridBagConstraints gridBagConstraints1 = new GridBagConstraints(); gridBagConstraints1.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints1.gridy = 1; gridBagConstraints1.weightx = 1.0; gridBagConstraints1.insets = new Insets(2, 2, 2, 2); gridBagConstraints1.gridwidth = 2; gridBagConstraints1.gridx = 1; GridBagConstraints gridBagConstraints = new GridBagConstraints(); gridBagConstraints.gridx = 0; gridBagConstraints.insets = new Insets(2, 2, 2, 2); gridBagConstraints.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints.gridy = 1; this.setLayout(new GridBagLayout()); this.setSize(new Dimension(399, 95)); this.add(getUrlLabel(), gridBagConstraints); this.add(getUrlTextField(), gridBagConstraints1); this.add(getCsmContextLabel(), gridBagConstraints2); this.add(getCsmContextTextField(), gridBagConstraints3); this.add(getCopyUrlButton(), gridBagConstraints11); this.add(getOptionsPanel(), gridBagConstraints21); } /** * This method initializes urlLabel */ private JLabel getUrlLabel() { if (urlLabel == null) { urlLabel = new JLabel(); urlLabel.setText("Remote Service URL:"); urlLabel.setSize(new Dimension(85, 27)); } return urlLabel; } /** * This method initializes urlTextField * * @return javax.swing.JTextField */ private JTextField getUrlTextField() { if (urlTextField == null) { urlTextField = new JTextField(); } return urlTextField; } /** * This method initializes caseInsensitiveCheckBox * * @return javax.swing.JCheckBox */ private JCheckBox getCaseInsensitiveCheckBox() { if (caseInsensitiveCheckBox == null) { 
caseInsensitiveCheckBox = new JCheckBox(); caseInsensitiveCheckBox.setText("Case Insensitive Queries"); } return caseInsensitiveCheckBox; } /** * This method initializes useCsmCheckBox * * @return javax.swing.JCheckBox */ private JCheckBox getUseCsmCheckBox() { if (useCsmCheckBox == null) { useCsmCheckBox = new JCheckBox(); useCsmCheckBox.setText("Use CSM Security"); useCsmCheckBox.addItemListener(new ItemListener() { public void itemStateChanged(ItemEvent e) { setCsmConfigEnabled(useCsmCheckBox.isSelected()); } }); } return useCsmCheckBox; } /** * This method initializes csmContextLabel * * @return javax.swing.JLabel */ private JLabel getCsmContextLabel() { if (csmContextLabel == null) { csmContextLabel = new JLabel(); csmContextLabel.setText("CSM Context Name:"); } return csmContextLabel; } /** * This method initializes csmContextTextField * * @return javax.swing.JTextField */ private JTextField getCsmContextTextField() { if (csmContextTextField == null) { csmContextTextField = new JTextField(); } return csmContextTextField; } /** * This method initializes copyUrlButton * * @return javax.swing.JButton */ private JButton getCopyUrlButton() { if (copyUrlButton == null) { copyUrlButton = new JButton(); copyUrlButton.setText("Copy App URL"); copyUrlButton.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent e) { String url = getUrlTextField().getText(); getCsmContextTextField().setText(url); } }); } return copyUrlButton; } /** * This method initializes optionsPanel * * @return javax.swing.JPanel */ private JPanel getOptionsPanel() { if (optionsPanel == null) { GridBagConstraints gridBagConstraints5 = new GridBagConstraints(); gridBagConstraints5.gridx = 1; gridBagConstraints5.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints5.insets = new Insets(2, 2, 2, 2); gridBagConstraints5.gridy = 0; GridBagConstraints gridBagConstraints4 = new GridBagConstraints(); gridBagConstraints4.gridx = 0; gridBagConstraints4.insets = new Insets(2, 2, 2, 2); gridBagConstraints4.fill = GridBagConstraints.HORIZONTAL; gridBagConstraints4.gridy = 0; optionsPanel = new JPanel(); optionsPanel.setLayout(new GridBagLayout()); optionsPanel.add(getCaseInsensitiveCheckBox(), gridBagConstraints4); optionsPanel.add(getUseCsmCheckBox(), gridBagConstraints5); } return optionsPanel; } private void setCsmConfigEnabled(boolean enable) { getCsmContextLabel().setEnabled(enable); getCsmContextTextField().setEnabled(enable); if (!enable) { getCsmContextTextField().setText(""); } getCopyUrlButton().setEnabled(enable); } public void setUpUi(File serviceDir, Properties cqlProcessorProperties) { String serviceUrl = cqlProcessorProperties.getProperty( HQLCoreQueryProcessor.APPLICATION_SERVICE_URL); getUrlTextField().setText(serviceUrl); String caseInsensitiveValue = cqlProcessorProperties.getProperty( HQLCoreQueryProcessor.CASE_INSENSITIVE_QUERYING); if (caseInsensitiveValue != null) { getCaseInsensitiveCheckBox().setSelected( Boolean.valueOf(caseInsensitiveValue).booleanValue()); } String useCsmValue = cqlProcessorProperties.getProperty( HQLCoreQueryProcessor.USE_CSM_FLAG); if (useCsmValue != null) { boolean csmSelected = Boolean.valueOf(useCsmValue).booleanValue(); getUseCsmCheckBox().setSelected(csmSelected); setCsmConfigEnabled(csmSelected); } String csmContext = cqlProcessorProperties.getProperty( HQLCoreQueryProcessor.CSM_CONTEXT_NAME); getCsmContextTextField().setText(csmContext); } public Properties getConfiguredProperties() { Properties props = new Properties(); 
props.setProperty(HQLCoreQueryProcessor.APPLICATION_SERVICE_URL, getUrlTextField().getText()); props.setProperty(HQLCoreQueryProcessor.CASE_INSENSITIVE_QUERYING, String.valueOf(getCaseInsensitiveCheckBox().isSelected())); props.setProperty(HQLCoreQueryProcessor.USE_CSM_FLAG, String.valueOf(getUseCsmCheckBox().isSelected())); props.setProperty(HQLCoreQueryProcessor.CSM_CONTEXT_NAME, getCsmContextTextField().getText()); return props; } public Dimension getPreferredSize() { return new Dimension(450, 120); } }
import { Field, ObjectType } from '@nestjs/graphql'; import { UserProfileOutput } from '../../users/dto/user-profile.output'; import { ChatOutput } from './chat.output'; @ObjectType() export class MessagesOutput { @Field() chatId: string; @Field(() => ChatOutput) chat: ChatOutput; @Field() authorId: string; @Field(() => UserProfileOutput) author: UserProfileOutput; @Field() message: string; @Field() createdAt: Date; }
// KeyGenerator comes from the surrounding framework; its import is not shown here.
import java.net.InetAddress;
import java.security.SecureRandom;

/**
 * The implementation of UUID key generator
 * based on the algorithm from Floyd Marinescu's EJB Design Patterns.
 *
 * @author <a href="mailto:[email protected]">Alex Loubyansky</a>
 *
 * @version $Revision: 57209 $
 */
public class UUIDKeyGenerator implements KeyGenerator {
   // Attributes ---------------------------------------------------

   /** secure random to provide nonrepeating seed */
   SecureRandom seeder;

   /** cached middle value */
   private String midValue;

   // Constructor --------------------------------------------------
   public UUIDKeyGenerator() throws Exception {
      // cache the middle part for UUID
      StringBuffer buffer = new StringBuffer(16);

      // construct host part of the uuid (8 hex digits)
      byte[] addr = InetAddress.getLocalHost().getAddress();
      buffer.append(toHex(toInt(addr), 8));

      // append the hash code for this object (8 hex digits)
      buffer.append(toHex(System.identityHashCode(this), 8));

      // set up midValue
      midValue = buffer.toString();

      // load up the randomizer; the throwaway nextInt() forces eager self-seeding
      seeder = new SecureRandom();
      seeder.nextInt();
   }

   // KeyGenerator implementation ----------------------------------
   public Object generateKey() {
      StringBuffer buffer = new StringBuffer(32);

      // append current time as unsigned int value
      buffer.append(toHex((int) (System.currentTimeMillis() & 0xFFFFFFFF), 8));

      // append cached midValue
      buffer.append(midValue);

      // append the next random int
      buffer.append(toHex(seeder.nextInt(), 8));

      // return the result
      return buffer.toString();
   }

   // Private ------------------------------------------------------

   /**
    * Converts int value to string hex representation
    */
   private String toHex(int value, int length) {
      // hex digits
      char[] hexDigits = {
         '0', '1', '2', '3', '4', '5', '6', '7',
         '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'
      };

      StringBuffer buffer = new StringBuffer(length);
      int shift = (length - 1) << 2;
      int i = -1;
      while (++i < length) {
         buffer.append(hexDigits[(value >> shift) & 0x0000000F]);
         value <<= 4;
      }
      return buffer.toString();
   }

   /**
    * Constructs int value from byte array
    */
   private static int toInt(byte[] bytes) {
      int value = 0;
      int i = -1;
      while (++i < bytes.length) {
         value <<= 8;
         int b = bytes[i] & 0xff;
         value |= b;
      }
      return value;
   }
}
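A quick hypothetical driver (not part of the original class) makes the 32-hex-digit layout visible — 8 digits of time, 8 of host address, 8 of identity hash, 8 of randomness:

public class UUIDKeyGeneratorDemo {
    public static void main(String[] args) throws Exception {
        UUIDKeyGenerator gen = new UUIDKeyGenerator();
        for (int i = 0; i < 3; i++) {
            // Keys printed in quick succession share the middle 16 digits and
            // usually the leading time digits; only the random tail is sure to differ.
            System.out.println(gen.generateKey());
        }
    }
}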
/** Small helper used to register all base paths in the symbol solver. * * @param solver The symbol solver to add the file to * @param file The base file/src dir */ private void addFolderSymbolSolvers(CombinedTypeSolver solver, File file) { if (file != null) { if (file.isDirectory()) { boolean isEmpty = true; for (File f: file.listFiles()) { if (f.isDirectory()) { Shotput.logger.info("File Path {}", f.getAbsolutePath()); solver.add(new JavaParserTypeSolver(f)); isEmpty = false; } } if (isEmpty) { solver.add(new JavaParserTypeSolver(file)); } } } }
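For context, a helper like this is typically combined with a reflection-based solver before being handed to the parser. A minimal sketch, written as if it sat beside the helper above; configureParser and srcRoot are hypothetical, while CombinedTypeSolver, ReflectionTypeSolver, JavaSymbolSolver and StaticJavaParser are real JavaParser symbol-solver classes (the java.io.File import is assumed from the surrounding class):

import com.github.javaparser.StaticJavaParser;
import com.github.javaparser.symbolsolver.JavaSymbolSolver;
import com.github.javaparser.symbolsolver.resolution.typesolvers.CombinedTypeSolver;
import com.github.javaparser.symbolsolver.resolution.typesolvers.ReflectionTypeSolver;

private void configureParser(File srcRoot) {
    // Resolve JDK/classpath types via reflection, project types via their source dirs.
    CombinedTypeSolver solver = new CombinedTypeSolver(new ReflectionTypeSolver());
    addFolderSymbolSolvers(solver, srcRoot); // the helper above
    StaticJavaParser.getConfiguration().setSymbolResolver(new JavaSymbolSolver(solver));
}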
package com.base.structure.tree;

import java.util.Comparator;

/**
 * Java implementation of a binary search tree
 *
 * @author 洪飞
 * @date 2020/6/8
 */
public class BinarySearchTree<E> extends BinaryTree<E> {
    protected Comparator<E> comparator;

    public BinarySearchTree() {
        this(null);
    }

    public BinarySearchTree(Comparator<E> comparator) {
        this.comparator = comparator;
    }

    @Override
    public void add(E element) {
        elementNotNullCheck(element);

        // adding the first node
        if (root == null) {
            root = createNode(element, null);
            size++;

            // post-processing after adding a new node
            afterAdd(root);
            return;
        }

        // not the first node: find the parent node
        Node<E> parent = root;
        Node<E> node = root;
        int cmp = 0;
        do {
            cmp = compare(element, node.element);
            parent = node;
            if (cmp > 0) {
                node = node.right;
            } else if (cmp < 0) {
                node = node.left;
            } else { // equal
                node.element = element;
                return;
            }
        } while (node != null);

        // decide which side of the parent to insert on
        Node<E> newNode = createNode(element, parent);
        if (cmp > 0) {
            parent.right = newNode;
        } else {
            parent.left = newNode;
        }
        size++;

        // post-processing after adding a new node
        afterAdd(newNode);
    }

    @Override
    public void remove(E element) {
        remove(node(element));
    }

    private void remove(Node<E> node) {
        if (node == null) return;
        size--;
        if (node.hasTwoChildren()) {
            // The node being removed has degree 2: copy the value of its predecessor
            // or successor into it, then remove that predecessor/successor instead.
            // Here we use the successor.
            Node<E> successor = successor(node);
            // overwrite the degree-2 node's value with the successor's value
            node.element = successor.element;
            // remove the successor using the logic below
            node = successor;
        }

        // remove node (its degree is necessarily 1 or 0)
        Node<E> replacement = node.left != null ? node.left : node.right;
        if (replacement != null) {
            // node has degree 1
            // update parent
            replacement.parent = node.parent;
            // update the parent's left/right pointer
            if (node.parent == null) { // node has degree 1 and is the root
                root = replacement;
            } else if (node == node.parent.left) {
                node.parent.left = replacement;
            } else {
                node.parent.right = replacement;
            }

            // post-processing after removal
            afterRemove(replacement);
        } else if (node.parent == null) { // node is a leaf and the root
            root = null;

            // post-processing after removal
            afterRemove(node);
        } else { // node is a leaf but not the root
            if (node == node.parent.left) {
                node.parent.left = null;
            } else {
                node.parent.right = null;
            }

            // post-processing after removal
            afterRemove(node);
        }
    }

    /**
     * Rebalancing hook invoked after a node is added
     *
     * @param node the newly added node
     */
    protected void afterAdd(Node<E> node) {
    }

    /**
     * Rebalancing hook invoked after a node is removed
     *
     * @param node the removed node, or the child node that replaced it
     *             (when the removed node had degree 1)
     */
    protected void afterRemove(Node<E> node) {
    }

    @Override
    public boolean contains(E element) {
        return node(element) != null;
    }

    private Node<E> node(E element) {
        Node<E> node = root;
        while (node != null) {
            int cmp = compare(element, node.element);
            if (cmp == 0) return node;
            if (cmp > 0) {
                node = node.right;
            } else {
                node = node.left;
            }
        }
        return null;
    }

    /**
     * @return 0 if e1 equals e2; greater than 0 if e1 is greater than e2;
     *         less than 0 if e1 is less than e2
     */
    private int compare(E e1, E e2) {
        if (comparator != null) {
            return comparator.compare(e1, e2);
        }
        return ((Comparable<E>) e1).compareTo(e2);
    }
}
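A small hypothetical usage example (Node, createNode, successor, elementNotNullCheck, root and size come from the BinaryTree base class, which is not shown here):

public class BinarySearchTreeDemo {
    public static void main(String[] args) {
        BinarySearchTree<Integer> bst = new BinarySearchTree<>(Integer::compare);
        for (int v : new int[] {7, 4, 9, 2, 5}) {
            bst.add(v);
        }
        System.out.println(bst.contains(5)); // true
        bst.remove(4); // degree-2 node: its successor 5 takes its place
        System.out.println(bst.contains(4)); // false
        System.out.println(bst.contains(5)); // still true
    }
}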
FLINT TWP, MI -- A member of Herman's Hermits is adding a footnote to one of the Flint area's most sensational -- and disputed -- legends. Barry Whitwam, drummer with the Hermits, told the Western Morning News in a story posted Wednesday, July 29, that Keith Moon, former drummer for The Who, did smash out two front teeth while staying in Flint in 1967. Moon himself and others have claimed he drove a car into the pool of a Flint Township Holiday Inn as part of a drunken birthday celebration while the bands were in Flint for an Atwood Stadium concert. Moon detailed driving a Lincoln Continental into the pool in an interview with Rolling Stone Magazine in 1972. But the late drummer's version of the story has been contested by music journalists and other musicians. Whitwam told the News that Moon, who was celebrating his birthday, ended up having to have emergency dental work in Flint as a result of his antics -- but not because of a car crash. "We were at The Holiday Inn in Flint, Michigan, and we had around 200 cakes in the dining room sent from fans, but we couldn't eat them as we didn't know what was in them," Whitwam said in the Western Morning News. The cakes ended up as part of a food fight, the story says, covering the walls, ceiling and carpet inside the hotel, and friends tried to get Moon, who was wearing no pants, out of the hotel without being arrested. "Keith pulled up his pants, apologized to the officer and (we) pushed Keith out of the fire exit. He then tripped over the pavement and smashed out his two front teeth. He spent the rest of the night in the emergency dental hospital," the story says. Moon told Rolling Stone he stripped to his underwear after a food fight at the hotel and "jumped into the first car I came to, which was a brand-new Lincoln Continental." "It was parked on a slight hill, and when I took the handbrake off, it started to roll and it smashed straight through this pool surround (fence), and the whole Lincoln Continental went into the Holiday Inn swimming pool, with me in it," Moon said in the story.
//*In theory* this should also be compatible with tank drive.
@TeleOp(name = "WestCoastDrive", group = "TeleOp")
public class WestCoastDrive extends LinearOpMode {
    /* Declare OpMode members. */
    HardwareConfig robot = new HardwareConfig(); // Configures hardware

    @Override
    public void runOpMode() throws InterruptedException {
        robot.init(hardwareMap);

        // Send telemetry message to signify robot waiting;
        // without the update() call the message would never be transmitted.
        telemetry.addData("Say", "Shock drone going live!");
        telemetry.update();

        waitForStart();

        telemetry.addData("Say", "ASSUMING DIRECT CONTROL");
        telemetry.update();

        while (opModeIsActive()) {
            // tank drive code: each stick drives one side of the drivetrain
            robot.motorFR.setPower(gamepad1.right_stick_y);
            robot.motorFL.setPower(gamepad1.left_stick_y);
            robot.motorRR.setPower(gamepad1.right_stick_y);
            robot.motorRL.setPower(gamepad1.left_stick_y);
            idle();
        }
    }
}
// Copyright 2021 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package constraintutil import ( "go/build/constraint" "testing" ) func TestFileParsing(t *testing.T) { for _, test := range []struct { name string data string expr string }{ { name: "Empty", }, { name: "NoConstraint", data: "// copyright header\n\npackage main", }, { name: "ConstraintOnFirstLine", data: "//go:build amd64\n#include \"textflag.h\"", expr: "amd64", }, { name: "ConstraintAfterSlashSlashComment", data: "// copyright header\n\n//go:build linux\n\npackage newlib", expr: "linux", }, { name: "ConstraintAfterSlashStarComment", data: "/*\ncopyright header\n*/\n\n//go:build !race\n\npackage oldlib", expr: "!race", }, { name: "ConstraintInSlashSlashComment", data: "// blah blah //go:build windows", }, { name: "ConstraintInSlashStarComment", data: "/*\n//go:build windows\n*/", }, { name: "ConstraintAfterPackageClause", data: "package oops\n//go:build race", }, { name: "ConstraintAfterCppInclude", data: "#include \"textflag.h\"\n//go:build arm64", }, } { t.Run(test.name, func(t *testing.T) { e, err := FromString(test.data) if err != nil { t.Fatalf("FromString(%q) failed: %v", test.data, err) } if e == nil { if len(test.expr) != 0 { t.Errorf("FromString(%q): got no constraint, wanted %q", test.data, test.expr) } } else { got := e.String() if len(test.expr) == 0 { t.Errorf("FromString(%q): got %q, wanted no constraint", test.data, got) } else if got != test.expr { t.Errorf("FromString(%q): got %q, wanted %q", test.data, got, test.expr) } } }) } } func TestCombine(t *testing.T) { for _, test := range []struct { name string in []string out string }{ { name: "0", }, { name: "1", in: []string{"amd64 || arm64"}, out: "amd64 || arm64", }, { name: "2", in: []string{"amd64", "amd64 && linux"}, out: "amd64 && amd64 && linux", }, { name: "3", in: []string{"amd64", "amd64 || arm64", "amd64 || riscv64"}, out: "amd64 && (amd64 || arm64) && (amd64 || riscv64)", }, } { t.Run(test.name, func(t *testing.T) { inexprs := make([]constraint.Expr, 0, len(test.in)) for _, estr := range test.in { line := "//go:build " + estr e, err := constraint.Parse(line) if err != nil { t.Fatalf("constraint.Parse(%q) failed: %v", line, err) } inexprs = append(inexprs, e) } outexpr := Combine(inexprs) if outexpr == nil { if len(test.out) != 0 { t.Errorf("Combine(%v): got no constraint, wanted %q", test.in, test.out) } } else { got := outexpr.String() if len(test.out) == 0 { t.Errorf("Combine(%v): got %q, wanted no constraint", test.in, got) } else if got != test.out { t.Errorf("Combine(%v): got %q, wanted %q", test.in, got, test.out) } } }) } }
import java.util.*;

public class Solution {
    // arr holds the cumulative time to solve the first i+1 problems (problem i
    // takes 5*(i+1) minutes); return how many problems fit into k minutes.
    static int bs(int[] arr, int n, int k) {
        if (k >= arr[n - 1])
            return n;
        int strt = 0, end = n - 1;
        while (strt <= end) {
            int mid = strt + ((end - strt) / 2);
            if (k == arr[mid])
                return mid + 1;
            else if (k < arr[mid])
                end = mid - 1;
            else
                strt = mid + 1;
        }
        return end + 1;
    }

    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        int n = sc.nextInt();
        int k = sc.nextInt();
        int t = 240 - k; // minutes left before the contest ends
        int[] arr = new int[n];
        arr[0] = 5 * 1;
        for (int i = 1; i < n; i++) {
            arr[i] = arr[i - 1] + (5 * (i + 1));
        }
        System.out.println(bs(arr, n, t));
    }
}
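Since arr[i] is the arithmetic series 5·(1+2+…+(i+1)) = 5·(i+1)·(i+2)/2, the binary search can be cross-checked against a closed form. A sketch under that assumption — floating-point rounding near exact boundaries would need care in production code:

// Largest m with 5*m*(m+1)/2 <= t, capped at the number of problems n.
// From m^2 + m - 2t/5 <= 0 it follows that m <= (sqrt(1 + 1.6*t) - 1) / 2.
static int closedForm(int n, int t) {
    int m = (int) Math.floor((Math.sqrt(1.0 + 1.6 * t) - 1.0) / 2.0);
    return Math.min(n, Math.max(0, m));
}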
package org.opensha.sha.earthquake.faultSysSolution.ruptures.util; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import org.opensha.sha.earthquake.faultSysSolution.ruptures.FaultSubsectionCluster; import org.opensha.sha.faultSurface.FaultSection; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; /** * Unique rupture as defined only by the set of subsection IDs included (regardless of order) * * @author kevin * */ public class UniqueRupture { private final SectIDRange[] ranges; private int size; public static UniqueRupture forIDs(Collection<Integer> sectIDs) { return new Builder().add(sectIDs).build(); } public static UniqueRupture forSects(Collection<? extends FaultSection> sects) { List<Integer> ids = new ArrayList<>(sects.size()); for (FaultSection sect : sects) ids.add(sect.getSectionId()); return new Builder().add(ids).build(); } public static UniqueRupture forClusters(FaultSubsectionCluster... clusters) { Builder builder = new Builder(); for (FaultSubsectionCluster cluster : clusters) for (SectIDRange range : cluster.unique.ranges) builder.add(range); return builder.build(); } public static UniqueRupture add(UniqueRupture... uniques) { Builder builder = new Builder(); for (UniqueRupture unique : uniques) for (SectIDRange range : unique.ranges) builder.add(range); return builder.build(); } public static Builder builder() { return new Builder(); } public static class Builder { private List<SectIDRange> list; private int size; private Builder() { list = new ArrayList<>(); size = 0; } private int insertionIndex(SectIDRange range) { int index = Collections.binarySearch(list, range); if (index < 0) index = -(index + 1); return index; } private boolean contiguous(List<Integer> ids, boolean increasing) { if (increasing) { int prev = ids.get(0); for (int i=1; i<ids.size(); i++) { int cur = ids.get(i); if (cur != prev+1) return false; prev = cur; } return true; } else { int prev = ids.get(0); for (int i=1; i<ids.size(); i++) { int cur = ids.get(i); if (cur != prev-1) return false; prev = cur; } return true; } } public Builder add(Collection<Integer> ids) { if (ids.size() == 1) { int id = ids.iterator().next(); add(SectIDRange.build(id, id)); return this; } if (ids instanceof List<?>) { // look for special case of contiguous List<Integer> list = (List<Integer>)ids; int first = list.get(0); int lastIndex = list.size()-1; int last = list.get(lastIndex); if (last == first+lastIndex && contiguous(list, true)) { // increasing and contiguous add(SectIDRange.build(first, last)); return this; } if (first == last+lastIndex && contiguous(list, false)) { // decreasing and contiguous add(SectIDRange.build(last, first)); return this; } } // Preconditions.checkState(size == 0, // TODO I don't think this is the case. check? 
// "buildFrom can only be called when empty, have %s already", size); int rangeStartID = Integer.MIN_VALUE; int rangeEndID = -2; boolean backwards = false; for (int id : ids) { if (id == rangeEndID+1 && !backwards) { // next in a forwards series rangeEndID++; } else if (id == rangeEndID-1 && backwards) { // 3rd+ in a backwards series rangeEndID--; } else if (id == rangeStartID-1 && rangeStartID == rangeEndID) { // 2nd in a backwards series backwards = true; rangeEndID--; } else { // it's a break in the range if (rangeStartID != Integer.MIN_VALUE) { if (backwards) add(SectIDRange.build(rangeEndID, rangeStartID)); else add(SectIDRange.build(rangeStartID, rangeEndID)); } rangeStartID = id; rangeEndID = id; backwards = false; } } if (rangeStartID != Integer.MIN_VALUE) { if (backwards) add(SectIDRange.build(rangeEndID, rangeStartID)); else add(SectIDRange.build(rangeStartID, rangeEndID)); } Preconditions.checkState(ids.size() == size, "Size mismatch, duplicates? Expected %s, have %s", ids.size(), size); return this; } public Builder add(SectIDRange range) { if (list.isEmpty()) { list.add(range); size += range.size(); return this; } int index = insertionIndex(range); int sizeAdd = range.size(); if (index > 0) { SectIDRange before = list.get(index-1); Preconditions.checkState(range.getStartID() > before.getEndID(), "Overlappping ID ranges detected: %s %s", before, range); if (range.getStartID() == before.getEndID()+1) { // combine them list.remove(index-1); index--; range = SectIDRange.build(before.getStartID(), range.getEndID()); } } if (index < list.size()) { SectIDRange after = list.get(index); Preconditions.checkState(range.getEndID() < after.getStartID(), "Overlappping ID ranges detected: %s %s", range, after); if (range.getEndID() == after.getStartID()-1) { // combine them list.remove(index); range = SectIDRange.build(range.getStartID(), after.getEndID()); } } list.add(index, range); size += sizeAdd; return this; } public UniqueRupture build() { return new UniqueRupture(list.toArray(new SectIDRange[list.size()]), size); } } private UniqueRupture(SectIDRange[] ranges, int size) { this.ranges = ranges; this.size = size; } public int size() { return size; } private static final Comparator<SectIDRange> containsCompare = new Comparator<SectIDRange>() { @Override public int compare(SectIDRange o1, SectIDRange o2) { if (o1.size() == 1 && o2.contains(o1.getStartID()) || o2.size() == 1 && o1.contains(o2.getStartID())) return 0; return Integer.compare(o1.getStartID(), o2.getStartID()); } }; public boolean contains(int id) { if (ranges.length == 0) return false; if (ranges.length == 1) return ranges[0].contains(id); int index = Arrays.binarySearch(ranges, SectIDRange.build(id, id), containsCompare); return index >= 0; } private int hashCode = -1; @Override public int hashCode() { if (hashCode == -1) { synchronized (this) { if (hashCode == -1) { final int prime = 31; int result = 1; for (SectIDRange range : ranges) result = prime * result + (range == null ? 
0 : range.hashCode()); hashCode = result; } } } return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; UniqueRupture other = (UniqueRupture) obj; if (size != other.size) return false; if (ranges == null) { if (other.ranges != null) return false; } else if (!Arrays.equals(ranges, other.ranges)) return false; return true; } /** * @return immutable view of the list of ID ranges */ public SectIDRange[] getRanges() { return Arrays.copyOf(ranges, ranges.length); } @Override public String toString() { StringBuilder str = new StringBuilder(); str.append("UniqueRupture(size="+size+"): "); for (int i=0; i<ranges.length; i++) { if (i > 0) str.append(","); str.append(ranges[i]); } return str.toString(); } }
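A minimal usage sketch of the class above; the section IDs are hypothetical, and the exact printed range format depends on SectIDRange.toString():

// Hypothetical usage sketch for UniqueRupture.
// IDs 3,4,5 and 9,8 coalesce into two ranges, [3..5] and [8..9], regardless of input order.
List<Integer> ids = Arrays.asList(3, 4, 5, 9, 8);
UniqueRupture rup = UniqueRupture.forIDs(ids);
System.out.println(rup.size());      // 5
System.out.println(rup.contains(4)); // true
System.out.println(rup.contains(7)); // false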
Effect of oral administration of lysozyme or digested bacterial cell walls on immunostimulation in guinea pigs The effect of oral administration of bacteriolytic enzymes and enzymatically digested bacterial cell walls on immunostimulation in guinea pigs was studied. Guinea pigs were given lysozyme or pronase or both orally for a period of 8 days, and on day 7 they were primed with hepatitis B surface antigen. Circulating antibody titers to the antigen in the enzyme-treated group were significantly higher (four- to sixfold; P < 0.05) than those in nontreated control groups on day 16 after immunization. Stimulation of cellular immunity in the group receiving both lysozyme and pronase simultaneously was significantly increased compared with the groups receiving only one of them. The humoral immune response was also enhanced by oral administration of enzymatically digested cell walls isolated from Bifidobacterium longum. These results suggested that intestinal bacteria may be solubilized by oral administration of bacteriolytic enzymes and that the absorbable peptidoglycan fragments released from the bacterial cell walls may be responsible for the enhanced host immune responses.
#include "bindings.h" #pragma once #if defined(ON_PYTHON_COMPILE) void init3dmSettingsBindings(pybind11::module& m); #else void init3dmSettingsBindings(void* m); #endif class BND_ConstructionPlane { ON_3dmConstructionPlane m_cplane; public: BND_ConstructionPlane() = default; BND_Plane GetPlane() const { return BND_Plane::FromOnPlane(m_cplane.m_plane); } void SetPlane(const BND_Plane& plane) { m_cplane.m_plane = plane.ToOnPlane(); } double GetGridSpacing() const { return m_cplane.m_grid_spacing; } void SetGridSpacing(double s) { m_cplane.m_grid_spacing = s; } double GetSnapSpacing() const { return m_cplane.m_snap_spacing; } void SetSnapSpacing(double s) { m_cplane.m_snap_spacing = s; } int GetGridLineCount() const { return m_cplane.m_grid_line_count; } void SetGridLineCount(int c) { m_cplane.m_grid_line_count = c; } int GetThickLineFrequency() const { return m_cplane.m_grid_thick_frequency; } void SetThickLineFrequency(int i) { m_cplane.m_grid_thick_frequency = i; } bool IsDepthBuffered() const { return m_cplane.m_bDepthBuffer; } void SetDepthBuffered(bool b) { m_cplane.m_bDepthBuffer = b; } std::wstring GetName() const { return std::wstring(m_cplane.m_name); } void SetName(std::wstring s) { m_cplane.m_name = s.c_str(); } }; class BND_ViewInfo { public: ON_3dmView m_view; public: BND_ViewInfo() = default; std::wstring GetName() const { return std::wstring(m_view.m_name.Array()); } void SetName(std::wstring s) { m_view.m_name = s.c_str(); } std::wstring GetWallpaperFilename() const { return std::wstring(m_view.m_wallpaper_image.m_image_file_reference.FullPathAsPointer()); } bool ShowWallpaperInGrayScale() const { return m_view.m_wallpaper_image.m_bGrayScale; } void SetShowWallpaperInGrayScale(bool b) { m_view.m_wallpaper_image.m_bGrayScale = b; } bool WallpaperHidden() const { return m_view.m_wallpaper_image.m_bHidden; } void SetWallpaperHidden(bool b) { m_view.m_wallpaper_image.m_bHidden = b; } double GetFocalBlurDistance() const { return m_view.FocalBlurDistance(); } void SetFocalBlurDistance(double d) { m_view.SetFocalBlurDistance(d); } double GetFocalBlurAperture() const { return m_view.FocalBlurAperture(); } void SetFocalBlurAperture(double d) { m_view.SetFocalBlurAperture(d); } double GetFocalBlurJitter() const { return m_view.FocalBlurJitter(); } void SetFocalBlurJitter(double d) { m_view.SetFocalBlurJitter(d); } unsigned int GetFocalBlurSampleCount() const { return m_view.FocalBlurSampleCount(); } void SetFocalBlurSampleCount(unsigned int i) { m_view.SetFocalBlurSampleCount(i); } //public ViewInfoFocalBlurModes FocalBlurMode class BND_Viewport* GetViewport() const; void SetViewport(const class BND_Viewport& viewport); }; class BND_RenderSettings : public BND_CommonObject { std::shared_ptr<ONX_Model> m_model; ON_3dmRenderSettings* m_render_settings = nullptr; protected: void SetTrackedPointer(ON_3dmRenderSettings* renderSettings, const ON_ModelComponentReference* compref); public: BND_RenderSettings(std::shared_ptr<ONX_Model> m); BND_RenderSettings(); BND_RenderSettings(const BND_RenderSettings& other); BND_RenderSettings(ON_3dmRenderSettings* renderSettings, const ON_ModelComponentReference* compref); ~BND_RenderSettings(); BND_Color GetAmbientLight() const { return ON_Color_to_Binding(m_render_settings->m_ambient_light); } void SetAmbientLight(const BND_Color& color) { m_render_settings->m_ambient_light = Binding_to_ON_Color(color); } BND_Color GetBackgroundColorTop() const { return ON_Color_to_Binding(m_render_settings->m_background_color); } void
SetBackgroundColorTop(const BND_Color& color) { m_render_settings->m_background_color = Binding_to_ON_Color(color); } BND_Color GetBackgroundColorBottom() const { return ON_Color_to_Binding(m_render_settings->m_background_bottom_color); } void SetBackgroundColorBottom(const BND_Color& color) { m_render_settings->m_background_bottom_color = Binding_to_ON_Color(color); } bool GetUseHiddenLights() const { return m_render_settings->m_bUseHiddenLights; } void SetUseHiddenLights(bool b) { m_render_settings->m_bUseHiddenLights = b; } bool GetDepthCue() const { return m_render_settings->m_bDepthCue; } void SetDepthCue(bool b) { m_render_settings->m_bDepthCue = b; } bool GetFlatShade() const { return m_render_settings->m_bFlatShade; } void SetFlatShade(bool b) { m_render_settings->m_bFlatShade = b; } bool GetRenderBackFaces() const { return m_render_settings->m_bRenderBackfaces; } void SetRenderBackFaces(bool b) { m_render_settings->m_bRenderBackfaces = b; } bool GetRenderPoints() const { return m_render_settings->m_bRenderPoints; } void SetRenderPoints(bool b) { m_render_settings->m_bRenderPoints = b; } bool GetRenderCurves() const { return m_render_settings->m_bRenderCurves; } void SetRenderCurves(bool b) { m_render_settings->m_bRenderCurves = b; } bool GetRenderIsoParams() const { return m_render_settings->m_bRenderIsoparams; } void SetRenderIsoParams(bool b) { m_render_settings->m_bRenderIsoparams = b; } bool GetRenderMeshEdges() const { return m_render_settings->m_bRenderMeshEdges; } void SetRenderMeshEdges(bool b) { m_render_settings->m_bRenderMeshEdges = b; } bool GetRenderAnnotations() const { return m_render_settings->m_bRenderAnnotation; } void SetRenderAnnotations(bool b) { m_render_settings->m_bRenderAnnotation = b; } //AntialiasLevel bool GetUseViewportSize() const { return !m_render_settings->m_bCustomImageSize; } void SetUseViewportSize(bool b) { m_render_settings->m_bCustomImageSize = !b; } bool GetScaleBackgroundToFit() const { return m_render_settings->ScaleBackgroundToFit(); } void SetScaleBackgroundToFit(bool b) { m_render_settings->SetScaleBackgroundToFit(b); } bool GetTransparentBackground() const { return m_render_settings->m_bTransparentBackground; } void SetTransparentBackground(bool b) { m_render_settings->m_bTransparentBackground = b; } // ImageUnitSystem double GetImageDpi() const { return m_render_settings->m_image_dpi; } void SetImageDpi(double d) { m_render_settings->m_image_dpi = d; } // ImageSize int GetShadowMapLevel() const { return m_render_settings->m_shadowmap_style; } void SetShadowMapLevel(int i) { m_render_settings->m_shadowmap_style = i; } // BackgroundStyle std::wstring GetNamedView() const { return std::wstring(m_render_settings->NamedView().Array()); } void SetNamedView(const std::wstring& s) { m_render_settings->SetNamedView(s.c_str()); } std::wstring GetSnapShot() const { return std::wstring(m_render_settings->Snapshot().Array()); } void SetSnapShot(const std::wstring& s) { m_render_settings->SetSnapshot(s.c_str()); } std::wstring GetSpecificViewport() const { return std::wstring(m_render_settings->SpecificViewport().Array()); } void SetSpecificViewport(const std::wstring& s) { m_render_settings->SetSpecificViewport(s.c_str()); } //RenderSource }; class BND_EarthAnchorPoint { public: ON_EarthAnchorPoint m_anchor_point; public: BND_EarthAnchorPoint() = default; double EarthBasepointLatitude() const { return m_anchor_point.Latitude(); } void SetEarthBasepointLatitude(double d) { m_anchor_point.SetLatitude(d); } double EarthBasepointLongitude() const {
return m_anchor_point.Longitude(); } void SetEarthBasepointLongitude(double d) { m_anchor_point.SetLongitude(d); } double EarthBasepointElevation() const { return m_anchor_point.ElevationInMeters(); } void SetEarthBasepointElevation(double d) { m_anchor_point.SetElevation(ON::LengthUnitSystem::Meters, d); } ON::EarthCoordinateSystem EarthBasepointElevationZero() const { return m_anchor_point.EarthCoordinateSystem(); } void SetEarthBasepointElevationZero(ON::EarthCoordinateSystem cs) { m_anchor_point.SetEarthCoordinateSystem(cs); } ON_3dPoint ModelBasePoint() const { return m_anchor_point.ModelPoint(); } void SetModelBasePoint(const ON_3dPoint& pt) { m_anchor_point.SetModelPoint(pt); } ON_3dVector ModelNorth() const { return m_anchor_point.ModelNorth(); } void SetModelNorth(const ON_3dVector& v) { m_anchor_point.SetModelNorth(v); } ON_3dVector ModelEast() const { return m_anchor_point.ModelEast(); } void SetModelEast(const ON_3dVector& v) { m_anchor_point.SetModelEast(v); } std::wstring Name() const { return std::wstring(m_anchor_point.m_name); } void SetName(const std::wstring& name) { m_anchor_point.m_name = name.c_str(); } std::wstring Description() const { return std::wstring(m_anchor_point.m_description); } void SetDescription(const std::wstring& desc) { m_anchor_point.m_description = desc.c_str(); } bool EarthLocationIsSet() const { return m_anchor_point.EarthLocationIsSet(); } BND_Plane GetModelCompass() const; BND_Transform GetModelToEarthTransform(ON::LengthUnitSystem modelUnitSystem) const; //BND_TUPLE GetEarthAnchorPlane() const; }; class BND_File3dmSettings { std::shared_ptr<ONX_Model> m_model; public: BND_File3dmSettings(std::shared_ptr<ONX_Model> m) { m_model = m; } std::wstring GetModelUrl() const { return std::wstring(m_model->m_settings.m_model_URL); } void SetModelUrl(const std::wstring& s) { m_model->m_settings.m_model_URL = s.c_str(); } ON_3dPoint GetModelBasePoint() const { return m_model->m_settings.m_model_basepoint; } void SetModelBasePoint(const ON_3dPoint& pt) { m_model->m_settings.m_model_basepoint = pt; } BND_EarthAnchorPoint GetEarthAnchorPoint() const; void SetEarthAnchorPoint(const BND_EarthAnchorPoint& anchorPoint); double GetModelAbsoluteTolerance() const { return m_model->m_settings.m_ModelUnitsAndTolerances.m_absolute_tolerance; } void SetModelAbsoluteTolerance(double t) { m_model->m_settings.m_ModelUnitsAndTolerances.m_absolute_tolerance = t; } double GetModelAngleToleranceRadians() const { return m_model->m_settings.m_ModelUnitsAndTolerances.m_angle_tolerance; } void SetModelAngleToleranceRadians(double t) { m_model->m_settings.m_ModelUnitsAndTolerances.m_angle_tolerance = t; } double GetModelAngleToleranceDegrees() const { return ON_DegreesFromRadians(m_model->m_settings.m_ModelUnitsAndTolerances.m_angle_tolerance); } void SetModelAngleToleranceDegrees(double t) { m_model->m_settings.m_ModelUnitsAndTolerances.m_angle_tolerance = ON_RadiansFromDegrees(t); } double GetModelRelativeTolerance() const { return m_model->m_settings.m_ModelUnitsAndTolerances.m_relative_tolerance; } void SetModelRelativeTolerance(double t) { m_model->m_settings.m_ModelUnitsAndTolerances.m_relative_tolerance = t; } double GetPageAbsoluteTolerance() const { return m_model->m_settings.m_PageUnitsAndTolerances.m_absolute_tolerance; } void SetPageAbsoluteTolerance(double t) { m_model->m_settings.m_PageUnitsAndTolerances.m_absolute_tolerance = t; } double GetPageAngleToleranceRadians() const { return m_model->m_settings.m_PageUnitsAndTolerances.m_angle_tolerance; } void 
SetPageAngleToleranceRadians(double t) { m_model->m_settings.m_PageUnitsAndTolerances.m_angle_tolerance = t; } double GetPageAngleToleranceDegrees() const { return ON_DegreesFromRadians(m_model->m_settings.m_PageUnitsAndTolerances.m_angle_tolerance); } void SetPageAngleToleranceDegrees(double t) { m_model->m_settings.m_PageUnitsAndTolerances.m_angle_tolerance = ON_RadiansFromDegrees(t); } double GetPageRelativeTolerance() const { return m_model->m_settings.m_PageUnitsAndTolerances.m_relative_tolerance; } void SetPageRelativeTolerance(double t) { m_model->m_settings.m_PageUnitsAndTolerances.m_relative_tolerance = t; } ON::LengthUnitSystem GetModelUnitSystem() const { return m_model->m_settings.m_ModelUnitsAndTolerances.m_unit_system.UnitSystem(); } void SetModelUnitSystem(ON::LengthUnitSystem us) { m_model->m_settings.m_ModelUnitsAndTolerances.m_unit_system.SetUnitSystem(us); } ON::LengthUnitSystem GetPageUnitSystem() const { return m_model->m_settings.m_PageUnitsAndTolerances.m_unit_system.UnitSystem(); } void SetPageUnitSystem(ON::LengthUnitSystem us) { m_model->m_settings.m_PageUnitsAndTolerances.m_unit_system.SetUnitSystem(us); } BND_RenderSettings GetRenderSettings() { return BND_RenderSettings(m_model); } };
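A minimal C++ usage sketch of the settings wrapper above, assuming an ONX_Model loaded elsewhere; the helper function name is hypothetical:

// Minimal usage sketch (hypothetical helper; assumes a model loaded elsewhere).
#include "bindings.h"
#include <memory>

void configure_tolerances(std::shared_ptr<ONX_Model> model) {
  BND_File3dmSettings settings(model);
  settings.SetModelUnitSystem(ON::LengthUnitSystem::Millimeters);
  settings.SetModelAbsoluteTolerance(0.001);
  // Degrees are converted to radians by the setter shown above.
  settings.SetModelAngleToleranceDegrees(1.0);
}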
/// Sets the horizontal text alignment.
pub fn alignment(mut self, alignment: Alignment) -> Self {
    self.style.alignment = alignment;
    self
}
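A hypothetical chaining sketch for the builder method above; the Paragraph type and its constructor are assumptions, not part of this snippet:

// Hypothetical usage sketch: the method consumes `self` and returns it, so calls chain.
let styled = Paragraph::new("hello")
    .alignment(Alignment::Center);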
package no.nav.vedtak.felles.integrasjon.ldap; import static org.assertj.core.api.Assertions.assertThat; import java.util.Collections; import java.util.List; import javax.naming.InvalidNameException; import javax.naming.LimitExceededException; import javax.naming.NamingEnumeration; import javax.naming.NamingException; import javax.naming.directory.BasicAttribute; import javax.naming.directory.BasicAttributes; import javax.naming.directory.SearchControls; import javax.naming.directory.SearchResult; import javax.naming.ldap.LdapContext; import javax.naming.ldap.LdapName; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.mockito.ArgumentMatchers; import org.mockito.Mockito; import no.nav.vedtak.exception.IntegrasjonException; import no.nav.vedtak.exception.TekniskException; public class LdapBrukeroppslagTest { @Rule public ExpectedException expectedException = ExpectedException.none(); LdapContext context = Mockito.mock(LdapContext.class); LdapName baseSearch = new LdapName("ou=ServiceAccounts,dc=test,dc=local"); public LdapBrukeroppslagTest() throws InvalidNameException { } @Test public void skal_liste_ut_brukernavn_når_det_er_i_resultatet() throws Exception { BasicAttributes attributes = new BasicAttributes(); attributes.put("displayName", "Lars Saksbehandler"); attributes.put("cn", "L999999"); attributes.put(new BasicAttribute("memberOf")); SearchResult resultat = new SearchResult("CN=L999999,OU=ApplAccounts", null, attributes); LdapBrukeroppslag ldap = new LdapBrukeroppslag(context, baseSearch); assertThat(ldap.getDisplayName(resultat)).isEqualTo("Lars Saksbehandler"); } @Test public void skal_liste_ut_gruppene_når_det_er_i_resultatet() throws Exception { BasicAttributes attributes = new BasicAttributes(); attributes.put("displayName", "<NAME>"); attributes.put("cn", "L999999"); BasicAttribute memberOf = new BasicAttribute("memberOf"); memberOf.add("CN=myGroup"); memberOf.add("OU=ourGroup"); attributes.put(memberOf); SearchResult resultat = new SearchResult("CN=L999999,OU=ApplAccounts", null, attributes); LdapBrukeroppslag ldap = new LdapBrukeroppslag(null, null); assertThat(ldap.getMemberOf(resultat)).contains("CN=myGroup", "OU=ourGroup"); } @Test public void skal_gi_exception_når_søket_gir_ingen_treff() throws Exception { expectedException.expect(IntegrasjonException.class); expectedException.expectMessage("F-418891:Fikk ingen treff på søk mot LDAP etter ident L999999"); SearchMock heleResultatet = new SearchMock(Collections.emptyList()); Mockito.when(context.search(ArgumentMatchers.eq(baseSearch), ArgumentMatchers.eq("(cn=L999999)"), ArgumentMatchers.any(SearchControls.class))).thenReturn(heleResultatet); LdapBrukeroppslag ldap = new LdapBrukeroppslag(context, baseSearch); ldap.hentBrukerinformasjon("L999999"); } @Test public void skal_gi_exception_når_søket_gir_to_treff() throws Exception { expectedException.expect(IntegrasjonException.class); expectedException.expectMessage("F-137440:Forventet ett unikt resultat på søk mot LDAP etter ident L999999, men fikk flere treff"); Mockito.when(context.search(ArgumentMatchers.eq(baseSearch), ArgumentMatchers.eq("(cn=L999999)"), ArgumentMatchers.any(SearchControls.class))).thenThrow(new LimitExceededException("This is a test")); LdapBrukeroppslag ldap = new LdapBrukeroppslag(context, baseSearch); ldap.hentBrukerinformasjon("L999999"); } @Test public void skal_gi_exception_når_svaret_mangler_forventet_attibutt() throws Exception { expectedException.expect(IntegrasjonException.class); 
expectedException.expectMessage("Resultat fra LDAP manglet påkrevet attributtnavn displayName"); BasicAttributes attributes = new BasicAttributes(); attributes.put("cn", "L999999"); SearchResult resultat = new SearchResult("CN=L999999,OU=ApplAccounts", null, attributes); SearchMock heleResultatet = new SearchMock(Collections.singletonList(resultat)); Mockito.when(context.search(ArgumentMatchers.eq(baseSearch), ArgumentMatchers.eq("(cn=L999999)"), ArgumentMatchers.any(SearchControls.class))).thenReturn(heleResultatet); LdapBrukeroppslag ldap = new LdapBrukeroppslag(context, baseSearch); ldap.hentBrukerinformasjon("L999999"); } @Test public void skal_gi_exception_når_det_søkes_med_spesialtegn() throws Exception { expectedException.expect(TekniskException.class); expectedException.expectMessage("F-271934:Mulig LDAP-injection forsøk. Søkte med ugyldig ident 'L999999) or (cn=A*'"); LdapBrukeroppslag ldap = new LdapBrukeroppslag(context, baseSearch); ldap.hentBrukerinformasjon("L999999) or (cn=A*"); } private static class SearchMock implements NamingEnumeration<SearchResult> { private int index = 0; private List<SearchResult> resultList; SearchMock(List<SearchResult> resultList) { this.resultList = resultList; } @Override public SearchResult next() throws NamingException { throw new IllegalArgumentException("Test---not implemented"); } @Override public boolean hasMore() throws NamingException { throw new IllegalArgumentException("Test---not implemented"); } @Override public void close() throws NamingException { } @Override public boolean hasMoreElements() { return index < resultList.size(); } @Override public SearchResult nextElement() { return resultList.get(index++); } } }
use libc::{
    CLONE_NEWNET,
    c_int,
};

use ::error::*;
use ::Child;

use super::Namespace;

/// Networking
///
/// The networking namespace encapsulates an entire network stack shared between
/// processes. Each physical network device lives in (usually) the global
/// networking namespace as does the networking stack that communicates with
/// them.
///
/// A set of processes can be placed in a separate networking namespace to
/// isolate them from networking or to provide some filtered access to the
/// global networking namespace (and external network) using virtual network
/// devices.
#[derive(Clone)]
pub struct Network {}

impl Network {
    /// Configure a new network namespace for creation.
    pub fn new() -> Network {
        Network {}
    }
}

impl Namespace for Network {
    fn clone_flag(&self) -> c_int {
        CLONE_NEWNET
    }
}
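A minimal usage sketch under the trait above; how the flag reaches clone(2) depends on the rest of this crate:

// Usage sketch: the namespace contributes CLONE_NEWNET to the flags for clone(2).
let net = Network::new();
assert_eq!(net.clone_flag(), libc::CLONE_NEWNET);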
def handle(self):
    # Cache the peer address and underlying socket; mark the session active.
    self.ip = self.client_address[0]
    self.port = self.client_address[1]
    self.connection = self.request
    self.logged_in = False
    self.active = True
    while self.active:
        received_data = ""
        try:
            # Read one chunk from the socket and decode it as JSON.
            received_string = self.connection.recv(4096)
            received_data = json.loads(received_string)
        except ValueError:
            # Malformed or partial JSON: skip this chunk and keep listening.
            continue
        # Hand the decoded message to the server's registered callback.
        self.server.callback(received_data)
#include<stdio.h>

/* Reads a number of days, then one (amount, price) pair per day.
   Each day's amount is bought at the cheapest price seen so far. */
int main()
{
    int day;
    int amount, money;
    int sum, min;
    int i;

    scanf("%d", &day);
    scanf("%d%d", &amount, &money);
    min = money;            /* cheapest price seen so far */
    sum = amount * min;
    for (i = 2; i <= day; i++)
    {
        scanf("%d%d", &amount, &money);
        if (min > money)
            min = money;
        sum += min * amount;
    }
    printf("%d\n", sum);
    return 0;
}
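A quick worked example of the minimum-price accumulation above, with hypothetical input: for 3 days with (amount, price) pairs (2, 5), (3, 4), (1, 6), the running minimum price is 5, then 4, then 4, so the total is 2*5 + 3*4 + 1*4 = 26, and the program prints 26.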
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "mojo/bindings/js/handle.h" #include "mojo/bindings/js/handle_close_observer.h" #include "mojo/public/cpp/system/core.h" #include "testing/gtest/include/gtest/gtest.h" namespace mojo { namespace js { class HandleWrapperTest : public testing::Test, public gin::HandleCloseObserver { public: HandleWrapperTest() : closes_observed_(0) {} virtual void OnWillCloseHandle() OVERRIDE { closes_observed_++; } protected: int closes_observed_; private: DISALLOW_COPY_AND_ASSIGN(HandleWrapperTest); }; class TestHandleWrapper : public gin::HandleWrapper { public: explicit TestHandleWrapper(MojoHandle handle) : HandleWrapper(handle) {} private: DISALLOW_COPY_AND_ASSIGN(TestHandleWrapper); }; // Test that calling Close() on a HandleWrapper for an invalid handle does not // notify observers. TEST_F(HandleWrapperTest, CloseWithInvalidHandle) { { TestHandleWrapper wrapper(MOJO_HANDLE_INVALID); wrapper.AddCloseObserver(this); ASSERT_EQ(0, closes_observed_); wrapper.Close(); EXPECT_EQ(0, closes_observed_); } EXPECT_EQ(0, closes_observed_); } // Test that destroying a HandleWrapper for an invalid handle does not notify // observers. TEST_F(HandleWrapperTest, DestroyWithInvalidHandle) { { TestHandleWrapper wrapper(MOJO_HANDLE_INVALID); wrapper.AddCloseObserver(this); ASSERT_EQ(0, closes_observed_); } EXPECT_EQ(0, closes_observed_); } // Test that calling Close on a HandleWrapper for a valid handle notifies // observers once. TEST_F(HandleWrapperTest, CloseWithValidHandle) { { mojo::MessagePipe pipe; TestHandleWrapper wrapper(pipe.handle0.release().value()); wrapper.AddCloseObserver(this); ASSERT_EQ(0, closes_observed_); wrapper.Close(); EXPECT_EQ(1, closes_observed_); // Check that calling close again doesn't notify observers. wrapper.Close(); EXPECT_EQ(1, closes_observed_); } // Check that destroying a closed HandleWrapper doesn't notify observers. EXPECT_EQ(1, closes_observed_); } // Test that destroying a HandleWrapper for a valid handle notifies observers. TEST_F(HandleWrapperTest, DestroyWithValidHandle) { { mojo::MessagePipe pipe; TestHandleWrapper wrapper(pipe.handle0.release().value()); wrapper.AddCloseObserver(this); ASSERT_EQ(0, closes_observed_); } EXPECT_EQ(1, closes_observed_); } } // namespace js } // namespace mojo
// Copyright 2016 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // The Story service is the context in which a story executes. It // starts modules and provides them with a handle to itself, so they // can start more modules. It also serves as the factory for Link // instances, which are used to share data between modules. #ifndef PERIDOT_BIN_STORY_RUNNER_STORY_CONTROLLER_IMPL_H_ #define PERIDOT_BIN_STORY_RUNNER_STORY_CONTROLLER_IMPL_H_ #include <map> #include <memory> #include <set> #include <string> #include <vector> #include "lib/app/fidl/application_controller.fidl.h" #include "lib/async/cpp/operation.h" #include "lib/component/fidl/component_context.fidl.h" #include "lib/fidl/cpp/bindings/binding.h" #include "lib/fidl/cpp/bindings/binding_set.h" #include "lib/fidl/cpp/bindings/interface_handle.h" #include "lib/fidl/cpp/bindings/interface_ptr.h" #include "lib/fidl/cpp/bindings/interface_ptr_set.h" #include "lib/fidl/cpp/bindings/interface_request.h" #include "lib/fidl/cpp/bindings/struct_ptr.h" #include "lib/fxl/macros.h" #include "lib/ledger/fidl/ledger.fidl.h" #include "lib/module/fidl/module.fidl.h" #include "lib/module/fidl/module_context.fidl.h" #include "lib/module/fidl/module_controller.fidl.h" #include "lib/module/fidl/module_data.fidl.h" #include "lib/story/fidl/create_link.fidl.h" #include "lib/story/fidl/per_device_story_info.fidl.h" #include "lib/story/fidl/story_controller.fidl.h" #include "lib/story/fidl/story_data.fidl.h" #include "lib/story/fidl/story_shell.fidl.h" #include "lib/surface/fidl/surface.fidl.h" #include "lib/ui/views/fidl/view_token.fidl.h" #include "lib/user_intelligence/fidl/user_intelligence_provider.fidl.h" #include "peridot/lib/fidl/app_client.h" #include "peridot/lib/fidl/context.h" #include "peridot/lib/fidl/scope.h" #include "peridot/lib/ledger_client/ledger_client.h" #include "peridot/lib/ledger_client/page_client.h" #include "peridot/lib/ledger_client/types.h" namespace modular { class ChainImpl; class LinkImpl; class ModuleControllerImpl; class ModuleContextImpl; class StoryProviderImpl; constexpr char kRootLink[] = "root"; constexpr char kRootModuleName[] = "root"; // HACK(mesch): The context topics that influence story importance is hardcoded // to a single one right now. This will be generalized, but we cannot simply // look at the whole context, because it's too big. constexpr char kStoryImportanceContext[] = "location/home_work"; // The story runner, which holds all the links and runs all the modules as well // as the story shell. It also implements the StoryController service to give // clients control over the story. class StoryControllerImpl : PageClient, StoryController, StoryContext { public: StoryControllerImpl(const fidl::String& story_id, LedgerClient* ledger_client, LedgerPageId story_page_id, StoryProviderImpl* story_provider_impl); ~StoryControllerImpl() override; // Called by StoryProviderImpl. void Connect(fidl::InterfaceRequest<StoryController> request); // Called by StoryProviderImpl. bool IsRunning(); // Called by StoryProviderImpl. // // A variant of Stop() that stops the story because the story is being // deleted. The StoryControllerImpl instance is deleted by StoryProviderImpl // and the story data are deleted from the ledger once the done callback is // invoked. // // No further operations invoked after this one are executed.
(The Operation // accomplishes this by not calling Done() and instead invoking its callback // directly from Run(), such that the OperationQueue stays blocked on it until // it gets deleted.) void StopForDelete(const std::function<void()>& done); // Called by StoryProviderImpl. void StopForTeardown(const std::function<void()>& done); // Called by StoryProviderImpl. void AddForCreate(const fidl::String& module_name, const fidl::String& module_url, const fidl::String& link_name, CreateLinkInfoPtr create_link_info, const std::function<void()>& done); // Called by StoryProviderImpl. StoryState GetStoryState() const; void Log(StoryContextLogPtr log_entry); void Sync(const std::function<void()>& done); void GetImportance(const ContextState& context_state, const std::function<void(float)>& result); // Called by ModuleControllerImpl and ModuleContextImpl. void FocusModule(const fidl::Array<fidl::String>& module_path); // Called by ModuleControllerImpl. void DefocusModule(const fidl::Array<fidl::String>& module_path); // Called by ModuleControllerImpl. void StopModule(const fidl::Array<fidl::String>& module_path, const std::function<void()>& done); // Called by ModuleControllerImpl. void OnModuleStateChange(const fidl::Array<fidl::String>& module_path, ModuleState state); // Called by ModuleControllerImpl. // // Releases ownership of |controller|, which deletes itself after return. void ReleaseModule(ModuleControllerImpl* module_controller_impl); // Called by ModuleContextImpl. const fidl::String& GetStoryId() const; // Called by ModuleContextImpl. void RequestStoryFocus(); // Called by ModuleContextImpl. void ConnectChainPath(fidl::Array<fidl::String> chain_path, fidl::InterfaceRequest<Chain> request); // Called by ModuleContextImpl. void ConnectLinkPath(LinkPathPtr link_path, fidl::InterfaceRequest<Link> request); // Called by ModuleContextImpl. void StartModule( const fidl::Array<fidl::String>& parent_module_path, const fidl::String& module_name, const fidl::String& module_url, const fidl::String& link_name, fidl::InterfaceRequest<app::ServiceProvider> incoming_services, fidl::InterfaceRequest<ModuleController> module_controller_request, fidl::InterfaceRequest<mozart::ViewOwner> view_owner_request, ModuleSource module_source); // Called by ModuleContextImpl and AddModule. void StartModuleInShell( const fidl::Array<fidl::String>& parent_module_path, const fidl::String& module_name, const fidl::String& module_url, const fidl::String& link_name, fidl::InterfaceRequest<app::ServiceProvider> incoming_services, fidl::InterfaceRequest<ModuleController> module_controller_request, SurfaceRelationPtr surface_relation, bool focus, ModuleSource module_source); // Called by ModuleContextImpl. Note this is always from an internal module // source. 
void EmbedModule( const fidl::Array<fidl::String>& parent_module_path, const fidl::String& module_name, const fidl::String& module_url, const fidl::String& link_name, fidl::InterfaceRequest<app::ServiceProvider> incoming_services, fidl::InterfaceRequest<ModuleController> module_controller_request, fidl::InterfaceHandle<EmbedModuleWatcher> embed_module_watcher, fidl::InterfaceRequest<mozart::ViewOwner> view_owner_request); private: class ModuleWatcherImpl; // |PageClient| void OnPageChange(const std::string& key, const std::string& value) override; // |StoryController| void GetInfo(const GetInfoCallback& callback) override; void SetInfoExtra(const fidl::String& name, const fidl::String& value, const SetInfoExtraCallback& callback) override; void Start(fidl::InterfaceRequest<mozart::ViewOwner> request) override; void Stop(const StopCallback& done) override; void Watch(fidl::InterfaceHandle<StoryWatcher> watcher) override; void AddModule(fidl::Array<fidl::String> module_path, const fidl::String& module_name, const fidl::String& module_url, const fidl::String& link_name, SurfaceRelationPtr surface_relation) override; void GetActiveModules(fidl::InterfaceHandle<StoryModulesWatcher> watcher, const GetActiveModulesCallback& callback) override; void GetModules(const GetModulesCallback& callback) override; void GetModuleController( fidl::Array<fidl::String> module_path, fidl::InterfaceRequest<ModuleController> request) override; void GetActiveLinks(fidl::InterfaceHandle<StoryLinksWatcher> watcher, const GetActiveLinksCallback& callback) override; void GetLink(fidl::Array<fidl::String> module_path, const fidl::String& name, fidl::InterfaceRequest<Link> request) override; // Phases of Start() broken out into separate methods. void StartStoryShell(fidl::InterfaceRequest<mozart::ViewOwner> request); // Misc internal helpers. void NotifyStateChange(); void DisposeLink(LinkImpl* link); void AddModuleWatcher(ModuleControllerPtr module_controller, const fidl::Array<fidl::String>& module_path); void OnRootStateChange(ModuleState state); void ProcessPendingViews(); static bool IsRootModule(const fidl::Array<fidl::String>& module_path); bool IsExternalModule(const fidl::Array<fidl::String>& module_path); // The ID of the story, its state and the context to obtain it from and // persist it to. const fidl::String story_id_; // This is the canonical source for state. The value in the ledger is just a // write-behind copy of this value. StoryState state_{StoryState::INITIAL}; // Story state is determined by external module state, but only until the // story gets stopped or deleted. This flag blocks processing of state // notifications from modules while the story winds down. bool track_root_module_state_{true}; StoryProviderImpl* const story_provider_impl_; LedgerClient* const ledger_client_; const LedgerPageId story_page_id_; // The scope in which the modules within this story run. Scope story_scope_; // Implements the primary service provided here: StoryController. fidl::BindingSet<StoryController> bindings_; // Watcher for various aspects of the story. fidl::InterfacePtrSet<StoryWatcher> watchers_; fidl::InterfacePtrSet<StoryModulesWatcher> modules_watchers_; fidl::InterfacePtrSet<StoryLinksWatcher> links_watchers_; // Everything for the story shell. Relationships between modules are conveyed // to the story shell using their instance IDs. 
std::unique_ptr<AppClient<Lifecycle>> story_shell_app_; StoryShellPtr story_shell_; fidl::Binding<StoryContext> story_context_binding_; // The module instances (identified by their serialized module paths) already // known to story shell. Does not include modules whose views are pending and // not yet sent to story shell. std::set<fidl::String> connected_views_; // Holds the view of a non-embedded running module (identified by its // serialized module path) until its parent is connected to story shell. Story // shell cannot display views whose parents are not yet displayed. std::map<fidl::String, std::pair<fidl::Array<fidl::String>, mozart::ViewOwnerPtr>> pending_views_; // The first ingredient of a story: Modules. For each Module in the Story, // there is one Connection to it. struct Connection { ModuleDataPtr module_data; EmbedModuleWatcherPtr embed_module_watcher; std::unique_ptr<ModuleContextImpl> module_context_impl; std::unique_ptr<ModuleControllerImpl> module_controller_impl; }; std::vector<Connection> connections_; // Finds the active connection for a module at the given module path. May // return nullptr if the module at the path is not running, regardless of // whether a module at that path is known to the story. Connection* FindConnection(const fidl::Array<fidl::String>& module_path); // Finds the active connection for the story shell anchor of a module with the // given connection. The anchor is the closest ancestor module of the given // module that is not embedded and actually known to the story shell. This // requires that it must be running, otherwise it cannot be connected to the // story shell. May return nullptr if the anchor module, or any intermediate // module, is not running, regardless of whether a module at such path is // known to the story. Connection* FindAnchor(Connection* connection); // Finds the connection of the closest embedder of a module at the given // module path. May return null if there is no module running that is // embedding the module at module_path. Connection* FindEmbedder(const fidl::Array<fidl::String>& module_path); // The magic ingredient of a story: Chains. They group Links. std::vector<std::unique_ptr<ChainImpl>> chains_; // The second ingredient of a story: Links. They connect Modules. std::vector<std::unique_ptr<LinkImpl>> links_; // A dummy service that allows applications that can run both as modules in a // story and standalone from the shell to determine whether they are in a // story. See story_marker.fidl for more details. class StoryMarkerImpl; std::unique_ptr<StoryMarkerImpl> story_marker_impl_; // A collection of services, scoped to this Story, for use by intelligent // Modules. maxwell::IntelligenceServicesPtr intelligence_services_; // Asynchronous operations are sequenced in a queue. OperationQueue operation_queue_; // Operations implemented here. class LaunchModuleCall; class KillModuleCall; class StartModuleCall; class StartModuleInShellCall; class AddModuleCall; class AddForCreateCall; class StopCall; class StopModuleCall; class DeleteCall; class ConnectLinkCall; class StartCall; class GetImportanceCall; class LedgerNotificationCall; class FocusCall; class DefocusCall; class BlockingModuleDataWriteCall; // A blocking module data write call blocks while waiting for some // notifications, which are received by the StoryControllerImpl instance. 
std::vector<std::pair<ModuleDataPtr, BlockingModuleDataWriteCall*>> blocked_operations_; FXL_DISALLOW_COPY_AND_ASSIGN(StoryControllerImpl); }; } // namespace modular #endif // PERIDOT_BIN_STORY_RUNNER_STORY_CONTROLLER_IMPL_H_
/* identify files being referenced in the file content; this is so we can
   harvest the metadata on these files as well */
fn find_paths(text: &str, already_seen: &mut Vec<String>) -> std::io::Result<()> {
    lazy_static! {
        static ref RE: Regex = Regex::new(r#"(?mix)(?:^|[\x20"':=!|])((?:/[\w.-]+)+)"#)
            .expect("Invalid Regex");
    }
    for c in RE.captures_iter(text) {
        let path = std::path::Path::new(&c[1]);
        process_file("FileContent", path, already_seen)?;
    }
    Ok(())
}
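A hypothetical test sketch for the path-matching regex above, not a test that exists in this crate; it exercises the same pattern in isolation:

#[cfg(test)]
mod find_paths_tests {
    use regex::Regex;

    // Mirrors the pattern in find_paths: an absolute path preceded by
    // start-of-line or one of the delimiter characters.
    #[test]
    fn captures_absolute_paths() {
        let re = Regex::new(r#"(?mix)(?:^|[\x20"':=!|])((?:/[\w.-]+)+)"#).unwrap();
        let text = r#"conf=/etc/app/config.toml log:"/var/log/app.log""#;
        let paths: Vec<&str> = re
            .captures_iter(text)
            .map(|c| c.get(1).unwrap().as_str())
            .collect();
        assert_eq!(paths, vec!["/etc/app/config.toml", "/var/log/app.log"]);
    }
}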
/** * Argument parser specific to import */ public static class ImportParser extends ExportImportParser { public static final String IMPORT_ALL_FLAG = "-import-all"; public static final String STATUS_FLAG = "-status"; private Boolean importAll = false; private String status = null; @Override protected boolean checkArg(String arg) { if (arg.equals(IMPORT_ALL_FLAG)) { importAll = true; return true; } if (arg.equals(STATUS_FLAG)) { status = nextArg(arg); return true; } return super.checkArg(arg); } public ImportParser(String[] args) { super(args); } @Override public void usage(String errorMsg) { exit(ExitCode.EXIT_USAGE, errorMsg, Import.COMMAND_NAME); } @Override protected void verifyArgs() { if (!importAll() && getTableNames() == null) { String errorMessage = "Missing flags: " + IMPORT_ALL_FLAG + " | " + TABLE_FLAG + ". Please use either one of the " + "flags to perform the import"; usage(errorMessage); } if (importAll() && getTableNames() != null) { String errorMessage = "Found flags: " + IMPORT_ALL_FLAG + " | " + TABLE_FLAG + ". Please use either one of the " + "flags to perform the import"; usage(errorMessage); } super.verifyArgs(); } public Boolean importAll() { return importAll; } public String getStatus() { return status; } public static String getImportAllUsage() { return IMPORT_ALL_FLAG; } public static String getStatusFlagUsage() { return STATUS_FLAG + " <status_file>"; } }
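A hypothetical invocation sketch for the parser above; how parsing is driven lives in the ExportImportParser base class, which is not shown here:

// Hypothetical usage sketch: construct the parser with CLI args and branch on the flags.
String[] args = {"-import-all", "-status", "/tmp/import.status"};
ImportParser parser = new ImportParser(args);
if (parser.importAll()) {
    // full import, optionally resuming from parser.getStatus()
}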
Dawid Andres and Hubert Kisinski had an audacious goal: they would bike the length of the Amazon River—from the headwaters in Peru, down the Andes, through hundreds of miles of rainforest, all the way to the Atlantic. And since much of the region lacks paved roads, they would do the vast majority of the trek on homemade, floating paddle-bikes—think mountain bikes outfitted with pontoons and a pedal-powered propeller. Pretty impressive goal for two Polish half-brothers who, by their own admission, “aren’t big bikers,” and had never been on a proper expedition before. For two years before the pair set off last September, Andres, 40, had been poring over maps and planning the expedition from his home in Arizona. He’d originally planned a journey through the Middle East, but stories of the “extremes” of the Amazon called to him. Once he felt he had a handle on the basics of the journey, the 32-year-old Kisinski, who lives in Poland, flew to Arizona to meet him. The two then flew to Peru and set off from Camana, on the Pacific. In March, after nearly six months and 5,000 miles, Andres and Kisinski finished their journey where the Amazon flows into the Atlantic, becoming the first people to “bike” the Amazon. In Poland, the men returned to a hero’s welcome. They were invited to discuss the journey in front of a crowd of several thousand at the country’s premier adventuring conference. Afterwards, to their total surprise, they were awarded the big prize of the evening for best “stunt achievement” in exploring. Outside caught up with the half brothers after the show to hear about the toughest parts of spending six months on the world’s second longest river.

Building the Bikes

Why bike down the Amazon? “Because it’s cheaper than kayak!” says Andres, laughing. He had originally planned to paddle the whole river, but transporting a boat to the Andes turned out to be very costly. They’d seen prototypes of bicycle-driven paddleboats on YouTube and decided they would make their own. How hard could it be? “Everything was wrong with it,” Andres says. “After 30 minutes, Hubert’s propeller just broke.” The journey, which started 16,811 feet above sea level at Lake Ticlla Cocha in Peru, quickly became a constant battle of machines versus nature. The Amazon, with its punishing currents, chest-high waves, and whirlpools, took a tremendous toll on the men’s vehicles. On the fly, they had to improve the buoyancy of the bikes, jury-rig drive shafts—which transmitted the pedal power to the paddles instead of chains—and create spare parts wherever they could. Andres said it took hundreds of miles of on-the-go repairs to get the bikes fully seaworthy. “If we had it like this at the beginning, we would be a month faster,” he says.

Pirates on the High Seas

The most dangerous part of the trip wasn’t even the river. Much of the Amazon is extremely remote, and piracy is a constant threat. Fifteen years ago, Sir Peter Blake, one of the world’s most famous sailors, was shot and killed by a gang of pirates on the river in Brazil. The Polish half brothers—who had already been robbed earlier on the journey by small-time gangsters in a semi-autonomous Peruvian region occupied by the Ashaninka tribe—were constantly on the lookout. (The robbers relieved them of the cash equivalent of about $100.) After spending Christmas with a Peruvian family that welcomed them into the fold, Andres and Kisinski were aided by the currents and making great time, covering 80 miles a day—fast, but not fast enough to escape river marauders.
First, the men were accosted on the water by a pair of scrawny, unarmed 20-somethings wearing Ray Ban sunglasses. They were menacing more than anything, and after sizing up the Poles’ bikes, decided there was nothing on board worth stealing. Shortly thereafter, the paddle-bikers encountered a greater threat in the form of a small boat motoring toward them at high speed. “There were five guys with gun and they were yelling, ‘Put your hands on the head!’” Andres says. “They started to ask us what we are doing. I’m on the bike in the waves with my hands behind my head. Guy is aiming a gun at me. Gun was huge. It was like [a gun] for the elephants.” The pirates asked if Andres and Kisinski were transporting drugs. Andres explained they were on a biking expedition down the river, a reply that drew laughs from the bandits. One dove into the river and checked their pontoons for storage compartments. It was, Andres said, extremely tense. “So I ask them, ‘Maybe you have a beer? Pirates, do you have a beer?’ And they said no they don’t have a beer! Then [the main pirate began] laughing,” Andres said. “That was our weapon. Conversation.” The pirates let them pass unharmed, with all their belongings.

Getting Lost

Andres and Kisinski made a smart decision early on: they enlisted the help of Piotr Chmielinski, probably Poland’s greatest Amazon explorer. Thirty years ago, he was part of the first expedition to successfully kayak the length of the Amazon, and today the expedition website Explorers Web calls him the godfather of Amazon exploration. In the years since, he’s mentored a generation of adventurers. When Andres asked him for help, Chmielinski was initially skeptical of the two amateurs. But when they called from the top of the Peruvian Andes, after biking up the base of Mount Mismi with all their equipment, he was on board. “They are so enthusiastic,” he says. “They don’t go for record. They didn’t want to beat anything. It was assumed they will be the first bike ride along the Amazon—but really they didn’t go for this.” Chmielinski agreed to guide them from his home in Washington, D.C., to help them with GPS navigation and communications with the outside world. For 4,300-some-odd miles, navigation was fairly simple: the paddle-bikers followed the river. But as they approached Belem, in the Amazon’s massive delta near the Atlantic coast, the river diverged into dozens of different channels, muddling their path. With Chmielinski’s support, the explorers hunted for overland roads through the delta. “Road,” it turned out, “was a giant misunderstanding,” Chmielinski says. The washed-out dirt paths through the heavy jungle made for slow going and the two men often had to get back in the water to cross sloughs. After tens of miles, Andres and Kisinski got a call on their satellite phone. It was Chmielinski. He’d been following their movements on GPS—they were going in circles. Eventually, the pair started following telephone poles, hoping they would lead to Belem. Their vision blurry from dehydration, the pair finally made it into town. From Belem, it was just a 120-mile bike ride along dirt roads and jungle brush to the Atlantic. Finally, on March 4, nearly six months after they left the Peruvian Andes, the pair dove into the sea. “We got the cries, a little. I’ve never been so happy,” Andres says. “We jump, we cry. Then we understood what we have done. For us, it was special.” CORRECTION: An earlier version of this story misspelled Hubert Kisinski's name.
Boing Boing's Holiday Gift Guide part one: Kids

Well, it's coming up to the holidays and I've started to make my list and fill it in. As a starting point, I went through all the books and DVDs and gadgets I'd reviewed on Boing Boing since last November and looked at what had been the best-sellers among BB's readership, figuring you folks have pretty good taste! As I was taking a walk down old review lane, I realized that many of you would probably be interested in seeing these lists too, so I've turned them into a series of blog-posts that I'll be sticking up, one per day, for the next week or so. Today I'm starting with kids' media and media about kids and child-rearing. Later this week, I'll do fiction, nonfiction, comics and graphic novels, CDs and DVDs and gadgets and everything else, one a day. Hope this helps you with your holiday shopping as much as it's helped me with mine!

Baby's First Mythos (C.J. Henderson)
Cthulhoid picture book
Original Boing Boing post

Invention of Hugo Cabret (Brian Selznick)
Award-winning steampunk graphic novel for kids
Original Boing Boing post

Good as Lily (Derek Kirk Kim)
Ass-kicking girl-positive graphic novel for young readers
Original Boing Boing post

The Plain Janes (Cecil Castellucci, Jim Rugg)
Funny, spirited little story about a gang of girls named Jane at a strait-laced high-school, rejected by the mainstream, and their art adventures.
Original Boing Boing post

Little Brother (Cory Doctorow)
My bestselling young adult novel about kids who hack for freedom
Original Boing Boing post

The Starry Rift (Jonathan Strahan)
Science fiction anthology for teens
Original Boing Boing post

St. Trinian's: The Entire Appalling Business (Ronald Searle)
Ronald Searle's original dark, weird and hilarious St Trinian's comics
Original Boing Boing post

The Adventures of Johnny Bunko: The Last Career Guide You'll Ever Need (Daniel H. Pink)
Optimistic and iconoclastic career guide in manga form
Original Boing Boing post

Alice in Wonderland Tattoos
Alice in Wonderland temporary tatts
Original Boing Boing post

Freakazoid - The Complete First Season
The best TV cartoon since the Max Fleischer era, on DVD
Original Boing Boing post

Boy Proof (Cecil Castellucci)
A compassionate young adult novel about a weird, smart, angry girl
Original Boing Boing post

Cycler (Lauren McLaughlin)
Smart YA novel about sex and sexuality
Original Boing Boing post

My Mother Wears Combat Boots: A Parenting Guide for the Rest of Us (Jessica Mills)
Kick-ass punk-parenting book
Original Boing Boing post

How to Ditch Your Fairy (Justine Larbalestier)
Hilarious kids book about the problems with fairies
Original Boing Boing post

Nation (Terry Pratchett)
Moving and sweet young adult novel about science, superstition and decency
Original Boing Boing post

ABC3D (Marion Bataille)
The best pop-up book in the world
Original Boing Boing post

The Baby Sleep Solution: A Proven Program to Teach Your Baby to Sleep Twelve Hours a Night (Suzy Giordano)
The best parenting book I've read
Original Boing Boing post

How Children Learn (John Holt)
Classic of human, kid-centered learning
Original Boing Boing post

The Graveyard Book (Neil Gaiman)
Spooky, magical retelling of The Jungle Book in a graveyard
Original Boing Boing post

How Children Fail (John Holt)
Angry lessons from failures to teach
Original Boing Boing post

20,000 Leagues Under the Sea: A Pop-Up Book (Sam Ita)
The paper kraken wakes
Original Boing Boing post

Alphabutt (Kimya Dawson)
Weird, jangly, hilarious awesome music for kids
Original Boing Boing post

Zoe's Tale (John Scalzi)
Scalzi's smart-ass young-adult sf thriller
Original Boing Boing post

Free to Be...You and Me (The 35th Anniversary Edition, Hardcover) (Marlo Thomas and Friends)
The book every kid needs
Original Boing Boing post
High-temperature oxidation of ceramic matrix composites dispersed with metallic particles Abstract Oxidation behavior of ceramic matrix composites dispersed with metallic particles is discussed to establish materials design for high-temperature applications. Oxidation kinetics of ceramic matrix composites dispersed with metallic particles is understood from the viewpoint of the diffusion properties and defect chemistry of matrix oxides. High-temperature oxidation of Ni(p)/partially stabilized zirconia, Ni(p)/Al2O3 and Ni(p)/MgO was described as examples. Introduction New high-temperature materials are required to increase operation temperature for increasing conversion efficiency of thermal cycles such as gas turbine engines. Hybridization of different materials is an important concept of material design to develop new functional materials and highperformance materials. Ceramic matrix composites dispersed with metallic particles (referred to as M(p)-CMCs) are candidates for high-temperature structure materials as functionally graded materials (FGMs) or nano-composites. Their mechanical and physical properties have been studied as well as their production processes . High-temperature oxidation/corrosion of M(p)-CMCs has, however, not been investigated in any details. High-temperature oxidation of thermal barrier coatings (TBCs) with a functionally graded structure has been reported briefly . There are also a few phenomenological studies on the oxidation behavior of oxide ceramics with metals or non-oxide ceramic dispersoid such as SiC or TiC . At high temperatures, oxygen can pass through oxide matrix, and metallic dispersions will be oxidized in the matrix. The metallic dispersoid expands due to oxidation and stresses the matrix. The matrix is cracked when the stress generated by the oxidation of the metal dispersoid reaches to the fracture strength. Finally, the composite is fractured. Thus, to design FGMs and nano-composites for high-temperature applications, systematic understanding of oxidation/corrosion behavior M(p)-CMCs is required for estimation of their lifetime. The author's group has studied oxidation behavior of M(p)-CMCs at high temperatures, from the point of view with respect to diffusion properties of ceramic matrix. In this paper, the oxidation mode of M(p)-CMCs depending on diffusion properties of matrix is discussed for materials design of M(p)-CMCs with superior oxidation resistance. Classification of oxidation behavior of ceramic matrix composites dispersed with metallic particles Oxidation of metallic dispersoid in oxide matrix is dominated by not only diffusivity of ions of ceramic matrix, but also oxidation properties of itself. Luthra et al. classified oxidation mode of oxide matrix composites dispersed with non-oxide dispersoid. A schematic illustration of the oxidation mode is shown in Fig. 1. Because engineering ceramics for high-temperature structure applications have high melting point and high Young's modulus, they have generally low diffusivities of component ions, except for oxide ions in zirconia and their related oxides. Almost M(p)-CMCs will show the mode II or III in the classification proposed by Luthra et al. In these modes, there are oxidized zones, which include oxidized metallic particles in the ceramic matrices. Oxidation behavior of real M(p)-CMCs is very complex because of the oxidation reaction between matrix and dispersoid and fracture of matrix due to the volume expansion of dispersoid by oxidation. 
Diffusion in oxides In order to discuss kinetics of oxidation of M(p)-CMCs at high temperatures, the steady-state diffusion through oxidized zone should be considered. Steady-state diffusion flux of substance i, J i , is obtained by the following equation: where D i , C i and m i are diffusion coefficient, concentration and chemical potential of substance i, respectively. Eq. (1) is well known as Fick's First law. Driving force for growth of oxidized zone is oxygen potential difference between the surrounding atmosphere and the oxidation front. In order to consider cation diffusion by using Eq. (1), chemical potential of M is derived from oxygen potential. Chemical potential of MO, m MO can be written by the following: Chemical potential of MO in the oxidized zone can be assumed to be constant during oxidation, hence Difference in chemical potential of oxygen gives difference in chemical potential of M and leads to cation diffusion during oxidation. Diffusion of ions in oxides occurs by migration of defects such as vacancies or interstitial ions. Defect concentration in oxide is a fundamental issue to discuss growth of oxidized zone of M(p)-CMCs. Because the ions are charged, the migration of ions will cause an electric field to be set up across the oxidized zone moving from the metallic particle to atmosphere. The net result of the migration of ions and electrons or electron-holes is therefore to be balanced for electrical neutrality condition. The defect with secondary fastest flux is the rate-controlling process on oxidation of metallic particles. In this paper, oxidation of MO matrix composite dispersed with metal A (A/MO) is discussed to understand oxidation of M(p)-CMCs. Here the oxide of A, A 2 O 3 , is dissolved into MO. Fig. 2 shows the schematic drawing of diffusion in oxidized zone of A/MO. In the inside of A/MO, the oxygen potential is lower than the equilibrium between A and A 2 O 3 . Difference in oxygen potential between the surrounding atmosphere and the inside is the driving force for growth of oxidized zone. Defect chemistry of oxide MO doped with A 2 O 3 should be considered as a function of oxygen partial pressure (P O 2 ). Defect chemistry of oxides can be expressed systematically by Brouwer diagram . Fig. 3 be expressed as follows: The equilibrium constant of Eq. (4), K i , is represented by: The Schottky disorder is written as follows then The structural oxide ceramics has usually higher K s than K i . Oxidized zone of A/MO consists of MO doped with a large amount of A 2 O 3 (½A $ M O K 1=2 s ). As shown in Fig. 3(b), the concentration of cation vacancy is increased as seen in the following equation: When P O 2 at the surface to the oxidation front (equilibrium between A and A 2 O 3 ) is located in the region in which ½A $ M Z 2½V 00 M is the electrical neutrality condition, growth of oxidized zone is rate-controlled by outward diffusion of M with the vacancy diffusion mechanism. In this case, electrons move mainly to establish electronic neutrality under lower P O 2 . With increasing P O 2 , electron concentration decreases and electron-holes will migrates mainly for the electric neutrality. Since electrons and electron holes have much higher mobility than ions, the rate-controlling process of growth of oxidized zone will be the fastest ionic diffusion process. Macroscopic defects in oxidized zone Difference in diffusing ions during oxidation leads to difference in macroscopic defects in oxidized zone such as voids and cracks. Fig. 
4 illustrates the macroscopic defects in oxidized zone of A/MO. Almost metals will expand by oxidation. The ratio of volume change by oxidation is called as Pilling-Bedwarth ratio . Inward diffusion of oxygen will give volume increase of dispersoid during oxidation. The matrix of oxidized zone will be fractured. On the other hand, outward diffusion of cations leads to develop new oxidized zone on the surface of the composite, as shown in Fig. 4(b). Volume change in the inside of oxidized zone will be negative. Voids will form in the inside of oxidized zone. Growth kinetics of oxidized zone Growth rate of oxidized zone of M(p)-CMCs with mode II or III can be estimated quantitatively from the diffusion flux of ions passing through oxidized zone. Oxidation kinetics of A/MO as shown in Fig. 2 is considered as an example. Diffusion flux in oxidized zone can be given by Eq. (1). Amount of metal A per area in dX can be obtained by where v f and v A are volume fraction and molar volume of metal A, respectively. Growth rate of oxidized zone, dX/dt, is given from the stoichiometric relationship of 3n A Z2n O in the oxidation reaction of metal A: When oxygen passing through the oxidized zone oxidizes metallic particles dispersed in the matrix and oxygen diffusion is the rate-controlling step, the growth rate can be expressed as follows: Integration of Eq. (11) over the depth of the oxidized zone, X, gives where subscripts of S and OF are surface and oxidation front, respectively. Finally, thickness of oxidized zone, X, at oxidation time, t, can be represented by: Growth of oxidized zone predominated by outward diffusion of cations can be expressed with the similar process as follows: These simple equations can estimate growth of oxidized zone of M(p)-CMCs. To estimate a parabolic constant for growth of oxidized zone, the product of diffusion coefficient-concentration is required as a function of oxygen partial pressure. In the vacancy diffusion cases, the following relation between diffusion of species i and its vacancy should be established from the flux balance: Since the vacancy concentration is generally quite low, C i and D V can be regarded to be constant. When diffusion coefficient, D i under a fixed P O 2 is obtained and dependence of on P O 2 is clarified as ½V i f P 1=n O 2 , diffusion coefficient of species i is given by: Thus, diffusion coefficient and defect chemistry are important for understanding oxidation properties of M(p)-CMCs at high temperatures. However, diffusion data and defect chemistry in engineering ceramics are not enough yet, in particular, on effects of impurities. Case studies Taking account of mechanical properties at high temperatures, Al 2 O 3 , MgO, zirconia and mullite are potential ceramics for matrix of M(p)-CMCs for hightemperature applications. Zirconia ceramics (stabilized zirconia) have been used for TBCs on metal parts such as gas turbine blades. Functional graded coatings are expected to reduce thermal expansion difference between TBCs and metal parts. Yoshiba reported internal oxidation of TBCs for turbine blades . Since this type of TBCs is usually porous, surrounding gas can pass into the inside of coating and oxidize the dispersed metallic particles. Nanko et al. studied high-temperature oxidation of dense Ni-dispersed partially stabilized zirconia (PSZ). Ni is a base-metal for heat-resistant alloys. Fig. 5 shows the cross-section of Ni(p)/ zirconia ceramics oxidized at 700 8C for 11 h in air. There is a cracked zone in the depth of 200 mm. 
Case studies

Taking account of mechanical properties at high temperatures, Al2O3, MgO, zirconia and mullite are potential matrix ceramics for M(p)-CMCs in high-temperature applications. Zirconia ceramics (stabilized zirconia) have been used for TBCs on metal parts such as gas turbine blades, and functionally graded coatings are expected to reduce the thermal expansion mismatch between TBCs and metal parts. Yoshiba reported internal oxidation of TBCs for turbine blades. Since this type of TBC is usually porous, the surrounding gas can penetrate into the coating and oxidize the dispersed metallic particles. Nanko et al. studied high-temperature oxidation of dense Ni-dispersed partially stabilized zirconia (PSZ); Ni is a base metal for heat-resistant alloys. Fig. 5 shows the cross-section of Ni(p)/zirconia ceramics oxidized at 700 °C for 11 h in air. There is a cracked zone about 200 µm deep. The cracked zone grew in proportion to oxidation time, so the oxidation of Ni(p)/PSZ differs from the diffusion-controlled model described by Eqs. (13) and (14). Fig. 6 schematically illustrates the kinetic mechanism of high-temperature oxidation of Ni(p)/PSZ. Because PSZ is an oxide-ion conductor, oxygen diffuses through the zirconia matrix to the dispersed Ni particles at the beginning of oxidation. The Ni particles are oxidized, and the matrix is then fractured by their volume expansion. The surrounding gas is supplied through the crack network in the cracked zone, and Ni particles below the cracks are again oxidized by inward diffusion of oxide ions through the zirconia. This model gives a constant growth rate for the cracked zone, obtained by dividing the average distance between Ni particles by the period required to complete the crack network between them.

In the case of Ni(p)/Al2O3, voids were formed without any cracks in the oxidized zone. Fig. 7 shows the cross-section of Ni(p)/Al2O3 oxidized at 1300 °C in air. A NiAl2O4 layer is observed on the surface; this product layer is further evidence of outward diffusion of cations. Growth of the oxidized zone obeyed the parabolic law. The diffusion properties of Al2O3 ceramics are very complex and are dominated by grain-boundary diffusion; oxide ions and cations diffuse at comparable rates in polycrystalline Al2O3. Wang et al. reported a phenomenological study on high-temperature oxidation of Ni(p)/Al2O3. Nanko et al. reported high-temperature oxidation of nano-Ni(p)/Al2O3; because of its finer Al2O3 matrix, nano-Ni(p)/Al2O3 oxidizes faster than macro-Ni(p)/Al2O3. High-temperature oxidation of Ni/MgO also followed the parabolic law. In Ni/MgO, outward diffusion of cations is the rate-controlling process and results in the formation of voids in the oxidized zone.

Summary

The oxidation behavior of ceramic matrix composites dispersed with metallic particles can be understood from the diffusion properties and defect chemistry of the matrix oxides. High-temperature oxidation of Ni(p)/PSZ, Ni(p)/Al2O3 and Ni(p)/MgO was described as examples. In order to establish the oxidation resistance of M(p)-CMCs, the diffusion properties and defect chemistry of engineering ceramics should be studied further, in particular the influence of impurity doping.
from mopro.database import (
    database,
    initialize_database,
    CorsikaSettings,
    CorsikaRun,
    Status,
)

initialize_database()

runs_per_bin = 20
delta_zd = 5
delta_az = 10

corsika_settings = (
    CorsikaSettings
    .select(CorsikaSettings.id)
    .where(CorsikaSettings.name == 'epos_urqmd_iact_lapalma_winter')
    .where(CorsikaSettings.version == 76900)
)

options = dict(
    primary_particle=14,
    n_showers=10000,
    walltime=2880,
    energy_min=100,
    energy_max=200e3,
    spectral_index=-2.7,
    max_radius=500,
    viewcone=0,
    reuse=20,
    corsika_settings=corsika_settings,
    status=Status.select(Status.id).where(Status.name == 'created'),
)


def generator():
    for i in range(runs_per_bin):
        for min_zd in range(0, 30, delta_zd):
            for min_az in range(0, 360, delta_az):
                yield dict(
                    zenith_min=min_zd,
                    zenith_max=min_zd + delta_zd,
                    azimuth_min=min_az,
                    azimuth_max=min_az + delta_az,
                    **options,
                )


with database.connection_context():
    print(CorsikaRun.insert_many(
        generator(),
    ).execute())
/// <reference path="../../test.ts/test.ts/assert.ts" />
/// <reference path="../../test.ts/test.ts/report.ts" />
/// <reference path="../respond.ts/respond.ts" />
/// <reference path="../../test.ts/test.ts/test.ts" />

class MyClass {
    value: any;

    @property altValue: number;

    @sender senderNumber(input: number): string {
        return input.toString();
    }

    @sender senderString(input: string): number {
        return Number(input);
    }

    @sender senderArray(input: number[]): number[] {
        return input;
    }

    @receiver receiverString(input: string) {
        this.value = input;
    }

    @receiver receiverNumber(input: number) {
        this.value = input;
    }

    @receiver receiverAny(input: any) {
        this.value = input;
    }
}

class RespondTests extends Test.Case {
    target: MyClass;

    before(): void {
        this.target = new MyClass();
        Assert.that(this.target.value).is.undefined();
    }

    @test Connect() {
        Respond.to.sender(this.target.senderNumber).with.receiver(this.target.receiverString);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.exact.to('1');
    }

    @test ConnectFunction() {
        var value, func = function (val) { value = val; };
        Respond.to.sender(this.target.senderNumber).with.function(func);
        this.target.senderNumber(1);
        Assert.that(value).is.exact.to('1');
    }

    @test ConnectProperty() {
        Respond.to.property(this.target.altValue).with.receiver(this.target.receiverNumber);
        Assert.that(this.target.value).is.undefined();
        this.target.altValue = 1;
        Assert.that(this.target.value).is.exact.to(1);
        Respond.to.sender(this.target.senderString).with.property(this.target.altValue);
        this.target.senderString('2');
        Assert.that(this.target.value).is.exact.to(2);
    }

    @test As() {
        Respond.to.sender(this.target.senderString).as(item => item.toString()).with.receiver(this.target.receiverString);
        this.target.senderString('1');
        Assert.that(this.target.value).is.exact.to('1');
    }

    @test Count() {
        Respond.to.sender(this.target.senderString).count().with.receiver(this.target.receiverNumber);
        this.target.senderString('1');
        Assert.that(this.target.value).is.exact.to(1);
        this.target.senderString('1');
        Assert.that(this.target.value).is.exact.to(2);
        this.target.senderString('2');
        Assert.that(this.target.value).is.exact.to(3);
    }

    @test Delay() {
        Respond.to.sender(this.target.senderNumber).delay.for(4).with.receiver(this.target.receiverString);
        var start = performance.now();
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.undefined();
    }

    @test Flatten() {
        Respond.to.sender(this.target.senderArray).flatten().with.receiver(this.target.receiverNumber);
        this.target.senderArray([1, 2, 3]);
        Assert.that(this.target.value).is.exact.to(3);
    }

    @test GroupBy() {
        Respond.to.sender(this.target.senderString).group.by(value => value < 3).with.receiver(this.target.receiverAny);
        this.target.senderString('1');
        this.target.senderString('2');
        Assert.that(this.target.value).is.undefined();
        this.target.senderString('3');
        Assert.that(this.target.value.key).is.exact.to(true);
        Assert.that(this.target.value.values.length).is.exact.to(2);
        Assert.that(this.target.value.values[0]).is.exact.to(1);
        Assert.that(this.target.value.values[1]).is.exact.to(2);
    }

    @test GroupOf() {
        Respond.to.sender(this.target.senderString).group.of(2).with.receiver(this.target.receiverAny);
        this.target.senderString('1');
        Assert.that(this.target.value).is.undefined();
        this.target.senderString('2');
        this.target.senderString('3');
        Assert.that(this.target.value.key).is.exact.to(2);
        Assert.that(this.target.value.values.length).is.exact.to(2);
        Assert.that(this.target.value.values[0]).is.exact.to(1);
        Assert.that(this.target.value.values[1]).is.exact.to(2);
        this.target.senderString('4');
        Assert.that(this.target.value.values[0]).is.exact.to(3);
        Assert.that(this.target.value.values[1]).is.exact.to(4);
        this.target.senderString('5');
        Assert.that(this.target.value.values[0]).is.exact.to(3);
        Assert.that(this.target.value.values[1]).is.exact.to(4);
    }

    @test GroupWith() {
        Respond.to.sender(this.target.senderString).group.with(this.target.senderNumber).with.receiver(this.target.receiverAny);
        this.target.senderString('1');
        this.target.senderString('2');
        Assert.that(this.target.value).is.undefined();
        this.target.senderNumber(3);
        Assert.that(this.target.value.key).is.exact.to('3');
        Assert.that(this.target.value.values.length).is.exact.to(2);
        Assert.that(this.target.value.values[0]).is.exact.to(1);
        Assert.that(this.target.value.values[1]).is.exact.to(2);
        this.target.senderString('4');
        this.target.senderNumber(5);
        Assert.that(this.target.value.key).is.exact.to('5');
        Assert.that(this.target.value.values.length).is.exact.to(1);
        Assert.that(this.target.value.values[0]).is.exact.to(4);
    }

    @test Maximum() {
        Respond.to.sender(this.target.senderString).maximum().with.receiver(this.target.receiverNumber);
        this.target.senderString('1');
        Assert.that(this.target.value).is.exact.to(1);
        this.target.senderString('2');
        this.target.senderString('1');
        Assert.that(this.target.value).is.exact.to(2);
    }

    @test Minimum() {
        Respond.to.sender(this.target.senderString).minimum().with.receiver(this.target.receiverNumber);
        this.target.senderString('2');
        Assert.that(this.target.value).is.exact.to(2);
        this.target.senderString('1');
        this.target.senderString('2');
        Assert.that(this.target.value).is.exact.to(1);
    }

    @test Mix() {
        Respond.to.sender(this.target.senderString).as(item => item.toString())
            .mix.with(this.target.senderNumber)
            .with.receiver(this.target.receiverString);
        this.target.senderString('1');
        Assert.that(this.target.value).is.exact.to('1');
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('2');
    }

    @test Pair() {
        Respond.to.sender(this.target.senderNumber)
            .pair.with(this.target.senderString)
            .with.receiver(this.target.receiverAny);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.undefined();
        this.target.senderString('2');
        Assert.that(this.target.value.source).is.exact.to('1');
        Assert.that(this.target.value.target).is.exact.to(2);
        this.target.senderNumber(3);
        Assert.that(this.target.value.source).is.exact.to('3');
        Assert.that(this.target.value.target).is.exact.to(2);
    }

    @test QueueOf() {
        Respond.to.sender(this.target.senderString).queue.of(2).with.receiver(this.target.receiverNumber);
        this.target.senderString('1');
        Assert.that(this.target.value).is.undefined();
        this.target.senderString('2');
        Assert.that(this.target.value).is.exact.to(2);
        this.target.senderString('3');
        Assert.that(this.target.value).is.exact.to(2);
        this.target.senderString('3');
        Assert.that(this.target.value).is.exact.to(3);
    }

    @test QueueWith() {
        Respond.to.sender(this.target.senderString).queue.with(this.target.senderNumber).with.receiver(this.target.receiverNumber);
        this.target.senderString('1');
        this.target.senderString('2');
        Assert.that(this.target.value).is.undefined();
        this.target.senderNumber(3);
        Assert.that(this.target.value).is.exact.to(2);
    }

    @test Skip() {
        Respond.to.sender(this.target.senderNumber).skip(1).with.receiver(this.target.receiverString);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.undefined();
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('2');
    }

    @test SkipIf() {
        Respond.to.sender(this.target.senderNumber).skip.if(value => value == '2').with.receiver(this.target.receiverString);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.exact.to('1');
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('1');
    }

    @test SkipWhile() {
        Respond.to.sender(this.target.senderNumber).skip.while(value => value == '2').with.receiver(this.target.receiverString);
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.undefined();
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.undefined();
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.exact.to('1');
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('2');
    }

    @test Take() {
        Respond.to.sender(this.target.senderNumber).take(1).with.receiver(this.target.receiverString);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.exact.to('1');
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('1');
    }

    @test TakeIf() {
        Respond.to.sender(this.target.senderNumber).take.if(value => value == '2').with.receiver(this.target.receiverString);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.undefined();
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('2');
    }

    @test TakeWhile() {
        Respond.to.sender(this.target.senderNumber).take.while(value => value == '2').with.receiver(this.target.receiverString);
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('2');
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('2');
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.exact.to('2');
        this.target.senderNumber(3);
        Assert.that(this.target.value).is.exact.to('2');
    }

    @test Unique() {
        Respond.to.sender(this.target.senderNumber).unique().with.receiver(this.target.receiverString);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.exact.to('1');
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('2');
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.exact.to('2');
        this.target.senderNumber(3);
        Assert.that(this.target.value).is.exact.to('3');
    }

    @test WithholdSimple() {
        Respond.to.sender(this.target.senderNumber).with.receiver(this.target.receiverString);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.exact.to('1');
        Respond.to.sender(this.target.senderNumber).withhold.receiver(this.target.receiverString);
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('1');
    }

    @test WithholdChained() {
        Respond.to.sender(this.target.senderNumber).unique().with.receiver(this.target.receiverString);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.exact.to('1');
        Respond.to.sender(this.target.senderNumber).withhold.receiver(this.target.receiverString);
        this.target.senderNumber(2);
        Assert.that(this.target.value).is.exact.to('1');
    }

    @test Zip() {
        Respond.to.sender(this.target.senderNumber)
            .zip.with(this.target.senderString)
            .with.receiver(this.target.receiverAny);
        this.target.senderNumber(1);
        Assert.that(this.target.value).is.undefined();
        this.target.senderString('2');
        Assert.that(this.target.value.source).is.exact.to('1');
        Assert.that(this.target.value.target).is.exact.to(2);
        this.target.senderNumber(3);
        Assert.that(this.target.value.source).is.exact.to('1');
        Assert.that(this.target.value.target).is.exact.to(2);
        this.target.senderString('4');
        Assert.that(this.target.value.source).is.exact.to('3');
        Assert.that(this.target.value.target).is.exact.to(4);
    }
}

window.onload = () => {
    document.getElementById('content').innerHTML = new Report.Html(new RespondTests).run();
};
NEW DELHI: The NIA on Wednesday arrested a core member of the Jamaat-ul-Mujahideen Bangladesh (JMB) in connection with the October 2, 2014 Burdwan blast. Mufazzil Haque was arrested from Mukimnagar in Murshidabad (West Bengal) on Tuesday night.

According to the NIA, Haque owned the Lalgola madarsa in Mukimnagar, which was one of the two important training centres for JMB's anti-national activities. He was also among the chief trainers and recruiters of the outfit.

He was such a fundamentalist, and so dedicated to the cause of jihad, that he styled himself on Osama bin Laden, said sources. "Sporting a long beard and dressing up like the dreaded slain terrorist, he would keep his women in strict purdah, forbidding them from going out of the house, and force his family to live by the Sharia. His lifestyle had already earned him the moniker of 'bin Laden' in his village even before his name cropped up in the Burdwan blast," said an NIA officer.

Sources said Haque was also a close associate of Sheikh Sajid alias Rehmatullah, who headed the India chapter of JMB. Sajid has already been arrested by the NIA. "They together ran the madarsa. Since Haque was a resident of the Lalgola area, he was instrumental in getting recruits for the outfit. He also arranged hideouts for those who came to Mukimnagar," said the officer.

Khalid Mohammed, one of the prime accused in the Burdwan blast case, being produced at the Bankshal court by NIA officials in Kolkata on Nov 20, 2014. (TOI Photo by Ei Samay)

The Lalgola madarsa, set up in 2011, was one of the most important recruitment and training centres for JMB after the Simulia madarsa of Burdwan. The madarsa was also used for making country-made pistols and assembling bombs, said sources. Haque's arrest is the 17th in the case, with many members of the outfit still absconding.

NIA investigations have so far found that the outfit had established its network in different districts of West Bengal, Assam and Jharkhand — particularly in Murshidabad, Nadia, Malda, Birbhum and Bardhaman of West Bengal and Barpeta of Assam, apart from Sahibganj and Pakur of Jharkhand. The investigations have also revealed that senior members of JMB had established terrorist training centres and bomb-making units in Beldanga and Mukimnagar of Murshidabad, Nanur of Birbhum, and Khagraghar and Simulia of Burdwan district.
The Los Angeles City Council today voted unanimously to approve the Lucas Museum of Narrative Art, the $1.5 billion project to be built in Exposition Park next to the L.A. Memorial Coliseum and USC, where Lucas went to film school. When the L.A. site was chosen in January after overtures from Chicago and San Francisco, the museum board said the facility will generate more than 1,500 new construction jobs and 350-plus permanent jobs at no cost to taxpayers. The museum, designed by Ma Yansong of MAD Architects, will be 300,000 square feet and five stories tall. The council vote was 14-0.

"People come to Los Angeles from all over the world to be inspired by art, and to see things they have only imagined become real through storytelling," said Los Angeles Mayor Eric Garcetti. "The Lucas Museum of Narrative Art will add another world-class institution to our city's cultural landscape, and bring a breathtaking architectural jewel to Exposition Park. I am proud to have worked with George Lucas and Mellody Hobson to bring this incredible gift to Los Angeles — and I applaud the City Council for voting to approve a gem for South L.A. that will touch the lives of Angelenos and visitors for generations to come."

The museum will house the Star Wars mogul's personal art collection and his Hollywood memorabilia. Its website says the facility "will present original work by world renowned and emerging artists, cutting-edge digital technologies, and daily film screenings in state-of-the-art theaters, as well as extraordinary educational opportunities for students of all ages." It also "will feature a bold new architectural design and will be a one-of-a-kind gathering place to experience collections, films and exhibitions dedicated to the power of visual storytelling and the evolution of art and moving images."
/*
 * Licensed to Elasticsearch B.V. under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch B.V. licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import { i18n } from '@kbn/i18n';
import { IAction, createAction, IncompatibleActionError } from '../ui_actions';
import { IEmbeddable, EmbeddableInput } from '../embeddables';
import { esFilters } from '../../../../../plugins/data/public';

export const APPLY_FILTER_ACTION = 'APPLY_FILTER_ACTION';

type RootEmbeddable = IEmbeddable<EmbeddableInput & { filters: esFilters.Filter[] }>;

interface ActionContext {
  embeddable: IEmbeddable;
  filters: esFilters.Filter[];
}

async function isCompatible(context: ActionContext) {
  if (context.embeddable === undefined) {
    return false;
  }
  const root = context.embeddable.getRoot() as RootEmbeddable;
  return Boolean(root.getInput().filters !== undefined && context.filters !== undefined);
}

export function createFilterAction(): IAction<ActionContext> {
  return createAction<ActionContext>({
    type: APPLY_FILTER_ACTION,
    id: APPLY_FILTER_ACTION,
    getDisplayName: () => {
      return i18n.translate('embeddableApi.actions.applyFilterActionTitle', {
        defaultMessage: 'Apply filter to current view',
      });
    },
    isCompatible,
    execute: async ({ embeddable, filters }) => {
      if (!filters || !embeddable) {
        throw new Error('Applying a filter requires a filter and embeddable as context');
      }

      if (!(await isCompatible({ embeddable, filters }))) {
        throw new IncompatibleActionError();
      }

      const root = embeddable.getRoot() as RootEmbeddable;

      root.updateInput({
        filters,
      });
    },
  });
}
package mars // assumed package name for this snippet

import (
	"image"
	"math/rand"
)

// cell and MarsGrid are the minimal definitions implied by the constructor
// below; the original program may carry additional fields.
type cell struct {
	lifeSigns int
}

type MarsGrid struct {
	bounds image.Rectangle
	cells  [][]cell
}

// NewMarsGrid is a rectangular grid with min point at (0, 0) and max point given.
// Each cell is seeded with a random lifeSigns reading in [0, 1000].
func NewMarsGrid(maxPos image.Point) *MarsGrid {
	cells := make([][]cell, maxPos.Y)
	for y := range cells {
		cells[y] = make([]cell, maxPos.X)
		for x := range cells[y] {
			cells[y][x].lifeSigns = rand.Intn(1001)
		}
	}
	return &MarsGrid{
		bounds: image.Rectangle{
			Max: maxPos,
		},
		cells: cells,
	}
}
/// Converts a given BssDescription into a BssInfo.
pub fn convert_bss_description(
    &self,
    bss: &BssDescription,
    wmm_param: Option<ie::WmmParam>,
) -> BssInfo {
    let mut probe_resp_wsc = None;
    match bss.find_wsc_ie() {
        Some(ie) => match wsc::parse_probe_resp_wsc(ie) {
            Ok(wsc) => probe_resp_wsc = Some(wsc),
            Err(_e) => {
                // Parsing could fail because the WSC IE comes from a beacon, which does
                // not contain all the information that a probe response WSC is expected
                // to have. We don't have the information to distinguish between a beacon
                // and a probe response, so we let this case fail silently.
            }
        },
        None => (),
    }
    BssInfo {
        bssid: bss.bssid.clone(),
        ssid: bss.ssid.clone(),
        rx_dbm: get_rx_dbm(bss),
        snr_db: bss.snr_db,
        channel: bss.chan.primary,
        protection: bss.get_protection(),
        compatible: self.is_bss_compatible(bss),
        probe_resp_wsc,
        wmm_param,
    }
}
/**
  \brief       Link DMA channel to i2s device.
  \param[in]   i2s     i2s handle to operate.
  \param[in]   rx_dma  the DMA channel for receive; passing NULL unlinks the current channel.
  \return      error code
*/
csi_error_t csi_i2s_rx_link_dma(csi_i2s_t *i2s, csi_dma_ch_t *rx_dma)
{
    CSI_PARAM_CHK(i2s, CSI_ERROR);
    csi_error_t ret = CSI_OK;

    if (rx_dma != NULL) {
        /* Link: allocate a channel and attach the event callback. */
        rx_dma->parent = i2s;
        ret = csi_dma_ch_alloc(rx_dma, -1, -1);

        if (ret == CSI_OK) {
            csi_dma_ch_attach_callback(rx_dma, wj_i2s_dma_event_cb, NULL);
            i2s->rx_dma = rx_dma;
        } else {
            rx_dma->parent = NULL;
        }
    } else {
        /* Unlink: release the channel currently attached, if any. */
        if (i2s->rx_dma) {
            csi_dma_ch_detach_callback(i2s->rx_dma);
            csi_dma_ch_free(i2s->rx_dma);
            i2s->rx_dma = NULL;
        } else {
            ret = CSI_ERROR;
        }
    }

    return ret;
}
As a warm up post, I'm going to talk about an important generalization of something that should be familiar to anyone who has made it through a semester of calculus: Taylor series! (And if you haven't seen these guys before, or are perhaps feeling a bit rusty, then by all means please head on over to Khan Academy to quickly brush up. Go right ahead, I'll still be here when you get back.)

Now this probably sounds like an awfully scary title for the first article in this miniseries about mathematics in graphics programming. Perhaps you're right! But I'd like to think that graphics programmers are a tough bunch, and that using this kind of a name may indeed have quite the opposite effect of emboldening those cocky individuals who think they can press on and figure things out in spite of any frightening mathematical jargon.

Taylor Series

At the very heart of this discussion we are going to deal with two of the most important tasks any graphics programmer needs to worry about: approximation and book keeping. Taylor series are of course one of the oldest and best known methods for approximating functions. You can think of Taylor series in a couple of ways. One possibility is to imagine that they successively approximate a given input function by adding additional polynomial terms to it. So the idea is that you can start with some arbitrary 1D function, let's call it $f(x)$, and suppose that you are allowed the following two operations:

You can evaluate a function at 0.
You can take a derivative, $f(x) \mapsto f'(x)$.

Then, we can compute the Taylor series expansion of f about 0 in the usual way: $a_0 = f(0)$, $a_1 = f'(0)$, $a_2 = f''(0)$, and so on. If we're really slick, we can save the first k of these coefficients in a vector, call them say $a_0, a_1, \ldots, a_{k-1}$, and then we can evaluate some approximation of f by summing up the first k terms in the above expansion:

$f(x) \approx \sum_{n=0}^{k-1} \frac{a_n}{n!} x^n$

Here is an animated gif showing the convergence of the Taylor series for the exponential function that I shamelessly ripped off from Wikipedia.

Higher Dimensional Taylor Series

It is easy to adapt Taylor series to deal with vector valued functions in a single variable: you just treat each component separately. But what if the domain of f is not one dimensional? This could easily happen, for example, if you were trying to locally approximate something like a surface, a height field, or an image filter. Well, in this post I will show you a slick way to deal with Taylor series of all sizes, shapes and dimensions! But before I do that, let me show you what the ugly and naive approach to this problem looks like:

Suppose that $f(x, y)$ is a 2D scalar-valued function. Then, let us look for a best quadratic (aka degree 2) approximation to f within the region near $(0, 0)$. By analogy to the 1D case, we want to find some 2nd order polynomial $p(x,y)$ in two variables,

$p(x,y) = a_{00} + a_{10} x + a_{01} y + a_{20} x^2 + a_{11} x y + a_{02} y^2$

such that:

$p(0,0) = f(0,0)$

And:

$\partial_x p(0,0) = \partial_x f(0,0)$
$\partial_y p(0,0) = \partial_y f(0,0)$
$\partial_x^2 p(0,0) = \partial_x^2 f(0,0)$
$\partial_x \partial_y p(0,0) = \partial_x \partial_y f(0,0)$
$\partial_y^2 p(0,0) = \partial_y^2 f(0,0)$

Phew! That's a lot more equations and coefficients than we got in the 1D case for a second order approximation. Let's work through solving for the coefficient $a_{20}$. To do this, we take p and plug it into the appropriate equation:

$\partial_x^2 p(0,0) = 2 a_{20} = \partial_x^2 f(0,0)$, and so $a_{20} = \tfrac{1}{2}\,\partial_x^2 f(0,0)$

If we generalize this idea a bit, then we can see that for an arbitrary Taylor series approximation in 2D, the coefficient $a_{mn}$ is determined by the equation,

$a_{mn} = \frac{1}{m!\, n!} \frac{\partial^{m+n} f}{\partial x^m \partial y^n}(0,0)$
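To make the naive scheme concrete, here is a minimal sketch in Python (my own illustration: the test function, step size, and finite-difference stencil are arbitrary choices, not from the post). It assembles the six coefficients $a_{mn}$ of the best quadratic approximation at the origin and evaluates $p(x, y)$:

# Naive 2D Taylor scheme: estimate a_mn = (1/(m! n!)) d^(m+n) f / dx^m dy^n
# at (0, 0) by central differences, then evaluate p(x, y).
import math

def f(x, y):
    return math.exp(x) * math.cos(y)   # arbitrary smooth test function

def deriv(m, n, h=1e-3):
    """Central-difference estimate of d^(m+n) f / dx^m dy^n at (0, 0)."""
    total = 0.0
    for i in range(m + 1):
        for j in range(n + 1):
            sign = (-1) ** (i + j)
            total += (sign * math.comb(m, i) * math.comb(n, j)
                      * f((m / 2 - i) * h, (n / 2 - j) * h))
    return total / h ** (m + n)

# The six coefficients of the degree-2 approximation, keyed by (m, n)
coeffs = {(m, n): deriv(m, n) / (math.factorial(m) * math.factorial(n))
          for m in range(3) for n in range(3) if m + n <= 2}

def p(x, y):
    return sum(a * x**m * y**n for (m, n), a in coeffs.items())

print(p(0.1, 0.2), f(0.1, 0.2))   # the two values should nearly agree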
All this is well and good, but it has a few problems. First, the summation for p is quite disorganized. How are we supposed to keep track of which coefficients go where? If we want to store the coefficients of p on a computer, then we need some less ad hoc method for naming them and packing them into memory. Second, it seems like this is going to be a headache in 3 or more dimensions, since we'll need to basically repeat the same sort of reasoning over and over again. As a result, if we want to start playing around with higher dimensional Taylor series, we're going to need a more principled notation for dealing with higher order multivariate polynomials.

Tensors to the Rescue!

One such system is tensor notation! Though often maligned by mathematical purists, algebraic geometers, and those more modern algebraically savvy category theorists, tensors are a simple and effective notation that dramatically simplifies calculations. From a very simplistic point of view, a tensor is just a multidimensional array. The rank of the tensor is the number of different indices, each of which has a distinct dimension. In C-style syntax, you could declare a tensor in the following way:

float vector[10];                        //A rank 1 tensor, with dimension (10)
float matrix[5][5];                      //A rank 2 tensor, with dimensions (5, 5)
float spreadsheet[ROWS][COLUMNS][PAGES]; //A rank 3 tensor, with dimensions (ROWS, COLUMNS, PAGES)
float crazy_thing[10][16][3][8];         //A rank 4 tensor, with dimensions (10, 16, 3, 8)

We will usually write tensors as upper case letters. To reference an individual entry in this array, we will use an ordered subscript, like $M_{ij}$, which we can take to be equivalent to the C code M[i][j].

For the rest of the article we are going to stick with this point of view that tensors are just big arrays of numbers. We aren't going to bother worrying about things like covariance/contravariance (and if you don't know what those words are, forget I said anything), nor are we going to mess around too much with operators like tensor products. There is nothing wrong with doing this, though it can be a bit narrow minded and it does somewhat limit the applications to which tensors may be applied. If it bothers you to think about tensors this way, here is a more algebraic/operational picture of what a tensor does: much as a row vector can represent a scalar-valued linear function (via the dot product), a rank n tensor can represent a multilinear function $f : \mathbb{R}^{d_1} \times \cdots \times \mathbb{R}^{d_n} \to \mathbb{R}$; that is, it takes in several vectors and spits out a scalar which varies linearly in each of its arguments.

That said, even if we just think about tensors as arrays, there are still a number of useful, purely formal operations that one can perform on them. For example, you can add them up just like vectors. If A and B are two tensors with the same dimensions, then we can define their sum componentwise as follows:

$(A + B)_i = A_i + B_i$

Where we take the symbol $i$ to be a generic (multi-)index here. The other important operation that we will need to use is called tensor contraction. You can think of tensor contraction as a generalization of both the dot product for vectors and matrix multiplication. Here is the idea: suppose that you have two tensors A and B, of ranks n and m respectively, and some index of each with a common dimension. Then we can form a new tensor of rank n + m − 2 by summing over that index in A and B simultaneously:

$C_{i_1 \cdots i_{n-1}\, j_1 \cdots j_{m-1}} = \sum_{k} A_{i_1 \cdots i_{n-1} k}\, B_{k\, j_1 \cdots j_{m-1}}$

Writing all this out is pretty awkward, so mathematicians use a slick shorthand called Einstein summation convention to save space. Here is the idea: any time that you see a repeated index in a product of tensor coefficients written next to each other, you sum over that index. For example, you can use Einstein notation to write the dot product of two vectors x and y as follows:

$x_i y_i$

Similarly, suppose that you have a matrix M; then you can write the linear transformation of a vector x by M in the same shorthand,

$y_i = M_{ij} x_j$

Which beats having to remember the rows-by-columns rule for multiplying vectors! Similarly, you can multiply two matrices using the same type of notation,

$C_{ik} = A_{ij} B_{jk}$

It even works for computing things like the trace of a matrix:

$\operatorname{tr}(M) = M_{ii}$

We can also use tensor notation to deal with things like computing a gradient: we write the derivative of a function f with respect to the component $x_i$ as $\partial_i f$. Combined with Einstein's notation, we can also perform operations such as taking the derivative of a function along a certain direction. If $v$ is some direction vector, then the derivative of f along $v$, evaluated at the point x, is

$v_i\, \partial_i f(x)$
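Since we are treating tensors as plain arrays, numpy's einsum is a convenient way to try the convention out. This is just a sketch mirroring the formulas above; the array contents are arbitrary:

# Einstein summation convention made concrete with numpy.einsum:
# each expression sums over any repeated index, exactly as in the text.
import numpy as np

x = np.array([1.0, 2.0, 3.0])
y = np.array([4.0, 5.0, 6.0])
M = np.arange(9.0).reshape(3, 3)
A = np.eye(3)

dot = np.einsum('i,i->', x, y)           # x_i y_i
matvec = np.einsum('ij,j->i', M, x)      # y_i = M_ij x_j
matmul = np.einsum('ij,jk->ik', A, M)    # C_ik = A_ij B_jk
trace = np.einsum('ii->', M)             # M_ii

print(dot, matvec, matmul, trace)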
Symmetric Tensors and Homogeneous Polynomials

Going back to multidimensional Taylor series, how can we use all this notation to help us deal with polynomials? Well, let us define a vector $x = (x_1, x_2, \ldots)$ whose components are just the usual coordinate variables, and pick some rank 1 tensor A with the appropriate dimension. If we just plug in x, then we get the following expression:

$A_i x_i$

Which is of course just a linear function on x! What if we wanted to make a quadratic function? Again, using tensor contraction this is no big deal:

$A_{ij} x_i x_j$

Neat! This gives us a quadratic multivariate polynomial on x. Moreover, it is also homogeneous, which means that it doesn't have any degree 1 or lower terms sitting around. In fact, generalizing from this pattern, if we wanted to store an arbitrary degree n homogeneous polynomial, we could pack all of its coefficients into a rank n tensor and evaluate by contracting:

$A_{i_1 i_2 \cdots i_n} x_{i_1} x_{i_2} \cdots x_{i_n}$

But there is some redundancy here. Notice that in the degree 2 case, the components of x commute, or in other words the terms $x_i x_j$ and $x_j x_i$ are really the same, and so we really don't need to store both of the coefficients $A_{ij}$ and $A_{ji}$. We could make our lives a bit easier if we just assumed that they were equal. In fact, it would be really nice if whenever we took any index, like say $(i, j, k)$, and permuted it to something arbitrary, for example $(k, i, j)$, it always gave us back the same coefficient. To see why this is, let us try to work out the coefficient of a given monomial $x_{i_1} x_{i_2} \cdots x_{i_n}$ in the degree n polynomial given by $A_{j_1 \cdots j_n} x_{j_1} \cdots x_{j_n}$. Directly expanding using Einstein summation, we get a sum over all permutations $\sigma$ of the indices $(i_1, \ldots, i_n)$:

$\sum_{\sigma} A_{i_{\sigma(1)} i_{\sigma(2)} \cdots i_{\sigma(n)}}$

If we assume that all those coefficients are identical, then the above nasty summation has a pretty simple form: the number of terms collapsing onto each monomial is a multinomial coefficient! As a result, if we wanted to, say, find the coefficient of $x_1^2 x_2$ in $A_{ijk} x_i x_j x_k$, then we could just use the following simple formula:

$\binom{3}{2,\,1} A_{112} = 3 A_{112}$

A tensor which has the property that its coefficients are invariant under permutation of its indices is called a symmetric tensor, and as we have just seen, symmetric tensors provide a particularly efficient method for representing homogeneous polynomials.

But wait! There's more… If we pick

$A_{i_1 i_2 \cdots i_n} = \partial_{i_1} \partial_{i_2} \cdots \partial_{i_n} f(0)$

then the above formula is almost exactly the right formula for the degree n terms in the Taylor series expansion of f. The only thing is that we are off by a factor of $n!$, but no matter, we can just divide that off. Taking this into account, we get the following striking formula for the Taylor series expansion of a function about the origin,

$f(x) \approx \sum_{n} \frac{1}{n!}\, \partial_{i_1} \cdots \partial_{i_n} f(0)\; x_{i_1} \cdots x_{i_n}$

Which is remarkably close to the formula for 1D Taylor series expansions!
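As a sanity check of this formula, here is a small sketch (my own illustration, not code from the post): it packs numerically estimated partial derivatives at the origin into rank-n index tuples and contracts each with x, reproducing the expansion above for an arbitrary smooth 2D function. The finite-difference step and the test function are assumptions chosen for the demo:

# Tensor form of the Taylor expansion: sum over n of
#   (1/n!) * d_{i1}...d_{in} f(0) * x_{i1} ... x_{in}
# with derivatives estimated by nested central differences.
import itertools
import math
import numpy as np

def f(p):
    return math.exp(p[0]) * math.sin(p[1])   # arbitrary smooth 2D function

def partial_at_zero(g, dims, h=1e-3):
    """Central-difference estimate of the mixed partial of g w.r.t. dims, at 0."""
    if not dims:
        return g(np.zeros(2))
    e = np.zeros(2)
    e[dims[0]] = h
    return (partial_at_zero(lambda p: g(p + e), dims[1:], h)
            - partial_at_zero(lambda p: g(p - e), dims[1:], h)) / (2 * h)

dim, order = 2, 3
x = np.array([0.1, 0.2])

approx = 0.0
for n in range(order + 1):
    # rank-n tensor of n-th derivatives, contracted with x once per index
    term = 0.0
    for idx in itertools.product(range(dim), repeat=n):
        term += partial_at_zero(f, list(idx)) * np.prod([x[i] for i in idx])
    approx += term / math.factorial(n)

print(approx, f(x))   # the truncated expansion should be close to f(x)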
Next Time

I will show how to actually implement symmetric tensors in C++11, and give some example applications of multidimensional Taylor series in implicit function modeling and non-linear deformations! I'll also show how the above expansion can be simplified even further by working in projective coordinates.
import { expect, test } from "vitest";
import { clone } from "../src";
import { getComposeValue } from "./util";

test("clone", () => {
    expect(clone("Hello World")).toBe("Hello World");
    expect(clone(123)).toBe(123);
});

test("deep clone", () => {
    expect(clone(getComposeValue(), true)).toEqual(getComposeValue());
});