// prepare block for the step ticker, called every time the block changes
// this is done during planning so it does not delay tick generation and the step ticker can simply grab the next block during the interrupt
void Block::prepare()
{
    float inv = 1.0F / this->steps_event_count;
    for (uint8_t m = 0; m < n_actuators; m++) {
        uint32_t steps = this->steps[m];
        this->tick_info[m].steps_to_move = steps;
        if(steps == 0) continue;

        float aratio = inv * steps;
        this->tick_info[m].steps_per_tick = STEPTICKER_TOFP((this->initial_rate * aratio) / STEP_TICKER_FREQUENCY);
        this->tick_info[m].counter = 0;
        this->tick_info[m].step_count = 0;
        this->tick_info[m].next_accel_event = this->total_move_ticks + 1;

        float acceleration_change = 0;
        if(this->accelerate_until != 0) {
            this->tick_info[m].next_accel_event = this->accelerate_until;
            acceleration_change = this->acceleration_per_tick;
        } else if(this->decelerate_after == 0) {
            acceleration_change = -this->deceleration_per_tick;
        } else if(this->decelerate_after != this->total_move_ticks) {
            this->tick_info[m].next_accel_event = this->decelerate_after;
        }

        this->tick_info[m].acceleration_change = STEPTICKER_TOFP(acceleration_change * aratio);
        this->tick_info[m].deceleration_change = -STEPTICKER_TOFP(this->deceleration_per_tick * aratio);
        this->tick_info[m].plateau_rate = STEPTICKER_TOFP((this->maximum_rate * aratio) / STEP_TICKER_FREQUENCY);
    }
}

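// A minimal sketch of the fixed-point conversion assumed above. STEPTICKER_TOFP
// and the shift width shown here are illustrative assumptions, not the firmware's
// actual definitions; check the project's headers before relying on them.
#include <cstdint>
#include <cmath>

#define STEP_TICKER_FP_SHIFT 30
#define STEPTICKER_TOFP(x) ((int32_t)roundf((float)(x) * (1 << STEP_TICKER_FP_SHIFT)))

// In the step ticker interrupt, each actuator accumulates steps_per_tick into a
// fixed-point counter; a step pulse is due each time the integer part advances.
inline bool accumulate_tick(int32_t &counter, int32_t steps_per_tick)
{
    counter += steps_per_tick;
    if(counter >= (1 << STEP_TICKER_FP_SHIFT)) {
        counter -= (1 << STEP_TICKER_FP_SHIFT);
        return true; // issue a step pulse
    }
    return false;
}
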
#include <sepia/comm/observer.h>
#include <sepia/util/threadobject.h>
#include "commtester_messages.pb.h"

class ReceiveTester : public sepia::comm::Observer< commtester_msgs::Test >
                    , public sepia::util::ThreadObject
{
    public:
        ReceiveTester();
        ~ReceiveTester();
        ReceiveTester( const ReceiveTester& a_object );
        void start();
        void stop();
        int getMessageCount();

    protected:
        void receive( const commtester_msgs::Test* msg );
        void own_thread();
        int m_messageCount;
};

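// A hedged usage sketch based only on the interface above: start()/stop() are
// assumed to drive the ThreadObject's worker thread, and the publisher side in
// the comment is hypothetical.
#include <iostream>

int main()
{
    ReceiveTester tester;
    tester.start();
    // ... elsewhere, publish commtester_msgs::Test messages ...
    tester.stop();
    std::cout << "received: " << tester.getMessageCount() << std::endl;
    return 0;
}
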
use crate::{
    convert::{ToPyObject, TryFromObject},
    object::{AsObject, PyObjectRef, PyResult},
    VirtualMachine,
};

#[derive(result_like::OptionLike)]
pub enum PyArithmeticValue<T> {
    Implemented(T),
    NotImplemented,
}

impl PyArithmeticValue<PyObjectRef> {
    pub fn from_object(vm: &VirtualMachine, obj: PyObjectRef) -> Self {
        if obj.is(&vm.ctx.not_implemented) {
            Self::NotImplemented
        } else {
            Self::Implemented(obj)
        }
    }
}

impl<T: TryFromObject> TryFromObject for PyArithmeticValue<T> {
    fn try_from_object(vm: &VirtualMachine, obj: PyObjectRef) -> PyResult<Self> {
        PyArithmeticValue::from_object(vm, obj)
            .map(|x| T::try_from_object(vm, x))
            .transpose()
    }
}

impl<T> ToPyObject for PyArithmeticValue<T>
where
    T: ToPyObject,
{
    fn to_pyobject(self, vm: &VirtualMachine) -> PyObjectRef {
        match self {
            PyArithmeticValue::Implemented(v) => v.to_pyobject(vm),
            PyArithmeticValue::NotImplemented => vm.ctx.not_implemented(),
        }
    }
}

pub type PyComparisonValue = PyArithmeticValue<bool>;

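// A hedged sketch of how a binary-operator slot might consume PyArithmeticValue;
// `try_forward_op` and its caller protocol are hypothetical names, not actual
// RustPython API.
fn try_forward_op(
    vm: &VirtualMachine,
    lhs: &PyObjectRef,
    rhs: &PyObjectRef,
    forward: impl Fn(&PyObjectRef, &PyObjectRef) -> PyResult,
) -> PyResult<PyArithmeticValue<PyObjectRef>> {
    // Run the forward slot (e.g. __add__) and classify its result: a returned
    // NotImplemented singleton tells the caller to try the reflected slot.
    let result = forward(lhs, rhs)?;
    Ok(PyArithmeticValue::from_object(vm, result))
}
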
#include<bits/stdc++.h>
#define pi pair<int,int>
#define mk make_pair
#define N 1000005
using namespace std;

pi d[N];
int n,Q,x,y,kk,opt,p[N],sz[N],dep[N],rel[N],top[N],head[N],tmppp[N],father[N],heavyson[N];
struct Tree{int nxt,to;}e[N];

inline void link(int x,int y){e[++kk].nxt=head[x];e[kk].to=y;head[x]=kk;}

// heavy-light decomposition, pass 1: sizes, depths, parents, heavy children
void dfs1(int u,int fa){
    sz[u]=1;
    for (int i=head[u];i;i=e[i].nxt){
        int v=e[i].to;
        if (v==fa) continue;
        dep[v]=dep[u]+1;
        father[v]=u;
        dfs1(v,u);
        sz[u]+=sz[v];
        if (!heavyson[u]||sz[v]>sz[heavyson[u]]) heavyson[u]=v;
    }
}

// pass 2: chain tops
void dfs2(int u,int first){
    top[u]=first;
    if (!heavyson[u]) return;
    dfs2(heavyson[u],first);
    for (int i=head[u];i;i=e[i].nxt){
        int v=e[i].to;
        if (v==father[u]||v==heavyson[u]) continue;
        dfs2(v,v);
    }
}

inline int lca(int x,int y){
    while (top[x]!=top[y]){
        if (dep[top[x]]<dep[top[y]]) swap(x,y);
        x=father[top[x]];
    }
    if (dep[x]<dep[y]) return x;
    return y;
}

// merge two vertex sets represented by path endpoints; (0,0) is the empty set,
// (-1,-1) means the union is not a single path
inline pi merge(pi aa,pi bb){
    if (!aa.first||!bb.first) return mk(aa.first+bb.first,aa.second+bb.second);
    if (aa.first==-1||bb.first==-1) return mk(-1,-1);
    tmppp[1]=aa.first;tmppp[2]=aa.second;
    tmppp[3]=bb.first;tmppp[4]=bb.second;
    for (int i=1;i<4;i++){
        for (int j=i+1;j<=4;j++){
            int now=lca(tmppp[i],tmppp[j]);
            bool flag=1;
            for (int k=1;k<=4;k++){
                if (k==i||k==j) continue;
                if (lca(tmppp[k],now)!=now){flag=0;break;}
                if (lca(tmppp[k],tmppp[i])!=tmppp[k]&&lca(tmppp[k],tmppp[j])!=tmppp[k]){flag=0;break;}
            }
            if (flag) return mk(tmppp[i],tmppp[j]);
        }
    }
    return mk(-1,-1);
}

void build(int k,int l,int r){
    if (l==r){d[k].first=d[k].second=rel[l];return;}
    int mid=(l+r)>>1;
    build(k*2,l,mid);build(k*2+1,mid+1,r);
    d[k]=merge(d[k*2],d[k*2+1]);
}

void update(int k,int l,int r,int x){
    if (l==r){d[k].first=d[k].second=rel[x];return;}
    int mid=(l+r)>>1;
    if (x<=mid) update(k*2,l,mid,x);
    else update(k*2+1,mid+1,r,x);
    d[k]=merge(d[k*2],d[k*2+1]);
}

// longest prefix of values whose vertices still form a single path
int query(int k,int l,int r,pi tmp){
    if (l==r){
        if (merge(d[k],tmp).first==-1) return 0;
        return l;
    }
    if (merge(d[k],tmp).first!=-1) return r;
    else {
        int mid=(l+r)>>1;
        pi tmp1=merge(d[k*2],tmp);
        if (tmp1.first==-1) return query(k*2,l,mid,tmp);
        else return max(mid,query(k*2+1,mid+1,r,tmp1));
    }
}

int main(){
    scanf("%d",&n);
    for (int i=1;i<=n;i++) scanf("%d",&p[i]),rel[p[i]]=i;
    for (int i=2;i<=n;i++){
        scanf("%d",&x);
        link(x,i);
    }
    rel[n]=-1;
    dfs1(1,-1);dfs2(1,1);
    build(1,0,n-1);
    scanf("%d",&Q);
    while (Q--){
        scanf("%d",&opt);
        if (opt==2) printf("%d\n",query(1,0,n-1,mk(0,0))+1);
        else {
            scanf("%d%d",&x,&y);
            swap(p[x],p[y]);
            rel[p[x]]=x;rel[p[y]]=y;
            update(1,0,n-1,p[x]);
            update(1,0,n-1,p[y]);
        }
    }
    return 0;
}

// NewCdpClient creates the CDP client plugin.
func NewCdpClient(ctx *core.PluginCtx, initJson []byte) *core.PluginBase {
	o := new(PluginCdpClient)
	fastjson.Unmarshal(initJson, &o.init)
	o.InitPluginBase(ctx, o)
	o.RegisterEvents(ctx, cdpEvents, o)
	nsplg := o.Ns.PluginCtx.GetOrCreate(CDP_PLUG)
	o.cdpNsPlug = nsplg.Ext.(*PluginCdpNs)
	o.OnCreate()
	return &o.PluginBase
}

/*
 * DataStagingStorage encompasses all logic dealing with the manipulation of the
 * data_staging table in storage. Note that records should be inserted manually
 * with caution. There are database triggers and other automated processes in place
 * for taking data from the imports table and parsing it into the data_staging table.
 */
import PostgresStorage from "../postgresStorage";
import {DataStagingT} from "../../types/import/dataStagingT";
import Result from "../../result";
import {QueryConfig} from "pg";
import PostgresAdapter from "../adapters/postgres/postgres";
import {QueueProcessor} from "../../services/event_system/events";

export default class DataStagingStorage extends PostgresStorage {
    public static tableName = "data_staging";

    private static instance: DataStagingStorage;

    public static get Instance(): DataStagingStorage {
        if(!DataStagingStorage.instance) {
            DataStagingStorage.instance = new DataStagingStorage()
        }
        return DataStagingStorage.instance
    }

    private constructor() {
        super();
    }

    public async Create(dataSourceID: string, importID: string, typeMappingID: string, data: any): Promise<Result<boolean>> {
        return new Promise((resolve) => {
            PostgresAdapter.Instance.Pool.query(DataStagingStorage.createStatement(dataSourceID, importID, typeMappingID, data))
                .then(() => {
                    QueueProcessor.Instance.emit([{
                        source_id: dataSourceID,
                        source_type: "data_source",
                        type: "data_imported"
                    }])
                    resolve(Result.Success(true))
                })
                .catch((e: Error) => resolve(Result.Failure(e.message)))
        })
    }

    public async Count(importID: string): Promise<Result<number>> {
        return super.count(DataStagingStorage.countImportStatement(importID))
    }

    public async CountUninsertedForImport(importID: string): Promise<Result<number>> {
        return super.count(DataStagingStorage.countUninsertedByImportStatement(importID))
    }

    // returns the count of records in an import that also contain an active type mapping
    // which contains transformations - used in the process loop
    public async CountUninsertedActiveMapping(importID: string): Promise<Result<number>> {
        return super.count(DataStagingStorage.countImportUninsertedActiveMappingStatement(importID))
    }

    public async Retrieve(id: number): Promise<Result<DataStagingT>> {
        return super.retrieve<DataStagingT>(DataStagingStorage.retrieveStatement(id))
    }

    public async List(importID: string, offset: number, limit: number, sortBy?: string, sortDesc?: boolean): Promise<Result<DataStagingT[]>> {
        if(limit === -1) {
            return super.rows<DataStagingT>(DataStagingStorage.listAllStatement(importID))
        }
        return super.rows<DataStagingT>(DataStagingStorage.listStatement(importID, offset, limit, sortBy, sortDesc))
    }

    public async ListUninserted(importID: string, offset: number, limit: number): Promise<Result<DataStagingT[]>> {
        return super.rows<DataStagingT>(DataStagingStorage.listUninsertedStatement(importID, offset, limit))
    }

    // list uninserted records which also have an active type mapping record along with transformations
    public async ListUninsertedActiveMapping(importID: string, offset: number, limit: number): Promise<Result<DataStagingT[]>> {
        return super.rows<DataStagingT>(DataStagingStorage.listUninsertedActiveMappingStatement(importID, offset, limit))
    }

    public async ListUninsertedByDataSource(dataSourceID: string, offset: number, limit: number): Promise<Result<DataStagingT[]>> {
        return super.rows<DataStagingT>(DataStagingStorage.listUninsertedByDataSourceStatement(dataSourceID, offset, limit))
    }

    public async CountUninsertedByDataSource(dataSourceID: string): Promise<Result<number>> {
        return super.count(DataStagingStorage.countUninsertedByDataSourceStatement(dataSourceID))
    }

    public async SetInsertedByImport(importID: string): Promise<Result<boolean>> {
        return super.runAsTransaction(DataStagingStorage.setInsertedByImportStatement(importID))
    }

    public async SetInserted(id: number): Promise<Result<boolean>> {
        return super.runAsTransaction(DataStagingStorage.setInsertedStatement(id))
    }

    public async PartialUpdate(id: number, userID: string, updatedField: {[key: string]: any}): Promise<Result<boolean>> {
        const toUpdate = await this.Retrieve(id);
        if(toUpdate.isError) {
            return new Promise(resolve => resolve(Result.Failure(toUpdate.error!.error)))
        }

        const updateStatement: string[] = [];
        const values: string[] = [];
        let i = 1;

        Object.keys(updatedField).map(k => {
            updateStatement.push(`${k} = $${i}`);
            values.push(updatedField[k]);
            i++
        });

        return new Promise(resolve => {
            PostgresAdapter.Instance.Pool.query({
                text: `UPDATE data_staging SET ${updateStatement.join(",")} WHERE id = '${id}'`,
                values
            })
                .then(() => {
                    resolve(Result.Success(true))
                })
                .catch(e => resolve(Result.Failure(e)))
        })
    }

    public async PermanentlyDelete(id: string): Promise<Result<boolean>> {
        return super.run(DataStagingStorage.deleteStatement(id))
    }

    // completely overwrite the existing error set
    public SetErrors(id: number, errors: string[]): Promise<Result<boolean>> {
        return super.runAsTransaction(DataStagingStorage.setErrorsStatement(id, errors))
    }

    // add an error to an existing error set
    public AddError(id: number, errors: string): Promise<Result<boolean>> {
        return super.runAsTransaction(DataStagingStorage.addErrorsStatement(id, errors))
    }

    private static createStatement(dataSourceID: string, importID: string, typeMappingID: string, data: any): QueryConfig {
        return {
            text: `INSERT INTO data_staging(data_source_id,import_id,data,mapping_id) VALUES($1,$2,$3,$4)`,
            values: [dataSourceID, importID, data, typeMappingID]
        }
    }

    private static retrieveStatement(id: number): QueryConfig {
        return {
            text: `SELECT * FROM data_staging WHERE id = $1`,
            values: [id]
        }
    }

    private static listStatement(importID: string, offset: number, limit: number, sortBy?: string, sortDesc?: boolean): QueryConfig {
        if(sortDesc) {
            return {
                text: `SELECT * FROM data_staging WHERE import_id = $1 ORDER BY "${sortBy}" DESC OFFSET $2 LIMIT $3`,
                values: [importID, offset, limit]
            }
        } else if(sortBy) {
            return {
                text: `SELECT * FROM data_staging WHERE import_id = $1 ORDER BY "${sortBy}" ASC OFFSET $2 LIMIT $3`,
                values: [importID, offset, limit]
            }
        } else {
            return {
                text: `SELECT * FROM data_staging WHERE import_id = $1 OFFSET $2 LIMIT $3`,
                values: [importID, offset, limit]
            }
        }
    }

    private static listAllStatement(importID: string): QueryConfig {
        return {
            text: `SELECT * FROM data_staging WHERE import_id = $1`,
            values: [importID]
        }
    }

    private static listUninsertedStatement(importID: string, offset: number, limit: number): QueryConfig {
        return {
            text: `SELECT * FROM data_staging WHERE import_id = $1 AND inserted_at IS NULL OFFSET $2 LIMIT $3`,
            values: [importID, offset, limit]
        }
    }

    private static listUninsertedActiveMappingStatement(importID: string, offset: number, limit: number): QueryConfig {
        return {
            text: `SELECT data_staging.*
                   FROM data_staging
                   LEFT JOIN data_type_mappings ON data_type_mappings.id = data_staging.mapping_id
                   WHERE import_id = $1
                   AND inserted_at IS NULL
                   AND data_type_mappings.active IS TRUE
                   AND EXISTS (SELECT * from data_type_mapping_transformations WHERE data_type_mapping_transformations.type_mapping_id = data_staging.mapping_id)
                   OFFSET $2 LIMIT $3`,
            values: [importID, offset, limit]
        }
    }

    private static listUninsertedByDataSourceStatement(dataSourceID: string, offset: number, limit: number): QueryConfig {
        return {
            text: `SELECT * FROM data_staging WHERE data_source_id = $1 AND inserted_at IS NULL AND mapping_id IS NULL OFFSET $2 LIMIT $3`,
            values: [dataSourceID, offset, limit]
        }
    }

    private static countUninsertedByDataSourceStatement(dataSourceID: string): QueryConfig {
        return {
            text: `SELECT COUNT(*) FROM data_staging WHERE data_source_id = $1 AND inserted_at IS NULL AND mapping_id IS NULL`,
            values: [dataSourceID]
        }
    }

    private static countImportStatement(importID: string): QueryConfig {
        return {
            text: `SELECT COUNT(*) FROM data_staging WHERE import_id = $1`,
            values: [importID]
        }
    }

    private static countUninsertedByImportStatement(importID: string): QueryConfig {
        return {
            text: `SELECT COUNT(*) FROM data_staging WHERE inserted_at IS NULL AND import_id = $1`,
            values: [importID]
        }
    }

    private static countImportUninsertedActiveMappingStatement(importID: string): QueryConfig {
        return {
            text: `SELECT COUNT(*)
                   FROM data_staging
                   LEFT JOIN data_type_mappings ON data_type_mappings.id = data_staging.mapping_id
                   WHERE data_staging.import_id = $1
                   AND data_staging.inserted_at IS NULL
                   AND data_type_mappings.active IS TRUE
                   AND EXISTS (SELECT * from data_type_mapping_transformations WHERE data_type_mapping_transformations.type_mapping_id = data_staging.mapping_id)`,
            values: [importID]
        }
    }

    private static setInsertedByImportStatement(importID: string): QueryConfig {
        return {
            text: `UPDATE data_staging SET inserted_at = NOW() WHERE import_id = $1`,
            values: [importID]
        }
    }

    private static setInsertedStatement(id: number): QueryConfig {
        return {
            text: `UPDATE data_staging SET inserted_at = NOW() WHERE id = $1`,
            values: [id]
        }
    }

    private static deleteStatement(id: string): QueryConfig {
        return {
            text: `DELETE FROM data_staging WHERE id = $1`,
            values: [id]
        }
    }

    private static setErrorsStatement(id: number, errors: string[]): QueryConfig {
        return {
            text: `UPDATE data_staging SET errors = $1 WHERE id = $2`,
            values: [errors, id]
        }
    }

    private static addErrorsStatement(id: number, error: string): QueryConfig {
        return {
            text: `UPDATE data_staging SET errors = array_append(errors, $1) WHERE id = $2`,
            values: [error, id]
        }
    }
}

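// A hedged usage sketch of the singleton above. The IDs and payload are
// placeholders, and Result is assumed to expose a `value` property alongside
// `isError`, following the conventions already visible in this class.
async function stageExample(): Promise<void> {
    const staging = DataStagingStorage.Instance;

    const created = await staging.Create("dataSource-1", "import-1", "mapping-1", {reading: 42});
    if(created.isError) throw new Error("staging insert failed");

    // poll records that still need processing for this import
    const pending = await staging.ListUninserted("import-1", 0, 100);
    if(!pending.isError) {
        console.log(`uninserted records: ${pending.value.length}`);
    }
}
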
Joint Base Station Cooperative Transmission and ON-OFF Mechanism in Internet of Things Networks

The ultra-dense networks in the fifth generation of wireless networks (5G) guarantee high-speed data transmission for internet of things networks. A decreased cell radius can improve the received signal strength, yet it also increases wireless interference for users. To achieve better wireless interference management, this paper first surveys base station (BS) cooperative transmission and the BS cooperative ON-OFF switching mechanism. In the former, a set of neighboring base stations cooperates to serve edge users with enhanced received signal strength. In the latter, by switching users to neighboring BSs, the BSs with poor wireless channel quality can be turned off to save energy. Taking the demands of users into account, we further envision a joint BS cooperative transmission and ON-OFF mechanism with better adaptation to the environment. We also point out several promising directions for future research.
def run_init(self):
    inputs = self.ctx.inputs
    inputs.settings['ONLY_INITIALIZATION'] = True
    inputs.options = update_mapping(inputs['options'], get_default_options())

    process = PwCalculation.process()
    inputs = self._prepare_process_inputs(process, inputs)
    running = self.submit(process, **inputs)

    self.report('launching initialization PwCalculation<{}>'.format(running.pk))

    return ToContext(calculation_init=running)

# -*- coding: utf-8 -*-
#! \file ~/doit_doc_template/builders.py
#! \author <NAME>, <<EMAIL> AT <EMAIL>>
#! \stamp 2018-08-26 14:39:35 +0200
#! \project DoIt! Doc: Sphinx Extension for DoIt! Documentation
#! \license MIT
#! \version See doit_doc_template.__version__
#! \brief See __doc__
#
"""\
Sphinx builder classes.\
"""

__license__ = """\
Copyright (c) 2014 - 2018 <NAME>.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.\
"""

import os

from docutils.frontend import OptionParser
from docutils.io import FileOutput

from sphinx.builders import Builder
from sphinx.errors import ExtensionError
from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.osutil import SEP

from .core.keywords import KW_VARIABLES
from .core.utils import get_config_value, Importer
from .writers import DoItHtmlTranslator, DoItHtmlWriter

logger = logging.getLogger(__name__)

builtin_templates_dir = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), "templates"
)

class DoItHtmlBuilder(Builder):
    """
    """
    name = "doit-html"
    format = "html"
    epilog = "The HTML pages are in %(outdir)s."
    out_suffix = ".html"
    default_translator_class = DoItHtmlTranslator
    supported_image_types = ["image/png"]

    __slots__ = [
        "context",
        "template_cache",
        "template_stack",
        "template",
        "docwriter",
        "docsettings"
    ]

    def __init__(self, app):
        """
        """
        Builder.__init__(self, app)
    #-def

    def init(self):
        """
        """
        self.context = {}
        self.template_cache = {}
        self.template_stack = []
        self.template = None
        self.docwriter = None
        self.docsettings = None
    #-def

    def get_outdated_docs(self):
        """
        """
        for docname in self.env.found_docs:
            yield docname
    #-def

    def get_target_uri(self, docname, typ=None):
        """
        """
        if docname == "index":
            return ""
        if docname.endswith(SEP + "index"):
            return docname[:-5]
        return docname + SEP
    #-def

    def prepare_writing(self, docnames):
        """
        """
        self.init_variables()
        self.template = self.get_template()
        self.docwriter = DoItHtmlWriter(self)
        self.docsettings = OptionParser(
            defaults = self.env.settings,
            components = (self.docwriter,),
            read_config_files = True
        ).get_default_values()
        self.template.setup()
    #-def

    def write_doc(self, docname, doctree):
        """
        """
        html_file_suffix = self.get_builder_config("file_suffix", "html")
        if html_file_suffix is not None:
            self.out_suffix = html_file_suffix
        docfile = docname + self.out_suffix
        destination = FileOutput(
            destination_path = os.path.join(self.outdir, docfile),
            encoding = "utf-8"
        )
        doctree.settings = self.docsettings
        self.docwriter.write(doctree, destination)
    #-def

    def finish(self):
        """
        """
        pass
    #-def

    def init_variables(self):
        """
        """
        app = self.app
        srcdir = app.srcdir
        config = app.config
        templates_path = get_config_value(config, "templates_path") or []
        html_theme_path = self.get_builder_config("theme_path", "html") or []
        templates_path = html_theme_path + templates_path
        templates_path = [os.path.join(srcdir, p) for p in templates_path]
        templates_path.append(builtin_templates_dir)
        html_theme = self.get_builder_config("theme", "html")
        if not html_theme:
            logger.warning(__("HTML theme is not specified, using default."))
            html_theme = "default"
        self.context[KW_VARIABLES] = dict(
            _srcdir = srcdir,
            _outdir = app.outdir,
            _config = config,
            _path = templates_path,
            _name = html_theme
        )
    #-def

    def get_template(self, name=None):
        """
        """
        variables = self.context[KW_VARIABLES]
        if name is None:
            name = variables["_name"]
        if name in self.template_cache:
            return self.template_cache[name]
        template = self.load_template(variables["_path"], name)
        if template is None:
            raise ExtensionError("Template '{}' was not found.".format(name))
        self.template_cache[name] = template
        return template
    #-def

    def load_template(self, path, name):
        """
        """
        with Importer(path, False):
            module = __import__(name, None, None, ["load"])
            if hasattr(module, "load"):
                return module.load(self)
            return None
    #-def
#-class

/** * Utilities for working with {@link SummaryConfusionMatrix}es. In particular, to build a {@link * SummaryConfusionMatrix}, use {@link #builder()}. * * <p>Other useful things: computing F-measures ({@link #FMeasureVsAllOthers(SummaryConfusionMatrix, * Symbol)}) and pretty-printing ({@link #prettyPrint(SummaryConfusionMatrix)}. * * @author rgabbard */ public final class SummaryConfusionMatrices { private SummaryConfusionMatrices() { throw new UnsupportedOperationException(); } public static String prettyPrint(SummaryConfusionMatrix m, Ordering<Symbol> labelOrdering) { final StringBuilder sb = new StringBuilder(); for (final Symbol key1 : labelOrdering.sortedCopy(m.leftLabels())) { for (final Symbol key2 : labelOrdering.sortedCopy(m.rightLabels())) { sb.append(String.format("%s / %s: %6.2f\n", key1, key2, m.cell(key1, key2))); } } return sb.toString(); } public static String prettyDelimPrint(final SummaryConfusionMatrix m, final String delimiter) { return prettyDelimPrint(m, delimiter, SymbolUtils.byStringOrdering()); } public static String prettyDelimPrint( final SummaryConfusionMatrix m, final String delimiter, final Ordering<Symbol> labelOrdering) { final Joiner delimJoiner = Joiner.on(delimiter); final ImmutableList.Builder<String> lines = ImmutableList.builder(); final List<Symbol> rowLabels = labelOrdering.sortedCopy(m.leftLabels()); final List<Symbol> columnLabels = labelOrdering.sortedCopy(m.rightLabels()); // Create header final ImmutableList.Builder<String> header = ImmutableList.builder(); header.add("Predicted"); header.addAll(Iterables.transform(columnLabels, SymbolUtils.desymbolizeFunction())); lines.add(delimJoiner.join(header.build())); // Output each line for (final Symbol rowLabel : rowLabels) { final ImmutableList.Builder<String> row = ImmutableList.builder(); row.add(rowLabel.asString()); for (final Symbol columnLabel : columnLabels) { row.add(String.format("%.2f", m.cell(rowLabel, columnLabel))); } lines.add(delimJoiner.join(row.build())); } // Return all lines return StringUtils.unixNewlineJoiner().join(lines.build()); } public static String prettyPrint(SummaryConfusionMatrix m) { return prettyPrint(m, SymbolUtils.byStringOrdering()); } public static final FMeasureCounts FMeasureVsAllOthers( SummaryConfusionMatrix m, final Symbol positiveSymbol) { return FMeasureVsAllOthers(m, ImmutableSet.of(positiveSymbol)); } public static final FMeasureCounts FMeasureVsAllOthers( SummaryConfusionMatrix m, final Set<Symbol> positiveSymbols) { double truePositives = 0; for (final Symbol goodSymbol : positiveSymbols) { for (final Symbol goodSymbol2 : positiveSymbols) { truePositives += m.cell(goodSymbol, goodSymbol2); } } double falsePositives = -truePositives; double falseNegatives = -truePositives; for (final Symbol goodSymbol : positiveSymbols) { falsePositives += m.rowSum(goodSymbol); falseNegatives += m.columnSum(goodSymbol); } return FMeasureCounts.fromTPFPFN(truePositives, falsePositives, falseNegatives); } /** * Returns accuracy, which is defined as the sum of the cells of the form (X,X) over the sum of * all cells. If the sum is 0, 0 is returned. To pretty-print this you probably want to multiply * by 100. 
*/ public static final double accuracy(SummaryConfusionMatrix m) { final double total = m.sumOfallCells(); double matching = 0.0; for (final Symbol key : Sets.intersection(m.leftLabels(), m.rightLabels())) { matching += m.cell(key, key); } return DoubleUtils.XOverYOrZero(matching, total); } /** * Returns the maximum accuracy that would be achieved if a single classification were selected * for all instances. */ public static final double chooseMostCommonRightHandClassAccuracy(SummaryConfusionMatrix m) { final double total = m.sumOfallCells(); double max = 0.0; for (final Symbol right : m.rightLabels()) { max = Math.max(max, m.columnSum(right)); } return DoubleUtils.XOverYOrZero(max, total); } public static final double chooseMostCommonLeftHandClassAccuracy(SummaryConfusionMatrix m) { final double total = m.sumOfallCells(); double max = 0.0; for (final Symbol left : m.leftLabels()) { max = Math.max(max, m.rowSum(left)); } return DoubleUtils.XOverYOrZero(max, total); } public static Builder builder() { return new Builder(); } /** * To build a {@link SummaryConfusionMatrix}, call {@link SummaryConfusionMatrices#builder()}. On * the returned object, call {@link #accumulatePredictedGold(Symbol, Symbol, double)} to record * the number of times a system response corresponds to a gold standard responses for some item. * Typically the double value will be 1.0 unless you are using fractional counts for some reason. * * <p>When done, call {@link #build()} to get a {@link SummaryConfusionMatrix}. */ public static class Builder { private final Table<Symbol, Symbol, Double> table = HashBasedTable.create(); public Builder accumulate(final SummaryConfusionMatrix matrix) { matrix.accumulateTo(this); return this; } public Builder accumulate(final Symbol row, final Symbol col, final double val) { final Double cur = table.get(row, col); final double setVal; if (cur != null) { setVal = cur + val; } else { setVal = val; } table.put(row, col, setVal); return this; } /** * This is just an alias for accumulate. However, since the F-measure functions assume the * predictions are on the rows and the gold-standard on the columns, using this method in such * cases and make the code clearer and reduce errors. */ public Builder accumulatePredictedGold( final Symbol prediction, final Symbol gold, final double val) { accumulate(prediction, gold, val); return this; } public SummaryConfusionMatrix build() { // first attemtp the more efficient implementation for the common binary case final Optional<BinarySummaryConfusionMatrix> binaryImp = BinarySummaryConfusionMatrix.attemptCreate(table); if (binaryImp.isPresent()) { return binaryImp.get(); } else { return new TableBasedSummaryConfusionMatrix(table); } } public static final Function<Builder, SummaryConfusionMatrix> Build = new Function<Builder, SummaryConfusionMatrix>() { @Override public SummaryConfusionMatrix apply(Builder input) { return input.build(); } }; private Builder() {} } }
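// A hedged usage sketch of the builder above. Symbol.from(...) is assumed to
// be the interning factory used elsewhere in this library; counts are toy data.
public static void main(String[] args) {
    final SummaryConfusionMatrices.Builder builder = SummaryConfusionMatrices.builder();
    builder.accumulatePredictedGold(Symbol.from("POS"), Symbol.from("POS"), 1.0);
    builder.accumulatePredictedGold(Symbol.from("POS"), Symbol.from("NEG"), 1.0);
    builder.accumulatePredictedGold(Symbol.from("NEG"), Symbol.from("NEG"), 3.0);
    final SummaryConfusionMatrix m = builder.build();
    System.out.println(SummaryConfusionMatrices.prettyPrint(m));
    System.out.printf("accuracy: %.2f%n", SummaryConfusionMatrices.accuracy(m));
}
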
def __get_trim_iters(
    self,
    tolerance: float = 0.1,
    max_trim: float = 0.5,
    max_iters: int = 7,
    show_progress: bool = False,
    **smoother_kwargs: Any,
) -> List[DataFrame]:
    eigs = np.copy(self._untrimmed)
    trim_iters = [TrimIter(eigs, eigs, tolerance, **smoother_kwargs)]
    for i in range(max_iters):
        if show_progress:
            print(f"Completed trim-unfold iteration: {i}.")
        trim = trim_iters[-1].next_iter()
        if trim.proportion_removed > max_trim:
            break
        trim_iters.append(trim)
        if trim.is_all_inliers():
            break
    return trim_iters

from luigi import Parameter, WrapperTask from tasks.meta import OBSColumn, current_session from tasks.tags import SectionTags, SubsectionTags, LicenseTags, UnitTags from tasks.us.epa.huc import HUCColumns, SourceTags, HUC from tasks.util import (DownloadUnzipTask, shell, ColumnsTask, TableTask, CSV2TempTableTask, MetaWrapper) from collections import OrderedDict import os class DownloadMetrics(DownloadUnzipTask): URL = 'http://edg.epa.gov/data/Public/ORD/EnviroAtlas/National/National_metrics_July2015_CSV.zip' def download(self): shell('wget -O "{output}".zip "{url}"'.format( output=self.output().path, url=self.URL )) class EnviroAtlasTempTable(CSV2TempTableTask): csv_name = Parameter() def requires(self): return DownloadMetrics() def input_csv(self): return os.path.join(self.input().path, self.csv_name) class EnviroAtlasColumns(ColumnsTask): table = Parameter() def version(self): return 2 def requires(self): return { 'sections': SectionTags(), 'subsections': SubsectionTags(), 'licenses': LicenseTags(), 'sources': SourceTags(), 'units': UnitTags(), } def solar_energy(self, usa, environmental, license_, source, units): return OrderedDict([ ('sole_area', OBSColumn( name='Area with solar energy potential', tags=[usa, environmental, license_, source, units['km2']], weight=5, type='Numeric', aggregate='sum', )), ('sole_mean', OBSColumn( name='Annual Average direct normal solar resources kWh/m2/day', tags=[usa, environmental, license_, source, units['ratio']], weight=5, type='Numeric', aggregate='average', )) ]) def avgprecip(self, usa, environmental, license_, source, units): inches = units['inches'] return OrderedDict([ ('meanprecip', OBSColumn( name='Average annual precipitation', description='Average annual precipitation in inches.', aggregate='average', type='Numeric', weight=5, tags=[usa, environmental, license_, source, inches],)) ]) def landcover(self, usa, environmental, license_, source, units): ratio = units['ratio'] pfor = OBSColumn( name='Forest land cover', description='Percentage of land area within the WBD 12-digit hydrologic unit that is classified as forest land cover (2006 NLCD codes: 41, 42, 43). A value of -1 indicates that no land cover data was located within the hydrologic unit.', type='Numeric', weight=5, tags=[usa, environmental, license_, source, ratio], ) pwetl = OBSColumn( name='Wetland land cover', description='Percentage of land area within the WBD 12-digit hydrologic unit that is classified as wetland land cover (2006 NLCD codes: 90, 95). A value of -1 indicates that no land cover data was located within the hydrologic unit.', type='Numeric', weight=5, tags=[usa, environmental, license_, source, ratio], ) pagt = OBSColumn( name='Agricultural/cultivated land cover', description='Percentage of land area within the WBD 12-digit hydrologic unit that is classified as agricultural/cultivated land cover (2006 NLCD codes: 21, 81, 82). A value of -1 indicates that no land cover data was located within the hydrologic unit.', type='Numeric', weight=5, tags=[usa, environmental, license_, source, ratio], ) pagp = OBSColumn( name='Agricultural pasture land cover', description='Percentage of land area within the WBD 12-digit hydrologic unit that is classified as agricultural pasture land cover (2006 NLCD codes: 81). 
A value of -1 indicates that no land cover data was located within the hydrologic unit.', type='Numeric', weight=5, tags=[usa, environmental, license_, source, ratio], ) pagc = OBSColumn( name='Agricultural cropland land cover', description='Percentage of land area within the WBD 12-digit hydrologic unit that is classified as agricultural cropland land cover (2006 NLCD codes: 82). A value of -1 indicates that no land cover data was located within the hydrologic unit.', type='Numeric', weight=5, tags=[usa, environmental, license_, source, ratio], ) pfor90 = OBSColumn( name='Modified forest land cover', description='Percentage of land area within the WBD 12-digit hydrologic unit that is classified as modified forest land cover (2006 NLCD codes: 41, 42, 43, and 90). A value of -1 indicates that no land cover data was located within the hydrologic unit.', type='Numeric', weight=5, tags=[usa, environmental, license_, source, ratio], ) pwetl95 = OBSColumn( name='Modified wetlands land cover', description='Percentage of land area within the WBD 12-digit hydrologic unit that is classified as modified wetlands land cover (2006 NLCD codes: 95). A value of -1 indicates that no land cover data was located within the hydrologic unit.', type='Numeric', weight=5, tags=[usa, environmental, license_, source, ratio], ) return OrderedDict([ ('pfor', pfor), ('pwetl', pwetl), ('pagt', pagt), ('pagp', pagp), ('pagc', pagc), ('pfor90', pfor90), ('pwetl95', pwetl95), ]) def columns(self): input_ = self.input() usa = input_['sections']['united_states'] environmental = input_['subsections']['environmental'] license_ = input_['licenses']['no-restrictions'] source = input_['sources']['epa-enviroatlas'] units = input_['units'] cols = getattr(self, self.table)(usa, environmental, license_, source, units) for colname, col in cols.iteritems(): col.id = '{}_{}'.format(self.table, colname) return cols class EnviroAtlas(TableTask): table = Parameter() time = Parameter() def requires(self): return { 'geom_cols': HUCColumns(), 'data_cols': EnviroAtlasColumns(table=self.table.lower()), 'data': EnviroAtlasTempTable(csv_name=self.table + '.csv'), } def timespan(self): return self.time def columns(self): cols = OrderedDict() input_ = self.input() cols['huc_12'] = input_['geom_cols']['huc_12'] cols.update(input_['data_cols']) return cols def populate(self): session = current_session() cols = self.columns() cols.pop('huc_12') colnames = cols.keys() session.execute(''' INSERT INTO {output} (huc_12, {colnames}) SELECT huc_12, {typed_colnames}::Numeric FROM {input} '''.format(input=self.input()['data'].table, output=self.output().table, colnames=', '.join(colnames), typed_colnames='::Numeric, '.join(colnames))) class AllTables(WrapperTask): TABLES = [ ('AvgPrecip', '2010'), ('landcover', '2006'), ('solar_energy', '2012'), ] def requires(self): for table, timespan in self.TABLES: yield EnviroAtlas(table=table, time=timespan) # class HUCMetaWrap(MetaWrapper): # table = Parameter() # time = Parameter() # # params = { # 'table': ['AvgPrecip','landcover','solar_energy'], # 'time': ['2010','2006','2012'] # } # # def tables(self): # yield EnviroAtlas(table=self.table, time=self.time) # yield HUC()
# -*- coding: utf-8 -*-
# Authors: <NAME> <<EMAIL>>

import unittest

from .. import SingleElementinaSortedArray

class test_SingleElementinaSortedArray(unittest.TestCase):
    solution = SingleElementinaSortedArray.Solution()

    def test_singleNonDuplicate(self):
        self.assertEqual(self.solution.singleNonDuplicate([1, 1, 2, 2, 3, 4, 4]), 3)
        self.assertEqual(self.solution.singleNonDuplicate([1, 1, 2, 3, 3, 4, 4, 8, 8]), 2)

if __name__ == '__main__':
    unittest.main()

// --- View model converter functions ---

func (c *EntryController) createListViewModel(userContract *model.UserContract, workSummary *model.WorkSummary,
	pageNum int, cnt int, entries []*model.Entry, entryTypesMap map[int]*model.EntryType,
	entryActivitiesMap map[int]*model.EntryActivity) *vm.ListEntries {
	lesvm := vm.NewListEntries()
	lesvm.Summary = c.createListSummaryViewModel(userContract, workSummary)
	lesvm.HasPrevPage = pageNum > 1
	lesvm.HasNextPage = (pageNum * pageSize) < cnt
	lesvm.PrevPageNum = pageNum - 1
	lesvm.NextPageNum = pageNum + 1
	lesvm.Days = c.createEntriesViewModel(userContract, entries, entryTypesMap, entryActivitiesMap, true)
	return lesvm
}

Survival benefit of solid-organ transplant in the United States.

IMPORTANCE The field of transplantation has made tremendous progress since the first successful kidney transplant in 1954.

OBJECTIVE To determine the survival benefit of solid-organ transplant as recorded during a 25-year study period in the United Network for Organ Sharing (UNOS) database and the Social Security Administration Death Master File.

DESIGN, SETTING, AND PARTICIPANTS In this retrospective analysis of UNOS data for solid-organ transplant during a 25-year period (September 1, 1987, through December 31, 2012), we reviewed the records of 1,112,835 patients: 533,329 recipients who underwent a transplant and 579,506 patients who were placed on the waiting list but did not undergo a transplant.

MAIN OUTCOMES AND MEASURES The primary outcome was patient death while on the waiting list or after transplant. Kaplan-Meier survival functions were used for time-to-event analysis.

RESULTS We found that 2,270,859 life-years (2,150,200 life-years from the matched analysis) were saved to date during the 25 years of solid-organ transplant. A mean of 4.3 life-years were saved (observed to date) per solid-organ transplant recipient. Kidney transplant saved 1,372,969 life-years; liver transplant, 465,296 life-years; heart transplant, 269,715 life-years; lung transplant, 64,575 life-years; pancreas-kidney transplant, 79,198 life-years; pancreas transplant, 14,903 life-years; and intestine transplant, 4,402 life-years.

CONCLUSIONS AND RELEVANCE Our analysis demonstrated that more than 2 million life-years were saved to date by solid-organ transplants during a 25-year study period. Transplants should be supported and organ donation encouraged.
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Derived Parameters
------------------

The engineering archive has pseudo-MSIDs that are derived via computation from
telemetry MSIDs. All derived parameter names begin with the characters "DP_"
(not case sensitive as usual). Otherwise there is no difference from standard
MSIDs.
"""

from .base import *  # noqa
from .thermal import *  # noqa
from .test import *  # noqa
from .acispow import *  # noqa
from .pcad import *  # noqa
from .orbit import *  # noqa
from .eps import *  # noqa

// Invoked when device is mounted (configured)
void tuh_mount_cb(uint8_t daddr)
{
    printf("Device attached, address = %d\r\n", daddr);
    tuh_descriptor_get_device(daddr, &desc_device, 18, print_device_descriptor, 0);
}

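// A hedged sketch of the completion callback named above, modeled on TinyUSB's
// host examples; the tuh_xfer_t fields and XFER_RESULT_SUCCESS constant should
// be verified against the TinyUSB version in use. desc_device is assumed to be
// the global descriptor buffer the transfer fills.
void print_device_descriptor(tuh_xfer_t *xfer)
{
    if (xfer->result != XFER_RESULT_SUCCESS) {
        printf("Failed to get device descriptor\r\n");
        return;
    }
    printf("VID:PID = %04x:%04x\r\n", desc_device.idVendor, desc_device.idProduct);
}
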
import time from dataclasses import dataclass from typing import List, Iterable, Optional import numpy as np from numpy import log, exp, log10 @dataclass class TrialGrid: """Stores trial grid parameters""" size: int period_min: float period_max: float @dataclass class Result: """ Stores fit results. Attributes ---------- period : float Best-fit period, in seconds period_uncertainty : float Uncertainty on best-fit period, in seconds toas : list List of TOAs that were provided as input toa_uncertainties : list List of TOA uncertainties that were provided as input grid : TrialGrid Parameters of the period trial grid that was applied rotation_indices : list List of fitted integer rotation indices, one per input TOA. The rotation index associated with the first provided TOA is 0. formatted_period : str Best-fit period formatted following the usual pulsar astronomy convention, e.g. 1.05(1) scaled_toa_uncertainties : list The input TOA uncertainties scaled by a constant factor so that they would yield a reduced chi-square of 1. residuals : list Fit residuals (in seconds) on each time interval between each TOA and the first one. The fit residual on the first TOA is always 0, because the first TOA is used as the time reference. solve_time : float Total time spent to get a solution, in seconds. """ period: float period_uncertainty: float toas: List[float] toa_uncertainties: List[float] grid: TrialGrid rotation_indices: List[int] formatted_period: str scaled_toa_uncertainties: List[float] residuals: List[float] solve_time: float DAY_SECONDS = 86400.0 def dot3(a, b, c) -> float: """ Returns the matrix product A.B.C, implicitly assuming that the result is a scalar """ return np.dot(a, np.dot(b, c))[0, 0] def format_uncertain_quantity(quantity: float, uncertainty: float) -> str: """ Format a number with an associated uncertainty following the usual pulsar astronomy convention. >>> format_uncertain_quantity(1.05, 0.01) 1.05(1) """ decimals = -int(np.floor(log10(uncertainty))) qr = round(quantity, decimals) ur = int(round(uncertainty * 10 ** decimals)) if ur == 10: ur = 1 decimals -= 1 return f"{qr:.{decimals}f}({ur})" def rratsolve( toas: Iterable[float], toa_uncertainty: float, max_grid_size: Optional[int] = 100_000_000, ) -> Result: """ Find the longest spin period that fits a sparse set of single pulse TOAs. The method is based on finding a common periodicity that divides all the time intervals between the first and all subsequent TOAs. Only period is fitted; initial phase is ignored. Step 1: Find the optimal spacing between consecutive trial periods Step 2: Try periods between: * Period min: 10 times the TOA uncertainty and * Period max: the smallest interval between all TOA pairs plus 10%, and pick the period that yields the smallest RMS phase residuals Step 3: Refine the solution assuming that the rotation counts associated to each TOA have been inferred correctly. In this case there is an analytical solution, which the one returned. Parameters ---------- toas : list or ndarray Pulse arrival MJDs. toa_uncertainty : float The estimated TOA RMS uncertainty in second. max_grid_size : int or None Maximum allowed number of points in the trial grid. If None, no limit is enforced. If not None and the limit is exceeded, raise ValueError. 
Returns ------- result : Result Result object (dataclass) which wraps all the outputs as attributes See Also -------- Result : storage class for the outputs of this function """ start_time = time.time() n = len(toas) if not n >= 3: raise ValueError("Need at least 3 TOAs") toa_uncertainties = np.repeat(toa_uncertainty, n) iref = 0 tref = toas[iref] toas = np.asarray(toas) T = (toas - tref) * DAY_SECONDS T = np.delete(T, iref).reshape(-1, 1) sigma = np.delete(toa_uncertainties, iref) C = np.diag(sigma ** 2) + sigma[iref] ** 2 M = np.linalg.inv(C) pmin = 10 * sigma.max() pmax = 1.1 * abs(T).min() + 10 * sigma.max() delta_logp = n ** 0.5 * dot3(T.T, M, T) ** -0.5 pgrid = exp(np.arange(log(pmin), log(pmax), delta_logp)) if max_grid_size is not None and pgrid.size > max_grid_size: raise ValueError( "Trial grid size would exceed allowed maximum. " "Try reducing the time span of the TOAs " "or increasing the estimated TOA uncertainty" ) trial_grid = TrialGrid(size=pgrid.size, period_min=pgrid[0], period_max=pgrid[-1]) # Estimated fractional turn counts D = T / pgrid # Estimated integer turn counts K = D.round().astype(int) # Phase residuals R = D - K # NOTE: This is q = Q / P^2 # Q is a chi2 with n-1 degrees of freedom q = (np.dot(M, R) * R).sum(axis=0) # Best-fit trial grid point iopt = q.argmin() Kopt = K[:, iopt].reshape(-1, 1) # Assuming the turn counts are correct, we can now refine the best-fit period pstar = dot3(T.T, M, Kopt) / dot3(Kopt.T, M, Kopt) Dstar = T / pstar Kstar = Dstar.round().astype(int) Rstar = Dstar - Kstar Qstar = (np.dot(M, Rstar) * Rstar).sum() * pstar ** 2 time_residuals = (Rstar * pstar).ravel() # Uncertainty scaling factor such that Qstar = n - 1 uscale = (Qstar / (n - 1)) ** 0.5 # 1-sigma uncertainty on Pstar # NOTE: On some artificial inputs, uncertainty can be exactly zero, and we make sure that # does not happen pstar_uncertainty = max(pstar * dot3(T.T, M, T) ** -0.5 * uscale, np.finfo(float).eps) formatted_period = format_uncertain_quantity(pstar, pstar_uncertainty) # NOTE: must cast to int from np.int64 to avoid JSON serialization problems later # Also, the rotation index of the first TOA is always 0 rotation_indices = [0] + list(map(int, Kopt.ravel())) time_residuals = [0] + list(time_residuals) end_time = time.time() result = Result( period=pstar, period_uncertainty=pstar_uncertainty, toas=list(toas), toa_uncertainties=list(toa_uncertainties), grid=trial_grid, rotation_indices=rotation_indices, formatted_period=formatted_period, scaled_toa_uncertainties=list(toa_uncertainties * uscale), residuals=time_residuals, solve_time=end_time - start_time, ) return result
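# A hedged usage sketch with synthetic TOAs: four pulses from a 1-second-period
# source at rotation numbers 0, 7, 29 and 130, expressed in MJD.
if __name__ == "__main__":
    period_days = 1.0 / DAY_SECONDS
    toas = [58000.0 + k * period_days for k in (0, 7, 29, 130)]
    result = rratsolve(toas, toa_uncertainty=1e-3)
    print(result.formatted_period)   # best-fit period, expected close to 1.0 s
    print(result.rotation_indices)   # fitted integer rotation counts per TOA
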
import java.sql.Connection;
import java.sql.DriverManager;
import javax.swing.JOptionPane;

/**
 * @author Hishan Kavishka
 */
public class SQlConnection {

    String sourceURL = null;

    public SQlConnection() {
        try {
            // Load JDBC driver
            Class.forName("com.mysql.jdbc.Driver");
            // Connection URL.
            //sourceURL = new String("jdbc:mysql://localhost:3306/ITP_Java");
            sourceURL = new String("jdbc:mysql://localhost:3306/shuttle");
        } catch (Exception e) {
            JOptionPane.showMessageDialog(null, "Unable to load database driver classes");
        }
    }

    public Connection connect() {
        Connection dbConn = null;
        try {
            dbConn = DriverManager.getConnection(sourceURL, "root", "");
        } catch (Exception e) {
            JOptionPane.showMessageDialog(null, "Unable to Connect Database");
        }
        return dbConn;
    }

    public void con_close(Connection dbConn) {
        try {
            dbConn.close();
        } catch (Exception e) {
            JOptionPane.showMessageDialog(null, "Database connection closing failure");
        }
    }
}

#include <bits/stdc++.h>
#define ll long long
#define llu unsigned long long
#define ui unsigned int
#define MAX(a, b, c) max(a, max(b, c))
#define MIN(a, b, c) min(a, min(b, c))
#define FOR(from, n) for (int i = from; i < n; ++i)
#define FORJ(from, n) for (int j = from; j < n; ++j)
#define FORR(from, to) for (int i = from; i >= to; i--)
#define MAXLL(a, b) (a > b) ? a : b
#define MINLL(a, b) (a < b) ? a : b
#define endl '\n' // no flushing
using namespace std;

mt19937 rng(chrono::steady_clock::now().time_since_epoch().count());

bool f(int st, vector<int> &a) {
    int x = 0;
    if (st == 1) x = 2;
    else x = 1;
    bool was_swap = false;
    for (int i = st; i < a.size() - x; i += 2) {
        if (a[i] > a[i+1]) {
            swap(a[i], a[i+1]);
            was_swap = true;
        }
    }
    return was_swap;
}

void test_case() {
    int n;
    cin >> n;
    vector<int> a(n+1);
    FOR(1, n+1) {
        cin >> a[i];
    }
    int ctr = 0;
    FOR(1, 1111) {
        bool x = f(2 - (i % 2), a);
        if (x) ctr = i;
    }
    cout << ctr << endl;
}

int main() {
    ios::sync_with_stdio(false);
    cin.tie(NULL);
    cout.tie(NULL);
    typedef numeric_limits<double> dbl;
    cout.precision(dbl::max_digits10);

    int t;
    cin >> t;
    while (t--) {
        test_case();
    }
    return 0;
}

// readFileInfo applies build constraints to an input file and returns whether
// it should be compiled.
func readFileInfo(bctx build.Context, input string) (fileInfo, error) {
	fi := fileInfo{filename: input}
	if ext := filepath.Ext(input); ext == ".C" {
		fi.ext = cxxExt
	} else {
		switch strings.ToLower(ext) {
		case ".go":
			fi.ext = goExt
		case ".c":
			fi.ext = cExt
		case ".cc", ".cxx", ".cpp":
			fi.ext = cxxExt
		case ".m":
			fi.ext = objcExt
		case ".mm":
			fi.ext = objcxxExt
		case ".s":
			fi.ext = sExt
		case ".h", ".hh", ".hpp", ".hxx":
			fi.ext = hExt
		default:
			return fileInfo{}, fmt.Errorf("unrecognized file extension: %s", ext)
		}
	}

	dir, base := filepath.Split(input)
	if strings.HasPrefix(base, "_cgo") {
		fi.matched = true
	} else {
		match, err := bctx.MatchFile(dir, base)
		if err != nil {
			return fi, err
		}
		fi.matched = match
	}
	if fi.ext != goExt {
		return fi, nil
	}

	f, err := os.Open(input)
	if err != nil {
		return fileInfo{}, err
	}
	defer f.Close()

	fi.fset = token.NewFileSet()
	if err := readGoInfo(f, &fi); err != nil {
		return fileInfo{}, err
	}

	for _, imp := range fi.imports {
		if imp.path == "C" {
			fi.isCgo = true
			break
		}
	}
	fi.matched = fi.matched && (bctx.CgoEnabled || !fi.isCgo)
	return fi, nil
}

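// A hedged usage sketch (same package, since fileInfo is unexported).
// build.Default is the host's default build context; the filename is a
// placeholder.
func exampleReadFileInfo() error {
	fi, err := readFileInfo(build.Default, "conn_linux.go")
	if err != nil {
		return err
	}
	// fi.matched reflects build constraints (GOOS/GOARCH, build tags, _cgo files);
	// fi.isCgo reports whether the file imports "C".
	fmt.Printf("matched=%v isCgo=%v\n", fi.matched, fi.isCgo)
	return nil
}
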
/**
 * Command for listing available variables.
 */
public class ShowKeys extends AbstractCommand<VoidResult> {

    public ShowKeys() {
        super("keys", VoidResult.class);
    }

    @Override
    public String getDescription() {
        return "Returns the available variable names that can be set in the session.";
    }

    @Override
    public VoidResult execute(CommandLine cli) throws CommandException {
        println();
        for (final SessionVariable variable : SessionVariable.values()) {
            println("\t" + variable.getVariableName());
        }
        println();
        return VoidResult.getInstance();
    }
}

Contextualising institutional complementarity. How long‐term unemployment depends on employment protection legislation, active labour market policies and the economic climate

This study investigated whether and how active labour market policies (ALMPs) and employment protection interact with each other in reducing long‐term unemployment. We argue that how well the interaction between the two labour market institutions reduces long‐term unemployment depends on the level of economic growth. To improve analytical clarity, two types of ALMPs were differentiated, namely training and employment programmes. Using data on 20 European countries over 16 years, our results suggest that employment protection moderates the relationship between employment programmes and long‐term unemployment: the combination of high spending on employment programmes and less strict employment protection is associated with less long‐term unemployment, and this moderation effect is stronger during an economic downturn. No moderation effect of employment protection on the relationship between training programmes and long‐term unemployment was found, even when the economic climate was taken into account as a contextual factor.
import gym
import gym_sokoban
import numpy as np

from algorithms.sarsa import run_sarsa
from algorithms.montecarlo import run_montecarlo
from algorithms.qlearning import run_qlearning
from sokoban_utils.policy import run_policy
from sokoban_utils.utils import *
from sokoban_utils.global_configs import GlobalConfigs

env = gym.make('Boxoban-Train-v1')

config = Config()
config.total_episodes = 100

print("[!] Max Epsilon Test")

# Test montecarlo, sarsa and qlearning max epsilon (exploration probability at start)
sarsa_max_epsilon = open("logs/sarsa_max_epsilon.txt", "a+")
montecarlo_max_epsilon = open("logs/montecarlo_max_epsilon.txt", "a+")
qlearning_max_epsilon = open("logs/qlearning_max_epsilon.txt", "a+")

max_epsilon_config = copy_config(config)

# Defining list of values for the max epsilon
max_epsilon_list = [1.0]

for max_epsilon in max_epsilon_list:
    max_epsilon_config.epsilon = max_epsilon
    max_epsilon_config.max_epsilon = max_epsilon

    _, logfile = run_sarsa(env, log=True, initial_config=max_epsilon_config)
    sarsa_max_epsilon.write(logfile + "\n")

    _, logfile = run_montecarlo(env, log=True, initial_config=max_epsilon_config)
    montecarlo_max_epsilon.write(logfile + "\n")

    _, logfile = run_qlearning(env, log=True, initial_config=max_epsilon_config)
    qlearning_max_epsilon.write(logfile + "\n")

sarsa_max_epsilon.close()
montecarlo_max_epsilon.close()
qlearning_max_epsilon.close()

env.close()

/**
 * The KeyTransferResponse contains either key transfer attributes
 * or any faults that prevented the application key transfer.
 *
 * @author rbhat
 */
@JsonInclude(JsonInclude.Include.NON_DEFAULT)
public class KeyTransferResponse extends AbstractResponse {

    private String status;
    private String operation;
    private KeyTransferAttributes data;

    public KeyTransferResponse() {
        super();
    }

    public KeyTransferResponse(KeyTransferAttributes created) {
        super();
        this.data = created;
    }

    protected void setStatus(String status) {
        this.status = status;
    }

    public String getStatus() {
        return this.status;
    }

    protected void setOperation(String operation) {
        this.operation = operation;
    }

    public String getOperation() {
        return this.operation;
    }

    @JsonInclude(JsonInclude.Include.NON_DEFAULT)
    public final KeyTransferAttributes getData() {
        return data;
    }
}

/**
 * Test of startGame method, of class PebbleGame.
 * Tests the startGame method with 100 players.
 */
@Test(timeout=10000)
public void testStartGameHundredPlayer() {
    setUp(100);
    ecm1414_ca.PebbleGame pg = new ecm1414_ca.PebbleGame(this.players, this.pebbleValues);
    pg.startGame();
}

N = int(input())
s = []
for _ in range(N):
    s.append(int(input()))
s = sorted(s)

ans = sum(s)
i = 0
#print(s)
while ans % 10 == 0:
    ans = sum(s)
    #print(i)
    ans -= s[i]
    i += 1
    if i >= N:
        ans = 0
        break

print(ans)

/**
 * Post processor implementation for registration of all found annotated methods inside collector.
 *
 * @param <T> annotation type
 * @author Vyacheslav Rusakov
 * @since 27.11.2018
 */
public class SimpleAnnotationProcessor<T extends Annotation> implements MethodPostProcessor<T> {

    private final MethodsCollector collector;

    public SimpleAnnotationProcessor(final MethodsCollector collector) {
        this.collector = collector;
    }

    @Override
    public void process(final T annotation, final Method method, final Object instance) throws Exception {
        Utils.checkNoParams(method);
        collector.register(annotation.annotationType(), instance, method);
    }
}

/**
 * Remove the given <code>row</code> from the table.
 *
 * @param row the row to remove
 */
public void removeRow(OutputRow row) {
    if (row.getPair() instanceof DiagnosticPair) {
        mDiagnosticRows.remove(row);
    } else {
        mCommandRows.remove(row);
    }
    updateAdapter();
}

The media's insatiable desire to sensationalize stories has resulted in a lot of misinformation around bitcoin. For business owners who aren't properly educated, this misinformation – understandably – leaves them with inaccurate opinions, and the reality of bitcoin is left somewhere behind.

Those who understand bitcoin view the technology as highly innovative and one that holds a tremendous amount of potential. I personally share this opinion, but as a payment professional I came to this conclusion from a very cautious position. It was important that our company educate ourselves on this new type of currency in order to determine how to best deal with it.

So, what is bitcoin and what does it mean to businesses? Bitcoin comprises the following four things:

First, it is software. In 2009, Satoshi Nakamoto released the first version of the bitcoin software as an open-source project. The software essentially defines the protocol on which the bitcoin network operates. As an open-source project, the software can be freely used (and modified) for other projects. As a result, many other coins – informally known as altcoins – have been launched using modified versions of the original bitcoin software. Some examples of altcoins are Litecoin, Dogecoin and Peercoin.

Second, bitcoin is an open peer-to-peer network. The use of and participation in the bitcoin software by users across the globe creates the bitcoin network. Anyone can participate, and it's the collective computing power of all the users (specifically a type of user known as a Miner) across the globe that supports and operates the bitcoin network. The peer-to-peer design of the network does not support any central authority that controls the network – in other words, it's decentralized. I like to describe it as "a network by the people, for the people."

Third, bitcoin is a secure transaction verification mechanism. It combines a general accounting ledger, known as the Blockchain, with encryption technology to provide a secure transaction verification engine. This removes the need for a trusted third party and allows bitcoin to operate independently. The bitcoin Blockchain is shared with the public and everyone can view it in real time.

Fourth, it is a brand. It's the name of the encrypted digital tokens that represent a unit of value within the bitcoin payment network, much like the greenback or loonie represent a unit of value within their respective fiat currency systems.

Bitcoin's impact on business

Bitcoin was designed first and foremost to facilitate online payments, and it is as a payment system that I believe it will have its initial significant impact on business. On a daily basis we rely on conventional payment networks such as Visa, MasterCard and Interac. Now more than ever, businesses that want to be competitive must utilize electronic payments in selling their goods and services. The use of cheques and cash has been in steady decline for many years while e-commerce and electronic payments continue to grow year after year. In my opinion, there are a number of characteristics of bitcoin which offer improvements over conventional payment networks.

1. It's very easy to participate in and use bitcoin. Conventional payment networks rely on the banks as trusted third parties to facilitate the transfer of funds across those networks. You need to have a bank account in order to participate. Setting up a bank account, particularly as a business, is often difficult and beset by bureaucracy.
If you do have a bank account, you are then bound by rules and limitations that can stifle innovation and forward thinking. You can set up a bitcoin address in a matter of seconds – there's no application or approval process. It costs you nothing, you can participate as an individual or as a business, you can do it from anywhere in the world and the only limitation for transacting is how many bitcoins you have to spend.

2. Bitcoin is cost effective and flexible. Perhaps the coolest thing about bitcoin is that the payor (the person sending the funds) can voluntarily set the transaction fee and can also choose to have no fee at all. Bitcoin's unique fee structure achieves four important things:

a. Bitcoin pays no attention to user logistics. The network does not care where the sender and receiver of a bitcoin transaction physically exist. Location has no effect on the speed of transaction verification nor on the cost. This allows users across the globe to transact quickly with low or even no cost. Transacting internationally with conventional payment systems is expensive and often slow and cumbersome.

b. It supports micro-payments. You can send very small amounts through the bitcoin network without having to pay a transaction cost. This has long been an issue with conventional payment systems, where transaction fees can be equal to or even more than the value being transferred, which is an obvious problem.

c. It supports macro-payments. You can send very large amounts through the bitcoin network without having to pay a large transaction cost. This is not possible with conventional payment systems.

d. It provides the sender with a tool to incentivize the bitcoin network to validate their transaction. This is helpful in a scenario where validating the transaction quickly is important.

3. It's secure. It's difficult – edging on the realm of impossible – to defraud the bitcoin network. Payment-related fraud committed over conventional payment networks is a big problem. In 2012, over $21-billion in payment fraud occurred in the U.S. alone. That cost is much larger on a global scale and it is incurred by all of us who participate as consumers and business owners. Conventional payment networks were not originally designed to facilitate electronic transactions. Payment-related fraud, particularly e-commerce based payment fraud, is relatively easy to commit and there is little to no recourse against those who attempt it. Bitcoin does not suffer this problem. Explaining why gets pretty complex, but I'll key in on a few important aspects:

Bitcoin is a 'push' payment system. Payment transactions are 'pushed', or initiated, by the payor. The payor defines the value amount and who the payee is and then 'pushes' the bitcoin value to the payee. It's the equivalent of taking cash out of your wallet and handing it to the person that you owe. Conventional payment networks primarily use 'pull' payments. The payor authorizes the payee to 'pull' funds from their bank account or card account. It's the equivalent of handing your wallet to the person you owe and trusting them to take out the right amount of cash. The 'pull' system – in which you trust the recipient to take the correct amount – is one of the biggest reasons why payment-related fraud has grown to be an enormous problem on a global scale.

Bitcoin is indemnified. There is no internal dispute mechanism within the bitcoin protocol.
When you send someone bitcoins, it is final. There's no getting the bitcoins back unless the recipient chooses to return them to you. Traditional payment networks have internal dispute mechanisms. These dispute mechanisms are supposed to protect consumers, but in reality they are heavily abused by fraudsters and are another big reason why payment-related fraud has grown to be an enormous problem on a global scale.

What are the main risks of using bitcoin?

Bitcoin is a good payment network, but it's in its infancy and does have its shortcomings. The following are things I would recommend anyone be aware of in order to use bitcoin effectively for sending and receiving payments:

Bitcoin is currently pseudonymous: The Blockchain provides the amounts and bitcoin addresses of each transaction, but it does not provide the personal or business information behind the owners of the addresses. I believe we will see identity added to the bitcoin software, but regardless, it's your responsibility both as a consumer and as a business owner to know and to understand with whom you are transacting. The world is full of bad people participating in illicit activity, and bitcoin makes it very easy and convenient to transact across the globe. The media's sensational association of bitcoin with illicit activity has given bitcoin an unfairly bad reputation, in my opinion. The amount of bitcoin used in illicit activity doesn't even come close to the amount of fiat currency used in illicit activity – it's not even on the same scale. No one has called for the end of the U.S. dollar because it's used by drug dealers, as that would undoubtedly be seen as ridiculous. What isn't ridiculous, however, is knowing whom you are transacting with, and bitcoin can make it easy to skirt that responsibility – so be aware of that.

Bitcoin can be lost or stolen: You have to make an effort to store your bitcoin properly because it can be lost or stolen. If someone gets access to your private keys, they can take your bitcoin. If you lose your keys, unless you find them, the bitcoin cannot be recovered. There are many effective ways to store bitcoin. Take the time to educate yourself on the methods that work best for you.

Volatility and efficient conversion to fiat currencies: Early adoption of and participation in bitcoin has been bolstered by speculative trading. Traders look for arbitrage opportunities between bitcoin, fiat currencies and altcoins. This is good, as it drives value into the bitcoin network, but it also makes the value of bitcoin volatile. There is not yet enough participation in bitcoin for businesses to operate effectively with bitcoin only. Until that day, everyone still relies heavily on fiat currencies such as the Canadian dollar. So, if you are a business in this country and you want to accept bitcoin, you'll want to be sure that you are properly valuing the bitcoin against the Canadian dollar and then converting the bitcoin to Canadian dollars regularly so you are not affected by price volatility. The easiest way to do this is to work with a bitcoin exchange service. There are now many reputable and innovative bitcoin exchanges, and their services are improving on a daily basis.

Bank scrutiny: The banks are being very cautious with respect to bitcoin. It is important for any business to maintain a good banking relationship. If you use bitcoin in your business and your banker becomes aware of that, you may come under some scrutiny.
You may even risk losing your banking services. The banks' big concerns around bitcoin are source of funds and knowing your customer. This goes back to my earlier statement: it is imperative for a business to know and understand with whom it is transacting. The last thing a bank wants to see is funds associated with any illicit activity moving through its accounts, and it will err well on the side of caution in this regard.

Personally, I don't see any of these risks as being difficult to mitigate. As participation in bitcoin grows, all of these risks will be significantly reduced. In my opinion, conventional payment systems carry more risk than bitcoin does as a payment system.

Geoff Gordon is the CEO of Vogogo, a payment processing startup based in Calgary.
// TODO: this looping code can be combined into a single loop function that accepts an additional // function argument. In Fein's case it would be Feis and in Fynd's case it would be Tail. func Fein(arg string) (*big.Int, error) { v, ok := big.NewInt(0).SetString(arg, 10) if !ok { return nil, fmt.Errorf(ugi.ErrInvalidInt, arg) } return feinLoop(v) }
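// A minimal sketch of the refactor the TODO above describes: a single loop
// driver that takes the per-iteration function as an argument, so Fein can
// pass Feis and Fynd can pass Tail. The step signature and termination
// convention below are assumptions for illustration, not the real feinLoop
// contract.
func loopWith(arg string, step func(*big.Int) (*big.Int, bool)) (*big.Int, error) {
	v, ok := big.NewInt(0).SetString(arg, 10)
	if !ok {
		return nil, fmt.Errorf(ugi.ErrInvalidInt, arg)
	}
	for {
		// Apply one step; the step reports when iteration is finished.
		next, done := step(v)
		if done {
			return next, nil
		}
		v = next
	}
}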
import torch import torch.utils.data import torchvision.transforms as transforms import Datasets import numpy as np import os import scipy.io as sio def extract_features_MARS(model, scale_image_size, info_folder, data, extract_features_folder, logger, batch_size=128, workers=4, is_tencrop=False): logger.info('Begin extract features') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) if is_tencrop: logger.info('==> Using TenCrop') tencrop = transforms.Compose([ transforms.Resize([int(x*1.125) for x in scale_image_size]), transforms.TenCrop(scale_image_size)]) else: tencrop = None transform = transforms.Compose([ transforms.Resize(scale_image_size), transforms.ToTensor(), normalize, ]) train_name_path = os.path.join(info_folder, 'train_name.txt') test_name_path = os.path.join(info_folder, 'test_name.txt') train_data_folder = os.path.join(data, 'bbox_train') test_data_folder = os.path.join(data, 'bbox_test') logger.info('Train data folder: '+train_data_folder) logger.info('Test data folder: '+test_data_folder) logger.info('Begin load train data') train_dataloader = torch.utils.data.DataLoader( Datasets.MARSEvalDataset(folder=train_data_folder, image_name_file=train_name_path, transform=transform, tencrop=tencrop), batch_size=batch_size, shuffle=False, num_workers=workers, pin_memory=True) logger.info('Begin load test data') test_dataloader = torch.utils.data.DataLoader( Datasets.MARSEvalDataset(folder=test_data_folder, image_name_file=test_name_path, transform=transform, tencrop=tencrop), batch_size=batch_size, shuffle=False, num_workers=workers, pin_memory=True) train_features = extract_features(model, train_dataloader, is_tencrop) test_features = extract_features(model, test_dataloader, is_tencrop) if os.path.isdir(extract_features_folder) is False: os.makedirs(extract_features_folder) sio.savemat(os.path.join(extract_features_folder, 'train_features.mat'), {'feature_train_new': train_features}) sio.savemat(os.path.join(extract_features_folder, 'test_features.mat'), {'feature_test_new': test_features}) return def extract_features_Market1501(model, scale_image_size, data, extract_features_folder, logger, batch_size=128, workers=4, is_tencrop=False, gen_stage_features = False): logger.info('Begin extract features') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) if is_tencrop: logger.info('==> Using TenCrop') tencrop = transforms.Compose([ transforms.Resize([int(x*1.125) for x in scale_image_size]), transforms.TenCrop(scale_image_size)]) else: tencrop = None transform = transforms.Compose([ transforms.Resize(scale_image_size), transforms.ToTensor(), normalize, ]) train_data_folder = os.path.join(data, 'bounding_box_train') test_data_folder = os.path.join(data, 'bounding_box_test') query_data_folder = os.path.join(data, 'query') logger.info('Begin load train data from '+train_data_folder) train_dataloader = torch.utils.data.DataLoader( Datasets.Market1501EvaluateDataset(folder=train_data_folder, transform=transform, tencrop=tencrop), batch_size=batch_size, shuffle=False, num_workers=workers, pin_memory=True) logger.info('Begin load test data from '+test_data_folder) test_dataloader = torch.utils.data.DataLoader( Datasets.Market1501EvaluateDataset(folder=test_data_folder, transform=transform, tencrop=tencrop), batch_size=batch_size, shuffle=False, num_workers=workers, pin_memory=True) logger.info('Begin load query data from '+query_data_folder) query_dataloader = torch.utils.data.DataLoader( 
Datasets.Market1501EvaluateDataset(folder=query_data_folder, transform=transform, tencrop=tencrop), batch_size=batch_size, shuffle=False, num_workers=workers, pin_memory=True) if not gen_stage_features: train_features = extract_features(model, train_dataloader, is_tencrop) test_features = extract_features(model, test_dataloader, is_tencrop) query_features = extract_features(model, query_dataloader, is_tencrop) if os.path.isdir(extract_features_folder) is False: os.makedirs(extract_features_folder) sio.savemat(os.path.join(extract_features_folder, 'train_features.mat'), {'feature_train_new': train_features}) sio.savemat(os.path.join(extract_features_folder, 'test_features.mat'), {'feature_test_new': test_features}) sio.savemat(os.path.join(extract_features_folder, 'query_features.mat'), {'feature_query_new': query_features}) else: # model.gen_stage_features = True train_features = extract_stage_features(model, train_dataloader, is_tencrop) test_features = extract_stage_features(model, test_dataloader, is_tencrop) query_features = extract_stage_features(model, query_dataloader, is_tencrop) if os.path.isdir(extract_features_folder) is False: os.makedirs(extract_features_folder) for i in range(4): sio.savemat(os.path.join(extract_features_folder, 'train_features_{}.mat'.format(i + 1)), {'feature_train_new': train_features[i]}) sio.savemat(os.path.join(extract_features_folder, 'test_features_{}.mat'.format(i + 1)), {'feature_test_new': test_features[i]}) sio.savemat(os.path.join(extract_features_folder, 'query_features_{}.mat'.format(i + 1)), {'feature_query_new': query_features[i]}) sio.savemat(os.path.join(extract_features_folder, 'train_features_fusion.mat'), {'feature_train_new': train_features[4]}) sio.savemat(os.path.join(extract_features_folder, 'test_features_fusion.mat'), {'feature_test_new': test_features[4]}) sio.savemat(os.path.join(extract_features_folder, 'query_features_fusion.mat'), {'feature_query_new': query_features[4]}) def extract_features_CUHK03(model, scale_image_size, data, extract_features_folder, logger, batch_size=128, workers=4, is_tencrop=False,normalize=None): logger.info('Begin extract features') if normalize == None: normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) if is_tencrop: logger.info('==> Using TenCrop') tencrop = transforms.Compose([ transforms.Resize([int(x*1.125) for x in scale_image_size]), transforms.TenCrop(scale_image_size)]) else: tencrop = None transform = transforms.Compose([ transforms.Resize(scale_image_size), transforms.ToTensor(), normalize, ]) train_data_folder = data logger.info('Begin load train data from '+train_data_folder) train_dataloader = torch.utils.data.DataLoader( Datasets.CUHK03EvaluateDataset(folder=train_data_folder, transform=transform, tencrop=tencrop), batch_size=batch_size, shuffle=False, num_workers=workers, pin_memory=True) train_features = extract_features(model, train_dataloader, is_tencrop) if os.path.isdir(extract_features_folder) is False: os.makedirs(extract_features_folder) sio.savemat(os.path.join(extract_features_folder, 'train_features.mat'), {'feature_train_new': train_features}) return def extract_stage_features(net, dataloader, is_tencrop=False): net.eval() # we have five stages in total features_list = [] for i in range(5): features_list.append([]) count = 0 for i, input in enumerate(dataloader): if is_tencrop: input = input.view((-1, *input.size()[-3:])) input_var = torch.autograd.Variable(input, volatile=True) features = net(input_var) for j in range(5): feature = 
features[j].cpu().data.numpy() if is_tencrop: feature = feature.reshape((-1, 10, feature.shape[1])) feature = feature.mean(1) features_list[j].append(feature) if is_tencrop: count += int(input.size()[0]/10) else: count += input.size()[0] print('finish ' + str(count) + ' images') for j in range(5): features_list[j] = np.concatenate(features_list[j]).T return features_list def extract_features(net, dataloader, is_tencrop=False): net.eval() features_list = [] count = 0 for i, input in enumerate(dataloader): if is_tencrop: input = input.view((-1, *input.size()[-3:])) input_var = torch.autograd.Variable(input, volatile=True) feature = net(input_var) feature = feature.cpu().data.numpy() if is_tencrop: feature = feature.reshape((-1, 10, feature.shape[1])) feature = feature.mean(1) features_list.append(feature) if is_tencrop: count += int(input.size()[0]/10) else: count += input.size()[0] print('finish ' + str(count) + ' images') return np.concatenate(features_list).T
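# A hypothetical driver (not part of the original script): wiring a stock
# torchvision backbone into the Market-1501 extraction above. The dataset
# path, output folder and image size below are placeholders.
if __name__ == '__main__':
    import logging
    import torchvision.models as models

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger('extract')
    # Any network whose forward() returns a feature tensor will do here;
    # a plain resnet50 (returning 1000-d logits) is used only as a stand-in.
    model = models.resnet50(pretrained=True).eval()
    extract_features_Market1501(model, scale_image_size=[256, 128],
                                data='/path/to/Market-1501',
                                extract_features_folder='./features',
                                logger=logger, batch_size=64)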
/* * Copyright (c) 2011 Research In Motion Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "labelwindow.h" #include "pngreader.h" LabelWindow *LabelWindow::create(screen_context_t context, int width, int height) { const int zOrder = 6; // FIXME: hardcoded LabelWindow *window = new LabelWindow(context, width, height); if (!window->m_valid) { delete window; return 0; } if (!window->setZOrder(zOrder) || !window->setTouchSensitivity(false)) { delete window; return 0; } return window; } void LabelWindow::draw(PNGReader &reader) { screen_buffer_t buffer; unsigned char *pixels; int stride; if (!getPixels(&buffer, &pixels, &stride)) { fprintf(stderr, "Unable to get label window buffer\n"); return; } // // if (reader.m_stride != stride || reader.m_height != m_size[1]) { // fprintf(stderr, "Memcpy is unsafe!!!\n"); // } // memcpy(pixels, reader.m_data, reader.m_stride * reader.m_height); screen_buffer_t pixmapBuffer; screen_get_pixmap_property_pv(reader.m_pixmap, SCREEN_PROPERTY_RENDER_BUFFERS, (void**)&pixmapBuffer); int attribs[] = { SCREEN_BLIT_SOURCE_X, 0, SCREEN_BLIT_SOURCE_Y, 0, SCREEN_BLIT_SOURCE_WIDTH, reader.m_width, SCREEN_BLIT_SOURCE_HEIGHT, reader.m_height, SCREEN_BLIT_DESTINATION_X, 0, SCREEN_BLIT_DESTINATION_Y, 0, SCREEN_BLIT_DESTINATION_WIDTH, m_size[0], SCREEN_BLIT_DESTINATION_HEIGHT, m_size[1], SCREEN_BLIT_END }; screen_blit(m_context, buffer, pixmapBuffer, attribs); this->post(buffer); int visible = 0; int rc = screen_set_window_property_iv(m_window, SCREEN_PROPERTY_VISIBLE, &visible); } void LabelWindow::showAt(screen_window_t parent, int x, int y) { int rc = 0; if (parent && parent != m_parent) { int parentBufferSize[2]; int parentSize[2]; rc = screen_get_window_property_iv(parent, SCREEN_PROPERTY_POSITION, m_offset); rc = screen_get_window_property_iv(parent, SCREEN_PROPERTY_BUFFER_SIZE, parentBufferSize); rc = screen_get_window_property_iv(parent, SCREEN_PROPERTY_SIZE, parentSize); m_scale[0] = parentSize[0] / (float)parentBufferSize[0]; m_scale[1] = parentSize[1] / (float)parentBufferSize[1]; int newSize[] = {m_size[0] * m_scale[0], m_size[1] * m_scale[1]}; rc = screen_set_window_property_iv(m_window, SCREEN_PROPERTY_SIZE, newSize); } if (!setParent(parent)) return; move(x, y); int visible = 1; rc = screen_set_window_property_iv(m_window, SCREEN_PROPERTY_VISIBLE, &visible); if (rc) { perror("set label window visible: "); } } void LabelWindow::move(int x, int y) { int position[] = {m_offset[0] + (x * m_scale[0]), m_offset[1] + (y * m_scale[1])}; int rc = screen_set_window_property_iv(m_window, SCREEN_PROPERTY_POSITION, position); if (rc) { perror("LabelWindow set position: "); return; } }
The Need for Cultural Competency in Health Care. PURPOSE To highlight the importance of cultural competency education in health care and in the medical imaging industry. METHODS A comprehensive search of the Education Resource Information Center and MEDLINE databases was conducted to acquire full-text, peer-reviewed articles relating to cultural competency training in health care. RESULTS A total of 1008 academic journal articles and 3 books were identified for this literature review. Search criteria were narrowed to peer-reviewed articles published between 2000 and 2016, resulting in 24 articles. A majority of the research studies addressed cultural competency education in allied health professions, as well as psychology and athletic training. Recent research studies pertaining to the cultural competence of imaging professionals were not found. DISCUSSION Research shows that the behaviors of health care providers can contribute to health disparities. National standards have been established to promote patient-centered care that reduces or eliminates health disparities in the U.S. population. Lectures and training sessions help professionals maintain these standards, but they might not be adequate. Health care workers need to interact and work with diverse patient populations to increase their empathy and become culturally competent. CONCLUSION A patient-centered care approach that responds to patients' unique needs and reduces health disparities among diverse patient populations can be achieved by training culturally competent health care professionals. More research is needed to determine the nature of cultural competency education taught in radiography programs.
<filename>plugins/homekit/src/types/lock.ts import { Lock, LockState, ScryptedDevice, ScryptedDeviceType, ScryptedInterface } from '@scrypted/sdk'; import { addSupportedType, bindCharacteristic, DummyDevice, HomeKitSession } from '../common'; import { Characteristic, CharacteristicEventTypes, CharacteristicSetCallback, CharacteristicValue, Service } from '../hap'; import { makeAccessory } from './common'; addSupportedType({ type: ScryptedDeviceType.Lock, probe(device: DummyDevice) { return device.interfaces.includes(ScryptedInterface.Lock); }, getAccessory: async (device: ScryptedDevice & Lock, homekitSession: HomeKitSession) => { const accessory = makeAccessory(device, homekitSession); const service = accessory.addService(Service.LockMechanism, device.name); function toCurrentState(lockState: LockState) { switch (lockState) { case LockState.Locked: return Characteristic.LockCurrentState.SECURED; case LockState.Jammed: return Characteristic.LockCurrentState.JAMMED; default: return Characteristic.LockCurrentState.UNSECURED; } } function toTargetState(lockState: LockState) { switch (lockState) { case LockState.Locked: return Characteristic.LockTargetState.SECURED; default: return Characteristic.LockTargetState.UNSECURED; } } let targetState = toTargetState(device.lockState); service.getCharacteristic(Characteristic.LockTargetState) .on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => { targetState = value as number; callback(); switch (targetState) { case Characteristic.LockTargetState.UNSECURED: device.unlock(); break; default: device.lock(); break; } }); bindCharacteristic(device, ScryptedInterface.Lock, service, Characteristic.LockTargetState, () => { targetState = toTargetState(device.lockState); return targetState; }) bindCharacteristic(device, ScryptedInterface.Lock, service, Characteristic.LockCurrentState, () => toCurrentState(device.lockState)); return accessory; } });
// NOTE: this is not the name length, but the length of the whole directory
// entry.  Entry bodies are packed towards the end of the item, so an entry's
// length is the distance from its own location to the previous entry's
// location (or to the end of the item for the entry at position 0).
int entry_length (struct item_head * ih, struct reiserfs_de_head * deh, int pos_in_item)
{
    if (pos_in_item)
	return (deh_location (deh - 1) - deh_location (deh));
    return (ih_item_len (ih) - deh_location (deh));
}
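/*
 * Worked example (hypothetical numbers, not taken from a real item):
 * suppose an item of length 100 holds two entries whose headers record
 * deh_location() offsets 80 and 60.  Bodies are packed back to front, so:
 *
 *   entry_length(ih, deh0, 0) = 100 - 80 = 20   (entry 0 runs to the end of the item)
 *   entry_length(ih, deh1, 1) =  80 - 60 = 20   (entry 1 runs up to entry 0's body)
 */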
<filename>nets/dilated.py """ Code reproduced from: https://github.com/divelab/dilated/blob/master/dilated.py Texas A&M DIVELab """ import tensorflow as tf import numpy as np import six import sys """ This script provides different 2d dilated convolutions. I appreciate ideas for a more efficient implementation of the proposed two smoothed dilated convolutions. """ def _dilated_conv2d(dilated_type, x, kernel_size, num_o, dilation_factor, name, biased=False): if dilated_type == 'regular': return _regular_dilated_conv2d(x, kernel_size, num_o, dilation_factor, name, biased) elif dilated_type == 'decompose': return _decomposed_dilated_conv2d(x, kernel_size, num_o, dilation_factor, name, biased) elif dilated_type == 'smooth_GI': return _smoothed_dilated_conv2d_GI(x, kernel_size, num_o, dilation_factor, name, biased) elif dilated_type == 'smooth_SSC': return _smoothed_dilated_conv2d_SSC(x, kernel_size, num_o, dilation_factor, name, biased) else: print('dilated_type ERROR!') print("Please input: regular, decompose, smooth_GI or smooth_SSC") sys.exit(-1) def _regular_dilated_conv2d(x, kernel_size, num_o, dilation_factor, name, biased=False): """ Dilated conv2d without BN or relu. """ num_x = x.shape[3].value with tf.variable_scope(name) as scope: w = tf.get_variable('weights', shape=[kernel_size, kernel_size, num_x, num_o]) o = tf.nn.atrous_conv2d(x, w, dilation_factor, padding='SAME') if biased: b = tf.get_variable('biases', shape=[num_o]) o = tf.nn.bias_add(o, b) return o def _decomposed_dilated_conv2d(x, kernel_size, num_o, dilation_factor, name, biased=False): """ Decomposed dilated conv2d without BN or relu. """ # padding so that the input dims are multiples of dilation_factor H = tf.shape(x)[1] W = tf.shape(x)[2] pad_bottom = (dilation_factor - H % dilation_factor) if H % dilation_factor != 0 else 0 pad_right = (dilation_factor - W % dilation_factor) if W % dilation_factor != 0 else 0 pad = [[0, pad_bottom], [0, pad_right]] # decomposition to smaller-sized feature maps # [N,H,W,C] -> [N*d*d, H/d, W/d, C] o = tf.space_to_batch(x, paddings=pad, block_size=dilation_factor) # perform regular conv2d num_x = x.shape[3].value with tf.variable_scope(name) as scope: w = tf.get_variable('weights', shape=[kernel_size, kernel_size, num_x, num_o]) s = [1, 1, 1, 1] o = tf.nn.conv2d(o, w, s, padding='SAME') if biased: b = tf.get_variable('biases', shape=[num_o]) o = tf.nn.bias_add(o, b) o = tf.batch_to_space(o, crops=pad, block_size=dilation_factor) return o def _smoothed_dilated_conv2d_GI(x, kernel_size, num_o, dilation_factor, name, biased=False): """ Smoothed dilated conv2d via the Group Interaction (GI) layer without BN or relu. 
""" # padding so that the input dims are multiples of dilation_factor H = tf.shape(x)[1] W = tf.shape(x)[2] pad_bottom = (dilation_factor - H % dilation_factor) if H % dilation_factor != 0 else 0 pad_right = (dilation_factor - W % dilation_factor) if W % dilation_factor != 0 else 0 pad = [[0, pad_bottom], [0, pad_right]] # decomposition to smaller-sized feature maps # [N,H,W,C] -> [N*d*d, H/d, W/d, C] o = tf.space_to_batch(x, paddings=pad, block_size=dilation_factor) # perform regular conv2d num_x = x.shape[3].value with tf.variable_scope(name) as scope: w = tf.get_variable('weights', shape=[kernel_size, kernel_size, num_x, num_o]) s = [1, 1, 1, 1] o = tf.nn.conv2d(o, w, s, padding='SAME') fix_w = tf.Variable(tf.eye(dilation_factor*dilation_factor), name='fix_w') l = tf.split(o, dilation_factor*dilation_factor, axis=0) os = [] for i in six.moves.range(0, dilation_factor*dilation_factor): os.append(fix_w[0, i] * l[i]) for j in six.moves.range(1, dilation_factor*dilation_factor): os[i] += fix_w[j, i] * l[j] o = tf.concat(os, axis=0) if biased: b = tf.get_variable('biases', shape=[num_o]) o = tf.nn.bias_add(o, b) o = tf.batch_to_space(o, crops=pad, block_size=dilation_factor) return o def _smoothed_dilated_conv2d_SSC(x, kernel_size, num_o, dilation_factor, name, biased=False): """ Smoothed dilated conv2d via the Separable and Shared Convolution (SSC) without BN or relu. """ num_x = x.shape[3].value fix_w_size = dilation_factor * 2 - 1 with tf.variable_scope(name) as scope: fix_w = tf.get_variable('fix_w', shape=[fix_w_size, fix_w_size, 1, 1, 1], initializer=tf.zeros_initializer) mask = np.zeros([fix_w_size, fix_w_size, 1, 1, 1], dtype=np.float32) mask[dilation_factor - 1, dilation_factor - 1, 0, 0, 0] = 1 fix_w = tf.add(fix_w, tf.constant(mask, dtype=tf.float32)) o = tf.expand_dims(x, -1) o = tf.nn.conv3d(o, fix_w, strides=[1,1,1,1,1], padding='SAME') o = tf.squeeze(o, -1) w = tf.get_variable('weights', shape=[kernel_size, kernel_size, num_x, num_o]) o = tf.nn.atrous_conv2d(o, w, dilation_factor, padding='SAME') if biased: b = tf.get_variable('biases', shape=[num_o]) o = tf.nn.bias_add(o, b) return o
//---------------------------------------------------------------------------- // // TSDuck - The MPEG Transport Stream Toolkit // Copyright (c) 2005-2021, <NAME> // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // //---------------------------------------------------------------------------- #include "tsURL.h" #include "tsSysUtils.h" TSDUCK_SOURCE; //---------------------------------------------------------------------------- // Constructors. //---------------------------------------------------------------------------- ts::URL::URL() : _scheme(), _username(), _password(), _host(), _port(0), _path(), _query(), _fragment() { } ts::URL::URL(const UString& path) : URL() { setURL(path); } ts::URL::URL(const UString& path, const UString& base) : URL() { setURL(path, base); } ts::URL::URL(const UString& path, const URL& base) : URL() { setURL(path, base); } //---------------------------------------------------------------------------- // Set URL from a string. //---------------------------------------------------------------------------- void ts::URL::setURL(const UString& path) { clear(); parse(path); // Default to a file URL. if (_scheme.empty()) { _scheme = u"file"; if (!_path.startWith(u"/")) { // Make it an absolute path. UString dir(CurrentWorkingDirectory()); #if defined(TS_WINDOWS) dir.substitute(u'\\', u'/'); dir.insert(0, u"/"); #endif // A directory must end with a slash in a URL. if (!dir.endWith(u"/") && !_path.empty()) { dir.append(u"/"); } _path.insert(0, dir); } } // Cleanup /../ and /./ cleanupPath(); } //---------------------------------------------------------------------------- // Set URL from a string and a base. //---------------------------------------------------------------------------- void ts::URL::setURL(const UString& path, const UString& base) { clear(); parse(path); applyBase(URL(base)); } void ts::URL::setURL(const UString& path, const URL& base) { clear(); parse(path); applyBase(base); } //---------------------------------------------------------------------------- // Parse a URL, leave unspecified fields unmodified. 
//----------------------------------------------------------------------------

void ts::URL::parse(const UString& path)
{
    const size_t colon = SchemeLength(path);
    size_t current = 0;

    // Parse scheme://host/ if there is one.
    if (colon > 0) {
        _scheme = path.substr(0, colon);
        _username.clear();
        _password.clear();
        _host.clear();
        _port = 0;

        // There must be "://" at index 'colon'.
        current = colon + 3;
        assert(current <= path.size());

        // Check if a host is present.
        // When there is a third slash, this is the beginning of the path and there is no host.
        bool has_host = current < path.size() && path[current] != u'/';

        // But, on Windows, a file: URL may have only two slashes followed by a device letter.
#if defined(TS_WINDOWS)
        if (has_host && _scheme == u"file" && current + 1 < path.size() && IsAlpha(path[current]) && (path[current+1] == u':' || path[current+1] == u'|')) {
            // URL is "file://C:/..." or "file://C|/..."
            has_host = false;
            // Move backward to make sure the path starts with a "/".
            --current;
        }
#endif

        // Parse [user[:password]@]host[:port].
        if (has_host) {
            size_t start = current;            // start of host part
            current = path.find(u'/', start);  // start of path part
            size_t at = path.find(u'@', start);
            if (at < current) {
                // There is a username part.
                const size_t sep = path.find(u':', start);
                if (sep < at) {
                    _username = path.substr(start, sep - start);
                    _password = path.substr(sep + 1, at - sep - 1);
                }
                else {
                    _username = path.substr(start, at - start);
                    _password.clear();
                }
                start = at + 1;
            }
            const size_t sep = path.find(u':', start);
            if (sep < current) {
                // There is a port.
                _host = path.substr(start, sep - start);
                path.substr(sep + 1, current - sep - 1).toInteger(_port);
            }
            else {
                _host = path.substr(start, current - start);
                _port = 0;
            }
        }
    }

    // Parse path[?query][#fragment]
    if (current < path.size()) {
        const size_t qmark = path.find(u'?', current);
        const size_t hash = path.find(u'#', current);
        _path = path.substr(current, std::min(qmark, hash) - current);
        if (qmark < path.size()) {
            _query = path.substr(qmark + 1, hash < qmark ? NPOS : hash - qmark - 1);
        }
        if (hash < path.size() && (qmark > path.size() || hash > qmark)) {
            _fragment = path.substr(hash + 1);
        }
    }

    // On Windows, normalize file URL.
#if defined(TS_WINDOWS)
    if (_scheme.empty() || _scheme == u"file") {
        _path.substitute(u'\\', u'/');
        if (_path.size() >= 2 && IsAlpha(_path[0]) && (_path[1] == u':' || _path[1] == u'|')) {
            // This is an absolute Windows path.
            _path.insert(0, u"/");
        }
        if (_path.size() >= 3 && _path[0] == u'/' && IsAlpha(_path[1]) && _path[2] == u'|') {
            _path[2] = u':';
        }
    }
#endif
}


//----------------------------------------------------------------------------
// Apply missing base components from a base URL.
//----------------------------------------------------------------------------

void ts::URL::applyBase(const URL& base)
{
    // If there is no scheme, this was a relative URL.
    if (_scheme.empty()) {
        // The scheme and host part is fully inherited from the base URL.
        _scheme = base._scheme;
        _username = base._username;
        _password = base._password;
        _host = base._host;
        _port = base._port;

        // The path is built based on the base URL.
        // If the path already starts with a slash, it is absolute on the host.
        if (_path.empty()) {
            // Completely missing path, use base.
            _path = base._path;
        }
        else if (!_path.startWith(u"/")) {
            // Relative path, append after base.
            if (base._path.endWith(u"/")) {
                // Base path is a directory, use it.
                _path.insert(0, base._path);
            }
            else {
                // Base path is a file/object, extract directory part.
const size_t last_slash = base._path.rfind(u'/'); if (last_slash >= base._path.size()) { // No slash in base path, assume root. _path.insert(0, 1, u'/'); } else { // Insert directory part (including slash) of the base path. _path.insert(0, base._path, 0, last_slash + 1); } } } } // Cleanup /../ and /./ cleanupPath(); } //---------------------------------------------------------------------------- // Cleanup /../ and /./ from path. //---------------------------------------------------------------------------- void ts::URL::cleanupPath() { const bool end_slash = _path.endWith(u"/"); // Use CleanupFilePath() which works on OS separators. #if defined(TS_WINDOWS) _path.substitute(u'/', u'\\'); #endif _path = CleanupFilePath(_path); #if defined(TS_WINDOWS) _path.substitute(u'\\', u'/'); #endif // Preserve final slash (meaningful in URL) if removed by CleanupFilePath(). if (end_slash && !_path.endWith(u"/")) { _path.append(u"/"); } } //---------------------------------------------------------------------------- // Clear the content of the URL (becomes invalid). //---------------------------------------------------------------------------- void ts::URL::clear() { _scheme.clear(); _username.clear(); _password.clear(); _host.clear(); _port = 0; _path.clear(); _query.clear(); _fragment.clear(); } //---------------------------------------------------------------------------- // Convert to a string object. //---------------------------------------------------------------------------- ts::UString ts::URL::toString(bool useWinInet) const { UString url; if (!_scheme.empty()) { url = _scheme; url.append(u"://"); #if defined(TS_WINDOWS) if (useWinInet && _scheme == u"file" && _username.empty() && _password.empty() && _host.empty() && _port == 0) { // We need the final string 'file://C:/dir/file' to contain 2 slashes instead of the standard 3. url.pop_back(); } #endif if (!_username.empty() || !_password.empty()) { url.append(_username); if (!_password.empty()) { url.append(u":"); url.append(_password); } url.append(u"@"); } url.append(_host); if (_port != 0) { url.append(UString::Format(u":%d", {_port})); } if (!_path.startWith(u"/")) { // Enforce a slash between host and path. url.append(u"/"); } url.append(_path); if (!_query.empty()) { url.append(u"?"); url.append(_query); } if (!_fragment.empty()) { url.append(u"#"); url.append(_fragment); } } return url; } //---------------------------------------------------------------------------- // Extract a relative URL of this object, from a base URL. //---------------------------------------------------------------------------- ts::UString ts::URL::toRelative(const UString& base, bool useWinInet) const { return toRelative(URL(base), useWinInet); } ts::UString ts::URL::toRelative(const URL& base, bool useWinInet) const { // If the base is not on the same server, there is no relative path, return the full URL. if (!sameServer(base)) { return toString(useWinInet); } // Get directory part of base path. size_t start = 0; const size_t last_slash = base._path.rfind(u'/'); if (last_slash < base._path.size() && _path.startWith(base._path.substr(0, last_slash + 1))) { // The path has the same base, including trailing slash. start = last_slash + 1; } // Build the relative URL. 
UString url(_path, start, _path.size() - start); if (!_query.empty()) { url.append(u"?"); url.append(_query); } if (!_fragment.empty()) { url.append(u"#"); url.append(_fragment); } return url; } //---------------------------------------------------------------------------- // Check if two URL's use the same server (scheme, host, user, etc.) //---------------------------------------------------------------------------- bool ts::URL::sameServer(const URL& other) const { return _scheme == other._scheme && _username == other._username && _password == other._password && _host == other._host && _port == other._port; } //---------------------------------------------------------------------------- // Locate the scheme part of a URL string. //---------------------------------------------------------------------------- size_t ts::URL::SchemeLength(const UString& path) { // Look for the URL scheme delimiter. const size_t colon = path.find(u"://"); // On Windows, do not consider an absolute path with a device letter // as a URL (C://foo/bar is not a URL with scheme C:). We require a // scheme name with more than one single letter to avoid that case. if (colon < 2 || colon > path.size()) { // No scheme found, not a URL. return 0; } else { // Check that all preceding characters are alphanumerical. for (size_t i = 0; i < colon; ++i) { if (!IsAlpha(path[i]) && !IsDigit(path[i])) { // Invalid character before scheme, not a URL. return 0; } } return colon; } }
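//----------------------------------------------------------------------------
// Usage sketch (illustration only, not part of the TSDuck sources): how the
// parsing, base resolution and formatting above combine. The URLs are made up.
//
//   ts::URL base(u"http://example.com/dir/index.html");
//   ts::URL img(u"../images/logo.png?x=1", base);
//   // img.toString() == u"http://example.com/images/logo.png?x=1"
//   //   (applyBase() inherits scheme/host, cleanupPath() folds the "..")
//
//   ts::URL photo(u"http://example.com/dir/photo.png");
//   // photo.toRelative(base) == u"photo.png"   (same server, same directory)
//----------------------------------------------------------------------------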
def _resetState(self) -> None:
    # Drain any messages still queued on the server connection, then clear
    # its raw and decoded buffers and reset the local game state.
    while self.serverClient.receive():
        pass
    self.serverClient._buffer = b''
    self.serverClient._decodedBuffer = ""
    self._roomId = ""
    self._turn = 0
Abstract 2749: Defining structure activity relationships for GPCR engagement and anti-cancer efficacy of imipridone small molecules G protein-coupled receptors (GPCRs) represent the most widely exploited superfamily of drug targets for FDA-approved therapies for many diseases; however, these receptors are underexploited in oncology. ONC201 is a selective antagonist of the GPCRs dopamine receptor D2 (DRD2) and dopamine receptor D3 (DRD3) that has been shown to induce tumor regressions with a benign safety profile in high-grade glioma patients. ONC201 (benzyl-2-methylbenzyl-imipridone) is the founding member of the imipridone class of small molecules, which share a unique tri-heterocyclic core chemical structure. Imipridones share several desirable drug-like characteristics: oral administration, a wide therapeutic window, chemical stability and blood-brain barrier penetrance. In this study, we profiled a series of imipridones for GPCR engagement and anti-cancer efficacy. Several imipridones were screened against a large panel of human GPCRs using a β-arrestin recruitment assay. The imipridones tested showed GPCR agonist/antagonist activity (threshold set at >20% activity) that was heterogeneous but exclusively among Class A GPCRs, which represent the largest class. Minor chemical modifications to the ONC201 chemical structure caused large shifts in agonist versus antagonist activity and in selectivity for GPCRs. Specifically, switching the ONC201 imipridone core from an angular to a linear isomer resulted in loss of DRD2 antagonist activity and impaired inhibition of cancer cell viability, indicating that the imipridone core structure is critical for GPCR engagement and anti-cancer effects. The addition of electron-withdrawing groups (e.g. di- or tri-halogen substitution) to the methylbenzyl ring improved potency for GPCR engagement and anti-cancer effects, but not for the benzyl ring. Loss of the benzyl ring impaired anti-cancer effects. Among all of the GPCR hits identified, maximal variance in imipridone GPCR engagement was found for DRD2/DRD3 antagonism and GPR132 agonism, which were prioritized given their known biological relevance in oncology. ONC206 (benzyl-2,4-difluoromethylbenzyl-imipridone) emerged as the most selective and potent antagonist for D2-like dopamine receptors, which are overexpressed and critical for survival in several cancers. ONC212 (benzyl-4-trifluoromethylbenzyl-imipridone) was the most selective and potent agonist for the tumor suppressor GPR132. Both compounds were tested in the GDSC panel of >1000 cancer cell lines and demonstrated broad-spectrum nanomolar inhibition of cancer cell viability and a wide therapeutic window. GPCR target expression correlated with anti-cancer efficacy in the GDSC panel for both compounds, providing potential biomarkers of response. Thus, chemical derivatization of ONC201 has generated a class of novel GPCR-targeting agents with promising preclinical efficacy and safety profiles in oncology. Citation Format: Varun V. Prabhu, Abed Rahman Kawakibi, Neel S. Madhukar, Lakshmi Anantharaman, Sean Deacon, Neil S. Charter, Mathew J. Garnett, Ultan McDermott, Cyril H. Benes, Wolfgang Oster, Olivier Elemento, Martin Stogniew, Joshua E. Allen. Defining structure activity relationships for GPCR engagement and anti-cancer efficacy of imipridone small molecules. In: Proceedings of the American Association for Cancer Research Annual Meeting 2019; 2019 Mar 29-Apr 3; Atlanta, GA.
Philadelphia (PA): AACR; Cancer Res 2019;79(13 Suppl):Abstract nr 2749.
/** * The base class for all common exception.<br/> * <p> * </p> * * @author * @version 28-May-2016 */ public class ServiceException extends Exception { /** * default exception id. */ public static final String DEFAULT_ID = "framwork.remote.SystemError"; /** * Serial number. */ private static final long serialVersionUID = 5703294364555144738L; /** * Exception id. */ private String id = DEFAULT_ID; private Object[] args = null; // NOSONAR private int httpCode = 500; private ExceptionArgs exceptionArgs = null; /** * The default constructor<br/> * <p> * This method is only used as deserialized, in other cases, use parameterized constructor. * </p> * * @since */ public ServiceException() { super(""); } /** * Constructor<br/> * <p> * </p> * * @since * @param id: details. * @param cause: reason. */ public ServiceException(final String id, final Throwable cause) { super(cause); this.setId(id); } /** * Constructor<br/> * <p> * </p> * * @since * @param message: details. */ public ServiceException(final String message) { super(message); } /** * Constructor<br/> * <p> * </p> * * @since * @param id: exception id. * @param message: details. */ public ServiceException(final String id, final String message) { super(message); this.setId(id); } /** * Constructor<br/> * <p> * </p> * * @since * @param id: exception id. * @param httpCode: http status code. */ public ServiceException(final String id, final int httpCode) { super(); this.setId(id); this.setHttpCode(httpCode); } /** * Constructor<br/> * <p> * the exception include the httpcode and message. * </p> * * @since * @param httpCode http code. * @param message details. */ public ServiceException(final int httpCode, final String message) { super(message); this.setHttpCode(httpCode); } /** * Constructor<br/> * <p> * </p> * * @since * @param id: exception id. * @param httpCode: http code. * @param exceptionArgs: Exception handling frame parameters. */ public ServiceException(final String id, final int httpCode, final ExceptionArgs exceptionArgs) { super(); this.setId(id); this.setHttpCode(httpCode); this.setExceptionArgs(exceptionArgs); } /** * Constructor<br/> * <p> * Have a placeholder exception, use args formatted message. * </p> * * @since * @param id: exception id. * @param message: details. * @param args: Placeholders for parameters */ public ServiceException(final String id, final String message, final Object... args) { super(MessageFormat.format(message, args)); this.setId(id); this.args = args; } /** * Constructor<br/> * <p> * Have a placeholder exception, use args formatted message * </p> * * @since * @param id: exception id. * @param message: details. * @param cause: reason. * @param args: placeholder for parameters */ public ServiceException(final String id, final String message, final Throwable cause, final Object... args) { super(MessageFormat.format(message, args), cause); this.setId(id); this.args = args; } /** * Constructor<br/> * <p> * </p> * * @since * @param id: exception id. * @param message: details. * @param cause: reason. */ public ServiceException(final String id, final String message, final Throwable cause) { super(message, cause); this.setId(id); } /** * Constructor<br/> * <p> * </p> * * @since * @param cause: reason. 
*/ public ServiceException(final Throwable cause) { super(cause); } /** * Get exceptoin id.<br/> * * @return * @since */ public String getId() { if(id == null || id.isEmpty()) { return DEFAULT_ID; } return id; } public void setId(final String id) { this.id = id; } public int getHttpCode() { return this.httpCode; } public void setHttpCode(final int httpCode) { this.httpCode = httpCode; } /** * Obtain the ROA exception handling framework parameters<br/> * * @return exception args. * @since */ public ExceptionArgs getExceptionArgs() { return exceptionArgs; } public void setExceptionArgs(final ExceptionArgs exceptionArgs) { this.exceptionArgs = exceptionArgs; } /** * Gets the parameter information<br/> * * @return parameter list. * @since */ protected Object[] getArgs() { if(args == null || args.length == 0 || DEFAULT_ID.equals(getId())) { return new Object[] {}; } return args; } @Override public String toString() { return "exception.id: " + getId() + "; " + super.toString(); } }
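// A hypothetical usage sketch (caller code, not part of this class):
// throwing with a MessageFormat-style message and mapping the exception
// back to an HTTP reply via the accessors defined above.
//
//   try {
//       throw new ServiceException("app.user.notfound",
//               "User {0} does not exist", userId);
//   } catch (ServiceException e) {
//       response.setStatus(e.getHttpCode());   // 500 unless set explicitly
//       LOG.warn("{} (id={})", e.getMessage(), e.getId());
//   }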
import { useEffect, useRef } from 'react'; import gsap from 'gsap'; const useSlideUp = (element: string, duration: number) => { useEffect(() => { gsap.fromTo( element, { opacity: 0, yPercent: 200, }, { opacity: 1, yPercent: 0, ease: 'back.out(1)', duration, } ); }, [element, duration]); }; export default useSlideUp;
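// Hypothetical usage (not part of this module): slide a headline up when the
// component mounts. The selector and component are made up for illustration.
//
//   const Hero = () => {
//     useSlideUp('.hero-title', 0.8);
//     return <h1 className="hero-title">Hello</h1>;
//   };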
def refreshCredentialsUI(self):
    # Collapse/expand panels and toggle controls based on the login state.
    self.credentialsCollapsibleButton.collapsed = self.isUserLogged
    self.mainCollapsibleButton.collapsed = not self.isUserLogged
    self.rememberCredentialsCheckBox.visible = self.loginButton.visible = not self.isUserLogged
    self.logoutButton.visible = self.isUserLogged
    self.loginLineEdit.readOnly = self.passwordLineEdit.readOnly = self.isUserLogged
package test import ( "fmt" "main/internal/stgo" "main/internal/tforgo" "testing" ) func TestMain(t *testing.T) { fmt.Println("Test for package Main") tforgo.Tmain() stgo.Smain() }
The Greek state secretary for industry in the run-up to its debt crisis, who supervised a 5.5bn euro recession loan plan, has an MBA from a bogus university, Channel 4 News reveals.

George Anastasopoulos, the state secretary for communications and later industry between 2007-2009, widely cites his MBA qualification from the unaccredited "Chadwick University", a Channel 4 News investigation has found.

Chadwick University's operations were discontinued indefinitely last year. Previously based in Alabama, it has become so notorious that the use of Chadwick degrees to seek employment is illegal in Texas and banned in other states.

Mr Anastasopoulos states on his LinkedIn profile that he has supervised billions in spending in the run-up to the Greek debt crisis, some involving European Union funds. While the former state secretary does hold a genuine PhD in engineering from Northwestern University, his bogus MBA is his only business qualification.

On LinkedIn, Mr Anastasopoulos states that as state secretary for industry he developed a "resession [sic] emergency plan for SMEs [small and medium sized enterprises]". The plan included the use of a pool of European Union development funds to provide loans to 77,000 Greek businesses, totalling €9.5 billion in funds in two phases, of which he supervised €5.5 billion.

He previously served as secretary of state for communications, where he says that he managed: "Digital Development EU projects related to telecoms (300 mils euro)." Mr Anastasopoulos also states on his LinkedIn page that he represented Greece at EU summits as a Minister.

'Knew it was unaccredited'

When asked whether his business qualification was bogus, Mr Anastasopoulos told Channel 4 News: "When I took Chadwick courses through distance learning, I was already a graduate student at Northwestern University, USA. I knew that it was unaccredited but I wanted to enhance my knowledge on Business issues."

Mr Anastasopoulos appears to have completed the unaccredited MBA at the same time as undertaking his engineering PhD, which he says counts as a business qualification. He didn't respond to questions about whether his MBA helped him secure his position in government.

I didn't have the impression that it was scam, but it was clear from the first moment that it was not a accredited distance learning program… If you noticed in my CV I never claimed that it was an accredited program
George Anastasopoulos

Prior to becoming a state secretary, Mr Anastasopoulos himself worked in personnel certification. He claims on his LinkedIn profile that he was "Responsible for development of Personnel Certification services all over Europe," in a role at a non-profit organisation. He has also served as the general secretary of the International Personnel Certification Association.

'Never accredited'

Chadwick University has never been recognized as a government-sanctioned university by official accreditation bodies that guarantee the quality of universities. Chadwick did have a licence from the state of Alabama to act as a private school, but at a time when Alabama had lax regulation on setting up private schools and didn't review the quality of all private post-secondary education. The state began requiring more rigour from its schools in 2007, and Chadwick ceased being recognised that same year.
Chadwick notes on its own website that its accreditor is "not affiliated with any government agency."

George Gollin, a University of Illinois expert on unaccredited institutions, told Channel 4 News: "The degrees are academically meaningless, though some of Chadwick's customers will no doubt claim to have done some work in addition to forking over the payments required to obtain a Chadwick diploma."

Mr Gollin adds: "Chadwick was never accredited by recognized bodies, though I believe it did claim accreditation from a pair of accreditation mills: the 'International Association of Universities and Schools' and the ridiculous 'World Association of Universities and Colleges,' whose owner claimed to have discovered the lost continent of Atlantis."

The fact that [Chadwick] would link itself to bogus credentialing organizations is enough to assure us that Chadwick was a fake
George Gollin

It is unclear whether Mr Anastasopoulos acquired the same level of knowledge as would be expected from an official MBA degree, but he does say that he studied the materials he was sent and that his tests were graded. His LinkedIn page is available to view online.
//////////////////////////////////////////////////////////////////// // Function: BitArray::read_datagram // Access: Public // Description: Reads the object that was previously written to a Bam // file. //////////////////////////////////////////////////////////////////// void BitArray:: read_datagram(DatagramIterator &scan, BamReader *manager) { size_t num_words = scan.get_uint32(); _array = Array::empty_array(num_words); for (size_t i = 0; i < num_words; ++i) { _array[i] = WordType(scan.get_uint32()); } _highest_bits = scan.get_uint8(); }
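////////////////////////////////////////////////////////////////////
// Hypothetical sketch of the matching writer (not copied from the
// Panda3D sources): the exact inverse of read_datagram() above,
// assuming a Datagram with add_uint32()/add_uint8().
//
//   void BitArray::
//   write_datagram(BamWriter *manager, Datagram &dg) const {
//     dg.add_uint32(_array.size());
//     for (size_t i = 0; i < _array.size(); ++i) {
//       dg.add_uint32((PN_uint32)_array[i]);
//     }
//     dg.add_uint8(_highest_bits);
//   }
////////////////////////////////////////////////////////////////////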
On April 2, a wildcat strike broke out at a metalworks in Chorzow, Poland. Tomorrow, police and private security will try to break it violently.

On April 2, a few hundred metalworkers at the Huta Batory ironworks started a wildcat strike against planned job cuts and the use of temporary workers on junk contracts. The ironworks has begun mass layoffs – 50 people were dismissed last week alone. At the same time, it has increased its use of temporary workers. The metalworkers described how people doing the same job as they were earned half their salaries – just 1,400 zloty net (350 euros). The workers consider this kind of salary obscene for the work they do and demand the reinstatement of the dismissed workers and respect for the existing collective agreement. They did not wait for union negotiations and simply went on strike.

The company has hired a lot of security and is working with the police to organize an action tomorrow to break the strike. A large number of extra police have already been seen in the area.
def flag_clipped(ref_p, tile_p): tile_clip = np.where(tile_p >= rfe_clip) ref_p[tile_clip] = np.nan tile_p[tile_clip] = np.nan return (ref_p, tile_p)
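# Hypothetical usage (rfe_clip is a module-level threshold that flag_clipped
# assumes is defined; the arrays and value below are made up). Note that the
# function masks both arrays in place and also returns them:
#
#   rfe_clip = 10.0
#   ref, tile = flag_clipped(np.array([1.0, 2.0, 3.0]),
#                            np.array([5.0, 12.0, 7.0]))
#   # tile[1] and ref[1] are now NaN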
/** * Created by zhangbing on 16/6/20. */ public abstract class BaseServletWithSession extends HttpServlet{ private static final Logger LOG = LoggerFactory.getLogger(BaseServletWithSession.class); @Override protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { LOG.info("Request: " + req.getRequestURI() + " IP:" + IPUtil.getIP(req) + " Input:" + req.getParameterMap()); SqlSession sqlSession = MyBatisUtil.getSqlSession(); try { this.doGet(req, resp, sqlSession); } catch (WeMediaException e) { HttpUtil.sentFiled(resp, e.getError(), e.getMessage()); } catch (Exception e) { HttpUtil.sentFiled(resp, ErrorCode.GeneralError, e.getMessage()); } finally { sqlSession.close(); LOG.info("Success Return"); } } @Override protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { LOG.info("Request: " + req.getRequestURI() + " IP:" + IPUtil.getIP(req) + " Input:" + req.getParameterMap()); SqlSession sqlSession = MyBatisUtil.getSqlSession(); try { this.doPost(req, resp, sqlSession); } catch (WeMediaException e) { HttpUtil.sentFiled(resp, e.getError(), e.getMessage()); } catch (Exception e) { HttpUtil.sentFiled(resp, ErrorCode.GeneralError, e.getMessage()); } finally { sqlSession.close(); LOG.info("Success Return"); } } protected abstract void doGet(HttpServletRequest req, HttpServletResponse resp, SqlSession sqlSession) throws ServletException, IOException; protected abstract void doPost(HttpServletRequest req, HttpServletResponse resp, SqlSession sqlSession) throws ServletException, IOException; }
/* * Copyright 2003-2020 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jetbrains.mps.smodel.tempmodel; import jetbrains.mps.module.ReloadableModule; import jetbrains.mps.project.AbstractModule; import jetbrains.mps.project.ModuleId; import jetbrains.mps.project.structure.modules.ModuleDescriptor; import jetbrains.mps.vfs.IFile; import org.jetbrains.annotations.Nullable; /** * Alternative temporary module, not a subject to classloading (not a {@link jetbrains.mps.module.ReloadableModule}). * * TL;DR * We've got global repositories, tests need temporary models living in a repository (editor doesn't tolerate detached models). * Each test creates own module and effectively triggers re-calculation of dependency edges for other modules in the repository which slows * down test execution. Check https://youtrack.jetbrains.com/issue/MPS-27846 * <p/> * Classloading reacts to any ReloadableModule added to a global repository (ClassLoaderManager.myWatchableCondition which is propagated down to ModuleUpdater) * and spends noticeable time in ModuleUpdater#updateAdded -> updateAllEdges. {@link TempModule} is {@link jetbrains.mps.module.ReloadableModule}, and there's * no other temp module. TransformationTests (like NodesTestCase, EditorTestCase) use modeling facilities only and don't need to load classes for test nodes, * however I can't tell for sure other clients of TempModule do not, therefore a change in TempModule would be dangerous. Another alternative is to use * {@link ReloadableModule#canLoadClasses()} ()} and corresponding setting in TempModuleOptions, but there's no clear contract for willLoad(), besides, it might get * changed with respect to improved module classloading story. Yet another alternative is to use TransientModuleProvider and TransientModelsModule (which * is not ReloadableModule as well). Though this seems the most appropriate way forward, it's not viable right now. First, due to single provider instance, * second, due to greater changes in the tests. * * @author <NAME> * @since 2018.2 */ public class TempModule2 extends AbstractModule { private final ModuleDescriptor myDescriptor; /*package*/ TempModule2() { super((IFile) null); myDescriptor = new ModuleDescriptor(); myDescriptor.setId(ModuleId.regular()); myDescriptor.setNamespace("TempModule" + myDescriptor.getId()); setModuleReference(myDescriptor.getModuleReference()); } @Nullable @Override public ModuleDescriptor getModuleDescriptor() { // descriptor is needed as at the moment it's the only way to record added module dependencies return myDescriptor; } @Override public boolean isPackaged() { // odd assumptions in super return false; } @Override public boolean isReadOnly() { return false; } }
// Copyright (c) Microsoft Corporation. // Licensed under the MIT License. import React, { Fragment, useRef, useState, useCallback, useEffect, useMemo } from 'react'; import formatMessage from 'format-message'; import { PrimaryButton, DefaultButton } from '@fluentui/react/lib/Button'; import { Stack, StackItem } from '@fluentui/react/lib/Stack'; import { TextField } from '@fluentui/react/lib/TextField'; import { FontSizes } from '@fluentui/theme'; import { useRecoilValue } from 'recoil'; import debounce from 'lodash/debounce'; import { isUsingAdaptiveRuntime, SDKKinds, isManifestJson } from '@bfc/shared'; import { DialogWrapper, DialogTypes } from '@bfc/ui-shared'; import { Separator } from '@fluentui/react/lib/Separator'; import { Dropdown, IDropdownOption } from '@fluentui/react/lib/Dropdown'; import { FontWeights } from '@fluentui/react/lib/Styling'; import { JSZipObject } from 'jszip'; import { ResponsiveMode } from '@fluentui/react/lib/utilities/decorators/withResponsiveMode'; import { LoadingSpinner } from '../../components/LoadingSpinner'; import { settingsState, designPageLocationState, dispatcherState, luFilesSelectorFamily, publishTypesState, botProjectFileState, rootDialogSelector, } from '../../recoilModel'; import { addSkillDialog } from '../../constants'; import httpClient from '../../utils/httpUtil'; import TelemetryClient from '../../telemetry/TelemetryClient'; import { TriggerFormData } from '../../utils/dialogUtil'; import { selectIntentDialog } from '../../constants'; import { PublishProfileDialog } from '../../pages/botProject/create-publish-profile/PublishProfileDialog'; import { skillNameRegex } from '../../utils/skillManifestUtil'; import { SelectIntent } from './SelectIntent'; import { SkillDetail } from './SkillDetail'; import { SetAppId } from './SetAppId'; import { BrowserModal } from './BrowserModal'; export interface SkillFormDataErrors { endpoint?: string; manifestUrl?: string; name?: string; } const urlRegex = /^http[s]?:\/\/\w+/; const filePathRegex = /([^<>/\\:""]+\.\w+$)/; // All endpoints should have endpoint url const hasEndpointUrl = (content) => { const endpoints = content.endpoints; if (endpoints && endpoints.length > 0) { return endpoints.every((endpoint) => !!endpoint.endpointUrl); } return false; }; export const msAppIdRegex = /^[0-9A-Fa-f]{8}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{4}-[0-9A-Fa-f]{12}$/; export interface CreateSkillModalProps { projectId: string; addRemoteSkill: (manifestUrl: string, endpointName: string, zipContent: Record<string, any>) => Promise<void>; addTriggerToRoot: (dialogId: string, triggerFormData: TriggerFormData, skillId: string) => Promise<void>; onDismiss: () => void; } export const validateManifestUrl = ({ formData, formDataErrors, setFormDataErrors }, skills: string[] = []) => { const { manifestUrl } = formData; const { manifestUrl: _, ...errors } = formDataErrors; if (!manifestUrl) { setFormDataErrors({ ...errors, manifestUrl: formatMessage('Please input a manifest URL') }); } else if (!urlRegex.test(manifestUrl) && !filePathRegex.test(manifestUrl)) { setFormDataErrors({ ...errors, manifestUrl: formatMessage('URL should start with http:// or https:// or file path of your system'), }); } else if (skills.includes(manifestUrl)) { setFormDataErrors({ ...errors, manifestUrl: formatMessage('The bot is already part of the Bot Project'), }); } else { setFormDataErrors({}); } }; export const validateLocalZip = async (files: Record<string, JSZipObject>) => { const result: { error: any; zipContent?: Record<string, string>; 
manifestContent?: any; path: string } = { error: {}, path: '', }; try { // get manifest const manifestFiles: JSZipObject[] = []; const zipContent: Record<string, string> = {}; for (const fPath in files) { zipContent[fPath] = await files[fPath].async('string'); // eslint-disable-next-line no-useless-escape if (fPath.match(/\.([^\.]+)$/)?.[1] === 'json' && isManifestJson(zipContent[fPath])) { manifestFiles.push(files[fPath]); result.path = fPath.substr(0, fPath.lastIndexOf('/') + 1); } } // update content for detail panel and show it if (manifestFiles.length > 1) { result.error = { manifestUrl: formatMessage('zip folder has multiple manifest json') }; } else if (manifestFiles.length === 1) { const content = JSON.parse(await manifestFiles[0].async('string')); if (hasEndpointUrl(content)) { result.manifestContent = content; result.zipContent = zipContent; } else { result.error = { manifestUrl: formatMessage( 'Endpoints should not be empty or endpoint should have endpoint url field in manifest json' ), }; } } else { result.error = { manifestUrl: formatMessage('could not locate manifest.json in zip') }; } } catch (err) { // eslint-disable-next-line format-message/literal-pattern result.error = { manifestUrl: formatMessage(err.toString()) }; } return result; }; const validateSKillName = (skillContent, setSkillManifest) => { skillContent.name = skillContent.name.replace(skillNameRegex, ''); setSkillManifest(skillContent); }; export const getSkillManifest = async ( projectId: string, manifestUrl: string, setSkillManifest, setFormDataErrors, setShowDetail ) => { try { const { data } = await httpClient.get(`/projects/${projectId}/skill/retrieveSkillManifest`, { params: { url: manifestUrl, }, }); validateSKillName(data, setSkillManifest); } catch (error) { const httpMessage = error?.response?.data?.message; const message = httpMessage?.match('Unexpected string in JSON') ? formatMessage('Error attempting to parse Skill manifest. 
There could be an error in its format.') : formatMessage('Manifest URL can not be accessed'); setFormDataErrors({ ...error, manifestUrl: message }); setShowDetail(false); } }; const getTriggerFormData = (intent: string, content: string): TriggerFormData => ({ errors: {}, $kind: 'Microsoft.OnIntent', event: '', intent: intent, triggerPhrases: content, regEx: '', }); const buttonStyle = { root: { marginLeft: '8px' } }; const setAppIdDialogStyles = { dialog: { title: { fontWeight: FontWeights.bold, fontSize: FontSizes.size20, paddingTop: '14px', paddingBottom: '11px', }, subText: { fontSize: FontSizes.size14, marginBottom: '0px', }, }, modal: { main: { maxWidth: '80% !important', width: '960px !important', }, }, }; export const CreateSkillModal: React.FC<CreateSkillModalProps> = (props) => { const { projectId, addRemoteSkill, addTriggerToRoot, onDismiss } = props; const [title, setTitle] = useState(addSkillDialog.SET_APP_ID); const [showSetAppIdDialog, setShowSetAppIdDialog] = useState(true); const [showIntentSelectDialog, setShowIntentSelectDialog] = useState(false); const [formData, setFormData] = useState<{ manifestUrl: string; endpointName: string }>({ manifestUrl: '', endpointName: '', }); const [formDataErrors, setFormDataErrors] = useState<SkillFormDataErrors>({}); const [skillManifest, setSkillManifest] = useState<any | null>(null); const [showDetail, setShowDetail] = useState(false); const [createSkillDialogHidden, setCreateSkillDialogHidden] = useState(false); const [manifestDirPath, setManifestDirPath] = useState(''); const [zipContent, setZipContent] = useState({}); const publishTypes = useRecoilValue(publishTypesState(projectId)); const { languages, luFeatures, runtime, publishTargets = [], MicrosoftAppId } = useRecoilValue( settingsState(projectId) ); const { dialogId } = useRecoilValue(designPageLocationState(projectId)); const rootDialog = useRecoilValue(rootDialogSelector(projectId)); const luFiles = useRecoilValue(luFilesSelectorFamily(projectId)); const { updateRecognizer, setMicrosoftAppProperties, setPublishTargets } = useRecoilValue(dispatcherState); const { content: botProjectFile } = useRecoilValue(botProjectFileState(projectId)); const skillUrls = Object.keys(botProjectFile.skills).map((key) => botProjectFile.skills[key].manifest as string); const debouncedValidateManifestURl = useRef(debounce(validateManifestUrl, 500)).current; const validationHelpers = { formDataErrors, setFormDataErrors, }; const options: IDropdownOption[] = useMemo(() => { return skillManifest?.endpoints?.map((item) => { return { key: item.name, // eslint-disable-next-line format-message/literal-pattern text: formatMessage(item.name), }; }); }, [skillManifest]); const handleManifestUrlChange = (_, currentManifestUrl = '') => { // eslint-disable-next-line @typescript-eslint/no-unused-vars const { manifestUrl, ...rest } = formData; debouncedValidateManifestURl( { formData: { manifestUrl: currentManifestUrl }, ...validationHelpers, }, skillUrls ); setFormData({ ...rest, manifestUrl: currentManifestUrl, }); setSkillManifest(null); setShowDetail(false); }; const validateUrl = useCallback( (event) => { event.preventDefault(); setShowDetail(true); const localManifestPath = formData.manifestUrl.replace(/\\/g, '/'); getSkillManifest(projectId, formData.manifestUrl, setSkillManifest, setFormDataErrors, setShowDetail); setManifestDirPath(localManifestPath.substring(0, localManifestPath.lastIndexOf('/'))); }, [projectId, formData] ); const handleSubmit = async (event, content: string, enable: boolean) => { 
event.preventDefault(); // add a remote skill, add skill identifier into botProj file await addRemoteSkill(formData.manifestUrl, formData.endpointName, zipContent); TelemetryClient.track('AddNewSkillCompleted', { from: Object.keys(zipContent).length > 0 ? 'zip' : 'url', }); // if added remote skill fail, just not addTrigger to root. const skillId = location.href.match(/skill\/([^/]*)/)?.[1]; //if the root dialog is orchestrator recoginzer type or user chooses orchestrator type before connecting, //add the trigger to the root dialog. const boundId = rootDialog && (rootDialog.luProvider === SDKKinds.OrchestratorRecognizer || enable) ? rootDialog.id : dialogId; if (skillId) { // add trigger with connect to skill action to root bot const triggerFormData = getTriggerFormData(skillManifest.name, content); await addTriggerToRoot(boundId, triggerFormData, skillId); TelemetryClient.track('AddNewTriggerCompleted', { kind: 'Microsoft.OnIntent' }); } if (enable) { // update recognizor type to orchestrator await updateRecognizer(projectId, boundId, SDKKinds.OrchestratorRecognizer); } }; const handleDismiss = () => { setShowSetAppIdDialog(true); onDismiss(); }; const handleGotoAddSkill = (publishTargetName: string) => { const profileTarget = publishTargets.find((target) => target.name === publishTargetName); const configuration = JSON.parse(profileTarget?.configuration || ''); setMicrosoftAppProperties( projectId, configuration.settings.MicrosoftAppId, configuration.settings.MicrosoftAppPassword ); setShowSetAppIdDialog(false); setTitle({ subText: '', title: addSkillDialog.SKILL_MANIFEST_FORM.title, }); }; const handleGotoCreateProfile = () => { setCreateSkillDialogHidden(true); }; const handleBrowseButtonUpdate = async (path: string, files: Record<string, JSZipObject>) => { // update path in input field setFormData({ ...formData, manifestUrl: path, }); const result = await validateLocalZip(files); setFormDataErrors(result.error); result.path && setManifestDirPath(result.path); result.zipContent && setZipContent(result.zipContent); if (result.manifestContent) { validateSKillName(result.manifestContent, setSkillManifest); setShowDetail(true); } }; useEffect(() => { if (skillManifest?.endpoints) { setFormData({ ...formData, endpointName: skillManifest.endpoints[0].name, }); } }, [skillManifest]); useEffect(() => { if (MicrosoftAppId) { setShowSetAppIdDialog(false); setTitle({ subText: '', title: addSkillDialog.SKILL_MANIFEST_FORM.title, }); } }, [MicrosoftAppId]); return ( <Fragment> <DialogWrapper dialogType={showSetAppIdDialog ? 
DialogTypes.Customer : DialogTypes.CreateFlow} isOpen={!createSkillDialogHidden} onDismiss={handleDismiss} {...title} customerStyle={setAppIdDialogStyles} > {showSetAppIdDialog && ( <Fragment> <Separator styles={{ root: { marginBottom: '20px' } }} /> <SetAppId projectId={projectId} onDismiss={handleDismiss} onGotoCreateProfile={handleGotoCreateProfile} onNext={handleGotoAddSkill} /> </Fragment> )} {showIntentSelectDialog && ( <SelectIntent dialogId={dialogId} languages={languages} luFeatures={luFeatures} manifest={skillManifest} manifestDirPath={manifestDirPath} projectId={projectId} rootLuFiles={luFiles} runtime={runtime} zipContent={zipContent} onBack={() => { setTitle({ subText: '', title: addSkillDialog.SKILL_MANIFEST_FORM.title, }); setShowIntentSelectDialog(false); }} onDismiss={handleDismiss} onSubmit={handleSubmit} onUpdateTitle={setTitle} /> )} {!showIntentSelectDialog && !showSetAppIdDialog && ( <Fragment> <div style={{ marginBottom: '16px' }}> {addSkillDialog.SKILL_MANIFEST_FORM.subText('https://aka.ms/bf-composer-docs-publish-bot')} </div> <Separator /> <Stack horizontal horizontalAlign="start" styles={{ root: { height: 300 } }}> <div style={{ width: '50%' }}> <div style={{ display: 'flex' }}> <TextField required errorMessage={formDataErrors.manifestUrl} label={formatMessage('Skill Manifest')} placeholder={formatMessage('Enter manifest URL or select a .zip file')} styles={{ root: { width: '300px' } }} value={formData.manifestUrl || ''} onChange={handleManifestUrlChange} /> <BrowserModal onError={setFormDataErrors} onUpdate={handleBrowseButtonUpdate} /> </div> {skillManifest?.endpoints?.length > 1 && ( <Dropdown defaultSelectedKey={skillManifest.endpoints[0].name} label={formatMessage('Endpoints')} options={options} responsiveMode={ResponsiveMode.large} onChange={(e, option?: IDropdownOption) => { if (option) { setFormData({ ...formData, endpointName: option.key as string, }); } }} /> )} </div> {showDetail && ( <Fragment> <Separator vertical styles={{ root: { padding: '0px 20px' } }} /> <div style={{ minWidth: '50%' }}> {skillManifest ? <SkillDetail manifest={skillManifest} /> : <LoadingSpinner />} </div> </Fragment> )} </Stack> <Stack> <Separator /> <StackItem align={'end'}> <DefaultButton data-testid="SkillFormCancel" text={formatMessage('Cancel')} onClick={onDismiss} /> {skillManifest ? ( isUsingAdaptiveRuntime(runtime) && luFiles.length > 0 && skillManifest.dispatchModels?.intents?.length > 0 ? ( <PrimaryButton disabled={formDataErrors.manifestUrl ? true : false} styles={buttonStyle} text={formatMessage('Next')} onClick={(event) => { setTitle(selectIntentDialog.SELECT_INTENT(dialogId, skillManifest.name)); setShowIntentSelectDialog(true); }} /> ) : ( <PrimaryButton styles={buttonStyle} text={formatMessage('Done')} onClick={(event) => { addRemoteSkill(formData.manifestUrl, formData.endpointName, zipContent); }} /> ) ) : ( <PrimaryButton disabled={!formData.manifestUrl || formDataErrors.manifestUrl !== undefined} styles={buttonStyle} text={formatMessage('Next')} onClick={validateUrl} /> )} </StackItem> </Stack> </Fragment> )} </DialogWrapper> {createSkillDialogHidden ? ( <PublishProfileDialog closeDialog={() => { setCreateSkillDialogHidden(false); }} current={null} projectId={projectId} setPublishTargets={setPublishTargets} targets={publishTargets || []} types={publishTypes} /> ) : null} </Fragment> ); }; export default CreateSkillModal;
use diesel::sql_types::{Array, Date, Double, Integer, Interval, Text, Timestamp};

sql_function!(#[aggregate] fn array_agg<T>(x: T) -> Array<T>);
sql_function!(fn canon_crate_name(x: Text) -> Text);
sql_function!(fn to_char(a: Date, b: Text) -> Text);
sql_function!(fn lower(x: Text) -> Text);
sql_function!(fn date_part(x: Text, y: Timestamp) -> Double);
sql_function! {
    #[sql_name = "date_part"]
    fn interval_part(x: Text, y: Interval) -> Double;
}
sql_function!(fn floor(x: Double) -> Integer);
sql_function!(fn greatest<T>(x: T, y: T) -> T);
sql_function!(fn least<T>(x: T, y: T) -> T);
import lombok.Singular;

import java.util.Set;

@lombok.experimental.SuperBuilder
public class SingularSet<T> {
  @Singular private Set rawTypes;
  @Singular private Set<Integer> integers;
  @Singular private Set<T> generics;
  @Singular private Set<? extends Number> extendsGenerics;
}
#include <stdio.h>
#include "header.h"

int main() {
    int v[20];
    int n;
    printf("Number of elements: ");
    scanf("%d", &n);
    citire(v, n);
    printf("The vector is: ");
    afisare(v, n);
    printf("\n minimum: %d", minim(v, n));
    printf("\n maximum: %d", maxim(v, n));
    printf("\n arithmetic mean: %.2f", aritmetica(v, n));
    printf("\n geometric mean: %.3f", geometrica(v, n));
    printf("\n");
    return 0;
}
/// Returns whether a priority update will be issued.
pub fn maybe_update_priority(&mut self, priority: Priority) -> bool {
    if priority == self.priority {
        false
    } else {
        self.priority = priority;
        true
    }
}
// // Copyright © 2018 Aljabr, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package compile import ( "fmt" "github.com/kocircuit/kocircuit/lang/circuit/model" "github.com/kocircuit/kocircuit/lang/circuit/syntax" "github.com/kocircuit/kocircuit/lang/go/kit/tree" ) // Step logics include Operator. func graftFunc(pkg string, parsed syntax.Design) (f *model.Func, err error) { g := newGrafting() if err = g.graftArgs(parsed); err != nil { return nil, fmt.Errorf("grafting fields of %s.%v (%v)", pkg, parsed.Name, err) } if err = g.graftBody(parsed.Returns); err != nil { return nil, fmt.Errorf("grafting body of %s.%v (%v)", pkg, parsed.Name.Name(), err) } returned, ok := g.labelStep["return"] if !ok { return nil, fmt.Errorf( "missing return in %s of %s", tree.Sprint(pkg), tree.Sprint(parsed.Name), ) } var gatherReturned []*model.Gather for _, s := range returned { gatherReturned = append(gatherReturned, &model.Gather{Field: model.MainFlowLabel, Step: s}) } f = &model.Func{ Doc: parsed.Comment, ID: model.FuncID(pkg, parsed.Name.Name()), Name: parsed.Name.Name(), Pkg: pkg, Enter: g.in, Field: g.arg, Monadic: g.monadic, Leave: g.add(model.Step{Label: "0_leave", Gather: gatherReturned, Logic: model.Leave{}, Syntax: parsed}), Step: sortStep(g.all), Spread: nil, // filled later Syntax: parsed, } if err = computeStepIDs(f); err != nil { return nil, err } BacklinkFunc(f) return f, nil } func BacklinkFunc(fu *model.Func) { fu.Spread = map[*model.Step][]*model.Step{} for _, s := range fu.Step { s.Func = fu // add backlink to the func for _, g := range s.Gather { fu.Spread[g.Step] = append(fu.Spread[g.Step], s) } } } func computeStepIDs(f *model.Func) error { label := map[string]bool{} for _, s := range f.Step { if label[s.Label] { return fmt.Errorf("duplicate step label %s in %s", s.Label, f.FullPath()) } label[s.Label] = true s.ID = StepID(s.Label) } return nil } func StepID(label string) model.ID { return model.StringID(label) } func (g *grafting) graftArgs(parsed syntax.Design) error { g.in = g.add(model.Step{Label: "0_enter", Logic: model.Enter{}, Syntax: parsed}) g.arg = map[string]*model.Step{} for _, factor := range parsed.Factor { if factor.Monadic { g.monadic = factor.Name.Name() } if g.arg[factor.Name.Name()] != nil { return fmt.Errorf("duplicate field name %s", factor.Name.Name()) } g.arg[factor.Name.Name()] = g.add( model.Step{ Label: fmt.Sprintf("0_enter_%s", factor.Name.Name()), Gather: []*model.Gather{{Field: model.MainFlowLabel, Step: g.in}}, Logic: model.Link{Name: factor.Name.Name(), Monadic: factor.Monadic}, Syntax: parsed, }, ) } return nil } func (g *grafting) graftBody(asm syntax.Assembly) error { if len(asm.Sign.Path) != 0 || asm.Type != "{}" { return fmt.Errorf("function body syntax near %v", asm) } for _, t := range asm.Term { if t.Label.Name() == "" { return fmt.Errorf("step label cannot be empty (near %s)", asm.RegionString()) } g.labelTerm[t.Label.Name()] = append(g.labelTerm[t.Label.Name()], t) g.pendingLabel = append(g.pendingLabel, t.Label.Name()) } for 
len(g.pendingLabel) > 0 { label := g.pendingLabel[0] g.pendingLabel = g.pendingLabel[1:] if _, err := g.graftLabel(label); err != nil { return fmt.Errorf("grafting label %s at %v (%v)", label, asm, err) } } return nil } func (g *grafting) graftLabel(l string) (step []*model.Step, err error) { // caching and cyclical references if step, ok := g.labelStep[l]; ok { return step, nil } if g.graftingLabel[l] { return nil, fmt.Errorf("label %s involved in cyclical reference", l) } g.graftingLabel[l] = true defer delete(g.graftingLabel, l) // grafting for _, t := range g.labelTerm[l] { s, err := g.graftTerm(t.Label.Name(), t.Label, t) if err != nil { return nil, err } g.labelStep[l] = append(g.labelStep[l], s...) } return g.labelStep[l], nil }
The differential-diagnostic importance of some ECG criteria and the results of the orthoclinostatic test in distinguishing functional from organic extrasystolic arrhythmias in childhood is appraised. Among the electrocardiographic indices, the coupling interval is of great importance in differential diagnosis. Left-ventricular extrasystole is mostly encountered in rhythm disorders of functional origin. The data of the orthoclinostatic test are highly reliable in distinguishing functional disorders of rhythm from organic ones.
#ifndef SLA_H_INCLUDED
#define SLA_H_INCLUDED

#include "SLAStdint.h"

/* Version string */
#define SLA_VERSION_STRING "1.0.0"
/* Format version */
#define SLA_FORMAT_VERSION 1
/* Size of the header */
#define SLA_HEADER_SIZE 43
/* Size of a block header */
#define SLA_BLOCK_HEADER_SIZE 10
/* Invalid value for the number of samples */
#define SLA_NUM_SAMPLES_INVALID 0xFFFFFFFF
/* Invalid value for the number of SLA blocks */
#define SLA_NUM_BLOCKS_INVALID 0xFFFFFFFF
/* Invalid value for the maximum block size */
#define SLA_MAX_BLOCK_SIZE_INVAILD 0xFFFFFFFF
/* Block size sufficient for block encoding/decoding */
#define SLA_CalculateSufficientBlockSize(num_channels, num_samples, bit_per_sample) \
  (2 * (num_channels) * (num_samples) * ((bit_per_sample) / 8))

/* API result type */
typedef enum SLAApiResultTag {
  SLA_APIRESULT_OK = 0,
  SLA_APIRESULT_NG,
  SLA_APIRESULT_INVALID_ARGUMENT,            /* Invalid argument */
  SLA_APIRESULT_EXCEED_HANDLE_CAPACITY,      /* Beyond the handle's capacity */
  SLA_APIRESULT_INSUFFICIENT_BUFFER_SIZE,    /* Insufficient buffer size */
  SLA_APIRESULT_INVAILD_CHPROCESSMETHOD,     /* A channel count for which channel processing cannot be performed was specified */
  SLA_APIRESULT_FAILED_TO_CALCULATE_COEF,    /* Failed to compute the prediction coefficients */
  SLA_APIRESULT_FAILED_TO_PREDICT,           /* Prediction failed */
  SLA_APIRESULT_FAILED_TO_SYNTHESIZE,        /* Synthesis failed */
  SLA_APIRESULT_INSUFFICIENT_DATA_SIZE,      /* Insufficient data */
  SLA_APIRESULT_INVALID_HEADER_FORMAT,       /* Invalid header */
  SLA_APIRESULT_DETECT_DATA_CORRUPTION,      /* Data corruption detected */
  SLA_APIRESULT_FAILED_TO_FIND_SYNC_CODE,    /* Failed to find the sync code */
  SLA_APIRESULT_INVALID_WINDOWFUNCTION_TYPE, /* An invalid window function was specified */
  SLA_APIRESULT_NO_DATA_FRAGMENTS,           /* No recoverable data fragments exist */
  SLA_APIRESULT_PARAMETER_NOT_SET            /* Wave/encode parameters have not been set on the handle */
} SLAApiResult;

/* Multi-channel processing method */
typedef enum SLAChannelProcessMethodTag {
  SLA_CHPROCESSMETHOD_NONE = 0, /* Do nothing */
  SLA_CHPROCESSMETHOD_STEREO_MS /* Stereo MS processing */
} SLAChannelProcessMethod;

/* Window function type */
typedef enum SLAWindowFunctionTypeTag {
  SLA_WINDOWFUNCTIONTYPE_RECTANGULAR = 0, /* Rectangular window (no-op) */
  SLA_WINDOWFUNCTIONTYPE_SIN,             /* Sine window */
  SLA_WINDOWFUNCTIONTYPE_HANN,            /* Hann window */
  SLA_WINDOWFUNCTIONTYPE_BLACKMAN,        /* Blackman window */
  SLA_WINDOWFUNCTIONTYPE_VORBIS           /* Vorbis window */
} SLAWindowFunctionType;

/* Wave format */
struct SLAWaveFormat {
  uint32_t num_channels;   /* Number of channels */
  uint32_t bit_per_sample; /* Bits per sample */
  uint32_t sampling_rate;  /* Sampling rate */
  uint8_t  offset_lshift;  /* Left-shift amount for the offset */
};

/* Encoding parameters */
struct SLAEncodeParameter {
  uint32_t parcor_order;                      /* PARCOR coefficient order */
  uint32_t longterm_order;                    /* Long-term order */
  uint32_t lms_order_per_filter;              /* Order per LMS filter */
  SLAChannelProcessMethod ch_process_method;  /* Multi-channel processing method */
  SLAWindowFunctionType window_function_type; /* Window function type */
  uint32_t max_num_block_samples;             /* Samples per block */
};

/* SLA header information */
struct SLAHeaderInfo {
  struct SLAWaveFormat wave_format;       /* Wave format */
  struct SLAEncodeParameter encode_param; /* Encoding parameters */
  uint32_t num_samples;                   /* Total number of samples */
  uint32_t num_blocks;                    /* Number of blocks */
  uint32_t max_block_size;                /* Maximum block size [bytes] */
  uint32_t max_bit_per_second;            /* Maximum bps */
};

#endif /* SLA_H_INCLUDED */
pub mod bgp; pub mod error; pub use error::MyBgpError; pub use error::MyError;
import os
import uuid
from datetime import date

from django.conf import settings  # MEDIA_ROOT/MEDIA_URL indicate a Django project


def upload_file(f):
    name, ext = os.path.splitext(f.name)
    name = "%s%s" % (str(uuid.uuid4()), ext)
    path = date.today().strftime("%Y")
    filepath = os.path.join(settings.MEDIA_ROOT, path)
    if not os.path.exists(filepath):
        os.makedirs(filepath)
    filepath = os.path.join(filepath, name)
    with open(filepath, 'wb+') as destination:
        for chunk in f.chunks():
            destination.write(chunk)
    return os.path.join(settings.MEDIA_URL, path, name)
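For context, a minimal usage sketch of the helper above inside a Django view; the view name and URL wiring here are hypothetical, while `request.FILES` and `upload_file` come from the snippet:

# Hypothetical Django view using upload_file(); request.FILES maps field
# names to UploadedFile objects, which is what upload_file() expects.
from django.http import JsonResponse

def handle_upload(request):
    if request.method == "POST" and "file" in request.FILES:
        url = upload_file(request.FILES["file"])  # MEDIA_URL-relative path
        return JsonResponse({"url": url})
    return JsonResponse({"error": "no file provided"}, status=400)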
import { generateReleaseMailText } from './release/generateReleaseMailText';

export const mailGenerators = {
  release: generateReleaseMailText,
};
from clustviz.clarans import compute_cost_clarans import pandas as pd def test_compute_cost_clarans(): X = pd.DataFrame([[1, 1], [6, 5], [6, 6]]) assert compute_cost_clarans(X, [0, 2]) == (1.0, {0: [0], 2: [1, 2]})
package math func SatoshisToXBT(sats int) (xbts float64) { fSats := float64(sats) return fSats / 100000000 }
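The conversion is a fixed scale of 100,000,000 satoshis per bitcoin. A quick Python mirror of the same arithmetic (the function name echoes the Go helper and is not part of any library):

def satoshis_to_xbt(sats: int) -> float:
    # 1 XBT == 100,000,000 satoshis, matching the Go helper above.
    return sats / 100_000_000

assert satoshis_to_xbt(150_000_000) == 1.5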
async def generate_async(self, args):
    raise NotImplementedError('generate_async method is required')
The mayor's Vision Vancouver party has named the youngest candidate in its history to run in the October 14 council by-election.

Diego Cardona is just 21 years old but already has a long life story. He moved to Vancouver in 2005 with his mother and younger sister. They arrived as refugees from Colombia after guerrilla forces kidnapped and murdered Cardona's father.

Vision Vancouver did not make Cardona available before the Straight went to print. But in an August 2016 interview with the Straight, he recounted how tough his first few years adjusting to a new life in Canada were. He said he focused on school and became a young advocate for refugees.

Then, in 2012, when Cardona was 16, he lost his mother to leukemia. He and his sister found themselves alone and were placed in foster care.

Despite successive tragedies, Cardona graduated high school with top marks and was accepted to UBC. He was studying politics and economics there until 2016, when he was diagnosed with cancer and took a break from school to undergo treatment.

With the disease now in remission, Cardona works as a program coordinator for Kiwassa Neighbourhood House in Vancouver's East Side. He also volunteers with a group he cofounded called Fresh Voices, which meets with new immigrants and coordinates with the provincial government to help meet their needs.

Cardona doesn't appear to have a lot of experience in local politics. But Fresh Voices has played the role of an advocate at the federal level. In 2015, it contributed to the Liberal government's decision to eliminate a requirement for Syrian refugees to repay the government for transportation costs to Canada.

At the provincial level, Cardona previously held the title of participation coordinator at the Federation of B.C. Youth in Care Networks, which advocates for children and young adults who are in the care of the B.C. Ministry of Children and Family Development.

Vision named Cardona its council candidate via an internal party appointment. The opposition Non-Partisan Association (NPA) is scheduled to hold a nomination meeting on September 6. Three members are competing for the council spot. Hector Bremner works in public affairs and previously served as an executive assistant to the former deputy premier. Glen Chernen is a financial analyst who has run for office unsuccessfully with the Cedar Party. And Penny Noble is a former school board trustee who has also worked as a teacher.

Other candidates running in the by-election include Jean Swanson, Judy Graves, Pete Fry, and Mary Jean Dunsdon.
/* Monitor MQTT topic, produce on the queue when receiving one */ public void run() { int qos = 2; try { client = new MqttClient(broker, clientId); MqttConnectOptions connOpts = new MqttConnectOptions(); connOpts.setCleanSession(true); client.connect(connOpts); client.setCallback(new MqttCallback() { public void messageArrived(String topic, MqttMessage message) { String time = new Timestamp(System.currentTimeMillis()).toString(); String receivedMessage = new String(message.getPayload()); Order o = Order.unpackJson(receivedMessage); if (o.id == -1){ System.out.println("ERROR while unpacking order json"); } else { queue.produce(o); } } public void connectionLost(Throwable cause) { System.out.println(clientId + " Connection lost! cause:" + cause.getMessage()+ "- Thread PID: " + Thread.currentThread().getId()); System.out.println(Arrays.toString(cause.getStackTrace())); } public void deliveryComplete(IMqttDeliveryToken token) { } }); System.out.println("MQTT CLIENT: \n\t- Subscribing ... - Thread PID: " + Thread.currentThread().getId()); client.subscribe(topic,qos); System.out.println("\t- Subscribed to topics : " + topic); } catch (MqttException me) { System.out.println("reason " + me.getReasonCode()); System.out.println("msg " + me.getMessage()); System.out.println("loc " + me.getLocalizedMessage()); System.out.println("cause " + me.getCause()); System.out.println("excep " + me); me.printStackTrace(); } }
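The same subscribe-and-enqueue pattern can be sketched in Python with the paho-mqtt client for comparison; the broker host, topic, and queue here are placeholders, and plain JSON parsing stands in for Order.unpackJson:

# Minimal paho-mqtt sketch of the pattern above: parse each incoming
# message and push it onto a shared queue for a consumer thread.
import json
import queue

import paho.mqtt.client as mqtt

orders = queue.Queue()

def on_message(client, userdata, msg):
    try:
        order = json.loads(msg.payload)
    except ValueError:
        print("ERROR while unpacking order json")
    else:
        orders.put(order)

client = mqtt.Client()
client.on_message = on_message
client.connect("localhost", 1883)
client.subscribe("orders/new", qos=2)
client.loop_forever()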
package listadoble;

public class DobleLista {

    private NodoDoble inicio;
    private NodoDoble fin;

    //**************************************************************************************************************
    // Default constructor: initializes the list
    public DobleLista() {
        inicio = null;
        fin = null;
    }

    //**************************************************************************************************************
    // Method to check whether the list is empty
    public boolean EstadoLista() {
        return inicio == null;
    }

    //**************************************************************************************************************
    // Method to insert an element at the end of the list
    public DobleLista InsertarFinal(Object d, Object n, Object ap, Object p, Object o, Object de) {
        NodoDoble nuevo = new NodoDoble(d, n, ap, p, o, de, null, fin);
        if (inicio == null) {
            inicio = nuevo;
        } else {
            NodoDoble aux = inicio;
            while (aux.siguiente != null) {
                aux = aux.siguiente;
            }
            aux.siguiente = nuevo;
            nuevo.anterior = aux; // keep the backward link consistent
        }
        fin = nuevo; // the new node is always the new tail
        return this;
    }

    //**************************************************************************************************************
    // Method to insert an element at the beginning of the list
    public DobleLista InsertarInicio(Object d, Object n, Object ap, Object p, Object o, Object de) {
        NodoDoble nuevo = new NodoDoble(d, n, ap, p, o, de, inicio, null);
        nuevo.siguiente = inicio;
        if (inicio != null) {
            inicio.anterior = nuevo;
        } else {
            fin = nuevo; // the first node is also the tail
        }
        inicio = nuevo;
        return this;
    }

    //**************************************************************************************************************
    // Method to print the elements of the list
    public void VisualizarLista() {
        NodoDoble recorrer = inicio;
        System.out.println();
        System.out.println("[ID Number]" + "\t\t" + "[First Names]" + "\t\t\t" + "[Last Names]" + "\t\t" + "[Age]" + "\t\t" + "[Place of Origin]" + "\t" + "[Destination]" + "\n");
        while (recorrer != null) {
            System.out.print("[" + recorrer.Id + "\t\t\t" + recorrer.nombre + "\t\t\t" + recorrer.apellido + "\t\t" + recorrer.edad + "\t\t" + recorrer.origen + "\t\t\t" + recorrer.destino + "]\n");
            recorrer = recorrer.siguiente;
        }
    }

    //**************************************************************************************************************
    // Method to remove the element at the beginning of the list
    public Object EliminarNodoInicio() {
        Object elem = inicio.Id;
        if (inicio == fin) {
            inicio = fin = null;
        } else {
            inicio = inicio.siguiente;
            inicio.anterior = null; // detach the removed node
        }
        return elem;
    }

    //**************************************************************************************************************
    /* Method to remove the element at the end of the list
    public void EliminarNodoFin() {
        NodoDoble temp = fin;
        if (temp == inicio) {
            inicio = fin = null;
        } else {
            fin = temp.anterior;
            temp.siguiente = null;
        }
    }*/

    //**************************************************************************************************************
    // Method to search for an element by its ID
    public Object Buscar(Object destino) {
        for (NodoDoble recorrer = inicio; recorrer != null; recorrer = recorrer.siguiente) {
            if (destino.equals(recorrer.Id)) {
                return recorrer;
            }
        }
        return null;
    }

    //**************************************************************************************************************
    // Method to search for an element by its ID and print it
    public Object BuscarAux(Object destino) {
        for (NodoDoble recorrer = inicio; recorrer != null; recorrer = recorrer.siguiente) {
            if (destino.equals(recorrer.Id)) {
                System.out.print("[" + recorrer.Id + "\t\t\t" + recorrer.nombre + "\t\t\t" + recorrer.apellido + "\t\t" + recorrer.edad + "\t\t" + recorrer.origen + "\t\t\t" + recorrer.destino + "]\n");
                return recorrer;
            }
        }
        return null;
    }

    //**************************************************************************************************************
    // Method to remove an element from the list by its ID
    public void EliminarDato(Object entrada) {
        NodoDoble actual = inicio;
        NodoDoble atras = null;
        while (actual != null) {
            if (entrada.equals(actual.Id)) {
                if (actual == inicio) {
                    inicio = inicio.siguiente;
                    if (inicio != null) {
                        inicio.anterior = null;
                    } else {
                        fin = null;
                    }
                } else {
                    atras.siguiente = actual.siguiente;
                    if (actual.siguiente != null) {
                        actual.siguiente.anterior = atras;
                    } else {
                        fin = atras;
                    }
                }
            }
            atras = actual;
            actual = actual.siguiente;
        }
    }
}
def sum_all(num_list):
    # Sum every integer between the two given numbers, inclusive.
    low, high = sorted(num_list)
    return sum(range(low, high + 1))


print(sum_all([1, 4]))
/// Tries to create a GridIndex with the given value. /// A value in the range of [0, 8] will return a valid GridIndex. /// A value outside of that range will return a TooBigIndex error. pub const fn try_new(value: u8) -> Result<Self, TooBigIndex> { if value < 9 { Ok(GridIndex(value)) } else { Err(value) } }
// NewHashed returns a new instance of KeyMutex which hashes arbitrary keys to
// a fixed set of locks. `n` specifies the number of locks; if n <= 0, the
// number of CPUs is used.
// Note that because it uses a fixed set of locks, different keys may share the
// same lock, so it is possible to wait on the same lock.
func NewHashed(n int) KeyMutex {
	if n <= 0 {
		n = runtime.NumCPU()
	}
	return &hashedKeyMutex{
		mutexes: make([]sync.Mutex, n),
	}
}
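The idea is to hash arbitrary keys onto a fixed pool of locks so memory stays bounded, at the cost of occasional contention between unrelated keys that land on the same stripe. A Python sketch of the same striping technique (an illustration, not a port of this package):

import os
import threading
import zlib

class HashedKeyMutex:
    # Fixed stripe of locks; distinct keys may hash to the same lock,
    # so unrelated keys can occasionally contend (same trade-off as above).
    def __init__(self, n: int = 0):
        if n <= 0:
            n = os.cpu_count() or 1
        self._locks = [threading.Lock() for _ in range(n)]

    def lock_for(self, key: str) -> threading.Lock:
        return self._locks[zlib.crc32(key.encode()) % len(self._locks)]

km = HashedKeyMutex(8)
with km.lock_for("volume-123"):
    pass  # critical section for this key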
import { getRandStr, TCromwellBlockData, TCromwellBlockProps, TGallerySettings } from '@cromwell/core';
import clsx from 'clsx';
import {
    ButtonBack,
    ButtonNext,
    CarouselContext,
    CarouselInjectedProps,
    CarouselProvider,
    CarouselStoreInterface,
    DotGroup,
    Image as CarouselImage,
    ImageWithZoom,
    Slide,
    Slider,
    ImageProps,
    WithStore,
} from 'pure-react-carousel';
import React, { useContext, useEffect } from 'react';
import ReactResizeDetector from 'react-resize-detector';

import { CBlock } from '../CBlock/CBlock';
import { Link } from '../Link/Link';
import styles from './CGallery.module.scss';
import Lightbox from './Lightbox';
import Thumbs from './Thumbs';

export type TCGalleryProps = {
    className?: string;
    shouldComponentUpdate?: boolean;
} & TCromwellBlockProps;

export type TImageComponent = React.ComponentClass<Partial<ImageProps>>;

/** @internal */
class CarouselStoreSetterRaw extends React.Component<CarouselInjectedProps & {
    setStore: (store: CarouselStoreInterface) => any;
}> {
    constructor(props) {
        super(props);
        this.props.setStore(this.props.carouselStore);
    }
    render() {
        this.props.setStore(this.props.carouselStore);
        return <></>;
    }
}

/** @internal */
const CarouselStoreSetter = WithStore(CarouselStoreSetterRaw) as any as React.ComponentType<{
    setStore: (store: CarouselStoreInterface) => any;
}>;

export class CGallery extends React.Component<TCGalleryProps> {
    private gallerySettings?: TGallerySettings;
    private galleryId?: string;
    private thumbsId?: string;
    private randId = getRandStr(5);
    private galleryStore?: CarouselStoreInterface;
    private activeSlide: number = 0;
    private setLightbox?: (open: boolean, index: number) => void;
    private forceUpdateThumbs?: () => void;

    private onActiveSlideChange = (index: number) => {
        this.activeSlide = index;
        if (this.gallerySettings?.thumbs && this.gallerySettings?.images?.length) this.forceUpdateThumbs?.();
    }

    private setActiveSlide = (index: number) => {
        this.galleryStore?.setStoreState({ currentSlide: index })
    }

    private openFullScreen = (index: number) => {
        this.setLightbox?.(true, index);
    }

    private onSlideClick = (index: number) => {
        if (this.gallerySettings?.fullscreen) this.openFullScreen(index)
    }

    private getContent = (data?: TCromwellBlockData, width?: number, height?: number) => {
        const { gallery: propsSettings } = this.props;
        const totalGallerySettings = data?.gallery ?? propsSettings;
        this.gallerySettings = totalGallerySettings;

        if (totalGallerySettings?.responsive && width && Object.keys(totalGallerySettings?.responsive).length) {
            // find the closest breakpoint that is still smaller than the current width
            let closest = 0;
            Object.keys(totalGallerySettings.responsive).forEach((b, index) => {
                if (index === 0) {
                    // fall back to the first breakpoint if no appropriate one is found
                    this.gallerySettings = totalGallerySettings?.responsive?.[b];
                }
                const breakpoint = parseInt(b);
                if (isNaN(breakpoint)) return;
                if (breakpoint > closest && breakpoint < width) {
                    closest = breakpoint;
                    this.gallerySettings = totalGallerySettings?.responsive?.[b];
                }
            });
            // Make the responsive config override the higher-level one
            this.gallerySettings = Object.assign({}, totalGallerySettings, this.gallerySettings)
        }
        const gallerySettings = this.gallerySettings;

        this.galleryId = `CGallery_${data?.id}_${this.randId}`;
        this.thumbsId = `${this.galleryId}_thumbs`;

        if (!gallerySettings || !(gallerySettings.images || gallerySettings.slides)) return <></>;

        const Image = (gallerySettings.zoom ? ImageWithZoom : CarouselImage) as TImageComponent;

        const totalSlides = gallerySettings.images?.length ?? gallerySettings.slides?.length ??
0 let visibleSlides = gallerySettings.visibleSlides ?? 1; if (visibleSlides > totalSlides) visibleSlides = totalSlides; const ButtonBackContent = gallerySettings.components?.backButton ?? (() => <div className={clsx(styles.navBtnContent, styles.btnBack)}></div>); const ButtonNextContent = gallerySettings.components?.nextButton ?? (() => <div className={clsx(styles.navBtnContent, styles.btnNext)}></div>); let slideWidth; if (width && gallerySettings.slideMinWidth && !gallerySettings.visibleSlides) { const maxSlides = Math.floor(width / gallerySettings.slideMinWidth) slideWidth = width / maxSlides; if (gallerySettings.slideMaxWidth && slideWidth > gallerySettings.slideMaxWidth) slideWidth = gallerySettings.slideMaxWidth; visibleSlides = maxSlides; } let containerHeight = gallerySettings.height; if (gallerySettings.autoHeight) containerHeight = height; let interval = gallerySettings.interval; if (typeof interval === 'number') { if (interval < 400) interval = 400; } const galleryJsx = ( <CarouselProvider orientation={gallerySettings.orientation} visibleSlides={visibleSlides} naturalSlideWidth={gallerySettings.ratio ? 100 * gallerySettings.ratio : 125} naturalSlideHeight={100} totalSlides={totalSlides} infinite={gallerySettings.loop} interval={interval} isPlaying={gallerySettings.autoPlay} > <CarouselOnChangeWatcher onChange={this.onActiveSlideChange} /> <CarouselStoreSetter setStore={(store) => { this.galleryStore = store }} /> <div className={styles.container} style={{ height: containerHeight ? containerHeight + 'px' : '100%', width: gallerySettings.width ? gallerySettings.width + 'px' : '100%', }}> <Slider style={containerHeight !== undefined ? { height: containerHeight + 'px', } : undefined} > {gallerySettings.images && gallerySettings.images.map((img, index) => { if (!img.src) return <></>; let imgItem = ( <Image src={img.src} style={containerHeight !== undefined ? { height: containerHeight + 'px', } : undefined} alt={img.alt} className={clsx(gallerySettings?.backgroundSize === 'contain' ? 
styles.slideContain : styles.slideCover)} /> ); if (gallerySettings.components?.imgWrapper) { const WrapComp = gallerySettings.components.imgWrapper; imgItem = <WrapComp image={img}>{imgItem}</WrapComp> } if (img.href) { imgItem = ( <Link href={img.href}><a>{imgItem}</a></Link> ); } imgItem = ( <Slide onClick={() => this.onSlideClick(index)} index={index} key={img.src + index} style={{ height: containerHeight && containerHeight + 'px' }} > {imgItem} </Slide> ); return imgItem; })} {gallerySettings.slides && gallerySettings.slides.map((slideJsx, index) => { let el = slideJsx; if (gallerySettings.components?.imgWrapper) { const WrapComp = gallerySettings.components.imgWrapper; el = <WrapComp>{el}</WrapComp> } return ( <Slide onClick={() => this.onSlideClick(index)} index={index} key={`slide_${index}`} style={{ height: containerHeight && height + 'px', // width: slideWidth && slideWidth + 'px', }} > {el} </Slide> ); })} </Slider> {gallerySettings.pagination && ( <div className={styles.dotContainer}> <DotGroup className={styles.dotGroup} /> </div> )} {gallerySettings.navigation && (<> <ButtonBack className={clsx(styles.navBtn, styles.navBtnBack, gallerySettings.classes?.navBtn)}><ButtonBackContent /></ButtonBack> <ButtonNext className={clsx(styles.navBtn, styles.navBtnNext, gallerySettings.classes?.navBtn)}><ButtonNextContent /></ButtonNext> </>)} </div> </CarouselProvider> ); return ( <> {galleryJsx} {!!(gallerySettings?.thumbs && gallerySettings?.images?.length) && ( <Thumbs thumbsId={this.thumbsId} gallerySettings={gallerySettings} width={width} activeSlide={this.activeSlide} totalSlides={totalSlides} setActiveSlide={this.setActiveSlide} getUpdate={(forceUpdate) => { this.forceUpdateThumbs = forceUpdate }} /> )} {gallerySettings?.fullscreen && gallerySettings.images && ( <Lightbox images={gallerySettings.images?.map(img => img.src) ?? []} getState={(setOpen) => { this.setLightbox = setOpen; }} /> )} </> ); } render() { return ( <CBlock {...this.props} type='gallery' key={this.props.id + '_crw'} content={(data, ref, setContentInstance) => { setContentInstance(this); return ( <ReactResizeDetector handleWidth handleHeight refreshMode="throttle" refreshRate={50} > {({ targetRef, width, height }) => { return ( <div id={this.galleryId} className={styles.max} ref={targetRef as any} > {this.getContent(data, width, height)} </div> ) }} </ReactResizeDetector> ) }} /> ) } } /** @internal */ export function CarouselOnChangeWatcher(props: { onChange: (index: number) => void; }) { const carouselContext = useContext(CarouselContext); useEffect(() => { function onChange() { props.onChange(carouselContext.state.currentSlide); } carouselContext.subscribe(onChange); return () => carouselContext.unsubscribe(onChange); }, [carouselContext]); return <></>; }
/** * Created by Ridiculous on 2016/7/14. */ @Controller public class pageAction { @RequestMapping("payAction") public ModelAndView pageAction(HttpServletRequest request) { ModelAndView view = new ModelAndView(); int index = Integer.parseInt(request.getParameter("index")); String title = null; String desc = null; switch (index) { case 1: title = "确认订购 乐游狂人包"; desc = "乐游狂人包:订购即可获得当月推出的12款精品游戏,定期更新部分游戏,游戏使用过程中无其他信息费用。资费10元/月,次月自动续订。"; break; case 2: title = "确认订购 超凡游戏包"; desc = "超凡游戏包:订购即可获得当月推出的12款精品游戏,定期更新部分游戏,游戏使用过程中无其他信息费用。资费10元/月,次月自动续订。"; break; case 3: title = "确认订购 全民游戏包"; desc = "全民游戏包:订购即可获得当月推出的12款精品游戏,定期更新部分游戏,游戏使用过程中无其他信息费用。资费10元/月,次月自动续订。"; break; case 4: title = "确认订购 漫漫世界动漫精品包"; desc = "漫漫世界动漫精品包:年轻人喜好的潮流都市休闲幽默漫画,浪漫爱情、曲折情节快乐不停!资费12元/月,次月自动续订。"; break; case 5: title = "确认订购 赏漫乐园动漫精品包"; desc = "赏漫乐园动漫精品包:热门网络小说神印王座漫画版火爆上线,充满玄幻趣味的动漫作品让你看个过瘾!"; break; default: return new ModelAndView("404"); } view.setViewName("payment"); System.out.println(title); view.addObject("title", title); view.addObject("desc", desc); return view; } }
// Hash computes a (non-cryptographic) hash. This hash is the same for all permutations of edges func (e *Edges) Hash() uint64 { if e.hash != nil { return *e.hash } var output uint64 e.hashMux.Lock() if e.hash == nil { for i := range e.Slice() { h := fnv.New64a() bs := make([]byte, 8) binary.LittleEndian.PutUint64(bs, uint64(e.Slice()[i].Hash())) h.Write(bs) output = output ^ h.Sum64() } h := fnv.New64a() bs := make([]byte, 8) binary.LittleEndian.PutUint64(bs, uint64(len(e.Slice()))) h.Write(bs) output = output ^ h.Sum64() e.hash = &output } e.hashMux.Unlock() return *e.hash }
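Because the per-edge hashes are combined with XOR, which is commutative and associative, the result is identical for any ordering of the edge slice. A small Python check of that property (the hash function and edge values are arbitrary stand-ins for the FNV hashing above):

import hashlib

def h64(x) -> int:
    # Arbitrary stand-in for the per-edge 64-bit hash used above.
    return int.from_bytes(hashlib.blake2b(str(x).encode(), digest_size=8).digest(), "little")

def combined(edges) -> int:
    out = 0
    for e in edges:
        out ^= h64(e)
    return out ^ h64(len(edges))  # the length is folded in, as in Hash()

assert combined([1, 2, 3]) == combined([3, 1, 2])  # permutation-invariant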
Kabul, Afghanistan (CNN) -- Afghanistan's interior minister and director of national security have resigned in the wake of an attack on a high-level peace conference last week, a spokesman for President Hamid Karzai said Sunday.

Karzai demanded an explanation of the security breach from Interior Minister Hanif Atmar and NDS chief Amrullah Saleh, and when they could not provide one, he accepted their resignations, the president's office said.

Karzai planned the peace gathering with tribal leaders to discuss a reintegration plan for Taliban members who renounce violence and lay down their arms. Suspected militants, however, fired rockets, detonated explosives and engaged in an intense gun battle with security forces Wednesday as Karzai spoke at the peace meeting.

Three suspected suicide bombers were killed and one was arrested in connection with the attack, presidential spokesman Seyamak Herwi said Wednesday. The government later revised the number killed to two. The attackers are not known to have killed or wounded anyone.

The resignations came on the same day that Karzai began to act on the recommendations of the peace conference, or jirga. He created a new commission to review the cases of all suspected Taliban militants held in Afghan jails, his office said Sunday. The commission will review the cases in order to find those who are believed to be in prison "without sufficient legally binding criminal evidence," the statement from his office said.

The commission is one of about 200 recommendations to emerge after delegates from around Afghanistan met at the peace conference. Delegates to the conference also urged the Taliban to cut its ties with the al Qaeda terror network and asked that Taliban prisoners be freed from international detention centers.

Among other things, the delegates recommended that the names of all Taliban members be removed from blacklists maintained by the United States and United Nations. Those lists contain the names of suspected militants that U.S. authorities and their allies would like to arrest.

The delegates called for the release of any prisoners currently being held for giving faulty information to NATO forces, and they urged international forces to stop searches of private homes without adequate reason. They also advised U.S.-led forces to accelerate their training of the Afghan military in advance of the anticipated handover of security responsibilities.

At the same time, they called for the establishment of a peace council comprised of provincial council representatives, tribal leaders and religious scholars. The council, they said, should create a committee to help facilitate the release and reintegration of prisoners.

The Taliban released a press statement a day before the attack, calling the peace meeting a "foreign-scripted peace jirga."

CNN's Thomas Evans contributed to this report.
import { Alert, Button, Card, CardBody, CardFooter, CardHeader, Input, } from '@retail-ui/core' import { useSession } from 'next-auth/client' import React, { useState } from 'react' import AccessDeniedIndicator from '@/components/AccessDeniedIndicator' import { useInsertFeedMutation } from '@/generated/graphql' const AddNewFeedForm = () => { const [body, setBody] = useState('') const [session] = useSession() const [errorMessage, setErrorMessage] = React.useState('') const [insertFeed, { loading, error }] = useInsertFeedMutation() React.useEffect(() => { setErrorMessage(error?.message || '') }, [error]) React.useEffect(() => { setErrorMessage('') }, [body]) if (!session) { return ( <AccessDeniedIndicator message="You need to be signed in to add a new feed!" /> ) } const handleSubmit = async () => { try { await insertFeed({ variables: { body } }) setBody('') } catch (error) { console.log('Error: ', Object.entries(error)) } } const errorNode = () => { if (!errorMessage) { return null } return ( <Alert color="danger" size="sm" content={errorMessage} hasClose onClose={() => setErrorMessage('')} ></Alert> ) } return ( <Card> <CardHeader title="New post"></CardHeader> <CardBody> {errorNode()} <Input placeholder="What's on your mind ?" id="body" value={body} onChange={(e) => setBody(e.currentTarget.value)} onKeyDown={(e) => e.key === 'Enter' && handleSubmit()} /> </CardBody> <CardFooter> <div> <Button onClick={handleSubmit} isLoading={loading} isDisabled={!body.trim()} > Post </Button> </div> </CardFooter> </Card> ) } export default AddNewFeedForm
def done(self, done_options=""):
    self.__log("done %s" % (self.property("initialized")))
    if not self.isOpen():
        return -1
    opts = _utils.tomap(done_options)
    timeout = 10000000
    if "timeout" in opts:
        timeout = int(opts["timeout"])
    timeout *= 1E-6  # convert from microseconds to seconds
    if self.property("initialized") == 0:
        if "flushing" in self.__properties:
            del self.__properties["flushing"]
        return 1
    if "flushing" not in self.__properties:
        self.__properties["flushing"] = 1
        self.__send(Type.BYTE, "done", done_options)

    def wait_func():
        if self.flag == 1:
            return False
        self.__recv(0)
        return self.property("initialized") == 0

    self.__wait(timeout, wait_func)
    if self.property("initialized") == 1:
        if timeout > 0:
            return -1
        return 0
    if "flushing" in self.__properties:
        del self.__properties["flushing"]
    return 1
/** * This XML output returns a complete representation of the differences. * * <p>It is a bit more verbose than the default output, but can be used to produce both input XML. * * @author Christophe LAuret * @version 0.9.0 * @since 0.9.0 */ public final class CompleteXMLDiffOutput extends XMLDiffOutputBase implements XMLDiffOutput { /** * Underlying XML writer. */ private final XMLWriter xml; /** * Holds the list of attributes inserted to the previous element. */ private final List<AttributeToken> insertedAttributes = new ArrayList<>(); /** * Holds the list of attributes deleted from the previous element. */ private final List<AttributeToken> deletedAttributes = new ArrayList<>(); /** * Namespace URI used to report differences. */ private String diffNamespaceUri = getDiffNamespace().getUri(); public CompleteXMLDiffOutput(Writer out) { this.xml = new XMLWriterNSImpl(out); } @Override public void start() { this.diffNamespaceUri = getDiffNamespace().getUri(); try { if (this.includeXMLDeclaration) this.xml.xmlDecl(); declareNamespaces(); } catch (IOException ex) { throw new UncheckedIOException(ex); } } @Override public void end() { try { this.xml.flush(); } catch (IOException ex) { throw new UncheckedIOException(ex); } } @Override public void handle(@NotNull Operator operator, XMLToken token) throws UncheckedIOException, IllegalStateException { try { // We must flush the inserted/deleted attributes if (token.getType() != XMLTokenType.ATTRIBUTE) { this.flushAttributes(); } // Handle matches and clashes if (operator == Operator.MATCH) handleMatch(token); else handleEdit(operator, token); } catch (IOException ex) { throw new UncheckedIOException(ex); } } private void handleMatch(XMLToken token) throws IOException { token.toXML(this.xml); } private void handleEdit(Operator operator, XMLToken token) throws IOException { if (token instanceof StartElementToken) { token.toXML(this.xml); // insert an attribute to specify if inserted or deleted this.xml.attribute(this.diffNamespaceUri, operator == Operator.INS ? "ins" : "del", "true"); } else if (token == SpaceToken.NEW_LINE) { // just output the new line if (operator == Operator.INS) { token.toXML(this.xml); } } else if (token instanceof TextToken) { // wrap the characters in a <ins> / <del> element this.xml.openElement(this.diffNamespaceUri, operator == Operator.INS ? "ins" : "del", false); token.toXML(this.xml); this.xml.closeElement(); } else if (token instanceof AttributeToken) { if (operator == Operator.INS) { token.toXML(this.xml); this.insertedAttributes.add((AttributeToken) token); } else { this.deletedAttributes.add((AttributeToken) token); } } else if (token instanceof EndElementToken) { token.toXML(this.xml); } else { // TODO comments and processing instructions, wrap in <ins> / <del> like text ? if (operator == Operator.INS) { token.toXML(this.xml); } } } /** * Write the namespaces mapping to the XML output */ private void declareNamespaces() { Namespace diff = getDiffNamespace(); this.xml.setPrefixMapping(diff.getUri(), diff.getPrefix()); for (Namespace namespace : this.namespaces) { this.xml.setPrefixMapping(namespace.getUri(), namespace.getPrefix()); } } /** * Flush the inserted or deleted attributes on the element. * <p> * This method must be called before we finish writing the start element tag. 
*/ private void flushAttributes() throws IOException { // Attributes first if (!this.insertedAttributes.isEmpty()) { String names = getQNames(this.insertedAttributes, this.namespaces); this.xml.attribute(this.diffNamespaceUri, "ins-attributes", names); } if (!this.deletedAttributes.isEmpty()) { String names = getQNames(this.deletedAttributes, this.namespaces); this.xml.attribute(this.diffNamespaceUri, "del-attributes", names); } // Elements if (!this.insertedAttributes.isEmpty()) { this.xml.openElement(this.diffNamespaceUri, "ins", false); for (AttributeToken attribute : this.insertedAttributes) { this.xml.attribute(attribute.getNamespaceURI(), attribute.getName(), attribute.getValue()); } this.xml.closeElement(); this.insertedAttributes.clear(); } if (!this.deletedAttributes.isEmpty()) { this.xml.openElement(this.diffNamespaceUri, "del", false); for (AttributeToken attribute : this.deletedAttributes) { this.xml.attribute(attribute.getNamespaceURI(), attribute.getName(), attribute.getValue()); } this.xml.closeElement(); this.deletedAttributes.clear(); } } private static String getQNames(List<AttributeToken> attributes, NamespaceSet namespaces) { StringBuilder names = new StringBuilder(); for (int i = 0; i < attributes.size(); i++) { if (i > 0) names.append(' '); names.append(getQName(attributes.get(i), namespaces)); } return names.toString(); } private static String getQName(AttributeToken attribute, NamespaceSet namespaces) { if (attribute.getName().indexOf(':') > 0) return attribute.getName(); String prefix = namespaces.getPrefix(attribute.getNamespaceURI()); return prefix != null && !prefix.isEmpty() ? prefix + ":" + attribute.getName() : attribute.getName(); } }
/*******************************************************************************
 * Copyright (c) 2019 Eclipse RDF4J contributors.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Distribution License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *******************************************************************************/
package org.eclipse.rdf4j.federated.evaluation.iterator;

import java.util.Iterator;
import java.util.List;

import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.common.iteration.ConvertingIteration;
import org.eclipse.rdf4j.query.Binding;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.QueryEvaluationException;

/**
 * Retrieves the original bindings for the particular result
 *
 * @author <NAME>
 */
public class GroupedCheckConversionIteration extends ConvertingIteration<BindingSet, BindingSet, QueryEvaluationException> {

	protected final List<BindingSet> bindings;

	public GroupedCheckConversionIteration(CloseableIteration<BindingSet, QueryEvaluationException> iter,
			List<BindingSet> bindings) {
		super(iter);
		this.bindings = bindings;
	}

	@Override
	protected BindingSet convert(BindingSet bIn) throws QueryEvaluationException {
		int bIndex = -1;
		Iterator<Binding> bIter = bIn.iterator();
		while (bIter.hasNext()) {
			Binding b = bIter.next();
			String name = b.getName();
			bIndex = Integer.parseInt(name.substring(name.lastIndexOf('_') + 1));
		}
		return bindings.get(bIndex);
	}
}
/** * Commands representing the General Authenticate commands in gemSpec_COS#14.7.2 */ public class GeneralAuthenticateCommand extends AbstractHealthCardCommand { private static final Logger LOG = LoggerFactory.getLogger(GeneralAuthenticateCommand.class); private static final int CLA_COMMAND_CHAINING = 0x10; private static final int CLA_NO_COMMAND_CHAINING = 0x00; private static final int INS = 0x86; private static final int NO_MEANING = 0x00; private static final Map<Integer, Response.ResponseStatus> RESPONSE_MESSAGES = new HashMap<>(); static { RESPONSE_MESSAGES.put(0x9000, Response.ResponseStatus.SUCCESS); RESPONSE_MESSAGES.put(0x6300, Response.ResponseStatus.AUTHENTICATION_FAILURE); RESPONSE_MESSAGES.put(0x6400, Response.ResponseStatus.PARAMETER_MISMATCH); RESPONSE_MESSAGES.put(0x6982, Response.ResponseStatus.SECURITY_STATUS_NOT_SATISFIED); RESPONSE_MESSAGES.put(0x6983, Response.ResponseStatus.KEY_EXPIRED); RESPONSE_MESSAGES.put(0x6985, Response.ResponseStatus.NO_KEY_REFERENCE); RESPONSE_MESSAGES.put(0x6A80, Response.ResponseStatus.NUMBER_PRECONDITION_WRONG); RESPONSE_MESSAGES.put(0x6A81, Response.ResponseStatus.UNSUPPORTED_FUNCTION); RESPONSE_MESSAGES.put(0x6A88, Response.ResponseStatus.KEY_NOT_FOUND); } /** * UseCase: gemSpec_COS#14.7.2.1.1 PACE for end-user cards, Step 1 a * * @param commandChaining * -true for command chaining false if not * @throws IOException if an error occurred */ public GeneralAuthenticateCommand(final boolean commandChaining) throws IOException { super(commandChaining ? CLA_COMMAND_CHAINING : CLA_NO_COMMAND_CHAINING, INS); this.ne = NE_MAX_SHORT_LENGTH; this.p1 = NO_MEANING; this.p2 = NO_MEANING; DERApplicationSpecific app = new DERApplicationSpecific(28, new ASN1EncodableVector()); this.data = app.getEncoded(); } /** * UseCase: gemSpec_COS#14.7.2.1.1 PACE for end-user cards, Step 2a (tagNo 1), 3a (3) , 5a (5) * * @param commandChaining * -true for command chaining false if not * @param data byteArray with data * @throws IOException if an error occurred */ public GeneralAuthenticateCommand(final boolean commandChaining, final byte[] data, final int tagNo) throws IOException { super(commandChaining ? CLA_COMMAND_CHAINING : CLA_NO_COMMAND_CHAINING, INS); this.ne = NE_MAX_SHORT_LENGTH; this.p1 = NO_MEANING; this.p2 = NO_MEANING; this.data = Bytes.concatNullables(// new DERApplicationSpecific(28, new DERTaggedObject(false, tagNo, new DEROctetString(data))).getEncoded()); } @Override public Map<Integer, Response.ResponseStatus> getStatusResponseMessages() { return RESPONSE_MESSAGES; } }
/* * Generate a test signal and compute the theoretical FFT. * * The test signal is specified by |signal_type|, and the test signal * is saved in |x| with the corresponding FFT in |fft|. The size of * the test signal is |size|. |signalValue| is desired the amplitude * of the test signal. * * If |real_only| is true, then the test signal is assumed to be real * instead of complex, which is the default. This is only applicable * for a |signal_type| of 0 or 3; the other signals are already real-valued. */ void GenerateTestSignalAndFFT(struct ComplexFloat* x, struct ComplexFloat* fft, int size, int signal_type, float signal_value, int real_only) { int k; switch (signal_type) { case 0: for (k = 0; k < size; ++k) { x[k].Re = signal_value; x[k].Im = real_only ? 0 : signal_value; } fft[0].Re = signal_value * size; fft[0].Im = real_only ? 0 : signal_value * size; for (k = 1; k < size; ++k) { fft[k].Re = fft[k].Im = 0; } break; case 1: { double factor = signal_value / (float) size; double omega = 2 * M_PI / size; for (k = 0; k < size; ++k) { x[k].Re = ((k + 1)*factor); x[k].Im = 0; } fft[0].Re = factor * size * (size + 1) / 2; fft[0].Im = 0; for (k = 1; k < size; ++k) { double phase; phase = omega * k; fft[k].Re = factor * -size / 2; fft[k].Im = factor * size / 2 * (sin(phase) / (1 - cos(phase))); } fft[size / 2].Im = 0; } break; case 2: { double omega = 2 * M_PI / size; for (k = 0; k < size; ++k) { x[k].Re = signal_value * sin(omega * k); x[k].Im = 0; } x[size / 2 ].Re = 0; for (k = 0; k < size; ++k) { fft[k].Re = 0; fft[k].Im = 0; } if (size != 2) { fft[1].Im = -signal_value * (size / 2); fft[size - 1].Im = signal_value * (size / 2); } } break; case 3: if (!real_only) { double omega = 2 * M_PI / size; for (k = 0; k < size; ++k) { x[k].Re = 0; x[k].Im = 0; } x[1].Im = -signal_value; x[size-1].Im = signal_value; if (size == 2) { fft[0].Re = 0; fft[0].Im = signal_value; fft[1].Re = 0; fft[1].Im = -signal_value; } else { for (k = 0; k < size; ++k) { fft[k].Re = -2 * signal_value * sin(omega * k); fft[k].Im = 0; } fft[size / 2].Re = 0; } break; } case MAX_SIGNAL_TYPE: default: fprintf(stderr, "invalid signal type: %d\n", signal_type); exit(1); } }
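For the ramp signal in case 1, the coded constants can be cross-checked against the closed-form DFT. With x[k] = (k+1)A/N and theta_m = 2*pi*m/N, the finite-series identity sum_{k=0}^{N-1} (k+1) w^k = N/(w-1) for w = e^{-i theta_m} (w^N = 1, w != 1) gives, in LaTeX (a derivation sketch, not part of the original source):

X[0] = \frac{A(N+1)}{2}, \qquad
X[m] = -\frac{A}{2} + i\,\frac{A}{2}\,\frac{\sin\theta_m}{1-\cos\theta_m}
\quad (m \neq 0), \qquad \theta_m = \frac{2\pi m}{N},

which matches fft[0].Re, fft[k].Re, and fft[k].Im as computed in the code (factor = A/N, so factor * N * (N+1)/2 = A(N+1)/2 and factor * (-N/2) = -A/2).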
import numpy as np
from scipy.signal import hilbert


def next_pow_2(n):
    # Smallest power of two >= n (helper assumed by the original code).
    return 1 << (int(n) - 1).bit_length()


def hilbert_envelope(signal):
    # Amplitude envelope via the analytic signal; zero-padding to a power
    # of two speeds up the FFT inside scipy.signal.hilbert, and the padding
    # is trimmed off before returning.
    signal = np.asarray(signal)
    N_orig = signal.shape[-1]
    N = next_pow_2(N_orig)
    y_h = hilbert(signal, N)
    return np.abs(y_h[..., :N_orig])
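# Usage sketch: recover the modulation of an amplitude-modulated tone
# (sampling rate and frequencies below are arbitrary).
if __name__ == "__main__":
    t = np.linspace(0.0, 1.0, 1000, endpoint=False)
    modulation = 1.0 + 0.5 * np.sin(2.0 * np.pi * 3.0 * t)
    carrier = np.sin(2.0 * np.pi * 50.0 * t)
    envelope = hilbert_envelope(modulation * carrier)
    # Away from the edges the envelope should track the modulation closely.
    print(np.abs(envelope[100:-100] - modulation[100:-100]).max())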
Thanks to two fast-acting teenagers, Monty the service dog is back on the job. His owner, Sue Anderson, shared the story with CTV Ottawa.

"There's so many stories you hear about bad choices teenagers make," Anderson said, "that you don't hear of good choices they're making and this was one of the good choices."

For the past six years, Monty has been at Anderson's side, anticipating the Ottawa woman's seizures. This weekend, Monty was the one who needed help. On Saturday evening, Monty suddenly collapsed.

When Anderson, who uses a wheelchair, realized she wasn't going to be able to book a Para Transpo ride to the emergency veterinary hospital in time, and that no accessible taxis were immediately available, she bundled up the dog and started wheeling him toward the hospital. After more than two kilometres, she stopped at a Tim Hortons, worried that her dog was at risk of hypothermia.

Fortunately for Anderson, two compassionate teenagers, Audrey Duvnjak and Emma Norris, were on the evening shift.

"Immediately I thought what if this happened to Spencer, my dog," 18-year-old Norris told CTV News, "and I was like, 'Okay, this woman needs help. I need to help her. We need to get this dog to the animal hospital before something bad happens to it.'"

Duvnjak, who had a car, offered to drive Monty to the Alta Vista Animal Hospital while Norris tended to Anderson.

"And when I drove him, I have heated seats, this dog is freezing," Duvnjak recalled. "I'm putting my seat heater on, and he did not want to be alone. He doesn't even know me, he crawled up into my lap into little ball, super scared."

Thanks to the teens' quick thinking, Monty is back in service; he has since woken Anderson after detecting an irregular heartbeat in his human companion. He is now on medication for a disc disease.
#include "sha1.hpp" #include <cstdio> #include <cstring> #include <iostream> int main() { Digest::SHA1 sha1; char str[] = "Hello World"; sha1.update(str, strlen(str)); const uint8_t *msgDigest = sha1.digest(); for (int i = 0; i < 20; ++i) { printf("%02x", (uint32_t)msgDigest[i]); } std::cout << std::endl; }
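// For reference, if Digest::SHA1 is a conforming SHA-1 implementation the
// program above should print the well-known digest of "Hello World":
// 0a4d55a8d778e5022fab701977c5d840bbc486d0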
/**
 * Used with operators that require a list of elements,
 * such as {@link Operator#$in} and {@link Operator#$nin}.
 */
public static Expression of(String field, Operator operator, Temporal... values) {
    return new ListExpression(field, operator,
            Arrays.stream(values).map(Object::toString).collect(toList())
    );
}
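// Usage sketch (the Operator constants come from the javadoc above; the
// field name and dates are illustrative). java.time.LocalDate implements
// java.time.temporal.Temporal, so it can be passed directly:
//
//     Expression created = Expression.of("createdAt", Operator.$in,
//             LocalDate.of(2020, 1, 1), LocalDate.of(2020, 1, 2));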
<reponame>latticework/proto-goquiles-server-todo<gh_stars>0 package api import ( "errors" "net/http" "net/url" "os" "github.com/latticework/proto-goquiles-server-todo/goquilessvr/command" ) var ( errRedirect = errors.New("redirect") ) type Config struct { Address string HttpClient *http.Client } func DefaultConfig() *Config { config := Config{ Address: "http://127.0.0.1:8700", HttpClient: &http.Client{}, } if address := os.Getenv(command.EnvQuilesServerAddress); address != "" { config.Address = address } return config } func NewClient(c *Config) (*Client, error) { url, err := url.Parse(c.Address) if err != nil { return nil, err } if c.HttpClient == nil { c.HttpClient = http.DefaultClient } // // Make a copy of the HTTP client so we can configure it without // // affecting the original // // // // If no cookie jar is set on the client, we set a default empty // // cookie jar. // if c.HttpClient.Jar == nil { // jar, err := cookiejar.New(&cookiejar.Options{}) // if err != nil { // return nil, err // } // // c.HttpClient.Jar = jar // } // Ensure redirects are not automatically followed c.HttpClient.CheckRedirect = func(req *http.Request, via []*http.Request) error { return errRedirect } client := &Client{ address: url, config: c, } // if token := os.Getenv("VAULT_TOKEN"); token != "" { // client.SetToken(token) // } return client, nil }
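// A minimal usage sketch for the client above; it lives in the same
// package, so no extra imports are needed. The address falls back to
// http://127.0.0.1:8700 unless the environment variable named by
// command.EnvQuilesServerAddress is set.
func newDefaultClient() (*Client, error) {
	return NewClient(DefaultConfig())
}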
// Copyright 2020 <NAME> // Distributed under the Boost license, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // See https://github.com/danielaparker/jsoncons for latest version #ifndef JSONCONS_JSONSCHEMA_KEYWORD_VALIDATOR_FACTORY_HPP #define JSONCONS_JSONSCHEMA_KEYWORD_VALIDATOR_FACTORY_HPP #include <jsoncons/config/jsoncons_config.hpp> #include <jsoncons/uri.hpp> #include <jsoncons/json.hpp> #include <jsoncons_ext/jsonpointer/jsonpointer.hpp> #include <jsoncons_ext/jsonschema/subschema.hpp> #include <jsoncons_ext/jsonschema/keyword_validator.hpp> #include <jsoncons_ext/jsonschema/schema_draft7.hpp> #include <jsoncons_ext/jsonschema/schema_version.hpp> #include <cassert> #include <set> #include <sstream> #include <iostream> #include <cassert> #if defined(JSONCONS_HAS_STD_REGEX) #include <regex> #endif namespace jsoncons { namespace jsonschema { template <class Json> using uri_resolver = std::function<Json(const jsoncons::uri & /*id*/)>; template <class Json> class reference_schema : public keyword_validator<Json> { using validator_pointer = typename keyword_validator<Json>::self_pointer; validator_pointer referred_schema_; public: reference_schema(const std::string& id) : keyword_validator<Json>(id), referred_schema_(nullptr) {} void set_referred_schema(validator_pointer target) { referred_schema_ = target; } private: void do_validate(const Json& instance, const jsonpointer::json_pointer& instance_location, error_reporter& reporter, Json& patch) const override { if (!referred_schema_) { reporter.error(validation_output("", this->absolute_keyword_location(), instance_location.to_uri_fragment(), "Unresolved schema reference " + this->absolute_keyword_location())); return; } referred_schema_->validate(instance, instance_location, reporter, patch); } jsoncons::optional<Json> get_default_value(const jsonpointer::json_pointer& instance_location, const Json& instance, error_reporter& reporter) const override { if (!referred_schema_) { reporter.error(validation_output("", this->absolute_keyword_location(), instance_location.to_uri_fragment(), "Unresolved schema reference " + this->absolute_keyword_location())); return jsoncons::optional<Json>(); } return referred_schema_->get_default_value(instance_location, instance, reporter); } }; template <class Json> class keyword_validator_factory; template <class Json> class json_schema { using validator_pointer = typename keyword_validator<Json>::self_pointer; friend class keyword_validator_factory<Json>; std::vector<std::unique_ptr<keyword_validator<Json>>> subschemas_; validator_pointer root_; public: json_schema(std::vector<std::unique_ptr<keyword_validator<Json>>>&& subschemas, validator_pointer root) : subschemas_(std::move(subschemas)), root_(root) { if (root_ == nullptr) JSONCONS_THROW(schema_error("There is no root schema to validate an instance against")); } json_schema(const json_schema&) = delete; json_schema(json_schema&&) = default; json_schema& operator=(const json_schema&) = delete; json_schema& operator=(json_schema&&) = default; void validate(const Json& instance, const jsonpointer::json_pointer& instance_location, error_reporter& reporter, Json& patch) const { JSONCONS_ASSERT(root_ != nullptr); root_->validate(instance, instance_location, reporter, patch); } }; template <class Json> struct default_uri_resolver { Json operator()(const jsoncons::uri& uri) { if (uri.path() == "/draft-07/schema") { return jsoncons::jsonschema::schema_draft7<Json>::get_schema(); } 
JSONCONS_THROW(jsonschema::schema_error("Don't know how to load JSON Schema " + std::string(uri.base()))); } }; template <class Json> class keyword_validator_factory : public abstract_keyword_validator_factory<Json> { using validator_pointer = typename keyword_validator<Json>::self_pointer; struct subschema_registry { std::map<std::string, validator_pointer> schemas; // schemas std::map<std::string, reference_schema<Json>*> unresolved; // unresolved references std::map<std::string, Json> unprocessed_keywords; }; uri_resolver<Json> resolver_; validator_pointer root_; // Owns all schemas std::vector<std::unique_ptr<keyword_validator<Json>>> subschemas_; // Map location to subschema_registry std::map<std::string, subschema_registry> subschema_registries_; public: keyword_validator_factory(uri_resolver<Json>&& resolver) noexcept : resolver_(std::move(resolver)) { } keyword_validator_factory(const keyword_validator_factory&) = delete; keyword_validator_factory& operator=(const keyword_validator_factory&) = delete; keyword_validator_factory(keyword_validator_factory&&) = default; keyword_validator_factory& operator=(keyword_validator_factory&&) = default; std::shared_ptr<json_schema<Json>> get_schema() { return std::make_shared<json_schema<Json>>(std::move(subschemas_), root_); } validator_pointer make_required_validator(const std::vector<schema_location>& uris, const std::vector<std::string>& r) override { auto sch_orig = jsoncons::make_unique<required_validator<Json>>(uris, r); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_null_validator(const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<null_validator<Json>>(uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_true_validator(const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<true_validator<Json>>(uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_false_validator(const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<false_validator<Json>>(uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_object_validator(const Json& schema, const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<object_validator<Json>>(this, schema, uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_array_validator(const Json& schema, const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<array_validator<Json>>(this, schema, uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_string_validator(const Json& schema, const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<string_validator<Json>>(schema, uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_boolean_validator(const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<boolean_validator<Json>>(uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_integer_validator(const Json& schema, const std::vector<schema_location>& uris, std::set<std::string>& 
keywords) override { auto sch_orig = jsoncons::make_unique<integer_validator<Json>>(schema, uris, keywords); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_number_validator(const Json& schema, const std::vector<schema_location>& uris, std::set<std::string>& keywords) override { auto sch_orig = jsoncons::make_unique<number_validator<Json>>(schema, uris, keywords); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_not_validator(const Json& schema, const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<not_validator<Json>>(this, schema, uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_all_of_validator(const Json& schema, const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<combining_validator<Json,all_of_criterion<Json>>>(this, schema, uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_any_of_validator(const Json& schema, const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<combining_validator<Json,any_of_criterion<Json>>>(this, schema, uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_one_of_validator(const Json& schema, const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<combining_validator<Json,one_of_criterion<Json>>>(this, schema, uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_type_validator(const Json& schema, const std::vector<schema_location>& uris) override { auto sch_orig = jsoncons::make_unique<type_validator<Json>>(this, schema, uris); auto sch = sch_orig.get(); subschemas_.emplace_back(std::move(sch_orig)); return sch; } validator_pointer make_keyword_validator(const Json& schema, const std::vector<schema_location>& uris, const std::vector<std::string>& keys) override { std::vector<schema_location> new_uris = update_uris(schema, uris, keys); validator_pointer sch = nullptr; switch (schema.type()) { case json_type::bool_value: if (schema.template as<bool>()) { sch = make_true_validator(new_uris); } else { sch = make_false_validator(new_uris); } break; case json_type::object_value: { auto it = schema.find("definitions"); if (it != schema.object_range().end()) { for (const auto& def : it->value().object_range()) make_keyword_validator(def.value(), new_uris, {"definitions", def.key()}); } it = schema.find("$ref"); if (it != schema.object_range().end()) // this schema is a reference { schema_location relative(it->value().template as<std::string>()); schema_location id = relative.resolve(new_uris.back()); sch = get_or_create_reference(id); } else { sch = make_type_validator(schema, new_uris); } break; } default: JSONCONS_THROW(schema_error("invalid JSON-type for a schema for " + new_uris[0].string() + ", expected: boolean or object")); break; } for (const auto& uri : new_uris) { insert(uri, sch); if (schema.type() == json_type::object_value) { for (const auto& item : schema.object_range()) insert_unknown_keyword(uri, item.key(), item.value()); // save unknown keywords for later reference } } return sch; } void load_root(const Json& sch) { if (sch.is_object()) { auto it = sch.find("$schema"); if (it != sch.object_range().end()) { auto sv = 
it->value().as_string_view(); if (!schema_version::contains(sv)) { std::string message("Unsupported schema version "); message.append(sv.data(), sv.size()); JSONCONS_THROW(schema_error(message)); } } } load(sch); } void load(const Json& sch) { subschema_registries_.clear(); root_ = make_keyword_validator(sch, {{"#"}}, {}); // load all external schemas that have not already been loaded std::size_t loaded_count = 0; do { loaded_count = 0; std::vector<std::string> locations; for (const auto& item : subschema_registries_) locations.push_back(item.first); for (const auto& loc : locations) { if (subschema_registries_[loc].schemas.empty()) // registry for this file is empty { if (resolver_) { Json external_schema = resolver_(loc); make_keyword_validator(external_schema, {{loc}}, {}); ++loaded_count; } else { JSONCONS_THROW(schema_error("External schema reference '" + loc + "' needs to be loaded, but no resolver provided")); } } } } while (loaded_count > 0); for (const auto &file : subschema_registries_) { if (!file.second.unresolved.empty()) { JSONCONS_THROW(schema_error("after all files have been parsed, '" + (file.first == "" ? "<root>" : file.first) + "' has still undefined references.")); } } } private: void insert(const schema_location& uri, validator_pointer s) { auto& file = get_or_create_file(std::string(uri.base())); auto schemas_it = file.schemas.find(std::string(uri.fragment())); if (schemas_it != file.schemas.end()) { JSONCONS_THROW(schema_error("schema with " + uri.string() + " already inserted")); return; } file.schemas.insert({std::string(uri.fragment()), s}); // is there an unresolved reference to this newly inserted schema? auto unresolved_it = file.unresolved.find(std::string(uri.fragment())); if (unresolved_it != file.unresolved.end()) { unresolved_it->second->set_referred_schema(s); file.unresolved.erase(unresolved_it); } } void insert_unknown_keyword(const schema_location& uri, const std::string& key, const Json& value) { auto &file = get_or_create_file(std::string(uri.base())); auto new_u = uri.append(key); schema_location new_uri(new_u); if (new_uri.has_fragment() && !new_uri.has_identifier()) { auto fragment = std::string(new_uri.fragment()); // is there a reference looking for this unknown-keyword, which is thus no longer a unknown keyword but a schema auto unresolved = file.unresolved.find(fragment); if (unresolved != file.unresolved.end()) make_keyword_validator(value, {{new_uri}}, {}); else // no, nothing ref'd it, keep for later file.unprocessed_keywords[fragment] = value; // recursively add possible subschemas of unknown keywords if (value.type() == json_type::object_value) for (const auto& subsch : value.object_range()) { insert_unknown_keyword(new_uri, subsch.key(), subsch.value()); } } } validator_pointer get_or_create_reference(const schema_location& uri) { auto &file = get_or_create_file(std::string(uri.base())); // a schema already exists auto sch = file.schemas.find(std::string(uri.fragment())); if (sch != file.schemas.end()) return sch->second; // referencing an unknown keyword, turn it into schema // // an unknown keyword can only be referenced by a JSONPointer, // not by a plain name identifier if (uri.has_fragment() && !uri.has_identifier()) { std::string fragment = std::string(uri.fragment()); auto unprocessed_keywords_it = file.unprocessed_keywords.find(fragment); if (unprocessed_keywords_it != file.unprocessed_keywords.end()) { auto &subsch = unprocessed_keywords_it->second; auto s = make_keyword_validator(subsch, {{uri}}, {}); // A JSON Schema MUST be 
an object or a boolean. file.unprocessed_keywords.erase(unprocessed_keywords_it); return s; } } // get or create a reference_schema auto ref = file.unresolved.find(std::string(uri.fragment())); if (ref != file.unresolved.end()) { return ref->second; // unresolved, use existing reference } else { auto orig = jsoncons::make_unique<reference_schema<Json>>(uri.string()); auto p = file.unresolved.insert(ref, {std::string(uri.fragment()), orig.get()}) ->second; // unresolved, create new reference subschemas_.emplace_back(std::move(orig)); return p; } } subschema_registry& get_or_create_file(const std::string& loc) { auto file = subschema_registries_.find(loc); if (file != subschema_registries_.end()) return file->second; else return subschema_registries_.insert(file, {loc, {}})->second; } }; template <class Json> std::shared_ptr<json_schema<Json>> make_schema(const Json& schema) { keyword_validator_factory<Json> loader{default_uri_resolver<Json>()}; loader.load_root(schema); return loader.get_schema(); } template <class Json,class URIResolver> typename std::enable_if<type_traits::is_unary_function_object_exact<URIResolver,Json,std::string>::value,std::shared_ptr<json_schema<Json>>>::type make_schema(const Json& schema, const URIResolver& resolver) { keyword_validator_factory<Json> loader(resolver); loader.load_root(schema); return loader.get_schema(); } } // namespace jsonschema } // namespace jsoncons #endif // JSONCONS_JSONSCHEMA_SCHEMA_LOADER_HPP
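// A minimal usage sketch for the factory above. Both make_schema overloads
// are defined in this header; the parsed schema and the failing resolver
// below are illustrative. Running validation goes through jsoncons'
// json_validator wrapper, which lives outside this header.
//
//     jsoncons::json schema = jsoncons::json::parse(R"({"type": "object"})");
//
//     // Default resolver: only knows the draft-07 meta-schema.
//     auto compiled = jsoncons::jsonschema::make_schema(schema);
//
//     // Custom resolver, consulted for external "$ref" targets:
//     auto compiled2 = jsoncons::jsonschema::make_schema(schema,
//         [](const jsoncons::uri& id) -> jsoncons::json {
//             throw jsoncons::jsonschema::schema_error(
//                 "cannot load " + std::string(id.base()));
//         });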
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeApplications #-}

-- |
-- Main module for running the interpreter
module Main where

import Control.Monad ( void )
import Data.Interpreter ( runProgram )
import Data.List ( isSuffixOf )
import Data.Machine ( initialize )
import Data.Proxy ( Proxy (Proxy) )
import System.Environment ( getArgs )
import System.IO ( BufferMode (NoBuffering), hSetBuffering, stdin, stdout )
import Text.Parser ( processProgram )

main :: IO ()
main = do
  args <- getArgs
  case args of
    [filepath]
      | ".bf" `isSuffixOf` filepath || ".b" `isSuffixOf` filepath -> run filepath
    -- putStrLn, not print: print would show the string escaped, with a
    -- literal backslash-n instead of a line break.
    _ -> putStrLn usage
  where
    run fp = do
      hSetBuffering stdin NoBuffering
      hSetBuffering stdout NoBuffering
      program <- processProgram <$> readFile fp
      case program of
        Left err -> print err
        Right pgm -> do
          machine <- initialize (Proxy @64)
          void (runProgram machine pgm)
    usage = "Usage: stack run [filepath]\nfilepath : a brainfuck file (ending in .bf or .b)"
def check_import_of_config(physical_line, logical_line, filename): excluded_files = ["./rally/common/cfg.py"] forbidden_imports = ["from oslo_config", "import oslo_config"] if filename not in excluded_files: for forbidden_import in forbidden_imports: if logical_line.startswith(forbidden_import): yield (0, "N311 Wrong module for config is imported. Please " "use `rally.common.cfg` instead.")
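# Quick sanity sketch for the check above: a direct oslo_config import is
# flagged, the sanctioned rally.common.cfg import is not (paths illustrative).
if __name__ == "__main__":
    flagged = list(check_import_of_config(
        "from oslo_config import cfg",
        "from oslo_config import cfg",
        "./rally/plugins/foo.py"))
    clean = list(check_import_of_config(
        "from rally.common import cfg",
        "from rally.common import cfg",
        "./rally/plugins/foo.py"))
    print(flagged)  # one (offset, "N311 ...") tuple
    print(clean)    # []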
"""Native adapter for serving CherryPy via mod_python Basic usage: ########################################## # Application in a module called myapp.py ########################################## import cherrypy class Root: @cherrypy.expose def index(self): return 'Hi there, Ho there, Hey there' # We will use this method from the mod_python configuration # as the entry point to our application def setup_server(): cherrypy.tree.mount(Root()) cherrypy.config.update({'environment': 'production', 'log.screen': False, 'show_tracebacks': False}) ########################################## # mod_python settings for apache2 # This should reside in your httpd.conf # or a file that will be loaded at # apache startup ########################################## # Start DocumentRoot "/" Listen 8080 LoadModule python_module /usr/lib/apache2/modules/mod_python.so <Location "/"> PythonPath "sys.path+['/path/to/my/application']" SetHandler python-program PythonHandler cherrypy._cpmodpy::handler PythonOption cherrypy.setup myapp::setup_server PythonDebug On </Location> # End The actual path to your mod_python.so is dependent on your environment. In this case we suppose a global mod_python installation on a Linux distribution such as Ubuntu. We do set the PythonPath configuration setting so that your application can be found by from the user running the apache2 instance. Of course if your application resides in the global site-package this won't be needed. Then restart apache2 and access http://127.0.0.1:8080 """ import logging import sys import cherrypy from cherrypy._cpcompat import BytesIO, copyitems, ntob from cherrypy._cperror import format_exc, bare_error from cherrypy.lib import httputil # ------------------------------ Request-handling def setup(req): from mod_python import apache # Run any setup functions defined by a "PythonOption cherrypy.setup" directive. options = req.get_options() if 'cherrypy.setup' in options: for function in options['cherrypy.setup'].split(): atoms = function.split('::', 1) if len(atoms) == 1: mod = __import__(atoms[0], globals(), locals()) else: modname, fname = atoms mod = __import__(modname, globals(), locals(), [fname]) func = getattr(mod, fname) func() cherrypy.config.update({'log.screen': False, "tools.ignore_headers.on": True, "tools.ignore_headers.headers": ['Range'], }) engine = cherrypy.engine if hasattr(engine, "signal_handler"): engine.signal_handler.unsubscribe() if hasattr(engine, "console_control_handler"): engine.console_control_handler.unsubscribe() engine.autoreload.unsubscribe() cherrypy.server.unsubscribe() def _log(msg, level): newlevel = apache.APLOG_ERR if logging.DEBUG >= level: newlevel = apache.APLOG_DEBUG elif logging.INFO >= level: newlevel = apache.APLOG_INFO elif logging.WARNING >= level: newlevel = apache.APLOG_WARNING # On Windows, req.server is required or the msg will vanish. See # http://www.modpython.org/pipermail/mod_python/2003-October/014291.html. # Also, "When server is not specified...LogLevel does not apply..." apache.log_error(msg, newlevel, req.server) engine.subscribe('log', _log) engine.start() def cherrypy_cleanup(data): engine.exit() try: # apache.register_cleanup wasn't available until 3.1.4. 
apache.register_cleanup(cherrypy_cleanup) except AttributeError: req.server.register_cleanup(req, cherrypy_cleanup) class _ReadOnlyRequest: expose = ('read', 'readline', 'readlines') def __init__(self, req): for method in self.expose: self.__dict__[method] = getattr(req, method) recursive = False _isSetUp = False def handler(req): from mod_python import apache try: global _isSetUp if not _isSetUp: setup(req) _isSetUp = True # Obtain a Request object from CherryPy local = req.connection.local_addr local = httputil.Host(local[0], local[1], req.connection.local_host or "") remote = req.connection.remote_addr remote = httputil.Host(remote[0], remote[1], req.connection.remote_host or "") scheme = req.parsed_uri[0] or 'http' req.get_basic_auth_pw() try: # apache.mpm_query only became available in mod_python 3.1 q = apache.mpm_query threaded = q(apache.AP_MPMQ_IS_THREADED) forked = q(apache.AP_MPMQ_IS_FORKED) except AttributeError: bad_value = ("You must provide a PythonOption '%s', " "either 'on' or 'off', when running a version " "of mod_python < 3.1") threaded = options.get('multithread', '').lower() if threaded == 'on': threaded = True elif threaded == 'off': threaded = False else: raise ValueError(bad_value % "multithread") forked = options.get('multiprocess', '').lower() if forked == 'on': forked = True elif forked == 'off': forked = False else: raise ValueError(bad_value % "multiprocess") sn = cherrypy.tree.script_name(req.uri or "/") if sn is None: send_response(req, '404 Not Found', [], '') else: app = cherrypy.tree.apps[sn] method = req.method path = req.uri qs = req.args or "" reqproto = req.protocol headers = copyitems(req.headers_in) rfile = _ReadOnlyRequest(req) prev = None try: redirections = [] while True: request, response = app.get_serving(local, remote, scheme, "HTTP/1.1") request.login = req.user request.multithread = bool(threaded) request.multiprocess = bool(forked) request.app = app request.prev = prev # Run the CherryPy Request object and obtain the response try: request.run(method, path, qs, reqproto, headers, rfile) break except cherrypy.InternalRedirect: ir = sys.exc_info()[1] app.release_serving() prev = request if not recursive: if ir.path in redirections: raise RuntimeError("InternalRedirector visited the " "same URL twice: %r" % ir.path) else: # Add the *previous* path_info + qs to redirections. if qs: qs = "?" + qs redirections.append(sn + path + qs) # Munge environment and try again. method = "GET" path = ir.path qs = ir.query_string rfile = BytesIO() send_response(req, response.output_status, response.header_list, response.body, response.stream) finally: app.release_serving() except: tb = format_exc() cherrypy.log(tb, 'MOD_PYTHON', severity=logging.ERROR) s, h, b = bare_error() send_response(req, s, h, b) return apache.OK def send_response(req, status, headers, body, stream=False): # Set response status req.status = int(status[:3]) # Set response headers req.content_type = "text/plain" for header, value in headers: if header.lower() == 'content-type': req.content_type = value continue req.headers_out.add(header, value) if stream: # Flush now so the status and headers are sent immediately. 
req.flush() # Set response body if isinstance(body, basestring): req.write(body) else: for seg in body: req.write(seg) # --------------- Startup tools for CherryPy + mod_python --------------- # import os import re try: import subprocess def popen(fullcmd): p = subprocess.Popen(fullcmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True) return p.stdout except ImportError: def popen(fullcmd): pipein, pipeout = os.popen4(fullcmd) return pipeout def read_process(cmd, args=""): fullcmd = "%s %s" % (cmd, args) pipeout = popen(fullcmd) try: firstline = pipeout.readline() if (re.search(ntob("(not recognized|No such file|not found)"), firstline, re.IGNORECASE)): raise IOError('%s must be on your system path.' % cmd) output = firstline + pipeout.read() finally: pipeout.close() return output class ModPythonServer(object): template = """ # Apache2 server configuration file for running CherryPy with mod_python. DocumentRoot "/" Listen %(port)s LoadModule python_module modules/mod_python.so <Location %(loc)s> SetHandler python-program PythonHandler %(handler)s PythonDebug On %(opts)s </Location> """ def __init__(self, loc="/", port=80, opts=None, apache_path="apache", handler="cherrypy._cpmodpy::handler"): self.loc = loc self.port = port self.opts = opts self.apache_path = apache_path self.handler = handler def start(self): opts = "".join([" PythonOption %s %s\n" % (k, v) for k, v in self.opts]) conf_data = self.template % {"port": self.port, "loc": self.loc, "opts": opts, "handler": self.handler, } mpconf = os.path.join(os.path.dirname(__file__), "cpmodpy.conf") f = open(mpconf, 'wb') try: f.write(conf_data) finally: f.close() response = read_process(self.apache_path, "-k start -f %s" % mpconf) self.ready = True return response def stop(self): os.popen("apache -k stop") self.ready = False
<gh_stars>0 #set( $symbol_pound = '#' ) #set( $symbol_dollar = '$' ) #set( $symbol_escape = '\' ) package ${package}.transformer; import org.apache.log4j.Logger; import org.finra.datagenerator.consumer.DataPipe; import org.finra.datagenerator.consumer.DataTransformer; import java.util.Map; import java.util.Random; public class SampleMachineTransformer implements DataTransformer { protected static final Logger log = Logger.getLogger(SampleMachineTransformer.class); private final Random rand = new Random(System.currentTimeMillis()); public void transform(DataPipe cr) { for (Map.Entry<String, String> entry : cr.getDataMap().entrySet()) { String value = entry.getValue(); if (value.equals("${symbol_pound}{customplaceholder}")) { // Generate a random number int ran = rand.nextInt(); entry.setValue(String.valueOf(ran)); } } } }
/**************************************************************************
 * make_cs1_contiguous() - for es2 and above, remap CS1 behind CS0 so that
 * a command-line mem=xyz can use all memory without discontiguous-memory
 * support compiled in. Could do it at the ATAG, but there really are two
 * banks... Called as part of 2nd phase DDR init.
 **************************************************************************/
void make_cs1_contiguous(void)
{
	u32 size, a_add_low, a_add_high;

	size = get_sdr_cs_size(CS0);
	size /= SZ_32M;				/* CS0 size in 32 MiB units */
	a_add_high = (size & 3) << 8;		/* size bits [1:0] -> cs_cfg bits [9:8] */
	a_add_low = (size & 0x3C) >> 2;		/* size bits [5:2] -> cs_cfg bits [3:0] */
	writel((a_add_high | a_add_low), &sdrc_base->cs_cfg);
}
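/*
 * Worked example, assuming SZ_32M is 32 MiB: with a 128 MiB CS0,
 * size = 128 MiB / 32 MiB = 4, so
 *   a_add_high = (4 & 3)    << 8 = 0x000   (size bits [1:0])
 *   a_add_low  = (4 & 0x3C) >> 2 = 0x001   (size bits [5:2])
 * and cs_cfg is written with 0x001, placing CS1 immediately above CS0.
 */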
package lesson;

/**
 * Created by igor on 19.07.2015.
 */
public class SimpleEvents {

    public static void main(String[] args) {
        PrintReport printReport = new PrintReport();
        printReport.addEventListener(new PrintEvent() {
            @Override
            public void action() {
                System.out.println("Event");
            }
        });
        printReport.printSomething();
    }
}

interface PrintEvent {
    void action();
}

class PrintReport {

    PrintEvent event = null;

    public void addEventListener(PrintEvent event) {
        this.event = event;
    }

    public void printSomething() {
        if (event != null) {
            event.action();
        }
    }
}
/*----------------------------------------------------------------------------- Talking BIOS device driver for the AT&T PC6300. Copyright (C) <NAME> 1987 This software may be freely used and distributed for any non-profit purpose. *----------------------------------------------------------------------------- */ /* savebuf.c: save the accumulated text in a session file */ #include <stdio.h> #ifdef MSDOS #include <dos.h> #else #include <sgtty.h> #endif #ifdef MSDOS struct { char far *bufbot; char far *buftop; char far *buftail; char far *bufhead; } devparams; struct WORDREGS regs; setenv(){} /* no environment */ #endif char *filename = "session.log"; main(argc, argv) int argc; char **argv; { FILE *f; register char c; #ifdef MSDOS int fh; char far *cp; short i, j; #else int bsize; char *buf, *malloc(); #endif if(argc > 1) filename = argv[1]; if(argc > 2) { fprintf(stderr, "usage: savebuf [file]\n"); exit(1); } f = fopen(filename, "w"); if(!f) { fprintf(stderr, "savebuf: cannot create file %s\n", filename); exit(1); } #ifdef MSDOS /* get device driver parameters from ioctl call */ fh = open("SPEAK$", 0); regs.bx = fh; regs.ax = 0x4402; /* read ioctl string */ regs.cx = 16; /* get 16 bytes */ regs.dx = (unsigned) &devparams; intdos(&regs, &regs); if(regs.cflag & 1 || regs.ax != 16) { fprintf(stderr, "savebuf: bad ioctl() call on device driver SPEAK$\n"); exit(1); } close(fh); if(devparams.bufbot) { if(!devparams.buftop) { /* indicates screen mode */ cp = devparams.bufbot; for(i=0; i<25; ++i) { for(j=0; j<80; ++j) { putc(*cp, f); cp += 2; } putc('\n', f); } } else { cp = devparams.buftail; while(cp != devparams.bufhead) { c = *cp; if(c == '\r') c = '\n'; putc(c,f); if(++cp == devparams.buftop) cp = devparams.bufbot; } /* end loop writing file */ } /* screen/line */ } /* nonzero pointers */ #else /* I assume stdin is opened to the speech device driver */ if(ioctl(0, TIOCSDSIZE, &bsize) < 0) { fprintf(stderr, "ioctl failed, cannot get size of text buffer from speech device driver\n"); exit(1); } if(!(buf = malloc(bsize))) { fprintf(stderr, "cannot malloc %d bytes for text buffer\n", bsize); exit(1); } /* now copy the text */ if(ioctl(0, TIOCSDGETBUF, buf) < 0) { fprintf(stderr, "ioctl failed, cannot copy text buffer from speech device driver\n"); exit(1); } /* now write the file */ while((c = *buf++) > 0) { if(c == '\r') c = '\n'; putc(c,f); } /* end writing text buffer to file */ #endif fclose(f); exit(0); } /* main */
#include <bits/stdc++.h> #define FOR(i, u, v) for (int i = u; i <= v; i++) #define ll long long #define pii pair<ll, ll> #define mp make_pair #define F first #define S second #define PB push_back #define N 105 using namespace std; int n, a[N]; int main() { ios_base::sync_with_stdio(0); cin.tie(0); cout.tie(0); //freopen("INP.TXT", "r", stdin); cin >> n; FOR(i, 1, n) cin >> a[i]; sort(a+1, a+n+1); deque<int> q; FOR(i, 1, n) { if (i % 2 == 1) q.push_back(a[i]); else q.push_front(a[i]); } while (!q.empty()) { cout <<q.back()<<' '; q.pop_back(); } }
<filename>base/solution/SolutionGroupMetaData.cpp /* * File: SolutionGroupMetaData.cpp * */ #include "SolutionGroupMetaData.h" #include <time.h> #include <sys/time.h> using namespace khe; using sgmd = SolutionGroupMetaData; sgmd::SolutionGroupMetaData(const std::string &contrib, const std::string &pdate, const std::string &desc, const std::string &pub, const std::string &remarks): contrib(contrib), date(pdate), desc(desc), pub(pub), remarks(remarks){ meta = KheSolnGroupMetaDataMake(Util::sTc(this->contrib), Util::sTc(date), Util::sTc(desc), Util::sTc(pub), Util::sTc(remarks)); } std::string sgmd::getContributor() const{ return KheSolnGroupMetaDataContributor(meta); } std::string sgmd::getDate() const{ return KheSolnGroupMetaDataDate(meta); } std::string sgmd::getDescription() const{ return KheSolnGroupMetaDataDescription(meta); } std::string sgmd::getPublication() const{ return KheSolnGroupMetaDataPublication(meta); } std::string sgmd::getRemarks() const{ return KheSolnGroupMetaDataRemarks(meta); }
/******************************************************************************* * Copyright (c) 2011, 2013 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation * ******************************************************************************/ package org.eclipse.persistence.internal.jpa.jpql; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import org.eclipse.persistence.jpa.jpql.ExpressionTools; import org.eclipse.persistence.jpa.jpql.JPQLQueryDeclaration.Type; import org.eclipse.persistence.jpa.jpql.LiteralType; import org.eclipse.persistence.jpa.jpql.parser.AbstractEclipseLinkExpressionVisitor; import org.eclipse.persistence.jpa.jpql.parser.AbstractSchemaName; import org.eclipse.persistence.jpa.jpql.parser.CollectionExpression; import org.eclipse.persistence.jpa.jpql.parser.CollectionMemberDeclaration; import org.eclipse.persistence.jpa.jpql.parser.CollectionValuedPathExpression; import org.eclipse.persistence.jpa.jpql.parser.DeleteClause; import org.eclipse.persistence.jpa.jpql.parser.DeleteStatement; import org.eclipse.persistence.jpa.jpql.parser.Expression; import org.eclipse.persistence.jpa.jpql.parser.FromClause; import org.eclipse.persistence.jpa.jpql.parser.IdentificationVariable; import org.eclipse.persistence.jpa.jpql.parser.IdentificationVariableDeclaration; import org.eclipse.persistence.jpa.jpql.parser.JPQLExpression; import org.eclipse.persistence.jpa.jpql.parser.Join; import org.eclipse.persistence.jpa.jpql.parser.RangeVariableDeclaration; import org.eclipse.persistence.jpa.jpql.parser.ResultVariable; import org.eclipse.persistence.jpa.jpql.parser.SelectClause; import org.eclipse.persistence.jpa.jpql.parser.SelectStatement; import org.eclipse.persistence.jpa.jpql.parser.SimpleFromClause; import org.eclipse.persistence.jpa.jpql.parser.SimpleSelectClause; import org.eclipse.persistence.jpa.jpql.parser.SimpleSelectStatement; import org.eclipse.persistence.jpa.jpql.parser.SubExpression; import org.eclipse.persistence.jpa.jpql.parser.TableVariableDeclaration; import org.eclipse.persistence.jpa.jpql.parser.UpdateClause; import org.eclipse.persistence.jpa.jpql.parser.UpdateStatement; /** * This visitor visits the declaration clause of the JPQL query and creates the list of * {@link Declaration Declarations}. * * @version 2.5 * @since 2.4 * @author <NAME> */ @SuppressWarnings("nls") final class DeclarationResolver { /** * The first {@link Declaration} that was created when visiting the declaration clause. */ private Declaration baseDeclaration; /** * The {@link Declaration} objects mapped to their identification variable. */ private List<Declaration> declarations; /** * The parent {@link DeclarationResolver} which represents the superquery's declaration or * <code>null</code> if this is used for the top-level query. */ private DeclarationResolver parent; /** * Determines whether the {@link Declaration Declaration} objects were created after visiting the * query's declaration clause. 
*/ private boolean populated; /** * The {@link JPQLQueryContext} is used to query information about the application metadata and * cached information. */ private JPQLQueryContext queryContext; /** * The result variables used to identify select expressions. */ private Collection<IdentificationVariable> resultVariables; /** * Creates a new <code>DeclarationResolver</code>. * * @param queryContext The context used to query information about the application metadata and * cached information * @param parent The parent {@link DeclarationResolver} which represents the superquery's declaration */ DeclarationResolver(JPQLQueryContext queryContext, DeclarationResolver parent) { super(); initialize(queryContext, parent); } /** * Adds a "virtual" range variable declaration that will be used when parsing a JPQL fragment. * * @param entityName The name of the entity to be accessible with the given variable name * @param variableName The identification variable used to navigate to the entity */ void addRangeVariableDeclaration(String entityName, String variableName) { // This method should only be used by HermesParser.buildSelectionCriteria(), // initializes these variables right away since this method should only be // called by HermesParser.buildSelectionCriteria() populated = true; resultVariables = Collections.emptySet(); // Create the "virtual" range variable declaration RangeVariableDeclaration rangeVariableDeclaration = new RangeVariableDeclaration( entityName, variableName ); // Make sure the identification variable was not declared more than once, // this could cause issues when trying to resolve it RangeDeclaration declaration = new RangeDeclaration(queryContext); declaration.rootPath = entityName; declaration.baseExpression = rangeVariableDeclaration; declaration.identificationVariable = (IdentificationVariable) rangeVariableDeclaration.getIdentificationVariable(); declarations.add(declaration); // Make sure it is marked as the base declaration and the base Expression is created if (baseDeclaration == null) { baseDeclaration = declaration; // Make sure the base Expression is initialized, which will cache it // into the right context as well (the top-level context) declaration.getQueryExpression(); } } /** * Converts the given {@link Declaration} from being set as a range variable declaration to * a path expression declaration. * <p> * In this query "<code>UPDATE Employee SET firstName = 'MODIFIED' WHERE (SELECT COUNT(m) FROM * managedEmployees m) > 0</code>" <em>managedEmployees</em> is an unqualified collection-valued * path expression (<code>employee.managedEmployees</code>). 
* * @param declaration The {@link Declaration} that was parsed to range over an abstract schema * name but is actually ranging over a path expression * @param outerVariableName The identification variable coming from the parent identification * variable declaration */ void convertUnqualifiedDeclaration(RangeDeclaration declaration, String outerVariableName) { QualifyRangeDeclarationVisitor visitor = new QualifyRangeDeclarationVisitor(); // Convert the declaration expression into a derived declaration visitor.declaration = declaration; visitor.outerVariableName = outerVariableName; visitor.queryContext = queryContext.getCurrentContext(); declaration.declarationExpression.accept(visitor); // Now replace the old declaration with the new one int index = declarations.indexOf(declaration); declarations.set(index, visitor.declaration); // Update the base declaration if (baseDeclaration == declaration) { baseDeclaration = visitor.declaration; } } /** * Retrieves the {@link Declaration} for which the given variable name is used to navigate to the * "root" object. * * @param variableName The name of the identification variable that is used to navigate a "root" * object * @return The {@link Declaration} containing the information about the identification variable * declaration */ Declaration getDeclaration(String variableName) { for (Declaration declaration : declarations) { if (declaration.getVariableName().equalsIgnoreCase(variableName)) { return declaration; } } return null; } /** * Returns the ordered list of {@link Declaration Declarations}. * * @return The {@link Declaration Declarations} of the current query that was parsed */ List<Declaration> getDeclarations() { return declarations; } /** * Returns the first {@link Declaration} that was created after visiting the declaration clause. * * @return The first {@link Declaration} object */ Declaration getFirstDeclaration() { return baseDeclaration; } /** * Returns the parsed representation of a <b>JOIN FETCH</b> that were defined in the same * declaration than the given range identification variable name. * * @param variableName The name of the identification variable that should be used to define an entity * @return The <b>JOIN FETCH</b> expressions used in the same declaration or an empty collection * if none was defined */ Collection<Join> getJoinFetches(String variableName) { Declaration declaration = getDeclaration(variableName); if ((declaration != null) && (declaration.getType() == Type.RANGE)) { RangeDeclaration rangeDeclaration = (RangeDeclaration) declaration; if (rangeDeclaration.hasJoins()) { return rangeDeclaration.getJoinFetches(); } } return null; } /** * Returns the parent of this {@link DeclarationResolver}. * * @return The parent of this {@link DeclarationResolver} if this is used for a subquery or * <code>null</code> if this is used for the top-level query */ DeclarationResolver getParent() { return parent; } /** * Returns the variables that got defined in the select expression. This only applies to JPQL * queries built for JPA 2.0 or later. 
* * @return The variables identifying the select expressions, if any was defined or an empty set * if none were defined */ Collection<IdentificationVariable> getResultVariables() { if (parent != null) { return parent.getResultVariables(); } if (resultVariables == null) { ResultVariableVisitor visitor = new ResultVariableVisitor(); queryContext.getJPQLExpression().accept(visitor); resultVariables = visitor.resultVariables; } return resultVariables; } /** * Initializes this <code>DeclarationResolver</code>. * * @param queryContext The context used to query information about the query * @param parent The parent {@link DeclarationResolver}, which is not <code>null</code> when this * resolver is created for a subquery */ private void initialize(JPQLQueryContext queryContext, DeclarationResolver parent) { this.parent = parent; this.queryContext = queryContext; this.declarations = new LinkedList<Declaration>(); } /** * Determines whether the given identification variable is defining a <b>JOIN</b> expression or * in a <code>IN</code> expressions for a collection-valued field. If the search didn't find the * identification in this resolver, then it will traverse the parent hierarchy. * * @param variableName The identification variable to check for what it maps * @return <code>true</code> if the given identification variable maps a collection-valued field * defined in a <code>JOIN</code> or <code>IN</code> expression; <code>false</code> otherwise */ boolean isCollectionIdentificationVariable(String variableName) { boolean result = isCollectionIdentificationVariableImp(variableName); if (!result && (parent != null)) { result = parent.isCollectionIdentificationVariableImp(variableName); } return result; } /** * Determines whether the given identification variable is defining a <b>JOIN</b> expression or * in a <code>IN</code> expressions for a collection-valued field. The search does not traverse * the parent hierarchy. * * @param variableName The identification variable to check for what it maps * @return <code>true</code> if the given identification variable maps a collection-valued field * defined in a <code>JOIN</code> or <code>IN</code> expression; <code>false</code> otherwise */ boolean isCollectionIdentificationVariableImp(String variableName) { for (Declaration declaration : declarations) { switch (declaration.getType()) { case COLLECTION: { if (declaration.getVariableName().equalsIgnoreCase(variableName)) { return true; } return false; } case RANGE: case DERIVED: { AbstractRangeDeclaration rangeDeclaration = (AbstractRangeDeclaration) declaration; // Check the JOIN expressions for (Join join : rangeDeclaration.getJoins()) { String joinVariableName = queryContext.literal( join.getIdentificationVariable(), LiteralType.IDENTIFICATION_VARIABLE ); if (joinVariableName.equalsIgnoreCase(variableName)) { // Make sure the JOIN expression maps a collection mapping Declaration joinDeclaration = queryContext.getDeclaration(joinVariableName); return joinDeclaration.getMapping().isCollectionMapping(); } } } } } return false; } /** * Determines whether the given variable name is an identification variable name used to define * an abstract schema name. 
* * @param variableName The name of the variable to verify if it's defined in a range variable * declaration in the current query or any parent query * @return <code>true</code> if the variable name is mapping an abstract schema name; <code>false</code> * if it's defined in a collection member declaration */ boolean isRangeIdentificationVariable(String variableName) { boolean result = isRangeIdentificationVariableImp(variableName); if (!result && (parent != null)) { result = parent.isRangeIdentificationVariableImp(variableName); } return result; } private boolean isRangeIdentificationVariableImp(String variableName) { Declaration declaration = getDeclaration(variableName); return (declaration != null) && declaration.getType().isRange(); } /** * Determines whether the given variable is a result variable or not. * * @param variableName The variable to check if it used to identify a select expression * @return <code>true</code> if the given variable is defined as a result variable; * <code>false</code> otherwise */ boolean isResultVariable(String variableName) { // Only the top-level SELECT query has result variables if (parent != null) { return parent.isResultVariable(variableName); } for (IdentificationVariable resultVariable : getResultVariables()) { if (resultVariable.getText().equalsIgnoreCase(variableName)) { return true; } } return false; } /** * Visits the given {@link Expression} (which is either the top-level query or a subquery) and * retrieve the information from its declaration clause. * * @param expression The {@link Expression} to visit in order to retrieve the information * contained in the given query's declaration */ void populate(Expression expression) { if (!populated) { populated = true; populateImp(expression); } } private void populateImp(Expression expression) { DeclarationVisitor visitor = new DeclarationVisitor(); visitor.queryContext = queryContext; visitor.declarations = declarations; expression.accept(visitor); baseDeclaration = visitor.baseDeclaration; } private static class DeclarationVisitor extends AbstractEclipseLinkExpressionVisitor { /** * The first {@link Declaration} that was created when visiting the declaration clause. */ private Declaration baseDeclaration; /** * This flag is used to determine what to do in {@link #visit(SimpleSelectStatement)}. */ private boolean buildingDeclaration; /** * The {@link Declaration} being populated. */ private Declaration currentDeclaration; /** * The list of {@link Declaration} objects to which new ones will be added by traversing the * declaration clause. */ List<Declaration> declarations; /** * The {@link JPQLQueryContext} is used to query information about the application metadata and * cached information. 
         */
        JPQLQueryContext queryContext;

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(AbstractSchemaName expression) {

            String rootPath = expression.getText();

            // Abstract schema name (entity name)
            if (rootPath.indexOf('.') == -1) {
                currentDeclaration = new RangeDeclaration(queryContext);
            }
            else {

                // Check to see if the "root" path is a class name before assuming it's a derived path
                Class<?> type = queryContext.getType(rootPath);

                // Fully qualified class name
                if (type != null) {
                    RangeDeclaration declaration = new RangeDeclaration(queryContext);
                    declaration.type = type;
                    currentDeclaration = declaration;
                }
                // Derived path expression (for subqueries)
                else {
                    currentDeclaration = new DerivedDeclaration(queryContext);
                }
            }

            currentDeclaration.rootPath = rootPath;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(CollectionExpression expression) {
            expression.acceptChildren(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(CollectionMemberDeclaration expression) {

            Declaration declaration = new CollectionDeclaration(queryContext);
            declaration.baseExpression = expression.getCollectionValuedPathExpression();
            declaration.rootPath = declaration.baseExpression.toActualText();
            declaration.declarationExpression = expression;
            declarations.add(declaration);

            // A derived collection member declaration does not have an identification variable
            if (!expression.isDerived()) {
                IdentificationVariable identificationVariable = (IdentificationVariable) expression.getIdentificationVariable();
                declaration.identificationVariable = identificationVariable;
            }

            // This collection member declaration is the first defined,
            // it is then the base Declaration
            if (baseDeclaration == null) {
                baseDeclaration = declaration;
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(CollectionValuedPathExpression expression) {

            String rootPath = expression.toParsedText();

            // Check to see if the "root" path is a class name before assuming it's a derived path
            Class<?> type = queryContext.getType(rootPath);

            // Fully qualified class name
            if (type != null) {
                RangeDeclaration declaration = new RangeDeclaration(queryContext);
                declaration.type = type;
                currentDeclaration = declaration;
            }
            // Derived path expression (for subqueries)
            else {
                currentDeclaration = new DerivedDeclaration(queryContext);
            }

            currentDeclaration.rootPath = rootPath;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(DeleteClause expression) {
            try {
                expression.getRangeVariableDeclaration().accept(this);
            }
            finally {
                currentDeclaration = null;
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(DeleteStatement expression) {
            expression.getDeleteClause().accept(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(FromClause expression) {
            expression.getDeclaration().accept(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(IdentificationVariableDeclaration expression) {
            try {
                // Visit the RangeVariableDeclaration, it will create the right Declaration
                expression.getRangeVariableDeclaration().accept(this);
                currentDeclaration.declarationExpression = expression;

                // Now visit the JOIN expressions
                expression.getJoins().accept(this);
            }
            finally {
                currentDeclaration = null;
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(Join expression) {

            ((AbstractRangeDeclaration) currentDeclaration).addJoin(expression);

            if (!expression.hasFetch() || expression.hasIdentificationVariable()) {
                IdentificationVariable identificationVariable = (IdentificationVariable) expression.getIdentificationVariable();

                JoinDeclaration declaration = new JoinDeclaration(queryContext);
                declaration.baseExpression = expression;
                declaration.identificationVariable = identificationVariable;
                declarations.add(declaration);
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(JPQLExpression expression) {
            expression.getQueryStatement().accept(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(RangeVariableDeclaration expression) {

            // Traverse the "root" object, it will create the right Declaration
            buildingDeclaration = true;
            expression.getRootObject().accept(this);
            buildingDeclaration = false;

            // Cache more information
            currentDeclaration.identificationVariable = (IdentificationVariable) expression.getIdentificationVariable();
            currentDeclaration.baseExpression = expression;
            declarations.add(currentDeclaration);

            // This range variable declaration is the first defined,
            // it is then the base declaration
            if (baseDeclaration == null) {
                baseDeclaration = currentDeclaration;
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(SelectStatement expression) {
            expression.getFromClause().accept(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(SimpleFromClause expression) {
            expression.getDeclaration().accept(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(SimpleSelectClause expression) {
            expression.getSelectExpression().accept(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(SimpleSelectStatement expression) {

            // The parent query is using a subquery in the FROM clause
            if (buildingDeclaration) {
                currentDeclaration = new SubqueryDeclaration(queryContext);
                currentDeclaration.rootPath = ExpressionTools.EMPTY_STRING;
            }
            // Simply traversing the tree to create the declarations
            else {
                expression.getFromClause().accept(this);
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(SubExpression expression) {
            expression.getExpression().accept(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(TableVariableDeclaration expression) {

            TableDeclaration declaration = new TableDeclaration(queryContext);
            declaration.declarationExpression = expression;
            declaration.baseExpression = expression.getTableExpression();
            declaration.rootPath = declaration.baseExpression.toParsedText();
            declaration.identificationVariable = (IdentificationVariable) expression.getIdentificationVariable();
            declarations.add(declaration);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(UpdateClause expression) {
            try {
                expression.getRangeVariableDeclaration().accept(this);
            }
            finally {
                currentDeclaration = null;
            }
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(UpdateStatement expression) {
            expression.getUpdateClause().accept(this);
        }
    }

    private static class QualifyRangeDeclarationVisitor extends AbstractEclipseLinkExpressionVisitor {

        /**
         * The {@link Declaration} being modified.
         */
        AbstractRangeDeclaration declaration;

        /**
         * The identification variable coming from the parent identification variable declaration.
         */
        String outerVariableName;

        /**
         * The {@link JPQLQueryContext} is used to query information about the application metadata and
         * cached information.
         */
        JPQLQueryContext queryContext;

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(CollectionValuedPathExpression expression) {

            // Create the path because CollectionValuedPathExpression.toParsedText()
            // does not contain the virtual identification variable
            StringBuilder rootPath = new StringBuilder();
            rootPath.append(outerVariableName);
            rootPath.append(".");
            rootPath.append(expression.toParsedText());
            declaration.rootPath = rootPath.toString();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(IdentificationVariableDeclaration expression) {
            expression.getRangeVariableDeclaration().accept(this);
            declaration.declarationExpression = expression;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(RangeVariableDeclaration expression) {

            DerivedDeclaration derivedDeclaration = new DerivedDeclaration(queryContext);
            derivedDeclaration.joins = declaration.joins;
            derivedDeclaration.rootPath = declaration.rootPath;
            derivedDeclaration.baseExpression = declaration.baseExpression;
            derivedDeclaration.identificationVariable = declaration.identificationVariable;
            declaration = derivedDeclaration;

            expression.setVirtualIdentificationVariable(outerVariableName, declaration.rootPath);
            expression.getRootObject().accept(this);
        }
    }

    /**
     * This visitor traverses the <code><b>SELECT</b></code> clause and retrieves the result variables.
     */
    private static class ResultVariableVisitor extends AbstractEclipseLinkExpressionVisitor {

        Set<IdentificationVariable> resultVariables;

        /**
         * Creates a new <code>ResultVariableVisitor</code>.
         */
        public ResultVariableVisitor() {
            super();
            resultVariables = new HashSet<IdentificationVariable>();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(CollectionExpression expression) {
            expression.acceptChildren(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(JPQLExpression expression) {
            expression.getQueryStatement().accept(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(ResultVariable expression) {
            IdentificationVariable identificationVariable = (IdentificationVariable) expression.getResultVariable();
            resultVariables.add(identificationVariable);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(SelectClause expression) {
            expression.getSelectExpression().accept(this);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void visit(SelectStatement expression) {
            expression.getSelectClause().accept(this);
        }
    }
}
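As a brief illustration of how these visitors are driven, the sketch below parses a query with the Hermes parser and lets the expression tree dispatch back into a visitor. It assumes the public JPQLExpression and DefaultEclipseLinkJPQLGrammar entry points, and it pretends ResultVariableVisitor were reachable from the call site (it is a private nested class above), so treat it as a minimal sketch rather than working client code.

// Illustrative sketch only; ResultVariableVisitor is private in the real code.
JPQLExpression jpqlExpression = new JPQLExpression(
    "SELECT e.salary AS s FROM Employee e",
    DefaultEclipseLinkJPQLGrammar.instance()
);

ResultVariableVisitor visitor = new ResultVariableVisitor();
jpqlExpression.accept(visitor); // visit(JPQLExpression) -> SelectStatement -> SelectClause -> ResultVariable

// visitor.resultVariables now contains the IdentificationVariable for "s"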
/**
 * Handles the {@link ClientRequestAdapter}. This method should be called when a new client
 * request is created.
 *
 * @param <C>
 *            type of carrier the adapter is providing
 * @param requestAdapter
 *            {@link ClientRequestAdapter} providing the necessary information.
 * @return Created span
 */
public <C> SpanImpl handleRequest(ClientRequestAdapter<C> requestAdapter) {
    SpanBuilderImpl builder = handleRequestInternal(requestAdapter);
    if (null == builder) {
        return null;
    }
    builder.doNotReport();
    SpanImpl span = builder.start();
    tracer.inject(span.context(), requestAdapter.getFormat(), requestAdapter.getCarrier());
    return span;
}
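For illustration, a caller wrapping an outgoing HTTP connection might use handleRequest as in the sketch below. The HttpUrlConnectionRequestAdapter type and the finish() call are assumptions made for the example; only the ClientRequestAdapter contract (getFormat(), getCarrier()) comes from the code above.

// Hedged usage sketch; the adapter type below is hypothetical.
ClientRequestAdapter<HttpURLConnection> requestAdapter =
        new HttpUrlConnectionRequestAdapter(connection); // hypothetical implementation
SpanImpl span = handleRequest(requestAdapter); // injects tracing state into the carrier
try {
    connection.connect(); // perform the actual request with the injected headers
} finally {
    if (span != null) {
        span.finish(); // assumed OpenTracing-style completion of the unreported span
    }
}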
/**
 * Deletes all invalid files from disk and from the cache.
 */
private void cleanup() {
    // Iterate with an explicit iterator so expired entries can be removed in place.
    Iterator<Map.Entry<String, Long>> it = uploadIds.entrySet().iterator();
    while (it.hasNext()) {
        Map.Entry<String, Long> entry = it.next();
        if (!isTimestampValid(entry.getValue())) {
            deleteUploadedFiles(entry.getKey());
            it.remove();
        }
    }
}
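The validity check that cleanup() relies on is defined elsewhere in the class. A minimal sketch of what it might look like, assuming epoch-millisecond timestamps and a fixed time-to-live, is:

// Hypothetical sketch; the real isTimestampValid may use a different TTL or clock source.
private static final long UPLOAD_TTL_MS = 24L * 60 * 60 * 1000; // assumed 24-hour lifetime

private boolean isTimestampValid(Long timestamp) {
    // An entry is valid while its age is below the assumed time-to-live.
    return timestamp != null && System.currentTimeMillis() - timestamp <= UPLOAD_TTL_MS;
}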
<gh_stars>1-10
package com.zheng.user.dao.model;

import java.io.Serializable;

public class VerifyCode implements Serializable {
    private Integer verifyId;

    private String phone;

    private String code;

    private Long createTime;

    /**
     * Update time, such as the time the code was used or activated
     *
     * @mbg.generated
     */
    private Long updateTime;

    /**
     * 1: registration, 2: password recovery, 3: password change, 4: phone number change, 5: phone number change (new number)
     *
     * @mbg.generated
     */
    private String type;

    private Integer status;

    private static final long serialVersionUID = 1L;

    public Integer getVerifyId() {
        return verifyId;
    }

    public void setVerifyId(Integer verifyId) {
        this.verifyId = verifyId;
    }

    public String getPhone() {
        return phone;
    }

    public void setPhone(String phone) {
        this.phone = phone;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    public Long getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Long createTime) {
        this.createTime = createTime;
    }

    public Long getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(Long updateTime) {
        this.updateTime = updateTime;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(getClass().getSimpleName());
        sb.append(" [");
        sb.append("Hash = ").append(hashCode());
        sb.append(", verifyId=").append(verifyId);
        sb.append(", phone=").append(phone);
        sb.append(", code=").append(code);
        sb.append(", createTime=").append(createTime);
        sb.append(", updateTime=").append(updateTime);
        sb.append(", type=").append(type);
        sb.append(", status=").append(status);
        sb.append("]");
        return sb.toString();
    }

    @Override
    public boolean equals(Object that) {
        if (this == that) {
            return true;
        }
        if (that == null) {
            return false;
        }
        if (getClass() != that.getClass()) {
            return false;
        }
        VerifyCode other = (VerifyCode) that;
        return (this.getVerifyId() == null ? other.getVerifyId() == null : this.getVerifyId().equals(other.getVerifyId()))
            && (this.getPhone() == null ? other.getPhone() == null : this.getPhone().equals(other.getPhone()))
            && (this.getCode() == null ? other.getCode() == null : this.getCode().equals(other.getCode()))
            && (this.getCreateTime() == null ? other.getCreateTime() == null : this.getCreateTime().equals(other.getCreateTime()))
            && (this.getUpdateTime() == null ? other.getUpdateTime() == null : this.getUpdateTime().equals(other.getUpdateTime()))
            && (this.getType() == null ? other.getType() == null : this.getType().equals(other.getType()))
            && (this.getStatus() == null ? other.getStatus() == null : this.getStatus().equals(other.getStatus()));
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((getVerifyId() == null) ? 0 : getVerifyId().hashCode());
        result = prime * result + ((getPhone() == null) ? 0 : getPhone().hashCode());
        result = prime * result + ((getCode() == null) ? 0 : getCode().hashCode());
        result = prime * result + ((getCreateTime() == null) ? 0 : getCreateTime().hashCode());
        result = prime * result + ((getUpdateTime() == null) ? 0 : getUpdateTime().hashCode());
        result = prime * result + ((getType() == null) ? 0 : getType().hashCode());
        result = prime * result + ((getStatus() == null) ? 0 : getStatus().hashCode());
        return result;
    }
}
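As a brief usage illustration (all field values below are invented for the example), the generated model is populated through its setters and compared field by field via equals():

// Illustrative only; the phone number and code are made-up sample data.
VerifyCode verifyCode = new VerifyCode();
verifyCode.setPhone("13800000000");
verifyCode.setCode("123456");
verifyCode.setType("1"); // 1 = registration, per the comment above
verifyCode.setCreateTime(System.currentTimeMillis());

VerifyCode other = new VerifyCode();
other.setPhone("13800000000");
other.setCode("123456");
other.setType("1");
other.setCreateTime(verifyCode.getCreateTime());

assert verifyCode.equals(other); // equals() compares every column field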
Posted on June 28, 2011, CJ Miozzi

Diggity on eSports: StarCraft 2, League of Legends, NASL Finals (Interview)

With Europe's DreamHack setting records for the world's largest LAN festival, and South Korea's televised professional StarCraft competition circuit enjoyed by millions of fans, many North American gamers are left wondering: what about us?

Last week, I had the opportunity to chat with veteran eSports commentator Diggity about the state of eSports in North America and what it will take to reach the level of success that South Korea has enjoyed for years. Starting his casting career in late 2007 with StarCraft: Brood War, Diggity transitioned into StarCraft 2 commentary and has recently branched out into League of Legends and Fallout 3. He has cast for StarCraft 2's North American Star League, whose finals are coming up in July.

Has branching into games outside of the StarCraft series helped bring in new viewers?

Diggity: I've had some success with Fallout 3, though I don't know how much cross-viewership I've gotten. But I think LoL is turning out to be an even larger eSport than SC2 at the moment. DreamHack hit 900,000 concurrent viewers on their stream. LoL is a great game, and I don't think it needs to have an isolated viewership, so I'm hoping it makes it and we'll get some side interest in SC2 as well.

South Korea is the Mecca for StarCraft; where is League of Legends most popular?

I'm fairly certain it has a gigantic popularity base in China. I know it's also very popular in Europe and the United States. It seems to be growing very rapidly. In fact, I was trying to take a nap in my car the other day and I heard a guy just wandering by, talking to his friend, ranting about something LoL-related. Honestly, I think the way LoL is crafted, the friendliness of the user interface, and its team-based nature appeal to a broader audience than SC2. Currently, it just looks like it's popular everywhere.

Do you think League of Legends may pave the way for professional eSports leagues in North America to really kick off?

I would really hope so. I think that LoL has a couple of advantages over SC. First, it's developed to be a permanent fixture. You're not waiting for LoL 2 to come along, like with SC2, where you have the patches and you don't know how they're going to affect game balance. Second, LoL is not supposed to be a perfectly balanced game where any two heroes are exactly matched.

I definitely feel like LoL is growing interest in eSports; it's getting huge numbers absolutely everywhere. It has a different crowd than SC. The 1v1 SC crowd tends to include very opinionated individuals who are very particular about what they want their content to be. So I think they're really hard on tournament organizers and demanding of the people putting effort into the community itself. I don't know that LoL has that same problem. In LoL, the higher ELO you go, the friendlier the players generally get, because they have to work in a team environment rather than 1v1.

That being said, I know that Valve is developing DOTA2, and that could put up a big roadblock for LoL, even though it's a different game. DOTA2 might rip that audience away or split it. But currently, it looks like LoL is outpacing SC2 as far as flat viewership and baseline user growth go. The biggest factor is probably that it's free to play.
Do you think the North American audience may be more receptive to League of Legends because it's accustomed to watching team-based physical sports, like basketball?

Actually, I think SC2 is a much easier eSport to watch, because what's happening on-screen is more apparent. Like in basketball, where a player is shooting a ball into a hoop, there doesn't need to be a huge explanation of the rules, and information can be explained more easily and rapidly in an SC2 cast than in a LoL one.

There are definitely people who have watched SC2 as an eSport who haven't played SC2. I don't know anybody who would watch LoL — at least competitively — who hasn't actually played the game. But it has such a gigantic base of people who have played that it can still have that baseline viewer growth.