content
stringlengths
10
4.9M
# -*- coding: utf-8 -*- """ Created on Tue Mar 22 13:57:00 2022 @author: jdu Web based GUI of controller tuning panel. """ import os import requests from filip.clients.ngsi_v2 import ContextBrokerClient from filip.models.base import FiwareHeader import PySimpleGUIWeb as sg class ControllerPanel: def __init__(self): # initialize controller parameters (in dict) self.params = self.initialize_params() # FIWARE parameters self.cb_url = os.getenv("CB_URL", "http://localhost:1026") self.entity_id = None # will be read on the web GUI self.entity_type = "PIDController" self.service = os.getenv("FIWARE_SERVICE", '') self.service_path = os.getenv("FIWARE_SERVICE_PATH", '') # Create the fiware header fiware_header = FiwareHeader(service=self.service, service_path=self.service_path) # Create orion context broker client self.ORION_CB = ContextBrokerClient(url=self.cb_url, fiware_header=fiware_header) # initial pid controller list self.controller_list = [] try: self.refresh_list() except: pass # initialize gui window sg.theme("DarkBlue") pid_id_bar = [ [sg.Text("Controller ID", size=(10, 1)), sg.Combo(self.controller_list, key="controller_list"), sg.Button("Refresh")] ] param_bars = [ [sg.Text(param.capitalize(), size=(10, 1)), sg.InputText(self.params[param], key=param)] for param in self.params.keys() ] io_bars = [[sg.Button("Send"), sg.Button("Read")]] layout = pid_id_bar + param_bars + io_bars self.window = sg.Window("PID controller", layout, web_port=80, web_start_browser=True) def gui_update(self): """Update the shown text on web GUI""" # update parameter values for param in self.params.keys(): self.window[param].update(self.params[param]) self.window["controller_list"].Update(values=self.controller_list) self.window["controller_list"].Update(value=self.entity_id) def gui_loop(self): """GUI main loop""" try: while True: event, values = self.window.read(timeout=1000) self.entity_id = values["controller_list"] if event in (sg.WINDOW_CLOSED, None): break elif event == "Send": 
self.send(values) elif event == "Read": print("Read", flush=True) self.read() elif event == "Refresh": self.refresh_list() self.gui_update() finally: print("panel loop fails") self.window.close() os.abort() def read(self): """Read parameter values from context broker""" try: params_update = self.initialize_params() for param in self.params.keys(): params_update[param] = float(self.ORION_CB.get_attribute_value(entity_id=self.entity_id, entity_type=self.entity_type, attr_name=param)) self.params = params_update except requests.exceptions.HTTPError as err: msg = err.args[0] if "NOT FOUND" not in msg.upper(): raise print("Cannot find controller entity") self.params = self.initialize_params() finally: self.gui_update() def send(self, params): """Send new parameter values to context broker""" for param in self.params.keys(): try: value = float(params[param]) self.ORION_CB.update_attribute_value(entity_id=self.entity_id, entity_type=self.entity_type, attr_name=param, value=value) except ValueError: print(f"Wrong value type of {param}: {params[param]}. Must be numeric!") def refresh_list(self): """Refresh the controller list""" entity_list = self.ORION_CB.get_entity_list(entity_types=[self.entity_type]) if entity_list: list_new = [controller.id for controller in entity_list] else: list_new = [] if all([isinstance(controller_id, str) for controller_id in list_new]) or not list_new: self.controller_list = list_new @staticmethod def initialize_params(): """Initialize the values of all control parameters""" # initialize controller parameters shown on panel params = { "kp": "Proportional gain", "ki": "Integral gain", "kd": "Derivative gain", "limLower": "Lower limit of output", "limUpper": "Upper limit of output", "setpoint": "The set point of control variable" } return params if __name__ == "__main__": panel = ControllerPanel() panel.gui_loop()
/**
 * Eagerly loads the study's annotation groups, then each group's
 * annotation field descriptors.
 *
 * @param study study whose annotation collections are loaded.
 */
public static void loadCollection(Study study) {
    loadCollection(study.getAnnotationGroups());
    for (final AnnotationGroup annotationGroup : study.getAnnotationGroups()) {
        loadCollection(annotationGroup.getAnnotationFieldDescriptors());
    }
}
def delete_rocktype(self, rocktypename):
    """Remove the named rock type from both the lookup dict and the list.

    No-op when the name is unknown.
    """
    try:
        removed = self.rocktype.pop(rocktypename)
    except KeyError:
        return
    self.rocktypelist.remove(removed)
def Kabsch(A, B, backend="auto"):
    """Dispatch the Kabsch alignment to the torch or numpy implementation.

    `shape_and_backend` normalizes the inputs and resolves "auto" to a
    concrete backend name before dispatch.
    """
    A, B, backend = shape_and_backend(A, B, backend)
    impl = kabsch_torch if backend == "torch" else kabsch_numpy
    return impl(A[0], B[0])
package ru.ifmo.ctddev.gmwcs.solver;

import ilog.concert.IloException;
import ilog.concert.IloLinearNumExpr;
import ilog.concert.IloNumExpr;
import ilog.concert.IloNumVar;
import ilog.cplex.IloCplex;
import ru.ifmo.ctddev.gmwcs.Pair;
import ru.ifmo.ctddev.gmwcs.TimeLimit;
import ru.ifmo.ctddev.gmwcs.graph.*;

import java.util.*;

/**
 * CPLEX-backed MIP solver for the (rooted) maximum-weight connected subgraph
 * problem. Builds boolean selection variables for nodes and edges plus
 * directed arc variables, and maximizes the total weight of selected units.
 * The numbered comments, e.g. (31), (32), appear to reference equations in an
 * accompanying paper — NOTE(review): confirm which formulation is meant.
 */
public class RLTSolver implements RootedSolver {
    /** Threshold above which a 0/1 variable's value is treated as "selected". */
    public static final double EPS = 0.01;
    private IloCplex cplex;
    // y: node selection variables; w: edge selection variables
    private Map<Node, IloNumVar> y;
    private Map<Edge, IloNumVar> w;
    // x: per-edge pair of directed arc variables (toward source / toward target)
    private Map<Edge, Pair<IloNumVar, IloNumVar>> x;
    // d: per-node continuous "distance" variables used by connectivity constraints
    private Map<Node, IloNumVar> d;
    // x0: indicator that a node acts as the (single) root
    private Map<Node, IloNumVar> x0;
    private TimeLimit tl;
    private int threads;
    private boolean suppressOutput;
    private Graph graph;
    // lower bound enforced on the objective value
    private double minimum;
    private Node root;
    private boolean isSolvedToOptimality;
    // limits forwarded to the cut Separator
    private int maxToAddCuts;
    private int considerCuts;

    /** Defaults: no time limit, single thread, unbounded objective, unlimited cuts. */
    public RLTSolver() {
        tl = new TimeLimit(Double.POSITIVE_INFINITY);
        threads = 1;
        this.minimum = -Double.MAX_VALUE;
        maxToAddCuts = considerCuts = Integer.MAX_VALUE;
    }

    /** Caps how many separator cuts may be added. */
    public void setMaxToAddCuts(int num) {
        maxToAddCuts = num;
    }

    /** Caps how many candidate cuts the separator considers. */
    public void setConsideringCuts(int num) {
        considerCuts = num;
    }

    /** Sets the wall-clock budget shared across solve calls. */
    public void setTimeLimit(TimeLimit tl) {
        this.tl = tl;
    }

    /**
     * Sets the CPLEX thread count.
     * @throws IllegalArgumentException if {@code threads < 1}
     */
    public void setThreadsNum(int threads) {
        if (threads < 1) {
            throw new IllegalArgumentException();
        }
        this.threads = threads;
    }

    /** Fixes the root node; {@code null} lets the solver pick one. */
    public void setRoot(Node root) {
        this.root = root;
    }

    /**
     * Builds the model for {@code graph}, runs CPLEX, and returns the selected
     * nodes and edges (empty list if no solution was found). Spent time is
     * charged against the {@link TimeLimit}; the CPLEX object is always freed.
     *
     * @throws SolverException wrapping any {@link IloException} from CPLEX
     */
    @Override
    public List<Unit> solve(Graph graph) throws SolverException {
        try {
            cplex = new IloCplex();
            this.graph = graph;
            initVariables();
            addConstraints();
            addObjective();
            maxSizeConstraints();
            long timeBefore = System.currentTimeMillis();
            if (root == null) {
                // no fixed root: add symmetry-breaking over root choice instead
                breakRootSymmetry();
            } else {
                tighten();
            }
            breakTreeSymmetries();
            tuning(cplex);
            boolean solFound = cplex.solve();
            tl.spend(Math.min(tl.getRemainingTime(),
                    (System.currentTimeMillis() - timeBefore) / 1000.0));
            if (solFound) {
                return getResult();
            }
            return Collections.emptyList();
        } catch (IloException e) {
            throw new SolverException(e.getMessage());
        } finally {
            cplex.end();
        }
    }

    /**
     * For every edge, couples the endpoint distance variables so that a
     * selected edge forces adjacent d-values (breaks equivalent orderings).
     */
    private void breakTreeSymmetries() throws IloException {
        int n = graph.vertexSet().size();
        for (Edge e : graph.edgeSet()) {
            Node from = graph.getEdgeSource(e);
            Node to = graph.getEdgeTarget(e);
            cplex.addLe(cplex.sum(d.get(from), cplex.prod(n - 1, w.get(e))), cplex.sum(n, d.get(to)));
            cplex.addLe(cplex.sum(d.get(to), cplex.prod(n - 1, w.get(e))), cplex.sum(n, d.get(from)));
        }
    }

    /**
     * Rooted case: registers a cut {@link Separator} over the biconnected
     * components reachable from the root and adds per-component tightening
     * constraints via {@link #dfs}.
     */
    private void tighten() throws IloException {
        Blocks blocks = new Blocks(graph);
        Separator separator = new Separator(y, w, cplex, graph);
        separator.setMaxToAdd(maxToAddCuts);
        separator.setMinToConsider(considerCuts);
        if (blocks.cutpoints().contains(root)) {
            // the root is itself a cutpoint: recurse into each incident block
            for (Set<Node> component : blocks.incidentBlocks(root)) {
                dfs(root, component, true, blocks, separator);
            }
        } else {
            dfs(root, blocks.componentOf(root), true, blocks, separator);
        }
        cplex.use(separator);
    }

    /**
     * Walks the block-cutpoint tree. For each block: registers it with the
     * separator, bounds member y-variables by the local root's y (except for
     * the fake top-level root), forbids arcs entering the local root, and
     * recurses into blocks hanging off other cutpoints.
     */
    private void dfs(Node root, Set<Node> component, boolean fake, Blocks blocks, Separator separator) throws IloException {
        separator.addComponent(graph.subgraph(component), root);
        if (!fake) {
            for (Node node : component) {
                // a node can only be selected if its block's root is selected
                cplex.addLe(cplex.diff(y.get(node), y.get(root)), 0);
            }
        }
        for (Edge e : graph.edgesOf(root)) {
            if (!component.contains(graph.getOppositeVertex(root, e))) {
                continue;
            }
            // no arc may point into the local root
            cplex.addEq(getX(e, root), 0);
        }
        for (Node cp : blocks.cutpointsOf(component)) {
            if (root != cp) {
                for (Set<Node> comp : blocks.incidentBlocks(cp)) {
                    if (comp != component) {
                        dfs(cp, comp, false, blocks, separator);
                    }
                }
            }
        }
    }

    /** True iff the last solve finished with status Optimal. */
    public boolean isSolvedToOptimality() {
        return isSolvedToOptimality;
    }

    /**
     * Extracts selected nodes and edges (variable value above {@link #EPS})
     * and records whether CPLEX proved optimality.
     */
    private List<Unit> getResult() throws IloException {
        isSolvedToOptimality = false;
        List<Unit> result = new ArrayList<>();
        for (Node node : graph.vertexSet()) {
            if (cplex.getValue(y.get(node)) > EPS) {
                result.add(node);
            }
        }
        for (Edge edge : graph.edgeSet()) {
            if (cplex.getValue(w.get(edge)) > EPS) {
                result.add(edge);
            }
        }
        if (cplex.getStatus() == IloCplex.Status.Optimal) {
            isSolvedToOptimality = true;
        }
        return result;
    }

    /** Creates all model variables (names derive from 1-based unit numbers). */
    private void initVariables() throws IloException {
        y = new LinkedHashMap<>();
        w = new LinkedHashMap<>();
        d = new LinkedHashMap<>();
        x = new LinkedHashMap<>();
        x0 = new LinkedHashMap<>();
        for (Node node : graph.vertexSet()) {
            String nodeName = Integer.toString(node.getNum() + 1);
            d.put(node, cplex.numVar(0, Double.MAX_VALUE, "d" + nodeName));
            y.put(node, cplex.boolVar("y" + nodeName));
            x0.put(node, cplex.boolVar("x_0_" + (node.getNum() + 1)));
        }
        for (Edge edge : graph.edgeSet()) {
            Node from = graph.getEdgeSource(edge);
            Node to = graph.getEdgeTarget(edge);
            String edgeName = (from.getNum() + 1) + "_" + (to.getNum() + 1);
            w.put(edge, cplex.boolVar("w_" + edgeName));
            // one arc variable per direction of the (undirected) edge
            IloNumVar in = cplex.boolVar("x_" + edgeName + "_in");
            IloNumVar out = cplex.boolVar("x_" + edgeName + "_out");
            x.put(edge, new Pair<>(in, out));
        }
    }

    /** Applies output, threading, ordering and time-limit parameters. */
    private void tuning(IloCplex cplex) throws IloException {
        if (suppressOutput) {
            cplex.setOut(null);
            cplex.setWarning(null);
        }
        cplex.setParam(IloCplex.IntParam.Threads, threads);
        cplex.setParam(IloCplex.IntParam.ParallelMode, -1);
        cplex.setParam(IloCplex.IntParam.MIPOrdType, 3);
        if (tl.getRemainingTime() <= 0) {
            // budget already exhausted: give CPLEX only a token slice
            cplex.setParam(IloCplex.DoubleParam.TiLim, EPS);
        } else if (tl.getRemainingTime() != Double.POSITIVE_INFINITY) {
            cplex.setParam(IloCplex.DoubleParam.TiLim, tl.getRemainingTime());
        }
    }

    /**
     * Unrooted case: orders nodes (natural PriorityQueue order) and forces the
     * chosen root's rank to dominate the rank of every selected node.
     */
    private void breakRootSymmetry() throws IloException {
        int n = graph.vertexSet().size();
        PriorityQueue<Node> nodes = new PriorityQueue<>();
        nodes.addAll(graph.vertexSet());
        int k = n;
        IloNumExpr[] terms = new IloNumExpr[n];
        IloNumExpr[] rs = new IloNumExpr[n];
        while (!nodes.isEmpty()) {
            Node node = nodes.poll();
            terms[k - 1] = cplex.prod(k, x0.get(node));
            rs[k - 1] = cplex.prod(k, y.get(node));
            k--;
        }
        IloNumVar sum = cplex.numVar(0, n, "prSum");
        cplex.addEq(sum, cplex.sum(terms));
        for (int i = 0; i < n; i++) {
            cplex.addGe(sum, rs[i]);
        }
    }

    /** Maximizes the weighted sum of selected units, bounded below by minimum. */
    private void addObjective() throws IloException {
        Map<Unit, IloNumVar> summands = new LinkedHashMap<>();
        Set<Unit> toConsider = new LinkedHashSet<>();
        toConsider.addAll(graph.vertexSet());
        toConsider.addAll(graph.edgeSet());
        for (Unit unit : toConsider) {
            summands.put(unit, getVar(unit));
        }
        IloNumExpr sum = unitScalProd(summands.keySet(), summands);
        cplex.addGe(sum, minimum);
        cplex.addMaximize(sum);
    }

    /** Selection variable of a unit: y for nodes, w for edges. */
    private IloNumVar getVar(Unit unit) {
        return unit instanceof Node ? y.get(unit) : w.get(unit);
    }

    @Override
    public void suppressOutput() {
        suppressOutput = true;
    }

    private void addConstraints() throws IloException {
        sumConstraints();
        otherConstraints();
        distanceConstraints();
    }

    /** Distance bounds: the root's d is forced to 0, edges bound d-differences. */
    private void distanceConstraints() throws IloException {
        int n = graph.vertexSet().size();
        for (Node v : graph.vertexSet()) {
            // d(v) <= n * (1 - x0(v)): a root node must have distance 0
            cplex.addLe(d.get(v), cplex.diff(n, cplex.prod(n, x0.get(v))));
        }
        for (Edge e : graph.edgeSet()) {
            Node from = graph.getEdgeSource(e);
            Node to = graph.getEdgeTarget(e);
            addEdgeConstraints(e, from, to);
            addEdgeConstraints(e, to, from);
        }
    }

    /** Couples d(from) and d(to) through the arc variable pointing at {@code to}. */
    private void addEdgeConstraints(Edge e, Node from, Node to) throws IloException {
        int n = graph.vertexSet().size();
        IloNumVar z = getX(e, to);
        cplex.addGe(cplex.sum(n, d.get(to)), cplex.sum(d.get(from), cplex.prod(n + 1, z)));
        cplex.addLe(cplex.sum(d.get(to), cplex.prod(n - 1, z)), cplex.sum(d.get(from), n));
    }

    /**
     * For non-negative-weight node pairs joined by a non-negative-weight edge,
     * adds y(v) <= w(e). NOTE(review): forcing the edge whenever the node is
     * selected looks like a deliberate strengthening for free edges — confirm
     * against the intended formulation.
     */
    private void maxSizeConstraints() throws IloException {
        for (Node v : graph.vertexSet()) {
            for (Node u : graph.neighborListOf(v)) {
                if (u.getWeight() >= 0) {
                    Edge e = graph.getEdge(v, u);
                    if (e != null && e.getWeight() >= 0) {
                        cplex.addLe(y.get(v), w.get(e));
                    }
                }
            }
        }
    }

    /** Arc/edge/endpoint coupling constraints. */
    private void otherConstraints() throws IloException {
        // (36), (39)
        for (Edge edge : graph.edgeSet()) {
            Pair<IloNumVar, IloNumVar> arcs = x.get(edge);
            Node from = graph.getEdgeSource(edge);
            Node to = graph.getEdgeTarget(edge);
            // at most one direction, and only if the edge is selected
            cplex.addLe(cplex.sum(arcs.first, arcs.second), w.get(edge));
            // a selected edge requires both endpoints
            cplex.addLe(w.get(edge), y.get(from));
            cplex.addLe(w.get(edge), y.get(to));
        }
    }

    /** Root uniqueness (31) and per-node in-degree/root balance (32). */
    private void sumConstraints() throws IloException {
        // (31)
        cplex.addLe(cplex.sum(graph.vertexSet().stream().map(x ->
                x0.get(x)).toArray(IloNumVar[]::new)), 1);
        if (root != null) {
            cplex.addEq(x0.get(root), 1);
        }
        // (32)
        for (Node node : graph.vertexSet()) {
            Set<Edge> edges = graph.edgesOf(node);
            IloNumVar xSum[] = new IloNumVar[edges.size() + 1];
            int i = 0;
            for (Edge edge : edges) {
                xSum[i++] = getX(edge, node);
            }
            xSum[xSum.length - 1] = x0.get(node);
            // selected node is either the root or has exactly one incoming arc
            cplex.addEq(cplex.sum(xSum), y.get(node));
        }
    }

    /** Arc variable of edge {@code e} oriented toward {@code to}. */
    private IloNumVar getX(Edge e, Node to) {
        if (graph.getEdgeSource(e) == to) {
            return x.get(e).first;
        } else {
            return x.get(e).second;
        }
    }

    /** Scalar product of unit weights with their variables. */
    private IloLinearNumExpr unitScalProd(Set<? extends Unit> units, Map<? extends Unit, IloNumVar> vars) throws IloException {
        int n = units.size();
        double[] coef = new double[n];
        IloNumVar[] variables = new IloNumVar[n];
        int i = 0;
        for (Unit unit : units) {
            coef[i] = unit.getWeight();
            variables[i++] = vars.get(unit);
        }
        return cplex.scalProd(coef, variables);
    }

    /** Sets the lower bound enforced on the objective. */
    public void setLB(double lb) {
        this.minimum = lb;
    }
}
package edu.neu.ccs.pyramid.regression.lad_boost;

import edu.neu.ccs.pyramid.dataset.DataSet;
import edu.neu.ccs.pyramid.dataset.RegDataSet;
import edu.neu.ccs.pyramid.optimization.gradient_boosting.GBOptimizer;
import edu.neu.ccs.pyramid.optimization.gradient_boosting.GradientBoosting;
import edu.neu.ccs.pyramid.regression.ConstantRegressor;
import edu.neu.ccs.pyramid.regression.Regressor;
import edu.neu.ccs.pyramid.regression.RegressorFactory;
import edu.neu.ccs.pyramid.util.MathUtil;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.util.stream.IntStream;

/**
 * Gradient-boosting optimizer for least-absolute-deviation (LAD) regression:
 * the prior is the (weighted) median of the labels and the pseudo-gradient is
 * the sign of the residual.
 *
 * Created by chengli on 10/8/16.
 */
public class LADBoostOptimizer extends GBOptimizer {
    private static final Logger logger = LogManager.getLogger();
    // regression targets, one entry per data point
    private double[] labels;

    /**
     * @param weights per-data-point weights forwarded to the base optimizer
     * @param labels  regression targets aligned with dataSet rows
     */
    public LADBoostOptimizer(GradientBoosting boosting, DataSet dataSet, RegressorFactory factory,
                             double[] weights, double[] labels) {
        super(boosting, dataSet, factory, weights);
        this.labels = labels;
    }

    /** Variant with uniform weights (delegates weight handling to GBOptimizer). */
    public LADBoostOptimizer(GradientBoosting boosting, DataSet dataSet, RegressorFactory factory, double[] labels) {
        super(boosting, dataSet, factory);
        this.labels = labels;
    }

    /** Convenience constructor pulling labels straight from the RegDataSet. */
    public LADBoostOptimizer(GradientBoosting boosting, RegDataSet dataSet, RegressorFactory factory) {
        this(boosting, dataSet, factory, dataSet.getLabels());
    }

    /** Adds the weighted median of the labels as the constant prior regressor. */
    @Override
    protected void addPriors() {
        double median = MathUtil.weightedMedian(labels, weights);
        Regressor constant = new ConstantRegressor(median);
        boosting.getEnsemble(0).add(constant);
    }

    /**
     * Pseudo-gradient of the absolute-error loss: sign of the residual
     * (label minus current score) for each data point, computed in parallel.
     */
    @Override
    protected double[] gradient(int ensembleIndex) {
        return IntStream.range(0, dataSet.getNumDataPoints()).parallel().
                mapToDouble(i -> MathUtil.sign(labels[i] - scoreMatrix.getScoresForData(i)[0])).toArray();
    }

    /** No extra per-iteration state to initialize for LAD. */
    @Override
    protected void initializeOthers() {
        return;
    }

    /** No extra per-iteration state to update for LAD. */
    @Override
    protected void updateOthers() {
        return;
    }
}
You're invited. Live TV. $20 a month. No cable company. Get 7 days FREE. | View this email in your browser. You're invited. Be one of the first to try Sling TV – FREE for 7 days. Watch the best of live TV, including ESPN, Adult Swim, Disney Channel, and more. No cable company. Add our Sports Extra, Kids Extra, and News & Info Extra as part of your FREE 7-day trial. Sling TV lets you watch on your Roku LT and higher, iOS or Android device, and PC or Mac – with more devices coming soon. START YOUR 7 DAYS FREE > The Best of Live TV. Just $20 a month. Watch your favorite live sports, shows, breaking news, plus hit movies and more with our Best of Live TV package featuring ESPN, ESPN2, TNT, TBS, Food Network, HGTV, Travel Channel, El Rey, CNN, ABC Family, Disney Channel, Cartoon Network, Adult Swim, Maker Studios, and Galavision! EXPLORE NOW > Sports Extra. Only $5 a month. Complete your ESPN experience with the greatest in sports from the U.S. and beyond with SEC Network, ESPNU, ESPNEWS, and others. Kids Extra. Only $5 a month. Add on Disney Junior, Disney XD, Boomerang, BabyTV, and ducktv! News & Info Extra. Only $5 a month. Add on Cooking Channel, DIY Network, HLN, and Bloomberg News! TRY IT OUT >
# coding: utf-8
# Read N, A, B from one line of standard input.
N, A, B = map(int, input().split())

# The two quantities printed: the largest possible intersection of an
# A-subset and a B-subset of N items is min(A, B); the smallest possible
# intersection is A + B - N when the sets must overlap, else 0.
overlap = A + B - N
if overlap < 0:
    overlap = 0
print(min(A, B), overlap)
One of the main factors businesses consider when deciding on where to relocate or expand is the available pool of college-educated workers. And that has cities competing for college-educated young adults. “The American population, contrary to popular opinion, is not very mobile, but there is one very significant exception, what we call ‘the young and the restless,’” explains Lee Fisher, president of CEOs for Cities, a national not-for-profit organization that helps U.S. cities map out economic growth. And there's one place this desired demographic, college-educated professionals between the ages of 25 and 34, tends to want to live: tight-knit urban neighborhoods that are close to work and have lots of entertainment and shopping options within an easy walk. In fact this demographic's population grew 26% from 2000 to 2010 in major cities' downtowns, or twice as fast as it did in the those cities' overall metro areas, according to a CEOs for Cities report based on U.S. Census data. That is one of the reasons city planners have been plowing money and resources into revitalizing their core business districts. “The cities that capture the mobile, college-educated ‘young and restless’ are the ones who are most likely to revitalize their downtowns and accelerate economic progress in their cities,” says Fisher. Take Denver. Civic and business leaders began work on the city's Lower Downtown neighborhood in 1989 with the issuance of $240 million in bonds. Today LoDo is a trendy 'hood of over 100 restored Victorian warehouses and buildings filled with art galleries, boutiques, local eateries and nightclubs. Now Denver is in the midst of a 20-year, seven-mega project plan to expand the revitalization efforts through the rest of the downtown district. “We carefully evaluate what the future workforce is looking for and we incorporate those demands into what we are building,” asserts Tami Door, chief executive of the Downtown Denver Partnership. 
Those demands span pedestrian walkways, a bike path grid, and “green” housing complexes comprised of smaller units, typically rentals. Residential buildings chock-full of amenities like fitness centers aren't in the cards. “This group doesn’t want to necessarily come into the development and lock themselves in at night; they want to be out connecting with the community so they want amenities near their homes,” stresses Door. The investments seem to be paying off. Denver, relative to the rest of the country, has been a faster growing city, with a population growing by about 1.3% per year, according to Moody’s Economy, and a 2012 that clocked 2.4% job growth and 3.3% economic growth. In Birmingham, Ala., the number of residents downtown has increased 32% since 2000, with 737 planned units in the construction pipeline. A stadium for the minor league baseball team the Birmingham Barons has been built at Railroad Park, a green space created on a former industrial site next to a rail corridor. Office space absorption was positive in 2012, with net 126,000 square feet leased out, and downtown employment density relative to the southern city’s size is comparable to Philadelphia’s business district, local economists are quick to point out. Yet, the city is still struggling to overcome a reputation for crime. “Despite the positive there are still people who have a negative view about downtown, particularly around the perception of crime,” sighs David Fleming, chief executive of REV Birmingham, a local economic development organization. “But if you look at the statistics, the chance of being a victim of crime in the central business district is actually less likely than in the suburbs.” Other cities are getting creative with their efforts. Over the past decade, Louisville, Ky., converted much of its subsidized housing downtown to market-rate real estate, and it expanded retail offerings. Now it’s adding a twist. 
In 2011, the mayor unveiled a public-private initiative to restore downtown Louisville’s Whiskey Row. Buildings were rescued from scheduled demolition by an investor group for promising, with the help of government aid, to preserve the facades of the area’s cast-iron buildings. Two years later renovations are under way, and the buildings are expected to house bourbon-themed restaurants and nightlife spots, adding to the success of nearby projects like the mixed-use Whiskey Row Lofts. “Bourbon is an industry that is growing in Louisville, especially downtown,” says Alan DeLisle, executive director of the Louisville Downtown Development Corporation. “Distillers are reinvesting downtown where they were once located off the river and we are building visitor centers and a streetscape plan that tells the story of the industry.” Among the bourbon businesses coming back to the area: Mitcher’s Distillery, Heaven Hill and whiskey giant Jim Beam. Still, Louisville’s downtown has a long way to go. A mere 4,500 people live in the area according to 2012 data from the Louisville Downtown Management District, up 19% from a decade earlier and nearly 45% from 1970. Yet real estate developers, demographers and economists believe demand across the U.S. for downtown living will flourish in coming years. Louisville, for example, found that 23% of residents under the age of 31 would like to move downtown, especially as more housing stock is created. “We are seeing a combination of the economics of infrastructure and the change in demographics give downtown housing markets more of a leg up,” says Jeff Soule, a fellow of the American Institute of Certified Planners and a director at the American Planning Association. Larger cities with more established business districts are already proving this to be the case. Seattle for example, has welcomed a “condo craze” in its downtown over the past year, according to Dean Jones of Realogics Sotheby’s International Realty. 
He projects that the first quarter of 2013’s median home price will be $500,000 — up 35% from a year earlier — as people flock to the area and inventory levels dwindle. — You can follow me on Twitter or subscribe to my Facebook profile. Read my Forbes column here.
Regularized matched-mode processing for source localization. This paper develops a new approach to matched-mode processing (MMP) for ocean acoustic source localization. MMP consists of decomposing far-field acoustic data measured at an array of sensors to obtain the excitations of the propagating modes, then matching these with modeled replica excitations computed for a grid of possible source locations. However, modal decomposition can be ill-posed and unstable if the sensor array does not provide an adequate spatial sampling of the acoustic field (i.e., the problem is underdetermined). For such cases, standard decomposition methods yield minimum-norm solutions that are biased towards zero. Although these methods provide a mathematical solution (i.e., a stable solution that fits the data), they may not represent the most physically meaningful solution. The new approach of regularized matched-mode processing (RMMP) carries out an independent modal decomposition prior to comparison with the replica excitations for each grid point, using the replica itself as the a priori estimate in a regularized inversion. For grid points at or near the source location, this should provide a more physically meaningful decomposition; at other points, the procedure provides a stable inversion. In this paper, RMMP is compared to standard MMP and matched-field processing for a series of realistic synthetic test cases, including a variety of noise levels and sensor array configurations, as well as the effects of environmental mismatch.
/**
 * @license
 * Copyright Google LLC All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */
import { InjectionToken } from '@angular/core';
import { CdkPortal } from '@angular/cdk/portal';
/**
 * Injection token that can be used to reference instances of `MatTabLabel`. It serves as
 * alternative token to the actual `MatTabLabel` class which could cause unnecessary
 * retention of the class and its directive metadata.
 */
// NOTE: ɵ-prefixed members below are ngcc-generated Ivy metadata; do not edit by hand.
import * as ɵngcc0 from '@angular/core';
export declare const MAT_TAB_LABEL: InjectionToken<MatTabLabel>;
/** Used to flag tab labels for use with the portal directive */
export declare class MatTabLabel extends CdkPortal {
    static ɵfac: ɵngcc0.ɵɵFactoryDef<MatTabLabel, never>;
    static ɵdir: ɵngcc0.ɵɵDirectiveDefWithMeta<MatTabLabel, "[mat-tab-label], [matTabLabel]", never, {}, {}, never>;
}

//# sourceMappingURL=tab-label.d.ts.map
/**
 * Setup data-type and message converters for the given message channel.
 * When the binding declares a content type, the channel gets the matching
 * converter, its supported data types, and an interceptor that stamps the
 * content-type header on messages that lack one.
 *
 * @param channel message channel to set the data-type and message converters
 * @param channelName the channel name
 */
@Override
public void configureMessageChannel(MessageChannel channel, String channelName) {
    Assert.isAssignable(AbstractMessageChannel.class, channel.getClass());
    final AbstractMessageChannel abstractChannel = (AbstractMessageChannel) channel;
    final BindingProperties binding =
            this.channelBindingServiceProperties.getBindingProperties(channelName);
    final String contentType = binding.getContentType();
    if (!StringUtils.hasText(contentType)) {
        // No content type configured for this binding: nothing to install.
        return;
    }
    final MimeType mimeType = MessageConverterUtils.getMimeType(contentType);
    abstractChannel.setDatatypes(this.compositeMessageConverterFactory.supportedDataTypes(mimeType));
    final SmartMessageConverter converter =
            this.compositeMessageConverterFactory.getMessageConverterForType(mimeType);
    abstractChannel.setMessageConverter(new MessageWrappingMessageConverter(converter, mimeType));
    abstractChannel.addInterceptor(new ChannelInterceptorAdapter() {
        @Override
        public Message<?> preSend(Message<?> message, MessageChannel messageChannel) {
            // Only stamp the binding's content type when the message has none.
            if (message.getHeaders().get(MessageHeaders.CONTENT_TYPE) != null) {
                return message;
            }
            return messageBuilderFactory
                    .fromMessage(message)
                    .setHeader(MessageHeaders.CONTENT_TYPE, contentType)
                    .build();
        }
    });
}
def gather_filepath_list(site_folder):
    """Collect paths of all NIfTI-like files under ``site_folder``.

    Walks the directory tree and returns every file whose full path contains
    ``.nii`` (so ``.nii`` and ``.nii.gz`` both match), as paths relative to
    ``site_folder``.

    Fix: the original stripped the prefix with
    ``fullpath.replace(site_folder + "/", "")``, which (a) replaces EVERY
    occurrence of that substring anywhere in the path and (b) silently fails
    to strip anything when ``site_folder`` ends with a slash.
    ``os.path.relpath`` handles both correctly.

    :param site_folder: root directory to search (str path).
    :return: list of relative file paths (str), in ``os.walk`` order.
    """
    import os

    filepath_list = []
    for root, _folders, files in os.walk(site_folder):
        for filename in files:
            fullpath = os.path.join(root, filename)
            if ".nii" in fullpath:
                filepath_list.append(os.path.relpath(fullpath, site_folder))
    return filepath_list
#include "framebuffer.h" int enif_make_framebuffer(ErlNifEnv* env, struct fb_fix_screeninfo finfo, struct fb_var_screeninfo vinfo, ERL_NIF_TERM fd, ERL_NIF_TERM* fb) { ERL_NIF_TERM framebuffer = enif_make_new_map(env); if (!enif_make_map_put(env, framebuffer, enif_make_atom(env, "__struct__"), enif_make_atom(env, "Elixir.Framebuffer"), &framebuffer)) return 0; if (!enif_make_map_put(env, framebuffer, enif_make_atom(env, "ref"), fd, &framebuffer)) return 0; ERL_NIF_TERM fix_screeninfo; ERL_NIF_TERM var_screeninfo; if (!finfo_to_struct(env, finfo, &fix_screeninfo)) return 0; if (!vinfo_to_struct(env, vinfo, &var_screeninfo)) return 0; if (!enif_make_map_put(env, framebuffer, enif_make_atom(env, "fix_screeninfo"), fix_screeninfo, &framebuffer)) return 0; if (!enif_make_map_put(env, framebuffer, enif_make_atom(env, "var_screeninfo"), var_screeninfo, &framebuffer)) return 0; *fb = framebuffer; return 1; } int fb_put_finfo(ErlNifEnv* env, struct fb_fix_screeninfo finfo, ERL_NIF_TERM* framebuffer) { ERL_NIF_TERM fix_screeninfo; if(!finfo_to_struct(env, finfo, &fix_screeninfo)) return 0; if (!enif_make_map_put(env, *framebuffer, enif_make_atom(env, "fix_screeninfo"), fix_screeninfo, framebuffer)) return 0; return 1; } int fb_put_vinfo(ErlNifEnv* env, struct fb_var_screeninfo vinfo, ERL_NIF_TERM* framebuffer) { ERL_NIF_TERM var_screeninfo; if(!vinfo_to_struct(env, vinfo, &var_screeninfo)) return 0; if (!enif_make_map_put(env, *framebuffer, enif_make_atom(env, "var_screeninfo"), var_screeninfo, framebuffer)) return 0; return 1; }
A Switch-Reduced Multicell-to-Multicell Battery Equalizer Based on Full-Bridge Bipolar-Resonant LC Converter Many battery equalizers have been proposed to achieve voltage consistency between series connected battery cells. Among them, the multicell-to-multicell (MC2MC) equalizers, which can directly transfer energy from consecutive more-charged cells to less-charged cells, can enable fast balancing and a high efficiency. However, due to the limitations of the equalizers, it is not possible to achieve fast equalization and reduce the size of the circuit at the same time. Therefore, a MC2MC equalizer based on a full-bridge bipolar-resonant LC Converter (FBBRLCC) is proposed in this paper, which not only implements MC2MC equalization, but also greatly reduces the circuit size by reducing the number of switches by nearly half. A mathematical model and simulation comparison with conventional equalizers are used to illustrate the high-speed equalization performance of the proposed equalizer and excellent balancing efficiency. An experimental prototype for eight cells is built to verify the performance of the proposed FBBRLCC equalizer and the balancing efficiencies in different operating modes are from 85.19% to 88.77% with the average power from 1.888 W to 14.227 W.
"use strict";

// Thin OO wrapper around crypto-js digests.
// Fix: removed the stray "<gh_stars>0" marker that preceded this module —
// it is not valid JavaScript and makes the file unparseable.
var cryptojs = require('crypto-js')

// Holds the input under `data.entry` (and an optional `data.key`).
var Crypto = function Crypto($scope) {
    this.data = $scope || {}
    this.cryptojs = cryptojs
}

// Raw crypto-js digest object (WordArray) for `data.entry`.
Crypto.prototype.MD5 = function () {
    return this.cryptojs.MD5(this.data.entry)
}

// Hex-string form of the MD5 digest.
Crypto.prototype.md5 = function () {
    return this.MD5().toString()
}

// Factory entry point.
// NOTE(review): `alga` is accepted but never used (algorithm selector?);
// kept for interface compatibility — confirm intended behavior.
export function method(entry, alga, key) {
    return new Crypto({ entry: entry, key: key })
}
// Registers mocha's global describe/it typings.
import 'mocha';

// Minimal smoke suite: a single case that asserts nothing and always passes.
describe('sample spec', function () {
  it('passes', function () {
    // intentionally a no-op
  });
});
def _build_ic(self):
    """Build the combined Earth Engine image collection.

    Joins the primary collection (``self.collection_ids[0]``) with the
    secondary cloud collection (``self.collection_ids[1]``) on identical
    ``system:index`` values, then appends each matched cloud image's bands
    to the corresponding primary image.

    Returns:
        ee.ImageCollection: primary images with cloud-mask bands added.
    """
    s2 = ee.ImageCollection(self.collection_ids[0])
    s2cloud = ee.ImageCollection(self.collection_ids[1])
    # saveFirst stores the first matching secondary image on each primary
    # image under the 'cloud_mask' property.
    s2joined = ee.Join.saveFirst('cloud_mask').apply(
        primary=s2,
        secondary=s2cloud,
        condition=ee.Filter.equals(
            leftField='system:index',
            rightField='system:index'
        )
    )
    # Fold the saved cloud image's bands into each joined image.
    s2combined = ee.ImageCollection(s2joined).map(
        lambda img: img.addBands(
            img.get('cloud_mask')
        )
    )
    return s2combined
from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC
from numpy import nan
from sklearn.metrics import accuracy_score
import pandas as pd
from utils import PreProcessing2ClassesDataset2, HandlingIO

# Grid-search an SVM over bag-of-words features of tweet text.
# For every (max_features, kernel) combination: fit on the labelled training
# file, predict the unlabelled test file, and persist the CV results, the
# fitted model, and a submission CSV.

# Preprocessing
handle_io = HandlingIO()
pathsave = "results/test23/"
train_file = "dataset/train_E6oV3lV.csv"
test_file = "dataset/test_tweets_anuFYb8.csv"

max_features = [10, 50, 100, 250, 500, 1000, 1500, 2000]
kernels = ['rbf', 'linear']

for max_feature in max_features:
    preprocessing = PreProcessing2ClassesDataset2(max_feature)
    train_dataframe = preprocessing._read_file__(train_file)
    test_dataframe = preprocessing._read_file__(test_file)

    # Drop training rows whose label is the literal string 'None'.
    train_dataframe = train_dataframe.replace(to_replace='None', value=nan).dropna()

    # Fit the vocabulary on train + test together so both share one feature
    # space. DataFrame.append is deprecated (removed in pandas 2.0), so use
    # pd.concat — behavior is identical for this stacking.
    raw_dataset = pd.concat([train_dataframe, test_dataframe],
                            ignore_index=True, sort=False)

    dataset, features_names = preprocessing._fit__(raw_dataset, col_names='tweet')
    X_training_set = dataset[:len(train_dataframe), :]
    X_testing_set = dataset[len(train_dataframe):, :]
    # Training rows come first in the concatenation, so their labels align
    # one-to-one with X_training_set.
    y_training_set = train_dataframe["label"].values

    for kernel in kernels:
        if kernel == "rbf":
            tuned_parameters = [{'gamma': [1e-3, 1e-4],
                                 'C': [1, 10, 100, 500, 1000]}]
        else:
            tuned_parameters = [{'C': [1, 10, 50, 100, 200, 500, 1000]}]

        # 5-fold cross-validated grid search over the kernel's parameters.
        clf = GridSearchCV(SVC(), tuned_parameters, cv=5)
        clf.fit(X_training_set, y_training_set)
        y_pred = clf.best_estimator_.predict(X_testing_set)

        ## Results in training process (index=False: row numbers are noise here)
        df = pd.DataFrame(clf.cv_results_, columns=clf.cv_results_.keys())
        df.to_csv(pathsave + "svm_" + str(max_feature) + "_" + kernel + ".csv",
                  index=False, header=True)

        ## Get accuracy, save results and save model
        model_name = "svm_" + str(max_feature) + "_" + kernel
        handle_io._save_model__(clf, model_name, pathsave)

        test_dataframe['label'] = y_pred
        submission = test_dataframe[['id', 'label']]
        submission.to_csv(pathsave + "submission_svm_" + str(max_feature) + "_" + kernel + ".csv",
                          index=False)  # writing data to a CSV file
Posttraumatic stress disorder at the end of life. A Caucasian, unmarried man was referred for inpatient hospice care. He was diagnosed with congestive heart failure and had a history of type 2 diabetes. The patient was referred for hospice care because of increasing difficulties caring for himself, as evidenced by two recent falls. He denied any psychiatric or substance abuse history. After admission, he began experiencing difficulty sleeping because of nightmares about being stalked or attacked, disturbing thoughts and memories that he could not put out of his mind, mild paranoia, vivid hallucination-like episodes, and intense anxiety alternating with periods of having “no feelings at all.” Moreover, he became increasingly confrontational with staff. He was a veteran of the Vietnam War. He had been drafted into the Army and served as a field medic. When asked directly about his wartime experience, he indicated that he believed the present symptoms were connected to it but changed the subject if asked to elaborate in any detail. The staff struggled with how best to care for this patient, who died 14 days after admission.
// InterStore computes the intersection (AND) of all provided bitmaps and
// stores the result under destination, returning the result's cardinality.
// A nil intersection (nothing to intersect) stores nothing and returns 0.
func (bs *Bitmaps) InterStore(destination string, names ...string) uint64 {
	// NOTE(review): intersection is called without holding bs.mu here —
	// confirm it performs its own locking.
	bm := bs.intersection(names...)
	if bm == nil {
		return 0
	}
	// Only the map write needs the lock.
	bs.mu.Lock()
	bs.bitmaps[destination] = &Bitmap{bitmap: bm}
	bs.mu.Unlock()
	// NOTE(review): cardinality is read after publishing bm; if other
	// goroutines may mutate stored bitmaps, capture it before unlocking.
	return bm.GetCardinality()
}
/**
 * Shows libraries customizer for given library manager.
 * The dialog is modal; it is always disposed on exit, whether the user
 * confirmed or cancelled.
 * @param activeLibrary if not null the activeLibrary is selected in the opened customizer
 * @param libraryManager the manager whose library storage area is customized
 * @return true if user pressed OK and libraries were successfully modified
 */
@Messages("TXT_LibrariesManager=Ant Library Manager")
public static boolean showCustomizer (Library activeLibrary, LibraryManager libraryManager) {
    org.netbeans.modules.project.libraries.ui.LibrariesCustomizer  customizer =
            new org.netbeans.modules.project.libraries.ui.LibrariesCustomizer (
                    LibrariesSupport.getLibraryStorageArea(libraryManager));
    customizer.setBorder(new EmptyBorder(12, 12, 0, 12));
    if (activeLibrary != null) {
        // Preselect the requested library in the customizer.
        customizer.setSelectedLibrary (LibrariesSupport.getLibraryImplementation(activeLibrary));
    }
    DialogDescriptor descriptor = new DialogDescriptor(customizer, TXT_LibrariesManager());
    Dialog dlg = DialogDisplayer.getDefault().createDialog(descriptor);
    setAccessibleDescription(dlg, customizer.getAccessibleContext().getAccessibleDescription());
    try {
        // Blocks until the user closes the modal dialog.
        dlg.setVisible(true);
        if (descriptor.getValue() == DialogDescriptor.OK_OPTION) {
            // Persist the edits only on explicit confirmation.
            return customizer.apply();
        } else {
            return false;
        }
    } finally {
        // Release native dialog resources on every exit path.
        dlg.dispose();
    }
}
After months of lobbying and tweaking, the NCAA's new autonomy proposal has passed. The new set of rules represents the most substantive change of rules in the history of the organization. Basically, the bigger schools will have more power than ever to determine how they operate, which will at some point mean increased benefits for players. Here's a look at the biggest autonomy questions and what it means for your team and the future of the sport. [This story was updated after January 17's autonomy vote.] 1. What new rules will we see? Four main changes happened Saturday: Schools can now offer scholarships that cover the full cost of attending the university . (Of 65 power schools, 64 voted in favor. Boston College was the lone dissent .) Coaches can no longer pull scholarships from athletes for athletics-only reasons. Athletes can now borrow against their future earnings when obtaining loss-of-value insurance. Schools made a vague pledge to limit the time athletes spend on their sports. 2. Who can make their own rules? The power conferences can make their own rules, but those changes will also be available to smaller schools. This would make sure strong programs in those conferences — UConn basketball, for example — wouldn't be put at a recruiting disadvantage. But it would also ensure smaller schools that can't afford these benefits don't have to pay them. However, there will also be rules that are passed by all of Division I that will live outside of the autonomous structure, including rules regarding scholarship limits, time demands, and athlete health. 3. What is the voting process? The power conferences (ACC, Big 12, Big Ten, Pac-12, SEC) can pass their own legislation. This involves voting by the 65 schools, 15 player representatives, and the conferences themselves. 
New rules can be approved either by 60 percent of the 65 power schools and 15 player representatives (48 votes) and three of the power conferences or by 51 percent of the 65 power schools and 15 player representatives (41 votes) and four of the power conferences. The votes of the players and the schools are counted together in the same group. Here’s how autonomy could work: pic.twitter.com/0pg0udsZtE — Inside the NCAA (@InsidetheNCAA) July 18, 2014 4. Will this help the NCAA keep the lawsuits off its back? Since the autonomy structure is not really autonomy — just more streamlined collusion — and still does not permit schools to give athletes monetary benefits, it will have no effect on the lawsuits. While athletes might be treated better, the Kessler lawsuit still claims NCAA collusion is keeping prices low and not allowing schools to give athletes what the market says they deserve, so they will not give up their lawsuits.
/**
 * Determines the metadata profile (and the CF type) based on the data contained in the provided WizardData object.
 * The user may have explicitly specified the profile(s) to use, or we may have to determine them from the
 * community info.
 *
 * @param wizardData The wizardData object containing the user input data.
 * @return The name of the metadata profile (comma-separated when several apply).
 * @throws RosettaDataException If unable to determine the metadata profile or the CF type.
 */
private String determineMetadataProfile(WizardData wizardData) throws RosettaDataException {
    String metadataProfile = wizardData.getMetadataProfile();

    String userSelectedCommunityName = wizardData.getCommunity();
    if (userSelectedCommunityName != null) {
        // A community implies its own set of profiles; collect every match.
        // Separator-first joining avoids the previous
        // StringIndexOutOfBoundsException thrown by substring(-1) when no
        // community profile matched (empty result, trailing-comma trim).
        StringBuilder sb = new StringBuilder();
        for (MetadataProfile metadataProfileResource : resourceManager.getMetadataProfiles()) {
            String match = getMetadataProfileFromCommunity(metadataProfileResource, userSelectedCommunityName);
            if (match != null) {
                if (sb.length() > 0) {
                    sb.append(",");
                }
                sb.append(match);
            }
        }
        metadataProfile = sb.toString();
    } else {
        // No community: fall back to the user's explicit choice, always
        // making sure CF is included.
        if (metadataProfile == null) {
            metadataProfile = "CF";
        } else if (!metadataProfile.contains("CF")) {
            metadataProfile = metadataProfile + ",CF";
        }
    }

    // Resolve the CF type, deriving it from the platform when not given.
    String cfType = wizardData.getCfType();
    if (Objects.isNull(cfType)) {
        if (Objects.isNull(wizardData.getPlatform())) {
            // Corrected message: this branch concerns cfType/platform, not
            // metadata profile/community (the old text was copy-pasted).
            throw new RosettaDataException(
                    "Neither cfType nor platform values present: " + wizardData.toString());
        }
        cfType = resourceManager.getCFTypeFromPlatform(wizardData.getPlatform()).replaceAll("_", " ");
    }

    // Discrete sampling geometry profiles depend on the resolved CF type.
    if (cfType.equals("Profile")) {
        metadataProfile = metadataProfile + ",RosettaProfileDsg";
    }
    if (cfType.equals("Time Series")) {
        metadataProfile = metadataProfile + ",RosettaTimeSeriesDsg";
    }
    return metadataProfile;
}
from math import ceil,gcd
from collections import deque,defaultdict
from heapq import heappush as hpush,heappop as hpop, heapify

# The parity of an integer equals the parity of its last decimal digit,
# so only the final character of the input needs to be inspected.
number_text = input()
print(int(number_text[-1]) % 2)
T cell-based tracking of multidrug resistant tuberculosis infection after brief exposure. Molecular epidemiology indicates significant transmission of Mycobacterium tuberculosis after casual contact with infectious tuberculosis cases. We investigated M. tuberculosis transmission after brief exposure using a T cell-based assay, the enzyme-linked-immunospot (ELISPOT) for IFN-gamma. After childbirth, a mother was diagnosed with sputum smear-positive multidrug-resistant tuberculosis. Forty-one neonates and 47 adults were present during her admission on the maternity unit; 11 weeks later, all underwent tuberculin skin testing (TST) and ELISPOT. We correlated test results with markers of exposure to the index case. The participants, who were asymptomatic and predominantly had no prior tuberculosis exposure, had 6.05 hours mean exposure (range: 0-65 hours) to the index case. Seventeen individuals, including two newborns, were ELISPOT-positive, and ELISPOT results correlated significantly with three of four predefined measures of tuberculosis exposure. For each hour sharing room air with the index case, the odds of a positive ELISPOT result increased by 1.05 (95% CI: 1.02-1.09, p = 0.003). Only four adults were TST-positive and TST results did not correlate with exposure. Thus, ELISPOT, but not TST, suggested quite extensive nosocomial transmission of multidrug-resistant M. tuberculosis after brief exposure. These results help to explain the apparent importance of casual contact for tuberculosis transmission, and may have implications for prevention.
# Caesar-shift an uppercase string by n positions, wrapping inside A..Z.
shift = int(input())
text = input()
base = ord('A')
alphabet_size = ord('Z') - base + 1

shifted_chars = []
for ch in text:
    offset = (ord(ch) - base + shift) % alphabet_size
    shifted_chars.append(chr(base + offset))

print(''.join(shifted_chars))
/**
 * A RequestHandler handles requests performed over a socket. Specifically it
 *  - Reads the command string specifying which method is to be invoked
 *  - Reads the appropriate arguments
 *  - Delegates the actual invocation to the given sjavac implementation
 *  - Writes the result back to the socket output stream
 *
 * None of the work performed by this class is really bound by the CPU. It
 * should be completely fine to have a large number of RequestHandlers active.
 * To limit the number of concurrent compilations, use PooledSjavac.
 *
 * <p><b>This is NOT part of any supported API.
 * If you write code that depends on this, you do so at your own risk.
 * This code and its internal interfaces are subject to change or
 * deletion without notice.</b>
 */
public class RequestHandler extends Thread {

    // Client connection serviced by this handler thread.
    private final Socket socket;
    // Compiler implementation the request is delegated to.
    private final Sjavac sjavac;

    public RequestHandler(Socket socket, Sjavac sjavac) {
        this.socket = socket;
        this.sjavac = sjavac;
    }

    /**
     * Services one compilation request: reads the argument array from the
     * socket, runs the compiler, and streams log lines plus the final return
     * code back to the client. The thread-local log is always reset on exit.
     */
    @Override
    public void run() {
        // try-with-resources closes both socket streams on every exit path.
        try (BufferedReader in = new BufferedReader(new InputStreamReader(socket.getInputStream()));
             PrintWriter out = new PrintWriter(socket.getOutputStream(), true)) {

            // Set up logging for this thread. Stream back logging messages to
            // client on the format format "level:msg".
            Log.setLogForCurrentThread(new Log(out, out) {
                @Override
                protected boolean isLevelLogged(Level l) {
                    // Make sure it is up to the client to decide whether or
                    // not this message should be displayed.
                    return true;
                }
                @Override
                protected void printLogMsg(Level msgLevel, String msg) {
                    // Follow sjavac server/client protocol: Send one line
                    // at a time and prefix with message with "level:".
                    Util.getLines(msg)
                        .map(line -> msgLevel + ":" + line)
                        .forEach(line -> super.printLogMsg(msgLevel, line));
                }
            });

            // Read argument array: first line is the count, then one argument
            // per line.
            int n = Integer.parseInt(in.readLine());
            String[] args = new String[n];
            for (int i = 0; i < n; i++) {
                args[i] = in.readLine();
            }

            // If there has been any internal errors, notify client
            checkInternalErrorLog();

            // Perform compilation
            Main.Result rc = sjavac.compile(args);

            // Send return code back to client
            out.println(LINE_TYPE_RC + ":" + rc.name());

            // Check for internal errors again.
            checkInternalErrorLog();
        } catch (Exception ex) {
            // Not much to be done at this point. The client side request
            // code will most likely throw an IOException and the
            // compilation will fail.
            Log.error(ex);
        } finally {
            // Detach the per-thread log so the next request starts clean.
            Log.setLogForCurrentThread(null);
        }
    }

    // Relays (to the client, via the thread-local log) a pointer to the
    // server's internal error log if one has been written.
    private void checkInternalErrorLog() {
        Path errorLog = ServerMain.getErrorLog().getLogDestination();
        if (errorLog != null) {
            Log.error("Server has encountered an internal error. See "
                        + errorLog.toAbsolutePath() + " for details.");
        }
    }
}
// By Ratna Priya class Solution { public List<Integer> partitionLabels(String s) { List<Integer> parlen = new ArrayList<>(); int[] lastind = new int[26]; for(int i =0; i<s.length(); i++){ lastind[s.charAt(i)-'a'] = i; } int i =0; while(i<s.length()){ int end = lastind[s.charAt(i)-'a']; int j=i; while(j!=end){ end = Math.max(end, lastind[s.charAt(j)-'a']); j++; } parlen.add(j-i+1); i = j+1; } return parlen; } }
// ---------------------------------------------------------------------------- // - Open3D: www.open3d.org - // ---------------------------------------------------------------------------- // The MIT License (MIT) // // Copyright (c) 2018 www.open3d.org // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. 
// ----------------------------------------------------------------------------

#include "open3d/t/geometry/TSDFVoxelGrid.h"

#include "open3d/Open3D.h"
#include "open3d/t/geometry/PointCloud.h"
#include "open3d/t/geometry/kernel/TSDFVoxelGrid.h"
#include "open3d/utility/Console.h"

namespace open3d {
namespace t {
namespace geometry {

// Constructs a sparse, block-based TSDF voxel grid backed by a hashmap that
// maps 3D block coordinates (Int32 x 3) to flat per-block voxel buffers.
TSDFVoxelGrid::TSDFVoxelGrid(
        std::unordered_map<std::string, core::Dtype> attr_dtype_map,
        float voxel_size,
        float sdf_trunc,
        int64_t block_resolution,
        int64_t block_count,
        const core::Device &device,
        const core::HashmapBackend &backend)
    : voxel_size_(voxel_size),
      sdf_trunc_(sdf_trunc),
      block_resolution_(block_resolution),
      block_count_(block_count),
      device_(device),
      attr_dtype_map_(attr_dtype_map) {
    // "tsdf" and "weight" attributes are mandatory; "color" is optional.
    if (attr_dtype_map_.count("tsdf") == 0 ||
        attr_dtype_map_.count("weight") == 0) {
        utility::LogError(
                "[TSDFVoxelGrid] expected properties tsdf and weight are "
                "missing.");
    }

    // Accumulate the per-voxel byte footprint of all requested attributes.
    int64_t total_bytes = 0;
    if (attr_dtype_map_.count("tsdf") != 0) {
        core::Dtype dtype = attr_dtype_map_.at("tsdf");
        if (dtype != core::Dtype::Float32) {
            // Only a warning: unexpected dtypes require a custom voxel struct.
            utility::LogWarning(
                    "[TSDFVoxelGrid] unexpected TSDF dtype, please "
                    "implement your own Voxel structure in "
                    "core/kernel/GeneralEWSharedImpl.h for dispatching.");
        }
        total_bytes += dtype.ByteSize();
    }
    if (attr_dtype_map_.count("weight") != 0) {
        core::Dtype dtype = attr_dtype_map_.at("weight");
        if (dtype != core::Dtype::Float32 && dtype != core::Dtype::UInt16) {
            utility::LogWarning(
                    "[TSDFVoxelGrid] unexpected weight dtype, please "
                    "implement your own Voxel structure in "
                    "core/kernel/GeneralEWSharedImpl.h for "
                    "dispatching.");
        }
        total_bytes += dtype.ByteSize();
    }
    if (attr_dtype_map_.count("color") != 0) {
        core::Dtype dtype = attr_dtype_map_.at("color");
        if (dtype != core::Dtype::Float32 && dtype != core::Dtype::UInt16) {
            utility::LogWarning(
                    "[TSDFVoxelGrid] unexpected color dtype, please "
                    "implement your own Voxel structure in "
                    "core/kernel/GeneralEWSharedImpl.h for dispatching.");
        }
        // Color is stored as 3 channels per voxel.
        total_bytes += dtype.ByteSize() * 3;
    }
    // Users can add other key/dtype checkers here for potential extensions.

    // SDF trunc check, critical for TSDF touch operation that allocates TSDF
    // volumes.
    if (sdf_trunc > block_resolution_ * voxel_size_ * 0.499) {
        utility::LogError(
                "SDF trunc is too large. Please make sure sdf trunc is smaller "
                "than half block size (i.e., block_resolution * voxel_size * "
                "0.5)");
    }
    block_hashmap_ = std::make_shared<core::Hashmap>(
            block_count_, core::Dtype::Int32, core::Dtype::UInt8,
            core::SizeVector{3},
            core::SizeVector{block_resolution_, block_resolution_,
                             block_resolution_, total_bytes},
            device, backend);
}

// Depth-only overload: forwards to the main overload with an empty color image.
void TSDFVoxelGrid::Integrate(const Image &depth,
                              const core::Tensor &intrinsics,
                              const core::Tensor &extrinsics,
                              float depth_scale,
                              float depth_max) {
    Image empty_color;
    Integrate(depth, empty_color, intrinsics, extrinsics, depth_scale,
              depth_max);
}

// Fuses one (optionally colored) depth frame into the voxel grid.
void TSDFVoxelGrid::Integrate(const Image &depth,
                              const Image &color,
                              const core::Tensor &intrinsics,
                              const core::Tensor &extrinsics,
                              float depth_scale,
                              float depth_max) {
    if (depth.IsEmpty()) {
        utility::LogError(
                "[TSDFVoxelGrid] input depth is empty for integration.");
    }

    // Create a point cloud from a low-resolution depth input to roughly
    // estimate surfaces.
    // TODO(wei): merge CreateFromDepth and Touch in one kernel.
    int down_factor = 4;
    PointCloud pcd = PointCloud::CreateFromDepthImage(
            depth, intrinsics, extrinsics, depth_scale, depth_max, down_factor);

    // Scratch hashmap used by Touch to deduplicate touched block coordinates;
    // reused (cleared) across frames.
    int64_t capacity = (depth.GetCols() / down_factor) *
                       (depth.GetRows() / down_factor) * 8;
    if (point_hashmap_ == nullptr) {
        point_hashmap_ = std::make_shared<core::Hashmap>(
                capacity, core::Dtype::Int32, core::Dtype::UInt8,
                core::SizeVector{3}, core::SizeVector{1}, device_,
                core::HashmapBackend::Default);
    } else {
        point_hashmap_->Clear();
    }

    core::Tensor block_coords;
    kernel::tsdf::Touch(point_hashmap_, pcd.GetPoints().Contiguous(),
                        block_coords, block_resolution_, voxel_size_,
                        sdf_trunc_);

    // Active voxel blocks in the block hashmap.
    core::Tensor addrs, masks;
    int64_t n = block_hashmap_->Size();
    try {
        block_hashmap_->Activate(block_coords, addrs, masks);
    } catch (const std::runtime_error &) {
        utility::LogError(
                "[TSDFIntegrate] Unable to allocate volume during rehashing. "
                "Consider using a "
                "larger block_count at initialization to avoid rehashing "
                "(currently {}), or choosing a larger voxel_size "
                "(currently {})",
                n, voxel_size_);
    }

    // Collect voxel blocks in the viewing frustum. Note we cannot directly
    // reuse addrs from Activate, since some blocks might have been activated in
    // previous launches and return false.
    // TODO(wei): support one-pass operation ActivateAndFind.
    // TODO(wei): set point_hashmap_[block_coords] = addrs and use the small
    // hashmap for raycasting
    block_hashmap_->Find(block_coords, addrs, masks);

    // TODO(wei): directly reuse it without intermediate variables.
    // Reserved for raycasting
    active_block_coords_ = block_coords;

    core::Tensor depth_tensor = depth.AsTensor().Contiguous();
    core::Tensor color_tensor;
    if (color.IsEmpty()) {
        utility::LogDebug(
                "[TSDFIntegrate] color image is empty, perform depth "
                "integration only.");
    } else if (color.GetRows() == depth.GetRows() &&
               color.GetCols() == depth.GetCols() && color.GetChannels() == 3) {
        if (attr_dtype_map_.count("color") != 0) {
            color_tensor =
                    color.AsTensor().To(core::Dtype::Float32).Contiguous();
        } else {
            utility::LogWarning(
                    "[TSDFIntegrate] color image is ignored since voxels do "
                    "not contain colors.");
        }
    } else {
        utility::LogWarning(
                "[TSDFIntegrate] color image is ignored for the incompatible "
                "shape.");
    }

    core::Tensor dst = block_hashmap_->GetValueTensor();

    // TODO(wei): use a fixed buffer.
    kernel::tsdf::Integrate(depth_tensor, color_tensor,
                            addrs.To(core::Dtype::Int64).IndexGet({masks}),
                            block_hashmap_->GetKeyTensor(), dst, intrinsics,
                            extrinsics, block_resolution_, voxel_size_,
                            sdf_trunc_, depth_scale, depth_max);
}

// Renders the requested maps (vertex/depth/color/normal, selected via
// ray_cast_mask bits) by ray-marching the TSDF volume from the given camera.
std::unordered_map<TSDFVoxelGrid::SurfaceMaskCode, core::Tensor>
TSDFVoxelGrid::RayCast(const core::Tensor &intrinsics,
                       const core::Tensor &extrinsics,
                       int width,
                       int height,
                       float depth_scale,
                       float depth_min,
                       float depth_max,
                       float weight_threshold,
                       int ray_cast_mask) {
    // Extrinsic: world to camera -> pose: camera to world
    // Allocate only the output maps that were requested via the bit mask.
    core::Tensor vertex_map, depth_map, color_map, normal_map;
    if (ray_cast_mask & TSDFVoxelGrid::SurfaceMaskCode::VertexMap) {
        vertex_map =
                core::Tensor({height, width, 3}, core::Dtype::Float32, device_);
    }
    if (ray_cast_mask & TSDFVoxelGrid::SurfaceMaskCode::DepthMap) {
        depth_map =
                core::Tensor({height, width, 1}, core::Dtype::Float32, device_);
    }
    if (ray_cast_mask & TSDFVoxelGrid::SurfaceMaskCode::ColorMap) {
        color_map =
                core::Tensor({height, width, 3}, core::Dtype::Float32, device_);
    }
    if (ray_cast_mask & TSDFVoxelGrid::SurfaceMaskCode::NormalMap) {
        normal_map =
                core::Tensor({height, width, 3}, core::Dtype::Float32, device_);
    }

    // Per-ray depth range estimated from the active blocks at a reduced
    // resolution; bounds the marching interval of each ray.
    core::Tensor range_minmax_map;
    int down_factor = 8;
    kernel::tsdf::EstimateRange(active_block_coords_, range_minmax_map,
                                intrinsics, extrinsics, height, width,
                                down_factor, block_resolution_, voxel_size_,
                                depth_min, depth_max);

    core::Tensor block_values = block_hashmap_->GetValueTensor();
    auto device_hashmap = block_hashmap_->GetDeviceHashmap();
    kernel::tsdf::RayCast(device_hashmap, block_values, range_minmax_map,
                          vertex_map, depth_map, color_map, normal_map,
                          intrinsics, extrinsics, height, width,
                          block_resolution_, voxel_size_, sdf_trunc_,
                          depth_scale, depth_min, depth_max, weight_threshold);

    std::unordered_map<TSDFVoxelGrid::SurfaceMaskCode, core::Tensor> results;
    if (ray_cast_mask & TSDFVoxelGrid::SurfaceMaskCode::VertexMap) {
        results.emplace(TSDFVoxelGrid::SurfaceMaskCode::VertexMap, vertex_map);
    }
    if (ray_cast_mask & TSDFVoxelGrid::SurfaceMaskCode::DepthMap) {
        results.emplace(TSDFVoxelGrid::SurfaceMaskCode::DepthMap, depth_map);
    }
    if (ray_cast_mask & TSDFVoxelGrid::SurfaceMaskCode::ColorMap) {
        results.emplace(TSDFVoxelGrid::SurfaceMaskCode::ColorMap, color_map);
    }
    if (ray_cast_mask & TSDFVoxelGrid::SurfaceMaskCode::NormalMap) {
        results.emplace(TSDFVoxelGrid::SurfaceMaskCode::NormalMap, normal_map);
    }
    // The range map is always returned, independent of the mask.
    results.emplace(TSDFVoxelGrid::SurfaceMaskCode::RangeMap, range_minmax_map);

    return results;
}

// Extracts a point cloud at TSDF zero-crossings of the active blocks.
// surface_mask selects optional normal/color attributes; VertexMap is
// mandatory.
PointCloud TSDFVoxelGrid::ExtractSurfacePoints(int estimated_number,
                                               float weight_threshold,
                                               int surface_mask) {
    // Extract active voxel blocks from the hashmap.
    if ((surface_mask & SurfaceMaskCode::VertexMap) == 0) {
        utility::LogError("VertexMap must be specified in Surface extraction.");
    }
    core::Tensor active_addrs;
    block_hashmap_->GetActiveIndices(active_addrs);
    core::Tensor active_nb_addrs, active_nb_masks;
    std::tie(active_nb_addrs, active_nb_masks) =
            BufferRadiusNeighbors(active_addrs);

    // Extract points around zero-crossings.
    core::Tensor points, normals, colors;
    kernel::tsdf::ExtractSurfacePoints(
            active_addrs.To(core::Dtype::Int64),
            active_nb_addrs.To(core::Dtype::Int64), active_nb_masks,
            block_hashmap_->GetKeyTensor(), block_hashmap_->GetValueTensor(),
            points,
            surface_mask & SurfaceMaskCode::NormalMap
                    ? utility::optional<std::reference_wrapper<core::Tensor>>(
                              normals)
                    : utility::nullopt,
            surface_mask & SurfaceMaskCode::ColorMap
                    ? utility::optional<std::reference_wrapper<core::Tensor>>(
                              colors)
                    : utility::nullopt,
            block_resolution_, voxel_size_, weight_threshold,
            estimated_number);
    auto pcd = PointCloud(points.Slice(0, 0, estimated_number));
    if (surface_mask & SurfaceMaskCode::ColorMap) {
        pcd.SetPointColors(colors.Slice(0, 0, estimated_number));
    }
    if (surface_mask & SurfaceMaskCode::NormalMap) {
        pcd.SetPointNormals(normals.Slice(0, 0, estimated_number));
    }

    return pcd;
}

// Extracts a triangle mesh from the TSDF over active blocks and their
// neighbors (neighbors are needed for triangles crossing block boundaries).
TriangleMesh TSDFVoxelGrid::ExtractSurfaceMesh(float weight_threshold) {
    // Query active blocks and their nearest neighbors to handle boundary cases.
    core::Tensor active_addrs;
    block_hashmap_->GetActiveIndices(active_addrs);
    core::Tensor active_nb_addrs, active_nb_masks;
    std::tie(active_nb_addrs, active_nb_masks) =
            BufferRadiusNeighbors(active_addrs);

    // Map active indices to [0, num_blocks] to be allocated for surface mesh.
    int64_t num_blocks = block_hashmap_->Size();
    core::Tensor inverse_index_map({block_hashmap_->GetCapacity()},
                                   core::Dtype::Int64, device_);
    std::vector<int64_t> iota_map(num_blocks);
    std::iota(iota_map.begin(), iota_map.end(), 0);
    inverse_index_map.IndexSet(
            {active_addrs.To(core::Dtype::Int64)},
            core::Tensor(iota_map, {num_blocks}, core::Dtype::Int64, device_));

    core::Tensor vertices, triangles, vertex_normals, vertex_colors;
    kernel::tsdf::ExtractSurfaceMesh(
            active_addrs.To(core::Dtype::Int64), inverse_index_map,
            active_nb_addrs.To(core::Dtype::Int64), active_nb_masks,
            block_hashmap_->GetKeyTensor(), block_hashmap_->GetValueTensor(),
            vertices, triangles, vertex_normals, vertex_colors,
            block_resolution_, voxel_size_, weight_threshold);

    TriangleMesh mesh(vertices, triangles);
    mesh.SetVertexNormals(vertex_normals);
    if (vertex_colors.NumElements() != 0) {
        mesh.SetVertexColors(vertex_colors);
    }
    return mesh;
}

// Copies the grid to another device; returns *this when already there and
// copy is not forced.
TSDFVoxelGrid TSDFVoxelGrid::To(const core::Device &device, bool copy) const {
    if (!copy && GetDevice() == device) {
        return *this;
    }

    TSDFVoxelGrid device_tsdf_voxelgrid(attr_dtype_map_, voxel_size_,
                                        sdf_trunc_, block_resolution_,
                                        block_count_, device);
    auto device_tsdf_hashmap = device_tsdf_voxelgrid.block_hashmap_;
    *device_tsdf_hashmap = block_hashmap_->To(device);
    return device_tsdf_voxelgrid;
}

// Batched lookup of the full 3x3x3 neighborhood of every active block.
// Returns (addresses, found-masks), each viewed as {27, n, 1}.
std::pair<core::Tensor, core::Tensor> TSDFVoxelGrid::BufferRadiusNeighbors(
        const core::Tensor &active_addrs) {
    // Fixed radius search for spatially hashed voxel blocks.
    // A generalization will be implementing dense/sparse fixed radius search
    // with coordinates as hashmap keys.
    core::Tensor key_buffer_int3_tensor = block_hashmap_->GetKeyTensor();

    core::Tensor active_keys = key_buffer_int3_tensor.IndexGet(
            {active_addrs.To(core::Dtype::Int64)});
    int64_t n = active_keys.GetShape()[0];

    // Fill in radius nearest neighbors.
    core::Tensor keys_nb({27, n, 3}, core::Dtype::Int32, device_);
    for (int nb = 0; nb < 27; ++nb) {
        // Decompose the flat neighbor id into (dx, dy, dz) offsets in {-1,0,1}.
        int dz = nb / 9;
        int dy = (nb % 9) / 3;
        int dx = nb % 3;
        core::Tensor dt = core::Tensor(
                std::vector<int>{dx - 1, dy - 1, dz - 1}, {1, 3},
                core::Dtype::Int32, device_);
        keys_nb[nb] = active_keys + dt;
    }
    keys_nb = keys_nb.View({27 * n, 3});

    core::Tensor addrs_nb, masks_nb;
    block_hashmap_->Find(keys_nb, addrs_nb, masks_nb);
    return std::make_pair(addrs_nb.View({27, n, 1}), masks_nb.View({27, n, 1}));
}
}  // namespace geometry
}  // namespace t
}  // namespace open3d
<filename>hphp/runtime/vm/jit/func-effects.cpp
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-2015 Facebook, Inc. (http://www.facebook.com)     |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | <EMAIL> so we can mail you a copy immediately.                       |
   +----------------------------------------------------------------------+
*/

#include "hphp/runtime/vm/jit/func-effects.h"

#include "hphp/runtime/vm/func.h"
#include "hphp/runtime/vm/unit.h"
#include "hphp/runtime/vm/jit/normalized-instruction.h"

namespace HPHP { namespace jit {

namespace {

// Names of builtins known to write into the caller's local-variable frame,
// plus the two magic locals that make a frame unpredictable.
const StaticString
  s_http_response_header("http_response_header"),
  s_php_errormsg("php_errormsg"),
  s_extract("extract"),
  s_extractNative("__SystemLib\\extract"),
  s_parse_str("parse_str"),
  s_parse_strNative("__SystemLib\\parse_str"),
  s_assert("assert"),
  s_assertNative("__SystemLib\\assert"),
  s_set_frame_metadata("HH\\set_frame_metadata");

// True when the named function is known to overwrite caller locals
// (extract/parse_str/assert and their native aliases, set_frame_metadata).
// A null name returns false; callers handle the dynamic-name case themselves.
bool funcByNameDestroysLocals(const StringData* fname) {
  if (!fname) return false;

  return fname->isame(s_extract.get()) ||
         fname->isame(s_extractNative.get()) ||
         fname->isame(s_parse_str.get()) ||
         fname->isame(s_parse_strNative.get()) ||
         fname->isame(s_assert.get()) ||
         fname->isame(s_assertNative.get()) ||
         fname->isame(s_set_frame_metadata.get());
}

// Case-insensitive string set (string_hashi / string_eqstri).
using FuncSet = std::unordered_set<std::string, string_hashi, string_eqstri>;

/*
 * This is a conservative list of functions that we are certain won't inspect
 * the caller frame (generally by either CallerFrame or vm_call_user_func).
 */
FuncSet ignoresCallerFrame = {
  "array_key_exists", "key_exists", "array_keys", "array_pop", "array_push",
  "array_rand", "array_search", "array_shift", "array_slice", "array_splice",
  "array_unique", "array_unshift", "array_values", "compact", "shuffle",
  "count", "sizeof", "each", "current", "in_array", "range", "sort", "rsort",
  "asort", "arsort", "ksort", "krsort", "natsort", "natcasesort",
  "hphp_array_idx", "ctype_alnum", "ctype_alpha", "ctype_cntrl", "ctype_digit",
  "ctype_graph", "ctype_lower", "ctype_print", "ctype_punct", "ctype_space",
  "ctype_upper", "ctype_xdigit", "fb_serialize", "fb_unserialize",
  "fb_compact_serialize", "fb_compact_unserialize", "fb_utf8ize",
  "fb_utf8_strlen", "fb_utf8_strlen_deprecated", "fb_utf8_substr",
  "fb_get_code_coverage", "fb_output_compression", "fb_set_exit_callback",
  "fb_get_last_flush_size", "fb_lazy_lstat", "fb_lazy_realpath", "hash",
  "hash_algos", "hash_file", "hash_final", "hash_init", "hash_update",
  "hash_copy", "hash_equals", "furchash_hphp_ext", "hphp_murmurhash",
  "get_declared_classes", "get_declared_interfaces", "get_declared_traits",
  "class_alias", "class_exists", "interface_exists", "trait_exists",
  "enum_exists", "get_class_methods", "get_class_constants", "is_a",
  "is_subclass_of", "method_exists", "property_exists", "error_log",
  "error_reporting", "restore_error_handler", "restore_exception_handler",
  "set_error_handler", "set_exception_handler", "hphp_set_error_page",
  "hphp_clear_unflushed", "get_defined_functions", "function_exists", "min",
  "max", "abs", "is_finite", "is_infinite", "is_nan", "ceil", "floor", "round",
  "deg2rad", "rad2deg", "decbin", "dechex", "decoct", "bindec", "hexdec",
  "octdec", "base_convert", "pow", "exp", "expm1", "log10", "log1p", "log",
  "cos", "cosh", "sin", "sinh", "tan", "tanh", "acos", "acosh", "asin",
  "asinh", "atan", "atanh", "atan2", "hypot", "fmod", "sqrt", "getrandmax",
  "srand", "rand", "mt_getrandmax", "mt_srand", "mt_rand", "lcg_value",
  "intdiv", "flush", "hphp_crash_log",
  "hphp_stats", "hphp_get_stats", "hphp_get_status", "hphp_get_iostatus",
  "hphp_set_iostatus_address", "hphp_get_timers", "hphp_output_global_state",
  "hphp_instruction_counter", "hphp_get_hardware_counters",
  "hphp_set_hardware_events", "hphp_clear_hardware_events", "wordwrap",
  "sprintf", "is_null", "is_bool", "is_int", "is_float", "is_numeric",
  "is_string", "is_scalar", "is_array", "is_object", "is_resource", "boolval",
  "intval", "floatval", "strval", "gettype", "get_resource_type", "settype",
  "serialize", "unserialize", "addcslashes", "stripcslashes", "addslashes",
  "stripslashes", "bin2hex", "hex2bin", "nl2br", "quotemeta", "str_shuffle",
  "strrev", "strtolower", "strtoupper", "ucfirst", "lcfirst", "ucwords",
  "strip_tags", "trim", "ltrim", "rtrim", "chop", "explode", "implode",
  "join", "str_split", "chunk_split", "strtok", "str_replace", "str_ireplace",
  "substr_replace", "substr", "str_pad", "str_repeat", "html_entity_decode",
  "htmlentities", "htmlspecialchars_decode", "htmlspecialchars",
  "fb_htmlspecialchars", "quoted_printable_encode", "quoted_printable_decode",
  "convert_uudecode", "convert_uuencode", "str_rot13", "crc32", "crypt",
  "md5", "sha1", "strtr", "convert_cyr_string", "get_html_translation_table",
  "hebrev", "hebrevc", "setlocale", "localeconv", "nl_langinfo", "chr", "ord",
  "money_format", "number_format", "strcmp", "strncmp", "strnatcmp",
  "strcasecmp", "strncasecmp", "strnatcasecmp", "strcoll", "substr_compare",
  "strchr", "strrchr", "strstr", "stristr", "strpbrk", "strpos", "stripos",
  "strrpos", "strripos", "substr_count", "strspn", "strcspn", "strlen",
  "str_getcsv", "count_chars", "str_word_count", "levenshtein",
  "similar_text", "soundex", "metaphone", "base64_decode", "base64_encode",
  "get_headers", "get_meta_tags", "http_build_query", "parse_url",
  "rawurldecode", "rawurlencode", "urldecode", "urlencode",
};

// Anything NOT in the whitelist above is conservatively assumed to need the
// caller frame. Precondition: fname is non-null (callers guarantee this).
bool funcByNameNeedsCallerFrame(const StringData* fname) {
  return ignoresCallerFrame.find(fname->data()) == ignoresCallerFrame.end();
}

}

// Public entry: does this C++ builtin clobber the caller's locals?
bool builtinFuncDestroysLocals(const Func* callee) {
  assertx(callee && callee->isCPPBuiltin());
  auto const fname = callee->name();
  return funcByNameDestroysLocals(fname);
}

// Conservatively decides whether the call performed by `inst` may overwrite
// the locals of `caller`, by inspecting the FPush* opcode that set the call up.
bool callDestroysLocals(const NormalizedInstruction& inst,
                        const Func* caller) {
  // We don't handle these two cases, because we don't compile functions
  // containing them:
  assertx(caller->lookupVarId(s_php_errormsg.get()) == -1);
  assertx(caller->lookupVarId(s_http_response_header.get()) == -1);

  auto* unit = caller->unit();
  auto checkTaintId = [&](Id id) {
    auto const str = unit->lookupLitstrId(id);
    return funcByNameDestroysLocals(str);
  };

  if (inst.op() == OpFCallBuiltin) return checkTaintId(inst.imm[2].u_SA);
  if (!isFCallStar(inst.op()))     return false;

  // Walk back from the FCall to the FPush that opened the frame.
  const FPIEnt *fpi = caller->findFPI(inst.source.offset());
  assertx(fpi);
  auto const fpushPc = unit->at(fpi->m_fpushOff);
  auto const op = peek_op(fpushPc);

  if (op == OpFPushFunc) {
    // If the call has any arguments, the FPushFunc will be in a different
    // tracelet -- the tracelet will break on every FPass* because the reffiness
    // of the callee isn't knowable. So we have to say the call destroys locals,
    // to be conservative. If there aren't any arguments, then it can't destroy
    // locals -- even if the call is to extract(), there's no argument, so it
    // won't do anything.
    auto const numArgs = inst.imm[0].u_IVA;
    return (numArgs != 0);
  }
  if (op == OpFPushFuncD) return checkTaintId(getImm(fpushPc, 1).u_SA);
  if (op == OpFPushFuncU) {
    return checkTaintId(getImm(fpushPc, 1).u_SA) ||
           checkTaintId(getImm(fpushPc, 2).u_SA);
  }

  return false;
}

// Public entry: does this C++ builtin need to inspect the caller frame?
bool builtinFuncNeedsCallerFrame(const Func* callee) {
  assertx(callee && callee->isCPPBuiltin());
  return funcByNameNeedsCallerFrame(callee->name());
}

// Like callDestroysLocals, but answers whether the call may inspect the
// caller frame (CallerFrame / vm_call_user_func style access).
bool callNeedsCallerFrame(const NormalizedInstruction& inst,
                          const Func* caller) {
  auto* unit = caller->unit();
  auto checkTaintId = [&](Id id) {
    auto const str = unit->lookupLitstrId(id);
    if (!str) return true;  // if the function was invoked dynamically we can't
                            // be sure
    /*
     * Only C++ functions can inspect the caller frame, we know these are all
     * loaded ahead of time and unique/persistent.
     */
    if (auto f = Unit::lookupFunc(str)) {
      return f->isCPPBuiltin() && funcByNameNeedsCallerFrame(str);
    }
    return false;
  };

  if (inst.op() == OpFCallBuiltin) return checkTaintId(inst.imm[2].u_SA);
  if (!isFCallStar(inst.op()))     return false;

  const FPIEnt *fpi = caller->findFPI(inst.source.offset());
  assertx(fpi);
  auto const fpushPc = unit->at(fpi->m_fpushOff);
  auto const op = peek_op(fpushPc);

  // A fully dynamic FPushFunc: no name to check, assume the worst.
  if (op == OpFPushFunc)  return true;
  if (op == OpFPushFuncD) return checkTaintId(getImm(fpushPc, 1).u_SA);
  if (op == OpFPushFuncU) {
    return checkTaintId(getImm(fpushPc, 1).u_SA) ||
           checkTaintId(getImm(fpushPc, 2).u_SA);
  }

  return false;
}

}}
<filename>bspump/crypto/__init__.py<gh_stars>1-10 from .aes import DecryptAESProcessor from .aes import EncryptAESProcessor ''' Test AES openssl aes-128-cbc -e -in /etc/services -out /Users/ateska/Downloads/services.enc -K 00000000000000000000000000000000 -iv 00000000000000000000000000000000 '''
-- <reponame>masatoko/test-ghci-pipe
-- | Drive an interactive GHCi process over its stdin/stdout pipes.
module GHCi where

import           Control.Concurrent    (forkIO)
import           Control.Monad         (forever, unless)
import qualified Data.ByteString       as BS
import qualified Data.ByteString.Char8 as BC
import           Data.List             (isPrefixOf)
import           Data.Maybe            (isJust)
import           Safe                  (readMay)
import           System.IO
import           System.Process

-- | Handles for talking to the spawned GHCi process.
data GhciPipe = GhciPipe
  { ghciIn  :: Handle  -- ^ GHCi's stdin
  , ghciOut :: Handle  -- ^ GHCi's stdout
  }

-- | Classification of a line read back from GHCi.
data Message
  = Output String        -- ^ plain output line (no prompt found)
  | ParseFailure String  -- ^ NOTE(review): never constructed in this module
  | Result String        -- ^ text following the last @>@ prompt
  deriving (Eq, Show)

-- | Spawn GHCi, mirror its stderr to our stdout, load the given module and
-- block until GHCi reports the modules as loaded.
genGhciPipe :: String -> IO GhciPipe
genGhciPipe moduleName = do
  (Just hin, Just hout, Just herr, _ph) <- createProcess
    (proc "ghc/bin/ghci-8.0.1.exe" [])
      { std_out = CreatePipe
      , std_err = CreatePipe
      , std_in  = CreatePipe
      , cwd     = Just "src"
      }
  -- StdErr: forward GHCi's stderr lines, prefixed, on a separate thread.
  forkIO $ forever $ do
    line <- hGetLine herr
    putStrLn $ "ERROR: " ++ line
  -- Initialize: ask GHCi to load the module and wait for confirmation.
  hPutStrLn hin $ ":l " ++ moduleName
  hFlush hin
  readTillLoaded hout
  return $ GhciPipe hin hout
  where
    -- Consume (and echo) output until the "Ok, modules loaded" line appears.
    readTillLoaded hout = go
      where
        go = do
          line <- hGetLine hout
          putStrLn line
          unless ("Ok, modules loaded" `isPrefixOf` line) go

-- | Read one line from GHCi and classify it: text after the final @>@
-- prompt becomes a 'Result'; a line without a prompt is plain 'Output'.
readResult :: GhciPipe -> IO Message
readResult ghci = evaluate <$> BC.hGetLine (ghciOut ghci)
  where
    evaluate :: BC.ByteString -> Message
    evaluate bs0 = case evalPart bs0 of
      Nothing  -> Output $ BC.unpack bs0
      Just bs' -> Result $ BC.unpack bs'
      where
        -- Strip up to (and including) each @>@; keep what follows the last
        -- prompt, or Nothing when the remainder is empty.
        -- NOTE(review): when the line contains no '>' at all,
        -- @BC.dropWhile@ yields the empty string and @BC.tail@ on it throws;
        -- confirm that GHCi output lines handled here always carry a prompt.
        evalPart bs
          | BC.null bs' = Nothing
          | otherwise   = if isJust $ BC.find (== '>') bs'
                            then evalPart bs'
                            else Just bs'
          where
            bs' = BC.tail $ BC.dropWhile (/= '>') bs

-- eval :: GhciPipe -> String -> IO ()
-- | Send one command line to GHCi and flush its stdin.
eval ghci command = do
  hPutStrLn (ghciIn ghci) command
  hFlush (ghciIn ghci)

-- | ByteString variant of 'eval'.
evalBS :: GhciPipe -> BS.ByteString -> IO ()
evalBS ghci command = do
  BC.hPutStrLn (ghciIn ghci) command
  hFlush (ghciIn ghci)
<reponame>ricky26/netlib<gh_stars>1-10 #include "netlib/shared_memory.h" #include <Windows.h> namespace netlib { // // shmem_private // struct shmem_internal { HANDLE handle; size_t size; void *pointer; shmem_internal() { handle = INVALID_HANDLE_VALUE; size = 0; pointer = nullptr; } static shmem_internal *get(void *_p) { return static_cast<shmem_internal*>(_p); } }; // // shared_memory // shared_memory::shared_memory() { mInternal = new shmem_internal(); } shared_memory::shared_memory(std::string const& _name, size_t _sz) { mInternal = new shmem_internal(); open(_name, _sz); } shared_memory::shared_memory(shared_memory &&_mem) : mInternal(_mem.mInternal) { _mem.mInternal = nullptr; } shared_memory::~shared_memory() { if(mInternal) { close(); shmem_internal *si = shmem_internal::get(mInternal); mInternal = nullptr; delete si; } } bool shared_memory::valid() const { shmem_internal *si = shmem_internal::get(mInternal); return si && si->handle != INVALID_HANDLE_VALUE; } size_t shared_memory::size() const { shmem_internal *si = shmem_internal::get(mInternal); if(si) return si->size; return 0; } void shared_memory::close() { shmem_internal *si = shmem_internal::get(mInternal); if(!si || si->handle == INVALID_HANDLE_VALUE) return; UnmapViewOfFile(si->pointer); CloseHandle(si->handle); si->pointer = nullptr; si->size = 0; si->handle = INVALID_HANDLE_VALUE; } bool shared_memory::open(std::string const& _name, size_t _sz) { shmem_internal *si = shmem_internal::get(mInternal); if(!si || si->handle != INVALID_HANDLE_VALUE) return false; std::string name = "Global\\" + _name; HANDLE hMap = OpenFileMappingA(FILE_MAP_ALL_ACCESS, FALSE, _name.c_str()); if(hMap == INVALID_HANDLE_VALUE) return false; void *pPtr = MapViewOfFile(hMap, FILE_MAP_ALL_ACCESS, 0, 0, _sz); if(!pPtr) { CloseHandle(hMap); return false; } si->handle = hMap; si->pointer = pPtr; si->size = _sz; return true; } bool shared_memory::create(std::string const& _name, size_t _sz) { shmem_internal *si = 
shmem_internal::get(mInternal); if(!si || si->handle != INVALID_HANDLE_VALUE) return false; std::string name = "Global\\" + _name; HANDLE hMap = CreateFileMappingA(INVALID_HANDLE_VALUE, NULL, PAGE_READWRITE, 0, (DWORD)_sz, _name.c_str()); if(hMap == INVALID_HANDLE_VALUE) return false; void *pPtr = MapViewOfFile(hMap, FILE_MAP_ALL_ACCESS, 0, 0, _sz); if(!pPtr) { CloseHandle(hMap); return false; } si->handle = hMap; si->pointer = pPtr; si->size = _sz; return true; } void *shared_memory::pointer() const { shmem_internal *si = shmem_internal::get(mInternal); if(!si || si->handle == INVALID_HANDLE_VALUE) return nullptr; return si->pointer; } }
Analysis of Instantaneous Collision Process of Axial Flow Check Valve Closing When the pump stops suddenly, the axial flow check valve will shut down suddenly under the action of the medium pressure differential force, and the impact load between the disc and the valve seat will produce when the valve closes, which will have a certain impact on the structural strength and life of the check valve. In this paper, the display dynamics is used to simulate the impact process of the disc and the valve seat when the valve is completely closed. The dynamic response of the disc and the seat, the velocity of the disc and the energy change during the collision are obtained. The research has certain guiding significance for the practical application of axial flow check valve. Introduction Axial-flow check valve is an automatic valve that depends on the pressure difference of medium to realize opening and closing. When the pump stops suddenly, the check valve will act quickly under the pressure difference of medium itself to prevent the driving equipment in front of the valve from reversing due to the backflow of medium. It is widely used in long-distance pipeline system . In case of emergency, the medium in the pipeline behind the valve flows back quickly, which makes the pressure behind the valve greater than the pressure before the valve, so that the disc moves towards the closing direction. with the increase of the pressure difference of the medium, the speed of the disc reaches the maximum at the moment of closing, colliding with the sealing surface of the valve seat, causing some damage to the sealing surface and reducing the service life of the check valve. In severe cases, the whole pipeline system can not run normally . Sibilla S etc. 
used dynamic grid technology to simulate the opening and closing process of axial flow check valve, and obtained that the parameters such as disc size and spring stiffness have important influence on its dynamic response ; YAMASHITA Akihiko etc. studied the characteristics of rapid braking and closing of the check valve when the pump stops suddenly, and obtained that the impact acceleration will be generated when the valve clack hits the valve seat and the water hammer caused by low pressure . Junfeng Wang studies the impact force between the impact hammer and the impact table in the impact testing machine, and simulates the impact moment with LS-DYNA software, and obtains the dynamic response characteristics and laws of the whole system ; Weiqun Xu analyzed the dynamic response characteristics of simply supported beam under arbitrary impact load, taking Dengzhou Bridge as an example, ANSYS/LS-DYNA software was used to simulate and analyze the collision moment between automobile and bridge, and the stress-strain law during collision and the deformation . It is difficult for researchers to measure the impact directly because of its short action time and high destructiveness. In this paper, the impact process between the valve disc and the valve seat at the moment when the axial flow check valve is completely closed when the pump stops suddenly is studied by using the explicit dynamic numerical simulation method. the basic concept of impact movement Impact is a process in which the impact load instantly acts on the system in millisecond time, causing sudden changes in the stress and motion state of the system. Impact load is an external load that changes arbitrarily with time, and impact process is a series of dynamic changes of the system with time. Impact dynamics mainly studies the change of stress wave and the dynamic response of each component in the system. The research in this paper belongs to the problem of system dynamic response. 
Non-persistent collision is the effect of system constraints in the collision process, and once the collision ends, the constraints are automatically eliminated. For this kind of problems, the dynamic response results of the system after collision are related to the motion state of the system before collision, the recovery coefficient and collision impulse in the collision contact process, this paper studies this kind of collision problem . Explicit dynamics According to the classical mechanics theory, the dynamic equation is: In which: is the model quality matrix; is the model damping matrix; K is the model stiffness matrix; is the load vector; is the infinitesimal displacement vector; is the infinitesimal velocity vector and is the infinitesimal acceleration vector. Explicit solution method, also known as closed solution method, is calculated by matrix multiplication in each step, which does not need balanced iteration, has fast solution speed, and occupies little memory. When the time step is small enough, convergence problem generally does not occur, so it is suitable for solving transient collision problems . ANSYS LS-DYNA is a world-famous software for solving nonlinear dynamics, which can simulate and solve various complex problems, such as collision and explosion of various nonlinear structures, and has a variety of contact calculation models, good parallel solving ability, grid adaptive ability and etc. . Establish a numerical model The 3d model of the assembly is established by Solidworks software, as shown in fig.1, the check valve is mainly composed of seven parts as shown in the figure. The inlet and outlet diameters of the valve body are 252 mm, the total length of the cavity body is 622 mm, the front and rear lengths of the valve seat are 156 mm and 462 mm respectively, and the medium in the valve is water. 
Under normal working conditions, the medium goes in and out from the left, and the pressure in the valve is high on the left and low on the right. When the pump is stopped, the high-pressure medium flows backwards, so that the valve clack impacts the valve seat to generate impact load during the closing process. Meshing platform is used to mesh the model in ANSYS Workbench, the grid diagram of check valve collision calculation is shown in Fig.2, with 22,322 nodes and 93,480 units. Dynamic response results of valve body It can be seen from Fig.3 and Fig.4 that the stress mainly occurs at the sealing surface of the valve seat because the disc collides with the sealing surface of the valve seat at the moment of closing. At t=0.5 ms, the stress on the valve body reaches the maximum 289.53 MPa. At t=0.5 ms, the disc is separated from the sealing surface of the valve seat, the collision end, and the stress of the valve body drops rapidly to 16.874 MPa. In the period after reaching ms, the stress and strain value of the valve body changed little, and Dynamic response results of valve clack According to fig.5 and fig.6, when t=0.25ms occurs, it is distributed in a 120 array around the sealing surface of the disc, and the stress of the disc reaches the maximum 155.68MPa. At t=0.5ms, 1.75ms and 2ms, the corresponding maximum stresses are 64.405MPa, 62.409MPa and 53.424MPa in turn. The occurrence position of the maximum stress continuously moves from the sealing surface to the center of the disc, and then from the center of the disc to the sealing surface of the disc, which is the result that stress waves are propagated, reflected and unloaded many times in a short time. It can be seen from fig.7, At t=0.5 ms, the collision is completed, the valve disc is no longer in contact with the valve seat. 
The valve disc bounces back due to the obstruction of the collision; because of the energy loss during the collision, the speed of the valve disc changes from the initial value of -3.624 m/s to 3.11 m/s. After 0.5 ms the speed remains constant at 3.11 m/s, because the variation law of the medium force on the right side of the valve disc has not yet been obtained, so it is not included in the numerical simulation. 6. Conclusion 1) Through numerical simulation of the dynamic response of the whole collision process between the valve disc and the valve seat, the stress of the valve body and the valve disc reaches its maximum value at t=0.25 ms, and the whole collision process lasts for 0.5 ms. 2) The velocity of the valve disc after the collision, obtained by numerical simulation with explicit dynamics, is 3.11 m/s. 3) In the process of collision, due to the action of damping and friction, there is system energy absorption and conversion in the collision system, amounting to about 1.3 J, and this energy conversion is irreversible. In a word, the research in this paper has certain guiding significance for the practical application of the axial-flow check valve when the pump is stopped.
//Checks if a photo exists with specified id and photoLink in the database @Override public boolean imageExists(Integer imageId, String imageLink) { try { LOGGER.info("Queried id: {}", imageId); boolean returnValue = imageRepo.existsImageIdAndImageLink(imageId, imageLink); LOGGER.info("Result: {}", returnValue); return returnValue; } catch (RuntimeException error) { LOGGER.error("Could not confirm if the image exists with id {} and link {}, error {} ", imageId, imageLink, error.getMessage()); throw new RuntimeException("Failed to execute the request ", error.getCause()); } }
Iodine deficiency disorders: Public health measures to mitigate the global burden Iodine is an essential micronutrient required for the synthesis of the hormones thyroxine and triiodothyronine. Iodine insufficiency is prevalent throughout the world and is rated as one of the most significant public health concerns in more than 125 countries. An optimal intake of dietary iodine during pregnancy, lactation and the early childhood period is crucial for ensuring proper development of the brain of the fetus/newborn. During the course of implementation of the strategy of universal administration of iodized salt, a wide gamut of challenges has been identified. In order to combat these and simultaneously enhance the coverage of universal administration of iodized salt, many measures have been proposed, namely formulating a clear plan with time-based targets; encouraging coordination amongst all stakeholders; strengthening the existing infrastructure; and increasing consumer awareness of, and demand for, adequately iodized salt. To conclude, iodine has a crucial role in ensuring the normal growth and development of the child. However, to expand the coverage of universally iodized salt to the vulnerable population, sustained political commitment and a transparent monitoring and evaluation mechanism are the need of the hour.
// Unmarshal parses the common formatted checkpoint data and stores the result // in the Checkpoint. // // The supplied data is expected to begin with the following 3 lines of text, // each followed by a newline: // - <Origin string> // - <decimal representation of log size> // - <base64 representation of root hash> // // There must be no extraneous trailing data. func (c *Checkpoint) Unmarshal(data []byte) error { l := bytes.SplitN(data, []byte("\n"), 4) if len(l) < 4 { return errors.New("invalid checkpoint - too few newlines") } origin := string(l[0]) if origin != OriginV0 { return fmt.Errorf("invalid checkpoint - incorrect origin %q", origin) } size, err := strconv.ParseUint(string(l[1]), 10, 64) if err != nil { return fmt.Errorf("invalid checkpoint - size invalid: %w", err) } h, err := base64.StdEncoding.DecodeString(string(l[2])) if err != nil { return fmt.Errorf("invalid checkpoint - invalid hash: %w", err) } if xl := len(l[3]); xl > 0 { return fmt.Errorf("invalid checkpoint - %d bytes of unexpected trailing data", xl) } *c = Checkpoint{ Origin: origin, Size: size, Hash: h, } return nil }
/**
 * A response for a transaction that creates a gamete in a yet not initialized blockchain.
 */
@Immutable
public class GameteCreationTransactionResponse extends InitialTransactionResponse implements TransactionResponseWithUpdates {
	// Marker byte written first during marshalling, so that the response kind
	// can be recognized when unmarshalling.
	final static byte SELECTOR = 0;

	/**
	 * The updates resulting from the execution of the transaction.
	 */
	private final Update[] updates;

	/**
	 * The created gamete.
	 */
	public final StorageReference gamete;

	/**
	 * Builds the transaction response.
	 *
	 * @param updates the updates resulting from the execution of the transaction
	 * @param gamete the created gamete
	 */
	public GameteCreationTransactionResponse(Stream<Update> updates, StorageReference gamete) {
		this.updates = updates.toArray(Update[]::new);
		this.gamete = gamete;
	}

	@Override
	public boolean equals(Object other) {
		if (other instanceof GameteCreationTransactionResponse) {
			GameteCreationTransactionResponse otherCast = (GameteCreationTransactionResponse) other;
			return Arrays.equals(updates, otherCast.updates) && gamete.equals(otherCast.gamete);
		}
		else
			return false;
	}

	@Override
	public int hashCode() {
		return gamete.hashCode() ^ Arrays.hashCode(updates);
	}

	@Override
	public final Stream<Update> getUpdates() {
		return Stream.of(updates);
	}

	@Override
	public String toString() {
		return getClass().getSimpleName() + ":\n" + " gamete: " + gamete + "\n" + " updates:\n" + getUpdates().map(Update::toString).collect(Collectors.joining("\n ", " ", ""));
	}

	/**
	 * Yields the outcome of the execution having this response.
	 *
	 * @return the outcome
	 */
	public StorageReference getOutcome() {
		return gamete;
	}

	@Override
	public void into(MarshallingContext context) throws IOException {
		// Order matters: selector first, then updates, then the gamete;
		// from() below must read the fields back in the same order.
		context.writeByte(SELECTOR);
		intoArray(updates, context);
		gamete.intoWithoutSelector(context);
	}

	/**
	 * Factory method that unmarshals a response from the given stream.
	 * The selector of the response has been already processed.
	 *
	 * @param context the unmarshalling context
	 * @return the response
	 * @throws IOException if the response could not be unmarshalled
	 * @throws ClassNotFoundException if the response could not be unmarshalled
	 */
	public static GameteCreationTransactionResponse from(UnmarshallingContext context) throws IOException, ClassNotFoundException {
		Stream<Update> updates = Stream.of(context.readArray(Update::from, Update[]::new));
		return new GameteCreationTransactionResponse(updates, StorageReference.from(context));
	}
}
/**
 * Reverts the rotation of the {@link IPoint4 Point} around the z-axis by the
 * provided rotation angle, i.e. applies the inverse z-rotation.
 *
 * @param <T>
 *            the {@link Number} type of the {@link IPoint4 Point} to rotate.
 * @param pointToRotate
 *            the {@link IPoint4 Point} to rotate.
 * @param rotationAngle
 *            the rotation angle.
 * @return the rotated {@link IPoint4 Point}.
 */
public static <T extends Number> IPoint4<T> revertRotateZ(
        final IPoint4<T> pointToRotate, final Number rotationAngle) {

    // Build the inverse z-rotation matrix and apply it in one expression.
    return VectorAlgebraicOperations.multiply(
            GeometricOperations.inverseZRotationMatrix(rotationAngle,
                    pointToRotate.getType()),
            pointToRotate, pointToRotate.getType());
}
A LADAR bare earth extraction technique for diverse topography and complex scenes Bare earth extraction is an important component to LADAR data analysis in terms of terrain classification. The challenge in providing accurate digital models is augmented when there is diverse topography within the data set or complex combinations of vegetation and built structures. A successful approach provides a flexible methodology (adaptable for topography and/or environment) that is capable of integrating multiple ladar point cloud data attributes. A newly developed approach (TE-SiP) uses a 2nd and 3rd order spatial derivative for each point in the DEM to determine sets of contiguous regions of similar elevation. Specifically, the derivative of the central point represents the curvature of the terrain at that position. Contiguous sets of high (positive or negative) values define sharp edges such as building edges or cliffs. This method is independent of the slope, such that very steep, but continuous topography still have relatively low curvature values and are preserved in the terrain classification. Next, a recursive segmentation method identifies unique features of homogeneity on the surface separated by areas of high curvature. An iterative selection process is used to eliminate regions containing buildings or vegetation from the terrain surface. This technique was tested on a variety of existing LADAR surveys, each with varying levels of topographic complexity. The results shown here include developed and forested regions in the Dominican Republic.
<reponame>zugaldia/capitalbikeshare from flask.ext.restful import fields, marshal
/**
 * Represents a single state in the recognition trellis. Subclasses of a token are used to represent the various
 * emitting states.
 * <p/>
 * All scores are maintained in LogMath log base
 */
@SuppressWarnings("serial")
public class Token implements Scoreable {

    // Global counters of created tokens, reported by showCount() for debugging.
    private static int curCount;
    private static int lastCount;
    private static final DecimalFormat scoreFmt = new DecimalFormat("0.0000000E00");
    private static final DecimalFormat numFmt = new DecimalFormat("0000");

    private final Token predecessor;
    private final float logLanguageScore;
    private float logTotalScore;
    private float logInsertionScore;
    private float logAcousticScore;
    private float logWorkingScore;
    private final SearchState searchState;

    // Index of this token in the ActiveList backing store, -1 when unset.
    private int location;
    private final int frameNumber;
    private Data myData;

    /**
     * A collection of arbitrary properties assigned to this token. This field becomes lazy initialized to reduce
     * memory footprint.
     */
    private HashMap<String, Object> tokenProps;

    /**
     * Internal constructor for a token. Used by classes Token, CombineToken, ParallelToken
     *
     * @param predecessor the predecessor for this token
     * @param state the SentenceHMMState associated with this token
     * @param logTotalScore the total entry score for this token (in LogMath log base)
     * @param logInsertionScore the insertion score associated with this token (in LogMath log base)
     * @param logLanguageScore the language score associated with this token (in LogMath log base)
     * @param frameNumber the frame number associated with this token
     */
    public Token(Token predecessor, SearchState state, float logTotalScore, float logInsertionScore, float logLanguageScore, int frameNumber) {
        this.predecessor = predecessor;
        this.searchState = state;
        this.logTotalScore = logTotalScore;
        this.logInsertionScore = logInsertionScore;
        this.logLanguageScore = logLanguageScore;
        this.frameNumber = frameNumber;
        this.location = -1;
        curCount++;
    }

    /**
     * Creates the initial token with the given word history depth
     *
     * @param state the SearchState associated with this token
     * @param frameNumber the frame number for this token
     */
    public Token(SearchState state, int frameNumber) {
        this(null, state, 0.0f, 0.0f, 0.0f, frameNumber);
    }

    /**
     * Creates a Token with the given scores and predecessor (no search state, frame number 0).
     *
     * @param predecessor the predecessor Token
     * @param logTotalScore the log total score
     * @param logAcousticScore the log acoustic score
     * @param logInsertionScore the log insertion score
     * @param logLanguageScore the log language score
     */
    public Token(Token predecessor, float logTotalScore, float logAcousticScore, float logInsertionScore, float logLanguageScore) {
        this(predecessor, null, logTotalScore, logInsertionScore, logLanguageScore, 0);
        this.logAcousticScore = logAcousticScore;
    }

    /**
     * Returns the predecessor for this token, or null if this token has no predecessors
     *
     * @return the predecessor
     */
    public Token getPredecessor() {
        return predecessor;
    }

    /**
     * Returns the frame number for this token. Note that for tokens that are associated with non-emitting states, the
     * frame number represents the next frame number. For emitting states, the frame number represents the current
     * frame number.
     *
     * @return the frame number for this token
     */
    public int getFrameNumber() {
        return frameNumber;
    }

    /**
     * Sets the feature for this Token.
     *
     * @param data the feature to attach
     */
    public void setData(Data data) {
        myData = data;
    }

    /**
     * Returns the feature for this Token.
     *
     * @return the feature for this Token
     */
    public Data getData() {
        return myData;
    }

    /**
     * Returns the score for the token. The score is a combination of language and acoustic scores
     *
     * @return the score of this frame (in logMath log base)
     */
    public float getScore() {
        return logTotalScore;
    }

    /**
     * Calculates a score against the given feature. The score can be retrieved
     * with get score. The token will keep a reference to the scored feature-vector.
     *
     * @param feature the feature to be scored
     * @return the score for the feature
     */
    public float calculateScore(Data feature) {
        // Accumulate the acoustic score delivered by the search state into the total.
        logAcousticScore = ((ScoreProvider) searchState).getScore(feature);

        logTotalScore += logAcousticScore;

        setData(feature);

        return logTotalScore;
    }

    /**
     * Returns the per-component scores of the search state against the given feature.
     *
     * @param feature the feature to be scored
     * @return the component scores for the feature
     */
    public float[] calculateComponentScore(Data feature){
    	return ((ScoreProvider) searchState).getComponentScore(feature);
    }

    /**
     * Normalizes a previously calculated score
     *
     * @param maxLogScore the score to normalize this score with
     * @return the normalized score
     */
    public float normalizeScore(float maxLogScore) {
        logTotalScore -= maxLogScore;
        logAcousticScore -= maxLogScore;
        return logTotalScore;
    }

    /**
     * Gets the working score. The working score is used to maintain non-final
     * scores during the search. Some search algorithms such as bushderby use
     * the working score
     *
     * @return the working score (in logMath log base)
     */
    public float getWorkingScore() {
        return logWorkingScore;
    }

    /**
     * Sets the working score for this token
     *
     * @param logScore the working score (in logMath log base)
     */
    public void setWorkingScore(float logScore) {
        logWorkingScore = logScore;
    }

    /**
     * Sets the score for this token
     *
     * @param logScore the new score for the token (in logMath log base)
     */
    public void setScore(float logScore) {
        this.logTotalScore = logScore;
    }

    /**
     * Returns the language score associated with this token
     *
     * @return the language score (in logMath log base)
     */
    public float getLanguageScore() {
        return logLanguageScore;
    }

    /**
     * Returns the insertion score associated with this token.
     * Insertion score is the score of the transition between
     * states. It might be transition score from the acoustic model,
     * phone insertion score or word insertion probability from
     * the linguist.
     *
     * @return the insertion score (in logMath log base)
     */
    public float getInsertionScore() {
        return logInsertionScore;
    }

    /**
     * Returns the acoustic score for this token (in logMath log base).
     * Acoustic score is a sum of frame GMM.
     *
     * @return score
     */
    public float getAcousticScore() {
        return logAcousticScore;
    }

    /**
     * Returns the SearchState associated with this token
     *
     * @return the searchState
     */
    public SearchState getSearchState() {
        return searchState;
    }

    /**
     * Determines if this token is associated with an emitting state. An emitting state is a state that can be scored
     * acoustically.
     *
     * @return <code>true</code> if this token is associated with an emitting state
     */
    public boolean isEmitting() {
        return searchState.isEmitting();
    }

    /**
     * Determines if this token is associated with a final SentenceHMM state.
     *
     * @return <code>true</code> if this token is associated with a final state
     */
    public boolean isFinal() {
        return searchState.isFinal();
    }

    /**
     * Determines if this token marks the end of a word
     *
     * @return <code>true</code> if this token marks the end of a word
     */
    public boolean isWord() {
        return searchState instanceof WordSearchState;
    }

    /**
     * Retrieves the string representation of this object
     *
     * @return the string representation of this object
     */
    @Override
    public String toString() {
        return
            numFmt.format(getFrameNumber()) + ' ' +
            scoreFmt.format(getScore()) + ' ' +
            scoreFmt.format(getAcousticScore()) + ' ' +
            scoreFmt.format(getLanguageScore()) + ' ' +
            getSearchState() + (tokenProps == null ? "" : " " + tokenProps);
    }

    /** dumps a branch of tokens */
    public void dumpTokenPath() {
        dumpTokenPath(true);
    }

    /**
     * dumps a branch of tokens
     *
     * @param includeHMMStates if true include all sentence hmm states
     */
    public void dumpTokenPath(boolean includeHMMStates) {
        Token token = this;
        List<Token> list = new ArrayList<Token>();

        // Collect the predecessor chain, then print it oldest-first.
        while (token != null) {
            list.add(token);
            token = token.getPredecessor();
        }
        for (int i = list.size() - 1; i >= 0; i--) {
            token = list.get(i);
            if (includeHMMStates || (!(token.getSearchState() instanceof HMMSearchState))) {
                System.out.println(" " + token);
            }
        }
        System.out.println();
    }

    /**
     * Returns the string of words leading up to this token.
     *
     * @param wantFiller if true, filler words are added
     * @param wantPronunciations if true append [ phoneme phoneme ... ] after each word
     * @return the word path
     */
    public String getWordPath(boolean wantFiller, boolean wantPronunciations) {
        StringBuilder sb = new StringBuilder();
        Token token = this;

        // Walk backwards and prepend, so the resulting string is oldest-first.
        while (token != null) {
            if (token.isWord()) {
                WordSearchState wordState = (WordSearchState) token.getSearchState();
                Pronunciation pron = wordState.getPronunciation();
                Word word = wordState.getPronunciation().getWord();

                // System.out.println(token.getFrameNumber() + " " + word + " " + token.logLanguageScore + " " + token.logAcousticScore);

                if (wantFiller || !word.isFiller()) {
                    if (wantPronunciations) {
                        sb.insert(0, ']');
                        Unit[] u = pron.getUnits();
                        for (int i = u.length - 1; i >= 0; i--) {
                            if (i < u.length - 1) sb.insert(0, ',');
                            sb.insert(0, u[i].getName());
                        }
                        sb.insert(0, '[');
                    }
                    sb.insert(0, word.getSpelling());
                    sb.insert(0, ' ');
                }
            }
            token = token.getPredecessor();
        }
        return sb.toString().trim();
    }

    /**
     * Returns the string of words for this token, with no embedded filler words
     *
     * @return the string of words
     */
    public String getWordPathNoFiller() {
        return getWordPath(false, false);
    }

    /**
     * Returns the string of words for this token, with embedded silences
     *
     * @return the string of words
     */
    public String getWordPath() {
        return getWordPath(true, false);
    }

    /**
     * Returns the string of words and units for this token, with embedded silences.
     *
     * @return the string of words and units
     */
    public String getWordUnitPath() {
        StringBuilder sb = new StringBuilder();
        Token token = this;

        while (token != null) {
            SearchState searchState = token.getSearchState();
            if (searchState instanceof WordSearchState) {
                WordSearchState wordState = (WordSearchState) searchState;
                Word word = wordState.getPronunciation().getWord();
                sb.insert(0, ' ' + word.getSpelling());
            } else if (searchState instanceof UnitSearchState) {
                UnitSearchState unitState = (UnitSearchState) searchState;
                Unit unit = unitState.getUnit();
                sb.insert(0, ' ' + unit.getName());
            }
            token = token.getPredecessor();
        }
        return sb.toString().trim();
    }

    /**
     * Returns the word of this Token, the search state is a WordSearchState. If the search state is not a
     * WordSearchState, return null.
     *
     * @return the word of this Token, or null if this is not a word token
     */
    public Word getWord() {
        if (isWord()) {
            WordSearchState wordState = (WordSearchState) searchState;
            return wordState.getPronunciation().getWord();
        } else {
            return null;
        }
    }

    /** Shows the token count */
    public static void showCount() {
        System.out.println("Cur count: " + curCount + " new " +
                (curCount - lastCount));
        lastCount = curCount;
    }

    /**
     * Returns the location of this Token in the ActiveList. In the HeapActiveList implementation, it is the index of
     * the Token in the array backing the heap.
     *
     * @return the location of this Token in the ActiveList
     */
    public final int getLocation() {
        return location;
    }

    /**
     * Sets the location of this Token in the ActiveList.
     *
     * @param location the location of this Token
     */
    public final void setLocation(int location) {
        this.location = location;
    }

    /**
     * Determines if this branch is valid
     *
     * @return true if the token and its predecessors are valid
     */
    public boolean validate() {
        return true;
    }

    /**
     * Return the DecimalFormat object for formatting the print out of scores.
     *
     * @return the DecimalFormat object for formatting score print outs
     */
    protected static DecimalFormat getScoreFormat() {
        return scoreFmt;
    }

    /**
     * Return the DecimalFormat object for formatting the print out of numbers
     *
     * @return the DecimalFormat object for formatting number print outs
     */
    protected static DecimalFormat getNumberFormat() {
        return numFmt;
    }

    /**
     * Returns the application object
     *
     * @return the application object
     */
    public synchronized Map<String, Object> getTokenProps() {
        if (tokenProps == null)
            tokenProps = new HashMap<String, Object>();

        return tokenProps;
    }
}
/**
 * Reset the reader, so that it can be read again.
 */
public void resetReader() {
    // Append the (presumably) still-unread values after the already-consumed
    // ones, so packetValues temporarily holds the full, ordered sequence.
    this.packetValues.addAll(readableObjects);
    this.readableObjects.clear();
    // Make the full sequence readable again from the start...
    this.readableObjects.addAll(packetValues);
    // ...and begin with an empty consumed-list.
    this.packetValues.clear();
}
#include <iostream> #include <vector> using tMatriz = std::vector<std::vector<int>>; const tMatriz calc = {{7,8,9},{4,5,6},{1,2,3}}; bool gana( int s, int n, int x, int y){ bool OK = false; for (int j = 0; !OK && j < 3; ++j) { if (calc[j][y] != n && s + calc[j][y] < 31) { OK = !gana(s + calc[j][y], calc[j][y], j, y); } } for (int i = 0; !OK && i < 3; ++i) { if (calc[x][i] != n && s + calc[x][i] < 31) { OK = !gana(s + calc[x][i], calc[x][i], x, i); } } return OK; } void resuelveCaso(){ int s, n; std::cin >> s >> n; int x,y; for(int i= 0; i< 3; ++i){ for(int j = 0; j< 3; ++j){ if(calc[i][j]== n){ x = i; y = j; }}} std::vector<int> v; if (s == 31) std::cout << "GANA\n"; else if(gana(s, n, x , y))std::cout<< "GANA\n"; else std::cout<<"PIERDE\n"; } int main(){ long long int n; std::cin >> n; for (int i = 0; i<n ; ++i) resuelveCaso(); return 0;}
// Add all adjacencies to node at (row, col) inline std::vector<thrift::Adjacency> createGridAdjacencys(const int row, const int col, const uint32_t n) { std::vector<thrift::Adjacency> adjs; auto nodeId = row * n + col; auto otherId = row * n + col + 1; createGridAdjacency( row, col + 1, getIfName(nodeId, otherId), adjs, n, getIfName(otherId, nodeId)); otherId = row * n + col - 1; createGridAdjacency( row, col - 1, getIfName(nodeId, otherId), adjs, n, getIfName(otherId, nodeId)); otherId = (row - 1) * n + col; createGridAdjacency( row - 1, col, getIfName(nodeId, otherId), adjs, n, getIfName(otherId, nodeId)); otherId = (row + 1) * n + col; createGridAdjacency( row + 1, col, getIfName(nodeId, otherId), adjs, n, getIfName(otherId, nodeId)); return adjs; }
def hyperglance_automation(boto_session, resource: dict, automation_params = ''): client = boto_session.client('iam') policy_arn = resource['attributes']['Policy ARN'] default_version_id = client.get_policy(PolicyArn=policy_arn) ['Policy']['DefaultVersionId'] policy_versions = client.list_policy_versions( PolicyArn=policy_arn ) ["Versions"] if len(policy_versions) > 1: new_version_id = policy_versions[1]['VersionId'] if policy_versions[0]['IsDefaultVersion'] else policy_versions[0]['VersionId'] client.set_default_policy_version( PolicyArn=policy_arn, VersionId=new_version_id ) client.delete_policy_version( PolicyArn=policy_arn, VersionId=default_version_id )
<gh_stars>0
// NOTE(review): the `<gh_stars>0` marker above is a scraper artifact, not
// TypeScript — confirm and remove it from the real source.
import Matter from "matter-js";
import * as PIXI from "pixi.js";
import { WorldExtended } from "./exttypes";
import { imgPaths, params } from "./config";
import * as utils from "./utils";
import * as repulsion from "./repulsion";
import * as grouping from "./grouping";
import * as poke from "./randompokes";
import * as align from "./alignment2";

export namespace Knollbot {
    // Entry point: builds the Matter.js physics world and the PIXI renderer,
    // loads the configured images as boxes, and wires up the force schedule
    // plus keyboard/mouse interactions.
    export const run = () => {
        // create an engine and runner
        const engine = Matter.Engine.create();
        const world = engine.world as WorldExtended;
        const runner = Matter.Runner.create();
        // enable force by default
        world.forceOn = true;
        // disable poking by default
        world.pokeEnabled = false;
        // disable gravity
        world.gravity.y = 0.0;

        // --------------------------------------
        // Screen parameters (20px slack for scrollbars/borders)
        const ScreenWidth = document.documentElement.clientWidth - 20;
        const ScreenHeight = document.documentElement.clientHeight - 20;
        const ScreenWidthHalf = Math.floor(ScreenWidth / 2);
        const ScreenHeightHalf = Math.floor(ScreenHeight / 2);

        // Wall parameters: thick static walls mostly hidden off-screen,
        // with WallVisible pixels showing inside the viewport.
        const WallThickness = 200;
        const WallMargin = 100;
        const WallVisible = 5;
        const WallOffset = Math.floor(WallThickness / 2) - WallVisible;

        // --------------------------------------
        // Object parameters: one box per configured image
        const NumBoxes = imgPaths.length;

        // --------------------------------------
        // Random poking
        world.pokeScale = params.pokeScale;
        world.pokeDecay = params.pokeDecay;
        // Alignment force
        world.alignmentForceCoeff = params.alignmentForceCoeff;
        world.alignmentForceRange = params.alignmentForceRange; // pixels
        world.alignmentForceWallRange = params.alignmentForceWallRange;
        world.alignmentForceOffset = params.alignmentForceOffset;
        // AntiGravity force
        world.repulsionCoeff = params.repulsionCoeff;
        world.repulsionRange = params.repulsionRange; // NOT pixels
        // Grouping attraction/repulsion
        world.groupingCoeff = params.groupingCoeff;
        // Flag displaying alignment lines
        world.displayLines = false;

        // --------------------------------------
        // Shared physics options for every box body
        const bodyOptions = {
            inertia: Infinity,
            frictionAir: params.frictionAir,
            friction: params.friction,
        };

        // generate boxes randomly
        // NOTE(review): this helper appears unused (boxes come from getBox
        // below) — confirm and consider removing.
        const generateRandomBoxes = (): Matter.Body[] => {
            const allSquare = false;
            const MinSizeX = 30;
            const MaxSizeX = 170;
            const MinSizeY = 30;
            const MaxSizeY = 170;
            const UnitSize = 16;
            const boxes = Array<Matter.Body>(NumBoxes);
            for (let i = 0; i < NumBoxes; i++) {
                const rectWidth = utils.randRange(MinSizeX, MaxSizeX, UnitSize);
                let rectHeight = rectWidth;
                if (!allSquare) {
                    rectHeight = utils.randRange(MinSizeY, MaxSizeY, UnitSize);
                }
                // keep the whole rectangle inside the visible area
                const offsetX = WallOffset + rectWidth / 2;
                const offsetY = WallOffset + rectHeight / 2;
                const x = utils.randRange(offsetX, ScreenWidth - offsetX);
                const y = utils.randRange(offsetY, ScreenHeight - offsetY);
                boxes[i] = Matter.Bodies.rectangle(x, y, rectWidth, rectHeight, bodyOptions);
            }
            return boxes;
        };

        // https://stackoverflow.com/a/55934241/524526
        const getImageDimensions = (path: string) => new Promise((resolve, reject) => {
            const img = new Image();
            // the following handler will fire after the successful loading of the image
            img.onload = () => {
                const { naturalWidth: width, naturalHeight: height } = img;
                resolve({ width, height });
            };
            // and this handler will fire if there was an error with the image
            // (like if it's not really an image or a corrupted one)
            img.onerror = () => {
                reject('There was some problem with the image.');
            };
            img.src = path;
        });

        // Create one physics body sized to the image, at a random position
        // fully inside the walls.
        const getBox = async (imgPath: string) => {
            let img = await getImageDimensions(imgPath) as { width: number, height: number };
            let offsetX = WallOffset + img.width / 2;
            let offsetY = WallOffset + img.height / 2;
            let x = utils.randRange(offsetX, ScreenWidth - offsetX);
            let y = utils.randRange(offsetY, ScreenHeight - offsetY);
            return Matter.Bodies.rectangle(x, y, img.width, img.height, bodyOptions);
        };

        // Kick off all image loads immediately; awaited later in setupWorld.
        const promisedBoxes = Promise.all(imgPaths.map(getBox));

        // surrounding wall
        const wallOptions = {
            isStatic: true,
            friction: params.wallFriction,
        }
        const wallTop = Matter.Bodies.rectangle(
            ScreenWidthHalf,
            -WallOffset,
            ScreenWidth + WallMargin,
            WallThickness,
            wallOptions,
        );
        const wallBottom = Matter.Bodies.rectangle(
            ScreenWidthHalf,
            ScreenHeight + WallOffset,
            ScreenWidth + WallMargin,
            WallThickness,
            wallOptions,
        );
        const wallLeft = Matter.Bodies.rectangle(
            - WallOffset,
            ScreenHeightHalf,
            WallThickness,
            ScreenHeight + WallMargin,
            wallOptions,
        );
        const wallRight = Matter.Bodies.rectangle(
            ScreenWidth + WallOffset,
            ScreenHeightHalf,
            WallThickness,
            ScreenHeight + WallMargin,
            wallOptions,
        );

        // mouse and constraint (lets the user drag boxes around)
        const mouse = Matter.Mouse.create(document.body);
        const constraint = Matter.Constraint.create(
            {
                // Must define pointA and pointB unlike IConstraintDefinition interface
                pointA: mouse.position,
                pointB: { x: 0, y: 0 },
                stiffness: 0.2,
                render: {
                    visible: false,
                },
            },
        );
        const mouseConstraint = Matter.MouseConstraint.create(
            engine,
            {
                mouse: mouse,
                constraint: constraint,
            }
        );

        // `blocks` is to contain boxes, walls, and mouse constraints
        var blocks: Matter.Body[];
        // Waits for all box bodies, then populates the world and starts the
        // physics runner. Convention: the last 4 entries of `blocks` are walls.
        const setupWorld = async () => {
            let boxes = await promisedBoxes;
            blocks = [...boxes, wallTop, wallBottom, wallLeft, wallRight]
            Matter.World.add(world, blocks);
            Matter.World.add(world, mouseConstraint);
            Matter.Runner.run(runner, engine);
        };

        // PIXI renderer setup
        const app = new PIXI.Application({
            width: ScreenWidth,
            height: ScreenHeight,
        });
        document.body.appendChild(app.view);
        app.renderer.backgroundColor = 0x247c41;

        const loader = new PIXI.Loader();
        let sprites: PIXI.Sprite[];
        // loader.add raises an error if duplicates exist
        loader.add(utils.unique(imgPaths));
        loader.load((loader) => {
            setupWorld();
            // boxes as sprites
            sprites = imgPaths.map(p => new PIXI.Sprite(loader.resources[p].texture));
            // set center of sprites as reference points
            sprites.forEach(sprite => sprite.anchor.set(0.5));
            // walls as still sprites
            // Not calling blocks as they may not be ready
            for (let i = 0; i < 4; i++) {
                const wall = PIXI.Sprite.from(PIXI.Texture.WHITE);
                // even indices: horizontal walls; odd: vertical walls
                if (i % 2 == 0) {
                    wall.width = ScreenWidth;
                    wall.height = WallVisible;
                } else {
                    wall.width = WallVisible;
                    wall.height = ScreenHeight;
                }
                wall.name = `wall${i}`;
                wall.tint = 0x000000;
                if (i == 1) {
                    wall.position.set(ScreenWidth - WallVisible, 0);
                } else if (i == 2) {
                    wall.position.set(0, ScreenHeight - WallVisible);
                } else {
                    wall.position.set(0, 0);
                }
                sprites.push(wall);
            }
            app.stage.addChild(...sprites);
            app.ticker.add(gameLoop);
        });

        // pixi gameLoop: redraws alignment guide lines and syncs each sprite
        // to its physics body every frame
        const gameLoop = () => {
            // draw alignment lines
            if (world.displayLines) {
                // drop last frame's guide lines (everything after the sprites)
                if (app.stage.children.length > sprites.length) {
                    app.stage.removeChildren(sprites.length);
                }
                const boxes = blocks.slice(0, blocks.length - 4);
                const attractorXs = align.getAttractorXs(boxes, world.alignmentForceRange);
                for (let x of attractorXs) {
                    const line = new PIXI.Graphics();
                    line.lineStyle(1, params.colorLinesVertical);
                    line.moveTo(x, 0);
                    line.lineTo(x, ScreenHeight);
                    app.stage.addChild(line);
                }
                const attractorYs = align.getAttractorYs(boxes, world.alignmentForceRange);
                for (let y of attractorYs) {
                    const line = new PIXI.Graphics();
                    line.lineStyle(1, params.colorLinesHorizontal);
                    line.moveTo(0, y);
                    line.lineTo(ScreenWidth, y);
                    app.stage.addChild(line);
                }
            }
            // draw blocks: copy physics positions onto the sprites
            for (let i in imgPaths) {
                const sprite = sprites[i];
                const block = blocks[i];
                sprite.x = block.position.x;
                sprite.y = block.position.y;
            }
        };

        // main loop: phase schedule — grouping first, then repulsion,
        // then alignment, with optional random pokes on top
        var counter = 0;
        // NOTE(review): `Matter.Events` as the parameter type looks wrong
        // (it is the Events module, not an event payload type) — confirm.
        Matter.Events.on(engine, 'beforeUpdate', (event: Matter.Events) => {
            counter += 1;
            if (counter % 300 == 0) {
                console.log("counter: ", counter);
            }
            if (world.forceOn) {
                if (counter < 180) {
                    grouping.applyGrouping(world, blocks);
                } else if (counter < 240) {
                    repulsion.applyAntiGravity(world, blocks);
                } else {
                    align.applyAlignment(world, blocks);
                }
                if (world.pokeEnabled) {
                    poke.applyRandomPokes(world, blocks);
                }
            }
        });

        document.addEventListener('keydown', (e) => {
            // Toggle forces by pressing Space key
            if (e.code === "Space") {
                world.forceOn = !world.forceOn;
                console.log(`Toggled force: ${world.forceOn}`);
            }
            // Toggle alignment lines with L key
            if (e.code === "KeyL") {
                // clear any lines already drawn before disabling
                if (world.displayLines && app.stage.children.length > sprites.length) {
                    app.stage.removeChildren(sprites.length);
                }
                world.displayLines = !world.displayLines;
                console.log(`Toggled displayLines: ${world.displayLines}`);
            }
            // Toggle random poking
            if (e.code === "KeyP") {
                world.pokeEnabled = !world.pokeEnabled;
                if (world.pokeEnabled) {
                    // reset poke strength, since pokeDecay shrinks it over time
                    world.pokeScale = params.pokeScale;
                }
                console.log(`Toggled poking: ${world.pokeEnabled}`);
            }
        });

        // Rotate a block by double clicking
        document.addEventListener('dblclick', () => {
            console.log(`--- Double click at t=${counter} ---`);
            // iterate over blocks except for walls
            for (let i = 0; i < blocks.length - 4; i++) {
                const b = blocks[i];
                if (!b.isStatic && Matter.Bounds.contains(b.bounds, mouse.position)) {
                    Matter.Body.rotate(b, Math.PI / 2);
                    // keep the sprite's rotation in sync with the body
                    const sprite = sprites[i];
                    sprite.angle += 90;
                    break;
                }
            }
        });
    }
}
ATG3, a Target of miR-431-5p, Promotes Proliferation and Invasion of Colon Cancer via Promoting Autophagy Background Studies have indicated that ATG3 could mediate the effects of other tumor-related regulators in carcinogenesis. However, the expression, role, and mechanism of ATG3 itself in cancers are rarely revealed. Thus, we explored the expression, function, and mechanism of ATG3 in colon cancer. Materials and methods The expression of ATG3 was detected in colon cancer tissues and cell lines, as well as in adjacent tumor tissues and normal colon epithelial cells. The effects of ATG3 alteration on proliferation and invasion were further analyzed. The expression and role of miR-431-5p, a potential negative regulator of ATG3, were also studied. Eventually, the role of autophagy in ATG3 related effects in colon cancer was checked. Results ATG3 is upregulated in colon cancer tissues and cells demonstrated by qPCR and IHC. ATG3 knockdown significantly suppressed proliferation and invasion of colon cancer cells indicated by plate clone formation and Transwell invasion assays. The expression of miR-431-5p is downregulated and negatively correlates with ATG3 in colon cancer. Furthermore, luciferase report system, plate clone formation and Transwell invasion assays demonstrated that miR-431-5p could prohibit cell proliferation and invasion via directly targeting ATG3 in colon cancer. Eventually, Western blot, plate clone formation and Transwell invasion assays proved that autophagy block could antagonize the promotive functions of ATG3 on proliferation and invasion in cancer suggesting autophagy activation accounts for the promotive role of ATG3 on proliferation and invasion in colon cancer. Conclusion Collectively, ATG3 upregulation, caused by downregulated miR-431-5p, promotes proliferation and invasion in an autophagy-dependent manner in colon cancer suggesting that miR-431-5p/ATG3/autophagy may be a potential therapeutic target in colon cancer.
Introduction Colon cancer is one of the most prevalent and fatal malignant diseases both in developing and developed countries. Surgical resection and chemoradiotherapy, the main treatments for colon cancer, are effective for colon cancer at an early stage (stage 0 and I); however, the prognostic outcome of colon cancer patients at advanced stages (II-IV) is unsatisfactory. Therefore, more efforts should be taken to identify critical regulators in carcinogenesis and to reveal the corresponding mechanism in colon cancer, which can potentially provide new diagnostic markers and therapeutic targets and eventually improve the treatment efficacy of colon cancer. Studies have revealed that autophagy, a conserved process of self-degradation, plays vital roles in mediating the functions of tumor-related factors almost at every stage of cancer, including carcinogenesis, recurrence, metastasis, as well as therapeutic sensitivity. Serving as the constitutive components of autophagy process, autophagy-associated proteins are critical regulators of autophagy. On the one hand, autophagy-associated proteins are in charge of autophagy modification upon receiving the regulatory signals from upstream regulators of autophagy, such as PI3K/AKT, 8 AMPK, 9 and MAPK. 10 On the other hand, the aberrant expressions of autophagy-related proteins largely account for the abnormal status of autophagy. Notably, although the roles of autophagy in cancers are widely explored, few studies have focused on the role and mechanism of autophagy-associated proteins themselves in the malignant progression of tumors. Thus, studying the expression of autophagy-related proteins and exploring the underlying mechanisms may help to elucidate the molecular mechanism of cancer and present potential targets for clinical therapy.
ATG3, serving as an E2-like enzyme, is an important autophagy regulator involving in LC3-B generation via promoting covalent binding of phosphatidylethanolamine to the C-terminal glycine of LC3-A, a key step for the membrane association of ATG8-like proteins. 14 According to the available studies, ATG3 could mediate the functions of other tumor-related factors either in an autophagy-dependent or in the autophagy-independent manners, suggesting ATG3 may exert critical roles in carcinogenesis. Up to now, the expression and functions of ATG3 in colon cancer have not been revealed. Thus, in this study, we analyzed the expression of ATG3 in colon cancer and further explored functions and the underlying mechanism of aberrant expression of ATG3 in colon cancer. Our results demonstrate that ATG3 is upregulated in colon cancer tissues and cell lines, and ATG3 can promote proliferation and invasion of colon cancer cells by activating autophagy. Loss of miR-431-5p accounts for the overexpression of ATG3 and miR-431-5p can antagonize the oncogenic functions of ATG3 in colon cancer. Tissue Samples Twenty-four cases of colon cancer tissues and paired adjacent tumor tissues were collected from the resection tissues of newly diagnosed colon cancer patients with surgery in the department of general surgery of Xiangya Hospital of Central South University. The collected tissues were immediately divided into two parts, one was frozen in liquid nitrogen for RNA extraction, and one was fixed with formalin for paraffin sectioning. The study was approved by the Ethical Committee of Xiangya Hospital of Central South University and informed consents were signed by all patients before tissue collection. Immunohistochemistry Immunohistochemistry (IHC) was performed according to our previous study. 19 Simply, paraffin-embedded sections were successively immersed in xylene and graded ethanol for dewaxing and re-hydration. 
After antigen retrieval and endogenous peroxidase activity blocking, the slides were subsequently incubated with rabbit anti-ATG3 polyclonal antibody (dilution: 1:150, catalog number: D221649, BBI, Shanghai, China) overnight at 4°C. Following three times washing, the slides were successively subjected to the rest processes including incubation with biotin-linked secondary antibody, PBS washing, incubation with streptavidin-HRP, DAB staining, and hematoxylin counterstaining. Eventually, the pictures were captured with the microscopic imaging system microscope (Olympus, Tokyo, Japan) and analyzed by two independent pathologists. Semiquantitative analysis of IHC results was carried out based on staining intensity and area according to our previous description. 19,20 Cell Culture Colon cancer cell lines, SW480, SW620, HCT116, and HCT15, being purchased from ATCC, were maintained with RPMI-1640 (BI, Jerusalem, Israel) plus 10% fetal bovine serum (BI, Jerusalem, Israel). NCM460, an immortalized colon epithelial cell line, was purchased from ATCC and cultured with M3 Base medium (INCELL, TX, USA) plus 10% FBS (BI, Jerusalem, Israel). Human embryonic kidney 293T cells were obtained from Cell Bank of Typical Culture Preservation Committee of Chinese Academy of Sciences and cultured with DMEM plus 10% FBS (BI, Jerusalem, Israel). All cells were maintained in a humidified cell incubator at 37°C with 5% CO 2 . Chloroquine (CQ, Sigma, MI, USA) at 50μM was added into the culture medium for inhibition of autophagy flux. Dual Luciferase Reporter Assay Both psiCHECK2-wild-ATG3-3ʹUTR and psiCHECK2-mutant-ATG3-3ʹUTR luciferase report plasmids of ATG3 were constructed, respectively, according to manufacturer's instruction (Promega Corporation, WI, USA). Then, luciferase reporter vectors were co-transfected with miR-431-5p mimics into 293T cells by using Lipofectamine TM 2000 reagent (Thermo, MA, USA).
Forty-eight hours later, the luciferase activity was detected by using a Dual-Luciferase Reporter detection System (Promega Corporation, WI, USA) and the signaling was collected with an Epoch2 spectrophotometer (BioTek, VT, USA). The relative luciferase activity was presented by the ratio of firefly luciferase to Renilla luciferase activity. Plate Clone Formation Assay Plate clone formation assay was performed as previously described. 3 Briefly, colon cancer cells were seeded into six-well plates at a density of 800 cells/well. Eight days later, the cells were fixed with methanol and stained by 0.5% crystal violet. Clones containing more than fifty cells were counted under an inverse microscope (Nikon, Tokyo, Japan). Transwell Invasion Assay Transwell assay was applied as described in our previous paper with minor modifications. 21 Briefly, 8-μm pore size transwell chambers (Costar, ME, USA) pre-coated with Matrigel (Corning, NY, USA) were used to evaluate the ability of cell invasion. 300 μL pure RPMI-1640 containing 2.5×10 4 cells were added into the upper chamber. The lower well was filled with 750μL RPMI 1640 medium plus 5% FBS as a chemoattractant. After culture at 37°C for twenty-four hours, the cells were fixed with methanol and stained by 0.5% crystal violet. After swabbing the cells above on the upper side of the membrane, the invasive cells were photographed and counted under a microscopic scope. Statistical Analysis All experiments were independently repeated for at least 3 times. Statistical analyses and statistical charts were conducted and produced using SPSS18.0 software and GraphPad Prism version 8. For comparisons between two groups, a Student's t-test or chi-square test was used. Pearson correlation coefficient was adopted to analyze the expression correlation between miR-431-5p and ATG3. P < 0.05 was considered to be statistically significant.
ATG3 Is Upregulated in Colon Cancer Firstly, we analyzed the expression of ATG3 in colon cancer based on the online data from TCGA and GEO using UALCAN 22 and Oncomine database. As Figure 1A shows, ATG3 is significantly upregulated in colon cancer tissues, which is confirmed by two GEO data sets ( Figure 1B and C). Compared with adjacent tumor tissues, upregulation of ATG3 is also validated in collected colon cancer tissues demonstrated by qPCR and IHC assays ( Figure 1D and E). Thus, these results proved that ATG3 is upregulated and may serve as an oncogenic regulator in colon cancer. ATG3 Knockdown Inhibits Growth Proliferation and Invasion of Colon Cancer Cells Next, we checked the expression of ATG3 in colon cancer cell lines. As indicated by Figure 2A and B, compared with the expression level in NCM460 cells, ATG3 was slightly upregulated in HCT15 and SW480 and significantly upregulated in SW620 and HCT116. Therefore, to explore the functions of ATG3 in colon cancer, specific siRNAs of ATG3 were introduced into SW620 and HCT116 cells to knock down ATG3 expression. Successful ATG3 knockdown was achieved, indicating by notably decreased protein level ( Figure 2C). Subsequently, the proliferation and invasion of SW620 and HCT116 were analyzed by plate clone formation and Transwell invasion assays. Significant inhibitory effects of ATG3 knockdown on cell proliferation and invasion were observed demonstrated by fewer clones (Figure 2D), and less invasive cells ( Figure 2E). Therefore, these results indicated ATG3 could promote proliferation and invasion in colon cancer. Downregulated miR-431-5p Accounts for the High Expression of ATG3 in Colon Cancer Cells Both transcriptional and post-transcriptional mechanisms may account for the expression regulation. 
23,24 Hence, we detected the level of ATG3 hnRNA (heterogeneous nuclear RNA), the primary transcript products which can directly indicate transcriptional activity, between colon cancer cells and normal colon epithelium cells using qPCR. No significant difference was observed among SW620, HCT116, and NCM460 cells (Supplementary Figure 1A). Furthermore, the TCGA data shows that methylation status of ATG3 promoter is comparable between normal colon and colon cancer tissues (Supplementary Figure 1B). Thus, these results imply that post-transcriptional mechanisms such as miRNAs are involved in the regulation of ATG3 in colon cancer (Supplementary Figure 2A-C). Indeed, downregulated miR-431-5p in colon cancer cells and tissues was indicated by qPCR ( Figure 3A and B), and the expression of miR-431-5p was inversely correlated to ATG3 in colon cancer ( Figure 3C). Accordingly, the level of ATG3 mRNA and protein could be dramatically decreased by miR-431-5p mimics in SW620 and HCT116 cells (Supplementary Figure 3A and B). Importantly, compared to the negative control, miR-431-5p mimics could significantly decrease luciferase activity of 293T cells transfected with wild ATG3 3ʹUTR report plasmids; whereas, the inhibitory effect did not appear in 293T cells transfected with mutant ATG3 3ʹUTR report plasmids ( Figure 3D). Thus, our results confirmed that miR-431-5p could directly target and inhibit ATG3 in colon cancer. Ectopic miR-431-5p Expression Suppresses Colon Cancer Progression by Targeting ATG3 Subsequently, we explored the functions of miR-431-5p and the roles of ATG3 in miR-431-5p related functions in colon cancer. By ectopic expression of ATG3, we successfully rescued the expression of ATG3 in colon cancer cells transfected with miR-431-5p mimics ( Figure 3E), which ensured the reliability of function experiments.
As Figure 4A and B indicated, miR-431-5p mimics can significantly suppress the proliferation and invasion of colon cancer cells, and these inhibitory effects on colon cancer cells were almost released upon ectopic expression of ATG3. Thus, miR-431-5p can inhibit the proliferation and migration of colon cancer cells by targeting ATG3. ATG3 Promotes Proliferation and Invasion of Colon Cancer Cells in an Autophagy-Dependent Manner Considering ATG3 is an autophagy-related protein, we further explored whether ATG3 exerts its functions by modulating autophagy in colon cancer. As Figure 5A demonstrated, ATG3 knockdown significantly inhibited the accumulation of LC3B, an autophagy marker for indicating autophagosome formation, and these inhibitory effects could still be observed under chloroquine (CQ) treatment, excluding the possibility of a degradation obstacle accounting for LC3B accumulation, suggesting that ATG3 can promote autophagy flux in colon cancer. Next, we investigated the role of autophagy in ATG3 related functions in colon cancer. Re-expression of ATG3 with or without CQ addition was successfully applied in colon cancer cells with ATG3 knockdown ( Figure 5B). Subsequently, function experiments showed that CQ treatment could significantly antagonize the rescue effects of ATG3 restoration on proliferation and invasion in colon cancer ( Figure 5C and D). Consequently, our results manifested that ATG3 can promote proliferation and invasion of colon cancer cells in an autophagy-dependent manner.
A recent study shows that ATG3 is upregulated in gastric cancer tissues, but acts as a favorable prognostic factor supported by overall survival analysis. 30 Accordingly, we proved that ATG3 is also upregulated in colon cancer tissues and cell lines indicating ATG3 may serve as an oncogenic regulator in colon cancer. No significant difference of ATG3 hnRNAs between colon epithelial cells and colon cancer cells was observed, indicating that post-transcriptional regulators are involved in the regulation of ATG3. MicroRNAs (miRNAs), a type of non-coding RNA with 21-24 nucleotides in length, are the most common negative regulators of expression which can cause mRNA translation inhibition or degradation via directly binding its target mRNAs at 3ʹ untranslated region (3ʹUTR). Several miRNAs including miR-495, 31 −23a, 32 −155, 33 −206, 34 −1, 35 and −365 36 have been confirmed to target ATG3 in both malignant and non-malignant cells, suggesting miRNAs may account for the dysregulation of ATG3 in colon cancer. Indeed, we validated that miR-431-5p is downregulated and inversely correlated to ATG3 in colon cancer. Moreover, miR-431-5p can directly bind and decrease ATG3 level in colon cancer. Therefore, we originally reveal that downregulation of miR-431-5p accounts for ATG3 upregulation in colon cancer. The roles of ATG3 in tumorigenesis and progression are tissue-dependent, as demonstrated by the available studies. Generally, ATG3 serves as a positive and protective factor in tumors. Oncogenic lncRNAs (long noncoding RNAs), including lnc-HOTAIR, 17 -Meg3, 15 -PVT1 36 and -GAS5, 32 promote tumorigenesis and progression via stabilizing ATG3 mRNA by specifically sponging miRNAs targeting ATG3 like miR-365 and miR-23a, or directly interacting with ATG3 mRNA in distinct cancers. Meanwhile, sustained levels of ATG3 can impair the resistance of malignant cells to DNA-damaging drugs.
16 Consistent with most research findings, the oncogenic roles of ATG3 in colon cancer, such as promoting proliferation and invasion, are confirmed by our results. Moreover, miR-431-5p, serving as a tumor-suppressive regulator, can significantly inhibit colon cancer progression by targeting ATG3. However, additional efforts should be paid to reveal the underlying mechanism of miR-431-5p deregulation in colon cancer. In addition to acting in an autophagy-dependent manner, autophagy-related proteins can also regulate tumorigenesis and progression in an autophagy-independent manner manifesting by numerous studies. 18, For example, BECN1 can promote degradation of MCL1, one of oncogenes, via physical interaction, being independent of autophagy. 40 Similarly, Y203 phosphorylation of ATG3 is not critical for autophagy induction but is important in sensitizing cancer cells to DNA-damaging agents. 16 Moreover, the pro-apoptotic roles of ATG3 for attached intestinal epithelial cells are independent of ATG3 related autophagy. 18 Thus, it is necessary to distinguish the function of autophagy in autophagy-related proteins. In our study, CQ treatment significantly antagonized the promotive effects of ATG3 on colon cancer cells, suggesting that ATG3 promotes colon cancer progression in an autophagy-dependent manner. Conclusions Collectively, we demonstrate that upregulation of ATG3, a result of the loss of miR-431-5p, can promote cell proliferation and invasion in colon cancer. miR-431-5p restoration can significantly inhibit proliferation and invasion via targeting ATG3 in colon cancer. Moreover, we further reveal that ATG3 promotes colon cancer progression in an Figure 4 miR-431-5p could inhibit proliferation and invasion of colon cancer cells by targeting ATG3. Notes: (A) As the results of plate clone formation assay indicated, miR-431-5p mimic significantly inhibits proliferation of SW620 and HCT116 which could be subsequently rescued by restoration of ATG3. 
(B) miR-431-5p mimic significantly inhibits invasion of SW620 and HCT116 which could be subsequently rescued by ectopic expression of ATG3, as demonstrated by Transwell assay. **Stands for P < 0.01; ***Stands for P < 0.001. autophagy-dependent manner suggesting that miR-431-5p/ATG3/autophagy may be an effective therapeutic target in colon cancer. The manuscript management system is completely online and includes a very quick and fair peer-review system, which is all easy to use. Visit http://www.dovepress.com/testimonials.php to read real quotes from published authors.
President Trump fanned the flames that surround Walt Disney Co.’s sports broadcaster ESPN with a Friday morning tweet that fed on critics’ assertions that ESPN’s liberal-leaning politics are ruining the worldwide leader in sports television. The president claimed that ESPN’s declining viewership, a problem that has caused headaches for Disney DIS, -0.08% and its investors, is to be blamed on the network’s broadcasting its politics, or that of the journalists it employs. In a new take on his go-to “fake news” gripe, the president called for ESPN to “Apologize for untruth.” ESPN is paying a really big price for its politics (and bad programming). People are dumping it in RECORD numbers. Apologize for untruth! — Donald J. Trump (@realDonaldTrump) September 15, 2017 What prompted the president to tweet about ESPN (aside from his affinity for criticizing the media)? On Sept. 11, Jemele Hill, co-host of ESPN’s 6 p.m. SportsCenter broadcast, known as “The Six,” called Trump, among other things, “a white supremacist who has largely surrounded himself with other white supremacists.” She made the comments as part of a conversation on Twitter. White House Press Secretary Sarah Huckabee Sanders, fielding questions on Wednesday, said Hill’s comments were a fireable offense. This, of course, wouldn’t be the first time that Trump and/or his administration targeted a journalist or media outlet. See: Trump slams CNN, which says he should start doing his job Read: Donald Trump’s war on media is boosting subscriptions and donations to news outlets ESPN didn’t fire Hill. The network issued a couple of statements saying that Hill’s comments didn’t reflect ESPN’s position, but that the company had addressed the issue with its anchor.
ESPN Statement on Jemele Hill: pic.twitter.com/3kfexjx9zQ — ESPN PR (@ESPNPR) September 12, 2017 “Jemele has a right to her personal opinions but not to publicly share them on a platform that implies that she was in any way speaking on behalf of ESPN,” the network said in a statement. “She has acknowledged that her tweets crossed that line and has apologized for doing so. We accept her apology.” Think Progress, citing unnamed sources inside ESPN, said the network did try to move Hill off her programming slot but met with resistance from the on-air staff they pursued to replace her. Co-host Michael Smith also reportedly dug in against the move. ESPN officials denied they asked other anchors to fill in, in a response to the Washington Post. The issue fits into the larger narrative around what some see as the politicization of ESPN and its role, if any, in the network’s stumble from grace. As MarketWatch has reported, industry and investor commentary has focused on ESPN’s declining subscriber numbers, significant cost-cutting layoffs and uncertainty over its cord-cutting adjustments as more households dump cable for on-demand viewing. Notably, a significant number of reader comments on those articles are critical of ESPN’s political or social voice. Read: Disney’s plan for ESPN is shrouded in uncertainty Check out: More TV viewers are cutting the cable cord, but the savings aren’t what they were To the public, ESPN has seemingly picked a side, whether it was awarding Caitlyn Jenner the Arthur Ashe Courage Award at the 2015 ESPYS, or the company moving a golf tournament from a club owned by then presidential candidate Donald Trump. Some viewers have complained about ESPN covering former NFL quarterback Colin Kaepernick’s kneeling protests during the National Anthem. In fact, much of the criticism about ESPN’s political views and social justice stance has been leveled directly at Jemele Hill and her co-host Michael Smith. 
ESPN commissioned third-party research to look at its perceived liberal bias, most recently in May. That study found that about 64% of ESPN viewers think the network does a good job of mixing sports and politics. The network’s conclusion was that while some viewers may disagree with ESPN, it has no material impact on viewership. However, in December of 2016, ESPN Public Editor Jim Brady addressed how the network has dealt with changing and intensifying political dynamics. “As it turns out, ESPN is far from immune from the political fever that has afflicted so much of the country over the past year,” Brady wrote in a company post. “Internally, there’s a feeling among staffers—both liberal and conservative—that the company’s perceived move leftward has had a stifling effect on discourse inside the company and has affected its public-facing products. Consumers have sensed that same leftward movement, alienating some.” Read: ESPN under fire for removing announcer Robert Lee from Virginia football game One employee told Brady that Republicans, and more broadly, conservatives, in the office feel the need to talk in whispers. ESPN’s detractors and even competitors have criticized the network for actions they believe signal proof of the network’s bias. Mike Soltys, ESPN’s vice president of corporate communications, told MarketWatch that it is something that is constantly talked about within the walls of ESPN’s Bristol, Conn., headquarters. Check out: Warriors’ dominance could cost Disney $130 million See: The Golden State Warriors will get $20 million a year for a Rakuten ad on its jerseys But Soltys said when something like the Kaepernick news happens, they have to cover it, and they will continue to do so. He believes ESPN has been fairly balanced in commentators’ criticisms and support. 
When fans at Boston’s Fenway Park were ejected from a Red Sox game for holding a banner reading: “Racism is as American as Baseball,” as happened earlier this week, Soltys said ESPN is going to report and discuss it as well as the Red Sox’s three-game lead in Major League Baseball’s American League East conference. “It is accurate that the Walt Disney Co. and ESPN are committed to diversity and inclusion,” ESPN president John Skipper told Brady. “These are long-standing values that drive fundamental fairness while providing us with the widest possible pool of talent to create the smartest and most creative staff. We do not view this as a political stance, but as a human stance. We do not think tolerance is the domain of a particular political philosophy.” Walt Disney’s shares have dropped nearly 6% in the year to date, while the S&P 500 index SPX, -0.08% is up more than 11% and the Dow Jones Industrial Average DJIA, -0.13% is up more than 12%.
package com.worldbiomusic.allgames.games.solo;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.Sound;
import org.bukkit.block.Block;
import org.bukkit.entity.Player;
import org.bukkit.event.Event;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.inventory.ItemStack;

import com.wbm.plugin.util.BlockTool;
import com.wbm.plugin.util.LocationTool;
import com.worldbiomusic.allgames.AllMiniGamesMain;
import com.worldbiomusic.minigameworld.minigameframes.SoloMiniGame;
import com.wbm.plugin.util.Metrics;
import com.wbm.plugin.util.SoundTool;

/**
 * Solo minigame: the play area is filled with random blocks and the player
 * scores by breaking each block with the matching ("fit") tool. Breaking a
 * block is cancelled (the block is not actually removed); instead it is
 * swapped for another random material and the player gains a point.
 */
public class FitTool extends SoloMiniGame {
	/*
	 * Break blocks with fit tools
	 */

	// Materials that may appear in the play region; each corresponds to one
	// of the four tools handed out in onStart() (sword/axe/pickaxe/shovel).
	private List<Material> blocks;
	// Opposite corners of the cuboid region that is filled with random blocks.
	private Location pos1, pos2;

	public FitTool() {
		// NOTE(review): args presumably mean (title, play-time, waiting-time)
		// in seconds — confirm against the SoloMiniGame constructor.
		super("FitTool", 30, 10);

		// bstats metrics reporting for this minigame (plugin id 14387)
		new Metrics(AllMiniGamesMain.getInstance(), 14387);

		this.getSetting().setIcon(Material.STONE_PICKAXE);
	}

	/** Fill the configured region with randomly chosen playable blocks. */
	@Override
	protected void initGame() {
		// fill blocks
		BlockTool.fillBlockWithRandomMaterial(this.pos1, this.pos2, this.blocks);
	}

	/**
	 * Register default config values: the playable block materials and the
	 * two region corners (both default to the minigame's own location).
	 */
	@Override
	protected void initCustomData() {
		super.initCustomData();
		Map<String, Object> data = this.getCustomData();

		// Blocks
		// save with String (Material doesn't implement ConfigurationSerialization)
		List<String> blocksData = new ArrayList<>();
		// sword
		blocksData.add(Material.COBWEB.name());
		// axe
		blocksData.add(Material.OAK_WOOD.name());
		// pickaxe
		blocksData.add(Material.COBBLESTONE.name());
		// shovel
		blocksData.add(Material.DIRT.name());
		data.put("blocks", blocksData);

		// blocks location
		data.put("pos1", this.getLocation());
		data.put("pos2", this.getLocation());
	}

	/** @return a uniformly random material from the configured block list */
	private Material getRandomBlock() {
		int r = (int) (Math.random() * this.blocks.size());
		return this.blocks.get(r);
	}

	/** Load the block materials and region corners back from config data. */
	@Override
	public void loadCustomData() {
		this.blocks = new ArrayList<>();

		// blocks (stored as Material names, see initCustomData)
		@SuppressWarnings("unchecked")
		List<String> blocksStr = (List<String>) this.getCustomData().get("blocks");
		for (String block : blocksStr) {
			this.blocks.add(Material.valueOf(block));
		}

		// blocks location
		this.pos1 = (Location) this.getCustomData().get("pos1");
		this.pos2 = (Location) this.getCustomData().get("pos2");
	}

	/**
	 * Score on block break: only blocks inside the region and of a playable
	 * material count. The break is cancelled so the region stays filled; the
	 * block is re-rolled to a new random material instead.
	 */
	@Override
	public void onEvent(Event event) {
		if (event instanceof BlockBreakEvent) {
			BlockBreakEvent e = (BlockBreakEvent) event;
			Player p = e.getPlayer();
			Block b = e.getBlock();

			// plus score with specific block
			if (LocationTool.isIn(pos1, b.getLocation(), pos2) && this.blocks.contains(b.getType())) {
				e.setCancelled(true);
				this.plusScore(p, 1);

				// random block
				b.setType(this.getRandomBlock());

				// sound
				SoundTool.play(b.getLocation(), Sound.BLOCK_NOTE_BLOCK_BELL);
			}
		}
	}

	/** Hand every player the four iron tools used to break the blocks. */
	@Override
	protected void onStart() {
		// give tools
		for (Player p : this.getPlayers()) {
			p.getInventory().addItem(new ItemStack(Material.IRON_SWORD));
			p.getInventory().addItem(new ItemStack(Material.IRON_PICKAXE));
			p.getInventory().addItem(new ItemStack(Material.IRON_AXE));
			p.getInventory().addItem(new ItemStack(Material.IRON_SHOVEL));
		}
	}

	/** @return the tutorial lines shown to players before the game starts */
	@Override
	protected List<String> tutorial() {
		List<String> tutorial = new ArrayList<>();
		tutorial.add("Break blocks with fit tools");
		tutorial.add("Breaking block: +1");
		return tutorial;
	}
}
import { vertexShaderRaw, fragmentShaderRaw } from './shaders'

/** An RGB colour: three numeric components, passed to a vec3 uniform. */
export type Colour = number[];

/**
 * Thin wrapper around a WebGL rendering context: compiles/links the shader
 * program from `./shaders` on construction and exposes simple 2D drawing
 * with a single uniform colour.
 */
export class MyCanvas {
  /**
   * WebGL context.
   */
  private gl: WebGLRenderingContext
  private program: WebGLProgram

  // Pseudo-enum mirroring the context's primitive-type constants so callers
  // don't need direct access to the WebGL context.
  public readonly DrawTypes: {
    POINTS: number,
    LINES: number,
    LINE_STRIP: number,
    LINE_LOOP: number,
    TRIANGLES: number,
    TRIANGLE_STRIP: number,
    TRIANGLE_FAN: number,
  }

  constructor(private canvas: HTMLCanvasElement, private vertexGenericLocationIndex = 3) {
    this.gl = this.canvas.getContext('webgl')
    if (!this.gl) {
      // Fallback for older browsers that only expose the experimental context.
      this.gl = this.canvas.getContext('experimental-webgl') as WebGLRenderingContext
    }
    if (!this.gl) {
      throw new Error('WebGL not supported')
    }

    // initialize pseudo-enum containing types of primitive shapes
    this.DrawTypes = {
      POINTS: this.gl.POINTS,
      LINES: this.gl.LINES,
      LINE_STRIP: this.gl.LINE_STRIP,
      LINE_LOOP: this.gl.LINE_LOOP,
      TRIANGLES: this.gl.TRIANGLES,
      TRIANGLE_STRIP: this.gl.TRIANGLE_STRIP,
      TRIANGLE_FAN: this.gl.TRIANGLE_FAN,
    }

    this.clearScreen()
    this.prepare()
  }

  /** Compile both shaders and link them into the program used for drawing. */
  private prepare() {
    const gl = this.gl;
    const vertexShader = this.prepareShader(vertexShaderRaw.source, gl.VERTEX_SHADER)
    const fragmentShader = this.prepareShader(fragmentShaderRaw.source, gl.FRAGMENT_SHADER)
    this.program = this.prepareProgram([vertexShader, fragmentShader])
  }

  /**
   * Wipe the whole screen clean.
   */
  private clearScreen() {
    this.gl.clearColor(0, 0, 0, 1)
    this.gl.clear(this.gl.COLOR_BUFFER_BIT | this.gl.DEPTH_BUFFER_BIT)
  }

  /**
   * Compile a single shader of the given type from source.
   * @throws Error with the shader info log on compilation failure.
   */
  private prepareShader(source: string, shaderType: number) {
    const gl = this.gl
    const shader = gl.createShader(shaderType)
    gl.shaderSource(shader, source)
    gl.compileShader(shader)
    if (!this.gl.getShaderParameter(shader, this.gl.COMPILE_STATUS)) {
      throw new Error('Shader compilation error: ' + this.gl.getShaderInfoLog(shader))
    }
    return shader;
  }

  /**
   * Attach the shaders, bind the vertex-position attribute to the generic
   * location chosen in the constructor, then link and validate the program.
   * @throws Error with the program info log on link/validate failure.
   */
  private prepareProgram(shaders: WebGLShader[]) {
    const gl = this.gl;
    const program = gl.createProgram()
    shaders.forEach((shader) => {
      gl.attachShader(program, shader)
    });
    // Must happen before linking for the location binding to take effect.
    gl.bindAttribLocation(program, this.vertexGenericLocationIndex, vertexShaderRaw.attributes.vertPosition)
    gl.linkProgram(program)
    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
      throw new Error('Program linking error: ' + this.gl.getProgramInfoLog(program))
    }
    gl.validateProgram(program)
    if (!gl.getProgramParameter(program, gl.VALIDATE_STATUS)) {
      throw new Error('Program validating error: ' + gl.getProgramInfoLog(program))
    }
    return program;
  }

  /** Debug helper: log every active attribute of the linked program. */
  public printAllActiveAttributes() {
    const gl = this.gl
    const numAttribs = gl.getProgramParameter(this.program, gl.ACTIVE_ATTRIBUTES);
    for (let i = 0; i < numAttribs; ++i) {
      const info = gl.getActiveAttrib(this.program, i);
      console.log('name:', info.name, 'type:', info.type, 'size:', info.size);
    }
  }

  /** Debug helper: log every active uniform of the linked program. */
  public printAllActiveUniforms() {
    const gl = this.gl
    const numUniforms = gl.getProgramParameter(this.program, gl.ACTIVE_UNIFORMS);
    for (let i = 0; i < numUniforms; ++i) {
      const info = gl.getActiveUniform(this.program, i);
      console.log('name:', info.name, 'type:', info.type, 'size:', info.size);
    }
  }

  /** Debug helper: log the resolved location of the vertex-position attribute. */
  public printAttribLocation() {
    console.log(
      this.gl.getAttribLocation(
        this.program,
        vertexShaderRaw.attributes.vertPosition
      )
    )
  }

  /**
   * Clear the screen and draw the given 2D vertices as one primitive of
   * `drawType` (see `DrawTypes`), all in a single uniform colour.
   *
   * `vertices` is a flat array of interleaved x,y pairs — exactly two
   * components per vertex; the colour is NOT part of this array (it is
   * uploaded separately as a vec3 uniform).
   */
  public draw2DUniformColor(vertices: number[], drawType: number, colour: Colour) {
    const gl = this.gl
    this.clearScreen()
    const verticesCount = vertices.length / 2; // two coordinates per vertex
    // A fractional count means the array length is odd, i.e. some vertex is
    // missing a coordinate component.
    if (verticesCount !== Math.floor(vertices.length / 2)) {
      throw new Error('Invalid number of vertices’ parameters')
    }

    const vertexBuffer = gl.createBuffer()
    gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer)
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertices), gl.STATIC_DRAW)

    const positionAttribLocation = gl.getAttribLocation(this.program, vertexShaderRaw.attributes.vertPosition)
    gl.vertexAttribPointer(
      positionAttribLocation, // the vertex position attrib
      2, // 2D
      gl.FLOAT,
      false,
      2 * Float32Array.BYTES_PER_ELEMENT,
      0 // offset equal to zero, because we’re not expecting any additional information in the given array
    )
    gl.enableVertexAttribArray(positionAttribLocation)

    gl.useProgram(this.program)
    const colourUniformLocation = gl.getUniformLocation(this.program, fragmentShaderRaw.uniforms.colour)
    gl.uniform3fv(colourUniformLocation, new Float32Array(colour))
    gl.drawArrays(drawType, 0, verticesCount)
  }
}
use super::*;

/// Lifecycle hooks for content stored inside a forest node.
///
/// Both hooks have empty default implementations, so implementors only
/// override the events they care about.
pub trait ForestNodeContent {
    /// Hook handing this content a handle to its own node
    /// (`ForestNodeSelf<Self>`). Default: no-op.
    /// NOTE(review): presumably invoked when the content is attached to a
    /// node — confirm against the forest implementation.
    #[inline]
    fn associate_node(&mut self, _node: ForestNodeSelf<Self>) where Self: Sized {
        // default: ignore the node handle
    }
    /// Hook fired after the node's parent changes. Default: no-op.
    #[inline]
    fn parent_node_changed(&mut self) where Self: Sized {
        // default: nothing to update
    }
}
def consStep(self, step):
    """Return a single-column DataFrame for one consensus step.

    Filters the cleaned consensus log (``self.consClean``) down to rows
    whose ``msg`` equals ``step.value``; for Propose/Prevote/Precommit
    only round-0 rows are kept. The result is indexed by the ``H`` column
    values and carries one ``at`` column.
    """
    selected = self.consClean
    selected = selected[selected['msg'] == step.value]
    # Propose/Prevote/Precommit can repeat every round; keep round 0 only.
    if step in (Step.Propose, Step.Prevote, Step.Precommit):
        selected = selected[selected['R'] == 0]
    return pd.DataFrame({'at': selected['at'].values},
                        index=selected['H'].values)
// Automatically generated code. Edit at your own risk! // Generated by bali2jak v2002.09.03. package mdb; import minidb.je.ExecuteHelpers; import java.io.File; import static minidb.je.ExecuteHelpers.myDbEnvPath; public class CloseCmd extends Close { final public static int ARG_LENGTH = 1 /* Kludge! */ ; final public static int TOK_LENGTH = 2 ; public void execute () { super.execute(); myDbEnvPath = new File("JEDB"); ExecuteHelpers.prepareDB(); } public AstToken getCLOSE () { return (AstToken) tok [0] ; } public AstToken getSEMI () { return (AstToken) tok [1] ; } public boolean[] printorder () { return new boolean[] {true, true} ; } public CloseCmd setParms (AstToken tok0, AstToken tok1) { arg = new AstNode [ARG_LENGTH] ; tok = new AstTokenInterface [TOK_LENGTH] ; tok [0] = tok0 ; /* CLOSE */ tok [1] = tok1 ; /* SEMI */ InitChildren () ; return (CloseCmd) this ; } }
/**
 * Get the status of a specific Order
 *
 * Retrieve the current status of an order being processed
 *
 * @throws ApiException
 *          if the Api call fails
 */
@Test
public void vendorOrdersOrderIdStatusGetTest() throws ApiException {
    // A null order id must be rejected client-side before any request is made.
    final String orderId = null;

    assertThrows(IllegalArgumentException.class,
            () -> api.vendorOrdersOrderIdStatusGet(orderId));
}
// TestGenerateDockerfileEnvBuildArgs ensures environment variables are used as // build args. func TestGenerateDockerfileEnvBuildArgs(t *testing.T) { t.Parallel() dockerfile := filepath.Join(generateDockerBaseDir, "args", "buildargs", "Dockerfile") envPath := filepath.Join(generateDockerBaseDir, "args", "buildargs", ".env") flags := []string{fmt.Sprintf("--dockerfiles=%s", dockerfile), fmt.Sprintf("--env-file=%s", envPath), fmt.Sprintf("--dockerfile-env-build-args")} tOs := []generateTestObject{ { filePath: filepath.ToSlash(dockerfile), wantImages: []generate.DockerfileImage{ {Image: &generate.Image{Name: "busybox", Tag: "latest"}}, }, testFn: checkGenerateDockerfile, }, } var shouldFail bool testGenerate(t, flags, tOs, shouldFail) }
/**
 * Determine the maximum ciphertext expansion for a given enctype.
 *
 * @param[in] k0	The rxgk key to be used.
 * @param[out] len_out	The maximum ciphertext expansion, in octets.
 * @return rxgk error codes.
 */
afs_int32
rxgk_cipher_expansion(rxgk_key k0, afs_uint32 *len_out)
{
    krb5_context ctx = NULL;
    krb5_crypto crypto = NULL;
    krb5_enctype enctype;
    krb5_error_code ret;
    struct rxgk_keyblock *keyblock = key2keyblock(k0);
    size_t len;

    /* Default the output to zero so early exits leave a defined value. */
    *len_out = 0;
    enctype = deref_keyblock_enctype(&keyblock->key);
    ret = krb5_init_context(&ctx);
    if (ret != 0)
	goto done;
    ret = krb5_crypto_init(ctx, &keyblock->key, enctype, &crypto);
    if (ret != 0)
	goto done;
    /* Per-message overhead reported by krb5 for this crypto context. */
    len = krb5_crypto_overhead(ctx, crypto);
    *len_out = len;

 done:
    /* Destroy the crypto context before freeing the krb5 context it uses. */
    if (crypto != NULL)
	krb5_crypto_destroy(ctx, crypto);
    if (ctx != NULL) {
	krb5_free_context(ctx);
    }
    /* ktor() maps the krb5 error code into the rxgk error space. */
    return ktor(ret);
}
package org.elasticsearch.plugin;

import com.carrotsearch.randomizedtesting.RandomizedRunner;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.IOUtils;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.ScriptQueryBuilder;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
import org.elasticsearch.plugin.index.highlighters.SimSubHighlighter;
import org.elasticsearch.plugin.index.scripts.similarity.SimilarityScript;
import org.elasticsearch.plugin.index.scripts.similarity.SimilarityScriptFactory;
import org.elasticsearch.plugin.index.scripts.substructure.SubstructureScript;
import org.elasticsearch.plugin.index.scripts.substructure.SubstructureScriptFactory;
import org.elasticsearch.plugin.model.SimSubItem;
import org.elasticsearch.plugin.services.SimSubSearchPluginStub;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.io.IOException;
import java.util.*;
import java.util.stream.Collectors;

import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST;

/**
 * Project: sim-sub-es-plugin
 * Description: Integration tests for the similarity / substructure search
 * plugin, run against a single-node embedded Elasticsearch cluster.
 * Date: 7/23/2017
 *
 * @author Dmitriy_Chirkov
 * @since 1.8
 */
@RunWith(RandomizedRunner.class)
@ESIntegTestCase.ClusterScope(scope = TEST, numDataNodes = 1)
public class SimSubSearchTest extends ESIntegTestCase {

    // Test index name and mapping type used by every test.
    public static final String SS_TEST = "ss_test";
    public static final String SS_STRING = "ss_string";
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
    // Index mapping/settings fixtures, loaded once from the test classpath.
    private static String mapping;
    private static String settings;

    /** Load the es.mapping.json / es.settings.json fixtures once per JVM. */
    @BeforeClass
    public static void setup() {
        ClassLoader classLoader = SimSubSearchTest.class.getClassLoader();
        try {
            mapping = IOUtils.toString(classLoader.getResourceAsStream("es.mapping.json"), "UTF-8");
            settings = IOUtils.toString(classLoader.getResourceAsStream("es.settings.json"), "UTF-8");
        } catch (IOException e) {
            // NOTE(review): swallowed here; a load failure surfaces later as
            // null settings/mapping in setUp().
            e.printStackTrace();
        }
    }

    /** Install the plugin under test into the embedded cluster nodes. */
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.singleton(SimSubSearchPluginStub.class);
    }

    /** Create the test index with the fixture settings/mapping if absent. */
    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        if (!indexExists(SS_TEST)) {
            CreateIndexRequestBuilder builder = prepareCreate(SS_TEST);
            builder.setSettings(settings, XContentType.JSON);
            builder.addMapping(SS_STRING, mapping, XContentType.JSON);
            CreateIndexResponse indexResponse = builder.execute().actionGet();
            Assert.assertNotNull(indexResponse);
        }
    }

    /**
     * Similarity search: index two items, run a function-score query whose
     * score is produced by the native "tanimoto" similarity script, and
     * check that only the sufficiently similar document (min score 80)
     * survives, with the expected score.
     */
    @Test
    public void testSimilaritySearch() throws Exception {
        SimSubItem item1 = new SimSubItem();
        item1.setId(1L);
        item1.setItem("first item");
        SimSubItem item2 = new SimSubItem();
        item2.setId(2L);
        item2.setItem("second item");
        List<SimSubItem> items = Arrays.asList(item1, item2);
        // Index both documents, then verify they are retrievable by id.
        for (SimSubItem item : items) {
            IndexResponse response = index(SS_TEST, SS_STRING, String.valueOf(item.getId()), getSource(item));
            Assert.assertNotNull(response);
        }
        for (SimSubItem item : items) {
            GetResponse response = get(SS_TEST, SS_STRING, String.valueOf(item.getId()));
            Assert.assertNotNull(response);
        }

        String queryValue = "irst item";
        QueryBuilder query = QueryBuilders.matchQuery("item.sim", queryValue);
        // Script parameters: field, similarity type, and the query string.
        Map<String, Object> params = new HashMap<>();
        params.put(SimilarityScriptFactory.FIELD, "item");
        params.put(SimilarityScriptFactory.TYPE, "tanimoto");
        params.put(SimilarityScriptFactory.QUERY, queryValue);
        Script script = new Script(ScriptType.INLINE, "native", SimilarityScript.NAME, params);
        FunctionScoreQueryBuilder scoreQueryBuilder = QueryBuilders
                .functionScoreQuery(query, ScoreFunctionBuilders.scriptFunction(script));
        // REPLACE: the script score replaces the query score entirely.
        scoreQueryBuilder.boostMode(CombineFunction.REPLACE);
        scoreQueryBuilder.setMinScore(80);

        SearchRequestBuilder requestBuilder = client().prepareSearch(SS_TEST).setTypes(SS_STRING);
        requestBuilder.setQuery(scoreQueryBuilder);
        SearchResponse searchResponse = requestBuilder.get();
        Assert.assertNotNull(searchResponse);
        SearchHits hits = searchResponse.getHits();
        Assert.assertTrue(hits.getTotalHits() == 1);
        Assert.assertTrue(hits.getMaxScore() == 87.5F);
        Assert.assertEquals("first item", hits.getAt(0).getSource().get("item"));
    }

    /**
     * Substructure search: index two items, then find "world" both via the
     * analyzer-backed "item.sub" field and via the native substructure
     * script query, asserting identical highlighted output each time.
     */
    @Test
    public void testSubstructureSearch() throws Exception {
        SimSubItem item1 = new SimSubItem();
        item1.setId(3L);
        item1.setItem("hello world");
        SimSubItem item2 = new SimSubItem();
        item2.setId(4L);
        item2.setItem("hello application");
        List<SimSubItem> items = Arrays.asList(item1, item2);
        for (SimSubItem item : items) {
            IndexResponse response = index(SS_TEST, SS_STRING, String.valueOf(item.getId()), getSource(item));
            Assert.assertNotNull(response);
        }
        for (SimSubItem item : items) {
            GetResponse response = get(SS_TEST, SS_STRING, String.valueOf(item.getId()));
            Assert.assertNotNull(response);
        }

        String queryValue = "world";

        /* Init highlighter */
        HighlightBuilder highlightBuilder = new HighlightBuilder();
        highlightBuilder.field("item");
        highlightBuilder.highlighterType(SimSubHighlighter.NAME);
        Map<String, Object> options = new HashMap<>();
        options.put(SimSubHighlighter.QUERY_OPTION, queryValue);
        options.put(SimSubHighlighter.START_TAG, "<hi>");
        options.put(SimSubHighlighter.END_TAG, "</hi>");
        highlightBuilder.options(options);

        /* Substructure search by tokenizer */
        QueryBuilder query = QueryBuilders.matchQuery("item.sub", queryValue).operator(Operator.AND);
        SearchRequestBuilder requestBuilder = client().prepareSearch(SS_TEST).setTypes(SS_STRING);
        requestBuilder.setQuery(query);
        requestBuilder.highlighter(highlightBuilder);
        SearchResponse searchResponse = requestBuilder.get();
        Assert.assertNotNull(searchResponse);
        SearchHits hits = searchResponse.getHits();
        Assert.assertTrue(hits.getTotalHits() == 1);
        Map<String, HighlightField> fields = hits.getAt(0).getHighlightFields();
        Assert.assertNotNull(fields);
        Text[] fragments = fields.get("item").getFragments();
        String highlighted = Arrays.asList(fragments).stream().map(Text::string).collect(Collectors.joining(""));
        Assert.assertEquals("hello <hi>world</hi>", highlighted);

        /* Substructure search by script */
        Map<String, Object> params = new HashMap<>();
        params.put(SubstructureScriptFactory.FIELD, "item");
        params.put(SubstructureScriptFactory.QUERY, queryValue);
        Script script = new Script(ScriptType.INLINE, "native", SubstructureScript.NAME, params);
        ScriptQueryBuilder scriptQuery = QueryBuilders.scriptQuery(script);
        query = QueryBuilders.boolQuery().must(scriptQuery);
        SearchRequestBuilder scriptRequestBuilder = client().prepareSearch(SS_TEST).setTypes(SS_STRING);
        scriptRequestBuilder.setQuery(query);
        scriptRequestBuilder.highlighter(highlightBuilder);
        searchResponse = scriptRequestBuilder.get();
        Assert.assertNotNull(searchResponse);
        hits = searchResponse.getHits();
        Assert.assertTrue(hits.getTotalHits() == 1);
        fields = hits.getAt(0).getHighlightFields();
        Assert.assertNotNull(fields);
        fragments = fields.get("item").getFragments();
        highlighted = Arrays.asList(fragments).stream().map(Text::string).collect(Collectors.joining(""));
        Assert.assertEquals("hello <hi>world</hi>", highlighted);
    }

    /** Serialize an item to its JSON source for indexing. */
    private String getSource(SimSubItem object) {
        try {
            return OBJECT_MAPPER.writerFor(object.getClass()).writeValueAsString(object);
        } catch (JsonProcessingException e) {
            throw new IllegalStateException("Cannot build source");
        }
    }
}
"""
Utilities for building GraphQL queries and arguments from Python objects,
plus gzip+base64 compression helpers for payloads.
"""
import base64
import gzip
import json
import textwrap
import uuid
from collections.abc import KeysView, ValuesView
from typing import Any

from box import Box


def lowercase_first_letter(s: str) -> str:
    """
    Given a string, returns that string with a lowercase first letter
    """
    if s:
        return s[0].lower() + s[1:]
    return s


class GraphQLResult(Box):
    # Pretty-print as JSON when possible; fall back to the plain dict repr
    # for values JSON can't serialize.
    def __repr__(self) -> str:
        try:
            return json.dumps(self, indent=4)
        except TypeError:
            return repr(self.to_dict())


class EnumValue:
    """
    When parsing GraphQL arguments, strings can be wrapped in this class to be rendered as
    enum values, without quotation marks.

    Args:
        - value (str): the value that should be represented as an enum value

    """

    def __init__(self, value: str):
        self.value = value

    def __str__(self) -> str:
        return self.value


def LiteralSetValue(value: list) -> str:
    """
    When parsing GraphQL arguments, `LiteralSetValue` renders strings as literal set values,
    without internal quotation marks.

    For example: "{a, b, c}"

    Args:
        - value (list): the value that should be represented as a literal set
    """
    return "{" + ", ".join(v for v in value) + "}"


class GQLObject:
    """
    Helper object for building GraphQL queries.
    """

    def __init__(self, name: str = None, _arguments: str = None):
        # name defaults to the class name with a lowercased first letter
        self.__name = name or lowercase_first_letter(type(self).__name__)
        self.__arguments = _arguments

    def __call__(self, arguments: str) -> "GQLObject":
        # Calling an instance returns a copy of itself carrying the arguments.
        return type(self)(name=self.__name, _arguments=arguments)

    def __repr__(self) -> str:
        return '<GQL: "{name}">'.format(name=self.__name)

    def __str__(self) -> str:
        if self.__arguments:
            return with_args(self.__name, self.__arguments)
        return self.__name


def parse_graphql(document: Any) -> str:
    """
    Parses a document into a GraphQL-compliant query string.

    Documents can be a mix of `strings`, `dicts`, `lists` (or other sequences), and
    `GQLObjects`.

    The parser attempts to maintain the form of the Python objects in the resulting GQL query.

    For example:
    ```
    query = parse_graphql({
        'query': {
            'books(published: {gt: 1990})': {
                'title'
            },
            'authors': [
                'name',
                {'books': {
                    'title'
                }}
            ]
        }
    })
    ```
    results in:
    ```
    query {
        books(published: {gt: 1990}) {
            title
        }
        authors {
            name
            books {
                title
            }
        }
    }
    ```

    For convenience, if a dictionary key is either True or Ellipsis(...),
    it is ignored and the key alone is used as a field name.

    ```python
    {'query':{
        'books': {
            'id': True,
            'name': ...,
            'author': {
                'id',
                'name',
            }
        }
    }}
    ```

    is equivalent to:

    ```python
    {'query':{
        'books': [
            'id',
            'name',
            {'author': {
                'id',
                'name',
            }}
        ]
    }}
    ```

    Args:
        - document (Any): A collection of Python objects complying with the general shape
            of a GraphQL query. Generally, this will consist of (at least) a dictionary, but
            also sequences and `GQLObjects`.

    Returns:
        - str: a GraphQL query compiled from the provided Python structures.

    Raises:
        - TypeError: if the user provided a `GQLObject` class, rather than an instance.
    """
    delimiter = "    "
    parsed = _parse_graphql_inner(document, delimiter=delimiter)
    # collapse indentation that would precede a closing brace
    parsed = parsed.replace(delimiter + "}", "}")
    parsed = textwrap.dedent(parsed).strip()
    return parsed


def _parse_graphql_inner(document: Any, delimiter: str) -> str:
    """
    Inner loop function of for `parse_graphql`.
    """
    if isinstance(document, (tuple, list, set, KeysView, ValuesView)):
        return "\n".join(
            [_parse_graphql_inner(item, delimiter=delimiter) for item in document]
        )
    elif isinstance(document, dict):
        result = []
        for key, value in document.items():
            # True / Ellipsis values mean "use the key alone as a field name"
            if value in (True, Ellipsis):
                result.append(key)
            else:
                result.append(
                    "{key} {{\n{value}\n}}".format(
                        key=key, value=_parse_graphql_inner(value, delimiter=delimiter)
                    )
                )
        return _parse_graphql_inner(result, delimiter=delimiter)
    elif isinstance(document, type) and issubclass(document, GQLObject):
        raise TypeError(
            'It looks like you included a `GQLObject` class ("{name}") '
            "in your document. Did you mean to use an instance of that type?".format(
                name=document.__name__
            )
        )
    else:
        # leaf value: indent any embedded newlines to the current level
        return str(document).replace("\n", "\n" + delimiter)


def parse_graphql_arguments(arguments: Any) -> str:
    """
    Parses a dictionary of GraphQL arguments, returning a GraphQL-compliant string
    representation. If a string is passed, it is returned without modification.

    This parser makes a few adjustments to the dictionary's usual string representation:
        - `'` around keys are removed
        - spaces added around curly braces
        - leading and lagging braces are removed
        - `True` becomes `true`, `False` becomes `false`, and `None` becomes `null`

    Args:
        - arguments (Any): an object (usually a dictionary) representing the GraphQL arguments

    Returns:
        - str: a string representing the parsed GraphQL arguments
    """
    parsed = _parse_arguments_inner(arguments)
    # remove '{ ' and ' }' from front and end of parsed dict
    if isinstance(arguments, dict):
        parsed = parsed[2:-2]
    # remove '"' and '"' from front and end of parsed str
    elif isinstance(arguments, str):
        parsed = parsed[1:-1]
    return parsed


def _parse_arguments_inner(arguments: Any) -> str:
    # Recursively render Python values in GraphQL argument syntax.
    if isinstance(arguments, dict):
        # empty dicts are valid GQL arguments
        if len(arguments) == 0:
            return "{}"

        formatted = []
        for key, value in arguments.items():
            formatted.append(
                "{key}: {value}".format(key=key, value=_parse_arguments_inner(value))
            )
        return "{ " + ", ".join(formatted) + " }"
    elif isinstance(arguments, (list, tuple, set, KeysView, ValuesView)):
        return "[" + ", ".join([_parse_arguments_inner(a) for a in arguments]) + "]"
    elif isinstance(arguments, str):
        # json.dumps handles quoting and escaping
        return json.dumps(arguments)
    elif arguments is True:
        return "true"
    elif arguments is False:
        return "false"
    elif arguments is None:
        return "null"
    elif isinstance(arguments, uuid.UUID):
        return _parse_arguments_inner(str(arguments))
    # anything else (ints, floats, EnumValue, ...) renders via str()
    return str(arguments)


def with_args(field: Any, arguments: Any) -> str:
    """
    Given Python objects representing a field name and arguments, formats them as a single
    GraphQL compatible string.

    Example:

    ```
    query = parse_graphql({
        'query': {
            with_args("task", {"where": {"id": 3}}): {
                "id"
            }
        }
    })

    assert query == '''
        query {
            task(where: {id: 3}) {
                id
            }
        }
        '''
    ```

    Args:
        - field (Any): the GraphQL field that will be supplied with arguments
        - arguments (Any): the arguments to be parsed and supplied to the field

    Returns:
        - str: the parsed field and arguments
    """
    parsed_field = parse_graphql(field)
    parsed_arguments = parse_graphql_arguments(arguments)
    return "{field}({arguments})".format(field=parsed_field, arguments=parsed_arguments)


def compress(input: Any) -> str:
    """
    Convenience function for compressing something before sending
    it to Cloud. Converts to string, encodes, compresses,
    encodes again using b64, and decodes.

    Args:
        - input (Any): the dictionary to be compressed

    Returns:
        - str: The string resulting from the compression
    """
    return base64.b64encode(gzip.compress(json.dumps(input).encode())).decode()


def decompress(string: str) -> Any:
    """
    Convenience function for decompressing a string that's been compressed.
    Base64-decodes the string, decompresses it, decodes the bytes, and
    JSON-loads the result.

    Args:
        - string (str): the string to decompress

    Returns:
        - Any: The object resulting from the decompression
    """
    return json.loads(gzip.decompress(base64.b64decode(string)).decode())
use crate::commands::prelude::*; use requester::google::{GoogleSearchData, GoogleScraper as _}; use serenity::builder::CreateEmbed; use crate::traits::Paginator; #[command] #[aliases("g")] #[min_args(1)] /// Search gooogle async fn search(ctx: &Context, msg: &Message, args: Args) -> CommandResult { let text = args.rest().to_owned(); let data = get_data::<ReqwestClient>(&ctx) .await .unwrap() .google_search(&text) .await?; let color = crate::read_config().await.color.information; (Search { data, text, color }).pagination(ctx, msg).await?; Ok(()) } struct Search { data: Vec<GoogleSearchData>, text: String, color: u64, } impl Paginator for Search { fn append_page(&self, page: core::num::NonZeroUsize, embed: &mut CreateEmbed) { let data = &self.data[page.get() - 1]; let description = format!("{}\n[[Link]]({})", data.description, data.link); embed.title(format!("Result for `{}`", self.text)); embed.field(&data.title, description, false); embed.color(self.color); embed.footer(|f| f.text(format!("Result {} / {}", page, self.data.len()))); } fn total_pages(&self) -> Option<usize> { Some(self.data.len()) } }
Patients with HIV and coronary disease: are we meeting national guidelines? Cardiovascular disease (CVD) has a higher incidence in patients with HIV infection. This study sought to determine whether HIV-infected patients with established CVD were being managed according to national guidelines. Data were collected from Australian general practitioners for 77 HIV-infected patients with a median age of 59 (range 54-64). There was good adherence to guidelines with regard to anti-platelet (84%; n=65; 95% confidence interval (CI) 74-92%) and statin therapy (97%; n=75; 95% CI 91-100%), despite a failure to meet cholesterol targets, with only 31% (n=24; 95% CI 21-42%) of the cohort meeting low-density lipoprotein target values. Similarly, there was limited adherence to guidelines regarding the prescriptions of medications for those with established hypertension (66%; n=51; 95% CI 55-77%), body mass index targets met (40%; n=31; 95% CI 29-52%), and depression screening (32%; n=25; 95% CI 22-44%). This Australian audit provides insight into adherence to guidelines for individuals with CVD and HIV, suggesting that current screening and management practices for these patients fall short of guidelines, particularly in relation to cholesterol management.
The report released on International Migrants Day is critical of Qatar for failing to deliver changes to labour rights or compliance, and warns construction companies, hotels, retail chains and UK and US universities of the cost of doing business in a slave state. “Every CEO operating in Qatar is aware that their profits are driven by appallingly low wage levels – wages that are often based on a system of racial discrimination – and that these profits risk safety, resulting in indefensible workplace injuries, illnesses and deaths,” said Sharan Burrow, General Secretary, International Trade Union Confederation. Using new data uncovered in Qatar’s own government statistics, the ITUC estimates 7,000 workers will die before a ball is kicked in the 2022 World Cup. “Qatar still refuses to make public the actual death toll of migrant workers or the real causes of death. The vast majority of the workers are working to deliver the huge World Cup infrastructure programme by the 2022 deadline. By analysing Qatar’s own statistics and health reports over the past three years, previous reports of 4,000 workers dying by 2022 are a woeful underestimate. The real fatality rate is over 1,000 per year, meaning that 7,000 workers will die by 2022. Qatar hospital emergency departments are receiving 2,800 patients per day – 20% more from 2013 to 2014,” said Sharan Burrow. Estimates for spending on infrastructure for the 2022 World Cup are as high as $220 billion, involving major international construction companies profiled in the ITUC report from Australia, Europe and the USA including ACS (Spain), Bechtel (USA), Besix (Belgium), Bouygues (France), Carillion (UK), CCC (Greece), Ch2M Hill (USA), CIMIC (Australia), Hochtief (Germany), Porr (Austria) and QDVC (France). “This crisis goes beyond the borders of Qatar, involving companies across the world who are profiting from the kafala labour system which enslaves workers. 
The Khalifa Stadium project, a showcase World Cup venue, pays workers $1.50 an hour. It is estimated that more than 40 percent of the world’s top 250 international construction contractors are participating in projects in Qatar. Shareholders with investments in fourteen different stock exchanges are exposed to the profits using modern day slavery under the kafala system,” said Sharan Burrow. While the government continues to refuse legal reform, the ITUC is calling on companies there to: Give workers exit visas immediately and without condition, and allow workers to transfer to another job; Allow workers a collective voice to raise complaints and negotiate together with their employer; Establish a single minimum living wage rate for all migrants; In the absence of effective government labour inspection or a labour court, ensure fair and effective inspection, compliance and dispute resolution within their operations including subcontractors. Since the ITUC released its special report The Case Against Qatar in March 2014, nothing has changed for workers in Qatar. The Government has failed to bring its laws in line with international standards and the much promised labour law, which will not come into effect until 2017, adds a new layer of repression for migrant workers. “Qatar’s labour laws are ruinous for workers. All the government has done is to codify slavery. 
Employers can now even lend out workers to another employer without the workers’ consent for up to a year.” The ITUC has called on the Qatar authorities to take immediate steps: End the kafala system starting with the elimination of the exit visa; Allow worker representation – a collective voice with elected representatives and workplace committees; Employment contracts through direct employment or large, reputable, recruitment companies; A national minimum wage for all workers, and collective bargaining rights; Proper labour inspection and grievance mechanisms, inclusive of contractors, and an independent labour court. The ITUC is also demanding that FIFA, which has failed to exert any real pressure on Qatar, put workers’ rights at the centre of 2022 World Cup preparations. ENDS For interviews with Sharan Burrow, contact Gemma Swart [email protected] +32 479 06 41 63 Read the report: Qatar: Profit and Loss. Counting the cost of modern day slavery in Qatar: What price freedom? Watch the ITUC multi-media investigation: Qatar Exposed
AUSTIN — Two of Texas’ most powerful business leaders have mounted a late-hour push to derail an anti-illegal-immigration bill in the Legislature’s special session. Houston homebuilder Bob Perry and HEB/Central Market grocery chieftain Charles Butt applied pressure Thursday to members of a House panel to block the “sanctuary cities” measure that would let police officers ask anyone they stop about their residency status. The Senate-passed bill faces a crucial hearing Monday before the House State Affairs Committee. The special session must end by Wednesday night. On Thursday, Austin superlobbyist Neal “Buddy” Jones Jr. of HillCo Partners, which represents Perry Homes and HEB, urged committee members not to pass the bill. “Just want to tell you that Charles Butt and Bob Perry have asked me to call every member of State Affairs and ask them not to pass the sanctuary city bill,” Jones wrote in an email to Rep. Pete Gallego, D-Alpine. House Democrats released the email to The Dallas Morning News. “They think it is very bad for Texas,” Jones said, encouraging Gallego, an opponent of the bill, to inform his committee colleagues “that these two giants of Texas business are concerned that this is taking Texas in the wrong direction.” Asked to elaborate on Perry’s and Butt’s concerns about the bill, Jones declined. House bill sponsor Rep. Burt Solomons of Carrollton said grassroots Republicans favor the measure, which he said merely would outlaw official interference with immigration law enforcement. “You shouldn’t get to pick and choose which federal laws you abide by,” he said. “These big businessmen all of a sudden think we shouldn’t have any type of sanctuary city legislation. Well, where were they for six months?” Gov. Rick Perry added the bill to the special session’s agenda, and Bob Perry — no relation — has been his biggest political contributor for many years. Bob Perry, owner of Perry Homes, and Butt, chairman and chief executive of HEB Grocery Co. 
LP, were ranked by Texans for Public Justice as No. 1 and No. 3, respectively, among givers to legislative candidates in the 2008 cycle. Perry forked over nearly $5.1 million, while Butt gave nearly $2.2 million. The bill, which Democrats say could lead to racial profiling, would prevent cities or counties from having a policy to prevent police from questioning people they detain or arrest about their immigration status. If a locality tried to limit such questioning, any citizen could file a complaint with the attorney general, which could lead to a cutoff of state funding. The bill also would require those obtaining driver’s licenses to provide proof of citizenship or a legal visa.
import tw, { styled, css } from 'twin.macro' import { Modal } from '@QCFE/qingcloud-portal-ui' interface IDarkModal { orient?: 'fullright' | 'center' dark?: boolean noBorder?: boolean } export const ModalWrapper = styled(Modal)( ({ orient, noBorder = false }: IDarkModal) => [ orient === 'fullright' && css` .modal-card { ${tw`fixed top-0 right-0 bottom-0 max-h-full`} .modal-card-body { ${tw`p-0`} } .modal-card-head, .modal-card-foot { ${tw`rounded-none`} } } `, noBorder && css` .modal-content { > .modal-card-head, > .modal-card-foot { ${tw`border-none`} } > .modal-card-body { ${tw`px-8`} } } `, ] ) export const DarkModal = ({ ...props }) => <ModalWrapper {...props} dark /> export default ModalWrapper
"""Scoring of stored website-security scan results (results.db)."""

import sqlite3
from contextlib import closing
import json

import validators


def urls_without_scheme(src_list):
    """
    Return True if any entry in src_list is a bare URL/domain without a
    scheme, False otherwise.
    """
    return any(
        validators.domain(
            item.replace('*.', '').replace(':*', '').split(':', 1)[0].split('/', 1)[0]
        )
        for item in src_list
    )


def csp_src_check(src_list, redirected_scheme):
    """
    Score a CSP source list.

    Bad: = 0
        * unsafe-inline unsafe-eval unsafe-hashes data: http: http:// - urls
    Moderate: = 0.5
        https: URLs whitelist strict-dynamic
    Good: = 1
        'self' hash/nonce none
    """
    # Rows come back from sqlite as 1-tuples; unwrap to plain strings.
    src_list = [item[0] for item in src_list]
    if ((urls_without_scheme(src_list) and redirected_scheme == 'http')
            or "'*'" in src_list
            or "'unsafe-inline'" in src_list
            or "'unsafe-eval'" in src_list
            or "'unsafe-hashes'" in src_list
            or "'http:'" in src_list
            or any('data:' in substring for substring in src_list)
            or any('http://' in substring for substring in src_list)):
        return 0
    elif ((urls_without_scheme(src_list) and redirected_scheme == 'https')
            or "'https:'" in src_list
            or "'strict-dynamic'" in src_list
            or any('https://' in substring for substring in src_list)):
        return 0.5
    elif ("'self'" in src_list
            or "'none'" in src_list
            or any('nonce-' in substring for substring in src_list)
            or any('sha256-' in substring for substring in src_list)
            or any('sha384-' in substring for substring in src_list)
            or any('sha512-' in substring for substring in src_list)):
        return 1
    else:
        return 0


def csp_frame_ancestors_check(src_list, redirected_scheme):
    """
    Score a CSP frame-ancestors source list.

    Bad: = 0
        * http: http:// - urls
    Moderate: = 0.5
        https:
    Good: = 1
        https:// - urls none self
    """
    src_list = [item[0] for item in src_list]
    if ((urls_without_scheme(src_list) and redirected_scheme == 'http')
            or "'*'" in src_list
            or "'http:'" in src_list
            or any('http://' in substring for substring in src_list)):
        return 0
    elif "'https:'" in src_list:
        return 0.5
    # BUGFIX: was `or "'self'"`, a truthy string literal that made this
    # branch unconditionally true; it must test membership in src_list.
    elif ((urls_without_scheme(src_list) and redirected_scheme == 'https')
            or "'none'" in src_list
            or "'self'" in src_list
            or any('https://' in substring for substring in src_list)):
        return 1
    else:
        return 0


def evaluate_csp(website_id, test_weights):
    """
    Score every CSP directive stored for a website.

    Fetch-style ("-src") directives fall back to default-src when not
    explicitly defined; frame-src falls back to child-src, which in turn
    falls back to default-src.  The remaining directives (base-uri,
    form-action, frame-ancestors, report-to/uri, sandbox,
    upgrade-insecure-requests) have no fallback and score full marks for
    mere presence where appropriate.

    Returns a dict mapping directive name -> weighted score rounded to
    4 decimal places.
    """
    score_dict = {'default-src': 0, 'child-src': 0, 'connect-src': 0,
                  'font-src': 0, 'frame-src': 0, 'img-src': 0,
                  'manifest-src': 0, 'media-src': 0, 'object-src': 0,
                  'script-src': 0, 'style-src': 0, 'worker-src': 0,
                  'report-to/uri': 0, 'base-uri': 0, 'form-action': 0,
                  'frame-ancestors': 0, 'sandbox': 0,
                  'upgrade-insecure-requests': 0}
    with closing(sqlite3.connect("results.db")) as connection:
        with closing(connection.cursor()) as cursor:
            # Order matters: default-src and child-src must be scored before
            # the directives that fall back to them.
            csp_src_directives = ["default-src", "child-src", "connect-src",
                                  "font-src", "frame-src", "img-src",
                                  "manifest-src", "media-src", "object-src",
                                  "script-src", "style-src", "worker-src"]
            csp_default_directive_score = 0
            csp_child_src_directive_score = 0
            cursor.execute("SELECT scheme FROM website WHERE id = ?", (website_id,))
            redirected_scheme = cursor.fetchone()
            if redirected_scheme is not None:
                redirected_scheme = redirected_scheme[0]
            else:
                # Unknown website: assume http (the stricter case to score).
                redirected_scheme = "http"
            for directive in csp_src_directives:
                cursor.execute(
                    "SELECT csp_data FROM csp WHERE website_id = ? AND csp_type = ?",
                    (website_id, directive))
                csp_data = cursor.fetchall()
                if len(csp_data) > 0:
                    result = csp_src_check(csp_data, redirected_scheme)
                    if directive == "default-src":
                        csp_default_directive_score = result
                    elif directive == "child-src":
                        csp_child_src_directive_score = result
                    score_dict[directive] = round(result * test_weights[directive], 4)
                elif directive == "frame-src":
                    # frame-src falls back to child-src.
                    score_dict[directive] = round(
                        csp_child_src_directive_score * test_weights[directive], 4)
                elif directive == "child-src":
                    # child-src falls back to default-src.
                    score_dict[directive] = round(
                        csp_default_directive_score * test_weights[directive], 4)
                    csp_child_src_directive_score = csp_default_directive_score
                elif directive != "default-src":
                    # All other -src directives fall back to default-src.
                    score_dict[directive] = round(
                        csp_default_directive_score * test_weights[directive], 4)
            csp_directives = ["base-uri", "form-action", "frame-ancestors",
                              "report-to", "report-uri", "sandbox",
                              "upgrade-insecure-requests"]
            for directive in csp_directives:
                cursor.execute(
                    "SELECT csp_data FROM csp WHERE website_id = ? AND csp_type = ?",
                    (website_id, directive))
                csp_data = cursor.fetchall()
                if len(csp_data) > 0:
                    result = 0
                    if directive == 'base-uri' or directive == 'form-action':
                        result = csp_src_check(csp_data, redirected_scheme)
                    elif directive == 'frame-ancestors':
                        result = csp_frame_ancestors_check(csp_data, redirected_scheme)
                    elif directive in ('report-to', 'report-uri', 'sandbox',
                                       'upgrade-insecure-requests'):
                        # Presence alone earns full marks for these.
                        result = 1
                    if directive == 'report-to' or directive == 'report-uri':
                        score_dict['report-to/uri'] = round(
                            result * test_weights['report-to/uri'], 4)
                    else:
                        score_dict[directive] = round(
                            result * test_weights[directive], 4)
    return score_dict


def evaluate_sri(website_id, test_weights):
    """
    Score Subresource Integrity as the ratio of external resources carrying
    an SRI attribute to all external resources; full marks if there are none.
    """
    score_dict = {'sri': 0}
    with closing(sqlite3.connect("results.db")) as connection:
        with closing(connection.cursor()) as cursor:
            cursor.execute(
                "SELECT nr_external_tags, nr_external_tags_SRI FROM website WHERE id = ?",
                (website_id,))
            website_data = cursor.fetchone()
    if website_data is None:
        return score_dict
    if website_data[0] != 0:
        score_dict['sri'] = round(
            (website_data[1] / website_data[0]) * test_weights['sri'], 4)
    else:
        # No external resources at all: nothing to protect, full marks.
        score_dict['sri'] = round(1 * test_weights['sri'], 4)
    return score_dict


def evaluate_https(website_id, test_weights):
    """Score HTTPS support: full marks each for existing and being redirected to."""
    score_dict = {'redirect_to_https': 0, 'https_exists': 0}
    with closing(sqlite3.connect("results.db")) as connection:
        with closing(connection.cursor()) as cursor:
            cursor.execute("SELECT scheme, https_exists FROM website WHERE id = ?",
                           (website_id,))
            website_data = cursor.fetchone()
    if website_data is None:
        return score_dict
    if website_data[0] == 'https':
        score_dict['redirect_to_https'] = round(1 * test_weights['redirect_to_https'], 4)
    if website_data[1] == 1:
        score_dict['https_exists'] = round(1 * test_weights['https_exists'], 4)
    return score_dict


def evaluate_x_xss_protection(website_id, test_weights):
    """
    Score X-Xss-Protection: one score for the filter being enabled ("1"),
    another when mode=block is also set (the browser then refuses to render
    instead of sanitizing).
    """
    score_dict = {'xss_filter_enabled': 0, 'xss_filter_block_mode': 0}
    with closing(sqlite3.connect("results.db")) as connection:
        with closing(connection.cursor()) as cursor:
            cursor.execute(
                "SELECT headerdata FROM headers WHERE headertype = 'x-xss-protection' AND website_id = ?",
                (website_id,))
            website_data = cursor.fetchone()
    if website_data is None:
        return score_dict
    # If the header occurred multiple times, only the last occurrence counts.
    parts = website_data[0].split(',')[-1].split(';')
    # NOTE(review): a value like "1; mode=block" (with a space) will not
    # match 'mode=block' here since parts are not stripped — confirm the
    # collector normalizes whitespace.
    if len(parts) > 1:
        if parts[0] == '1' and parts[1].lower() == 'mode=block':
            score_dict['xss_filter_block_mode'] = round(
                1 * test_weights['xss_filter_block_mode'], 4)
    if len(parts) > 0:
        if parts[0] == '1':
            score_dict['xss_filter_enabled'] = round(
                1 * test_weights['xss_filter_enabled'], 4)
    return score_dict


def evaluate_hsts(website_id, test_weights):
    """
    Score Strict-Transport-Security:
      - max-age: 0 if < 6 months, half weight if >= 6 months and < 2 years,
        full weight if >= 2 years
      - includeSubDomains: full weight when present
      - preload: full weight when present AND includeSubDomains is set AND
        max-age >= 1 year (mirrors the preload-list submission requirements)
    """
    score_dict = {'max-age': 0, 'includesubdomains': 0, 'preload': 0}
    isd_set = False
    max_age = 0
    with closing(sqlite3.connect("results.db")) as connection:
        with closing(connection.cursor()) as cursor:
            db_variable = (website_id,)
            cursor.execute(
                "SELECT headerdata FROM headers WHERE headertype = 'strict-transport-security' AND website_id = ?;",
                db_variable)
            headerdata = cursor.fetchone()
            cursor.execute("SELECT scheme FROM website WHERE id = ?;", db_variable)
            scheme = cursor.fetchone()
    scheme = scheme[0]
    # HSTS only counts when the site is actually served over https.
    if headerdata is None or scheme == "http":
        return score_dict
    headerdata = headerdata[0]
    if ',' in headerdata:
        # Multiple header occurrences: only the last one applies.
        headerdata = headerdata.split(',')[-1]
    headerdata = headerdata.replace(' ', '')
    for directives in headerdata.split(';'):
        directives = str(directives.lower())
        if "max-age=" in directives:
            max_age = int(directives.replace("max-age=", ''))
            if max_age >= 15780000 and max_age < 63072000:  # 6 months .. 2 years
                score_dict['max-age'] = round((1 * test_weights['max-age']) / 2, 4)
            elif max_age >= 63072000:  # >= 2 years
                score_dict['max-age'] = round(1 * test_weights['max-age'], 4)
        if "includesubdomains" in directives:
            score_dict['includesubdomains'] = round(
                1 * test_weights['includesubdomains'], 4)
            isd_set = True
        if "preload" in directives and isd_set and max_age >= 31536000:  # >= 1 year
            score_dict['preload'] = round(1 * test_weights['preload'], 4)
    return score_dict


def evaluate_x_content_type_options(website_id, test_weights):
    """Score X-Content-Type-Options: full marks when set with the nosniff value."""
    score_dict = {'nosniff': 0}
    with closing(sqlite3.connect("results.db")) as connection:
        with closing(connection.cursor()) as cursor:
            cursor.execute(
                "SELECT headerdata FROM headers WHERE headertype = 'x-content-type-options' AND website_id = ?;",
                (website_id,))
            headerdata = cursor.fetchone()
    if headerdata is None:
        return score_dict
    if "nosniff" in headerdata[0]:
        score_dict['nosniff'] = round(1 * test_weights['nosniff'], 4)
    return score_dict


def evaluate_x_frame_options(website_id, test_weights):
    """Score X-Frame-Options: full marks for a deny or sameorigin value."""
    score_dict = {'set_secure': 0}
    with closing(sqlite3.connect("results.db")) as connection:
        with closing(connection.cursor()) as cursor:
            cursor.execute(
                "SELECT headerdata FROM headers WHERE headertype = 'x-frame-options' AND website_id = ?;",
                (website_id,))
            headerdata = cursor.fetchone()
    if headerdata is None:
        return score_dict
    headerdata = headerdata[0]
    if "deny" in headerdata.lower() or "sameorigin" in headerdata.lower():
        score_dict['set_secure'] = round(1 * test_weights['set_secure'], 4)
    return score_dict


def evaluate_set_cookie(website_id, test_weights):
    """
    Score cookie flags (HttpOnly, SameSite, Secure) across all Set-Cookie
    headers, keeping the scores of the worst-scoring cookie.  Sites that
    set no cookies keep full marks.
    """
    score_dict = {'httponly': round(1 * test_weights['httponly'], 4),
                  'samesite': round(1 * test_weights['samesite'], 4),
                  'secure': round(1 * test_weights['secure'], 4)}
    with closing(sqlite3.connect("results.db")) as connection:
        with closing(connection.cursor()) as cursor:
            cursor.execute(
                "SELECT headerdata FROM headers WHERE headertype = 'set-cookie' AND website_id = ?;",
                (website_id,))
            headerdata = cursor.fetchall()
    if headerdata is None:
        return score_dict
    total_score = 3
    for jdata in headerdata:
        # Start each cookie at full marks and subtract per missing flag.
        temp_score_dict = {'httponly': round(1 * test_weights['httponly'], 4),
                           'samesite': round(1 * test_weights['samesite'], 4),
                           'secure': round(1 * test_weights['secure'], 4)}
        temp_score = 3
        data_dict = json.loads(jdata[0])
        if not ("Httponly" in data_dict['_rest'] or "HttpOnly" in data_dict['_rest']):
            temp_score_dict['httponly'] = 0
            temp_score -= round(1 * test_weights['httponly'], 4)
        if "SameSite" not in data_dict['_rest']:
            temp_score_dict['samesite'] = 0
            temp_score -= round(1 * test_weights['samesite'], 4)
        elif data_dict['_rest']['SameSite'].lower() == "none":
            # SameSite=None is treated the same as not setting it.
            temp_score_dict['samesite'] = 0
            temp_score -= round(1 * test_weights['samesite'], 4)
        if data_dict['secure'] == False:
            temp_score_dict['secure'] = 0
            temp_score -= round(1 * test_weights['secure'], 4)
        if temp_score < total_score:
            total_score = temp_score
            score_dict = temp_score_dict.copy()
    return score_dict


def evaluate_referrer_policy(website_id, test_weights):
    """
    Score Referrer-Policy: half marks when the header is absent, zero for
    unsafe-url, full marks otherwise.
    """
    score_dict = {'refpolicy': 0}
    with closing(sqlite3.connect("results.db")) as connection:
        with closing(connection.cursor()) as cursor:
            cursor.execute(
                "SELECT headerdata FROM headers WHERE headertype = 'referrer-policy' AND website_id = ?;",
                (website_id,))
            headerdata = cursor.fetchone()
    if headerdata is None:
        # NOTE(review): this half-mark value is not rounded like the other
        # scores — confirm whether that is intentional.
        score_dict['refpolicy'] = 0.5 * test_weights['refpolicy']
        return score_dict
    headerdata = headerdata[0]
    if ',' in headerdata:
        # Multiple header occurrences: only the last one applies.
        headerdata = headerdata.split(',')[-1]
    if 'unsafe-url' not in headerdata:
        score_dict['refpolicy'] = round(1 * test_weights['refpolicy'], 4)
    return score_dict


# Dispatch table mapping request category names to evaluator functions.
_EVALUATORS = {
    "HTTPS": evaluate_https,
    "X-Xss-Protection": evaluate_x_xss_protection,
    "CSP": evaluate_csp,
    "SRI": evaluate_sri,
    "HSTS": evaluate_hsts,
    "X-Content-Type-Options": evaluate_x_content_type_options,
    "X-Frame-Options": evaluate_x_frame_options,
    "set-cookie": evaluate_set_cookie,
    "referrer-policy": evaluate_referrer_policy,
}


def get_score(json_string):
    """
    Evaluate every requested header category for a website.

    Args:
        json_string: JSON with 'website_id' and a 'headers' list, each entry
            carrying 'headertype' and its 'weights' dict.

    Returns:
        dict with 'website_id', per-category 'headers' scores, and the
        rounded 'total_score'.

    Raises:
        Exception: when json_string is not valid JSON.
    """
    try:
        input_data = json.loads(json_string)
    except Exception:
        raise Exception("Not valid JSON data")
    output_data = {'website_id': input_data['website_id'], 'headers': []}
    total_score = 0
    website_id = int(input_data['website_id'])
    for category in input_data['headers']:
        evaluator = _EVALUATORS.get(category['headertype'])
        if evaluator is None:
            # Unknown category names are silently skipped, as before.
            continue
        scores = evaluator(website_id, category['weights'])
        total_score += sum(scores.values())
        output_data['headers'].append(
            {'headertype': category['headertype'], 'scores': scores})
    output_data['total_score'] = round(total_score, 4)
    return output_data


def test():
    """Ad-hoc manual check: evaluate sample.json and print each category's scores."""
    with open('sample.json', 'r') as file:
        data = file.read()
    jsontest = json.loads(data)
    for jt in jsontest['headers']:
        evaluator = _EVALUATORS.get(jt['headertype'])
        if evaluator is not None:
            print(evaluator(int(jsontest['website_id']), jt['weights']))
package server

import (
	"time"

	"github.com/alexedwards/scs/v2"
	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/credentials"
	"github.com/aws/aws-sdk-go-v2/service/dynamodb"
	"github.com/sjansen/dynamostore"

	"github.com/sjansen/bouncer/internal/web/config"
)

// openDynamoStores creates the relay-state and session stores, both backed
// by a single shared DynamoDB table and distinguished by key prefixes.
func (s *Server) openDynamoStores(cfg *config.SessionStore) (scs.Store, scs.Store, error) {
	var svc *dynamodb.Client
	if cfg.EndpointURL.Host == "" {
		// No endpoint override: use the server's standard AWS config.
		svc = dynamodb.NewFromConfig(s.config.AWS.Config)
	} else {
		// Custom endpoint (e.g. a local DynamoDB for tests): static dummy
		// credentials, fixed region, and an immutable hostname.
		creds := credentials.NewStaticCredentialsProvider("id", "secret", "token")
		svc = dynamodb.NewFromConfig(
			aws.Config{
				Credentials: creds,
				Region:      "us-west-2",
			},
			dynamodb.WithEndpointResolver(
				dynamodb.EndpointResolverFromURL(
					cfg.EndpointURL.String(),
					func(e *aws.Endpoint) {
						e.HostnameImmutable = true
					},
				),
			),
		)
	}
	store := dynamostore.NewWithTableName(svc, cfg.TableName)
	if cfg.CreateTable {
		// Retry table creation up to 3 attempts with a 1s pause between
		// them; only the final failure is returned to the caller.
		for i := 0; i < 3; i++ {
			if err := store.CreateTable(); err == nil {
				break
			} else if i > 1 {
				return nil, nil, err
			}
			time.Sleep(1 * time.Second)
		}
	}
	relaystate := NewPrefixStore("r:", store)
	sessions := NewPrefixStore("s:", store)
	return relaystate, sessions, nil
}

// PrefixStore enables multiple sessions to be stored in a single
// session store by automatically pre-pending a prefix to tokens.
type PrefixStore struct {
	prefix string
	store  scs.Store
}

// NewPrefixStore wraps a session store so it can be shared.
func NewPrefixStore(prefix string, store scs.Store) *PrefixStore {
	return &PrefixStore{
		prefix: prefix,
		store:  store,
	}
}

// Delete removes the session token and data from the store.
func (s *PrefixStore) Delete(token string) (err error) {
	return s.store.Delete(s.prefix + token)
}

// Find returns the data for a session token from the store.
func (s *PrefixStore) Find(token string) (b []byte, found bool, err error) {
	return s.store.Find(s.prefix + token)
}

// Commit adds the session token and data to the store.
func (s *PrefixStore) Commit(token string, b []byte, expiry time.Time) (err error) {
	return s.store.Commit(s.prefix+token, b, expiry)
}
// Tests that RtpPacket::RemoveExtension can successfully remove extension when // other extensions are present but not registered. TEST(RtpPacketTest, RemoveExtensionPreservesOtherUnregisteredExtensions) { RtpPacketToSend::ExtensionManager extensions; extensions.Register<TransmissionOffset>(kTransmissionOffsetExtensionId); extensions.Register<AudioLevel>(kAudioLevelExtensionId); RtpPacketToSend packet(&extensions); packet.SetPayloadType(kPayloadType); packet.SetSequenceNumber(kSeqNum); packet.SetTimestamp(kTimestamp); packet.SetSsrc(kSsrc); packet.SetExtension<TransmissionOffset>(kTimeOffset); packet.SetExtension<AudioLevel>(kVoiceActive, kAudioLevel); EXPECT_THAT(kPacketWithTOAndAL, ElementsAreArray(packet.data(), packet.size())); RtpPacketToSend::ExtensionManager extensions1; extensions1.Register<AudioLevel>(kAudioLevelExtensionId); packet.IdentifyExtensions(extensions1); EXPECT_FALSE(packet.RemoveExtension(kRtpExtensionTransmissionTimeOffset)); EXPECT_TRUE(packet.RemoveExtension(kRtpExtensionAudioLevel)); EXPECT_THAT(kPacketWithTO, ElementsAreArray(packet.data(), packet.size())); }
<gh_stars>1-10
import { Gender } from '@ayzek/core/conversation';
import { Image } from '@ayzek/core/model/attachment';
import { validateData } from '@ayzek/core/util/config';
import { PromiseMap } from '@meteor-it/utils';
import * as t from 'io-ts';
import { VKApi } from '..';
import GroupingVKApiRequester from '../groupingRequester';
import VKUser from './user';

/**
 * A real (non-bot) VK user, adapting the raw `users.get` API payload
 * to the framework's VKUser model.
 */
export class VKRealUser extends VKUser {
	constructor(api: VKApi, public apiUser: VKApiUserType) {
		super(
			api,
			api.encodeUserUid(apiUser.id),
			apiUser.domain || null,
			apiUser.first_name,
			apiUser.last_name,
			// VK encodes sex as 1 = female, 2 = male; anything else maps to OTHER.
			[
				null,
				Gender.WOMAN,
				Gender.MAN,
			][apiUser.sex] || Gender.OTHER,
			// Prefer the vanity URL when the user has a custom domain.
			apiUser.domain ? `https://vk.com/${apiUser.domain}` : `https://vk.com/id${apiUser.id}`,
			false,
		);
	}

	private _photoImage: Promise<Image> | null = null;
	get photoImage() {
		if (this._photoImage) return this._photoImage;
		// Photo fetching is currently disabled (photo_map field is commented
		// out of the codec below), so this accessor always throws.
		throw new Error('unsupported');
		// return this._photoImage = Promise.resolve(Image.fromUrl('GET', this.apiUser.photo_map, {}, 'photo.jpeg', 'image/jpeg'));
	}
}

/** io-ts codec validating the subset of the `users.get` response we consume. */
export const VKApiUser = t.interface({
	id: t.number,
	domain: t.union([t.string, t.undefined]),
	first_name: t.string,
	last_name: t.string,
	sex: t.number,
	// photo_map: t.string,
});
export type VKApiUserType = t.TypeOf<typeof VKApiUser>;

/**
 * Lazy, cached id -> VKRealUser map. Lookups are batched (up to 200 ids per
 * `users.get` call) through a GroupingVKApiRequester.
 */
export class VKUserMap extends PromiseMap<number, VKRealUser> {
	processor: GroupingVKApiRequester<number>;

	constructor(public api: VKApi) {
		super();
		this.processor = new GroupingVKApiRequester(200, api.processor, ids => ({
			method: 'users.get',
			params: {
				user_ids: ids.join(','),
				fields: 'sex,bdate,photo_max,online,domain',
			},
		}), (v) => v, (u: any) => +u.id);
	}

	protected async getPromise(key: number): Promise<VKRealUser | null> {
		const apiUser = await this.processor.runTask(key);
		if (apiUser === null) return null;
		return new VKRealUser(this.api, validateData(apiUser, VKApiUser));
	}
}
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Microsoft Corporation. All rights reserved.
 *  Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as assert from 'assert';
import { MarkdownString } from 'vs/workbench/api/node/extHostTypeConverters';
import { isEmptyObject } from 'vs/base/common/types';
import { size } from 'vs/base/common/collections';

suite('ExtHostTypeConverter', function () {

	// Verifies that MarkdownString.from collects only real, scheme-qualified
	// URIs from markdown links into `data.uris`, deduplicating repeats.
	test('MarkdownConvert - uris', function () {

		let data = MarkdownString.from('Hello');
		assert.equal(isEmptyObject(data.uris), true);
		assert.equal(data.value, 'Hello');

		// A bare link target with no scheme is not a URI.
		data = MarkdownString.from('Hello [link](foo)');
		assert.equal(data.value, 'Hello [link](foo)');
		assert.equal(isEmptyObject(data.uris), true); // no scheme, no uri

		data = MarkdownString.from('Hello [link](www.noscheme.bad)');
		assert.equal(data.value, 'Hello [link](www.noscheme.bad)');
		assert.equal(isEmptyObject(data.uris), true); // no scheme, no uri

		// Any scheme-qualified target is collected.
		data = MarkdownString.from('Hello [link](foo:path)');
		assert.equal(data.value, 'Hello [link](foo:path)');
		assert.equal(size(data.uris), 1);
		assert.ok(!!data.uris['foo:path']);

		// NOTE(review): '<EMAIL>' appears to be a sanitized placeholder for a
		// literal e-mail address (auto-linked as mailto:) — confirm against the
		// upstream test before relying on the exact literal.
		data = MarkdownString.from('<EMAIL>');
		assert.equal(data.value, '<EMAIL>');
		assert.equal(size(data.uris), 1);
		assert.ok(!!data.uris['mailto:<EMAIL>']);

		data = MarkdownString.from('*hello* [click](command:me)');
		assert.equal(data.value, '*hello* [click](command:me)');
		assert.equal(size(data.uris), 1);
		assert.ok(!!data.uris['command:me']);

		// Repeated identical targets are stored once.
		data = MarkdownString.from('*hello* [click](file:///somepath/here). [click](file:///somepath/here)');
		assert.equal(data.value, '*hello* [click](file:///somepath/here). [click](file:///somepath/here)');
		assert.equal(size(data.uris), 1);
		assert.ok(!!data.uris['file:///somepath/here']);

		data = MarkdownString.from('*hello* [click](file:///somepath/here). [click](file:///somepath/here)');
		assert.equal(data.value, '*hello* [click](file:///somepath/here). [click](file:///somepath/here)');
		assert.equal(size(data.uris), 1);
		assert.ok(!!data.uris['file:///somepath/here']);

		// Distinct targets each get an entry.
		data = MarkdownString.from('*hello* [click](file:///somepath/here). [click](file:///somepath/here2)');
		assert.equal(data.value, '*hello* [click](file:///somepath/here). [click](file:///somepath/here2)');
		assert.equal(size(data.uris), 2);
		assert.ok(!!data.uris['file:///somepath/here']);
		assert.ok(!!data.uris['file:///somepath/here2']);
	});
});
/**
 * Represents a Javascript file that will be compiled and run by the V8 engine.
 *
 * Once a script text is set or loaded, you can invoke functions in the
 * script using {@code invokeFunction(String functionName, Object[] parameters,
 * String paramString)} to handle events delivered to it.
 */
public class SXRJavascriptV8File {

    private static final String TAG = SXRJavascriptV8File.class.getSimpleName();

    // Javascript source to evaluate.
    protected String mScriptText;
    protected SXRContext mGvrContext = null;
    // Shared across all instances; created lazily on first invokeFunction call.
    static protected ScriptEngine mEngine = null;
    // Engine-scope bindings captured after the most recent successful run.
    protected Bindings bindings = null;
    protected Invocable invocable = null;
    protected Bindings inputBindings = null;
    // Values the caller wants exposed to the script; see setInputValues.
    Map inputVars = null;

    /**
     * Loads a Javascript file from a text string.
     *
     * @param gvrContext
     *            The SXR Context.
     * @param scriptText
     *            String containing a Javascript program.
     */
    public SXRJavascriptV8File(SXRContext gvrContext, String scriptText) {
        mScriptText = scriptText;
        mGvrContext = gvrContext;
    }

    /** Stores values intended to be visible to the script at invocation time. */
    public void setInputValues(Map inputValues) {
        inputVars = inputValues;
    }

    /**
     * Evaluates {@code paramString} and the script text, then invokes the
     * named function.
     *
     * @param funcName    name of the Javascript function to call
     * @param parameters  arguments forwarded to the function
     * @param paramString Javascript source declaring input variables,
     *                    evaluated before the script itself
     * @return true if the function ran without throwing, false otherwise
     */
    public boolean invokeFunction(String funcName, Object[] parameters,
                                  String paramString) {
        boolean runs = false;
        try {
            if ( mEngine == null ) {
                mEngine = new V8ScriptEngineFactory().getScriptEngine();
            }

            if ( inputVars != null ) {
                // NOTE(review): this local Bindings shadows the field and is
                // never attached to mEngine, so inputVars may never actually
                // reach the script — inputs appear to arrive via paramString
                // instead. Confirm before relying on setInputValues.
                Bindings inputBindings = mEngine.createBindings();
                inputBindings.putAll(inputVars);
            }

            mEngine.eval( paramString );
            mEngine.eval( mScriptText );

            invocable = (Invocable) mEngine;
            invocable.invokeFunction(funcName, parameters);

            // Capture the engine scope so callers can read values the script set.
            bindings = mEngine.getBindings( ScriptContext.ENGINE_SCOPE);
            runs = true;
        } catch (ScriptException e) {
            Log.d(TAG, "ScriptException: " + e);
            Log.d(TAG, "  function: '" + funcName + "'");
            Log.d(TAG, "  input Variables: '" + paramString + "'");
            Log.d(TAG, "  JavaScript:\n" + mScriptText);
        } catch (Exception e) {
            Log.d(TAG, "Exception: " + e);
            Log.d(TAG, "  function: '" + funcName + "'");
            Log.d(TAG, "  input Variables: '" + paramString + "'");
            Log.d(TAG, "  JavaScript:\n" + mScriptText);
        }
        return runs;
    }

    /**
     * Access to values modified during invoking of Script file.
     * Enables X3D to get values the script modifies.
     * @return engine-scope bindings from the last successful invocation,
     *         or null if no invocation has completed
     */
    public Bindings getLocalBindings() {
        return bindings;
    }

    /**
     * Sets the script file.
     */
    public void setScriptText(String scriptText) {
        mScriptText = scriptText;
    }

    /**
     * Gets the script file.
     * @return The script string.
     */
    public String getScriptText() {
        return mScriptText;
    }
}
After eight months in office, President Donald Trump has bombed Syria, called for increased deployment levels to Afghanistan, and threatened North Korea with “fire and fury like the world has never seen.” Before running for president, though, he tweeted against military intervention in Syria, called the war in Afghanistan a “complete waste,” and said he would “negotiate like crazy” with North Korea. While campaigning, Trump opined in much the same way. While many have criticized the president for his mixed stance towards military intervention, his propensity for self-contradiction is disturbing: it remains unclear whether America’s Commander-in-Chief has any military strategy whatsoever. Past administrations have varied in approach to foreign policy, but all followed self-enforced doctrines­­­­­­­­­­­­­­­ that were drafted, tested, and advertised during their campaigns. After six months in office, Trump’s military maneuvering has been at complete odds with his campaign promises. Worse still, he has yet to even present a coherent military doctrine. In an interview with the HPR, Robert Kaplan, a senior fellow at the Center for a New American Security and a member of the U.S. Navy’s Executive Panel, described Trump’s military maneuvering as a “transactional foreign policy—in other words, a policy of trade-offs with no overriding, idealistic vision.” What results a “transactional” U.S. military strategy might bring are unclear, but early indications suggest that over time Trump could significantly reshape America’s military and its mission to fit his worldview. Unlike presidents over the past 75 years, Trump appears willing to give the military wide latitude to use unchecked force without larger policy goals to guide its actions, Kaplan says. 
In such uncertain times as these, Kaplan argues that the White House, and by extension the armed forces, must have a defined military “doctrine.” Without overarching policy to guide and limit actions, he warns, the Trump administration risks alienating allies, emboldening enemies, and undermining national and global security. The Liberal Order To understand the current direction of U.S. foreign policy, it is helpful to evaluate the history of American military doctrine. Across more than a century of shifting administrations, economic and ideological trends, and wars, the political leadership of the United States has generally worked towards what Brookings Institute senior fellow Thomas Wright calls a “liberal international order based on alliance systems, an open global economy, the primacy of rules and institutions, and the promotion of democracy and human rights.” With the end of World War II, this U.S.-centric liberal order has held increasing sway. Each used a coherent military strategy to benefit: in times of crisis, they were able to quickly develop plans of action based on their overarching doctrine. In times of peace and order, these doctrines set clear guidelines for routine international interactions, thus greatly reducing the risk of unintentional provocation. In war, peace, and confusion, past presidents found comfort in the predictability of established military strategy. The Obama Doctrine When Barack Obama took office in 2009, he sought to uphold a liberal world order. 
Under his administration, the armed forces operated under a doctrine that military analyst Kevin Baron described to the HPR as the “pledge to keep American troops out of unnecessary fighting while helping local populations defend themselves.” Obama’s aides famously embraced the doctrine “Don’t do stupid stuff.” From the emergence of China as a dominant world power in the Pacific to Russia’s growing military assertiveness; from the nuclear threat posed by Iran to rekindled weapons development in North Korea and the establishment of ISIL in the Middle East; the Obama administration faced uniquely global threats. The Obama Doctrine was a response to an overwhelming number of global security risks. New President, New Direction Trump has inherited many of the same global threats faced by Obama. Since taking office, however, he has challenged accepted norms by radically breaking from the Obama doctrine. In articulating his “America First” philosophy, Trump has complained that the liberal world order has harmed the United States in many ways. In response, he appears poised to be the first president in over 70 years to move away from the liberal world order. This shift could have a profound impact on the U.S. Armed Forces and its mission. Kevin Baron warns that it could bring “looser and more frequent military intervention.” Candidate Trump laid out what many saw as a traditionally neo-conservative approach to foreign policy in his “Contract with the American Voter,” emphasizing elimination of the defense sequester, the military’s spending cap in any given year, for increased investment. But in an interview with the HPR, former Army officer and Congressman Chris Gibson (R – N.Y.), who served on the Committee on Armed Services, said that “over time, it has become even more unclear whether [Trump] intends on following promises he made as a candidate.” Since assuming office, Trump has moved to reshape the U.S. military at multiple levels. 
Michael Klare, Five Colleges professor of peace and world security studies, told the HPR that “under Obama, the rules of engagement were very strict, [but] Trump sees the use of force as perfectly fine.” America’s Military First? So far, the closest approximation to Trump’s military strategy is his apparent adherence to “might makes right.” In the president’s own words, the military today, compared to that under the Obama Doctrine, will receive “a great rebuilding effort.” Although Trump has yet to announce how this strengthened military might be used, he has promised that it will be more powerful than ever. Trump appears more willing to defer to his military advisers than Obama or his recent predecessors. In a nationally televised speech in August, he acceded to a Pentagon request for blanket authority to deploy troops to Afghanistan without presidential limit. “We will not talk about numbers of troops or our plans for further military activities,” Trump said. “Conditions on the ground, not arbitrary timetables, will guide our strategy from now on … Micromanagement from Washington, D.C., does not win battles.” The Trump administration seems to believe that conflict in the Middle East is best remedied by the presence of American ground troops and military might, such as the April use of a M.O.A.B. on Afghan soil. The delegation of power in military decisions traditionally reserved for the president and his closest advisors, including troop deployment levels, points to Trump’s reluctance to ensnare himself in difficult military decisions or develop a coherent military doctrine.
The president also seems to prefer giving more responsibility to lower-level field officers on the ground, a decision that Kevin Baron interprets as “wanting the military to govern, manage, and lead themselves.” Trump’s security priorities and overall stance on the use of military power come to light in his proposed budget, which calls for raising military spending by $54 billion—an almost 10 percent increase. As Trump said, this would be “one of the largest increases in defense spending in American history.” Such an increase would dwarf the U.S. State Department’s entire current budget of $29 billion, which will likely be cut by 29 percent in 2018. Trump’s proposed budget also significantly reduces funding for American humanitarian and diplomatic programs, traditional levers of American soft power designed to help maintain the liberal world order. By depriving of funding those departments whose main tasks revolve around facilitating communication and understanding to maintain peace, the president appears willing to accept the potential risks of military conflict. Force Masquerading as Doctrine Trump’s reconstruction of the military stands apart from other administrations’ attempts to “rebuild” the armed forces. Unlike his predecessors, Trump has yet to present a clear military doctrine defining what sort of military force is appropriate. On April 7, less than four months after taking office, Trump ordered the launch of 59 Tomahawk cruise missiles at Shayrat air base in Syria. A week later, the U.S. military dropped America’s most powerful non-nuclear bomb on targets in Afghanistan. His interactions with other countries have also raised some eyebrows. As Philip Gordon wrote in Foreign Affairs, talks between Trump and foreign governments have been conducted with a high-stakes negotiating style and a refusal to compromise.
This characterization squares with the common belief that Trump is a brash, no-holds-barred negotiator who seeks the best deal for himself. While Trump’s negotiating style might reap rewards in business, it seems unlikely to be very effective in international diplomacy. Trump is betting that with a strengthened military poised to deliver action, foreign leaders will be more likely to buckle to American demands. Trump has tapped an unprecedented number of generals, former generals and other ex-military personnel to serve in his cabinet or as top advisers. By comparison, the Obama administration’s cadre of advisers came from academia, think tanks, and diplomatic circles, and advocated nation-building and communication-reliant strategies to ease hostilities. In following military strategy developed by diplomats, the U.S. armed forces during Obama’s tenure were expected to facilitate communication and rapport, not conflict. Given the makeup of Trump’s advising network, however, some wonder whether the Trump administration will show similar restraint in the face of conflicts overseas. As Institute for Policy Studies fellow Phyllis Bennis told the HPR, “When it comes time to make a decision, Trump will have to turn to the people he’s surrounded himself with: his generals. Trump has privileged his military over seasoned political operatives across the board.” The Necessity of a Defined Military Doctrine Under the “America First” philosophy, the Trump administration is pursuing a security policy designed to enable America to “start winning again.” However, a military strategy or foreign policy doctrine meant to safeguard national security cannot be built around the ambiguous concept of “winning.” By operating without a fully developed defense doctrine, the Trump administration may be risking lives at home and abroad. 
Perhaps most worrisome, if things go wrong, Kevin Baron concludes, “nobody is coming to help Trump out.” Formulating a clear national security doctrine is especially important at a time when the United States is building up its military in a global context that presents so many novel challenges. The immediate risks of an absent defense doctrine may be difficult to discern, but the long-term consequences could be troubling and hard to undo. Trump’s incoherent approach to dealing with adversaries and allies alike could lead the military to blunder into long and costly conflict. Trump’s lack of a clear military strategy increases the likelihood of a misstep bringing tragic consequences. In our fast-paced modern era, in which daily challenges to peace and stability affect the globe, it’s imperative that the Trump administration develop a clear and coherent military doctrine designed for a strengthened Armed Forces. If such a doctrine already exists, it should be plainly communicated to promote confidence at home and abroad. The risk of operating with the current “transactional” foreign policy is just too high. Image Credits: Wikimedia Commons / Shealah Craighead, U.S. Coast Guard / Petty Officer 1st Class David B. Mosley, Wikimedia Commons / Master Sergeant Michel Sauret
/**
 * Opens the generated PDF for the given example in an available external viewer.
 *
 * @param className name of the example class whose resulting PDF should be shown
 */
public void openPdf(String className) {
	String path = resultingPdfs.get(className);
	if (path != null) {
		Intent i = new Intent(Intent.ACTION_VIEW);
		i.setDataAndType(Uri.fromFile(new File(Environment
				.getExternalStorageDirectory()
				+ File.separator
				+ "droidtext" + File.separator + path)), "application/pdf");
		i.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
		try {
			startActivity(i);
		} catch (ActivityNotFoundException e) {
			// No PDF viewer installed — tell the user instead of crashing.
			Toast.makeText(this, "No Application Available to View PDF",
					Toast.LENGTH_SHORT).show();
		}
		// BUG FIX: the original called startActivity(i) a second time here,
		// outside the try/catch — launching the viewer twice when one exists
		// and crashing with an uncaught ActivityNotFoundException when none
		// does. The duplicate call has been removed.
	} else {
		Toast.makeText(this, "No resulting pdf for example " + className,
				Toast.LENGTH_SHORT).show();
	}
}
""" """ ## Standard Library import sys import os from collections import namedtuple ## Third-Party import colorama ## Constants STDIN_FD = 0 STDOUT_FD = 1 STDERR_FD = 2 class Stream(object): """Coloured text stream in C++ style with verbosity control. Example ------- >>> Stream.set_lvl(0) >>> mystream = Stream() >>> mystream[1] << "Hello." >>> mystream[0] << "Hello." Hello. >>> mystream << "Hello." Hello. """ COLORS = {"BLACK", "RED", "GREEN", "YELLOW", "BLUE", "MAGENTA", "CYAN", "WHITE", None} STYLES = {"DIM", "NORMAL", "BRIGHT", None} RESET = colorama.Style.RESET_ALL Params = namedtuple('params', ['bg', 'fg', 'sty', 'file'], defaults=[None, None, None, sys.stdout]) __ref__ = {} __lvl__ = None def __new__(cls, lvl: int=0, **kwargs: dict): ## Gather parameters params = cls.Params(**kwargs) ## Check Background if params.bg not in cls.COLORS: raise ValueError(f'Color {params.bg} not available.\nOptions: {cls.COLORS}') ## Check Foreground if params.fg not in cls.COLORS: raise ValueError(f'Color {params.fg} not available.\nOptions: {cls.COLORS}') ## Check Style if params.sty not in cls.STYLES: raise ValueError(f'Style {params.sty} not available.\nOptions: {cls.STYLES}') if params not in cls.__ref__: cls.__ref__[params] = super().__new__(cls) return cls.__ref__[params] def __init__(self, lvl: int=0, **kwargs: dict): """ Parameters ---------- lvl : int Verbosity level. **kwargs bg : str Background ANSI control sequence. fg : str Foreground ANSI control sequence. sty : str Style ANSI control sequence. file : _io.TextIOWrapper File object interface for writing to. 
""" ## Set lvl self.lvl = lvl ## Gather parameters bg, fg, sty, file = self.params = self.Params(**kwargs) ## Gather escape sequences self.bg = "" if bg is None else getattr(colorama.Back, bg) self.fg = "" if fg is None else getattr(colorama.Fore, fg) self.sty = "" if sty is None else getattr(colorama.Style, sty) ## Set output file self.file = file def __repr__(self): params = ", ".join([f"{key}={getattr(self.params, key)!r}" for key in ('bg', 'fg', 'sty')]) return f"{self.__class__.__name__}({self.lvl}, {params})" def printf(self, *args, **kwargs): if self.echo: self._printf(*args, **kwargs) def _printf(self, *args, **kwargs): print(self.bg, self.fg, self.sty, sep="", end="", file=self.file) print(*args, **kwargs, file=self.file) print(self.RESET, sep="", end="", file=self.file) def string(self, s: str): """Generates formated string, appending and preppending respective ANSI control sequences. Parameters ---------- s : str Input string. Returns ------- str Formated string. """ if self.bg or self.fg or self.sty: return f"{self.RESET}{self.bg}{self.fg}{self.sty}{s}{self.RESET}" else: return s def __lshift__(self, s: str): if self.echo: print(self.string(s), file=self.file) return self def __getitem__(self, lvl: int): return self.__class__(lvl, **self.params._asdict()) def __call__(self, **kwargs): return self.__class__(self.lvl, **kwargs) def __bool__(self) -> bool: return self.echo ## Block skip class SkipBlock(Exception): ... def trace(self, *args, **kwargs): raise self.SkipBlock() def __enter__(self): """Implements with-block skipping. 
""" if not self.echo: sys.settrace(lambda *args, **kwargs: None) frame = sys._getframe(1) frame.f_trace = self.trace else: return self def __exit__(self, type_, value, traceback): """ """ if type_ is None: return None elif issubclass(type_, self.SkipBlock): return True else: return None ## File interface def read(self): raise NotImplementedError def write(self, s: str, *args, **kwargs): self.printf(s, **kwargs) @property def echo(self): return (self.__lvl__ is None) or (self.__lvl__ >= self.lvl) @classmethod def set_lvl(cls, lvl : int = None): if lvl is None or type(lvl) is int: cls.__lvl__ = lvl else: raise TypeError(f'Invalid type `{type(lvl)}`` for debug lvl. Must be `int`.') class NullStream(object): __ref__ = None def __new__(cls): if cls.__ref__ is None: cls.__ref__ = object.__new__(cls) return cls.__ref__ def __enter__(self, *args, **kwargs): global sys self.sys_stdout = sys.stdout sys.stdout = open(os.devnull, 'w') return self def __exit__(self, *args, **kwargs): global sys sys.stdout.close() sys.stdout = self.sys_stdout return None class logfile: def __init__(self): self.fd = os.dup(STDERR_FD) self.file = os.fdopen(self.fd, mode='w', encoding='utf-8') def write(self, s: str, *args, **kwargs): self.file.write(s, *args, **kwargs) ## Initialize shell environment colorama.init() os.system("") ## Create default streams stderr = Stream(fg='RED', file=sys.stderr) stdwar = Stream(fg='YELLOW', file=sys.stderr) stdlog = Stream(fg='CYAN', file=logfile()) ## Initialize log file stdout = Stream(file=sys.stdout) devnull = NullStream()
/**
 * Determines whether the specified JndiDataSourceType enum and the given, supported Pivotal GemFire 'named',
 * JNDI DataSource implementation are a match.
 *
 * @param jndiDataSourceType the given JndiDataSourceType enum used in the match.
 * @param name the specified Pivotal GemFire "named" JNDI DataSource implementation.
 * @return a boolean value indicating whether the given JndiDataSourceType enumerated value matched the given name.
 * @see java.lang.String#equalsIgnoreCase(String)
 * @see org.springframework.util.StringUtils#trimWhitespace(String)
 */
private static boolean isMatch(final JndiDataSourceType jndiDataSourceType, String name) {

	String trimmedName = StringUtils.trimWhitespace(name);

	boolean matchesDisplayName = jndiDataSourceType.getName().equalsIgnoreCase(trimmedName);
	boolean matchesEnumName = jndiDataSourceType.name().equalsIgnoreCase(trimmedName);

	return matchesDisplayName || matchesEnumName;
}
/*
 * Call blocker (GoBGP or Arista)
 *
 * CallBlocker fans the given data-channel ACLs out to the blocker selected by
 * the customer's configuration, registers a protection for each, and executes
 * them under a single random session name. The final ACL in the batch is
 * executed with the COMMIT action; the others with EXIT. On any error, every
 * protection registered so far is unregistered (rollback).
 */
func CallBlocker(acls []ACL, customerID int) (err error) {
	// Buffered result/error channels filled by the blocker-selection service.
	ch := make(chan *models.ACLBlockerList, 10)
	errCh := make(chan error, 10)
	defer func() {
		close(ch)
		close(errCh)
	}()

	// Rollback actions accumulated as protections are registered.
	unregisterCommands := make([]func(), 0)

	counter := 0

	blockerConfig, err := models.GetBlockerConfiguration(customerID, string(messages.DATACHANNEL_ACL))
	if err != nil {
		return err
	}
	log.WithFields(log.Fields{
		"blocker_type": blockerConfig.BlockerType,
	}).Debug("Get blocker configuration")

	// One enqueued request (and therefore one expected channel message) per ACL.
	for _, acl := range acls {
		models.BlockerSelectionService.EnqueueDataChannelACL(acl.ACL, blockerConfig, customerID, acl.Id, ch, errCh)
		counter++
	}

	// All protections in this batch share one session name.
	sessName := string(dots_common.RandStringBytes(10))

	// Drain exactly `counter` responses.
	// NOTE(review): each `break` below only exits the select case, not the
	// for loop — the loop terminates via the counter. A later successful ACL
	// can also overwrite an earlier error in `err`; confirm both behaviours
	// are intended.
	for counter > 0 {

		select {
		case aclList := <-ch: // receive a blocker selection
			if aclList.Blocker == nil {
				counter--
				err = errors.New("Blocker does not exist")
				break
			}
			// register a protection to the blocker
			p, e := aclList.Blocker.RegisterProtection(&models.MitigationOrDataChannelACL{nil, aclList.ACL}, aclList.ACLID, aclList.CustomerID, string(messages.DATACHANNEL_ACL))
			if e != nil {
				err = e
				break
			}
			unregisterCommands = append(unregisterCommands, func() {
				aclList.Blocker.UnregisterProtection(p)
			})

			// Only the last ACL commits; the rest just exit their config session.
			action := models.EXIT_VALUE
			if counter == 1 {
				action = models.COMMIT_VALUE
			}
			p.SetSessionName(sessName)
			p.SetAction(action)

			// execute the protection on the blocker
			e = aclList.Blocker.ExecuteProtection(p)
			if e != nil {
				counter--
				err = e
				break
			}
			counter--
		case e := <-errCh: // enqueue/selection failure for one ACL
			counter--
			err = e
			break
		}
	}

	// Rollback on failure: undo every protection registered in this call.
	if err != nil {
		for _, f := range unregisterCommands {
			f()
		}
	}
	return
}
def extract_weights(self, name):
    """Return the primary weight array of the named layer in the merged model.

    Parameters
    ----------
    name : str
        Name of the layer to look up in ``self.merged_model``.

    Returns
    -------
    The first entry of the layer's ``get_weights()`` list (the kernel/weight
    matrix, excluding any bias terms).
    """
    return self.merged_model.get_layer(name).get_weights()[0]
import tool.TreeNode;

import java.util.Deque;
import java.util.LinkedList;

/**
 * LeetCode 173 — Binary Search Tree Iterator.
 *
 * Implements an in-order iterator over a BST:
 *   - BSTIterator(TreeNode root): initialize with the BST root; the logical
 *     pointer starts before the smallest element, so the first next() call
 *     returns the minimum.
 *   - hasNext(): true if there is a number to the right of the pointer.
 *   - next(): advance the pointer and return the value at it. Calls are
 *     assumed valid (a next element always exists when next() is invoked).
 *
 * Example:
 *   BSTIterator it = new BSTIterator([7, 3, 15, null, null, 9, 20]);
 *   it.next();    // 3
 *   it.next();    // 7
 *   it.hasNext(); // true
 *   it.next();    // 9 ... then 15, 20, and hasNext() becomes false.
 *
 * Constraints: 1 <= nodes <= 1e5, 0 <= Node.val <= 1e6, up to 1e5 calls.
 *
 * Approach: lazy controlled recursion with an explicit stack — next() and
 * hasNext() run in amortized O(1) time using O(h) memory (h = tree height),
 * which answers the follow-up question.
 */
public class Q00173m {

    // Explicit stack of ancestors whose left subtrees are already consumed.
    private final Deque<TreeNode> stack;
    // Next subtree root whose leftmost descendant is the next value to emit.
    private TreeNode curr;

    public Q00173m(TreeNode root) {
        curr = root;
        stack = new LinkedList<>();
    }

    public int next() {
        // Descend to the leftmost node, stacking ancestors along the way.
        while (curr != null) {
            stack.push(curr);
            curr = curr.left;
        }
        // Emit the smallest unvisited node, then move into its right subtree.
        curr = stack.pop();
        int val = curr.val;
        curr = curr.right;
        return val;
    }

    public boolean hasNext() {
        // More values remain while there is an unexplored subtree or a
        // stacked ancestor.
        return curr != null || stack.size() > 0;
    }
}
// The extractor trick is in progress. Take the next step, when possible.
// At most one step occurs per frame.
// State machine: Start -> ExtractorOrdered -> UnitOrdered -> None.
// MakeUnitBypass is a separate terminal path that makes the unit directly
// without ordering/cancelling an extractor.
void ProductionManager::doExtractorTrick()
{
    if (_extractorTrickState == ExtractorTrick::Start)
    {
        UAB_ASSERT(!_extractorTrickBuilding, "already have an extractor trick building");
        int nDrones = WorkerManager::Instance().getNumMineralWorkers();
        if (nDrones <= 0)
        {
            // No drone available to build the extractor; abandon the trick.
            _extractorTrickState = ExtractorTrick::None;
        }
        else if (getFreeMinerals() >= 100 || (nDrones >= 6 && getFreeMinerals() >= 76))
        {
            // Enough minerals (or close enough to mine the rest in time).
            // Also require a larva so the freed supply can actually be used.
            if (the.self()->completedUnitCount(BWAPI::UnitTypes::Zerg_Larva) > 0)
            {
                // loc is a dummy location -- the extractor position is fixed anyway.
                BWAPI::TilePosition loc = BWAPI::TilePosition(0, 0);
                Building & b = BuildingManager::Instance().addTrackedBuildingTask(MacroAct(BWAPI::UnitTypes::Zerg_Extractor), loc, nullptr, false);
                _extractorTrickState = ExtractorTrick::ExtractorOrdered;
                _extractorTrickBuilding = &b;
            }
        }
    }
    else if (_extractorTrickState == ExtractorTrick::ExtractorOrdered)
    {
        if (_extractorTrickUnitType == BWAPI::UnitTypes::None)
        {
            // The trick is being used only to burn minerals; skip unit production.
            _extractorTrickState = ExtractorTrick::UnitOrdered;
        }
        else
        {
            int supplyAvail = the.self()->supplyTotal() - the.self()->supplyUsed();
            if (supplyAvail >= 2 &&
                getFreeMinerals() >= _extractorTrickUnitType.mineralPrice() &&
                getFreeGas() >= _extractorTrickUnitType.gasPrice())
            {
                // We can build a unit now: the extractor trick worked!
                // (Supply was freed by the drone starting the extractor.)
                BWAPI::Unit larva = getClosestLarvaToPosition(the.bases.myMain()->getPosition());
                if (larva && _extractorTrickUnitType != BWAPI::UnitTypes::None)
                {
                    if (_extractorTrickUnitType == BWAPI::UnitTypes::Zerg_Zergling &&
                        the.my.completed.count(BWAPI::UnitTypes::Zerg_Spawning_Pool) == 0)
                    {
                        // Can't make a zergling without a completed spawning pool;
                        // deliberately make nothing and fall through to cancel.
                    }
                    else
                    {
                        the.micro.Make(larva, _extractorTrickUnitType);
                    }
                    _extractorTrickState = ExtractorTrick::UnitOrdered;
                }
            }
            else if (supplyAvail < -2)
            {
                // Uh oh, we must have lost an overlord or a hatchery;
                // give up on making the unit.
                _extractorTrickState = ExtractorTrick::UnitOrdered;
            }
            else if (WorkerManager::Instance().getNumMineralWorkers() <= 0)
            {
                // Drone massacre -- there's no way to fund the unit any more.
                _extractorTrickState = ExtractorTrick::UnitOrdered;
            }
        }
    }
    else if (_extractorTrickState == ExtractorTrick::UnitOrdered)
    {
        // Cancel the extractor to recover most of its cost and reset the trick.
        UAB_ASSERT(_extractorTrickBuilding, "no extractor to cancel");
        BuildingManager::Instance().cancelBuilding(*_extractorTrickBuilding);
        _extractorTrickState = ExtractorTrick::None;
        _extractorTrickUnitType = BWAPI::UnitTypes::None;
        _extractorTrickBuilding = nullptr;
    }
    else if (_extractorTrickState == ExtractorTrick::MakeUnitBypass)
    {
        // Make the unit directly; no extractor was ordered, so nothing to cancel.
        BWAPI::Unit larva = getClosestLarvaToPosition(the.bases.myMain()->getPosition());
        if (larva &&
            getFreeMinerals() >= _extractorTrickUnitType.mineralPrice() &&
            getFreeGas() >= _extractorTrickUnitType.gasPrice())
        {
            the.micro.Make(larva, _extractorTrickUnitType);
            _extractorTrickState = ExtractorTrick::None;
        }
    }
    else
    {
        UAB_ASSERT(false, "unexpected extractor trick state (possibly None)");
    }
}
<gh_stars>0
package types

// Double is a setter adapter that assigns decoded values into a *float64
// target. Numeric setters widen to float64; every non-numeric setter
// panics, since such an assignment indicates a schema mismatch.
type Double struct {
	Target *float64
}

// SetBoolean panics: a boolean cannot be assigned to a double field.
func (b *Double) SetBoolean(v bool) {
	panic("Unable to assign boolean to double field")
}

// SetInt widens an int32 into the float64 target.
func (b *Double) SetInt(v int32) {
	*(b.Target) = float64(v)
}

// SetLong widens an int64 into the float64 target.
// NOTE(review): int64 values above 2^53 lose precision in float64.
func (b *Double) SetLong(v int64) {
	*(b.Target) = float64(v)
}

// SetFloat widens a float32 into the float64 target.
func (b *Double) SetFloat(v float32) {
	*(b.Target) = float64(v)
}

// SetDouble stores the value directly into the target.
func (b *Double) SetDouble(v float64) {
	*(b.Target) = v
}

// SetUnionElem panics: a double field is not a union.
func (b *Double) SetUnionElem(v int64) {
	panic("Unable to assign union elem to double field")
}

// SetBytes panics: raw bytes cannot be assigned to a double field.
func (b *Double) SetBytes(v []byte) {
	panic("Unable to assign bytes to double field")
}

// SetString panics: a string cannot be assigned to a double field.
func (b *Double) SetString(v string) {
	panic("Unable to assign string to double field")
}

// Get panics: a scalar double has no sub-fields.
func (b *Double) Get(i int) Field {
	panic("Unable to get field from double field")
}

// SetDefault panics: a scalar double has no defaultable sub-fields.
func (b *Double) SetDefault(i int) {
	panic("Unable to set default on double field")
}

// AppendMap panics: a scalar double is not a map.
func (b *Double) AppendMap(key string) Field {
	panic("Unable to append map key to from double field")
}

// AppendArray panics: a scalar double is not an array.
func (b *Double) AppendArray() Field {
	panic("Unable to append array element to from double field")
}

// NullField panics: a scalar double cannot be nulled.
func (b *Double) NullField(int) {
	panic("Unable to null field in double field")
}

// Finalize is a no-op for scalar fields.
func (b *Double) Finalize() {}
Adrian Brown/Bloomberg via Getty It’s surprisingly difficult to come up with a truly random sequence of numbers or items. Doing so requires cognitive skills such as memory and attention, as well as a sense of complexity. “Our brains are wired to find patterns even where there are none – for example, when looking at clouds or stars in the sky,” says Hector Zenil at the Karolinska Institute in Stockholm, Sweden, and the LABORES Research Lab in Paris, France. Zenil and his colleagues have now found that our ability to think up random sequences peaks when we reach 25 before declining with age. This mirrors the evolution and decline of our cognitive abilities, suggesting that monitoring this skill could give an insight into these changes over time. Advertisement They asked more than 3400 people between the ages of 4 and 91 to complete an online assessment that included five tasks designed to measure their ability to generate random sequences. These included creating a hypothetical list of the results of flipping a coin 12 times and guessing which card would come next in a shuffled pack of cards. To measure how random people’s answers were, the researchers used a concept called “algorithmic randomness”. The idea is that if a sequence is truly random, it should be difficult to create an algorithm or computer program that can generate it. Statistics software provided an estimate of how complex this would be for each response. Rise and fall The ability to generate randomness was found to steadily increase between the ages of 4 and 25. It then gradually declined until the age of around 60, when it began dropping faster. This follows a similar pattern to that of cognitive ability overall. Age was the only factor that could reliably predict someone’s ability to create randomness, says Zenil. “Neither gender, language spoken, beliefs nor education level had any impact,” he says. 
“This is the first time that we can see how randomness changes across the whole age range,” says Martin Fischer at the University of Potsdam in Germany. If the results are further validated, it could be useful to add a random-generation task to cognitive tests used to monitor people with neurodegenerative disease, he says. Journal reference: PLOS Computational Biology, DOI: 10.1371/journal.pcbi.1005408
In an interview with Komsomolskaya Pravda, Russian history professor Elena Sinyavskaya in a discussion with Alexey Ovchinnikov disputes the factual provenance of the Red Army rape of Germany. The Myth of the “Rape of Germany” was Invented by Goebbels In recent years, Victory Day has unfortunately acquired a not very pleasant tradition: the closer the holiday comes, the more do all sorts of “researchers” begin to broadcast the myth of “raped Germany.” In this way, over the years the number of German maidens, allegedly victims of the Red Army, simply grows. But for whom is it necessary that the Russian soldier remain in the national memory not as a liberator and protector, but as a rapist and a robber? This is something that we have talked about to a leading researcher at the Institute of Russian History, the Russian Academy of Sciences, Doctor of Historical Sciences, Professor Elena Sinyavskaya. “The Nazis intimidated people to the point that they committed suicide” Elena Spartakovna, is all this the result of restructuring? Those years generated a lot of rubbish … Not really. This nasty story began much earlier, with Goebbels’ propaganda, when it was announced to the population that the Red Army was brutally raping all German women between the ages of 8 to 80 years. And the people were really intimidated to the limit, to the extent that Nazi party activists firstly killed their families and then themselves. So why was such an Image necessary? Firstly, to increase resistance against the advancing Red Army, and secondly, so that the population would leave the lost territories and could be of no assistance to the Soviet armed forces. Goebbels’ line was then continued in the same year of 1945 by the allies, when the first publications appeared in which it was attempted to represent the Red Army as an army of looters and rapists and with absolutely nothing said about the outrages that were happening in the western zone of occupation. 
With the start of the “cold war” the theme was exaggerated, but not so aggressively and massively as has begun to occur in the last twenty years. The numbers “raped” were initially modest: from 20,000 to 150,000 in Germany. But in 1992, after the collapse of the Soviet Union, in Germany there was published a book by two feminists, Helga Zander and Barbara Jor, “The Liberators and Liberated,” where for the first time a figure of 2 million was arrived at. Moreover, it was derived from a completely flawed premise: statistical data for 1945-1946 were collected in a Berlin hospital in which there were born somewhere around 500 children per year and approximately 15-20 people were listed under “nationality of father” as “Russian”. Moreover, two or three such cases were classified as “raped”. What did these “researchers” do? They arrived at the conclusion that all the cases where the father was Russian were the result of being raped. Then Goebbel’s formula from “8 to 80” was simply factored in. However, the mass distribution of this figure took place in 2002 with the publication of Anthony Beevor’s book “The Fall of Berlin”, which was published here in 2004, and the mythical figure of “2 million” was then taken out for a stroll by the Western mass media on the eve of the 60th anniversary of the Victory. The Germans are tired of repenting One can understand those who went on about this topic during the “cold war” years, but then the Berlin Wall fell and, according to Gorbachev, there was established “peace and friendship” … The geopolitical realities have changed. On the one hand there have been attempts to revise the results of World War II, plus the desire to remove the Soviet Union (and Russia as its legal successor) from the victorious side and all the good that is associated with it. 
This was one of the steps that led, in a number of decisions of European institutions, including the European Parliament, to the equating of Stalinism with Nazism, where the aggressor and the victim have been placed on the same footing, shifting the question of guilt and responsibility and forcing us to repent for something that never happened. Does it appear that these “researchers” have not written these essays themselves but have been commissioned by those who have been creating this new geopolitics? Of course. The second reason is that the myth is pleasing to the West – and to the psychology of the German people, who are tired of feeling guilty. There we hear the present generation already saying: “Well, why do we have to repent for our forefathers’ sin?” Among them there has already arisen a wave of feeling that, with the formation of a national consciousness, is trying to assert the idea that their ancestors were not that guilty; that there isn’t a collective German responsibility … This is where the geopolitical order merges with the sentiments of the masses. Leave aside revenge! And what actually happened to these raped people? We cannot say that such things did not happen. There were rapes, but not on the scale about which they are lying today. In documents, such facts are recorded as “extraordinary happenings and immoral events”. The country’s leadership and high command believed that it wasn’t simply a case of creating a bad image for the Red Army; it also undermined discipline. And they fought against this with all available means, starting with party-political work and explanations, and finally ending with tribunal sentences, up to and including the shooting of looters and rapists. Are there statistics? Unfortunately, not all documents have been declassified, but from those that have been, we are able to calculate the scale of the phenomenon.
Here is a report from the military prosecutor of the 1st Belorussian Front concerning unlawful acts against the civilian population during the period from 22 April to 5 May 1945. The seven armies of the front consisted of 908,500 personnel and 124 crimes were recorded, including 72 of rape. Only 72 cases per 908,500 … Your opponents have written that a wave of rapes occurred before the capture of Berlin … On April 20 there were directives concerning a change in attitude towards the German civilian population and prisoners of war. So here we have our opponents focusing on the fact that the order had come too late, that during the whole period of the winter and early spring of ’45 the Red Army rioted with impunity. This is not true. Because in addition to this order and subsequent directives, there were orders at front, army and individual unit level that were issued before the Red Army entered the territory of other states. Pamphlets were distributed that told the history of a country, its culture and local traditions. In January of ’45, orders were given to Konev, Rokossovsky and Zhukov, saying that they had to control any feelings of direct revenge [that those under their command had] and to prevent any incidents that would be interpreted as negative. And how was this perceived by the soldiers? After all, many had lost loved ones at home; a feeling of revenge had been engendered amongst them. Remember Ilya Ehrenburg and his “Kill the Germans!” And then they came to the lair [of the beast] and all of a sudden were told to “put aside revenge” … Of course, many were not happy with these explanations as regards this new attitude towards revenge. 
In reports from commissars there are recorded conversations between soldiers who resented these orders: “First they say one thing, then another and why we should feel sorry for those Germans as if they had behaved well on our territory” … But tough disciplinary measures on the one hand, and the Russian love of children on the other (even the Germans recognized that our soldiers were very nice with German children and fed them not only from central food stores but also from their own rations, sometimes giving them all that they had) prevented acts of revenge from happening. But the main thing, which was emphasized at all levels, was that in their actions “we should not be like the Germans”. The historian Yuri Zhukov has argued that rape and other crimes were mostly committed not by soldiers of the Red Army but by former Red Army soldiers just liberated from concentration camps and by civilians who had been deported to Germany … Yes, while awaiting repatriation they were not under any control or command, and they generally were quite a motley crowd of repatriates. They formed gangs and started robbing locals in order to make up for the humiliations they had suffered, and as this happened in the area of responsibility of the Soviet troops, all of this was blamed on our soldiers. There is other evidence of allies liberated from the camps, who were engaged in looting in Berlin, stuffing old cars with junk and being told as they were exiting the city to take it back to where they had got it. Another point: in the same reports from military prosecutors it is often stated that there are cases stipulated in which an alleged rape was not confirmed, where the commanders had to punish the innocent. There is a very interesting diary left by Australian correspondent Osmar White, who accompanied the American army, and visited all the zones of occupation. 
He did not feel much sympathy for us, but claimed that the Red Army, in contrast to the allies, was very disciplined; that the Soviet administration was very effective not only in its acts to combat crime, but also in the field of urban regeneration and the provision of the necessities of life; and all the horrors that they tell of our soldiers were, on the one hand, rumours and gossip, and that, on the other, these crimes were mostly committed by those who were awaiting repatriation. The Germans fled from the Anglo-Americans to the Russians And how did women in those territories relate to our soldiers? Oh, a whole dissertation could be written about that topic. First of all, there was a colossal difference in mentalities. All of these tales that soldiers, especially those from rural areas, had been de-Christianized and were lascivious by definition are nonsense. On the contrary, most of them had been brought up in a patriarchal tradition; that Hungarians and Austrians routinely had numerous sexual relations before marriage was for them simply animal-like behaviour. From a Russian soldier’s understanding, what kind of woman does that? I shall not say, because that would imply a certain aversion to such women. Commanders were in a state of shock. There are lots of documented reports of groups of women who, led by their “Madame”, immediately offered their sexual services as soon as contact was made with a village. In all such cases, the reaction of our officers was angry and abusive. In addition, it was often revealed that the Nazis specifically allowed a certain number of women infected with venereal disease to cause soldiers to be incapacitated. Is this part and parcel of “raped Germany” as well? In Romania and Hungary our soldiers visited brothels, but, as a rule, not very many: they went out of curiosity, and then later there were unpleasant feelings and a sense of disgust and confusion. 
The very idea of buying another person did not fit in the head of the Soviet man. Do not forget that there existed the very common phenomenon of military prostitution. There have been preserved diaries of German women, where they philosophically argue that prostitution is quite a respectable profession. It was very common, especially in the western zone of occupation, where the Germans, by the way, were very scantily supplied with food (as opposed to the Soviet zone, where children up to 8 years of age were even given even milk). The German daily ration was less than an American breakfast. Naturally, the women were forced to earn a living in the well known manner. In this type of rape there were more than enough cases. And if the Germans make claims of violence, it is not directed towards us but against the Allies, from whom the Germans had fled en masse in terror into the Soviet occupation zone. And how, by the way, did U.S. commanders react to the crimes of their subordinates? They often chose not to pay any attention to them. The diaries of the same Osmar White said that crimes against German women were widespread and that they were in no way brought to a halt by the American commanders; that if any kind of reaction did take place, then this happened only in respect of the Negro rapists. Racism? Yes. In the minds of U.S. commanders, Negros dared not raise a hand against a white woman; if they did, they would be treated in the customary way back in the USA. It was different for French troops. In the U.S. Senate after the war there was described the behaviour of native [African] French forces, the Senegalese, in Stuttgart. Cited figures say that within one to two days there were about three thousand raped in the Stuttgart underground alone. To this very day the Italians claim that the Anglo-American military were responsible for the atrocities that took place on Italian territory and committed by Moroccans. 
These men raped not only women, but also young men … Trophies – bolts of fabric and needles We are also accused of looting. Those trophies, by the way: where did they come from? This is very interesting. There are complete documents stating that it was chiefly the Anglo-Americans who were engaged in looting and according to a thoroughgoing programme as well: possessions were loaded onto ships, and gradually they began to cause bottlenecks in ports. Basically, they were collecting an assortment of things of various value. As for the Red Army, there was such a thing as “baraholstvo” [odds and ends]. What’s that? It wasn’t a question of robbery, but the collection of abandoned property in abeyance: open houses, smashed shops, abandoned suitcases … even when they were fighting and during a lull in the combat. At certain times their commanding officers let them send parcels back homes. But they didn’t send back diamond encrusted watches, but what was necessary in a war-ravaged economy: valued sets of sewing needles – they could be bartered for a good selection of food products; bolts of cloth, because there was no clothing left back home; many sent back tools: hammers, pliers, planes: the front line troops knew that they would soon be back home and would need something to rebuild burnt down villages with. You cannot point an accusing finger at them for doing that. In all the letters enclosed with the parcels, the soldiers try to justify themselves in front of their wives and relatives for taking these rags and junk. They were very disgusted with themselves for doing this … By the way, do you remember the famous photo taken of the Reichstag being discussed on the Internet recently, where on one of the officer’s wrists there are two watches? I have had that picture for a while now. They’re actually a watch and a compass that I believe a commanding officer wears. 
And do you remember the photo, where a Soviet soldier is robbing a Berliner of her bike and how the Web Liberals were screaming about the looting? But what was shown was a soldier confiscating a bicycle needed by the army. Do you see the difference in the way this action is looked at? History is blackened free of charge By the way, about our home-grown liberals: have they some interest in lying about their forebears? In fact, there are people who are fully aware of what they are doing. Let’s just say that there is a price, though we are not necessarily talking about a financial reward. There are other means of encouragement: a trip abroad, grants, citizenship … But there is a large stratum of Internet professionals who mindlessly repeat a lie behind the first group. Their minds are so muddled that they are willing to believe any nonsense. And it’s not only the network hamsters that are involved in this, but intellectuals as well. Here is a lecturer from the North Caucasus Federal University, Pavel Polyan, speaking on the air from a very liberal radio station, after having been asked about the fate some of our women who had had intimate relationships with the occupiers. He says: “There has been rape, but it was not a massive wave of rape. In any case, it is not commensurate with the mass rapes, which the Red Army undertook when entering Germany … ” By the way, is there a lot of disagreement between Russian historians on this issue? I should not want to focus on specific colleagues. There are historians of the professional community, and there are people who have positioned themselves as such: we call them “folk-history” and they are amateurs trying to impose their views on the public. 
So, amongst the professionals there are no differences of opinion concerning this subject and there cannot be… “This is an attempt to deprive the people of their history” It’s bad enough that this image of a drunken and rapacious Russian soldier appears in Western movies, but we do the same thing in our own films! This hasn’t just begun now. Remember how many of those films there were after the collapse of the Union. And the first picture that showed the war not from the point of name-calling, but from a patriotic point of view, came only in 2002, “Zvezda” [The Star]. All that came before was full of myths about Stalin’s tyranny, the “bloody” NKVD, SMERSH, the Special Department of which, it appears, only did shootings of good officers in the back and terrorized the troops. And we were fed the idea that victory came despite our leadership, and in a number of films you could sense the hidden suggestion that perhaps we might not really have won … What was the reason for this? The Great Patriotic War [in the West: The Russo-Soviet War 1941-1945, the Eastern Front (Europe) of World War II – trans. ME] is still in our history; it is that episode that unites the people, and not just ours, but the other nations of the former Soviet Union. And when on May 9 they try to erase it from memory or to tarnish it, the aim of this is quite obvious: it is an attempt to deprive the people of their history, and to show that we have no past to be proud of. If the majority of the population can see this, then such people will have no future. Understanding the history of the Great Patriotic War has long turned into a major field of the information battle. Are we losing this battle? In general, yes. Why does not one historian oppose these attacks on our history? 
Doesn’t the state talk about the need to protect history from falsifications, and don’t films in which Russian soldiers continue to appear like frostbitten cattle, and don’t the liberals quietly continue to broadcast on the government channels about the “crime” of the Red Army… Do you want to hear what I think about this? Because there really is no anti-falsification at state level. And you’ve got to be really tough about this issue. And it should be brought up with the person at the very top. One of the Russian emperors, Nicholas I, having somehow found out that there was to be performed in Paris a play that would blacken the name of the Russian army, demanded that it not be performed. And when the French king refused to do so, claiming freedom of artistic expression, the Russian emperor replied “Well, I’ll send you an audience of a million dressed in their army greatcoats, and they’ll hiss and boo. The play was immediately cancelled… Can you imagine someone in the United States “at the behest of the soul” making a film in which American soldiers in Germany only rape, rob and drink? .. I think in the career of such directors and writers it would be their last film. They are very closely monitoring such manifestations of “freedom.” They realize how dangerous it is. Not only that: research on this subject, if it is maintained, is not publicized. By the way, in 1989, there was published the book “Other Losses” by Canadian author James Baca, who argued that in the camps in the American zone of occupation over a million German prisoners of war were starved to death. He was immediately picked on by his colleagues and announced to be almost a fool … From Firsthand Sources Eyewitnesses of Germany, 1945 “… At the end of the first day of my stay in Berlin, I was convinced that the city was dead. Human beings simply could not live in this terrifying pile of debris. By the end of the first week, my opinion began to change. Society had come alive in the ruins. 
Berliners began to receive food and water in an amount sufficient to survive. More and more people were employed in the public works under the guidance of the Russians. Thanks to the Russians, with their extensive experience in dealing with similar problems in their own devastated cities, the spread of epidemics had been put under control. I am convinced that the Soviets did in those days more in order that Berlin should survive than the Anglo-Americans would have been able to do if they had been in the position that the Russians found themselves…” “… After the fighting had moved onto German soil, there were committed many rapes by soldiers, both front line troops and those who followed immediately behind them. The number [of these rapes] depended on the attitude of senior officers towards them … Lawyers acknowledged that because of cruel and perverse sexual acts with German women, some of the soldiers were executed by firing squad, especially in cases where they were Negros. However, I know that many women were raped by white Americans. No action against these criminals has been taken … ” “… In the Red Army, strict discipline prevails. Robbery, rape and abuse is no more greater than in any other zone of occupation. Wild stories about atrocities emerge from the exaggeration and distortion of individual cases brought on by nervousness caused by Russian soldiers’ wild manners and their love of vodka. One woman who had told me most of the tales of Russian brutality, tales which would make one’s hair stand on end, was eventually forced to admit that the only evidence that she had seen with her own eyes was of drunken Russian officers firing their guns into the air and at bottles … ” From the diaries of the Australian war correspondent Osmar White “… After moving to Oberhunden. Coloured boys staged here God knows what. They set fire to the house. Slashed all the Germans with razors and raped the women … ” From the journal of the U.S. 
Army signalman Edward Wise “… Around the same day I had a talk with a pretty Hungarian girl. When she asked whether I liked it in Budapest, I replied that I did, but that I found the brothels embarrassing. “But why?” asked the girl. “Because it is not natural; it’s animal-like”, I explained. “A woman takes the money and straight after there follows ‘lovemaking’!” She thought a moment, then nodded and said, “You’re right: taking the money first is not nice …” From the memoirs of Alexander’s cavalry Homeland “… We went to a German city, billeted in homes. A “Frau” appears. She’s about 45 years old and asks for the “Herr Kommandant.” She declares that she is responsible for the town district and has collected 20 German women for the sexual (!!!) servicing of Russian soldiers … The reaction of the officers were angry and abusive. They drove off the German woman together with her ready for action ‘detachment’ …. ” From the memoirs of mortar man Nahum Orlov “… A little further on, at a railway crossing just before the village, we came across a “post for the collection of weapons and watches”. I thought I was dreaming: the civilized, prosperous British have been taking watches off German soldiers who were covered in dirt! From there we were sent to the school yard in the centre of the village. There had already been gathered there a lot of German soldiers. Watching over us, the Englishmen rolled chewing gum between their teeth – that was something new to us – and boasted to each other with their trophies, raising high their arms, studded with wrist watch … ” From the memoirs of corporal Egon Kopiske “… All this has been acquired by completely honest means, and do not imagine that in Germany, robbery and burglary is not ignored. Complete order. Whenever they came across confiscated things abandoned by Berlin “big noises”, they were distributed in a comradely way to whoever liked them… ” From a letter from Staff Sergeant V. V. 
Syrlitsyn to his wife REFERENCE “KP” Sinyavskaya, Elena Spartakovna – Senior Research Fellow of the Institute of Russian History, Russian Academy of Sciences; Doctor of Historical Sciences; Professor; member of the Academy of Military Sciences. A specialist in Russian military history of the 20th century and military psychology, and the author of over 250 scientific papers.
/***********************************
   Copyright 2017 <NAME>

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
***********************************/

#ifndef _OF_CONTROLS_
#define _OF_CONTROLS_

#include <string>
#include <map>

#include <QObject>
#include <QSharedPointer>

#include <osg/ref_ptr>
#include <osg/Vec3d>
#include <osg/Vec4>

// Qt widget types used only by pointer; forward-declared to keep the header light.
QT_FORWARD_DECLARE_CLASS(QPushButton);
QT_FORWARD_DECLARE_CLASS(QCheckBox);
QT_FORWARD_DECLARE_CLASS(QListWidget);
QT_FORWARD_DECLARE_CLASS(QListWidgetItem);

// OpenFrames scene types, likewise forward-declared.
namespace OpenFrames {
  class ReferenceFrame;
  class QWidgetPanel;
  class Sphere;
}

class RenderThread;
class OFWindow;

/*******************************************************************
 * <NAME>
 * OpenFrames Demo of embedded Qt control panels
 *
 * Wires Qt widget signals to updates of an OpenFrames scene (a sphere
 * whose visibility, color, and position are driven by panel controls).
 ******************************************************************/
class OFControls : public QObject
{
  Q_OBJECT

public:
  /// Builds the scene and control panels; useVR selects VR rendering.
  OFControls(bool useVR);
  virtual ~OFControls();

  // Non-copyable: copy and assignment constructors are deleted.
  OFControls(OFControls&) = delete;
  OFControls& operator=(OFControls&) = delete;

public slots:
  /// Slot for toggling visibility
  void toggleSphere();
  /// Slot for toggling visibility
  void setSphere(bool checked);
  /// Slot for toggling color
  void setColor(QListWidgetItem *item);
  /// Slot for toggling hidden panel
  void setHiddenPanel(bool checked = false);
  /// Slot for moving the sphere based on slider output
  void setXLocation(int position);
  /// Slot for moving the sphere based on slider output
  void setYLocation(int position);
  /// Slot for moving the sphere based on slider output
  void setZLocation(int position);

private:
  static const double MAIN_LOOP_PERIOD;          ///< Render/update loop period
  static const char *LOREM_IPSUM_DOLOR;          ///< Filler text shown on a panel
  static const std::map<std::string, osg::Vec4> COLORS;  ///< Named colors for the list widget
  static const char *DEFAULT_SPHERE_COLOR;       ///< Key into COLORS used at startup

  /// Default Panel dimensions
  static const double panelWidth, panelHeight, panelZPos;

  // Qt widgets embedded in the control panels (owned by Qt parents).
  QPushButton *_toggleButton;
  QCheckBox *_showCheckBox;
  QListWidget *_list;

  /// Sphere origin
  osg::Vec3d _sphereOrigin;

  // Render thread and top-level window wrapper.
  RenderThread *_renderer;
  OFWindow *_window;

  // OSG reference-counted scene nodes.
  osg::ref_ptr<OpenFrames::ReferenceFrame> _root;
  osg::ref_ptr<OpenFrames::QWidgetPanel> _hiddenPanel;
  osg::ref_ptr<OpenFrames::Sphere> _sphere;
};

#endif
class ForecastAPILoader:
    """Retrieve forecast data from the external Aeris weather API.

    Methods:
    ________
    fetch_forecast(lat, lon)
        request weather data based on lat/lon from the external API
    load_forecasts_from_api(resorts)
        build a Forecast for each resort in resorts
    """

    API_URL = "https://aerisweather1.p.rapidapi.com/forecasts/"
    API_KEY = os.environ.get('API_KEY')
    API_HEADER = {
        'x-rapidapi-host': "aerisweather1.p.rapidapi.com",
        'x-rapidapi-key': API_KEY
    }

    def fetch_forecast(self, lat, lon) -> dict:
        """Request weather data from the external API based on lat/lon
        and return the decoded JSON payload as a dict.

        Keyword arguments:
        lat -- the latitude of the weather forecast coordinates
        lon -- the longitude of the weather forecast coordinates
        """
        # List the specific fields we want in the JSON response so we don't
        # get a huge JSON file with fields we don't need.
        response_fields = [
            'periods.maxTempF',
            'periods.minTempF',
            'periods.snowIN',
            'periods.minHumidity',
            'periods.weatherPrimary',
            'periods.validTime',
            'periods.weatherPrimaryCoded'
        ]
        # f-string formatting works for both string and numeric lat/lon
        # (the previous '+'-concatenation raised TypeError for floats).
        request_url = f"{self.API_URL}{lat},{lon}?fields={','.join(response_fields)}"
        # timeout so a stalled API call cannot hang the loader forever
        response = requests.get(request_url, headers=self.API_HEADER, timeout=10)
        return response.json()

    def load_forecasts_from_api(self, resorts) -> list:
        """Build and return a list of Forecast objects, one per resort.

        Resorts whose API response contains no 'response' value are
        skipped (NOT aborted: the loop continues with the next resort).

        Keyword arguments:
        resorts -- iterable of resort objects exposing .resort_id, .lat and .long
        """
        forecasts = []
        fs = FauxSnow()
        for resort in resorts:
            forecast_data = self.fetch_forecast(resort.lat, resort.long)
            response = forecast_data.get('response')
            # Skip this resort when the API returned no usable response.
            if not response:
                continue
            forecast = Forecast(
                resort.resort_id,
                datetime.datetime.now().strftime("%d/%m/%Y %I:%M %p")
            )
            for period_data in response[0]['periods']:
                valid_dt = datetime.datetime.strptime(
                    period_data['validTime'], '%Y-%m-%dT%H:%M:%S%z')
                forecast_period = ForecastPeriod(
                    # Portable "Mon 5" label: strftime('%-d') is a glibc-only
                    # extension and fails on Windows, so format the day manually.
                    f"{valid_dt:%a} {valid_dt.day}",
                    period_data['minTempF'],
                    period_data['maxTempF'],
                    period_data['snowIN'],
                    period_data['weatherPrimary'],
                    period_data['weatherPrimaryCoded'],
                    period_data['minHumidity'],
                    fs.calc_conditions(
                        period_data['weatherPrimaryCoded'],
                        period_data['snowIN'],
                        period_data['minTempF'],
                        period_data['minHumidity'])
                )
                forecast.periods.append(forecast_period)
            forecasts.append(forecast)
        return forecasts
/****************************************************************************** * Copyright (c) 2014, <NAME> * * All rights reserved. * ******************************************************************************/ #include "yahtzee.h" #include "chance.h" int Chance::score(std::vector<Die> dice) { // Check to see if the slot has already been scored. if (!_filled) { // Iterate over the dice and add each value to the score. for (auto &die : dice) { _score += die.getValue(); } // Ensure the slot cannot be used in the future. _filled = true; } return getScore(); } const std::string Chance::getName() { return std::string("Chance"); } bool Chance::isFilled() { return _filled; } int Chance::getScore() { return _score; }
import { Application } from 'express'; import bodyParser from 'body-parser'; import cors from 'cors'; import config from '../config'; import api from '../api/index'; import errorMiddleware from '../api/middlewares/errorMiddleware'; import methodOverride from 'method-override'; import HttpException from '../exceptions/HttpException'; import logger from './logger'; export default (app: Application) => { app.use(cors()); app.use(methodOverride()); app.use(bodyParser.json()); app.use(config.apiPrefix, api()); app.use((req, res, next) => { const err = new HttpException(404, 'Not Found'); next(err); }); app.use(errorMiddleware); logger.info('Express middlewares loaded.'); };
module Test.Locals where import Test.Tasty import Test.Tasty.HUnit import Data.Map.Strict as M import Locals tests :: TestTree tests = testGroup "Tests" [unitTests] unitTests :: TestTree unitTests = testGroup "Unit tests" [ testCase "sum" $ do let Right (Store store) = runImp sumPgm M.lookup 0 store @?= Just 0 M.lookup 1 store @?= Just 5050 , testCase "uninitialized access" $ do let Left (ImpError { env = Env env, store = Store store, message }) = runImp uninitializedAccessPgm do { loc <- M.lookup "x" env ; M.lookup loc store } @?= Just 0 M.lookup "xx" env @?= Nothing message @?= "xx uninitialized!" , testCase "uninitialized assignment" $ do let Left (ImpError { env = Env env, store = Store store, message }) = runImp uninitializedAssignPgm do { loc <- M.lookup "x" env ; M.lookup loc store } @?= Just 1 M.lookup "xx" env @?= Nothing message @?= "xx uninitialized!" , testCase "division by zero" $ do let Left (ImpError { env = Env env, store = Store store, message }) = runImp divZeroPgm do { loc <- M.lookup "x" env ; M.lookup loc store } @?= Just 0 message @?= "Division by zero!" , testCase "locals" $ do let Right (Store store) = runImp localsPgm M.lookup 0 store @?= Just 4 ] sumPgm :: Stmt sumPgm = Stmts [ Declare ["n", "sum"] , Assign "n" (I 100) , Assign "sum" (I 0) , While (Not (Leq (Var "n") (I 0))) $ Stmts [ Assign "sum" (Plus (Var "sum") (Var "n")) , Assign "n" (Plus (Var "n") (I (-1))) ] ] uninitializedAccessPgm :: Stmt uninitializedAccessPgm = Stmts [ Declare ["x"], Assign "x" (Var "xx") ] uninitializedAssignPgm :: Stmt uninitializedAssignPgm = Stmts [ Declare ["x"] , Assign "x" (I 1) , Assign "xx" (I 2) ] divZeroPgm :: Stmt divZeroPgm = Stmts [ Declare ["x"] , Assign "x" (Div (Var "x") (I 0)) , Assign "x" (I 1) ] localsPgm :: Stmt localsPgm = Stmts [ Declare ["x", "y"] , Assign "y" (I 1) , xPlusEqualsY , Stmts [ Declare ["y"] , Assign "y" (I 2) , xPlusEqualsY ] , xPlusEqualsY ] where xPlusEqualsY = Assign "x" (Plus (Var "x") (Var "y"))
// Copyright (c) 2015 fjz13. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
#include "MedusaPreCompiled.h"
#include "Engine_Binding.h"
#ifdef MEDUSA_LUA
#include "Core/Script/ScriptEngine.h"
#include "Binding/Lua/Geometry_Binding.h"
#include "Binding/Lua/Node_Binding.h"
#include "Binding/Lua/EventArg_Binding.h"
#include "Binding/Lua/SceneManager_Binding.h"
#include "Binding/Lua/NodeFactory_Binding.h"
#include "Binding/Lua/NodeDefines_Binding.h"
#include "Binding/Lua/EngineConstants_Binding.h"

// Queues ScriptBinding::Register_<x> as a lazily-loaded Lua module named "x".
#define MEDUSA_LUA_REGISTER(x) ScriptEngine::Instance().AddNextLoadModule(ScriptBinding::Register_##x, #x)

MEDUSA_SCRIPT_BINDING_BEGIN;

// Registers every engine-level Lua binding module with the script engine.
// Each entry only queues a loader; the actual binding runs when the module loads.
bool Register_Engine()
{
	// scene graph nodes
	MEDUSA_LUA_REGISTER(INode);
	MEDUSA_LUA_REGISTER(Sprite);
	MEDUSA_LUA_REGISTER(ILayer);
	MEDUSA_LUA_REGISTER(NormalLayer);
	MEDUSA_LUA_REGISTER(IScene);
	MEDUSA_LUA_REGISTER(UIScene);
	MEDUSA_LUA_REGISTER(SceneManager);
	MEDUSA_LUA_REGISTER(NodeFactory);

	//events
	MEDUSA_LUA_REGISTER(IEventArg);
	MEDUSA_LUA_REGISTER(TouchEventArg);
	MEDUSA_LUA_REGISTER(KeyDownEventArg);
	MEDUSA_LUA_REGISTER(KeyUpEventArg);
	MEDUSA_LUA_REGISTER(CharInputEventArg);
	MEDUSA_LUA_REGISTER(ScrollEventArg);
	MEDUSA_LUA_REGISTER(KeyboardEventArg);
	MEDUSA_LUA_REGISTER(TapGestureEventArg);
	MEDUSA_LUA_REGISTER(DoubleTapGestureEventArg);
	MEDUSA_LUA_REGISTER(DragBeganGestureEventArg);
	MEDUSA_LUA_REGISTER(DragingGestureEventArg);
	MEDUSA_LUA_REGISTER(DragEndGestureEventArg);
	MEDUSA_LUA_REGISTER(DragFailedGestureEventArg);
	MEDUSA_LUA_REGISTER(LongPressBeganGestureEventArg);
	MEDUSA_LUA_REGISTER(LongPressFailedGestureEventArg);
	MEDUSA_LUA_REGISTER(PanBeginGestureEventArg);
	MEDUSA_LUA_REGISTER(PanGestureEventArg);
	MEDUSA_LUA_REGISTER(PanEndGestureEventArg);
	MEDUSA_LUA_REGISTER(PinchGestureEventArg);
	MEDUSA_LUA_REGISTER(SwipeBeginGestureEventArg);
	MEDUSA_LUA_REGISTER(SwipeMovedGestureEventArg);
	MEDUSA_LUA_REGISTER(SwipeFailedGestureEventArg);
	MEDUSA_LUA_REGISTER(SwipeSuccessGestureEventArg);

	// node lifetime / navigation flag enums
	MEDUSA_LUA_REGISTER(NodeCreateFlags);
	MEDUSA_LUA_REGISTER(NodeDeleteFlags);
	MEDUSA_LUA_REGISTER(NodePushFlags);
	MEDUSA_LUA_REGISTER(NodePopFlags);

	// publish / debug constants
	MEDUSA_LUA_REGISTER(PublishDevices);
	MEDUSA_LUA_REGISTER(PublishLanguages);
	MEDUSA_LUA_REGISTER(PublishVersions);
	MEDUSA_LUA_REGISTER(ApplicationDebugInfoFlags);
	MEDUSA_LUA_REGISTER(NodeEditors);

	return true;
}

MEDUSA_SCRIPT_BINDING_END;
#undef MEDUSA_LUA_REGISTER
#endif
<filename>Source/PointsToDraw.cpp #include "PointsToDraw.h" const float * PointsToDraw::getRawData() const noexcept { return data.data(); } //Return: start position and interval size Pair<int, int> PointsToDraw::operator[](unsigned int intervalNumver) const { return Pair<int, int>(intervals[intervalNumver], intervals[intervalNumver + 1] - intervals[intervalNumver]); } int PointsToDraw::amountOfIntervals() const noexcept { return static_cast<int>(intervals.size() - 1); } std::vector<float> & PointsToDraw::getData() noexcept { return data; } unsigned int PointsToDraw::getRawDataSize() const noexcept { return static_cast<unsigned int>(data.size()); }
#pragma once /** GPS Status, as provided by rcd */ typedef enum { /** Timestamp is invalid */ GpsTimeInvalid, /** Using CPU time */ UsingCpuTime, /** Using GPS time */ UsingGpsTime } GpsTimeStatus_t;
use crate::{
    spawn::{spawn_task, task_name},
    Bus, Lifeline,
};
use log::{debug, error};
use std::future::Future;
use std::{any::TypeId, fmt::Debug};

/// Takes channels from the [Bus](./trait.Bus.html), and spawns a tree of tasks. Returns one or more [Lifeline](./struct.Lifeline.html) values.
/// When the [Lifeline](./struct.Lifeline.html) is dropped, the task tree is immediately cancelled.
///
/// - Simple implementations can return the [Lifeline](./struct.Lifeline.html) value, a handle returned by [Task::task](./trait.Task.html#method.task).
/// - Implementations which have fallible spawns can return `anyhow::Result<Lifeline>`.
/// - Implementations which spawn multiple tasks can store lifelines for each task in self, and return `anyhow::Result<Self>`.
///
/// ## Example
/// ```
/// use lifeline::prelude::*;
/// use tokio::sync::mpsc;
///
/// lifeline_bus!(pub struct ExampleBus);
///
/// #[derive(Debug, Clone)]
/// struct ExampleMessage {}
///
/// impl Message<ExampleBus> for ExampleMessage {
///     type Channel = mpsc::Sender<Self>;
/// }
///
/// struct ExampleService {
///     _run: Lifeline
/// }
///
/// impl Service for ExampleService {
///     type Bus = ExampleBus;
///     type Lifeline = anyhow::Result<Self>;
///
///     fn spawn(bus: &ExampleBus) -> anyhow::Result<Self> {
///         let mut rx = bus.rx::<ExampleMessage>()?;
///
///         let _run = Self::task("run", async move {
///             while let Some(msg) = rx.recv().await {
///                 log::info!("got message: {:?}", msg);
///             }
///         });
///
///         Ok(Self { _run })
///     }
/// }
///
/// async fn run() {
///     let bus = ExampleBus::default();
///     let _service = ExampleService::spawn(&bus);
/// }
/// ```
pub trait Service: Task {
    /// The bus, which must be provided to spawn the task
    type Bus: Bus;

    /// The service lifeline. When dropped, all spawned tasks are immediately cancelled.
    type Lifeline;

    /// Spawns the service with all sub-tasks, and returns a lifeline value. When the lifeline is dropped, all spawned tasks are immediately cancelled.
    ///
    /// Implementations should synchronously take channels from the bus, and then use them asynchronously. This makes errors occur as early and predictably as possible.
    fn spawn(bus: &Self::Bus) -> Self::Lifeline;
}

/// Constructs the bus, spawns the service, and returns both.
pub trait DefaultService: Service {
    fn spawn_default() -> (Self::Bus, Self::Lifeline);
}

// Blanket impl: every Service whose Bus is Default gets spawn_default for free.
impl<T> DefaultService for T
where
    T: Service,
{
    fn spawn_default() -> (Self::Bus, Self::Lifeline) {
        let bus = Self::Bus::default();
        let lifeline = Self::spawn(&bus);

        (bus, lifeline)
    }
}

/// Carries messages between **two** bus instances. A variant of the [Service](./trait.Service.html).
///
/// Bus types form a tree, with a 'root application' bus, and multiple busses focused on particular domains. This structure provides isolation,
/// and predictable failures when [Services](./trait.Service.html) spawn.
/// ```text
/// - MainBus
///   | ListenerBus
///   |  | ConnectionBus
///   | DomainSpecificBus
///   |  | ...
/// ```
///
/// This trait can be implemented to carry messages between the root and the leaf of the tree.
///
/// ## Example
/// ```
/// use lifeline::prelude::*;
/// use tokio::sync::mpsc;
/// lifeline_bus!(pub struct MainBus);
/// lifeline_bus!(pub struct LeafBus);
///
/// #[derive(Debug, Clone)]
/// struct LeafShutdown {}
///
/// #[derive(Debug, Clone)]
/// struct MainShutdown {}
///
/// impl Message<LeafBus> for LeafShutdown {
///     type Channel = mpsc::Sender<Self>;
/// }
///
/// impl Message<MainBus> for MainShutdown {
///     type Channel = mpsc::Sender<Self>;
/// }
///
/// pub struct LeafMainCarrier {
///    _forward_shutdown: Lifeline
/// }
///
/// impl CarryFrom<MainBus> for LeafBus {
///     type Lifeline = anyhow::Result<LeafMainCarrier>;
///     fn carry_from(&self, from: &MainBus) -> Self::Lifeline {
///         let mut rx = self.rx::<LeafShutdown>()?;
///         let mut tx = from.tx::<MainShutdown>()?;
///
///         let _forward_shutdown = Self::try_task("forward_shutdown", async move {
///             if let Some(msg) = rx.recv().await {
///                 tx.send(MainShutdown{}).await?;
///             }
///
///             Ok(())
///         });
///
///         Ok(LeafMainCarrier { _forward_shutdown })
///     }
/// }
/// ```
pub trait CarryFrom<FromBus: Bus>: Bus + Task + Sized {
    /// The carrier lifeline. When dropped, all spawned tasks are immediately cancelled.
    type Lifeline;

    /// Spawns the carrier service, returning the lifeline value.
    fn carry_from(&self, from: &FromBus) -> Self::Lifeline;
}

/// The reciprocal of the [CarryFrom](./trait.CarryFrom.html) trait. Implemented for all types on which [CarryFrom](./trait.CarryFrom.html) is implemented.
pub trait CarryInto<IntoBus: Bus>: Bus + Task + Sized {
    /// The carrier lifeline. When dropped, all spawned tasks are immediately cancelled.
    type Lifeline;

    /// Spawns the carrier service, returning the lifeline value.
    fn carry_into(&self, into: &IntoBus) -> Self::Lifeline;
}

// Blanket impl: CarryInto is just CarryFrom with the receiver flipped.
impl<F, I> CarryInto<I> for F
where
    I: CarryFrom<F>,
    F: Bus,
    I: Bus,
{
    type Lifeline = <I as CarryFrom<F>>::Lifeline;

    fn carry_into(&self, into: &I) -> Self::Lifeline {
        into.carry_from(self)
    }
}

/// Constructs two bus types, and spawns the carrier between them.
/// Returns both busses, and the carrier's lifeline.
pub trait DefaultCarrier<FromBus: Bus>: CarryFrom<FromBus> {
    fn carry_default() -> (Self, FromBus, Self::Lifeline) {
        let into = Self::default();
        let from = FromBus::default();
        let lifeline = into.carry_from(&from);

        (into, from, lifeline)
    }
}

/// Provides the [Self::task](./trait.Task.html#method.task) and [Self::try_task](./trait.Task.html#method.try_task) associated methods for all types.
///
/// Lifeline supports the following task executors (using feature flags), and will use the first enabled flag:
/// - `tokio-executor`
/// - `async-std-executor`
///
/// Fallible tasks can be invoked with [Self::try_task](./trait.Task.html#method.try_task). Lifeline will log OK/ERR status when the task finishes.
///
/// # Example
/// ```
/// use lifeline::prelude::*;
/// use tokio::sync::mpsc;
///
/// lifeline_bus!(pub struct ExampleBus);
///
/// #[derive(Debug, Clone)]
/// struct ExampleMessage {}
///
/// impl Message<ExampleBus> for ExampleMessage {
///     type Channel = mpsc::Sender<Self>;
/// }
///
/// struct ExampleService {
///     _run: Lifeline
/// }
///
/// impl Service for ExampleService {
///     type Bus = ExampleBus;
///     type Lifeline = anyhow::Result<Self>;
///
///     fn spawn(bus: &ExampleBus) -> anyhow::Result<Self> {
///         let mut rx = bus.rx::<ExampleMessage>()?;
///
///         let _run = Self::task("run", async move {
///             while let Some(msg) = rx.recv().await {
///                 log::info!("got message: {:?}", msg);
///             }
///         });
///
///         Ok(Self { _run })
///     }
/// }
/// ```
pub trait Task {
    /// Spawns an infallible task using the provided executor, wrapping it in a [Lifeline](./struct.Lifeline.html) handle.
    /// The task will run until it finishes, or until the [Lifeline](./struct.Lifeline.html) is dropped.
    fn task<Out>(name: &str, fut: impl Future<Output = Out> + Send + 'static) -> Lifeline
    where
        Out: Debug + Send + 'static,
        Self: Sized,
    {
        let service_name = task_name::<Self>(name);
        spawn_task(service_name, fut)
    }

    /// Spawns a fallible task using the provided executor, wrapping it in a [Lifeline](./struct.Lifeline.html) handle.
    /// The task will run until it finishes, or until the [Lifeline](./struct.Lifeline.html) is dropped.
    ///
    /// If the task finishes, lifeline will log an 'OK' or 'ERR' message with the return value.
    fn try_task<Out>(
        name: &str,
        fut: impl Future<Output = anyhow::Result<Out>> + Send + 'static,
    ) -> Lifeline
    where
        Out: Debug + 'static,
        Self: Sized,
    {
        let service_name = task_name::<Self>(name);
        spawn_task(service_name.clone(), async move {
            match fut.await {
                Ok(val) => {
                    // Don't print the unit value for tasks that return ().
                    if TypeId::of::<Out>() != TypeId::of::<()>() {
                        debug!("OK {}: {:?}", service_name, val);
                    } else {
                        debug!("OK {}", service_name);
                    }
                }
                Err(err) => {
                    error!("ERR: {}: {}", service_name, err);
                }
            }
        })
    }
}

// Blanket impl: any type gets the task/try_task helpers.
impl<T> Task for T {}
def begin(self): logging.debug('begin') x = self.read8(self.TSL2561_REGISTER_ID) if not(x & 0x0A): return False self._tsl2561Initialised = True self.set_integration_time(self._tsl2561IntegrationTime) self.set_gain(self._tsl2561Gain) self.disable() logging.debug('begin_end') return True
<reponame>oliverbunting/clash-compiler {-# LANGUAGE RankNTypes #-} module Clash.Tests.Laws.Enum (tests) where import Control.DeepSeq (NFData) import Data.Proxy import Test.Tasty import Test.Tasty.HUnit import Clash.Sized.Index (Index) import Clash.Sized.Signed (Signed) import Clash.Sized.Unsigned (Unsigned) import Test.Tasty.HUnit.Extra succMaxBoundLaw :: forall a . (NFData a, Show a, Enum a, Bounded a) => Proxy a -> Assertion succMaxBoundLaw Proxy = expectException (succ @a maxBound) predMinBoundLaw :: forall a . (NFData a, Show a, Enum a, Bounded a) => Proxy a -> Assertion predMinBoundLaw Proxy = expectException (pred @a minBound) enumLaws :: (NFData a, Show a, Enum a, Bounded a) => Proxy a -> [TestTree] enumLaws proxy = [ testCase "succ maxBound ~ _|_" (succMaxBoundLaw proxy) , testCase "pred minBound ~ _|_" (predMinBoundLaw proxy) ] testEnumLaws :: (NFData a, Show a, Enum a, Bounded a) => String -> Proxy a -> TestTree testEnumLaws typeName proxy = testGroup typeName (enumLaws proxy) tests :: TestTree tests = testGroup "Enum" [ testEnumLaws "Index 1" (Proxy @(Index 1)) , testEnumLaws "Index 2" (Proxy @(Index 2)) , testEnumLaws "Index 128" (Proxy @(Index 128)) , testEnumLaws "Unsigned 0" (Proxy @(Unsigned 0)) , testEnumLaws "Unsigned 1" (Proxy @(Unsigned 1)) , testEnumLaws "Unsigned 32" (Proxy @(Unsigned 32)) , testEnumLaws "Unsigned 127" (Proxy @(Unsigned 127)) , testEnumLaws "Unsigned 128" (Proxy @(Unsigned 128)) , testEnumLaws "Signed 0" (Proxy @(Signed 0)) , testEnumLaws "Signed 1" (Proxy @(Signed 1)) , testEnumLaws "Signed 32" (Proxy @(Signed 32)) , testEnumLaws "Signed 127" (Proxy @(Signed 127)) , testEnumLaws "Signed 128" (Proxy @(Signed 128)) -- Note Fixed is tested elsewhere. ]
Senate Minority Leader Harry Reid launched a petition on Monday pressuring Donald Trump to prove he can pass the U.S. citizenship test. The petition said Trump should "put up or shut up" about immigration until the GOP presidential nominee can show his knowledge of the country he intends to lead. "Donald Trump wants to impose new tests for immigrants coming to America," read the petition. "Immigrants already face vigorous testing before gaining citizenship. And before Donald Trump makes it even harder for these hardworking people to achieve their American Dream, he should prove he can pass the test himself." Reid said last week that Trump would fail the current naturalization requirements, let alone the more stringent ones Trump has previously said he would favor, describing him as a "spoiled, unpatriotic drain on society who has earned nothing and helped no one." Trump has asserted that he is working with the Hispanic community to ensure fair but firm naturalization policies, which has led to some criticism from conservatives that Trump may be softening his hardline approach for dealing with millions of illegal immigrants. "No. I'm not flip-flopping. We want to come up with one a really fair, but firm answer. That's — it has to be very firm. But we want to come up with something fair," Trump said.
#include "IOMC/ParticleGuns/interface/BeamMomentumGunProducer.h"
#include "DataFormats/Math/interface/deltaPhi.h"
#include "SimDataFormats/GeneratorProducts/interface/HepMCProduct.h"
#include "SimDataFormats/GeneratorProducts/interface/GenEventInfoProduct.h"
#include "FWCore/Framework/interface/Event.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "FWCore/MessageLogger/interface/MessageLogger.h"
#include "FWCore/ServiceRegistry/interface/Service.h"
#include "FWCore/Utilities/interface/RandomNumberGenerator.h"
#include "CLHEP/Random/RandFlat.h"
#include "TFile.h"

#include <cmath>

namespace CLHEP {
  class HepRandomEngine;
}

namespace edm {

  // Particle gun that replays beam particles recorded in a ROOT tree
  // ("EventTree"), applying a configurable vertex offset and a random
  // angular smearing, and emits them as an (unsmeared) HepMC event.
  BeamMomentumGunProducer::BeamMomentumGunProducer(const edm::ParameterSet& pset)
      : FlatBaseThetaGunProducer(pset),
        parPDGId_(nullptr),
        parX_(nullptr),
        parY_(nullptr),
        parZ_(nullptr),
        parPx_(nullptr),
        parPy_(nullptr),
        parPz_(nullptr),
        b_npar_(nullptr),
        b_eventId_(nullptr),
        b_parPDGId_(nullptr),
        b_parX_(nullptr),
        b_parY_(nullptr),
        b_parZ_(nullptr),
        b_parPx_(nullptr),
        b_parPy_(nullptr),
        b_parPz_(nullptr) {
    edm::ParameterSet pgun_params = pset.getParameter<edm::ParameterSet>("PGunParameters");

    // doesn't seem necessary to check if pset is empty
    // Beam-spot offset (cm) and fixed longitudinal vertex position.
    xoff_ = pgun_params.getParameter<double>("XOffset");
    yoff_ = pgun_params.getParameter<double>("YOffset");
    zpos_ = pgun_params.getParameter<double>("ZPosition");
    if (fVerbosity > 0)
      edm::LogVerbatim("BeamMomentumGun")
          << "Beam vertex offset (cm) " << xoff_ << ":" << yoff_ << " and z position " << zpos_;

    // Open the input ROOT file holding the recorded beam particles.
    edm::FileInPath fp = pgun_params.getParameter<edm::FileInPath>("FileName");
    std::string infileName = fp.fullPath();

    fFile_ = new TFile(infileName.c_str());
    fFile_->GetObject("EventTree", fTree_);
    nentries_ = fTree_->GetEntriesFast();
    if (fVerbosity > 0)
      edm::LogVerbatim("BeamMomentumGun") << "Total Events: " << nentries_ << " in " << infileName;

    // Set branch addresses and branch pointers
    int npart = fTree_->SetBranchAddress("npar", &npar_, &b_npar_);
    int event = fTree_->SetBranchAddress("eventId", &eventId_, &b_eventId_);
    int pdgid = fTree_->SetBranchAddress("parPDGId", &parPDGId_, &b_parPDGId_);
    int parxx = fTree_->SetBranchAddress("parX", &parX_, &b_parX_);
    int paryy = fTree_->SetBranchAddress("parY", &parY_, &b_parY_);
    int parzz = fTree_->SetBranchAddress("parZ", &parZ_, &b_parZ_);
    int parpx = fTree_->SetBranchAddress("parPx", &parPx_, &b_parPx_);
    int parpy = fTree_->SetBranchAddress("parPy", &parPy_, &b_parPy_);
    int parpz = fTree_->SetBranchAddress("parPz", &parPz_, &b_parPz_);
    // Any nonzero SetBranchAddress return code means a missing/mismatched branch.
    if ((npart != 0) || (event != 0) || (pdgid != 0) || (parxx != 0) || (paryy != 0) || (parzz != 0) ||
        (parpx != 0) || (parpy != 0) || (parpz != 0))
      throw cms::Exception("GenException") << "Branch address wrong in i/p file\n";

    produces<HepMCProduct>("unsmeared");
    produces<GenEventInfoProduct>();
    if (fVerbosity > 0)
      edm::LogVerbatim("BeamMomentumGun") << "BeamMonetumGun is initialzed";
  }

  void BeamMomentumGunProducer::produce(edm::Event& e, const edm::EventSetup& es) {
    if (fVerbosity > 0)
      edm::LogVerbatim("BeamMomentumGun") << "BeamMomentumGunProducer : Begin New Event Generation";

    edm::Service<edm::RandomNumberGenerator> rng;
    CLHEP::HepRandomEngine* engine = &rng->getEngine(e.streamID());

    // event loop (well, another step in it...)
    // no need to clean up GenEvent memory - done in HepMCProduct
    // here re-create fEvt (memory)
    //
    fEvt = new HepMC::GenEvent();

    // Pick a random entry from the tree to replay for this event.
    long int rjentry = static_cast<long int>(CLHEP::RandFlat::shoot(engine, 0, nentries_ - 1));
    fTree_->GetEntry(rjentry);
    if (fVerbosity > 0)
      edm::LogVerbatim("BeamMomentumGun") << "Entry " << rjentry << " : " << eventId_ << " : " << npar_;

    // loop over particles
    int barcode = 1;
    for (unsigned int ip = 0; ip < parPDGId_->size(); ip++) {
      int partID = parPDGId_->at(ip);
      const HepPDT::ParticleData* pData = fPDGTable->particle(HepPDT::ParticleID(std::abs(partID)));
      double mass = pData->mass().value();
      if (fVerbosity > 0)
        edm::LogVerbatim("BeamMomentumGun") << "PDGId: " << partID << " mass: " << mass;

      // Vertex: recorded transverse position rotated by 90 degrees about z,
      // shifted by the configured beam offset; z is fixed. Input positions are
      // presumably in mm while offsets are in cm — TODO confirm against the tree producer.
      double xp = (xoff_ * cm2mm_ + (-1) * parY_->at(ip));  // 90 degree rotation applied
      double yp = (yoff_ * cm2mm_ + parX_->at(ip));         // 90 degree rotation applied
      double zp = zpos_ * cm2mm_;
      HepMC::GenVertex* Vtx = new HepMC::GenVertex(HepMC::FourVector(xp, yp, zp));

      // Momentum: convert recorded MeV components to GeV.
      double pxGeV = MeV2GeV_ * parPx_->at(ip);
      double pyGeV = MeV2GeV_ * parPy_->at(ip);
      double pzGeV = MeV2GeV_ * parPz_->at(ip);
      double momRand2 = pxGeV * pxGeV + pyGeV * pyGeV + pzGeV * pzGeV;
      double energy = std::sqrt(momRand2 + mass * mass);
      double mom = std::sqrt(momRand2);
      HepMC::FourVector pGeV(pxGeV, pyGeV, pzGeV, energy);
      double ptheta = pGeV.theta();
      double pphi = pGeV.phi();

      // Smear the direction by a random offset inside the configured
      // theta/phi windows, keeping theta in [0, pi] and phi in (-pi, pi].
      double theta = CLHEP::RandFlat::shoot(engine, fMinTheta, fMaxTheta);
      double phi = CLHEP::RandFlat::shoot(engine, fMinPhi, fMaxPhi);
      if (phi > M_PI)
        phi = -(2 * M_PI - phi);
      double newtheta = ptheta + theta;
      if (newtheta > M_PI && newtheta <= 2 * M_PI)
        newtheta = 2 * M_PI - newtheta;
      double newphi = reco::reduceRange(pphi + phi);
      double px = mom * sin(newtheta) * cos(newphi);
      double py = mom * sin(newtheta) * sin(newphi);
      double pz = mom * cos(newtheta);
      if (fVerbosity > 0) {
        edm::LogVerbatim("BeamMomentumGun") << "ptheta:pphi " << ptheta << ":" << pphi << "\ntheta:phi " << theta
                                            << ":" << phi << "\nnewtheta:newphi " << newtheta << ":" << newphi;
        edm::LogVerbatim("BeamMomentumGun") << "x:y:z [mm] " << xp << ":" << yp << ":" << zpos_
                                            << "\npx:py:pz [GeV] " << px << ":" << py << ":" << pz;
      }

      HepMC::FourVector p(px, py, pz, energy);
      HepMC::GenParticle* part = new HepMC::GenParticle(p, partID, 1);
      part->suggest_barcode(barcode);
      barcode++;
      Vtx->add_particle_out(part);

      if (fAddAntiParticle) {
        // Mirror the momentum; photons (22) and Z (23) are their own antiparticle.
        HepMC::FourVector ap(-px, -py, -pz, energy);
        int apartID = (partID == 22 || partID == 23) ? partID : -partID;
        HepMC::GenParticle* apart = new HepMC::GenParticle(ap, apartID, 1);
        apart->suggest_barcode(barcode);
        if (fVerbosity > 0)
          edm::LogVerbatim("BeamMomentumGun")
              << "Add anti-particle " << apartID << ":" << -px << ":" << -py << ":" << -pz;
        barcode++;
        Vtx->add_particle_out(apart);
      }

      fEvt->add_vertex(Vtx);
    }

    fEvt->set_event_number(e.id().event());
    fEvt->set_signal_process_id(20);

    if (fVerbosity > 0)
      fEvt->print();

    // Hand the GenEvent off to the framework; HepMCProduct takes ownership.
    std::unique_ptr<HepMCProduct> BProduct(new HepMCProduct());
    BProduct->addHepMCData(fEvt);
    e.put(std::move(BProduct), "unsmeared");

    std::unique_ptr<GenEventInfoProduct> genEventInfo(new GenEventInfoProduct(fEvt));
    e.put(std::move(genEventInfo));

    if (fVerbosity > 0)
      edm::LogVerbatim("BeamMomentumGun") << "BeamMomentumGunProducer : Event Generation Done";
  }
}  // namespace edm
def _on_disconnect(client, userdata, return_code): _logger.info(''.join(('Client has disconnected with code <', str(return_code), '>.'))) client.connected = False
/*
 * WARNING! All changes made in this file will be lost!
 * Created from 'scheme.tl' by 'mtprotoc'
 *
 * Copyright (c) 2021-present, Teamgram Studio (https://teamgram.io).
 * All rights reserved.
 *
 * Author: teamgramio (<EMAIL>)
 */

// Package status_client is the generated RPC client wrapper for the
// status service. Each method forwards to the underlying zrpc connection.
package status_client

import (
	"context"

	"github.com/teamgram/proto/mtproto"
	"github.com/teamgram/teamgram-server/app/service/status/status"
	"github.com/zeromicro/go-zero/zrpc"
)

var _ *mtproto.Bool

// StatusClient exposes the session presence RPCs of the status service.
type StatusClient interface {
	StatusSetSessionOnline(ctx context.Context, in *status.TLStatusSetSessionOnline) (*mtproto.Bool, error)
	StatusSetSessionOffline(ctx context.Context, in *status.TLStatusSetSessionOffline) (*mtproto.Bool, error)
	StatusGetUserOnlineSessions(ctx context.Context, in *status.TLStatusGetUserOnlineSessions) (*status.UserSessionEntryList, error)
	StatusGetUsersOnlineSessionsList(ctx context.Context, in *status.TLStatusGetUsersOnlineSessionsList) (*status.Vector_UserSessionEntryList, error)
}

// defaultStatusClient implements StatusClient over a zrpc.Client connection.
type defaultStatusClient struct {
	cli zrpc.Client
}

// NewStatusClient wraps an existing zrpc client as a StatusClient.
func NewStatusClient(cli zrpc.Client) StatusClient {
	return &defaultStatusClient{
		cli: cli,
	}
}

// StatusSetSessionOnline
// status.setSessionOnline user_id:long auth_key_id:long gateway:string expired:long layer:int = Bool;
func (m *defaultStatusClient) StatusSetSessionOnline(ctx context.Context, in *status.TLStatusSetSessionOnline) (*mtproto.Bool, error) {
	client := status.NewRPCStatusClient(m.cli.Conn())
	return client.StatusSetSessionOnline(ctx, in)
}

// StatusSetSessionOffline
// status.setSessionOffline user_id:long auth_key_id:long = Bool;
func (m *defaultStatusClient) StatusSetSessionOffline(ctx context.Context, in *status.TLStatusSetSessionOffline) (*mtproto.Bool, error) {
	client := status.NewRPCStatusClient(m.cli.Conn())
	return client.StatusSetSessionOffline(ctx, in)
}

// StatusGetUserOnlineSessions
// status.getUserOnlineSessions user_id:long = UserSessionEntryList;
func (m *defaultStatusClient) StatusGetUserOnlineSessions(ctx context.Context, in *status.TLStatusGetUserOnlineSessions) (*status.UserSessionEntryList, error) {
	client := status.NewRPCStatusClient(m.cli.Conn())
	return client.StatusGetUserOnlineSessions(ctx, in)
}

// StatusGetUsersOnlineSessionsList
// status.getUsersOnlineSessionsList Vector<long> = Vector<UserSessionEntryList>;
func (m *defaultStatusClient) StatusGetUsersOnlineSessionsList(ctx context.Context, in *status.TLStatusGetUsersOnlineSessionsList) (*status.Vector_UserSessionEntryList, error) {
	client := status.NewRPCStatusClient(m.cli.Conn())
	return client.StatusGetUsersOnlineSessionsList(ctx, in)
}
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef HIGHWAYHASH_HIGHWAYHASH_HIGHWAY_TREE_HASH_H_
#define HIGHWAYHASH_HIGHWAYHASH_HIGHWAY_TREE_HASH_H_

#ifdef __AVX2__

#include <cstddef>

#include "state_helpers.h"
#include "vec2.h"

namespace highwayhash {

// J-lanes tree hashing: see http://dx.doi.org/10.4236/jis.2014.53010
// AVX2 implementation: all four 64-bit lanes live in one 256-bit register.
class HighwayTreeHashState {
 public:
  // Four (2 x 64-bit) hash states are updated in parallel by injecting
  // four 64-bit packets per Update(). Finalize() combines the four states into
  // one final 64-bit digest.
  using Key = uint64[4];

  static const int kPacketSize = sizeof(Key);

  explicit INLINE HighwayTreeHashState(const Key& key_lanes) {
    // "Nothing up my sleeve" numbers, concatenated hex digits of Pi from
    // http://www.numberworld.org/digits/Pi/, retrieved Feb 22, 2016.
    //
    // We use this python code to generate the fourth number to have
    // more even mixture of bits:
    /*
    def x(a,b,c):
      retval = 0
      for i in range(64):
        count = ((a >> i) & 1) + ((b >> i) & 1) + ((c >> i) & 1)
        if (count <= 1):
          retval |= 1 << i
      return retval
    */
    const V4x64U init0(0x243f6a8885a308d3ull, 0x13198a2e03707344ull,
                       0xa4093822299f31d0ull, 0xdbe6d5d5fe4cce2full);
    const V4x64U init1(0x452821e638d01377ull, 0xbe5466cf34e90c6cull,
                       0xc0acf169b5f18a8cull, 0x3bd39e10cb0ef593ull);
    const V4x64U key = LoadU(key_lanes);
    // XOR-ing the (permuted) key into distinct constants keeps the two
    // halves of the state different even for an all-zero key.
    v0 = key ^ init0;
    v1 = Permute(key) ^ init1;
    mul0 = init0;
    mul1 = init1;
  }

  // Loads one 32-byte packet from (possibly unaligned) memory and mixes it in.
  INLINE void Update(const char* packet_ptr) {
    const V4x64U packet = LoadU(reinterpret_cast<const uint64*>(packet_ptr));
    Update(packet);
  }

  // Core mixing step: add the packet, cross-multiply the two state halves
  // (32x32 -> 64-bit products), and shuffle bytes to spread entropy.
  INLINE void Update(const V4x64U& packet) {
    v1 += packet;
    v1 += mul0;
    mul0 ^= V4x64U(_mm256_mul_epu32(v0, v1 >> 32));
    v0 += mul1;
    mul1 ^= V4x64U(_mm256_mul_epu32(v1, v0 >> 32));
    v0 += ZipperMerge(v1);
    v1 += ZipperMerge(v0);
  }

  INLINE uint64 Finalize() {
    // Mix together all lanes.
    PermuteAndUpdate();
    PermuteAndUpdate();
    PermuteAndUpdate();
    PermuteAndUpdate();

    const V4x64U sum = v0 + v1 + mul0 + mul1;
    // Much faster than Store(v0 + v1) to uint64[].
    return _mm_cvtsi128_si64(_mm256_extracti128_si256(sum, 0));
  }

 private:
  static INLINE V4x64U ZipperMerge(const V4x64U& v) {
    // Multiplication mixes/scrambles bytes 0-7 of the 64-bit result to
    // varying degrees. In descending order of goodness, bytes
    // 3 4 2 5 1 6 0 7 have quality 228 224 164 160 100 96 36 32.
    // As expected, the upper and lower bytes are much worse.
    // For each 64-bit lane, our objectives are:
    // 1) maximizing and equalizing total goodness across the four lanes.
    // 2) mixing with bytes from the neighboring lane (AVX-2 makes it difficult
    //    to cross the 128-bit wall, but PermuteAndUpdate takes care of that);
    // 3) placing the worst bytes in the upper 32 bits because those will not
    //    be used in the next 32x32 multiplication.
    const uint64 hi = 0x070806090D0A040Bull;
    const uint64 lo = 0x000F010E05020C03ull;
    return V4x64U(_mm256_shuffle_epi8(v, V4x64U(hi, lo, hi, lo)));
  }

  static INLINE V4x64U Permute(const V4x64U& v) {
    // For complete mixing, we need to swap the upper and lower 128-bit halves;
    // we also swap all 32-bit halves.
    const V4x64U indices(0x0000000200000003ull, 0x0000000000000001ull,
                         0x0000000600000007ull, 0x0000000400000005ull);
    return V4x64U(_mm256_permutevar8x32_epi32(v, indices));
  }

  INLINE void PermuteAndUpdate() {
    // It is slightly better to permute v0 than v1; it will be added to v1.
    Update(Permute(v0));
  }

  // Two accumulator halves plus two multiplication-feedback registers.
  V4x64U v0;
  V4x64U v1;
  V4x64U mul0;
  V4x64U mul1;
};

// AVX-2 specialization for 1.1x higher hash throughput at 1KB.
template <>
INLINE void PaddedUpdate<HighwayTreeHashState>(const uint64 size,
                                               const char* remaining_bytes,
                                               const uint64 remaining_size,
                                               HighwayTreeHashState* state) {
  // Copying into an aligned buffer incurs a store-to-load-forwarding stall.
  // Instead, we use masked loads to read any remaining whole uint32
  // without incurring page faults for the others.
  const size_t remaining_32 = remaining_size >> 2;  // 0..7

  // mask[32*i+31] := uint32 #i valid/accessible ? 1 : 0.
  // To avoid large lookup tables, we pack uint32 lanes into bytes,
  // compute the packed mask by shifting, and then sign-extend 0xFF to
  // 0xFFFFFFFF (although only the MSB needs to be set).
  // remaining_32 = 0 => mask = 00000000; remaining_32 = 7 => mask = 01111111.
  const uint64 packed_mask = 0x00FFFFFFFFFFFFFFULL >> ((7 - remaining_32) * 8);
  const V4x64U mask(_mm256_cvtepi8_epi32(_mm_cvtsi64_si128(packed_mask)));
  // Load 0..7 remaining (potentially unaligned) uint32.
  const V4x64U packet28(_mm256_maskload_epi32(
      reinterpret_cast<const int*>(remaining_bytes), mask));

  // Load any remaining bytes individually and combine into a uint32.
  const int remainder_mod4 = remaining_size & 3;
  // Length padding ensures that zero-valued buffers of different lengths
  // result in different hashes.
  uint32 packet4 = static_cast<uint32>(size) << 24;
  const char* final_bytes = remaining_bytes + (remaining_32 * 4);
  for (int i = 0; i < remainder_mod4; ++i) {
    const uint32 byte = static_cast<unsigned char>(final_bytes[i]);
    packet4 += byte << (i * 8);
  }

  // The upper 4 bytes of packet28 are zero; replace with packet4 to
  // obtain the (length-padded) 32-byte packet.
  const V4x64U v4(_mm256_broadcastd_epi32(_mm_cvtsi32_si128(packet4)));
  const V4x64U packet(_mm256_blend_epi32(packet28, v4, 0x80));

  state->Update(packet);
}

// J-lanes tree hash based upon multiplication and "zipper merges".
//
// Robust versus timing attacks because memory accesses are sequential
// and the algorithm is branch-free. Requires an AVX-2 capable CPU.
//
// "key" is a secret 256-bit key unknown to attackers.
// "bytes" is the data to hash (possibly unaligned).
// "size" is the number of bytes to hash; exactly that many bytes are read.
//
// Returns a 64-bit hash of the given data bytes.
static INLINE uint64 HighwayTreeHash(const HighwayTreeHashState::Key& key,
                                     const char* bytes, const uint64 size) {
  return ComputeHash<HighwayTreeHashState>(key, bytes, size);
}

}  // namespace highwayhash

#endif  // #ifdef __AVX2__
#endif  // #ifndef HIGHWAYHASH_HIGHWAYHASH_HIGHWAY_TREE_HASH_H_
#!/usr/bin/env python
# Thin launcher for the Kegbot core application.
#
# Fix: removed the stray scrape artifact ("<filename>...<gh_stars>...") that
# preceded the shebang — it was a Python syntax error and prevented the
# shebang from being on line 1, which the kernel requires for direct execution.
from kegbot.pycore import kegbot_app

# Re-export the application's usage text as this script's module docstring so
# help()/pydoc on this wrapper show the real documentation.
__doc__ = kegbot_app.__doc__

if __name__ == '__main__':
    # Build and run the core app; this call blocks for the process lifetime.
    kegbot_app.KegbotCoreApp.BuildAndRun()
/** * Extracts the OperationId from a Operation-Location returned by the POST Read operation * @param operationLocation * @return operationId */ private static String extractOperationIdFromOpLocation(String operationLocation) { if (operationLocation != null && !operationLocation.isEmpty()) { String[] splits = operationLocation.split("/"); if (splits != null && splits.length > 0) { return splits[splits.length - 1]; } } throw new IllegalStateException("Something went wrong: Couldn't extract the operation id from the operation location"); }