content
stringlengths
10
4.9M
<reponame>LeandroOSBr/contagemEvasao import os directory = '../Videos/segmentado/comprimido' for filename in sorted(os.listdir(directory)): #print(filename) txtFilename = directory + "/" + (filename[0:len(filename)-4]) + ".txt" mode = 'a' if os.path.exists(txtFilename) else 'w' with open(txtFilename, mode) as f: print(txtFilename)
#[derive(Copy, Clone)] enum Movement { Up, Down, Left, Right } fn print_direction(m: Movement) { match m { Movement::Up => println!("^"), Movement::Down => println!("v"), Movement::Left => println!(">"), Movement::Right => println!("<"), } } pub fn run() { for m in [ Movement::Up, Movement::Down, Movement::Left, Movement::Right ].iter() { print_direction(*m); // BUGFIX: move occurs because `*m` has type `enums::Movement`, which does not implement the `Copy` trait } }
<filename>src/day09/input.ts export const day9Input = `446 players; last marble is worth 71522 points`;
#infile = open('task.in') t, s, x = map(int, input().split()) #infile.readline().split()) #infile.close() #outfile = open('task.out', 'w') if ((x-t) % s == 0 or (x-t-1) % s == 0) and x >= t and x != t+1: #outfile.write print('YES') else: #outfile.write print('NO') #outfile.close()
/** * Query ProjectDataSet based on the combination of conditions */ public ProjectDataSetMySqlModel findOne(String projectId, String dataSetId, JobMemberRole memberRole) { return projectDataSetRepo .findOne( Where .create() .equal("projectId", projectId) .equal("dataSetId", dataSetId) .equal("memberRole", memberRole) .build(ProjectDataSetMySqlModel.class) ).orElse(null); }
<reponame>mamaral/MAThemeKit // // MAThemeKit.h // MAThemeKit // // Created by Mike on 8/29/14. // Copyright (c) 2014 <NAME>. All rights reserved. // #import <UIKit/UIKit.h> @interface MAThemeKit : NSObject /* Loosely based on the list of methods and properties conforming to UIAppearance in iOS 7, by Mattt. https://gist.github.com/mattt/5135521 This is by no means an exhaustive list, but the most common things I see in most apps. */ #pragma mark - Master Theme + (void)setupThemeWithPrimaryColor:(nonnull UIColor *)primaryColor secondaryColor:(nonnull UIColor *)secondaryColor fontName:(nullable NSString *)fontName lightStatusBar:(BOOL)lightStatusBar; #pragma mark - UINavigationBar + (void)customizeNavigationBarColor:(nonnull UIColor *)barColor textColor:(nonnull UIColor *)textColor buttonColor:(nonnull UIColor *)buttonColor; + (void)customizeNavigationBarColor:(nonnull UIColor *)barColor textColor:(nonnull UIColor *)textColor fontName:(nonnull NSString *)fontName fontSize:(CGFloat)fontSize buttonColor:(nonnull UIColor *)buttonColor; #pragma mark - UIBarButtonItem + (void)customizeNavigationBarButtonColor:(nonnull UIColor *)buttonColor; #pragma mark - UITabBar + (void)customizeTabBarColor:(nonnull UIColor *)barColor textColor:(nonnull UIColor *)textColor; + (void)customizeTabBarColor:(nonnull UIColor *)barColor textColor:(nonnull UIColor *)textColor fontName:(nonnull NSString *)fontName fontSize:(CGFloat)fontSize; #pragma mark - UIButton + (void)customizeButtonColor:(nonnull UIColor *)buttonColor; #pragma mark - UISwitch + (void)customizeSwitchOnColor:(nonnull UIColor *)switchOnColor; #pragma mark - UISearchBar + (void)customizeSearchBarColor:(nonnull UIColor *)barColor buttonTintColor:(nonnull UIColor *)buttonTintColor; #pragma mark - UIActivityIndicator + (void)customizeActivityIndicatorColor:(nonnull UIColor *)color; #pragma mark - UISegmentedControl + (void)customizeSegmentedControlWithMainColor:(nonnull UIColor *)mainColor secondaryColor:(nonnull UIColor 
*)secondaryColor; #pragma mark - UISlider + (void)customizeSliderColor:(nonnull UIColor *)sliderColor; #pragma mark - UIToolbar + (void)customizeToolbarTintColor:(nonnull UIColor *)tintColor barTintColor:(nonnull UIColor *)barTintColor; + (void)customizeToolbarTintColor:(nonnull UIColor *)tintColor; + (void)customizeToolbarBarTintColor:(nonnull UIColor *)barTintColor; #pragma mark - UIPageControl + (void)customizePageControlCurrentPageColor:(nonnull UIColor *)mainColor; #pragma mark - UILabel + (void)customizeLabelColor:(nonnull UIColor *)textColor fontName:(nullable NSString *)fontName fontSize:(CGFloat)fontSize; #pragma mark - UITableView + (void)customizeTableViewColor:(nonnull UIColor *)mainColor secondaryColor:(nonnull UIColor *)secondaryColor; #pragma mark - UIBarButtonItem + (void)customizeBarButtonItemColor:(nonnull UIColor *)mainColor fontName:(nullable NSString *)fontName fontSize:(CGFloat)fontSize; #pragma mark - Color utilities + (nonnull UIColor *)colorWithR:(CGFloat)r G:(CGFloat)g B:(CGFloat)b; + (nonnull UIColor *)colorWithHexString:(nonnull NSString *)hex; @end
// coapRequestIdsForPath requests id's for a path and returns array response func coapRequestIdsForPath(ctx context.Context, conn *coap.ClientConn, path ...string) ([]uint, error) { var ids []uint path_ := filepath.Join(append([]string{"/"}, path...)...) if response, err := conn.GetWithContext(ctx, path_); err != nil { return nil, err } else if response.Code() != codes.Content { return nil, gopi.ErrUnexpectedResponse.WithPrefix(response.Code(), strconv.Quote(path_)) } else if err := json.Unmarshal(response.Payload(), &ids); err != nil { return nil, err } return ids, nil }
// connectCmd handles connect command from client - client must send this // command immediately after establishing Websocket or SockJS connection with // Centrifugo func (c *client) connectCmd(cmd *connectClientCommand) (response, error) { if c.authenticated { logger.ERROR.Println("connect error: client already authenticated") return nil, ErrInvalidMessage } user := cmd.User info := cmd.Info c.app.RLock() secret := c.app.config.Secret insecure := c.app.config.Insecure closeDelay := c.app.config.ExpiredConnectionCloseDelay connLifetime := c.app.config.ConnLifetime version := c.app.config.Version presenceInterval := c.app.config.PresencePingInterval c.app.RUnlock() var timestamp string var token string if !insecure { timestamp = cmd.Timestamp token = cmd.Token } else { timestamp = "" token = "" } if !insecure { isValid := auth.CheckClientToken(secret, string(user), timestamp, info, token) if !isValid { logger.ERROR.Println("invalid token for user", user) return nil, ErrInvalidToken } } if !insecure { ts, err := strconv.Atoi(timestamp) if err != nil { logger.ERROR.Println(err) return nil, ErrInvalidMessage } c.timestamp = int64(ts) } else { c.timestamp = time.Now().Unix() } c.User = user body := connectBody{} body.Version = version body.Expires = connLifetime > 0 body.TTL = connLifetime var timeToExpire int64 if connLifetime > 0 && !insecure { timeToExpire = c.timestamp + connLifetime - time.Now().Unix() if timeToExpire <= 0 { body.Expired = true return newClientConnectResponse(body), nil } } c.authenticated = true c.defaultInfo = []byte(info) c.Channels = map[Channel]bool{} c.channelInfo = map[Channel][]byte{} if c.staleTimer != nil { c.staleTimer.Stop() } c.presenceTimer = time.AfterFunc(presenceInterval, c.updatePresence) err := c.app.addConn(c) if err != nil { logger.ERROR.Println(err) return nil, ErrInternalServerError } if c.app.mediator != nil { c.app.mediator.Connect(c.UID, c.User) } if timeToExpire > 0 { duration := closeDelay + 
time.Duration(timeToExpire)*time.Second c.expireTimer = time.AfterFunc(duration, c.expire) } body.Client = c.UID return newClientConnectResponse(body), nil }
package net.avalara.avatax.rest.client.models; import net.avalara.avatax.rest.client.enums.*; import net.avalara.avatax.rest.client.serializer.JsonSerializer; import java.lang.Override; import java.math.BigDecimal; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; /* * AvaTax Software Development Kit for Java JRE based environments * * (c) 2004-2018 Avalara, Inc. * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. * * @author <NAME> <<EMAIL>> * @copyright 2004-2018 Avalara, Inc. * @license https://www.apache.org/licenses/LICENSE-2.0 * @link https://github.com/avadev/AvaTax-REST-V2-JRE-SDK */ /** * Represents a Jurisdiction with applicable TaxType, TaxSubType and RateType. */ public class JurisdictionRateTypeTaxTypeMappingModel { private Integer id; /** * Getter for id * * The unique ID number of this Jurisdiction RateType TaxType Mapping. */ public Integer getId() { return this.id; } /** * Setter for id * * The unique ID number of this Jurisdiction RateType TaxType Mapping. */ public void setId(Integer value) { this.id = value; } private String country; /** * Getter for country * * Name or ISO 3166 code identifying the country of this jurisdiction. * * This field supports many different country identifiers: * * Two character ISO 3166 codes * * Three character ISO 3166 codes * * Fully spelled out names of the country in ISO supported languages * * Common alternative spellings for many countries * * For a full list of all supported codes and names, please see the Definitions API `ListCountries`. */ public String getCountry() { return this.country; } /** * Setter for country * * Name or ISO 3166 code identifying the country of this jurisdiction. 
* * This field supports many different country identifiers: * * Two character ISO 3166 codes * * Three character ISO 3166 codes * * Fully spelled out names of the country in ISO supported languages * * Common alternative spellings for many countries * * For a full list of all supported codes and names, please see the Definitions API `ListCountries`. */ public void setCountry(String value) { this.country = value; } private String state; /** * Getter for state * * Name or ISO 3166 code identifying the region of this jurisdiction. * * This field supports many different region identifiers: * * Two and three character ISO 3166 region codes * * Fully spelled out names of the region in ISO supported languages * * Common alternative spellings for many regions * * For a full list of all supported codes and names, please see the Definitions API `ListRegions`. */ public String getState() { return this.state; } /** * Setter for state * * Name or ISO 3166 code identifying the region of this jurisdiction. * * This field supports many different region identifiers: * * Two and three character ISO 3166 region codes * * Fully spelled out names of the region in ISO supported languages * * Common alternative spellings for many regions * * For a full list of all supported codes and names, please see the Definitions API `ListRegions`. 
*/ public void setState(String value) { this.state = value; } private String jurisdictionType; /** * Getter for jurisdictionType * * Jurisdiction TypeId */ public String getJurisdictionType() { return this.jurisdictionType; } /** * Setter for jurisdictionType * * Jurisdiction TypeId */ public void setJurisdictionType(String value) { this.jurisdictionType = value; } private String jurisdictionCode; /** * Getter for jurisdictionCode * * Jurisdiction Code */ public String getJurisdictionCode() { return this.jurisdictionCode; } /** * Setter for jurisdictionCode * * Jurisdiction Code */ public void setJurisdictionCode(String value) { this.jurisdictionCode = value; } private String longName; /** * Getter for longName * * Jurisdiction long name */ public String getLongName() { return this.longName; } /** * Setter for longName * * Jurisdiction long name */ public void setLongName(String value) { this.longName = value; } private String taxTypeId; /** * Getter for taxTypeId * * Tax Type to which this jurisdiction is applicable */ public String getTaxTypeId() { return this.taxTypeId; } /** * Setter for taxTypeId * * Tax Type to which this jurisdiction is applicable */ public void setTaxTypeId(String value) { this.taxTypeId = value; } private String taxSubTypeId; /** * Getter for taxSubTypeId * * Tax Type to which this jurisdiction is applicable */ public String getTaxSubTypeId() { return this.taxSubTypeId; } /** * Setter for taxSubTypeId * * Tax Type to which this jurisdiction is applicable */ public void setTaxSubTypeId(String value) { this.taxSubTypeId = value; } private String taxTypeGroupId; /** * Getter for taxTypeGroupId * * Tax Type Group to which this jurisdiction is applicable */ public String getTaxTypeGroupId() { return this.taxTypeGroupId; } /** * Setter for taxTypeGroupId * * Tax Type Group to which this jurisdiction is applicable */ public void setTaxTypeGroupId(String value) { this.taxTypeGroupId = value; } private String rateTypeId; /** * Getter for rateTypeId 
* * Rate Type to which this jurisdiction is applicable */ public String getRateTypeId() { return this.rateTypeId; } /** * Setter for rateTypeId * * Rate Type to which this jurisdiction is applicable */ public void setRateTypeId(String value) { this.rateTypeId = value; } private Date effectiveDate; /** * Getter for effectiveDate * * The date this jurisdiction starts to take effect on tax calculations */ public Date getEffectiveDate() { return this.effectiveDate; } /** * Setter for effectiveDate * * The date this jurisdiction starts to take effect on tax calculations */ public void setEffectiveDate(Date value) { this.effectiveDate = value; } private Date endDate; /** * Getter for endDate * * The date this jurisdiction stops to take effect on tax calculations */ public Date getEndDate() { return this.endDate; } /** * Setter for endDate * * The date this jurisdiction stops to take effect on tax calculations */ public void setEndDate(Date value) { this.endDate = value; } /** * Returns a JSON string representation of JurisdictionRateTypeTaxTypeMappingModel */ @Override public String toString() { return JsonSerializer.SerializeObject(this); } }
#! /usr/bin/env python2.3 pass
Statistical Surveillance of Volatility Forecasting Models This paper elaborates sequential procedures for monitoring the validity of a volatility model. A state-space representation describes dynamics of daily integrated volatility. The observation equation relates the integrated volatility to its measures such as the realized volatility or bipower variation. On-line control procedures, based on volatility forecasting errors, allow us to decide whether the chosen representation remains correctly specified. A signal indicates that the assumed volatility model may no longer be valid. The performance of our approach is analyzed within a Monte Carlo simulation study and illustrated in an empirical application for selected U.S. stocks. Copyright The Author 2011. Published by Oxford University Press. All rights reserved. For Permissions, please e-mail: [email protected]., Oxford University Press.
/** {@link TimeIndependentLOSTransform LOS transform} based on a fixed rotation. * @author Luc Maisonobe * @see LOSBuilder */ public class FixedRotation implements TimeIndependentLOSTransform { /** Parameters scaling factor. * <p> * We use a power of 2 to avoid numeric noise introduction * in the multiplications/divisions sequences. * </p> */ private final double SCALE = FastMath.scalb(1.0, -20); /** Rotation axis. */ private final Vector3D axis; /** Underlying rotation. */ private Rotation rotation; /** Underlying rotation with derivatives. */ private FieldRotation<DerivativeStructure> rDS; /** Driver for rotation angle. */ private final ParameterDriver angleDriver; /** Simple constructor. * <p> * The single parameter is the rotation angle. * </p> * @param name name of the rotation (used for estimated parameters identification) * @param axis rotation axis * @param angle rotation angle */ public FixedRotation(final String name, final Vector3D axis, final double angle) { this.axis = axis; this.rotation = null; this.rDS = null; try { this.angleDriver = new ParameterDriver(name, angle, SCALE, -2 * FastMath.PI, 2 * FastMath.PI); angleDriver.addObserver(new ParameterObserver() { @Override public void valueChanged(final double previousValue, final ParameterDriver driver) { // reset rotations to null, they will be evaluated lazily if needed rotation = null; rDS = null; } }); } catch (OrekitException oe) { // this should never happen throw RuggedException.createInternalError(oe); } } /** {@inheritDoc} */ @Override public Stream<ParameterDriver> getParametersDrivers() { return Stream.of(angleDriver); } /** {@inheritDoc} */ @Override public Vector3D transformLOS(final int i, final Vector3D los) { if (rotation == null) { // lazy evaluation of the rotation rotation = new Rotation(axis, angleDriver.getValue(), RotationConvention.VECTOR_OPERATOR); } return rotation.applyTo(los); } /** {@inheritDoc} */ @Override public FieldVector3D<DerivativeStructure> transformLOS(final int i, 
final FieldVector3D<DerivativeStructure> los, final DSGenerator generator) { if (rDS == null) { // lazy evaluation of the rotation final FieldVector3D<DerivativeStructure> axisDS = new FieldVector3D<DerivativeStructure>(generator.constant(axis.getX()), generator.constant(axis.getY()), generator.constant(axis.getZ())); final DerivativeStructure angleDS = generator.variable(angleDriver); rDS = new FieldRotation<DerivativeStructure>(axisDS, angleDS, RotationConvention.VECTOR_OPERATOR); } return rDS.applyTo(los); } }
#ifndef GAME_FACTORY_H #define GAME_FACTORY_H #include <IGameLoop.h> #include "GameLoop.h" class GameFactory : public IGameFactory { public: virtual IGameLoop *CreateGame(const IController &p_test_controller,IEntityObserverFactory &p_observer_factory) override { return new GameLoop(p_test_controller,p_observer_factory); } GameFactory() {} virtual ~GameFactory() override {} }; #endif // GAME_FACTORY_H
/* * Testerra * * (C) 2020, <NAME>, T-Systems Multimedia Solutions GmbH, Deutsche Telekom AG * * Deutsche Telekom AG and all other contributors / * copyright owners license this file to you under the Apache * License, Version 2.0 (the "License"); you may not use this * file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import {autoinject} from 'aurelia-framework'; import {NavigationInstruction, RouteConfig} from "aurelia-router"; import {AbstractViewModel} from "../abstract-view-model"; import {StatisticsGenerator} from "../../services/statistics-generator"; import {StatusConverter} from "../../services/status-converter"; import {Timeline, TimelineOptionsGroupHeightModeType} from "vis-timeline/standalone"; import "vis-timeline/styles/vis-timeline-graph2d.css"; import {data} from "../../services/report-model"; import {Router} from "aurelia-router"; import MethodContext = data.MethodContext; import ResultStatusType = data.ResultStatusType; import IMethodContext = data.IMethodContext; import IContextValues = data.IContextValues; import "./threads.scss"; import IExecutionAggregate = data.IExecutionAggregate; import {ExecutionStatistics} from "../../services/statistic-models"; @autoinject() export class Threads extends AbstractViewModel { private _searchRegexp: RegExp; private _classNamesMap:{[key:string]:string}; private _loading: boolean; private _container:HTMLDivElement; private _methodNameInput:HTMLElement; private _inputValue; private _timeline; private _currentSelection; constructor( private _statistics: StatisticsGenerator, private _statusConverter: 
StatusConverter, private _router: Router ) { super(); } activate(params: any, routeConfig: RouteConfig, navInstruction: NavigationInstruction) { super.activate(params, routeConfig, navInstruction); this._router = navInstruction.router; } attached() { this._loading = true; this._statistics.getExecutionStatistics().then(executionStatistics => { this._classNamesMap = {}; executionStatistics.classStatistics.forEach(classStatistic => { this._classNamesMap[classStatistic.classContext.contextValues.id] = classStatistic.classIdentifier; }); this._prepareTimelineData(executionStatistics) }); } selectionChanged(){ if (this._inputValue.length == 0){ this.updateUrl({}); this._timeline.fit(); } } private _focusOn(methodId:string) { //adjusts timeline zoom to selected method this._timeline.setSelection(methodId, {focus: "true"}); window.setTimeout(() => { const methodElement = document.getElementById(methodId); methodElement?.scrollIntoView(); }, 500); } private _getLookupOptions = async (filter: string, methodId: string): Promise<IContextValues[]> => { return this._statistics.getExecutionStatistics().then(executionStatistics => { let methodContexts:IMethodContext[]; if (methodId) { methodContexts = [executionStatistics.executionAggregate.methodContexts[methodId]]; this._searchRegexp = null; delete this.queryParams.methodName; this._focusOn(methodId); this.updateUrl({methodId: methodId}); } else if (filter?.length > 0) { this._searchRegexp = this._statusConverter.createRegexpFromSearchString(filter); delete this.queryParams.methodId; methodContexts = Object.values(executionStatistics.executionAggregate.methodContexts).filter(methodContext => methodContext.contextValues.name.match(this._searchRegexp)); } else { methodContexts = Object.values(executionStatistics.executionAggregate.methodContexts); } return methodContexts.map(methodContext => methodContext.contextValues); }); }; private _threadItemClicked(properties) { // console.log("timeline element selected.", properties); let 
methodId = properties.items[0].split("_")[0]; this._router.navigateToRoute('method', {methodId: methodId}) } private _prepareTimelineData(executionStatistics:ExecutionStatistics) { // DOM element where the Timeline will be attached const container = this._container; const style = new Map <string,string>(); style.set("PASSED", "background-color: " + this._statusConverter.getColorForStatus(ResultStatusType.PASSED) + "; color: #fff;"); style.set("PASSED_RETRY", "background-color: " + this._statusConverter.getColorForStatus(ResultStatusType.PASSED_RETRY) + "; color: #fff;"); style.set("SKIPPED", "background-color: " + this._statusConverter.getColorForStatus(ResultStatusType.SKIPPED) + "; color: #fff;"); style.set("FAILED", "background-color: " + this._statusConverter.getColorForStatus(ResultStatusType.FAILED) + "; color: #fff;"); style.set("FAILED_EXPECTED", "background-color: " + this._statusConverter.getColorForStatus(ResultStatusType.FAILED_EXPECTED) + "; color: #fff;"); style.set("FAILED_MINOR", "background-color: " + this._statusConverter.getColorForStatus(ResultStatusType.FAILED_MINOR) + "; color: #fff;"); style.set("FAILED_RETRIED", "background-color: " + this._statusConverter.getColorForStatus(ResultStatusType.FAILED_RETRIED) + "; color: #fff;"); const groupItems = []; const dataItems = []; const dataMap = new Map(); Object.values(executionStatistics.executionAggregate.methodContexts).forEach(methodContext => { if (!dataMap.has(methodContext.threadName)) { dataMap.set(methodContext.threadName, []); } dataMap.get(methodContext.threadName).push(methodContext); }); dataMap.forEach((methodContexts, threadName) => { let groupId: string = "group-" + threadName; groupItems.push({id: groupId, content: threadName}); methodContexts.forEach((context: MethodContext) => { /* * workaround for XSS-protection update of vis-timeline by using an HTMLElement instead of injecting the html directly in which the XSS protection would remove the class names needed for our styling * 
@see: https://github.com/visjs/vis-timeline/issues/846#issuecomment-749691286 */ const element = document.createElement("content"); element.innerHTML = ` <div class="item-content" id="${context.contextValues.id}"> <div class="item-content-head">${context.contextValues.name}</div> <div class='item-content-body'> <p class="m0">${this._classNamesMap[context.classContextId]}</p> <p class="m0">(${context.methodRunIndex})</p> </div> </div> `; dataItems.push({ id: context.contextValues.id, content: element, start: context.contextValues.startTime, end: context.contextValues.endTime, group: groupId, callbackInfos: [context.contextValues.id], style: "background-color: " + this._statusConverter.getColorForStatus(context.resultStatus) + ";", title: context.contextValues.name }); }); }); groupItems.sort((item1, item2) => { let contentA = item1.content.toUpperCase(), contentB = item2.content.toUpperCase(); if (contentA < contentB) { return -1; } if (contentA > contentB) { return 1; } return 0; }); // Configuration for the Timeline const options = { onInitialDrawComplete: () => { this._loading = false; if (this.queryParams.methodId?.length > 0) { this._focusOn(this.queryParams.methodId); } }, showTooltips:false, //max zoom out to be 1 Day zoomMax:8.64e+7, //Min Zoom set to be 10 Millisecond zoomMin:10, margin: { item: { horizontal: 2 } }, groupHeightMode: 'fixed' as TimelineOptionsGroupHeightModeType }; // Create a Timeline this._timeline = new Timeline(container, dataItems, groupItems, options); this._timeline.on('select',(event) => { this._threadItemClicked(event); }); } }
Single Session Cystolitholapaxy and PCNL for Encrusted DJ Stent with Large Associated Stone Burden IntroductIon. Ureteral stent use is commonplace in urology to prevent or relieve ureteral obstruction. If ureteral stents are neglected, they can cause severe morbidity due to migration, occlusion, encrustation, breakage, stone formation, and even death, due to life-threatening urosepsis or complications related to operative intervention. Extracorporeal shockwave lithotripsy, ureterorenoscopy, electrohydraulic lithotripsy, laser lithotripsy, and percutaneous nephrolitholapaxy (PCNL) have been reported for forgotten ureteral stent management, but currently there are no guidelines for this challenging situation and only few algorithms have been introduced by some studies. Methods. We present a case of a man presenting with an encrusted left double J (DJ) stent, inserted two years before, and bulky radiolucent lithiasis at both ends of the stent. The patient was studied with intravenous pyelogram and non contrast-enhanced computed tomography, and then treated with cystolithotripsy and PCNL in a single session. results. Complete clearance of the stones was obtained and the DJ stent was removed without breaking from the percutaneous access. conclusIons. Neglected stents still represent a challenge in urology: while endourology remains the best option for treatment, the management of ureteral stents should be based on follow-up and prevention, using for example a computerized warning and stent retrieval software system.
def _replace_series_name(seriesname, replacements): for pat, replacement in six.iteritems(replacements): if re.match(pat, seriesname, re.IGNORECASE | re.UNICODE): return replacement return seriesname
More on Trans-Pacific Partnership Models: Response to Petri and Plummer I see that Peter Petri and Michael Plummer (PP) have responded to my blog post on their models projections for the TPP. In essence, they minimize the concern that the TPP or even trade deficits more generally can lead to a prolonged period of high unemployment or secular stagnation to use the currently fashionable term. Dealing with the second issue first, they argue: “While trade agreements include many provisions on exports and imports, they typically contain no provisions to affect savings behavior. Thus, net national savings, and hence trade balances, will remain at levels determined by other variables, and real exchange rates will adjust instead. “A similar argument applies to overall employment. The TPP could affect employment in the short run — a possibility that we examine below — but those effects will fade because of market and policy adjustments. Since there is nothing in TPP provisions to affect long-term employment trends, employment too will converge to these levels, as long as adjustments are completed in the model’s 10 to 15 year time horizon.” In short, PP explicitly argues that trade agreements neither affect the trade balance nor employment as a definitional matter. They argue that the trade balance is determined by net national savings. They explicitly disavow the contention in my prior note that we cannot assume an adjustment process that will restore the economy to full employment: “In fact, critics of microeconomic analysis often challenge the credibility of market adjustment even in the long term. Dean Baker (2016) argues, for example, that mechanisms that may have once enabled the US economy to return to equilibrium are no longer working in the aftermath of the financial crisis. But the data tell a different, less pessimistic story (figure 1). 
Since 2010, the US economy has added 13 million jobs, a substantial gain compared to job growth episodes in recent decades, and the US civilian unemployment rate has declined from nearly 10 percent to under 5 percent. The broadest measure of unemployment (U6), which also includes part-time and discouraged workers, has declined almost as sharply, from 17 to 10 percent, and is now nearly back to average levels in precrisis, nonrecession years.” As I noted in my original blog post, the PP analysis is entirely consistent with standard trade and macroeconomic approaches, however these approaches do not seem credible in the wake of the Great Recession. The standard view was that the economy would quickly bounce back to its pre-recession trend levels of output and employment. This view provides the basis for the projections made by the Congressional Budget Office (CBO) in its 2010 Budget and Economic Outlook (CBO, 2010). These projections are useful both because they were made with a full knowledge of the depth of the downturn (the recovery had begun in June of 2009) and also because CBO explicitly tries to make projections that are in line with the mainstream of the economics profession. The figure below shows the projected path of GDP in billions of 2009 dollars from 2010 to 2016 compared with the actual path. Source: CBO and BEA. While PP implies that recovery has pretty much set everything back to normal, the data do not support this contention. Output in 2015 was $1,330 billion (in 2015 dollars) below the level that CBO had projected in 2010.[1] This is a loss in output of $4,180 per person. The cumulative gap between the path projected by CBO and the actual path of the economy was $3,810 billion or $11,940 per person. The recovery in employment was also much slower than CBO had projected. 
Employment February of 2016 is almost 6 million below the level projected in 2010 by CBO.[2] The weakness of the labor market has also been associated with a large redistribution from labor to capital. CBO projected that the labor share of GDP would be 0.8 percentage points higher in 2015 than was in fact the case. The lost output combined with the redistribution from wages to profits implies a loss of wage income of $1040 billion in 2015 or an average of $7,460 per worker. To put this loss in perspective, this loss of wage income would have the same impact on the aggregate take home pay of worker as an increase in the Social Security tax of 14.3 percentage points, which would more than double the current level. (Most of this loss is due to fewer people working, not lower wages.) Given the actual path of the recovery from the Great Recession, it defies reality to assert that we don’t have to worry about a trade deficit creating a gap in demand and higher unemployment. Can anyone doubt that if the annual trade deficit had been $300 billion lower (@ 2 percentage points of GDP) over the last six years that we would have seen far more employment and higher wages? Before the Great Recession few mainstream economists took seriously the possibility that major economies could suffer from long periods of inadequate demand which depress employment. This is no longer true, as many of the world’s most prominent economists, including Paul Krugman, Larry Summers, and Olivier Blanchard, openly wrestle with the problem of secular stagnation. If economies can suffer from a chronic shortfall in demand then there is no mechanism to automatically replace the demand lost as a result of a larger trade deficit. After dismissing the idea that trade in general can lead to lower levels of output and employment, PP argue that in any case the TPP is too small to have a noticeable impact on employment and wages. 
While noting the work of Autor, Dorn, and Hanson, which did find a substantial impact on employment and wages as a result of imports from China, PP argue that this impact was not due to trade per se, but rather a: “massive surge in net capital inflows into the United States.” This should not provide much basis for complacency about the impact of the TPP. A massive surge of net capital flows is by definition the flip side of a large rise in the trade deficit. One implies the other. Of course none of the standard trade models predicted the massive increase in the trade deficit that we saw in the decade following 1997.[3] They are not designed for that purpose. By design, the sort of model employed in the PP analysis can tell us nothing about whether we should anticipate a “surge” in capital inflows following the passage of the TPP. There are two reasons that we should be concerned about the impact of the TPP on the size of the trade deficit and therefore overall levels of employment. First, the TPP limits the ability of the United States government to pressure countries over their currency policy. In principle, the U.S. government can use tariffs and other measures to retaliate in a situation where it has determined that a foreign government is deliberately depressing the value of its currency to gain a trade advantage. Its ability to apply such measures will be sharply curtailed by the rules in the TPP. This means that if one of the countries in the agreement chooses to hold down the value of its currency by buying massive amounts of dollars, the U.S. government will be less well positioned to pressure this country to change the policy. The other reason that the TPP should raise these concerns is the failure to include rules on currency in the agreement itself. The TPP has been one of the biggest items on the Obama administration’s trade agenda in its two terms in the White House. 
If it chose not to address the problem of currency management in this agreement, it is difficult to see where it or a future administration will take up the issue. Access to the TPP was an important potential carrot to many of the parties. For example, PP project that Vietnam will see a gain of 8.1 percentage points to its GDP in 2030 as a result of the deal. This could have been a very large carrot with which to persuade Vietnam and other countries to accept enforceable rules on currency management. By not including such rules in the TPP, the United States is losing an important opportunity to prevent the sort of surges in capital inflows that led to the explosion of the trade deficit in the decade from 1997 to 2007. This concern is amplified by the fact that the TPP is quite explicitly designed to be expandable so that it may include major countries like India and China at some future date. For this reason, the failure to include enforceable rules on currency may turn out to have large impacts on the trade deficit, employment, and wages. There is one other point in the debate over the TPP that deserves emphasis. The model used by PP assumes that the TPP does not affect the overall trade balance, but it does change the composition of trade. While this may not prove accurate for reasons I noted, it is worth noting one of the implications of this assumption. The TPP is quite explicitly designed to increase the amount of money that U.S. pharmaceutical companies collect from other countries for their drug patents and forms of intellectual property. It also should increase the amount of money that Microsoft and other software companies collect for their copyrights, as well as the sums that Disney and the rest of the entertainment industry collect for their intellectual property. Using the PP analysis, if these industries collect more money for their patents and copyrights, manufacturing and other sectors must collect less for their output. 
In other words, if the pharmaceutical, software, and entertainment industry see their foreign revenue rise by $40 billion as a result of the TPP, then our trade deficit in other sectors must increase by $40 billion. This amounts to a redistribution of income from manufacturing and other sectors to the pharmaceutical industry, the software industry, and the entertainment industry. Given recent trends in inequality, this may not seem like an appropriate goal for public policy at the moment. Note: Some of the numbers in this post were corrected from an earlier version. [1] The projections are taken from the data for Table C-1. [2] This calculation is based on the projection of potential labor force growth in CBO (2010 Table 2-2) and the projection that the unemployment rate would be 5.0 percent in 2016. [3] The dollar soared in value following the East Asian financial crisis as developing countries began to accumulate foreign reserves on a massive basis. This meant that instead of capital flowing from rich countries to poor countries, as the textbook story predicts, large amounts of capital flowed from poor countries to rich countries (Baker and Walentin 2001). This surge in capital inflows led to an explosion in the size of the trade deficit from just over 1.0 percent of GDP in the mid-1990s to a peak of almost 6.0 percent of GDP in 2005.
/** Note: anchor tag is spit out by appendTestAnchor() before this
 *  because this gets wrapped in divs by appendServiceResult()
 *
 *  Builds the summary markup for one service result by delegating to
 *  detail() with part summaries enabled and the standard line separators.
 *
 *  @param serviceResult the result to summarize
 *  @param tocID         table-of-contents anchor id passed through to detail()
 *  @return the summary string produced by detail()
 */
public String formatSummary(ServiceResult serviceResult, int tocID) {
    boolean includePartSummary = true;
    // The original buffered the single detail() string in a StringBuffer
    // before returning it; returning it directly is equivalent and avoids
    // the needless (synchronized) buffer allocation.
    return detail(serviceResult, false, includePartSummary, DETAIL_LINESEP,
                  DETAIL_END, tocID);
}
Mass shootings in Orlando, Fla., Alexandria, Va., and San Francisco during the first two weeks of June — two of them on the same day — have once again put America's complicated relationship with guns in the spotlight. Americans have remained fairly evenly divided between a desire to strengthen gun control measures and to protect gun rights during the past eight years, a Pew Research Center survey taken before the June shootings finds. In the latest poll, 51 percent of respondents said it is more important to control gun ownership, whereas 47 percent said it's more important to protect the right of Americans to own guns. But the divide between Republicans and Democrats who say it's important to protect the right of Americans to own guns has widened since 2000 — from an 18-percentage-point gap to a 54-point gap. This gap is underscored by reactions to the shooting at a congressional baseball practice that sent five people to the hospital earlier this month. Hours after the shooting, U.S. Rep. Paul Mitchell, R-Mich., told NPR that "gun control laws simply limit ... law-abiding citizens." In the same conversation, his colleague, Rep. Val Demings, D-Fla., remained firm in her commitment "to keep guns out of the hands of bad people who shouldn't have them." But while that gap has widened, the Pew survey did reveal some areas of consensus: 89 percent of Americans want to restrict people with mental illnesses from buying guns. 84 percent think there should be background checks for private gun sales and at gun shows. 83 percent want to ban sales of guns to people on no-fly or watch lists. "We can live in a safer place if we kept guns out of the hands of people that shouldn't have them — domestic abusers, felons, people who are dangerously mentally ill, even suspected terrorists," Mark Kelly said during a recent interview with NPR. His wife, former Rep. Gabrielle Giffords, was shot in an assassination attempt in 2011. Forty-two percent of adults in the U.S. 
now say they live in a home with a gun. Pew surveyed nearly 4,000 U.S. adults, including 1,269 gun owners. Fifty-two percent of people who didn't own guns said they could see themselves owning a gun in the future. Forty-four percent of Americans said they know someone who has been shot. And 23 percent said they, or someone in their family, has been threatened or intimidated by someone using a gun. The survey also asked about safety practices; 7 in 10 gun owners said they had taken a gun safety course such as weapons training, hunter safety or firearm safety. Sixty-three percent of gun owners said there is at least one gun in their home that is not kept in a locked place. Fifty-five percent of gun owners have a gun that is loaded and easily accessible to them at home all or most of the time. Poll results come from a survey of 3,930 U.S. adults conducted March 13 to 27 and April 4 to 18. The poll has a margin of sampling error of 2.8 percentage points for all respondents, 4.8 percentage points for gun owners and 3.4 percentage points for people who don't own guns. The question about gun rights was asked as part of a separate Pew national survey, most recently conducted April 5 to 11 among 1,501 adults.
/**
 * kernel/model.c
 * @author nladuo
 * @source url:https://github.com/nladuo/TinyExtMvc
 *
 * Registers the PHP class Tem\Model for the TinyExtMvc extension.
 * The class has a single constructor that attaches a Tem\Loader
 * instance as the model's "load" property.
 */
#include "kernel/model.h"
#include "kernel/loader.h"

/* Class entry for Tem\Model, filled in by start_up_tem_model(). */
zend_class_entry *tem_model_ce;

/* Tem\Model::__construct()
 * Creates a Tem\Loader object, runs its PHP-level __construct with $this
 * as argument, and stores it as the public "load" property so model code
 * can do $this->load->...  */
ZEND_METHOD(Model, __construct)
{
    //add loader as a property of model
    zval *load_obj;
    MAKE_STD_ZVAL(load_obj);
    object_init_ex(load_obj, tem_loader_ce);
    walu_call_user_method(NULL, load_obj, "__construct", "z", getThis());
    /* add_property_zval_ex addrefs load_obj, so the dtor below only drops
       our local reference — the property keeps the object alive. */
    add_property_zval_ex(getThis(), "load", strlen("load") + 1, load_obj);
    zval_ptr_dtor(&load_obj);
}

/* Method table for Tem\Model: only the constructor. */
static zend_function_entry model_method[]={
    ZEND_ME(Model, __construct, NULL, ZEND_ACC_PUBLIC|ZEND_ACC_CTOR)
    {NULL, NULL, NULL}
};

/* Module-startup hook: registers the namespaced class "Tem\Model"
 * with the Zend engine and records its class entry. */
int start_up_tem_model(){
    zend_class_entry ce;
    INIT_NS_CLASS_ENTRY(ce, "Tem", "Model", model_method);
    tem_model_ce = zend_register_internal_class(&ce TSRMLS_CC);
    return SUCCESS;
}
package org.checkerframework.checker.experimental.regex_qual;

import org.checkerframework.qualframework.base.Checker;
import org.checkerframework.framework.qual.StubFiles;

/**
 * {@link Checker} for the Regex-Qual type system.
 *
 * <p>Parameterized over {@link Regex} qualifiers; annotations for the Apache
 * Xerces library are supplied via the declared stub file.
 */
@StubFiles("apache-xerces.astub")
public class RegexQualChecker extends Checker<Regex> {

    /** Creates the type factory that computes Regex qualifiers for this checker. */
    @Override
    protected RegexQualifiedTypeFactory createTypeFactory() {
        return new RegexQualifiedTypeFactory(this);
    }
}
#include "common/common.h"

/*
 * Installs p_func as the handler for signal p_sig via sigaction().
 *
 * The sigaction struct is zeroed first, so sa_mask is empty and sa_flags
 * is 0 (no SA_RESTART: interrupted syscalls will fail with EINTR).
 *
 * NOTE(review): on sigaction() failure this logs via ELOG but still falls
 * through to return SUCCESS — assumes ELOG aborts/longjmps or callers
 * tolerate a failed installation; confirm ELOG's semantics.
 */
int SetSigHdr( int p_sig, void p_func(int sig) )
{
    struct sigaction act;

    /* zero everything: clears sa_mask and sa_flags in one step */
    memset (&act, '\0', sizeof(act));
    act.sa_handler = p_func;

    if (sigaction(p_sig, &act, NULL) < 0) {
        ELOG(ERROR_SIGNAL, "sigaction error");
    }

    return SUCCESS;
}
Frontrunner for the Republican presidential nomination, Donald Trump, is allegedly in advanced talks to add AC Milan to his international sporting empire, which already includes a number of golf courses. The 69-year-old recently made a detour from the campaign trail to fly into Italy on an overnight flight from Chicago. The former High School varsity soccer star was spotted getting into a waiting car outside Casa Milan in the early hours of the morning. Former Italian prime minister, Silvio Berlusconi, had been attempting to sell the club to Thai businessman Bee Taechaubol for some time before the American tycoon stepped in to unexpectedly come close to sealing a deal. The Rossoneri were regarded as a valuable political tool for Berlusconi and the Trump campaign now hope that the ‘make Milan great again’ platform will help tap into the large Italian-American demographic. The Republican frontrunner has also reportedly opened talks with Trump Tower inhabitant, Cristiano Ronaldo, in a bid to take him to the Stadio San Siro. Happy April Fool’s day!
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Take a "screen-shot" (full or partial), save to a ImageStim()-like RBGA object.`"""

# Part of the PsychoPy library
# Copyright (C) 2002-2018 <NAME> (C) 2019-2021 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).

from __future__ import absolute_import, division, print_function

from builtins import str

# Ensure setting pyglet.options['debug_gl'] to False is done prior to any
# other calls to pyglet or pyglet submodules, otherwise it may not get picked
# up by the pyglet GL engine and have no effect.
# Shaders will work but require OpenGL2.0 drivers AND PyOpenGL3.0+
import pyglet
pyglet.options['debug_gl'] = False
GL = pyglet.gl

import psychopy  # so we can get the __path__
from psychopy import core, logging

# tools must only be imported *after* event or MovieStim breaks on win32
# (JWP has no idea why!)
from psychopy.tools.attributetools import attributeSetter, setAttribute
from psychopy.tools.typetools import float_uint8
from psychopy.visual.image import ImageStim

try:
    from PIL import Image
except ImportError:
    from . import Image

import numpy


class BufferImageStim(ImageStim):
    """Take a "screen-shot", save as an ImageStim (RBGA object).

    The screen-shot is a single collage image composed of static elements
    that you can treat as being a single stimulus. The screen-shot can be of
    the visible screen (front buffer) or hidden (back buffer).

    BufferImageStim aims to provide fast rendering, while still allowing
    dynamic orientation, position, and opacity. It's fast to draw but slower
    to init (same as an ImageStim).

    You specify the part of the screen to capture (in norm units), and
    optionally the stimuli themselves (as a list of items to be drawn).
    You get a screenshot of those pixels. If your OpenGL does not support
    arbitrary sizes, the image will be larger, using square powers of two
    if needed, with the excess image being invisible (using alpha). The
    aim is to preserve the buffer contents as rendered.

    Checks for OpenGL 2.1+, or uses square-power-of-2 images.

    **Example**::

        # define lots of stimuli, make a list:
        mySimpleImageStim = ...
        myTextStim = ...
        stimList = [mySimpleImageStim, myTextStim]

        # draw stim list items & capture (slow; see EXP log for times):
        screenshot = visual.BufferImageStim(myWin, stim=stimList)

        # render to screen (very fast, except for the first draw):
        while <conditions>:
            screenshot.draw()  # fast; can vary .ori, .pos, .opacity
            other_stuff.draw()  # dynamic
            myWin.flip()

    See coder Demos > stimuli > bufferImageStim.py for a demo, with timing
    stats.

    :Author:
        - 2010 <NAME>, with on-going fixes
    """

    def __init__(self, win, buffer='back', rect=(-1, 1, 1, -1),
                 sqPower2=False, stim=(), interpolate=True, flipHoriz=False,
                 flipVert=False, mask='None', pos=(0, 0), name=None,
                 autoLog=None):
        """
        :Parameters:

            buffer :
                the screen buffer to capture from, default is 'back'
                (hidden). 'front' is the buffer in view after win.flip()
            rect :
                a list of edges [left, top, right, bottom] defining a
                screen rectangle which is the area to capture from the
                screen, given in norm units. default is fullscreen:
                [-1, 1, 1, -1]
            stim :
                a list of item(s) to be drawn to the back buffer (in
                order). The back buffer is first cleared (without the
                win being flip()ed), then stim items are drawn, and
                finally the buffer (or part of it) is captured. Each item
                needs to have its own .draw() method, and have the same
                window as win.
            interpolate :
                whether to use interpolation (default = True, generally
                good, especially if you change the orientation)
            sqPower2 :
                - False (default) = use rect for size if OpenGL = 2.1+
                - True = use square, power-of-two image sizes
            flipHoriz :
                horizontally flip (mirror) the captured image, default =
                False
            flipVert :
                vertically flip (mirror) the captured image; default =
                False
        """
        # depends on: window._getRegionOfFrame

        # what local vars are defined (these are the init params) for use by
        # __repr__
        self._initParams = dir()
        self._initParams.remove('self')

        self.autoLog = False  # set this False first and change later
        _clock = core.Clock()  # used below to log how long init took
        if stim:  # draw all stim to the back buffer
            win.clearBuffer()
            buffer = 'back'  # capture must come from what we just drew
            if hasattr(stim, '__iter__'):
                for stimulus in stim:
                    try:
                        if stimulus.win == win:
                            stimulus.draw()
                        else:
                            msg = ('BufferImageStim.__init__: user '
                                   'requested "%s" drawn in another window')
                            logging.warning(msg % repr(stimulus))
                    except AttributeError:
                        msg = 'BufferImageStim.__init__: "%s" failed to draw'
                        logging.warning(msg % repr(stimulus))
            else:
                raise ValueError('Stim is not iterable in BufferImageStim. '
                                 'It should be a list of stimuli.')

        # take a screenshot of the buffer using win._getRegionOfFrame():
        glversion = pyglet.gl.gl_info.get_version()
        if glversion >= '2.1' and not sqPower2:
            region = win._getRegionOfFrame(buffer=buffer, rect=rect)
        else:
            if not sqPower2:
                msg = ('BufferImageStim.__init__: defaulting to square '
                       'power-of-2 sized image (%s)')
                logging.debug(msg % glversion)
            region = win._getRegionOfFrame(buffer=buffer, rect=rect,
                                           squarePower2=True)
        if stim:
            win.clearBuffer()

        # turn the RGBA region into an ImageStim() object:
        if win.units in ['norm']:
            pos *= win.size / 2.
        size = region.size / win.size / 2.
        super(BufferImageStim, self).__init__(
            win, image=region, units='pix', mask=mask, pos=pos, size=size,
            interpolate=interpolate, name=name, autoLog=False)
        # final size is the captured region in pixels (overrides the size
        # passed to super above)
        self.size = region.size

        # to improve drawing speed, move these out of draw:
        # NOTE(review): thisScale feeds glScalef in draw(); the constant
        # [4, 4] presumably matches the pix-units projection — confirm.
        self.thisScale = numpy.array([4, 4])
        self.flipHoriz = flipHoriz
        self.flipVert = flipVert

        # set autoLog now that params have been initialised
        wantLog = autoLog is None and self.win.autoLog
        self.__dict__['autoLog'] = autoLog or wantLog

        if self.autoLog:
            logging.exp("Created %s = %s" % (self.name, str(self)))
            msg = 'BufferImageStim %s: took %.1fms to initialize'
            logging.exp(msg % (name, 1000 * _clock.getTime()))

    @attributeSetter
    def flipHoriz(self, flipHoriz):
        """If set to True then the image will be flipped horizontally
        (left-to-right). Note that this is relative to the original image,
        not relative to the current state.
        """
        self.__dict__['flipHoriz'] = flipHoriz

    @attributeSetter
    def flipVert(self, flipVert):
        """If set to True then the image will be flipped vertically
        (left-to-right). Note that this is relative to the original image,
        not relative to the current state.
        """
        self.__dict__['flipVert'] = flipVert

    def setFlipHoriz(self, newVal=True, log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message.
        """
        setAttribute(self, 'flipHoriz', newVal, log)  # call attributeSetter

    def setFlipVert(self, newVal=True, log=None):
        """Usually you can use 'stim.attribute = value' syntax instead,
        but use this method if you need to suppress the log message.
        """
        setAttribute(self, 'flipVert', newVal, log)  # call attributeSetter

    def draw(self, win=None):
        """Draws the BufferImage on the screen, similar to
        :class:`~psychopy.visual.ImageStim` `.draw()`. Allows dynamic
        position, size, rotation, mirroring, and opacity.

        Limitations / bugs: not sure what happens with shaders and
        self._updateList()
        """
        if win is None:
            win = self.win
        self._selectWindow(win)

        GL.glPushMatrix()  # preserve state
        # GL.glLoadIdentity()

        # dynamic flip: negate the scale on an axis to mirror the image
        GL.glScalef(self.thisScale[0] * (1, -1)[self.flipHoriz],
                    self.thisScale[1] * (1, -1)[self.flipVert], 1.0)

        # enable dynamic position, orientation, opacity; depth not working?
        GL.glColor4f(*self._foreColor.render('rgba1'))

        GL.glCallList(self._listID)  # make it happen
        GL.glPopMatrix()  # return the view to previous state
Cobalt-based catalysts activation process FIELD: petrochemical process catalysts. SUBSTANCE: invention relates to preparation of supported Fischer-Tropsch catalysts and comprises treatment of supported Fischer-Tropsch catalyst precursor in the first step, which precursor is in pre-reduced state in the form of particles. Precursor contains cobalt-impregnated catalyst support and reducible labilized cobalt oxide in fired state selected from compounds depicted by formulas including CoO_aH_b, wherein a=1.7 and b>0, and monometallic hydrotalcite-type compounds Co(II)_0.74Co(III)_0.26(OH)_2.01(NO3)_0.21(CO3)_0.02·0.6H2O. Cobalt oxide is reduced with reducing gas, which is pure hydrogen, at the first volumetric velocity of supplied gas SV1 and first heating velocity HR1 to form partially reduced catalyst precursor. Resulting precursor is activated, in the second step, with reducing gas, which is pure hydrogen, at the second volumetric velocity of supplied gas SV2 and second heating velocity HR2, so that SV2&lt;SV1 and/or HR2≥HR1 provided that, when SV2=SV1, then HR2≠HR1 and, when HR2=HR1, then SV2≠SV1. EFFECT: achieved maximum catalytic activity. 12 cl, 3 dwg, 5 tbl, 5 ex
# coding: utf-8
import os

# Absolute roots of the DUTS saliency-detection dataset splits
# (DUTS-TR = training images, DUTS-TE = test images).
# NOTE(review): machine-specific hard-coded paths — adjust per environment.
datasets_root_train = '/home/zxq/code/coding/saliency_Dataset/DUTS/DUTS-TR'
datasets_root_test = '/home/zxq/code/coding/saliency_Dataset/DUTS/DUTS-TE'

# os.path.join with a single argument returns it unchanged; kept for
# symmetry with configs that join additional sub-directories.
train_data = os.path.join(datasets_root_train)
test_data = os.path.join(datasets_root_test)
/** * This TimsProduct class implements the commodity interface * @author Meet Patel **/ public class TimsProduct implements Commodity { // name of the product private String name; // cost of the product private double cost; // actual price of the product private double price; // set the name, cost and price public TimsProduct(String name, double cost, double price) { this.name = name; this.cost = cost; this.price = price; } // return name public String getName() { return name; } // return cost of the product public double getProductionCost() { return cost; } // return retail price public double getRetailPrice() { return price; } // print string public String toString() { return "Name: " + name + "Cost: " + cost + "Price: " + price ; } }
import { Injectable } from '@angular/core';
import { BehaviorSubject, Observable } from 'rxjs';

/**
 * Shares page-level navigation state across components:
 * whether the current page is the home page, and the current route path.
 * State is exposed read-only as Observables; mutation goes through setters.
 */
@Injectable()
export class PageService {
  // Fix: use the primitive `boolean` instead of the wrapper object type
  // `Boolean` (TypeScript best practice; `Observable<boolean>` remains
  // assignable wherever `Observable<Boolean>` was expected).
  private isHomePage$: BehaviorSubject<boolean>;
  private currentPath$: BehaviorSubject<string>;

  constructor() {
    // Seed values: assume home page until told otherwise, empty path.
    this.isHomePage$ = new BehaviorSubject<boolean>(true);
    this.currentPath$ = new BehaviorSubject<string>('');
  }

  /** Read-only stream of the "is home page" flag. */
  public get isHome$(): Observable<boolean> {
    return this.isHomePage$.asObservable();
  }

  /** Publishes whether the current page is the home page. */
  public setIsHome(isHome: boolean): void {
    this.isHomePage$.next(isHome);
  }

  /** Read-only stream of the current route path. */
  public get getCurrentPath$(): Observable<string> {
    return this.currentPath$.asObservable();
  }

  /** Publishes the current route path. */
  public setCurrentPath(path: string): void {
    this.currentPath$.next(path);
  }
}
/**
 * Lists the contents on a remote FTP server.
 *
 * @param dirName the fully specified remote directory name.
 *
 * @return a directory listing for the specified FTP directory.
 */
public static List<String> list(String dirName) {
    validateRemoteFile(dirName);

    // curl requires a trailing slash to treat the target as a directory
    String target = dirName.endsWith("/") ? dirName : dirName + "/";

    JamProcess process = JamProcess.create("curl", "--list-only", target);
    process.run();

    // each stdout line from "curl --list-only" is one entry name
    return new ArrayList<String>(process.stdout());
}
// WithCompareNumberAndNumericString configures differ to compare a number with a numeric string. // Differ parses the string to number before comparing their values. // e.g. 1.5 == "1.5" func WithCompareNumberAndNumericString() Option { return func(d *differ) { d.compareNumberAndNumericString = true } }
/**
 * Probabilistic Monte Carlo tree search.
 *
 * Implements PUCT-style MCTS: leaf nodes are expanded using a neural
 * Evaluator that supplies a policy prior and a value estimate; edge
 * statistics (N visits, W total value, Q mean value) guide selection.
 */
public class MctsP {

    // Exploration constant for the PUCT selection formula.
    private static final double C_PUCT = 1.41;

    // Transposition table: state id -> node, reused across moves.
    private Map<String, Node> tree = new HashMap<>();
    // Node for the current root game state.
    private Node root;
    // Neural network evaluator producing (policy, value) for a state.
    private Evaluator evaluator;

    public MctsP(@NonNull AssetManager assetManager) {
        evaluator = Evaluator.create(assetManager);
    }

    /**
     * Perform MCTS simulations starting from current game state.
     * Return a vector of MCTS score over all actions.
     *
     * @param state Root game state
     * @return Actions visit counts
     */
    public int[] getDistribution(State state) {
        // Set root node — reuse the existing subtree if this state was
        // already explored, otherwise start a fresh tree.
        if (tree.containsKey(state.getId())) {
            root = tree.get(state.getId());
            pruneTree();
        } else {
            createTree(state);
        }

        int simulations = GameController.getSimulations();
        AgentAsyncTask agentAsyncTask = GameController.getAgentAsyncTask();

        // Explore the tree
        for (int i = 0; i < simulations; i++) {
            if (agentAsyncTask.isCancelled()) {
                // task is aborted, bail out
                return null;
            }
            GameController.onProgressUpdate(
                    (int) (i * 100 / (double) simulations + 0.5));
            simulate();
        }

        // Return visit counts
        int[] visits = new int[Game.board_size];
        for (Edge edge : root.edges) {
            visits[edge.action] = edge.N;
        }
        return visits;
    }

    /**
     * Move to leaf node, evaluate it, and back propagate the value
     */
    private void simulate() {
        // breadcrumbs records the path of edges taken, for backprop
        List<Edge> breadcrumbs = new ArrayList<>();
        Node leaf = moveToLeaf(breadcrumbs);

        float value;
        if (leaf.state.isFinished()) {
            // terminal state: use the game's true outcome
            value = leaf.state.getValue();
        } else {
            // non-terminal leaf: ask the network for priors + value,
            // then expand one ply
            float[] pi = new float[Game.board_size];
            value = evaluator.predict(pi, leaf.state.getCanonicalBoard());
            expandNode(leaf, pi);
        }

        backPropagate(leaf, value, breadcrumbs);
    }

    /**
     * Move down the tree until hit a leaf node
     */
    private Node moveToLeaf(List<Edge> breadcrumbs) {
        Node node = root;
        while (!node.edges.isEmpty()) {
            Edge bestEdge = getBestEdge(node);
            node = bestEdge.outNode;
            breadcrumbs.add(bestEdge);
        }
        return node;
    }

    /**
     * Expand node — create one edge per valid action, sharing child
     * nodes through the transposition table.
     */
    private void expandNode(Node node, float[] pi) {
        State newState;
        Node newNode;
        for (int action : node.state.getValidActions()) {
            newState = node.state.getNextState(action);
            if (tree.containsKey(newState.getId())) {
                newNode = tree.get(newState.getId());
            } else {
                newNode = new Node(newState);
                tree.put(newState.getId(), newNode);
            }
            node.edges.add(new Edge(node, newNode, action, pi[action]));
        }
    }

    /**
     * Back propagate the value up the tree.
     * The value is from the leaf player's perspective, so it is negated
     * for edges belonging to the opponent.
     */
    private void backPropagate(Node node, float value, List<Edge> breadcrumbs) {
        int player = node.state.getPlayer();
        for (Edge edge : breadcrumbs) {
            edge.N++;
            edge.W += edge.player == player ? value : -value;
            edge.Q = (double) edge.W / edge.N;
        }
    }

    /**
     * Pick edge with highest upper confidence bound (PUCT formula:
     * Q + c * P * sqrt(parent visits) / (1 + N)).
     */
    private Edge getBestEdge(Node node) {
        int nodeVisits = 0;
        for (Edge edge : node.edges) {
            nodeVisits += edge.N;
        }

        double max_u = -Double.MAX_VALUE;
        double u;
        Edge best_edge = null;
        for (Edge edge : node.edges) {
            u = edge.Q + C_PUCT * edge.P * Math.sqrt(nodeVisits) / (1 + edge.N);
            if (u > max_u) {
                max_u = u;
                best_edge = edge;
            }
        }
        return best_edge;
    }

    /**
     * Create a new tree
     */
    private void createTree(State state) {
        tree = new HashMap<>();
        root = new Node(state);
        tree.put(state.getId(), root);
    }

    /**
     * Keep only subtree of the node and prune the rest
     */
    private void pruneTree() {
        Map<String, Node> subtree = new HashMap<>();
        subtree.put(root.state.getId(), root);
        copySubtree(subtree, root);
        tree = subtree;
    }

    // Recursively copy all nodes reachable from `node` into `subtree`.
    private void copySubtree(Map<String, Node> subtree, Node node) {
        for (Edge edge : node.edges) {
            subtree.put(edge.outNode.state.getId(), edge.outNode);
            copySubtree(subtree, edge.outNode);
        }
    }

    // Search-tree node: a game state plus its outgoing edges
    // (empty edges list means unexpanded leaf).
    private static final class Node {
        final State state;
        List<Edge> edges = new ArrayList<>();

        Node(State state) {
            this.state = state;
        }
    }

    // Search-tree edge: action taken from inNode to outNode, with
    // PUCT statistics (P prior, N visits, W total value, Q mean value).
    private static final class Edge {
        final Node outNode;
        final int action;
        final int player;  // player to move at the parent node
        final float P;
        int N = 0;
        int W = 0;
        double Q = 0;

        Edge(Node inNode, Node outNode, int action, float prior) {
            this.outNode = outNode;
            this.action = action;
            this.player = inNode.state.getPlayer();
            this.P = prior;
        }
    }
}
import { UnitsSystem } from './unit.system';

/**
 * Collection of the unit systems supported by the application.
 * Keys name the system by its units — presumably cm/kg (metric) and
 * in/lb (imperial); confirm against UnitsSystem usage.
 */
export interface UnitsSystems {
    // Centimeter/kilogram based system
    cmkg: UnitsSystem
    // Inch/pound based system
    inlb: UnitsSystem
}
/* LanguageTool, a natural language style checker * Copyright (C) 2013 Stefan Lotties * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package org.languagetool.language; import java.util.ArrayList; import java.util.List; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.languagetool.JLanguageTool; import org.languagetool.Language; public abstract class AbstractLanguageConcurrencyTest { protected abstract Language createLanguage(); protected abstract String createSampleText(); volatile int failedTests; @Ignore("too slow to run every time") @Test public void testSpellCheckerFailure() throws Exception { String sampleText = createSampleText(); Language language = createLanguage(); int threadCount = Runtime.getRuntime().availableProcessors() * 10; int testRuns = 100; ReadWriteLock testWaitLock = new ReentrantReadWriteLock(); Lock testWriteLock = testWaitLock.writeLock(); testWriteLock.lock(); failedTests = 0; List<Thread> threads = new ArrayList<>(); for (int i = 0; i < threadCount; i++) { Thread t = new Thread(new TestRunner(testWaitLock, language, testRuns, sampleText)); t.start(); threads.add(t); } // Release the lock and 
allow all TestRunner threads to do their work. testWriteLock.unlock(); for (Thread t : threads) { t.join(); } Assert.assertEquals(0, failedTests); } final class TestRunner implements Runnable { private final ReadWriteLock waitLock; private final Language language; private final int testRuns; private final String sampleText; TestRunner(ReadWriteLock waitLock, Language language, int testRuns, String sampleText) { this.waitLock = waitLock; this.language = language; this.testRuns = testRuns; this.sampleText = sampleText; } @Override public void run() { /* Request a read-lock to force this thread waiting until the main-thread releases the write-lock. * This ensures all TestRunner threads will be executed very concurrently and force threading issues to come up, * in case the tested code is not thread-safe. */ Lock lock = waitLock.readLock(); lock.lock(); lock.unlock(); for (int i = 0; i < this.testRuns; i++) { try { JLanguageTool tool = new JLanguageTool(this.language); Assert.assertNotNull(tool.check(this.sampleText)); } catch (Exception e) { failedTests += 1; // Force a log message and the debugger to pause. throw new RuntimeException(e); } } } } }
<gh_stars>0 import type { Context } from 'koa'; import Joi from 'joi'; import { validateBody } from '../../libs/utils'; import { getManager, getRepository } from 'typeorm'; import Item from '../../entities/Item'; import Cart from '../../entities/Cart'; import loadCart from '../../libs/loadCart'; const addCart = async (ctx: Context) => { type RequestType = { item_id: string; count: number; price: number; }; const schema = Joi.object().keys({ item_id: Joi.string().required(), count: Joi.number().required(), price: Joi.number().required(), }); if (!validateBody(ctx, schema)) return; const { item_id, count, price }: RequestType = ctx.request.body; try { const { user_id } = ctx.state.user; if (!user_id) { ctx.status = 401; ctx.body = '로그인 후 이용해주세요'; return; } const itemRepo = await getRepository(Item); const cartRepo = await getRepository(Cart); const prevCart = await loadCart(user_id); const item = await itemRepo.findOne({ id: item_id }); if (!item) { ctx.status = 404; ctx.body = '존재하지 않는 품목입니다.'; return; } const addItem = { ...item, count, price, amount: count * price, }; if (!prevCart) { // 기존 카트가 존재하지 않을 때 const cart = new Cart(); cart.items = [addItem]; cart.user_id = user_id; cart.completed = false; cart.deleted = false; await cartRepo.save(cart); ctx.body = cart; } else { // 기존 카트가 있을 경우 기존 카트에 품목 추가 await cartRepo.update( { id: prevCart.id }, { ...prevCart, items: [...prevCart.items, addItem] } ); const cart = await cartRepo.findOne({ id: prevCart.id }); if (!cart) { ctx.status = 404; ctx.body = '카트가 존재하지 않습니다.'; return; } ctx.body = cart; } } catch (err: any) { ctx.throw(500, err); } }; export default addCart;
// NewStoreSet returns a new set of stores from cluster peers and statically configured ones. func NewStoreSet( logger log.Logger, reg *prometheus.Registry, storeSpecs func() []StoreSpec, dialOpts []grpc.DialOption, unhealthyStoreTimeout time.Duration, ) *StoreSet { storeNodeConnections := prometheus.NewGauge(prometheus.GaugeOpts{ Name: "thanos_store_nodes_grpc_connections", Help: "Number indicating current number of gRPC connection to store nodes. This indicates also to how many stores query node have access to.", }) if logger == nil { logger = log.NewNopLogger() } if reg != nil { reg.MustRegister(storeNodeConnections) } if storeSpecs == nil { storeSpecs = func() []StoreSpec { return nil } } ss := &StoreSet{ logger: log.With(logger, "component", "storeset"), storeSpecs: storeSpecs, dialOpts: dialOpts, storeNodeConnections: storeNodeConnections, gRPCInfoCallTimeout: 10 * time.Second, externalLabelOccurrencesInStores: map[string]int{}, stores: make(map[string]*storeRef), storeStatuses: make(map[string]*StoreStatus), unhealthyStoreTimeout: unhealthyStoreTimeout, } storeNodeCollector := &storeSetNodeCollector{externalLabelOccurrences: ss.externalLabelOccurrences} if reg != nil { reg.MustRegister(storeNodeCollector) } return ss }
package spyse

// Params returns the catalog of CVE search parameters supported by the
// Spyse API, pairing each API field name with the operators it accepts.
func (s *CVEService) Params() CVEParams {
	return CVEParams{
		ID: CVEParamID{
			Name: "id",
			Operator: CVEIDOperators{
				Equal: OperatorEqual,
			},
		},
		CPE: CVEParamCPE{
			Name: "cpe",
			Operator: CVECPEOperators{
				Equal:      OperatorEqual,
				StartsWith: OperatorStartsWith,
			},
		},
		ScoreCVSS2: CVEParamScoreCVSS2{
			Name: "score_cvss2",
			Operator: CVEScoreCVSS2Operators{
				Gte: OperatorGreaterThanOrEqual,
				Lte: OperatorLessThanOrEqual,
			},
		},
		ScoreCVSS3: CVEParamScoreCVSS3{
			Name: "score_cvss3",
			Operator: CVEScoreCVSS3Operators{
				Gte: OperatorGreaterThanOrEqual,
				Lte: OperatorLessThanOrEqual,
			},
		},
		SeverityCVSS2: CVEParamSeverityCVSS2{
			Name: "severity_cvss2",
			Operator: CVESeverityCVSS2Operators{
				Equal: OperatorEqual,
			},
		},
		SeverityCVSS3: CVEParamSeverityCVSS3{
			Name: "severity_cvss3",
			Operator: CVESeverityCVSS3Operators{
				Equal: OperatorEqual,
			},
		},
		PublishedAt: CVEParamPublishedAt{
			Name: "published_at",
			Operator: CVEPublishedAtOperators{
				Equal: OperatorEqual,
			},
		},
		ModifiedAt: CVEParamModifiedAt{
			Name: "modified_at",
			Operator: CVEModifiedAtOperators{
				Equal: OperatorEqual,
			},
		},
	}
}

// CVEParams for CVE search:
//
// All search parameters see at https://spyse-dev.readme.io/reference/cves#cve_search
type CVEParams struct {
	// ID search by the CVE ID defined by the MITRE Corporation.
	ID CVEParamID
	// CPE search by the Common Platform Enumeration (CPE) name or prefix. Example:
	// cpe:2.3:o:canonical:ubuntu_linux:12.04.
	CPE CVEParamCPE
	// ScoreCVSS2 search by the CVE score according to the Common Vulnerability Scoring System Version 2 (CVSS2).
	ScoreCVSS2 CVEParamScoreCVSS2
	// ScoreCVSS3 search by the CVE score according to the Common Vulnerability Scoring System Version 3 (CVSS3).
	ScoreCVSS3 CVEParamScoreCVSS3
	// SeverityCVSS2 search by the CVE severity according to CVSSv2. Supported options: high, medium, low.
	SeverityCVSS2 CVEParamSeverityCVSS2
	// SeverityCVSS3 search by the CVE severity according to CVSSv3. Supported options: high, medium, low, critical.
	SeverityCVSS3 CVEParamSeverityCVSS3
	// PublishedAt search by the vulnerability publication date. Format: YYYY-MM-DD.
	PublishedAt CVEParamPublishedAt
	// ModifiedAt search by the vulnerability modification date. Format: YYYY-MM-DD.
	ModifiedAt CVEParamModifiedAt
}

// CVEParamID describes the "id" search field and its supported operators.
type CVEParamID struct {
	Name     string
	Operator CVEIDOperators
}

// CVEIDOperators lists operators accepted by the "id" field.
type CVEIDOperators struct {
	Equal string
}

// CVEParamCPE describes the "cpe" search field and its supported operators.
type CVEParamCPE struct {
	Name     string
	Operator CVECPEOperators
}

// CVECPEOperators lists operators accepted by the "cpe" field.
type CVECPEOperators struct {
	Equal      string
	StartsWith string
}

// CVEParamScoreCVSS2 describes the "score_cvss2" search field.
type CVEParamScoreCVSS2 struct {
	Name     string
	Operator CVEScoreCVSS2Operators
}

// CVEScoreCVSS2Operators lists range operators for the CVSSv2 score.
type CVEScoreCVSS2Operators struct {
	Gte string
	Lte string
}

// CVEParamScoreCVSS3 describes the "score_cvss3" search field.
type CVEParamScoreCVSS3 struct {
	Name     string
	Operator CVEScoreCVSS3Operators
}

// CVEScoreCVSS3Operators lists range operators for the CVSSv3 score.
type CVEScoreCVSS3Operators struct {
	Gte string
	Lte string
}

// CVEParamSeverityCVSS2 describes the "severity_cvss2" search field.
type CVEParamSeverityCVSS2 struct {
	Name     string
	Operator CVESeverityCVSS2Operators
}

// CVESeverityCVSS2Operators lists operators accepted by "severity_cvss2".
type CVESeverityCVSS2Operators struct {
	Equal string
}

// CVEParamSeverityCVSS3 describes the "severity_cvss3" search field.
type CVEParamSeverityCVSS3 struct {
	Name     string
	Operator CVESeverityCVSS3Operators
}

// CVESeverityCVSS3Operators lists operators accepted by "severity_cvss3".
type CVESeverityCVSS3Operators struct {
	Equal string
}

// CVEParamPublishedAt describes the "published_at" search field.
type CVEParamPublishedAt struct {
	Name     string
	Operator CVEPublishedAtOperators
}

// CVEPublishedAtOperators lists operators accepted by "published_at".
type CVEPublishedAtOperators struct {
	Equal string
}

// CVEParamModifiedAt describes the "modified_at" search field.
type CVEParamModifiedAt struct {
	Name     string
	Operator CVEModifiedAtOperators
}

// CVEModifiedAtOperators lists operators accepted by "modified_at".
type CVEModifiedAtOperators struct {
	Equal string
}
package com.chris.bulleyeadmin.system.dto;

import com.chris.bulleyeadmin.system.pojo.Menu;
import com.chris.bulleyeadmin.system.pojo.MenuAuth;

import java.util.List;

/**
 * Menu transfer object extending the Menu entity with tree-rendering state
 * (expanded/leaf flags, key/value pair, and child nodes).
 */
public class MenuDto extends Menu {

    // Whether the tree node is currently expanded
    private boolean open;
    // Display value — semantics depend on the consuming UI; confirm with callers
    private String value;
    // Node key — semantics depend on the consuming UI; confirm with callers
    private String key;
    private String type;
    private Boolean isLeaf;
    // Child menu nodes of this entry
    private List<MenuDto> children;

    public boolean isOpen() {
        return open;
    }

    public void setOpen(boolean open) {
        this.open = open;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getKey() {
        return key;
    }

    public void setKey(String key) {
        this.key = key;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public Boolean getIsLeaf() {
        return isLeaf;
    }

    public void setIsLeaf(Boolean leaf) {
        this.isLeaf = leaf;
    }

    public List<MenuDto> getChildren() {
        return children;
    }

    public void setChildren(List<MenuDto> children) {
        this.children = children;
    }
}
How information heterogeneity influences traffic congestion during hurricane evacuation We investigate how the amount and kind of information received by hurricane evacuees affects the level of urban evacuation-induced traffic congestion. With the help of agent-based simulation driven by survey data for evacuees of Hurricane Matthew in Jacksonville, FL, we find that sending evacuation notices to households stands out as the most dominant factor impacting evacuation congestion. Using travel-time metrics and a newly introduced percolation congestion index, we show that issuing more mandatory (rather than voluntary) notices increases congestion only marginally, and that this marginal cost is offset by the benefit of the higher evacuation rates such notices produce. We also observe that segments of commonly used evacuation routes in the flood-prone areas are more likely to be congested during the evacuation period than the other road segments. This study affirms the importance of evacuation notices in evacuation planning and suggests that planning agencies might benefit by strategically sending these notices to people to control peak congestion.
{-# LANGUAGE DeriveGeneric #-}

-- | Generators for composable Turing-machine fragments, culminating in
-- 'universal': a machine description (a 'Program') that simulates an
-- encoded Turing machine — a universal Turing machine.
--
-- Conventions used throughout (inferred from the code; confirm against
-- the Program module):
--   * A 'Machine' maps a state id (shown as a String) to its transitions.
--   * Builders take a @success_state@ (and sometimes a @failure_state@)
--     to jump to, plus the @first_state@ id where their own states begin.
--   * The @(==>)@ / @(===>)@ combinators chain fragments by allocating
--     fresh state ids after the previous fragment's states.
module Generate ( universal , name ) where

import Data.HashMap.Strict as HashMap
import GHC.Generics
import Data.Aeson
import Data.Function
import qualified Data.List as List

import Program

type Machine = HashMap State [Transition]
type StateInt = Int

-- Sentinel state ids: every machine may bail out to the global failure
-- state (-1) or finish in the global success state (-2).
globalFailureState = -1
globalSuccessState = -2
-- Working alphabet; "." is appended as the blank of the emitted Program.
globalAlphabet = ["0" , "1" , "X", "Y", "Z", "B", ".", "F"]
globalBlank = "0"

-- | Instantiate one transition template for each symbol in the list.
-- When the template's write field is empty, write back the read symbol
-- (i.e. leave the tape cell unchanged).
for_letter :: [Symbol] -> Transition -> [Transition]
for_letter list_symb trans =
  List.map (\symb -> if Program.write trans /= ""
                       then trans { Program.read = symb }
                       else trans { Program.read = symb, Program.write = symb })
           list_symb

-- | Instantiate a transition template for every alphabet symbol NOT in the list.
not_letter :: [Symbol] -> Transition -> [Transition]
not_letter list_not_symb = for_letter (globalAlphabet List.\\ list_not_symb)

-- | Instantiate a transition template for every alphabet symbol.
any_letter = not_letter []

-- | Build a single transition; the target state id is rendered with 'show'.
newTransition :: Symbol -> Symbol -> Direction -> StateInt -> Transition
newTransition read write action state =
  Transition { Program.read = read
             , Program.write = write
             , to_state = show state
             , action = action }

-- | A one-state machine that unconditionally jumps to @final_state@
-- without moving or writing (used as "glue" between fragments).
transition_state first_state final_state =
  HashMap.empty
  & HashMap.insert (show (first_state))
      (for_letter globalAlphabet (newTransition "" "" Program.None final_state))

-- | Largest numeric state id used by a machine.
maxState :: Machine -> StateInt
maxState m = maximum (List.map Prelude.read (keys m))

-- | @(first_state, nb_state) ==> f@: reserve @nb_state@ states starting at
-- @first_state@, glue @first_state@ to the block built by @f@ just after them.
(==>) :: (StateInt, StateInt) -> (StateInt -> Machine) -> Machine
(==>) (first_state, nb_state) f =
  transition_state first_state (first_state + nb_state)
  & union (f (first_state + nb_state))

-- | @(machine, first_state) ===> f@: append the block built by @f@ after all
-- states of @machine@, gluing @first_state@ to the appended block's entry.
(===>) :: (Machine, StateInt) -> (StateInt -> Machine) -> Machine
(===>) (machine, first_state) f =
  transition_state first_state (maxState machine + 1)
  & union (f (maxState machine + 1))
  & union machine

-- | Scan in direction @dir@ for symbol @alpha@. Hitting any symbol in
-- @until@ first jumps to @failure_state@; finding @alpha@ jumps to
-- @success_state@ with the head on it.
find_first_until :: Direction -> Symbol -> [Symbol] -> StateInt -> StateInt -> StateInt -> Machine
find_first_until dir alpha until success_state failure_state first_state =
  HashMap.empty
  & HashMap.insert (show (first_state))
      ([(newTransition alpha alpha None success_state)]
       ++ (for_letter until (newTransition "" "" Program.None failure_state))
       ++ not_letter (until ++ [alpha]) (newTransition "" "" dir (first_state)))

-- | 'find_first_until' with no stop symbols; failing falls into the
-- global failure state (cannot happen on a well-formed tape).
find_first :: Direction -> Symbol -> StateInt -> StateInt -> Machine
find_first dir alpha success_state first_state =
  find_first_until dir alpha [] success_state globalFailureState first_state

-- | Find the first @alpha@ (stopping on any @until@ symbol) and overwrite
-- it with @beta@.
replace_first_until :: Direction -> Symbol -> Symbol -> [Symbol] -> StateInt -> StateInt -> StateInt -> Machine
replace_first_until dir alpha beta until success_state failure_state first_state =
  (first_state, 2) ==> find_first_until dir alpha until (first_state + 1) failure_state
  & HashMap.insert (show (first_state + 1))
      ([(newTransition alpha beta None success_state)]
       ++ not_letter ([alpha]) (newTransition "" "" dir (failure_state)))

-- | 'replace_first_until' with no stop symbols.
replace_first :: Direction -> Symbol -> Symbol -> StateInt -> StateInt -> Machine
replace_first dir alpha beta success_state first_state =
  replace_first_until dir alpha beta [] success_state globalFailureState first_state

-- | Replace every @alpha@ with @beta@ until a symbol in @until@ is reached
-- (loops 'replace_first_until' back into itself; the "failure" exit of the
-- scan is repurposed as the loop's normal exit).
replace_all :: Direction -> Symbol -> Symbol -> [Symbol] -> StateInt -> StateInt -> Machine
replace_all dir alpha beta until success_state first_state =
  replace_first_until dir alpha beta until first_state success_state (first_state)

-- | A machine builder parameterized by success, failure and entry states.
type FunctionMachine = (StateInt -> StateInt -> StateInt -> Machine)

-- | Run @f1@ then @f2@; either failing jumps to @global_failure@.
compose_function :: FunctionMachine -> FunctionMachine -> StateInt -> StateInt -> StateInt -> Machine
compose_function f1 f2 global_success global_failure first_state =
  (first_state, 2) ==> f1 (first_state + 1) global_failure
  & \m -> (m, first_state + 1) ===> f2 (global_success) (global_failure)

-- | Copy the unary run of "1"s at the head rightwards after marker @toSymb@,
-- using "B" as a per-digit bookkeeping mark that is restored afterwards.
copy_machine :: Symbol -> Symbol -> StateInt -> StateInt -> Machine
copy_machine fromSymb toSymb success_state first_state =
  ((first_state, 4) ==> replace_first_until Program.Right "1" "B" [toSymb, globalBlank] (first_state + 1) (first_state + 3))
  & \m -> (m, first_state + 1) ===> compose_function (find_first_until Program.Right toSymb []) (replace_first_until Program.Right "0" "1" []) (first_state + 2) globalFailureState
  & \m -> (m, first_state + 2) ===> find_first Program.Left "B" (first_state)
  & \m -> (m, first_state + 3) ===> replace_all Program.Left "B" "1" [fromSymb] success_state

-- | Like 'copy_machine' but copies from after marker @fromSymb@ back to
-- after marker @toSymb@ on the left.
copy_machine_rev :: Symbol -> Symbol -> StateInt -> StateInt -> Machine
copy_machine_rev fromSymb toSymb success_state first_state =
  ((first_state, 4) ==> find_first Program.Right fromSymb (first_state + 1)
  & \m -> (m, first_state + 1) ===> replace_first_until Program.Right "1" "B" [globalBlank] (first_state + 2) (first_state + 3))
  & \m -> (m, first_state + 2) ===> compose_function (find_first_until Program.Left toSymb []) (replace_first_until Program.Right "0" "1" []) (first_state) globalFailureState
  & \m -> (m, first_state + 3) ===> replace_all (Program.Left) "B" "1" [fromSymb] success_state

-- | Compare the unary numbers after markers @first_symb@ and @second_symb@:
-- jump to @success_state@ on equality, @failure_state@ otherwise. "B" marks
-- are used during matching and restored before exiting.
matching_machine :: Symbol -> Symbol -> StateInt -> StateInt -> StateInt -> Machine
matching_machine first_symb second_symb success_state failure_state first_state =
  ((first_state, 7) ==> replace_first_until Program.Right "1" "B" [second_symb, globalBlank] (first_state + 1) (first_state + 4))
  & \m -> (m, first_state + 1) ===> find_first Program.Right second_symb (first_state + 2)
  & \m -> (m, first_state + 2) ===> replace_first_until Program.Right "1" "B" [globalBlank] (first_state + 3) (first_state + 5)
  & \m -> (m, first_state + 3) ===> find_first Program.Left first_symb (first_state)
  & \m -> (m, first_state + 4) ===> compose_function (find_first_until Program.Right second_symb []) (find_first_until Program.Right "1" [globalBlank]) (first_state + 5) (first_state + 6)
  & \m -> (m, first_state + 5) ===> replace_all Program.Left "B" "1" [first_symb] failure_state
  & \m -> (m, first_state + 6) ===> replace_all Program.Left "B" "1" [first_symb] success_state

-- | Shift the binary word under the head one cell to the left, stopping
-- when @untilSymbol@ is reached. States +1/+2 remember the carried bit.
shiftl_machine :: Symbol -> StateInt -> StateInt -> Machine
shiftl_machine untilSymbol success_state first_state =
  HashMap.empty
  & HashMap.insert (show first_state)
      ( [newTransition "1" "." Program.Left (first_state + 1)
        , newTransition "0" "." Program.Left (first_state + 2)
        , newTransition "." "." Program.Left (first_state)] )
  & HashMap.insert (show (first_state + 1))
      ( [newTransition "1" "1" Program.Left (first_state + 1)
        , newTransition "0" "1" Program.Left (first_state + 2)
        , newTransition untilSymbol untilSymbol Program.None (success_state)] )
  & HashMap.insert (show (first_state + 2))
      ( [newTransition "1" "0" Program.Left (first_state + 1)
        , newTransition "0" "0" Program.Left (first_state + 2)
        , newTransition untilSymbol untilSymbol Program.None (success_state)] )

-- | Repeatedly left-shift to close the gap up to @untilSymbol@, compacting
-- the tape contents.
collapse_machine :: Symbol -> StateInt -> StateInt -> Machine
collapse_machine untilSymbol success_state first_state =
  (first_state, 3) ==> find_first_until Program.Right "1" ["0", "."] (first_state + 1) (success_state)
  & \m -> (m, first_state + 1) ===> find_first Program.Right "." (first_state + 2)
  & \m -> (m, first_state + 2) ===> shiftl_machine untilSymbol first_state

-- | Move the head one cell left / right without writing.
left_machine success_state first_state =
  HashMap.empty
  & HashMap.insert (show first_state) (any_letter (newTransition "" "" Program.Left success_state))

right_machine success_state first_state =
  HashMap.empty
  & HashMap.insert (show first_state) (any_letter (newTransition "" "" Program.Right success_state))

-- | Wrap an explicit transition list as a one-state machine.
machine_with_transition :: [Transition] -> StateInt -> Machine
machine_with_transition trans first_state =
  HashMap.empty & HashMap.insert (show first_state) (trans)

-- | Shift the binary word right of @fromSymbol@ one cell to the right,
-- up to the next ".". States +1/+2 carry the pending bit.
shiftr_machine :: Symbol -> StateInt -> StateInt -> Machine
shiftr_machine fromSymbol success_state first_state =
  HashMap.empty
  & HashMap.insert (show first_state)
      ( [newTransition fromSymbol fromSymbol Program.Right first_state
        , newTransition "1" "1" Program.Right (first_state + 1)
        , newTransition "0" "1" Program.Right (first_state + 2)] )
  & HashMap.insert (show (first_state + 1))
      ( [newTransition "1" "1" Program.Right (first_state + 1)
        , newTransition "0" "1" Program.Right (first_state + 2)
        , newTransition "." "1" Program.None (success_state)] )
  & HashMap.insert (show (first_state + 2))
      ( [newTransition "1" "0" Program.Right (first_state + 1)
        , newTransition "0" "0" Program.Right (first_state + 2)
        , newTransition "." "0" Program.None (success_state)] )

-- | Overwrite the "1"-run after @fromSymbol@, making room by right-shifting
-- past @toSymbol@ as needed ("substitute" one encoded field into another).
substitution_machine :: Symbol -> Symbol -> StateInt -> StateInt -> Machine
substitution_machine fromSymbol toSymbol success_state first_state =
  (first_state, 6) ==> collapse_machine toSymbol (first_state + 1)
  & \m -> (m, first_state + 1) ===> find_first Program.Left fromSymbol (first_state + 2)
  & \m -> (m, first_state + 2) ===> replace_first_until Program.Right "1" "B" [globalBlank, toSymbol] (first_state + 3) (first_state + 5)
  & \m -> (m, first_state + 3) ===> find_first Program.Right toSymbol (first_state + 4)
  & \m -> (m, first_state + 4) ===> shiftr_machine "Z" (first_state + 1)
  & \m -> (m, first_state + 5) ===> replace_all Program.Left "B" "1" [fromSymbol] success_state

-- Steps 1..8 below are the phases of one simulated transition of the
-- encoded machine; their exact tape layout is internal to this generator.

-- | Step 1: copy the current-state field (after "Y") into the "X" area,
-- then position on the encoded program ("Z").
step1 :: StateInt -> StateInt -> Machine
step1 success_state first_state =
  (first_state, 5) ==> copy_machine_rev "Y" "X" (first_state + 1)
  & \m -> (m, first_state + 1) ===> find_first Program.Left "X" (first_state + 2)
  & \m -> (m, first_state + 2) ===> replace_first Program.Right "0" "X" (first_state + 3)
  & \m -> (m, first_state + 3) ===> replace_first Program.Right "Y" "0" (first_state + 4)
  & \m -> (m, first_state + 4) ===> find_first Program.Right "Z" (success_state)

-- | Step 2: copy the scanned-symbol field (after "Z") into the "X" area.
step2 :: StateInt -> StateInt -> Machine
step2 success_state first_state = copy_machine_rev "Z" "X" success_state first_state

-- | Step 3: re-mark the "X"/"Y" boundaries around the lookup key.
step3 :: StateInt -> StateInt -> Machine
step3 success_state first_state =
  (first_state, 7) ==> replace_first Program.Left "X" "0" (first_state + 1)
  & \m -> (m, first_state + 1) ===> right_machine (first_state + 2)
  & \m -> (m, first_state + 2) ===> find_first Program.Right "0" (first_state + 3)
  & \m -> (m, first_state + 3) ===> find_first Program.Right "1" (first_state + 4)
  & \m -> (m, first_state + 4) ===> left_machine (first_state + 5)
  & \m -> (m, first_state + 5) ===> replace_first Program.Right "0" "Y" (first_state + 6)
  & \m -> (m, first_state + 6) ===> find_first Program.Left "X" success_state

-- | Move the marker @symbol@ one encoded term to the left.
shift_one_term_left :: Direction -> Symbol -> StateInt -> StateInt -> Machine
shift_one_term_left dir symbol success_state first_state =
  (first_state, 5) ==> find_first dir symbol (first_state + 1)
  & \m -> (m, first_state + 1) ===> replace_first Program.Left "0" symbol (first_state + 2)
  & \m -> (m, first_state + 2) ===> right_machine (first_state + 3)
  & \m -> (m, first_state + 3) ===> replace_first Program.Right symbol "0" (first_state + 4)
  & \m -> (m, first_state + 4) ===> find_first Program.Left symbol success_state

-- | Move the marker @symbol@ one encoded term to the right (simple variant
-- with no tape extension).
simple_shift_one_term_right :: Direction -> Symbol -> StateInt -> StateInt -> Machine
simple_shift_one_term_right dir symbol success_state first_state =
  (first_state, 5) ==> find_first dir symbol (first_state + 1)
  & \m -> (m, first_state + 1) ===> replace_first Program.Right "0" symbol (first_state + 2)
  & \m -> (m, first_state + 2) ===> left_machine (first_state + 3)
  & \m -> (m, first_state + 3) ===> replace_first Program.Left symbol "0" (first_state + 4)
  & \m -> (m, first_state + 4) ===> find_first Program.Right symbol success_state

-- | Move marker @symbol@ one term right, extending the encoded tape with a
-- fresh cell ("1" then "0") when the end marker "." is reached first.
shift_one_term_right :: Direction -> Symbol -> StateInt -> StateInt -> Machine
shift_one_term_right dir symbol success_state first_state =
  (first_state, 10) ==> find_first dir symbol (first_state + 1)
  & \m -> (m, first_state + 1) ===> replace_first Program.Right "0" symbol (first_state + 2)
  & \m -> (m, first_state + 2) ===> find_first_until Program.Right "." ["0", "1"] (first_state + 6) (first_state + 9)
  & \m -> (m, first_state + 3) ===> left_machine (first_state + 4)
  & \m -> (m, first_state + 4) ===> replace_first Program.Left symbol "0" (first_state + 5)
  & \m -> (m, first_state + 5) ===> find_first Program.Right symbol success_state
  & \m -> (m, first_state + 6) ===> replace_first Program.Right "." "1" (first_state + 7)
  & \m -> (m, first_state + 7) ===> replace_first Program.Right "." "0" (first_state + 8)
  & \m -> (m, first_state + 8) ===> left_machine (first_state + 9)
  & \m -> (m, first_state + 9) ===> left_machine (first_state + 3)

-- | Compare the "X"-marked lookup key against the "Y"-marked transition
-- entry: both the state and symbol fields must match.
compare_configuration :: StateInt -> StateInt -> StateInt -> Machine
compare_configuration success_state failed_state first_state =
  (first_state, 7) ==> matching_machine "X" "Y" (first_state + 1) failed_state
  & \m -> -- ATTENTION (first_state + 5) & \m ->
  (m, first_state + 1) ===> replace_first Program.Right "0" "X" (first_state + 2)
  & \m -> (m, first_state + 2) ===> shift_one_term_right Program.Right "Y" (first_state + 3)
  & \m -> (m, first_state + 3) ===> find_first Program.Left "X" (first_state + 4)
  & \m -> (m, first_state + 4) ===> matching_machine "X" "Y" (first_state + 6) (first_state + 5)
  & \m -> (m, first_state + 5) ===> replace_first Program.Left "X" "0" failed_state
  & \m -> (m, first_state + 6) ===> replace_first Program.Left "X" "0" success_state

-- | Check whether the simulated machine reached its final state "F":
-- match succeeds into the global success state, otherwise keep scanning.
check_final_state :: StateInt -> Machine
check_final_state first_state =
  (first_state, 4) ==> find_first Program.Left "F" (first_state + 1)
  & \m -> (m, first_state + 1) ===> matching_machine "F" "X" (globalSuccessState) (first_state + 2)
  & \m -> (m, first_state + 2) ===> simple_shift_one_term_right Program.Left "F" (first_state + 3)
  & \m -> (m, first_state + 3) ===> find_first_until Program.Right "X" ["1"] (globalFailureState) (first_state)

-- | Wrap-up after the table scan: reposition "Z" and test for termination.
end_machine :: StateInt -> Machine
end_machine first_state =
  (first_state, 6) ==> replace_first Program.Right "0" "Z" (first_state + 1)
  & \m -> (m, first_state + 1) ===> right_machine (first_state + 2)
  & \m -> (m, first_state + 2) ===> shift_one_term_right Program.Right "Z" (first_state + 3)
  & \m -> (m, first_state + 3) ===> replace_first Program.Right "Z" "0" (first_state + 4)
  & \m -> (m, first_state + 4) ===> find_first Program.Right "." (first_state + 5)
  & \m -> (m, first_state + 5) ===> check_final_state

-- | Advance the "Y" marker to the next transition entry in the encoded
-- table; running off the table triggers 'end_machine'.
next_configuration :: StateInt -> StateInt -> Machine
next_configuration success_state first_state =
  (first_state, 9) ==> find_first Program.Right "Y" (first_state + 1)
  & \m -> (m, first_state + 1) ===> find_first Program.Right "0" (first_state + 2)
  & \m -> (m, first_state + 2) ===> right_machine (first_state + 3)
  & \m -> (m, first_state + 3) ===> machine_with_transition
      ( [newTransition "0" "Y" Program.Right (first_state + 4)]
        ++ (not_letter ["0"] (newTransition "" "" Program.Right (first_state + 1))) )
  & \m -> (m, first_state + 4) ===> find_first_until Program.Right "0" ["1"] (first_state + 8) (first_state + 5)
  & \m -> (m, first_state + 5) ===> find_first Program.Left "Y" (first_state + 6)
  & \m -> (m, first_state + 6) ===> left_machine (first_state + 7)
  & \m -> (m, first_state + 7) ===> replace_first Program.Left "Y" "0" success_state
  & \m -> (m, first_state + 8) ===> end_machine

-- | Step 4: scan the transition table for the entry matching the current
-- (state, symbol) key.
step4 :: StateInt -> StateInt -> Machine
step4 success_state first_state =
  (first_state, 5) ==> find_first Program.Left "X" (first_state + 1)
  & \m -> (m, first_state + 1) ===> compare_configuration success_state (first_state + 2)
  & \m -> (m, first_state + 2) ===> next_configuration (first_state + 3)
  & \m -> (m, first_state + 3) ===> find_first Program.Right "Y" (first_state + 4)
  & \m -> (m, first_state + 4) ===> find_first_until Program.Right "Z" ["0", "1"] (globalFailureState) (first_state)

-- | Step 5: clear the old key and position "Y" on the matched entry's
-- output fields.
step5 :: StateInt -> StateInt -> Machine
step5 success_state first_state =
  (first_state, 5) ==> right_machine (first_state + 1)
  & \m -> (m, first_state + 1) ===> find_first Program.Right "0" (first_state + 2)
  & \m -> (m, first_state + 2) ===> replace_all Program.Left "1" "0" ["X"] (first_state + 3)
  & \m -> (m, first_state + 3) ===> shift_one_term_right Program.Right "Y" (first_state + 4)
  & \m -> (m, first_state + 4) ===> shift_one_term_right Program.Right "Y" success_state

-- | Step 6: write the entry's output symbol into the simulated tape cell.
step6 :: StateInt -> StateInt -> Machine
step6 success_state first_state =
  (first_state, 2) ==> find_first Program.Right "Z" (first_state + 1)
  & \m -> (m, first_state + 1) ===> substitution_machine "Y" "Z" success_state

-- | Right-shift variant that starts from a "0" cell and threads the "Z"
-- marker through while shifting (used when moving the simulated head).
shiftr_machine_from0withZ :: StateInt -> StateInt -> Machine
shiftr_machine_from0withZ success_state first_state =
  HashMap.empty
  & HashMap.insert (show first_state)
      ( [newTransition "Z" "0" Program.Right (first_state + 3)
        , newTransition "1" "0" Program.Right (first_state + 1)
        , newTransition "0" "0" Program.Right (first_state + 2)] )
  & HashMap.insert (show (first_state + 1))
      ( [newTransition "1" "1" Program.Right (first_state + 1)
        , newTransition "0" "1" Program.Right (first_state + 2)
        , newTransition "Z" "1" Program.Right (first_state + 3)
        , newTransition "." "1" Program.None (success_state)] )
  & HashMap.insert (show (first_state + 2))
      ( [newTransition "1" "0" Program.Right (first_state + 1)
        , newTransition "0" "0" Program.Right (first_state + 2)
        , newTransition "Z" "0" Program.Right (first_state + 3)
        , newTransition "." "0" Program.None (success_state)] )
  & HashMap.insert (show (first_state + 3))
      ( [newTransition "1" "Z" Program.Right (first_state + 1)
        , newTransition "0" "Z" Program.Right (first_state + 2)
        , newTransition "Z" "Z" Program.Right (first_state + 3)
        , newTransition "." "Z" Program.None (success_state)] )

-- | Move marker @symbol@ one term left on the simulated tape, extending
-- the tape to the left when the marker sits on its first cell.
shift_one_term_left_on_tape :: Direction -> Symbol -> StateInt -> StateInt -> Machine
shift_one_term_left_on_tape dir symbol success_state first_state =
  (first_state, 10) ==> find_first dir symbol (first_state + 1)
  & \m -> (m, first_state + 1) ===> replace_first Program.Left "0" symbol (first_state + 2)
  & \m -> (m, first_state + 2) ===> right_machine (first_state + 3)
  & \m -> (m, first_state + 3) ===> replace_first Program.Right symbol "0" (first_state + 4)
  & \m -> (m, first_state + 4) ===> find_first Program.Left symbol (first_state + 5)
  & \m -> (m, first_state + 5) ===> find_first_until Program.Right "0" ["1"] (first_state + 6) success_state
  & \m -> (m, first_state + 6) ===> shiftr_machine "Z" (first_state + 7)
  & \m -> (m, first_state + 7) ===> find_first Program.Left "Z" (first_state + 8)
  & \m -> (m, first_state + 8) ===> left_machine (first_state + 9)
  & \m -> (m, first_state + 9) ===> shiftr_machine_from0withZ success_state

-- | Step 7: decode the move field of the matched entry and shift the
-- simulated head marker "Z" left, right, or not at all.
step7 :: StateInt -> StateInt -> Machine
step7 success_state first_state =
  (first_state, 8) ==> shift_one_term_right Program.Right "Y" (first_state + 1)
  & \m -> (m, first_state + 1) ===> find_first_until Program.Right "1" ["0"] (first_state + 2) globalFailureState
  & \m -> (m, first_state + 2) ===> right_machine (first_state + 3)
  & \m -> (m, first_state + 3) ===> find_first_until Program.Right "1" ["0"] (first_state + 4) (first_state + 6)
  & \m -> (m, first_state + 4) ===> right_machine (first_state + 5)
  & \m -> (m, first_state + 5) ===> find_first_until Program.Right "1" ["0"] (first_state + 7) success_state
  & \m -> (m, first_state + 6) ===> shift_one_term_left_on_tape Program.Right "Z" success_state
  & \m -> (m, first_state + 7) ===> shift_one_term_right Program.Right "Z" success_state

-- | Step 8: rewind the "Y" marker two terms to the start of the table,
-- ready for the next simulated transition.
step8 :: StateInt -> StateInt -> Machine
step8 success_state first_state =
  (first_state, 2) ==> shift_one_term_left Program.Left "Y" (first_state + 1)
  & \m -> (m, first_state + 1) ===> shift_one_term_left Program.Left "Y" success_state

-- | The full simulation loop: steps 1..8 chained, with step8 looping back
-- to the entry state for the next simulated transition.
universal_machine :: StateInt -> StateInt -> Machine
universal_machine success_state first_state =
  (first_state, 9) ==> find_first Program.Right "Y" (first_state + 1)
  & \m -> (m, first_state + 1) ===> step1 (first_state + 2)
  & \m -> (m, first_state + 2) ===> step2 (first_state + 3)
  & \m -> (m, first_state + 3) ===> step3 (first_state + 4)
  & \m -> (m, first_state + 4) ===> step4 (first_state + 5)
  & \m -> (m, first_state + 5) ===> step5 (first_state + 6)
  & \m -> (m, first_state + 6) ===> step6 (first_state + 7)
  & \m -> (m, first_state + 7) ===> step7 (first_state + 8)
  & \m -> (m, first_state + 8) ===> step8 first_state

-- | The exported universal machine as a 'Program'. Commented lines below
-- are kept as debug entry points for the individual building blocks.
universal =
  -- let trans = find_first_until Program.Right "X" [] 0 1 2 in
  -- let trans = replace_first_until Program.Right "X" "B" [] 0 1 2 in
  -- let trans = replace_all Program.Right "X" "B" ["Z"] 0 1 in
  -- let trans = copy_machine "X" "Z" 0 1 in
  -- let trans = copy_machine_rev "Z" "X" 0 1 in
  -- let trans = matching_machine 0 1 2 in
  -- let trans = collapse_machine 0 1 in
  -- let trans = shiftr_machine 0 1 in
  -- let trans = substitution_machine 0 1 in
  let trans = universal_machine 0 1 in
  Program { name = "Turing'ception"
          , alphabet = globalAlphabet ++ ["."]
          , blank = "."
          , states = List.map (\x -> show x) [-2..(maxState trans + 1)]
          , initial = "1"
          , finals = ["-2"]
          , transitions = trans }
/* IMPORT */ import * as _ from 'lodash'; import * as React from 'react'; import Tags from '@renderer/utils/tags'; import TagSingle from './tag_single'; /* TAG GROUP */ const TagGroup: React.FC<any> = React.memo ( function TagGroup ({ tag, ...props }) { if ( !tag.notes.length ) return null; return ( <div className="tag-group multiple vertical fluid"> <TagSingle tag={tag.path} name={tag.name} {...props} /> {tag.collapsed ? null : ( Tags.sort ( Object.values ( tag.tags ) ).map ( ( tag: any ) => ( <TagGroup key={tag.path} tag={tag} {...props} /> )) )} </div> ); }); /* EXPORT */ export default TagGroup;
/**
 * Note: The attributes are lowerCamelCase.
 */
// NOTE(review): @Value is presumably Lombok's immutable-value annotation —
// confirm against the file's imports (not visible in this chunk).
@Value
public static class TestableReference {

    // Nested element serialized as <BuildableReference>.
    @JacksonXmlProperty(localName = "BuildableReference")
    BuildableReference buildableReference;

    // Serialized as an XML attribute; this reference is never marked skipped.
    @JacksonXmlProperty(isAttribute = true)
    public boolean getSkipped() {
        return false;
    }
}
<reponame>al2698/sp<filename>10-riscv/05-nix5/03-fputs/os5/kernel/nix.h
/* Kernel-wide declarations for the nix mini-OS: UART register map, the
 * character-device switch table, and the public I/O entry points. */
#ifndef __NIX_H__
#define __NIX_H__

#include <board.h>
#include <std.h>
#include <string.h>

#define UART_THR (char*)(UART+0x00) // THR: transmitter holding register
#define UART_RHR (char*)(UART+0x00) // RHR: receiver holding register -- NOTE(review): shares offset 0x00 with THR (read selects RHR, write selects THR); original comment wrongly said THR
#define UART_LSR (char*)(UART+0x05) // LSR: line status register
#define UART_LSR_RX_READY 0x01 // input is waiting to be read from RHR
#define UART_LSR_EMPTY_MASK 0x40 // LSR bit 6: when set, the transmitter is empty and ready to send (Transmitter empty; both the THR and LSR are empty)

#define CONSOLE 1
#define NDEV 32

// map major device number to device functions.
struct devsw {
  ssize_t (*read)(void *, size_t);
  ssize_t (*write)(const void *, size_t);
};

extern struct devsw devsw[NDEV];
extern struct ftable ftable;

void nix_init(void);
int nix_start(void);
int main();

// Generic file-descriptor I/O entry points.
ssize_t read(int fd, void *buf, size_t count);
ssize_t write(int fd, const void *buf, size_t count);

// Character-device helpers parameterized by a per-device putc/getc.
ssize_t cdev_write(const void *buf, size_t n, int (*_putc)(int));
ssize_t cdev_read(void *buf, size_t n, int (*_getc)(void));

// uart
int uart_putc(int ch);
int uart_getc();

// console
ssize_t console_read(void *buf, size_t n);
ssize_t console_write(const void *buf, size_t n);

// ramdisk
void ramdisk_init();
ssize_t ramdisk_read(void *buf, size_t n);
ssize_t ramdisk_write(const void *buf, size_t n);
void ramdisk_close();

#endif
<reponame>jinmang2/DeZero import numpy as np import dezero from dezero.core import Function, Variable, as_variable, as_array # =================================================================== # Basic functions: sin / cos / tanh / exp / log # =================================================================== class Sin(Function): def forward(self, x): y = np.sin(x) return y def backward(self, gy): x, = self.inputs gx = gy * cos(x) return gx def sin(x): return Sin()(x) class Cos(Function): def forward(self, x): y = np.cos(x) return y def backward(self, gy): x, = self.inputs gx = gy * -sin(x) return gx def cos(x): return Cos()(x) class Tanh(Function): def forward(self, x): y = np.tanh(x) return y def backward(self, gy): y = self.outputs[0]() # weakref gx = gy * (1 - y * y) return gx def tanh(x): return Tanh()(x)
# Counts how many single elements can be removed so that the sums of
# odd-positioned and even-positioned elements (1-based) become equal.
n = int(input())
values = list(map(int, input().split()))

# odd_pref[i] / even_pref[i]: sums over odd/even 1-based positions among the
# first i elements.
odd_pref = [0]
even_pref = [0]
for pos, v in enumerate(values, 1):
    odd_pref.append(odd_pref[-1] + (v if pos % 2 == 1 else 0))
    even_pref.append(even_pref[-1] + (v if pos % 2 == 0 else 0))

answer = 0
for pos in range(1, len(values) + 1):
    # Removing position `pos` flips the parity of every later position.
    left_odd = odd_pref[pos - 1]
    left_even = even_pref[pos - 1]
    right_odd = even_pref[-1] - even_pref[pos]
    right_even = odd_pref[-1] - odd_pref[pos]
    if left_odd + right_odd == left_even + right_even:
        answer += 1

print(answer)
Supply Chain Engineering in China’s Retailing Industry: A Case of Meiyijia Abstract This paper addresses supply chain engineering and its application in China’s retailing industry. Based on the approaches of systems engineering, we propose the concept of supply chain engineering, which applies the ideas of supply chain management to engineering practice through advanced information and management technology, in order to integrate the supply chain system and optimize its operations. We then illustrate the application of supply chain engineering in China’s retailing industry. In these practices, we developed the virtual retailing enterprise mode and the FROM-SCM system, and designed the sales assistant, among other tools. This theory and practice have been successfully applied at Meiyijia, transforming it from a traditional retailer into a modern service enterprise whose profits result from service fees rather than the traditional margin between buying and selling prices. Meiyijia has now built an ecosystem with retailers at the core and its headquarters as the service platform. The success of Meiyijia in recent years shows the effectiveness of supply chain engineering.
Accuracy analysis and form-finding design of uncertain mesh reflectors based on interval force density method Mesh reflectors are uncertain structures because of the existing errors of dimension and material in the procedure of design and manufacture. These uncertainties have significant impacts on the mechanical and electrical properties, which must be considered during the design phase. Three directly related factors of cable uncertainties in mesh reflectors are considered in this paper, including the initial length, cross-sectional area, and elastic modulus. The analytical relationship between the cable uncertainties and the surface accuracy of mesh reflectors is deduced by interval analysis, and an interval force density method is thus proposed. First, this method is used to analyze the influence of the cable uncertainties on the surface accuracy. Then it is applied into the form-finding optimization of uncertain mesh reflectors to minimize the influence of cable uncertainties on the surface accuracy. Three kinds of cable nets of mesh reflectors are illustrated to analyze the influence of the cable uncertainties on the surface accuracy, and the mesh reflectors with high surface accuracy are obtained by the proposed method. Finally, the influences of both the design values and deviation amplitudes of cable uncertainties on the surface accuracy are revealed.
// Resets whatever connection is associated with this ip port. void node_mgr::init(const string& ip, int32_t port) { string nip = getID(ip, port); map<string,FawnKVFrontendClient*>::iterator it = ip_fe_map.find(nip); if (it != ip_fe_map.end()) { delete (*it).second; ip_fe_map.erase(it); } }
def is_TumorInputRead1_present(self, key_value_tuples):
    """Return True iff a "TumorInputRead1" key exists in the config pairs
    and carries a non-empty value.

    A value of "" or '""' counts as empty.  Only the first matching pair is
    considered; the outcome is recorded via the project logger.
    """
    for pair in key_value_tuples:
        if pair[0] != "TumorInputRead1":
            continue
        if pair[1] in ("", '""'):
            self.project_logger.log_debug("The TumorInputRead1 key was found, and its value was empty")
            return False
        self.project_logger.log_debug("The TumorInputRead1 key was found, and its value was not empty")
        return True
    self.project_logger.log_debug("The TumorInputRead1 key was not found in the config file")
    return False
<reponame>ericvh/veracruz //! The Veracruz global policy. //! //! The global policy captures important information about a Veracruz //! computation that principals need to audit before they enroll themselves in a //! computation. This includes: //! //! - The identities and roles of every principals in the computation, //! - Important URLs, both for the Veracruz bridge server on the untrusted //! host's machine and the Veracruz proxy attestation service, //! - Permissible ciphersuites for TLS connections between clients and the //! trusted Veracruz runtime, as well as the hashes of the expected program //! and of the trusted Veracruz runtime itself, //! - The expiry date (moment in time) of the self-signed certificate issued by //! the enclave during a pre-computation bootstrapping process, //! - The execution strategy that will be used by the trusted Veracruz runtime //! to execute the WASM binary, as well as a debug configuration flag which //! allows the WASM binary to write data to `stdout` on the untrusted host's //! machine, //! - The order in which data inputs provisioned into the enclave will be placed //! which is important for the program provider to understand in order to //! write software for Veracruz. //! //! ## Authors //! //! The Veracruz Development Team. //! //! ## Licensing and copyright notice //! //! See the `LICENSE.markdown` file in the Veracruz root directory for //! information on licensing and copyright. use crate::{ platform::Platform, policy::{ error::PolicyError, expiry::Timepoint, principal::{ CapabilityTable, ExecutionStrategy, FileCapability, FileOperation, Identity, Principal, Program, }, }, }; use serde::{Deserialize, Serialize}; use serde_json; use std::{ collections::{HashMap, HashSet}, string::{String, ToString}, vec::Vec, }; //////////////////////////////////////////////////////////////////////////////// // Veracruz policies, proper. 
//////////////////////////////////////////////////////////////////////////////// /// A type representing the data stored in a Veracruz global policy. This file /// is public information available to every principal in a Veracruz computation /// and contains data that every principal needs to audit and understand before /// they enroll in a computation, so that they are capable of assessing whether /// a computation is "safe" or not for them to join. #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Policy { /// The identities of every principal involved in a computation. identities: Vec<Identity<String>>, /// The candidate programs that can be loaded in the execution engine. programs: Vec<Program>, /// The URL of the Veracruz server. veracruz_server_url: String, /// The expiry of the enclave's self-signed certificate, which will be /// issued during the Veracruz bootstrapping process prior to the /// computation. enclave_cert_expiry: Timepoint, /// The ciphersuite that will be used with the TLS connections between the /// principals of the computation and the enclave. ciphersuite: String, /// The hash of the Veracruz trusted runtime for SGX enclaves. runtime_manager_hash_sgx: Option<String>, /// The hash of the Veracruz trusted runtime for TrustZone TAs. runtime_manager_hash_tz: Option<String>, /// The hash of the Veracruz trusted runtime for AWS Nitro Enclaves. runtime_manager_hash_nitro: Option<String>, /// The URL of the proxy attestation service. proxy_attestation_server_url: String, /// The debug configuration flag. This dictates whether the WASM program /// will be able to print debug configuration messages to *stdout* on the /// host's machine. debug: bool, /// The execution strategy that will be used to execute the WASM binary. execution_strategy: ExecutionStrategy, } impl Policy { /// Constructs a new Veracruz policy type, validating the well-formedness of /// the resulting policy in the process. 
Returns `Ok(policy)` iff these /// well-formedness checks pass. pub fn new( identities: Vec<Identity<String>>, programs: Vec<Program>, veracruz_server_url: String, enclave_cert_expiry: Timepoint, ciphersuite: String, runtime_manager_hash_sgx: Option<String>, runtime_manager_hash_tz: Option<String>, runtime_manager_hash_nitro: Option<String>, proxy_attestation_server_url: String, debug: bool, execution_strategy: ExecutionStrategy, ) -> Result<Self, PolicyError> { let policy = Self { identities, programs, veracruz_server_url, enclave_cert_expiry, ciphersuite, runtime_manager_hash_sgx, runtime_manager_hash_tz, runtime_manager_hash_nitro, proxy_attestation_server_url, debug, execution_strategy, }; policy.assert_valid()?; Ok(policy) } /// Parses a Veracruz policy type from a JSON-encoded string, `json`, /// validating the well-formedness of the resulting policy in the process. /// Returns `Ok(policy)` iff these well-formedness checks pass. pub fn from_json(json: &str) -> Result<Self, PolicyError> { let policy: Self = serde_json::from_str(json)?; policy.assert_valid()?; Ok(policy) } /// Returns the identities associated with this policy. #[inline] pub fn identities(&self) -> &Vec<Identity<String>> { &self.identities } /// Returns the URL of the Veracruz server associated with this policy. #[inline] pub fn veracruz_server_url(&self) -> &String { &self.veracruz_server_url } /// Returns the enclave certificate expiry moment associated with this /// policy. #[inline] pub fn enclave_cert_expiry(&self) -> &Timepoint { &self.enclave_cert_expiry } /// Returns the permissible ciphersuites for TLS links associated with this /// policy. #[inline] pub fn ciphersuite(&self) -> &String { &self.ciphersuite } /// Returns the hash of the trusted Veracruz runtime, associated with this /// policy. 
#[inline] pub fn runtime_manager_hash(&self, platform: &Platform) -> Result<&String, PolicyError> { let hash = match platform { Platform::SGX => match &self.runtime_manager_hash_sgx { Some(hash) => hash, None => { return Err(PolicyError::MissingPolicyFieldError( "runtime_manager_hash_sgx".to_string(), )) } }, Platform::TrustZone => match &self.runtime_manager_hash_tz { Some(hash) => hash, None => { return Err(PolicyError::MissingPolicyFieldError( "runtime_manager_hash_tz".to_string(), )) } }, Platform::Nitro => match &self.runtime_manager_hash_nitro { Some(hash) => hash, None => { return Err(PolicyError::MissingPolicyFieldError( "runtime_manager_hash_nitro".to_string(), )) } }, Platform::Mock => match &self.runtime_manager_hash_sgx { Some(hash) => hash, None => { return Err(PolicyError::MissingPolicyFieldError( "runtime_manager_hash_sgx".to_string(), )) } }, }; return Ok(&hash); } /// Returns the URL of the proxy attestation service, associated with this /// policy. #[inline] pub fn proxy_attestation_server_url(&self) -> &String { &self.proxy_attestation_server_url } /// Returns the debug configuration flag associated with this policy. #[inline] pub fn debug(&self) -> &bool { &self.debug } /// Returns the execution strategy associated with this policy. #[inline] pub fn execution_strategy(&self) -> &ExecutionStrategy { &self.execution_strategy } /// Checks that the policy is valid, returning `Err(reason)` iff the policy /// is found to be invalid. In all other cases, `Ok(())` is returned. 
fn assert_valid(&self) -> Result<(), PolicyError> { let mut client_ids = Vec::new(); for identity in self.identities.iter() { identity.assert_valid()?; // check IDs of all the participants if client_ids.contains(identity.id()) { return Err(PolicyError::DuplicatedClientIDError(*identity.id() as u64)); } client_ids.push(*identity.id()); } // Check the ciphersuite #[cfg(features = "std")] { let policy_ciphersuite = rustls::CipherSuite::lookup_value(self.ciphersuite()) .map_err(|_| { PolicyError::TLSInvalidCyphersuiteError(self.get_ciphersuite().to_string()) })?; if !rustls::ALL_CIPHERSUITES .iter() .fold(false, |acc, sup| acc || (sup.suite == policy_ciphersuite)) { return Err(PolicyError::TLSUnsupportedCyphersuiteError( policy_ciphersuite, )); } } // NB: no check of enclave certificate validity as there is no reliable // way of obtaining a time from within an enclave. This is the // responsibility of the clients of Veracruz. Ok(()) } /// Returns the identity of any principal in the computation who is capable /// of requesting a shutdown of the computation. At the moment, only the /// principals who can request the result can also request shutdown. pub fn expected_shutdown_list(&self) -> Vec<u64> { self.identities() .iter() .fold(Vec::new(), |mut acc, identity| { acc.push(*identity.id() as u64); acc }) } /// Returns `Ok(identity)` if a principal with a certificate matching the /// X509 certificate, `cert`, is present within the list of /// identities/principals associated with this policy. Otherwise, returns /// an error. pub fn check_client_id(&self, cert: &str) -> Result<u64, PolicyError> { for identity in self.identities().iter() { if identity.certificate().as_str() == cert { return Ok(*identity.id() as u64); } } Err(PolicyError::InvalidClientCertificateError(cert.to_string())) } /// Return the CapabilityTable in this policy. It contains capabilities related to all /// participants and programs. 
pub fn get_capability_table(&self) -> CapabilityTable { let mut table = HashMap::new(); for identity in self.identities() { let id = identity.id(); let file_permissions = identity.file_permissions(); let capabilities_table = Self::to_capabilities(&file_permissions); table.insert(Principal::Participant(*id as u64), capabilities_table); } for program in &self.programs { let program_file_name = program.program_file_name(); let file_permissions = program.file_permissions(); let capabilities_table = Self::to_capabilities(&file_permissions); table.insert( Principal::Program(program_file_name.to_string()), capabilities_table, ); } table } /// Convert a vec of FileCapability to a Hashmap from filenames to sets of allowed FileOperation. fn to_capabilities( file_permissions: &[FileCapability], ) -> HashMap<String, HashSet<FileOperation>> { let mut capabilities_table = HashMap::new(); for permission in file_permissions { let (file_name, capabilities) = permission.to_capability_entry(); capabilities_table.insert(file_name, capabilities); } capabilities_table } /// Return the program digest table, mapping program filenames to their expected digests. pub fn get_program_digests(&self) -> Result<HashMap<String, Vec<u8>>, PolicyError> { let mut table = HashMap::new(); for program in &self.programs { let program_file_name = program.program_file_name(); let pi_hash = program.pi_hash(); table.insert( program_file_name.to_string(), hex::decode(pi_hash) .map_err(|_e| PolicyError::HexDecodeError(program_file_name.to_string()))?, ); } Ok(table) } /// Return the program input table, mapping program filenames to their expected input filenames. 
pub fn get_input_table(&self) -> Result<HashMap<String, Vec<String>>, PolicyError> { let mut table = HashMap::new(); for program in &self.programs { let program_file_name = program.program_file_name(); let file_permissions = program.file_permissions(); table.insert( program_file_name.to_string(), Self::get_required_inputs(&file_permissions), ); } Ok(table) } /// Extract the input filenames from a vec of FileCapability. If a prorgam has permission to /// read, it is considered as an input file. fn get_required_inputs(cap: &[FileCapability]) -> Vec<String> { let mut rst = cap.iter().fold(Vec::new(), |mut acc, x| { if x.read() { acc.push(x.file_name().to_string()); } acc }); rst.sort(); rst } }
/*
 * Merge all buffered messages into a single buffer.
 *
 * If no message is present, both `bufferp`'s and `contextp`'s value
 * will be set to `NULL` and the return code will be
 * `RAWRTC_CODE_NO_VALUE`.
 *
 * In case all messages did not provide a buffer, `bufferp`'s value will
 * be set to `NULL` but `contextp`'s value will represent the context of
 * the fist message (which may also be `NULL`). The return code will be
 * `RAWRTC_CODE_SUCCESS`.
 *
 * Note: Only the first message's context will be returned.
 */
enum rawrtc_code rawrtc_message_buffer_merge(
        struct mbuf** const bufferp,
        void** const contextp,
        struct list* const message_buffer
) {
    struct le* le;
    struct rawrtc_buffered_message* buffered_message;
    void* context;
    struct mbuf* buffer = NULL;
    int err = 0;
    /* pos/end are only meaningful once `buffer` has been set below; all uses
     * are guarded by `if (buffer)`. */
    size_t pos;
    size_t end;

    if (!bufferp || !contextp || !message_buffer) {
        return RAWRTC_CODE_INVALID_ARGUMENT;
    }

    le = list_head(message_buffer);
    if (!le) {
        DEBUG_PRINTF("Nothing to merge\n");
        *bufferp = NULL;
        *contextp = NULL;
        return RAWRTC_CODE_NO_VALUE;
    }

    /* Only the first message's context is propagated (see header comment). */
    buffered_message = le->data;
    context = buffered_message->context;
    DEBUG_PRINTF("Merging %zu buffered messages\n", list_count(message_buffer));

    for (; le != NULL; le = le->next) {
        buffered_message = le->data;
        if (!buffer) {
            if (buffered_message->buffer) {
                /* The first message that carries a buffer becomes the merge
                 * target; grow it once to hold everything that follows. */
                buffer = buffered_message->buffer;
                pos = buffer->pos;
                end = buffer->end;
                err = mbuf_resize(buffer, pos + buffer_sum_left(message_buffer));
                if (err) {
                    goto out;
                }
                DEBUG_PRINTF("Resized buffer to %zu bytes\n", buffer->size);
                mbuf_skip_to_end(buffer);
            }
            continue;
        }
        if (buffered_message->buffer) {
            /* Append the remaining payload of each later message. */
            err = mbuf_write_mem(buffer, mbuf_buf(buffered_message->buffer),
                                 mbuf_get_left(buffered_message->buffer));
            if (err) {
                goto out;
            }
        }
    }

out:
    if (buffer) {
        /* Rewind so the caller reads from the original start position. */
        mbuf_set_pos(buffer, pos);
    }

    if (err) {
        if (buffer) {
            /* Undo the resize on failure. */
            mbuf_set_end(buffer, end);
            mbuf_trim(buffer);
            DEBUG_PRINTF("Resized buffer back to %zu bytes due to error\n", buffer->size);
        }
    } else {
        /* NOTE(review): `buffer` may be NULL here when no message carried a
         * buffer — presumably mem_ref() tolerates NULL; confirm against the
         * mem API in use. */
        *bufferp = mem_ref(buffer);
        *contextp = mem_ref(context);
        list_flush(message_buffer);
        DEBUG_PRINTF("Merging complete\n");
    }
    return rawrtc_error_to_code(err);
}
import java.util.Comparator;
import java.util.Iterator;
import java.util.Scanner;
import java.util.TreeMap;

public class Main {

    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        // city name -> (score -> 1-based input index); cities iterate in
        // ascending lexicographic order.
        TreeMap<String, TreeMap<Integer, Integer>> restaurants = new TreeMap<>();
        int N = sc.nextInt();
        for (int i = 0; i < N; i++) {
            String city = sc.next();
            TreeMap<Integer, Integer> restaurantsInCity = restaurants.get(city);
            if (restaurantsInCity == null) {
                // Inner map sorts scores in descending order.
                restaurantsInCity = new TreeMap<>(new Comparator<Integer>() {
                    public int compare (Integer m, Integer n) {
                        return m.compareTo(n) * -1;
                    }
                });
            }
            // NOTE(review): a duplicate score within the same city overwrites
            // the earlier index — presumably scores are unique per city; confirm
            // against the problem statement.
            restaurantsInCity.put(sc.nextInt(), i + 1);
            restaurants.put(city, restaurantsInCity);
        }
        // Emit indices city by city, highest score first within each city.
        Iterator<String> itr = restaurants.keySet().iterator();
        while (itr.hasNext()) {
            TreeMap<Integer, Integer> resutaurantsInCity = restaurants.get(itr.next());
            Iterator<Integer> index = resutaurantsInCity.values().iterator();
            while (index.hasNext()) {
                System.out.println(index.next());
            }
        }
    }
}
// TODO: 7/2/17 Ensure absolute consistency
public class UserPrivileges {

    /**
     * A constant denoting whether the service should <em>not</em> present ads to the user
     */
    public static final String DISABLED_ADS = "disableAds";

    private static final String TAG = UserPrivileges.class.getSimpleName();
    private static final String PRIVILEGES_KEY = "privileges";

    /**
     * Fetches service privileges for the given privilege and returns the value in the given
     * callback
     *
     * @param privilege A privilege denoted by {@link Privilege}
     * @param userId    The ID of a user that exists in the database
     */
    public static void fetchFor(@Privilege String privilege, PrivilegeCallback callback,
                                String userId) {
        // For testing/marketing, automatically grants any privilege if this is demo user
        // NOTE(review): this grants on every debug build, not only for a demo
        // user — confirm that is intended.
        if (BuildConfig.DEBUG) {
            callback.onResult(true);
            return;
        }

        // Real magic happens here
        OldDatabase.getUserReference(userId).child(PRIVILEGES_KEY).child(privilege)
                .addListenerForSingleValueEvent(new ValueEventListener() {
                    @Override
                    public void onDataChange(DataSnapshot dataSnapshot) {
                        // A missing node is treated as "no privilege".
                        Boolean hasPrivilege = dataSnapshot.getValue(Boolean.class);
                        Log.d(TAG, "onDataChange: User privilege " + privilege + " is "
                                + hasPrivilege);
                        callback.onResult(hasPrivilege != null ? hasPrivilege : false);
                    }

                    @Override
                    public void onCancelled(DatabaseError databaseError) {
                        Log.w(TAG, "onCancelled: Error when fetching user privilege " + privilege,
                                databaseError.toException());
                        callback.onResult(false);
                    }
                });
    }

    /**
     * @param privilege A privilege denoted by {@link Privilege}
     * @param userId    The ID of a user that exists in the database
     */
    public static void enableFor(@Privilege String privilege, String userId,
                                 PrivilegeCallback callback) {
        OldDatabase.getUserReference(userId).child(PRIVILEGES_KEY).child(privilege)
                .setValue(true).addOnSuccessListener(aVoid -> {
            Log.i(TAG, "enableFor: Privilege successfully set for " + userId);
            callback.onResult(true);
        }).addOnFailureListener(e -> {
            // NOTE(review): the callback is never invoked on failure, so
            // callers may wait forever — confirm whether that is intended.
            Log.w(TAG, "enableFor: Error when setting privilege for " + userId, e);
        });
    }

    /**
     * @param privilege A privilege denoted by {@link Privilege}
     * @param userId    The ID of a user that exists in the database
     */
    public static void disableFor(@Privilege String privilege, String userId,
                                  PrivilegeCallback callback) {
        OldDatabase.getUserReference(userId).child(PRIVILEGES_KEY).child(privilege)
                .setValue(false).addOnSuccessListener(aVoid -> {
            Log.i(TAG, "disableFor: Privilege successfully set for " + userId);
            callback.onResult(false);
        }).addOnFailureListener(e -> {
            // NOTE(review): as in enableFor, the callback is skipped on failure.
            Log.w(TAG, "disableFor: Error when setting privilege for " + userId, e);
        });
    }

    /**
     * An annotation which indicates the given parameter requires one of {@link #DISABLED_ADS}
     */
    @Retention(RetentionPolicy.SOURCE)
    @StringDef({DISABLED_ADS})
    public @interface Privilege {
        // no-op
    }

    /**
     * A callback called when a feature request returns
     */
    public interface PrivilegeCallback {

        /**
         * Is called when the backend returns the privilege state of a user
         *
         * @param hasPrivilege True if the privilege should be granted within the app
         */
        void onResult(boolean hasPrivilege);
    }
}
# Reads an integer, adds one, then prints the quotient by 36 and the
# remainder's quotient by 3.
n = int(input()) + 1
quotient, remainder = divmod(n, 36)
print(quotient, remainder // 3)
/*****************************************************************************/
/**
*
* This function is designed to look like an interrupt handler in a device
* driver. This is typically a 2nd level handler that is called from the
* interrupt controller interrupt handler. This handler would typically
* perform device specific processing such as reading and writing the registers
* of the device to clear the interrupt condition and pass any data to an
* application using the device driver.
*
* @param	CallbackRef is passed back to the device driver's interrupt handler
*		by the XScuGic driver. It was given to the XScuGic driver in the
*		XScuGic_Connect() function call. It is typically a pointer to the
*		device driver instance variable if using the Xilinx Level 1 device
*		drivers. In this example, we are passing it as scugic cpu
*		interface base address to access ack and EOI registers.
*
* @return	None.
*
* @note		None.
*
******************************************************************************/
void LowInterruptHandler(u32 CallbackRef)
{
	u32 BaseAddress;
	u32 IntID;

	BaseAddress = CallbackRef;
	/* Acknowledge the interrupt and obtain its ID; Versal (non-R5) uses the
	 * driver helper instead of a direct register read. */
#if defined (versal) && !defined(ARMR5)
	IntID = XScuGic_get_IntID();
#else
	IntID = XScuGic_ReadReg(BaseAddress, XSCUGIC_INT_ACK_OFFSET) &
		XSCUGIC_ACK_INTID_MASK;
#endif
	/* Ignore IDs outside the supported interrupt range. */
	if(XSCUGIC_MAX_NUM_INTR_INPUTS < IntID){
		return;
	}
	/* Flag for the polling loop elsewhere that an interrupt was handled. */
	InterruptProcessed = 1;
	/* Signal end-of-interrupt back to the GIC. */
#if defined (versal) && !defined(ARMR5)
	XScuGic_ack_Int(IntID);
#else
	XScuGic_WriteReg(BaseAddress, XSCUGIC_EOI_OFFSET, IntID);
#endif
}
Analysis and Optimization of Thermal Matching between Multi-core Connector and Encapsulated Aluminum Shell Aluminum alloy has been widely used for the encapsulating shells of power modules because of its low density and high thermal conductivity. However, the thermal mismatch at the junction of the aluminum shell and the multi-core connector causes poor reliability of power modules. Focusing on the thermal matching between the multi-core connector and the encapsulated aluminum shell, we analyze the air leakage phenomenon of power modules in this paper through thermodynamic simulation based on the finite element method. Finally, a buffer ring structure is designed that significantly reduces the glass stress of the multi-core connector, which can effectively improve the reliability of power modules.
package enhancedTipCalculator;

import javafx.event.ActionEvent;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.text.NumberFormat;

import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.fxml.FXML;
import javafx.scene.control.Label;
import javafx.scene.control.Slider;
import javafx.scene.control.TextField;

/**
 * Controller for the enhanced tip calculator: reads the bill amount and the
 * number of people, applies the slider-selected tip percentage, and fills in
 * the tip, the grand total and the per-person share.
 */
public class enhancedTipCalculatorController {
    private static final NumberFormat currency = NumberFormat.getCurrencyInstance();
    private static final NumberFormat percent = NumberFormat.getPercentInstance();

    // FIX: BigDecimal.valueOf avoids the imprecise new BigDecimal(double)
    // constructor (which would store 0.1499999999999999944...).
    private BigDecimal tipPercentage = BigDecimal.valueOf(0.15);

    @FXML
    private TextField bakshishFushe;

    @FXML
    private Label bakshishLabel;

    @FXML
    private Slider bakshishPerqindjeSlider;

    @FXML
    private TextField nrPersonaFushe;

    @FXML
    private Label nrPersonaLabel;

    @FXML
    private Label pagesaLabel;

    @FXML
    private TextField pagesaPersonFushe;

    @FXML
    private Label pagesaPersonLabel;

    @FXML
    private Label perqindjeBakshishLabel;

    @FXML
    private TextField pertuPaguarFushe;

    @FXML
    private TextField totalFushe;

    @FXML
    private Label totalLabel;

    /**
     * Handler for the "calculate" button: computes tip, total and per-person
     * amounts from the current field values and tip percentage.
     */
    @FXML
    private void llogaritFaturenButonShtypur(ActionEvent event) {
        try {
            BigDecimal amount = new BigDecimal(totalFushe.getText());
            BigDecimal tip = amount.multiply(tipPercentage);
            BigDecimal total = amount.add(tip);
            BigDecimal nrPersona = new BigDecimal(nrPersonaFushe.getText());
            // FIX: divide(nrPersona) without a scale throws ArithmeticException
            // for non-terminating quotients (e.g. splitting between 3 people);
            // round to cents explicitly instead.
            BigDecimal pagesaPerson = total.divide(nrPersona, 2, RoundingMode.HALF_UP);

            bakshishFushe.setText(currency.format(tip));
            pertuPaguarFushe.setText(currency.format(total));
            pagesaPersonFushe.setText(currency.format(pagesaPerson));
        } catch (NumberFormatException | ArithmeticException e) {
            // ArithmeticException covers division by zero people, which was
            // previously uncaught and crashed the handler.
            totalFushe.setText("Enter amount");
            totalFushe.selectAll();
            totalFushe.requestFocus();
        }
    }

    /**
     * Wires the tip-percentage slider: every change updates the percentage
     * used by the calculation and the displayed label.
     */
    public void initialize() {
        currency.setRoundingMode(RoundingMode.HALF_UP);
        bakshishPerqindjeSlider.valueProperty().addListener(new ChangeListener<Number>() {
            @Override
            public void changed(ObservableValue<? extends Number> ov,
                                Number oldValue, Number newValue) {
                tipPercentage = BigDecimal.valueOf(newValue.intValue() / 100.0);
                perqindjeBakshishLabel.setText(percent.format(tipPercentage));
            }
        });
    }
}
<reponame>tanzeelrana/Menu-App
// Environment configuration consumed by the app at startup.
export const ENV = {
  // false: this build is a non-production configuration.
  production: false,
  // Parse application id and server endpoint.
  parseAppId: 'menuApp',
  parseServerUrl: 'https://pacecouriers.com/menuApp'
}
BAMMA is brought to you in Association with: Lonsdale & Sports Direct - The Official Equipment Partners of BAMMA Safe MMA - Independent British Medical Organisation January 20, 2014 - Europe's flagship Mixed Martial Arts promotion, BAMMA, is proud to announce a new television partnership with Asia's premier action entertainment channel KIX, operated by Celestial Tiger Entertainment. The deal will see BAMMA events now broadcast across Asia on KIX and KIX HD in Hong Kong, Singapore, Thailand, Indonesia, Malaysia, Brunei, Vietnam and the Philippines. KIX is the ultimate destination for action entertainment in Asia. KIX offers action fans a high-octane blend of programs including combat sports, blockbuster action movies, hit action series and cutting-edge reality shows. With first and exclusive premieres every month, KIX guarantees you non-stop adrenaline-pumping entertainment. So get into the action with KIX! www.kix-tv.com BAMMA's Director, Ashley Bothwell said: "We're really pleased to be able to confirm our deal with Celestial Tiger Entertainment. KIX is a perfect platform for BAMMA in Asia and our events will sit well amongst the channel's core programming of combat sports, action reality and series, and action movies. We are excited about the extended audience that BAMMA shows will now reach across parts of Asia, and we look forward to working with Celestial Tiger Entertainment and KIX over 2014." The date for BAMMA 15, the first of BAMMA's schedule for 2014, is to be announced in the coming weeks.
package com.vailsys.freeclimb.api.phoneNumber;

import com.vailsys.freeclimb.api.Filters;

/**
 * Represents the possible fields one can set as filters when searching for
 * incoming phone numbers.
 *
 * Plain data holder: every field has a getter/setter pair below. How unset
 * filters are serialized into the query is handled by the {@link Filters}
 * base class.
 */
public class IncomingPhoneNumberSearchFilters extends Filters {
    /**
     * A PCRE compatible regular expression to match against the phone numbers of
     * the incoming phone number list.
     */
    private String phoneNumber;
    /**
     * The alias of the incoming phone number.
     */
    private String alias;
    /**
     * ID of the application that FreeClimb should contact if a Call or SMS
     * arrives for this phone number or a Call from this number is placed.
     */
    private String applicationId;
    /**
     * Country of this phone number.
     */
    private String country;
    /**
     * State or province of this phone number.
     */
    private String region;
    /**
     * Indication of whether the phone number can handle sending and receiving
     * SMS messages.
     *
     * NOTE(review): primitive booleans default to false and cannot represent
     * "filter not set" -- verify the Filters base class distinguishes unset
     * boolean filters, otherwise every search filters on smsEnabled=false.
     */
    private boolean smsEnabled;
    /**
     * Indicates whether the phone number can handle calls.
     */
    private boolean voiceEnabled;
    /**
     * Indicates whether the phone number is associated with an application.
     */
    private boolean hasApplication;

    /**
     * Retrieve the value of the phone number filter.
     *
     * @return The phone number to filter by.
     */
    public String getPhoneNumber() {
        return phoneNumber;
    }

    /**
     * Set the phone number filter for incoming phone numbers.
     *
     * @param phoneNumber The phone number to filter by.
     */
    public void setPhoneNumber(String phoneNumber) {
        this.phoneNumber = phoneNumber;
    }

    /**
     * Retrieve the value of the alias filter.
     *
     * @return The alias of the filter.
     */
    public String getAlias() {
        return alias;
    }

    /**
     * Set the alias filter for incoming phone numbers.
     *
     * @param alias The alias to filter by.
     */
    public void setAlias(String alias) {
        this.alias = alias;
    }

    /**
     * Retrieve the value of the applicationId filter.
     *
     * @return The value of the applicationId filter.
     */
    public String getApplicationId() {
        return applicationId;
    }

    /**
     * Set the applicationId filter for incoming phone numbers.
     *
     * @param applicationId The applicationId to filter by.
     */
    public void setApplicationId(String applicationId) {
        this.applicationId = applicationId;
    }

    /**
     * Retrieve the value of the country filter.
     *
     * @return The value of the country filter.
     */
    public String getCountry() {
        return country;
    }

    /**
     * Set the country filter for incoming phone numbers.
     *
     * @param country The country to filter by.
     */
    public void setCountry(String country) {
        this.country = country;
    }

    /**
     * Retrieve the value of the region filter.
     *
     * @return The value of the region filter.
     */
    public String getRegion() {
        return region;
    }

    /**
     * Set the region filter for incoming phone numbers.
     *
     * @param region The region to filter by.
     */
    public void setRegion(String region) {
        this.region = region;
    }

    /**
     * Retrieve the value of the smsEnabled filter.
     * (Uses the get- rather than is- prefix; kept for API compatibility.)
     *
     * @return The value of the smsEnabled filter.
     */
    public boolean getSmsEnabled() {
        return smsEnabled;
    }

    /**
     * Set the smsEnabled filter for incoming phone numbers.
     *
     * @param smsEnabled The value of smsEnabled to filter by.
     */
    public void setSmsEnabled(boolean smsEnabled) {
        this.smsEnabled = smsEnabled;
    }

    /**
     * Retrieve the value of the voiceEnabled filter.
     *
     * @return The value of the voiceEnabled filter.
     */
    public boolean getVoiceEnabled() {
        return voiceEnabled;
    }

    /**
     * Set the voiceEnabled filter for incoming phone numbers.
     *
     * @param voiceEnabled The value of voiceEnabled to filter by.
     */
    public void setVoiceEnabled(boolean voiceEnabled) {
        this.voiceEnabled = voiceEnabled;
    }

    /**
     * Retrieve the value of the hasApplication filter.
     *
     * @return The value of the hasApplication filter.
     */
    public boolean getHasApplication() {
        return hasApplication;
    }

    /**
     * Set the hasApplication filter for incoming phone numbers.
     *
     * @param hasApplication The value of hasApplication to filter by.
     */
    public void setHasApplication(boolean hasApplication) {
        this.hasApplication = hasApplication;
    }
}
def can_be_moderated_by(user): return user.is_active and user.is_staff and ( user.has_perm('blog.change_membership') or user.has_perm('blog.change_blog'))
<reponame>wez/mosquitto-rs //! This crate implements an async MQTT client using libmosquitto. //! //! ```no_run //! use mosquitto_rs::*; //! //! fn main() -> Result<(), Error> { //! smol::block_on(async { //! let mut client = Client::with_auto_id()?; //! let rc = client.connect( //! "localhost", 1883, //! std::time::Duration::from_secs(5), None).await?; //! println!("connect: {}", rc); //! //! let subscriptions = client.subscriber().unwrap(); //! //! client.subscribe("test", QoS::AtMostOnce).await?; //! println!("subscribed"); //! //! client.publish("test", b"woot", QoS::AtMostOnce, false) //! .await?; //! println!("published"); //! //! if let Ok(msg) = subscriptions.recv().await { //! println!("msg: {:?}", msg); //! } //! //! Ok(()) //! }) //! } //! ``` //! //! ## Features //! //! The following feature flags are available: //! //! * `vendored-mosquitto` - use bundled libmosquitto 2.4 library. This is on by default. //! * `vendored-openssl` - build openssl from source, rather than using the system library. Recommended for macOS and Windows users to enable this. mod client; mod error; mod lowlevel; pub use client::*; pub use error::*; pub use lowlevel::*;
import { createAction } from '@reduxjs/toolkit' import { PageNotificationsProps } from 'cx-portal-shared-components' import { name } from './types' const setNotification = createAction( `${name}/setNotification`, function update(notification: PageNotificationsProps) { return { payload: { notification, }, } } ) const resetNotification = createAction(`${name}/resetNotification`) export { setNotification, resetNotification }
As the ambush of two police officers in Miami last week reminds us, the war on police, fomented to some extent by former President Obama and his cronies, is not over. But in the two months since President Trump took office, things are much more positive in the law enforcement community. Trump, who was unapologetically pro-police from the outset of his campaign, has demonstrated since becoming president that his campaign promises were not just talk. In his first two weeks in office, he signed a series of executive orders designed to curb violence against law enforcement, reduce crime, and enforce federal law to rein in transnational criminal organizations. He had a statement posted to the White House website that said, “The Trump Administration will be a law and order administration. President Trump will honor our men and women in uniform and will support their mission of protecting the public. The dangerous anti-police atmosphere in America is wrong. The Trump Administration will end it.” He used the high-profile occasion of his late-February speech to Congress to reiterate his strong endorsement of the work police do. “We must work with – not against –the men and women of law enforcement,” the president said in his speech to Congress. “We must build bridges of cooperation and trust – not drive the wedge of disunity and division.” And, of course, he appointed Sen. Jeff Sessions of Alabama, a noted advocate of law enforcement, to be Attorney General. President Trump aims to change the anti-police narrative, relentlessly and continually expounded by Obama and the Eric Holder-Loretta Lynch Justice Department. That campaign began with Obama’s remark that police were “stupid” in the way they handled the confrontation with Harvard Law Professor Henry Louis Gates, and it continued with his immediate condemnation of police in the shooting of Michael Brown in Ferguson and in the death of Freddie Gray in Baltimore. 
Obama’s ambivalence—and sometimes even hostility—toward the police may have played well to his liberal base, but it had a real impact on the lives of Americans—and that impact was not positive. It led to the “Ferguson effect”—the reluctance of police to become involved in confrontations, investigations, and arrests that are no-win situations for them. That, in turn, led to police going on defense and criminals on offense, which led to chronic offenders remaining on the streets to commit additional crimes, which led to generational increases in crime rates in major cities across America. Baltimore set records for murders in 2015, and crime jumped more than 50 percent in Washington, D.C., that year. Shootings in Chicago returned to numbers not seen since the violent 1990s with an astounding 4,400 people shot and 760 murdered in 2016. Most of the victims were black – 900 more black men were killed in 2015 than the year before. The former president’s rhetoric fueled a protest movement led by Black Lives Matter and other radical groups, who claimed the entire law enforcement apparatus to be a racist enterprise and drug laws to be a means by which our country seeks to reinstitute slavery. Black Lives Matter activists were regular visitors to the White House, to “help mend frayed ties between law enforcement and the communities they serve,” according to one of them. Police officers knew well before the election they had a friend in Trump, which is why a poll shortly before the election by Police Magazine found Trump with 84 percent support among the men and women in blue to 8 percent for Hillary Clinton. Not all the increase in crime can be blamed on Obama but as Heather MacDonald—one of the nation’s leading authorities on crime and policing—said in her 2016 book The War on Cops: As 2015 progressed, few law-enforcement practices escaped attack for allegedly imposing unjust burdens on blacks. But it was the virulent anti-cop rhetoric that was most consequential. 
Officers working in inner cities routinely found themselves surrounded by hostile, jeering crowds when they tried to make an arrest or conduct an investigation. Cops feared becoming the latest YouTube pariah when a viral cell-phone video showed them using force against a suspect who had been resisting arrest. There is no hard evidence yet, but it seems morale among police is improving and the kind of violent and radical opposition seen last summer is abating. Attorney General Sessions has sent strong signals he will bring a refreshing support for prosecutors and law enforcement officers, push back on the Ferguson Effect, which has led to less proactive policing in dangerous communities, and change the climate in which law enforcement is maligned for what he calls “the unacceptable deeds of a few bad actors.” For better or worse, President Trump is learning his words carry tremendous weight. America’s police officers are listening, and one can’t help but be optimistic those words will have an impact on their work and, eventually, the crime rate. Content created by The Center for American Greatness, Inc is available without charge to any eligible news publisher that can provide a significant audience. For licensing opportunities of our original content, please contact [email protected]
def invert(a, b): return extended_euclidian_inverse(a, b)
It’s your second week in your new apartment and your bathroom light goes out. The following week, your neighbor decides to become a drummer. Then, some mice take up residency in your stove. Do you have to fight them back on your own? Or can you call on your landlord to save the day? Renting your first apartment can be confusing. What are your responsibilities? What are your landlord’s? We spoke with some Boston realty companies to find the protocol for renting etiquette in a variety of sticky situations that might arise: Carbon monoxide/smoke detectors: Michael DiMella, owner of Charlesgate Realty Group, said the Massachusetts state safe and sanitary code requires that carbon monoxide detectors and smoke detectors be installed and tested when an apartment turns over. So they should definitely be in working condition when you move in. But if one of your detectors starts beeping due to a low battery, DiMella said it might be your job to replace it. “There’s no set protocol, but good management companies tend to have policies to replace those batteries,’’ DiMella said. Just check your lease. Mark Pardis, a realtor with Bulfinch Boston Realty, mentioned that if your detector’s battery starts beeping and you get frustrated and decide to rip it off your wall, it’s on you if there’s a fire and your apartment burns down. “I have someone sign a document that says I gave them a working smoke detector and carbon monoxide detector,’’ Pardis said. “We tested it together. That way, if a few weeks later, some college kid pulls it off the wall, that’s their fault.’’ Light bulbs: Pardis, said when you move in, you shouldn’t have to put in all new light bulbs. Your landlord should do that prior to your arrival. “But once you’re in your apartment and the light bulbs burn out, it’s not your landlord’s responsibility,’’ Pardis said. They just have to turn the apartment over to you in “working condition.’’ But DiMella said there’s no standard when it comes to replacing light bulbs. 
“Anything that would burn out like a light bulb with normal use is up to the tenant to replace,’’ DiMella said. “In certain circumstances, if a landlord or management company knows something is very difficult to replace, a landlord might help.’’ So basically, if your lights burn out, you have to go buy new light bulbs and put them in yourself. However, if your light is located in a cathedral ceiling, your landlord might be nice enough to lend a hand. Just ask. Annoying neighbor: This issue actually had a variety of different responses. DiMella called it “a tricky one.’’ “If you’re in an apartment building and all the apartments are owned by one landlord, it may be his responsibility to deal with the noisy neighbor or talk to both tenants to solve the problem,’’ DiMella said. “If the apartments are owned by two different landlords, there’s not much he can do in that case.’’ Pardis, however, said your landlord is not a police officer. He said if one of his tenants made a noise complaint, he’d tell them to call the cops. “He could call the tenant up and threaten to not renew the lease, but that’s not really grounds for throwing him out,’’ Pardis said. “Your best bet is the police.’’ Still, other property managers urged resolving the issue by talking to the neighbor yourself. Wendy Heyman, owner of SGH Property Management, said, “You have to live with this person, so your best course of action is to try and resolve it in a friendly way so there’s not an uncomfortable feeling moving forward.’’ Once you involve your landlord or the police, she added, “you know you and that person are never going to be friends.’’ Basically, you’ll have to weigh how annoying this person is with your method of resolving the situation. Maybe first try a friendly knock on the door, followed by a note, then landlord involvement, and finally, the police. 
Then you can say, “Hey, I tried.’’ Broken appliance: If in the course of regular wear and tear, something malfunctions with your refrigerator, oven, washer, dryer, freezer, etc., it is up to your landlord to fix the problem, Heyman said. But say your best friend has one too many glasses of wine and sticks her phone down your garbage disposal. That’s on you, my friend. “If you clog the toilet and we have to snake it out, that’s on you,’’ Heyman said. “But if your flusher stops working, that’s on your landlord.’’ It can be a fine line between what’s your responsibility and what’s your landlord’s when it comes to appliances, but the bottom line is: don’t be an idiot, and don’t do anything to your appliances that you wouldn’t want to pay for. Infestations: Bedbugs, roaches, mice, oh my! Pardis said if you have an infestation, your landlord is required to exterminate the critter and eradicate the problem. But say your landlord isn’t responding to your 1,000 phone calls about the rat family in your fireplace. Pardis said it’s a good idea to keep track of the emails you send your landlord about issues like this. “Calling him on the phone might not do it, but an email is proof that you sent it,’’ Pardis said. “Keep a paper trail.’’ Locks: Broken locks on your window or door are entirely on your landlord to fix, DiMella said. “Anything having to do with safety or sanitary issues falls under your landlord’s responsibility.’’ But much like appliances, it’s up to the tenant to maintain what is provided. So no punching holes in your window. Stolen parking spot: Heyman said this is another situation where the best course of action is to first attempt to talk to your neighbor. Obviously, this only works if you know who is stealing your parking spot. 
But if a friendly note or knock on the door isn’t working, Heyman said you have every right to “call a towing company or the police if someone is parked in your spot.’’ Mold: Hopefully, you never have to deal with the green and black gunk creeping up your bathroom walls. But if you do, Pardis said it’s the landlord’s responsibility to get rid of it. Pardis said mold is often a preventable problem, though. “College kids will get lazy and hang wet towels on the back of their bathroom door and it stays wet 24/7,’’ he said. “It becomes moldy. They don’t care because they aren’t living there. Six months go by, and soon, there’s mold everywhere.’’ Mold is gross and it’s unhealthy to breathe spores – it can lead to chronic cough and allergies. Be a courteous tenant. If it’s 100 degrees outside and you’re taking a shower, open a window or turn a fan on. Heat/AC: If heat and air conditioning are provided to you in your lease agreement, and one or the other breaks, DiMella said it is your landlord’s responsibility to fix it. But if you have a window AC unit that you installed yourself, it is your responsibility to install it and remove it properly. Water leak: This also falls under the state sanitary and safety code, and is most definitely your landlord’s responsibility to resolve. Other fun tips: -If you live on the first floor, your landlord should provide you with blinds, Pardis said. But if you choose to rip them off and let the whole neighborhood watch you dance to “Shake It Off,’’ that’s really your prerogative. -You are guaranteed two “means of egress,’’ or escape. This means that if you have a stairwell near the entrance of your second-floor apartment, you should also have an unblocked fire escape or some other way of getting out in the event of a fire or other emergency. If you lack this, speak up! “The key for apartment living, or in any relationship, is to have good lines of communication,’’ DiMella said. “It goes a long way to fixing issues fast. 
Not every property out there is in the best shape or condition unfortunately. But hopefully, the right policies are in place to fix anything that needs to be fixed.’’
'Summertime' plays ahead of Trump's climate announcement In this May 4, 2017, file photo, President Donald Trump claps as he arrives in the Rose Garden of the White House, Thursday, May 4, 2017, in Washington, followed by Vice President Mike Pence. (Photo: Evan Vucci, AP) Summertime and the living is easy. Even if you make a major announcement about pulling your country out of a major climate agreement, don't you cry. According to reports on Twitter, the song "Summertime" was played by the military band ahead of President Trump's Rose Garden appearance on Thursday. The president announced that the United States will withdraw from the Paris climate agreement, which requires its member countries to set targets for reducing greenhouse gases. Jazz band in the Rose Garden playing 'Summertime'. A new lament for global warming as Trump pulls out of Paris. pic.twitter.com/WLKewd5ERk — Kylie Morris (@C4KylieM) June 1, 2017 The scene at the back of the Rose Garden ahead of Trump's climate announcement. Military band is playing "Summertime." pic.twitter.com/zESwLbtsPC — Jennifer Epstein (@jeneps) June 1, 2017 Jazz band playing "summertime (and the living is easy)" in the White House Rose Garden ahead of Trump's climate announcement #Paris pic.twitter.com/Ll2eHodlhr — Heather Timmons (@HeathaT) June 1, 2017 In case you want to know, here's how the song, from the opera "Porgy and Bess," begins: Summertime, And the livin' is easy Fish are jumpin' And the cotton is high Your daddy's rich And your mamma's good lookin' So hush little baby Don't you cry Read or Share this story: https://usat.ly/2rvkm9E
import React, {
  useState,
  useRef,
  useEffect,
  forwardRef,
  useImperativeHandle,
} from 'react';
import CodeMirror from 'codemirror';
import 'codemirror/mode/meta';
import 'codemirror/addon/display/autorefresh';
import 'codemirror/addon/comment/comment';
import 'codemirror/addon/fold/foldcode';
import 'codemirror/addon/fold/xml-fold';
import 'codemirror/addon/edit/closebrackets';
import 'codemirror/addon/edit/matchtags';
import 'codemirror/addon/edit/matchbrackets';
import 'codemirror/addon/edit/continuelist';
import 'codemirror/addon/edit/closetag';
import 'codemirror/keymap/sublime';
import 'codemirror/lib/codemirror.css';
import { ReactCodemirrorProps } from './types';

const defaultOptions = {
  tabSize: 2,
  autoCloseBrackets: true,
  matchBrackets: true,
  showCursorWhenSelecting: true,
  lineNumbers: true,
  fullScreen: true,
};

/**
 * React wrapper around a CodeMirror 5 editor backed by a hidden textarea.
 * `onXxx` props are forwarded as CodeMirror event handlers; the underlying
 * editor instance is exposed to parents via the forwarded ref.
 */
const ReactCodeMirror = (props: ReactCodemirrorProps, ref: any) => {
  const { options = {}, value = '', width = '100%', height = '100%' } = props;
  const textareaRef = useRef(null);
  const [editor, setEditor] = useState<CodeMirror.EditorFromTextArea | null>(null);

  useImperativeHandle(ref, () => ({ editor }));

  // Map props named `onXxx` to codemirror event names ('xxx').
  const getEventHandleFromProps = () => {
    const propNames = Object.keys(props);
    const eventHandle = propNames.filter((keyName) => /^on+/.test(keyName));
    const eventDict: any = {};
    eventHandle.forEach((ele) => {
      const name = ele.slice(2);
      if (name && name[0]) {
        eventDict[ele] = name.replace(name[0], name[0].toLowerCase());
      }
    });
    return eventDict;
  };

  // Push option values onto a live editor instance.
  // http://codemirror.net/doc/manual.html#config
  const setOptions = async (instance: any, opt: any = {}) => {
    if (typeof opt === 'object' && window) {
      const mode = CodeMirror.findModeByName(opt.mode || '');
      if (mode) {
        opt.mode = mode.mime;
      }
      Object.keys(opt).forEach((name) => {
        if (opt[name] && JSON.stringify(opt[name])) {
          instance.setOption(name, opt[name]);
        }
      });
    }
  };

  // Mount: create the editor from the backing textarea.
  useEffect(() => {
    if (editor || !window) return undefined;
    const instance = CodeMirror.fromTextArea((textareaRef as any).current, {
      ...defaultOptions,
      ...options,
    });
    const eventDict = getEventHandleFromProps();
    Object.keys(eventDict).forEach((event) => {
      instance.on(eventDict[event], (props as any)[event]);
    });
    instance.setValue(value || '');
    if (width || height) {
      instance.setSize(width, height);
    }
    setEditor(instance);
    setOptions(instance, { ...defaultOptions, ...options });
    // BUGFIX: the original cleanup tested the `editor` state variable,
    // which this closure captured as null (the effect runs before
    // setEditor commits), so the editor was never torn down. Capture the
    // local instance instead so unmount restores the textarea.
    return () => {
      instance.toTextArea();
      setEditor(null);
    };
  }, []);

  // BUGFIX/idiom: these three blocks used useMemo for side effects on the
  // external editor; useEffect is the correct hook (side effects must not
  // run during render). `editor` added to the deps so the sync also runs
  // once the instance becomes available.
  useEffect(() => {
    if (!editor || !window) return;
    const val = editor.getValue();
    if (value !== undefined && value !== val) {
      editor.setValue(value);
    }
  }, [editor, value]);

  useEffect(() => {
    if (!editor || !window) return;
    editor.setSize(width, height);
  }, [editor, width, height]);

  useEffect(() => {
    if (!editor || !window) return;
    setOptions(editor, { ...defaultOptions, ...options });
  }, [editor, options]);

  return <textarea ref={textareaRef} />;
};

export default forwardRef(ReactCodeMirror);
class qc:
  """Wrapper class to maintain state + operators.

  Holds a state vector (`psi`), an intermediate representation of the
  applied gates (`ir`), and applies gates either eagerly to the state,
  to the IR, or both, depending on `eager` / `build_ir`.
  """

  def __init__(self, name=None, eager=True):
    self.name = name
    # State starts as scalar 1.0; appending qubits/registers multiplies
    # additional state onto it (the `state` module defines the
    # multiplication semantics -- presumably a tensor product).
    self.psi = 1.0
    self.ir = ir.Ir()
    # build_ir: record every applied gate in the IR.
    # eager:    apply every gate to `psi` immediately.
    self.build_ir = True
    self.eager = eager
    # Global index of the next qubit to be allocated via reg().
    self.global_reg = 0

  class scope:
    """Scope object to allow grouping of gates in the output.

    Context manager bracketing the enclosed gates with IR section markers.
    """

    def __init__(self, ir_param, desc):
      self.ir = ir_param
      self.desc = desc

    def __enter__(self):
      self.ir.section(self.desc)

    def __exit__(self, t, value, traceback):
      self.ir.end_section()

  # --- States ----------------------------------------------------
  def reg(self, size, it=0, *, name=None):
    """Allocate a register of `size` qubits, initialized from `it`."""
    ret = state.Reg(size, it, self.global_reg)
    self.global_reg = self.global_reg + size
    self.psi = self.psi * ret.psi()
    self.ir.reg(size, name, ret)
    return ret

  def qubit(self, alpha=None, beta=None):
    """Append one qubit with amplitudes alpha/beta."""
    self.psi = self.psi * state.qubit(alpha, beta)

  def zeros(self, n):
    """Append `n` qubits in state |0...0>."""
    self.psi = self.psi * state.zeros(n)

  def ones(self, n):
    """Append `n` qubits in state |1...1>."""
    self.psi = self.psi * state.ones(n)

  def bitstring(self, *bits):
    """Append qubits in the basis state given by `bits`."""
    self.psi = self.psi * state.bitstring(*bits)

  def arange(self, n):
    """Append `n` qubits and set amplitude i to float(i).

    NOTE(review): the result is not normalized, so this looks like a
    debugging/testing helper rather than a physical state.
    """
    self.zeros(n)
    for i in range(0, 2**n):
      self.psi[i] = float(i)

  def rand(self, n):
    """Append `n` qubits in a random state (see state.rand)."""
    self.psi = self.psi * state.rand(n)

  def stats(self):
    """Return a human-readable summary of circuit size."""
    return ('Circuit Statistics\n' +
            ' Qubits: {}\n'.format(self.nbits) +
            ' Gates : {}\n'.format(self.ir.ngates))

  def dump_with_dumper(self, flag, dumper_func):
    # `flag` doubles as the output filename; falsy means "disabled".
    if flag:
      result = dumper_func(self.ir)
      with open(flag, 'w') as f:
        print(result, file=f)

  def dump_to_file(self):
    """Write the IR in each output format requested via command-line flags."""
    self.dump_with_dumper(flags.FLAGS.libq, dumpers.libq)
    self.dump_with_dumper(flags.FLAGS.qasm, dumpers.qasm)
    self.dump_with_dumper(flags.FLAGS.cirq, dumpers.cirq)
    self.dump_with_dumper(flags.FLAGS.latex, dumpers.latex)

  def optimize(self):
    """Run the IR optimizer, replacing the current IR."""
    self.ir = optimizer.optimize(self.ir)

  @property
  def nbits(self):
    # Number of qubits in the current state.
    return self.psi.nbits

  def ctl_by_0(self, ctl):
    """Normalize a controller spec.

    A plain int means control-by-|1>; a single-element list (e.g. [3])
    means control-by-|0> (see the comment in multi_control below).
    Returns (qubit_index, controlled_by_0).
    """
    ctl_qubit = ctl
    ctl_by_0 = False
    if not isinstance(ctl, int):
      ctl_qubit = ctl[0]
      ctl_by_0 = True
    return ctl_qubit, ctl_by_0

  # --- Gates ----------------------------------------------------
  def apply1(self, gate, idx, name=None, *, val=None):
    """Apply single-qubit `gate` at index `idx` (or to a whole register)."""
    if isinstance(idx, state.Reg):
      # Register target: apply the same gate to every qubit in it.
      for reg in range(idx.nbits):
        if self.build_ir:
          self.ir.single(name, idx[reg], gate, val)
        if self.eager:
          xgates.apply1(self.psi, gate.reshape(4), self.psi.nbits,
                        idx[reg], tensor.tensor_width)
      return
    if self.build_ir:
      self.ir.single(name, idx, gate, val)
    if self.eager:
      # gate.reshape(4): flatten the gate matrix (presumably 2x2) for
      # the accelerated xgates kernel.
      xgates.apply1(self.psi, gate.reshape(4), self.psi.nbits, idx,
                    tensor.tensor_width)

  def apply_controlled(self, gate, ctl, idx, name=None, *, val=None):
    """Apply `gate` at `idx`, controlled by qubit `ctl`.

    A control-by-|0> (list-wrapped ctl) is realized by sandwiching the
    controlled application between two X gates on the controller.
    """
    if isinstance(idx, state.Reg):
      raise AssertionError('controlled register not supported')
    ctl_qubit, by_0 = self.ctl_by_0(ctl)
    if by_0:
      self.x(ctl_qubit)
    if self.build_ir:
      self.ir.controlled(name, ctl_qubit, idx, gate, val)
    if self.eager:
      xgates.applyc(self.psi, gate.reshape(4), self.psi.nbits, ctl_qubit,
                    idx, tensor.tensor_width)
    if by_0:
      self.x(ctl_qubit)

  # Controlled 2-qubit gates.
  def cv(self, idx0, idx1):
    self.apply_controlled(ops.Vgate(), idx0, idx1, 'cv')

  def cv_adj(self, idx0, idx1):
    self.apply_controlled(ops.Vgate().adjoint(), idx0, idx1, 'cv_adj')

  # NOTE(review): cx0 is identical to cx (same gate, same IR name 'cx');
  # possibly a legacy alias -- verify callers before removing.
  def cx0(self, idx0, idx1):
    self.apply_controlled(ops.PauliX(), idx0, idx1, 'cx')

  def cx(self, idx0, idx1):
    self.apply_controlled(ops.PauliX(), idx0, idx1, 'cx')

  def cy(self, idx0, idx1):
    self.apply_controlled(ops.PauliY(), idx0, idx1, 'cy')

  def cz(self, idx0, idx1):
    self.apply_controlled(ops.PauliZ(), idx0, idx1, 'cz')

  def cu1(self, idx0, idx1, value):
    self.apply_controlled(ops.U1(value), idx0, idx1, 'cu1', val=value)

  def crk(self, idx0, idx1, value):
    self.apply_controlled(ops.Rk(value), idx0, idx1, 'crk', val=value)

  def ccx(self, idx0, idx1, idx2):
    """Sleator-Weinfurter Construction.

    Builds a doubly-controlled X from controlled-V / controlled-X gates.
    Controls may be control-by-|0> (list-wrapped), handled by X sandwiches.
    """
    i0, c0_by_0 = self.ctl_by_0(idx0)
    i1, c1_by_0 = self.ctl_by_0(idx1)
    i2 = idx2
    with self.scope(self.ir, f'ccx({idx0}, {idx1}, {idx2})'):
      if c0_by_0:
        self.x(i0)
      if c1_by_0:
        self.x(i1)
      self.cv(i0, i2)
      self.cx(i0, i1)
      self.cv_adj(i1, i2)
      self.cx(i0, i1)
      self.cv(i1, i2)
      if c0_by_0:
        self.x(i0)
      if c1_by_0:
        self.x(i1)

  def toffoli(self, idx0, idx1, idx2):
    # Alias for ccx.
    self.ccx(idx0, idx1, idx2)

  # Single-qubit gates.
  def h(self, idx):
    self.apply1(ops.Hadamard(), idx, 'h')

  def t(self, idx):
    self.apply1(ops.Tgate(), idx, 't')

  def u1(self, idx, val):
    self.apply1(ops.U1(val), idx, 'u1', val=val)

  def v(self, idx):
    self.apply1(ops.Vgate(), idx, 'v')

  def x(self, idx):
    self.apply1(ops.PauliX(), idx, 'x')

  def y(self, idx):
    self.apply1(ops.PauliY(), idx, 'y')

  def z(self, idx):
    self.apply1(ops.PauliZ(), idx, 'z')

  def yroot(self, idx):
    self.apply1(ops.Yroot(), idx, 'yroot')

  def rx(self, idx, theta):
    self.apply1(ops.RotationX(theta), idx, 'rx', val=theta)

  def ry(self, idx, theta):
    self.apply1(ops.RotationY(theta), idx, 'ry', val=theta)

  def rz(self, idx, theta):
    self.apply1(ops.RotationZ(theta), idx, 'rz', val=theta)

  # Applying a random unitary is possible, but it is not a
  # 1- or 2-qubit gate, hence slow.
  # Do not use (unless really unavoidable)
  #
  # def unitary(self, op, idx):
  #   self.psi = ops.Operator(op)(self.psi, idx)

  # --- Measure ----------------------------------------------------
  def measure_bit(self, idx, tostate=0, collapse=True):
    """Measure qubit `idx`; optionally collapse the state toward `tostate`."""
    return ops.Measure(self.psi, idx, tostate, collapse)

  def sample_state(self, prob_state0):
    """Sample 0 or 1 given probability `prob_state0` of outcome 0."""
    if prob_state0 < random.random():
      return 1
    return 0

  # --- Advanced ---------------------------------------------------
  def swap(self, idx0, idx1):
    """Simple Swap operation."""
    # Classic 3-CNOT swap.
    # pylint: disable=arguments-out-of-order
    with self.scope(self.ir, f'swap({idx0}, {idx1})'):
      self.cx(idx1, idx0)
      self.cx(idx0, idx1)
      self.cx(idx1, idx0)

  def cswap(self, ctl, idx0, idx1):
    """Controlled Swap."""
    # Fredkin gate built from three Toffolis.
    with self.scope(self.ir, f'cswap({ctl}, {idx0}, {idx1})'):
      self.ccx(ctl, idx1, idx0)
      self.ccx(ctl, idx0, idx1)
      self.ccx(ctl, idx1, idx0)

  def multi_control(self, ctl, idx1, aux, gate, desc):
    """Multi-controlled gate, using aux as ancilla."""
    # This is a simpler version that requires n-1 ancillaries, instead of n-2.
    # The benefit is that the gate can be used as a single-controlled gate,
    # which means we don't need to take the root (no need to include scipy).
    # This construction also makes the controlled-by-0 gates a little bit
    # easier, those controllers are being passed as single-element lists, eg.:
    #   ctl = [1, 2, [3], [4], 5]
    #
    # This can be optimized (later) to turn into a space-optimized n-2 version.
    #
    # We also generalize to the case where ctl is empty or only has 1
    # control qubit. This is very flexible and practically any gate
    # could be expressed this way. This would make bulk control of
    # whole gate sequences straight-forward, but changes the trivial IR
    # we're working with here. Something to keep in mind.
    with self.scope(self.ir, f'multi({ctl}, {idx1})  # {desc})'):
      if len(ctl) == 0:
        self.apply1(gate, idx1, desc)
        return
      if len(ctl) == 1:
        self.apply_controlled(gate, ctl[0], idx1, desc)
        return

      # Compute the predicate.
      self.ccx(ctl[0], ctl[1], aux[0])
      aux_idx = 0
      for i in range(2, len(ctl)):
        self.ccx(ctl[i], aux[aux_idx], aux[aux_idx+1])
        aux_idx = aux_idx + 1

      # Use predicate to single-control qubit at idx1.
      self.apply_controlled(gate, aux[aux_idx], idx1, desc)

      # Uncompute predicate.
      aux_idx = aux_idx - 1
      for i in range(len(ctl)-1, 1, -1):
        self.ccx(ctl[i], aux[aux_idx], aux[aux_idx+1])
        aux_idx = aux_idx - 1
      self.ccx(ctl[0], ctl[1], aux[0])

  def flip(self, reg):
    """Flip a quantum register via swaps."""
    # Reverses qubit order: swap outermost pairs working inward.
    for idx in range(reg[0], reg[0] + reg.nbits // 2):
      self.swap(idx, reg[0] + reg.nbits - idx - 1)

  def qft_rk(self, reg, swap=True):
    """Apply Qft with Rk gates directly."""
    nbits = reg.nbits
    for idx in range(reg[0], reg[0] + nbits):
      # Each qubit first gets a Hadamard
      # NOTE(review): `had` is not defined on this class in the visible
      # code -- likely should be self.h(idx); verify against the full file.
      self.had(idx)

      # Each qubit now gets a sequence of Rk(2), Rk(3), ..., Rk(nbits)
      # controlled by qubit (1, 2, ..., nbits-1).
      for rk in range(2, nbits - idx + 1):
        controlled_from = idx + rk - 1
        self.crk(controlled_from, idx, rk)
    if swap:
      self.flip(reg)

  # --- qc of qc ------------------------------------------
  def qc(self, qc, offset=0):
    """Add another full circuit to this circuit."""
    # Iterate of the new circuit and add the gates one by one,
    # using this circuit's eager mode.
    #
    for gate in qc.ir.gates:
      if gate.is_single():
        self.apply1(gate.gate, gate.idx0+offset, gate.name, val=gate.val)
      if gate.is_ctl():
        self.apply_controlled(gate.gate, gate.ctl+offset, gate.idx1+offset,
                              gate.name, val=gate.val)

  def run(self):
    """Apply gates in this qc, don't rebuild IR."""
    # Temporarily force eager application without IR recording, replay
    # this circuit onto itself, then restore the previous flags.
    build_ir = self.build_ir
    eager = self.eager
    self.build_ir = False
    self.eager = True
    self.qc(self)
    self.build_ir = build_ir
    self.eager = eager

  def inverse(self):
    """Return, but don't apply, the inverse circuit."""
    # The order of the gates is reversed and the each gates
    # itself becomes its adjoint. After this, a new circuit
    # is returned. Eager mode is False. The expectation
    # is that an inverse circuit inv is constructed and then applied
    # via circuit.qc(inv), at which point it is applied according to the
    # eager mode of the qc circuit. Usage model:
    #
    #   main = circuit.qc('main circuit')
    #   ... add gates, eager or not.
    #
    #   c = circuit.qc('sub circuit', eager=False)
    #   ... add gates to c, not eager.
    #
    #   Now let's add c to main, at which point the gates are applied.
    #   main.qc(c)
    #
    #   Let's construct the inverse (non-Eager) and add to main (eager)
    #   at an offset.
    #   c_inv = c0.inverse()
    #   main.qc(c_inv, offset=3)
    #
    newqc = qc(self.name, eager=False)
    for gate in self.ir.gates[::-1]:
      # NOTE(review): a gate with val == 0 is mapped to None here (falsy
      # check), so -0 parameters are dropped -- confirm this is intended.
      val=-gate.val if gate.val else None
      if gate.is_single():
        newqc.apply1(gate.gate.adjoint(), gate.idx0, gate.name+'*', val=val)
      if gate.is_ctl():
        newqc.apply_controlled(gate.gate.adjoint(), gate.ctl, gate.idx1,
                               gate.name+'*', val=val)
    return newqc

  # --- Debug --------------------------------------------------
  def dump(self):
    """Simple dumper for basic debugging of a circuit."""
    if self.name:
      print(f'Circuit: {self.name}, Nodes: {len(self.ir.gates)}')
    print(self.ir, end='')
// function used by linear and logistic transitions AnyType __clustered_common_transition (AnyType& args, string regressionType, void (*func)( MutableClusteredState&, const MappedColumnVector&, const double& y)) { MutableClusteredState state = args[0].getAs<MutableByteString>(); double y; if (regressionType == "log") y = args[1].getAs<bool>() ? 1. : -1; else if(regressionType == "lin") y = args[1].getAs<double>(); else if(regressionType == "mlog") y = args[1].getAs<int>(); const MappedColumnVector& x = args[2].getAs<MappedColumnVector>(); if (!std::isfinite(y)) throw std::domain_error("Dependent variables are not finite."); else if (x.size() > std::numeric_limits<uint16_t>::max()) throw std::domain_error("Number of independent variables cannot be " "larger than 65535."); if (state.numRows == 0) { if(regressionType == "mlog") { state.numCategories = static_cast<uint16_t>(args[4].getAs<int>()); state.refCategory = static_cast<uint16_t>(args[5].getAs<int>()); } else { state.numCategories = 2; state.refCategory = 0; } state.widthOfX = static_cast<uint16_t>(x.size() * (state.numCategories-1)); state.resize(); const MappedColumnVector& coef = args[3].getAs<MappedColumnVector>(); state.coef = coef; state.meat_half.setZero(); } if (state.widthOfX != static_cast<uint16_t>(x.size() * (state.numCategories-1))) throw std::runtime_error("Inconsistent numbers of independent " "variables."); state.numRows++; (*func)(state, x, y); return state.storage(); }
// Tests for hash-based routing in the Wizard component: steps with a
// `routeTitle` are mirrored into window.location.hash, and the hash can
// select the initial step.
import React from "react";
import { Wizard, WizardStep, useWizard } from "../src";
import { cleanup, fireEvent, render } from "react-testing-library";
import * as routing from "../src/routing";

// Reset DOM, hash-routing state, and mocks between tests.
afterEach(cleanup);
afterEach(() => (window.location.hash = ""));
afterEach(() => jest.restoreAllMocks());

// Two-step wizard where every step has a routeTitle, so each step maps
// to a hash route. Clicking a step advances/retreats the wizard.
const TestComponent = () => {
  return (
    <Wizard>
      <WizardStep routeTitle="FirstStep">
        {({ isActive, nextStep }) =>
          isActive && (
            <div data-testid="step-1" onClick={nextStep}>
              Step 1
            </div>
          )
        }
      </WizardStep>
      <WizardStep routeTitle="SecondStep">
        {({ isActive, previousStep }) =>
          isActive && (
            <div data-testid="step-2" onClick={previousStep}>
              Step 2
            </div>
          )
        }
      </WizardStep>
    </Wizard>
  );
};

// Variant that renders a step once it has EVER been active
// (hasBeenActive), used to verify that preceding steps are marked as
// visited when the initial step is chosen via the hash.
const BuildUpWizardTestComponent = () => {
  return (
    <Wizard>
      <WizardStep routeTitle="FirstStep">
        {({ hasBeenActive, nextStep }) =>
          hasBeenActive && (
            <div data-testid="step-1" onClick={nextStep}>
              Step 1
            </div>
          )
        }
      </WizardStep>
      <WizardStep routeTitle="SecondStep">
        {({ hasBeenActive, previousStep }) =>
          hasBeenActive && (
            <div data-testid="step-2" onClick={previousStep}>
              Step 2
            </div>
          )
        }
      </WizardStep>
    </Wizard>
  );
};

// Only the middle step has a routeTitle: hash routing should be
// disabled and a warning emitted for the untitled steps.
const TestComponentWithPartialRouteTitles = () => {
  return (
    <Wizard>
      <WizardStep>
        {({ isActive, nextStep }) =>
          isActive && (
            <div data-testid="step-1" onClick={nextStep}>
              Step 1
            </div>
          )
        }
      </WizardStep>
      <WizardStep routeTitle="SecondStep">
        {({ isActive, previousStep }) =>
          isActive && (
            <div data-testid="step-2" onClick={previousStep}>
              Step 2
            </div>
          )
        }
      </WizardStep>
      <WizardStep>
        {({ isActive, previousStep }) =>
          isActive && (
            <div data-testid="step-3" onClick={previousStep}>
              Step 3
            </div>
          )
        }
      </WizardStep>
    </Wizard>
  );
};

// No step has a routeTitle: hash routing should stay inactive, with no
// warning at all.
const TestComponentWithoutAnyRouteTitles = () => {
  return (
    <Wizard>
      <WizardStep>
        {({ isActive, nextStep }) =>
          isActive && (
            <div data-testid="step-1" onClick={nextStep}>
              Step 1
            </div>
          )
        }
      </WizardStep>
      <WizardStep>
        {({ isActive, previousStep }) =>
          isActive && (
            <div data-testid="step-2" onClick={previousStep}>
              Step 2
            </div>
          )
        }
      </WizardStep>
      <WizardStep>
        {({ isActive, previousStep }) =>
          isActive && (
            <div data-testid="step-3" onClick={previousStep}>
              Step 3
            </div>
          )
        }
      </WizardStep>
    </Wizard>
  );
};

// Steps created dynamically via the useWizard hook. The list starts
// empty, so nothing renders and no hash should be written.
// NOTE(review): setList is never called, so the list stays empty for the
// whole test — presumably intentional; confirm.
const ConditionallyRenderedSteps = () => {
  const { getStep } = useWizard();
  const [list, setList] = React.useState<string[]>([]);
  return (
    <div>
      {list.map(
        (entry) =>
          getStep({ routeTitle: entry }).isActive && (
            <div key={entry}>{entry}</div>
          )
      )}
    </div>
  );
};

// Assertion helpers for the two-step wizard.
const verifyOnlyFirstStepIsVisible = (container: any) => {
  expect(container.queryByTestId("step-1")).toBeTruthy();
  expect(container.queryByTestId("step-2")).toBeNull();
};

const verifyOnlySecondStepIsVisible = (container: any) => {
  expect(container.queryByTestId("step-1")).toBeNull();
  expect(container.queryByTestId("step-2")).toBeTruthy();
};

test("it should update hash location if route title is present", () => {
  const container = render(<TestComponent />);
  // Rerender so the wizard observes the hash written on mount.
  container.rerender(<TestComponent />);

  verifyOnlyFirstStepIsVisible(container);
  expect(window.location.hash).toBe("#FirstStep");

  fireEvent.click(container.queryByTestId("step-1")!);
  container.rerender(<TestComponent />);

  verifyOnlySecondStepIsVisible(container);
  expect(window.location.hash).toBe("#SecondStep");
});

test("it should move to second step if location hash matches", () => {
  window.location.hash = "SecondStep";
  const container = render(<TestComponent />);
  container.rerender(<TestComponent />);

  verifyOnlySecondStepIsVisible(container);
  expect(window.location.hash).toBe("#SecondStep");
});

test("it should set hasBeenActive to true for all preceding steps if hash location redirects to a different initial step", () => {
  window.location.hash = "SecondStep";
  const container = render(<BuildUpWizardTestComponent />);
  container.rerender(<BuildUpWizardTestComponent />);

  expect(container.queryByTestId("step-1")).toBeTruthy();
  expect(container.queryByTestId("step-2")).toBeFalsy();
  expect(window.location.hash).toBe("#SecondStep");
});

test("it should render initial step if location hash is unknown", () => {
  window.location.hash = "UnknownHash";
  const container = render(<TestComponent />);
  container.rerender(<TestComponent />);

  verifyOnlyFirstStepIsVisible(container);
  expect(window.location.hash).toBe("#FirstStep");
});

test("it should not update hash location if some route titles are missing and warn about it", () => {
  const consoleSpy = jest.spyOn(console, "warn");
  const container = render(<TestComponentWithPartialRouteTitles />);
  container.rerender(<TestComponentWithPartialRouteTitles />);

  expect(window.location.hash).toBe("");
  expect(consoleSpy).toBeCalledWith(
    "You have not specified a title for the steps with the indices: 0, 2"
  );
});

test("it should not update hash location if all route titles are missing but don't warn about it", () => {
  const consoleSpy = jest.spyOn(console, "warn");
  const container = render(<TestComponentWithoutAnyRouteTitles />);
  container.rerender(<TestComponentWithoutAnyRouteTitles />);

  expect(window.location.hash).toBe("");
  expect(consoleSpy).not.toBeCalled();
});

test("it should not update hash location if window is not defined (ssr)", () => {
  const consoleSpy = jest.spyOn(console, "warn");
  // Simulate server-side rendering where no window object is available.
  jest.spyOn(routing, "getWindow").mockReturnValue(null);
  const container = render(<TestComponent />);
  container.rerender(<TestComponent />);

  expect(window.location.hash).toBe("");
  expect(consoleSpy).not.toBeCalled();
});

test("it should work with conditionally rendered steps", () => {
  const { rerender } = render(<ConditionallyRenderedSteps />);
  rerender(<ConditionallyRenderedSteps />);
  expect(window.location.hash).toBe("");
});
<gh_stars>0 import React from 'react'; import { Normalize } from 'styled-normalize'; import routes from '@src/routes'; import AppFooter from '@src/components/AppFooter'; import GlobalStyle from '@src/components/GlobalStyle'; const Application = () => ( <> <Normalize /> <GlobalStyle /> <main>{routes}</main> <AppFooter /> </> ); export default Application;
"""Command-line script for inferencing a subclassed MADNet stereo model,
optionally with Modular ADaptation (MAD) enabled during prediction."""
import os
import argparse
from datetime import datetime

import tensorflow as tf
import matplotlib.pyplot as plt

from subclassed_madnet import MADNet
from preprocessing import StereoDatasetCreator

print("\nTensorFlow Version: {}".format(tf.__version__))

parser = argparse.ArgumentParser(description='Script for inferencing subclassed MADNet')
parser.add_argument("--left_dir", help='path to left images folder', required=True)
parser.add_argument("--right_dir", help='path to right images folder', required=True)
parser.add_argument("--mad_pred", help='use modular adaptation while inferencing',
                    action="store_true", default=False)
parser.add_argument("--num_adapt", help='number of modules to adapt',
                    default=1, type=int, required=False)
# Fixed typo: "dispacement" -> "displacement".
parser.add_argument("--search_range", help='maximum displacement (ie. smallest disparity)',
                    default=2, type=int, required=False)
parser.add_argument("-o", "--output_dir",
                    help='path to folder for saving updated model (only needed if performing MAD)',
                    default=None, required=False)
parser.add_argument("--checkpoint_path", help="path to pretrained MADNet checkpoint file",
                    required=True)
parser.add_argument("--lr", help="learning rate (only used if weights are a checkpoint)",
                    default=0.0001, type=float, required=False)
parser.add_argument("--height", help='model image input height resolution',
                    type=int, default=320)
# Fixed copy-paste error: help text previously said "height resolution".
parser.add_argument("--width", help='model image input width resolution',
                    type=int, default=1216)
# Fixed help text: this script performs inference, not training.
parser.add_argument("--batch_size", help='batch size to use during inferencing',
                    type=int, default=1)
parser.add_argument("--use_full_res_loss",
                    help='for using only the final resolution loss during backpropagation',
                    action="store_true", default=False)
parser.add_argument("--show_pred", help='displays the models predictions',
                    action="store_true", default=False)
parser.add_argument("--steps",
                    help='number of steps to inference, set to None to inference all the data',
                    default=None, type=int, required=False)
args = parser.parse_args()


def main(args):
    """Build MADNet, load checkpoint weights, run prediction over the stereo
    dataset, optionally display disparities and save adapted weights."""
    left_dir = args.left_dir
    right_dir = args.right_dir
    mad_pred = args.mad_pred
    num_adapt = args.num_adapt
    search_range = args.search_range
    output_dir = args.output_dir
    checkpoint_path = args.checkpoint_path
    lr = args.lr
    height = args.height
    width = args.width
    batch_size = args.batch_size
    use_full_res_loss = args.use_full_res_loss
    show_pred = args.show_pred
    steps = args.steps
    # MAD updates model state inside predict, which requires eager execution.
    run_eager = True

    # Initialise the model
    model = MADNet(height=height, width=width,
                   search_range=search_range, batch_size=batch_size)
    optimizer = tf.keras.optimizers.Adam(learning_rate=lr)
    model.compile(
        optimizer=optimizer,
        run_eagerly=run_eager
    )
    # expect_partial(): optimizer slots in the checkpoint may be unmatched.
    model.load_weights(checkpoint_path).expect_partial()

    # Get the stereo prediction dataset from the left/right image folders.
    predict_dataset = StereoDatasetCreator(
        left_dir=left_dir,
        right_dir=right_dir,
        batch_size=batch_size,
        height=height,
        width=width
    )
    predict_ds = predict_dataset()

    # Set model attributes controlling modular adaptation during predict.
    model.run_eagerly = run_eager
    model.MAD_predict = mad_pred
    model.num_adapt_modules = num_adapt
    model.use_full_res_loss = use_full_res_loss

    # Inference the dataset.
    disparities = model.predict(predict_ds, steps=steps)

    # View disparity predictions
    if show_pred:
        for i in range(disparities.shape[0]):
            plt.axis("off")
            plt.grid(visible=None)
            plt.imshow(disparities[i])
            plt.show()

    # Save the (possibly adapted) weights only when MAD was active.
    if output_dir is not None and mad_pred == True:
        os.makedirs(output_dir, exist_ok=True)
        now = datetime.now()
        current_time = now.strftime("%Y%m%dT%H%M%SZ")
        model_path = f"{output_dir}/MADNet-{current_time}.ckpt"
        model.save_weights(model_path)


if __name__ == "__main__":
    main(args)
<filename>ADS/geode-core/src/main/java/org/apache/geode/distributed/internal/locks/ElderInitProcessor.java<gh_stars>1-10 /* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.distributed.internal.locks; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import org.apache.logging.log4j.Logger; import org.apache.geode.DataSerializer; import org.apache.geode.distributed.internal.DM; import org.apache.geode.distributed.internal.DistributionManager; import org.apache.geode.distributed.internal.DistributionMessage; import org.apache.geode.distributed.internal.MessageWithReply; import org.apache.geode.distributed.internal.PooledDistributionMessage; import org.apache.geode.distributed.internal.ReplyException; import org.apache.geode.distributed.internal.ReplyMessage; import org.apache.geode.distributed.internal.ReplyProcessor21; import org.apache.geode.distributed.internal.membership.InternalDistributedMember; import org.apache.geode.internal.Assert; import org.apache.geode.internal.i18n.LocalizedStrings; import org.apache.geode.internal.logging.LogService; import 
org.apache.geode.internal.logging.log4j.LocalizedMessage;
import org.apache.geode.internal.logging.log4j.LogMarker;

/**
 * A processor for initializing the ElderState. This may involve sending a message to every existing
 * member to discover what services they have.
 *
 * @since GemFire 4.0
 */
public class ElderInitProcessor extends ReplyProcessor21 {
  private static final Logger logger = LogService.getLogger();

  // Accumulated results: service name -> GrantorInfo for known grantors,
  // plus the set of services whose grantor is believed to have crashed.
  private final HashMap grantors;
  private final HashSet crashedGrantors;

  ////////// Public static entry point /////////

  /**
   * Initializes ElderState map by recovering all existing grantors and crashed grantors in the
   * current ds.
   */
  static void init(DM dm, HashMap map) {
    HashSet crashedGrantors = new HashSet();
    if (!dm.isAdam()) {
      Set others = dm.getOtherDistributionManagerIds();
      if (!others.isEmpty()) {
        // Ask every other member for its grantor information and block until
        // all replies have been processed.
        ElderInitProcessor processor = new ElderInitProcessor(dm, others, map, crashedGrantors);
        ElderInitMessage.send(others, dm, processor);
        try {
          processor.waitForRepliesUninterruptibly();
        } catch (ReplyException e) {
          e.handleAsUnexpected();
        }
      }
    }
    // always recover from ourself
    GrantorRequestProcessor.readyForElderRecovery(dm.getSystem(), null, null);
    DLockService.recoverLocalElder(dm, map, crashedGrantors);

    {
      // Record each crashed grantor with a placeholder GrantorInfo whose
      // "crashed" flag is set.
      Iterator it = crashedGrantors.iterator();
      while (it.hasNext()) {
        map.put(it.next(), new GrantorInfo(null, 0, 0, true));
      }
    }
  }

  //////////// Instance methods //////////////

  /**
   * Creates a new instance of ElderInitProcessor
   */
  private ElderInitProcessor(DM dm, Set others, HashMap grantors, HashSet crashedGrantors) {
    super(dm/* fix bug 33297 */, others);
    this.grantors = grantors;
    this.crashedGrantors = crashedGrantors;
  }

  /**
   * Note the synchronization; we can only process one response at a time.
   */
  private synchronized void processData(ArrayList rmtGrantors, ArrayList rmtGrantorVersions,
      ArrayList rmtGrantorSerialNumbers, ArrayList rmtNonGrantors,
      InternalDistributedMember rmtId) {
    {
      // Merge the remote member's grantor services, keeping for each service
      // the entry with the highest version id seen so far.
      Iterator iterGrantorServices = rmtGrantors.iterator();
      Iterator iterGrantorVersions = rmtGrantorVersions.iterator();
      Iterator iterGrantorSerialNumbers = rmtGrantorSerialNumbers.iterator();
      while (iterGrantorServices.hasNext()) {
        String serviceName = (String) iterGrantorServices.next();
        long versionId = ((Long) iterGrantorVersions.next()).longValue();
        int serialNumber = ((Integer) iterGrantorSerialNumbers.next()).intValue();
        GrantorInfo oldgi = (GrantorInfo) this.grantors.get(serviceName);
        if (oldgi == null || oldgi.getVersionId() < versionId) {
          this.grantors.put(serviceName, new GrantorInfo(rmtId, versionId, serialNumber, false));
          this.crashedGrantors.remove(serviceName);
        }
      }
    }
    {
      // A service the remote member reports without a grantor is marked
      // crashed unless some member has already reported a grantor for it.
      Iterator it = rmtNonGrantors.iterator();
      while (it.hasNext()) {
        String serviceName = (String) it.next();
        if (!this.grantors.containsKey(serviceName)) {
          this.crashedGrantors.add(serviceName);
        }
      }
    }
  }

  @Override
  public void process(DistributionMessage msg) {
    if (msg instanceof ElderInitReplyMessage) {
      ElderInitReplyMessage eiMsg = (ElderInitReplyMessage) msg;
      processData(eiMsg.getGrantors(), eiMsg.getGrantorVersions(),
          eiMsg.getGrantorSerialNumbers(), eiMsg.getNonGrantors(), eiMsg.getSender());
    } else {
      Assert.assertTrue(false,
          "Expected instance of ElderInitReplyMessage but got " + msg.getClass());
    }
    // Always let the superclass account for the reply so waiters make progress.
    super.process(msg);
  }

  /////////////// Inner message classes //////////////////

  /** Request sent to every other member asking for its grantor information. */
  public static final class ElderInitMessage extends PooledDistributionMessage
      implements MessageWithReply {
    private int processorId;

    protected static void send(Set others, DM dm, ReplyProcessor21 proc) {
      ElderInitMessage msg = new ElderInitMessage();
      msg.processorId = proc.getProcessorId();
      msg.setRecipients(others);
      if (logger.isTraceEnabled(LogMarker.DLS)) {
        logger.trace(LogMarker.DLS, "ElderInitMessage sending {} to {}", msg, others);
      }
      dm.putOutgoing(msg);
    }

    @Override
    public int getProcessorId() {
      return this.processorId;
    }

    private void reply(DM dm, ArrayList grantors, ArrayList grantorVersions,
        ArrayList grantorSerialNumbers, ArrayList nonGrantors) {
      ElderInitReplyMessage.send(this, dm, grantors, grantorVersions, grantorSerialNumbers,
          nonGrantors);
    }

    @Override
    protected void process(DistributionManager dm) {
      ArrayList grantors = new ArrayList(); // svc names grantor for
      ArrayList grantorVersions = new ArrayList(); // grantor versions
      ArrayList grantorSerialNumbers = new ArrayList(); // serial numbers of grantor svcs
      ArrayList nonGrantors = new ArrayList(); // svc names non-grantor for
      if (dm.waitForElder(this.getSender())) {
        GrantorRequestProcessor.readyForElderRecovery(dm.getSystem(), this.getSender(), null);
        DLockService.recoverRmtElder(grantors, grantorVersions, grantorSerialNumbers, nonGrantors);
        reply(dm, grantors, grantorVersions, grantorSerialNumbers, nonGrantors);
      } else if (dm.getOtherNormalDistributionManagerIds().isEmpty()) { // bug 38690
        // Either we're alone (and received a message from an unknown member)
        // or else we haven't yet processed a view. In either case, we clearly
        // don't have any grantors, so we return empty lists.
        logger.info(LogMarker.DLS, LocalizedMessage.create(
            LocalizedStrings.ElderInitProcessor__0_RETURNING_EMPTY_LISTS_BECAUSE_I_KNOW_OF_NO_OTHER_MEMBERS,
            this));
        reply(dm, grantors, grantorVersions, grantorSerialNumbers, nonGrantors);
      } else {
        // Request came from a member that has already departed; ignore it.
        // TODO make this fine level?
        logger.info(LogMarker.DLS, LocalizedMessage.create(
            LocalizedStrings.ElderInitProcessor_0_DISREGARDING_REQUEST_FROM_DEPARTED_MEMBER,
            this));
      }
    }

    public int getDSFID() {
      return ELDER_INIT_MESSAGE;
    }

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
      super.fromData(in);
      this.processorId = in.readInt();
    }

    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      out.writeInt(this.processorId);
    }

    @Override
    public String toString() {
      StringBuffer buff = new StringBuffer();
      buff.append("ElderInitMessage (processorId='").append(this.processorId).append(")");
      return buff.toString();
    }
  }

  /** Reply carrying the sender's grantor/non-grantor service lists. */
  public static final class ElderInitReplyMessage extends ReplyMessage {
    private ArrayList grantors; // svc names
    private ArrayList grantorVersions; // grantor version longs
    private ArrayList grantorSerialNumbers; // grantor dls serial number ints
    private ArrayList nonGrantors; // svc names

    public static void send(MessageWithReply reqMsg, DM dm, ArrayList grantors,
        ArrayList grantorVersions, ArrayList grantorSerialNumbers, ArrayList nonGrantors) {
      ElderInitReplyMessage m = new ElderInitReplyMessage();
      m.grantors = grantors;
      m.grantorVersions = grantorVersions;
      m.grantorSerialNumbers = grantorSerialNumbers;
      m.nonGrantors = nonGrantors;
      m.processorId = reqMsg.getProcessorId();
      m.setRecipient(reqMsg.getSender());
      dm.putOutgoing(m);
    }

    public ArrayList getGrantors() {
      return this.grantors;
    }

    public ArrayList getGrantorVersions() {
      return this.grantorVersions;
    }

    public ArrayList getGrantorSerialNumbers() {
      return this.grantorSerialNumbers;
    }

    public ArrayList getNonGrantors() {
      return this.nonGrantors;
    }

    @Override
    public int getDSFID() {
      return ELDER_INIT_REPLY_MESSAGE;
    }

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
      super.fromData(in);
      this.grantors = DataSerializer.readArrayList(in);
      this.grantorVersions = DataSerializer.readArrayList(in);
      this.grantorSerialNumbers = DataSerializer.readArrayList(in);
      this.nonGrantors = DataSerializer.readArrayList(in);
    }

    @Override
    public void toData(DataOutput out) throws IOException {
      super.toData(out);
      DataSerializer.writeArrayList(this.grantors, out);
      DataSerializer.writeArrayList(this.grantorVersions, out);
      DataSerializer.writeArrayList(this.grantorSerialNumbers, out);
      DataSerializer.writeArrayList(this.nonGrantors, out);
    }

    @Override
    public String toString() {
      StringBuffer buff = new StringBuffer();
      buff.append("ElderInitReplyMessage").append("; sender=").append(getSender())
          .append("; processorId=").append(super.processorId).append("; grantors=")
          .append(this.grantors).append("; grantorVersions=").append(this.grantorVersions)
          .append("; grantorSerialNumbers=").append(this.grantorSerialNumbers)
          .append("; nonGrantors=").append(this.nonGrantors).append(")");
      return buff.toString();
    }
  }
}
/** * Commands text */ import { Commands } from "../api"; export const commands: Commands = { QUICK_FIX_STABLE_ID_ERROR: { name: "ui5_lang.quick_fix_stable_id", title: "Generate an ID", }, QUICK_FIX_STABLE_ID_FILE_ERRORS: { name: "ui5_lang.quick_fix_file_stable_id", title: "Generate IDs for the entire file", }, };
Newfoundland and Labrador Premier Kathy Dunderdale deleted her Twitter account on Wednesday night, shortly after a report that showed she happened to be following an X-rated account that featured pornographic videos. Dunderdale said that she had not used her Twitter account for a lengthy period, and had no knowledge that she followed the account. "I have not used my Twitter account for over a year very purposefully — I have decided to disengage from Twitter," she told reporters on Thursday. "I don't mind if my cabinet minister, or my ministers, or other people do it, but as premier of the province, I have taken the position personally that I don't want to be engaged." CBC News reported on Wednesday that several government MHAs — who have steadily denounced New Democrat Gerry Rogers for being a member of a Facebook group that contained threatening posts — had questionable connections themselves, including Dunderdale's own list of followed accounts. But rather than delete that Twitter user from her list, Dunderdale or her staff shut down the @kathydunderdale account. Dunderdale said she does not feel she needs to take part in all forms of social media in order to stay in touch with the people of the province. "Is there some requirement that I have to read every letter, read every email, have a Twitter account, be engaged in Facebook? I have all kinds of opportunity to hear from the people of Newfoundland and Labrador, and I have the freedom to choose which one of those, or how many of those tools, that I'm going to use," Dunderdale said. Dunderdale said politicians have an obligation to maintain a professional public profile while using various social media platforms. "We live in public life, we're examples to people, we need to be careful about what we're doing," she said. "When I found that somebody had attached disturbing material to my account, I disassociated myself with it immediately and took it down immediately. 
"Another thing that I've been involved in all my life is anti-violence work, and I particularly understand how pornography contributes to violence against women and the degradation of women, and let me tell you, I am not going to be associated with anything that promotes that kind of behaviour," Dunderdale said. Rogers was ejected from the house of assembly on Tuesday, even though she said she had been added to the Facebook group 'Kathy Dunderdale Must Go!' without her knowledge or consent. Some Tory MHAs said Rogers was effectively condoning violent threats against Dunderdale because of her membership in that group, a charge that Rogers said was absurd. Earlier this week, Dunderdale denounced Rogers for being sloppy in management of her social media activity. "As an MHA, when you're on Facebook, when you're engaged in Twitter, then you have to have an obligation to pay attention," Dunderdale told reporters on Tuesday. Dunderdale continued the attack against Rogers in her statement on Wednesday night. "What remains troublesome is that we have a similar case where an MHA has chosen to remain a part of a group that has allowed many violent and threatening comments," Dunderdale's statement said.
/** * A utility class to help with building ClinicIO objects. * Example usage: <br> * {@code ClinicIo clinicIo = new ClinicIoBuilder().withPerson("John", "Doe").build();} */ public class ClinicIoBuilder { private ClinicIo clinicIo; public ClinicIoBuilder() { clinicIo = new ClinicIo(); } public ClinicIoBuilder(ClinicIo clinicIo) { this.clinicIo = clinicIo; } /** * Adds a new {@code Person} to the {@code ClinicIo} that we are building. */ public ClinicIoBuilder withPerson(Person person) { clinicIo.addPerson(person); return this; } /** * Adds a new {@code Patient} to the {@code ClinicIo} that we are building. */ public ClinicIoBuilder withPatient(Patient patient) { clinicIo.addPatient(patient); return this; } /** * Adds a new {@code Staff} to the {@code ClinicIo} that we are building. */ public ClinicIoBuilder withStaff(Staff staff) { clinicIo.addStaff(staff); return this; } //@@author aaronseahyh /** * Adds a new {@code Medicine} to the {@code ClinicIo} that we are building. */ public ClinicIoBuilder withMedicine(Medicine medicine) { clinicIo.addMedicine(medicine); return this; } public ClinicIo build() { return clinicIo; } }
<reponame>Revxrsal/Lamp /* * This file is part of lamp, licensed under the MIT License. * * Copyright (c) Revxrsal <<EMAIL>> * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
 */
package revxrsal.commands.bukkit.brigadier;

import com.mojang.brigadier.arguments.ArgumentType;
import com.mojang.brigadier.arguments.StringArgumentType;
import com.mojang.brigadier.tree.LiteralCommandNode;
import org.bukkit.command.Command;
import org.bukkit.entity.Player;
import org.bukkit.plugin.java.JavaPlugin;
import org.jetbrains.annotations.NotNull;
import revxrsal.commands.bukkit.BukkitBrigadier;
import revxrsal.commands.bukkit.BukkitCommandHandler;
import revxrsal.commands.bukkit.EntitySelector;
import revxrsal.commands.bukkit.core.BukkitActor;
import revxrsal.commands.command.CommandActor;
import revxrsal.commands.command.CommandParameter;
import revxrsal.commands.util.ClassMap;

import static revxrsal.commands.bukkit.brigadier.CommodoreProvider.isSupported;
import static revxrsal.commands.bukkit.brigadier.DefaultArgTypeResolvers.*;
import static revxrsal.commands.util.Preconditions.notNull;

/**
 * Brigadier integration backed by Commodore: maps command parameter types
 * to Brigadier {@link ArgumentType}s and registers the resulting command
 * trees with the server.
 */
public final class CommodoreBukkitBrigadier implements BukkitBrigadier {

    private final BukkitCommandHandler handler;
    private final Commodore commodore;
    // Parameter type -> resolver producing the Brigadier argument type.
    private final ClassMap<ArgumentTypeResolver> argumentTypes = new ClassMap<>();

    public CommodoreBukkitBrigadier(BukkitCommandHandler handler) {
        this.handler = handler;
        commodore = new Commodore(handler.getPlugin());
        // Register the built-in resolvers only when Brigadier is available
        // on this server version.
        if (isSupported()) {
            bind(String.class, STRING);
            bind(Number.class, NUMBER);
            bind(Boolean.class, BOOLEAN);
            bind(Player.class, PLAYER);
            bind(EntitySelector.class, ENTITY_SELECTOR);
        }
    }

    @Override
    public void bind(@NotNull Class<?> type, @NotNull ArgumentTypeResolver resolver) {
        notNull(type, "type");
        notNull(resolver, "resolver");
        argumentTypes.add(type, resolver);
    }

    @Override
    public void bind(@NotNull Class<?> type, @NotNull ArgumentType<?> argumentType) {
        notNull(type, "type");
        notNull(argumentType, "argument type");
        // Wrap the fixed argument type in a resolver that ignores the parameter.
        argumentTypes.add(type, parameter -> argumentType);
    }

    @Override
    public void bind(@NotNull Class<?> type, @NotNull MinecraftArgumentType argumentType) {
        notNull(type, "type");
        notNull(argumentType, "argument type");
        // Bind only if this Minecraft argument type exists on the running version.
        argumentType.getIfPresent().ifPresent(c -> argumentTypes.add(type, parameter -> c));
    }

    // Resolves the Brigadier argument type for a parameter; falls back to a
    // plain string when no resolver matches or the resolver declines.
    public @NotNull ArgumentType<?> getArgumentType(@NotNull CommandParameter parameter) {
        ArgumentTypeResolver resolver = argumentTypes.getFlexible(parameter.getType());
        if (resolver != null) {
            ArgumentType<?> type = resolver.getArgumentType(parameter);
            if (type != null)
                return type;
        }
        return StringArgumentType.string();
    }

    private void checkSupported() {
        // NOTE(review): 'commodore' is assigned unconditionally in the
        // constructor, so this null check appears vestigial — confirm.
        if (commodore == null)
            throw new IllegalArgumentException("Brigadier is not supported on this version.");
    }

    @Override
    public @NotNull CommandActor wrapSource(@NotNull Object commandSource) {
        checkSupported();
        return new BukkitActor(commodore.getBukkitSender(commandSource), handler);
    }

    @Override
    public void register() {
        if (!isSupported())
            return;
        // Parse the handler's commands into Brigadier trees and register each root.
        BrigadierTreeParser.parse(this, handler).forEach(n -> register(n.build()));
    }

    private void register(@NotNull LiteralCommandNode<?> node) {
        // Prefer hooking into the plugin.yml-declared command when one exists.
        Command command = ((JavaPlugin) handler.getPlugin()).getCommand(node.getLiteral());
        if (command == null)
            commodore.register(node);
        else
            commodore.register(command, node);
    }
}
CVT: A Crowdsourcing Video Transcoding Scheme Based on Blockchain Smart Contracts Streaming media has been largely used by millions of users every day. The number of customers and programs, e.g., TV series, movies, and various shows, are still growing fast. However, the demand for video transcoding for various personal terminal devices results in the shortage of computing resources and the prolongation of processing delay in centralized video transcoding systems. To solve this issue, we propose a blockchain, especially, smart contract based scheme that can achieve decentralized and on-demand crowdsourcing for video transcoding, which remarkably mitigates the transcoding overhead. Specifically, our scheme consists of four key components such as employers, workers, task allocation, and payment. An employer initializes the smart contract, releases the task, and initiates the smart contract. Workers bid for the task, and the successful bidder will obtain the task and execute the task. The task allocation mechanism and the payment mechanism can guarantee the profits of both and encourage both as well. Moreover, the smart contract consists of the bidding contract and the task execution contract. The extensive analysis of our proposed scheme justified the feasibility, security for defending against typical threats, applicability in realistic situations, and portability for most multimedia such as videos and audios. I. INTRODUCTION Recently, streaming media such as Netflix, Disneyland+, and Twitch attract increasing subscribers since it enables conveniently and pervasively video accessing. However, current streaming media still imposes several inevitable problems. The most critical issue is to guarantee the efficiency of video transcoding during the video transmission process in the centralized systems. 
The sharp increment of the number of users and videos requires more computation resources for video transcoding, which may slow down or even terminate the process of video transcoding and so as to remarkably damage the user experiences. The associate editor coordinating the review of this manuscript and approving it for publication was Chuan Heng Foh . In order to solve the weaknesses of current centralized systems, crowdsourcing is proposed as a promising approach. Crowdsourcing is a modern business model to distribute the assignments to the volunteers. For example, a company can hardly complete a mass of processing and computing work (e.g., video transcoding) due to the limit of several factors, such as computation resources and the performance of the devices. Because of the characteristics of crowdsourcing, it is an efficient and beneficial way to solve this problem. But we still need to think about how to protect the interests of each entity during the crowdsourcing process while maintaining the efficiency of crowdsourcing. Researchers have proposed some methods to implement higher efficient video transcoding approaches - , and other scholars have proposed applying crowdsourcing to deal with the drawbacks of centralized systems - . Also there are several methods presented to solve the common issues in the multimedia field - . According to the work mentioned above, we consider using blockchain-based Ethereum (ETH) smart contracts to achieve a novel crowdsourcing video transcoding mechanism for improving the efficiency of video transcoding that can reduce the stress of the centralized systems and also create profits for crowdsourcing volunteers. The contributions of our work in this paper are shown as follows: • We propose four essential components for achieving crowdsourcing video transcoding tasks, including the employer, the worker, the task allocation mechanism and the payment mechanism. 
• We design a kind of smart contract combined with our proposed four components that can realize the bidding function and task execution function at the same time during the crowdsourcing video transcoding process. We will introduce the related work in Section II. The proposed scheme and the detailed design are shown in Section III. The analysis of our scheme is illustrated in Section IV. The experimental results are shown in Section V. Finally, we will conclude our paper in Section VI. II. RELATED WORK We divide related work into three categories according to the domain and characteristics of the work and we will introduce these three classifications respectively below. A. VIDEO TRANSCODING Koziri et al. investigated the current situation of the development of video transcoding related cloud service and summarized challenges and future opportunities. The Cloud-based Video Streaming Services (CVS2) is presented by Li et al. for enhancing the robustness of the system while keeping the cost of the service provider from fluctuating significantly when users use cloud services for video on demand. Liu et al. proposed a blockchain-based mobile edge computing (MEC) architecture, which offers video streaming distributedly and securely for blockchain based video transcoding architecture to satisfy the demands of the customers. Also, Costero et al. proposed using a multiagent machine learning based method to achieve dynamic optimization during the video transcoding process to greatly improved service quality and reduced power consumption. Jiang et al. presented an enhanced architecture for dealing with the problems in large-scale video transcoding in the public cloud through the application of the message queue layer. He et al. proposed a new framework called CrowdTranscoding that utilizes the surplus of personal terminal computing resources to execute video transcoding tasks. 
And they also put forward Viewer Crowdsourcing Transcode Scheduler (VCTS) to solve the issues of workload during the task allocation. Furthermore, Wei et al. proposed a cloud-based online video transcoding system (COVT) targeting at providing the quality-of-service and economical solution for large-scale video transcoding that they also performed simulation evaluation to verify their proposed scheme. Yoon et al. proposed to run a low-cost video transcoding scheme on the edge of the wireless. Comparing with using costly server-based transcoding, they designed a transparent, low-cost, scalable video transcoding system that is realized on a hardware Raspberry PI at a low cost. B. CROWDSOURCING The concept of crowdsourcing was put forward earlier by Howe which contributes a lot to the establishment of the distributed task assignment system for various demands. Then, Vukovic took crowdsourcing in the field of software development as an example, deduced the need to provide general crowdsourcing services in the cloud environment, and made crowdsourcing further developed. Moreover, Estellés-Arolas and González-Ladrón-De-Guevara modified definition of crowdsourcing, established the basic characteristics of crowdsourcing and presented a more complete and consistent definition of crowdsourcing. Chittilappilly et al. did an investigation, the goal of which is to provide a comprehensive picture of the current state of general crowdsourcing technology. On the basis of their concerns, the works are divided into three parts that are incentive design, task assignment and quality control respectively. In the similar way, Mao et al. did a survey that investigated the application of crowdsourcing in software engineering and concluded the challenges, opportunities and future trends of crowdsourcing. Garcia-Molina et al. 
provided an overview of data crowdsourcing and the processes involved collecting, managing, and analyzing the data, and described the essential design steps when implementing a crowdsourced scheme. As crowdsourcing evolves, many researchers proposed novel methods to apply crowdsourcing. For example, Chatzimilioudis et al. proposed applying smart phones to implement the crowdsourcing according to the typical characteristics of the smart phone. Chawla et al. studied the design of crowdsourcing and analyzed the approximation of optimal crowdsourcing competitions. Additionally, they presented a scheme of the optimal crowdsourcing competition that reflects the optimal auction design theory. Franklin et al. the proposed CrowdDB solves the problem that neither database systems nor search engines can adequately answer by crowdsourcing human input, and they also put forward an important way to develop crowdsourced query processing systems in the future. Gu et al. proposed a distributed online crowdsourcing mechanism based on smart contracts, the characteristics of which are decentralized, unalterable, and truthful. Kittur et al. proposed a generic framework utilizing microtask markets for crowdsourcing complex and VOLUME 8, 2020 interdependent tasks, and described the framework and analyzed the advantages and flaws of the scheme. Crow-dAsm proposed by Pan et al. that is useful for collaborative crowdsourcing systems to maximize the quality of expected results, at the same time, minimizing expected delays by combining the expertise of available workers. Also, Tong et al. and Wang et al. proposed a scheme for large-scale crowdsourced task decomposition and a human-computer hybrid to improve crowdsourcing efficiency respectively. C. BLOCKCHAIN FOR CROWDSOURCING Blockchain has emerged in recent years, several researchers combine this new technology with crowdsourcing according to the characteristics of them. 
Feng and Yan proposed a blockchain-based mobile crowdsourcing chain system to enable distributed and decentralized trust management to address the inefficiencies of traditional blockchain-based crowdsourcing systems, and a new block-generated consensus mechanism that significantly reduces computational overhead. A framework based on blockchain is presented by Han et al. for sustaining crowdsourcing platforms. The proposed framework has two main advantages, one is the transparent incentives are provided by the framework, the other is trusted personal data can be shared in a cross-platform system. Kogias et al. proposed applying blockchain and smart contract technologies to improve the existing crowdsourcing scheme from the perspective of security. Li et al. proposed a decentralized CrowdBC framework based on blockchain, which can preserve the privacy of customers. ZebraLancer, a private and anonymous decentralized crowdsourcing system, is designed and implemented by Lu et al. , and the issues of data leakage and identity breach in the decentralized crowdsourcing system are solved. Wang et al. developed an optimized blockchain-based architecture aiming at peer-to-peer energy transactions, for administering and managing the operation of crowdsourced energy systems (CES). Also, some scholars proposed their own ideas towards combing the blockchain with crowdsourcing - , then {w j } ∪ W → W ; Otherwise, refusing w j for joining W . 2) Assuming the task published by the employer can be completed, then the task allocation target is formulated below: where r is a parameter set by the employer for adjusting the weight between c i and t i according to the actual demands of the employer. 3) For each w j in W bidding for task e , if m i,j offered by w j can satisfy c i,j ≤ max p and t i,j ≤ max t , then maintaining w j in W ; Otherwise removing w j from W . 
4) Selecting w j from W as the successful bidder that meets the condition w j = arg i∈S min bidder, marking the current W (the task allocation result) as W . c) If there are more than one w j from W are chosen, the final successful bidder is the worker that satisfies w j = argmin tag j . Then recording m i,j offered by the only w j in W i.e. the successful bidder, marking the current W as W . 5) Updating the quotation set C and the time duration set T as c i,j ∪C → C and t i,j ∪T → T respectively according to m i,j that the successful bidder offered. 6) Recording the task allocation result W , the quotation set C, and the time duration set T as the promise of the successful bidder for task e by the smart contract. • Payment Mechanism. It is easy to find out the successful bidder should be paid according to the task allocation result W . The payment mechanism is used to confirm the time and the amount that the successful bidder should be paid. We stipulate the payment result of the successful bidder for task e is P. On the basis of W , the payment mechanism is shown as follows: 1) The agreed delivery time for task e is determined as If there is no element in W , the employer does not need to pay, i.e. P = 0. 3) If w j exists in W , the successful bidder needs to complete task e before T d : a) If the successful bidder completes task e before T d , the payment result P = c i,j . b) If the successful bidder fails to finish task e before T d , the payment result P = −P w . B. SYSTEM OVERALL ARCHITECTURE The main thought of our designed system is to combine the above four components and smart contracts for implementing crowdsourcing video transcoding. To enhance the connections among the above four components, we add more components, and our whole scheme is shown in Figure 1. The employer sets up the smart contract according to the specific requirements of the task that needs to be transcoded. 
Also, the smart contract includes the bidding part and the task execution part to ask for proper workers to complete the video transcoding task. Then the workers who are willing to accept the task can participate in the bidding. Finally, the smart contract will select a successful bidder from the bidding part according to the task allocation mechanism who will start the trading part with the employer then, at the same time the employer should release the corresponding task. Moreover, the trusted third party is needed in the system to check the transcoded task join the arbitration committee, at the same time workers as the same number of the trusted third party will join the committee too. We will further introduce this designed part in Section III-G. The next section focuses on the design of the smart contract in our scheme. C. SMART CONTRACT DESIGN The designed smart contract needs to satisfy two functions: one is bidding for the tasks marked as the bidding part, and the other is to ensure the process of the task execution between the employer and the successful bidder correct for guaranteeing the profits of both parties marked as the task execution part. The specific design of the smart contract is shown in Algorithm 1. Besides, there are four steps for achieving crowdsourcing video transcoding on the basis of the designed smart contract, we will describe each process in the following sections in detail. D. SMART CONTRACT INITIALIZATION Before the smart contract starts to initiate the bidding process, the employer should upload the basic parameters of the bidding part which contain D v , max p and max t . 
Algorithm 1 Smart Contract for Bidding and Task Execution 1: The employer uploads Q to the smart contract, ads A = E ads and m 0 = (max t , max p ); 2: The employer releases task e ; 3: The employer sends P e to the smart contract, T r = now and tag = tag j = 0; 4: while time ∈ : 1) If c x +t x < c 0 +t 0 , the smart contract substitutes current m 0 to m x , current tag to tag j , and current ads B to w j,ads . 2) If c x + t x = c 0 + t 0 and tag j < tag, the smart contract reacts the same as the previous point. 3) If there are other situations, the smart contract preserves the current parameters. Besides, if there are no workers bid for the task until the time period is passed, the smart contract will refund P e to the employer and end the process. Otherwise, the worker corresponding to ads B is the successful bidder who will start the next process with the employer through the smart contract. We will detailedly describe the task execution between the employer and the successful bidder through the task execution part of the smart contract in the next section. F. TASK EXECUTION After finishing the bidding part and choosing the successful bidder, the task execution part of the smart contract starts. 
The whole process of this part is illustrated in Figure 3, and the steps of the task execution part are shown as follows: 1) The successful bidder sends $P_w$ to the smart contract, and the amount of $P_w$ is equal to the $c_x$ that the successful bidder announced in the bidding part; 2) The employer randomly chooses a hash algorithm and uploads it to the smart contract; 3) The employer releases the task and uses the selected hash algorithm to calculate the hash value of each video block, marked as $H_b$, then uploads $H_b$ and $Q$ to the smart contract; 4) The successful bidder downloads the transcoding task and calculates the hash value of the task as $H_b'$ using the hash algorithm selected by the employer: a) If $H_b' = H_b$, the smart contract randomly sends one of the addresses of the trusted third party (marked as $ads_t$) for checking the transcoded video to the successful bidder, who starts the transcoding task; b) If $H_b' \neq H_b$, the successful bidder terminates the process, and the smart contract refunds $P_e$ to the employer and $P_w$ to the successful bidder. 5) The successful bidder calculates the hash value of each transcoded video block, marked as $H_{tb}$, uploads $H_{tb}$ and $Q'$ to the smart contract, and sends the transcoded task to $ads_t$: a) If $Q' = Q$, the smart contract continues the process; b) If $Q' \neq Q$, the smart contract terminates the process, sends $P_w$ as compensation to the employer, and refunds $P_e$ to the employer.
6) The trusted third party downloads the transcoded task from $ads_t$, calculates the hash value of the transcoded video blocks, marked as $H_{tb}'$, and uploads $H_{tb}'$ to the smart contract: a) If $H_{tb}' = H_{tb}$, the smart contract sends $P_e$ to the successful bidder, and refunds $P_w$ to the successful bidder and $P_e - P_w$ to the employer; b) If $H_{tb}' \neq H_{tb}$, the smart contract terminates the process and raises a punishment against the successful bidder: it sends $P_w$ as compensation to the employer and refunds $P_e$ to the employer. 7) The trusted third party sends the transcoded video to the employer, and the employer checks whether the content of the transcoded video blocks is correct before the time $T_d$: a) If the content of the transcoded video blocks is correct, the employer terminates the task execution, which means the task is completed. b) If the content of the transcoded video blocks is incorrect, the employer can send a request to the smart contract to launch an arbitration, and the smart contract will invite the arbitration committee to judge. c) If the current time exceeds $T_d$ and the employer has not sent any request, the smart contract terminates the process, which means the task is completed. When the employer sends a request to the smart contract, the arbitration mechanism is activated to deal with the dispute between the employer and the successful bidder; we will introduce the arbitration mechanism in the following section. G. ARBITRATION MECHANISM The arbitration mechanism consists of two parts: one is the arbitration committee, and the other is the rules corresponding to the different situations that can arise in the voting of the arbitration committee. We introduce the composition of the arbitration committee first. From Figure 4, we can see that the smart contract randomly requires the trusted third party and the same number of workers to join the arbitration committee.
After that, each member of the arbitration committee will do as the steps below before the time T d , for realizing the arbitration mechanism: 1) The member downloads the original task released by the employer and calculates the hash value of the video blocks as H o : a) If H o = H b , the smart contract continues the process; b) If H o = H b , the smart contract searches for a new member to join the arbitration committee from the VOLUME 8, 2020 same party and repeats point 1) until the number of members of both parties is equal; 2) The member downloads the transcoded task uploaded by the successful bidder and calculates the hash value of the video blocks as H t : a) If H t = H tb , the smart contract continues the process; b) If H t = H tb , the smart contract searches for a new member to join the arbitration committee from the same party and repeats points 1) and 2) until the number of members of both parties is equal; 3) After verifying the original video blocks and the transcoded video blocks, the member of the arbitration committee can compare the content of these two tasks: a) If the contents of these two tasks are different, the member of the committee sends 0 to the smart contract; b) If the contents of these two tasks are the same, the member of the committee sends 1 to the smart contract; 4) The smart contract counts the number of 0 and 1 marked as num 0 and num 1 sent by the arbitration committee member and sends the feedback (see Figure 5) to each party according to the rules as follow: a) If num 0 > num 1 , the smart contract raises a punishment for the successful bidder that it sends P w to the employer as the compensation and refunds P e to the employer. b) If num 0 < num 1 , the smart contract raises a punishment for the employer that it sends P e to the successful bidder as the compensation and refunds P w to the successful bidder. c) If num 0 = num 1 , the smart contract refunds P e to the employer and P w to the successful bidder. 
After finishing the above steps, the crowdsourcing video transcoding task is completed. According to the different situations that happen during the process, we give the corresponding strategy towards dealing with them. In Section IV our scheme will be analyzed in detail on the basis of each essential part during the process. IV. SECURITY ANALYSIS In this section, we list several threats towards our system and analyze the security and the feasibility of our scheme. We analyze the system as three parts respectively the bidding part, the task execution part and the arbitration part. A. BIDDING ANALYSIS During the bidding, there are two essential issues that should be considered: 1) The dishonest quotation (including c i,j and t i,j ). a) c i,j the worker offers is too high. The employer sets max p before the smart contract starts for limiting the workers to offer a too high price that the employer can not afford. b) c i,j the worker offers is too low. If one worker offers a too low price, that means other workers will not compete with this task, and the worker will work for the employer and earns only little that almost makes no sense for the worker. c) t i,j the worker offers is too long. The employer sets max t before the smart contract starts for limiting the workers to offer too long time that the employer can not accept. d) t i,j the worker offers is too short. If one worker offers too short time and the worker does not finish it on time during the task execution, the smart contract will send the worker's P w to the employer as compensation. Thus, it is unnecessary for workers to complete for a task by offering a too short time that the workers can not finish the task on time. 2) The successful bidder or the employer does not continue to enforce the process after the bidding. 
If one of the parties stops to implement the process after the bidding part, when the time reaches T d the smart contract terminates the process and refunds P e to the employer and P w to the successful bidder, that will not affect the interests of both parties. B. TASK EXECUTION ANALYSIS Also, three important issues have to be analyzed during the task execution part: 1) The employer or the successful bidder uploads the video blocks that are not the same as the video blocks they announced to upload. Before each party uploads the video blocks, the smart contract will ask it to provide the corresponding hash value of each video block as the evidence. If one party cheats during the task execution process, it is easy to find out by the hash value. Then the smart contract will punish the cheated party and send its earnest money to the other party. 2) The successful bidder uploads the incorrect video with correct Q. The employer can initiate arbitration and send a request to the smart contract if the employer finds the contents of transcoded video blocks are different from the video blocks the employer releases. a) The voting result of arbitration is that the employer is correct. The smart contract sends P w to the employer and refunds P e to the employer. b) The voting result of arbitration is that the employer is incorrect. The smart contract sends P e to the successful bidder as compensation and refunds P w to the successful bidder. 3) The successful bidder accepts the task but does not finish it as the employer requires. a) The successful bidder does not finish the task before T d . When the current time exceeds T d , but the successful bidder does not upload related parameters or only uploads part of parameters, the smart contract sends P w to the employer as compensation and refunds P e to the employer. b) Q is not the same as Q uploaded by the employer. 
At least one parameter in Q is not the same as the corresponding parameter in Q that the employer uploaded, the smart contract sends P w to the employer and refunds P e to the employer. C. ARBITRATION ANALYSIS Moreover, two essential issues have existed during the arbitration process as follow: 1) The employer sends a request that is incorrect about the successful bidder to the smart contract. There is only one situation that the employer should send a request to the smart contract that is the video blocks the successful bidder uploaded have the same Q as Q, however, the content of these video blocks is incorrect. Thus, after the employer starts the arbitration mechanism, each member of the arbitration can obtain the original task released by the employer and the transcoded task uploaded by the successful bidder. Then, they can compare the contents of the original task and the transcoded task, and send their the results to the smart contract. If num 0 < num 1 , that means the employer cheats, the smart contract sends P e to the successful bidder and refunds P w to the successful bidder. 2) The trusted third party colludes with either party. For the arbitration mechanism, every time the employer sends the request to the smart contract, the smart contract will randomly select the members from the trusted third party and the workers. That means, it is hard for each party to find enough members of the randomly chosen arbitration committee on the anonymous networks for collusion in order to gain profits by the incorrect result of the arbitration committee sent to the smart contract. V. PERFORMANCE ANALYSIS We tested our scheme on the private chain to show the utility of the scheme in real situations. We implemented the experiment as the steps presented in Section III. Specifically, we applied Remix 1 to compile and test our scheme. First, we chose the compiler with the version 0.4.25+commit to compile the smart contract codes of our scheme. 
2 Then, we built a private chain on the local host and create several accounts on the chain. Next, we started mining on the private chain. After the mining began, we set the environment Web3 Provider on Remix to connect the private chain we built. Finally, we deployed the smart contract on the private chain to test. We selected testing the factors (steps) that have the greatest impact on the performance of our scheme during the interactive process between users and the smart contract. In order to show the results clearer, we divided the experiment into two parts -the bidding part performance and the task execution part performance. We will illustrate and analyze the experimental results below. A. BIDDING PART PERFORMANCE During the bidding, the most important factor is the speed of uploading parameters. The extensibility of our scheme also should be considered that means changing the number of uploaded parameters in the bidding situation. Therefore, we tested the speed of uploading the different numbers of parameters at once by each participated worker. The experimental results are shown in Figure 6. We can see that as the number of parameters that uploaded by each worker at once raises, the time consumption increases slightly. Even if the worker is required to upload nine parameters at one time, the time consumption is within acceptable limits (within 2 seconds). Therefore, it is sufficient to show that our designed smart contract can deal with the bidding part well that also has the extensibility of changing the number of parameters according to different requirements. B. TASK EXECUTION PART PERFORMANCE After the bidding part is finished, the smart contract will implement the execution part. According to the process and analysis of our scheme in Section III, it is uncomplicated to summarize that six steps have a huge impact on the performance during the task execution part. The specific six steps are listed as follows: 1) Deploy the smart contract. 
2) Send the earnest money to the smart contract. 3) Upload the parameters to the smart contract by the successful bidder. 4) Upload the parameters to the smart contract by the trusted third party. 5) Send the request to the smart contract. 6) Select the members of the committee. We respectively mark the above steps in order as act i (1 ≤ i ≤ 6). The experimental results are shown in Figure 7. The results demonstrate that deploying the smart contract and selecting the members of the committee take the most time (generally around 2 seconds). Additionally, it can be seen that the time consumption of sending tokens to the smart contract (around 1.5 seconds) is higher than the other three uploading parameter operations (generally within 1 second). Thus, the experimental results illustrate that the performance of the task execution part of the scheme can fit the real-world application demands. VI. CONCLUSION In this paper, we propose a blockchain smart contract based scheme combined with the four essential components, which is designed to achieve crowdsourcing video transcoding for solving the lack of computing resources and enhances the efficiency of video transcoding. According to the security analysis and performance analysis, our proposed scheme (i.e., CVT) is justified to be feasible and sufficient to deal with the common security issues in crowdsourcing video transcoding. Also, the scheme has good portability, which means our scheme can be applied to other similar multimedia crowdsourcing scenarios by modifying a few settings and parameters.
<gh_stars>1-10 #!/usr/bin/python # # testlib_httpd.py quality assurance test script # Copyright (C) 2008-2013 Canonical Ltd. # Author: <NAME> <<EMAIL>> # Author: <NAME> <<EMAIL>> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3, # as published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <httpd://www.gnu.org/licenses/>. # import unittest, subprocess import os import sys import tempfile import testlib import time import socket import shutil import cookielib import urllib2 import re import base64 class HttpdCommon(testlib.TestlibCase): '''Common functions''' def _setUp(self, clearlogs = False): '''Setup''' self.release = self.lsb_release['Codename'] self.html_page = "/var/www/test.html" self.php_page = "/var/www/test.php" self.cgi_page = "/usr/lib/cgi-bin/test-cgi.pl" self.apache2_default = "/etc/default/apache2" self.ssl_key = "/etc/ssl/private/server.key" self.ssl_crt = "/etc/ssl/certs/server.crt" self.ssl_site = "/etc/apache2/sites-enabled/999-testlib" self.ports_file = "/etc/apache2/ports.conf" self.access_log = "/var/log/apache2/access.log" self.error_log = "/var/log/apache2/error.log" if not hasattr(self, 'initscript'): self._set_initscript("apache2") # Dapper's apache2 is disabled by default if self.lsb_release['Release'] == 6.06: testlib.config_replace(self.apache2_default, "", append=True) subprocess.call(['sed', '-i', 's/NO_START=1/NO_START=0/', self.apache2_default]) self._stop() if clearlogs == True: self._clearlogs() self._start() def _set_initscript(self, initscript): self.initscript = initscript def _tearDown(self): '''Clean up 
after each test_* function''' self._stop() time.sleep(2) if os.path.exists(self.html_page): os.unlink(self.html_page) if os.path.exists(self.php_page): os.unlink(self.php_page) if os.path.exists(self.cgi_page): os.unlink(self.cgi_page) if os.path.exists(self.ssl_key): os.unlink(self.ssl_key) if os.path.exists(self.ssl_crt): os.unlink(self.ssl_crt) if os.path.exists(self.ssl_site): os.unlink(self.ssl_site) self._disable_mod("ssl") testlib.config_restore(self.ports_file) testlib.config_restore(self.apache2_default) def _start(self): '''Start process''' # check if there is a /etc/init/initscript.conf if os.path.exists("/etc/init/%s.conf" % self.initscript): rc, report = testlib.cmd(['start', self.initscript]) else: rc, report = testlib.cmd(["/etc/init.d/%s" % self.initscript, 'start']) expected = 0 result = 'Got exit code %d, expected %d\n' % (rc, expected) self.assertEquals(expected, rc, result + report) time.sleep(2) def _stop(self): '''Stop process''' # check if process is running if os.path.exists("/etc/init/%s.conf" % self.initscript): rc, pidof = testlib.cmd(['pidof', '-x', self.initscript]) if pidof: rc, report = testlib.cmd(['stop', self.initscript]) else: # no need to stop it, it's stopped rc, report = (0, '') else: rc, report = testlib.cmd([self.initscript, 'stop']) expected = 0 result = 'Got exit code %d, expected %d\n' % (rc, expected) self.assertEquals(expected, rc, result + report) def _clearlogs(self): '''Clear httpd logs''' if os.path.exists(self.access_log): os.unlink(self.access_log) if os.path.exists(self.error_log): os.unlink(self.error_log) def __disable_mod(self, mod): if not os.path.exists(os.path.join("/etc/apache2/mods-available", mod + \ ".load")): return if not os.path.exists("/usr/sbin/a2dismod"): return rc, report = testlib.cmd(['a2dismod', mod]) expected = 0 result = 'Got exit code %d, expected %d\n' % (rc, expected) self.assertEquals(expected, rc, result + report) def _disable_mod(self, mod): self.__disable_mod(mod) self._restart() 
time.sleep(2) def _disable_mods(self, mods): '''take a list of modules to disable''' for mod in mods: self.__disable_mod(mod) self._restart() time.sleep(2) def __enable_mod(self, mod): rc, report = testlib.cmd(['a2enmod', mod]) expected = 0 result = 'Got exit code %d, expected %d\n' % (rc, expected) self.assertEquals(expected, rc, result + report) def _enable_mod(self, mod): self.__enable_mod(mod) # for some reason, force-reload doesn't work # if self.lsb_release['Release'] >= 8.04: # self._reload() # else: self._restart() time.sleep(2) def _enable_mods(self, mods): '''take a list of modules to enable''' for mod in mods: self.__enable_mod(mod) # for some reason, force-reload doesn't work # if self.lsb_release['Release'] >= 8.04: # self._reload() # else: self._restart() time.sleep(2) def _disable_site(self, sitename): rc, report = testlib.cmd(['a2dissite', sitename]) expected = 0 result = 'Got exit code %d, expected %d\n' % (rc, expected) self.assertEquals(expected, rc, result + report) self._restart() time.sleep(2) def _enable_site(self, sitename): rc, report = testlib.cmd(['a2ensite', sitename]) expected = 0 result = 'Got exit code %d, expected %d\n' % (rc, expected) self.assertEquals(expected, rc, result + report) # for some reason, force-reload doesn't work # if self.lsb_release['Release'] >= 8.04: # self._reload() #else: self._restart() time.sleep(2) def _reload(self): '''Reload httpd''' rc, report = testlib.cmd([self.initscript, 'force-reload']) expected = 0 result = 'Got exit code %d, expected %d\n' % (rc, expected) self.assertEquals(expected, rc, result + report) def _restart(self): '''Restart httpd''' self._stop() self._start() def _prepare_ssl(self, srvkey, srvcert): '''Prepare Apache for ssl connections''' self._enable_mod("ssl") # copy instead of rename so we don't get invalid cross-device link errors shutil.copy(srvkey, self.ssl_key) shutil.copy(srvcert, self.ssl_crt) if self.lsb_release['Release'] <= 7.04: testlib.config_replace(self.ports_file, 
"Listen 443", True) # create the conffile entry site_contents = ''' NameVirtualHost *:443 <VirtualHost *:443> SSLEngine on SSLOptions +StrictRequire SSLCertificateFile /etc/ssl/certs/server.crt SSLCertificateKeyFile /etc/ssl/private/server.key ServerAdmin webmaster@localhost DocumentRoot /var/www/ ErrorLog /var/log/apache2/error.log # Possible values include: debug, info, notice, warn, error, crit, # alert, emerg. LogLevel warn CustomLog /var/log/apache2/access.log combined ServerSignature On </VirtualHost> ''' testlib.create_fill(self.ssl_site, site_contents) self._reload() def _test_url_proxy(self, url="http://localhost/", content="", proxy="localhost:3128"): '''Test the given url''' rc, report = testlib.cmd(['elinks', '-verbose', '2', '-no-home', '1', '-eval', 'set protocol.ftp.proxy.host = "%s"' %(proxy), '-eval', 'set protocol.http.proxy.host = "%s"' %(proxy), '-eval', 'set protocol.https.proxy.host = "%s"' %(proxy), '-dump', url]) expected = 0 result = 'Got exit code %d, expected %d\n' % (rc, expected) self.assertEquals(expected, rc, result + report) if content != "": self._word_find(report, content) def _test_url(self, url="http://localhost/", content="", invert=False, source=False): '''Test the given url''' if source: report = self._get_page_source(url) else: report = self._get_page(url) if content != "": self._word_find(report, content, invert) def _get_page_source(self, url="http://localhost/", data='', headers=None): '''Fetch html source''' cookies = "/tmp/cookies.lwp" testlib.create_fill(cookies, "#LWP-Cookies-2.0") if headers == None: headers = {'User-agent' : 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'} clean_url = url if re.search(r'http(|s)://.*:.*@[a-z].*', url): tmp = re.sub(r'^http(|s)://', '', url) username = tmp.split('@')[0].split(':')[0] password = tmp.split('@')[0].split(':')[1] base64_str = base64.encodestring('%s:%s' % (username, password))[:-1] headers['Authorization'] = "Basic %s" % (base64_str) # strip out the username and password 
from the url clean_url = re.sub(r'%s:%s@' % (username, password), '', url) cj = cookielib.LWPCookieJar(filename=cookies) cj.load() opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj)) urllib2.install_opener(opener) try: if data != '': req = urllib2.Request(clean_url, data, headers) else: req = urllib2.Request(clean_url, headers=headers) except: raise tries = 0 failed = True while tries < 3: try: handle = urllib2.urlopen(req) failed = False break except urllib2.HTTPError, e: raise if e.code != 503: # for debugging #print >>sys.stderr, 'Error retrieving page "url=%s", "data=%s"' % (url, data) raise tries += 1 time.sleep(2) self.assertFalse(failed, 'Could not retrieve page "url=%s", "data=%s"' % (url, data)) html = handle.read() cj.save() return html def _get_page(self, url="http://localhost/"): '''Get contents of given url''' rc, report = testlib.cmd(['elinks', '-verbose', '2', '-no-home', '1', '-dump', url]) expected = 0 result = 'Got exit code %d, expected %d\n' % (rc, expected) self.assertEquals(expected, rc, result + report) return report def _test_raw(self, request="", content="", host="localhost", port=80, invert = False, limit=1024): '''Test the given url with a raw socket to include headers''' s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((host, port)) s.send(request) data = s.recv(limit) s.close() if content != "": self._word_find(data, content, invert = invert) def create_php_page(page, php_content=None): '''Create a basic php page''' # complexity here is due to maintaining interface compatability when # php_content is not provided if not php_content: str = "php works" php_content = "echo '" + str + "'; " else: str = php_content script = '''<?php %s ?>''' %(php_content) testlib.create_fill(page, script) return str def create_perl_script(page): '''Create a basic perl script''' str = "perl works" script = '''#!/usr/bin/perl print "Content-Type: text/plain\\n\\n"; print "''' + str + '''\\n"; ''' testlib.create_fill(page, script, 
0755) return str def create_html_page(page): '''Create html page''' str = "html works" testlib.create_fill(page, "<html><body>" + str + "</body></html>") return str
/**
 * Builds the effective tag groups for a response by overlaying the
 * locally-cached tag mutations on top of the server-provided tags, then
 * narrowing the result to the groups that were actually requested.
 *
 * @param requestedTags The tag groups that were requested.
 * @param response The server response containing the cached tags.
 * @param cacheTime The time the response was cached.
 * @return The requested tag groups with local overrides applied.
 */
@NonNull
private Map<String, Set<String>> generateTags(Map<String, Set<String>> requestedTags, TagGroupResponse response, long cacheTime) {
    // Only replay local mutations that fall inside the "prefer local data" window.
    final long overridesSince = cacheTime - getPreferLocalTagDataTime();

    final Map<String, Set<String>> mergedTags = new HashMap<>(response.tags);
    for (TagGroupsMutation override : getTagGroupOverrides(overridesSince)) {
        override.apply(mergedTags);
    }

    // Report only the groups the caller asked about.
    return TagGroupUtils.intersect(requestedTags, mergedTags);
}
#include <iostream>
#include <iomanip>
#include <vector>

// Reads a DAG of n rooms and m one-way doors (1-based input, room 1 -> room n),
// where a walker picks an outgoing door uniformly at random. Prints the minimum
// achievable expected number of passages after blocking at most one door
// (never disconnecting a room), to high precision.
//
// Fixes over the previous version: std::vector replaces raw new[] arrays that
// were never delete[]d (leak), the unused MAX_N constant is gone, adjacency is
// built in O(m) instead of O(n*m), and loop counters no longer mix
// signed/unsigned comparisons.
int main() {
    int n, m;
    std::cin >> n >> m;

    // adj[i] : rooms reachable from room i (0-based), in input order.
    // e[i]   : expected number of passages from room i to room n-1.
    // p[i]   : probability a random walk from room 0 ever visits room i.
    std::vector<std::vector<int>> adj(n);
    std::vector<double> e(n, 0.0);
    std::vector<double> p(n, 0.0);

    for (int i = 0; i < m; ++i) {
        int s, t;
        std::cin >> s >> t;
        adj[s - 1].push_back(t - 1);  // O(m) build; preserves input edge order
    }

    // Backward induction (the graph is a DAG with edges s < t):
    // e[i] = 1 + average of e over all neighbours of i.
    for (int i = n - 2; i >= 0; --i) {
        double sum = 0.0;
        for (int v : adj[i]) {
            sum += e[v];
        }
        e[i] = sum / static_cast<double>(adj[i].size()) + 1.0;
    }

    // Forward pass: propagate visit probabilities from the start room.
    p[0] = 1.0;
    for (int i = 0; i < n - 1; ++i) {
        const double share = p[i] / static_cast<double>(adj[i].size());
        for (int v : adj[i]) {
            p[v] += share;
        }
    }

    // Consider blocking one door of every room with >= 2 doors; the gain of
    // blocking door (i -> v) is p[i] * (e[i] - 1 - avg of e over the
    // remaining doors). Keep the best gain.
    double bestGain = 0.0;
    for (int i = 0; i < n - 1; ++i) {
        const std::size_t degree = adj[i].size();
        if (degree < 2) {
            continue;  // blocking the only door would trap the walker
        }
        double sum = 0.0;
        for (int v : adj[i]) {
            sum += e[v];
        }
        for (int v : adj[i]) {
            const double gain =
                p[i] * (e[i] - 1.0 - (sum - e[v]) / static_cast<double>(degree - 1));
            if (gain > bestGain) {
                bestGain = gain;
            }
        }
    }

    std::cout << std::setprecision(20) << e[0] - bestGain << std::endl;
    return 0;
}
Center for Policy Research Working Paper No . 80 ESTIMATING HETEROGENEOUS PRODUCTION IN FISHERIES Stochastic production frontier models are used extensively in the agricultural and resource economics literature to estimate production functions and technical efficiency, as well as to guide policy. Traditionally these models assume that each agent’s production can be specified as a representative, homogeneous function. This paper proposes the synthesis of a latent class regression and an agricultural production frontier model to estimate technical efficiency while allowing for the possibility of production heterogeneity. We use this model to estimate a latent class production function and efficiency measures for vessels in the Northeast Atlantic herring fishery. Our results suggest that traditional measures of technical efficiency may be incorrect, if heterogeneity of agricultural production exists. Introduction Production function estimation is important to the development and analysis of a wide range of agricultural and environmental policies. It can be used to identify areas of improvement in agricultural processes, to measure the value of production or input technology changes, or to assess producer response to new regulation or opportunities. Recent studies have focused on the role of agricultural policy (Paul et al. 2000), the accessibility to credit markets, and the use of new agricultural practices in developing nations (Bayarsaihan and Coeilli 2003; Hazarika and Alwang 2003; Kudaligama and Yanagida 2000; Liu and Zhuang 2000 to cite a few). In many applications, production function estimation is supplemented by producer-level technical efficiency estimates, which are used to identify the extent to which producers select inputs to make effective use of fixed resources. 
In many agricultural applications, efficiency analyses help extension agents identify resources that might aid farmers and help policymakers target resources for subsidy (Khairo and Battese 2004). When a production technology is used to exploit a common pool resource (such as a fishery) accurate characterization is particularly important. In this case, production estimates are often used to guide management policies aimed as reducing pressure on the resource and ensuring its future viability. For example, buyback programs are used in many over-exploited fisheries to reduce the amount of capital being applied to a dwindling stock. Buybacks have also been utilized in rationalizing so-called "derby fisheries," where the fishing season is open only until a set quantity of fish is harvested, providing an incentive to overcapitalize and catch as much as possible before others catch the limit. Therefore, an accurate picture of a fleet’s production profile aids in identifying likely participants in buyback programs and in developing estimates of reservation prices that may be used to establish budgets for a successful program (Guyader et al. 2004). Other fisheries are managed by input restrictions, such as maximum days-at-sea, gear restrictions or limits on the quantity of fixed gear (e.g., traps). However, experience shows that fishermen often respond to these restrictions by substituting unrestricted inputs, in some cases using more variable inputs (e.g., increasing crew size), or by investing in more fixed capital (e.g., purchasing a larger engine to reduce steam time or using a larger trawl device). Estimating production input elasticities helps managers predict the extent to which new input restrictions are likely to result in decreased stock pressure, or simply a substitution of other unrestricted inputs (Kompas et al. 2004). With both types of management measures, policymakers can use production functions to
import { createElement } from "react"; import { fuzzyFilter, caseInsensitiveFilter } from "@react-md/utils"; import { FilterFunction } from "../types"; import { getResultId, getResultLabel, getResultValue, getFilterFunction, noFilter, } from "../utils"; const div = createElement("div", { className: "class-name" }, "Children"); const span = createElement("span", "Hello"); describe("getResultId", () => { it("should generate the correct id by incrementing the provided index by 1", () => { expect(getResultId("prefix", 0)).toBe("prefix-result-1"); expect(getResultId("something-again", 1)).toBe("something-again-result-2"); }); }); describe("getResultLabel", () => { it("should return the result itself if it is a string", () => { expect(getResultLabel("", "", "")).toBe(""); expect(getResultLabel("", "key", "")).toBe(""); expect(getResultLabel("label", "", "")).toBe("label"); expect(getResultLabel("label", "label", "")).toBe("label"); expect(getResultLabel("this is something", "label", "")).toBe( "this is something" ); }); it("should return the labelKey's value property if the result is an object", () => { expect(getResultLabel({ label: "Thing" }, "label", "")).toBe("Thing"); expect(getResultLabel({ label: span }, "label", "")).toBe(span); expect(getResultLabel({ label: div, thing: "Thing" }, "label", "")).toBe( div ); expect(getResultLabel({ label: div, thing: "Thing" }, "thing", "")).toBe( "Thing" ); }); it("should return null if the labelKey is undefined", () => { expect(getResultLabel({}, "label", "")).toBe(null); expect(getResultLabel({ thing: "Thing" }, "label", "")).toBe(null); expect(getResultLabel({ label: "" }, "label", "")).toBe(""); expect(getResultLabel({ label: 0 }, "label", "")).toBe(0); expect(getResultLabel({ label: null }, "label", "")).toBe(null); expect(getResultLabel({ label: false }, "label", "")).toBe(false); expect(getResultLabel({ label: true }, "label", "")).toBe(true); }); it("should return the children property if it true-ish", () => { 
expect(getResultLabel({ children: div, label: "Thing" }, "label", "")).toBe( div ); expect(getResultLabel({ children: span, label: div }, "label", "")).toBe( span ); expect(getResultLabel({ children: null, label: span }, "label", "")).toBe( span ); }); }); describe("getResultValue", () => { it("should return the result itself if it is a string", () => { expect(getResultValue("", "")).toBe(""); expect(getResultValue("", "key")).toBe(""); expect(getResultValue("value", "")).toBe("value"); expect(getResultValue("value", "value")).toBe("value"); expect(getResultValue("this is something", "value")).toBe( "this is something" ); }); it("should return the valueKey on the result object stringified", () => { expect(getResultValue({ value: "Bob" }, "value")).toBe("Bob"); expect(getResultValue({ name: "Colorado" }, "name")).toBe("Colorado"); expect(getResultValue({ name: "Colorado", value: "CO" }, "name")).toBe( "Colorado" ); expect(getResultValue({ a: 0 }, "a")).toBe("0"); expect(getResultValue({ a: 1 }, "a")).toBe("1"); }); it("should log an error if the valueKey does not return a number or a string", () => { const error = "Unable to extract a result value string"; expect(() => getResultValue({}, "")).toThrowError(error); expect(() => getResultValue({}, "label")).toThrowError(error); expect(() => getResultValue({ label: null }, "label")).toThrowError(error); expect(() => getResultValue({ label: div }, "label")).toThrowError(error); expect(() => getResultValue({ label: span }, "label")).toThrowError(error); expect(() => getResultValue({ label: [] }, "label")).toThrowError(error); }); }); describe("getFilterFunction", () => { it('should return the fuzzyFilter function if the filter is the string "fuzzy"', () => { expect(getFilterFunction("fuzzy")).toBe(fuzzyFilter); }); it('should return the caseInsensitiveFilter if the filter is the string "case-insensitive"', () => { expect(getFilterFunction("case-insensitive")).toBe(caseInsensitiveFilter); }); it('should return the noFilter 
function if the filter is the string "none"', () => { expect(getFilterFunction("none")).toBe(noFilter); }); it("should throw an error for any other strings for non-typescript users", () => { // @ts-expect-error expect(() => getFilterFunction("")).toThrow( 'Invalid filter function: "". Supported values are: "fuzzy", "case-insenitive", "none", or a custom function' ); // @ts-expect-error expect(() => getFilterFunction("custom")).toThrow( 'Invalid filter function: "custom". Supported values are: "fuzzy", "case-insenitive", "none", or a custom function' ); }); it("should returnt he noFilter result for an invalid fulter function if the NODE_ENV is not production", () => { const env = process.env.NODE_ENV; process.env.NODE_ENV = "production"; // @ts-expect-error expect(getFilterFunction("")).toBe(noFilter); // @ts-expect-error expect(getFilterFunction("custom")).toBe(noFilter); process.env.NODE_ENV = env; }); it("should return the function itself if it is a function", () => { expect(getFilterFunction(fuzzyFilter)).toBe(fuzzyFilter); expect(getFilterFunction(caseInsensitiveFilter)).toBe( caseInsensitiveFilter ); expect(getFilterFunction(noFilter)).toBe(noFilter); const custom: FilterFunction = (value, data) => data.filter((d) => !!d && getResultValue(d, "thing").startsWith(value)); expect(getFilterFunction(custom)).toBe(custom); }); });
def modbb_disk_r(cls, name='modbb_disk_r',
                 wavelengths=cfg.models['default_wave'],
                 temperatures=10**np.arange(0, 3, 0.1),
                 lam0=10**np.arange(1, 3, 0.1),
                 beta=np.arange(0, 3, 0.1),
                 write=False, overwrite=False):
    """Build a modified-blackbody disk model grid.

    Evaluates a modified blackbody over every (temperature, lam0, beta)
    combination on the given wavelength grid, storing the surface
    brightness in a 4-d ``fnujy_sr`` cube indexed as
    (wavelength, temperature, lam0, beta). Parameters are stored in log
    space (except beta). Optionally writes the grid to
    ``cfg.model_loc[name] + name + '.fits'``; refuses to clobber an
    existing file unless ``overwrite`` is set.
    """
    out_file = cfg.model_loc[name] + name + '.fits'

    # Refuse to write over an existing grid unless explicitly allowed.
    if write and overwrite == False:
        if os.path.exists(out_file):
            raise utils.SdfError("{} exists, will not overwrite".format(out_file))

    self = cls()
    self.fnujy_sr = np.zeros((len(wavelengths), len(temperatures),
                              len(lam0), len(beta)), dtype=float)

    # Fill the cube one spectrum at a time; name/wavelength come from the
    # (identical) last spectrum evaluated.
    for t_idx, temp in enumerate(temperatures):
        for l_idx, l0 in enumerate(lam0):
            for b_idx, b in enumerate(beta):
                m = spectrum.ModelSpectrum.bnu_wave_micron(wavelengths, temp,
                                                           lam0=l0, beta=b)
                self.fnujy_sr[:, t_idx, l_idx, b_idx] = m.fnujy_sr
                self.name = m.name
                self.wavelength = m.wavelength

    self.parameters = ['log_Temp', 'log_lam0', 'beta']
    self.param_values = {
        'log_Temp': np.log10(temperatures),
        'log_lam0': np.log10(lam0),
        'beta': beta,
    }

    if write:
        self.write_model(name, overwrite=overwrite)

    return self
<reponame>FeodorVolguine/Gogaman<gh_stars>0
#pragma once

#include "Gogaman/Core/CRTP.h"

namespace Gogaman
{
	namespace RHI
	{
		// CRTP base describing a texture sampler's filtering and wrap state.
		// Backend implementations derive from this and supply the
		// GetNative* translations to their API's enum values.
		// NOTE(review): uses uint8_t without a visible <cstdint> include --
		// presumably pulled in via CRTP.h; confirm.
		template<typename ImplementationType>
		class AbstractSampler : public CRTP<ImplementationType, AbstractSampler>
		{
		public:
			// Texture filtering quality, roughly cheapest to most expensive.
			enum class Interpolation : uint8_t
			{
				Point,
				Bilinear,
				Trilinear,
				Anisotropic
			};

			// Behaviour for texture coordinates outside the [0, 1] range.
			enum class AddressMode : uint8_t
			{
				Repeat,
				MirroredRepeat,
				Clamp
			};
		public:
			// Copyable: no. Movable: yes. State is immutable after construction.
			AbstractSampler(const AbstractSampler &) = delete;
			AbstractSampler(AbstractSampler &&) = default;

			AbstractSampler &operator=(const AbstractSampler &) = delete;
			AbstractSampler &operator=(AbstractSampler &&) = default;

			inline constexpr Interpolation GetInterpolation() const { return m_Interpolation; }
			inline constexpr AddressMode GetAddressMode() const { return m_AddressMode; }

			// Static dispatch to the backend's enum translation.
			static inline constexpr auto GetNativeInterpolation(const Interpolation interpolation) { return ImplementationType::GetNativeInterpolation(interpolation); }
			static inline constexpr auto GetNativeAddressMode(const AddressMode addressMode) { return ImplementationType::GetNativeAddressMode(addressMode); }
		private:
			// Construction/destruction restricted to the deriving
			// implementation via the friend declaration below (CRTP idiom).
			AbstractSampler(const Interpolation interpolation = Interpolation::Point, const AddressMode addressMode = AddressMode::Repeat)
				: m_Interpolation(interpolation), m_AddressMode(addressMode)
			{}

			~AbstractSampler() = default;
		private:
			Interpolation m_Interpolation;
			AddressMode   m_AddressMode;
		private:
			friend ImplementationType;
		};
	}
}
/**
 * Test that resources that generate heuristics are instrumented correctly
 * @throws Exception
 */
@Test
public void testMBeanHeuristic () throws Exception {
    // An XAResource rigged to fail during the commit phase, which should
    // leave the transaction in a heuristic state.
    FailureXAResource failureXAResource = new FailureXAResource(FailureXAResource.FailLocation.commit);
    // Drive a fresh transaction through the failing resource and assert the
    // heuristic is exposed via the object-store MBean (osb).
    getHeuristicMBean(osb, new TransactionImple(), failureXAResource);
}
/** * Catalog entity. @author MyEclipse Persistence Tools */ public class Catalog implements java.io.Serializable { // Fields private Integer catalogid; private String catalogname; private Set flowers = new HashSet(0); // Constructors /** default constructor */ public Catalog() { } /** full constructor */ public Catalog(String catalogname, Set flowers) { this.catalogname = catalogname; this.flowers = flowers; } // Property accessors public Integer getCatalogid() { return this.catalogid; } public void setCatalogid(Integer catalogid) { this.catalogid = catalogid; } public String getCatalogname() { return this.catalogname; } public void setCatalogname(String catalogname) { this.catalogname = catalogname; } public Set getFlowers() { return this.flowers; } public void setFlowers(Set flowers) { this.flowers = flowers; } }
Global consultancy EY and New York blockchain startup Paxos have announced that they are co-developing new technology solutions for the gold market. Aimed at providing new tools for the clearing and settlement of gold transactions, the partnership will see the two firms leveraging Paxos’ blockchain-based network, Bankchain, as a basis for the services. David Williams, EY’s partner for capital markets innovation, said in a statement: “We believe that the future of capital markets requires ever stronger and more innovative ecosystems, and expect this to be a key early example of the type of collaboration between FinTech firms and existing market participants that will truly transform the marketplace.” The idea of using the technology to reshape how gold markets function has gained traction in the past year and a half. Paxos, which rebranded to focus on blockchain solutions in tandem with its bitcoin exchange, announced this past summer that it was working with Euroclear on a similar project. The startup has moved in recent months to reposition itself amid growing interest in blockchain solutions among the world’s banks and corporations. Established names in the capital market sector are also looking at the tech for use in the gold trade. Last month, the brains behind the exchange that inspired the Michael Lewis bestseller Flash Boys announced that they were looking to blockchain as they build a next-generation gold exchange. Image via Shutterstock Disclosure: CoinDesk is a subsidiary of Digital Currency Group, which has an ownership stake in itBit.
<reponame>Atos1337/HSHelper
package ru.hsHelper.api.controllers;

import ru.hsHelper.api.entities.Group;
import ru.hsHelper.api.entities.UserGroupRole;
import ru.hsHelper.api.repositories.CourseRepository;
import ru.hsHelper.api.repositories.GroupRepository;
import ru.hsHelper.api.repositories.PartitionRepository;
import ru.hsHelper.api.repositories.UserGroupRoleRepository;
import ru.hsHelper.api.requests.create.GroupCreateRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import javax.validation.Valid;

/**
 * REST controller for CRUD operations on {@link Group} entities, mounted at
 * {@code /groups}. Deleting a group also removes its dependent partitions,
 * courses and user-group-role links.
 */
@RestController
@RequestMapping("/groups")
public class GroupController {
    private final GroupRepository groupRepository;
    private final UserGroupRoleRepository userGroupRoleRepository;
    private final PartitionRepository partitionRepository;
    private final CourseRepository courseRepository;

    @Autowired
    public GroupController(GroupRepository groupRepository, UserGroupRoleRepository userGroupRoleRepository,
                           PartitionRepository partitionRepository, CourseRepository courseRepository) {
        this.groupRepository = groupRepository;
        this.userGroupRoleRepository = userGroupRoleRepository;
        this.partitionRepository = partitionRepository;
        this.courseRepository = courseRepository;
    }

    /** Creates a new group from the validated request body. */
    @PostMapping ({"", "/"})
    public Group createGroup(@RequestBody @Valid GroupCreateRequest request) {
        return groupRepository.save(new Group(request.getName()));
    }

    /**
     * Returns the group with the given id.
     * NOTE(review): this GET handler is (mis)named {@code createGroup},
     * overloading the POST handler above -- consider renaming to
     * {@code getGroup} in a follow-up (rename not applied here to keep the
     * public interface unchanged).
     *
     * @throws IllegalArgumentException if no group exists with the id
     */
    @GetMapping("/{id}")
    public Group createGroup(@PathVariable long id) {
        return groupRepository.findById(id).orElseThrow(() ->
                new IllegalArgumentException("No group with such id"));
    }

    /**
     * Deletes the group and all of its dependent data: partitions, courses,
     * and user-group-role links (which are also detached from each user
     * before deletion).
     *
     * @throws IllegalArgumentException if no group exists with the id
     */
    @DeleteMapping("/{id}")
    public void deleteGroupById(@PathVariable long id) {
        Group group = groupRepository.findById(id).orElseThrow(() ->
                new IllegalArgumentException("No group with such id"));
        partitionRepository.deleteByGroup(group);
        courseRepository.deleteByGroup(group);
        // Detach each role from its user before removing the roles, so the
        // in-memory user entities do not keep stale references.
        for (UserGroupRole ugr : group.getUserGroupRoleSet()) {
            ugr.getUser().getGroups().remove(ugr);
        }
        userGroupRoleRepository.deleteByGroup(group);
        groupRepository.deleteById(id);
    }
}
How congenital Zika virus impacted my child’s functioning and disability: a Brazilian qualitative study guided by the ICF Introduction The Zika virus outbreak in Brazil has had devastating social, medical and financial consequences for families. Both researchers and clinicians are measuring longer-term outcomes to understand the impact of the Zika on child development, functioning and disability. Outcomes and tools used to measure them are very varied and we are unclear how meaningful they are to families and children. This study aimed to identify the parents’ perspectives on relevant areas of functioning and disability that should be included as outcome measures for children with congenital Zika syndrome (CZS), as guided by the International Classification of Functioning, Disability and Health (ICF). Methods This qualitative study included parents or caregivers of children aged 0–5 years with confirmed CZS from two states in northeastern Brazil. Interviews were conducted using focus groups. Content mapping followed the WHO’s ICF linking rules. Three raters analysed the content using NVivo V.11. Results Thirty-two caregivers participated in six focus groups, 88% were mothers with an average age of 30 years. Most children were male (59%) and all were level V (severe) on the Gross Motor Function Classification System (GMFCS). Overall, 825 themes were mapped to 36 ICF categories. Although parents mentioned areas across all ICF domains, they reported that areas of mobility, eating and recreation were most relevant for them. In addition, environmental factors were highly identified as barriers, specifically services, policies and access to assistive devices. The most predominant facilitators within the environment were; immediate family support, kind relationships with therapists and support from the extended family. 
Conclusions Although parents emphasised issues related to mobility, their greatest concerns involved environmental factors, such as access and quality of health and social services, systems and policies. These results reinforce the importance of including parents’ perspectives when selecting or developing outcome measures for CZS. This article benefits from the qualitative research approach and the in-depth responses that were gained from participants. This study has the potential to provide important evidence toward understanding the use of outcomes measures from a caregiver perspective. However, the authors could further strengthen this manuscript by a more in-depth discussion of the study population and participants, detailed methods and analysis and a critical review of use of language to effectively communicate their research findings. My specific revision suggestions are outlined below: Please review language and sentence structure, for example: 1. P 3 Line 10 -14 2. P4 Lines 49 -60 would benefit from being two sentences to assist the reader 3. P5 Line 5 -6 please include more information about COMET in the main textsuch as the institutions that are involved and main aim of the trials 4. P5 line 14 -clarification of what is meant by 'in this context' is required for the reader to understand whether this relates to the COMET trials, this study or the context of parental participation. 5. P5 Line 36 -the title 'Zika Cos and congential infections:' is unclear. Consider clarifiying 'Cos'my understanding is that this may be an acronym that relates to 'Core Outcome Set' 6. P 9 Line 35 -40 7. P11 Line 18 -23 8. 
P14 Line 16 The study would be strengthened through using the Consolidated Criteria for Reporting Qualitative Studies (COREQ) in the study textalthough the study flow chart is detailed in Figure 1 and the study notes that the COREQ guidelines are considered (P 7 Line 25), important additional details are required in the methods in order to assess the rigor of analysis and credibility of the findings, and these should be included in the text of the paper, such as: 9. Page 6 Line 41 -48: Who were the participants purposively selected by? What was the method of approach? What are the characteristics of the respective rehabilitation services in Rio Grande do Norte and Paraíba? P7 Line 29where was the sociodemographic questionnaire undertaken? (Q9-16 of COREQ guidelines) L14, was only one physiotherapist involved or was this the primary physiotherapist of the child? When did the physiotherapist classify the child's motor abilities with GMFM? Detail of research team and reflexivity (Q1-8 of COREQ guidelines) is required. How many research assistants? 10. Was the questionnaire piloted prior to use? 11. Greater explanation of 'fun fishing' is required P7 Line 57eg what it is, what the purpose of it is and when it was encouraged to be used. 12. P8 were the interviews in Brazilian Portuguese? If so, were they translated? 13. P9 subheading on line 15consider including this as a sentence to clarify exactly what patients were not involved in. With regards the results, 14. P 9 Line 42 it is unclear why personal factors were not considered given the use of the ICF. Please include justification of this. 15. P11 Line 7 -16 The text notes that a father desired more access to rehabilitation services, however the illustrative quote is from Mother 20. 16.Also, the discussion section should not include results but instead discuss them according to the literature on the subject, which is partially done. There may be other relevant studies that could enrich the discussion of the results. 
17.Please provide the ethical approval numbers/IDs of ethical approvals on p6 line 15-28 18. The abstract would benefit from a revision to provide succinct detail of methods and results. REVIEWER Ashley Walker Georgia Southern University USA REVIEW RETURNED 17-Jun-2020 GENERAL COMMENTS A nice application of the ICF to help contextualize the perspectives of caregivers. I would recommend one quick review to identify minor formatting issues in the manuscript. Raphael Ximenes University Health Network, Canada REVIEW RETURNED 26-Jul-2020 GENERAL COMMENTS This study aims to understand the views and perceptions of parents with regard to the needs of their children with CZS framed within the perspective of the ICF. Even during the COVID-19 pandemic, it is very important that we do not forget other diseases like Zika. This work shows the relevance of continuing research on Zika and the impact caused by the 2015/16 epidemic in South American countries, mainly in Brazil. I recommend publishing the manuscript with minor adjustments. Page 6: "The inclusion criteria included being; parents/caregivers of children with confirmed diagnosis of CZS by polymerase chain reaction (PCR) or presumed diagnosis based on obstetric ultrasound, transfontanellar ultrasound, computed tomography (CT) and magnetic resonance imaging (MRI), who were from zero to six years old and who lived in the area covered by the study (states of Rio Grande do Norte and Paraíba) and who attended the respective rehabilitation services." If the Zika cases happened in 2015, why do the inclusion criteria include children up to 6 years of age? Same question about the Zika Focus Group, in the Supplementary material 1. Why did you include children from 0-10 years and not 0-5 years old only? Page 7: Please, make sure all acronyms are defined when you use it at first. You only define CP on page 16 but you're using it on page 7. 
"This is an age-specific scheme designed for children with CP based on five levels" Same thing for COREQ on page 7. Consolidated criteria for reporting qualitative studies (COREQ). Page 11 "In the following verbatim, it is possible to observe the father's desire to have access to a rehabilitation service in his own city." "Also that in Alagoas state has been the same possibilities that here in Campina, to don´t need to move from one place to other, it´s so complicated spend 3 months here, searching treatment that there it should have. I don´t know they can´t leave this treatment there, to every states have, for us don´t need to move a lot to do a treatment. For them could at least sit, maintain their trunk, catch and walk." Mother 20. You wrote "father's desire" but the sentence cites Mother 20. Is that an error or are you using Mother as a general reference? If yes, why not to use parent/caregivers? Page 29-Fix typo -Participants were invited via phone call or in person Michel Counotte Institute of Social and Preventive Medicine, University of Bern, Bern, Switzerland REVIEW RETURNED 27-Jul-2020 GENERAL COMMENTS Campos et al describe the need for a patient/care-taker centred approach to improve the core outcome sets for studies conducted in children growing up with congenital zika syndrome. They conduct focus group interviews to identify their perspectives on relevant areas of functioning and disability. The work is important and conducted well, however the reporting and discussion needs clarification. Abstract: * the authors start with a percentage, could they first provide the total number of included participants? * the abbreviation GMFCS is not explained Methods: * Can the authors be more explicit about the enrolment, how many people were approaches, how many refused? * Given the age distribution of the children reported in figure 2, it seems this study was conducted some time ago. Can the authors be more explicit when the focus groups were conducted. 
Results: * Since a main objective seems to be to identify priorities, it is unclear how 'frequency' translates into 'priority'. Does the ranking in table 1 reflect priority, or frequency of mentioning? And is mentioning this the same as having difficulties within this category? Can the authors be more clear/explicit about this (which are barriers and which are facilitators?)? * The English translation of some of the verbatim text quoting the parents/caretakers is somewhat confusing, would the authors consider improving these translations (possibly back/forth translation to ensure the meaning is not lost) * Figure 2+3: Even though I appreciate the effort the authors put in Figure 2+3, it reports an overview of the characteristics of the study population. A structured table would be more appropriate, reporting the number, the denominator and the percentages. Rounded percentages are sufficient for these small numbers (n=32). Discussion: * Would the authors consider starting the discussion with the most important findings? * Which factors do the authors recommend to be included as part of the COS? * This is a relatively small sample, the author do hint briefly about the representativeness, can the authors discuss this in more detail, how their sample might be generalized? * Can the authors comment on the potential shift of priorities as the children age? Now they all seem to be still young, care needs/priorities will likely shift in the future. VERSION 1 -AUTHOR RESPONSE Reviewer: 1 Reviewer Name: Tracey Smythe Institution and Country: London School of Hygiene & Tropical Medicine, UK. Dear Dr Smythe, thank you for all suggestions, they made possible an important improvement of our paper. P 3 Line 10 -14 (Please review language and sentence structure) Answer: The second author, native in English, carried out an extensive review of the writing style. 
P4 Lines 49 -60 would benefit from being two sentences to assist the reader (Please review language and sentence structure) Answer: We made this change. P5 Line 5 -6 please include more information about COMET in the main textsuch as the institutions that are involved and main aim of the trials (Please review language and sentence structure) Answer: We made this change by providing this information. P5 line 14 -clarification of what is meant by 'in this context' is required for the reader to understand whether this relates to the COMET trials, this study or the context of parental participation. (Please review language and sentence structure) Answer: We made this change by providing this information. P5 Line 36 -the title 'Zika Cos and congential infections:' is unclear. Consider clarifiying 'Cos'my understanding is that this may be an acronym that relates to 'Core Outcome Set' (Please review language and sentence structure) Answer: We made this change by providing this information. P 9 Line 35 -40 (Please review language and sentence structure) Answer: We edit the text. P11 Line 18 -23 (Please review language and sentence structure) Answer: We edit the text. P14 Line 16 (Please review language and sentence structure) Answer: We edit the text. Page 6 Line 41 -48: Who were the participants purposively selected by? What was the method of approach? What are the characteristics of the respective rehabilitation services in Rio Grande do Norte and Paraíba? Answer: We edit the text, providing this information (participants with important common characteristics were purposively selected by members of the research team, through active search by phone call or direct personal approach. These rehabilitation centers are public services linked to research institutions and higher education in the region). P7 Line 29where was the sociodemographic questionnaire undertaken? 
Answer: We edit the text, providing this information (all parents responded to a sociodemographic questionnaire, applied by the researchers before the realization of the focus groups, at rehabilitation services). L14, was only one physiotherapist involved or was this the primary physiotherapist of the child? When did the physiotherapist classify the child's motor abilities with GMFM? Detail of research team and reflexivity (Q1-8 of COREQ guidelines) is required. How many research assistants? Answer: We edit the text, providing this information. Was the questionnaire piloted prior to use? Answer: We edit the text, providing this information (this questionnaire was applied to the first group and observed whether the questions were well understood by the respondents. Two research assistants were involved in the data collection). Greater explanation of 'fun fishing' is required P7 Line 57eg what it is, what the purpose of it is and when it was encouraged to be used. Answer: We edit the text, providing this information (in order to thank the parents for participating in the study. They were encouraged by the professional team to use it as one therapeutic toy for stimulation of their children at home and promote fun). P8 were the interviews in Brazilian Portuguese? If so, were they translated? Answer: We edit the text, providing this information. P9 subheading on line 15consider including this as a sentence to clarify exactly what patients were not involved in Answer: We edit the text, providing this information (families were not involved in the design, recruitment or conduct of the study. However, the results will be presented to families, professionals and managers at each participating center, with a view to discussing strategies to meet the needs of children with CZS and their families). P 9 Line 42 it is unclear why personal factors were not considered given the use of the ICF. Please include justification of this. 
Answer: We edit the text, providing this information (as personal factors were expressed only minimally, they are not shown in the Figure). P11 Line 7 -16 The text notes that a father desired more access to rehabilitation services, however the illustrative quote is from Mother 20. Answer: this was a mistake, thanks for the observation, it was corrected in the text. Also, the discussion section should not include results but instead discuss them according to the literature on the subject, which is partially done. There may be other relevant studies that could enrich the discussion of the results. Answer: we edited the text of the discussion and included new and relevant studies. Please provide the ethical approval numbers/IDs of ethical approvals on p6 line 15-28 Answer: we edit the text, providing this information. The abstract would benefit from a revision to provide succinct detail of methods and results. Answer: we edit some sentences of the abstract. Reviewer: 2 Reviewer Name: Ashley Walker Institution and Country: Georgia Southern University USA I would recommend one quick review to identify minor formatting issues in the manuscript. Dear Dr Walker, thank you, we edit the style and format of the paper. Reviewer: 3 Reviewer Name: Raphael Ximenes Institution and Country: University Health Network, Canada Dear Dr Ximenes, thank you very much indeed, we believe that the paper is much better with the suggested changes. Page 6: "The inclusion criteria included being; parents/caregivers of children with confirmed diagnosis of CZS by polymerase chain reaction (PCR) or presumed diagnosis based on obstetric ultrasound, transfontanellar ultrasound, computed tomography (CT) and magnetic resonance imaging (MRI), who were from zero to six years old and who lived in the area covered by the study (states of Rio Grande do Norte and Paraíba) and who attended the respective rehabilitation services." 
If the Zika cases happened in 2015, why do the inclusion criteria include children up to 6 years of age? Same question about the Zika Focus Group, in the Supplementary material 1. Why did you include children from 0-10 years and not 0-5 years old only? Answer: thank you very much for realizing this, we adjust in all sessions to age 0-5 years. Page 7: Please, make sure all acronyms are defined when you use it at first. You only define CP on page 16 but you're using it on page 7. "This is an age-specific scheme designed for children with CP based on five levels" Same thing for COREQ on page 7. Consolidated criteria for reporting qualitative studies (COREQ). Answer: we made these adjustments to specific parts of the text. Page 11 "In the following verbatim, it is possible to observe the father's desire to have access to a rehabilitation service in his own city." "Also that in Alagoas state has been the same possibilities that here in Campina, to don´t need to move from one place to other, it´s so complicated spend 3 months here, searching treatment that there it should have. I don´t know they can´t leave this treatment there, to every states have, for us don´t need to move a lot to do a treatment. For them could at least sit, maintain their trunk, catch and walk." Mother 20. You wrote "father's desire" but the sentence cites Mother 20. Is that an error or are you using Mother as a general reference? If yes, why not to use parent/caregivers? Answer: this was a mistake, thanks for the observation, it was corrected in the text. Page 29-Fix typo -Participants were invited via phone call or in person … Answer: we edit the text. Reviewer: 4 Reviewer Name: Michel Counotte Institution and Country: Institute of Social and Preventive Medicine, University of Bern, Bern, Switzerland Dear Dr Counotte, we were very happy with your suggestions, thank you very much for contributing to improve our article. 
the authors start with a percentage, could they first provide the total number of included participants? Answer: we edit the text. the abbreviation GMFCS is not explained Answer: we edit the text (Gross Motor Function Classification System -GMFCS). Can the authors be more explicit about the enrolment, how many people were approaches, how many refused? Answer: yes, we edit the text, providing this information (of the 36 parents identified and invited to the study, 32 agreed to participate). Given the age distribution of the children reported in figure 2, it seems this study was conducted some time ago. Can the authors be more explicit when the focus groups were conducted. Answer: we edit the text, providing this information (the focus groups were carried out between September / 2018 and January / 2019). * Since a main objective seems to be to identify priorities, it is unclear how 'frequency' translates into 'priority'. Does the ranking in table 1 reflect priority, or frequency of mentioning? And is mentioning this the same as having difficulties within this category? Can the authors be more clear/explicit about this (which are barriers and which are facilitators?)? Answer: we provide this information in the methods and we also make it clear in the results which aspects were considered barriers or facilitators. The English translation of some of the verbatim text quoting the parents/caretakers is somewhat confusing, would the authors consider improving these translations (possibly back/forth translation to ensure the meaning is not lost) Answer: we took great care in that regard. The content of the focus groups in Portuguese was translated into English by a Brazilian pediatrician who at the time of the study was doing research at the University of Liverpool with Dr Gladstone. After this translation, the authors held a skype meeting to discuss with him possible doubts, to ensure that the participants' expressions were not lost. 
Upon your request, the second author, a native of the English language, made an extensive review of the citations. Figure 2+3: Even though I appreciate the effort the authors put in Figure 2+3, it reports an overview of the characteristics of the study population. A structured table would be more appropriate, reporting the number, the denominator and the percentages. Rounded percentages are sufficient for these small numbers (n=32). Answer: We appreciate your suggestion, but we would really like to keep the presentation of the data with this visual information, as this is an innovative and attractive way of presenting our sample data. This trend has been growing and holds more attention from the reader. I hope you understand. Would the authors consider starting the discussion with the most important findings? Answer: we edit the text, providing this information. Which factors do the authors recommend to be included as part of the COS? Answer: we edit the text, providing this information. This is a relatively small sample, the author do hint briefly about the representativeness, can the authors discuss this in more detail, how their sample might be generalized? VERSION 2 -AUTHOR RESPONSE Reviewer 1 previously requested that more information about COMET is presented in the main textsuch as the institutions that are involved and main aim of the trials (Please review language and sentence structure). This revision does not appear to have been carried out. Answer: Corrections in the language were made by Dr. Gladstone and the requested information was added to the text. The Core Outcomes Measures in Effectiveness Trials (COMET) is an initiative aimed at identifying and creating a core set of outcomes for any clinical health situation. 
This is often conducted through a process of systematic reviews of outcomes measured, consensus work as well as the involvement of families who support the development of these outcomes in order to ensure that researchers consider outcomes that are most relevant and appropriate to the patient's needs Reviewer: 4 Reviewer Name: Michel Counotte Institution and Country: Institute of Social and Preventive Medicine (ISPM), Bern, Switzerland The English translation of some of the verbatim text quoting the parents/caretakers is somewhat confusing, would the authors consider improving these translations (possibly back/forth translation to ensure the meaning is not lost) Answer: One of the English authors (Dr. Gladstone) made changes to the text and corrected the English. Reviewer: 3 Reviewer Name: Raphael Ximenes Institution and Country: University Health Network, Canada Please leave your comments for the authors below The authors made the changes suggested by me in my first review. That way I don't have any more editing/correction recommendations.
<reponame>gerdreiss/sdkman-ui use std::borrow::Cow; use std::collections::HashMap; use eframe::egui::*; use image::GenericImageView; use api::local::*; use api::remote::*; const PADDING: f32 = 8.0; const WHITE: Color32 = Color32::from_rgb(255, 255, 255); const CYAN: Color32 = Color32::from_rgb(0, 255, 255); #[derive(PartialEq)] pub struct Logo { pub size: (usize, usize), pub pixels: Vec<Color32>, } #[derive(Debug, Clone, PartialEq)] pub struct Candidate { name: String, default_version: String, url: String, description: String, installation_instruction: String, versions: Vec<RemoteVersion>, } impl Candidate { fn from_model(remote_candidate: &RemoteCandidate) -> Candidate { Candidate { name: remote_candidate.name().clone(), default_version: remote_candidate.default_version().clone(), url: remote_candidate.homepage().clone(), description: remote_candidate.description().clone(), installation_instruction: format!("$ sdk install {}", remote_candidate.binary_name()), versions: remote_candidate.versions().to_vec(), } } fn to_model(&self) -> RemoteCandidate { RemoteCandidate::new( self.name.clone(), self.installation_instruction .split_whitespace() .last() .unwrap_or(&self.name.to_lowercase()) .to_owned(), self.description.clone(), self.url.clone(), self.default_version.clone(), ) } } pub struct SdkmanApp { app_name: &'static str, app_heading: &'static str, logo: Logo, candidates: Vec<Candidate>, local_candidates: Vec<LocalCandidate>, selected_candidate: Option<Candidate>, candidate_search_dialog: bool, candidate_search_term: String, error_message: Option<String>, } impl Default for SdkmanApp { fn default() -> Self { let image = image::load_from_memory(include_bytes!("../assets/logo.png")).unwrap(); let size = (image.width() as usize, image.height() as usize); let pixels: Vec<Color32> = image .to_rgba8() .into_vec() .chunks(4) .map(|p| Color32::from_rgba_unmultiplied(p[0], p[1], p[2], p[3])) .collect(); Self { app_name: "sdkman-ui", app_heading: "sdkman candidates", logo: 
Logo { size, pixels }, candidates: Vec::new(), local_candidates: Vec::new(), selected_candidate: None, candidate_search_dialog: false, candidate_search_term: String::default(), error_message: None, } } } impl SdkmanApp { pub fn new( remote_candidates: &[RemoteCandidate], local_candidates: &[LocalCandidate], ) -> SdkmanApp { SdkmanApp { candidates: remote_candidates .iter() .map(|remote_candidate| Candidate::from_model(remote_candidate)) .collect(), local_candidates: local_candidates.to_vec(), ..Default::default() } } pub fn app_name(&self) -> &str { self.app_name } pub fn configure_fonts(&self, ctx: &CtxRef) { let mut font_def = FontDefinitions::default(); font_def.font_data.insert( "MesloLGS".to_string(), Cow::Borrowed(include_bytes!("../assets/MesloLGS_NF_Regular.ttf")), ); font_def.family_and_size.insert( eframe::egui::TextStyle::Heading, (FontFamily::Proportional, 35.), ); font_def.family_and_size.insert( eframe::egui::TextStyle::Body, (FontFamily::Proportional, 20.), ); font_def .fonts_for_family .get_mut(&FontFamily::Proportional) .unwrap() .insert(0, "MesloLGS".to_string()); ctx.set_fonts(font_def); } pub(crate) fn render_top_panel(&mut self, ctx: &CtxRef, frame: &mut eframe::epi::Frame<'_>) { let Self { app_name: _, app_heading, logo, candidates, local_candidates, selected_candidate, candidate_search_dialog, candidate_search_term: _, error_message, } = self; // define a TopBottomPanel widget TopBottomPanel::top("top_panel").show(ctx, |ui| { ui.add_space(10.); menu::bar(ui, |ui| { // logo ui.with_layout(Layout::left_to_right(), |ui| { let texture_id = frame .tex_allocator() .alloc_srgba_premultiplied(logo.size, &logo.pixels); if ui .add(Image::new(texture_id, [56., 56.]).sense(Sense::click())) .on_hover_ui(|ui| { ui.ctx().output().cursor_icon = CursorIcon::PointingHand; show_tooltip_text(ui.ctx(), Id::new("sdkman-logo"), "Go to sdkman.io"); }) .clicked() { let modifiers = ui.ctx().input().modifiers; ui.ctx().output().open_url = Some(output::OpenUrl { url: 
"https://sdkman.io/".to_owned(), new_tab: modifiers.any(), }); } }); // Candidates ui.vertical_centered(|ui| { ui.heading(*app_heading); }); // controls ui.with_layout(Layout::right_to_left(), |ui| { ui.add_space(10.); // Close button if ui .add(Button::new("❌").text_style(TextStyle::Body)) .on_hover_text("Close") .clicked() { frame.quit(); } // Refresh button if ui .add(Button::new("🔄").text_style(TextStyle::Body)) .on_hover_text("Refresh") .clicked() { match fetch_remote_candidates() { Ok(models) => { let cands: Vec<Candidate> = models .iter() .map(|model| Candidate::from_model(model)) .collect(); *candidates = cands; *selected_candidate = None; } Err(e) => { *selected_candidate = None; *error_message = Some(format!( "Refreshing the list of candidates failed with:\n{}", e )); } } } // Search button if ui .add(Button::new("🔎").text_style(TextStyle::Body)) .on_hover_text("Search") .clicked() { *candidate_search_dialog = true; } // Display installed button if ui .add(Button::new("I").text_style(TextStyle::Body)) .on_hover_text("Search") .clicked() { let local_binary_names: Vec<&String> = local_candidates.iter().map(|lc| lc.binary_name()).collect(); *candidates = candidates .iter() .filter(|candidate| { local_binary_names.contains( &&candidate .installation_instruction .split_whitespace() .last() .unwrap_or_default() .to_string(), ) }) .cloned() .collect(); *selected_candidate = None; } }); }); ui.add_space(10.); }); } fn render_error(ctx: &CtxRef, message: &str) { Window::new("Search").show(ctx, |ui| { ui.add_space(PADDING); ui.label(message); ui.add_space(PADDING); }); } pub fn render_candidates(&mut self, ctx: &CtxRef, ui: &mut Ui) { let Self { app_name: _, app_heading: _, logo: _, candidates, local_candidates, selected_candidate, candidate_search_dialog, candidate_search_term, error_message, } = self; if ui.input().key_pressed(Key::Escape) { *selected_candidate = None; *candidate_search_dialog = false; *candidate_search_term = String::default(); *error_message = 
None; } if let Some(err) = error_message { SdkmanApp::render_error(ctx, err); } if *candidate_search_dialog { SdkmanApp::render_search_dialog( ctx, candidates, selected_candidate, candidate_search_dialog, candidate_search_term, error_message, ); } // render candidates for curr in candidates { // check whether to display the selected candidate only let candidate = if selected_candidate.is_none() || curr.name == selected_candidate.as_ref().unwrap().name { curr } else { continue; }; ui.add_space(PADDING); // render name, default version, and homepage URL ui.horizontal(|ui| { // render name and default version ui.with_layout(Layout::left_to_right(), |ui| { let btn_label = format!("{} {} ⤴", candidate.name, candidate.default_version); let title_btn = Button::new(btn_label) .text_style(TextStyle::Body) .text_color(WHITE); let added = ui.add(title_btn).on_hover_ui(|ui| { show_tooltip_text( ui.ctx(), Id::new(&candidate.name), "Click to display all available versions", ); }); // handle clicks on the name and default version if added.clicked() { match fetch_candidate_versions(&mut candidate.to_model()) { Ok(candidate_with_versions) => { *selected_candidate = Some(Candidate::from_model(candidate_with_versions)); } Err(e) => { *selected_candidate = None; *error_message = Some(format!( "Fetching available candidate versions failed with:\n{}", e )); } } } }); // render homepage URL ui.with_layout(Layout::right_to_left(), |ui| { ui.style_mut().visuals.hyperlink_color = CYAN; ui.add(Hyperlink::new(&candidate.url).text(&candidate.url)); }); }); ui.add_space(PADDING); // render description let description = Label::new(&candidate.description) .wrap(true) .text_style(eframe::egui::TextStyle::Body); ui.add(description); ui.add_space(PADDING); // render installation instruction ui.with_layout(Layout::right_to_left(), |ui| { let installation = Label::new(&candidate.installation_instruction) .text_style(eframe::egui::TextStyle::Body); ui.add(installation); }); ui.add_space(PADDING); 
ui.add(Separator::default()); if selected_candidate.is_some() { SdkmanApp::render_selected_candidate( ui, selected_candidate, local_candidates.iter().find(|local_candidate| { if let Some(remote_candidate) = &selected_candidate { local_candidate.binary_name() == remote_candidate .installation_instruction .split_whitespace() .last() .unwrap_or_default() } else { false } }), ); } } ui.add_space(7. * PADDING); } fn render_selected_candidate( ui: &mut Ui, selected_candidate: &mut Option<Candidate>, local_candidate: Option<&LocalCandidate>, ) { ui.add_space(PADDING); ui.horizontal(|ui| { ui.with_layout(Layout::left_to_right(), |ui| { ui.add_space(PADDING); ui.add( Label::new(format!( "Available {} versions", selected_candidate.as_ref().unwrap().name )) .wrap(true) .text_style(eframe::egui::TextStyle::Body), ); }); ui.with_layout(Layout::right_to_left(), |ui| { ui.add_space(PADDING); ui.with_layout(Layout::top_down(Align::RIGHT), |ui| { let close_btn_label = Label::new("❌") .wrap(true) .text_style(eframe::egui::TextStyle::Body) .sense(Sense::click()); if ui .add(close_btn_label) .on_hover_ui(|ui| { show_tooltip_text( ui.ctx(), Id::new(selected_candidate.as_ref().map(|c| &c.name).unwrap()), "Close", ); }) .clicked() { *selected_candidate = None; } }); }); }); // render all available versions ui.add_space(2. * PADDING); for selected_candidate_version in selected_candidate .as_ref() .map(|c| c.versions.to_vec()) .unwrap_or_default() { SdkmanApp::render_selected_candidate_version( ui, &selected_candidate_version, local_candidate .map(|c| c.versions()) .unwrap_or(&HashMap::new()), ); } ui.add_space(3. 
* PADDING); } fn render_selected_candidate_version( ui: &mut Ui, version: &RemoteVersion, local_versions: &HashMap<String, bool>, ) { ui.horizontal(|ui| { ui.with_layout(Layout::left_to_right(), |ui| { ui.label(version.mk_string(local_versions)); }); ui.with_layout(Layout::right_to_left(), |ui| { if ui .add_enabled( local_versions.contains_key(version.id()), Button::new("delete").text_style(eframe::egui::TextStyle::Body), ) .on_hover_ui(|ui| { show_tooltip_text(ui.ctx(), Id::new(version.id()), "Delete version"); }) .clicked() { println!("Deleting candidate version..."); } if ui .add_enabled( local_versions.get(version.id()).is_none(), Button::new("install").text_style(eframe::egui::TextStyle::Body), ) .on_hover_ui(|ui| { show_tooltip_text(ui.ctx(), Id::new(version.id()), "Install version"); }) .clicked() { println!("Installing candidate version..."); } if ui .add_enabled( local_versions.contains_key(version.id()) && local_versions.get(version.id()).unwrap() == &false, Button::new("current").text_style(eframe::egui::TextStyle::Body), ) .on_hover_ui(|ui| { show_tooltip_text(ui.ctx(), Id::new(version.id()), "Set current version"); }) .clicked() { println!("Setting current candidate version..."); } }); }); } fn render_search_dialog( ctx: &CtxRef, candidates: &[Candidate], selected_candidate: &mut Option<Candidate>, candidate_search_dialog: &mut bool, candidate_search_term: &mut String, error_message: &mut Option<String>, ) { Window::new("Search").show(ctx, |ui| { ui.add_space(PADDING); ui.horizontal(|ui| { ui.label("Candidate:"); ui.with_layout(Layout::left_to_right(), |ui| { let text_input = ui.text_edit_singleline(candidate_search_term); if text_input.lost_focus() && ui.input().key_pressed(Key::Enter) { match candidates.iter().find(|candidate| { candidate.name == *candidate_search_term || candidate .installation_instruction .ends_with(candidate_search_term.as_str()) }) { None => {} Some(found) => { match fetch_candidate_versions(&mut found.to_model()) { 
Ok(candidate_with_versions) => { *selected_candidate = Some(Candidate::from_model(candidate_with_versions)); } Err(e) => { *selected_candidate = None; *error_message = Some(format!( "Loading all versions for candidate '{}' failed with {}", candidate_search_term, e )); } } *candidate_search_dialog = false; *candidate_search_term = String::default(); } } } }); }); ui.add_space(PADDING); }); } pub fn render_footer(&self, ctx: &CtxRef) { TopBottomPanel::bottom("footer").show(ctx, |ui| { ui.vertical_centered(|ui| { ui.add_space(10.); ui.add(Label::new("API: https://api.sdkman.io/2").monospace()); ui.add( Hyperlink::new("https://github.com/emilk/egui") .text("Made with egui") .text_style(TextStyle::Monospace), ); ui.add( Hyperlink::new("https://github.com/gerdreiss/sdkman-ui") .text("Hosted on Github") .text_style(TextStyle::Monospace), ); ui.add_space(10.); }) }); } }
To make and keep the upper stories of a tall building habitable requires routine high technology. The lifts have to work, the water needs to be pumped, the toilets have to let the poop down one hundred stories without shattering violence. It is not all that expensive. Current office space costs in the centers of major cities are so high that very tall buildings are immensely profitable. It is simply difficult to do, requires able people working together, both initially to build the systems, and subsequently to keep them going. It is habitable floors that are hard to do, and habitable floors are what generates the rental income. So, to assess a society’s technological level, count habitable floors. By and large, the taller the building, the more the profit. Doubtless there is a limit, but in the center of most major cities, most tall buildings are below that limit. If people could build taller, they would. At our present technological level, settling space seems likely to be fatally unprofitable, but building upwards, building the city of tomorrow, is highly profitable. In South Africa the upper parts of tall buildings built by whites have become uninhabitable. That the upper part of the skyline has gone dark is the most visible symptom of social decay. Mark Steyn makes fun of the South African government’s incapacity to govern, for which National Review will surely purge him, if it has not done so already. Thamsanqa Jantjie, the lovable laugh-a-minute sign-language fraud who stood alongside President Obama gesticulating meaninglessly to the delight of all, was exposed in the days that followed as a far darker character. A violent schizophrenic charged over the years with burglary, rape, kidnapping, and murder, he was also a member of a “necklacing” gang — necklacing being the practice of placing a gasoline-filled tire over the head of the victim and setting it alight. … a lot of things in South Africa simply don’t function anymore. As revealing as Mr.
Jantjie’s extensive and violent criminal background is the fact that the National Prosecuting Authority cannot reliably state which offenses he has been convicted of, and, for the one crime for which he seems definitively to have been sentenced, whether in fact he served the sentence. The twin towers had one hundred and ten habitable stories. The top habitable floor of Freedom Tower, the replacement to the Twin Towers is labeled floor 104, but that is fraud, to disguise the fact that we cannot do this thing any more. It is actually the ninety fourth habitable floor. A slower pace of decay than South Africa, but decay nonetheless. Seems to me that the pace of decay has accelerated lately, but to draw definitive conclusions, will need a few more years. The Soviet Union fell in part because they could not keep the lights on, and the elite got sick of the darkness. I expect that by the second term of Bill de Blasio, the New York night skyline will be showing some dark spots, like the missing teeth of an aging homeless alcoholic. This will be proudly depicted as huge progress in reducing carbon emissions. Tags: decline of the west, technological and scientific stagnation
<reponame>sassansh/Small-DSL package ast; public class Move extends AnimStatement { private final XyCord startPosition; private final XyCord endPosition; public Move(String objName, XyCord startPosition, XyCord endPosition, String timestamp, String duration) { this.objName = objName; this.startPosition = startPosition; this.endPosition = endPosition; // remove the "s" from the given timestamp and duration this.timestamp = timestamp.replaceAll("[^0-9]", ""); this.duration = Integer.parseInt(duration.replaceAll("[^0-9]", "")); } public XyCord getStartPosition() { return startPosition; } public XyCord getEndPosition() { return endPosition; } @Override public <T> T accept(YouAnimateVisitor<T> v) { return v.visit(this); } }
package com.dimples.po;

import java.io.Serializable;

import lombok.Data;

/**
 * Persistence object for one drug record belonging to an adverse-event case.
 * <p>
 * NOTE(review): the field names (primaryId, caseId, drugSeq, roleCod, dechal,
 * rechal, ndaNum, ...) resemble the FDA FAERS DRUG table columns — confirm
 * against the backing DDL/mapper before relying on that interpretation.
 * Getters, setters, equals, hashCode and toString are generated by Lombok's
 * {@code @Data}.
 *
 * @author zhongyj <<EMAIL>><br/>
 * @date 2020/5/16
 */
@Data
public class Drug implements Serializable {

    /**
     * Primary key id.
     */
    private Long id;

    // Identifier of the report this row belongs to.
    private String primaryId;

    // Identifier of the case.
    private String caseId;

    // Sequence number of the drug within the case.
    private String drugSeq;

    // Role code — presumably suspect/concomitant/interacting; verify with the data source.
    private String roleCod;

    // Drug name as reported.
    private String drugname;

    private String prodAi;

    private String valVbm;

    // Route of administration.
    private String route;

    // Verbatim dose text.
    private String doseVbm;

    // Cumulative dose value and unit.
    private String cumDoseChr;

    private String cumDoseUnit;

    // Dechallenge / rechallenge outcome codes (per the apparent FAERS naming; verify).
    private String dechal;

    private String rechal;

    // Lot number.
    private String lotNum;

    // Expiration date.
    private String expDt;

    private String ndaNum;

    // Dose amount, unit, form, and frequency.
    private String doseAmt;

    private String doseUnit;

    private String doseForm;

    private String doseFreq;

    private static final long serialVersionUID = 1L;
}
/**
 * Verifies that while the handler is waiting for a boundary event, the
 * external task is not completed, so the execution never reaches the
 * activities that follow it.
 */
@Test
public void testWaitForBoundaryEvent() {
    // The handler starts out not waiting.
    assertThat(handler.isWaitingForBoundaryEvent(), is(false));

    handler.waitForBoundaryEvent();
    assertThat(handler.isWaitingForBoundaryEvent(), is(true));

    // Since the task stays open, the expected downstream activities are never
    // passed and the executor's assertion fails.
    AssertionError error = assertThrows(AssertionError.class, () -> tc.createExecutor().execute());
    assertThat(error.getMessage(), containsString("to have passed activities [externalTask, endEvent]"));
}
import { IsNotEmpty, IsString, IsUrl } from 'class-validator';

/**
 * Request payload for a link mapping: a short slug pointing at a target URL.
 * Validation is declared via class-validator decorators (presumably enforced
 * by the framework's validation pipe — confirm against the app setup).
 */
export class LinkDto {
  // Short identifier for the link; must be a non-empty string.
  @IsString()
  @IsNotEmpty()
  slug: string;

  // Destination address; must be a well-formed, non-empty URL.
  @IsUrl()
  @IsNotEmpty()
  url: string;
}
#![cfg_attr(not(test), no_std)]
//! Crate root: declares the library's public modules and re-exports the most
//! commonly used entry points.
//!
//! The crate is `no_std` except when compiled for tests.

pub mod adc;
pub mod ble_composer;
pub mod ble_parser;
pub mod buttons;
pub mod console;
pub mod ctap;
pub mod debug;
pub mod drivers;
pub mod electronics;
pub mod executor;
pub mod futures;
pub mod gpio;
pub mod hmac;
pub mod leds;
pub mod result;
pub mod rng;
pub mod sensors;
pub mod simple_ble;
pub mod temperature;
pub mod timer;

// Convenience re-exports so callers don't have to name the submodules/crates.
pub use drivers::retrieve_drivers;
// Attribute macro used to declare the program entry point.
pub use libtock_codegen::main;
pub use libtock_core::*;
This Oliva V Melanio is an incredible cigar. Easily one of my top 10 cigars that came out in 2012, even though I smoked it in 2013. This cigar packs a nice punch of pepper and spice on the palate, but is nice and soothing with the retrohale bringing out some very pleasureful floral notes that ease the burn of the spice. It's a very medium-full bodied cigar with regards to tobacco strength. However it's very full bodied in terms of flavor. Complex yet balance and easy to smoke! It's a 10/10 for sure! Initial Thoughts This year was a big year for the Oliva V Melanio it seems like everywhere you turned people were reviewing it and talking about. It took me a while to finally get on the bandwagon and get out there and find one of these sticks. Personally, I’m a big fan of the regular V series. I haven’t reviewed one on here yet, but I do love them. It’s definitely my favorite out of the Oliva line. I love the spice that it offers up. A nice big powerful stick is always easy to smoke. So needless to say I was excited to try out this new Melanio once I heard about it. This latest addition to the Oliva Serie V line is named after the company’s patriarch Melanio Oliva. According to the little insert that I snagged from the box at my local B&M it says that Melanio began growing tobacco in Pinar Del Rio in 1886. Later his grandson Gilberto re-established the business in Nicaragua. The long filler tobacco used in this cigar is primarily ligero which is “known for their robust and rich flavors”. So that’s the back story on the Oliva V Melanio, a nice homage to an important man to the Oliva brand name! Let’s get into the review and see what has so many people drawn into this stick. Looks This Oliva V Melanio comes in 5 different vitolas. The one I picked up is the torpedo. I tried to find the Figurado, but no one seemed to have it. It comes in a beautiful 10 count box. That is custom cut for each of these cigars. 
The top part of the box has sections cut to form V’s so that the head of the of the torpedo can lay there without being smashed. They look similar to teeth cut out of a saw blade. I love the packaging on this cigar. It’s gorgeous you can tell the people at Oliva put a lot of time and effort into this release. The actual cigar is lovely as well. A nice milk chocolate wrapper that is box pressed with slightly rounded edges so that it lays down nice and even on the ash tray while you smoke. The head comes to a perfectly tapered point, and the wrapper is absolutely flawless. You can really notice the box press of the cigar while looking at the foot. It’s a almost an oval looking shape similar to the San Lotano. This Oliva V Melanio also had dual bands which is fairly common for a special edition or limited release product. It has to have something to set it apart from the original blend so that people know which one they are picking up. It has the traditional Oliva Serie V band, but right below it there is a smaller band that says “Melanio”. Even though the band is smaller, word really pops out at you and it’s easy to see. This was one the reasons that drew me in to pick it up. Well that and the fact that everyone raved about the cigar. Function Let’s talk about how this cigar feels! It has some very interesting feelings, somedays it’s a manic depressive the other days it’s like walking on sunshine… I crack myself up sometimes! I’ve always been a fan of the box press cigar. I just love it feels when you hold it in your hand and it sits better in your mouth if you ask me. This Oliva V Melanio has a nice amount of firmness to it. It’s not overly stiff, and there is a good amount of sponginess to it. Unlike a cigar I had the other day that was so packed full of tobacco I couldn’t even draw on it! The construction on this stick is absolutely flawless, and I’ve come to expect nothing but that from the Oliva brand. 
They are one of the most consistent brands out there in regards to construction of all of their cigars. Smoking So it looks good and it feels good, well it smokes pretty damn good too! As I said above the construction of this Oliva V Melanio is absolutely flawless which leads to an incredible smoking experience. I had a little trouble getting it lit, but it was more of my fault because my lighter ran out of fuel and I was trying to light it on fumes. So when that didn’t work I reached for the matches and then this thing took off. The burn line was just perfect. The first ash held on for easily an inch and a half. I love the tight burn curls on the ash on this stick. The ash was very solid and stayed intact once I knocked it off. Not flaky whatsoever. To me that’s always a sign of a great stick. I hate to see a cigar with a flaky ash that just crumbles when it falls off. The second third burned just as well as the first. There was a ton of smoke that rolled off this cigar. It was a nice creamy smoke that filled the room. The ash on the second third didn’t look as tight as the first third, but it still held on for over an inch. Never had an issue with the draw whatsoever and the entire smoking experience was easy and beyond enjoyable, if that’s even possible!
<gh_stars>10-100 import { IContentType, FieldLink, FieldLinkProps } from "gd-sprest-def/lib/SP"; import { IsetContentTypeFields } from "../../../@types/helper/methods"; import { ContextInfo, Web } from "../../lib"; declare var SP; /** * Sets the field links associated with a content type. * @param ctInfo - The content type information */ export const setContentTypeFields: IsetContentTypeFields = (ctInfo: { id: string, fields: Array<string | FieldLinkProps>, listName?: string, webUrl?: string }): PromiseLike<void> => { // Clears the content type field links let clearLinks = (): PromiseLike<Array<FieldLink>> => { // Return a promise return new Promise((resolve, reject) => { // Get the links getLinks().then(fieldLinks => { let skipFields: Array<FieldLink> = []; // See if we need to remove any fields if (fieldLinks.length > 0) { let updateFl = false; // Set the context let ctx = ctInfo.webUrl ? new SP.ClientContext(ctInfo.webUrl) : new SP.ClientContext(ContextInfo.webServerRelativeUrl); // Get the source let src = ctInfo.listName ? ctx.get_web().get_lists().getByTitle(ctInfo.listName) : ctx.get_web(); // Get the content type let contentType = src.get_contentTypes().getById(ctInfo.id); // Parse the content type field links for (let i = 0; i < fieldLinks.length; i++) { let fieldLink = fieldLinks[i]; let removeFl = true; // Parse the fields to add for (let j = 0; j < ctInfo.fields.length; j++) { let field = ctInfo.fields[j]; let fieldName = typeof (field) === "string" ? 
field : field.Name || field.FieldInternalName; // See if we are keeping this field if (fieldName == fieldLink.Name) { let propUpdateFl = false; // Checks if an update is needed let updateField = (oldValue, newValue) => { // Ensure a value exists if (newValue == null) { return; } // See if an update is needed if (oldValue == newValue) { return; } // Set the flag propUpdateFl = true; } // Update the properties updateField(fieldLink.DisplayName, (field as FieldLinkProps).DisplayName); updateField(fieldLink.Hidden, (field as FieldLinkProps).Hidden); updateField(fieldLink.ReadOnly, (field as FieldLinkProps).ReadOnly); updateField(fieldLink.Required, (field as FieldLinkProps).Required); updateField(fieldLink.ShowInDisplayForm, (field as FieldLinkProps).ShowInDisplayForm); // See if an update to the property is needed if (!propUpdateFl) { // Set the flag to not remove this field reference removeFl = false; // Add the field to skip skipFields.push(fieldLink); } // Break from the loop break; } } // See if we are removing the field if (removeFl) { // Remove the field link contentType.get_fieldLinks().getById(fieldLink.Id).deleteObject(); // Set the flag updateFl = true; // Log console.log("[gd-sprest][Set Content Type Fields] Removing the field link: " + fieldLink.Name); } } // See if an update is required if (updateFl) { // Update the content type contentType.update(false); // Execute the request ctx.executeQueryAsync( // Success () => { // Log console.log("[gd-sprest][Set Content Type Fields] Removed the field links successfully."); // Resolve the request resolve(skipFields); }, // Error (sender, args) => { // Log console.log("[gd-sprest][Set Content Type Fields] Error removing the field links."); // Reject the request reject(); }); } else { // Log console.log("[gd-sprest][Set Content Type Fields] No fields need to be removed."); // Resolve the request resolve(skipFields); } } else { // Resolve the request resolve(skipFields); } }, reject); }); } // Creates the field links 
let createLinks = (skipFields: Array<FieldLink>): PromiseLike<void> => { // Return a promise return new Promise((resolve, reject) => { // Set the context let ctx = ctInfo.webUrl ? new SP.ClientContext(ctInfo.webUrl) : new SP.ClientContext(ContextInfo.webServerRelativeUrl); // Get the source let src = ctInfo.listName ? ctx.get_web().get_lists().getByTitle(ctInfo.listName) : ctx.get_web(); let skipField = (fieldName: string, fields: Array<FieldLink>): boolean => { for (let i = 0; i < fields.length; i++) { // See if we are skipping this field if (fields[i].Name == fieldName) { return true; } } } // Parse the fields to add let fields: Array<{ ref: any, info: string | FieldLinkProps }> = []; for (let i = 0; i < ctInfo.fields.length; i++) { let fieldInfo = ctInfo.fields[i]; let fieldName = typeof (fieldInfo) === "string" ? fieldInfo : fieldInfo.Name || fieldInfo.FieldInternalName; // See if we are skipping this field if (skipField(fieldName, skipFields)) { continue; } // Load the field let field = src.get_fields().getByInternalNameOrTitle(fieldName); ctx.load(field); // Log console.log("[gd-sprest][Set Content Type Fields] Adding the field link: " + fieldName); // Save a reference to this field fields.push({ ref: field, info: fieldInfo }); } // See if an update is needed if (fields.length > 0) { // Execute the request ctx.executeQueryAsync(() => { // Get the content type let contentType = src.get_contentTypes().getById(ctInfo.id); ctx.load(contentType); // Parse the fields for (let i = 0; i < fields.length; i++) { let field = fields[i]; /** * The field link set_[property] methods don't seem to work. Setting the field information seems to be the only way. * The read only property is the only one that doesn't seem to work. */ // See if the field ref has properties to update if (typeof (field.info) !== "string") { // Update the field properties field.info.DisplayName != null ? field.ref.set_title(field.info.DisplayName) : null; field.info.Hidden != null ? 
field.ref.set_hidden(field.info.Hidden) : null; field.info.ReadOnly != null ? field.ref.set_readOnlyField(field.info.ReadOnly) : null; field.info.Required != null ? field.ref.set_required(field.info.Required) : null; field.info.ShowInDisplayForm != null ? field.ref.setShowInDisplayForm(field.info.ShowInDisplayForm) : null; } // Create the field link let fieldLink = new SP.FieldLinkCreationInformation(); fieldLink.set_field(field.ref); // Add the field link to the content type contentType.get_fieldLinks().add(fieldLink); } // Update the content type contentType.update(false); // Execute the request ctx.executeQueryAsync( // Success () => { // Log console.log("[gd-sprest][Set Content Type Fields] Added the field links successfully."); // Resolve the request resolve(); }, // Error (sender, args) => { // Log console.log("[gd-sprest][Set Content Type Fields] Error adding field references.", args.get_message()); // Reject the request reject(); }); }, (sender, args) => { // Log console.log("[gd-sprest][Set Content Type Fields] Error getting field references.", args.get_message()); // Resolve the request resolve(); }); } else { // Log console.log("[gd-sprest][Set Content Type Fields] No fields need to be added."); // Resolve the request resolve(); } }); } // Gets the content type field links let getLinks = (): PromiseLike<Array<FieldLink>> => { // Return a promise return new Promise((resolve, reject) => { let ct: IContentType = null; // See if list name exists if (ctInfo.listName) { // Get the list content type ct = Web(ctInfo.webUrl).Lists(ctInfo.listName).ContentTypes(ctInfo.id); } else { // Get the content type ct = Web(ctInfo.webUrl).ContentTypes(ctInfo.id); } // Query the field links ct.FieldLinks().query({ Select: ["DisplayName", "Id", "Name", "Required", "ReadOnly", "ShowInDisplayForm"] }).execute(fieldLinks => { // Resolve the request resolve(fieldLinks.results); }, reject); }); } // Set the order of the field references let setOrder = (): PromiseLike<void> => { // 
Return a promise return new Promise((resolve, reject) => { // Set the context let ctx = ctInfo.webUrl ? new SP.ClientContext(ctInfo.webUrl) : new SP.ClientContext(ContextInfo.webServerRelativeUrl); // Get the source let src = ctInfo.listName ? ctx.get_web().get_lists().getByTitle(ctInfo.listName) : ctx.get_web(); // Get the content type let contentType = src.get_contentTypes().getById(ctInfo.id); // Parse the fields to add let fieldNames = []; for (let i = 0; i < ctInfo.fields.length; i++) { let fieldInfo = ctInfo.fields[i]; let fieldName = typeof (fieldInfo) === "string" ? fieldInfo : fieldInfo.Name || fieldInfo.FieldInternalName; // Add the field name fieldNames.push(fieldName); } // Reorder the content type contentType.get_fieldLinks().reorder(fieldNames); // Update the content type contentType.update(ctInfo.listName ? false : true); // Execute the request ctx.executeQueryAsync( // Success () => { // Log console.log("[gd-sprest][Set Content Type Fields] Updated the field order successfully."); // Resolve the request resolve(); }, // Error (sender, args) => { // Log console.log("[gd-sprest][Set Content Type Fields] Error updating the field order.", args.get_message()); // Reject the request reject(); }); }); } // Return a promise return new Promise((resolve, reject) => { // Ensure the SP object exists if (window["SP"]) { // Ensure fields exist if (ctInfo.fields) { // Clear the links clearLinks().then(skipFields => { // Create the links createLinks(skipFields).then(() => { // Set the field order setOrder().then(resolve, reject); }, reject); }, reject); } else { // Resolve the promise resolve(); } } else { // Resolve the request // This will cause issues in the SPConfig class resolve(); } }); }
<gh_stars>0 import React from 'react' import { Router, RouteComponentProps } from '@reach/router' import Layout from 'components/templates/layout/layout' import SEO from 'components/templates/seo/seo' import { newsProps } from '../input/news.props' import { News } from '../components/news/postPage' import { SinglePost } from '../components/news/singlePost' import { PathProps } from 'types/path.props' const PostsPage: React.FC<RouteComponentProps> = () => ( <> <SEO title="News" /> <News news={newsProps} /> </> ) const NewsWrap: React.FC<PathProps> = ({ path }) => ( <Layout path={path}> <Router> <PostsPage path={'/posts/'} /> <SinglePost path={`posts/:postSlug`} /> </Router> </Layout> ) export default NewsWrap