The aim of this work was to compare the outcome of a questionnaire survey of low back pain with data on related sickness absenteeism. The questionnaire was answered by 585 (88.8%) employees. Of this number, 288 (49.2%) respondents complained of low back pain. Sick leave was granted to 48 (16.7%) of the persons who had reported low back pain in the questionnaire, to 25 (8.4%) of the 297 employees who had not reported the pain, and to 4 (5.4%) of the 57 persons who had not responded to the questionnaire. Those who had not complained of low back pain but were granted sick leave, mostly short leaves, were mainly men and blue-collar workers. On the basis of our survey it seems that the analysis of sickness absenteeism due to low back pain does not completely reflect the prevalence of this disease, and that epidemiological studies of its prevalence should also take relevant sickness absenteeism into consideration.
// Loads data for the next turn in another thread.
public void loadTurn() {
    usableBlocksThread = new Thread() {
        @Override
        public void run() {
            ArrayList<Vector2> path = findPath(player.x, player.y, levelConfig.endX, levelConfig.endY, blocks);
            if (path != null) {
                // Collect the blocks that are not part of the found path.
                ArrayList<Block> nonPathBlocks = new ArrayList<Block>();
                for (Block block : blocks) {
                    if (!vectorListContains(path, new Vector2(block.x, block.y))) {
                        nonPathBlocks.add(block);
                    }
                }
                usableBlocks = findEdgeBlocks(nonPathBlocks);
                System.out.println(usableBlocks.size());
                System.out.println(allPaths.size());
            } else {
                usableBlocks = null;
                System.out.println("no usable blocks");
            }
            usableBlocksThread = null;
        }
    };
    usableBlocksThread.start();
}
/**
 * Checks recursively if an object or its subobjects has a property with the
 * specified name.
 * <p>
 * The object can be a user-defined object, map or array. The property name
 * correspondingly must be an object property, map key or array index.
 *
 * @param obj an object to introspect.
 * @param name a name of the property to check.
 * @return true if the object has the property and false if it doesn't.
 */
public static boolean hasProperty(Object obj, String name) {
    if (obj == null || name == null)
        return false;

    String[] names = name.split("\\.");
    if (names.length == 0)
        return false;

    return performHasProperty(obj, names, 0);
}
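A minimal usage sketch inferred from the Javadoc above; the enclosing class and the performHasProperty traversal are not shown here, so the nested-map behavior is an assumption based on the documented contract (map keys and dot-separated names):

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

// Hypothetical call site, assuming this snippet lives alongside hasProperty.
Map<String, Object> user = new HashMap<>();
user.put("address", Collections.singletonMap("city", "Boston"));

boolean hasCity = hasProperty(user, "address.city"); // expected: true, per the Javadoc
boolean hasZip = hasProperty(user, "address.zip");   // expected: false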
// New returns a configuration struct with content from the exercism.json file
func New(path string) (*Config, error) {
	c := &Config{}
	err := c.load(path)
	return c, err
}
package org.hibernate.tool.internal.reveng;

import org.hibernate.mapping.Property;
import org.hibernate.mapping.Table;
import org.hibernate.mapping.Value;
import org.hibernate.tool.api.reveng.ReverseEngineeringStrategy;
import org.jboss.logging.Logger;

public class PropertyBinder {

    private static final Logger log = Logger.getLogger(PropertyBinder.class);

    public static Property makeProperty(
            Table table,
            String defaultCatalog,
            String defaultSchema,
            String propertyName,
            Value value,
            boolean insertable,
            boolean updatable,
            boolean lazy,
            String cascade,
            String propertyAccessorName,
            ReverseEngineeringStrategy revengStrategy) {
        log.debug("Building property " + propertyName);
        Property prop = new Property();
        prop.setName(propertyName);
        prop.setValue(value);
        prop.setInsertable(insertable);
        prop.setUpdateable(updatable);
        prop.setLazy(lazy);
        prop.setCascade(cascade == null ? "none" : cascade);
        prop.setPropertyAccessorName(propertyAccessorName == null ? "property" : propertyAccessorName);
        MetaAttributesBinder.bindMetaAttributes(prop, revengStrategy, table, defaultCatalog, defaultSchema);
        return prop;
    }
}
#include <stdio.h>

int main(void) {
    int w, h, n, x, y, a, max1 = 0, min2, max3 = 0, min4, ans;
    scanf("%d%d%d", &w, &h, &n);
    min2 = w;
    min4 = h;
    for (int i = 0; i < n; i++) {
        scanf("%d%d%d", &x, &y, &a);
        if (a == 1 && max1 < x) max1 = x;
        else if (a == 2 && min2 > x) min2 = x;
        else if (a == 3 && max3 < y) max3 = y;
        else if (a == 4 && min4 > y) min4 = y;
    }
    /* The surviving region is (max1, max3) to (min2, min4); if either side is
       non-positive, nothing remains. The original guard only zeroed the area
       when both axes had been cut, which could yield a negative product. */
    if (max1 >= min2 || max3 >= min4)
        ans = 0;
    else
        ans = (min2 - max1) * (min4 - max3);
    printf("%d\n", ans);
    return 0;
}
The aim of this paper was to describe the geographic distribution of mortality from external causes within the city of Salvador, Bahia, Brazil, during 1991. Mortality indicators were calculated in terms of deaths from external causes and in terms of specific types of violence. Of the deaths in that year, 15% were due to external causes, corresponding to a mortality rate of 78/100,000 inhabitants. The homicide death rate was 32.2/100,000 inhabitants, and the traffic-related death rate was 21.8/100,000 inhabitants. The elderly, young adults, and teenagers were at higher risk of death from violent causes. Among the elderly, 38% of the deaths were from car accidents and 28% from falls. Among teenagers, homicides were the main cause of death, especially among males between 15 and 29 years of age. Our results show that the geographic distribution of violent deaths in Salvador is uneven. Such deaths occur mainly in poorer neighborhoods, which have violent death rates above the city average. There is a need for prevention and control measures, including public policies and public health activities, focusing on the areas at greatest risk of death from external causes.
def format_folia(lemma: str, pos_tag: str, word: str) -> str:
    return '[ @folia {0} {1} {2} ]'.format(lemma, pos_tag, word) if word else ''
/**
 * Currently, this method is only used to pin nodes obtained from mNodeCache.
 * Pinning an MNode in memory prevents the pinned node and its ancestors from
 * being evicted during cache eviction. Since a pinned MNode occupies memory,
 * this method also checks the memory status, which may trigger cache eviction
 * or flushing.
 *
 * @param node the node to pin
 */
@Override
public void pin(IMNode node) throws MetadataException {
  readLock.lock();
  try {
    cacheManager.pinMNode(node);
  } finally {
    readLock.unlock();
  }
}
Nearly nine in 10 businesses worldwide are worried about the threat of cyberattacks, according to a new survey. Cyberattacks, followed by data breaches and unplanned IT and telecom outages, are the leading operational concerns among businesses globally, according to a study from the Business Continuity Institute and British Standards Institute. Eighty-eight percent of businesses report being concerned or extremely concerned about the threat of cyberattacks, including malware and distributed denial-of-service attacks. Cyberattacks have topped the list of perceived threats for three straight years in the annual study, which surveyed more than 700 organizations in 79 countries this year.

Cyberattacks and intrusions have attracted increased attention in the wake of the election-related hacks of systems used by the Democratic National Committee and former Clinton campaign chairman John Podesta, and the massive data breaches disclosed by Yahoo. By one estimate, cyberattacks cost businesses a total of $400 billion annually, a figure that is expected to rise in coming years. The pair of Yahoo data breaches, which were announced last year within months of one another, resulted in Verizon Communications acquiring the company at a $350 million discount in a revised deal announced Tuesday.

U.S. government computer systems have also been successful targets of cyber intrusions, including the Office of Personnel Management (OPM) data breach carried out by Chinese hackers that compromised the personal information of more than 20 million Americans. Lawmakers and government agencies have given more attention to improving the nation's cyber defenses in the wake of the OPM hack and the election hacking, which the intelligence community has attributed to Russia.
import java.util.Scanner;

public class A116 {

    static Scanner sc = new Scanner(System.in);
    static int stops = sc.nextInt();

    public static void main(String[] args) {
        System.out.println(Solver());
    }

    private static int Solver() {
        int min_kap = 0;  // minimum capacity needed so far
        int aktuell = 0;  // passengers currently on board
        while (stops > 0) {
            int aus = sc.nextInt();  // passengers exiting
            int ein = sc.nextInt();  // passengers entering
            aktuell = aktuell + (ein - aus);
            if (aktuell > min_kap) {
                min_kap = aktuell;
            }
            stops--;
        }
        return min_kap;
    }
}
def current_url(self):
    idx = self.widget.currentIndex()
    return self.widget.tab_url(idx)
// Store in the same structure as client, just under BasePath
func mediaPath(sha string, config *Config, path string) (string, error) {
	abspath := filepath.Join(config.BasePath, path, sha[0:2], sha[2:4])
	if err := os.MkdirAll(abspath, 0744); err != nil {
		return "", fmt.Errorf("Error trying to create local media directory in '%s': %s", abspath, err)
	}
	return filepath.Join(abspath, sha), nil
}
module suncore {
    /**
     * Tween service, dedicated to managing tween objects.
     */
    export class TweenService extends BaseService {
        static readonly NAME: string = "suncore.TweenService";

        /**
         * List of tween objects.
         */
        private $tweens: ITween[] = [];

        /**
         * Lock flag: prevents adding or removing tween objects from interfering
         * with the tween list while it is being iterated.
         */
        private $locker: boolean = false;

        protected $onRun(): void {
            this.facade.registerObserver(NotifyKey.PAUSE_TIMELINE, this.$onTimelinePause, this, false, suncom.EventPriorityEnum.EGL);
            this.facade.registerObserver(NotifyKey.DRIVE_TWEEN_TICK, this.$onDriveTweenTick, this);
            this.facade.registerObserver(NotifyKey.REGISTER_TWEEN_OBJECT, this.$onRegisterTweenObject, this);
        }

        protected $onStop(): void {
            this.facade.removeObserver(NotifyKey.PAUSE_TIMELINE, this.$onTimelinePause, this);
            this.facade.removeObserver(NotifyKey.DRIVE_TWEEN_TICK, this.$onDriveTweenTick, this);
            this.facade.removeObserver(NotifyKey.REGISTER_TWEEN_OBJECT, this.$onRegisterTweenObject, this);
        }

        /**
         * Timeline pause event callback; only the stop event is of interest here.
         */
        private $onTimelinePause(mod: ModuleEnum, stop: boolean): void {
            if (stop === true) {
                for (let i: number = 0; i < this.$tweens.length; i++) {
                    const tween: ITween = this.$tweens[i];
                    if (tween.var_mod === mod) {
                        tween.cancel();
                    }
                }
            }
        }

        /**
         * Advances time for all tweens.
         */
        private $onDriveTweenTick(): void {
            this.$locker = true;

            // Hold the tween list in a temporary variable, because the list
            // may be replaced by a copy while it is being executed.
            const tweens: ITween[] = this.$tweens;
            for (let mod: ModuleEnum = 0; mod < ModuleEnum.MAX; mod++) {
                if (System.isModulePaused(mod) === false) {
                    for (let i: number = 0; i < tweens.length; i++) {
                        const tween: ITween = tweens[i];
                        if (tween.var_mod === mod) {
                            let timeLeft: number = 1;
                            while (timeLeft > 0 && tween.var_canceled === false && tween.var_recovered === false) {
                                timeLeft = tween.func_doAction();
                            }
                        }
                    }
                }
            }

            // Remove tween objects that have been cancelled.
            for (let i: number = this.$tweens.length - 1; i > -1; i--) {
                const tween: ITween = this.$tweens[i];
                if (tween.func_getUsePool() === true) {
                    if (tween.var_canceled === false) {
                        continue;
                    }
                }
                else if (tween.var_recovered === false) {
                    continue;
                }
                tweens.splice(i, 1)[0].func_recover();
            }

            this.$locker = false;
        }

        /**
         * Registers a tween object.
         */
        private $onRegisterTweenObject(tween: ITween): void {
            // Copy the list while locked, so the iteration in progress is
            // not disturbed.
            if (this.$locker === true) {
                this.$tweens = this.$tweens.slice(0);
                this.$locker = false;
            }
            this.$tweens.push(tween);
        }
    }
}
import { PopupBg, PopupButton, PopupContainer, PopupStyled } from './Popup.style'

interface PopupViewProps {
  hideCallback: () => void
}

export const PopupView = ({ hideCallback }: PopupViewProps) => {
  return (
    <PopupStyled>
      <PopupBg />
      <PopupContainer>
        <svg onClick={() => hideCallback()}>
          <use xlinkHref="/icons/sprites.svg#close" />
        </svg>
        <pre>
          Cardashift Launchpad is live!
          <br />
          Register for Batch Genesis
        </pre>
        <a href="https://launchpad.cardashift.com/" target="_blank" rel="noreferrer">
          <PopupButton appearance="secondary">Launch App</PopupButton>
        </a>
      </PopupContainer>
    </PopupStyled>
  )
}
/**
 * Class denoting a stream variable, which will contain a type and a name.
 */
public static class StreamVariable extends Operand {

    private String name;

    StreamVariable(String name, Attribute.Type type) {
        this.name = name;
        // the type field is declared in the Operand base class
        this.type = type;
    }

    /**
     * This method is used to return the name of the stream variable.
     *
     * @return name of the stream variable
     */
    public String getName() {
        return name;
    }
}
/******************************************************************************
 * Copyright (c) 2004 - 2020 Xilinx, Inc. All rights reserved.
 * SPDX-License-Identifier: MIT
 ******************************************************************************/

#ifndef XIL_XARMV8_H_ /* prevent circular inclusions */
#define XIL_XARMV8_H_ /* by using protection macros */

/***************************** Include Files ********************************/

#include "xil_types.h"

/**************************** Type Definitions ******************************/

typedef struct {
    u32 TimestampFreq;
    u32 CpuFreq;
    u8 CpuId; /* CPU Number */
} XARMv8_Config;

#endif /* XIL_XARMV8_H_ */
/*
** mod_vhost_maxclients - Max Clients per VirtualHost
**
** Copyright (c) <NAME> 2015 -
**
** Permission is hereby granted, free of charge, to any person obtaining
** a copy of this software and associated documentation files (the
** "Software"), to deal in the Software without restriction, including
** without limitation the rights to use, copy, modify, merge, publish,
** distribute, sublicense, and/or sell copies of the Software, and to
** permit persons to whom the Software is furnished to do so, subject to
** the following conditions:
**
** The above copyright notice and this permission notice shall be
** included in all copies or substantial portions of the Software.
**
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
** EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
** MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
** IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
** CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
** TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
** SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
**
** [ MIT license: http://www.opensource.org/licenses/mit-license.php ]
*/

#include "httpd.h"
#include "http_config.h"
#include "http_request.h"
#include "http_protocol.h"
#include "http_core.h"
#include "http_log.h"
#include "util_time.h"
#include "ap_mpm.h"
#include "apr_strings.h"
#include "ap_regex.h"
#include "scoreboard.h"

#define MODULE_NAME "mod_vhost_maxclients"
#define MODULE_VERSION "0.7.0"

#if (AP_SERVER_MINORVERSION_NUMBER > 2)
#define __APACHE24__
#endif

#ifdef __APACHE24__
#define ap_get_scoreboard_worker ap_get_scoreboard_worker_from_indexes
#endif

#define _log_debug \
  ap_log_error(APLOG_MARK, APLOG_NOTICE, 0, NULL, "DEBUG: " MODULE_NAME "/" MODULE_VERSION "%s:%d", __func__, __LINE__)

#ifdef __APACHE24__
#include "http_main.h"
#else
#define ap_server_conf NULL
#endif

#define VHOST_MAXEXTENSIONS 16
#define AP_CTIME_COMPACT_LEN 20

#if !defined(__APACHE24__) && defined(_WIN32)
/*
 * libhttpd.dll does not export the following variables.
 * This won't work correctly, but works well enough for other functionality.
 */
int ap_extended_status = 0;
#endif

module AP_MODULE_DECLARE_DATA vhost_maxclients_module;

static int vhost_maxclients_server_limit, vhost_maxclients_thread_limit;
static apr_file_t *vhost_maxclients_log_fp = NULL;

typedef struct {
  /* vhost max clients */
  int dryrun;
  const char *log_path;
  signed int vhost_maxclients;
  signed int vhost_maxclients_log;
  signed int vhost_maxclients_per_ip;
  apr_array_header_t *ignore_extensions;
  apr_array_header_t *ignore_request_regexp;
  unsigned int vhost_maxclients_time_from;
  unsigned int vhost_maxclients_time_to;
  ap_regex_t *regexpc;
} vhost_maxclients_config;

#ifndef __APACHE24__
static apr_status_t ap_recent_ctime_compact(char *date_str, apr_time_t t)
{
  apr_time_exp_t xt;
  int real_year;
  int real_month;

  ap_explode_recent_localtime(&xt, t);
  real_year = 1900 + xt.tm_year;
  real_month = xt.tm_mon + 1;

  *date_str++ = real_year / 1000 + '0';
  *date_str++ = real_year % 1000 / 100 + '0';
  *date_str++ = real_year % 100 / 10 + '0';
  *date_str++ = real_year % 10 + '0';
  *date_str++ = '-';
  *date_str++ = real_month / 10 + '0';
  *date_str++ = real_month % 10 + '0';
  *date_str++ = '-';
  *date_str++ = xt.tm_mday / 10 + '0';
  *date_str++ = xt.tm_mday % 10 + '0';
  *date_str++ = ' ';
  *date_str++ = xt.tm_hour / 10 + '0';
  *date_str++ = xt.tm_hour % 10 + '0';
  *date_str++ = ':';
  *date_str++ = xt.tm_min / 10 + '0';
  *date_str++ = xt.tm_min % 10 + '0';
  *date_str++ = ':';
  *date_str++ = xt.tm_sec / 10 + '0';
  *date_str++ = xt.tm_sec % 10 + '0';
  *date_str++ = 0;

  return APR_SUCCESS;
}
#endif

#define vhost_maxclients_log_error(r, fmt, ...) _vhost_maxclients_log_error(r, apr_psprintf(r->pool, fmt, __VA_ARGS__))

static void *_vhost_maxclients_log_error(request_rec *r, char *log_body)
{
  char log_time[AP_CTIME_COMPACT_LEN];
  char *log;

  /* example for compact format: "1993-06-30 21:49:08" */
  /*                              1234567890123456789  */
#ifdef __APACHE24__
  int time_len = AP_CTIME_COMPACT_LEN;
  ap_recent_ctime_ex(log_time, r->request_time, AP_CTIME_OPTION_COMPACT, &time_len);
#else
  ap_recent_ctime_compact(log_time, r->request_time);
#endif

  log = apr_psprintf(r->pool, "%s %s\n", log_time, log_body);
  apr_file_puts(log, vhost_maxclients_log_fp);
  apr_file_flush(vhost_maxclients_log_fp);

  return NULL; /* declared void *, but the result is unused */
}

static void *vhost_maxclients_create_server_config(apr_pool_t *p, server_rec *s)
{
  vhost_maxclients_config *scfg = (vhost_maxclients_config *)apr_pcalloc(p, sizeof(*scfg));

  scfg->dryrun = -1;
  scfg->log_path = NULL;
  scfg->vhost_maxclients = 0;
  scfg->vhost_maxclients_log = 0;
  scfg->vhost_maxclients_per_ip = 0;
  scfg->ignore_extensions = apr_array_make(p, VHOST_MAXEXTENSIONS, sizeof(char *));
  scfg->ignore_request_regexp = apr_array_make(p, 1, sizeof(char *));
  scfg->vhost_maxclients_time_from = 0;
  scfg->vhost_maxclients_time_to = 2359;
  scfg->regexpc = NULL;

  return scfg;
}

static void *vhost_maxclients_create_server_merge_conf(apr_pool_t *p, void *b, void *n)
{
  vhost_maxclients_config *base = (vhost_maxclients_config *)b;
  vhost_maxclients_config *new = (vhost_maxclients_config *)n;
  vhost_maxclients_config *scfg = (vhost_maxclients_config *)apr_pcalloc(p, sizeof(*scfg));

  if (new->dryrun > -1) {
    scfg->dryrun = new->dryrun;
  } else {
    scfg->dryrun = base->dryrun;
  }

  scfg->log_path = base->log_path;
  scfg->vhost_maxclients = new->vhost_maxclients;
  scfg->vhost_maxclients_log = new->vhost_maxclients_log;
  scfg->vhost_maxclients_per_ip = new->vhost_maxclients_per_ip;
  scfg->ignore_extensions = new->ignore_extensions;
  scfg->ignore_request_regexp = new->ignore_request_regexp;
  scfg->vhost_maxclients_time_from = new->vhost_maxclients_time_from;
  scfg->vhost_maxclients_time_to = new->vhost_maxclients_time_to;
  scfg->regexpc = base->regexpc;

  return scfg;
}

static int check_extension(char *filename, apr_array_header_t *exts)
{
  int i;
  for (i = 0; i < exts->nelts; i++) {
    const char *extension = ((char **)exts->elts)[i];
    ssize_t name_len = strlen(filename) - strlen(extension);
    if (name_len >= 0 && strcmp(&filename[name_len], extension) == 0)
      return 1;
  }
  return 0;
}

static int check_time_slot(apr_pool_t *p, unsigned int from, unsigned int to)
{
  unsigned int cur;
  apr_time_exp_t tm;

  apr_time_exp_lt(&tm, apr_time_now());
  cur = atoi(apr_psprintf(p, "%02d%02d", tm.tm_hour, tm.tm_min));

  if (from > to) {
    to += 2400;
  }
  if ((from < cur) && (to > cur)) {
    return 0;
  }
  return 1;
}

static char *build_vhostport_name(request_rec *r)
{
#ifdef __APACHE24__
  ssize_t vhostport_len;
  char *vhostport;

  vhostport_len = strlen(r->hostname) + sizeof(":65536");
  vhostport = apr_pcalloc(r->pool, vhostport_len);
  apr_snprintf(vhostport, vhostport_len, "%s:%d", r->hostname, r->connection->local_addr->port);

  return vhostport;
#else
  return (char *)r->hostname;
#endif
}

static int vhost_maxclients_handler(request_rec *r)
{
  int i, j;
  int vhost_count = 0;
  int ip_count = 0;
  char *vhostport;

  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(r->server->module_config, &vhost_maxclients_module);

  if (!ap_is_initial_req(r)) {
    return DECLINED;
  }

  if (scfg->vhost_maxclients <= 0) {
    return DECLINED;
  }

  if (r->hostname == NULL) {
    return DECLINED;
  }

  if (r->filename == NULL) {
    return DECLINED;
  }

  if (!ap_extended_status) {
    ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, ap_server_conf, "DEBUG: only used when ExtendedStatus On");
    return DECLINED;
  }

  /* check ignore extensions */
  if (check_extension(r->filename, scfg->ignore_extensions)) {
    return DECLINED;
  }

  /* check time */
  if (check_time_slot(r->pool, scfg->vhost_maxclients_time_from, scfg->vhost_maxclients_time_to)) {
    return DECLINED;
  }

  /* build vhostport name */
  vhostport = build_vhostport_name(r);

  for (i = 0; i < vhost_maxclients_server_limit; ++i) {
    for (j = 0; j < vhost_maxclients_thread_limit; ++j) {
      worker_score *ws_record = ap_get_scoreboard_worker(i, j);
#ifdef __APACHE24__
      char *client_ip = r->connection->client_ip;
#else
      char *client_ip = r->connection->remote_ip;
#endif

      switch (ws_record->status) {
      case SERVER_BUSY_READ:
      case SERVER_BUSY_WRITE:
      case SERVER_BUSY_KEEPALIVE:
      case SERVER_BUSY_LOG:
      case SERVER_BUSY_DNS:
      case SERVER_CLOSING:
      case SERVER_GRACEFUL:
        /* check maxclients per vhost */
        if (strcmp(vhostport, ws_record->vhost) == 0) {
          if (scfg->regexpc != NULL && !ap_regexec(scfg->regexpc, ws_record->request, 0, NULL, 0)) {
            break;
          }
          vhost_count++;
          ap_log_error(APLOG_MARK, APLOG_DEBUG, 0, ap_server_conf, "DEBUG: (increment %s): %d/%d", vhostport,
                       vhost_count, scfg->vhost_maxclients);

          /* logging only for vhost_maxclients_log */
          if (scfg->vhost_maxclients_log > 0 && vhost_count > scfg->vhost_maxclients_log) {
            if (vhost_maxclients_log_fp != NULL) {
              vhost_maxclients_log_error(
                  r, "LOG-ONLY-VHOST_COUNT return 503 from %s : %d / %d client_ip: %s uri: %s filename: %s",
                  vhostport, vhost_count, scfg->vhost_maxclients_log, client_ip, r->uri, r->filename);
            } else {
              ap_log_error(
                  APLOG_MARK, APLOG_NOTICE, 0, ap_server_conf,
                  "NOTICE: LOG-ONLY-VHOST_COUNT return 503 from %s : %d / %d client_ip: %s uri: %s filename: %s",
                  vhostport, vhost_count, scfg->vhost_maxclients_log, client_ip, r->uri, r->filename);
            }
          }

          if (vhost_count > scfg->vhost_maxclients) {
            if (scfg->dryrun > 0) {
              if (vhost_maxclients_log_fp != NULL) {
                vhost_maxclients_log_error(
                    r, "DRY-RUN-VHOST_COUNT return 503 from %s : %d / %d client_ip: %s uri: %s filename: %s",
                    vhostport, vhost_count, scfg->vhost_maxclients, client_ip, r->uri, r->filename);
              } else {
                ap_log_error(APLOG_MARK, APLOG_NOTICE, 0, ap_server_conf,
                             "DRY-RUN-VHOST_COUNT return 503 from %s : %d / %d client_ip: %s uri: %s filename: %s",
                             vhostport, vhost_count, scfg->vhost_maxclients, client_ip, r->uri, r->filename);
              }
            } else {
              if (vhost_maxclients_log_fp != NULL) {
                vhost_maxclients_log_error(
                    r, "VHOST_COUNT return 503 from %s : %d / %d client_ip: %s uri: %s filename: %s", vhostport,
                    vhost_count, scfg->vhost_maxclients, client_ip, r->uri, r->filename);
              } else {
                ap_log_error(APLOG_MARK, APLOG_NOTICE, 0, ap_server_conf,
                             "VHOST_COUNT return 503 from %s : %d / %d client_ip: %s uri: %s filename: %s", vhostport,
                             vhost_count, scfg->vhost_maxclients, client_ip, r->uri, r->filename);
              }
            }
            return (scfg->dryrun > 0) ? DECLINED : HTTP_SERVICE_UNAVAILABLE;
          }

          /* check maxclients per ip in same vhost */
          if (scfg->vhost_maxclients_per_ip > 0) {
            if (strcmp(client_ip, ws_record->client) == 0) {
              ip_count++;
              if (ip_count > scfg->vhost_maxclients_per_ip) {
                if (scfg->dryrun > 0) {
                  if (vhost_maxclients_log_fp != NULL) {
                    vhost_maxclients_log_error(
                        r, "DRY-RUN-CLIENT_COUNT return 503 from %s : %d / %d client_ip: %s uri: %s filename: %s",
                        vhostport, ip_count, scfg->vhost_maxclients_per_ip, client_ip, r->uri, r->filename);
                  } else {
                    ap_log_error(APLOG_MARK, APLOG_NOTICE, 0, ap_server_conf,
                                 "NOTICE: DRY-RUN-CLIENT_COUNT return "
                                 "503 from %s : %d / %d client_ip: %s "
                                 "uri: %s filename: %s",
                                 vhostport, ip_count, scfg->vhost_maxclients_per_ip, client_ip, r->uri, r->filename);
                  }
                } else {
                  if (vhost_maxclients_log_fp != NULL) {
                    vhost_maxclients_log_error(
                        r, "CLIENT_COUNT return 503 from %s : %d / %d client_ip: %s uri: %s filename: %s", vhostport,
                        ip_count, scfg->vhost_maxclients_per_ip, client_ip, r->uri, r->filename);
                  } else {
                    ap_log_error(APLOG_MARK, APLOG_NOTICE, 0, ap_server_conf,
                                 "CLIENT_COUNT return 503 from %s : %d / %d client_ip: %s uri: %s filename: %s",
                                 vhostport, ip_count, scfg->vhost_maxclients_per_ip, client_ip, r->uri, r->filename);
                  }
                }
                return (scfg->dryrun > 0) ?
                    DECLINED : HTTP_SERVICE_UNAVAILABLE;
              }
            }
          }
        }
        break;
      default:
        break;
      }
    }
  }

  /* not reached vhost_maxclients */
  return DECLINED;
}

static const char *set_vhost_maxclients_dryrun(cmd_parms *parms, void *mconfig, int flag)
{
  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(parms->server->module_config, &vhost_maxclients_module);
  scfg->dryrun = flag;
  return NULL;
}

static const char *set_vhost_maxclientsvhost(cmd_parms *parms, void *mconfig, const char *arg1)
{
  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(parms->server->module_config, &vhost_maxclients_module);
  signed long int limit = strtol(arg1, (char **)NULL, 10);

  if ((limit > 65535) || (limit < 0)) {
    return "Integer overflow or invalid number";
  }

  scfg->vhost_maxclients = limit;
  return NULL;
}

static const char *set_vhost_maxclientsvhost_log(cmd_parms *parms, void *mconfig, const char *arg1)
{
  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(parms->server->module_config, &vhost_maxclients_module);
  signed long int limit = strtol(arg1, (char **)NULL, 10);

  if ((limit > 65535) || (limit < 0)) {
    return "Integer overflow or invalid number";
  }

  scfg->vhost_maxclients_log = limit;
  return NULL;
}

static const char *set_vhost_maxclientsvhost_log_path(cmd_parms *parms, void *mconfig, const char *arg1)
{
  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(parms->server->module_config, &vhost_maxclients_module);
  scfg->log_path = arg1;
  return NULL;
}

static const char *set_vhost_maxclientsvhost_perip(cmd_parms *parms, void *mconfig, const char *arg1)
{
  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(parms->server->module_config, &vhost_maxclients_module);
  signed long int limit = strtol(arg1, (char **)NULL, 10);

  if ((limit > 65535) || (limit < 0)) {
    return "Integer overflow or invalid number";
  }

  scfg->vhost_maxclients_per_ip = limit;
  return NULL;
}

static const char *set_vhost_ignore_extensions(cmd_parms *parms, void *mconfig, const char *arg)
{
  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(parms->server->module_config, &vhost_maxclients_module);

  if (VHOST_MAXEXTENSIONS < scfg->ignore_extensions->nelts) {
    return "the number of ignore extensions exceeded";
  }
  *(const char **)apr_array_push(scfg->ignore_extensions) = arg;

  return NULL;
}

static const char *set_vhost_ignore_request_regexp(cmd_parms *parms, void *mconfig, const char *arg)
{
  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(parms->server->module_config, &vhost_maxclients_module);

  APR_ARRAY_PUSH(scfg->ignore_request_regexp, const char *) = arg;
  char **regexpi = (char **)scfg->ignore_request_regexp->elts;
  scfg->regexpc = ap_pregcomp(parms->pool, (char *)regexpi[0], AP_REG_EXTENDED | AP_REG_ICASE);
  if (scfg->regexpc == NULL)
    return "regexp error";

  return NULL;
}

static const char *set_vhost_maxclients_time(cmd_parms *parms, void *mconfig, const char *arg1, const char *arg2)
{
  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(parms->server->module_config, &vhost_maxclients_module);

  scfg->vhost_maxclients_time_from = atoi(arg1);
  scfg->vhost_maxclients_time_to = atoi(arg2);

  if (scfg->vhost_maxclients_time_from < 0 || scfg->vhost_maxclients_time_from > 2359) {
    return "VhostMaxClientsTimeSlot_From is invalid: it should be in the range 0 to 2359";
  }
  if (scfg->vhost_maxclients_time_to < 0 || scfg->vhost_maxclients_time_to > 2359) {
    return "VhostMaxClientsTimeSlot_To is invalid: it should be in the range 0 to 2359";
  }

  return NULL;
}

static command_rec vhost_maxclients_cmds[] = {
    AP_INIT_FLAG("VhostMaxClientsDryRun", set_vhost_maxclients_dryrun, NULL, ACCESS_CONF | RSRC_CONF,
                 "Enable dry-run, which doesn't return 503, logging only: On / Off (default Off)"),
    AP_INIT_TAKE1("VhostMaxClients", set_vhost_maxclientsvhost, NULL, RSRC_CONF | ACCESS_CONF,
                  "maximum connections per Vhost"),
    AP_INIT_TAKE1("VhostMaxClientsLogOnly", set_vhost_maxclientsvhost_log, NULL, RSRC_CONF | ACCESS_CONF,
                  "logging only: maximum connections per Vhost"),
    AP_INIT_TAKE1("VhostMaxClientsLogPath", set_vhost_maxclientsvhost_log_path, NULL, RSRC_CONF | ACCESS_CONF,
                  "logging file path instead of error_log"),
    AP_INIT_TAKE1("VhostMaxClientsPerIP", set_vhost_maxclientsvhost_perip, NULL, RSRC_CONF | ACCESS_CONF,
                  "maximum connections per IP of Vhost"),
    AP_INIT_ITERATE("IgnoreVhostMaxClientsExt", set_vhost_ignore_extensions, NULL, ACCESS_CONF | RSRC_CONF,
                    "Set Ignore Extensions."),
    AP_INIT_TAKE1("IgnoreVhostMaxClientsRequestRegexp", set_vhost_ignore_request_regexp, NULL,
                  ACCESS_CONF | RSRC_CONF, "Set Ignore Request."),
    AP_INIT_TAKE2("VhostMaxClientsTimeSlot", set_vhost_maxclients_time, NULL, RSRC_CONF | ACCESS_CONF,
                  "Time to enable the VhostMaxClients. (default 0:00 ~ 23:59)"),
    {NULL},
};

static int vhost_maxclients_init(apr_pool_t *p, apr_pool_t *plog, apr_pool_t *ptemp, server_rec *server)
{
  void *data;
  const char *userdata_key = "vhost_maxclients_init";
  vhost_maxclients_config *scfg =
      (vhost_maxclients_config *)ap_get_module_config(server->module_config, &vhost_maxclients_module);

  apr_pool_userdata_get(&data, userdata_key, server->process->pool);
  if (!data) {
    apr_pool_userdata_set((const void *)1, userdata_key, apr_pool_cleanup_null, server->process->pool);
    return OK;
  }

  ap_mpm_query(AP_MPMQ_HARD_LIMIT_THREADS, &vhost_maxclients_thread_limit);
  ap_mpm_query(AP_MPMQ_HARD_LIMIT_DAEMONS, &vhost_maxclients_server_limit);

  ap_log_error(APLOG_MARK, APLOG_NOTICE, 0, ap_server_conf,
               MODULE_NAME "/" MODULE_VERSION " enabled: %d/%d thread_limit/server_limit",
               vhost_maxclients_thread_limit, vhost_maxclients_server_limit);

  /* open custom log instead of error_log */
  if (scfg->log_path != NULL) {
    if (apr_file_open(&vhost_maxclients_log_fp, scfg->log_path, APR_WRITE | APR_APPEND | APR_CREATE, APR_OS_DEFAULT,
                      p) != APR_SUCCESS) {
      ap_log_error(APLOG_MARK, APLOG_EMERG, 0, server, "%s ERROR %s: vhost_maxclients log file open failed: %s",
                   MODULE_NAME, __func__, scfg->log_path);
      return HTTP_INTERNAL_SERVER_ERROR;
    }
  }

  return OK;
}

static void vhost_maxclients_register_hooks(apr_pool_t *p)
{
  ap_hook_post_config(vhost_maxclients_init, NULL, NULL, APR_HOOK_MIDDLE);
  ap_hook_access_checker(vhost_maxclients_handler, NULL, NULL, APR_HOOK_MIDDLE);
}

#ifdef __APACHE24__
AP_DECLARE_MODULE(vhost_maxclients) = {
#else
module AP_MODULE_DECLARE_DATA vhost_maxclients_module = {
#endif
    STANDARD20_MODULE_STUFF,
    NULL,                                      /* create per-dir config structures */
    NULL,                                      /* merge per-dir config structures */
    vhost_maxclients_create_server_config,     /* create per-server config structures */
    vhost_maxclients_create_server_merge_conf, /* merge per-server config structures */
    vhost_maxclients_cmds,                     /* table of config file commands */
    vhost_maxclients_register_hooks};
def show_text(msg, color, x_coord, y_coord):
    screen_text = font.render(msg, True, color)
    screen.blit(screen_text, (x_coord, y_coord))
/**
 * Bluetooth ACL connection state changed callback
 */
void bt_acl_state_changed_callback(bt_status_t status, bt_bdaddr_t *remote_bd_addr, bt_acl_state_t state)
{
    Tracer trc("bt_acl_state_changed_callback");
    bt_bdaddr_t *bda = remote_bd_addr;
    ALOGV("bt_acl_state_changed_callback. status=%d state=%sconnected(%d) remote=%02X:%02X:%02X:%02X:%02X:%02X",
          status, state == BT_ACL_STATE_DISCONNECTED ? "dis" : "", state,
          bda->address[0], bda->address[1], bda->address[2], bda->address[3], bda->address[4], bda->address[5]);
}
An Optimal Spatiotemporal Noise Filter for NI/OCT Imaging

A novel spatiotemporal filtering formalism is proposed as a means to further improve the SNR realized in Near-Infrared Optical Coherence Tomography (NI/OCT) imaging applications. As is well known, noise filtering on such images is made more difficult by the presence of characteristic multiplicative 'speckle' noise processes that preclude the direct application of standard LTI techniques. In this work, we first demonstrate that OCT noise may be approximated as an additive source for which a significant noise/signal cross-correlation is present. With this approximation, we then develop a spatiotemporal noise filter formalism based upon the MMSE-optimal Wiener principle. Noise filtering is then applied along the space/time axes using a multi-slice, motion-compensated estimation process. In benchmark application to OCT volume data, this filter is shown to exhibit the desirable property of sharply reduced noise variance without the distortion or blurring characteristic of simpler filtering schemes.
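As background for the Wiener principle invoked above, a minimal sketch under the additive-noise approximation $x = s + n$ (the paper's multi-slice, motion-compensated spatiotemporal estimator itself is not reproduced here): the MMSE-optimal frequency response is

$$H_W(\omega) = \frac{P_{sx}(\omega)}{P_{xx}(\omega)} = \frac{P_s(\omega) + P_{sn}(\omega)}{P_s(\omega) + P_n(\omega) + 2\,\operatorname{Re} P_{sn}(\omega)},$$

where $P_s$ and $P_n$ are the signal and noise power spectra and $P_{sn}$ is their cross-spectrum. When the noise/signal cross-correlation vanishes, this reduces to the familiar $P_s/(P_s + P_n)$; the significant cross-correlation demonstrated in this work is what keeps the $P_{sn}$ terms in the estimator.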
package handlers

import (
	"github.com/aklinker1/miasma/internal/server/database"
	"github.com/aklinker1/miasma/internal/server/gen/restapi/operations"
	"github.com/aklinker1/miasma/internal/server/services/plugin_service"
	"github.com/aklinker1/miasma/internal/server/utils/constants"
	"github.com/aklinker1/miasma/internal/shared/log"
	"github.com/go-openapi/runtime/middleware"
)

var GetAppTraefikConfig = operations.GetAppTraefikConfigHandlerFunc(
	func(params operations.GetAppTraefikConfigParams) middleware.Responder {
		log.V("handlers.GetAppTraefikConfig()")
		db, onDefer := database.ReadOnly()
		defer onDefer()

		if !plugin_service.IsInstalled(db, constants.PluginNameTraefik) {
			return operations.NewGetAppTraefikConfigBadRequest().
				WithPayload("Traefik is not installed")
		}

		appID := params.AppID.String()
		plugins, err := plugin_service.GetAppPlugins(db, appID)
		if err != nil {
			return operations.NewGetAppTraefikConfigDefault(500).WithPayload(err.Error())
		}

		traefikConfig := plugins.Traefik
		if traefikConfig == nil {
			return operations.NewGetAppTraefikConfigNotFound()
		}
		return operations.NewGetAppTraefikConfigOK().WithPayload(traefikConfig)
	},
)
from gstats import start_request, end_request

_collector_addr = 'tcp://127.0.0.2:2345'
_prefix = b'my_app'


def pre_request(req, worker):
    start_request(req, collect=True, collector=_collector_addr, prefix=_prefix)


def post_request(req, worker):
    end_request(req, collector=_collector_addr, prefix=_prefix)
const LOOKUP_TABLE: [char; 64] = [
    'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S',
    'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l',
    'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4',
    '5', '6', '7', '8', '9', '+', '/',
];

const PADDING: char = '=';

fn encode_chunks(chunks: &[u8]) -> Vec<char> {
    let mut v = Vec::new();
    match chunks.len() {
        3 => {
            let b1 = chunks[0] >> 2;
            v.push(LOOKUP_TABLE[b1 as usize]);
            let b2 = ((chunks[0] & 0b00000011) << 4) | (chunks[1] >> 4);
            v.push(LOOKUP_TABLE[b2 as usize]);
            let b3 = ((chunks[1] & 0b00001111) << 2) | ((chunks[2] & 0b11000000) >> 6);
            v.push(LOOKUP_TABLE[b3 as usize]);
            let b4 = chunks[2] & 0b00111111;
            v.push(LOOKUP_TABLE[b4 as usize]);
        }
        2 => {
            let b1 = chunks[0] >> 2;
            v.push(LOOKUP_TABLE[b1 as usize]);
            let b2 = ((chunks[0] & 0b00000011) << 4) | (chunks[1] >> 4);
            v.push(LOOKUP_TABLE[b2 as usize]);
            let b3 = (chunks[1] & 0b00001111) << 2;
            v.push(LOOKUP_TABLE[b3 as usize]);
            v.push(PADDING);
        }
        1 => {
            let b1 = chunks[0] >> 2;
            v.push(LOOKUP_TABLE[b1 as usize]);
            let b2 = (chunks[0] & 0b00000011) << 4;
            v.push(LOOKUP_TABLE[b2 as usize]);
            v.push(PADDING);
            v.push(PADDING);
        }
        _ => {}
    }
    v
}

pub fn base64(data: &str) -> String {
    let byte_array: &[u8] = data.as_bytes();
    let mut v: Vec<char> = Vec::new();
    for octet_array in byte_array.chunks(3) {
        v.extend(encode_chunks(octet_array));
    }
    v.into_iter().collect::<String>()
}

fn decode_string(input: &str) -> Vec<u8> {
    // Treat padding as 'A' (value 0) while decoding; the surplus bytes are
    // trimmed off below based on the number of '=' characters.
    let filtered = input
        .chars()
        .map(|c| if c == PADDING { 'A' } else { c })
        .collect::<Vec<char>>();
    let equals = input.chars().filter(|&c| c == '=').count();
    let mut v: Vec<u8> = Vec::new();
    for chunk in filtered.chunks(4) {
        let mut n = LOOKUP_TABLE.iter().position(|&x| x == chunk[0]).unwrap() << 18;
        let n1 = LOOKUP_TABLE.iter().position(|&x| x == chunk[1]).unwrap() << 12;
        let n2 = LOOKUP_TABLE.iter().position(|&x| x == chunk[2]).unwrap() << 6;
        let n3 = LOOKUP_TABLE.iter().position(|&x| x == chunk[3]).unwrap();
        n = n | n1 | n2 | n3;
        v.push(((n >> 16) & 0xff) as u8);
        v.push(((n >> 8) & 0xff) as u8);
        v.push((n & 0xff) as u8);
    }
    let len = v.len() - equals;
    v.into_iter().take(len).collect()
}

pub fn base64_decode(data: &str) -> String {
    if data.len() % 4 != 0 {
        String::from("Incompatible String")
    } else {
        std::str::from_utf8(&decode_string(data))
            .unwrap()
            .to_owned()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_encode() {
        assert_eq!(base64("hello world"), "aGVsbG8gd29ybGQ=");
    }

    #[test]
    fn test_decode() {
        assert_eq!(base64_decode("aGVsbG8gd29ybGQ="), "hello world");
    }
}
// Address returns the tcp (local) address of this MemoryRedis server
func (server *Server) Address() string {
	if server == nil {
		return ""
	}
	return server.addr
}
{-# language StandaloneDeriving #-}
{-# options_ghc -fno-warn-orphans #-}

module Graphics.Vulkan.HL.Internal.Orphan where

import Graphics.Vulkan

deriving instance Show CommandPool
deriving instance Show DebugReportCallback
deriving instance Show Extent2D
deriving instance Show Extent3D
deriving instance Show Image
deriving instance Show ImageView
deriving instance Show QueueFamilyProperties
deriving instance Show RenderPass
deriving instance Show Surface
deriving instance Show SurfaceCapabilities
deriving instance Show SurfaceFormat
deriving instance Show Swapchain
#include <stdio.h>

int d[100001][2] = {0};
int school[100001][2] = {0};

int main() {
    int n;
    scanf("%d", &n);
    for (int i = 0; i < n; i++) {
        scanf("%d %d", d[i], d[i] + 1);
        school[d[i][0]][1] += d[i][1];
    }
    int max = 0;
    int maxI = 0;
    for (int j = 1; j < 100001; j++) {
        if (max < school[j][1]) {
            max = school[j][1];
            maxI = j;
        }
    }
    printf("%d %d", maxI, max);
    getchar();
    getchar();
    return 0;
}
def convert2idx(self, sentence):
    idx = []
    for word in sentence:
        if word in self.word2idx.keys():
            idx.append(self.word2idx[word])
        else:
            idx.append(0)
    return idx
"""
Factorio's recipe tree is written in Lua, we want it in Python.
"""

import argparse
import collections
import glob
import json
import logging
import os
import sys

try:
    from .slpp import slpp
except (ValueError, ImportError):
    from slpp import slpp

# Subdirectory within Factorio where we expect to find recipe files.
RECIPE_LOCATION = os.path.join("data", "base", "prototypes", "recipe")

if sys.platform == 'darwin':
    DEFAULT_PATH = os.path.expanduser(
        "~/Library/Application Support/Steam/steamapps/common/Factorio/factorio.app/Contents")
else:
    ###TODO: Linux, kthxbai
    DEFAULT_PATH = "C:\\Program Files (x86)\\steam\\steamapps\\common\\Factorio"

# What we expect scripts to begin with.
RECIPE_PREFIX = "data:extend("
# What we expect scripts to end with.
RECIPE_SUFFIX = ")"

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("recipe")


class ParseError(RuntimeError):
    pass


class Resource(object):
    """ Raw or manufactured item that can be constructed or used to construct. """

    def __init__(self, name, recipe):
        self.name = name
        self.recipe = recipe
        self.inputs = {}
        self.outputs = {}

    def __repr__(self):
        return '%s("%s")' % (self.__class__.__name__, self.name)

    def __hash__(self):
        return hash(self.name)


class RawResource(Resource):
    def __init__(self, name):
        super().__init__(name, None)


class Manufactured(Resource):
    def __init__(self, name, recipe):
        super().__init__(name, recipe)


class Production(object):
    def __init__(self, resource_in, quantity, manufactured):
        assert isinstance(resource_in, Resource)
        assert isinstance(manufactured, Manufactured)
        self.resource_in = resource_in
        self.quantity = quantity
        self.manufactured = manufactured
        assert manufactured.name not in resource_in.inputs
        assert manufactured.name not in resource_in.outputs
        assert resource_in.name not in manufactured.inputs
        assert resource_in.name not in manufactured.outputs
        resource_in.outputs[manufactured.name] = self
        manufactured.inputs[resource_in.name] = self

    def __repr__(self):
        return "Production('%s', %d, '%s')" % (self.resource_in.name, self.quantity, self.manufactured.name)


def load_file_recipes(fh, enabled_only=False, expensive=False, logger=logger):
    """
    Load all the recipes from a given file handle.

    :param enabled_only: Set True to limit to only enabled recipes.
    :param expensive: Set True to use 'expensive' configurations.
    :return: dict(name -> {recipe})
    """
    logger.info("Loading recipes from %s", fh.name)
    lua_text = fh.read().strip()
    logger.debug("Loaded %d bytes", len(lua_text))

    # Strip the non-table wrapper.
    if not lua_text.startswith(RECIPE_PREFIX) or not lua_text.endswith(RECIPE_SUFFIX):
        logger.warning("%s does not appear to be a recipe definition file.", fh.name)
        return {}
    lua_table = lua_text[len(RECIPE_PREFIX):-len(RECIPE_SUFFIX)].strip()

    definitions = {}
    for table in slpp.decode(lua_table):
        own_version = {}

        # Only handle 'recipe's.
        if table.get('type') != "recipe":
            logger.debug("Ignoring: %s", table)
            continue

        name = table.get('name').lower()
        if not name:
            logger.warning("Malformed entry: %s", table)
            continue
        own_version['name'] = name

        # Check if we're skipping disabled recipes.
        if enabled_only:
            if table.get('enabled', True) is False:
                logger.debug("Skipping %s: disabled" % name)
                continue
            own_version['enabled'] = table['enabled']

        # Make sure it has a unique name.
        if name in definitions:
            raise ParseError("%s: Duplicated recipe: %s" % (fh.name, name))

        inset = table.get('normal')
        if expensive:
            inset = table.get('expensive', inset)
        if inset:
            if enabled_only and inset.get('enabled', True) is False:
                logger.debug("Skipping %s: inset disabled" % name)
                continue
            if 'ingredients' in inset:
                table = inset

        ingredients = table.get('ingredients')
        if not ingredients:
            logger.warning("Entry with no ingredients: %s", table)
            continue

        own_version['ingredients'] = {}
        for entry in ingredients:
            if isinstance(entry, (tuple, list)):
                assert len(entry) == 2
                assert isinstance(entry[1], int)
                own_version['ingredients'][entry[0]] = entry[1]
            else:
                assert isinstance(entry, dict)
                assert len(entry) == 3
                own_version['ingredients'][entry['name']] = int(entry['amount'])

        if 'energy_required' in table:
            own_version['energy_required'] = table['energy_required']

        logger.debug("\"%s\": %s", name, json.dumps(own_version, sort_keys=True))
        definitions[name] = own_version

    return definitions


def load_all_recipes(factorio_path, logger=logger):
    if not os.access(factorio_path, os.R_OK):
        raise ValueError("%s: No access or no such folder." % factorio_path)

    # Iterate across all the files we find in the RECIPE_LOCATION.
    recipes_path = os.path.join(factorio_path, RECIPE_LOCATION)
    logger.debug("recipes path: %s", recipes_path)
    if not os.access(recipes_path, os.R_OK):
        raise ValueError("%s: Missing recipe folder or no access: %s" % (factorio_path, recipes_path))

    lua_files = glob.glob(os.path.join(recipes_path, "*.lua"))
    if not lua_files:
        raise IOError("%s: no .lua files present" % (recipes_path))

    # Strip down to just the filename.
    lua_files = [os.path.basename(filename) for filename in lua_files]
    logger.debug("lua files: %s" % (", ".join(lua_files)))

    recipes = dict()
    for filename in lua_files:
        with open(os.path.join(recipes_path, filename), 'r') as fh:
            # Get a dictionary of the recipes from this file.
            new_recipes = load_file_recipes(fh)
            if not new_recipes:
                continue
            logger.debug("Loaded %d recipes: %s", len(new_recipes), ",".join(new_recipes.keys()))

            # Check for any duplicates.
            new_keys, old_keys = frozenset(new_recipes.keys()), frozenset(recipes.keys())
            for dupe in new_keys.intersection(old_keys):
                logger.warning("'%s' redefined in %s", dupe, filename)

            # Add the recipes to the main dictionary.
            recipes.update(new_recipes)

    logger.debug("Loaded %d recipes total" % len(recipes))
    return recipes


def dump_requirements(recipes, requests):
    requests = set(r.lower() for r in requests)
    requirements = collections.defaultdict(int)
    uses = collections.defaultdict(set)
    for request in requests:
        recipe = recipes.get(request)
        if not recipe:
            raise ValueError("Unknown recipe: " + request)
        ingredients = list(((ing, qty), request) for (ing, qty) in recipe['ingredients'].items())
        while ingredients:
            (ingredient, quantity), use = ingredients.pop(0)
            uses[ingredient].add(use)
            ingredient_recipe = recipes.get(ingredient)
            if ingredient_recipe:
                for (sub_ingredient, sub_quantity) in ingredient_recipe['ingredients'].items():
                    ingredients.append(((sub_ingredient, sub_quantity * quantity), ingredient))
            requirements[ingredient] += quantity

    keys = list(requirements.keys())
    keys.sort(key=lambda k: k)
    keys.sort(key=lambda k: requirements[k], reverse=True)
    keys.sort(key=lambda k: len(uses[k]), reverse=True)

    print("%-5s %-5s %-40s %s" % ("Qty", "#Use", "Item", "Uses"))
    for item in keys:
        print("%5d %5d %-40s %s" % (requirements[item], len(uses[item]), item, ', '.join(uses[item])))


def build_graph(recipes, goals):
    """ Builds a graph of productions to produce a set of goals. """
    resources = {}
    productions = dict()
    produces = set()
    consumes = set()

    demands = [(goal, 1) for goal in goals]
    while demands:
        product_name, quantity = demands.pop()
        produces.add(product_name)
        product_recipe = recipes[product_name]
        product_resource = resources.get(product_name, Manufactured(product_name, product_recipe))
        for ing_name, ing_quantity in product_recipe['ingredients'].items():
            consumes.add(ing_name)
            ing_resource = resources.get(ing_name)
            if not ing_resource:
                ing_recipe = recipes.get(ing_name)
                if ing_recipe:
                    ing_resource = Manufactured(ing_name, ing_recipe)
                else:
                    ing_resource = RawResource(ing_name)
                resources[ing_name] = ing_resource
            if (ing_resource, product_resource) not in productions:
                productions[ing_resource, product_resource] = Production(ing_resource, ing_quantity, product_resource)
                if not isinstance(ing_resource, RawResource):
                    demands.append((ing_name, quantity * ing_quantity))

    outputs = frozenset((produces - consumes) | set(goals))
    assert outputs == set(goals)
    inputs = frozenset(consumes - produces)

    ready = set(resources[k] for k in inputs)
    rounds = []
    while True:
        outputs = collections.defaultdict(set)
        for pin, pout in productions:
            if pin not in ready:
                continue
            if pout in ready:
                continue
            if not all(ing.resource_in in ready for ing in pout.inputs.values()):
                continue
            outputs[pout].add(pin)
        if not outputs:
            break
        rounds.append(dict(outputs))
        ready.update(outputs.keys())

    return {
        'inputs': inputs,
        'outputs': outputs,
        'rounds': rounds,
        'resources': resources,
        'productions': productions,
    }


if __name__ == "__main__":
    parser = argparse.ArgumentParser("Recipe translator")
    parser.add_argument("--verbose", "-v", help="Increase verbosity.", default=0, action="count")
    parser.add_argument("--json", help="Write json dump", required=False, type=str)
    parser.add_argument("--path", help="Path to the Factorio folder", default=DEFAULT_PATH, type=str)
    parser.add_argument("--make", help="Add an item to the request list", action="append")
    args = parser.parse_args(sys.argv[1:])

    if not os.path.exists(args.path):
        logger.error("No such path: %s", args.path)
        sys.exit(-1)

    if args.verbose == 0:
        logger.setLevel(logging.WARN)
    elif args.verbose == 1:
        logger.setLevel(logging.INFO)
    elif args.verbose >= 2:
        logger.setLevel(logging.DEBUG)

    if args.path.endswith('.json'):
        if args.json:
            raise RuntimeError("--json is not compatible with specifying a .json input source")
        with open(args.path, 'r') as fh:
            recipes = json.loads(fh.read())
    else:
        recipes = load_all_recipes(args.path)
        if args.json:
            with open(args.json, 'w') as fh:
                fh.write(json.dumps(recipes, indent=4))

    if args.make:
        project = dump_requirements(recipes, args.make)
/**
 * A class representing the state of a single audited contest
 * across multiple counties.
 */
@Entity
@Cacheable(true)
@Table(name = "comparison_audit")
@SuppressWarnings({"PMD.ImmutableField", "PMD.ExcessiveClassLength", "PMD.CyclomaticComplexity",
                   "PMD.GodClass", "PMD.ModifiedCyclomaticComplexity", "PMD.StdCyclomaticComplexity",
                   "PMD.TooManyFields", "PMD.TooManyMethods", "PMD.ExcessiveImports"})
public class ComparisonAudit implements PersistentEntity {

  /**
   * Class-wide logger
   */
  public static final Logger LOGGER = LogManager.getLogger(ComparisonAudit.class);

  /**
   * The database stored precision for decimal types.
   */
  public static final int PRECISION = 10;

  /**
   * The database stored scale for decimal types.
   */
  public static final int SCALE = 8;

  /**
   * The ID number.
   */
  @Id
  @Column(updatable = false, nullable = false)
  @GeneratedValue(strategy = GenerationType.SEQUENCE)
  private Long my_id;

  /**
   * The version (for optimistic locking).
   */
  @Version
  private Long my_version;

  /**
   * The contest result for this audit state.
   */
  @ManyToOne(optional = false, fetch = FetchType.LAZY)
  @JoinColumn
  private ContestResult my_contest_result;

  /**
   * The reason for this audit.
   */
  @Column(updatable = false, nullable = false)
  @Enumerated(EnumType.STRING)
  private AuditReason my_audit_reason;

  /**
   * The status of this audit.
   */
  @Column(nullable = false)
  @Enumerated(EnumType.STRING)
  private AuditStatus my_audit_status = AuditStatus.NOT_STARTED;

  /**
   * The gamma.
   */
  @Column(updatable = false, nullable = false, precision = PRECISION, scale = SCALE)
  private BigDecimal my_gamma = Audit.GAMMA;

  /**
   * The diluted margin.
   */
  @Column(updatable = false, nullable = false, precision = PRECISION, scale = SCALE)
  private BigDecimal diluted_margin = BigDecimal.ONE;

  /**
   * The risk limit.
   */
  @Column(updatable = false, nullable = false, precision = PRECISION, scale = SCALE)
  private BigDecimal my_risk_limit = BigDecimal.ONE;

  /**
   * The number of samples audited.
   */
  @Column(nullable = false)
  private Integer my_audited_sample_count = 0;

  /**
   * The number of samples to audit overall assuming no further overstatements.
   */
  @Column(nullable = false)
  private Integer my_optimistic_samples_to_audit = 0;

  /**
   * The expected number of samples to audit overall assuming overstatements
   * continue at the current rate.
   */
  @Column(nullable = false)
  private Integer my_estimated_samples_to_audit = 0;

  /**
   * The number of two-vote understatements recorded so far.
   */
  @Column(nullable = false)
  private Integer my_two_vote_under_count = 0;

  /**
   * The number of one-vote understatements recorded so far.
   */
  @Column(nullable = false)
  private Integer my_one_vote_under_count = 0;

  /**
   * The number of one-vote overstatements recorded so far.
   */
  @Column(nullable = false)
  private Integer my_one_vote_over_count = 0;

  /**
   * The number of two-vote overstatements recorded so far.
   */
  @Column(nullable = false)
  private Integer my_two_vote_over_count = 0;

  /**
   * The number of discrepancies recorded so far that are neither
   * understatements nor overstatements.
   */
  @Column(nullable = false)
  private Integer my_other_count = 0;

  /**
   * The number of disagreements.
   */
  @Column(nullable = false)
  private Integer my_disagreement_count = 0;

  /**
   * Running total of overstatements; gets incremented.
   */
  @Column(nullable = true) // true for migration
  private BigDecimal overstatements = BigDecimal.ZERO;

  /**
   * A flag that indicates whether the optimistic ballots to audit
   * estimate needs to be recalculated.
   */
  @Column(nullable = false)
  private Boolean my_optimistic_recalculate_needed = true;

  /**
   * A flag that indicates whether the non-optimistic ballots to
   * audit estimate needs to be recalculated.
   */
  @Column(nullable = false)
  private Boolean my_estimated_recalculate_needed = true;

  /**
   * The sequence of CastVoteRecord ids for this contest ordered by County id.
   */
  @Column(name = "contest_cvr_ids", columnDefinition = "text")
  @Convert(converter = LongListConverter.class)
  private List<Long> contestCVRIds = new ArrayList<Long>();

  /**
   * A map from CVRAuditInfo objects to their discrepancy values for this
   * audited contest.
   */
  @ElementCollection
  @CollectionTable(name = "contest_comparison_audit_discrepancy",
                   joinColumns = @JoinColumn(name = "contest_comparison_audit_id",
                                             referencedColumnName = "my_id"))
  @MapKeyJoinColumn(name = "cvr_audit_info_id")
  @Column(name = "discrepancy")
  private Map<CVRAuditInfo, Integer> my_discrepancies = new HashMap<>();

  /**
   * The set of CVRAuditInfo objects for which a disagreement was recorded
   * for this audited contest.
   */
  @ManyToMany
  @JoinTable(name = "contest_comparison_audit_disagreement",
             joinColumns = @JoinColumn(name = "contest_comparison_audit_id",
                                       referencedColumnName = "my_id"),
             inverseJoinColumns = @JoinColumn(name = "cvr_audit_info_id",
                                              referencedColumnName = "my_id"))
  private Set<CVRAuditInfo> my_disagreements = new HashSet<>();

  /**
   * Constructs a new, empty ComparisonAudit (solely for persistence).
   */
  public ComparisonAudit() {
    super();
  }

  /**
   * Constructs a ComparisonAudit for the given params.
   *
   * @param contestResult The contest result.
   * @param riskLimit The risk limit.
   * @param dilutedMargin μ
   * @param gamma γ
   * @param auditReason The audit reason.
   */
  // FIXME estimatedSamplesToAudit / optimisticSamplesToAudit have side
  // effects, so we should call that out
  //
  // FIXME Remove the warning by not calling overridable methods :D
  @SuppressWarnings({"PMD.ConstructorCallsOverridableMethod"})
  public ComparisonAudit(final ContestResult contestResult,
                         final BigDecimal riskLimit,
                         final BigDecimal dilutedMargin,
                         final BigDecimal gamma,
                         final AuditReason auditReason) {
    super();
    my_contest_result = contestResult;
    my_risk_limit = riskLimit;
    this.diluted_margin = dilutedMargin;
    my_gamma = gamma;
    my_audit_reason = auditReason;
    // compute initial sample size
    optimisticSamplesToAudit();
    estimatedSamplesToAudit();

    if (contestResult.getDilutedMargin().equals(BigDecimal.ZERO)) {
      // the diluted margin is 0, so this contest is not auditable
      my_audit_status = AuditStatus.NOT_AUDITABLE;
    }
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public Long id() {
    return my_id;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void setID(final Long the_id) {
    my_id = the_id;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public Long version() {
    return my_version;
  }

  /**
   * @return the counties related to this contest result.
   */
  public Set<County> getCounties() {
    return Collections.unmodifiableSet(this.contestResult().getCounties());
  }

  /**
   * @return the contest result associated with this audit.
   */
  public ContestResult contestResult() {
    return my_contest_result;
  }

  /**
   * @return the gamma associated with this audit.
   */
  public BigDecimal getGamma() {
    return my_gamma;
  }

  /**
   * @return the risk limit associated with this audit.
   */
  public BigDecimal getRiskLimit() {
    return my_risk_limit;
  }

  /**
   * @return the diluted margin from the ContestResult.
   */
  public BigDecimal getDilutedMargin() {
    return this.diluted_margin;
  }

  public String getContestName() {
    return this.contestResult().getContestName();
  }

  /**
   * @return the audit reason associated with this audit.
   */
  public AuditReason auditReason() {
    return my_audit_reason;
  }

  /**
   * @return the audit status associated with this audit.
   */
  public AuditStatus auditStatus() {
    return my_audit_status;
  }

  /** set audit status **/
  public void setAuditStatus(final AuditStatus auditStatus) {
    my_audit_status = auditStatus;
  }

  /** see if the county is participating in this audit (contest) **/
  public boolean isForCounty(final Long countyId) {
    return getCounties().stream()
        .filter(c -> c.id().equals(countyId))
        .findFirst()
        .isPresent();
  }

  /**
   * Does this audit belong to only a single county?
   */
  public boolean isSingleCountyFor(final County c) {
    return getCounties().equals(Stream.of(c)
                                .collect(Collectors.toSet()));
  }

  /**
   * Updates the audit status based on the current risk limit. If the audit
   * has already been ended or the contest is not auditable, this method has
   * no effect on its status.
   * Fix: RLA-00450
   */
  public void updateAuditStatus() {
    LOGGER.debug(String.format("[updateAuditStatus: %s for contest=%s "
                               + "my_optimistic_samples_to_audit=%d my_audited_sample_count=%d"
                               + " my_optimistic_recalculate_needed=%s my_estimated_recalculate_needed=%s]",
                               my_audit_status, contestResult().getContestName(),
                               my_optimistic_samples_to_audit, my_audited_sample_count,
                               my_optimistic_recalculate_needed, my_estimated_recalculate_needed));

    if (my_audit_status == AuditStatus.ENDED
        || my_audit_status == AuditStatus.HAND_COUNT
        || my_audit_status == AuditStatus.NOT_AUDITABLE) {
      return;
    }

    if (Boolean.TRUE.equals(my_optimistic_recalculate_needed)
        || Boolean.TRUE.equals(my_estimated_recalculate_needed)) {
      recalculateSamplesToAudit();
    }

    // the calculation below needs the recalculation above (RLA-00450)
    if (my_optimistic_samples_to_audit - my_audited_sample_count <= 0) {
      LOGGER.debug(String.format("[updateAuditStatus: RISK_LIMIT_ACHIEVED for contest=%s]",
                                 contestResult().getContestName()));
      my_audit_status = AuditStatus.RISK_LIMIT_ACHIEVED;
    } else {
      // risk limit has not been achieved
      // note that it _is_ possible to go from RISK_LIMIT_ACHIEVED to
      // IN_PROGRESS if a sample or set of samples is "unaudited"
      if (my_audit_status.equals(AuditStatus.RISK_LIMIT_ACHIEVED)) {
        LOGGER.warn("[updateAuditStatus: Moving from RISK_LIMIT_ACHIEVED -> IN_PROGRESS!]");
      }
      my_audit_status = AuditStatus.IN_PROGRESS;
    }
  }

  /**
   * Ends this audit; if the audit has already reached its risk limit,
   * or the contest is not auditable, this call has no effect on its status.
   */
  public void endAudit() {
    if (my_audit_status != AuditStatus.RISK_LIMIT_ACHIEVED
        && my_audit_status != AuditStatus.NOT_AUDITABLE) {
      my_audit_status = AuditStatus.ENDED;
    }
  }

  /**
   * @return the initial expected number of samples to audit.
   */
  @SuppressWarnings({"checkstyle:magicnumber", "PMD.AvoidDuplicateLiterals"})
  public int initialSamplesToAudit() {
    return computeOptimisticSamplesToAudit(0, 0, 0, 0)
        .setScale(0, RoundingMode.CEILING).intValue();
  }

  /**
   * @return the expected overall number of ballots to audit, assuming no
   * further overstatements occur.
   */
  public Integer optimisticSamplesToAudit() {
    if (my_optimistic_recalculate_needed) {
      recalculateSamplesToAudit();
    }
    return my_optimistic_samples_to_audit;
  }

  /** estimatedSamplesToAudit minus getAuditedSampleCount **/
  public final Integer estimatedRemaining() {
    return Math.max(0, estimatedSamplesToAudit() - getAuditedSampleCount());
  }

  /** optimisticSamplesToAudit minus getAuditedSampleCount **/
  public final Integer optimisticRemaining() {
    return Math.max(0, optimisticSamplesToAudit() - getAuditedSampleCount());
  }

  /**
   * @return the expected overall number of ballots to audit, assuming
   * overstatements continue to occur at the current rate.
   */
  public final Integer estimatedSamplesToAudit() {
    if (my_estimated_recalculate_needed) {
      LOGGER.debug("[estimatedSamplesToAudit: recalculate needed]");
      recalculateSamplesToAudit();
    }
    return my_estimated_samples_to_audit;
  }

  /**
   * The number of one-vote and two-vote overstatements across the set
   * of counties participating in this audit.
   *
   * TODO collect the number of 1 and 2 vote overstatements across
   * participating counties.
   */
  public BigDecimal getOverstatements() {
    return this.overstatements; // FIXME
  }

  /** the number of ballots audited **/
  public Integer getAuditedSampleCount() {
    return this.my_audited_sample_count;
  }

  /**
   * A scaling factor for the estimate, from 1 (when no samples have
   * been audited) upward. The scaling factor grows as the ratio of
   * overstatements to samples increases.
   */
  private BigDecimal scalingFactor() {
    final BigDecimal auditedSamples = BigDecimal.valueOf(getAuditedSampleCount());
    if (auditedSamples.signum() == 0) {
      return BigDecimal.ONE;
    } else {
      return BigDecimal.ONE.add(getOverstatements()
                                .divide(auditedSamples, MathContext.DECIMAL128));
    }
  }

  /**
   * Recalculates the overall numbers of ballots to audit, setting this
   * object's `my_optimistic_samples_to_audit` and
   * `my_estimated_samples_to_audit` fields.
   */
  private void recalculateSamplesToAudit() {
    LOGGER.debug(String.format("[recalculateSamplesToAudit start contestName=%s, "
        + "twoUnder=%d, oneUnder=%d, oneOver=%d, twoOver=%d"
        + " optimistic=%d, estimated=%d]",
        contestResult().getContestName(),
        my_two_vote_under_count, my_one_vote_under_count,
        my_one_vote_over_count, my_two_vote_over_count,
        my_optimistic_samples_to_audit, my_estimated_samples_to_audit));

    if (my_optimistic_recalculate_needed) {
      LOGGER.debug("[recalculateSamplesToAudit: calling computeOptimisticSamplesToAudit]");
      final BigDecimal optimistic = computeOptimisticSamplesToAudit(my_two_vote_under_count,
                                                                    my_one_vote_under_count,
                                                                    my_one_vote_over_count,
                                                                    my_two_vote_over_count);
      my_optimistic_samples_to_audit = optimistic.intValue();
      my_optimistic_recalculate_needed = false;
    }

    if (my_one_vote_over_count + my_two_vote_over_count == 0) {
      LOGGER.debug("[recalculateSamplesToAudit: zero overcounts]");
      my_estimated_samples_to_audit = my_optimistic_samples_to_audit;
    } else {
      LOGGER.debug(String.format("[recalculateSamplesToAudit: non-zero overcounts, using scaling factor %s]",
                                 scalingFactor()));
      my_estimated_samples_to_audit =
        BigDecimal.valueOf(my_optimistic_samples_to_audit)
        .multiply(scalingFactor())
        .setScale(0, RoundingMode.CEILING)
        .intValue();
    }

    LOGGER.debug(String.format("[recalculateSamplesToAudit end contestName=%s, "
        + "twoUnder=%d, oneUnder=%d, oneOver=%d, twoOver=%d"
        + " optimistic=%d, estimated=%d]",
        contestResult().getContestName(),
        my_two_vote_under_count, my_one_vote_under_count,
        my_one_vote_over_count, my_two_vote_over_count,
        my_optimistic_samples_to_audit, my_estimated_samples_to_audit));
    my_estimated_recalculate_needed = false;
  }

  /**
   * Computes the expected number of ballots to audit overall given the
   * specified numbers of over- and understatements.
   *
   * @param twoUnder The two-vote understatements.
   * @param oneUnder The one-vote understatements.
   * @param oneOver The one-vote overstatements.
   * @param twoOver The two-vote overstatements.
   *
   * @return the expected number of ballots remaining to audit.
   * This is the stopping sample size as defined in the literature:
   * https://www.stat.berkeley.edu/~stark/Preprints/gentle12.pdf
   */
  private BigDecimal computeOptimisticSamplesToAudit(final int twoUnder,
                                                     final int oneUnder,
                                                     final int oneOver,
                                                     final int twoOver) {
    return Audit.optimistic(getRiskLimit(), getDilutedMargin(), getGamma(),
                            twoUnder, oneUnder, oneOver, twoOver);
  }

  /**
   * Signals that a sample has been audited. This ensures that estimates
   * are recalculated correctly and states are updated.
   *
   * @param count The count of samples that have been audited simultaneously
   * (for duplicates).
   */
  public void signalSampleAudited(final int count) {
    my_estimated_recalculate_needed = true;
    my_audited_sample_count = my_audited_sample_count + count;

    // this may not be needed, but I'm not sure
    if (my_audit_status == AuditStatus.RISK_LIMIT_ACHIEVED) {
      LOGGER.warn("RESETTING AuditStatus from RISK_LIMIT_ACHIEVED to IN_PROGRESS");
      my_audit_status = AuditStatus.IN_PROGRESS;
    }
  }

  /**
   * Signals that a sample has been audited, if the CVR was selected for
   * this audit and this audit is targeted (i.e., not for opportunistic
   * benefits.)
   *
   * @param count The count of samples that have been audited simultaneously
   * @param cvrID ID of the CVR being audited
   */
  public void signalSampleAudited(final int count, final Long cvrID) {
    final boolean covered = isCovering(cvrID);
    final boolean targeted = isTargeted();

    if (targeted && !covered) {
      LOGGER.debug
        (String.format("[signalSampleAudited: %s is targeted, but cvrID (%d) not selected for audit.]",
                       contestResult().getContestName(), cvrID));
    }

    if (targeted && covered) {
      LOGGER.debug
        (String.format
         ("[signalSampleAudited: targeted and covered! "
          + "contestName=%s, cvrID=%d, auditedSamples=%d, count=%d]",
          contestResult().getContestName(), cvrID, getAuditedSampleCount(), count));
      signalSampleAudited(count);
    }
  }

  /**
   * Signals that a sample has been unaudited. This ensures that estimates
   * are recalculated correctly and states are updated.
   *
   * @param count The count of samples that have been unaudited simultaneously
   * (for duplicates).
   */
  public void signalSampleUnaudited(final int count) {
    my_estimated_recalculate_needed = true;
    my_audited_sample_count = my_audited_sample_count - count;

    // this may not be needed, but I'm not sure
    if (my_audit_status == AuditStatus.RISK_LIMIT_ACHIEVED) {
      LOGGER.warn("RESETTING AuditStatus from RISK_LIMIT_ACHIEVED to IN_PROGRESS");
      my_audit_status = AuditStatus.IN_PROGRESS;
    }
  }

  /**
   * Signals that a sample has been unaudited, if the CVR was selected
   * for this audit.
   *
   * @param count The count of samples that have been unaudited simultaneously
   * (for duplicates).
   * @param cvrID The ID of the CVR to unaudit
   */
  public void signalSampleUnaudited(final int count, final Long cvrID) {
    LOGGER.debug
      (String.format
       ("[signalSampleUnaudited: start "
        + "contestName=%s, cvrID=%d, auditedSamples=%d, count=%d]",
        contestResult().getContestName(), cvrID, getAuditedSampleCount(), count));

    final boolean covered = isCovering(cvrID);
    final boolean targeted = isTargeted();

    if (targeted && !covered) {
      LOGGER.debug
        (String.format("[signalSampleUnaudited: Targeted contest, but cvrID (%d) not selected.]",
                       cvrID));
    }

    if (targeted && covered) {
      LOGGER.debug(String.format("[signalSampleUnaudited: CVR ID [%d] is interesting to %s]",
                                 cvrID, contestResult().getContestName()));
      signalSampleUnaudited(count);
    }
  }

  /**
   * Records a disagreement with the specified CVRAuditInfo.
   *
   * @param the_record The CVRAuditInfo record that generated the disagreement.
   */
  public void recordDisagreement(final CVRAuditInfo the_record) {
    my_disagreements.add(the_record);
    my_disagreement_count = my_disagreement_count + 1;
  }

  /**
   * Removes a disagreement with the specified CVRAuditInfo.
   *
   * @param the_record The CVRAuditInfo record that generated the disagreement.
   */
  public void removeDisagreement(final CVRAuditInfo the_record) {
    my_disagreements.remove(the_record);
    my_disagreement_count = my_disagreement_count - 1;
  }

  /**
   * @return the disagreement count.
   */
  public int disagreementCount() {
    return my_disagreement_count;
  }

  /** was the given cvrId selected for this contest? **/
  public boolean isCovering(final Long cvrId) {
    return getContestCVRIds().contains(cvrId);
  }

  /**
   * Adds to the current collection of Contest CVR IDs
   * @param contestCVRIds a list of CVR IDs to add
   */
  public void addContestCVRIds(final List<Long> contestCVRIds) {
    this.contestCVRIds.addAll(contestCVRIds);
  }

  /**
   * @return the list of CVR IDs selected for this contest.
   */
  public List<Long> getContestCVRIds() {
    return this.contestCVRIds;
  }

  /**
   * Is this audit because of a targeted contest?
   */
  public boolean isTargeted() {
    return this.contestResult().getAuditReason().isTargeted()
      && !isHandCount();
  }

  /**
   * Is an audit finished, or should we find more samples to compare?
   */
  public boolean isFinished() {
    return this.auditStatus().equals(AuditStatus.NOT_AUDITABLE)
      || this.auditStatus().equals(AuditStatus.RISK_LIMIT_ACHIEVED)
      || this.auditStatus().equals(AuditStatus.HAND_COUNT)
      || this.auditStatus().equals(AuditStatus.ENDED);
  }

  public boolean isHandCount() {
    return this.auditStatus().equals(AuditStatus.HAND_COUNT);
  }

  /** calculate the number of times the given cvrId appears in the selection
   * (across all rounds) **/
  public int multiplicity(final Long cvrId) {
    return Collections.frequency(getContestCVRIds(), cvrId);
  }

  /**
   * Records the specified discrepancy. If the discrepancy is for this Contest
   * but from a CVR/ballot that was not selected for this Contest (selected for
   * another Contest), it does not contribute to the counts and calculations. It
   * is still recorded, though, for informational purposes. The valid range is
   * -2 .. 2: -2 and -1 are understatements, 0 is a discrepancy that doesn't
   * affect the RLA calculations, and 1 and 2 are overstatements.
   *
   * @param the_record The CVRAuditInfo record that generated the discrepancy.
   * @param the_type The type of discrepancy to add.
   * @exception IllegalArgumentException if an invalid discrepancy type is
   * specified.
   */
  @SuppressWarnings("checkstyle:magicnumber")
  public void recordDiscrepancy(final CVRAuditInfo the_record,
                                final int the_type) {
    // we never trigger an estimated recalculate here; it is
    // triggered by signalSampleAudited() regardless of whether there is
    // a discrepancy or not
    if (isCovering(the_record.cvr().id())) {
      switch (the_type) {
        case -2:
          my_two_vote_under_count = my_two_vote_under_count + 1;
          my_optimistic_recalculate_needed = true;
          break;

        case -1:
          my_one_vote_under_count = my_one_vote_under_count + 1;
          my_optimistic_recalculate_needed = true;
          break;

        case 0:
          my_other_count = my_other_count + 1;
          // no optimistic recalculate needed
          break;

        case 1:
          my_one_vote_over_count = my_one_vote_over_count + 1;
          my_optimistic_recalculate_needed = true;
          break;

        case 2:
          my_two_vote_over_count = my_two_vote_over_count + 1;
          my_optimistic_recalculate_needed = true;
          break;

        default:
          throw new IllegalArgumentException("invalid discrepancy type: " + the_type);
      }
    }
    LOGGER.info(String.format("[recordDiscrepancy type=%s, record=%s]",
                              the_type, the_record));
    my_discrepancies.put(the_record, the_type);
  }

  /**
   * get the discrepancy value that was recorded for this
   * ComparisonAudit (contest) on the given CVRAuditInfo (ballot). used for
   * reporting.
   **/
  public Integer getDiscrepancy(final CVRAuditInfo cai) {
    return my_discrepancies.get(cai);
  }

  /**
   * Removes the specified over/understatement (the valid range is -2 .. 2:
   * -2 and -1 are understatements, 0 is a discrepancy that doesn't affect the
   * RLA calculations, and 1 and 2 are overstatements). This is typically done
   * when a new interpretation is submitted for a ballot that had already been
   * interpreted.
   *
   * @param the_record The CVRAuditInfo record that generated the discrepancy.
   * @param the_type The type of discrepancy to remove.
   * @exception IllegalArgumentException if an invalid discrepancy type is
   * specified.
   */
  @SuppressWarnings("checkstyle:magicnumber")
  public void removeDiscrepancy(final CVRAuditInfo the_record, final int the_type) {
    // we never trigger an estimated recalculate here; it is
    // triggered by signalSampleAudited() regardless of whether there is
    // a discrepancy or not
    switch (the_type) {
      case -2:
        my_two_vote_under_count = my_two_vote_under_count - 1;
        my_optimistic_recalculate_needed = true;
        break;

      case -1:
        my_one_vote_under_count = my_one_vote_under_count - 1;
        my_optimistic_recalculate_needed = true;
        break;

      case 0:
        my_other_count = my_other_count - 1;
        // no recalculate needed
        break;

      case 1:
        my_one_vote_over_count = my_one_vote_over_count - 1;
        my_optimistic_recalculate_needed = true;
        break;

      case 2:
        my_two_vote_over_count = my_two_vote_over_count - 1;
        my_optimistic_recalculate_needed = true;
        break;

      default:
        throw new IllegalArgumentException("invalid discrepancy type: " + the_type);
    }
    my_discrepancies.remove(the_record);
  }

  /**
   * Returns the count of the specified type of discrepancy. -2 and -1 represent
   * understatements, 0 represents a discrepancy that doesn't affect the RLA
   * calculations, and 1 and 2 represent overstatements.
   *
   * @param the_type The type of discrepancy.
   * @return the count of discrepancies of the given type.
   * @exception IllegalArgumentException if an invalid discrepancy type is
   * specified.
   */
  @SuppressWarnings("checkstyle:magicnumber")
  public int discrepancyCount(final int the_type) {
    final int result;

    switch (the_type) {
      case -2:
        result = my_two_vote_under_count;
        break;

      case -1:
        result = my_one_vote_under_count;
        break;

      case 0:
        result = my_other_count;
        break;

      case 1:
        result = my_one_vote_over_count;
        break;

      case 2:
        result = my_two_vote_over_count;
        break;

      default:
        throw new IllegalArgumentException("invalid discrepancy type: " + the_type);
    }
    return result;
  }

  /**
   * Computes the over/understatement represented by the CVR/ACVR pair stored in
   * the specified CVRAuditInfo. This method returns an optional int that, if
   * present, indicates a discrepancy. There are 5 possible types of
   * discrepancy: -1 and -2 indicate 1- and 2-vote understatements; 1 and 2
   * indicate 1- and 2-vote overstatements; and 0 indicates a discrepancy that
   * does not count as either an under- or overstatement for the RLA algorithm,
   * but nonetheless indicates a difference between ballot interpretations.
   *
   * @param the_info The CVRAuditInfo.
   * @return an optional int that is present if there is a discrepancy and absent
   * otherwise.
   */
  public OptionalInt computeDiscrepancy(final CVRAuditInfo the_info) {
    if (the_info.acvr() == null || the_info.cvr() == null) {
      throw new IllegalArgumentException("null CVR or ACVR in pair " + the_info);
    } else {
      return computeDiscrepancy(the_info.cvr(), the_info.acvr());
    }
  }

  /**
   * Computes the over/understatement represented by the specified CVR and ACVR.
   * This method returns an optional int that, if present, indicates a discrepancy.
   * There are 5 possible types of discrepancy: -1 and -2 indicate 1- and 2-vote
   * understatements; 1 and 2 indicate 1- and 2-vote overstatements; and 0
   * indicates a discrepancy that does not count as either an under- or
   * overstatement for the RLA algorithm, but nonetheless indicates a difference
   * between ballot interpretations.
   *
   * @param cvr The CVR that the machine saw
   * @param auditedCVR The ACVR that the human audit board saw
   * @return an optional int that is present if there is a discrepancy and absent
   * otherwise.
   */
  @SuppressWarnings("checkstyle:magicnumber")
  // FIXME Should we point to the ContestResult instead?
  public OptionalInt computeDiscrepancy(final CastVoteRecord cvr,
                                        final CastVoteRecord auditedCVR) {
    OptionalInt result = OptionalInt.empty();

    // FIXME this needs to get this stuff from the ContestResult
    // - a CastVoteRecord belongs to a county.
    // - a CVRContestInfo belongs to a Contest, which belongs to a county.
    // - should we change the CVRContestInfo to belong to a ContestResult instead?
    //
    // The CVRContestInfo has the list of choices. We need this for the
    // winners and losers of the contest... but the ContestResult also
    // has a set of winners and losers, which is now the most accurate
    // version of this, since we're now out of the county context...
    final Optional<CVRContestInfo> cvr_info =
      cvr.contestInfoForContestResult(my_contest_result);
    final Optional<CVRContestInfo> acvr_info =
      auditedCVR.contestInfoForContestResult(my_contest_result);

    if (auditedCVR.recordType() == RecordType.PHANTOM_BALLOT) {
      if (cvr_info.isPresent()) {
        result = OptionalInt.of(computePhantomBallotDiscrepancy(cvr_info.get(),
                                                                my_contest_result));
      } else {
        // this mirrors computePhantomBallotDiscrepancy, which returns 1 when
        // winner_votes is empty - as it effectively is when no CVR info is present
        result = OptionalInt.of(1);
      }
    } else if (cvr.recordType() == RecordType.PHANTOM_RECORD) {
      // similar to the phantom ballot, we use the worst case scenario, a 2-vote
      // overstatement, except here, we don't have a CVR to check anything on.
      result = OptionalInt.of(2);
    } else if (cvr_info.isPresent() && acvr_info.isPresent()) {
      if (acvr_info.get().consensus() == ConsensusValue.NO) {
        // a lack of consensus for this contest is treated
        // identically to a phantom ballot
        result = OptionalInt.of(computePhantomBallotDiscrepancy(cvr_info.get(),
                                                                my_contest_result));
      } else {
        result = computeAuditedBallotDiscrepancy(cvr_info.get(), acvr_info.get());
      }
    }

    return result;
  }

  /**
   * Computes the discrepancy between two ballots. This method returns an optional
   * int that, if present, indicates a discrepancy. There are 5 possible types of
   * discrepancy: -1 and -2 indicate 1- and 2-vote understatements; 1 and 2 indicate
   * 1- and 2-vote overstatements; and 0 indicates a discrepancy that does not
   * count as either an under- or overstatement for the RLA algorithm, but
   * nonetheless indicates a difference between ballot interpretations.
   *
   * @param the_cvr_info The CVR info.
   * @param the_acvr_info The ACVR info.
   * @return an optional int that is present if there is a discrepancy and absent
   * otherwise.
   */
  @SuppressWarnings({"PMD.ModifiedCyclomaticComplexity", "PMD.StdCyclomaticComplexity",
      "PMD.NPathComplexity", "PMD.ExcessiveMethodLength", "checkstyle:methodlength"})
  private OptionalInt computeAuditedBallotDiscrepancy(final CVRContestInfo the_cvr_info,
                                                      final CVRContestInfo the_acvr_info) {
    // Check for overvotes.
    //
    // Overvotes are represented, perhaps confusingly, in the CVR as "all
    // zeroes" for the given contest - it will look indistinguishable from a
    // contest in which no selections were made. We therefore have to check if
    // the number of selections the audit board found is less than or equal to
    // the allowed votes for the given contest. If it is, then the audit board
    // found a valid selection and we can proceed with the rest of the math as
    // usual. If not, then the audit board recorded an overvote which we must
    // now make match the way the CVR format records overvotes: we must record
    // *no* selections. The code below does that by excluding the selections
    // submitted by the audit board.
// // If the CVR does show an overvote (no selections counted) then our // zero-selection ACVR will match it and we will find no discrepancies. If, // however, the CVR *did* show a selection but the audit board recorded an // overvote, then we will be able to calculate the discrepancy - the CVR // will have a choice (or choices) marked as selected, but the ACVR will // not. The converse is also true: if the CVR shows an overvote but the // audit board records a valid selection, we will calculate an expected // discrepancy. final Set<String> acvr_choices = new HashSet<>(); if (the_acvr_info.choices().size() <= my_contest_result.winnersAllowed()) { acvr_choices.addAll(the_acvr_info.choices()); } // avoid linear searches on CVR choices final Set<String> cvr_choices = new HashSet<>(the_cvr_info.choices()); // if the choices in the CVR and ACVR are identical now, we can simply return the // fact that there's no discrepancy if (cvr_choices.equals(acvr_choices)) { return OptionalInt.empty(); } // we want to get the maximum pairwise update delta, because that's the "worst" // change in a pairwise margin, and the discrepancy we record; we start with // Integer.MIN_VALUE so our maximization algorithm works. it is also the case // that _every_ pairwise margin must be increased for an understatement to be // reported int raw_result = Integer.MIN_VALUE; boolean possible_understatement = true; // FIXME my_contest_result is global to this object. I'd rather it // be an argument to this function. for (final String winner : my_contest_result.getWinners()) { final int winner_change; if (!cvr_choices.contains(winner) && acvr_choices.contains(winner)) { // this winner gained a vote winner_change = 1; } else if (cvr_choices.contains(winner) && !acvr_choices.contains(winner)) { // this winner lost a vote winner_change = -1; } else { // this winner's votes didn't change winner_change = 0; } if (my_contest_result.getLosers().isEmpty()) { // if there are no losers, we'll just negate this number - even though in // real life, we wouldn't be auditing the contest at all raw_result = Math.max(raw_result, -winner_change); } else { for (final String loser : my_contest_result.getLosers()) { final int loser_change; if (!cvr_choices.contains(loser) && acvr_choices.contains(loser)) { // this loser gained a vote loser_change = 1; } else if (cvr_choices.contains(loser) && !acvr_choices.contains(loser)) { // this loser lost a vote loser_change = -1; } else { // this loser's votes didn't change loser_change = 0; } // the discrepancy is the loser change minus the winner change (i.e., if this // loser lost a vote (-1) and this winner gained a vote (1), that's a 2-vote // understatement (-1 - 1 = -2). 
Overstatements are worse than understatements,
          // as far as the audit is concerned, so we keep the highest discrepancy
          final int discrepancy = loser_change - winner_change;

          // taking the max here does not cause a loss of information even if the
          // discrepancy is 0; if the discrepancy is 0 we can no longer report an
          // understatement, and we still know there was a discrepancy because we
          // didn't short circuit earlier
          raw_result = Math.max(raw_result, discrepancy);

          // if this discrepancy indicates a narrowing of, or no change in, this pairwise
          // margin, then an understatement is no longer possible because that would require
          // widening _every_ pairwise margin
          if (discrepancy >= 0) {
            possible_understatement = false;
          }
        }
      }
    }

    if (raw_result == Integer.MIN_VALUE) {
      // this should only be possible if something went horribly wrong (like the contest
      // has no winners)
      throw new IllegalStateException("unable to compute discrepancy in contest " +
                                      contestResult().getContestName());
    }

    final OptionalInt result;

    if (possible_understatement) {
      // we return the raw result unmodified
      result = OptionalInt.of(raw_result);
    } else {
      // we return the raw result with a floor of 0, because we can't report an
      // understatement
      result = OptionalInt.of(Math.max(0, raw_result));
    }

    return result;
  }

  /**
   * Computes the discrepancy between a phantom ballot and the specified
   * CVRContestInfo.
   * @return the discrepancy value: 2, or 1 if no contest winner had votes
   * on the original CVR.
   */
  private Integer computePhantomBallotDiscrepancy(final CVRContestInfo cvrInfo,
                                                  final ContestResult contestResult) {
    int result = 2;
    // the second predicate means "no contest winners had votes on the
    // original CVR"
    final Set<String> winner_votes = new HashSet<>(cvrInfo.choices());
    winner_votes.removeAll(contestResult.getLosers());
    if (winner_votes.isEmpty()) {
      result = 1;
    }
    return result;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public String toString() {
    return String.format("[ComparisonAudit for %s: counties=%s, auditedSampleCount=%d, overstatements=%f,"
                         + " contestResult.contestCvrIds=%s, status=%s, reason=%s]",
                         this.contestResult().getContestName(),
                         this.contestResult().getCounties(),
                         this.getAuditedSampleCount(),
                         this.getOverstatements(),
                         this.getContestCVRIds(),
                         my_audit_status,
                         this.auditReason());
  }
}
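The stopping-size computation above delegates to Audit.optimistic, which is not shown here. For orientation, here is a minimal sketch of the Kaplan-Markov optimistic stopping size from the Stark paper linked in the javadoc. It illustrates the formula only and is not the project's Audit class; the class name, method name, and the use of double instead of BigDecimal are assumptions made for brevity. With a 10% risk limit, a 5% diluted margin, gamma = 1.03905, and no discrepancies it yields 96 ballots.

public final class KaplanMarkovSketch {

    /**
     * Sketch (not Audit.optimistic) of the Kaplan-Markov stopping size:
     * n = max(o1 + o2 + u1 + u2,
     *         ceil((-2 * gamma / mu) * (ln(alpha)
     *              + o1 * ln(1 - 1/(2*gamma)) + o2 * ln(1 - 1/gamma)
     *              + u1 * ln(1 + 1/(2*gamma)) + u2 * ln(1 + 1/gamma))))
     */
    public static int optimistic(final double alpha, final double mu, final double gamma,
                                 final int twoUnder, final int oneUnder,
                                 final int oneOver, final int twoOver) {
        final double n = (-2.0 * gamma / mu)
            * (Math.log(alpha)
               + oneOver  * Math.log(1.0 - 1.0 / (2.0 * gamma))
               + twoOver  * Math.log(1.0 - 1.0 / gamma)
               + oneUnder * Math.log(1.0 + 1.0 / (2.0 * gamma))
               + twoUnder * Math.log(1.0 + 1.0 / gamma));
        // never fewer ballots than the number of discrepancies already seen
        return Math.max(twoUnder + oneUnder + oneOver + twoOver,
                        (int) Math.ceil(n));
    }

    public static void main(final String[] args) {
        // prints 96 - a familiar initial RLA sample size for these parameters
        System.out.println(optimistic(0.10, 0.05, 1.03905, 0, 0, 0, 0));
    }
}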
from __future__ import print_function
import argparse
import copy
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.autograd import Variable
import os
import time
import numpy as np

start_time = time.time()


class Net(nn.Module):
    def __init__(self, dropout=0.0):
        super(Net, self).__init__()
        self.fc1 = nn.Linear(28 * 28, 1200)
        self.fc2 = nn.Linear(1200, 1200)
        self.fc3 = nn.Linear(1200, 10)
        self.dropout = dropout
        print("Dropout rate : ", self.dropout)

    def forward(self, x):
        x = x.view(-1, 28 * 28)
        x = F.relu(self.fc1(x))
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = F.relu(self.fc2(x))
        x = F.dropout(x, p=self.dropout, training=self.training)
        x = self.fc3(x)
        return x


def cross_entropy_soft(predicted, target):
    """
    Cross entropy loss for soft labels.
    :param predicted: softmax probabilities, shape (N, K)
    :param target: soft target distribution, shape (N, K)
    :return: mean cross entropy over the batch
    """
    return -(target * torch.log(predicted)).sum(dim=1).mean()


def combined_loss(output, target, soft_target, alpha):
    """
    Returns a combined loss which has two parts: a soft cross entropy loss
    based on the previous iteration's model output, and a cross entropy loss
    based on the ground truth. The weighting is achieved via the
    hyperparameter alpha.
    :param output: predicted logits, shape (N, K)
    :param target: ground truth labels, shape (N,)
    :param soft_target: teacher logits, shape (N, K)
    :param alpha: weight of the soft loss
    :return: weighted sum of the soft and hard losses
    """
    hard_loss = F.cross_entropy(output, target)
    output = F.softmax(output, dim=1)
    soft_target = F.softmax(soft_target, dim=1)
    soft_loss = cross_entropy_soft(output, soft_target)
    return alpha * soft_loss + (1.0 - alpha) * hard_loss


def distillation_loss(target, output, teacher, data, device, alpha):
    """
    :param target: ground truth, shape (N, 1)
    :param output: predicted logits, shape (N, K)
    :param teacher: model from previous iteration
    :param data: data corresponding to ground truth, shape (N, D)
    :param device:
    :param alpha: hyperparameter for loss weighting
    :return:
    """
    if teacher is None:
        loss = F.cross_entropy(output, target)
        #print("Hard label loss ", loss.item())
    else:
        teacher.eval()
        teacher.to(device)
        #Loss 1 - KLDivLoss()
        #criterion = nn.KLDivLoss() # use Kullback-Leibler divergence loss
        #output = F.log_softmax(output, dim=1)
        with torch.no_grad():
            soft_target = teacher(data)
        loss = combined_loss(output, target, soft_target, alpha)
    return loss


def self_distillation_train(model, train_loader, device, optimizer, epochs, teacher,
                            test_loader, log_every, alpha, step):
    """
    Self-distillation training loop which uses output from the previous
    distillation iteration as ground truth for the current distillation round.
    :param model:
    :param train_loader:
    :param device:
    :param optimizer:
    :param epochs:
    :param teacher:
    :param test_loader:
    :param log_every:
    :param alpha:
    :param step:
    :return:
    """
    model.train()
    mx_test_acc = 0.0
    for epoch in range(epochs):
        epoch_loss = []
        print("\nBeginning of Epoch : {}".format(epoch))
        for batch_idx, (data, target) in enumerate(train_loader):
            data, target = data.to(device), target.to(device)
            optimizer.zero_grad()
            output = model(data)
            loss = distillation_loss(target, output, teacher, data, device, alpha)
            loss.backward()
            #print("Loss ", loss.item())
            optimizer.step()
            #print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
            #    epoch, batch_idx * len(data), len(train_loader.dataset),
            #    100. * batch_idx / len(train_loader), loss.item()))
            epoch_loss.append(loss.item())

        if epoch % log_every == 0:
            #test_acc, test_loss = test_batch(model, data, target, teacher)
            test_loss, test_acc = test(model, test_loader, device, distilled=True)
            if test_acc > mx_test_acc:
                mx_test_acc = test_acc
                torch.save(model.state_dict(),
                           'distilled_models/distilled' + str(step) + "-" + str(epoch) +
                           "_" + str(mx_test_acc) + '.pth.tar')
            #print("epoch {} test accuracy {} and loss {:06f} (for 1 batch)".format(epoch, test_acc, test_loss))

        print('Epoch: {}, Loss : {:.6f}'.format(epoch, np.sum(epoch_loss) / len(epoch_loss)))
    return model


def train_evaluate(model, train_loader, device):
    model.eval()
    train_loss = 0
    correct = 0
    with torch.no_grad():
        for data, target in train_loader:
            data, target = data.to(device), target.to(device)
            #data, target = Variable(data, volatile=True), Variable(target)
            output = model(data)
            train_loss += F.cross_entropy(output, target).item()  # sum up batch losses
            pred = output.data.max(1, keepdim=True)[1]  # index of the max log-probability
            correct += pred.eq(target.data.view_as(pred)).cpu().sum()

    train_loss /= len(train_loader)  # average over batches, since each term is a batch mean
    print('\nTrain set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
        train_loss, correct, len(train_loader.dataset),
        100. * correct / len(train_loader.dataset)))


def test(model, test_loader, device, distilled):
    """
    Evaluates the model on the test dataset.
    :param model:
    :param test_loader:
    :param device:
    :param distilled:
    :return: test_loss, test_acc
    """
    model.eval()
    test_loss = 0
    correct = 0
    with torch.no_grad():
        for data, target in test_loader:
            data, target = data.to(device), target.to(device)
            #data, target = Variable(data, volatile=True), Variable(target)
            output = model(data)
            test_loss += F.cross_entropy(output, target).item()  # sum up batch loss
            pred = output.data.max(1, keepdim=True)[1]  # get the index of the max log-probability
            correct += pred.eq(target.data.view_as(pred)).cpu().sum()

    test_loss /= len(test_loader.dataset)
    test_acc = 100. * correct / len(test_loader.dataset)
    if not distilled:
        print('Test set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)'.format(
            test_loss, correct, len(test_loader.dataset),
            100. * correct / len(test_loader.dataset)))
    else:
        print('Test set loss for distilled model: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)'.format(
            test_loss, correct, len(test_loader.dataset),
            100. * correct / len(test_loader.dataset)))
    return test_loss, test_acc


def get_optimizer(optimizer_name, model, lr, momentum):
    """
    Returns an optimizer based on the input.
    :param optimizer_name: "adam" or "sgd" (case-insensitive)
    :param model: model whose parameters will be optimized
    :param lr: learning rate
    :param momentum: momentum (used by SGD only)
    :return: the configured optimizer
    """
    if optimizer_name.lower() == "adam":
        optimizer = torch.optim.Adam(model.parameters(), lr=lr)
    elif optimizer_name.lower() == "sgd":
        optimizer = torch.optim.SGD(model.parameters(), lr=lr, momentum=momentum,
                                    weight_decay=5e-4)
    else:
        raise ValueError("unknown optimizer: " + optimizer_name)
    return optimizer


def get_arguments():
    # Training settings
    parser = argparse.ArgumentParser(description='PyTorch MNIST Example')
    parser.add_argument('-batch-size', type=int, default=32, metavar='N',
                        help='train batch size')
    parser.add_argument('-test-batch-size', type=int, default=32, metavar='N',
                        help='test batch size')
    parser.add_argument('-epochs', type=int, default=20, metavar='N',
                        help='number of epochs to train')
    parser.add_argument('-lr', type=float, default=0.01, metavar='LR',
                        help='learning rate')
    parser.add_argument('-optimizer', type=str, default='sgd', metavar='O',
                        help='optimizer name')
    parser.add_argument('-alpha', type=float, default=1.0, metavar='O',
                        help='Hyperparameter to weight soft and hard loss')
    parser.add_argument('-dropout', type=float, default=0.0, metavar='LR',
                        help='Dropout rate')
    parser.add_argument('-momentum', type=float, default=0.9, metavar='M',
                        help='SGD momentum')
    parser.add_argument('-distill_iter', type=int, default=15, metavar='M',
                        help='Number of self distillation iterations to perform.')
    parser.add_argument('-log_every', type=int, default=10, metavar='N',
                        help='how many batches to wait before logging training status')
    args = parser.parse_args()
    return args


def main(args):
    #model.load_state_dict(torch.load('teacher_MLP_test.pth.tar'))
    #optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum, weight_decay=5e-4)
    #optimizer = get_optimizer(args.optimizer, model, args.lr, args.momentum)
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

    kwargs = {'num_workers': 1, 'pin_memory': False}
    train_loader = torch.utils.data.DataLoader(
        datasets.FashionMNIST('~/.pytorch/F_MNIST_data/', train=True, download=True,
                              transform=transforms.Compose([
                                  transforms.ToTensor(),
                                  transforms.Normalize((0.5,), (0.5,))
                              ])),
        batch_size=args.batch_size, shuffle=True, **kwargs)

    test_loader = torch.utils.data.DataLoader(
        datasets.FashionMNIST('~/.pytorch/F_MNIST_data/', train=False,
                              transform=transforms.Compose([
                                  transforms.ToTensor(),
                                  transforms.Normalize((0.5,), (0.5,))
                              ])),
        batch_size=args.test_batch_size, shuffle=True, **kwargs)

    #test(model, test_loader, device, distilled=False)
    teacher = None
    test_losses = []  # kept as two separate lists; `a = b = []` would alias a single list
    test_accs = []
    model = Net(args.dropout)
    model.to(device)
    for step in range(args.distill_iter):
        print("\n********** Begin Self Distillation Step - {} *************\n".format(step))
        optimizer = optim.SGD(model.parameters(), lr=args.lr, momentum=args.momentum,
                              weight_decay=5e-4)
        model = self_distillation_train(model, train_loader, device, optimizer, args.epochs,
                                        teacher, test_loader, args.log_every, args.alpha, step)
        teacher = Net(args.dropout)
        teacher.load_state_dict(model.state_dict())
        #torch.save(model.state_dict(), 'distilled_models/distilled' + str(step) + '.pth.tar')
    torch.save(model.state_dict(), 'distilled.pth.tar')
    print("--- %s seconds ---" % (time.time() - start_time))


if __name__ == '__main__':
    args = get_arguments()
    main(args)
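As a quick check on the loss definitions above: with alpha = 0 the combined loss must reduce to the plain hard-label cross entropy. A minimal sanity check, assuming the functions defined in this script are in scope:

# alpha = 0 disables the soft (teacher) term, so combined_loss must agree
# with F.cross_entropy on the same logits and labels
logits = torch.randn(4, 10)
labels = torch.randint(0, 10, (4,))
teacher_logits = torch.randn(4, 10)
assert torch.allclose(combined_loss(logits, labels, teacher_logits, alpha=0.0),
                      F.cross_entropy(logits, labels))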
/** * A GuacamoleProperty whose value is a byte array. The bytes of the byte array * must be represented as a hexadecimal string within the property value. The * hexadecimal string is case-insensitive. * * @author Michael Jumper */ public abstract class ByteArrayProperty implements GuacamoleProperty<byte[]> { @Override public byte[] parseValue(String value) throws GuacamoleException { // If no property provided, return null. if (value == null) return null; // Return value parsed from hex try { return DatatypeConverter.parseHexBinary(value); } // Fail parse if hex invalid catch (IllegalArgumentException e) { throw new GuacamoleServerException("Invalid hexadecimal value.", e); } } }
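Because ByteArrayProperty is abstract, callers subclass it to supply the property name. A hypothetical usage sketch follows; the property name "secret-key" is an assumption for illustration, not something defined by the library:

// A hypothetical concrete property; only getName() needs to be supplied.
private static final ByteArrayProperty SECRET_KEY = new ByteArrayProperty() {

    @Override
    public String getName() {
        return "secret-key"; // assumed property name, for illustration only
    }

};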
def convert_to_float64(self, channel=None):
    """Cast the stored data arrays to float64.

    :param channel: index of a single channel to convert; when None,
        every entry of ``data_list`` is converted.
    """
    if hasattr(self, "data_list"):
        if channel is None:
            for i, _data in enumerate(self.data_list):
                self.data_list[i] = _data.astype("float64")
        else:
            _data = self.data_list[channel]
            self.data_list[channel] = _data.astype("float64")
/**
 * The Magnum class.
 * @author Adriano Pereira Rezende
 */
public class Magnum extends Gun {

	// Constants
	private static final int MAX_BULLETS = 6;
	private static final int MAX_DAMAGE = 2;

	// Sound Constants
	private static final String SHOOT_ID = "shootSE";
	private static final String RELOAD_ID = "reloadSE";
	private static final String EMPTY_GUN_ID = "emptyGunSE";

	// Timer attributes
	private Timer timer1 = new Timer(false);
	private Timer timer2 = new Timer(false);

	// Images
	private Bitmap frame1;
	private Bitmap frame2;
	private Bitmap frame3;
	private Bitmap frame4;
	private Bitmap frame5;
	private Bitmap frame6;
	private Bitmap frame7;
	private Bitmap frame8;
	private Bitmap frame9;
	private Bitmap frame10;
	private Bitmap frame11;
	private Bitmap frame12;
	private Bitmap frame13;
	private Bitmap frame14;
	private Bitmap frame15;
	private Bitmap frame16;
	private Bitmap frame17;
	private Bitmap frame18;
	private Bitmap frame19;

	// Default Constructor
	public Magnum(Resources res) {
		super(MAX_BULLETS, MAX_DAMAGE);

		// Initial load of every frame
		frame1 = BitmapFactory.decodeResource(res, R.drawable.cylinder10);
		frame2 = BitmapFactory.decodeResource(res, R.drawable.cylinder11);
		frame3 = BitmapFactory.decodeResource(res, R.drawable.cylinder12);
		frame4 = BitmapFactory.decodeResource(res, R.drawable.cylinder20);
		frame5 = BitmapFactory.decodeResource(res, R.drawable.cylinder21);
		frame6 = BitmapFactory.decodeResource(res, R.drawable.cylinder22);
		frame7 = BitmapFactory.decodeResource(res, R.drawable.cylinder30);
		frame8 = BitmapFactory.decodeResource(res, R.drawable.cylinder31);
		frame9 = BitmapFactory.decodeResource(res, R.drawable.cylinder32);
		frame10 = BitmapFactory.decodeResource(res, R.drawable.cylinder40);
		frame11 = BitmapFactory.decodeResource(res, R.drawable.cylinder41);
		frame12 = BitmapFactory.decodeResource(res, R.drawable.cylinder42);
		frame13 = BitmapFactory.decodeResource(res, R.drawable.cylinder50);
		frame14 = BitmapFactory.decodeResource(res, R.drawable.cylinder51);
		frame15 = BitmapFactory.decodeResource(res, R.drawable.cylinder52);
		frame16 = BitmapFactory.decodeResource(res, R.drawable.cylinder60);
		frame17 = BitmapFactory.decodeResource(res, R.drawable.cylinder61);
		// Empty Animation Initial Frame
		frame18 = BitmapFactory.decodeResource(res, R.drawable.cylinder70);
		frame19 = BitmapFactory.decodeResource(res, R.drawable.cylinder71);

		setFrameVector(new Bitmap[]{frame1, frame2, frame3, frame4, frame5, frame6, frame7,
			frame8, frame9, frame10, frame11, frame12, frame13, frame14, frame15, frame16,
			frame17, frame18, frame19});
	}

	/*
	 * (non-Javadoc)
	 * @see com.adrianoprezende.zombies.core.Gun#draw(android.graphics.Canvas)
	 */
	@Override
	public void draw(Canvas canvas) {
		if(this.isActive()) {
			if(getCurrentFrame() >= getFrameVector().length) {
				setCurrentFrame(getFrameVector().length - 1);
			}
			super.draw(canvas);
		}
	}

	/*
	 * (non-Javadoc)
	 * @see com.adrianoprezende.zombies.core.Gun#animate()
	 */
	public void animate() {
		// do nothing
	}

	/*
	 * (non-Javadoc)
	 * @see com.adrianoprezende.zombies.core.Gun#screenTouched()
	 */
	public void screenTouched() {
		if(isTouched()) {
			// Touched the cylinder, so reload the gun
			reload();
		} else {
			// Fired, so proceed with the flow to animate the cylinder
			if(getCurrentFrame() < getFrameVector().length) {
				incrementCurrentFrame();
			} else if(getCurrentFrame() >= getFrameVector().length) {
				setCurrentFrame(getFrameVector().length - 3);
			}

			timer1.schedule(new CylinderRoutine(), 100);
			timer2.schedule(new CylinderRoutine(), 200);

			if(getBullets() > 0) {
				SoundManager.playSound(SHOOT_ID);
				decrementBullets();
			} else {
SoundManager.playSound(EMPTY_GUN_ID); } } } /* * (non-Javadoc) * @see com.adrianoprezende.zombies.core.InteractiveSpriteObject#handleActionDown(int, int) */ @Override public synchronized void handleActionDown(int eventX, int eventY) { if(this.isActive()) { super.handleActionDown(eventX, eventY); screenTouched(); } } /* * (non-Javadoc) * @see com.adrianoprezende.zombies.core.Gun#reload() */ public void reload() { SoundManager.playSound(RELOAD_ID); setBullets(MAX_BULLETS); setCurrentFrame(0); setTouched(false); } /** * Inner Class - Routine to Cylinder spin * @author adrianopr * */ class CylinderRoutine extends TimerTask { @Override public void run() { incrementCurrentFrame(); } } }
<filename>src/app/app.component.ts
import { Component, OnInit } from '@angular/core';
import { v4 as uuidv4 } from 'uuid';

class Todo {
  id: string;
  description: string;
  isDone: boolean;
}

@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.scss'],
})
export class AppComponent implements OnInit {
  title = 'TODO list with Angular';

  public todos: Todo[] = [];
  public description: string;

  ngOnInit(): void {
    this.loadData();
  }

  loadData(): void {
    const data = JSON.parse(localStorage.getItem('todos'));
    if (data) {
      this.todos = data;
    }
  }

  saveData(): void {
    localStorage.setItem('todos', JSON.stringify(this.todos));
  }

  addNewTodo(): void {
    if (this.description) {
      this.todos.push({
        id: uuidv4(),
        description: this.description,
        isDone: false,
      });
      this.description = null;
      this.saveData();
    }
  }

  doneTodo(id: string): void {
    this.todos = this.todos.map((todo) => {
      if (todo.id === id) {
        return {
          ...todo,
          isDone: !todo.isDone,
        };
      }
      return todo;
    });
    this.saveData();
  }

  removeTodo(id: string): void {
    this.todos = this.todos.filter((todo) => todo.id !== id);
    this.saveData();
  }
}
def timit2ipa(x):
    """Map a TIMIT phone label to IPA, falling back to the ARPAbet table
    when the label is not TIMIT-specific."""
    x = x.upper()
    if x in _timit2ipa:
        return _timit2ipa[x]
    else:
        return _arpabet2ipa[x]
use std::collections::HashMap;

use crate::stdx::IterExt;

const INPUT: &'static str = include_str!("../inputs/14.txt");

pub fn run() {
    println!("day 14, output 1: {}", parse1(INPUT));
    println!("day 14, output 2: {}", parse2(INPUT));
}

// Part 1: materialize the polymer and rewrite it for 10 steps. The string
// roughly doubles every step, so this approach cannot scale to part 2.
pub fn parse1(s: &str) -> usize {
    let (input, rules) = s.split_once("\n\n").unwrap();
    let mut input: Vec<_> = input.chars().collect();
    let map: HashMap<_, _> = rules
        .lines()
        .map(|line| {
            let (pat, val) = line.split_once(" -> ").unwrap();
            let val = val.chars().next().unwrap();
            let mut pat = pat.chars();
            let lpat = pat.next().unwrap();
            let rpat = pat.next().unwrap();
            ((lpat, rpat), val)
        })
        .collect();

    for _ in 0..10 {
        let mut output = Vec::with_capacity(input.len());
        let last = input.len() - 2;
        for (i, [l, r]) in input.into_iter().array_windows().enumerate() {
            match map.get(&(l, r)) {
                Some(new) => output.extend([l, *new]),
                None => output.extend([l]),
            }
            // the final window must also emit its right element, or the
            // last character of the polymer would be dropped
            if i == last {
                output.push(r);
            }
        }
        input = output;
    }

    let map: HashMap<char, usize> = input.into_iter().fold(HashMap::new(), |mut acc, val| {
        let count = acc.entry(val).or_default();
        *count += 1;
        acc
    });

    let mut max = usize::MIN;
    let mut min = usize::MAX;
    for (_, val) in map.into_iter() {
        min = val.min(min);
        max = val.max(max);
    }

    max - min
}

// Part 2: after 40 steps the polymer would hold on the order of 2^40
// elements, so instead of building it we track only how many times each
// adjacent pair occurs. A rule (l, r) -> new rewrites every (l, r) pair
// into one (l, new) pair and one (new, r) pair.
pub fn parse2(s: &str) -> usize {
    let (input, rules) = s.split_once("\n\n").unwrap();
    let input: Vec<_> = input.chars().collect();
    let last_char = input[input.len() - 1];
    let rules_map: HashMap<_, _> = rules
        .lines()
        .map(|line| {
            let (pat, val) = line.split_once(" -> ").unwrap();
            let val = val.chars().next().unwrap();
            let mut pat = pat.chars();
            let lpat = pat.next().unwrap();
            let rpat = pat.next().unwrap();
            ((lpat, rpat), val)
        })
        .collect();

    let mut count_map: HashMap<(char, char), usize> = HashMap::with_capacity(input.len());
    for [l, r] in input.into_iter().array_windows() {
        *count_map.entry((l, r)).or_default() += 1;
    }

    for _ in 0..40 {
        let mut new_count_map: HashMap<(char, char), usize> = HashMap::new();
        for ((l, r), count) in count_map {
            let new = rules_map.get(&(l, r)).unwrap();
            *new_count_map.entry((l, *new)).or_default() += count;
            *new_count_map.entry((*new, r)).or_default() += count;
        }
        count_map = new_count_map;
    }

    // every element is counted once as the left member of a pair; the last
    // character of the template never appears on the left, so add it back
    let mut map: HashMap<char, usize> = count_map
        .into_iter()
        .fold(HashMap::new(), |mut acc, ((l, _), count)| {
            *acc.entry(l).or_default() += count;
            acc
        });
    *map.get_mut(&last_char).unwrap() += 1;

    let mut max = usize::MIN;
    let mut min = usize::MAX;
    for (_, val) in map.into_iter() {
        min = val.min(min);
        max = val.max(max);
    }

    max - min
}

#[cfg(test)]
mod test {
    use super::*;

    const INPUT: &str = "NNCB

CH -> B
HH -> N
CB -> H
NH -> C
HB -> C
HC -> B
HN -> C
NN -> C
BH -> H
NC -> B
NB -> B
BN -> B
BB -> N
BC -> B
CC -> N
CN -> C";

    #[test]
    fn first() {
        assert_eq!(parse1(INPUT), 1588);
    }

    #[test]
    fn second() {
        assert_eq!(parse2(INPUT), 2188189693529);
    }
}
<reponame>asmsuechan/minute import { parse } from './parser'; import { generate } from './generator'; import { analize } from './lexer'; export const convertToHTMLString = (markdown: string) => { const mdArray = analize(markdown); const asts = mdArray.map((md) => parse(md)); const htmlString = generate(asts); return htmlString; };
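A hypothetical call to the exported helper; the import path and the exact markup produced are assumptions that depend on the package layout and the generator's conventions:

// Hypothetical usage; the output markup depends on generate()'s conventions.
import { convertToHTMLString } from './index'; // assumed module path

const html = convertToHTMLString('# Title\n\nhello **world**');
console.log(html);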
<gh_stars>0 from precise.skaters.managers.schurmanagerfactory import schur_vol_vol_ewa_manager_factory,schur_diag_diag_buf_emp_manager_factory # Stands for <NAME> # Some Hierarchical Risk-Parity managers very loosely based on some literature # Original Hierarchical Risk Parity approach # (Note that seriation may be different so this is not supposed to be a replica) def ldp_s5_n50_long_manager(y, s, k=1, e=1): return schur_diag_diag_buf_emp_manager_factory(y=y, s=s, n_buffer=50, e=e, gamma=0, delta=0) def ldp_s25_n50_long_manager(y, s, k=1, e=1): return schur_diag_diag_buf_emp_manager_factory(y=y, s=s, n_buffer=50, e=e, gamma=0, delta=0) def ldp_s5_n100_long_manager(y, s, k=1, e=1): return schur_diag_diag_buf_emp_manager_factory(y=y, s=s, n_buffer=100, e=e, gamma=0, delta=0) def ldp_s25_n100_long_manager(y, s, k=1, e=1): return schur_diag_diag_buf_emp_manager_factory(y=y, s=s, n_buffer=100, e=e, gamma=0, delta=0) LDP_LONG_MANAGERS = [ldp_s5_n50_long_manager, ldp_s25_n50_long_manager, ldp_s5_n100_long_manager, ldp_s25_n100_long_manager ] HRP_LS_NAMED_MANAGERS = [] HRP_NAMED_MANAGERS = LDP_LONG_MANAGERS + HRP_LS_NAMED_MANAGERS
def _add_block(self, block: Type[Block]): self._blocks.append(block)
<filename>components/MobileMenu.tsx import { WidthWrapper } from "@/components/core/Layout"; import { IconButton, Flex } from "@chakra-ui/react"; import HamburgerMenu from "react-hamburger-menu"; import useStore from "@/src/store"; import { ReactElement, useRef } from "react"; import NavOverlay from "@/components/NavOverlay"; import TopBarWrapper from "@/components/TopBarWrapper"; import TopBarLogo from "@/components/TopBarLogo"; export default function MobileMenu(): ReactElement { const isNavOverlayOpen = useStore((state) => state.isNavOverlayOpen); const setNavOverlayOpen = useStore((state) => state.setNavOverlayOpen); const componentRef = useRef(); const getComponentHeight = () => { const element = componentRef.current as HTMLElement | undefined; return element ? element.getBoundingClientRect().height : 0; }; return ( <> <NavOverlay getTopBarHeight={getComponentHeight} /> <TopBarWrapper> <WidthWrapper> <Flex align="center" justify="space-between" paddingY="8px"> <TopBarLogo /> <IconButton aria-label="Navigation Menu" background="none !important" size="auto" paddingX="2px" paddingY="7px" display={["block", "block", "block", "block", "none"]} onClick={() => setNavOverlayOpen(!isNavOverlayOpen)} icon={ <HamburgerMenu color="#1a202c" isOpen={isNavOverlayOpen} menuClicked={() => setNavOverlayOpen(!isNavOverlayOpen)} height={17} width={28} strokeWidth={3} rotate={180} /> } /> </Flex> </WidthWrapper> </TopBarWrapper> </> ); }
Development of an in vivo method to identify mutants of phage T4 lysozyme of enhanced thermostability An M13 bacteriophage‐based in vivo screening system has been developed to identify T4 lysozyme mutants of enhanced thermal stability. This system takes advantage of easy mutagenesis in an M13 host, the production of functional T4 lysozyme during M13 growth, and the ability to detect lysozyme activity on agar plates. Of several mutagenesis procedures that were tested, the most efficient was based on misincorporation by avian myeloblastosis virus reverse transcriptase. This one‐step mutagenesis and screening system has been used to find 18 random single‐site mutant lysozymes, of which 11 were heat resistant. Each of these had a melting temperature within 0.8–1.4°C of wild type, suggesting that the screening system is quite sensitive.
Lessons from Personhood's Defeat: Abortion Restrictions and Side Effects on Women's Health State personhood laws pose a puzzle. These laws would establish fertilized eggs as persons and, by doing so, would ban all abortions. Many states have consistently supported laws restricting abortion care. Yet, thus far no personhood laws have passed. Why? This Article offers a possible explanation and draws lessons from that explanation for understanding and resisting abortion restrictions more broadly. I suggest that voters’ recognition of the implications of personhood legislation for health issues other than abortion may have led to personhood’s defeat. In other words, opponents of personhood proposals appear to have successfully reconnected abortion to pregnancy care, contraception, fertility, and women’s health in general. Public concern over the “side effects” of personhood laws seems to have persuaded even those opposed to abortion to reject personhood legislation. If this is so, personhood opponents may have struck on a strategy that could apply more broadly. As this Article explains, various anti-abortion regulations — not just personhood laws — have deleterious “side effects” on women’s health. Focusing the public’s attention on these side effects could not only create stronger support for access to abortion care but could also better promote the full spectrum of women’s healthcare needs.
/** * (Mock)Test JobTracker.removeJobTasks() which is called only when the job * retires. */ public void testJobRemoval() throws Exception { MiniMRCluster mr = null; try { JobConf conf = new JobConf(); mr = startCluster(conf, 0); JobTracker jobtracker = mr.getJobTrackerRunner().getJobTracker(); testRemoveJobTasks(jobtracker, conf, TaskType.MAP); testRemoveJobTasks(jobtracker, conf, TaskType.REDUCE); testRemoveJobTasks(jobtracker, conf, TaskType.JOB_SETUP); testRemoveJobTasks(jobtracker, conf, TaskType.JOB_CLEANUP); } finally { if (mr != null) { mr.shutdown();} FileUtil.fullyDelete(new File(testDir.toString())); } }
I don’t have to tell you there’s a sort of Podcast Renaissance taking place (again). But here’s what you might not know — there’s a Renaissance happening in Public Radio too. I’m not talking about new companies making new kinds of stories for a new(ish) medium, although this success should be applauded. I’m talking about old public media: the radio station. One that, if successful, can position local and regional stations to recapture the informed and engaged, while fostering a culture of curiosity and empathy within their communities. Yet with this transformation comes immense risk for stations unable or unwilling to embrace their new role. Where we stand Once upon a time terrestrial radio was king, and the local station held a monopoly on NPR news and other public media material. For many markets it’s still the leader, but this will change. This is changing already. But it’s not just a technology problem; the radio format itself is the problem. On demand is growing, while traditional radio formats — both FM and streaming — are flat or in decline. And when it happens, what does the local station have left to offer? If your answer is ‘local news’ or ‘local content’, I’ve got some bad news for you. Becoming a wire service for NPR ONE isn’t a bright future. It’s no secret local newspapers have had a rough decade. To remain profitable, many have had to remake their online offerings into something more resembling the local television station than papers of record. Here I could write a swan song for the statehouse reporter, or the 7-year veteran of the city council meetings, but this isn’t the place. Many of these companies, owned by larger national companies, shifted to homepages splattered with accused child molesters, blow-by-blow coverage of rape cases, or gossip columns on local sports and businesspeople, because they grew traffic. Revenue. Thank god this isn’t an option for public radio, but it’s time to pull our heads out of the sand. So where’s the danger? The media landscape is changing faster than most realize, and often not in the way we think. The last three years have been filled with failed technical solutions to what are fundamentally content problems, with very few successes. For small stations this can pose an immense challenge. Identifying a successful platform at the outset is nearly impossible — looking at you, Periscope and Vine. For many stations that struggle to get web-friendly versions of their stories online, moving beyond Facebook and Twitter is simply not feasible. And that’s okay — but it’s not a free pass for business as usual. Eventually the local audience will switch to internet distribution, as the FM demographic ages. For stations that haven’t laid the community groundwork for a post-FM, on-demand audience, this change could be disastrous. It’s not impossible to imagine a 7–10 year timeline in which we lose a quarter to a third of NPR member stations. The dials may not go dark, but what used to be a seven-person station would be replaced with a repeater for the nearest large market, with one or two short reports a week. With forced, unplanned consolidation comes a narrowing of the mission. In some parts of the country we’re beginning to see it already, especially among the smaller university-owned stations. To avoid this future, smaller stations must begin thinking now about what it means to be local in an on-demand distribution age. A way forwards With the rare exception — informative local reporting alone will not pay the bills.
We need analysis, yes, but more than ever before we need the context in which to ask questions. The daily talk shows, a steadfast staple of local public radio, are being cut across markets. So where does this leave us? The key, I believe, is to apply the spirit of public media reporting and analysis to new spaces, and new people, in new ways. In short, we must pivot from creating a current, shared experience to sharing enduring experiences and creating space for community growth. That is, content that demands no schedule from the listener, remains relevant for some time, and serves as a catalyst for community discussions. Explainer podcasts, live events, and expanded digital content offerings are all good starts. The goals: create works that spark questions for the community, but crucially also provide a forum for that discussion. If it’s a story about an underserved community, do the events at the parish church, not the community college. This means moving away from the now, or at least packaging it in a wrapper of history and context. It also means fostering a nontraditional relationship with the audience, especially supporters. Courtesy of http://wearehearken.com/ One pathway is Hearken, a company built out of Curious City (funded by an AIR grant), which has helped stations build a different sort of relationship with their audience. They allow readers to ask questions of journalists and, after some moderation, vote on what they’d like answered. The Corporation for Public Broadcasting is trying another route through Regional Journalism Center grants (RJCs). These look to fund regional collaborations between public media entities to tell more complete, regional stories. Yet for the three-person newsroom at the ten-person station, technical platforms may be beyond reach, and this is where a focus on sharing enduring experiences and creating space for community growth comes in. By embracing their role as storytellers and reporters, local stations can explain local politics, contextualize community discussions, and serve as a forum on air and in person. In many of the larger markets this transition is already occurring in some form or another. Let me just say this — the giants will be fine, as will the majority of large stations/networks (Colorado Public Radio, St. Louis Public Radio…), yet often these changes occur laterally, as growth off of the organization, not from within it. A lot of these larger stations still have the budget, and competition, to experiment. But trying something new is a lot harder when it means stopping something that previously worked. Without a doubt there are small stations, and other nonprofit media, already doing it right. Texas Tribune makes 17% of its revenue off community engagement events. Tiny public radio stations cover national news stories in a way only public media can, and win national Murrow awards for it. Many stations have a Mike Pesca or a Starlee Kine or an Alex Kapelman waiting for a green light. But it’s still about local. My old news director’s hyper-local explainer podcast was consistently our best-received content by an order of magnitude. It might not fit in the traditional radio format. That might not be a bad thing.
<reponame>cholcombe973/isilon<filename>src/models/pools_pool_rule_create_params.rs<gh_stars>1-10 #[allow(unused_imports)] use serde_json::Value; #[derive(Debug, Serialize, Deserialize)] pub struct PoolsPoolRuleCreateParams { /// Description for the provisioning rule. #[serde(rename = "description")] pub description: Option<String>, /// Interface name the provisioning rule applies to. #[serde(rename = "iface")] pub iface: String, /// Name of the provisioning rule. #[serde(rename = "name")] pub name: String, /// Node type the provisioning rule applies to. #[serde(rename = "node_type")] pub node_type: Option<String>, }
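A hypothetical construction of the struct above; all field values are illustrative only:

// Hypothetical values; only `iface` and `name` are required (non-Option).
let params = PoolsPoolRuleCreateParams {
    description: Some("provision 10GbE interfaces".to_string()),
    iface: "10gige-1".to_string(),
    name: "rule0".to_string(),
    node_type: Some("storage".to_string()),
};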
/** * A storage of pages in files. * <p> * All pages passed into this store are restricted to be {@link SerializedPage}s. * <p> * While {@link DiskPageStore} uses a single file per session, this implementation stores each page * in its own file. This improves on a {@link DiskPageStore disadvantage of DiskPageStore} surfacing * with alternating Ajax requests from different browser tabs. */ public class FilePageStore extends AbstractPersistentPageStore implements IPersistentPageStore { private static final String ATTRIBUTE_PAGE_TYPE = "user.wicket_page_type"; private static final String FILE_SUFFIX = ".data"; private static final Logger log = LoggerFactory.getLogger(FilePageStore.class); private final Bytes maxSizePerSession; private final NestedFolders folders; /** * Create a store that supports {@link SerializedPage}s only. * * @param applicationName * name of application * @param fileStoreFolder * folder to store to * @param maxSizePerSession * maximum size per session * * @see SerializingPageStore */ public FilePageStore(String applicationName, File fileStoreFolder, Bytes maxSizePerSession) { this(applicationName, fileStoreFolder, maxSizePerSession, null); } /** * Create a store to files. * * @param applicationName * name of application * @param fileStoreFolder * folder to store to * @param maxSizePerSession * maximum size per session * @param serializer * for serialization of pages */ public FilePageStore(String applicationName, File fileStoreFolder, Bytes maxSizePerSession, ISerializer serializer) { super(applicationName); this.folders = new NestedFolders(new File(fileStoreFolder, applicationName + "-filestore")); this.maxSizePerSession = Args.notNull(maxSizePerSession, "maxSizePerSession"); } /** * Pages are always serialized, so versioning is supported. 
*/ @Override public boolean supportsVersioning() { return true; } private File getPageFile(String sessionId, int id, boolean create) { File folder = folders.get(sessionId, create); return new File(folder, id + FILE_SUFFIX); } @Override protected IManageablePage getPersistedPage(String sessionIdentifier, int id) { byte[] data = readFile(sessionIdentifier, id); if (data == null) { return null; } return new SerializedPage(id, "unknown", data); } private byte[] readFile(String sessionIdentifier, int id) { File file = getPageFile(sessionIdentifier, id, false); if (file.exists() == false) { return null; } byte[] data = null; try { FileChannel channel = FileChannel.open(file.toPath()); try { int size = (int)channel.size(); MappedByteBuffer buf = channel.map(MapMode.READ_ONLY, 0, size); data = new byte[size]; buf.get(data); } finally { IOUtils.closeQuietly(channel); } } catch (IOException ex) { log.warn("cannot read page data for session {} page {}", sessionIdentifier, id, ex); } return data; } @Override protected void removePersistedPage(String sessionIdentifier, IManageablePage page) { File file = getPageFile(sessionIdentifier, page.getPageId(), false); if (file.exists()) { if (!file.delete()) { log.warn("cannot remove page data for session {} page {}", sessionIdentifier, page.getPageId()); } } } @Override protected void removeAllPersistedPages(String sessionIdentifier) { folders.remove(sessionIdentifier); } @Override protected void addPersistedPage(String sessionIdentifier, IManageablePage page) { if (page instanceof SerializedPage == false) { throw new WicketRuntimeException("FilePageStore works with serialized pages only"); } SerializedPage serializedPage = (SerializedPage) page; String type = serializedPage.getPageType(); byte[] data = serializedPage.getData(); writeFile(sessionIdentifier, serializedPage.getPageId(), type, data); checkMaxSize(sessionIdentifier); } private void writeFile(String sessionIdentifier, int pageId, String pageType, byte[] data) { File file = getPageFile(sessionIdentifier, pageId, true); try { FileChannel channel = FileChannel.open(file.toPath(), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE); try { ByteBuffer buffer = ByteBuffer.wrap(data); channel.write(buffer); } finally { IOUtils.closeQuietly(channel); } } catch (IOException ex) { log.warn("cannot store page data for session {} page {}", sessionIdentifier, pageId, ex); } setPageType(file, pageType); } private void checkMaxSize(String sessionIdentifier) { File[] files = folders.get(sessionIdentifier, true).listFiles(); Arrays.sort(files, new LastModifiedComparator()); long total = 0; for (int f = 0; f < files.length; f++) { File candidate = files[f]; total += candidate.length(); if (total > maxSizePerSession.bytes()) { if (!Files.remove(candidate)) { log.warn("cannot remove page data for session {} page {}", sessionIdentifier, candidate.getName()); } } } } public class LastModifiedComparator implements Comparator<File> { @Override public int compare(File f1, File f2) { return Long.compare(f2.lastModified(), f1.lastModified()); } } @Override public Set<String> getSessionIdentifiers() { Set<String> sessionIdentifiers = new HashSet<>(); for (File folder : folders.getAll()) { sessionIdentifiers.add(folder.getName()); } return sessionIdentifiers; } @Override public List<IPersistedPage> getPersistedPages(String sessionIdentifier) { List<IPersistedPage> pages = new ArrayList<>(); File folder = folders.get(sessionIdentifier, false); if (folder.exists()) { File[] files = 
folder.listFiles(); Arrays.sort(files, new LastModifiedComparator()); for (File file : files) { String name = file.getName(); if (name.endsWith(FILE_SUFFIX)) { int pageId; try { pageId = Integer .valueOf(name.substring(0, name.length() - FILE_SUFFIX.length())); } catch (Exception ex) { log.debug("unexpected file {}", file.getAbsolutePath()); continue; } String pageType = getPageType(file); pages.add(new PersistedPage(pageId, pageType, file.length())); } } } return pages; } /** * Get the type of page from the given file. * <p> * This is an optional operation that returns <code>null</code> in case of any error. * * @param file * @return pageType */ protected String getPageType(File file) { String pageType = null; try { UserDefinedFileAttributeView view = java.nio.file.Files .getFileAttributeView(file.toPath(), UserDefinedFileAttributeView.class); ByteBuffer buffer = ByteBuffer.allocate(view.size(ATTRIBUTE_PAGE_TYPE)); view.read(ATTRIBUTE_PAGE_TYPE, buffer); buffer.flip(); pageType = Charset.defaultCharset().decode(buffer).toString(); } catch (IOException ex) { log.debug("cannot get pageType for {}", file); } return pageType; } /** * Set the type of page on the given file. * <p> * This is an optional operation that silently fails in case of an error. * * @param file * @param pageType */ protected void setPageType(File file, String pageType) { try { UserDefinedFileAttributeView view = java.nio.file.Files .getFileAttributeView(file.toPath(), UserDefinedFileAttributeView.class); view.write(ATTRIBUTE_PAGE_TYPE, Charset.defaultCharset().encode(pageType)); } catch (IOException ex) { log.debug("cannot set pageType for {}", file, ex); } } @Override public Bytes getTotalSize() { long total = 0; for (File folder : folders.getAll()) { for (File file : folder.listFiles()) { String name = file.getName(); if (name.endsWith(FILE_SUFFIX)) { total += file.length(); } } } return Bytes.bytes(total); } }
#include "Filter.h"

#include "Cpp/Warnings.h"
#include "Container/Sequential/Array.h"
#include "Types.h"
#include "Range/Mutation/Transform.h"
#include "Range/Reduction.h"

#include <stdio.h>

INTRA_PUSH_DISABLE_REDUNDANT_WARNINGS

namespace Intra { namespace Audio { namespace Synth {

FilterCoeffs FilterCoeffs::Calculate(float rezAmount, float cutoffRatio, FilterType type)
{
	float c = Math::Tan(float(Math::PI)*cutoffRatio);
	if(type == FilterType::LowPass) c = 1 / c;

	FilterCoeffs result;
	result.C = 1 / (1 + rezAmount*c + c*c);
	result.A1 = -2*result.C;
	result.A2 = result.C;
	result.B1 = 2*(1 - c*c)*result.C;
	result.B2 = (-1 + rezAmount*c - c*c)*result.C;
	if(type == FilterType::LowPass)
	{
		result.A1 = -result.A1;
		result.B1 = -result.B1;
	}
	return result;
}

void Filter::operator()(Span<float> inOutSamples)
{
	//TODO: add optimized versions using SIMD
	if(A2 == 0)
	{
		if(A1 == 0)
		{
			if(B2 == 0)
			{
				if(B1 == 0)
				{
					Multiply(inOutSamples, C);
					return;
				}
				for(float& sample: inOutSamples)
				{
					sample *= C;
					sample += B1*PrevSample;
					PrevSample = sample;
				}
				return;
			}
			for(float& sample: inOutSamples)
			{
				sample *= C;
				sample += B1*PrevSample + B2*PrevSample2;
				PrevSample2 = PrevSample;
				PrevSample = sample;
			}
			return;
		}
		if(B2 == 0)
		{
			if(B1 == 0)
			{
				for(float& sample: inOutSamples)
				{
					const float cur = sample;
					sample *= C;
					sample += A1*PrevSrc;
					PrevSrc = cur;
				}
				return;
			}
			for(float& sample: inOutSamples)
			{
				const float cur = sample;
				sample *= C;
				sample += A1*PrevSrc;
				sample += B1*PrevSample;
				PrevSrc = cur;
				PrevSample = sample;
			}
			return;
		}
		for(float& sample: inOutSamples)
		{
			const float cur = sample;
			sample *= C;
			sample += A1*PrevSrc;
			sample += B1*PrevSample + B2*PrevSample2;
			PrevSrc = cur;
			PrevSample2 = PrevSample;
			PrevSample = sample;
		}
		return;
	}
	if(B2 == 0)
	{
		if(B1 == 0)
		{
			for(float& sample: inOutSamples)
			{
				const float cur = sample;
				sample *= C;
				sample += A1*PrevSrc + A2*PrevSrc2;
				PrevSrc2 = PrevSrc;
				PrevSrc = cur;
			}
			return;
		}
		for(float& sample: inOutSamples)
		{
			const float cur = sample;
			sample *= C;
			sample += A1*PrevSrc + A2*PrevSrc2;
			sample += B1*PrevSample;
			PrevSrc2 = PrevSrc;
			PrevSrc = cur;
			PrevSample = sample;
		}
		return;
	}
	for(float& sample: inOutSamples)
	{
		const float cur = sample;
		sample *= C;
		sample += A1*PrevSrc + A2*PrevSrc2;
		sample += B1*PrevSample + B2*PrevSample2;
		PrevSrc2 = PrevSrc;
		PrevSrc = cur;
		PrevSample2 = PrevSample;
		PrevSample = sample;
	}
}

void ResonanceFilter::operator()(Span<float> inOutSamples)
{
	for(float& sample: inOutSamples)
	{
		sample += S*DeltaPhase + PrevSample;
		PrevSample = sample;
		S -= sample*DeltaPhase;
		S *= QFactor;
	}
}

void DriveEffect::operator()(Span<float> inOutSamples)
{
	static const float halfPi = float(Math::PI)*0.5f;
	for(float& sample: inOutSamples)
	{
		const float s = sample*K;
		const float x = s + 0.5f / (1 + s*s) - 0.5f;
		//sample = Math::Atan(x);
		sample = x > 0? halfPi * x / (x + 1): -halfPi * x / (x - 1);
	}
}

void SoftHighPassFilter::operator()(Span<float> inOutSamples)
{
	const float K1 = 1 - K;
	for(float& sample: inOutSamples)
	{
		S *= K;
		S += sample*K1;
		sample -= S;
	}
}

void NormalizeEffect::operator()(Span<float> inOutSamples)
{
	auto minimax = Range::MiniMax(inOutSamples.AsConstRange());
	float absMax = Math::Max(Math::Abs(minimax.first), Math::Abs(minimax.second));
	if(AbsMax < absMax) AbsMax = absMax;
	const float multiplier = 1 / AbsMax;
	Multiply(inOutSamples, multiplier*Volume);
}

}}}

INTRA_WARNING_POP
from django.template import Library from django.template.defaultfilters import stringfilter from typing import List register = Library() @register.filter(is_safe=True) def hexlist(value: List[int]): return ', '.join('0x{:02x}'.format(x) for x in value)
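Since the snippet above registers hexlist as a Django template filter, a short usage sketch may help. The direct call follows from the definition; the template-library name byte_filters is hypothetical, as it depends on which file under an app's templatetags/ package holds the filter:

# Direct call, outside a template (uses the hexlist function defined above):
assert hexlist([222, 173, 190, 239]) == '0xde, 0xad, 0xbe, 0xef'

# Hypothetical template usage, assuming the module is saved as
# templatetags/byte_filters.py inside an installed app:
#
#   {% load byte_filters %}
#   {{ device.rom_bytes|hexlist }}   {# renders: 0xde, 0xad, 0xbe, 0xef #}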
##################################################### # Copyright (c) <NAME> [GitHub D-X-Y], 2021.03 # ##################################################### # pytest tests/test_synthetic_utils.py -s # ##################################################### import sys, random import unittest import pytest from pathlib import Path lib_dir = (Path(__file__).parent / ".." / "lib").resolve() print("library path: {:}".format(lib_dir)) if str(lib_dir) not in sys.path: sys.path.insert(0, str(lib_dir)) from datasets.synthetic_core import TimeStamp class TestTimeStamp(unittest.TestCase): """Test the timestamp generator.""" def test_simple(self): for mode in (None, "train", "valid", "test"): generator = TimeStamp(0, 1) print(generator) for idx, (i, xtime) in enumerate(generator): self.assertTrue(i == idx) if idx == 0: self.assertTrue(xtime == 0) if idx + 1 == len(generator): self.assertTrue(abs(xtime - 1) < 1e-8)
import { Injectable } from '@angular/core';
import { CanActivate } from '@angular/router';
import { NbAuthService } from '@nebular/auth';
import { tap } from 'rxjs/operators';
import { NbToastrService } from '@nebular/theme';

@Injectable({
  providedIn: 'root',
})
export class AuthGuardService implements CanActivate {
  constructor(private readonly nbAuthService: NbAuthService, private readonly nbToastrService: NbToastrService) {}

  canActivate() {
    return this.nbAuthService.isAuthenticated().pipe(
      tap((authenticated) => {
        if (!authenticated) {
          // Toast message (Korean): "Please log in." with the title "Notice".
          this.nbToastrService.show('로그인을 해주세요.', '알림');
        }
      }),
    );
  }
}
// Note: Contact points are used for calculating the distance from the centre of mass to the contact point. // I believe this is necessary for rotational components. It is unclear how the contact point should be calculated. // Sometimes the contact point is on the boundary of one object and not the other, and it is not clear which object it should be on the boundary of. // It may be the case that these objects are close enough that it makes little difference. // // For example: // circle to circle : if circles are identical in position and size, contact is at centre of circle // else contact is at edge of circle A // // circle to polygon : if circles are in vertex regions, contact is at the vertex of the polygon // else contact is at the edge of the circle // // polygon to polygon : unknown (todo) CollisionInformation circle_to_circle(RigidBody& a, RigidBody& b) { CollisionInformation collision_information; Circle* circle_a = static_cast<Circle*>(a.shape); Circle* circle_b = static_cast<Circle*>(b.shape); vec2 normal = b.centre - a.centre; float squared_distance = length_squared(normal); float radii_sum = circle_a->radius + circle_b->radius; float squared_radii_sum = radii_sum * radii_sum; if (squared_distance < squared_radii_sum) { float distance = std::sqrt(squared_distance); float penetration_depth = radii_sum - distance; vec2 collision_normal = vec2(0.0f, -1.0f); vec2 contact_point = a.centre; if (distance > 0.0f) { collision_normal = normal / distance; contact_point += collision_normal * circle_a->radius; } collision_information.contact_count = 1; collision_information.contact_data[0] = ContactData{ contact_point, penetration_depth }; collision_information.collision_normal = collision_normal; } else { collision_information.contact_count = 0; } return collision_information; }
def hold(job, clean_job=False, clean_logs=False, hold_children=True):
    """Put an active job on hold.

    :param job: the job to hold
    :param clean_job: if True, clean the job up while setting its state
    :param clean_logs: if True, remove the job's log files
    :param hold_children: if True, recursively hold the job's children
    :returns: True if the job was held, False if it was not active
    """
    if job.state not in db.STATES_ACTIVE:
        return False
    log.info("Holding job: %s-%d", str(job), job.id)
    set_state(job, db.STATE_HOLD, cleanup=clean_job)
    db.update_job_states(get_group_jobs(job))
    if len(job.pipe_from) == 0:
        cluster = jip.cluster.get()
        cluster.cancel(job)
    if clean_logs:
        clean(job)
    if hold_children:
        for child in job.children:
            hold(child, clean_job=clean_job,
                 clean_logs=clean_logs,
                 hold_children=hold_children)
    return True
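A brief usage sketch may help; it assumes a `job` instance already loaded from jip's job database (session handling and the surrounding module's imports are omitted):

# Hypothetical usage: hold an active job, wipe its logs, and cascade to its children.
if hold(job, clean_logs=True):
    print("held job %d and its children" % job.id)
else:
    print("job %d is not active, nothing to hold" % job.id)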
Facebook has been trying to get India to fall in love with its Free Basics service for several months since it launched in February. CEO Mark Zuckerberg even visited the capital of New Delhi last week and attempted to address concerns about it during a Townhall Q&A session. But he still doesn't get why Indians are opposed to the social network's zero-rating service.

More than 330,000 people signed a petition to oppose zero-rating and uphold net neutrality principles in the country, and numerous Web and media companies dropped off Facebook's offering in support of the initiative. Zuckerberg still thinks that Free Basics will serve India well, and believes that campaigns against it don't factor in the benefits it brings to those who are still offline.

Mark Zuckerberg hosting a Facebook Townhall session at IIT Delhi

The trouble with Free Basics is that it's not completely open: Facebook allows publishers and developers to submit their services and content to the platform, but reserves the right to reject them. That's not what the internet is about. With Facebook acting as gatekeeper, the company retains the ability to block services and content that compete with it.

As volunteer collective SaveTheInternet.in notes, Zuckerberg has suggested that universal access is more important than net neutrality, and that there is a possibility of taking these principles "too far". However, that makes sense only if there are no alternatives to offering a zero-rating service. The simplest one is to offer data in exchange for viewing ads; the Mozilla Foundation already runs a similar program in Bangladesh.

India is constantly adding more internet subscribers — IAMAI reported that 52 million got online in just the first half of 2015. But Free Basics will divide people into those who access the open Web, and those who access Facebook and the services it approves of. Free Basics offers Facebook avenues to profit from bringing users and services onto its platform, and Zuckerberg won't admit it. Savetheinternet.in notes:

Data for all the websites on Internet.org will be with Facebook, and restrictions are placed on them publicly disclosing usage of their sites and services by users on Internet.org. Apart from the fact that no open platform places such restrictions, this data and the learnings gained from it gives a competitive advantage to Facebook, because of the competitive advantage given to Internet.org by its telecom operator partners.

Free Basics hurts Facebook's competition and hampers innovation. Either Zuckerberg doesn't understand this, or this was his vision all along. Whichever way you slice it, it's not good for India.

STI Statement on Mark Zuckerberg's townhall at IIT Delhi on SaveTheInternet.in
use {
    crate::{
        core::Terrain,
        persist::Level,
        pos::*,
    },
    super::{
        drawing_action::*,
        ink::*,
    },
};

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PenShape {
    Dot,
    Line,
    Rect,
}

pub static PEN_SHAPES: &[PenShape] = &[PenShape::Dot, PenShape::Line, PenShape::Rect];

/// defines what will happen on click on the board
#[derive(Debug, Clone, Copy)]
pub struct Pen {
    pub shape: PenShape,
    pub ink: Ink,
    shape_start: Option<Pos>,
}

impl Pen {
    pub fn new_for(level: &Level) -> Self {
        Self {
            shape: PenShape::Dot,
            ink: Ink::Terrain(if level.default_terrain == Terrain::Mud {
                Terrain::Stone
            } else {
                Terrain::Mud
            }),
            shape_start: None,
        }
    }

    /// Maybe change the state of the pen and return the drawing
    /// action which should be applied to the board, if any.
    pub fn click(
        &mut self,
        click_pos: Pos,
        is_control_click: bool,
    ) -> Option<DrawingAction> {
        match self.shape {
            PenShape::Dot => {
                Some(DrawingAction::DotInk(self.ink, click_pos))
            }
            PenShape::Line => {
                if let Some(start) = self.shape_start {
                    let action = if is_control_click {
                        DrawingAction::CompassLineInk(self.ink, start, click_pos)
                    } else {
                        DrawingAction::LineInk(self.ink, start, click_pos)
                    };
                    self.shape_start = None;
                    Some(action)
                } else {
                    self.shape_start = Some(click_pos);
                    None
                }
            }
            PenShape::Rect => {
                if let Some(start) = self.shape_start {
                    let action = DrawingAction::RectInk(self.ink, start, click_pos);
                    self.shape_start = None;
                    Some(action)
                } else {
                    self.shape_start = Some(click_pos);
                    None
                }
            }
        }
    }

    pub fn set_ink(&mut self, ink: Ink) {
        self.ink = ink;
        debug!("new pen ink: {:?}", self.ink);
    }

    pub fn set_shape(&mut self, shape: PenShape) {
        self.shape = shape;
        self.shape_start = None;
    }

    pub fn shape_started(&self) -> bool {
        self.shape_start.is_some()
    }

    /// return the help to display depending on the state,
    /// if any.
    pub fn status_help(&self) -> Option<String> {
        match self.shape {
            PenShape::Line if self.shape_start.is_some() => {
                Some("Click again to draw a line - with the *ctrl* key down to force compass directions".to_string())
            }
            PenShape::Rect if self.shape_start.is_some() => {
                Some("Click again to draw a rectangle".to_string())
            }
            _ => None,
        }
    }
}
package cmdx_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"github.com/w6d-io/x/cmdx"
)

var _ = Describe("Command functions testing", func() {
	Context("Variables are not empty", func() {
		It("Version responds correctly", func() {
			By("Set variables")
			version := "v0.1.0-test"
			commit := "2c246741bce7284a8e728d7ca492dc3a47cd3c85"
			buildTime := "Thu Oct 7 08:47:07 2021 +0200"

			By("instantiate Version")
			cmd := cmdx.Version(&version, &commit, &buildTime)

			By("execute version command")
			Expect(cmd.Execute()).To(Succeed())
		})
	})
	Context("Variables are empty", func() {
		It("Version responds correctly", func() {
			By("Set variables")
			version := ""
			commit := ""
			buildTime := ""

			By("instantiate Version")
			cmd := cmdx.Version(&version, &commit, &buildTime)

			By("execute version command")
			Expect(cmd.Execute()).To(Succeed())
		})
	})
})
//go:build !tinygo // +build !tinygo package gfx import "math" // HunterLab represents a color in Hunter-Lab. type HunterLab struct { L float64 A float64 B float64 } // XYZ converts from HunterLab to XYZ. // // Reference-X, Y and Z refer to specific illuminants and observers. // Common reference values are available below in this same page. // // var_Ka = ( 175.0 / 198.04 ) * ( Reference-Y + Reference-X ) // var_Kb = ( 70.0 / 218.11 ) * ( Reference-Y + Reference-Z ) // // Y = ( ( Hunter-L / Reference-Y ) ^ 2 ) * 100.0 // X = ( Hunter-a / var_Ka * sqrt( Y / Reference-Y ) + ( Y / Reference-Y ) ) * Reference-X // Z = - ( Hunter-b / var_Kb * sqrt( Y / Reference-Y ) - ( Y / Reference-Y ) ) * Reference-Z // func (h HunterLab) XYZ(ref XYZ) XYZ { Ka := (175.0 / 198.04) * (ref.Y + ref.X) Kb := (70.0 / 218.11) * (ref.Y + ref.Z) Y := math.Pow((h.L/ref.Y), 2) * 100.0 X := (h.A/Ka*math.Sqrt(Y/ref.Y) + (Y / ref.Y)) * ref.X Z := -(h.B/Kb*math.Sqrt(Y/ref.Y) - (Y / ref.Y)) * ref.Z return XYZ{X, Y, Z} } // HunterLab converts from XYZ to HunterLab. // // Reference-X, Y and Z refer to specific illuminants and observers. // Common reference values are available below in this same page. // // var_Ka = ( 175.0 / 198.04 ) * ( Reference-Y + Reference-X ) // var_Kb = ( 70.0 / 218.11 ) * ( Reference-Y + Reference-Z ) // // Hunter-L = 100.0 * sqrt( Y / Reference-Y ) // Hunter-a = var_Ka * ( ( ( X / Reference-X ) - ( Y / Reference-Y ) ) / sqrt( Y / Reference-Y ) ) // Hunter-b = var_Kb * ( ( ( Y / Reference-Y ) - ( Z / Reference-Z ) ) / sqrt( Y / Reference-Y ) ) // func (xyz XYZ) HunterLab(ref XYZ) HunterLab { Ka := (175.0 / 198.04) * (ref.Y + ref.X) Kb := (70.0 / 218.11) * (ref.Y + ref.Z) return HunterLab{ L: 100.0 * math.Sqrt(xyz.Y/ref.Y), A: Ka * (((xyz.X / ref.X) - (xyz.Y / ref.Y)) / math.Sqrt(xyz.Y/ref.Y)), B: Kb * (((xyz.Y / ref.Y) - (xyz.Z / ref.Z)) / math.Sqrt(xyz.Y/ref.Y)), } }
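The conversion formulas quoted in the comments above are exactly self-inverse when the reference white is expressed on the Y = 100 scale. A quick numeric check of that round trip, written here in Python for brevity; the D65/2-degree reference white values are standard, and the sample XYZ triple (roughly sRGB red) is an assumption for illustration:

import math

# D65 / 2-degree observer reference white on the Y = 100 scale.
REF = (95.047, 100.0, 108.883)

def xyz_to_hunter_lab(x, y, z, ref=REF):
    rx, ry, rz = ref
    ka = (175.0 / 198.04) * (ry + rx)
    kb = (70.0 / 218.11) * (ry + rz)
    l = 100.0 * math.sqrt(y / ry)
    a = ka * (((x / rx) - (y / ry)) / math.sqrt(y / ry))
    b = kb * (((y / ry) - (z / rz)) / math.sqrt(y / ry))
    return l, a, b

def hunter_lab_to_xyz(l, a, b, ref=REF):
    rx, ry, rz = ref
    ka = (175.0 / 198.04) * (ry + rx)
    kb = (70.0 / 218.11) * (ry + rz)
    y = ((l / ry) ** 2) * 100.0
    x = (a / ka * math.sqrt(y / ry) + (y / ry)) * rx
    z = -(b / kb * math.sqrt(y / ry) - (y / ry)) * rz
    return x, y, z

# Round trip on an XYZ triple (approximately sRGB red under D65).
xyz = (41.24, 21.26, 1.93)
back = hunter_lab_to_xyz(*xyz_to_hunter_lab(*xyz))
assert all(abs(u - v) < 1e-9 for u, v in zip(back, xyz))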
/**
 * @return MobileCoreConfiguration parsed from the json stream
 * @throws IOException if reading the stream fails
 * @throws JSONException if the json document is malformed
 */
public MobileCoreConfiguration parse() throws IOException, JSONException {
    JSONObject jsonConfig = new JSONObject(readJsonStream(jsonStream));
    JSONArray servicesJson = jsonConfig.getJSONArray("services");
    JSONObject httpsJson = jsonConfig.optJSONObject("https");
    return MobileCoreConfiguration.newBuilder()
                                  .setHttpsConfiguration(parseHttpsConfig(httpsJson))
                                  .setServiceConfiguration(parseMobileCoreArray(servicesJson)).build();
}
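From the parser above one can infer the rough shape of the expected document: a required top-level "services" array and an optional "https" object. A minimal config sketch follows; only the two top-level keys are implied by the code, so the fields inside each entry are hypothetical:

{
  "services": [
    { "id": "example-service", "type": "http", "url": "https://example.com/api" }
  ],
  "https": {
    "certificatePinning": []
  }
}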
The problem of polyfunctionality in traditional and computational linguistics

This article describes in detail how multifunctional words have been studied in traditional linguistics by international and Uzbek scholars, and relates polyfunctionality to the adjacent phenomena of homonymy and polysemy. It notes that in computational linguistics the question of functional vocabulary has not been settled: its treatment in the corpus remains unresolved, and a prompt solution is needed in the form of a theoretical framework for differentiating polyfunctional vocabulary in information retrieval systems.
/**
 * Used to unmarshal json using the specified type and class binding. If the json has a @type or @class
 * attribute, the class binding and type will be replaced with ones more suitable for that case.
 *
 * @param json json to unmarshal.
 * @param typeOfT type to use during unmarshal.
 * @param classBinding class binding to use.
 * @param context context.
 * @param holder holder which keeps the type or class name and the class instance.
 *
 * @return deserialized instance.
 */
private T unmarshal(JsonElement json, Type typeOfT, AbstractClassBinding<T> classBinding, JsonDeserializationContext context, ClassTypeHolder holder) {
    ClassTypeHolder holderToUse = holder;
    if (json == null || json.isJsonNull()) {
        return null;
    }
    if (typeOfT instanceof Class) {
        return unmarshalClass(json, (Class<T>) typeOfT, classBinding, context);
    }
    Type typeToUse = typeOfT;
    AbstractClassBinding<T> binding = null;
    if (json.isJsonObject()) {
        JsonObject o = json.getAsJsonObject();
        if (holderToUse == null) {
            holderToUse = getClassFromJson(o, null);
            typeToUse = holderToUse.clazz == null ? typeToUse : holderToUse.clazz;
            if (holderToUse.alias != null) {
                binding = BindingRegistry.getInstance().getBindingByTypeAlias(holderToUse.alias);
            } else if (holderToUse.className != null) {
                binding = BindingRegistry.getInstance().getBinding(holderToUse.clazz);
            }
        }
    }
    if (binding != null && typeToUse instanceof Class) {
        Class<T> aClass = (Class<T>) typeToUse;
        return unmarshal(json, aClass, binding, context, holderToUse);
    }
    return context.deserialize(json, typeOfT);
}
package servicefabric

import (
	"errors"
	"strings"
	"text/template"

	"github.com/containous/traefik/log"
	"github.com/containous/traefik/provider"
	"github.com/containous/traefik/provider/label"
	"github.com/containous/traefik/types"
	sf "github.com/jjcollinge/servicefabric"
)

func (p *Provider) buildConfiguration(services []ServiceItemExtended) (*types.Configuration, error) {
	var sfFuncMap = template.FuncMap{
		// Services
		"getServices":                getServices,
		"hasLabel":                   hasService,
		"getLabelValue":              getServiceStringLabel,
		"getLabelsWithPrefix":        getServiceLabelsWithPrefix,
		"isPrimary":                  isPrimary,
		"isStateful":                 isStateful,
		"isStateless":                isStateless,
		"isEnabled":                  getFuncBoolLabel(label.TraefikEnable, false),
		"getBackendName":             getBackendName,
		"getDefaultEndpoint":         getDefaultEndpoint,
		"getNamedEndpoint":           getNamedEndpoint,           // FIXME unused
		"getApplicationParameter":    getApplicationParameter,    // FIXME unused
		"doesAppParamContain":        doesAppParamContain,        // FIXME unused
		"filterServicesByLabelValue": filterServicesByLabelValue, // FIXME unused

		// Backend functions
		"getWeight":         getFuncServiceIntLabel(label.TraefikWeight, label.DefaultWeight),
		"getProtocol":       getFuncServiceStringLabel(label.TraefikProtocol, label.DefaultProtocol),
		"getMaxConn":        getMaxConn,
		"getHealthCheck":    getHealthCheck,
		"getCircuitBreaker": getCircuitBreaker,
		"getLoadBalancer":   getLoadBalancer,

		// Frontend Functions
		"getPriority":       getFuncServiceIntLabel(label.TraefikFrontendPriority, label.DefaultFrontendPriority),
		"getPassHostHeader": getFuncServiceBoolLabel(label.TraefikFrontendPassHostHeader, label.DefaultPassHostHeader),
		"getPassTLSCert":    getFuncBoolLabel(label.TraefikFrontendPassTLSCert, label.DefaultPassTLSCert),
		"getEntryPoints":    getFuncServiceSliceStringLabel(label.TraefikFrontendEntryPoints),
		"getBasicAuth":      getFuncServiceSliceStringLabel(label.TraefikFrontendAuthBasic),
		"getFrontendRules":  getFuncServiceLabelWithPrefix(label.TraefikFrontendRule),
		"getWhiteList":      getWhiteList,
		"getHeaders":        getHeaders,
		"getRedirect":       getRedirect,

		// SF Service Grouping
		"getGroupedServices": getFuncServicesGroupedByLabel(traefikSFGroupName),
		"getGroupedWeight":   getFuncServiceStringLabel(traefikSFGroupWeight, "1"),
	}

	templateObjects := struct {
		Services []ServiceItemExtended
	}{
		Services: services,
	}

	return p.GetConfiguration(tmpl, sfFuncMap, templateObjects)
}

func isPrimary(instance replicaInstance) bool {
	_, data := instance.GetReplicaData()
	return data.ReplicaRole == "Primary"
}

func getBackendName(service ServiceItemExtended, partition PartitionItemExtended) string {
	return provider.Normalize(service.Name + partition.PartitionInformation.ID)
}

func getDefaultEndpoint(instance replicaInstance) string {
	id, data := instance.GetReplicaData()
	endpoint, err := getReplicaDefaultEndpoint(data)
	if err != nil {
		log.Warnf("No default endpoint for replica %s in endpointData: %s", id, data.Address)
		return ""
	}
	return endpoint
}

func getNamedEndpoint(instance replicaInstance, endpointName string) string {
	id, data := instance.GetReplicaData()
	endpoint, err := getReplicaNamedEndpoint(data, endpointName)
	if err != nil {
		log.Warnf("No named endpoint %s for replica %s in endpointData: %s. 
Error: %v", endpointName, id, data.Address, err) return "" } return endpoint } func getReplicaNamedEndpoint(replicaData *sf.ReplicaItemBase, endpointName string) (string, error) { endpoints, err := decodeEndpointData(replicaData.Address) if err != nil { return "", err } endpoint, exists := endpoints[endpointName] if !exists { return "", errors.New("endpoint doesn't exist") } return endpoint, nil } func getApplicationParameter(app sf.ApplicationItem, key string) string { for _, param := range app.Parameters { if param.Key == key { return param.Value } } log.Errorf("Parameter %s doesn't exist in app %s", key, app.Name) return "" } func getServices(services []ServiceItemExtended, key string) map[string][]ServiceItemExtended { result := map[string][]ServiceItemExtended{} for _, service := range services { if value, exists := service.Labels[key]; exists { if matchingServices, hasKeyAlready := result[value]; hasKeyAlready { result[value] = append(matchingServices, service) } else { result[value] = []ServiceItemExtended{service} } } } return result } func doesAppParamContain(app sf.ApplicationItem, key, shouldContain string) bool { value := getApplicationParameter(app, key) return strings.Contains(value, shouldContain) } func filterServicesByLabelValue(services []ServiceItemExtended, key, expectedValue string) []ServiceItemExtended { var srvWithLabel []ServiceItemExtended for _, service := range services { value, exists := service.Labels[key] if exists && value == expectedValue { srvWithLabel = append(srvWithLabel, service) } } return srvWithLabel } func getHeaders(service ServiceItemExtended) *types.Headers { return label.GetHeaders(service.Labels) } func getWhiteList(service ServiceItemExtended) *types.WhiteList { return label.GetWhiteList(service.Labels) } func getRedirect(service ServiceItemExtended) *types.Redirect { return label.GetRedirect(service.Labels) } func getMaxConn(service ServiceItemExtended) *types.MaxConn { return label.GetMaxConn(service.Labels) } func getHealthCheck(service ServiceItemExtended) *types.HealthCheck { return label.GetHealthCheck(service.Labels) } func getCircuitBreaker(service ServiceItemExtended) *types.CircuitBreaker { return label.GetCircuitBreaker(service.Labels) } func getLoadBalancer(service ServiceItemExtended) *types.LoadBalancer { return label.GetLoadBalancer(service.Labels) }
/**
 * Writes a message to the logs.
 * This may cause older messages to be pushed out.
 * Warning: Due to the memory shifting done on the log, logging is expensive.
 * Note: the shift below writes up to index LOG_LENGTH, so the logs buffer is
 * assumed to hold at least LOG_LENGTH + 1 bytes.
 */
void write_log(const char* message) {
#if LOG_ENABLED == 1
    int len = strlen(message) + 1;
    if (len > LOG_LENGTH) {
        return; // the message (with its terminator) cannot fit in the log
    }
    // Shift the existing log contents back by len bytes to make room at the
    // front, dropping whatever no longer fits at the end.
    for (int i = LOG_LENGTH - len; i >= 0; i--) {
        logs[i + len] = logs[i];
    }
    // Copy the new message, including its NUL terminator, to the front.
    memcpy(logs, message, len);
#endif
}
/**
 * Signal handler. Performs no work itself; it only sets flags that the
 * main loop checks and acts upon.
 */
void signalhandler(int signum) {
    if (signum == SIGINT) {
        sigint_flag = 1;
    }
    if (signum == SIGUSR1) {
        environment_flag = 1;
    }
    if (signum == SIGUSR2) {
        camera_timeout_flag = 0;
    }
}
Pressure to allow Ottawa businesses to open on holidays appears to be building, with retailers and their advocates saying stores should have the option to open regardless of where they are located.

Last month, the Ontario Municipal Board upheld an Ottawa city council decision granting the Glebe an exemption from provincial legislation. That means businesses in the neighbourhood are allowed to stay open on six statutory holidays, including Thanksgiving.

Ontario's Retail Business Holidays Act also exempts retail outlets that sell handicrafts, book or magazine stores that are less than 2,400 square feet with a maximum of three employees, as well as pharmacies under 7,500 square feet, nurseries, flower shops, gardening centres and gas stations. Stores in locations established as tourist areas by specific municipal bylaws can also keep the lights on, and that's how the Glebe won its exemption.

'It's a no-brainer'

Retailers in the city's other business improvement areas say whether or not a neighbourhood is in a tourist zone shouldn't be grounds to permit or deny opening. "To me, it's a no-brainer," said Bank Street BIA executive director Christine Leadman.

Leadman said holiday shopping should be extended not just north of the Glebe, but to all neighbourhoods — even where there are historically few tourists. "To suggest that only people who are here visiting are shoppers doesn't make a whole lot of sense," she said.

Some stores want to give employees the day off

At nearby Compact Music, manager Tyler Clarke said even if his Centretown location was permitted to stay open, he probably wouldn't. Clarke says his record store employees are happy to have a holiday Monday off. "It's nicer to have the extra time and recharge the batteries," he said.

The Ottawa and District Labour Council has opposed the push for stores to open on holidays, arguing employees would face pressure to work on the holidays or lose other shifts. The labour council has said it intends to appeal the Glebe's new exemption at the Ontario Municipal Board.

Relax rules for 2017, says retail group

Zach Dayler, the head of the Wellington West BIA, said the city should exempt his area and others in preparation for what will likely be a record year for tourism. "With 2017 on the horizon and lots of people wanting to do events, it's going to be interesting to see how the city responds," Dayler said.

"Opening a business for a holiday exemption isn't going to be the silver bullet that solves the challenges for local business," he said, but added that there has been a growing interest among his association members in following in the Glebe's footsteps.

Because it is smaller than 2,400 square feet and is staffed by fewer than four people, Flock Boutique on Wellington Street is exempt from the Retail Business Holidays Act. "Our staff actually battle each other out for holiday work shifts," joked co-owner Bridget Remai. "It's nice to walk out on the street and meet everybody — and if no businesses are open there isn't really a common meeting place," said Remai.

Remai said the majority of her Thanksgiving Monday customers will be locals, not tourists. She thinks the time has come to let all neighbourhoods, tourist destinations or not, open on holidays.
# Q, H, S, D are the prices of the quarter (0.25 L), half (0.5 L),
# single (1 L) and double (2 L) bottles, inferred from the multipliers
# below; N is the number of litres needed.
Q, H, S, D = map(int, input().split())
N = int(input())

ans = 0
# Cheapest way to buy a 2 L block using a single bottle size.
min_two = min(Q * 8, H * 4, S * 2, D)
ans += (N // 2) * min_two
N -= (N // 2) * 2
# Cheapest way to buy the remaining 1 L, if any.
min_one = min(Q * 4, H * 2, S)
ans += N * min_one
print(ans)
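A small worked example of the two greedy steps above, with hypothetical prices:

# Example: Q, H, S, D = 20, 30, 70, 90 and N = 3 litres.
# Best price for a 2 L block: min(20*8, 30*4, 70*2, 90) = min(160, 120, 140, 90) = 90.
# One 2 L block fits in 3 L, leaving 1 L: min(20*4, 30*2, 70) = min(80, 60, 70) = 60.
# Total: 90 + 60 = 150.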
/** Creates a fully formatted address based on the inputted seed. If the seed is empty,
    a randomly generated seed is used by libsodium.
    The format of the return value is: pay:sov:{32 byte address}{4 byte checksum} */
fn indy_create_key(&self, wallet_id: IndyHandle, config: PaymentAddressConfig) -> Result<String, ErrorCode> {

    trace!("create_payment_address calling indy_create_key");

    let mut config_json: String = config.to_json().unwrap();

    // indy-sdk expects a valid but empty json input in this case,
    // so if no seed was provided, create the json to look like that instead
    if 0 == config.seed.chars().count() {
        config_json = r#"{ }"#.to_string();
    }

    Key::create(wallet_id, Some(&config_json))
}
#include <cassert>
#include <vector>
#include <string>
#include <algorithm>

using namespace std;

#include "nextCombination.h"

/////////// For Testing ///////////////////////////////////////////////////////

#include <time.h>
#include <cassert>
#include <iostream>
#include "../common/iostreamhelper.h"
#include "../common/profile.h"
#include "../common/rand.h"

#include "../integer/bit.h"

static vector<vector<int>> getCombinationsSlow(int n, int r) {
    vector<vector<int>> res;
    for (int i = 1; i < (1 << n); i++) {
        if (popcount((unsigned int)i) == r) {
            res.push_back({});
            for (int j = 0, t = i; t; j++, t >>= 1) {
                if (t & 1)
                    res.back().push_back(j);
            }
        }
    }
    sort(res.begin(), res.end());
    return res;
}

void testNextCombination() {
    //return; //TODO: if you want to test, make this line a comment.

    cout << "--- Next Combination --------------------------------------" << endl;
    {
        int N = 5, R = 3;
        vector<int> c;
        vector<vector<int>> ans;
        initCombination(c, N, R);
        do {
            cout << c << endl;
            ans.push_back(c);
        } while (nextCombination(c, N));

        auto gt = getCombinationsSlow(N, R);
        if (ans != gt) {
            cout << "Mismatched : " << ans << ", " << gt << endl;
        }
        assert(ans == gt);
    }

    cout << "OK!" << endl;
}
/** * Data Access Object (DAO) for policy groups. * <p> * * @see <a href="http://www.corej2eepatterns.com/Patterns/DataAccessObject.htm">Core J2EE Patterns - Data Access * Object</a> */ @Preamble(description = "Data Access Object (DAO) for policy groups.") public class PolicyGroupDao extends AbstractModelDao<PolicyGroup> implements CrudDao<PolicyGroup> { /** * Constructor. * * @throws MbedCloudException * if an error occurs during the process. */ public PolicyGroupDao() throws MbedCloudException { super(); } /** * Constructor. * * @param client * an api client wrapper. * @throws MbedCloudException * if an error occurs during the process. */ public PolicyGroupDao(ApiClientWrapper client) throws MbedCloudException { super(client); } /** * Constructor. * * @param options * a connection options. * @throws MbedCloudException * if an error occurs during the process. */ public PolicyGroupDao(ConnectionOptions options) throws MbedCloudException { super(options); } /** * Constructor. * * @param sdkContext * an sdk context. * @throws MbedCloudException * if an error occurs during the process. */ public PolicyGroupDao(SdkContext sdkContext) throws MbedCloudException { super(sdkContext); } /** * Creates a {@link Paginator} for the list of api keys matching filter options. * * <p> * Note: uses internal data model * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#allApiKeys(com.arm.mbed.cloud.sdk.accounts.model.ApiKeyListOptions, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param options * list options. * @return paginator over the list of api keys * @throws MbedCloudException * if an error occurs during the process. */ public Paginator<ApiKey> allApiKeys(@Nullable ApiKeyListOptions options) throws MbedCloudException { return allApiKeys(options, getModel()); } /** * Creates a {@link Paginator} for the list of api keys matching filter options. * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#allApiKeys(com.arm.mbed.cloud.sdk.accounts.model.ApiKeyListOptions, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param options * list options. * @param policyGroup * a policy group. * @return paginator over the list of api keys * @throws MbedCloudException * if an error occurs during the process. */ public Paginator<ApiKey> allApiKeys(@Nullable ApiKeyListOptions options, @NonNull PolicyGroup policyGroup) throws MbedCloudException { return ((Accounts) getModuleOrThrow()).allApiKeys(options, policyGroup); } /** * Creates a {@link Paginator} for the list of api keys matching filter options. * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#allApiKeys(String, com.arm.mbed.cloud.sdk.accounts.model.ApiKeyListOptions)} * * @param id * The ID of the group. * @param options * list options. * @return paginator over the list of api keys * @throws MbedCloudException * if an error occurs during the process. */ public Paginator<ApiKey> allApiKeys(@NonNull String id, @Nullable ApiKeyListOptions options) throws MbedCloudException { return ((Accounts) getModuleOrThrow()).allApiKeys(id, options); } /** * Creates a {@link Paginator} for the list of users matching filter options. * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#allUsers(String, com.arm.mbed.cloud.sdk.accounts.model.UserListOptions)} * * @param id * The ID of the group. * @param options * list options. * @return paginator over the list of users * @throws MbedCloudException * if an error occurs during the process. 
*/ public Paginator<User> allUsers(@NonNull String id, @Nullable UserListOptions options) throws MbedCloudException { return ((Accounts) getModuleOrThrow()).allUsers(id, options); } /** * Creates a {@link Paginator} for the list of users matching filter options. * * <p> * Note: uses internal data model * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#allUsers(com.arm.mbed.cloud.sdk.accounts.model.UserListOptions, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param options * list options. * @return paginator over the list of users * @throws MbedCloudException * if an error occurs during the process. */ public Paginator<User> allUsers(@Nullable UserListOptions options) throws MbedCloudException { return allUsers(options, getModel()); } /** * Creates a {@link Paginator} for the list of users matching filter options. * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#allUsers(com.arm.mbed.cloud.sdk.accounts.model.UserListOptions, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param options * list options. * @param policyGroup * a policy group. * @return paginator over the list of users * @throws MbedCloudException * if an error occurs during the process. */ public Paginator<User> allUsers(@Nullable UserListOptions options, @NonNull PolicyGroup policyGroup) throws MbedCloudException { return ((Accounts) getModuleOrThrow()).allUsers(options, policyGroup); } /** * Get the API keys of a group. * * * <p> * Note: uses internal data model * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#apiKeys(com.arm.mbed.cloud.sdk.accounts.model.ApiKeyListOptions, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param options * list options. * @return the list of api keys corresponding to filter options (One page). * @throws MbedCloudException * if an error occurs during the process. */ public ListResponse<ApiKey> apiKeys(@Nullable ApiKeyListOptions options) throws MbedCloudException { return apiKeys(options, getModel()); } /** * Get the API keys of a group. * * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#apiKeys(com.arm.mbed.cloud.sdk.accounts.model.ApiKeyListOptions, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param options * list options. * @param policyGroup * a policy group. * @return the list of api keys corresponding to filter options (One page). * @throws MbedCloudException * if an error occurs during the process. */ public ListResponse<ApiKey> apiKeys(@Nullable ApiKeyListOptions options, @NonNull PolicyGroup policyGroup) throws MbedCloudException { return ((Accounts) getModuleOrThrow()).apiKeys(options, policyGroup); } /** * Get the API keys of a group. * * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#apiKeys(String, com.arm.mbed.cloud.sdk.accounts.model.ApiKeyListOptions)} * * @param id * The ID of the group. * @param options * list options. * @return the list of api keys corresponding to filter options (One page). * @throws MbedCloudException * if an error occurs during the process. */ public ListResponse<ApiKey> apiKeys(@NonNull String id, @Nullable ApiKeyListOptions options) throws MbedCloudException { return ((Accounts) getModuleOrThrow()).apiKeys(id, options); } /** * Clones this instance. * * <p> * * @see java.lang.Object#clone() * @return a cloned instance */ @Override @SuppressWarnings({ "resource", "unused" }) public PolicyGroupDao clone() { try { return new PolicyGroupDao().configureAndGet(getModuleOrThrow() == null ? 
null : getModuleOrThrow().clone()); } catch (MbedCloudException exception) { return null; } } /** * Adds a policy group. * * <p> * Note: uses internal data model * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#create(com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @return an added policy group * @throws MbedCloudException * if an error occurs during the process. */ @Override public PolicyGroup create() throws MbedCloudException { return create(getModel()); } /** * Adds a policy group. * * <p> * Note: uses internal data model * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#create(java.lang.Object, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param members * Represents arrays of user and API key IDs. * @return an added policy group * @throws MbedCloudException * if an error occurs during the process. */ public PolicyGroup create(@Nullable Object members) throws MbedCloudException { return create(members, getModel()); } /** * Adds a policy group. * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#createPolicyGroup(java.lang.Object, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param members * Represents arrays of user and API key IDs. * @param policyGroup * a policy group. * @return an added policy group * @throws MbedCloudException * if an error occurs during the process. */ public PolicyGroup create(@Nullable Object members, @NonNull PolicyGroup policyGroup) throws MbedCloudException { return setAndGetModel(((Accounts) getModuleOrThrow()).createPolicyGroup(members, policyGroup)); } /** * Adds a policy group. * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#createPolicyGroup(com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param policyGroup * a policy group. * @return an added policy group * @throws MbedCloudException * if an error occurs during the process. */ @Override public PolicyGroup create(@NonNull PolicyGroup policyGroup) throws MbedCloudException { return setAndGetModel(((Accounts) getModuleOrThrow()).createPolicyGroup(policyGroup)); } /** * Deletes a policy group. * * <p> * Note: uses internal data model * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#delete(com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @throws MbedCloudException * if an error occurs during the process. */ @Override public void delete() throws MbedCloudException { delete(getModel()); } /** * Deletes a policy group. * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#deletePolicyGroup(com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param policyGroup * a policy group. * @throws MbedCloudException * if an error occurs during the process. */ @Override public void delete(@NonNull PolicyGroup policyGroup) throws MbedCloudException { ((Accounts) getModuleOrThrow()).deletePolicyGroup(policyGroup); } /** * Deletes a policy group. * * <p> * Similar to {@link com.arm.mbed.cloud.sdk.Accounts#deletePolicyGroup(String)} * * @param id * The ID of the group to delete. * @throws MbedCloudException * if an error occurs during the process. */ @Override public void delete(@NonNull String id) throws MbedCloudException { ((Accounts) getModuleOrThrow()).deletePolicyGroup(id); } /** * Instantiates model. * * @return instantiated model */ @Override @Internal protected PolicyGroup instantiateModel() { return new PolicyGroup(); } /** * Instantiates modules. * * @param client * an api client wrapper. 
     * @return instantiated module
     */
    @Override
    @Internal
    protected SdkContext instantiateModule(ApiClientWrapper client) {
        return new Accounts(client);
    }

    /**
     * Instantiates modules.
     *
     * @param options
     *            a connection options.
     * @return instantiated module
     */
    @Override
    @Internal
    protected SdkContext instantiateModule(ConnectionOptions options) {
        return new Accounts(options);
    }

    /**
     * Instantiates modules.
     *
     * @param context
     *            an sdk context.
     * @return instantiated module
     */
    @Override
    @Internal
    protected SdkContext instantiateModule(SdkContext context) {
        return new Accounts(context);
    }

    /**
     * Gets a policy group.
     *
     * <p>
     * Note: uses internal data model
     * <p>
     * Similar to
     * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#read(com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)}
     *
     * @return the policy group
     * @throws MbedCloudException
     *             if an error occurs during the process.
     */
    @Override
    public PolicyGroup read() throws MbedCloudException {
        return read(getModel());
    }

    /**
     * Gets a policy group.
     *
     * <p>
     * Similar to
     * {@link com.arm.mbed.cloud.sdk.Accounts#readPolicyGroup(com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)}
     *
     * @param policyGroup
     *            a policy group.
     * @return the policy group
     * @throws MbedCloudException
     *             if an error occurs during the process.
     */
    public PolicyGroup read(@NonNull PolicyGroup policyGroup) throws MbedCloudException {
        return setAndGetModel(((Accounts) getModuleOrThrow()).readPolicyGroup(policyGroup));
    }

    /**
     * Gets a policy group.
     *
     * <p>
     * Similar to {@link com.arm.mbed.cloud.sdk.Accounts#readPolicyGroup(String)}
     *
     * @param id
     *            The ID of the group.
     * @return the policy group
     * @throws MbedCloudException
     *             if an error occurs during the process.
     */
    @Override
    public PolicyGroup read(@NonNull String id) throws MbedCloudException {
        return setAndGetModel(((Accounts) getModuleOrThrow()).readPolicyGroup(id));
    }

    /**
     * Modifies a policy group.
     *
     * <p>
     * Note: uses internal data model
     * <p>
     * Similar to
     * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#update(com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)}
     *
     * @return an updated policy group
     * @throws MbedCloudException
     *             if an error occurs during the process.
     */
    @Override
    public PolicyGroup update() throws MbedCloudException {
        return update(getModel());
    }

    /**
     * Modifies a policy group.
     *
     * <p>
     * Similar to
     * {@link com.arm.mbed.cloud.sdk.Accounts#updatePolicyGroup(com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)}
     *
     * @param policyGroup
     *            a policy group.
     * @return an updated policy group
     * @throws MbedCloudException
     *             if an error occurs during the process.
     */
    @Override
    public PolicyGroup update(@NonNull PolicyGroup policyGroup) throws MbedCloudException {
        return setAndGetModel(((Accounts) getModuleOrThrow()).updatePolicyGroup(policyGroup));
    }

    /**
     * Modifies a policy group.
     *
     * <p>
     * Note: uses internal data model
     * <p>
     * Similar to
     * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#update(String, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)}
     *
     * @param id
     *            The ID of the group.
     * @return an updated policy group
     * @throws MbedCloudException
     *             if an error occurs during the process.
     */
    public PolicyGroup update(@NonNull String id) throws MbedCloudException {
        return update(id, getModel());
    }

    /**
     * Modifies a policy group.
     *
     * <p>
     * Similar to
     * {@link com.arm.mbed.cloud.sdk.Accounts#updatePolicyGroup(String, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)}
     *
     * @param id
     *            The ID of the group.
     * @param policyGroup
     *            a policy group.
     * @return an updated policy group
     * @throws MbedCloudException
     *             if an error occurs during the process.
*/ public PolicyGroup update(@NonNull String id, @NonNull PolicyGroup policyGroup) throws MbedCloudException { return setAndGetModel(((Accounts) getModuleOrThrow()).updatePolicyGroup(id, policyGroup)); } /** * Get users of a group. * * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#users(String, com.arm.mbed.cloud.sdk.accounts.model.UserListOptions)} * * @param id * The ID of the group. * @param options * list options. * @return the list of users corresponding to filter options (One page). * @throws MbedCloudException * if an error occurs during the process. */ public ListResponse<User> users(@NonNull String id, @Nullable UserListOptions options) throws MbedCloudException { return ((Accounts) getModuleOrThrow()).users(id, options); } /** * Get users of a group. * * * <p> * Note: uses internal data model * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.accounts.model.PolicyGroupDao#users(com.arm.mbed.cloud.sdk.accounts.model.UserListOptions, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param options * list options. * @return the list of users corresponding to filter options (One page). * @throws MbedCloudException * if an error occurs during the process. */ public ListResponse<User> users(@Nullable UserListOptions options) throws MbedCloudException { return users(options, getModel()); } /** * Get users of a group. * * * <p> * Similar to * {@link com.arm.mbed.cloud.sdk.Accounts#users(com.arm.mbed.cloud.sdk.accounts.model.UserListOptions, com.arm.mbed.cloud.sdk.accounts.model.PolicyGroup)} * * @param options * list options. * @param policyGroup * a policy group. * @return the list of users corresponding to filter options (One page). * @throws MbedCloudException * if an error occurs during the process. */ public ListResponse<User> users(@Nullable UserListOptions options, @NonNull PolicyGroup policyGroup) throws MbedCloudException { return ((Accounts) getModuleOrThrow()).users(options, policyGroup); } }
It’s the holidays and I want me some good ol’ fashioned shepherd’s pie (*insert southwestern accent). 🙂 Well, almost “good ol’ fashioned.” 😛 This vegetarian shepherd’s pie is vegan and made with lentils and sweet potato. Oh, and it’s absolutely satisfying!

Unfortunately, even though I LOVE sweet potato, my tummy does not, and so I have to limit my consumption. This means that when I have sweet potato, I have to make it count and be completely worth it. Friends, this vegan shepherd’s pie made it worth it. 🙂

Ingredients for Vegetarian Shepherd’s Pie with Lentils & Sweet Potato

6 medium sized sweet potatoes
3 cups cooked mung beans
4 carrots, chopped
4 stalks of celery
1 medium onion, chopped
1 jalapeno, seeded and chopped
4 cloves garlic, chopped (I love garlic but you can reduce if you please)
2 tbsp cumin
1 tbsp coriander
1/2 chilli powder
1 tsp salt
2 tbsp olive oil, divided
2 tbsp almond milk (or any other dairy free milk)

Directions for Vegetarian Shepherd’s Pie with Lentils & Sweet Potato

Preheat oven to 350
Peel sweet potatoes and chop into chunks
Place sweet potatoes into boiling salted water and cook for 15 minutes, or until soft
In the meantime, add a tbsp of olive oil to a large skillet over medium heat
When the oil is hot, add onions and cook until translucent
Add garlic and cook for another 2-3 minutes
Add carrots, celery, jalapeno (add the seeds as well if you want it more spicy), all the spices, and the lentils
Cook for about 20 minutes until all the veggies are tender
Once the sweet potatoes are soft, mash them with a fork or masher while adding 1 tbsp of olive oil and the almond milk
Place the lentil mixture into a casserole dish
Smooth sweet potato mash on top
Bake for 45 minutes or until the mash starts to brown (note: if it’s still not brown after an hour, you can broil it for a few minutes; keep your eye on it though, as it will burn very quickly!)
#include "RobotParameters.h" namespace RobotParameters { const EigenVectord3 zeroVec{0, 0, 0}; const EigenVectord3 ex{1,0,0}; const EigenVectord3 ey{0,1,0}; const EigenVectord3 ez{0,0,1}; const double scalarGravity = 9.81; const EigenVectord3 gravity{scalarGravity * ez}; const EigenVectord3 s1Translation{(0.015 - 0.014) * ex}; const EigenVectord3 s1RotationAxis{ez}; EigenMatrixd3 s1Rotation(double rad) { EigenMatrixd3 out; out << cos(rad), -sin(rad), 0, sin(rad), cos(rad), 0, 0, 0, 1; return out; } const EigenVectord3 s2Translation{-0.2 * ey}; const EigenVectord3 s2RotationAxis{-ex}; EigenMatrixd3 s2Rotation(double rad) { EigenMatrixd3 out; out << 1, 0, 0, 0, cos(rad), sin(rad), 0, -sin(rad), cos(rad); return out; } const EigenVectord3 s3Translation{-(0.164 + 0.025) * ey}; const EigenVectord3 s3RotationAxis{ex}; EigenMatrixd3 s3Rotation(double rad) { EigenMatrixd3 out; out << 1, 0, 0, 0, cos(rad), -sin(rad), 0, sin(rad), cos(rad); return out; } const EigenVectord3 s4Translation{zeroVec}; const EigenVectord3 s4RotationAxis{ey}; EigenMatrixd3 s4Rotation(double rad) { EigenMatrixd3 out; out << cos(rad), 0, sin(rad), 0, 1, 0, -sin(rad), 0, cos(rad); return out; } const double s5TranslationLength{0.020 + 0.055 - 0.0265}; const EigenVectord3 s5Translation{-s5TranslationLength * ey}; const EigenVectord3 s5RotationAxis{-ex}; EigenMatrixd3 s5Rotation(double rad) { EigenMatrixd3 out; out << 1, 0, 0, 0, cos(rad), sin(rad), 0, -sin(rad), cos(rad); return out; } const EigenVectord3 s6Translation{-(0.029 / 2 + 0.020) * ez}; const EigenVectord3 s6ZeroRotationDir{-ez}; const EigenVectord3 s6RotationAxis{ey}; EigenMatrixd3 s6Rotation(double rad) { EigenMatrixd3 out; out << cos(rad), 0, sin(rad), 0, 1, 0, -sin(rad), 0, cos(rad); return out; } auto s1Dynamic(const EigenVectord3& wacc, const EigenVectord3& wvel, const EigenVectord3& acc, const EigenVectord3& me, const EigenVectord3& fe) { DynamicRetType out{}; double mass = 0.1; EigenVectord3 mcDist{0, 0, 0}; EigenMatrixd3 I; I << 0, 0, 0, 0, 0, 0, 0, 0, 0; EigenVectord3 mcAcc = acc + wacc.cross(mcDist) + wvel.cross(wvel.cross(mcDist)); out.f = mass * mcAcc + fe; out.m = I * wacc + s1Translation.cross(fe) - mass * mcAcc.cross(mcDist) + me; out.tau = s1RotationAxis.dot(out.m); return out; } auto s2Dynamic(const EigenVectord3& wacc, const EigenVectord3& wvel, const EigenVectord3& acc, const EigenVectord3& me, const EigenVectord3& fe) { DynamicRetType out{}; const double mass = 0.18; const double mcDistLength = 0.1; const double pendulumResFre = 10.0 / (8 + 3 * 1 / 30.0); const double pendulumResW = 2 * M_PI * pendulumResFre; const double inertia = (mass * scalarGravity * mcDistLength) / (pendulumResW * pendulumResW) - mass * mcDistLength * mcDistLength; EigenVectord3 mcDist{0, -mcDistLength, 0}; EigenMatrixd3 I; I << inertia, 0, 0, 0, 0, 0, 0, 0, inertia; I *= mass * mcDistLength * mcDistLength; EigenVectord3 mcAcc = acc + wacc.cross(mcDist) + wvel.cross(wvel.cross(mcDist)); out.f = mass * mcAcc + fe; out.m = I * wacc + s2Translation.cross(fe) - mass * mcAcc.cross(mcDist) + me; out.tau = s2RotationAxis.dot(out.m); return out; } auto s3Dynamic(const EigenVectord3& wacc, const EigenVectord3& wvel, const EigenVectord3& acc, const EigenVectord3& me, const EigenVectord3& fe) { DynamicRetType out{}; const double mass = 0.118; const double mcDistLength = 0.174; const double pendulumResFre = 35 / (32 + 22 / 30.0); const double pendulumResW = 2 * M_PI * pendulumResFre; const double inertia = (mass * scalarGravity * mcDistLength) / (pendulumResW * pendulumResW) - mass * 
mcDistLength * mcDistLength;
    EigenVectord3 mcDist{0, -mcDistLength, 0};
    EigenMatrixd3 I;
    I << inertia, 0, 0,
         0, 0, 0,
         0, 0, inertia;
    I *= mass * mcDistLength * mcDistLength;
    EigenVectord3 mcAcc = acc + wacc.cross(mcDist) + wvel.cross(wvel.cross(mcDist));
    out.f = mass * mcAcc + fe;
    out.m = I * wacc + s3Translation.cross(fe) - mass * mcAcc.cross(mcDist) + me;
    out.tau = s3RotationAxis.dot(out.m);
    return out;
}

EigenVectord3 combinedDynamic(const EigenVectord3& s1Wacc,
        const EigenVectord3& s2Wacc,
        const EigenVectord3& s3Wacc,
        const EigenVectord3& s1Wvel,
        const EigenVectord3& s2Wvel,
        const EigenVectord3& s3Wvel,
        const EigenVectord3& s1Acc,
        const EigenVectord3& s2Acc,
        const EigenVectord3& s3Acc,
        const EigenMatrixd3& s2RotationMatrix,
        const EigenMatrixd3& s3RotationMatrix) {
    // Propagate forces and moments from the outermost link (s3) inward to s1.
    auto [ms3, fs3, taus3] = s3Dynamic(s3Wacc, s3Wvel, s3Acc, EigenVectord3{0, 0, 0}, EigenVectord3{0, 0, 0});
    ms3 = s3RotationMatrix * ms3;
    fs3 = s3RotationMatrix * fs3;
    auto [ms2, fs2, taus2] = s2Dynamic(s2Wacc, s2Wvel, s2Acc, ms3, fs3);
    ms2 = s2RotationMatrix * ms2;
    fs2 = s2RotationMatrix * fs2;
    auto [ms1, fs1, taus1] = s1Dynamic(s1Wacc, s1Wvel, s1Acc, ms2, fs2);
    return EigenVectord3{taus1, taus2, taus3};
}

DynamicMatrices getDynamicMatrices(const EigenVectord6& a) {
    EigenMatrixd3 s2RotationM = s2Rotation(a[1]);
    EigenMatrixd3 s3RotationM = s3Rotation(a[2]);
    EigenMatrixd3 s1InvRotationM = s1Rotation(-a[0]);
    EigenMatrixd3 s2InvRotationM = s2Rotation(-a[1]);
    EigenMatrixd3 s3InvRotationM = s3Rotation(-a[2]);

    EigenVectord3 s1Acc = s1InvRotationM * gravity;
    EigenVectord3 s2Acc = s2InvRotationM * s1Acc;
    EigenVectord3 s3Acc = s3InvRotationM * s2Acc;
    EigenVectord3 accTau = combinedDynamic(zeroVec, zeroVec, zeroVec,
            zeroVec, zeroVec, zeroVec,
            s1Acc, s2Acc, s3Acc,
            s2RotationM, s3RotationM);

    EigenVectord3 s1s1Wacc = s1RotationAxis;
    EigenVectord3 s1s2Wacc = s2InvRotationM * s1s1Wacc;
    EigenVectord3 s1s2DistToC = s2InvRotationM * s1Translation;
    EigenVectord3 s1s3Wacc = s3InvRotationM * s1s2Wacc;
    EigenVectord3 s1s3DistToC = s3InvRotationM * (s1s2DistToC + s2Translation);
    EigenVectord3 s1Tau = combinedDynamic(s1s1Wacc, s1s2Wacc, s1s3Wacc,
            zeroVec, zeroVec, zeroVec,
            zeroVec, s1s2Wacc.cross(s1s2DistToC), s1s3Wacc.cross(s1s3DistToC),
            s2RotationM, s3RotationM);

    EigenVectord3 s2s2Wacc = s2RotationAxis;
    EigenVectord3 s2s3Wacc = s3InvRotationM * s2s2Wacc;
    EigenVectord3 s2s3DistToC = s3InvRotationM * (s2Translation);
    EigenVectord3 s2Tau = combinedDynamic(zeroVec, s2s2Wacc, s2s3Wacc,
            zeroVec, zeroVec, zeroVec,
            zeroVec, zeroVec, s2s3Wacc.cross(s2s3DistToC),
            s2RotationM, s3RotationM);

    EigenVectord3 s3s3Wacc = s3RotationAxis;
    EigenVectord3 s3Tau = combinedDynamic(zeroVec, zeroVec, s3s3Wacc,
            zeroVec, zeroVec, zeroVec,
            zeroVec, zeroVec, zeroVec,
            s2RotationM, s3RotationM);

    EigenVectord3 s1Vels2DistToC = s2InvRotationM * s1Translation;
    EigenVectord3 s1Vels3DistToC = s3InvRotationM * (s1Vels2DistToC + s2Translation);
    EigenVectord3 s1Vels2Acc = s1RotationAxis.cross(s1RotationAxis.cross(s1Vels2DistToC));
    EigenVectord3 s1Vels3Acc = s1RotationAxis.cross(s1RotationAxis.cross(s1Vels3DistToC));
    EigenVectord3 s1s1Wvel = s1RotationAxis;
    EigenVectord3 s1s2Wvel = s2InvRotationM * s1s1Wvel;
    EigenVectord3 s1s3Wvel = s3InvRotationM * s1s2Wvel;
    EigenVectord3 s1WvelTau = combinedDynamic(zeroVec, zeroVec, zeroVec,
            s1s1Wvel, s1s2Wvel, s1s3Wvel,
            zeroVec, s1Vels2Acc, s1Vels3Acc,
            s2RotationM, s3RotationM);

    EigenVectord3 s2Vels3DistToC = s3InvRotationM * (s2Translation);
    EigenVectord3 s2Vels3Acc = s2RotationAxis.cross(s2RotationAxis.cross(s2Vels3DistToC));
    EigenVectord3 s2s2Wvel = s2RotationAxis;
    EigenVectord3 s2s3Wvel = s3InvRotationM * s2s2Wvel;
    EigenVectord3 s2WvelTau = combinedDynamic(zeroVec, zeroVec, zeroVec,
            zeroVec, s2s2Wvel, s2s3Wvel,
            zeroVec, zeroVec, s2Vels3Acc,
            s2RotationM, s3RotationM);

    EigenVectord3 s3s3Wvel = s3RotationAxis;
    EigenVectord3 s3WvelTau = combinedDynamic(zeroVec, zeroVec, zeroVec,
            zeroVec, zeroVec, s3s3Wvel,
            zeroVec, zeroVec, zeroVec,
            s2RotationM, s3RotationM);

    DynamicMatrices out;
    out.inert <<
        s1Tau[0], s2Tau[0], s3Tau[0], 0, 0, 0,
        s1Tau[1], s2Tau[1], s3Tau[1], 0, 0, 0,
        s1Tau[2], s2Tau[2], s3Tau[2], 0, 0, 0,
        0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0;
    out.w2Torque <<
        s1WvelTau[0], s2WvelTau[0], s3WvelTau[0], 0, 0, 0,
        s1WvelTau[1], s2WvelTau[1], s3WvelTau[1], 0, 0, 0,
        s1WvelTau[2], s2WvelTau[2], s3WvelTau[2], 0, 0, 0,
        0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0,
        0, 0, 0, 0, 0, 0;
    out.externalTorque << accTau[0], accTau[1], accTau[2], 0, 0, 0;
    return out;
}

const double DynamicRobotDynamics::pwmLimit = 1023;
const double DynamicRobotDynamics::currentToTorqueScale = 0.00056;
const double DynamicRobotDynamics::gearBoxMomentOfInertia = 0.015064771031078847;
const double DynamicRobotDynamics::pwmToStallCurrent = 1.945991041784367;
const double DynamicRobotDynamics::backEmfCurrent = -0.00030467666381376284 * 4096 / 2.0 / M_PI;

DynamicRobotDynamics::DynamicRobotDynamics(double dtTime) {
    RobotDynamics<6, double>::dt = dtTime;
    RobotDynamics<6, double>::maxAxisAbsVel = {2, 2, 2, 2, 2, 2};
};

DynamicRobotDynamics::~DynamicRobotDynamics(){};

void DynamicRobotDynamics::update(const Eigen::Matrix<double, 6, 1>& pos,
        const Eigen::Matrix<double, 6, 1>& preVelDir,
        const Eigen::Matrix<double, 6, 1>& postVelDir,
        double approxVel) {
    RobotDynamics<6, double>::torqueLimits = {pwmLimit, pwmLimit, pwmLimit, pwmLimit, pwmLimit, pwmLimit};

    auto dynamicMatrices = getDynamicMatrices({0,0,0,0,0,0});
    dynamicMatrices.inert += Eigen::Matrix<double, 6, 6>::Identity() * gearBoxMomentOfInertia;

    // wacc = inertInv * currentToTorqueScale * backEmfCurrent * pwmLimit * vk
    //      + inertInv * currentToTorqueScale * pwmToStallCurrent * u
    //      - inertInv * w2torq * vk * vk - inertInv * externTorq
    auto inertInv = dynamicMatrices.inert.inverse();
    auto waccVk = inertInv * currentToTorqueScale * backEmfCurrent * pwmLimit;
    auto waccU = inertInv * currentToTorqueScale * pwmToStallCurrent;

    auto approxW2 = preVelDir;
    approxW2.normalize();
    approxW2 *= 0;//approxVel;
    //std::transform(std::cbegin(approxW2), std::cend(approxW2), std::begin(approxW2), [](auto& c){return c * c;});
    auto waccExtern = - inertInv * (dynamicMatrices.w2Torque * approxW2 + dynamicMatrices.externalTorque);

    //vkp1 = vk + dt * wacc
    RobotDynamics<6, double>::a = Eigen::Matrix<double, 6, 6>::Identity() * 1.0;
    RobotDynamics<6, double>::a += RobotDynamics<6, double>::dt * waccVk;
    RobotDynamics<6, double>::b = RobotDynamics<6, double>::dt * waccU;
    RobotDynamics<6, double>::externalTorqueAcc = RobotDynamics<6, double>::dt * waccExtern;
    RobotDynamics<6, double>::bInv = RobotDynamics<6, double>::b.inverse();
};

void DynamicRobotDynamics::recalculateFreedForward(TrajectoryItem<6, double>& itemK,
        const TrajectoryItem<6, double>& itemKp1) {
    auto dynamicMatrices = getDynamicMatrices(itemK.p);
    dynamicMatrices.inert += Eigen::Matrix<double, 6, 6>::Identity() * gearBoxMomentOfInertia;

    // wacc = inertInv * currentToTorqueScale * backEmfCurrent * pwmLimit * vk
    //      + inertInv * currentToTorqueScale * pwmToStallCurrent * u
    //      - inertInv * w2torq * vk * vk - inertInv * externTorq
    auto inertInv = dynamicMatrices.inert.inverse();
    auto waccU = inertInv * currentToTorqueScale;

    auto approxW2 = itemK.v;
    std::transform(std::cbegin(approxW2), std::cend(approxW2), std::begin(approxW2), [](auto& c){return c * c;});
    auto waccExtern = - inertInv * (dynamicMatrices.w2Torque * approxW2 + dynamicMatrices.externalTorque);

    //vkp1 = vk + dt * wacc
    auto a = Eigen::Matrix<double, 6, 6>::Identity() * 1.0;
    auto b = RobotDynamics<6, double>::dt * waccU;
    auto externalTorqueAcc = RobotDynamics<6, double>::dt * waccExtern;
    itemK.u = b.inverse() * (itemKp1.v - a * itemK.v - externalTorqueAcc);
};

Eigen::Matrix<double, 6, 1> DynamicRobotDynamics::recalcPwm(const Eigen::Matrix<double, 6, 1>& current,
        const Eigen::Matrix<double, 6, 1>& vel) {
    Eigen::Matrix<double, 6, 1> pwm;
    //current[i] = pwmToStallCurrent * pwm[i] + backEmfCurrent * vel[i] * pwm[i];
    for (size_t i = 0; i != 6; ++i) {
        pwm[i] = current[i] / (pwmToStallCurrent + backEmfCurrent * vel[i]);
    }
    return pwm;
}
};
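The commented model above discretizes a DC-motor joint: wacc = inertInv * currentToTorqueScale * (backEmfCurrent * pwmLimit * vk + pwmToStallCurrent * u) - inertInv * (w2Torque * vk^2 + externTorque), followed by the Euler step vkp1 = vk + dt * wacc. Below is a minimal single-joint (scalar) sketch of that update and its feedforward inversion; all constant values are illustrative, not the robot's real parameters, and the velocity-squared term is dropped, mirroring how update() zeroes approxW2.

dt = 0.01
inert = 0.015         # effective joint inertia, illustrative
k_tau = 0.00056       # currentToTorqueScale
k_stall = 1.946       # pwmToStallCurrent (rounded)
k_emf = -0.199        # backEmfCurrent (rounded)
pwm_limit = 1023.0
extern_torque = 0.02  # gravity load, illustrative

inert_inv = 1.0 / inert
a = 1.0 + dt * inert_inv * k_tau * k_emf * pwm_limit  # velocity feedback term
b = dt * inert_inv * k_tau * k_stall                  # control gain
extern_acc = -dt * inert_inv * extern_torque

def step(v, u):
    # one Euler step of joint velocity: vkp1 = a*vk + b*u + externalTorqueAcc
    return a * v + b * u + extern_acc

def feedforward(v, v_next):
    # invert the model for the control that reaches v_next, analogous to
    # recalculateFreedForward: u = bInv * (vkp1 - a*vk - externalTorqueAcc)
    return (v_next - a * v - extern_acc) / b

u = feedforward(0.0, 0.5)
assert abs(step(0.0, u) - 0.5) < 1e-9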
import typing

# import dataclasses

S = typing.TypeVar("S")


# @dataclasses.dataclass
class Monoid(typing.Generic[S]):
    op: typing.Callable[[S, S], S]
    e: typing.Callable[[], S]

    def __init__(self, op, e) -> None:
        self.op = op
        self.e = e


def floor_sqrt(n: int) -> int:
    r"""Floor Sqrt."""
    assert n >= 0
    x = 0
    while x * x <= n:
        x += 1
    return x - 1


class SqrtDecomposition(typing.Generic[S]):
    def __init__(self, monoid: Monoid[S], arr: typing.List[S]) -> None:
        n = len(arr)
        sqrt = floor_sqrt(n)
        num_buckets = (n + sqrt - 1) // sqrt
        buckets = [monoid.e() for _ in range(num_buckets)]
        data_size = sqrt * num_buckets
        data = [monoid.e() for _ in range(data_size)]
        data[:n] = arr.copy()
        for i in range(num_buckets):
            for j in range(sqrt * i, sqrt * (i + 1)):
                buckets[i] = monoid.op(buckets[i], data[j])
        self.__data = data
        self.__buckets = buckets
        self.__sqrt = sqrt
        self.__original_size = n
        self.__monoid = monoid

    def __len__(self) -> int:
        return self.__original_size

    def __setitem__(self, i: int, x: S) -> None:
        assert 0 <= i < len(self)
        self.__data[i] = x
        idx = i // self.__sqrt
        self.__buckets[idx] = self.__monoid.e()
        for j in range(self.__sqrt * idx, self.__sqrt * (idx + 1)):
            self.__buckets[idx] = self.__monoid.op(
                self.__buckets[idx],
                self.__data[j],
            )

    def __getitem__(self, i: int) -> S:
        assert 0 <= i < len(self)
        return self.__data[i]

    def get(self, left: int, right: int) -> S:
        assert 0 <= left <= right <= len(self)
        v = self.__monoid.e()
        for i in range(len(self.__buckets)):
            if left >= self.__sqrt * (i + 1):
                continue
            if right <= self.__sqrt * i:
                break
            if left <= self.__sqrt * i and self.__sqrt * (i + 1) <= right:
                v = self.__monoid.op(v, self.__buckets[i])
                continue
            for j in range(self.__sqrt * i, self.__sqrt * (i + 1)):
                if j < left:
                    continue
                if j >= right:
                    break
                v = self.__monoid.op(v, self.__data[j])
        return v


def main() -> None:
    n, q = map(int, input().split())
    a = list(map(int, input().split()))
    monoid = Monoid[int](op=lambda x, y: x ^ y, e=lambda: 0)
    sd = SqrtDecomposition[int](monoid, a)
    res = []
    for _ in range(q):
        t, x, y = map(int, input().split())
        x -= 1
        if t == 1:
            sd[x] ^= y
        else:
            res.append(sd.get(x, y))
    print(*res, sep='\n')


main()
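A quick randomized cross-check of the bucket structure above against a naive scan; this is hypothetical test code, assuming Monoid and SqrtDecomposition from the file above are in scope (the workload is the same XOR point-update / range-query pattern as main()).

import random

def brute_xor(arr, left, right):
    v = 0
    for x in arr[left:right]:
        v ^= x
    return v

random.seed(0)
arr = [random.randrange(1 << 10) for _ in range(37)]
sd = SqrtDecomposition[int](Monoid[int](op=lambda x, y: x ^ y, e=lambda: 0), arr)
for _ in range(200):
    left = random.randrange(len(arr) + 1)
    right = random.randrange(left, len(arr) + 1)
    assert sd.get(left, right) == brute_xor(arr, left, right)
arr[5] = sd[5] = 123  # point update goes through __setitem__, rebuilding one bucket
assert sd.get(0, len(arr)) == brute_xor(arr, 0, len(arr))
print("ok")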
#include "Endstop.h" Endstop::Endstop(byte pin) { _pin = pin; pinMode(_pin, INPUT_PULLUP); } bool Endstop::isClicked() { if (digitalRead(_pin) == LOW) return true; else return false; }
// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use crate::agent::earcons::agent::CommonEarconsParams;
use crate::agent::earcons::sound_ids::{VOLUME_CHANGED_SOUND_ID, VOLUME_MAX_SOUND_ID};
use crate::agent::earcons::utils::{connect_to_sound_player, play_sound};
use crate::audio::types::{AudioInfo, AudioStream, AudioStreamType};
use crate::audio::{create_default_modified_counters, ModifiedCounters};
use crate::base::{SettingInfo, SettingType};
use crate::event;
use crate::handler::base::{Payload, Request};
use crate::message::base::Audience;
use crate::message::receptor::extract_payload;
use crate::service;
use anyhow::Error;
use fuchsia_async as fasync;
use fuchsia_syslog::fx_log_debug;
use futures::StreamExt;
use std::collections::{HashMap, HashSet};

/// The `VolumeChangeHandler` takes care of the earcons functionality on volume change.
pub(super) struct VolumeChangeHandler {
    common_earcons_params: CommonEarconsParams,
    last_user_volumes: HashMap<AudioStreamType, f32>,
    modified_counters: ModifiedCounters,
    publisher: event::Publisher,
}

/// The maximum volume level.
const MAX_VOLUME: f32 = 1.0;

/// The file path for the earcon to be played for max sound level.
const VOLUME_MAX_FILE_PATH: &str = "volume-max.wav";

/// The file path for the earcon to be played for volume changes below max volume level.
const VOLUME_CHANGED_FILE_PATH: &str = "volume-changed.wav";

impl VolumeChangeHandler {
    pub(super) async fn create(
        publisher: event::Publisher,
        params: CommonEarconsParams,
        messenger: service::message::Messenger,
    ) -> Result<(), Error> {
        let mut receptor = messenger
            .message(
                Payload::Request(Request::Get).into(),
                Audience::Address(service::Address::Handler(SettingType::Audio)),
            )
            .send();

        // Get initial user media volume level.
        let last_user_volumes =
            if let Ok((Payload::Response(Ok(Some(SettingInfo::Audio(info)))), _)) =
                receptor.next_of::<Payload>().await
            {
                // Create map from stream type to user volume levels for each stream.
                info.streams
                    .iter()
                    .filter(|x| {
                        x.stream_type == AudioStreamType::Media
                            || x.stream_type == AudioStreamType::Interruption
                    })
                    .map(|stream| (stream.stream_type, stream.user_volume_level))
                    .collect()
            } else {
                // Could not extract info from response, default to empty volumes.
                HashMap::new()
            };

        fasync::Task::spawn(async move {
            let mut handler = Self {
                common_earcons_params: params,
                last_user_volumes,
                modified_counters: create_default_modified_counters(),
                publisher,
            };

            let listen_receptor = messenger
                .message(
                    Payload::Request(Request::Listen).into(),
                    Audience::Address(service::Address::Handler(SettingType::Audio)),
                )
                .send()
                .fuse();
            futures::pin_mut!(listen_receptor);

            loop {
                futures::select! {
                    volume_change_event = listen_receptor.next() => {
                        if let Some(
                            service::Payload::Setting(Payload::Response(Ok(Some(
                                SettingInfo::Audio(audio_info)))))
                        ) = extract_payload(volume_change_event) {
                            handler.on_audio_info(audio_info).await;
                        }
                    }
                    complete => break,
                }
            }
        })
        .detach();

        Ok(())
    }

    /// Calculates and returns the streams that were changed based on
    /// their timestamps, updating them in the stored timestamps if
    /// they were changed.
    fn calculate_changed_streams(
        &mut self,
        all_streams: [AudioStream; 5],
        new_modified_counters: ModifiedCounters,
    ) -> Vec<AudioStream> {
        let mut changed_stream_types = HashSet::new();
        for (stream_type, timestamp) in new_modified_counters {
            if self.modified_counters.get(&stream_type) != Some(&timestamp) {
                changed_stream_types.insert(stream_type);
                self.modified_counters.insert(stream_type, timestamp);
            }
        }

        all_streams
            .iter()
            .filter(|stream| changed_stream_types.contains(&stream.stream_type))
            .cloned()
            .collect()
    }

    /// Retrieve a user volume of the specified [stream_type] from the given [changed_streams].
    fn get_user_volume(
        &self,
        changed_streams: Vec<AudioStream>,
        stream_type: AudioStreamType,
    ) -> Option<f32> {
        changed_streams.iter().find(|&&x| x.stream_type == stream_type).map(|x| x.user_volume_level)
    }

    /// Helper for on_audio_info. Handles the changes for a specific AudioStreamType.
    /// Enables separate handling of earcons on different streams.
    async fn on_audio_info_for_stream(
        &mut self,
        new_user_volume: f32,
        stream_type: AudioStreamType,
    ) {
        let volume_is_max = new_user_volume == MAX_VOLUME;
        let last_user_volume = self.last_user_volumes.get(&stream_type);

        // Logging for debugging volume changes.
        fx_log_debug!(
            "[earcons_agent] New {:?} user volume: {:?}, Last {:?} user volume: {:?}",
            stream_type,
            new_user_volume,
            stream_type,
            last_user_volume,
        );

        if last_user_volume != Some(&new_user_volume) || volume_is_max {
            if last_user_volume != None {
                // On restore, the last media user volume is set for the first time, and registers
                // as different from the last seen volume, because it is initially None. Don't play
                // the earcons sound on that set.
                self.play_volume_sound(new_user_volume);
            }
            self.last_user_volumes.insert(stream_type, new_user_volume);
        }
    }

    /// Invoked when a new `AudioInfo` is retrieved. Determines whether an
    /// earcon should be played and plays sound if necessary.
    async fn on_audio_info(&mut self, audio_info: AudioInfo) {
        let changed_streams = match audio_info.modified_counters {
            None => Vec::new(),
            Some(counters) => self.calculate_changed_streams(audio_info.streams, counters),
        };
        let media_user_volume =
            self.get_user_volume(changed_streams.clone(), AudioStreamType::Media);
        let interruption_user_volume =
            self.get_user_volume(changed_streams, AudioStreamType::Interruption);

        if let Some(media_user_volume) = media_user_volume {
            self.on_audio_info_for_stream(media_user_volume, AudioStreamType::Media).await;
        }

        if let Some(interruption_user_volume) = interruption_user_volume {
            self.on_audio_info_for_stream(interruption_user_volume, AudioStreamType::Interruption)
                .await;
        }
    }

    /// Play the earcons sound given the changed volume streams.
    ///
    /// The parameters are packaged together. See [VolumeChangeParams].
    fn play_volume_sound(&self, volume: f32) {
        let common_earcons_params = self.common_earcons_params.clone();
        let publisher = self.publisher.clone();
        fasync::Task::spawn(async move {
            // Connect to the SoundPlayer if not already connected.
            connect_to_sound_player(
                publisher,
                common_earcons_params.service_context.clone(),
                common_earcons_params.sound_player_connection.clone(),
            )
            .await;

            let sound_player_connection_clone =
                common_earcons_params.sound_player_connection.clone();
            let sound_player_connection = sound_player_connection_clone.lock().await;
            let sound_player_added_files = common_earcons_params.sound_player_added_files;

            if let (Some(sound_player_proxy), volume_level) =
                (sound_player_connection.as_ref(), volume)
            {
                if volume_level >= 1.0 {
                    play_sound(
                        &sound_player_proxy,
                        VOLUME_MAX_FILE_PATH,
                        VOLUME_MAX_SOUND_ID,
                        sound_player_added_files.clone(),
                    )
                    .await
                    .ok();
                } else if volume_level > 0.0 {
                    play_sound(
                        &sound_player_proxy,
                        VOLUME_CHANGED_FILE_PATH,
                        VOLUME_CHANGED_SOUND_ID,
                        sound_player_added_files.clone(),
                    )
                    .await
                    .ok();
                }
            }
        })
        .detach();
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::audio::default_audio_info;
    use crate::message::base::MessengerType;
    use crate::message::MessageHubUtil;
    use crate::service_context::ServiceContext;
    use futures::lock::Mutex;
    use std::sync::Arc;

    fn fake_values() -> (
        [AudioStream; 5],  // fake_streams
        ModifiedCounters,  // old_counters
        ModifiedCounters,  // new_counters
        Vec<AudioStream>,  // expected_changed_streams
    ) {
        let fake_streams = default_audio_info().streams;
        let old_timestamps = create_default_modified_counters();
        let new_timestamps = [
            (AudioStreamType::Background, 0),
            (AudioStreamType::Media, 1),
            (AudioStreamType::Interruption, 0),
            (AudioStreamType::SystemAgent, 2),
            (AudioStreamType::Communication, 3),
        ]
        .iter()
        .cloned()
        .collect();
        let expected_changed_streams =
            [fake_streams[1], fake_streams[3], fake_streams[4]].to_vec();
        (fake_streams, old_timestamps, new_timestamps, expected_changed_streams)
    }

    #[fuchsia_async::run_until_stalled(test)]
    async fn test_changed_streams() {
        let (fake_streams, old_timestamps, new_timestamps, expected_changed_streams) =
            fake_values();
        let delegate = service::MessageHub::create_hub();
        let publisher = event::Publisher::create(&delegate, MessengerType::Unbound).await;
        let mut last_user_volumes = HashMap::new();
        last_user_volumes.insert(AudioStreamType::Media, 1.0);
        last_user_volumes.insert(AudioStreamType::Interruption, 0.5);

        let mut handler = VolumeChangeHandler {
            common_earcons_params: CommonEarconsParams {
                service_context: Arc::new(ServiceContext::new(None, None)),
                sound_player_added_files: Arc::new(Mutex::new(HashSet::new())),
                sound_player_connection: Arc::new(Mutex::new(None)),
            },
            last_user_volumes,
            modified_counters: old_timestamps,
            publisher,
        };

        let changed_streams = handler.calculate_changed_streams(fake_streams, new_timestamps);
        assert_eq!(changed_streams, expected_changed_streams);
    }
}
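The counter diff in calculate_changed_streams is what debounces the earcons: a stream only counts as changed when its modified counter advances. A minimal sketch of the same bookkeeping in Python (the stream names are illustrative):

def changed_streams(last_counters, new_counters, streams):
    # A stream counts as changed only when its modified counter advanced;
    # last_counters is updated in place, like modified_counters above.
    changed = set()
    for stream_type, counter in new_counters.items():
        if last_counters.get(stream_type) != counter:
            changed.add(stream_type)
            last_counters[stream_type] = counter
    return [s for s in streams if s in changed]

last = {"media": 0, "interruption": 0}
print(changed_streams(last, {"media": 1, "interruption": 0},
                      ["media", "interruption"]))  # ['media']
print(changed_streams(last, {"media": 1, "interruption": 0},
                      ["media", "interruption"]))  # [] (debounced)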
Politicians in city and county governments across California are thumbing their noses at federal law in order to harbor illegal aliens, including criminal illegal aliens like Kate Steinle's alleged killer in San Francisco.

These deadly sanctuary policies not only violate federal law…they are killing innocent people, degrading our communities and costing law-abiding citizens billions of dollars a year in public services!

Taxpayers in San Diego County, for example, were footing a $101 million a year bill for the health care and public safety costs of illegal immigration…and that number has undoubtedly increased.

And just recently, Judicial Watch investigators uncovered that the California Legislature is paying former Obama Attorney General Eric Holder $25,000 a month to develop legal strategies to fight the Trump administration's rule of law agenda!

Taxpayer money is being frittered away across the state on sanctuary policies like these, all while the state's combined unfunded pension liabilities stand at over $366 billion.

Clearly state politicians are prioritizing liberal cronies and their lawless agenda over the safety and financial well-being of hardworking citizens!

Sign the petition to support Judicial Watch's legal efforts fighting California's sanctuary policies!
export default sap.ui.model.odata.ODataMetaModel;
def _device_status_status(value: DeviceStatus) -> Sensor.Status:
    if value == DeviceStatus.OK:
        return Sensor.Status.NOMINAL
    elif value == DeviceStatus.DEGRADED:
        return Sensor.Status.WARN
    else:
        return Sensor.Status.ERROR
def pairLevelsDispersion(d, N, D, singlyOccupiedList):
    pairEnergies, singleEnergies = [[] for i in singlyOccupiedList], [[] for i in singlyOccupiedList]
    for j in range(len(singlyOccupiedList)):
        for i in range(N):
            if testBit(singlyOccupiedList[j], N - i - 1) == 0:
                pairEnergies[j].append(eps(i, d, D))
            else:
                singleEnergies[j].append(eps(i, d, D))
    pairEnergies = np.array([np.array(i) for i in pairEnergies])
    singleEnergies = np.array([np.array(i) for i in singleEnergies])
    return pairEnergies, singleEnergies
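testBit is not defined in this file; a conventional reading consistent with the N - i - 1 indexing above (an assumption, not the project's confirmed helper) tests bits most-significant-first:

def testBit(mask: int, pos: int) -> int:
    # returns 1 if bit `pos` of `mask` is set, else 0
    return (mask >> pos) & 1

# tiny demo: for N=4 levels and occupation mask 0b0101, levels are indexed
# from the most significant bit, so levels 1 and 3 are singly occupied
N, mask = 4, 0b0101
singles = [i for i in range(N) if testBit(mask, N - i - 1)]
pairs = [i for i in range(N) if not testBit(mask, N - i - 1)]
print(singles, pairs)  # [1, 3] [0, 2]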
Subscribers to the CWO newsletter (both American and beyond) have been sharing some of their favorite British crime stories with us.

Writing from Pennsylvania, Breanne's favourite (see what we did there?) is Grantchester: Hot priest turns sleuth. Women swoon.

Lily from Portland, ME says we should check out Robbie Coltrane in Cracker: Hagrid uses his giant brain to bust those who must not be named.

Vicki from Calgary wants us to tune in to River: He's smart. He's haunted. Maybe you don't begin a conversation with him by saying, "How ya doing?"

Sian from Scotland wants us to see The Fall: It's Scully chasing serial killers instead of aliens.

Jacqueline, a Brit living in Florida, loves Broadchurch: Flawed cop pisses off everyone in town looking for a boy's killer.

Jane, another ex-pat living in NY, recommends Scott & Bailey: It's "Cagney & Lacey," if Cagney and Lacey were from Manchester and were turned on by blood splatter patterns.

Philippa from the UK wants us to see the BBC's Line of Duty: It's a police unit that tracks down corrupt cops, bad bobbies and pilfering peelers.

Dan from Wrexham UK votes for Idris Elba's Luther: It's Stringer Bell from "The Wire," only he's a good guy and talks in his own accent.

Sarah from Sydney wants you to see Happy Valley: Spoiler alert: Ain't nobody happy in Happy Valley.

Susan from Edgewater, Maryland says you can't beat Helen Mirren in Prime Suspect: It's the Queen flushing out Jacks and Jokers.

Jan in Australia recommends the long-running Midsomer Murders: It's the deadliest rural location since Cabot Cove.

Shana from Michigan makes the case for the Australian show Miss Fisher's Murder Mysteries: Set in the Roaring 1920s, it's Miss Marple meets The Great Gatsby.

Among Cass from Sydney's many picks is Blue Murder: Single mom fights crime while still getting the kids to cricket practice.
def __check_service(service_object):
    svc = service_object()
    if not isinstance(svc, StackInABoxService):
        raise TypeError(
            "Service is not a Stack-In-A-Box Service"
        )
/**
 * Add results to the total results; this will not allow adding results
 * beyond the maxItemsToReturn (max) number.
 *
 * @param searchResult the result to add
 * @return true if the item was added OR false if it was null or the max number is already added
 */
public boolean addResult(SearchResult searchResult) {
    boolean added = false;
    if (searchResult != null) {
        if (this.maxItemsToReturn <= 0 || getResults().size() < this.maxItemsToReturn) {
            getResults().add(searchResult);
            if (getResults().size() > this.totalItemsMatched) {
                this.totalItemsMatched = getResults().size();
            }
            added = true;
        }
    }
    return added;
}
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Special logic of pre compile analysis.

Build with compile failures will be pre-processed to determine if a new
compile analysis is needed or not.
"""

import logging

from google.appengine.ext import ndb
from google.protobuf.field_mask_pb2 import FieldMask

from services import gerrit
from services import git
from services import deps
from waterfall import waterfall_config

from common.waterfall import buildbucket_client
from findit_v2.model import luci_build
from findit_v2.model import compile_failure
from findit_v2.model.compile_failure import CompileFailure
from findit_v2.model.compile_failure import CompileFailureAnalysis
from findit_v2.model.compile_failure import CompileFailureGroup
from findit_v2.model.compile_failure import CompileRerunBuild
from findit_v2.model.culprit_action import CulpritAction
from findit_v2.services import build_util
from findit_v2.services import constants
from findit_v2.services import projects
from findit_v2.services.analysis.analysis_api import AnalysisAPI
from findit_v2.services.failure_type import StepTypeEnum


class CompileAnalysisAPI(AnalysisAPI):

  @property
  def step_type(self):
    return StepTypeEnum.COMPILE

  def _GetMergedFailureKey(self, failure_entities, referred_build_id,
                           step_ui_name, atomic_failure):
    return CompileFailure.GetMergedFailureKey(
        failure_entities, referred_build_id, step_ui_name, atomic_failure)

  def _GetFailuresInBuild(self, project_api, build, failed_steps):
    return project_api.GetCompileFailures(build, failed_steps)

  def _GetFailuresWithMatchingFailureGroups(self, project_api, context, build,
                                            first_failures_in_current_build):
    return project_api.GetFailuresWithMatchingCompileFailureGroups(
        context, build, first_failures_in_current_build)

  def _CreateFailure(self, failed_build_key, step_ui_name,
                     first_failed_build_id, last_passed_build_id,
                     merged_failure_key, atomic_failure, properties):
    """Creates a CompileFailure entity."""
    return CompileFailure.Create(
        failed_build_key=failed_build_key,
        step_ui_name=step_ui_name,
        output_targets=list(atomic_failure or []),
        rule=(properties or {}).get('rule'),
        first_failed_build_id=first_failed_build_id,
        last_passed_build_id=last_passed_build_id,
        # Default to first_failed_build_id, will be updated later if matching
        # group exists.
        failure_group_build_id=first_failed_build_id,
        merged_failure_key=merged_failure_key,
        properties=properties)

  def GetFailureEntitiesForABuild(self, build):
    compile_failure_entities = CompileFailure.query(
        ancestor=ndb.Key(luci_build.LuciFailedBuild, build.id)).fetch()
    assert compile_failure_entities, (
        'No compile failure saved in datastore for build {}'.format(build.id))
    return compile_failure_entities

  def _CreateFailureGroup(self, context, build, compile_failure_keys,
                          last_passed_gitiles_id, last_passed_commit_position,
                          first_failed_commit_position):
    group_entity = CompileFailureGroup.Create(
        luci_project=context.luci_project_name,
        luci_bucket=build.builder.bucket,
        build_id=build.id,
        gitiles_host=context.gitiles_host,
        gitiles_project=context.gitiles_project,
        gitiles_ref=context.gitiles_ref,
        last_passed_gitiles_id=last_passed_gitiles_id,
        last_passed_commit_position=last_passed_commit_position,
        first_failed_gitiles_id=context.gitiles_id,
        first_failed_commit_position=first_failed_commit_position,
        compile_failure_keys=compile_failure_keys)
    return group_entity

  def _CreateFailureAnalysis(self, luci_project, context, build,
                             last_passed_gitiles_id,
                             last_passed_commit_position,
                             first_failed_commit_position, rerun_builder_id,
                             compile_failure_keys):
    analysis = CompileFailureAnalysis.Create(
        luci_project=luci_project,
        luci_bucket=build.builder.bucket,
        luci_builder=build.builder.builder,
        build_id=build.id,
        gitiles_host=context.gitiles_host,
        gitiles_project=context.gitiles_project,
        gitiles_ref=context.gitiles_ref,
        last_passed_gitiles_id=last_passed_gitiles_id,
        last_passed_commit_position=last_passed_commit_position,
        first_failed_gitiles_id=context.gitiles_id,
        first_failed_commit_position=first_failed_commit_position,
        rerun_builder_id=rerun_builder_id,
        compile_failure_keys=compile_failure_keys)
    return analysis

  def _GetFailuresInAnalysis(self, analysis):
    return ndb.get_multi(analysis.compile_failure_keys)

  def _FetchRerunBuildsOfAnalysis(self, analysis):
    return CompileRerunBuild.query(ancestor=analysis.key).order(
        CompileRerunBuild.gitiles_commit.commit_position).fetch()

  def _GetFailureAnalysis(self, analyzed_build_id):
    analysis = CompileFailureAnalysis.GetVersion(analyzed_build_id)
    assert analysis, 'Failed to get CompileFailureAnalysis for build {}'.format(
        analyzed_build_id)
    return analysis

  def _GetFailuresToRerun(self, failure_entities):
    return compile_failure.GetFailedTargets(failure_entities)

  def _GetExistingRerunBuild(self, analysis_key, rerun_commit):
    return CompileRerunBuild.SearchBuildOnCommit(analysis_key, rerun_commit)

  def _CreateRerunBuild(self, rerun_builder, new_build, rerun_commit,
                        analysis_key):
    return CompileRerunBuild.Create(
        luci_project=rerun_builder.project,
        luci_bucket=rerun_builder.bucket,
        luci_builder=rerun_builder.builder,
        build_id=new_build.id,
        legacy_build_number=new_build.number,
        gitiles_host=rerun_commit.gitiles_host,
        gitiles_project=rerun_commit.gitiles_project,
        gitiles_ref=rerun_commit.gitiles_ref,
        gitiles_id=rerun_commit.gitiles_id,
        commit_position=rerun_commit.commit_position,
        status=new_build.status,
        create_time=new_build.create_time.ToDatetime(),
        parent_key=analysis_key)

  def _GetRerunBuildTags(self, analyzed_build_id):
    return [
        {
            'key': constants.RERUN_BUILD_PURPOSE_TAG_KEY,
            'value': constants.COMPILE_RERUN_BUILD_PURPOSE,
        },
        {
            'key': constants.ANALYZED_BUILD_ID_TAG_KEY,
            'value': str(analyzed_build_id),
        },
    ]

  def _GetRerunBuildInputProperties(self, project_api, rerun_failures,
                                    analyzed_build_id):
    return project_api.GetCompileRerunBuildInputProperties(
        rerun_failures, analyzed_build_id)

  def GetSuspectedCulprits(self, project_api, context, build,
                           first_failures_in_current_build):
    failure_info = project_api.GetCompileFailureInfo(
        context, build, first_failures_in_current_build)
    # Projects that support heuristic analysis for compile must implement
    # GetCompileFailureInfo.
    if failure_info:
      signals = project_api.ExtractSignalsForCompileFailure(failure_info)
      change_logs = git.PullChangeLogs(
          first_failures_in_current_build['last_passed_build']['commit_id'],
          context.gitiles_id)
      deps_info = deps.ExtractDepsInfo(failure_info, change_logs)
      return project_api.HeuristicAnalysisForCompile(failure_info, change_logs,
                                                     deps_info, signals)
    return None

  def _GetFailureGroupByContext(self, context):
    groups = CompileFailureGroup.query(
        CompileFailureGroup.luci_project == context.luci_project_name).filter(
            CompileFailureGroup.first_failed_commit.gitiles_id == context
            .gitiles_id).fetch()
    return groups[0] if groups else None

  def _CheckIfCulpritIsASuspect(self, culprit, failures):
    """Checks if culprit is one of the suspects for failures."""
    all_suspect_keys = []
    for failure in failures:
      all_suspect_keys.extend(failure.suspect_commit_key)
    all_suspects = [suspect_key.get() for suspect_key in all_suspect_keys]
    for suspect in all_suspects:
      if (culprit.gitiles_id == suspect.gitiles_id and
          culprit.gitiles_host == suspect.gitiles_host and
          culprit.gitiles_project == suspect.gitiles_project and
          culprit.gitiles_ref == suspect.gitiles_ref):
        return True
    return False

  def OnCulpritFound(self, context, analyzed_build_id, culprit, failures):
    """Decides and executes the action for the found culprit change.

    The possible actions include:
    - No action.
    - Notify the culprit CL.
    - Create revert and request that it's reviewed.
    - Create a revert and submit it.

    Selecting the appropriate action will be based on the project's configured
    options and daily limits as well as whether the action can be taken safely.
    Refer to the code below for details.

    Args:
      context (findit_v2.services.context.Context): Scope of the analysis.
      analyzed_build_id: Buildbucket id of the continuous build being analyzed.
      culprit: The Culprit entity for the change identified as causing the
        failures.
      failures: Failure entities associated with the culprit.

    Returns:
      The CulpritAction entity describing the action taken, None if no action
      was performed.
    """
    project_api = projects.GetProjectAPI(context.luci_project_name)
    project_config = projects.PROJECT_CFG.get(context.luci_project_name, {})
    action_settings = waterfall_config.GetActionSettings()
    if not action_settings.get('v2_actions', False):
      logging.info('V2 auto-action flow globally disabled')
      return None
    if not project_config.get('auto_actions_enabled_for_project', False):
      return self._NoAction(culprit, 'Auto-actions disabled for project')
    if not build_util.AllLaterBuildsHaveOverlappingFailure(
        context, analyzed_build_id, culprit):
      return self._NoAction(culprit, 'Build has recovered')
    # Check if the culprit is one of the suspects
    if context.luci_project_name == "chromium":
      if not self._CheckIfCulpritIsASuspect(culprit, failures):
        return self._NoAction(culprit, 'Culprit is not a suspect')
    change_info, gerrit_client = (
        project_api.gerrit_actions.ChangeInfoAndClientFromCommit(culprit))
    cl_details = gerrit_client.GetClDetails(change_info['review_change_id'])
    if bool(cl_details.revert_of):
      return self._Notify(project_api, culprit, 'The culprit is a revert')
    reverted, by_findit = self._CheckIfReverted(
        cl_details, culprit,
        project_config.get('auto_actions_service_account', ''))
    if reverted and by_findit:
      return self._NoAction(culprit,
                            'We already created a revert for this culprit')
    if reverted:
      return self._Notify(
          project_api,
          culprit,
          'A revert was manually created for this culprit',
          silent=True)
    if len(
        CulpritAction.GetRecentActionsByType(
            CulpritAction.REVERT,
            revert_committed=False)) >= action_settings.get(
                'auto_create_revert_daily_threshold_compile', 10):
      return self._Notify(project_api, culprit, 'Reached revert creation quota')
    if not project_config.get('auto_revert_enabled_for_project', False):
      return self._Notify(project_api, culprit,
                          'Auto-revert disabled for this project')
    if cl_details.auto_revert_off:
      return self._Notify(project_api, culprit,
                          'The culprit has been tagged with NOAUTOREVERT=True')
    if gerrit.ExistCQedDependingChanges(change_info):
      return self._Notify(project_api, culprit,
                          'Changes already in the CQ depend on culprit')
    if not git.ChangeCommittedWithinTime(
        culprit.gitiles_id,
        repo_url=git.GetRepoUrlFromContext(context),
        hours=project_config.get('max_revertible_culprit_age_hours', 24)):
      return self._Notify(project_api, culprit,
                          'Culprit is too old to auto-revert')
    if cl_details.owner_email in project_config.get(
        'automated_account_allowlist', []):
      return self._Notify(project_api, culprit,
                          'Culprit was created by an allowed account')
    revert_description = self._ComposeRevertDescription(project_api, culprit)
    if project_config.get('auto_commit_enabled_for_project', False):
      if len(
          CulpritAction.GetRecentActionsByType(
              CulpritAction.REVERT,
              revert_committed=True)) < action_settings.get(
                  'auto_commit_revert_daily_threshold_compile', 4):
        action = self._CommitRevert(project_api, revert_description, culprit)
        if action:
          return action
        logging.info(
            'Could not land revert automatically, requesting manual review')
      else:
        logging.info('Reached auto-commit quota, requesting manual review')
    else:
      logging.info('Auto-committing disabled, requesting manual review')
    return self._RequestReview(project_api, revert_description, culprit)
def assertConstructHeaderDatasetName(
    self, _map: HDFMapDigiTemplate, _group: h5py.Group
):
    kwargs_list = []
    for cname, config in _map.configs.items():
        for adc in config["adc"]:
            for conn in config[adc]:
                brd = conn[0]
                chs = conn[1]
                for ch in chs:
                    kwargs_list.append(
                        {
                            "board": brd,
                            "channel": ch,
                            "config_name": cname,
                            "adc": adc,
                        }
                    )

    for kwargs in kwargs_list:
        if kwargs["config_name"] not in _map.active_configs:
            with self.assertRaises(ValueError):
                _map.construct_dataset_name(**kwargs)
        else:
            dset_name = _map.construct_dataset_name(**kwargs)
            self.assertIsInstance(dset_name, str)
            self.assertIsNotNone(_group.get(dset_name))
def delete(models, **kwargs):
    delete_async(models, **kwargs).get_result()
from collections import defaultdict


def read_in():
    polymer = input()
    input()
    rules = {}
    while value := input():
        k, v = value.split(' -> ')
        rules[k] = v
    return polymer, rules


def run_for_n_rounds(polymer, rules, n):
    p_map = defaultdict(int)
    e_count = defaultdict(int)
    for i in range(len(polymer) - 1):
        k = polymer[i] + polymer[i + 1]
        p_map[k] += 1
        e_count[polymer[i]] += 1
    e_count[polymer[-1]] += 1

    for _ in range(n):
        new_p_map = defaultdict(int)
        for k in p_map:
            v = p_map[k]
            new_k = k[0] + rules[k]
            new_p_map[new_k] += v
            new_k = rules[k] + k[1]
            new_p_map[new_k] += v
            e_count[rules[k]] += v
        p_map = new_p_map

    return max(e_count.values()) - min(e_count.values())


def part_one(data):
    print(run_for_n_rounds(data[0], data[1], 10))


def part_two(data):
    print(run_for_n_rounds(data[0], data[1], 40))


if __name__ == '__main__':
    d = read_in()
    part_one(d)
    print()
    part_two(d)
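The pair-count map above grows per-pair tallies instead of the exponentially long polymer string, which is what makes 40 rounds tractable. A small cross-check against naive string expansion, assuming run_for_n_rounds from the file above is in scope (the rules are the well-known NNCB sample set):

from collections import Counter

def naive(polymer, rules, n):
    # literally build the polymer string n times
    for _ in range(n):
        out = [polymer[0]]
        for a, b in zip(polymer, polymer[1:]):
            out += [rules[a + b], b]
        polymer = ''.join(out)
    counts = Counter(polymer)
    return max(counts.values()) - min(counts.values())

rules = {'CH': 'B', 'HH': 'N', 'CB': 'H', 'NH': 'C', 'HB': 'C', 'HC': 'B',
         'HN': 'C', 'NN': 'C', 'BH': 'H', 'NC': 'B', 'NB': 'B', 'BN': 'B',
         'BB': 'N', 'BC': 'B', 'CC': 'N', 'CN': 'C'}
for n in range(1, 6):
    assert run_for_n_rounds('NNCB', rules, n) == naive('NNCB', rules, n)
print("ok")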
package com.example.android.tvleanback.ui; import android.Manifest; import android.app.AlertDialog; import android.app.ProgressDialog; import android.content.DialogInterface; import android.content.Intent; import android.content.pm.PackageManager; import android.media.MediaMetadataRetriever; import android.net.Uri; import android.os.Bundle; import android.os.Environment; import android.preference.PreferenceManager; import android.support.v4.app.ActivityCompat; import android.text.TextUtils; import android.util.Log; import android.widget.Toast; import com.example.android.tvleanback.R; import com.example.android.tvleanback.Utils; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import nl.bravobit.ffmpeg.ExecuteBinaryResponseHandler; import nl.bravobit.ffmpeg.FFmpeg; import nl.bravobit.ffmpeg.FFprobe; import static android.os.Environment.DIRECTORY_MOVIES; import static android.os.Environment.getExternalStoragePublicDirectory; public class KaraokeActivity extends LeanbackActivity { FFmpeg ffmpeg; private ProgressDialog progressDialog; private String TAG = "KaraokeActivity"; private static String[] PERMISSIONS_STORAGE = {Manifest.permission.READ_EXTERNAL_STORAGE, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.INTERNET}; private static String duration,number,company; /** * Called when the activity is first created. */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); progressDialog = new ProgressDialog(this); progressDialog.setTitle(null); Intent intent = this.getIntent(); company = intent.getStringExtra("company"); number = intent.getStringExtra("number"); duration = intent.getStringExtra("duration"); if(duration == null || duration.equals("") || duration.equals("0")) { //Toast.makeText(this,"ffprobe",Toast.LENGTH_SHORT).show(); downloadMP3(company, number); } else { loadFFMpegBinary(company, number, duration); } } private static String getDuration(File file) { MediaMetadataRetriever mediaMetadataRetriever = new MediaMetadataRetriever(); mediaMetadataRetriever.setDataSource(file.getAbsolutePath()); String durationStr = mediaMetadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION); return Utils.formatMilliSeccond2Seconds(Long.parseLong(durationStr)); } private List<String> getListFiles(File parentDir, String fileNameToSearch) { ArrayList<String> inFiles = new ArrayList<String>(); File[] files = parentDir.listFiles(); if(files!=null){ for (File file : files) { if (file.isDirectory()) { if(file.getAbsolutePath().toString().toLowerCase().endsWith(fileNameToSearch.toLowerCase()) || file.getName().toString().toUpperCase().contains(fileNameToSearch.toUpperCase())){ inFiles.add(file.getName().toString()); }else { inFiles.addAll(getListFiles(file, fileNameToSearch)); } } else { if(file.getAbsolutePath().toString().toLowerCase().endsWith(fileNameToSearch.toLowerCase()) || file.getName().toString().toUpperCase().contains(fileNameToSearch.toUpperCase())){ inFiles.add(file.getName().toString()); } } } } return inFiles; } public String chooseVideo(String durationSong){ int intDurationSong = Integer.parseInt(durationSong); if(intDurationSong==0) return Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES + "/source.mp4";
List<String> fileList = getListFiles(new File(Environment.getExternalStorageDirectory().getPath()+"/"+ DIRECTORY_MOVIES),"mp4"); List<String> possibleList = new ArrayList<String>(); if(fileList.size()>0){ for(String file : fileList){ int length = 0; for(String fname : file.split("\\.")) if(TextUtils.isDigitsOnly(fname)) length = Integer.parseInt(fname); if(length>intDurationSong) possibleList.add(file); } } String result = ""; if(fileList==null || fileList.size() == 0 || possibleList == null || possibleList.size()<1) { result = Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES + "/source.mp4"; } else{ if(possibleList.size()>1) Collections.shuffle(possibleList); result = String.format(getExternalStoragePublicDirectory(DIRECTORY_MOVIES).toString()+"/%s",possibleList.get(0)); } return result; } public int getDurationVideo(File videoFile){ MediaMetadataRetriever retriever = new MediaMetadataRetriever(); retriever.setDataSource(this, Uri.fromFile(videoFile)); String time = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION); int sec = (int) Long.parseLong(time)/1000; return sec; } public static boolean copyFile(String from, String to) { try { int bytesum = 0; int byteread = 0; File oldfile = new File(from); if (oldfile.exists()) { InputStream inStream = new FileInputStream(from); FileOutputStream fs = new FileOutputStream(to); byte[] buffer = new byte[1444]; while ((byteread = inStream.read(buffer)) != -1) { bytesum += byteread; fs.write(buffer, 0, byteread); } inStream.close(); fs.close(); } return true; } catch (Exception e) { return false; } } public void getFiles(String from, String toFolder, String toFile) throws IOException { if(PreferenceManager.getDefaultSharedPreferences(this).getBoolean(getString(R.string.pref_key_USB),false)==true){ String to = toFolder+"/"+toFile; copy(new File(from), new File(to)); } else { new DownloadTask(this).execute(toFolder, toFile, from); } } public static void copy(File src, File dst) throws IOException { try (InputStream in = new FileInputStream(src)) { try (OutputStream out = new FileOutputStream(dst)) { // Transfer bytes from in to out byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } } } } public void downloadMP3(String company, String number){ String downloadURL = String.format("http://fytoz.asuscomm.com/4TB/%s/%s/%s.mp3",company,number.substring(0,2),number); new DownloadTask(this){ @Override protected void onPostExecute(String result){ cmdFFprobe(String.format("-v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 %s",Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES+"/temp.mp3")); } }.execute(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES, "temp.mp3", downloadURL, "SearchFragment"); } public void start(String company, String number, String duration){ if (ActivityCompat.checkSelfPermission(this, Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED || ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) { if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.READ_EXTERNAL_STORAGE) || ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.WRITE_EXTERNAL_STORAGE)) { // Provide an additional rationale to the user if the permission was not granted // and the user would benefit from additional context for the use of the permission.
// For example, if the request has been denied previously. Toast.makeText(this, "we need read and write permission on external storage for downloading the videos and the mixing", Toast.LENGTH_LONG).show(); } else { // Contact permissions have not been granted yet. Request them directly. ActivityCompat.requestPermissions(this, PERMISSIONS_STORAGE, 1); } } else { File tempaudio = new File(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES + "/temp.mp3"); File templyrics = new File(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES + "/temp.ass"); if (tempaudio.exists()) { tempaudio.delete(); } if (templyrics.exists()) { templyrics.delete(); } if(PreferenceManager.getDefaultSharedPreferences(this).getBoolean(getString(R.string.pref_key_USB),false)==true) { String storagePath = PreferenceManager.getDefaultSharedPreferences(this).getString("USB_path","null"); File lyricsFrom = new File(String.format(storagePath + "/%s/%s/%s.ass", company, number.substring(0, 2), number)); File lyricsTo = new File(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES + "/temp.ass"); //Toast.makeText(this,lyricsFrom.toString()+lyricsFrom.exists(),Toast.LENGTH_LONG).show(); File audioFrom = new File(String.format(storagePath + "/%s/%s/%s.mp3", company, number.substring(0, 2), number)); File audioTo = new File(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES + "/temp.mp3"); //Toast.makeText(this,audioFrom.toString()+lyricsFrom.exists(),Toast.LENGTH_LONG).show(); try{ copy(lyricsFrom,lyricsTo); copy(audioFrom,audioTo); } catch (IOException e) { e.printStackTrace(); } File source = new File(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES + "/source.mp4"); if(!source.exists()){ File videoFrom = new File(String.format(storagePath + "/%s", "source.mp4")); File videoTo = new File(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES + "/source.mp4"); Toast.makeText(this,videoFrom.toString()+lyricsFrom.exists(),Toast.LENGTH_LONG).show(); try { copy(videoFrom,videoTo); } catch (IOException e) { e.printStackTrace(); } } } else{ String downloadURL = String.format("http://fytoz.asuscomm.com/4TB/%s/%s/%s.mp3",company,number.substring(0,2),number); new DownloadTask(this).execute(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES, "temp.ass", downloadURL.replace(".mp3",".ass"), "SearchFragment"); new DownloadTask(this).execute(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES, "temp.mp3", downloadURL, "SearchFragment"); } //Log.d("dxd", "start: "+downloadURL); String cmdFormat="-i %s -i %s -c copy -map 0:v:0 -map 1:a:0 %s-y %s"; //List<String> fileList = getListFiles(getExternalStoragePublicDirectory(DIRECTORY_MOVIES),"mp4"); //List<String> fileList = getListFiles(new File(Environment.getExternalStorageDirectory().getPath()+"/"+Environment.DIRECTORY_MOVIES),"mp4"); //String durationCalculated = getDuration(new File(Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES, "temp.mp3")); // if(duration.equals("0") || duration == null) { // cmdFFprobe(String.format("-v error -show_entries format=duration -of default=noprint_wrappers=1:nokey=1 %s",Environment.getExternalStorageDirectory().getPath() + "/" + DIRECTORY_MOVIES+"/temp.mp3")); // } String filePath = chooseVideo(duration); //String joined = TextUtils.join(", ", fileList); //Toast.makeText(this, joined, Toast.LENGTH_SHORT).show(); //Log.d("dxd", 
"start: "+duration+"|"+lengthofFile); String shortest = "-shortest "; //Toast.makeText(this, shortest+duration+"|"+lengthofFile, Toast.LENGTH_LONG).show(); String audioPath = Environment.getExternalStorageDirectory().getPath()+"/"+ DIRECTORY_MOVIES+"/temp.mp3"; String cmd1 = String.format(cmdFormat, filePath, audioPath, shortest, Environment.getExternalStorageDirectory().getPath()+"/"+ DIRECTORY_MOVIES+"/temp.mkv" ); String cmd2 = String.format(cmdFormat, PreferenceManager.getDefaultSharedPreferences(this).getBoolean(getString(R.string.pref_key_USB),false) ? Environment.getExternalStorageDirectory().getPath()+"/"+ DIRECTORY_MOVIES+"/source.mp4" : "http://fytoz.asuscomm.com/4TB/audio/source.mp4" , audioPath, shortest, Environment.getExternalStorageDirectory().getPath()+"/"+ DIRECTORY_MOVIES+"/temp.mkv" ); //Log.d("kkk:", Environment.getExternalStorageDirectory().getPath()+"/"+Environment.DIRECTORY_MOVIES); String[] command1 = cmd1.split(" "); String[] command2 = cmd2.split(" "); if(checkDownloadedFile(filePath, tempaudio, templyrics)){ //Toast.makeText(this, cmd1, Toast.LENGTH_LONG).show(); this.execFFmpegBinary(command1); } else{ //Toast.makeText(this, cmd2, Toast.LENGTH_LONG).show(); this.execFFmpegBinary(command2); } } } public boolean checkDownloadedFile(String filePath, File tempaudio, File templyrics){ if(new File(filePath).exists() && tempaudio.exists() && templyrics.exists()) return true; else { return false; } } public void cmdFFprobe(String cmd){ loadFFprobeBinary(cmd); } public void loadFFprobeBinary(String command){ if (FFprobe.getInstance(this).isSupported()) { String[] cmd = command.split(" "); execFFprobeBinary(cmd); } else { Toast.makeText(getApplicationContext(), "FFprobe is not Suportted", Toast.LENGTH_SHORT).show(); } } public void loadFFprobeBinary(){ if (FFprobe.getInstance(this).isSupported()) { } else { showUnsupportedExceptionDialog(); } } public void execFFprobeBinary(final String[] command){ FFprobe ffprobe = FFprobe.getInstance(this); // to execute "ffprobe -version" command you just need to pass "-version" ffprobe.execute(command, new ExecuteBinaryResponseHandler() { @Override public void onStart() {} @Override public void onProgress(String message) { Toast.makeText(getApplicationContext(), "start: "+message, Toast.LENGTH_SHORT).show(); } @Override public void onFailure(String message) { Toast.makeText(getApplicationContext(), "fail: "+message, Toast.LENGTH_SHORT).show(); } @Override public void onSuccess(String message) { duration=String.valueOf((int) Math.ceil(Float.parseFloat(message))); Toast.makeText(getApplicationContext(), "success: "+message, Toast.LENGTH_SHORT).show(); } @Override public void onFinish() { Toast.makeText(getApplicationContext(), "success2: "+duration, Toast.LENGTH_SHORT).show(); start(company,number,duration); } }); } public void loadFFMpegBinary(String company, String number, String duration){ if (FFmpeg.getInstance(this).isSupported()) { start(company, number, duration); } else { showUnsupportedExceptionDialog(); } } public void loadFFMpegBinary() { if (FFmpeg.getInstance(this).isSupported()) { } else { Toast.makeText(getApplicationContext(), "FFMpeg is not Suportted", Toast.LENGTH_SHORT).show(); } } public void execFFmpegBinary(final String[] command) { FFmpeg ffmpeg = FFmpeg.getInstance(this); try { ffmpeg.execute(command, new ExecuteBinaryResponseHandler() { @Override public void onFailure(String s) { Log.d(TAG, "FAILED with output : "+s); Toast.makeText(getApplicationContext(), "FAILED1", Toast.LENGTH_SHORT).show(); } @Override 
public void onSuccess(String s) { Log.d(TAG, "SUCCESS with output : "+s); //Toast.makeText(getApplicationContext(), "SUCCESS", Toast.LENGTH_SHORT).show(); Uri videoUri = Uri.parse(Environment.getExternalStorageDirectory().getPath()+"/"+Environment.DIRECTORY_MOVIES+"/temp.mkv"); intent2mxplayer(videoUri, "video/*"); } @Override public void onProgress(String s) { Log.d(TAG, "Started command : ffmpeg "+command); //addTextViewToLayout("progress : "+s); progressDialog.setMessage("Processing\n"+s); } @Override public void onStart() { // outputLayout.removeAllViews(); Log.d(TAG, "Started command : ffmpeg " + command); progressDialog.setMessage("Processing Started"); progressDialog.show(); } @Override public void onFinish() { Log.d(TAG, "Finished command : ffmpeg "+command); progressDialog.dismiss(); } }); } catch (Exception e) { Toast.makeText(this, "FAILED2"+e.toString(), Toast.LENGTH_LONG).show(); // do nothing for now } } public void intent2mxplayer(Uri videoUri, String dataType){ Intent intent = new Intent(Intent.ACTION_VIEW); intent.setDataAndType( videoUri, dataType ); intent.setPackage( "com.mxtech.videoplayer.pro" ); byte DECODER_SW = 2; intent.putExtra("decode_mode", DECODER_SW); finish(); startActivity( intent ); } private void showUnsupportedExceptionDialog() { new AlertDialog.Builder(KaraokeActivity.this) .setIcon(android.R.drawable.ic_dialog_alert) .setTitle(getString(R.string.device_not_supported)) .setMessage(getString(R.string.device_not_supported_message)) .setCancelable(false) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { KaraokeActivity.this.finish(); } }) .create() .show(); } }
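chooseVideo above picks a background clip whose length, encoded as a digit token in the file name, exceeds the song duration, and falls back to source.mp4 otherwise. A compact sketch of that selection logic in Python (the "clip.240.mp4" naming scheme is an assumption for illustration):

import random

def choose_video(files, song_seconds, fallback="source.mp4"):
    def length_of(name):
        # pick the first all-digit token between dots, e.g. "clip.240.mp4" -> 240
        for token in name.split("."):
            if token.isdigit():
                return int(token)
        return 0
    candidates = [f for f in files if length_of(f) > song_seconds]
    return random.choice(candidates) if candidates else fallback

print(choose_video(["clip.240.mp4", "clip.90.mp4"], 180))  # clip.240.mp4
print(choose_video(["clip.90.mp4"], 180))                  # source.mp4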
/**
 * Diamond, which represents the position that the player has to move a Crate to
 */
public class Diamond extends AbstractGameObject {

    private transient Image DIAMOND_IMAGE;

    /**
     * Instantiates a new Diamond.
     *
     * @param linksTo the links to
     * @param atX     the at x
     * @param atY     the at y
     */
    public Diamond(Level linksTo, int atX, int atY) {
        super(linksTo, atX, atY);
    }

    @Override
    public char getCharSymbol() {
        return 'D';
    }

    @Override
    public String getStringSymbol() {
        return "DIAMOND";
    }

    private Image getDiamondImage() {
        if (DIAMOND_IMAGE == null) {
            DIAMOND_IMAGE = (Image) ResourceFactory.getResource("DIAMOND_IMAGE_" + ColourPreferenceController.selectedDiamondColour.getValue(), ResourceType.Image);
        }
        return DIAMOND_IMAGE;
    }

    /**
     * @return the Diamond image with selected colour
     */
    @Override
    public ImageView render() {
        if (this.view == null) {
            this.view = new ImageView(getDiamondImage());
            this.view.setFitHeight(16);
            this.view.setFitWidth(16);
            this.view.setTranslateX(16);
            this.view.setTranslateY(-10);
        }
        return this.view;
    }
}
Cambridge city council leader says he regrets following 'bureaucratic guideline' to remove punctuation from street signs

A council has been praised for its "sensible decision" to reverse a policy of banning apostrophes on street signs.

Cambridge city council faced criticism from self-declared defenders of grammar for its decision to remove punctuation in new road names, with some campaigners using marker pens to fill in apostrophes missing from signs. The local authority said it was only following national guidelines which warned that punctuation could confuse emergency services.

Tim Bick, leader of the council, said an "executive decision" had now been taken to make clear that for future street names "we will not be obliged to avoid proper punctuation".

He said: "After consulting with my colleague Tim Ward, executive councillor for planning, we decided we must call time on the great apostrophe debate.

"Councillor Ward has taken an executive decision to amend our street-naming policy to make clear that for future new street names in Cambridge we will not be obliged to avoid proper punctuation when it is required by the relevant name.

"It is now clear that the original decision made two years ago to ban the apostrophe from street names flew below everyone's radar, amazingly even after public consultation at the time.

"It is a nonsense to deny the English language when applying it to everyday terms describing where people live."

Bick said the minor matter had become a major story and it was time to put it to bed. "We rue the day we allowed ourselves to be influenced by a bureaucratic guideline, which nobody has been able to defend to us now that it has come under the spotlight," he added.

Kathy Salaman, director of the Cambridgeshire-based Good Grammar Company, said: "Obviously it's a great decision. A sensible decision. There was a great strength of feeling out there.

"I acknowledge that apostrophes are not a matter of life or death, but it's important when we're trying to raise literacy standards in the country and raise Britain's profile throughout the world as we languish in literacy tables."

Salaman said claims that apostrophes on street signs could hinder emergency services had been "blown out of the water", and said fire, police and ambulance services in the area had said it was "not a problem".

A street sign in Cambridge reading "Scholars Way leading to Pepys Court and Fitzgerald Place" was among those altered with a marker pen, with apostrophes added to the words "Scholars" and "Pepys".

Ward said: "If the system is able to accommodate all the old street names which still have apostrophes and there are also councils which have never signed up to the policy, then the system can accommodate the relatively small proportion that will need one as new streets get named.

"We understand that the body whose guidance we were following when determining our street-naming policy in 2011 has subsequently reversed its own position on punctuation, so there is now no reason at all for us to retain it as a part of our policy.

"We already subscribe to keeping street names as simple as possible for the purposes of easy recognition and we do so in consultation with the emergency services and Royal Mail."
/// Block execution of activity, preventing ANY kind of message not specified in the `wait_for` argument.
/// Once the `wait_for` clause is satisfied, the function returns.
///
/// Returns the message which satisfied the clause.
///
/// NOTE: The view is redrawn as usual
pub(super) fn wait_for_pending_msg(&mut self, wait_for: &[Msg]) -> Msg {
    self.redraw = true;
    loop {
        // Poll
        match self.app.tick(PollStrategy::Once) {
            Ok(mut messages) => {
                if !messages.is_empty() {
                    self.redraw = true;
                }
                let found = messages.iter().position(|m| wait_for.contains(m));
                // Return if found
                if let Some(index) = found {
                    return messages.remove(index);
                } else {
                    // Update
                    for msg in messages.into_iter() {
                        let mut msg = Some(msg);
                        while msg.is_some() {
                            msg = self.update(msg);
                        }
                    }
                }
            }
            Err(err) => {
                error!("Application error: {}", err);
            }
        }
        // Redraw
        if self.redraw {
            self.view();
        }
    }
}
package virtualmachine

import (
	. "github.com/onsi/gomega"
	"testing"
	"github.com/orbs-network/go-scaffold/utils/logger"
	_statestorage "github.com/orbs-network/go-scaffold/services/statestorage"
	"github.com/orbs-network/go-scaffold/types/services/statestorage"
	"errors"
)

var transferTable = []struct {
	to        string
	read1Err  error
	read2Err  error
	write1Err error
	write2Err error
	output    int32
	errs      bool
}{
	{"user2", errors.New("a"), nil, nil, nil, 0, true},
	{"user2", nil, errors.New("a"), nil, nil, 0, true},
	{"user2", nil, nil, errors.New("a"), nil, 0, true},
	{"user2", nil, nil, nil, errors.New("a"), 0, true},
	{"user2", nil, nil, nil, nil, 90, false},
	{"user1", nil, nil, nil, nil, 100, false},
}

func TestTransfer(t *testing.T) {
	Ω := NewGomegaWithT(t)
	for _, tt := range transferTable {
		stop := make(chan error, 10)
		s := NewService(&logger.StubLogger{})
		ss := &_statestorage.MockService{}
		s.Start(ss, &stop)
		ss.When("ReadKey", &statestorage.ReadKeyInput{Key: "user1"}).Return(&statestorage.ReadKeyOutput{Value: 100}, tt.read1Err)
		ss.When("ReadKey", &statestorage.ReadKeyInput{Key: "user2"}).Return(&statestorage.ReadKeyOutput{Value: 50}, tt.read2Err)
		ss.When("WriteKey", &statestorage.WriteKeyInput{Key: "user1", Value: 90}).Return(&statestorage.WriteKeyOutput{}, tt.write1Err)
		ss.When("WriteKey", &statestorage.WriteKeyInput{Key: "user2", Value: 60}).Return(&statestorage.WriteKeyOutput{}, tt.write2Err)
		output, err := s.(*service).processTransfer("user1", tt.to, 10)
		if tt.errs {
			Ω.Expect(err).To(HaveOccurred())
		} else {
			Ω.Expect(err).ToNot(HaveOccurred())
			Ω.Expect(output).To(BeEquivalentTo(tt.output))
		}
	}
}

var balanceTable = []struct {
	read1Err error
	output   int32
	errs     bool
}{
	{errors.New("a"), 0, true},
	{nil, 100, false},
}

func TestGetBalance(t *testing.T) {
	Ω := NewGomegaWithT(t)
	for _, tt := range balanceTable {
		stop := make(chan error, 10)
		s := NewService(&logger.StubLogger{})
		ss := &_statestorage.MockService{}
		s.Start(ss, &stop)
		ss.When("ReadKey", &statestorage.ReadKeyInput{Key: "user1"}).Return(&statestorage.ReadKeyOutput{Value: 100}, tt.read1Err)
		output, err := s.(*service).processGetBalance("user1")
		if tt.errs {
			Ω.Expect(err).To(HaveOccurred())
		} else {
			Ω.Expect(err).ToNot(HaveOccurred())
			Ω.Expect(output).To(BeEquivalentTo(tt.output))
		}
	}
}
/* Copyright (C) 2014-2017 Carl Leonardsson
 *
 * This file is part of Nidhugg.
 *
 * Nidhugg is free software: you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Nidhugg is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see
 * <http://www.gnu.org/licenses/>.
 */

#include <config.h>

#ifndef __SPIN_ASSUME_PASS_H__
#define __SPIN_ASSUME_PASS_H__

#include <llvm/Analysis/LoopPass.h>
#include <llvm/Pass.h>

/* Avoid including huge header files, we just need a forward
 * declaration */
namespace llvm {
  class Instruction;
}

/* The DeclareAssumePass checks that __VERIFIER_assume is correctly
 * declared in the module. If it is incorrectly declared, an error is
 * raised. If it is not declared, then its (correct) declaration is
 * added to the module.
 *
 * This pass is a prerequisite for SpinAssumePass.
 */
class DeclareAssumePass : public llvm::ModulePass {
public:
  static char ID;
  DeclareAssumePass() : llvm::ModulePass(ID) {}
  virtual bool runOnModule(llvm::Module &M);
};

/* The SpinAssumePass identifies side-effect-free spin loops and
 * replaces them with a single, non-looping, call to
 * __VERIFIER_assume, while maintaining reachability for the module.
 *
 * This transformation can often drastically reduce the number of
 * traces that have to be explored.
 *
 * This pass may create dead basic blocks.
 *
 * Example:
 *
 * A spin loop like this:
 *
 *   while(x < 2) {}
 *
 * is replaced by a call like this:
 *
 *   __VERIFIER_assume(!(x < 2));
 */
class SpinAssumePass : public llvm::LoopPass {
public:
  static char ID;
  SpinAssumePass() : llvm::LoopPass(ID) {}
  virtual void getAnalysisUsage(llvm::AnalysisUsage &AU) const;
  virtual bool runOnLoop(llvm::Loop *L, llvm::LPPassManager &LPM);
#ifdef LLVM_PASS_GETPASSNAME_IS_STRINGREF
  virtual llvm::StringRef getPassName() const { return "SpinAssumePass"; }
#else
  virtual const char *getPassName() const { return "SpinAssumePass"; }
#endif
protected:
  bool is_spin(const llvm::Loop *l) const;
  bool is_assume(llvm::Instruction &I) const;
  bool assumify_loop(llvm::Loop *l, llvm::LPPassManager &LPM);
  /* Remove basic blocks that have become disconnected in l and
   * parent loops of l. */
  void remove_disconnected(llvm::Loop *l);

  llvm::Function *F_assume;
};

#endif
def org_organisation_requires(updateable=False, required=False):
    requires = IS_ONE_OF(current.db, "org_organisation.id",
                         org_organisation_represent,
                         updateable = updateable,
                         orderby = "org_organisation.name",
                         sort = True)
    if not required:
        requires = IS_NULL_OR(requires)
    return requires
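A minimal usage sketch for this helper (the table and field names below are assumptions; in a web2py/Sahana-Eden codebase the validator is typically attached to an organisation foreign-key field):

# Hypothetical usage on a foreign-key field:
table = current.db.project_project
table.organisation_id.requires = org_organisation_requires(updateable=True)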
#include "./LibraryMainFunct.h" void SetConfig(const MainConfig cfg) { FILE * config = fopen(MAIN_CONFIG_FILE, "wt"); if(config) { fprintf(config, "%u ", cfg.AutoLoad); fprintf(config, "%u ", cfg.MaxDays); fprintf(config, "%u ", cfg.Books); fprintf(config, "%u ", cfg.Users); fprintf(config, "%u ", cfg.Borrowed); fclose(config); } } _lib_ret Settings(int arg, FILE ** out, struct _lib_data * data) { _lib_ret ret; ret.menucmd=DEFAULT; MainConfig tmp=GetConfig(); struct ChoiceLevel * CurrentChoice[CCHOICE_SIZE]= {NULL}; if(arg!=4) { static struct ChoiceItem Set_Auto_i[2] = {NULL, 0}; static struct ChoiceLevel Set_Auto = {Set_Auto_i, SetConfig, "Automatyczne wczytywanie ostatniego pliku", 2}; CurrentChoice[0] = &Set_Auto; strcpy(Set_Auto_i[0].str, "Nieaktywne"); Set_Auto_i[0].val = 0; strcpy(Set_Auto_i[1].str, "Aktywne"); Set_Auto_i[1].val = 1; static struct ChoiceItem Set_Book_i[10] = {NULL, 0}; static struct ChoiceLevel Set_Book = {Set_Book_i, SetConfig, "Domy\x98lne sortowanie ksi\xA5\xBE\x65k ", 10}; CurrentChoice[1] = &Set_Book; strcpy(Set_Book_i[0].str, "ID rosn\xA5\x63o"); Set_Book_i[0].val = 1000+sID; strcpy(Set_Book_i[1].str, "ID malej\xA5\x63o"); Set_Book_i[1].val = sID; strcpy(Set_Book_i[2].str, "TYTU\x9D rosn\xA5\x63o"); Set_Book_i[2].val = 1000+sTITLE; strcpy(Set_Book_i[3].str, "TYTU\x9D malej\xA5\x63o"); Set_Book_i[3].val = sTITLE; strcpy(Set_Book_i[4].str, "IMI\xA8 AUTORA rosn\xA5\x63o"); Set_Book_i[4].val = 1000+sNAME; strcpy(Set_Book_i[5].str, "IMI\xA8 AUTORA malej\xA5\x63o"); Set_Book_i[5].val = sNAME; strcpy(Set_Book_i[6].str, "NAZWISKO AUTORA rosn\xA5\x63o"); Set_Book_i[6].val = 1000+sSURNAME; strcpy(Set_Book_i[7].str, "NAZWISKO AUTORA malej\xA5\x63o"); Set_Book_i[7].val = sSURNAME; strcpy(Set_Book_i[8].str, "STAN rosn\xA5\x63o"); Set_Book_i[8].val = 1000+sBORROWED; strcpy(Set_Book_i[9].str, "STAN malej\xA5\x63o"); Set_Book_i[9].val = sBORROWED; static struct ChoiceItem Set_Borrowed_i[10] = {NULL, 0}; static struct ChoiceLevel Set_Borrowed = {Set_Borrowed_i, SetConfig, "Domy\x98lne sortowanie wypo\xBEyczonych ksi\xA5\xBE\x65k", 10}; CurrentChoice[2] = &Set_Borrowed; strcpy(Set_Borrowed_i[0].str, "ID rosn\xA5\x63o"); Set_Borrowed_i[0].val = 1000+sID; strcpy(Set_Borrowed_i[1].str, "ID malej\xA5\x63o"); Set_Borrowed_i[1].val = sID; strcpy(Set_Borrowed_i[2].str, "TYTU\x9D rosn\xA5\x63o"); Set_Borrowed_i[2].val = 1000+sTITLE; strcpy(Set_Borrowed_i[3].str, "TYTU\x9D malej\xA5\x63o"); Set_Borrowed_i[3].val = sTITLE; strcpy(Set_Borrowed_i[4].str, "IMI\xA8 AUTORA rosn\xA5\x63o"); Set_Borrowed_i[4].val = 1000+sNAME; strcpy(Set_Borrowed_i[5].str, "IMI\xA8 AUTORA malej\xA5\x63o"); Set_Borrowed_i[5].val = sNAME; strcpy(Set_Borrowed_i[6].str, "NAZWISKO AUTORA rosn\xA5\x63o"); Set_Borrowed_i[6].val = 1000+sSURNAME; strcpy(Set_Borrowed_i[7].str, "NAZWISKO AUTORA malej\xA5\x63o"); Set_Borrowed_i[7].val = sSURNAME; strcpy(Set_Borrowed_i[8].str, "<NAME> od zdanych"); Set_Borrowed_i[8].val = 1000+sBORROWED; strcpy(Set_Borrowed_i[9].str, "<NAME> od wyp."); Set_Borrowed_i[9].val = sBORROWED; static struct ChoiceItem Set_Users_i[6] = {NULL, 0}; static struct ChoiceLevel Set_Users = {Set_Users_i, SetConfig, "Domy\x98lne sortowanie czytelnik\xA2w ", 6}; CurrentChoice[3] = &Set_Users; strcpy(Set_Users_i[0].str, "IMI\xA8 rosn\xA5\x63o"); Set_Users_i[0].val = 1000+sNAME; // what = 4 strcpy(Set_Users_i[1].str, "IMI\xA8 malej\xA5\x63o"); Set_Users_i[1].val = sNAME; // what = 4 strcpy(Set_Users_i[2].str, "NAZWISKO rosn\xA5\x63o"); Set_Users_i[2].val = 1000+sSURNAME; // what = 4 strcpy(Set_Users_i[3].str, 
"NAZWISKO malej\xA5\x63o"); Set_Users_i[3].val = sSURNAME; // what = 4 strcpy(Set_Users_i[4].str, "PESEL rosn\xA5\x63o"); Set_Users_i[4].val = 1000+sPESEL; // what = 4 strcpy(Set_Users_i[5].str, "PESEL malej\xA5\x63o"); Set_Users_i[5].val = sPESEL; // what = 4 static struct ChoiceItem Set_Days_i[365] = {NULL, 0}; static struct ChoiceLevel Set_Days = {Set_Days_i, SetConfig, "Domy\x98lny termin zwrotu ", 365}; CurrentChoice[4] = &Set_Days; strcpy(Set_Days_i[0].str, "1 dzie\xE4"); Set_Days_i[0].val = 1; for (int i=1; i<365; ++i) { itoa(i+1, Set_Days_i[i].str, 10); strcat(Set_Days_i[i].str, " dni"); Set_Days_i[i].val = i+1; } } unsigned int setting = 0; unsigned int pos[CCHOICE_SIZE]= {0}; pos[0]=tmp.AutoLoad; pos[1]=tmp.Books>1000?tmp.Books-1000:tmp.Books+1; pos[2]=tmp.Borrowed>1000?tmp.Borrowed-1000:tmp.Borrowed+1; pos[3]=tmp.Users>1000?tmp.Users-1000:tmp.Users+1; if(pos[3]==sPESEL) pos[3]-=6; else pos[3]-=4; pos[4]=tmp.MaxDays-1; unsigned char key; bool redraw = true; while(1) { if(redraw) { ChoicePrint(CurrentChoice, CCHOICE_SIZE, pos, setting); redraw=false; } key=getch(); if(key==224) { key=getch(); if(key==72) // <arrow up> action { if(setting!=0 && setting < CCHOICE_SIZE) { redraw = true; --setting; } } else if(key==80) // <arrow down> action { if(setting>=0 && setting < CCHOICE_SIZE-1) { redraw = true; ++setting; } } } else if(key==45) // <-> action { // if(pos[setting] == 0) // pos[setting]=(CurrentChoice[setting])->MSize-1; // else // --pos[setting]; if(pos[setting]!=0 && pos[setting] < (CurrentChoice[setting])->MSize) { redraw = true; --pos[setting]; } } else if(key==61||key==43) // <+or=> action { // if(pos[setting] == (CurrentChoice[setting])->MSize-1) // pos[setting]=0; // else // ++pos[setting]; if(pos[setting]>=0 && pos[setting] < (CurrentChoice[setting])->MSize-1) { redraw = true; ++pos[setting]; } } else if (key==13) // <enter> action { redraw = true; YN_option(Czy na pewno chcesz zapisac ustawienia biblioteki, { for(int i=0; i<CCHOICE_SIZE; ++i) { switch(i) { case 0: tmp.AutoLoad=CurrentChoice[i]->MItems[pos[i]].val; break; case 1: tmp.Books=CurrentChoice[i]->MItems[pos[i]].val; break; case 2: tmp.Borrowed=CurrentChoice[i]->MItems[pos[i]].val; break; case 3: tmp.Users=CurrentChoice[i]->MItems[pos[i]].val; break; case 4: tmp.MaxDays=CurrentChoice[i]->MItems[pos[i]].val; break; }; } SetConfig(tmp); setcolor(bfcolor(BLACK, LIGHTGREEN)); printf("\n\n Pomy\x98lnie zapisano ustawienia!"); setDcolor(); ContinuePause(); }, { setcolor(bfcolor(BLACK, LIGHTRED)); printf("\n\n Nie zapisano ustawie\xE4!"); setDcolor(); ContinuePause(); }) } else if (key==27) // <ESC> key { break; } } return ret; } _lib_ret About(int arg, FILE ** out, struct _lib_data * data) { _lib_ret ret; ret.menucmd=DEFAULT; clrscr(); printf("\n"); setcolor(bfcolor(BLACK, GRAY)); PrintUMenuPos("Menu g\x88\xA2wne / O programie"); setDcolor(); printf("\n Program biblioteczny"); printf("\n Wersja programu: %s\n", FULLVERSION_STRING); printf("\n Autor programu: <NAME>"); printf("\n Numer indeksu: 101247"); printf("\n Numer grupy Ps z przedmiotu 'Podstawy Programowania': 8"); // for(int i=0; i<14; ++i) // { // printf("\n\n KOD B\x9D\xA8\x44U [%.2d]: ",i); // PrintErrorInfo(i); // } MMenuBackPause(); return ret; } _lib_ret CloseLib(int arg, FILE ** libfile, struct _lib_data * data) { _lib_ret ret; ret.menucmd=DEFAULT; printf("\n "); PrintDoubleLine(10); YN_option(Czy na pewno chcesz zamkn\xA5\x86 aktualn\xA5 bibliotek\xA9, { Books_Free(&data->book); data->book=NULL; Users_Free(&data->user); data->user=NULL; 
data->cfg.libname=NULL; data->cfg.maxBorrowDays=GetConfig().MaxDays; data->cfg.BUIDnext=0; data->cfg.UUIDnext=0; ret.menucmd=DEFAULT; break; }, { ret.menucmd=NO_JUMP; break; }) return ret; } _lib_ret CloseApp(int arg, FILE ** libfile, struct _lib_data * data) { _lib_ret ret; ret.menucmd=DEFAULT; printf("\n "); PrintDoubleLine(10); YN_option(Czy na pewno chcesz zako\xE4\x63zyc program, { Books_Free(&data->book); data->book=NULL; Users_Free(&data->user); data->user=NULL; exit(0); }, { ret.menucmd=NO_JUMP; break; }) return ret; } _lib_ret NewFile(int arg, FILE ** libfile, struct _lib_data * data) { _lib_ret ret; ret.menucmd=DEFAULT; clrscr(); setcolor(bfcolor(BLACK, WHITE)); PrintHelp(FILENAME_LOAD); setcolor(bfcolor(BLACK, GRAY)); printf("\n"); PrintUMenuPos("Menu g\x88\xA2wne / Nowa biblioteka"); printf("\n "); setDcolor(); PrintU2MenuPos("Istniej\xA5\x63\x65 pliki biblioteczne:"); system("DIR *.lib /A-D /B"); printf("\n\n Wprowad\xAB nazw\xA9 nowego pliku biblioteki (*.lib): "); static char fname[256]; get256(fname); if(strcmp(fname, "")) { libfile = fopen(fname, "rt"); if(!libfile) // if file does not exists create it { libfile = fopen(fname, "wt"); fclose(libfile); data->cfg.libname=fname; data->cfg.maxBorrowDays=GetConfig().MaxDays; data->cfg.BUIDnext=0; data->cfg.UUIDnext=0; SaveLib(libfile, data); } else { ret.menucmd=NO_JUMP; clrscr(); setcolor(bfcolor(BLACK, GRAY)); printf("\n"); PrintUMenuPos("Menu g\x88\xA2wne / Nowa biblioteka"); setcolor(bfcolor(BLACK, LIGHTRED)); printf("\n B\x9D\xA4\x44: Plik '%s' ju\xBE istnieje!\n Nie utworzono pliku!", fname); setDcolor(); MMenuBackPause(); } } else ret.menucmd=NO_JUMP; return ret; } _lib_ret LoadFile(int arg, FILE ** libfile, struct _lib_data * data) { _lib_ret ret; ret.menucmd=DEFAULT; clrscr(); if(!arg) //arg==0 { setcolor(bfcolor(BLACK, WHITE)); PrintHelp(FILENAME_LOAD); setcolor(bfcolor(BLACK, GRAY)); printf("\n"); PrintUMenuPos("Menu g\x88\xA2wne / Otw\xA2rz bibliotek\xA9"); printf("\n "); setDcolor(); PrintU2MenuPos("Istniej\xA5\x63\x65 pliki biblioteczne:"); system("DIR *.lib /A-D /B"); printf("\n\n Wprowad\xAB nazw\xA9 pliku biblioteki (*.lib): "); } static char fname[256]; if(arg) //arg==1 strcpy(fname, GetLastFile()); else //arg==0 get256(fname); if(strcmp(fname, "")) //if fname equals "" strcmp will return 0, otherwise strcmp return 1 so file name is appropiate and file might be loaded { libfile = fopen(fname, "rt"); if(!libfile) { ret.menucmd=NO_JUMP; clrscr(); printf("\n"); setcolor(bfcolor(BLACK, GRAY)); if(arg) //arg==1 { PrintUMenuPos("Menu g\x88\xA2wne / Otw\xA2rz ostatni\xA5 bibliotek\xA9"); SetLastFile(""); } else //arg==0 PrintUMenuPos("Menu g\x88\xA2wne / Otw\xA2rz bibliotek\xA9"); setcolor(bfcolor(BLACK, LIGHTRED)); printf("\n B\x9D\xA4\x44: Plik '%s' nie istnieje!", fname); setDcolor(); MMenuBackPause(); } else { unsigned short err_code = LoadLib(libfile, data); if(err_code==6) // if success loading { ret.menucmd=DEFAULT; data->cfg.libname=fname; SetLastFile(fname); } else // if error occuredd while loading { ret.menucmd=NO_JUMP; printf("\n "); PrintDoubleLine(10); setcolor(bfcolor(BLACK, LIGHTRED)); printf("\n B\x9D\xA4\x44: Plik '%s' jest niepoprawny lub uszkodzony!", fname); setDcolor(); printf("\n\n KOD B\x9D\xA8\x44U [%.2d]: ", err_code); PrintErrorInfo(err_code); MMenuBackPause(); } fclose(libfile); } } else // fname == "" { ret.menucmd=NO_JUMP; // if(arg==1) // { // setcolor(bfcolor(BLACK, LIGHTRED)); // printf("\n\n Nie ma ostatniego pliku do otwarcia!"); // setDcolor(); // //#if SLEEP_PAUSE == 0 // 
Sleep(ACTION_MESSAGE_TIME); //#else // ContinuePause(); //#endif // PAUSE // } } return ret; } const char * GetLastFile(void) { static char fname[256]; FILE * config = fopen(LAST_FILE, "rt"); if(config) { fget256static(config, fname); fclose(config); } else strcpy(fname, ""); return fname; } void SetLastFile(const char fname[256]) { FILE * config = fopen(LAST_FILE, "wt"); if(config) { fputs(fname, config); fprintf(config, "\n"); fclose(config); } }
/**
 * Resets the method for reusability.
 *
 * @param method Method to reset.
 */
void releaseConnection(HttpRequestBase method) {
    if (method != null) {
        method.reset();
    }
}
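A minimal usage sketch for the helper above (Apache HttpClient 4.x; `httpClient` is an assumed CloseableHttpClient field backed by a pooling connection manager, and the URL is a placeholder):

// Hypothetical call site:
HttpGet get = new HttpGet("http://example.com/resource");
try {
    CloseableHttpResponse response = httpClient.execute(get);
    try {
        EntityUtils.consume(response.getEntity()); // drain the body so the pooled connection can be reused
    } finally {
        response.close();
    }
} finally {
    releaseConnection(get); // clear per-execution state so `get` may be executed again
}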
Piperidino Substituted Borametallocenophanes. Synthesis, Reactivity, and Structure ansa-Metallocenes and related complexes have considerable potential as catalyst precursors in Ziegler–Natta-type olefin polymerization. In the present paper we report the synthesis of piperidinyl-substituted borametallocenophanes of titanium and zirconium. Furthermore, the first example of a base-stabilized, alkyl-substituted borazirconocenophane was fully characterized and tested for its properties in the polymerization of ethene.
package service

import (
	"context"
	"fmt"
	"sync"
	"time"

	"github.com/titpetric/factory"

	"github.com/cortezaproject/corteza-server/pkg/auth"
)

type (
	recordSet []*RecordImportSession

	importSession struct {
		l sync.Mutex

		records recordSet
	}

	ImportSessionService interface {
		FindByID(ctx context.Context, sessionID uint64) (*RecordImportSession, error)
		SetByID(ctx context.Context, sessionID, namespaceID, moduleID uint64, fields map[string]string, progress *RecordImportProgress, decoder Decoder) (*RecordImportSession, error)
		DeleteByID(ctx context.Context, sessionID uint64) error
	}
)

func ImportSession() *importSession {
	return &importSession{
		records: recordSet{},
	}
}

// pointer receiver: a value receiver would copy the struct together with its
// sync.Mutex, which `go vet` flags; the callers below already hold the lock
func (svc *importSession) indexOf(userID, sessionID uint64) int {
	for i, r := range svc.records {
		if r.SessionID == sessionID && r.UserID == userID {
			return i
		}
	}

	return -1
}

func (svc *importSession) FindByID(ctx context.Context, sessionID uint64) (*RecordImportSession, error) {
	svc.l.Lock()
	defer svc.l.Unlock()

	userID := auth.GetIdentityFromContext(ctx).Identity()
	i := svc.indexOf(userID, sessionID)

	if i >= 0 {
		return svc.records[i], nil
	}
	return nil, fmt.Errorf("compose.service.RecordImportSessionNotFound")
}

func (svc *importSession) SetByID(ctx context.Context, sessionID, namespaceID, moduleID uint64, fields map[string]string, progress *RecordImportProgress, decoder Decoder) (*RecordImportSession, error) {
	svc.l.Lock()
	defer svc.l.Unlock()

	userID := auth.GetIdentityFromContext(ctx).Identity()
	i := svc.indexOf(userID, sessionID)
	var ris *RecordImportSession

	if i >= 0 {
		ris = svc.records[i]
	} else {
		ris = &RecordImportSession{
			SessionID: factory.Sonyflake.NextID(),
			CreatedAt: time.Now(),
		}
		svc.records = append(svc.records, ris)
		ris.UserID = userID
	}

	ris.UpdatedAt = time.Now()

	if namespaceID > 0 {
		ris.NamespaceID = namespaceID
	}
	if moduleID > 0 {
		ris.ModuleID = moduleID
	}
	if fields != nil {
		ris.Fields = fields
	}
	if progress != nil {
		ris.Progress = *progress
	}
	if ris.Progress.FinishedAt != nil {
		ris.Decoder = nil
	} else if decoder != nil {
		ris.Decoder = decoder
	}

	return ris, nil
}

// https://stackoverflow.com/a/37335777
func remove(s recordSet, i int) recordSet {
	s[len(s)-1], s[i] = s[i], s[len(s)-1]
	return s[:len(s)-1]
}

func (svc *importSession) DeleteByID(ctx context.Context, sessionID uint64) error {
	svc.l.Lock()
	defer svc.l.Unlock()

	userID := auth.GetIdentityFromContext(ctx).Identity()
	i := svc.indexOf(userID, sessionID)

	if i >= 0 {
		svc.records = remove(svc.records, i)
	}
	return nil
}

// @todo run this in some interval
func (svc *importSession) clean(ctx context.Context) {
	svc.l.Lock()
	defer svc.l.Unlock()

	for i := len(svc.records) - 1; i >= 0; i-- {
		r := svc.records[i]
		if time.Now().After(r.UpdatedAt.Add(time.Hour * 24 * 3)) {
			svc.records = remove(svc.records, i)
		}
	}
}
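A minimal usage sketch for the service above (assumes ctx carries an authenticated identity, since the service derives the user ID from it; the ID variables are placeholders):

// Hypothetical call site:
svc := ImportSession()

// sessionID 0 matches nothing, so a new session is created and returned
ses, err := svc.SetByID(ctx, 0, namespaceID, moduleID, nil, nil, nil)
if err != nil {
	return err
}

// later lookups go through the generated session ID
ses, err = svc.FindByID(ctx, ses.SessionID)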
def read(
    self: HDF_File,
    group_name: str = None,
    dataset_name: str = None,
    attr_name: str = None,
    return_dataset_shape: bool = False,
    return_dataset_dtype: bool = False,
    return_dataset_slice: slice = slice(None),
    swmr: bool = False,
):
    with h5py.File(self.file_name, "r", swmr=swmr) as hdf_file:
        if group_name is None:
            group = hdf_file
            group_name = "/"
        else:
            try:
                group = hdf_file[group_name]
            except KeyError as k:
                raise KeyError(
                    f"Group {group_name} does not exist in {self}. Error {k}"
                )
        if dataset_name is None:
            if attr_name is None:
                return sorted(group)
            elif attr_name != "":
                try:
                    return group.attrs[attr_name]
                except KeyError:
                    raise KeyError(
                        f"Attribute {attr_name} does not exist for "
                        f"group {group_name} of {self}."
                    )
            else:
                return dict(group.attrs)
        else:
            try:
                dataset = group[dataset_name]
            except KeyError:
                raise KeyError(
                    f"Dataset {dataset_name} does not exist for "
                    f"group {group_name} of {self}."
                )
            if attr_name is None:
                if isinstance(dataset, h5py.Dataset):
                    if return_dataset_shape:
                        return dataset.shape
                    elif return_dataset_dtype:
                        return dataset.dtype
                    else:
                        array = dataset[return_dataset_slice]
                        if array.dtype == object:
                            array = array.astype(str)
                        return array
                elif dataset.attrs["is_pd_dataframe"]:
                    if return_dataset_shape:
                        columns = list(dataset)
                        return (len(dataset[columns[0]]), len(columns))
                    elif return_dataset_dtype:
                        return [
                            dataset[column].dtype
                            for column in sorted(dataset)
                        ]
                    else:
                        df = pd.DataFrame(
                            {
                                column: dataset[column][return_dataset_slice]
                                for column in sorted(dataset)
                            }
                        )
                        for column in dataset:
                            if df[column].dtype == object:
                                df[column] = df[column].apply(
                                    lambda x: x if isinstance(x, str) else x.decode('UTF-8')
                                )
                        return df
                else:
                    raise ValueError(
                        f"{dataset_name} is not a valid dataset in "
                        f"group {group_name} of {self}."
                    )
            elif attr_name != "":
                try:
                    return dataset.attrs[attr_name]
                except KeyError:
                    raise KeyError(
                        f"Attribute {attr_name} does not exist for "
                        f"dataset {dataset_name} of group "
                        f"{group_name} of {self}."
                    )
            else:
                return dict(dataset.attrs)
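A minimal usage sketch for the method above (read appears to belong to an HDF_File class; the constructor signature, file name, and group/dataset names below are assumptions):

# Hypothetical usage:
hdf = HDF_File("experiment.hdf")
top_level_groups = hdf.read()                                # list group names
shape = hdf.read(group_name="raw", dataset_name="scans",
                 return_dataset_shape=True)                  # shape without loading the data
df = hdf.read(group_name="results", dataset_name="peaks")    # DataFrame, if stored as one
attrs = hdf.read(group_name="raw", attr_name="")             # all group attributes as a dict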
package functions

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestChr(t *testing.T) {
	assert.Equal(t, "a", Chr(97))
	assert.Equal(t, "c", Chr(99))

	// Overflow behavior
	//assert.Equal(t, "a", Chr(-159))

	// Overflow behavior
	//assert.Equal(t, "A", Chr(833))

	// Overflow behavior
}
/**
 * Copyright 2017 SPeCS.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */

package eu.antarex.clang.parser.tests;

import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Test;

import eu.antarex.clang.parser.AClangAstTester;
import eu.antarex.clang.parser.CxxTester;

public class CxxProblematicTester {

    @BeforeClass
    public static void setup() throws Exception {
        AClangAstTester.clear();
    }

    @After
    public void tearDown() throws Exception {
        AClangAstTester.clear();
    }

    @Test
    public void testBoost() {
        // Jenkins machine does not have boost installed
        new CxxTester("boost.cpp").test();
    }

    @Test
    public void testOperator() {
        // GCC on Jenkins machine does not support .operator bool()
        new CxxTester("problematic/operator.cpp").test();
    }

    @Test
    public void testClasses() {
        // Unimplemented features
        new CxxTester("classes.cpp").test();
    }

    // @Test
    // public void testLambda() {
    //     // Lambdas not implemented
    //     // Reference for tests: https://msdn.microsoft.com/en-us/library/dd293608.aspx
    //     new CxxTester("lambda.cpp")
    //             .addFlags("-std=c++14")
    //             .test();
    // }

    @Test
    public void testTemplateAuto() {
        // FieldDeclParser, consumes name but has no name
        new CxxTester("problematic/template_auto.cpp").test();
    }

    @Test
    public void testImplicitReference() {
        // FieldDeclParser, consumes name but has no name
        new CxxTester("problematic/implicit_reference.cpp").test();
    }

    @Test
    public void testSortedId() {
        new CxxTester("bench/sorted_id.cpp", "bench/sorted_id.h").test();
    }

    @Test
    public void testDummy() {
        new CxxTester("problematic/dummy.cpp").showClavaAst().test();
        // onePass().showCode().showClangAst().showClavaAst()
    }

    @Test
    public void testMsAsm() {
        new CxxTester("problematic/ms_asm.c").addFlags("-fasm-blocks").onePass().showCode().test();
    }
}
use crate::types::{RevertContext, RevertWitness};

use anyhow::{anyhow, Result};
use ckb_types::prelude::Reader;
use ckb_types::prelude::{Builder, Entity};
use gw_common::smt::Blake2bHasher;
use gw_common::H256;
use gw_types::core::Status;
use gw_types::offchain::{CellInfo, RollupContext};
use gw_types::packed::BlockMerkleState;
use gw_types::packed::ChallengeLockArgsReader;
use gw_types::packed::RawL2Block;
use gw_types::packed::RollupRevert;
use gw_types::packed::{
    CellOutput, ChallengeLockArgs, GlobalState, RollupAction, RollupActionUnion, Script,
    WitnessArgs,
};
use gw_types::prelude::Unpack;
use gw_types::{bytes::Bytes, prelude::Pack};

pub struct Revert<'a> {
    finality_blocks: u64,
    reward_burn_rate: u8,
    prev_global_state: GlobalState,
    challenge_cell: &'a CellInfo, // capacity and rewards lock
    stake_cells: &'a [CellInfo],  // calculate rewards
    burn_lock: Script,
    post_reverted_block_root: [u8; 32],
    revert_witness: RevertWitness,
}

pub struct RevertOutput {
    pub post_global_state: GlobalState,
    pub reward_cells: Vec<(CellOutput, Bytes)>,
    pub burn_cells: Vec<(CellOutput, Bytes)>,
    pub rollup_witness: WitnessArgs,
}

impl<'a> Revert<'a> {
    pub fn new(
        rollup_context: &RollupContext,
        prev_global_state: GlobalState,
        challenge_cell: &'a CellInfo,
        stake_cells: &'a [CellInfo],
        burn_lock: Script,
        revert_context: RevertContext,
    ) -> Self {
        let reward_burn_rate = rollup_context.rollup_config.reward_burn_rate().into();
        let finality_blocks = rollup_context.rollup_config.finality_blocks().unpack();

        Revert {
            finality_blocks,
            prev_global_state,
            challenge_cell,
            stake_cells,
            burn_lock,
            reward_burn_rate,
            post_reverted_block_root: revert_context.post_reverted_block_root.into(),
            revert_witness: revert_context.revert_witness,
        }
    }

    pub fn build_output(self) -> Result<RevertOutput> {
        // Rewards
        let challenge_lock_args = {
            let lock_args: Bytes = self.challenge_cell.output.lock().args().unpack();
            match ChallengeLockArgsReader::verify(&lock_args.slice(32..), false) {
                Ok(_) => ChallengeLockArgs::new_unchecked(lock_args.slice(32..)),
                Err(err) => return Err(anyhow!("invalid challenge lock args {}", err)),
            }
        };

        let reward_lock = challenge_lock_args.rewards_receiver_lock();
        let rewards = Rewards::new(self.stake_cells, self.challenge_cell, self.reward_burn_rate);
        let rewards_output = rewards.build_output(reward_lock, self.burn_lock);

        // Post global state
        let first_reverted_block = {
            let blocks = &self.revert_witness.reverted_blocks;
            blocks.get(0).ok_or_else(|| anyhow!("no first block"))?
        };

        let block_merkle_state = {
            let leaves = {
                let to_leave = |b: RawL2Block| (b.smt_key().into(), H256::zero());
                let reverted_blocks = self.revert_witness.reverted_blocks.clone();
                reverted_blocks.into_iter().map(to_leave)
            };
            let block_merkle_proof = self.revert_witness.block_proof.clone();
            let block_root = block_merkle_proof.compute_root::<Blake2bHasher>(leaves.collect())?;
            let block_count = first_reverted_block.number();

            BlockMerkleState::new_builder()
                .merkle_root(block_root.pack())
                .count(block_count)
                .build()
        };

        let last_finalized_block_number = {
            let number = first_reverted_block.number().unpack();
            number
                .saturating_sub(1)
                .saturating_sub(self.finality_blocks)
        };

        let running_status: u8 = Status::Running.into();
        let post_global_state = self
            .prev_global_state
            .as_builder()
            .account(first_reverted_block.prev_account())
            .block(block_merkle_state)
            .tip_block_hash(first_reverted_block.parent_block_hash())
            .last_finalized_block_number(last_finalized_block_number.pack())
            .reverted_block_root(self.post_reverted_block_root.pack())
            .status(running_status.into())
            .build();

        // Witness
        let revert = RollupRevert::new_builder()
            .reverted_blocks(self.revert_witness.reverted_blocks)
            .block_proof(self.revert_witness.block_proof.0.pack())
            .reverted_block_proof(self.revert_witness.reverted_block_proof.0.pack())
            .build();

        let rollup_action = RollupAction::new_builder()
            .set(RollupActionUnion::RollupRevert(revert))
            .build();

        let rollup_witness = WitnessArgs::new_builder()
            .output_type(Some(rollup_action.as_bytes()).pack())
            .build();

        Ok(RevertOutput {
            post_global_state,
            reward_cells: rewards_output.reward_cells,
            burn_cells: rewards_output.burn_cells,
            rollup_witness,
        })
    }
}

struct Rewards {
    receive_capacity: u128,
    burn_capacity: u128,
}

struct RewardsOutput {
    reward_cells: Vec<(CellOutput, Bytes)>,
    burn_cells: Vec<(CellOutput, Bytes)>,
}

impl Rewards {
    fn new(stake_cells: &[CellInfo], challenge_cell: &CellInfo, reward_burn_rate: u8) -> Self {
        let to_capacity = |c: &CellInfo| c.output.capacity().unpack() as u128;

        let total_stake_capacity: u128 = stake_cells.iter().map(to_capacity).sum();
        let reward_capacity = total_stake_capacity.saturating_mul(reward_burn_rate.into()) / 100;
        let burn_capacity = total_stake_capacity.saturating_sub(reward_capacity);

        let challenge_capacity = to_capacity(challenge_cell);
        let receive_capacity = reward_capacity.saturating_add(challenge_capacity);

        Self {
            receive_capacity,
            burn_capacity,
        }
    }

    fn build_output(self, reward_lock: Script, burn_lock: Script) -> RewardsOutput {
        let build_outputs = |total_capacity: u128, lock: Script| -> Vec<(CellOutput, Bytes)> {
            let build = |capacity: u64, lock: Script| -> (CellOutput, Bytes) {
                let output = CellOutput::new_builder()
                    .capacity(capacity.pack())
                    .lock(lock)
                    .build();
                (output, Bytes::new())
            };

            let mut outputs = Vec::new();
            if total_capacity < u64::MAX as u128 {
                outputs.push(build(total_capacity as u64, lock));
                return outputs;
            }

            // minimal occupied capacity of an output: the 8-byte capacity
            // field plus the serialized lock script, in shannons
            let min_capacity = (8 + lock.as_slice().len()) as u64 * 100_000_000;
            let mut remaining = total_capacity; // was misspelled `remaind`
            while remaining > 0 {
                let max = remaining.saturating_sub(min_capacity as u128);
                match max.checked_sub(u64::MAX as u128) {
                    Some(cap) => {
                        outputs.push(build(u64::MAX, lock.clone()));
                        remaining = cap.saturating_add(min_capacity as u128);
                    }
                    None if max.saturating_add(min_capacity as u128) > u64::MAX as u128 => {
                        let max = max.saturating_add(min_capacity as u128);
                        let half = max / 2;
                        outputs.push(build(half as u64, lock.clone()));
                        outputs.push(build(max.saturating_sub(half) as u64, lock.clone()));
                        remaining = 0;
                    }
                    None => {
                        let cap = (max as u64).saturating_add(min_capacity);
                        outputs.push(build(cap, lock.clone()));
                        remaining = 0;
                    }
                }
            }

            outputs
        };

        RewardsOutput {
            reward_cells: build_outputs(self.receive_capacity, reward_lock),
            burn_cells: build_outputs(self.burn_capacity, burn_lock),
        }
    }
}
package workers

import (
	"bytes"
	"testing"

	"github.com/kilgaloon/leprechaun/config"
	"github.com/kilgaloon/leprechaun/context"
	"github.com/kilgaloon/leprechaun/log"
	"github.com/kilgaloon/leprechaun/recipe"
)

var (
	configs                 = config.NewConfigs()
	ConfigWithSettings      = configs.New("test", "../tests/configs/config_regular.ini")
	ConfigWithQueueSettings = configs.New("test", "../tests/configs/config_test_queue.ini")

	workers2 = New(
		ConfigWithSettings,
		log.Logs{},
		context.New(),
		true,
	)

	r, _              = recipe.Build("../tests/etc/leprechaun/recipes/schedule.yml")
	canErrorRecipe, _ = recipe.Build("../tests/etc/leprechaun/recipes/schedule_canerror.yml")

	worker, errr = workers2.CreateWorker(r)
	worker2, _   = workers2.CreateWorker(canErrorRecipe)
)

func TestQueue(t *testing.T) {
	workers2.Queue.empty()
	if !workers2.Queue.isEmpty() {
		t.Fatalf("Queue expected to be empty")
	}

	workers2.Queue.push(worker)
	if workers2.Queue.isEmpty() {
		t.Fatalf("Queue should not be empty")
	}

	w := workers2.Queue.pop()
	if w == nil {
		t.Fatalf("No worker popped from queue")
	}
}

func TestWorkerErrorStep(t *testing.T) {
	steps := canErrorRecipe.GetSteps()
	for _, step := range steps {
		s := Step(step)
		if !s.Validate() {
			return
		}

		var cmd *Cmd
		var err error
		var in bytes.Buffer

		cmd, err = NewCmd(s, &in, nil, true, "bash")
		if err != nil {
			t.Fatalf("Creating NewCmd failed")
		}

		// Pipe overrides Async:
		// -> echo "Something" }>
		// will not be executed async because we want to pass
		// output to the next step; if this task started async, the next
		// step would start and the output would not be passed to it
		if s.IsAsync() && !s.IsPipe() && s.CanError() {
			go worker2.workOnStep(cmd)
		} else {
			err = worker2.workOnStep(cmd)
			// a step marked as can-error is expected to return an error
			// here; otherwise break the linear step execution
			if err == nil && s.CanError() {
				t.Fatal("step marked as can-error was expected to return an error")
			} else {
				break
			}
		}
	}
}