content
stringlengths
10
4.9M
#!/usr/bin/env python3
"""Allocate and release IPv4 addresses from a network/prefix pool."""
import socket


class IpaddrNoEnoughErr(Exception):
    """Raised when the IP address pool is exhausted."""
    pass


class ip4addr(object):
    """Sequential IPv4 address allocator.

    Addresses are handed out as 4-byte ``bytes`` objects, starting at
    network + 1.  Released addresses queue up and are re-used once more
    than 20 have accumulated, or when the sequential space runs out.
    """

    def __init__(self, ipaddr, mask_size):
        """
        :param ipaddr: network address in dotted-quad form, e.g. "192.168.1.0"
        :param mask_size: prefix length, 1..31 (e.g. 25)
        :raises ValueError: if mask_size is outside 1..31.
        """
        self.__recycle_ips = []
        self.__current_max_ipaddr = 0
        self.__mask = 0
        if mask_size < 1:
            raise ValueError("the mask_size must be number and the value must be more than 0")
        if mask_size > 31:
            raise ValueError("the mask_size must be number and the value must be less than 32")
        for i in range(mask_size):
            # BUGFIX: the highest bit of an IPv4 address is bit 31, not 32.
            # The original "32 - i" shifted the whole mask one bit too far.
            self.__mask |= 1 << (31 - i)
        self.__base_ipaddr = self.__get_int_ipaddr_from_sIpaddr(ipaddr)

    def __get_int_ipaddr_from_sIpaddr(self, ipaddr):
        """Convert a dotted-quad string to its 32-bit integer value."""
        nbytes = socket.inet_aton(ipaddr)
        return (nbytes[0] << 24) | (nbytes[1] << 16) | (nbytes[2] << 8) | nbytes[3]

    def get_addr(self):
        """Allocate and return the next free address as 4 bytes.

        :return: the address as ``bytes([a, b, c, d])``.
        :raises IpaddrNoEnoughErr: when the pool is fully exhausted.
        """
        # Prefer recycled addresses once a sizeable backlog exists.
        if len(self.__recycle_ips) > 20:
            return self.__recycle_ips.pop(0)
        n = self.__current_max_ipaddr + 1
        # Highest host offset inside the prefix (the broadcast address);
        # usable host offsets are 1 .. max_host - 1.
        max_host = (~self.__mask) & 0xFFFFFFFF
        # BUGFIX: the original compared n against (base & mask) -- the
        # network number itself -- so exhaustion was never detected right.
        if n >= max_host:
            if self.__recycle_ips:
                return self.__recycle_ips.pop(0)
            raise IpaddrNoEnoughErr
        new_int_ip = self.__base_ipaddr + n
        self.__current_max_ipaddr = n
        a = (new_int_ip & 0xff000000) >> 24
        b = (new_int_ip & 0x00ff0000) >> 16
        c = (new_int_ip & 0x0000ff00) >> 8
        d = new_int_ip & 0x000000ff
        return bytes([a, b, c, d])

    def put_addr(self, ipaddr):
        """Return a previously allocated address to the recycle queue.

        :param ipaddr: the 4-byte address previously handed out.
        """
        if ipaddr not in self.__recycle_ips:
            self.__recycle_ips.append(ipaddr)
<reponame>Rayman2200/cryptobox<gh_stars>1-10 /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.crypto.core; /** * The provides hold all necessary elements that are needed for signature verification. * It is based on the lowest level of signature creation and will complaint to the ISO 32000-1:2008 spezification. * * @author <NAME> */ public class VerificationProvider { protected final static String DEFAULT_KEY_CRYPTO_PROVIDER = "BC"; // BouncyCastleProvider.PROVIDER_NAME protected String crypoProvider; protected VerificationProvider() { // Set some default values setCrypoProvider(DEFAULT_KEY_CRYPTO_PROVIDER); } public static VerificationProvider getInstance() { return new VerificationProvider(); } public String getCrypoProvider() { return crypoProvider; } public void setCrypoProvider(String crypoProvider) { this.crypoProvider = crypoProvider; } }
/**
 * Registers the given service under the supplied interfaces and records its
 * location in the root context via the "location.root" service property.
 */
protected void registerServiceAtRootLocation(Object service, String location, Class<?>... interfazes) {
    Dictionary<String, Object> properties = new Hashtable<String, Object>();
    String[] rootLocations = new String[]{ location };
    properties.put("location.root", rootLocations);
    registerService(service, properties, interfazes);
}
def node_controller(message):
    """Handle one incoming MQTT sensor message from a hydroponic node.

    Parses the payload, upserts the device and its latest sensor readings,
    loads (or creates) the node configuration, and publishes a control
    command back on the water-controller topic.

    :param message: raw payload (bytes) — presumably InfluxDB line protocol;
        verify against the publisher.
    """
    mqtt_client.reconnect()
    # b" 0" appends a dummy timestamp field so parse_line accepts the payload.
    d: dict = parse_line(message + b" 0")
    tag: str = d["tags"]["tag"]
    # Ensure a Device row exists for this node tag.
    Device.objects.update_or_create(tag=tag)
    # Upsert the latest sensor snapshot for the device.
    Sensor.objects.update_or_create(
        tag=Device.objects.get(tag=tag),
        defaults={
            "ph_voltage": d["fields"]["ph_voltage"],
            "tds_voltage": d["fields"]["tds_voltage"],
            "ph_level": d["fields"]["ph_level"],
            "tds_level": d["fields"]["tds_level"],
            "water_tk_lvl": d["fields"]["water_tk_lvl"],
            "nutrient_tk_lvl": d["fields"]["nutrient_tk_lvl"],
            "ph_downer_tk_lvl": d["fields"]["ph_downer_tk_lvl"],
        },
    )
    try:
        cfg = Config.objects.get(tag=Device.objects.get(tag=tag))
    except Config.DoesNotExist:
        # No config yet for this node: create one with defaults.
        set_default_config(tag=tag)
    finally:
        # NOTE(review): this re-fetch runs unconditionally, which makes the
        # assignment in the try-block redundant — the finally branch is the
        # one whose result is used below.
        cfg = Config.objects.get(tag=Device.objects.get(tag=tag))
    water_pump_signal: bool = is_any_require_water(tag=tag)
    try:
        fctl = ForceController.objects.get(tag=Device.objects.get(tag=tag))
    except ForceController.DoesNotExist:
        # Fallback: ad-hoc stand-in object with every force flag cleared,
        # used when the node has no ForceController row.
        class A:
            pass
        fctl = A()
        fctl.water_pump_signal = False
        fctl.nutrient_pump_signal = False
        fctl.ph_downer_pump_signal = False
        fctl.mixer_pump_signal = False
        fctl.force_water_pump_signal = False
        fctl.force_nutrient_pump_signal = False
        fctl.force_ph_downer_pump_signal = False
        fctl.force_mixer_pump_signal = False
    # Build the control payload; flags are sent as 0/1 integers.
    callback_d: dict = WaterCtrlDict(
        p1=int(water_pump_signal),
        fps1=int(fctl.force_water_pump_signal),
        fps2=int(fctl.force_nutrient_pump_signal),
        fps3=int(fctl.force_ph_downer_pump_signal),
        fps4=int(fctl.force_mixer_pump_signal),
        fp1=int(fctl.water_pump_signal),
        fp2=int(fctl.nutrient_pump_signal),
        fp3=int(fctl.ph_downer_pump_signal),
        fp4=int(fctl.mixer_pump_signal),
        tmax=cfg.tds_max_level,
        tmin=cfg.tds_min_level,
        pmax=cfg.ph_max_level,
        pmin=cfg.ph_min_level,
        spc=count_linked_sprinkler(tag)
    )
    # Publish the command on the per-node water-controller topic.
    mqtt_client.publish(join(MQTT_WATER_CONTROLLER_TOPIC, tag), json.dumps(callback_d))
def quit_and_restart_maestral():
    """Quit Maestral and schedule a relaunch once this process has exited.

    Spawns a shell watcher that waits for the current PID to disappear
    (``lsof +r`` on macOS, ``tail --pid`` on Linux) and then starts the
    appropriate Maestral entry point. Finally quits the Qt event loop and
    exits the process.
    """
    pid = os.getpid()
    config_name = os.getenv("MAESTRAL_CONFIG", "maestral")
    if is_macos_bundle:
        launch_command = os.path.join(sys._MEIPASS, "main")
        # BUGFIX: lsof -p expects the PID to wait on, not the launch command;
        # the original passed launch_command to both placeholders.
        Popen("lsof -p {0} +r 1 &>/dev/null; {1}".format(pid, launch_command),
              shell=True)
    # BUGFIX: must be elif — on a macOS bundle platform.system() is also
    # "Darwin", so the original relaunched the app twice.
    elif platform.system() == "Darwin":
        Popen("lsof -p {0} +r 1 &>/dev/null; maestral gui --config-name='{1}'".format(
            pid, config_name), shell=True)
    elif platform.system() == "Linux":
        Popen("tail --pid={0} -f /dev/null; maestral gui --config-name='{1}'".format(
            pid, config_name), shell=True)
    QtCore.QCoreApplication.quit()
    sys.exit(0)
// <reponame>georgeclaghorn/georgix
//! Thin wrappers around x86 port-mapped I/O instructions.

/// Read one byte from `port` (x86 `in al, dx`).
///
/// # Safety
/// Port I/O has arbitrary hardware side effects; the caller must know the
/// port is safe to read in the current machine state.
#[inline(always)]
pub unsafe fn inb(port: u16) -> u8 {
    let value: u8;
    asm!("in al, dx", in("dx") port, out("al") value, options(nomem, nostack));
    value
}

/// Write one byte to `port` (x86 `out dx, al`).
///
/// # Safety
/// See [`inb`]; writing to a wrong port can misconfigure hardware.
#[inline(always)]
pub unsafe fn outb(port: u16, value: u8) {
    asm!("out dx, al", in("dx") port, in("al") value, options(nomem, nostack))
}

/// Read a 32-bit word from `port` (x86 `in eax, dx`).
///
/// # Safety
/// See [`inb`].
#[inline(always)]
pub unsafe fn inl(port: u16) -> u32 {
    let value: u32;
    asm!("in eax, dx", in("dx") port, out("eax") value, options(nomem, nostack));
    value
}

/// Write a 32-bit word to `port` (x86 `out dx, eax`).
///
/// # Safety
/// See [`outb`].
#[inline(always)]
pub unsafe fn outl(port: u16, value: u32) {
    asm!("out dx, eax", in("dx") port, in("eax") value, options(nomem, nostack))
}
Pebble couldn’t have been pleased with the smartwatch portion of Apple’s Worldwide Developers Conference keynote last week. Apple seemed to have taken a page straight out of Pebble’s playbook with an Apple Watch feature called Time Travel. Coming this fall in watchOS 2, Time Travel will let users scroll the Watch’s Digital Crown to view information from the past or future, straight from their main watch face. A forward scroll, for instance, could show tomorrow’s weather forecast, and a backwards scroll could reveal the score of last night’s game. Months earlier, Pebble had revealed a similar feature called Timeline as part of its new Pebble Time smartwatch. By pressing up or down on the watch’s buttons, users can jump into the past or future to see relevant bits of information. Pebble clearly saw Timeline as a crowning achievement in smartwatch software, with CEO Eric Migicovsky telling The Verge that it was unlike anything Apple or Google had come up with. “We’ve found a new framework to use as an interaction model on the watch,” he said in February. Coincidence or not, Apple has taken a liking to this interaction model as well. But is the Apple version of time-based information a pale imitation, a shameless rip-off, or a clever iteration? Upon closer inspection, it’s a combination of all three. The importance of time Before we dive in, let’s back up and consider why Apple needed a feature like Time Travel in the first place. In my experience, the Apple Watch has been a beautiful piece of hardware with deeply flawed software. While the design is sharper than any other smartwatch on the market—even on the basic Apple Watch Sport I’ve been wearing for two weeks—watchOS can seem awfully dumb. Most reviews picked up on the obvious drawbacks, such as the confusing interface and the sluggish, non-native apps (the latter of which should also be addressed by watchOS 2). 
But my complaint is more fundamental: In regular use, I’m rarely compelled to do anything on the watch besides looking at notifications and checking the time. Everything else, from futzing with the app launcher to fiddling with Glances, feels like a waste of energy. I might as well just take out my phone. The missing ingredient is context. With smartwatches, I’ve taken to saying that if you see an app launcher, they blew it. Sure, picking an app from a list makes sense in a handful of situations, but most of the time, I want my smartwatch to figure things out for me. Using context, it should know what information I need, and understand when to serve it. That way, the watch not only saves time, but provides valuable insight into my day. It turns out that as a contextual tool, time is pretty important. It dominates so many aspects of our lives—time for the next appointment, time to water the plants, time to watch the game—that a smartwatch could become pretty clever just by organizing its information around time. With Pebble’s Timeline feature, future events are always just a tap away. That’s why Pebble’s Timeline and Apple’s Time Travel are such important features. Instead of just reminding us of important events through notifications, as our phones already do, these smartwatch features give us a quick high-level overview on demand. They’re much more powerful than notifications, yet faster than digging through apps or swiping through Glances. In other words, they’re a great interaction model for a smartwatch. Pebble’s Migicovsky was spot-on. Pick a time, any time While Pebble and Apple seem to have created similar systems, in some ways they are very different. With Time Travel, Apple’s use of “complications” on the watch face could let users combine data points for better insights. Say, for instance, you have one complication for sports scores, and another for sports headlines. A quick scroll back in time might give you the score and a one-line recap. 
Apple’s watchOS 2 preview site gives an even simpler example: With calendar and weather complications, you could see the weather forecast for tomorrow’s meeting—just in case you’re thinking of having it outside. The robustness of Apple’s platform also makes a huge difference. Let’s say you wanted to read more of that game recap. A tap on the complication would let you dive into the app for a lengthier description, and from there you might even be able to send the story to your iPhone with Handoff. Want to reschedule your meeting based on the weather? A quick chat with Siri could help you get that done. While you can also launch apps from Pebble’s Timeline, they’re not nearly as capable. Combining multiple points of data could come in handy with Time Travel on the Apple Watch. If there’s an advantage for Pebble, it’s that Timeline is less constrained in how much information it can reveal. Whereas the Apple Watch is limited by the number of complications that fit on the screen—you can’t pack in more than five right now—with Pebble you can stuff as many points of data into the Timeline as you want. The fact that Pebble’s Timeline is separate from the watch face is also beneficial, in a way that nicely suits Pebble Time’s always-on display. Right now, my Pebble Time review unit is rocking a picture of Mega Man on a blue background, and I wouldn’t have it any other way. With the Apple Watch, four out of 10 watch faces don’t support complications at all, and it looks like the upcoming Photo watch face won’t have complications either. Using any of these faces will mean missing out on Time Travel entirely. Where Apple excels Beyond these conceptual distinctions, there are some differences of execution to consider, and here it’s hard not to see Apple as the victor. Right now, Pebble’s Timeline doesn’t have a lot of developer support—I count 21 apps that integrate with Timeline—and as a result it doesn’t feel like the game-changing feature that it could be.
In fairness, it’s early days for the Pebble Time, but we can safely assume Apple is going to have less trouble getting developers on board. Even before launch, the Apple Watch accrued more than 1,000 apps—many from major brands that haven’t touched Pebble—and Time Travel gives them a chance to occupy prime real estate on users’ wrists. Apple’s hardware also lends itself to a time-based interface, with the ability to scroll through time instead of having to tap repeatedly on buttons. I don’t get much use out of the Digital Crown now—usually it’s easier to just swipe on the screen—but Time Travel could finally make this hardware flourish seem essential. That’s not to say Apple’s mimicry renders the Pebble Time obsolete. If you enjoy the original Pebble’s always-on display and multi-day battery life, the Pebble Time is a fine improvement. (My biggest complaints so far: The screen can look dim when it’s not in direct light, and the battery keeps falling short of the advertised week-long runtime.) I’ve ordered a Pebble Time Steel for myself, and don’t regret it. At the same time, I don’t fault Apple for running with Pebble’s signature software feature. This is how competition works, and now it’s on Pebble to make its brilliant idea even better. May the smartest watch win.
// This suite tests the following:
// * That the generated event is consumed by consumer-topic
// * That the consumed event is processed, and an adequate response is generated
//   on Kafka response-topic
// * That the aggregate-version is read and applied from event-meta table
// * That the processed event gets stored in Cassandra event-store
func TestEventPersistence(t *testing.T) {
	log.Println("Reading environment file")
	// A missing .env file is non-fatal: the variables may already be set in
	// the process environment, so only log the wrapped error and continue.
	err := godotenv.Load("../.env")
	if err != nil {
		err = errors.Wrap(err,
			".env file not found, env-vars will be read as set in environment",
		)
		log.Println(err)
	}

	// Required configuration; abort the whole suite if any variable is unset.
	missingVar, err := commonutil.ValidateEnv(
		"CASSANDRA_HOSTS",
		"CASSANDRA_KEYSPACE",
		"CASSANDRA_EVENT_TABLE",
		"CASSANDRA_EVENT_META_TABLE",
		"KAFKA_BROKERS",
		"KAFKA_CONSUMER_GROUP",
		"KAFKA_CONSUMER_TOPICS",
	)
	if err != nil {
		err = errors.Wrapf(err, "Env-var %s is required, but is not set", missingVar)
		log.Fatalln(err)
	}

	// Hand control to the Ginkgo/Gomega BDD runner.
	RegisterFailHandler(Fail)
	RunSpecs(t, "EventPersistence Suite")
}
But now there are folks in the Brewers' camp who are disappointed right-hander Zack Greinke wasn't chosen and surprised that La Russa said the Milwaukee ace wasn't considered because of his pitching schedule. The 2009 Cy Young Award winner with the Kansas City Royals is in fact lined up to pitch Saturday, a decision made to actually improve his chances for pitching in the All-Star Game, Brewers sources told ESPN The Magazine's Buster Olney on Monday. The Brewers' staff went to Greinke recently, the sources said, and presented him with some options, including a schedule that would allow him to be in position to pitch next Tuesday in the Midsummer Classic in Kansas City -- where Greinke began his career. The Brewers, who rallied to beat Miami on Monday despite a subpar outing from Greinke, have only one player going to Kansas City in reserve outfielder Ryan Braun. Greinke allowed five runs in six innings Monday but did not factor in the decision and still owns a 9-2 record with a 3.08 ERA. He spoke about his All-Star omission with the Milwaukee Journal Sentinel, acknowledging that he had hoped to make the team but that he does not feel snubbed. "I thought, depending on how many relievers made it, I might," Greinke told the Journal Sentinel. "There are so many good starters this year. There's half a dozen that have done just as good as the guys that made it. I can't really complain. The starters this year, it's got to be the best in the last 20 years, maybe 30 years. It's pretty amazing. You can't please everyone." Greinke also decided not to criticize La Russa. "I wanted to make the team. It's not an easy job picking them," he told the paper. "There's other guys that have pitched great, too. You can't make everybody happy. No matter who (La Russa) picks, there's going to be arguments. It's kind of like the BCS stuff. No matter what you do, it's going to be bad." Baker's Reds will be sending three players.
But it's two players who weren't selected -- Cueto, a right-hander, and second baseman Phillips -- that had the manager floating a theory stemming from a 2010 brawl with La Russa's Cardinals. "A snub like that looks bad," Baker told reporters. "Johnny and Brandon were at the center of a skirmish between us and the Cardinals. Some of the Cardinals who aren't there anymore are making some of the selections." Reds first baseman Joey Votto was selected by the fans to start, while reliever Aroldis Chapman was voted in by the players and outfielder Jay Bruce was a coaches' selection. But Baker, in saying he was pleased by the trio's inclusion, on Sunday pointed to the 2010 incident. "I'm happy for the guys who made it big time, especially for Chapman," Baker said. "You figured that Joey would make it. We knew Jay had an outside chance. You also figured that Johnny Cueto and Brandon Phillips had a great chance. I don't understand that one."
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE DeriveGeneric #-}
-- BUGFIX: RecordWildCards is required by the "Players{..}" and
-- "ServerPing{..}" patterns in the ToJSON instances below.  If the project
-- already enables it globally (e.g. default-extensions in the cabal file)
-- this pragma is redundant but harmless.
{-# LANGUAGE RecordWildCards #-}

-- | Data types and JSON codecs for a Minecraft server-list ping payload.
module ServerPing where

import qualified Data.Text as T
import Data.Aeson
import Data.Maybe (catMaybes)
import Control.Exception
import GHC.Generics
import MCText
import Control.Monad
import Control.Applicative ((<|>))

-- | Thrown when a server ping fails or cannot be decoded.
newtype PingException = PingException String

instance Show PingException where
  show (PingException msg) = "PingException: " ++ msg

instance Exception PingException

-- | Top-level ping response.
data ServerPing = ServerPing
  { description :: Description
  , players :: Players
  , version :: Version
  , favicon :: Maybe T.Text   -- optional icon; absent on some servers
  } deriving (Generic, Show)

-- | The description is either a plain string (older protocol) or a full
-- chat component.
data Description = DString T.Text
                 | DMCText BaseComponent
                 deriving (Show)

data Players = Players
  { max :: Int
  , online :: Int
  , sample :: Maybe [Player]
  } deriving (Generic, Show)

data Player = Player
  { name :: T.Text
  , id :: T.Text
  } deriving (Generic, Show)

data Version = Version
  { name :: T.Text
  , protocol :: Int
  } deriving (Generic, Show)

instance FromJSON Players
instance FromJSON Player
instance FromJSON Version

-- Try the legacy plain-string form first, then fall back to parsing a
-- chat-component object.
instance FromJSON Description where
  parseJSON = (DString <$$> withText "Description17" return)
         <||> (DMCText <$$> parseJSON)
    where
      (<$$>) = (<$>) . (<$>)
      (<||>) = liftM2 (<|>)

instance FromJSON ServerPing

-- Serialise, omitting absent optional fields from the object entirely.
instance ToJSON Players where
  toJSON Players{..} = object $ catMaybes
    [ "max" .=? Just max
    , "online" .=? Just online
    , "sample" .=? sample
    ]

instance ToJSON Player
instance ToJSON Version

instance ToJSON Description where
  toJSON (DString t) = String t
  toJSON (DMCText t) = toJSON t

instance ToJSON ServerPing where
  toJSON ServerPing{..} = object $ catMaybes
    [ "description" .=? Just description
    , "players" .=? Just players
    , "version" .=? Just version
    , "favicon" .=? favicon
    ]

-- | Like '(.=)' but produces 'Nothing' for absent values so they can be
-- filtered out with 'catMaybes' above.
infixr 8 .=?
(.=?) :: (ToJSON v, KeyValue kv) => T.Text -> Maybe v -> Maybe kv
k .=? v = (k .=) <$> v
// <reponame>yandixuan/mybatis-plus
/*
 * Copyright (c) 2011-2020, baomidou (<EMAIL>).
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 * <p>
 * https://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.baomidou.mybatisplus.core.toolkit;

import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.reflection.MetaObject;
import org.apache.ibatis.reflection.SystemMetaObject;

import java.lang.reflect.Field;
import java.lang.reflect.Proxy;
import java.util.Map;

/**
 * Plugin utility class.
 *
 * @author TaoYu , hubin
 * @since 2017-06-20
 */
public abstract class PluginUtils {

    /** MetaObject path to the SQL string nested inside a StatementHandler delegate. */
    public static final String DELEGATE_BOUNDSQL_SQL = "delegate.boundSql.sql";

    // Cached, accessibility-forced handle to BoundSql#additionalParameters;
    // resolved once at class-load time so the reflection cost is paid only once.
    private final static Field additionalParametersField = initBoundSqlAdditionalParametersField();

    private static Field initBoundSqlAdditionalParametersField() {
        try {
            Field field = BoundSql.class.getDeclaredField("additionalParameters");
            field.setAccessible(true);
            return field;
        } catch (NoSuchFieldException e) {
            throw ExceptionUtils.mpe("can not find field['additionalParameters'] from BoundSql, why?", e);
        }
    }

    /**
     * Unwrap the real target object behind possibly multiple layers of
     * JDK dynamic proxies.
     */
    @SuppressWarnings("unchecked")
    public static <T> T realTarget(Object target) {
        if (Proxy.isProxyClass(target.getClass())) {
            MetaObject metaObject = SystemMetaObject.forObject(target);
            // Recurse: the invocation handler's target may itself be a proxy.
            return realTarget(metaObject.getValue("h.target"));
        }
        return (T) target;
    }

    /**
     * Read the {@code additionalParameters} field of a BoundSql.
     *
     * @param boundSql BoundSql
     * @return additionalParameters
     */
    @SuppressWarnings("unchecked")
    public static Map<String, Object> getAdditionalParameter(BoundSql boundSql) {
        try {
            return (Map<String, Object>) additionalParametersField.get(boundSql);
        } catch (IllegalAccessException e) {
            throw ExceptionUtils.mpe("获取 BoundSql 属性值 additionalParameters 失败: " + e, e);
        }
    }

    /**
     * Copy the given additional parameters onto a BoundSql.
     *
     * @param boundSql             BoundSql
     * @param additionalParameters additionalParameters
     */
    public static void setAdditionalParameter(BoundSql boundSql, Map<String, Object> additionalParameters) {
        additionalParameters.forEach(boundSql::setAdditionalParameter);
    }
}
<gh_stars>0 package com.textrazor; /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /** * * @author GRV */ import edu.stanford.nlp.ling.CoreAnnotations; import edu.stanford.nlp.neural.rnn.RNNCoreAnnotations; import edu.stanford.nlp.pipeline.Annotation; import edu.stanford.nlp.pipeline.StanfordCoreNLP; import edu.stanford.nlp.sentiment.SentimentCoreAnnotations.SentimentAnnotatedTree; import edu.stanford.nlp.trees.Tree; import edu.stanford.nlp.util.CoreMap; public class NLP { static StanfordCoreNLP pipeline; public static void init() { pipeline = new StanfordCoreNLP("MyPropFile2.properties"); } public static int findSentiment(String tweet) { int mainSentiment = 0; if (tweet != null && tweet.length() > 0) { int longest = 0; Annotation annotation = pipeline.process(tweet); for (CoreMap sentence : annotation .get(CoreAnnotations.SentencesAnnotation.class)) { Tree tree = sentence.get(SentimentAnnotatedTree.class); int sentiment = RNNCoreAnnotations.getPredictedClass(tree); String partText = sentence.toString(); if (partText.length() > longest) { mainSentiment = sentiment; longest = partText.length(); } } } return mainSentiment; } static StanfordCoreNLP pTraining; static StanfordCoreNLP nTraining; public static void positiveWordsTraining() { pTraining = new StanfordCoreNLP("positive-words.txt"); } public static void negativeWordsTraining() { nTraining = new StanfordCoreNLP("negative-words.txt"); } public static int annotators(String tweet) { int mainSentiment = 0; if (tweet != null && tweet.length() > 0) { int longest = 0; Annotation annotation = pipeline.process(tweet); for (CoreMap sentence : annotation .get(CoreAnnotations.SentencesAnnotation.class)) { Tree tree = sentence.get(SentimentAnnotatedTree.class); int sentiment = RNNCoreAnnotations.getPredictedClass(tree); String partText = sentence.toString(); if (partText.length() > longest) { 
mainSentiment = sentiment; longest = partText.length(); } } } return mainSentiment; } public static int tokenize(String tweet) { int mainSentiment = 0; if (tweet != null && tweet.length() > 0) { int longest = 0; Annotation annotation = pipeline.process(tweet); for (CoreMap sentence : annotation .get(CoreAnnotations.SentencesAnnotation.class)) { Tree tree = sentence.get(SentimentAnnotatedTree.class); int sentiment = RNNCoreAnnotations.getPredictedClass(tree); String partText = sentence.toString(); if (partText.length() > longest) { mainSentiment = sentiment; longest = partText.length(); } } } return mainSentiment; } }
#!/usr/bin/python
"""DT-Bench launcher: runs a distributed DNN benchmark and logs its output."""
import numpy as np
import csv
import json
import sys
import argparse
import multiprocessing as mp
import subprocess
import glob, os
from functools import partial


class bcolors:
    """ANSI escape codes used for coloured console messages."""
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
    DATA = '\033[5;30;47m'
    TITLE = '\033[7;34;47m'
    C_NONE = '\033[0;00m'
    C_RED = '\033[1;31m'
    C_GREEN = '\033[1;32m'


def print_title(content):
    """Print a banner-styled title preceded by a blank line."""
    print('\n')
    print(bcolors.TITLE + content + bcolors.ENDC)


def print_error(content):
    """Print an error message in red."""
    print(bcolors.C_RED + "[ERROR] " + content + bcolors.ENDC)


def print_warning(content):
    """Print a warning message in yellow."""
    print(bcolors.WARNING + "[WARNING] " + content + bcolors.ENDC)


def print_info(content):
    """Print an informational message in green."""
    print(bcolors.OKGREEN + "[INFO] " + content + bcolors.ENDC)


def print_progress(content):
    """Print a progress message in blue."""
    print(bcolors.OKBLUE + "[INFO] " + content + bcolors.ENDC)


if __name__ == "__main__":
    logdir = './scoutlog/'
    sync = None
    model = None
    hostfile = 'hostfile.txt'
    sys.stdout.flush()
    parser = argparse.ArgumentParser(description='DT-Bench')
    parser.add_argument('--logdir', metavar='/path/to/logdir/', type=str, required=False,
                        help='path to the directory of dt-bench logged files')
    parser.add_argument('sync', type=str, nargs=1, metavar='<ps|replicated>')
    parser.add_argument('model', type=str, nargs=1, metavar='<alexnet|vgg16|resnet50>')
    args = parser.parse_args()
    if args.logdir != None:
        logdir = args.logdir + '/'
    if args.sync != None:
        sync = args.sync[0]
    if args.model != None:
        model = args.model[0]
    print_info("logdir = %s" % logdir)
    print_info("sync = %s" % sync)
    print_info("model = %s" % model)
    subprocess.call(['mkdir', '-p', logdir])
    with open('%s/scout.log' % logdir, 'w') as logfile:
        # BUGFIX: subprocess does not invoke a shell, so '~' was passed to
        # python literally and the script could never be found; expand it.
        bench = os.path.expanduser(
            '~/scout/t-bench/scripts/tf_cnn_benchmarks/tf_cnn_benchmarks.py')
        # BUGFIX: the log file was opened but never used; capture the
        # benchmark's stdout and stderr in it.
        subprocess.call(['python', bench], stdout=logfile, stderr=subprocess.STDOUT)
/// Split the value of an `$interface` reference into a base name and an
/// optional fragment.  When present, the fragment keeps its leading `#`.
pub fn split_interface_ref(interface_ref: &str) -> (&str, Option<&str>) {
    match interface_ref.find('#') {
        Some(pos) => (&interface_ref[..pos], Some(&interface_ref[pos..])),
        None => (interface_ref, None),
    }
}
import I8 from 'i18next';

import en from './en-US.json';
import mn from './mn-MN.json';

// Locale tag -> translation bundle.
// lazy requires (metro bundler does not support symlinks)
const translationGetters = {
  'en-US': () => en,
  'mn-MN': () => mn,
};

const defaultLocale = 'mn-MN';

/** Switch the active i18next language; falls back to the default locale. */
const changeLanguage = (languageTag: string = defaultLocale) => {
  return I8.changeLanguage(languageTag);
};

// One-time initialisation of i18next with both bundled locales.
if (!I8.isInitialized) {
  I8.init({
    compatibilityJSON: 'v3',
    lng: defaultLocale,
    debug: __DEV__,
    keySeparator: '.',
    fallbackLng: 'mn-MN',
    ns: ['translations'],
    defaultNS: 'translations',
    resources: {
      ['mn-MN']: { translations: translationGetters['mn-MN']() },
      ['en-US']: { translations: translationGetters['en-US']() },
    },
    interpolation: {
      escapeValue: false,
      formatSeparator: ',',
    },
    //updateMissing: false,
    //missingKeyNoValueFallbackToKey: true,
    // Missing keys render as their last dotted segment instead of the raw key.
    parseMissingKeyHandler: function (key) {
      return !key || typeof key !== 'string' ? '' : key.split('.').pop() + '';
    },
    //react: { wait: true },
  }).then(() => {
    // NOTE(review): resetting isInitialized to false right after init()
    // resolves is surprising — it would make the guard above re-run init on
    // a module reload.  Confirm this is intentional and not a bug.
    I8.isInitialized = false;
  });
}

/** Translate `key`, passing `options` straight through to i18next. */
const t = (key: string, options?: any): string => {
  return I8.t(key, options);
};

export { I8, t, changeLanguage };
import { ResponsiveValue, ResponsiveValueArray } from '../types';

/**
 * Collapse a breakpoint-indexed array of values into its minimal
 * ResponsiveValue form: a single plain value when possible, otherwise an
 * array in which runs of repeated values are thinned out.
 *
 * NOTE(review): the `normalizedResult[index - 1] = null` backfill appears to
 * mark "inherit from an earlier breakpoint" slots with null -- confirm this
 * matches the ResponsiveValue consumer's semantics before changing anything.
 */
function normalizeResponsiveValue<T>(
  responsiveValue: ReadonlyArray<T>
): ResponsiveValue<T> {
  const normalizedResult: ResponsiveValueArray<T | null> = [responsiveValue[0]];

  for (let index = 1; index < responsiveValue.length; index++) {
    const currentValue = responsiveValue[index];
    const previousValue = responsiveValue[index - 1];

    if (currentValue === previousValue) {
      const nextDefinedValue = findNextDefined(
        responsiveValue.slice(index),
        currentValue
      );

      // No different value follows: the trailing repetition can be dropped.
      if (currentValue === nextDefinedValue) {
        continue;
      }

      normalizedResult.push(currentValue);
      normalizedResult[index - 1] = null;
    } else {
      normalizedResult.push(currentValue);
    }
  }

  // A one-element, non-null result collapses to the bare value.
  if (normalizedResult.length === 1 && normalizedResult[0] !== null) {
    return normalizedResult[0];
  }

  return normalizedResult;
}

// Return the first element of *array* that differs from *lastDefined*, or
// *lastDefined* itself when every remaining element equals it.
function findNextDefined<T>(array: Array<T>, lastDefined: T): T {
  for (const value of array) {
    if (value !== lastDefined) {
      return value as T;
    }
  }

  return lastDefined;
}

export { normalizeResponsiveValue };
At the completion of Boston's season in April, Bruins centerman Patrice Bergeron revealed that he played through the 2016-17 campaign with a sports hernia issue that ended up requiring offseason surgery. Despite the injury, Bergeron produced respectable offensive numbers (21 goals, 32 assists) while earning his fourth Selke Trophy as the games' best defensive forward. And, following a summer of rehab, Bergeron finally feels healthy once again. "Still doing some rehab. I'll be ready for camp," said Bergeron, according to Matt Kalman of NHL.com. "I feel good. There's still some restriction there that we have to work on, but overall, I feel pretty good. I feel good on the ice, I feel good in the gym. "We still have to definitely do some treatments, but definitely I feel good and I know I'll be ready for camp." While Bergeron admits he still has work to do to get back to 100 percent, he was also open about how the injury affected his production last campaign. " ... It was hard that way, it was in my head also, but once I was past that hurdle of realizing it's going to be there for the whole year and just kind of not worrying about it, I felt better," Bergeron said. "But that being said, it's still one of those where - it's not an excuse for the slow start I had and missing some chances - but still it was slowing me down a bit and was just annoying." Regardless of last season's dip in offense, Bergeron is still one of the best two-way forwards in the game and an integral part of the Bruins' core. A healthy Bergeron to start the year can only mean good things for Boston. "Hopefully I'll have a better start," he said. "That being said, I'm trying to get back to my game, playing both ways. You always want to push your limits, be better offensively but also defensively and in every aspect of the game."
/**
 * Tests equals() for two filters that are expected to be equal.
 */
@Test
public void testEqualsTrue()
{
    FileExtensionFilter original = new FileExtensionFilter(DESC, "tst", "eq");
    TextResource description = TextResource.fromText(DESC.getPlainText());
    FileExtensionFilter copy =
            new FileExtensionFilter(description, original.getExtensions());
    JGuiraffeTestHelper.checkEquals(original, copy, true);
}
<filename>src/main/java/nl/mvdr/adventofcode/adventofcode2018/day03/SlicePart1.java package nl.mvdr.adventofcode.adventofcode2018.day03; import java.util.List; import java.util.Map; import java.util.Set; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Solution to the day 3 puzzle of 2018's Advent of Code: * <a href="https://adventofcode.com/2018/day/3">No Matter How You Slice It</a>. * * @author <NAME> */ public class SlicePart1 extends Slice<Long> { private static final Logger LOGGER = LoggerFactory.getLogger(SlicePart1.class); @Override protected Long solve(List<Claim> claims, Map<SquareInch, Set<Claim>> claimedFabric) { long result = claimedFabric.values().stream() .filter(cs -> 2 <= cs.size()) .count(); return Long.valueOf(result); } /** * Main method. * * @param args commandline arguments; these are ignored */ public static void main(String[] args) { SlicePart1 instance = new SlicePart1(); String result = instance.solve("input-day03-2018.txt"); LOGGER.info(result); } }
# coding: utf-8
# ------Matheus Barbosa de Freitas-----
# 30/11/2016
# Ported from Python 2 (raw_input / print statement) to Python 3; the
# algorithm and the I/O contract are unchanged.


def min_shovels(price, r):
    """Return the minimum number of shovels, each costing *price* burles,
    such that the total can be paid without change.

    Payment uses ten-burle coins plus exactly one coin of denomination *r*,
    so the total must end in the digit *r* or in 0.
    """
    last_digit = price % 10
    if last_digit == r:
        return 1
    if last_digit == 5:
        # Two shovels make the total end in 0.
        return 2
    count = 1
    total = last_digit
    # Keep adding shovels until the running total ends in r or 0. The loop
    # always terminates: after at most ten steps the last digit returns to 0.
    while total % 10 != r and total % 10 != 0:
        count += 1
        total += last_digit
    return count


def main():
    """Read "<price> <r>" from stdin and print the answer."""
    price, r = map(int, input().split())
    print(min_shovels(price, r))


if __name__ == '__main__':
    main()
/** * bnx2i_show_ccell_info - returns command cell (HQ) size * @dev: device pointer * @buf: buffer to return current SQ size parameter * * returns per-connection TCP history queue size parameter */ static ssize_t bnx2i_show_ccell_info(struct device *dev, struct device_attribute *attr, char *buf) { struct bnx2i_hba *hba = bnx2i_dev_to_hba(dev); return sprintf(buf, "0x%x\n", hba->num_ccell); }
<filename>aoc-2021/day10.py
# Advent of Code 2021, day 10: "Syntax Scoring".
# Part 1 scores the first illegal closing character of each corrupted line;
# part 2 scores the autocompletion of the remaining, incomplete lines.
import aoc
import numpy as np

# Worked example from the puzzle statement; immediately superseded by the
# real puzzle input below.
data = "[({(<(())[]>[[{[]{<()<>> [(()[<>])]({[<{<<[]>>( {([(<{}[<>[]}>{[]{[(<()> (((({<>}<{<{<>}{[]{[]{} [[<[([]))<([[{}[[()]]] [{[{({}]{}}([{[{{{}}([] {<[[]]>}<{[{[{[]{()[[[] [<(<(<(<{}))><([]([]() <{([([[(<>()){}]>(<<{{ <{([{{}}[<[[[<>{}]]]>[]]".split()
# NOTE(review): assumes aoc.get_data(10)[0] yields the list of input lines --
# confirm against the aoc helper module.
data = aoc.get_data(10)[0]

# Matching closer for every opening bracket.
pairs = {"(": ")", "{": "}", "[": "]", "<": ">"}
# Part 1: per-closing-character counts of illegal characters.
counters = {v: 0 for _, v in pairs.items()}
print(f"{counters}")

# A line is corrupted when a closing character does not match the most
# recently opened bracket (top of the stack).
corrupted = []
for line in data:
    stack = []
    for char in line:
        if char in pairs:
            stack.append(char)
        elif char != pairs[stack.pop()]:
            print(f"invalid : {char}")
            counters[char] += 1
            corrupted.append(line)
            break

# Part 1 scoring table from the puzzle statement.
points = {")": 3, "]": 57, "}": 1197, ">": 25137}
print(f"{counters}")
result = 0
for k, v in counters.items():
    result += v * points[k]
print(f"{result}")

# Part 2 considers only the incomplete (non-corrupted) lines.
for line in corrupted:
    data.remove(line)

# Build each line's completion: the closers for everything left open,
# innermost first.
comp = []
for line in data:
    stack = []
    completion = []
    for char in line:
        if char in pairs:
            stack.append(char)
        else:
            stack.pop()
    while stack:
        completion.append(pairs[stack.pop()])
    print(f"{completion}")
    comp.append(completion)

# Part 2 scoring: fold each completion into a base-5-style score and take
# the middle (median) score, as the puzzle guarantees an odd count.
points = {")": 1, "]": 2, "}": 3, ">": 4}
scores = []
for c in comp:
    score = 0
    for char in c:
        score *= 5
        score += points[char]
    print(f"{score}")
    scores.append(score)
print(f"{np.median(scores)}")
<filename>com.yasoon.jira/dialogs/renderer/fields/SingleTextField.ts<gh_stars>1-10 /// <reference path="../Field.ts" /> /// <reference path="../getter/GetTextValue.ts" /> /// <reference path="../setter/SetValue.ts" /> @getter(GetterType.Text) @setter(SetterType.Text) class SingleTextField extends Field { getDomValue(): string { return $('#' + this.id).val(); } hookEventHandler(): void { $('#' + this.id).change(e => this.triggerValueChange()); }; render(container: JQuery) { container.append($(`<input class="text long-field" id="${this.id}" name="${this.id}" type="text" />`)); }; }
package service_test import ( "errors" "github.com/containerssh/service" ) type testService struct { crash chan bool name string crashStartup bool } func (t *testService) String() string { return t.name } func (t *testService) RunWithLifecycle(lifecycle service.Lifecycle) error { if t.crashStartup { return errors.New("crash") } lifecycle.Running() ctx := lifecycle.Context() select { case <-ctx.Done(): lifecycle.Stopping() return nil case <-t.crash: return errors.New("crash") } } func (t *testService) CrashStartup() { t.crashStartup = true } func (t *testService) Crash() { select { case t.crash <- true: default: } } func newTestService(name string) *testService { return &testService{ name: name, crash: make(chan bool, 1), } }
import _ from "lodash";
import { ActionType, PendingDecisionType } from "../../../dto/enums";
import { ActionDto, PendingDecisionDto, SortableIncomeDto } from "../../../dto/interfaces";
import { ActionWorkflow } from "../action-workflow.base";
import { ActiveView, Command, CommonWorkflowStates } from "../types";

// Identifier of this workflow's single state.
const WaitingForSorting = 0;

// Server-sent decision asking the player how to order their pending power and
// power-token incomes.
export interface SortIncomesDecisionDto extends PendingDecisionDto {
	type: PendingDecisionType.SortIncomes;
	description: string;
	powerIncomes: SortableIncomeDto[];
	powerTokenIncomes: SortableIncomeDto[];
}

// Action sent back to the server carrying the chosen income order.
export interface SortIncomesActionDto extends ActionDto {
	Type: ActionType.SortIncomes;
	SortedIncomes: number[];
}

/**
 * Single-state workflow that shows the sort-incomes dialog and converts the
 * player's choice into a SortIncomesActionDto.
 *
 * NOTE(review): the constructor both passes `true` to the base class and then
 * calls init() itself -- confirm against ActionWorkflow's lifecycle that this
 * does not initialise twice.
 */
export class SortIncomesWorkflow extends ActionWorkflow {
	constructor(private readonly _powerIncomes: SortableIncomeDto[], private readonly _powerTokenIncomes: SortableIncomeDto[]) {
		super(null, true);
		this.init();
	}

	// All pending incomes of both kinds, power incomes first.
	get unsortedIncomes(): SortableIncomeDto[] {
		return [...this._powerIncomes, ...this._powerTokenIncomes];
	}

	// Builds the single WaitingForSorting state that drives the dialog view.
	protected init(): void {
		this.states = [
			{
				id: WaitingForSorting,
				message: "You must decide how to sort power and power token incomes",
				view: ActiveView.SortIncomesDialog,
				data: {
					powers: this._powerIncomes,
					powerTokens: this._powerTokenIncomes,
				},
			},
		];
		this.currentState = _.first(this.states)!;
	}

	// Translates the dialog's PERFORM_ACTION command (carrying the chosen
	// income ids) into the action DTO; any other command is a programming error.
	handleCommand(command: Command): ActionDto | null {
		if (this.stateId !== WaitingForSorting) {
			return null;
		}
		switch (command.nextState) {
			case CommonWorkflowStates.PERFORM_ACTION:
				const sortedIncomes = command.data as number[];
				const action: SortIncomesActionDto = {
					Type: ActionType.SortIncomes,
					SortedIncomes: sortedIncomes,
				};
				return action;
			default:
				throw new Error(`State ${command.nextState} not handled.`);
		}
	}
}
Rep. Joe Walsh (R-Ill.) held another lively town hall meeting this weekend, playing host to a constituent who accused President Barack Obama of "sedition" because he had allegedly lied to voters about his true political allegiances to "socialism, communism and Nazism." In a discussion about health care reform, a woman told Walsh that she believed the United States was losing its freedom because of elected officials who were falsifying their ideologies to get elected. “It is sedition. I mean, they did it underground. If they are honest brokers and they believe in what they’re saying and where they want this country to go, like Obama, then you’re right. He should have said it before he was elected, and said 'I’m a socialist, I believe in socialism, in communism, Nazism,' whatever, and say 'this is where I want to lead the country' -- not do it underhandedly,” she said. Walsh followed up on the constituent's claim without challenging it. “However you want to label or define it, don’t you think after three and a half years as a country now we have a really good idea where this president wants to go?” he said. "There are people in this audience -- it's fine -- who believe in this America we're living under right now. That's fine, that's a legitimate viewpoint. I will fight to my last breath against it, but I wish politicians on the other side would be honest about how they feel." Walsh has displayed a penchant for creating controversy at town halls in the past. Late last month, he told a gathering that Obama had gotten elected in 2008 because he was black. "He was a historic figure. He’s our first African-American president. The country voted for him because of that. It made us feel good about [ourselves]." Walsh said. "I’ve said it before, it helped that John McCain was about 142 years old. It helped that the economy was tanking. A lot of these things helped. But he never would have gotten there without his historic nature."
// FetchLastTxUntilHeight returns the last tx before a height, inclusive, of specified hash func (c *chainFetcher) FetchLastTxUntilHeight(txsha *wire.Hash, height uint64) (*wire.MsgTx, error) { rep, err := c.db.FetchTxBySha(txsha) if err != nil && err != storage.ErrNotFound && err != database.ErrTxShaMissing { return nil, err } for i := 1; i <= len(rep); i++ { if rep[len(rep)-i].Height <= height { return rep[len(rep)-i].Tx, nil } } return nil, nil }
// Critical values of Student's t distribution, indexed by significance level
// and degrees of freedom (array index = df - 1, for df = 1..100; values for
// df 31-99 repeat the nearest tabulated row, and df 100 is exact).
// NOTE(review): the keys look like "<alpha in percent>" for the two-tailed
// tables ('5', '1', '0.1') and the same with a trailing "1" for the
// one-tailed tables ('51', '11', '0.11') -- confirm this naming convention.
const tdistribution = {
    '5': [12.71, 4.303, 3.182, 2.776, 2.571, 2.447, 2.365, 2.306, 2.262, 2.228, 2.201, 2.179, 2.160, 2.145, 2.131, 2.120, 2.110, 2.101, 2.093, 2.086, 2.080, 2.074, 2.069, 2.064, 2.060, 2.056, 2.052, 2.048, 2.045, 2.042,
        2.042, 2.042, 2.042, 2.042, 2.042, 2.042, 2.042, 2.042, 2.042,
        2.021, 2.021, 2.021, 2.021, 2.021, 2.021, 2.021, 2.021, 2.021, 2.021,
        2.009, 2.009, 2.009, 2.009, 2.009, 2.009, 2.009, 2.009, 2.009, 2.009,
        2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000, 2.000,
        1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990, 1.990,
        1.984],
    '1': [63.66, 9.925, 5.841, 4.604, 4.032, 3.707, 3.499, 3.355, 3.250, 3.169, 3.106, 3.055, 3.012, 2.977, 2.947, 2.921, 2.898, 2.878, 2.861, 2.845, 2.831, 2.819, 2.807, 2.797, 2.787, 2.779, 2.771, 2.763, 2.756, 2.750,
        2.750, 2.750, 2.750, 2.750, 2.750, 2.750, 2.750, 2.750, 2.750,
        2.704, 2.704, 2.704, 2.704, 2.704, 2.704, 2.704, 2.704, 2.704, 2.704,
        2.678, 2.678, 2.678, 2.678, 2.678, 2.678, 2.678, 2.678, 2.678, 2.678,
        2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660, 2.660,
        2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639, 2.639,
        2.626],
    '0.1': [636.6, 31.60, 12.92, 8.610, 6.869, 5.959, 5.408, 5.041, 4.781, 4.587, 4.437, 4.318, 4.221, 4.140, 4.073, 4.015, 3.965, 3.922, 3.883, 3.850, 3.819, 3.792, 3.767, 3.745, 3.725, 3.707, 3.690, 3.674, 3.659, 3.646,
        3.646, 3.646, 3.646, 3.646, 3.646, 3.646, 3.646, 3.646, 3.646,
        3.551, 3.551, 3.551, 3.551, 3.551, 3.551, 3.551, 3.551, 3.551, 3.551,
        3.496, 3.496, 3.496, 3.496, 3.496, 3.496, 3.496, 3.496, 3.496, 3.496,
        3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460, 3.460,
        3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416, 3.416,
        3.390],
    '51': [6.314, 2.920, 2.353, 2.132, 2.015, 1.943, 1.895, 1.860, 1.833, 1.812, 1.796, 1.782, 1.771, 1.761, 1.753, 1.746, 1.740, 1.734, 1.729, 1.725, 1.721, 1.717, 1.714, 1.711, 1.708, 1.706, 1.703, 1.701, 1.699, 1.697,
        1.697, 1.697, 1.697, 1.697, 1.697, 1.697, 1.697, 1.697, 1.697,
        1.684, 1.684, 1.684, 1.684, 1.684, 1.684, 1.684, 1.684, 1.684, 1.684,
        1.676, 1.676, 1.676, 1.676, 1.676, 1.676, 1.676, 1.676, 1.676, 1.676,
        1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671, 1.671,
        1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664, 1.664,
        1.660],
    '11': [31.82, 6.965, 4.541, 3.747, 3.365, 3.143, 2.998, 2.896, 2.821, 2.764, 2.718, 2.681, 2.650, 2.624, 2.602, 2.583, 2.567, 2.552, 2.539, 2.528, 2.518, 2.508, 2.500, 2.492, 2.485, 2.479, 2.473, 2.467, 2.462, 2.457,
        2.457, 2.457, 2.457, 2.457, 2.457, 2.457, 2.457, 2.457, 2.457,
        2.423, 2.423, 2.423, 2.423, 2.423, 2.423, 2.423, 2.423, 2.423, 2.423,
        2.403, 2.403, 2.403, 2.403, 2.403, 2.403, 2.403, 2.403, 2.403, 2.403,
        2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390, 2.390,
        2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374, 2.374,
        2.364],
    '0.11': [318.3, 22.33, 10.21, 7.173, 5.893, 5.208, 4.785, 4.501, 4.297, 4.144, 4.025, 3.930, 3.852, 3.787, 3.733, 3.686, 3.646, 3.610, 3.579, 3.552, 3.527, 3.505, 3.485, 3.467, 3.450, 3.435, 3.421, 3.408, 3.396, 3.385,
        3.385, 3.385, 3.385, 3.385, 3.385, 3.385, 3.385, 3.385, 3.385,
        3.307, 3.307, 3.307, 3.307, 3.307, 3.307, 3.307, 3.307, 3.307, 3.307,
        3.261, 3.261, 3.261, 3.261, 3.261, 3.261, 3.261, 3.261, 3.261, 3.261,
        3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232, 3.232,
        3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195, 3.195,
        3.174]
}

// Basic descriptive-statistics and sample-size/hypothesis-test helpers.
export class Formulas {

    // Median of *arr*: middle element of the sorted copy, or the mean of the
    // two middle elements for an even-length array. The input is not mutated.
    static calculateMedian(arr: Array < number > ) {
        const mid = Math.floor(arr.length / 2),
            nums = [...arr].sort((a, b) => a - b);
        return arr.length % 2 !== 0 ? nums[mid] : (nums[mid - 1] + nums[mid]) / 2;
    }

    // Arithmetic mean of *arr*.
    static calculateArithmeticAverage(arr: Array < number > ) {
        return arr.reduce((prev, curr) => prev + curr) / arr.length;
    }

    // Most frequent value of *arr*.
    // NOTE(review): keys are stringified, so parseInt truncates non-integer
    // modes -- confirm the inputs are integers.
    static calculateMode(arr: Array < number > ) {
        let counted = arr.reduce((acc, curr) => {
            if (curr in acc) {
                acc[curr]++;
            } else {
                acc[curr] = 1;
            }
            return acc;
        }, {});
        let mode = Object.keys(counted).reduce((a, b) => counted[a] > counted[b] ? a : b);
        return parseInt(mode);
    }

    // Minimum sample size n = ceil((z * sigma / E)^2) for a known-variance
    // estimate; confidenceInterval is used here as the z critical value.
    static calculateMinimumSampleSize(confidenceInterval : number, StdOfDistribution: number, maxError: number) {
        return Math.ceil(Math.pow((confidenceInterval * StdOfDistribution / maxError), 2));
    }

    // Same as above but with the t critical value looked up from the table;
    // the index SampleSize - 2 selects df = SampleSize - 1.
    static calculateMinimumSampleSizeAverage(confidenceInterval: number, SampleSize: number, StdOfDistribution: number, maxError: number) {
        const t = tdistribution[confidenceInterval][SampleSize - 2];
        return Math.ceil(Math.pow((t * StdOfDistribution / maxError), 2));
    }

    // z test: returns true when |(x̄ - μ) / σ * sqrt(n)| does not exceed the
    // critical value passed in confidenceInterval (i.e. μ is not rejected).
    static calculateZValue(confidenceInterval: number, StdOfDistribution: number, SampleSize: number, AverageValue: number, ExpectedValue: number) {
        if (Math.abs((AverageValue - ExpectedValue) / StdOfDistribution * Math.sqrt(SampleSize)) > confidenceInterval) {
            return false;
        }
        return true;
    }

    // t test variant: same statistic compared against the tabulated t
    // critical value for df = SampleSize - 1.
    static calculateZValueAverage(confidenceInterval: number, StdOfDistribution: number, SampleSize: number, AverageValue: number, ExpectedValue: number) {
        const t = tdistribution[confidenceInterval][SampleSize - 2];
        if (Math.abs((AverageValue - ExpectedValue) / StdOfDistribution * Math.sqrt(SampleSize)) > t) {
            return false;
        }
        return true;
    }
}
FinTech FinTech Impact on Consumer Behavior – Mobile Payments Understanding the concept of FinTech and its importance to the entire world is just the first step along the way. For businesses, innovations in the FinTech world will change the way they do business forever. From a consumer point of view, new and innovative payment methods are being introduced now and then, most of which still rely on traditional financial infrastructure. In fact, consumers might be the biggest benefactors of FinTech innovation. Also read: Understanding FinTech – What is it And Why Do We Need It? FinTech Influences Consumer Behavior Everyday consumers want access to quick, convenient, and widely-accepted payment methods. Over the past few years, more and more countries have slowly started shifting to cashless transactions, relying on traditional payment methods such as bank and credit cards. For most consumers, this form of payment works perfectly fine, although they remain tied to one bank controlling their financial assets. If there is anything the financial crisis of 2008 has taught the world, it is that banks are selling nothing but empty promises and the illusion of trust. Ever since that time, people have started moving funds out of their bank account, albeit there weren’t too many alternatives available at that point. Thanks to the world of FinTech, these consumers now have access to various options. Smartphones and other mobile smart devices played a major role in this shift in consumerism. Accessing data and information are at everyone’s disposal, and only takes a few taps or swipes. Furthermore, various services have taken on a digital life of their own as well, including checking bank account balances, finding new company employees, and much more. As you would come to expect from the FinTech industry, however, most of the focus lies in bringing financial instruments to as many consumers as possible. 
While credit and bank card payments are a great tool for most purchases, there are still plenty of locations where these payment methods are not accepted. Owning a card payment terminal is subject to monthly fees and high transaction costs, making it less of a favorable option for small businesses. Coming up with additional forms of payments that are acceptable to both consumers and retailers is not an easy task. Mobile payments are on the rise all over the world, and the number of smart devices in circulation keeps increasing year over year. Several players have launched their mobile payment solution, such as Apple Pay and Samsung Pay. Mobile Payments Still Need A Lot of Work Despite the launch of these mobile payments as part of the FinTech ecosystem, there is still a lot of work to be done. Mobile payment solutions in existence today all rely on traditional financial solutions, such as bank accounts and credit and debit cards. However, none of the instruments is designed to be used outside of the traditional financial ecosystem, let alone the online or mobile space. One could argue how solutions such as Apple Pay reduce the overhead costs of processing credit and debit card payments. Nothing could be further from the truth, however, as this type of mobile payments is equally expensive compared to processing these cards on a regular basis. A portion of the regular fees goes to Apple, but the same percentages still apply. Furthermore, mobile payment solutions are still not widely embraced by either merchants or consumers. There is still a lot of work to be done to make this form of payment more appealing in the next few years. But this is also the interesting aspect of FinTech; there are so many brilliant developers in this space who can make innovation happen. What are your thoughts on the FinTech industry to date? Is there anything you would like to see change? Let us know in the comments below. Images courtesy of Research EA, Shutterstock, Payment Law Advisor
/**
 * Resource builder responsible for creating and opening an AmqpConnection instance.
 */
public class AmqpConnectionBuilder extends AmqpResourceBuilder<AmqpConnection, AmqpProvider, JmsConnectionInfo, Connection> {

    private static final Logger LOG = LoggerFactory.getLogger(AmqpConnectionBuilder.class);

    public AmqpConnectionBuilder(AmqpProvider parent, JmsConnectionInfo resourceInfo) {
        super(parent, resourceInfo);
    }

    /**
     * Opens the connection and then an internal connection session before
     * signalling the caller's request, so that the connection is fully usable
     * once the original AsyncResult completes. Failures of either stage are
     * propagated to the caller's onFailure.
     */
    @Override
    public void buildResource(final AsyncResult request) {
        AsyncResult connectionRequest = new AsyncResult() {
            @Override
            public void onSuccess() {
                // Create a Session for this connection that is used for Temporary Destinations
                // and perhaps later on management and advisory monitoring.
                JmsSessionInfo sessionInfo = new JmsSessionInfo(getResourceInfo(), -1);
                sessionInfo.setAcknowledgementMode(Session.AUTO_ACKNOWLEDGE);

                final AmqpConnectionSessionBuilder builder = new AmqpConnectionSessionBuilder(getResource(), sessionInfo);
                builder.buildResource(new AsyncResult() {
                    @Override
                    public boolean isComplete() {
                        return builder.getResource().isOpen();
                    }

                    @Override
                    public void onSuccess() {
                        LOG.debug("{} is now open: ", getResource());
                        request.onSuccess();
                    }

                    @Override
                    public void onFailure(Throwable result) {
                        LOG.debug("AMQP Connection Session failed to open.");
                        request.onFailure(result);
                    }
                });
            }

            @Override
            public void onFailure(Throwable result) {
                request.onFailure(result);
            }

            @Override
            public boolean isComplete() {
                return getResource().isOpen();
            }
        };

        super.buildResource(connectionRequest);
    }

    /**
     * Configures the Proton Connection endpoint: hostname, container id,
     * connection properties and desired capabilities.
     */
    @Override
    protected Connection createEndpoint(JmsConnectionInfo resourceInfo) {
        // A configured vhost wins over the URI host; an explicitly empty
        // vhost suppresses the hostname field entirely.
        String hostname = getParent().getVhost();
        if (hostname == null) {
            hostname = getParent().getRemoteURI().getHost();
        } else if (hostname.isEmpty()) {
            hostname = null;
        }

        Map<Symbol, Object> props = new LinkedHashMap<Symbol, Object>();
        props.put(AmqpSupport.PRODUCT, MetaDataSupport.PROVIDER_NAME);
        props.put(AmqpSupport.VERSION, MetaDataSupport.PROVIDER_VERSION);
        props.put(AmqpSupport.PLATFORM, MetaDataSupport.PLATFORM_DETAILS);

        Connection connection = getParent().getProtonConnection();
        connection.setHostname(hostname);
        connection.setContainer(resourceInfo.getClientId());
        connection.setDesiredCapabilities(new Symbol[] { SOLE_CONNECTION_CAPABILITY });
        connection.setProperties(props);

        return connection;
    }

    @Override
    protected AmqpConnection createResource(AmqpProvider parent, JmsConnectionInfo resourceInfo, Connection endpoint) {
        return new AmqpConnection(parent, resourceInfo, endpoint);
    }

    @Override
    protected void afterOpened() {
        // Initialize the connection properties so that the state of the remote can
        // be determined, this allows us to check for close pending.
        getResource().getProperties().initialize(
            getEndpoint().getRemoteOfferedCapabilities(),
            getEndpoint().getRemoteProperties());
    }

    @Override
    protected boolean isClosePending() {
        return getResource().getProperties().isConnectionOpenFailed();
    }

    /** Connection open uses the provider's connect timeout, not the request timeout. */
    @Override
    protected long getRequestTimeout() {
        return getParent().getProvider().getConnectTimeout();
    }
}
##__________________________________________________________________
##
##  Author: GogolGrind
##__________________________________________________________________
# Review changes: removed unused wildcard imports (`from sys/math import *`),
# made solve() take the grid as a parameter instead of reading stdin (now
# unit-testable), and dropped the dead single-iteration test-case loop.
# The stdin/stdout behaviour of the script is unchanged.


def solve(grid):
    """Return True when the n x n *grid* (sequence of strings of '.'/'#')
    can be fully covered by '+'-shaped stamps of arm length 1.

    Greedy row-major scan: whenever a cell and its four neighbours at
    distance 1 are all '#', stamp all five cells (mark them '*').  The grid
    is coverable iff no '#' survives the scan.
    """
    n = len(grid)
    field = [list(row) for row in grid]
    k = 1  # arm length of the '+' stamp
    for i in range(n):
        for j in range(n):
            if field[i][j] != '#':
                continue
            inside = i - k >= 0 and j - k >= 0 and i + k < n and j + k < n
            if inside and (field[i + k][j] == '#' and field[i][j + k] == '#'
                           and field[i - k][j] == '#' and field[i][j - k] == '#'):
                field[i + k][j] = field[i - k][j] = '*'
                field[i][j + k] = field[i][j - k] = '*'
                field[i][j] = '*'
    return not any(cell == '#' for row in field for cell in row)


def main():
    """Read n and the n grid rows from stdin, print YES/NO."""
    n = int(input())
    grid = [input() for _ in range(n)]
    print('YES' if solve(grid) else 'NO')


if __name__ == '__main__':
    main()
def evaluate_p(self, X, best_p, p_range=(0, 1), sample_density=100):
    """Mean squared error between *best_p* and the value that
    ``estimate_best_p`` recovers from ``X``.

    ``p_range`` and ``sample_density`` are forwarded unchanged to
    ``estimate_best_p``; see that method for their meaning.
    """
    estimated_p = self.estimate_best_p(X, p_range, sample_density)
    squared_error = (best_p - estimated_p) ** 2
    return np.mean(squared_error)
<filename>3.JavaCollections/task26/task2613/exception/NotEnoughMoneyException.java
package com.javarush.task.task26.task2613.exception;

/**
 * Checked exception signalling that a cash-machine operation could not be
 * completed because not enough money was available.
 */
public class NotEnoughMoneyException extends Exception {
}
#pragma once #include <string> #include <vector> #include <typeindex> #include <variant> #include "Export.h" namespace drea::core { using OptionValue = std::variant<std::monostate,bool,int,double,std::string>; struct DREA_CORE_API Option { enum class Scope { Both, File, Line, None }; std::string mName; std::string mParamName; std::string mDescription; std::vector<OptionValue> mValues = {}; std::type_index mType = typeid( std::string ); Scope mScope = Scope::Both; int mNbParams = 1; std::string mShortVersion = ""; static const int mUnlimitedParams = 0xfffffffa; [[nodiscard]] int numberOfParams() const { if( mParamName.empty() ){ return 0; }else{ return mNbParams; } } [[nodiscard]] std::string toString( const OptionValue & val ) const; [[nodiscard]] OptionValue fromString( const std::string & val ) const; [[nodiscard]] bool helpInLine() const { return mScope == Scope::Both || mScope == Scope::Line; } [[nodiscard]] bool helpInFileOnly() const { return mScope == Scope::File; } }; }
// Search searches and update lights for some time using SSDP and // fills the map with new lights found indexed by its ID. lightfound // is called with the newly found light, usually to start listening it func Search(time int, localAddr string, lights map[string]*Light, lightfound func(light *Light)) error { err := ssdp.SetMulticastSendAddrIPv4(mcastAddress) if err != nil { return err } list, err := ssdp.Search(searchType, time, localAddr) if err != nil { return err } for _, srv := range list { light, err := Parse(srv.Header()) if err != nil { log.Errorf("Invalid response from %s: %s", srv.Location, err) return err } if lights[light.ID] == nil { light.Status = SSDP lights[light.ID] = light if lightfound != nil { lightfound(light) } } } return nil }
/*
 * HashTable::KeyToIndex
 *
 * Hashes the URL <string> into a bucket index for the hash table
 * (multiplicative hash: result = result * 9 + c per character, masked to
 * NUM_BUCKETS, which must be a power of two).
 * Used before lookup(), insert() and remove().
 */
unsigned int HashTable::KeyToIndex(register char *string)
{
    register unsigned int result = 0;

    for (register int c = *string; c != 0; c = *++string) {
        result = (result << 3) + result + c;
    }

    return result & (NUM_BUCKETS - 1);
}
Very compact connector for coupling optical fibers to high-power laser

This work presents the description of a very compact optical connector for coupling high-power laser radiation (Nd:YAG) to optical fibers. GRIN-rod lenses have been used because they have several characteristics (such as small dimensions, plane and parallel faces with focus near the exit face) which make them suitable for fiber components. A special coupling unit to connect the microlens with the fiber has been designed, thus obtaining a very compact and stable element. Furthermore, the insertion of a conventional prefocusing lens at the laser exit has been provided, in order to obtain better coupling efficiency and to increase misalignment tolerances of the GRIN rod-fiber component. A theoretical analysis by means of an optical design program, together with the description of the experimental characterization of the coupling system, is reported.
import { autoinject } from 'aurelia-framework';
import { App } from '../../app';
import { IPrinter } from '../../printer';

// View-model for the "edit bed" screen: shows bed state/temperatures and
// forwards user actions to the printer. Field names are bound from the
// Aurelia template, so they must not be renamed.
@autoinject
export class ScreenEditBed {

    // Controls visibility of this screen (bound in the template).
    isHidden: boolean = true;

    // Snapshot of the printer's bed status, refreshed by updateStatus().
    private state: number;
    private tempActive: number;
    private tempStandby: number;
    private autoBedLevelingEnabled: boolean;

    constructor(
        private app: App,
        private printer: IPrinter
    ) { }

    // Pull the current bed values from the printer's view model.
    public updateStatus() {
        this.state = this.printer.view.bedState;
        this.tempActive = this.printer.view.bedActiveTemp;
        this.tempStandby = this.printer.view.bedStandbyTemp;
        // "none" (case-insensitive) means no bed compensation mesh is active.
        this.autoBedLevelingEnabled = this.printer.view.bedCompensation.toLowerCase() != 'none';
    }

    clickedActive() {
        this.printer.activateBed();
        //this.isHidden = true;
    }

    clickedStandby() {
        this.printer.standbyBed();
        //this.isHidden = true;
    }

    clickedOff() {
        this.printer.switchOffBed();
        //this.isHidden = true;
    }

    // Open the numeric editor for the active bed temperature.
    editTempActive() {
        this.app.editValue('Bed Temperature °C', this.tempActive, (value: number) => this.updateTempActive(value));
    }

    // NOTE(review): editing the standby temperature is not implemented yet.
    editTempStandby() {

    }

    // Apply a newly entered active temperature by (re)activating the bed.
    updateTempActive(value: number) {
        this.printer.activateBed(value);
        //this.isHidden = true;
    }

    // Switch to the bed-mesh screen after loading the current height map.
    clickedBedMesh() {
        this.app.screenEditBedMesh.loadHeightMap();
        this.app.screenEditBedMesh.isHidden = false;
    }
}
#ifndef INTROSORT_HPP #define INTROSORT_HPP 1 #include <algorithm> #include <cmath> #include <iterator> #include "heapsort.hpp" #include "insertionSort.hpp" #include "quicksort.hpp" namespace Introsort { const size_t SORT_THRESHOLD = 16; template<class BidirIt, class Compare, typename = std::enable_if_t<std::is_base_of<std::bidirectional_iterator_tag, typename std::iterator_traits<BidirIt>::iterator_category>::value>> //std::distance and std::find_if_not require the passed iterator to be an InputIterator //std::rotate requires the passed iterator to be an ForwardIterator //Heapsort::sort, std::reverse_iterator, and Quicksort::Partition require the passed iterator to be an BidirectionalIterator void SortLimitedDepth(BidirIt begin, BidirIt end, int depthLimit, Compare Comp) { //This method uses a while loop to continually sort the // left half of the list and recursively sort the right, // rather than recursively sorting both while (static_cast<size_t>(std::distance(begin,end)) > SORT_THRESHOLD) { //While the length of our list is greater than the specified threshold if (depthLimit == 0) { //Hit our depth limit(pivot selection was too unbalanced), do heapsort now Heapsort::Sort(begin, end, Comp); return; } --depthLimit; //Didnt hit the depth limit yet, continue quicksort BidirIt partition = Quicksort::Partition(begin, end, Comp); SortLimitedDepth(partition, end, depthLimit, Comp); end = partition; } } template<class BidirIt, class Compare = std::less<>, typename = std::enable_if_t<std::is_base_of<std::bidirectional_iterator_tag, typename std::iterator_traits<BidirIt>::iterator_category>::value>> //SortLimitedDepth and InsertionSort::Sort require the passed iterator to be an BidirectionalIterator void Sort(BidirIt begin, BidirIt end, Compare Comp = Compare()) { size_t length = std::distance(begin,end); if (length < 2) { return; } int maxDepth = 2 * static_cast<int>(log2(length)); //Use Quicksort until it's repeatedly too unbalanced // (maxDepth prevents too many 
recursions) SortLimitedDepth(begin, end, maxDepth, Comp); //This leaves unsorted chunks of no more than size 'SORT_THRESHOLD', // insertion sort should do well here InsertionSort::Sort(begin, end, Comp); } } //namespace Introsort #endif //INTROSORT_HPP
TEL AVIV – The sister of a Palestinian terrorist who plowed his truck into a group of soldiers in Jerusalem on Sunday, killing four and wounding 16, said the family was “thankful” for his “most beautiful martyrdom.” “Praise be to Allah that he became a martyr. It is the most beautiful kind of saintly death,” Fadi al Qanbar’s sister Shadia, 28, told Palestinian media. “Allah chose him for this martyrdom. Thank God. We are patient and we thank Allah for this,” she added. Shadia said her brother was not a member of any terror group and had acted on his own. She added that Qanbar, a father of four, was devoutly religious. Qanbar rammed his truck into a crowd of cadets and officers who were touring the promenade overlooking the Old City of Jerusalem in the southern neighborhood of Armon Hanatziv. According to eyewitnesses, after the terrorist ran his flatbed truck over the soldiers, he reversed over them a second time. All four of the soldiers who died were in their twenties, and three of them were women. Gaza-based terror group Hamas on Sunday praised the attack as a “heroic” act and encouraged other Palestinians to do the same and “escalate the resistance.” In the coastal enclave, Palestinians were photographed giving out sweets to passing drivers in celebration of the attack. Hamas spokesman Abdul-Latif Qanou said the truck-ramming attack showed that the wave of Palestinian violent “resistance” against Israel has not ceased. “It may be quiet, it may linger, but it will never end,” he said. At least nine people have been arrested in connection with the attack, mostly members of the Qanbar family. Prime Minister Benjamin Netanyahu said there are signs Qanbar may have supported the Islamic State. He also suggested a connection between Sunday’s attack and the truck rammings in Berlin and Nice last year.
/// Converts the RunnerArgs to a run.py command line invocation. fn as_cmd(&'a self) -> Vec<String> { use std::ops::Add; // Add features for build let kernel_features = String::from(self.kernel_features.join(",")); let user_features = String::from(self.user_features.join(",")); let log_level = match std::env::var("RUST_LOG") { Ok(lvl) if lvl == "debug" => "debug", Ok(lvl) if lvl == "trace" => "trace", Ok(lvl) if lvl == "warn" => "warn", Ok(lvl) if lvl == "error" => "error", Ok(lvl) if lvl == "info" => "info", _ => "info", }; let mut cmd = vec![ String::from("run.py"), String::from("--kfeatures"), kernel_features, String::from("--cmd"), format!("log={} {}", log_level, self.cmd.unwrap_or("")), String::from("--nic"), String::from(self.nic), ]; if !self.mods.is_empty() { cmd.push("--mods".to_string()); cmd.push(self.mods.join(" ")); } match self.user_features.is_empty() { false => { cmd.push(String::from("--ufeatures")); cmd.push(user_features); } true => {} }; if self.release { cmd.push(String::from("--release")); } match &self.machine { Machine::Qemu => { cmd.push(String::from("--qemu-cores")); cmd.push(format!("{}", self.cores)); cmd.push(String::from("--qemu-nodes")); cmd.push(format!("{}", self.nodes)); cmd.push(String::from("--qemu-memory")); cmd.push(format!("{}", self.memory)); if self.setaffinity { cmd.push(String::from("--qemu-affinity")); } if self.prealloc { cmd.push(String::from("--qemu-prealloc")); } // Form arguments for QEMU let mut qemu_args: Vec<String> = self.qemu_args.iter().map(|arg| arg.to_string()).collect(); if !qemu_args.is_empty() { cmd.push(format!("--qemu-settings={}", qemu_args.join(" "))); } } Machine::Baremetal(mname) => { cmd.push(format!("--machine={}", mname)); } } // Don't run qemu, just build? match self.norun { false => {} true => { cmd.push(String::from("--norun")); } }; cmd }
/** * Table renderer based on the core PagedTable widget. */ @ApplicationScoped @Named(SelectorRenderer.UUID + "_renderer") public class SelectorRenderer extends AbstractRendererLibrary { public static final String UUID = "Standard"; @PostConstruct private void init() { RendererLibLocator.get().registerRenderer(DisplayerType.SELECTOR, UUID, true); } @Override public String getUUID() { return UUID; } @Override public Displayer lookupDisplayer(DisplayerSettings displayerSettings) { DisplayerType type = displayerSettings.getType(); if (DisplayerType.SELECTOR.equals(type)) return new SelectorDisplayer(); return null; } }
package main.java.edu.stonybrook.cs.frame;

import java.util.*;
import it.uniroma1.lcl.babelnet.BabelSynset;
import main.java.edu.stonybrook.cs.correction.SynsetOverride;
import main.java.edu.stonybrook.cs.utils.BabelNetConnector;
import main.java.edu.stonybrook.cs.utils.BabelNetShareResource;
import main.java.edu.stonybrook.cs.computation.RoleToFillerThread;
import main.java.edu.stonybrook.cs.computation.FillerToRoleThread;

/**
 * This class represents a (role, role-filler) relation. This relation is able to create computation threads to get the
 * score of each (role synset, role-filler synset) pair and give a sorted list of role-filler synsets by score.
 */
public class Role {
    private final String roleName; // role name
    private final String[] roleNameSynsetIDs; // the synsets of the role
    private String roleNameSynsetGloss; // the main gloss of the role
    private String filler; // role-filler
    private String fillerPOS; // the part-of-speech of the role-filler
    private String fillerIndex; // token index of the role-filler in the sentence (set by setRoleFiller)
    private String fillerClass; // NER class of the role-filler, e.g. "person", "gpe", "org", "norp"
    private String fillerQuant; // the quantity of the role-filler
    private String fillerBestSynsetID; // the currently selected role-filler synsetID
    private String fillerBestSynsetGloss; // the gloss of the currently selected role-filler synset
    private Double fillerBestSynsetScore; // the score of the currently selected role-filler synset
    private String pairKey; // key of (role, filler), "role-filler:{synsets of the role}", this is needed because we want to know if the threads brought by this pair is added
    private List<String> fillerSynsetIDs; // synsets of the role-filler, it is a sorted list by score
    private List<String> fillerSynsetGlosses; // glosses of the role-filler, sorted by score in step with fillerSynsetIDs
    private List<ScoreTriplet> fillerScoreTriplets; // (synsetID, gloss, score)
    private List<ScoreTriplet> exactMatchFillerScoreTriplets; // triplets whose synset also belongs to the role itself
    private String roleDataType; // data type of the role, e.g. "Integer" or "Currency"
    private Hashtable<String,String> synsetIdGlossMap = new Hashtable<String,String>(); // table storing (role-filler synsetID, gloss)
    private Hashtable<String,Double> synsetIdScoreMap = new Hashtable<String,Double>(); // table storing (role-filler synsetID, score)
    private Hashtable<String,String> synsetIdPathMap = new Hashtable<String,String>(); // table storing (role-filler synsetID, path in string)
    private HashSet<String> roleNameSynsetIDSet = new HashSet<String>(); // set storing (role name synsetIDs)
    private ArrayList<String> prohibitedEdgeList; // the role cannot be reached by the edges of such type
    private boolean hasExactMatch; // true when a role-filler synset coincides with a role synset
    private final double defaulScore = 1.2; // score assigned to Integer/Currency fillers without search
    private final HashSet<String> personPronouns = new HashSet<String>(){{ add("i"); add("you"); add("he"); add("she"); add("we"); }}; // pronouns treated as persons
    private final HashSet<String> entityPronouns = new HashSet<String>(){{ add("they"); add("it"); add("this"); add("that"); add("these"); add("those"); }}; // pronouns treated as generic entities

    /** Plain (synsetID, gloss, score) record for one candidate role-filler synset. */
    public static class ScoreTriplet {
        public String fillerSynsetID = null;
        public String fillerGloss = null;
        public double score = 0.0;
    }

    /**
     * Constructs a (role, role-filler) relation; called by LoadFrame in FrameExtractor.
     *
     * @param roleName          role name
     * @param roleNameSynsetIDs the synsets of the role name
     */
    public Role(String roleName, String[] roleNameSynsetIDs) // role name and the synsets of the role name
    {
        this.roleName = roleName;
        this.roleNameSynsetIDs = roleNameSynsetIDs;
        this.filler = null;
        this.fillerPOS = null;
        this.fillerIndex = null;
        this.fillerClass = null;
        this.fillerQuant = null;
        this.fillerBestSynsetID = null;
        this.fillerBestSynsetGloss = null;
        this.fillerBestSynsetScore = 0.0;
        this.fillerSynsetIDs = new ArrayList<String>();
        this.fillerSynsetGlosses = new ArrayList<String>();
        // The special "Time" role never queries BabelNet for a gloss.
        if (!roleName.equals("Time"))
        {
            BabelSynset synset = BabelNetConnector.getSynsetById(roleNameSynsetIDs[0]); // get the first synset of the role
            this.roleNameSynsetGloss = BabelNetConnector.getMainGloss(synset); // get the gloss of the first synset
        }
        this.roleDataType = null;
        this.pairKey = null;
        this.fillerScoreTriplets = new ArrayList<ScoreTriplet>();
        this.exactMatchFillerScoreTriplets = new ArrayList<ScoreTriplet>();
        this.prohibitedEdgeList = null;
        this.hasExactMatch = false;
        Collections.addAll(roleNameSynsetIDSet, roleNameSynsetIDs);
    }

    /**
     * Default display of the dropdown menu, the first synset of the role-filler with the highest score since
     * the list is sorted.
     */
    public void setBestResult()
    {
        if(fillerSynsetIDs.size() > 0)
        {
            this.fillerBestSynsetID = fillerSynsetIDs.get(0);
            this.fillerBestSynsetGloss = fillerSynsetGlosses.get(0);
            this.fillerBestSynsetScore = synsetIdScoreMap.get(fillerBestSynsetID);
        }
    }

    /**
     * @param FEValSynsetId the selected synset Id in the dropdown menu
     * If the synset Id is changed, the relevant data to be showcased changes accordingly.
     */
    public void changeFEValSynsetId(String FEValSynsetId)
    {
        this.fillerBestSynsetID = FEValSynsetId;
        this.fillerBestSynsetGloss = synsetIdGlossMap.get(FEValSynsetId);
        this.fillerBestSynsetScore = synsetIdScoreMap.get(FEValSynsetId);
    }

    /**
     * @return a list of threads (or null when no search is needed / possible)
     * This method creates computation threads of the bidirectional search. Multiple roles vs multiple role-fillers,
     * each (role, role-filler) and (role-filler, role) is a thread.
     */
    public List<Thread> createSematicScoreComputationThreads()
    {
        // "Time", "Integer" and "Currency" roles are resolved directly with a
        // fixed pseudo-synset instead of launching a BabelNet search.
        if (roleName.equals("Time"))
        {
            fillerSynsetIDs.add("time");
            fillerSynsetGlosses.add("a time value");
            synsetIdScoreMap.put("time", 1.0);
            synsetIdGlossMap.put("time", "a time value");
            synsetIdPathMap.put("time", "");
            return null;
        }
        if (roleDataType != null && roleDataType.equals("Integer"))
        {
            fillerSynsetIDs.add("integer");
            fillerSynsetGlosses.add("an integer");
            synsetIdScoreMap.put("integer", defaulScore);
            synsetIdGlossMap.put("integer", "an integer");
            synsetIdPathMap.put("integer", "");
            return null;
        }
        if (roleDataType != null && roleDataType.equals("Currency"))
        {
            fillerSynsetIDs.add("currency");
            fillerSynsetGlosses.add("some currency");
            synsetIdScoreMap.put("currency", defaulScore);
            synsetIdGlossMap.put("currency", "some currency");
            synsetIdPathMap.put("currency", "");
            return null;
        }
        List<BabelSynset> fillerSynsets = BabelNetConnector.getBabelNetSynsetsByWord(filler, fillerPOS); // get all the synsets of a role-filler
        // Count how many synsets survive the manual override blacklist.
        int synsetSize = 0;
        for (BabelSynset synset : fillerSynsets)
        {
            String synsetId = synset.getId().getID();
            if (SynsetOverride.isSynsetOverridden(filler + "-" + synsetId)) continue;
            synsetSize++;
        }
        String changedFiller = filler;
        String[] splitFiller = filler.split(" ");
        if (synsetSize == 0)
        {
            // Fallback 1: strip a leading article ("a"/"the") and retry.
            if (splitFiller[0].equals("a") || splitFiller[0].equals("the"))
            {
                changedFiller = String.join(" ", Arrays.copyOfRange(splitFiller, 1, splitFiller.length));
                fillerSynsets = BabelNetConnector.getBabelNetSynsetsByWord(changedFiller, fillerPOS);
                for (BabelSynset synset : fillerSynsets)
                {
                    String synsetId = synset.getId().getID();
                    if (SynsetOverride.isSynsetOverridden(changedFiller + "-" + synsetId)) continue;
                    synsetSize++;
                }
            }
            // Fallback 2: query only the head (last) word of the phrase.
            if (synsetSize == 0)
            {
                changedFiller = splitFiller[splitFiller.length-1];
                fillerSynsets = BabelNetConnector.getBabelNetSynsetsByWord(changedFiller, fillerPOS);
            }
        }
        for (BabelSynset synset : fillerSynsets) // iterate every synset of the role-filler, rewrite glosses
        {
            String synsetId = synset.getId().getID();
            String mainGloss = BabelNetConnector.getMainGloss(synset); // get the main gloss of a synset, only one
            if (SynsetOverride.isSynsetOverridden(changedFiller + "-" + synsetId)) continue; // if word-synset relation is overridden, check next synset
            if (mainGloss != null && mainGloss.length() != 0) // if this synset has a main gloss
            {
                if (synset.isKeyConcept()) // if a synset is a key concept (something related to FrameNet according to BabelNet.org)
                {
                    if (fillerPOS.equals("propn")) continue;// and POS is a name, check next synset
                }
                else if (synset.getSynsetType().name().equals("NAMED_ENTITY")) // the synset means a name
                {
                    if (!fillerPOS.equals("propn") && !filler.equals("cooking school" ) && !filler.equals("us intelligence")
                            && !filler.equals("us history") && !filler.equals("us election") && !filler.equals("kyoto protocol"))
                        continue; // but POS is not a name (stanza conflicts with babelnet), check next synset
                }
            }
            // NOTE(review): this put is reached even when mainGloss is null or empty;
            // Hashtable.put would throw NPE on a null value — confirm glosses are
            // always non-null for synsets that reach this point.
            synsetIdGlossMap.put(synsetId, mainGloss); // generate synset-gloss table
            ScoreTriplet fillerScoreTriplet = new ScoreTriplet(); // which is an inner static class
            fillerScoreTriplet.fillerSynsetID = synsetId;
            fillerScoreTriplet.fillerGloss = mainGloss;
            fillerScoreTriplets.add(fillerScoreTriplet); // Tuple(synsetID, gloss, score)
        }
        // Inject hand-picked candidate synsets for pronouns and NER classes.
        if (entityPronouns.contains(filler))
        {
            ScoreTriplet fillerScoreTriplet = new ScoreTriplet();
            fillerScoreTriplet.fillerSynsetID = "bn:00031027n";
            fillerScoreTriplet.fillerGloss = "That which is perceived or known or inferred to have its own distinct existence (living or nonliving)";
            synsetIdGlossMap.put(fillerScoreTriplet.fillerSynsetID, fillerScoreTriplet.fillerGloss);
            fillerScoreTriplets.add(fillerScoreTriplet);
        }
        if (fillerClass.equals("person") || personPronouns.contains(filler))
        {
            ScoreTriplet fillerScoreTriplet = new ScoreTriplet();
            fillerScoreTriplet.fillerSynsetID = "bn:00046516n";
            fillerScoreTriplet.fillerGloss = "A human being";
            synsetIdGlossMap.put(fillerScoreTriplet.fillerSynsetID, fillerScoreTriplet.fillerGloss);
            fillerScoreTriplets.add(fillerScoreTriplet);
        }
        if (fillerClass.equals("gpe"))
        {
            ScoreTriplet fillerScoreTriplet = new ScoreTriplet();
            fillerScoreTriplet.fillerSynsetID = "bn:00062699n";
            fillerScoreTriplet.fillerGloss = "A point located with respect to surface features of some region";
            synsetIdGlossMap.put(fillerScoreTriplet.fillerSynsetID, fillerScoreTriplet.fillerGloss);
            fillerScoreTriplets.add(fillerScoreTriplet);
            fillerScoreTriplet = new ScoreTriplet();
            fillerScoreTriplet.fillerSynsetID = "bn:00066884n";
            fillerScoreTriplet.fillerGloss = "A large indefinite location on the surface of the Earth";
            synsetIdGlossMap.put(fillerScoreTriplet.fillerSynsetID, fillerScoreTriplet.fillerGloss);
            fillerScoreTriplets.add(fillerScoreTriplet);
            fillerScoreTriplet = new ScoreTriplet();
            fillerScoreTriplet.fillerSynsetID = "bn:00051760n";
            fillerScoreTriplet.fillerGloss = "A point or extent in space";
            synsetIdGlossMap.put(fillerScoreTriplet.fillerSynsetID, fillerScoreTriplet.fillerGloss);
            fillerScoreTriplets.add(fillerScoreTriplet);
        }
        if (fillerClass.equals("work_of_art"))
        {
            ScoreTriplet fillerScoreTriplet = new ScoreTriplet();
            fillerScoreTriplet.fillerSynsetID = "bn:00077409n";
            fillerScoreTriplet.fillerGloss = "The name of a work of art or literary composition etc.";
            synsetIdGlossMap.put(fillerScoreTriplet.fillerSynsetID, fillerScoreTriplet.fillerGloss);
            fillerScoreTriplets.add(fillerScoreTriplet);
        }
        if (fillerClass.equals("org"))
        {
            ScoreTriplet fillerScoreTriplet = new ScoreTriplet();
            fillerScoreTriplet.fillerSynsetID = "bn:00059480n";
            fillerScoreTriplet.fillerGloss = "A group of people who work together";
            synsetIdGlossMap.put(fillerScoreTriplet.fillerSynsetID, fillerScoreTriplet.fillerGloss);
            fillerScoreTriplets.add(fillerScoreTriplet);
        }
        if (fillerClass.equals("norp"))
        {
            ScoreTriplet fillerScoreTriplet = new ScoreTriplet();
            fillerScoreTriplet.fillerSynsetID = "bn:00056964n";
            fillerScoreTriplet.fillerGloss = "The status of belonging to a particular nation by birth or naturalization";
            synsetIdGlossMap.put(fillerScoreTriplet.fillerSynsetID, fillerScoreTriplet.fillerGloss);
            fillerScoreTriplets.add(fillerScoreTriplet);
        }
        removeExactMatch(); // does not add threads for exact matches since we manually assign them scores
        if (fillerScoreTriplets.size() == 0)
        {
            // No candidates at all: record a sentinel "not found" entry unless the
            // filler matched a role synset exactly (handled via setExactMatchResult).
            if (!hasExactMatch)
            {
                fillerSynsetIDs.add(filler);
                fillerSynsetGlosses.add("not found");
                synsetIdScoreMap.put(filler, 0.01);
                synsetIdGlossMap.put(filler, "not found");
                synsetIdPathMap.put(filler, "");
            }
            return null;
        }
        BabelNetShareResource.clear();
        List<Thread> threadPool = new ArrayList<Thread>(); // list for threads
        String[] fillerSIDs = new String[fillerScoreTriplets.size()];
        for (int i = 0; i < fillerSIDs.length; i++)
        {
            fillerSIDs[i] = fillerScoreTriplets.get(i).fillerSynsetID;
        }
        for (String roleNameSID : roleNameSynsetIDs) // iterate every synset of the role, new inverse Bidirectional Search threads, from role to role-filler
        {
            Thread roleToFillerThread = new RoleToFillerThread(Arrays.toString(roleNameSynsetIDs), pairKey, roleNameSID, fillerSIDs, prohibitedEdgeList);
            threadPool.add(roleToFillerThread); // inverse ones are added to threadPool
        }
        for (int i = 0; i < fillerScoreTriplets.size(); i++) // iterate every synset of the role-filler, new forward Bidirectional Search threads, from role-filler to role
        {
            String fillerSynsetID = fillerScoreTriplets.get(i).fillerSynsetID;
            Thread fillerToRoleThread = new FillerToRoleThread(pairKey, i, fillerSynsetID, roleNameSynsetIDs, prohibitedEdgeList);
            threadPool.add(fillerToRoleThread); // forward ones are added to threadPool as well
        }
        return threadPool; // every pair of (role synset, filler synset) relation is added to the pool
    }

    /**
     * This method collects all the calculated scores/paths for this pair, sorts the candidate
     * synsets by score (descending), and fills fillerSynsetIDs / fillerSynsetGlosses for final use.
     */
    public void sortSynsetsByScore()
    {
        if(fillerScoreTriplets.size() == 0) // if there's no synset id
        {
            return;
        }
        for(int i = 0; i < fillerScoreTriplets.size(); i++) // for every synset of the role-filler
        {
            String fillerSynsetID = fillerScoreTriplets.get(i).fillerSynsetID;
            double score = BabelNetShareResource.getScore(pairKey, i); // get the best score of the i-th synset of the role-filler
            String path = BabelNetShareResource.getPath(pairKey, i); // get the best path of the i-th synset of the role-filler to the role
            synsetIdScoreMap.put(fillerSynsetID, score); // add the result of the i-th synset to the map
            synsetIdPathMap.put(fillerSynsetID, path);
            fillerScoreTriplets.get(i).score = score;
        }
        fillerScoreTriplets.sort(new Comparator<ScoreTriplet>() // sort the role-filler synset tuple list by score, descending
        {
            public int compare(ScoreTriplet triplet1, ScoreTriplet triplet2)
            {
                return Double.compare(triplet2.score, triplet1.score);
            }
        });
        for(ScoreTriplet fillerScoreTriplet : fillerScoreTriplets)
        {
            fillerSynsetIDs.add(fillerScoreTriplet.fillerSynsetID); // update role-filler synset ID list after sorting
            fillerSynsetGlosses.add(fillerScoreTriplet.fillerGloss); // update role-filler gloss list after sorting
        }
    }

    /**
     * This method deals with the situation where the role and the role-filler share an identical
     * synset. Matched synsets are moved out of the candidate list into
     * exactMatchFillerScoreTriplets and hasExactMatch is raised.
     */
    private void removeExactMatch()
    {
        Iterator<ScoreTriplet> tupleListIterator = fillerScoreTriplets.iterator();
        while (tupleListIterator.hasNext()) // iterate every synset of the role-filler
        {
            ScoreTriplet element = tupleListIterator.next();
            if(roleNameSynsetIDSet.contains(element.fillerSynsetID))
            {
                hasExactMatch = true; // mark as has exact match
                tupleListIterator.remove(); // delete this synset in the role-filler
                exactMatchFillerScoreTriplets.add(element); // add this match
            }
        }
    }

    /**
     * @return a boolean value to show if there's an exact match between the role and the role-filler.
     */
    public boolean existExactMatch()
    {
        return hasExactMatch;
    }

    /**
     * @param score a given score for an exact match
     * This method gives an exact match the highest rank and assigns a score to it without computing.
     */
    public void setExactMatchResult(double score)
    {
        for(ScoreTriplet fillerScoreTriplet : exactMatchFillerScoreTriplets) // for every matched synset
        {
            fillerSynsetIDs.add(0, fillerScoreTriplet.fillerSynsetID); // set this synset to the very beginning of the synset list of the role-filler
            fillerSynsetGlosses.add(0, fillerScoreTriplet.fillerGloss);
            synsetIdScoreMap.put(fillerScoreTriplet.fillerSynsetID, score); // set this a predefined score
            synsetIdPathMap.put(fillerScoreTriplet.fillerSynsetID, "");
        }
    }

    /**
     * Attaches the extracted role-filler and its linguistic attributes, and builds the
     * pairKey ("filler:POS:{concatenated role synset IDs}") used to key shared results.
     */
    public void setRoleFiller(String filler, String fillerIndex, String fillerPOS, String fillerClass, String fillerQuant)
    {
        this.filler = filler;
        this.fillerIndex = fillerIndex;
        this.fillerPOS = fillerPOS;
        this.fillerClass = fillerClass;
        this.fillerQuant = fillerQuant;
        String roleNameSIDs = "";
        for(String roleNameSynsetID : roleNameSynsetIDs)
        {
            roleNameSIDs += roleNameSynsetID;
        }
        this.pairKey = filler + ":" + fillerPOS + ":" + roleNameSIDs;
    }

    /**
     * @return the score of the synset that is currently being shown.
     */
    public double getBestSynsetScore()
    {
        return this.fillerBestSynsetScore;
    }

    public String getRoleName()
    {
        return roleName;
    }

    public String getFiller()
    {
        return filler;
    }

    public String getFillerIndex()
    {
        return fillerIndex;
    }

    public String getFillerQuant()
    {
        return fillerQuant;
    }

    public String getFENameSynsetId()
    {
        return roleNameSynsetIDs[0];
    }

    public String getFillerBestSynsetID()
    {
        return fillerBestSynsetID;
    }

    public String getFillerBestSynsetGloss()
    {
        return fillerBestSynsetGloss;
    }

    public String getFillerSynsetId(int index)
    {
        return fillerSynsetIDs.get(index);
    }

    public String getSynsetGlossById(String synsetId)
    {
        return synsetIdGlossMap.get(synsetId);
    }

    public double getSynsetScoreById(String synsetId)
    {
        return synsetIdScoreMap.get(synsetId);
    }

    public String getRoleNameSynsetGloss()
    {
        return roleNameSynsetGloss;
    }

    public int getNumberOfSynsets()
    {
        return fillerSynsetIDs.size();
    }

    public String getPairKey()
    {
        return this.pairKey;
    }

    public void setRoleDataType(String roleDataType)
    {
        this.roleDataType = roleDataType;
    }

    /** @return true when val parses as a decimal integer; false for null or non-numeric input. */
    public boolean checkInteger(String val)
    {
        try
        {
            Integer.parseInt(val);
        }
        catch(NumberFormatException | NullPointerException e)
        {
            return false;
        }
        return true;
    }

    public void setProhibitedEdgeList(ArrayList<String> prohibitedEdgeList)
    {
        this.prohibitedEdgeList = prohibitedEdgeList;
    }

    /** @return a debug line "role = filler = index = score = gloss", plus the best path when available. */
    public String print()
    {
        String s = roleName + " = " + filler + " = " + fillerIndex + " = " + fillerBestSynsetScore + " = " + fillerBestSynsetGloss + "\n";
        if (fillerSynsetIDs.size() > 0)
        {
            String synset = fillerSynsetIDs.get(0);
            if (synsetIdScoreMap.get(synset) > 0.0)
            {
                if (!synsetIdPathMap.get(synset).equals(""))
                {
                    s += "\t" + synsetIdPathMap.get(synset) + "\n";
                }
            }
        }
        return s;
    }

    /**
     * @return A tuple in string contains role, role-filler and the score of best synset(s) of the role-filler.
     * So-called top result is the most suitable synset given the role-filler, which itself (the role-filler) may be wrong.
     * All synsets tied with the top score are joined with "/".
     */
    public String getTopResult()
    {
        if(fillerSynsetIDs.size() == 0)
        {
            return null;
        }
        String synsetSet = "'" + fillerSynsetIDs.get(0) + "'";
        Double score = synsetIdScoreMap.get(fillerSynsetIDs.get(0));
        for(int i = 1; i < fillerSynsetIDs.size(); i++)
        {
            String synset = fillerSynsetIDs.get(i);
            if(Double.toString(synsetIdScoreMap.get(synset)).equals(Double.toString(score)))
            {
                synsetSet += "/'" + synset + "'";
            }
        }
        return "triple('" + roleName + "','" + filler + "'," + synsetSet + ")";
    }
}
package ru.sosnov.projectmanagement.controller;

import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import ru.sosnov.projectmanagement.model.User;
import ru.sosnov.projectmanagement.model.enums.RoleType;
import ru.sosnov.projectmanagement.repository.UserRepository;
import ru.sosnov.projectmanagement.util.SecurityContextUtil;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.util.Random;

/**
 * Serves the login page and handles user self-registration.
 */
@Controller
@RequiredArgsConstructor
public class LoginController {

    /** Alphabet used for random salt generation. */
    private static final char[] SALT_CHARS =
            "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890".toCharArray();

    /**
     * Cryptographically strong RNG for salts. java.util.Random is predictable
     * and must not be used for security-sensitive values.
     */
    private static final SecureRandom SALT_RANDOM = new SecureRandom();

    private final UserRepository userRepository;

    /**
     * Renders the login page, or redirects home when already authenticated.
     *
     * @param error non-null when Spring Security redirected here after a failed login
     * @param map   view model; receives an "error" message when login failed
     * @return view name or redirect
     */
    @RequestMapping(value = "/login", method = {RequestMethod.GET, RequestMethod.HEAD})
    public String login(
            @RequestParam(value = "error", required = false) String error,
            ModelMap map) {
        if (error != null) {
            map.addAttribute("error", "Invalid username or password!");
        }
        User user = SecurityContextUtil.getAuthUser();
        if (user != null)
            return "redirect:/";
        else
            return "login";
    }

    /**
     * Registers a new USER account with a salted password hash.
     *
     * @param email    login email
     * @param password plain-text password from the form
     * @return redirect to the login page
     */
    @PostMapping("/register")
    public String register(@RequestParam String email, @RequestParam String password) {
        User user = new User();
        String salt = generateSalt(7);
        user.setEmail(email);
        user.setPassword(md5(password, salt));
        user.setRole(RoleType.USER);
        user.setSalt(salt);
        userRepository.save(user);
        return "redirect:/login";
    }

    /** Root path redirects to the login page. */
    @RequestMapping("/")
    public String home() {
        return "redirect:/login";
    }

    /**
     * Generates a random alphanumeric salt of the given length using a
     * cryptographically strong RNG.
     *
     * @param length number of characters in the salt
     * @return random salt string
     */
    private static String generateSalt(int length) {
        StringBuilder stringBuilder = new StringBuilder(length);
        for (int i = 0; i < length; i++) {
            stringBuilder.append(SALT_CHARS[SALT_RANDOM.nextInt(SALT_CHARS.length)]);
        }
        return stringBuilder.toString();
    }

    /**
     * Hashes {@code line + salt} with MD5 and returns the lowercase hex digest.
     *
     * SECURITY: MD5 is a fast, broken hash and unsuitable for password storage;
     * migrate to bcrypt/argon2 (e.g. Spring's BCryptPasswordEncoder). The
     * algorithm is kept here unchanged so existing stored hashes keep working.
     *
     * @param line plain-text password
     * @param salt per-user salt appended to the password
     * @return 32-char lowercase hex MD5 digest, or null if MD5 is unavailable
     */
    public static String md5(String line, String salt) {
        String password = mergePassSalt(line, salt);
        try {
            java.security.MessageDigest md = java.security.MessageDigest.getInstance("MD5");
            byte[] array = md.digest(password.getBytes(StandardCharsets.UTF_8));
            StringBuilder sb = new StringBuilder(array.length * 2);
            for (int i = 0; i < array.length; ++i) {
                // (b & 0xFF) | 0x100 yields a 3-hex-digit value "1xy"; chars 1..3
                // are the zero-padded two-digit hex of the byte.
                sb.append(Integer.toHexString((array[i] & 0xFF) | 0x100), 1, 3);
            }
            return sb.toString();
        } catch (java.security.NoSuchAlgorithmException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Concatenates password and salt into the digest input. */
    private static String mergePassSalt(String password, String salt) {
        return password + salt;
    }
}
Involvement of sigma 54 in exponential silencing of the Pseudomonas putida TOL plasmid Pu promoter. The sigma 54-dependent Pu promoter of the TOL plasmid pWW0 of Pseudomonas putida becomes activated by the prokaryotic enhancer-binding XylR protein when cells encounter m-xylene in the medium. However, even in the presence of the aromatic inducer, Pu activity is silenced in vivo during rapid exponential growth of the cells in rich medium. Various elements known to be involved in the control of the transcriptional activity of the promoter were examined to ascertain the mechanism by which expression of Pu is limited during the exponential phase of growth. A truncated and fully constitutive XylR derivative deleted of its signal-reception N-terminal domain was found to be subjected to the same exponential silencing as the wild-type XylR when exposed to m-xylene. This indicated that the phenomenon is not due to a late activation of XylR by the aromatic effector. A Pu variant in which the integration host factor (IHF)-binding site had been functionally replaced by a statically curved DNA segment showed the same induction pattern, thus ruling out variations in the intracellular levels of IHF during growth as the element responsible for the inactivity of Pu in rapidly growing cells. On the contrary, overproduction of the sigma 54 factor allowed Pu expression during exponential phase. As sigma 54 protein levels remained approximately constant during growth, the exponential silencing of Pu could be caused ultimately by changes in the activity of the factor itself. This effect may not be exclusive to Pu, but could be a general co-regulation mechanism in sigma 54-dependent promoters that connects transcription of a specific set of genes with the general physiological status of the cells.
//! Low-level helpers for talking to BlueZ over the D-Bus system bus.

use dbus::{Connection, BusType, Message, MessageItem, Props};
use std::error::Error;

// Well-known BlueZ D-Bus interface names.
static ADAPTER_INTERFACE: &'static str = "org.bluez.Adapter1";
static DEVICE_INTERFACE: &'static str = "org.bluez.Device1";
static SERVICE_INTERFACE: &'static str = "org.bluez.GattService1";
static CHARACTERISTIC_INTERFACE: &'static str = "org.bluez.GattCharacteristic1";
static DESCRIPTOR_INTERFACE: &'static str = "org.bluez.GattDescriptor1";
static SERVICE_NAME: &'static str = "org.bluez";

/// Calls `org.freedesktop.DBus.ObjectManager.GetManagedObjects` on the BlueZ
/// root object and returns the raw reply items (a dict of object path ->
/// interfaces -> properties).
fn get_managed_objects(c: &Connection) -> Result<Vec<MessageItem>, Box<Error>> {
    let m = try!(Message::new_method_call(SERVICE_NAME, "/", "org.freedesktop.DBus.ObjectManager", "GetManagedObjects"));
    let r = try!(c.send_with_reply_and_block(m, 1000));
    Ok(r.get_items())
}

/// Returns the D-Bus object paths of all Bluetooth adapters known to BlueZ.
pub fn get_adapters() -> Result<Vec<String>, Box<Error>> {
    let mut adapters: Vec<String> = Vec::new();
    let c = try!(Connection::get_private(BusType::System));
    let objects: Vec<MessageItem> = try!(get_managed_objects(&c));
    let z: &[MessageItem] = objects.get(0).unwrap().inner().unwrap();
    for y in z {
        let (path, interfaces) = y.inner().unwrap();
        let x: &[MessageItem] = interfaces.inner().unwrap();
        for interface in x {
            let (i,_) = interface.inner().unwrap();
            let name: &str = i.inner().unwrap();
            // An object exposing Adapter1 is a Bluetooth adapter.
            if name == ADAPTER_INTERFACE {
                let p: &str = path.inner().unwrap();
                adapters.push(String::from(p));
            }
        }
    }
    Ok(adapters)
}

/// Lists devices belonging to the given adapter.
pub fn list_devices(adapter_path: &String) -> Result<Vec<String>, Box<Error>> {
    list_item(DEVICE_INTERFACE, adapter_path, "Adapter")
}

/// Lists GATT services belonging to the given device.
pub fn list_services(device_path: &String) -> Result<Vec<String>, Box<Error>> {
    list_item(SERVICE_INTERFACE, device_path, "Device")
}

/// Lists GATT characteristics belonging to the given service.
pub fn list_characteristics(device_path: &String) -> Result<Vec<String>, Box<Error>> {
    list_item(CHARACTERISTIC_INTERFACE, device_path, "Service")
}

/// Lists GATT descriptors belonging to the given characteristic.
pub fn list_descriptors(device_path: &String) -> Result<Vec<String>, Box<Error>> {
    list_item(DESCRIPTOR_INTERFACE, device_path, "Characteristic")
}

/// Generic child lookup: returns the object paths of all objects implementing
/// `item_interface` whose `item_property` (an object-path property naming the
/// parent, e.g. "Adapter" on a Device1) equals `item_path`.
fn list_item(item_interface: &str, item_path: &str, item_property: &str) -> Result<Vec<String>, Box<Error>> {
    let mut v: Vec<String> = Vec::new();
    let c = try!(Connection::get_private(BusType::System));
    let objects: Vec<MessageItem> = try!(get_managed_objects(&c));
    let z: &[MessageItem] = objects.get(0).unwrap().inner().unwrap();
    for y in z {
        let (path, interfaces) = y.inner().unwrap();
        let x: &[MessageItem] = interfaces.inner().unwrap();
        for interface in x {
            let (i,_) = interface.inner().unwrap();
            let name: &str = i.inner().unwrap();
            if name == item_interface {
                let objpath: &str = path.inner().unwrap();
                // Keep only objects whose parent property points at item_path.
                let prop = try!(get_property(item_interface, objpath, item_property));
                let prop_path = prop.inner::<&str>().unwrap();
                if prop_path == item_path {
                    v.push(String::from(objpath));
                }
            }
        }
    }
    Ok(v)
}

/// Reads a single property from a BlueZ object via org.freedesktop.DBus.Properties.
pub fn get_property(interface: &str, object_path: &str, prop: &str) -> Result<MessageItem, Box<Error>> {
    let c = try!(Connection::get_private(BusType::System));
    let p = Props::new(&c, SERVICE_NAME, object_path, interface, 1000);
    Ok(try!(p.get(prop)).clone())
}

/// Writes a single property on a BlueZ object.
pub fn set_property<T>(interface: &str, object_path: &str, prop: &str, value: T) -> Result<(), Box<Error>>
    where T: Into<MessageItem> {
    let c = try!(Connection::get_private(BusType::System));
    let p = Props::new(&c, SERVICE_NAME, object_path, interface, 1000);
    Ok(try!(p.set(prop, value.into())))
}

/// Invokes a method on a BlueZ object, optionally with arguments, discarding
/// the reply (the 1000 ms timeout still surfaces D-Bus errors).
pub fn call_method(interface: &str, object_path: &str, method: &str, param: Option<&[MessageItem]>) -> Result<(), Box<Error>> {
    let c = try!(Connection::get_private(BusType::System));
    let mut m = try!(Message::new_method_call(SERVICE_NAME, object_path, interface, method));
    match param {
        Some(p) => m.append_items(p),
        None => (),
    };
    try!(c.send_with_reply_and_block(m, 1000));
    Ok(())
}
def sections(row):
    """Return the searchable sections of a record as ``(name, text)`` tuples.

    The title and abstract (when present) are joined with a single space into
    one unnamed section, mirroring the original single-section layout.

    Args:
        row: Mapping with optional ``"title"`` and ``"abstract"`` keys.

    Returns:
        A one-element list ``[(None, text)]``. ``text`` is ``None`` only when
        both fields are absent/empty (preserving the original behavior).
    """
    title = row.get("title")
    abstract = row.get("abstract")
    # Join only the truthy parts. Previously a present abstract with a
    # missing title raised TypeError (None + str), and an empty title
    # produced a stray leading space.
    parts = [part for part in (title, abstract) if part]
    text = " ".join(parts) if parts else title
    return [(None, text)]
<filename>src/csic/iiia/ftl/learning/lazymethods/similarity/Debug.java /* * To change this template, choose Tools | Templates * and open the template in the editor. */ /** * Copyright (c) 2013, <NAME> All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the * following conditions are met: * * Redistributions of source code must retain the above copyright notice, this list of conditions and the following * disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the * following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of * the IIIA-CSIC nor the names of its contributors may be used to endorse or promote products derived from this software * without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/

package csic.iiia.ftl.learning.lazymethods.similarity;

import csic.iiia.ftl.base.bridges.NOOSParser;
import csic.iiia.ftl.base.core.BaseOntology;
import csic.iiia.ftl.base.core.FTKBase;
import csic.iiia.ftl.base.core.FeatureTerm;
import csic.iiia.ftl.base.core.Ontology;
import csic.iiia.ftl.base.utils.FeatureTermException;
import csic.iiia.ftl.learning.core.TrainingSetProperties;
import csic.iiia.ftl.learning.core.TrainingSetUtils;

/**
 * Manual debugging driver: builds four small "car" feature terms over the
 * trains data set and prints the Kashima-kernel distance between every pair.
 *
 * @author santi
 */
public class Debug {

	/**
	 * Entry point. Loads the trains training set (which populates the ontology
	 * and knowledge bases as a side effect), parses four feature terms and
	 * prints their pairwise distances to standard output.
	 *
	 * @param args ignored
	 * @throws FeatureTermException if a feature term cannot be constructed
	 */
	public static void main(String[] args) throws FeatureTermException {
		Ontology baseOntology = new BaseOntology();
		Ontology ontology = new Ontology();
		FTKBase domainModel = new FTKBase();
		FTKBase caseBase = new FTKBase();
		ontology.uses(baseOntology);
		caseBase.uses(domainModel);
		domainModel.create_boolean_objects(ontology);

		try {
			// Loading is done for its side effects; the returned properties
			// object is not otherwise needed here.
			TrainingSetProperties ts = TrainingSetUtils.loadTrainingSet(TrainingSetUtils.TRAINS_DATASET, ontology, domainModel, caseBase);

			FeatureTerm f1 = NOOSParser.parse("(define (car))", domainModel, ontology);
			FeatureTerm f2 = NOOSParser.parse("(define (car) (loc 1) (nwhl 2))", domainModel, ontology);
			FeatureTerm f3 = NOOSParser.parse("(define (car) (loc 2))", domainModel, ontology);
			FeatureTerm f4 = NOOSParser.parse("(define (car) (nwhl 2))", domainModel, ontology);

			KashimaKernelDAGs kernel = new KashimaKernelDAGs();

			System.out.println("1 - 2 : " + kernel.distance(f1, f2, ontology, domainModel));
			System.out.println("1 - 3 : " + kernel.distance(f1, f3, ontology, domainModel));
			System.out.println("1 - 4 : " + kernel.distance(f1, f4, ontology, domainModel));
			System.out.println("2 - 3 : " + kernel.distance(f2, f3, ontology, domainModel));
			System.out.println("2 - 4 : " + kernel.distance(f2, f4, ontology, domainModel));
			System.out.println("3 - 4 : " + kernel.distance(f3, f4, ontology, domainModel));
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
package com.ctrip.hermes.metaserver.event;

import java.util.concurrent.ExecutorService;

import com.ctrip.hermes.metaserver.cluster.ClusterStateHolder;

/**
 * Event bus for meta-server events.
 *
 * @author <NAME>(<EMAIL>)
 *
 */
public interface EventBus {

	/**
	 * Publishes {@code event} on the bus.
	 * NOTE(review): dispatch is presumably asynchronous via
	 * {@link #getExecutor()} — confirm against the implementation.
	 */
	void pubEvent(Event event);

	/** Returns the executor service backing this bus. */
	ExecutorService getExecutor();

	/**
	 * Starts the bus with access to the given cluster state.
	 */
	void start(ClusterStateHolder clusterStateHolder);
}
/// Enqueue a single packet with the given header into the buffer, and
/// return a reference to its payload.
///
/// Returns `Err(Error::Truncated)` when the packet can *never* fit (its size
/// exceeds the payload ring's total capacity), and `Err(Error::Exhausted)`
/// when the buffer is merely full right now (metadata ring full, or not
/// enough free payload space at the moment).
pub fn enqueue(&mut self, size: usize, header: H) -> Result<&mut [u8]> {
    // Permanent failure: larger than the whole ring, dequeuing won't help.
    if self.payload_ring.capacity() < size { return Err(Error::Truncated) }

    // Transient failure: no metadata slot free right now.
    if self.metadata_ring.is_full() { return Err(Error::Exhausted) }

    let window = self.payload_ring.window();
    let contig_window = self.payload_ring.contiguous_window();

    if window < size {
        // Not enough free payload space in total.
        return Err(Error::Exhausted)
    } else if contig_window < size {
        if window - contig_window < size {
            // The buffer length is larger than the current contiguous window
            // and is larger than the contiguous window will be after adding
            // the padding necessary to circle around to the beginning of the
            // ring buffer.
            return Err(Error::Exhausted)
        } else {
            // Add padding to the end of the ring buffer so that the
            // contiguous window is at the beginning of the ring buffer.
            *self.metadata_ring.enqueue_one()? = PacketMetadata::padding(contig_window);
            self.payload_ring.enqueue_many(contig_window);
        }
    }

    // Record the packet's metadata, then claim its payload bytes, which are
    // guaranteed contiguous by the padding step above.
    *self.metadata_ring.enqueue_one()? = PacketMetadata::packet(size, header);

    let payload_buf = self.payload_ring.enqueue_many(size);
    debug_assert!(payload_buf.len() == size);
    Ok(payload_buf)
}
def extract(filing_json):
    """Extract a filing's documents unless they were already extracted.

    Skips work when the target directory (or its ``.zip`` archive) already
    exists. On a :class:`UnicodeDecodeError` the error is logged and an empty
    dict is returned; other exceptions propagate.

    Args:
        filing_json: Mapping with ``'filing_filepath'`` and
            ``'extracted_filing_directory'`` keys.

    Returns:
        The extracted filing contents, or ``{}`` when extraction was skipped
        or failed to decode.
    """
    contents = {}
    target_dir = filing_json['extracted_filing_directory']
    already_extracted = os.path.exists(target_dir) or os.path.exists(target_dir + ".zip")
    if already_extracted:
        return contents

    logger.info("\n\n\n\n\tExtracting Filing Documents:\n")
    try:
        contents = extract_complete_submission_filing(filing_json['filing_filepath'],
                                                      output_directory=target_dir)
    except UnicodeDecodeError as E:
        logger.error(f"\n\n\n\nError Decoding \n\n{E}")
    # Logged even after a decode failure, matching the best-effort contract.
    logger.info("\n\n\n\n\tExtraction Complete\n")
    return contents
/**
 * Throws unless `config[property]` exists and is of the given primitive type.
 */
export function assertRequiredType(config: any, property: string, type: 'string' | 'number' | 'boolean') {
  const actual = typeof config[property];
  if (actual !== type) {
    throw new Error(`The required "${property}" property needs to be a ${type}.`);
  }
}

/**
 * Throws when `config[property]` is present but is not of the given primitive
 * type. An absent (`undefined`) property is accepted.
 */
export function assertOptionalType(config: any, property: string, type: 'string' | 'number' | 'boolean') {
  const value = config[property];
  if (value === undefined) {
    return;
  }
  if (typeof value !== type) {
    throw new Error(`The optional "${property}" property needs to be a ${type}.`);
  }
}
#pragma once #include "Json2Settings.h" class Settings { public: using bSetting = Json2Settings::bSetting; Settings() = delete; static bool LoadSettings(bool a_dumpParse = false); static bSetting disableDialogueCollision; static bSetting disableAllyCollision; private: static constexpr char FILE_NAME[] = "Data\\SKSE\\Plugins\\ImWalkinHere.json"; };
Bakunin vs. Marx By Ulli Diemer I propose in this article to examine some of the most common anarchist objections to "Marxism". The issues I shall single out are all raised in the recent works cited in the preceding article ( Anarchism vs. Marxism ). All of them were raised, often for the first time, by Bakunin at the time when anarchism first emerged as a self-conscious movement defining itself in opposition to all other currents on the left. Therefore I will concentrate primarily on Bakunin in the following discussion, and on some of his differences with Marx. While I realize that Bakunin is not the only interpreter of anarchism, I think this is a valid approach for a number of reasons: (a) it is not possible to cover everything and everybody in a short essay; (b) the Bakunin/Marx split was the formative event in the history of anarchism; (c) Bakunin is still the most widely read, quoted, and admired anarchist in the anarchist movement itself; (d) many of the key anarchist objections to Marxism originate with Bakunin, and these objections continue to be used today; to the extent that it is possible to call them into question, it is possible to call into question current anarchist pre-conceptions about Marxism and to inaugurate a genuine dialogue. How do anarchists see the Marxist/anarchist split? What are their claims? The following beliefs seem to be generally accepted by anarchists: 1. Marxists believe in the creation of a "peoples' state" or a "workers' state"; anarchists believe in the abolition of the state. 2. "Anarchists look to a society in which real decision making involves everyone who lives in it"; Marxism instead would set up "a few discipline freaks pulling the strings on a so-called 'proletarian' dictatorship." 3. Marx was an "economic determinist"; Bakunin "emphasized the psychological subjective factors in revolution." 
Marxism is the ego trip of intellectuals who try to fit everything into their "theory of byzantine complexity" - dialectical materialism - which is of "doubtful usefulness" at best and which mainly serves to make it possible for Marxist leaders to establish "control over the movement". 4. Anarchists believe that revolutionary organizations should be open, egalitarian, and completely democratic; Marxists on the other hand advocate "hierarchical, power-tripping leadership", as exemplified by the vanguard party and democratic centralism. 5. The original split in the First International between the factions headed by Bakunin and Marx came over the issue of authoritarianism; Marx had Bakunin expelled from the International on trumped-up charges because Bakunin opposed Marx's dictatorial, centralized regime over the International. 6. Marxism is "authoritarian"; anarchism is "libertarian". What of these objections? 1. The peoples' state Perhaps it is not surprising that it is widely believed that Marx originated this concept, given the number of "Peoples' Republics", "Workers' States", etc. in the world today that call themselves "Marxist". Both the Leninists who use the concept, and the anarchists who oppose it, seem quite unaware that it is nowhere to be found in Marx's writings. Marx, on the contrary, specifically rejected it. (See for example Marx's Critique of the Gotha Program.) It is indicative of Bakunin's methods that he repeatedly accused Marx of advocating a "Peoples' State" (see for example Dolgoff, ed., Bakunin on Anarchy, Vintage, 1972), an accusation that in view of his failure to cite any evidence to support it (check the sources and see if Bakunin ever offers a single quote to back up his claims) and in view of Marx's and Engels' repeated and explicit repudiation of the concept, can only be interpreted as a deliberate fabrication on Bakunin's part. 
And it is hardly to the credit of several generations of anarchists that they have continued to swallow Bakunin's fictions on this matter without ever bothering to look for evidence to back them up. Marx and Engels' position on the state, while not free of ambiguities and not above criticism, was quite different from what Bakunin claimed. It is spelled out most extensively in Marx's The Civil War in France, but is developed in numerous other works as well. What Marx foresaw was that during the revolutionary period of struggle against the bourgeoisie, the proletariat would use the state apparatus to crush the bourgeoisie: "to achieve its liberation it employs means which will be discarded after the liberation". (Marx, Conspectus of Bakunin's State and Anarchy, 1874-75). After the vanquishing of the bourgeoisie, the state has outlived its usefulness. Marx pointed to the Paris Commune as being very close to what he had in mind; Bakunin too was enthusiastic about the commune, yet continued to accuse Marx of secretly holding very different views. This Bakuninist nonsense has been repeated by other anarchists as well. For example, the anarchist writer Arthur Mueller Lehning writes that "It is an irony of history that at the very moment when the battle between the authoritarians and the anti-authoritarians in the International reached its apogee, Marx should in effect endorse the program of the anti-authoritarian tendency.... The Commune of Paris had nothing in common with the state socialism of Marx and was more in accord with the ideas of Proudhon and the federalist theories of Bakunin. Civil War in France is in full contradiction with all Marx's writings on the question of the State." (quoted in Bakunin on Anarchy, P. 260). This is a remarkable piece of doublethink. Marx's major work on the state is said to be "in full contradiction" with "all" his writings on the state! What writings on the state is Lehning referring to then? We don't know, because he doesn't say. 
As always in anarchist polemics, we have to take him in faith. Certainly Lehning cannot be referring to the Poverty of Philosophy, written in 1847, or the Communist Manifesto, written in 1848, or the Critique of the Gotha Program, written in 1875, or to the private letters Marx was writing at the same time as the publication of The Civil War in France in 1871. All of these consistently maintain that the state is incompatible with socialism. Together they comprise most, if not "all" of Marx's writings on the state. But Lehning (and Bakunin, and Dolgoff, and Avrich, and Brothers, and Murtaugh, and...) know better. Somewhere, in some mythical world known only to anarchists, there are to be found Marx's real views on the state, the "People's State of Marx" (Bakunin on Anarchy, P. 318), which is "completely identical" with "the aristocratic-monarchic state of Bismarck". (Bakunin on Anarchy, P.319). How does one refute an "argument" which, without a single shred of evidence, except racial predisposition ("as a German and a Jew, he (Marx) is from head to toe an authoritarian" - Bakunin in 1872) without a single quotation, attributes ideas and concepts to Marx that Marx repeatedly attacked? There are two alternatives: either one swallows everything Bakunin, Dolgoff, and Co. say, on faith, because they are anarchists, or one takes the path of intellectual integrity, and tries to discover Marx and Engels' views on the state by reading what Marx and Engels said about the state. If one takes the latter course, one might start by reading Engels' March 1875 letter to Bebel, in which he says "it is pure nonsense to talk of a free people's state: so long as the proletariat still uses the state, it does not use it in the interests of freedom but in order to hold down its adversaries, and as soon as it becomes possible to speak of freedom the state as such ceases to exist. 
We would therefore propose to replace state everywhere by Gemeinwesen, a good old German word which can very well convey the meaning of the French word 'commune.'" It is possible, of course, to argue that the use of the state by the proletariat in the brief transitional period is dangerous, and could lead to the establishment of a permanent state. It must be noted, however, that Bakunin himself envisioned a form of post-revolutionary state, complete with elections, delegates, a parliament, an executive committee, and an army. (Bakunin on Anarchy, P. 153) Anarchists are curiously quiet about this however. Nevertheless, it remains a fact that in balance, the concern Bakunin expressed about the possible degeneration of the revolution proved to be a valid one, and that Marx for his part failed to give sufficient consideration to the dangers posed by this threat to a future revolution. This criticism, however, must itself be qualified in a number of ways; and it is certainly a far cry from the claims of Bakunin and the anarchists that Marxism was a theory that aimed at the subjection of society to state. 2. Dictatorship of the Proletariat . A closely related question is that of the dictatorship of the proletariat, one of the most abused and misunderstood terms of all of Marxism. The question of the transition from capitalism to socialism, and Marx's view of it, is an extremely complicated one that cannot be covered in a few paragraphs. But the point here is simply to dispose of the grossest misunderstandings of the term, fostered by its appropriation by the Bolsheviks, and by the related fact that dictatorship has come to have a quite different meaning today than it had in Marx's time. As Dolgoff puts it, there was then a "loose sense in which the term 'dictatorship' was used by nineteenth-century socialists to mean simply the preponderant influence of a class, as in Marx's 'dictatorship of the proletariat'" (Bakunin on Anarchy, P. 12). 
Or to put it more precisely, the dictatorship of the proletariat means the rule by the proletariat as a class, and the suppression of the bourgeoisie as a class. It is perfectly compatible with, and indeed presupposes, the most thorough-going democracy within the working class. The best brief exposition of the Marxian concept, and how it differs from the Leninist concepts of dictatorship, comes from Rosa Luxemburg's 1918 polemic against the Bolsheviks: "We have always distinguished the social kernel from the political form of bourgeois democracy; we have always revealed the hard kernel of social inequality and lack of freedom hidden under the sweet shell of formal equality and freedom - not in order to reject the latter but to spur the working class into not being satisfied with the shell, but rather, by conquering political power, to create a socialist democracy to replace bourgeois democracy - not to eliminate democracy altogether. "But social democracy is not something which begins only in the promised land after the foundations of socialist economy are created; it does not come as some sort of Christmas present for the worthy people, who, in the interim, have loyally supported a handful of socialist dictators. Socialist democracy begins simultaneously with the beginnings of the destruction of class rule and of the construction of socialism. It begins at the very moment of the seizure of power by the socialist party. It is the same thing as the dictatorship of the proletariat. "Yes, dictatorship! But this dictatorship consists in the manner of applying democracy, not in its elimination, in energetic, resolute attacks upon the well-entrenched rights and economic relationships of bourgeois society, without which a socialist transformation cannot be accomplished. But this dictatorship must be the work of the class and not of a little leading minority in the name of the class - that is, it must proceed step by step out of the active participation of the masses." 
(Rosa Luxemburg, The Russian Revolution, Ann Arbor paperback, P. 77-8). 3. "Economic Determinism" The question of Marxian materialism and Marx's emphasis on the relations of production is again an extremely difficult one which simply cannot be dealt with intelligently in a brief article. At this point it is possible only to say that it raises difficult problems which have to be seriously analyzed. However, while a re-examination of Marx's theory and the admitted contradictions in it are on the agenda, it must be said that the typical anarchist portrayals of it and objections to it are ill-informed misconceptions that contribute less than nothing to the discussion. For example, Marx was not an economic determinist; he rejected economic determinism and what he called "crude materialism" out of hand. He did not attempt to reduce all phenomena to economic ones; it is necessary only to read any of his political works to know this. As Engels says, "According to the materialist conception of history, the ultimately determining element in history is the production and reproduction of real life. More than this neither Marx nor I have ever asserted. Hence if somebody twists this into saying that the economic element is the only determining one he transforms that proposition into a meaningless, abstract senseless phrase." (letter to Joseph Bloch, Sept. 21-22, 1890, in Lewis Feuer, ed., Marx and Engels: Basic Writings on Politics and Philosophy, P. 397-398.) Anarchists like Paul Avrich, however, have their own view of 'what Marx really meant'. See how Avrich crudely contrasts Marx's and Bakunin's views: (Bakunin) "rejected the view that social change depends on the gradual unfolding of 'objective' historical conditions. He believed, on the contrary, that men shape their own destinies..." 
It is unfortunate that Avrich has never read, for example, Marx's third thesis on Feuerbach: "The materialist doctrine (of Feuerbach) that men are the products of circumstances and upbringing, and that, therefore, changed men are the products of other circumstances and changed upbringing, forgets that it is men that change circumstances and that the educator himself needs educating." Or The Holy Family: "History does nothing, it 'does not possess immense riches', it does not fight battles'. It is men, real, living men, who do all this, who possess things and fight battles. It is not 'history' which uses men as a means of achieving - as if it were an individual person - its own ends. History is nothing but the activity of men in pursuit of their ends." (Bottomore, ed., Karl Marx, Selected Writings in Sociology and Social Philosophy, Pelican P. 78.) 4.5.6. The nature of the revolutionary organization; authoritarianism and libertarianism These too are very complicated questions: it is impossible to do justice to either Marx's or Bakunin's views in a short and rather polemical articles that aims at challenging certain gross misconceptions rather than at evaluating and criticizing their ideas and practice in a rigorous and comprehensive way. It is necessary to understand, first of all, that the ideas of both Marx and Bakunin, as expressed in their writings, are in certain respects contradictory; neither Marx, nor certainly Bakunin, was totally consistent throughout his life. Secondly, the practice of both men was sometimes at variance with what they advocated. Neither was able always to live up to the standards set down. Both men displayed streaks of arrogance and authoritarianism in their own personalities. Nevertheless, there remains a body of writing and practice that makes it possible to evaluate what Marx and Bakunin stood for. I shall argue that a serious examination of the question yields the following points: 1. 
Bakunin deliberately distorted and falsified Marx's views on the issues under dispute. 2. The accusation that led to Bakunin's expulsion from the International, that of heading a secret society which aimed to infiltrate and take over the International, was true. (Since this seems to be accepted by most historians, this point will not be pursued. See for example Woodcock's Anarchism, P. 168, or Aileen Kelly's article in the January 22, 1976 issues of the New York Review of Books.) The only point worth noting here is that the "authoritarian" federal structures of the International that Bakunin protested against so vehemently in 1871 and 1872 were introduced to the International shortly before, not on the initiative of the General council of which Marx was a member, but on the motion of Bakunin's supporters, with Bakunin's active participation and support. It was only after he failed to gain control over the structures of the International that Bakunin suddenly discovered their "authoritarianism". 3. The charge of authoritarianism and dictatorial views can be directed against Bakunin with a great deal more justification than they can against Marx. Bakunin's deliberate misrepresentations of Marx's views on the state were noted earlier. Bakunin was obsessed with the idea that all Germans held identically authoritarian views, and consistently attributed the views of some of Marx's bitterest enemies, such as Bismarck and Lasalle, to Marx. Marx's fury at this tactic is a matter of record. Bakunin, in many of his polemics against Marx, argues from the premise that Marx must obviously be authoritarian because he is a German and a Jew, who are by definition authoritarians and statists. (Because of selective editing, this is not evident in Dolgoff's Bakunin anthology.) Bakunin went even further, claiming that Marx was part of an international conspiracy with Bismarck and Rothschild. 
Such accusations are of course not worthy of reply, but surely they make it clear that it is necessary to treat the "facts" and arguments of the man making them with the greatest caution. A similar disregard for the most elementary rules of evidence, not to mention decency, permeated most of Bakunin's polemics against Marx. He charged, again and again, that Marx advocated a universal dictatorship, that he believed in a socialism "decreed from the top down." He ignored Marx's lifelong insistence that "the emancipation of the working classes can only be the work of the working classes themselves," and Marx's intransigent opposition to the state. Nor did he attempt to support his accusations with facts or quotations. In reading Bakunin's caricature of Marx's views - the only "version" of Marxism most anarchists have bothered to familiarize themselves with! - readers will search in vain for one single quotation amidst the hysterical confusion of wild, unsubstantiated charges. There simply are none. Almost as bad are those anarchists who lambaste Marx for his "advocacy" of "democratic centralism" and the "vanguard party." Is it really necessary to point out that these concepts were developed long after Marx's death, that Marx never belonged to an organization practising either; that he consistently opposed the tiny conspiratorial sects of his day; that he made it a condition of his joining the Communist League that they scrap their closed, undemocratic organizational forms; that he always, and angrily, refused attempts by socialists of his day to single him out for special honours or titles in the movement? And has it been completely forgotten that one of Marx's chief themes in his criticism of Bakunin was the latter's eternal fascination with conspiratorial, manipulative, sectarian politics? 
For there is, unfortunately for those who believe in anarchist fairy tales, a substantial body of evidence for the contention that Bakunin held precisely those "authoritarian" views which he brazenly attributed to Marx. Those who seek evidence of a penchant for dictatorial, Machiavellian politics will find a good deal of material in the writings not of Marx, but of Bakunin. (This is not to say that Bakunin consistently held such views; there are serious contradictions in his thought amounting to a basic polarity.) Bakunin's advocacy of a post-revolutionary state, which continued most of the forms of the pre-revolutionary state, such as elections, parliament, army, etc., was noted earlier, and can be found, for example, in Bakunin on Anarchy, P. 153. Similarly, despite his much-vaunted opposition to any form of independent political action by the working class, one can find him advocating, in his letters, not simply political action, but working-class support and action on behalf of bourgeois political parties. (See for example Bakunin on Anarchy, P. 219.) And elsewhere, one finds him advocating that anarchists should run for Parliament (Bakunin on Anarchy, P. 218). Nor are these merely products of his naive, youthful days, which are so often used to excuse some of his grossest aberrations, as for example when we find the 'young' Bakunin (at age 35) writing appeals to the Czar, while Marx, four years younger, is advocating the revolutionary overthrow of the state. No, these pronouncements, and many others like them, are issued privately at precisely the time that Bakunin is publicly proclaiming his opposition to Marxism because it advocates political action by the working class, and a transitional dictatorship of the proletariat in the immediate post-revolutionary period. 
It is also worth contrasting Bakunin's proclamation of the principle, for the future anarchist society, of "from each according to his ability, to each according to his work" (my emphasis) with Marx, who held to the much more radical principle, "from each according to his ability, to each according to his needs." Or consider Bakunin's Rules for his International Alliance, not a passing whim, but the organization to which he gave his primary allegiance while participating in the First International. Here is a sample, written in 1869: "it is necessary that in the midst of popular anarchy, which will make up the very life and all the energy of the revolution, the unity of revolutionary thought and action should be embodied in a certain organ. That organ must be the secret and world-wide association of the international brothers..." "...the only thing a well-organized secret society can do it to assist the birth of revolution by spreading among the masses ideas that accord with the instinct of the masses, and to organise, not the army of the revolution - that army must always be the people, but a revolutionary General Staff composed of devoted, energetic and intelligent individuals who are above all sincere - not vain or ambitious - friends of the people, capable of serving as intermediaries between the revolutionary ideas and the popular instincts." "The number of these individuals should not, therefore, be too large. For the international organisation throughout Europe one hundred serious and firmly united revolutionaries would be sufficient. Two or three hundred revolutionaries would be enough for the organisation of the largest country." 
As the authoritarian Marx said of this libertarian idea: "To say that the hundred international brothers must 'serve as intermediaries between the revolutionary idea and the popular instincts,' is to create an unbridgeable gulf between the Alliance's revolutionary idea and the proletarian masses; it means proclaiming that these hundred guardsmen cannot be recruited anywhere but from among the privileged classes." When one sees the views of Bakunin and Marx side by side, it is difficult to remember that it is Marx, not Bakunin, who is supposed to be the father of "Marxism-Leninism" and Bakunin, not Marx, who is supposed to be the father of "anarchism". Bakunin's authoritarian tendencies were at their most extreme at precisely the time that he was splitting the International. This was the time of his association with the notorious Nechaev. Most anarchists sources treat this as a passing aberration on Bakunin's part, and indeed he did repudiate Nechaev when he found out the true nature of his activities. But the fact remains that Bakunin did enter into partnership with Nechaev, and under his influence wrote a number of tracts that displayed a despotic, Machiavellian approach to revolution that far surpassed anything he ever accused Marx of. The authorship of some of the pieces in question has been disputed, but the relevant point is that Bakunin allowed these pamphlets to be published bearing his name and actively worked to distribute them knowing they bore his name. In these pamphlets, Nechaev and Bakunin advocate a new social order, to be erected "by concentrating all the means of social existence in the hands of Our Committee, and the proclamation of compulsory physical labour for everyone," compulsory residence in communal dormitories, rules for hours of work, feeding of children, and other minutae. As the "authoritarian" Marx put it: "What a beautiful model of barrack-room communism! 
Here you have it all: communal eating, communal sleeping, assessors and offices regulating education, production, consumption, in a word, all social activity, and to crown all, Our Committee, anonymous and unknown to anyone, as the supreme dictator. This indeed is the purest anti-authoritarianism..." When one looks at Bakunin's views on authority and revolution in detail, it is hard to disagree with Marx's and Engels' assertion that Bakunin and his followers simply used the word "authoritarian" to mean something they didn't like. The label "authoritarian" was then, and remains today for many libertarians, a way of avoiding serious political questions. The fact is that not all authority is bad; that in certain situations authority is necessary and unavoidable. As Engels says, "A revolution is certainly the most authoritarian thing there is; it is the act whereby one part of the population imposes its will upon the other part by means of rifles, bayonets, and cannon - authoritarian means, if such there be at all." And some form of authority, i.e. decision-making structure, is necessary in any form of interaction, co-operation, or organization that is social rather than individual. In a socialist society, it will still be necessary to makes decisions about things; these decisions will necessarily reflect the will, i.e. the authority, of the majority. This is not a violation of collectivity, but an absolutely indispensable component of it. To say, as many anarchists do, that they reject all forms of authority, even that which is willingly accepted; even that which is the result of democratic decision-making, is simply to advocate either rule by a minority, or a return to the purest form of free-market capitalism, as is advocated by the "libertarian" right. No amount of talk about "consensus" or local autonomy or individual initiative will alter this fact. Consensus is not always attainable, because sometimes people do not agree. 
Then a decision-making process is necessary, and if it is democratic, the minority will have to accede to the majority. Autonomy and individual initiative can still have the fullest possible play, but this does not alter the fact that the authority of the majority has prevailed in the question at hand. There is another aspect of Bakunin that must be confronted because, like his ill-defined views on authority, it has remained a part of the anarchist movement. Running through all of Bakunin's thought and subsequent anarchist thought and practice is a dark thread, an infatuation with violence, with destruction for the sake of destruction, action for the sake of action, distrust of logic, intellect, and knowledge, and a love for conspiratorial, tightly controlled organizations. For the most part, these things remained subsidiary to his - and his successors' - genuinely libertarian and humanistic instincts. During the period of Bakunin's association with Nechaev, who was attracted solely by Bakunin's dark side, this aspect took over. Then, confronted with the realization of this dark side in practice, in the person of Nechaev, Bakunin shrank back in genuine horror. However, as Aileen Kelly notes, "even then he managed to integrate Nechaev's villainy into his own fantasies, writing to his astonished friends that Nechaev's methods were those of a "pure" and "saintly" nature who, faced with the apathy of the masses and intellectuals in Russia, saw no other way but coercion to mold the latter into a force determined to move the masses to revolution. Such reasoning, Bakunin concluded, 'contains, alas! much truth.'" Kelly continues: "This grotesque assessment of Nechaev is very revealing. At a time when the gap between man's empirical and ideal nature seemed enormous, Bakunin, albeit reluctantly, concluded that if men do not wish to liberate themselves, it might be necessary for those with their highest interests at heart to liberate them against their will." 
To Bakunin's credit, he continually struggled against the implications of this aspect of his thought. Always fascinated by all 'revolutionary' shortcuts, he nevertheless strove to remain loyal as well to his libertarian instincts, and it is this aspect of his remarkably polarized vision that he left as his lasting heritage. The anarchist movement he fathered has also been plagued by the same polarity, by the tension between real libertarianism on the one side, and the sometimes irresistible attraction of anti-intellectualism, terrorism, and conspiracy, on the other. The anarchist movement needs to come to grips with Bakunin's ambiguous heritage. And to do so, it also needs to come to terms with Marx. Published in The Red Menace , Vol. 2, No. 2, Spring 1978, along with a companion article, Anarchism vs. Marxism . Ulli Diemer www.Diemer.ca Red Menace home page Subject Headings : Abolition of the State - Anarchism - Anarchism/Critiques - Anti-Authoritarianism - Anti-Democratic Ideologies - Anti-Marxism - Left, The - Left History - Libertarian Politics - Libertarian Socialism - Libertarianism - Marx, Karl - Marxism - Marxism Overviews - Marxist Theory of Revolution - Marxist Theory of the State - Radical Political Theory - Revolution - Revolutionary Politics - Socialism - Strategies for Social Change - Tyranny of the Minority
<gh_stars>0 #[doc = "Writer for register INTCLR"] pub type W = crate::W<u32, super::INTCLR>; #[doc = "Register INTCLR `reset()`'s with value 0"] impl crate::ResetValue for super::INTCLR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `FUFIC`"] pub struct FUFIC_W<'a> { w: &'a mut W, } impl<'a> FUFIC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `LNBUIC`"] pub struct LNBUIC_W<'a> { w: &'a mut W, } impl<'a> LNBUIC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `VCOMPIC`"] pub struct VCOMPIC_W<'a> { w: &'a mut W, } impl<'a> VCOMPIC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `BERIC`"] pub struct BERIC_W<'a> { w: &'a mut W, } impl<'a> BERIC_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } 
#[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } impl W { #[doc = "Bit 1 - FIFO underflow interrupt clear. Writing a 1 to this bit clears the FIFO underflow interrupt."] #[inline(always)] pub fn fufic(&mut self) -> FUFIC_W { FUFIC_W { w: self } } #[doc = "Bit 2 - LCD next address base update interrupt clear. Writing a 1 to this bit clears the LCD next address base update interrupt."] #[inline(always)] pub fn lnbuic(&mut self) -> LNBUIC_W { LNBUIC_W { w: self } } #[doc = "Bit 3 - Vertical compare interrupt clear. Writing a 1 to this bit clears the vertical compare interrupt."] #[inline(always)] pub fn vcompic(&mut self) -> VCOMPIC_W { VCOMPIC_W { w: self } } #[doc = "Bit 4 - AHB master error interrupt clear. Writing a 1 to this bit clears the AHB master error interrupt."] #[inline(always)] pub fn beric(&mut self) -> BERIC_W { BERIC_W { w: self } } }
/** * @author Santhosh Kumar Tekuri */ class BlockingCallListener implements CallListener{ public ResultMessage result; public WAMPException error; @Override public void onResult(WAMPClient client, ResultMessage result){ synchronized(this){ this.result = result; notifyAll(); } } @Override public void onError(WAMPClient client, WAMPException error){ synchronized(this){ this.error = error; notifyAll(); } } }
// queryJSON executes the query in builder and loads the resulting JSON into // a bytes slice compatible. // // Returns ErrNotFound if nothing was found func (ex *Execer) queryJSONFn() ([]byte, error) { fullSQL, args, blob, err := ex.cacheOrSQL() if err != nil { return nil, err } if blob != nil { return blob, nil } defer logExecutionTime(time.Now(), fullSQL, args) jsonSQL := fmt.Sprintf("SELECT TO_JSON(ARRAY_AGG(__datq.*)) FROM (%s) AS __datq", fullSQL) err = ex.database.Get(&blob, jsonSQL, args...) if err != nil { logSQLError(err, "queryJSON", jsonSQL, args) } ex.setCache(blob, dtBytes) return blob, err }
Low dissolved oxygen levels increase stress in piava (Megaleporinus obtusidens): iono-regulatory, metabolic and oxidative responses. The aquatic environment presents daily and/or seasonal variations in dissolved oxygen (DO) levels. Piava faces different DO levels in the water due to its distributional characteristics. The goal of this study was to describe the effects of low DO levels on plasma ion, biochemical and oxidative variables in piava juveniles. Fish were exposed to different DO levels, including 1.0, 2.0, 3.0, 4.0 and 5.0 mg L-1 of DO for 96 h, after which blood and tissue samples (liver, kidney, gill and muscle) were collected. The decrease in DO levels decreased plasma Na+, Cl-, K+ and NH3 levels as well as protein and glycogen levels in the liver, kidney and muscle; increased Na+/K+-ATPase activity in the gills and kidney as well as glucose and ammonia levels in the liver, kidney and muscle; and increased lactate levels in the kidney and muscle. Thiobarbituric acid-reacting substances, catalase and non-protein thiol levels decreased in the tissues of piavas exposed to low DO levels. It is concluded that piava can apparently cope with hypoxic conditions; however, low DO levels are a stressor, and the tolerance of piava to hypoxia involves iono-regulatory, metabolic and oxidative adjustments.
Localized variation in the ocean's transmission properties: Its drastic effect on a sonar display A ship-positioning sonar system repeatedly experienced severe problems while operating in rough seas. The visual display for this system is a CRT screen with a bright spot that represents the position of an undersea beacon relative to a fixed point on the ship in the X-Y plane parallel to the sea surface. In operation, the bright spot is manipulated to the center of the screen by moving the ship in a compensating direction, the object being to keep the vessel stationary directly over the beacon. Difficulties in performance arose when the seas became very rough. The bright spot jumped erratically and sometimes took excursions off the screen entirely. This left the operator very uncertain of the ship's position and caused a general loss of confidence in the system. At first the problem was thought to be a malfunction of the hardware that manifested itself when the ship pitched and rolled severely. However, a careful analysis of test data showed that the problem originated in the transmission medium itself. Because of the rough seas, aerated patches of water were introduced into the signal path between the beacon and the receiving hydrophones beneath the hull. The presence of this aerated water reduced the phase velocity and increased the attenuation of the beacon signal, accounting for the erratic behavior on the sonar display.
Controversial tycoons' turbulent Kop years With their ill-fated tenure at Anfield set to end - and not without a fight -charts the disastrous reign of 'Uncle George and Tom'...Dubai International Capital (DIC) pull out of a takeover bid after they try in vain to force Liverpool to come to a decision while the club's board consider a new offer made by American tycoons George Gillett and Tom Hicks.Gillett and Hicks offer £435 million for the ownership of Liverpool. This includes £215m for the building of a proposed new stadium on Stanley Park. The club's board, led by chairman David Moores and chief executive Rick Parry, unanimously recommend that this offer be accepted.The offer from Hicks and Gillett is accepted, valuing the club at £218.9m (£5,000 per share), and confirming debts of £44.8m. In their original press conference, Gillett promises work on Liverpool's new stadium would begin immediately, saying: "The spade has to be in the ground within 60 days."Liverpool are beaten by AC Milan in the Champions League final, with Hicks promising afterwards that serious funds will be made available to manager Rafael Benitez with which he can strengthen the squad. It results in the infamous soundbite: "If Rafa said he wanted to buy Snoogy Doogy we would back him."Liverpool embark on a major spending spree, adding Spanish star Fernando Torres, as well as Ryan Babel and Yossi Benayoun. Supporters welcome Torres, a club record signing at around £20m, in particular.Benitez and the owners fall out over the Spaniard's transfer targets for January 2008. Gillett and Hicks reportedly tell Benitez to 'concentrate on training and coaching the players he already has'. Benitez responds with a frosty press conference in which he repeats that phrase in answer to every question. 
Liverpool's form, impressive in the early weeks of the season, begins to deteriorate.Hicks admits the club had made an approach to former Germany manager Juergen Klinsmann, with a view to Klinsmann replacing Benitez as manager. The meeting took place at the height of the feud between Benitez and the owners in November. Hicks says, however, that such a move was "an insurance policy, to have him become manager if Rafa left for Real Madrid". Klin-cher | Juergen approach belittled Benitez Liverpool supporters, who have formed the supporters' union Spirit of Shankly, protest against the Americans' ownership before, during and after the 2-2 draw with Aston Villa. Banners pledge their support for Benitez, as well as urging Gillett and Hicks to sell to DIC, who are rumoured to be interested.It emerges that Gillett and Hicks are barely on speaking terms, throwing the club into turmoil. Rick Parry, it transpires, was present when Gillett and Hicks met with Klinsmann the previous November, further souring his relationship with Benitez. Days later, Hicks rounds on Parry, accusing the chief executive of failing in his commercial and professional duties, and advising the termination of his contract. Parry would leave his role at the end of the following season.Benitez is left frustrated by the club's transfer policy, as funds are denied following the sale of striker Robbie Keane, and the club fail to act swiftly over the contract of Daniel Agger. Benitez himself refuses to commit to a long-term deal until assurances are made regarding the control of the club.Rumours surface ahead of the Reds' Champions League clash with Real Madrid that Benitez has quit. They prove to be unfounded, and days later it is announced that Parry will be leaving the club at the end of the season. Liverpool finish the campaign second, just four points short of Manchester United.Christian Purslow, financial expert and Liverpool supporter, is appointed as managing director. 
His brief is to find £100m of fresh investment, which will help satisfy Liverpool's creditors. The playing side suffers, with Xabi Alonso and Alvaro Arbeloa sold, and Alberto Aquilani and Glen Johnson added, but further funds are denied. September 2009 - Gillett criticises Benitez in a meeting with supporters' union Spirit of Shankly. The American claims that Liverpool's financial situation is healthier than that of Manchester United, and also denies promising fans that work on a new stadium would begin 'within 60 days' of his arrival at Anfield. January 2010 - Liverpool fans are left incensed after Tom Hicks Jnr, a board member at Anfield, responds to an e-mail from a supporter with a foul-mouthed tirade. Hicks Jnr is forced to resign, with commercial director Ian Ayre and financial director Philip Nash added to the board. March 2010 - Reports surface suggesting a bid of £110m has been made from the Rhone Group - a New York-based private equity firm - for a 40 per cent stake in Liverpool. Gillett and Hicks fail to respond to the offer within the deadline. April 2010 - Gillett and Hicks announce the appointment of British Airways chairman Martin Broughton, who will oversee the formal sale of the club as soon as possible. The Americans' statement reads: "Owning Liverpool Football Club over these past three years has been a rewarding and exciting experience for us and our families. Having grown the Club this far we have now decided together to look to sell the Club to owners committed to take the Club through its next level of growth and development." May 2010 - Sunday Telegraph reports that Dubai International Capital were close to a £500m takeover prove wide of the mark. The group that initially missed out on buying the club to Hicks and Gillett and were believed to be ready to commit funds for player transfers and for a new stadium. August 2010 - Chinese investor Kenny Huang announces his interest in acquiring the club. 
After initial claims of backing from his nation's government and ability to buy any player Roy Hodgson wanted, the 46-year-old officially announces his withdrawal from the race on August 21. The Huang situation was matched by the approach from businessman Yahya Kirdi. The Syrian - who headed a consortium of Middle East investors - initially claimed to be in advanced negotiations on August 4, before he announced he expected to conclude a preliminary deal before the season's opening fixture against Arsenal on the 15th. Again, no offer was agreed. September 2010 - In a bid to retain control, co-owner Tom Hicks attempts to refinance the £237m debt to the Royal Bank of Scotland. If unable to find fresh credit, the Texan could be forced to relinquish his investment on October 15. Reports emerge on September 19 that a £280m package had been agreed with private equity firm Blackstone. A large-scale campaign from Liverpool supporters follows and the company announces the following day that they had no intention of concluding a deal to leave Hicks stranded. October 2010 - With the date looming to repay RBS or face a £60m penalty charge, rumours emerge that a proposed deal with the New England Sports Venture (NESV) - headed by multi-millionaire Boston Red Sox owner John W. Henry - has been agreed on the 5th. A boardroom struggle ensues, with an official statement from the club detailing Hicks and Gillett's attempts to remove chairman Martin Broughton, managing director Christian Purslow and commercial director Ian Ayre from the board and install Mack Hicks and Lori Kay McCutcheon. The following day, a statement is released by chairman Martin Broughton to confirm that a proposed sale to NESV had been agreed subject to legal challenge from the American duo. 
Royal Bank of Scotland, the club's major creditors, take out an injunction to prevent Hicks and Gillett changing the makeup of the board to prevent NESV's proposed takeover, and the full case is heard at the Royal Courts of Justice on October 12.Returning his verdict on the 13th, Justice Floyd rules that Hicks and Gillett have indeed given up the right to change the board's makeup — paving the way for Broughton et al to complete the sale of the club to NESV pending a board meeting later in the day.John W. Henry is surprisingly part of the board meeting, saying he is “confident” as he enters the building for talks. However, as discussions are taking place, it emerges Hicks and Gillett have been granted an injunction by a Texan court to halt the sale of the club to NESV.Another hearing is called for October 14 after Hicks and Gillett are granted the injunction. However, the owners are accused of “outrageous” behaviour as it’s revealed the Texan court knew little about the court hearing taking place. Justice Floyd rules against Hicks and Gillett, meaning the English directors of Liverpool are allowed to discuss the £300m sale of the club to NESV.
// preloadBootstrapFromRuntime tries to load the runtime image from tarballs, using both the // default registry, and the user-configured registry (on the off chance they've retagged the // images in the tarball to match their private registry). func preloadBootstrapFromRuntime(imagesDir string, resolver *images.Resolver) (v1.Image, error) { var refs []name.Reference runtimeRef, err := resolver.GetReference(images.Runtime) if err != nil { return nil, err } if runtimeRef.Context().Registry.Name() == images.DefaultRegistry { refs = []name.Reference{runtimeRef} } else { defaultRef, err := resolver.GetReference(images.Runtime, images.WithRegistry(images.DefaultRegistry)) if err != nil { return nil, err } refs = []name.Reference{defaultRef, runtimeRef} } for _, ref := range refs { img, err := tarfile.FindImage(imagesDir, ref) if img != nil { return img, err } if err != nil { logrus.Warnf("Failed to load runtime image %s from tarball: %v", ref.Name(), err) } } return nil, nil }
/** * abstract class to be used by every technology-specific implementation */ @Slf4j public abstract class Adapter extends TestAbstractHandler { private static final String CHECKFAILED = "check failed:\n"; @Autowired protected OpenTestingConfig openTestingConfig; @Autowired private JwtReceiver jwtReceiver; @Autowired private Encryption encryption; @Autowired private Lock lock; @Autowired private RandomData randomData; /** * used service name in test definition */ public abstract String getServicename(); /** * inject data into your adapter */ public abstract boolean inject(String testid, TestCaseInjectionDTO inject); /** * execute check within your adapter */ public abstract boolean check(String testid, TestCaseCheckDTO check, Object... args); /** * create required components to execute the checks */ public abstract void createRequiredComponents(TestCaseDTO test); /** * create required timers at startup, default off */ public List<String> getRequiredTimerCrons() { return new ArrayList<>(); } /** * defines if timer should validate age only (default: false) */ public boolean timerValidateAgeOnly() { return false; } /** * pause method could be overridden * @param byLabel */ public void pause(boolean value, String byLabel) { } /** * service alias method could be overridden */ public List<String> getServicenameAlias() { return new ArrayList<>(); } /** * service alias method could be overridden */ public List<String> getAllServicenames() { List<String> res = new ArrayList<>(getServicenameAlias()); res.add(this.getServicename()); return res; } /** * read file from internal store and replace placeholders with random data * @param testid test case ID * @param filename filename * @param randomdata random data store * @return file content * @throws NotFoundException */ protected String getFileAndAddTestData(String testid, String filename, TestCaseRandomDataDTO randomdata) throws NotFoundException { return randomData.addRandomData(this.getFile(testid, filename), randomdata); } 
/** * inject random data into content * @param data content * @param randomdata random data store * @return content */ protected String addRandomData(String data, TestCaseRandomDataDTO randomdata) { return randomData.addRandomData(data, randomdata); } /** * check if checks contain 1 of a list of services * @param test test case * @param services service names * @return existence */ protected boolean doChecksContainServices(TestCaseDTO test, List<String> services) { if (test.getChecks() != null) { for (TestCaseCheckDTO check : test.getChecks()) { if (services.contains(check.getService().getType())) return true; } } return false; } /** * create connection key */ protected String createConnectionKey(String testid, String connectstring, String connectuser, String connectpassword) { //test id at the beginning - this is used to remove blocks return testid+"#"+connectstring+"#"+connectuser+"#"+connectpassword; } /** * create the headers map * @throws NotFoundException */ @LogExecutionTime protected Map<String,String> getHeaders(String testid, String header, TestCaseRandomDataDTO randomdata, TestCaseServiceDTO service) throws JsonProcessingException, NotFoundException { Map<String,String> headerMap = new HashMap<>(); //check if there is a header file if (header != null && header.length() > 0) { //read the file and replace random data String headers = this.getFileAndAddTestData(testid, header, randomdata); //parse the JSON TypeReference<Map<String,String>> ref = new TypeReference<Map<String,String>>() {}; Map<String,String> headersmap = openTestingConversion.json2object(headers, ref); //replace connectuser and connectpassword for (Map.Entry<String,String> entry : headersmap.entrySet()) { headerMap.put(entry.getKey(), replaceCustomAndUsernamePassword(entry.getValue(), service)); } } return headerMap; } /** * replace our placeholders */ protected String replaceCustomAndUsernamePassword(String input, TestCaseServiceDTO service) { input = replace("username", 
service.getUsername(), input); input = replace("password", service.getPassword(), input); for (TestCaseCustomParameterDTO custom : service.getCustom()) { input = replace(custom.getKey(), custom.getValue(), input); } return input; } /** * replace #key# in content with decrypted value * @param key key * @param value value (can be encrypted) * @param content content * @return content */ private String replace(String key, String value, String content) { if (value != null) return content.replace("#"+key+"#", decryptPassword(value)); return content.replace("#"+key+"#", ""); } /** * add jwt token if required */ @LogExecutionTime public Map<String,String> addJwt(String testid, TestCaseServiceDTO service, Map<String,String> headerMap, String key, String prefix) { String jwtPost = service.getCustom("jwtpost").getValue(); String jwtParam = service.getCustom("jwtparam").getValue(); String jwtHeader = service.getCustom("jwtheader").getValue(); //replace placeholders like #jwtpassword# and #jwtusername# for (TestCaseCustomParameterDTO custom : service.getCustom()) { jwtPost = replace(custom.getKey(), custom.getValue(), jwtPost); jwtParam = replace(custom.getKey(), custom.getValue(), jwtParam); jwtHeader = replace(custom.getKey(), custom.getValue(), jwtHeader); } //do auth if (jwtPost != null && jwtPost.length() > 0) { //Authorization: "Bearer <insert_your_JWT_here>" String token = jwtReceiver.requestToken(testid, jwtPost, jwtParam, jwtHeader); if (token != null) { headerMap.put(key, prefix + token); } } return headerMap; } /** * add JSON result to random data * @param testid test ID * @param check check * @param randomdata random data * @param response json * @return random data */ @LogExecutionTime public TestCaseRandomDataDTO addCheckResult2Random(String testid, TestCaseCheckDTO check, TestCaseRandomDataDTO randomdata, String response) { if (check.getResult2random() != null && !check.getResult2random().isEmpty()) { //add data using the clean check id randomdata = 
addResult2Random(testid, check.getCheckid(), randomdata, response, check.getResult2random()); } return randomdata; } /** * add JSON result to random data * @param testid test ID * @param randomdata random data * @param response json * @return random data */ @LogExecutionTime public TestCaseRandomDataDTO addInjectResult2Random(String testid, TestCaseInjectionDTO injection, TestCaseRandomDataDTO randomdata, String response) { if (injection.getResult2random() != null && !injection.getResult2random().isEmpty()) { //add data using the inject id randomdata = addResult2Random(testid, injection.getInjectid(), randomdata, response, injection.getResult2random()); } return randomdata; } /** * internally add values to random data * @param testid test ID * @param id check or inject ID * @param randomdatadto random data * @param response json * @return random data */ private TestCaseRandomDataDTO addResult2Random(String testid, String id, TestCaseRandomDataDTO randomdatadto, String response, List<String> attributes) { try { //parse JSON Map<String, Object> jsonMap = openTestingConversion.json2Map(response); randomdatadto = checkRecursive(jsonMap, attributes, randomdatadto, id); } catch (Exception e) { log.warn(testid+"."+id+" cannot add result to random data: "+e.getMessage(), e); } return randomdatadto; } @SuppressWarnings("unchecked") private TestCaseRandomDataDTO checkRecursive(Map<String, Object> jsonMap, List<String> attributes, TestCaseRandomDataDTO randomdatadto, String id) { //add data for (Map.Entry<String, Object> entry : jsonMap.entrySet()) { if (!(entry.getValue() instanceof Map)) { if (attributes.contains(entry.getKey())) { if (randomdatadto == null) { randomdatadto = new TestCaseRandomDataDTO(); randomdatadto.setDatamap(new HashMap<>()); } randomdatadto.getDatamap().put("#"+id + "." 
+ entry.getKey()+"#", "" + entry.getValue()); } } else { //recursion randomdatadto.getDatamap().putAll(checkRecursive((Map<String, Object>)entry.getValue(), attributes, randomdatadto, id).getDatamap()); } } return randomdatadto; } /** * decrypt password */ protected String decryptPassword(String password) { return encryption.decrypt(password); } /** * add failed Connector to cache */ protected void addFailedConnector(String connector) { lock.addLock(connector); } /** * remove failed Connector starting with */ protected void removeFailedConnectorStartingWith(String connectorprefix) { lock.removeLocksStartingWith(connectorprefix); } /** * check if connector already failed */ protected boolean isFailedConnector(String connector) { return lock.isLock(connector); } /** * check adapter result using the validation objects * @return check successful or not * @throws NotFoundException */ protected boolean validateResult(String testid, TestCaseCheckDTO check, TestCaseValidationDTO validation, String result, String logInfo) throws NotFoundException { boolean retvalue = true; switch (validation.getType()) { case TestCaseValidationDTO.TYPE_CONTAINS: if (!validateContains(testid, check, validation, result)) retvalue = false; break; case TestCaseValidationDTO.TYPE_EQUALS: if (!validateEquals(testid, check, validation, result)) retvalue = false; break; case TestCaseValidationDTO.TYPE_CONTAINSNOT: if (!validateContainsNot(testid, check, validation, result)) retvalue = false; break; case TestCaseValidationDTO.TYPE_CONTAINSONEOF: retvalue = false; if (!validateContainsOneOf(testid, check, validation, result)) retvalue = true; break; default: log.warn("unknown type: >"+validation.getType()+"<"); } //add result2random attributes check.setRandomdata(this.addCheckResult2Random(testid, check, check.getRandomdata(), result)); //check failed if (!retvalue) log.info(CHECKFAILED+logInfo); return retvalue; } private boolean validateContains(String testid, TestCaseCheckDTO check, 
TestCaseValidationDTO validation, String result) throws NotFoundException { for (String response : validation.getResponse()) { String expected = this.getFileAndAddTestData(testid, response, check.getRandomdata()); if (!result.contains(expected)) { log.info("validateContains:\n actual: >"+result+"<\n expected: >"+expected+"<"); return false; } } return true; } private boolean validateEquals(String testid, TestCaseCheckDTO check, TestCaseValidationDTO validation, String result) throws NotFoundException { for (String response : validation.getResponse()) { String expected = this.getFileAndAddTestData(testid, response, check.getRandomdata()); if (!result.equals(expected)) { log.info("equals:\n actual: >"+result+"<\n expected: >"+expected+"<"); return false; } } return true; } private boolean validateContainsNot(String testid, TestCaseCheckDTO check, TestCaseValidationDTO validation, String result) throws NotFoundException { for (String response : validation.getResponse()) { String expected = this.getFileAndAddTestData(testid, response, check.getRandomdata()); if (result.contains(expected)) { log.info("contains not:\n actual: >"+result+"<\n expected: >"+expected+"<"); return false; } } return true; } private boolean validateContainsOneOf(String testid, TestCaseCheckDTO check, TestCaseValidationDTO validation, String result) throws NotFoundException { for (String response : validation.getResponse()) { String expected = this.getFileAndAddTestData(testid, response, check.getRandomdata()); if (result.contains(expected)) return true; } return false; } /** * sort validations by order * @return sorted list */ public List<TestCaseValidationDTO> sortValidations(List<TestCaseValidationDTO> input) { Collections.sort(input, (a, b) -> Integer.valueOf(a.getOrder()).compareTo(Integer.valueOf(b.getOrder()))); return input; } }
// Code generated by MockGen. DO NOT EDIT. // Source: txnotice.go // Package p2pmock is a generated GoMock package. package p2pmock import ( types "github.com/Cofresi/aergo/types" gomock "github.com/golang/mock/gomock" reflect "reflect" ) // MockTxNoticeTracer is a mock of TxNoticeTracer interface type MockTxNoticeTracer struct { ctrl *gomock.Controller recorder *MockTxNoticeTracerMockRecorder } // MockTxNoticeTracerMockRecorder is the mock recorder for MockTxNoticeTracer type MockTxNoticeTracerMockRecorder struct { mock *MockTxNoticeTracer } // NewMockTxNoticeTracer creates a new mock instance func NewMockTxNoticeTracer(ctrl *gomock.Controller) *MockTxNoticeTracer { mock := &MockTxNoticeTracer{ctrl: ctrl} mock.recorder = &MockTxNoticeTracerMockRecorder{mock} return mock } // EXPECT returns an object that allows the caller to indicate expected use func (m *MockTxNoticeTracer) EXPECT() *MockTxNoticeTracerMockRecorder { return m.recorder } // RegisterTxNotice mocks base method func (m *MockTxNoticeTracer) RegisterTxNotice(txIDs []types.TxID, cnt int, alreadySent []types.PeerID) { m.ctrl.T.Helper() m.ctrl.Call(m, "RegisterTxNotice", txIDs, cnt) } // RegisterTxNotice indicates an expected call of RegisterTxNotice func (mr *MockTxNoticeTracerMockRecorder) RegisterTxNotice(txIDs, cnt interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RegisterTxNotice", reflect.TypeOf((*MockTxNoticeTracer)(nil).RegisterTxNotice), txIDs, cnt) } // ReportSend mocks base method func (m *MockTxNoticeTracer) ReportSend(txIDs []types.TxID, peerID types.PeerID) { m.ctrl.T.Helper() m.ctrl.Call(m, "ReportSend", txIDs, peerID) } // ReportSend indicates an expected call of ReportSend func (mr *MockTxNoticeTracerMockRecorder) ReportSend(txIDs, peerID interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReportSend", reflect.TypeOf((*MockTxNoticeTracer)(nil).ReportSend), txIDs, peerID) } // 
ReportNotSend mocks base method func (m *MockTxNoticeTracer) ReportNotSend(txIDs []types.TxID, cnt int) { m.ctrl.T.Helper() m.ctrl.Call(m, "ReportNotSend", txIDs, cnt) } // ReportNotSend indicates an expected call of ReportNotSend func (mr *MockTxNoticeTracerMockRecorder) ReportNotSend(txIDs, cnt interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "ReportNotSend", reflect.TypeOf((*MockTxNoticeTracer)(nil).ReportNotSend), txIDs, cnt) }
// type C only

// NOTE(review): every observation field is a two-number tuple; this file does
// not show what the two components mean (commonly observed value plus a
// quality flag) — confirm against the AMeDAS JSON feed before relying on
// either position.

/** Observation fields reported by the minimal (type C) AMeDAS stations. */
export interface AmedasDataMini {
  temp: [number, number];
  snow1h: [number, number];
  snow6h: [number, number];
  snow12h: [number, number];
  snow24h: [number, number];
  sun10m: [number, number];
  sun1h: [number, number];
  precipitation10m: [number, number];
  precipitation1h: [number, number];
  precipitation3h: [number, number];
  precipitation24h: [number, number];
  windDirection: [number, number];
  wind: [number, number];
}

/** Full observation record: the minimal set plus fields richer station types report. */
export interface AmedasData extends AmedasDataMini {
  pressure: [number, number];
  normalPressure: [number, number];
  humidity: [number, number];
  visibility: [number, number];
  snow: [number, number];
  weather: [number, number];
}

/** Static station metadata. */
export interface AmedasInfo {
  type: "A" | "B" | "C" | "D" | "E" | "F"; // station type code
  elems: string;
  lat: [number, number]; // presumably [degrees, minutes] — TODO confirm
  lon: [number, number]; // presumably [degrees, minutes] — TODO confirm
  alt: number;
  kjName: string; // kanji name (inferred from field name; confirm)
  knName: string; // kana name (inferred from field name; confirm)
  enName: string; // English name
}

// export type Amedas = AmedasData & AmedasInfo;

/** A station's metadata together with its current observation record. */
export interface Amedas {
  info: AmedasInfo;
  data: AmedasData;
}
On a Study of Magnetic Force Evaluation by Double-Layer Approach The double-layer approach (DLA) possesses superior features for the analysis of static electromagnetic problems. In this article, dealing with the magnetostatic analysis, we introduce two kinds of double layers: the first one on the surface of the magnetic body and the second one on the cut-surface within the exciting current loop. From the double layers on the cut-surface, we were able to derive a novel, unified exciting potential, which facilitates the treatment of any magnetostatic problem, including multiply connected problems. Furthermore, in this article, we use DLA for the first time for the analysis of magnetic forces acting on the magnetic bodies. The law of action and reaction provides a self-check function within DLA, thus providing a simple and reliable way to confirm that the computed results are adequate and accurate. This improved, DLA-based approach for the force analysis, together with the introduced “action-reaction” principle for the self-checking of the force calculation, is, to the authors’ best knowledge, not known in the scientific community so far. Finally, we would like to emphasize that the presented DLA enables accurate, self-checked, generic analysis of real-world devices, providing advanced treatment of both geometrical singularities (edges, corners) and materials used (high permeability, nonlinearities).
/**
 * Determine whether the stream appears to contain zip data by inspecting
 * its magic number only; the remainder of the stream is not validated.
 * The stream is marked and reset, so its position is unchanged on return.
 *
 * @param in a stream open on possible zip file content
 * @throws IOException if reading from the stream fails
 */
public static boolean isZipFile(BufferedInputStream in) throws IOException {
  final byte[] header = new byte[4];
  in.mark(header.length);
  StreamUtil.readBytes(in, header, header.length);
  final long magic = ByteArray.decodeLong(header);
  in.reset();
  return magic == ZIP_MAGIC;
}
#include <iostream> #include <algorithm> #include <vector> #include <iomanip> using namespace std; #define ll long long int #define vi vector<int> #define vii vector<vi > #define foi(x) for(int i=0;i<x;i++) vii vec; vi cnt; vector<double> res; void calc_child(int x) { int ans=0; foi(vec[x].size()) { calc_child(vec[x][i]); ans+=cnt[vec[x][i]]; } ans+=1; cnt[x]=ans; //cout<<x<<" "<<cnt[x]<<endl; return; } void calc_tim(int x) { double tot_chld=0; foi(vec[x].size()) { tot_chld+=cnt[vec[x][i]]; } foi(vec[x].size()) { res[vec[x][i]]=1.0+res[x]+double(tot_chld-cnt[vec[x][i]])/2.0; calc_tim(vec[x][i]); } return; } int main() { int n,var; cin>>n; vec.resize(n+1); res.resize(n+1); cnt.resize(n+1); for(int i=2;i<=n;i++) { cin>>var; vec[var].push_back(i); } calc_child(1); res[1]=1.0; for(int i=2;i<=n;i++) { res[i]=0.0; } calc_tim(1); for(int i=1;i<=n;i++) { cout<<fixed<<setprecision(8)<<res[i]<<" "; } cout<<endl; }
package main

import (
	"io/ioutil"
	"log"
	"os"

	"github.com/james-lawrence/bw/agent/acme"
	"github.com/james-lawrence/bw/internal/x/protox"
)

// main bundles an ACME challenge response from files on disk:
// args[1] private key, args[2] certificate, args[3] authority certificate,
// args[4] output path for the encoded ChallengeResponse message.
func main() {
	log.Println("args", os.Args)

	priv, err := ioutil.ReadFile(os.Args[1])
	if err != nil {
		// include the underlying error so the cause (missing file,
		// permissions, ...) is visible, not just the path.
		log.Fatalln("failed to read private key", os.Args[1], err)
	}

	cert, err := ioutil.ReadFile(os.Args[2])
	if err != nil {
		log.Fatalln("failed to read cert", os.Args[2], err)
	}

	auth, err := ioutil.ReadFile(os.Args[3])
	if err != nil {
		log.Fatalln("failed to read authority", os.Args[3], err)
	}

	challenge := &acme.ChallengeResponse{
		Private:     priv,
		Certificate: cert,
		Authority:   auth,
	}

	protox.WriteFile(os.Args[4], 0600, challenge)
}
<filename>FormularServer/src/main/java/io/github/formular_team/formular/core/geom/FrenetFrames.java
/*
 * Copyright 2012 <NAME>, <EMAIL>
 *
 * This file is part of Parallax project.
 *
 * Parallax is free software: you can redistribute it and/or modify it
 * under the terms of the Creative Commons Attribution 3.0 Unported License.
 *
 * Parallax is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the Creative Commons Attribution
 * 3.0 Unported License. for more details.
 *
 * You should have received a copy of the the Creative Commons Attribution
 * 3.0 Unported License along with Parallax.
 * If not, see http://creativecommons.org/licenses/by/3.0/.
 */
package io.github.formular_team.formular.core.geom;

import java.util.ArrayList;
import java.util.List;

import io.github.formular_team.formular.core.math.curve.Curve;
import io.github.formular_team.formular.core.math.Matrix4;
import io.github.formular_team.formular.core.math.Mth;
import io.github.formular_team.formular.core.math.Vector3;

/**
 * Computes a moving frame (tangent, normal, binormal) at {@code segments + 1}
 * evenly spaced parameter values along a {@link Curve}. Normals are propagated
 * from one sample to the next by rotating the previous normal about the axis
 * where consecutive tangents differ, which keeps the frame slowly varying.
 */
public class FrenetFrames {
    /** Threshold below which a rotation axis / angle is treated as degenerate. */
    private static final float EPSILON = 0.0001F;

    // One entry per sample point, indices 0..segments, parallel lists.
    private final List<Vector3> tangents;

    private final List<Vector3> normals;

    private final List<Vector3> binormals;

    // Curve the frames are computed along.
    private final Curve path;

    /**
     * Build the frames.
     *
     * @param path the curve to sample
     * @param segments number of segments; {@code segments + 1} frames are produced
     * @param closed if true, post-rotate the frames so the first and last normals match
     */
    public FrenetFrames(final Curve path, final int segments, final boolean closed) {
        this.path = path;
        this.tangents = new ArrayList<>();
        this.normals = new ArrayList<>();
        this.binormals = new ArrayList<>();
        final Matrix4 mat = new Matrix4();

        // compute the tangent vectors for each segment on the path
        for (int i = 0; i <= segments; i++) {
            final float u = i / (float) segments;
            final Vector3 vec = (Vector3) path.getTangentAt(u);
            this.tangents.add(vec.normalize());
        }

        // seed normals[0] / binormals[0] from the first tangent
        this.initialNormal3();

        final Vector3 vec = new Vector3();

        // compute the slowly-varying normal and binormal vectors for each segment on the path:
        // copy the previous frame, then rotate the normal about the axis between
        // consecutive tangents by the angle between them (skipped when the
        // tangents are nearly parallel, i.e. cross product below EPSILON).
        for (int i = 1; i <= segments; i++) {
            this.normals.add(this.normals.get(i - 1).copy());
            this.binormals.add(this.binormals.get(i - 1).copy());
            vec.cross(this.tangents.get(i - 1), this.tangents.get(i));
            if (vec.length() > EPSILON) {
                vec.normalize();
                // clamp the dot product to 1 so acos never sees a value
                // slightly above 1 from floating point error
                final float aCos = this.tangents.get(i - 1).dot(this.tangents.get(i));
                final float theta = Mth.acos(Math.min(1.0F, aCos));
                this.normals.get(i).apply(mat.makeRotationAxis(vec, theta));
            }
            this.binormals.get(i).cross(this.tangents.get(i), this.normals.get(i));
        }

        // if the curve is closed, post-process the vectors so the first and last normal vectors are the same:
        // distribute the residual twist angle linearly over all frames.
        if (closed) {
            float theta = Mth.acos(this.normals.get(0).dot(this.normals.get(segments))) / segments;
            if (theta > EPSILON) {
                if (this.tangents.get(0).dot(vec.cross(this.normals.get(0), this.normals.get(segments))) > 0) {
                    theta = -theta;
                }
                for (int i = 1; i <= segments; i++) {
                    this.normals.get(i).apply(mat.makeRotationAxis(this.tangents.get(i), theta * i));
                    this.binormals.get(i).cross(this.tangents.get(i), this.normals.get(i));
                }
            }
        }
    }

    /** @return one unit tangent per sample point */
    public List<Vector3> getTangents() {
        return this.tangents;
    }

    /** @return one normal per sample point */
    public List<Vector3> getNormals() {
        return this.normals;
    }

    /** @return one binormal per sample point */
    public List<Vector3> getBinormals() {
        return this.binormals;
    }

    // Alternative seeding strategy using a default "up" binormal.
    // Not called by the constructor (which uses initialNormal3).
    private void initialNormal1() {
        this.initialNormal1(new Vector3(0.0F, 0.0F, 1.0F));
    }

    // Fixed start binormal. Degenerates to zero-length vectors when
    // lastBinormal is parallel to the first tangent (cross product vanishes).
    private void initialNormal1(final Vector3 lastBinormal) {
        this.normals.add(new Vector3());
        this.binormals.add(new Vector3());
        this.normals.get(0).cross(lastBinormal, this.tangents.get(0)).normalize();
        this.binormals.get(0).cross(this.tangents.get(0), this.normals.get(0)).normalize();
    }

    // Alternative seeding strategy; also not called by the constructor.
    private void initialNormal2() {
        // This uses the Frenet-Serret formula for deriving binormal
        final Vector3 t2 = (Vector3) this.path.getTangentAt(EPSILON);
        this.normals.add(new Vector3().sub(t2, this.tangents.get(0)).normalize());
        this.binormals.add(new Vector3().cross(this.tangents.get(0), this.normals.get(0)));

        this.normals.get(0).cross(this.binormals.get(0), this.tangents.get(0)).normalize(); // last binormal x tangent
        this.binormals.get(0).cross(this.tangents.get(0), this.normals.get(0)).normalize();
    }

    /*
     * select an initial normal vector perpendicular to the first tangent vector,
     * and in the direction of the smallest tangent xyz component
     */
    private void initialNormal3() {
        this.normals.add(0, new Vector3());
        this.binormals.add(0, new Vector3());
        float smallest = Float.MAX_VALUE;

        final float tx = Math.abs(this.tangents.get(0).getX());
        final float ty = Math.abs(this.tangents.get(0).getY());
        final float tz = Math.abs(this.tangents.get(0).getZ());

        // pick the axis along which the tangent has the smallest component;
        // crossing with it gives the most numerically stable perpendicular
        final Vector3 normal = new Vector3();
        if (tx <= smallest) {
            smallest = tx;
            normal.set(1.0F, 0.0F, 0.0F);
        }

        if (ty <= smallest) {
            smallest = ty;
            normal.set(0.0F, 1.0F, 0.0F);
        }

        if (tz <= smallest) {
            normal.set(0.0F, 0.0F, 1.0F);
        }

        final Vector3 vec = new Vector3();
        vec.cross(this.tangents.get(0), normal).normalize();

        this.normals.get(0).cross(this.tangents.get(0), vec);
        this.binormals.get(0).cross(this.tangents.get(0), this.normals.get(0));
    }
}
<gh_stars>1-10 package geodbtools import ( "bytes" "testing" "github.com/golang/mock/gomock" "github.com/stretchr/testify/assert" ) //go:generate mockgen -package geodbtools -self_package github.com/anexia-it/geodbtools -destination mock_format_test.go github.com/anexia-it/geodbtools Format func TestRegisterFormat(t *testing.T) { t.Run("IsRegistered", func(t *testing.T) { formatRegistryMu.Lock() originalFormatRegistry := formatRegistry formatRegistry = make(map[string]Format) formatRegistryMu.Unlock() defer func() { formatRegistryMu.Lock() defer formatRegistryMu.Unlock() formatRegistry = originalFormatRegistry }() ctrl := gomock.NewController(t) defer ctrl.Finish() existingFormat := NewMockFormat(ctrl) formatRegistry["test"] = existingFormat newFormat := NewMockFormat(ctrl) newFormat.EXPECT().FormatName().Return("test") err := RegisterFormat(newFormat) assert.EqualError(t, err, ErrFormatIsRegistered.Error()) assert.Len(t, formatRegistry, 1) assert.EqualValues(t, existingFormat, formatRegistry["test"]) }) t.Run("OK", func(t *testing.T) { formatRegistryMu.Lock() originalFormatRegistry := formatRegistry formatRegistry = make(map[string]Format) formatRegistryMu.Unlock() defer func() { formatRegistryMu.Lock() defer formatRegistryMu.Unlock() formatRegistry = originalFormatRegistry }() ctrl := gomock.NewController(t) defer ctrl.Finish() newFormat := NewMockFormat(ctrl) newFormat.EXPECT().FormatName().Return("test") err := RegisterFormat(newFormat) assert.NoError(t, err) assert.Len(t, formatRegistry, 1) assert.EqualValues(t, newFormat, formatRegistry["test"]) }) } func TestMustRegisterFormat(t *testing.T) { t.Run("IsRegistered", func(t *testing.T) { formatRegistryMu.Lock() originalFormatRegistry := formatRegistry formatRegistry = make(map[string]Format) formatRegistryMu.Unlock() defer func() { formatRegistryMu.Lock() defer formatRegistryMu.Unlock() formatRegistry = originalFormatRegistry }() ctrl := gomock.NewController(t) defer ctrl.Finish() existingFormat := 
NewMockFormat(ctrl) formatRegistry["test"] = existingFormat newFormat := NewMockFormat(ctrl) newFormat.EXPECT().FormatName().Return("test") assert.PanicsWithValue(t, ErrFormatIsRegistered, func() { MustRegisterFormat(newFormat) }) assert.Len(t, formatRegistry, 1) assert.EqualValues(t, existingFormat, formatRegistry["test"]) }) t.Run("OK", func(t *testing.T) { formatRegistryMu.Lock() originalFormatRegistry := formatRegistry formatRegistry = make(map[string]Format) formatRegistryMu.Unlock() defer func() { formatRegistryMu.Lock() defer formatRegistryMu.Unlock() formatRegistry = originalFormatRegistry }() ctrl := gomock.NewController(t) defer ctrl.Finish() newFormat := NewMockFormat(ctrl) newFormat.EXPECT().FormatName().Return("test") assert.NotPanics(t, func() { MustRegisterFormat(newFormat) }) assert.Len(t, formatRegistry, 1) assert.EqualValues(t, newFormat, formatRegistry["test"]) }) } func TestFormatNames(t *testing.T) { formatRegistryMu.Lock() originalFormatRegistry := formatRegistry formatRegistry = make(map[string]Format) formatRegistryMu.Unlock() defer func() { formatRegistryMu.Lock() defer formatRegistryMu.Unlock() formatRegistry = originalFormatRegistry }() ctrl := gomock.NewController(t) defer ctrl.Finish() testFormat := NewMockFormat(ctrl) formatRegistry = map[string]Format{ "a": testFormat, "b": testFormat, "c": testFormat, } assert.EqualValues(t, []string{"a", "b", "c"}, FormatNames()) } func TestLookupFormat(t *testing.T) { t.Run("NotFound", func(t *testing.T) { formatRegistryMu.Lock() originalFormatRegistry := formatRegistry formatRegistry = make(map[string]Format) formatRegistryMu.Unlock() defer func() { formatRegistryMu.Lock() defer formatRegistryMu.Unlock() formatRegistry = originalFormatRegistry }() ctrl := gomock.NewController(t) defer ctrl.Finish() test1Format := NewMockFormat(ctrl) formatRegistry["test1"] = test1Format format, err := LookupFormat("test0") assert.Nil(t, format) assert.EqualError(t, err, ErrFormatNotFound.Error()) }) t.Run("OK", 
func(t *testing.T) { formatRegistryMu.Lock() originalFormatRegistry := formatRegistry formatRegistry = make(map[string]Format) formatRegistryMu.Unlock() defer func() { formatRegistryMu.Lock() defer formatRegistryMu.Unlock() formatRegistry = originalFormatRegistry }() ctrl := gomock.NewController(t) defer ctrl.Finish() test0Format := NewMockFormat(ctrl) test0Format.EXPECT().FormatName().Return("test0") formatRegistry["test0"] = test0Format test1Format := NewMockFormat(ctrl) formatRegistry["test1"] = test1Format format, err := LookupFormat("test0") assert.NoError(t, err) if assert.NotNil(t, format) { assert.EqualValues(t, test0Format, format) assert.EqualValues(t, "test0", format.FormatName()) } }) } func TestDetectFormat(t *testing.T) { t.Run("NotFound", func(t *testing.T) { formatRegistryMu.Lock() originalFormatRegistry := formatRegistry formatRegistry = make(map[string]Format) formatRegistryMu.Unlock() defer func() { formatRegistryMu.Lock() defer formatRegistryMu.Unlock() formatRegistry = originalFormatRegistry }() ctrl := gomock.NewController(t) defer ctrl.Finish() r := NewReaderSourceWrapper(bytes.NewReader([]byte{0x0}), 1) test0Format := NewMockFormat(ctrl) test0Format.EXPECT().DetectFormat(r).Return(false) formatRegistry["test0"] = test0Format test1Format := NewMockFormat(ctrl) test1Format.EXPECT().DetectFormat(r).Return(false) formatRegistry["test1"] = test1Format format, err := DetectFormat(r) assert.Nil(t, format) assert.EqualError(t, err, ErrFormatNotFound.Error()) }) t.Run("OK", func(t *testing.T) { formatRegistryMu.Lock() originalFormatRegistry := formatRegistry formatRegistry = make(map[string]Format) formatRegistryMu.Unlock() defer func() { formatRegistryMu.Lock() defer formatRegistryMu.Unlock() formatRegistry = originalFormatRegistry }() ctrl := gomock.NewController(t) defer ctrl.Finish() r := NewReaderSourceWrapper(bytes.NewReader([]byte{0x0}), 1) test0Format := NewMockFormat(ctrl) test0Format.EXPECT().DetectFormat(r).Return(true).AnyTimes() 
test0Format.EXPECT().FormatName().Return("test0") formatRegistry["test0"] = test0Format test1Format := NewMockFormat(ctrl) test1Format.EXPECT().DetectFormat(r).Return(false).AnyTimes() formatRegistry["test1"] = test1Format format, err := DetectFormat(r) assert.NoError(t, err) if assert.NotNil(t, format) { assert.EqualValues(t, test0Format, format) assert.EqualValues(t, "test0", format.FormatName()) } }) }
#include<bits/stdc++.h>
using namespace std;

// Read n numbers and print the 1-based position of the single number whose
// parity differs from all the others (the minority parity marks the outlier;
// like the original, the last matching position wins — there is only one).
int main(){
    int n;
    cin>>n;
    vector<int> values(n);
    for(int &v : values) cin>>v;

    int evens=0;
    for(int v : values)
        if(v%2==0) ++evens;
    const int odds=n-evens;

    // If evens dominate, the outlier is odd; otherwise it is even.
    const bool wantOdd = evens>odds;
    int position=0;
    for(int i=0;i<n;i++){
        const bool isOdd = values[i]%2!=0;
        if(isOdd==wantOdd) position=i;
    }
    cout<<position+1<<endl;
}
# Codeforces "Presents": friend i gave a gift to p[i]; every friend received
# exactly one gift, so p is a permutation of 1..n. For each friend we must
# print who gave *them* a gift — i.e. the inverse permutation.


def invert_permutation(gift_to):
    """Return the inverse of a 1-based permutation given as a list.

    ``gift_to[i]`` is the friend (1-based) that friend ``i + 1`` gave a gift
    to; entry ``j`` of the result is the friend who gave a gift to friend
    ``j + 1``.
    """
    inverse = [0] * len(gift_to)
    # Single O(n) pass instead of the original O(n^2) list.index() per friend.
    for giver, receiver in enumerate(gift_to, start=1):
        inverse[receiver - 1] = giver
    return inverse


if __name__ == "__main__":
    input()  # n is implied by the length of the permutation line
    gift_to = list(map(int, input().split()))
    print(' '.join(map(str, invert_permutation(gift_to))))
The row over housing benefit has led to warnings of "social cleansing". But can those on low incomes really have an entitlement to stay in expensive localities? They are postcodes synonymous with wealth and aspiration; the kind of districts that attract estate agents, upmarket retail chains and endless TV property shows. They are also the places that many low-income families call home. Some of these people might be long-term residents of places like London's Islington and Notting Hill that were, within living memory, down-at-heel, but have since gentrified beyond all recognition. Others might live in social housing adjoining wealthy areas - like the Dumbiedykes estate in Edinburgh, which shares a postcode with the Palace of Holyroodhouse, the Queen's official residence in Scotland. 'Let me stay' Job-hunting IT worker Christian Romane, 53, lives in a bedsit in leafy Earl's Court, west London, with £125 a week in housing benefits At the moment I spend 40 hours a week looking for work, but if these changes go through that would stop. To make up the shortfall in rent I'd have to cancel my broadband so it would be harder to search for jobs and keep up my IT skills. I have no other spare funds - as it is I get by on one meal a day right now. I could move further out of London, but most of the work I'm looking for is in the city and the increased transport costs mean I'd be no better off. I've lived here for 20 years, this is my home. It doesn't seem fair that I could be thrown out because of a political decision. Or they could be private tenants claiming Local Housing Allowance (LHA) based on the local average market rates, rising as high as £2,000 a week for a five-bedroom house. 
Whether they are claiming housing benefit because they are pensioners, low-waged, unemployed or facing long-term health problems, their presence in well-to-do districts might, to foreigners, seem incongruous in a country widely noted abroad for its preoccupation with class distinctions and social status. Yet for all its clearly-defined hierarchies, within a city like London the rich and poor still co-exist in relative proximity compared with somewhere like Paris, with its plush inner districts ringed by notorious banlieues. Now this balance is at the fulcrum of the row over government plans to cap housing benefit at £400-a-week for the largest homes or £290-a-week for two-bed flats. In addition, LHA will be based on the cheapest third of local rents rather than the market average. It is a move the government insists is right and necessary. Prime Minister David Cameron has told MPs it was unfair that middle-income Britons were "working hard to give benefits so people can live in homes they couldn't even dream of". Ministers have further appealed to voters' sense of justice by insisting that claimants will still be able to receive a maximum of £21,000 a year - more, they say, than most working families have to spend on their housing costs. Yet opponents from across the political spectrum say the policy ignores the huge disparities in housing costs across the country and thousands will be displaced from their homes and communities - or "sociologically cleansed", as Labour's Chris Bryant has described the process. You can talk about your right to live in the community where you grew up, but where do you get the right to spend other people's money? Shaun Bailey In the capital, where councils have warned that up to 82,000 people could lose their homes, the Conservative Mayor, Boris Johnson, said he would "emphatically resist any attempt to recreate a London where the rich and poor cannot live together". 
In essence, the debate can be boiled down to a philosophical question: do the poor have the right to live in areas they could not otherwise afford? Shaun Bailey is one government supporter who believes they do not. Having grown up in a working class single-parent household in London's North Kensington - a once-deprived area which has since become fashionable - the former Conservative candidate believes it is unfair that middle-income couples find themselves commuting from the capital's outer reaches because of high housing costs while the poor have their rents in prime locations guaranteed. "You can talk about your right to live in the community where you grew up, but where do you get the right to spend other people's money? I'd love to live in Buckingham Palace but I can't afford it," he adds. "The current system only suits private landlords, who do very well out of housing benefit, and the liberal left, who want poor people ghettoised in the inner cities for their votes. "The flipside of having a right to stay somewhere is that people aren't prepared to move around. The middle class have always been prepared to go all over the country to find work." It is a provocative position, but one which appears to enjoy public sympathy. A poll by YouGov for the Sunday Times at the end of October found that 72% of people supported the planned cap. Such sentiments have been fuelled by well-publicised cases such as that of Abdi Nur, an unemployed bus conductor who decided he didn't like his taxpayer-funded home in Kensal Rise, north London, and so signed a £2,000-a-week lease for a £2.1m townhouse in Notting Hill, and presented the local council with the bill. 
No 'right' to high rents Alex Morton, research fellow, Policy Exchange, a centre-right think tank It would be impossible to provide everyone with a house in a desirable area, so any "right" for particular individuals would be based on government arbitrarily selecting certain people and taxing everyone else to pay the high costs necessary. It would mean treating one group of people much better than everyone else, which is why it offends a basic sense of fairness, and why a majority of voters across all the parties support the £400-a-week cap on housing benefit. Government should instead focus on improving the number of desirable areas to live, through better policies on schools, housing and planning, and policing, not take up time and energy working out how to select some lucky individuals who then receive overwhelming individual subsidy. Nonetheless, opponents of the reforms insist such cases are extremely rare, and that it is not the feckless and work-shy who will lose out - according to the homelessness charity Crisis, more LHA claimants are in low-paid work (26%) than are unemployed (22%). At the same time, it adds, some 1.6 million people receiving housing benefit are pensioners while many others are disabled or are carers. Additionally, the recent past offers warnings about what happens when the urban poor are displaced from their communities, Lynsey Hanley, author of Estates: An Intimate History, argues. Ms Hanley, who herself grew up on a council estate on the edge of Birmingham, has chronicled the ghettoisation, social breakdown and increased pressure on services that resulted from moving the working class to peripheral housing schemes. Gentrification has caused many low-income households to suffer, she argues, pricing them out of communities that they once called their own. And she argues the poor have every right to live in wealthy areas - because the wealthy rely on them more than they admit. 
"Thousands of people working in cleaning, catering and retail earn the minimum wage and can't live in cities without housing benefit, but without their labour places like London would stop functioning altogether," she says. "If you take away housing benefit and shift them out, this country's high transport costs mean they'll have no incentive to come into our cities to work. "What I'd say to David Cameron is: come back to me when the minimum wage is £12 an hour." There may have been murmurings of discontent from within the coalition benches, but whether or not the housing benefit reforms go through - and Mr Cameron insists they will - the social balance of the UK's communities looks set to change regardless. In a little-reported development, LHA rates will be linked to the Consumer Price Index (CPI) from 2013/14 as a result of June's budget. According to Roger Harding, head of policy, research and public affairs at the housing charity Shelter, once inflation takes its toll this will drastically reduce the benefit's ability to keep up with rises in accommodation costs. "Over a period of 10 years it's going to change the fundamental value of housing benefit," he says. "That will be the most dramatic development in housing policy we've witnessed for years." Whatever the philosophical arguments for and against, the social composition of many areas looks set to transform. Whether that is right or wrong will be for voters - and history - to decide.
Sorafenib-Related Adverse Events in Predicting the Early Radiologic Responses of Hepatocellular Carcinoma Background Hepatocellular carcinoma (HCC) has a poor prognosis with low chemotherapeutic efficiency to medications except to sorafenib. Previous studies showed that adverse events (AEs) of sorafenib can predict therapy efficacy to HCC. The aim of the study is to evaluate the early efficacy and AEs of sorafenib therapy. Methods The database of HCC patients receiving sorafenib at Taichung Veterans General Hospital during the period from June 2012 to October 2016 was analyzed. All HCC cases were Barcelona Clinic Liver Cancer (BCLC) classification stage C. The early efficacy of sorafenib was classified according to the mRECIST criteria as either partial response (PR), stable disease (SD) or progressive disease (PD). Responses were recorded within 6 weeks after the start of sorafenib treatment. AEs were defined as the appearance of hand-foot skin reaction (HFSR), hypertension (HTN) and diarrhea. Exclusion criteria were poor performance status, poor drug compliance, discontinued follow-up or mortality occurring within 1 day after medication. Results From a total of 222 subjects, eight cases (3.6%) were classified as PR, 82 cases (36.9%) SD, and 132 cases (59.5%) PD. The PR group had the highest ratio of HFSR (62.4%) and hypertension (37.5%). Pooling cases of PR and SD together, the presence of HFSR adjusted odd ratio (aOR) 2.80, 95% confidence interval (CI) 1.52 - 5.16) and diarrhea (aOR 3.42, 95% CI 1.67 - 7.01) were good predictors of favorable responses to sorafenib therapy. Conclusions HFSR and diarrhea are good predictors of early therapy efficacy to the sorafenib treatment. Introduction Hepatocellular carcinoma (HCC) is the commonest primary liver cancer. 
The Barcelona Clinic Liver Cancer staging system (BCLC) is widely used for selecting its treatment, which is determined collectively by the tumor characteristics, such as size, number, presence of vascular invasion or extrahepatic metastasis, and the hepatic function and performance status of the patient . Advanced HCC, such as BCLC stage C, is typically treated with sorafenib, which is an orally administered inhibitor of multiple protein kinases (such as c-Raf, B-Raf, mitogen-activated protein kinase kinase, extracellular signal regulated kinase, and vascular endothelial growth factor) . The phase III SHARP trial and the Asia-Pacific trial regarding sorafenib treatment of advanced HCC patients both reported improvements compared with placebo in terms of their median overall survival (OS) and time to progression (TTP) . Although sorafenib is currently the recommended first-line medication for patients with BCLC stage C HCC, a substantial number of patients (with a disease-control rate as high as 43%) fail to respond to sorafenib . Good predictive factors for the sorafenib efficacy remain unclear. Analyses of serological markers in patients participating in the SHARP trial showed that the serum concentrations of vascular endothelial growth factor (VEGF) and angiopoietin-2 are good predictors of the patient survival, although not good predictors of the response to treatment . On the contrary, some studies on HCC patients reported that the adverse events (AEs) of sorafenib treatment, such as diarrhea, fatigue, anorexia, HTN and dermatological toxicities, mainly HFSR, may predict the efficacy of medication . The aim of the present study is to determine what factors can influence the efficacy of sorafenib in terms of the occurrence of common AEs like HFSR, hypertension and diarrhea.
HCC was diagnosed according to the American Association for the Study of Liver Disease (AASLD) guideline . All cases were stage C HCC determined with the BCLC classification. Data of the enrolled patients were collected. According to the mRECIST criteria , the early radiologic efficacy of sorafenib was classified as complete response (CR), partial response (PR), stable disease (SD) and progressive disease (PD), as observed within a period of 6 weeks after the beginning of medication. CR was defined as disappearance of all HCC lesions; PR is defined as at least a 30% decrease in the sum of the diameters of viable HCC lesions; SD is defined as any cases that do not qualify for either PR or PD; PD is defined as an increase of at least 20% in the sum of the diameters of viable HCC lesions. AEs included the appearance of HFSR, HTN, or diarrhea. The AE of HTN is defined as new-onset HTN (> 140/90 mm Hg) in the cases without underlying HTN, or a further increase in blood pressure that required additional medications in those with underlying HTN. The associations between AEs and the efficacy of sorafenib were analyzed. Data were expressed as mean ± standard deviation for each of the measured parameters. Gender and positive ratio of each stratified group were expressed as the percentage of total patient number. Statistical comparisons were made using Pearson's Chi-square test to determine the effects of gender and positive ratio of each stratified group. Independent t-test was used to analyze age, serum bilirubin, ALT, AFP and daily sorafenib dosage. Statistical significance was set at P < 0.05. Multivariate Cox's regression was used to determine the strength of association between the individuals with each sorafenib-associated AE and the sorafenib efficacy, as shown by odds ratios (OR) with 95% confidence interval (CI). Results From a total of 222 enrolled subjects, eight (3.6%) of them belonging to group PR, 82 (36.9%) belonging to group SD, and 132 (59.5%) belonging to group PD. 
No case was classified as radiologic CR. The general data of each group are listed in Table 1. Their average ages (67.00 vs. 65.27 vs. 63.80 years, P = 0.355) were similar, so were their gender distributions (male ratios: 87.5% vs. 82.9% vs. 86.4%, P = 0.775). The ratio of PVT was as follows, group PR: 25.0%, group SD: 54.9%, and group PD: 59.8%. Their extra-hepatic metastasis was group PR: 75.0%, group SD: 53.7%, and group PD: 51.5%. The occurrence of HBV was group PR: 37.5%, group SD: 45.1%, and group PD: 50.8%. Their HCV was group PR: 75.0%, group SD: 43.9%, and group PD: 37.1%. These intergroup differences were not statistically significant. The levels of serum bilirubin and ALT were similar across groups. The PD group showed the significantly highest average serum level of AFP (24.52 × 10^4 ng/mL in PD group vs. 3.11 × 10^4 ng/mL in PR group vs. 7.76 × 10^4 ng/mL in SD group, P = 0.012). The average daily dosage of sorafenib was 3.50 × 200 mg in group PR, 3.48 × 200 mg in group SD, and 3.29 × 200 mg in group PD, with no significant differences across groups (P = 0.745). AEs detected in each group are shown in Table 2. Among all subjects, 78 cases (36.9%) had HFSR, 23 cases (10.4%) had HTN and 51 cases (23.0%) had diarrhea. The PR group had the highest and significant ratio of HFSR (62.4% in PR group vs. 48.8% in SD group vs. 25.0% in PD group, P = 0.001) and HTN (37.5% in PR group vs. 13.4% in SD group vs. 6.9% in PD group, P = 0.025), compared with other groups. On the contrary, the SD group had the highest and significant ratio of diarrhea (34.6% in SD group vs. 25.0% in PR group vs. 15.9% in PD group, P = 0.004). Discussion Sorafenib is an orally active multikinase inhibitor that is known to prolong OS and TTP in patients with advanced HCC . Common sorafenib-related AEs are diarrhea, fatigue, anorexia, HTN and dermatological toxicities, mainly HFSR. 
In the SHARP trial, the overall incidence of treatment-related AEs is 80% in the sorafenib group (versus 52% in the placebo group), with serious AEs of 52% in the treated group (versus 54% in the placebo group). The grade 3 treatment-related AEs are more frequent in the sorafenib group: including diarrhea (8%), HFSR (8%) and HTN (2%) . In the Asia-Pacific trial, the overall incidence of treatment-related AEs is 81.9% in the sorafenib group (versus 38.7% in the placebo group), and the most frequent grade 3/4 drug-related AEs in the sorafenib group are HFSR (10.7%), diarrhea (6.0%) and fatigue (3.4%) . The percentages of AEs in our sorafenib-treated groups were: 36.9% HFSR, 10.4% HTN and 23.0% diarrhea. These proportions are higher than those reported in the above mentioned clinical trials. The discrepancy in results might be due to that in our study we adopted the self-reported design and analyzed all grade AEs. HFSR is characterized by erythema, dysesthesia or paresthesia on the palms and soles, together with rash. This host of symptoms suggested the involvement of inhibition in one or more of these receptors/pathways (such as VEGFR, platelet derived growth factor receptor (PDGFR), c-KIT or FLT-3) in the development of HFSR . Since sorafenib leads to tu- mor vessel regression which could inhibit endothelial cells, the capillary endothelium might be the first target in HFSR development . In a previous study in Japan, patients who had developed HFSR (62%) survive significantly longer than those without cutaneous AEs (OR 0.449, 95% CI 0.256 -0.786, P = 0.005) . In a Korean retrospective study of 99 patients with advanced HCC (BCLC stage C), the presence of HFSR is predictive of a longer TTP (OR 0.40, 95% CI 0.19 -0.82, P = 0.007) and better OS (OR 0.40; 95% CI 0.24 -0.67; P = 0.001) . These above findings are however in contradiction to other studies that reported that HFSR has no prognostic significance . 
For our patients, the adjusted OR for PR was 5.76 (95% CI 1.19 - 27.88), and for SD was 2.60 (95% CI 1.39 - 4.87). Those with HFSR had therefore better sorafenib efficacy compared with those without, when judged based on a short-term radiologic presentation. Arterial HTN is considered a class-specific toxicity of antiangiogenic treatments. Impaired angiogenesis could result in fewer microvessels, and endothelial dysfunctions due to reduced nitric oxide production and the activation of the endothelin-1 system, which is a potent vasoconstrictor . One study enrolled 41 patients with advanced HCC who received sorafenib, and found significantly longer OS in patients with HTN regardless of its grade than in patients without HTN during treatment (median OS 18.2 vs. 4.5 months, P = 0.016) . Another study enrolling 38 patients with advanced HCC disclosed that HTN, which occurred within the first 2 weeks following the start of sorafenib treatment, is correlated with a better TTP (153 vs. 50.5 days, P = 0.017) and OS (1,329 vs. 302 days, P = 0.003) . However, another study concluded on the contrary that treatment-related HTN shows no correlation with clinical outcomes . Our results showed that the presentation of HTN was correlated with the responses in the PR group (adjusted OR 7.68, 95% CI 1.50 - 39.23), but not in the SD group (adjusted OR 2.02, 95% CI 0.76 - 5.39). When cases from the PR and SD groups were combined for analysis, HTN showed no prognostic significance (adjusted OR 2.48, 95% CI 0.98 - 6.29). As VEGF plays a role in maintaining the normal adult vasculature, inhibiting VEGFR with sorafenib could cause diarrhoea via reduction of capillary networks in the intestinal villi . Other hypotheses speculate that sorafenib causes diarrhoea by inducing pancreatic exocrine dysfunction, since VEGFR inhibitors reduce the density of the capillaries in pancreatic islets and decrease zymogen granules . 
One retrospective study enrolling 112 patients with advanced HCC reported through multivariate Cox regression analysis, that diarrhea is an independent positive prognostic factor (OR 0.41, P = 0.001) and those cases with diarrhea have a significantly longer median OS than those without (14.1 vs. 7.1 months, P = 0.011) . Another sorafenib prospective study on 46 patients with advanced HCC reported that subjects with grade 2/3 diarrhoea developed at any stage during treatment (41%, n = 19) have longer OS compared to those without (P = 0.009) . Our results showed that the presentation of diarrhea was correlated with sorafenib treatment efficacy in the SD group (adjusted OR 3.54, 95% CI 1.70 -7.40), but not in the PR group (adjusted OR 2.02, 95% CI 0.36 -11.42 ). When cases from the PR and SD groups were pooled for analysis, diarrhea still showed significant correlation with sorafenib treatment efficacy (adjusted OR 3.42, 95% CI 1.67 -7.01 ). Apart from HCC, a similar correlation between the development of AEs and treatment efficacy has been reported in breast cancer patients treated with endocrine therapy as well as in non-small cell lung cancer patients treated with chemotherapy with or without VEGF inhibitors . The availability of reliable predictive biomarkers would help identifying which individuals are likely to benefit from antitumoral treatment and to minimize unnecessary toxicity in potentially resistant subjects. In this study on HCC, knowledge on the development of AEs in patients as a surrogate marker of sorafenib efficacy is clinically relevant. Here, we found that the incidences of HFRS and diarrhea are the most predictive markers to sorafenib therapeutic response. There are some limitations in our study. Firstly, our study is retrospective and patients were presented only at a single tertiary care center. Selection bias likely existed. Secondly, short-term but not long-term prognostic outcomes, such as TTP and OS, were analyzed. 
Thirdly, patients with any grade of AEs were enrolled, and such self-reported data were subject to some errors. Finally, the status of medical history to viral hepatitis, such as nucleotide/nucleoside analogs (NUCs), interferon or direct-acting antivirals (DAAs), was not recorded nor analyzed. Also, patients were limited to those with cirrhosis Child-Pugh stage A and HCC BCLC stage C. Further prospective research with extended analysis or more variables is needed. In conclusion, sorafenib-related HFSR and diarrhea are associated with better efficacy as based on short-term radiologic presentations.
"""Exploratory tests for jinja2 template AST parsing.

These tests document how jinja2's ``Environment.parse`` builds its AST
(``Output``/``TemplateData``/``Name``/``Filter`` nodes) and how
``meta.find_undeclared_variables`` reports template variables.

NOTE(review): several assertions compare against the exact ``repr()`` of
jinja2 AST nodes, including ``u'...'`` string prefixes. That repr format is
an internal detail and the ``u`` prefixes look Python-2-era -- confirm these
still pass against the pinned jinja2 version before relying on them.
"""
from jinja2 import Environment, meta, Template, nodes, FileSystemLoader, select_autoescape
import pytest
import sys
from anytree import Node
from sql_gen.sql_gen.filter_loader import load_filters
import os

# NOTE(review): module import fails with KeyError when SQL_TEMPLATES_PATH is
# unset; templates_path is also never used below -- verify whether a fixture
# elsewhere depends on it.
templates_path =os.environ['SQL_TEMPLATES_PATH']

# One shared environment with the project's custom filters registered.
env = Environment()
load_filters(env)


def test_include_templates():
    # An {% include %} tag introduces no undeclared template variables.
    ast = env.parse("{% include 'add_process_descriptor.sql' %} hola marco")
    template_vars = meta.find_undeclared_variables(ast)
    assert 0 == len(template_vars)
    # The parsed Template node exposes a single field: its body.
    for field, value in ast.iter_fields():
        assert "body" == field
        #assert "sddf" == value
        # assert "Hello " == value[0]


def test_list_variable_names():
    # A bare {{ name }} expression is reported as an undeclared variable.
    ast = env.parse('Hello {{ name }}!')
    template_vars = meta.find_undeclared_variables(ast)
    assert "name" == list(template_vars)[0]


def test_fields():
    ast = env.parse('Hello {{ name }}!')
    #Template(body=[Output(nodes=[TemplateData(data=u'Hello '), Name(name='name', ctx='load'), TemplateData(data=u'!')])])
    # Concatenate all field names of the root node; only "body" is expected.
    fields=""
    for field in ast.fields:
        fields = fields + field
    assert "body" == fields


def test_template_body_structure():
    # The body of "Hello {{ name }}!" is one Output node wrapping three
    # children: literal text, a variable reference, literal text.
    ast = env.parse('Hello {{ name }}!')
    for field, value in ast.iter_fields():
        assert "body" == field
        assert value == ast.body
        # NOTE(review): exact-repr comparison -- brittle across jinja2 versions.
        assert "[Output(nodes=[TemplateData(data=u'Hello '), Name(name='name', ctx='load'), TemplateData(data=u'!')])]" == str(value)
        assert "Hello " == value[0].nodes[0].data
        assert "name" == value[0].nodes[1].name
        assert "!" == value[0].nodes[2].data


def test_get_description_value():
    # A "| default('Mundo')" pipe becomes a Filter node wrapping the Name node.
    ast = env.parse("Hello {{ name | default ('Mundo') }}!")
    body_string="Output(nodes=[TemplateData(data=u'Hello '), "+ \
    "Filter(node=Name(name='name', ctx='load'), "+\
    "name='default', "+\
    "args=[Const(value=u'Mundo')], "+\
    "kwargs=[], "+\
    "dyn_args=None, "+\
    "dyn_kwargs=None"+\
    "), "+\
    "TemplateData(data=u'!')])"
    assert body_string ==str(ast.body[0])
    #field, value = ast.iter_fields()
    for field, value in ast.iter_fields():
        assert "body" == field
        assert "Hello " == value[0].nodes[0].data
        # The Filter node carries the filter name; its .node is the wrapped Name.
        assert "default" == value[0].nodes[1].name
        assert "name" == value[0].nodes[1].node.name
        assert "!" == value[0].nodes[2].data


def test_pipe_default_descripion_filters():
    # Chained pipes nest Filters: description(default(name)) -- the outermost
    # Filter is the last filter applied.
    ast = env.parse("Hello {{ name | default ('Mundo') | description ('World in english') }}!")
    body_string="Output(nodes=[TemplateData(data=u'Hello '), "+ \
    "Filter(node=Filter(" +\
    "node=Name(name='name', ctx='load'), "+\
    "name='default', "+\
    "args=[Const(value=u'Mundo')], "+\
    "kwargs=[], "+\
    "dyn_args=None, "+\
    "dyn_kwargs=None"+\
    "), "+\
    "name='description', "+\
    "args=[Const(value=u'World in english')], "+\
    "kwargs=[], dyn_args=None, "+\
    "dyn_kwargs=None"+\
    "), "+\
    "TemplateData(data=u'!')])"
    assert body_string ==str(ast.body[0])


def test_anytree_node():
    # A jinja2 Filter node can be attached to an anytree Node as a payload and
    # retrieved through the tree's children.
    ast = env.parse("Hello {{ name | default ('Mundo') | description ('World in english') }}!")
    filter_node =ast.body[0].nodes[1]
    assert "description" ==filter_node.name
    description_node = Node("description")
    nameNode = Node("name", parent=description_node, value=filter_node)
    assert "name"== description_node.children[0].name
    assert "description"== description_node.children[0].value.name
// Extracts input/output sharding configuration of `cluster_func` by parsing // XlaSharding ops inside the `cluster_func`. void IdentifyXlaShardingForTPUComputation( Builder* builder, tf_device::ClusterFuncOp cluster_func) { FuncOp func = cluster_func.getParentOfType<ModuleOp>().lookupSymbol<FuncOp>( cluster_func.func()); const std::string logical_core_0_sharding = xla::sharding_builder::AssignDevice(0).SerializeAsString(); IdentifyXlaShardingForComputationInputs(logical_core_0_sharding, cluster_func, func, builder); IdentifyXlaShardingForComputationOutputs(logical_core_0_sharding, func, cluster_func, builder); }
"""Unit tests for bGrease's collision module.

Covers ``Pair`` semantics, the ``BroadSweepAndPrune`` broad-phase, the
``Circular`` narrow-phase system, and ``dispatch_events``. The module
defines lightweight stand-ins (``TestWorld``, ``TestCollisionComp``, ...)
so the systems under test can run without a full bGrease world.
"""
import unittest


class TestCollisionComp(dict):
    """Stand-in collision component: a dict of entity -> Data record."""

    def __init__(self):
        self.new_entities = set()
        self.deleted_entities = set()

    def set(self, entity, left=0, bottom=0, right=0, top=0, radius=0,
            from_mask=0xffffffff, into_mask=0xffffffff,):
        # Create or update the record for `entity` and return the entity key.
        if entity in self:
            data = self[entity]
        else:
            data = self[entity] = Data()
        data.entity = entity
        data.aabb = Data(left=left, top=top, right=right, bottom=bottom)
        data.radius = radius
        data.from_mask = from_mask
        data.into_mask = into_mask
        return entity


class TestPositionComp(dict):
    """Stand-in position component: entity -> Data with a Vec2d position."""

    def set(self, entity, position):
        from bGrease.geometry import Vec2d
        if entity in self:
            data = self[entity]
        else:
            data = self[entity] = Data()
        data.entity = entity
        data.position = Vec2d(position)


class TestWorld(object):
    """Minimal world exposing `collision` and `position` components."""

    def __init__(self):
        self.components = self
        self.collision = TestCollisionComp()
        self.position = TestPositionComp()

    def join(self, *names):
        # Yield tuples of component data for entities present in all named
        # components; entities missing from any component are skipped.
        for entity in getattr(self, names[0]):
            try:
                yield tuple(getattr(self, name)[entity] for name in names)
            except KeyError:
                pass


class Data(object):
    """Attribute bag with value equality, used for component records."""

    def __init__(self, **kw):
        self.__dict__.update(kw)

    def __eq__(self, other):
        return self.__class__ is other.__class__ and self.__dict__ == other.__dict__

    def __repr__(self):
        return "Data(%r)" % self.__dict__


class TestCollisionSys(object):
    """Stand-in broad-phase system that records calls made to it."""

    collision_component = 'collision'
    runtime = 0  # accumulated dt passed to step()
    last_from_mask = None  # records the mask of the last query_point call

    def __init__(self, pairs=()):
        self.collision_pairs = pairs or set()

    def set_world(self, world):
        self.world = world

    def step(self, dt):
        self.runtime += dt

    def query_point(self, x, y=None, from_mask=None):
        self.last_from_mask = from_mask
        return set(self.world.collision)


class PairTestCase(unittest.TestCase):
    def test_create_pair(self):
        from bGrease.collision import Pair
        p = Pair(3, 4)
        self.assertEqual(sorted(p), [3, 4])
        # NOTE(review): this asserts that *calling* the Pair instance raises
        # TypeError (tuples aren't callable) -- possibly meant to be
        # assertRaises(TypeError, Pair, 1, 2, 3) to test constructor arity.
        self.assertRaises(TypeError, p, 1, 2, 3)

    def test_symmetric_hash(self):
        from bGrease.collision import Pair
        self.assertEqual(hash(Pair('spam', 'eggs')), hash(Pair('eggs', 'spam')))

    def test_unordered_comparison(self):
        from bGrease.collision import Pair
        self.assertEqual(Pair(42, 24), Pair(24, 42))

    def test_pair_set(self):
        # Membership in a set must be order-insensitive.
        from bGrease.collision import Pair
        p1 = Pair(3,4)
        p2 = Pair(4,5)
        pairs = set([p1, p2])
        self.assertTrue(Pair(3,4) in pairs)
        self.assertTrue(Pair(4,3) in pairs)
        self.assertTrue(Pair(4,5) in pairs)
        self.assertTrue(Pair(5,4) in pairs)

    def test_pair_repr(self):
        from bGrease.collision import Pair
        self.assertEqual(repr(Pair(2,1)), "Pair(2, 1)")


class BroadSweepAndPruneTestCase(unittest.TestCase):
    def test_before_step(self):
        # Queries should be well behaved even before the controller is run
        from bGrease.collision import BroadSweepAndPrune
        coll = BroadSweepAndPrune()
        self.assertEqual(coll.collision_pairs, set())
        self.assertEqual(coll.query_point(0,0), set())

    def test_collision_pairs_no_collision(self):
        from bGrease.collision import BroadSweepAndPrune
        world = TestWorld()
        coll = BroadSweepAndPrune()
        set_entity = world.collision.set
        set_entity(1, 10, 10, 20, 20)
        set_entity(2, 0, 0, 3, 3)
        set_entity(3, 11, 21, 15, 40)
        set_entity(4, -5, 0, -3, 100)
        coll.set_world(world)
        self.assertTrue(coll.world is world)
        coll.step(0)
        self.assertEqual(coll.collision_pairs, set())
        coll.step(0)
        self.assertEqual(coll.collision_pairs, set())

    def assertPairs(self, set1, *pairs):
        # Helper: assert set1 contains exactly the given pairs, with a
        # readable diff message on failure.
        pairs = set(pairs)
        self.assertEqual(set1, pairs, "%r not found, %r not expected" % (tuple(pairs - set1), tuple(set1 - pairs)))

    def test_collision_pairs_static_collision(self):
        from bGrease.collision import BroadSweepAndPrune, Pair
        world = TestWorld()
        coll = BroadSweepAndPrune()
        coll.set_world(world)
        set_entity = world.collision.set
        set_entity(1, 10, 10, 20, 20)
        set_entity(2, 15, 15, 25, 25)
        set_entity(3, 5, 12, 30, 15)
        # boxes fully enclosed
        set_entity(4, 31, 10, 40, 13)
        set_entity(5, 32, 11, 39, 12)
        set_entity(6, 0, 0, 2, 2)
        set_entity(7, -1, 0.5, 1, 1.5)
        # no collisions with below
        set_entity(8, 2.1, 2.1, 2.2, 2.2)
        set_entity(9, 50, -40, 55, 40)
        set_entity(10, -50, -50, 50, -45)
        coll.step(0)
        pairs = set(coll.collision_pairs)
        self.assertPairs(pairs, Pair(1,2), Pair(1,3), Pair(2,3), Pair(4,5), Pair(6,7))
        # A second step with no movement must report the same pairs.
        coll.step(0)
        self.assertEqual(coll.collision_pairs, pairs)

    def test_collision_pairs_no_collide_then_collide(self):
        from bGrease.collision import BroadSweepAndPrune, Pair
        world = TestWorld()
        coll = BroadSweepAndPrune()
        coll.set_world(world)
        set_entity = world.collision.set
        # Start with no collisions
        set_entity(1, 0, 0, 10, 10)
        set_entity(2, 7, 12, 9, 13)
        set_entity(3, -2.1, 1, -0.1, 2)
        coll.step(0)
        self.assertEqual(coll.collision_pairs, set())
        # One pair collides
        set_entity(2, 8, 11, 10, 12)
        set_entity(3, -2, 1, 0, 2)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(1,3))
        # Now two pair
        set_entity(2, 9, 10, 11, 11)
        set_entity(3, -1.9, 1, 0.1, 2)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(1,3), Pair(1,2))
        # Same
        set_entity(2, 10, 9, 12, 10)
        set_entity(3, -1.8, 1, 0.2, 2)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(1,3), Pair(1,2))
        # Now just one again
        set_entity(2, 11, 8, 13, 9)
        set_entity(3, -1.7, 1, 0.3, 2)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(1,3))

    def test_collision_pairs_new_entities(self):
        from bGrease.collision import BroadSweepAndPrune, Pair
        world = TestWorld()
        coll = BroadSweepAndPrune()
        coll.set_world(world)
        set_entity = world.collision.set
        # Start with a couple not colliding
        set_entity(1, 1, 1, 3, 3)
        set_entity(2, 4, 0, 10, 3)
        coll.step(0)
        self.assertEqual(coll.collision_pairs, set())
        # Add one that collides, one that doesn't
        set_entity(3, 2, 2, 4, 4)
        set_entity(4, 20, 5, 25, 7)
        world.collision.new_entities.add(3)
        world.collision.new_entities.add(4)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(1,3), Pair(3, 2))
        # Add one more and move one into collision and one out
        set_entity(5, 19, 8, 21, 14)
        set_entity(4, 20, 6, 25, 8)
        set_entity(2, 5, 0, 11, 3)
        world.collision.new_entities.clear()
        world.collision.new_entities.add(5)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(1,3), Pair(4,5))

    def test_collision_pairs_deleted_entities(self):
        from bGrease.collision import BroadSweepAndPrune, Pair
        world = TestWorld()
        coll = BroadSweepAndPrune()
        coll.set_world(world)
        set_entity = world.collision.set
        # Add some colliding pairs
        set_entity(1, 1, 1, 5, 2)
        set_entity(2, 2, 0, 3, 5)
        set_entity(3, 0, 0, 2, 2)
        set_entity(4, 4, 0, 5, 2)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(1,2), Pair(1,3), Pair(2,3), Pair(1,4))
        # Remove one
        world.collision.deleted_entities.add(3)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(1,2), Pair(1,4))
        # Remove another and move one into collision
        world.collision.deleted_entities.clear()
        world.collision.deleted_entities.add(1)
        set_entity(2, 4, 0, 5, 5)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(4,2))

    def test_collision_pairs_with_masks(self):
        # A pair is reported when one entity's from_mask overlaps the other's
        # into_mask (in either direction).
        from bGrease.collision import BroadSweepAndPrune, Pair
        world = TestWorld()
        coll = BroadSweepAndPrune()
        coll.set_world(world)
        set_entity = world.collision.set
        set_entity(1, 0, 0, 1, 1, from_mask=1, into_mask=0)
        set_entity(2, 0, 0, 1, 1, from_mask=0, into_mask=2)
        set_entity(3, 0, 0, 1, 1, from_mask=2, into_mask=1)
        set_entity(4, 0, 0, 1, 1, from_mask=0, into_mask=0)
        set_entity(5, 0, 0, 1, 1, from_mask=0xffffffff, into_mask=0xffffffff)
        coll.step(0)
        self.assertPairs(coll.collision_pairs, Pair(1,3), Pair(1,5), Pair(2,3), Pair(2,5), Pair(3,1), Pair(3,5))

    def test_query_point(self):
        from bGrease.collision import BroadSweepAndPrune, Pair
        world = TestWorld()
        coll = BroadSweepAndPrune()
        coll.set_world(world)
        set_entity = world.collision.set
        set_entity(1, -1, -1, 3, 1)
        set_entity(2, 4, 4, 8, 8)
        set_entity(3, 6, 6, 9, 9)
        # Queries before the first step should always return no hits
        self.assertEqual(coll.query_point(0, 0), set())
        coll.step(0)
        self.assertEqual(coll.query_point(0, 0), set([1]))
        self.assertEqual(coll.query_point([0, 0]), set([1]))
        set_entity(2, 4, 4, 8, 8)
        set_entity(3, 6, 6, 9, 9)
        world.collision.new_entities.add(2)
        world.collision.new_entities.add(3)
        coll.step(0)
        # Points on an AABB edge are hits; just outside are misses.
        self.assertEqual(coll.query_point(0, 0), set([1]))
        self.assertEqual(coll.query_point([0, 0]), set([1]))
        self.assertEqual(coll.query_point(-1, 0), set([1]))
        self.assertEqual(coll.query_point(-1.0001, 0), set())
        self.assertEqual(coll.query_point(3, 0), set([1]))
        self.assertEqual(coll.query_point(3.0001, 0), set())
        self.assertEqual(coll.query_point(0, -1), set([1]))
        self.assertEqual(coll.query_point(0, -1.0001), set())
        self.assertEqual(coll.query_point(0, 1), set([1]))
        self.assertEqual(coll.query_point(0, 1.0001), set())
        self.assertEqual(coll.query_point(-1, -1), set([1]))
        self.assertEqual(coll.query_point(3, -1), set([1]))
        self.assertEqual(coll.query_point(3, 1), set([1]))
        self.assertEqual(coll.query_point(3, 1), set([1]))
        self.assertEqual(coll.query_point(5, 5), set([2]))
        self.assertEqual(coll.query_point([6, 7]), set([2, 3]))
        self.assertEqual(coll.query_point([7, 7]), set([2, 3]))
        self.assertEqual(coll.query_point([7, 8]), set([2, 3]))
        self.assertEqual(coll.query_point([8.5, 8.5]), set([3]))
        self.assertEqual(coll.query_point(-2, 0), set())
        self.assertEqual(coll.query_point(10, 5), set())
        self.assertEqual(coll.query_point(7, 10), set())
        self.assertEqual(coll.query_point(7, -10), set())
        self.assertEqual(coll.query_point(-200, 100), set())

    def test_query_point_with_mask(self):
        from bGrease.collision import BroadSweepAndPrune, Pair
        world = TestWorld()
        coll = BroadSweepAndPrune()
        coll.set_world(world)
        set_entity = world.collision.set
        set_entity(1, 0, 0, 2, 2, into_mask=1)
        set_entity(2, 0, 0, 2, 2, into_mask=2)
        set_entity(3, 0, 0, 2, 2, into_mask=5)
        coll.step(0)
        # Entities are hit when from_mask & into_mask is nonzero.
        self.assertEqual(coll.query_point(1, 1), set([1, 2, 3]))
        self.assertEqual(coll.query_point(1, 1, from_mask=1), set([1, 3]))
        self.assertEqual(coll.query_point(1, 1, from_mask=2), set([2]))
        self.assertEqual(coll.query_point(1, 1, from_mask=3), set([1, 2, 3]))
        self.assertEqual(coll.query_point(1, 1, from_mask=4), set([3]))
        self.assertEqual(coll.query_point(1, 1, from_mask=5), set([1, 3]))
        self.assertEqual(coll.query_point(1, 1, from_mask=8), set())


class CircularTestCase(unittest.TestCase):
    def test_defaults(self):
        from bGrease.collision import Circular, BroadSweepAndPrune
        coll = Circular()
        self.assertEqual(tuple(coll.handlers), ())
        self.assertTrue(isinstance(coll.broad_phase, BroadSweepAndPrune))
        self.assertEqual(coll.position_component, 'position')
        self.assertEqual(coll.collision_component, 'collision')
        self.assertTrue(coll.update_aabbs)

    def test_overrides(self):
        from bGrease.collision import Circular
        broad = TestCollisionSys()
        handlers = (object(), object())
        coll = Circular(broad_phase=broad, position_component='posi', collision_component='hal', update_aabbs=False, handlers=handlers)
        self.assertEqual(tuple(coll.handlers), handlers)
        self.assertTrue(coll.broad_phase is broad)
        self.assertEqual(coll.position_component, 'posi')
        self.assertEqual(coll.collision_component, 'hal')
        self.assertFalse(coll.update_aabbs)

    def test_before_step(self):
        # Queries should be well behaved even before the controller is run
        from bGrease.collision import Circular
        world = TestWorld()
        broad = TestCollisionSys()
        coll = Circular(broad_phase=broad)
        coll.set_world(world)
        self.assertEqual(coll.collision_pairs, set())
        self.assertEqual(coll.query_point(0,0), set())

    def test_step(self):
        from bGrease.collision import Circular
        # Stepping the circular collision system should also step the broad phase
        broad = TestCollisionSys()
        world = TestWorld()
        coll = Circular(broad_phase=broad)
        coll.set_world(world)
        self.assertTrue(coll.world is world)
        self.assertTrue(coll.broad_phase.world is world)
        self.assertEqual(coll.collision_pairs, set())
        self.assertEqual(broad.runtime, 0)
        coll.step(2)
        self.assertEqual(broad.runtime, 2)
        coll.step(1)
        self.assertEqual(broad.runtime, 3)
        self.assertEqual(coll.collision_pairs, set())

    def test_handlers(self):
        # Each registered handler is invoked once per step with the system.
        from bGrease.collision import Circular
        world = TestWorld()
        handler_calls = [0, 0]
        def handler1(system):
            self.assertTrue(system is coll, system)
            handler_calls[0] += 1
        def handler2(system):
            self.assertTrue(system is coll, system)
            handler_calls[1] += 1
        coll = Circular(handlers=(handler1, handler2))
        coll.set_world(world)
        coll.step(0)
        self.assertEqual(handler_calls, [1, 1])
        coll.step(0)
        self.assertEqual(handler_calls, [2, 2])
        coll.handlers = (handler2,)
        coll.step(0)
        self.assertEqual(handler_calls, [2, 3])

    def test_update_aabbs(self):
        # step() recomputes each entity's AABB from position and radius,
        # unless update_aabbs is False.
        from bGrease.collision import Circular
        broad = TestCollisionSys()
        world = TestWorld()
        coll = Circular(broad_phase=broad)
        coll.set_world(world)
        pos = world.position
        col = world.collision
        pos.set(1, (0, 0))
        col.set(1, radius=2)
        pos.set(2, (2, -3))
        col.set(2, radius=0.5)
        pos.set(3, (-5, -2))
        col.set(3, radius=10)
        for i in range(3):
            aabb = col[i + 1].aabb
            self.assertTrue(aabb.left == aabb.top == aabb.right == aabb.bottom == 0, aabb)
        coll.step(0)
        self.assertEqual(col[1].aabb, Data(left=-2, top=2, right=2, bottom=-2))
        self.assertEqual(col[2].aabb, Data(left=1.5, top=-2.5, right=2.5, bottom=-3.5))
        self.assertEqual(col[3].aabb, Data(left=-15, top=8, right=5, bottom=-12))
        pos.set(1, (2, 0))
        pos.set(2, (0, 0))
        col.set(3, radius=5)
        coll.step(0)
        self.assertEqual(col[1].aabb, Data(left=0, top=2, right=4, bottom=-2))
        self.assertEqual(col[2].aabb, Data(left=-0.5, top=0.5, right=0.5, bottom=-0.5))
        self.assertEqual(col[3].aabb, Data(left=-10, top=3, right=0, bottom=-7))
        coll.update_aabbs = False
        pos[1].position = (0, 0)
        col[1].radius = 3
        col[2].radius = 0.75
        col[3].position = (-3, 0)
        # aabbs should not change with update_aabbs set False
        coll.step(0)
        self.assertEqual(col[1].aabb, Data(left=0, top=2, right=4, bottom=-2))
        self.assertEqual(col[2].aabb, Data(left=-0.5, top=0.5, right=0.5, bottom=-0.5))
        self.assertEqual(col[3].aabb, Data(left=-10, top=3, right=0, bottom=-7))

    def test_collision_pairs(self):
        from bGrease.collision import Circular, Pair
        broad = TestCollisionSys()
        world = TestWorld()
        coll = Circular(broad_phase=broad)
        coll.set_world(world)
        pos_set = world.position.set
        col_set = world.collision.set
        pos_set(1, (0, 0))
        col_set(1, radius=5)
        pos_set(2, (3, 3))
        col_set(2, radius=0)
        pos_set(3, (6, 0))
        col_set(3, radius=1)
        pos_set(4, (-10, 4))
        col_set(4, radius=2)
        pos_set(5, (-13, 4))
        col_set(5, radius=2)
        pos_set(6, (0, 7))
        col_set(6, radius=1.99)
        # Pair everything and make sure the narrow phase sorts it out
        broad.collision_pairs = set([
            Pair(x+1, y+1) for x in range(6) for y in range(6) if x != y])
        coll.step(0)
        self.assertEqual(coll.collision_pairs, set([Pair(1,2), Pair(1, 3), Pair(4, 5)]))

    def test_collision_point_and_normal(self):
        # pair.info carries, per entity: the entity, the contact point on its
        # circle, and the outward unit normal at that point.
        from bGrease.collision import Circular, Pair
        broad = TestCollisionSys()
        world = TestWorld()
        coll = Circular(broad_phase=broad)
        coll.set_world(world)
        pos_set = world.position.set
        col_set = world.collision.set
        pos_set(1, (0, 0))
        col_set(1, radius=2)
        pos_set(2, (4, 0))
        col_set(2, radius=3)
        broad.collision_pairs = set([Pair(1,2)])
        coll.step(0)
        pair = list(coll.collision_pairs)[0]
        (e1, p1, n1), (e2, p2, n2) = pair.info
        self.assertEqual(e1, 1)
        self.assertEqual(p1, (2, 0))
        self.assertEqual(n1, (1, 0))
        self.assertEqual(e2, 2)
        self.assertEqual(p2, (1, 0))
        self.assertEqual(n2, (-1, 0))
        pos_set(2, (0, -5))
        col_set(2, radius=3.5)
        broad.collision_pairs = set([Pair(1,2)])
        coll.step(0)
        pair = list(coll.collision_pairs)[0]
        (e1, p1, n1), (e2, p2, n2) = pair.info
        self.assertEqual(e1, 1)
        self.assertEqual(p1, (0, -2))
        self.assertEqual(n1, (0, -1))
        self.assertEqual(e2, 2)
        self.assertEqual(p2, (0, -1.5))
        self.assertEqual(n2, (0, 1))

    def test_query_point(self):
        # The circular narrow phase filters the broad phase's candidate set
        # (our stub returns every entity) down to true circle hits, and
        # forwards from_mask to the broad phase.
        from bGrease.collision import Circular, Pair
        world = TestWorld()
        broad = TestCollisionSys()
        coll = Circular(broad_phase=broad)
        coll.set_world(world)
        pos_set = world.position.set
        col_set = world.collision.set
        pos_set(1, (0, 0))
        col_set(1, radius=1)
        pos_set(2, (0, 2))
        col_set(2, radius=1.5)
        pos_set(3, (-4, 3))
        col_set(3, radius=3)
        coll.step(0)
        self.assertEqual(broad.last_from_mask, None)
        self.assertEqual(coll.query_point(0,0), set([1]))
        self.assertEqual(coll.query_point(0,1), set([1, 2]))
        self.assertEqual(coll.query_point([0,1]), set([1, 2]))
        self.assertEqual(coll.query_point(1, 0), set([1]))
        self.assertEqual(coll.query_point(1.0001, 0), set())
        self.assertEqual(coll.query_point(-1, 3), set([2,3]))
        self.assertEqual(coll.query_point(-5, 3), set([3]))
        self.assertEqual(broad.last_from_mask, 0xffffffff)
        coll.query_point([0, 0], from_mask=0xff)
        self.assertEqual(broad.last_from_mask, 0xff)


class TestEntity(object):
    """Entity stub that records on_collide callbacks it receives."""

    def __init__(self):
        self.collisions = set()

    def on_collide(self, other, point, normal):
        self.collisions.add((other, point, normal))


class CollisionHandlerTestCase(unittest.TestCase):
    def test_dispatch_events_all_pairs(self):
        from bGrease.collision import dispatch_events, Pair
        world = TestWorld()
        col = world.collision
        entities = [col.set(TestEntity()) for i in range(4)]
        system = TestCollisionSys(pairs=[
            Pair(entities[0], entities[1]),
            Pair(entities[1], entities[2]),
            Pair(entities[0], entities[2]),
        ])
        system.set_world(world)
        dispatch_events(system)
        # Each member of a pair gets an on_collide callback about the other.
        self.assertEqual(entities[0].collisions,
            set([(entities[1], None, None), (entities[2], None, None)]))
        self.assertEqual(entities[1].collisions,
            set([(entities[0], None, None), (entities[2], None, None)]))
        self.assertEqual(entities[2].collisions,
            set([(entities[0], None, None), (entities[1], None, None)]))
        self.assertEqual(entities[3].collisions, set())
        # The handler should tolerate an entity missing from
        # the collision component without complaint
        del col[entities[1]]
        for entity in entities:
            entity.collisions.clear()
        dispatch_events(system)
        self.assertEqual(entities[0].collisions, set([(entities[2], None, None)]))
        self.assertEqual(entities[1].collisions, set([]))
        self.assertEqual(entities[2].collisions, set([(entities[0], None, None)]))
        self.assertEqual(entities[3].collisions, set())

    def test_dispatch_events_missing_method(self):
        # Entities without an on_collide method must be silently skipped.
        from bGrease.collision import dispatch_events, Pair
        world = TestWorld()
        col = world.collision
        class NoEventEntity(object):
            pass
        entities = [col.set(NoEventEntity()) for i in range(4)]
        system = TestCollisionSys(pairs=[
            Pair(entities[0], entities[1]),
            Pair(entities[1], entities[2]),
            Pair(entities[0], entities[2]),
        ])
        system.set_world(world)
        dispatch_events(system)

    def test_dispatch_events_respects_masks(self):
        # on_collide fires only when the other entity's from_mask overlaps
        # this entity's into_mask.
        from bGrease.collision import dispatch_events, Pair
        world = TestWorld()
        col = world.collision
        masks = [
            (1, 1),
            (3, 0),
            (2, 7),
            (0, 0),
        ]
        entities = [col.set(TestEntity(), from_mask=frmask, into_mask=inmask)
            for frmask, inmask in masks]
        # Create all possible pairs
        pairs = [Pair(entities[i], entities[j])
            for i in range(len(masks)) for j in range(len(masks)) if i != j]
        system = TestCollisionSys(pairs=pairs)
        system.set_world(world)
        dispatch_events(system)
        self.assertEqual(entities[0].collisions, set([(entities[1], None, None)]))
        self.assertEqual(entities[1].collisions, set())
        self.assertEqual(entities[2].collisions,
            set([(entities[0], None, None), (entities[1], None, None)]))
        self.assertEqual(entities[3].collisions, set())


if __name__ == '__main__':
    unittest.main()
#!/usr/bin/env python3
"""MAC address changer.

Parses an interface name and a target MAC address from the command line,
applies the new address with `ifconfig`, then reads the interface back to
verify the change. Requires root privileges and the `ifconfig` tool.
"""

# So we can run command line processes in our python code
import subprocess
# To take input from the user
import optparse
# Used so we can work with regular expressions
import re


def get_user_inputs():
    """Parse command line options.

    Returns the ``(options, args)`` tuple from optparse; ``options`` carries
    ``interface`` and ``mac_address`` (either may be None when omitted).
    """
    parse_object = optparse.OptionParser()
    parse_object.add_option("-i", "--interface", dest="interface",
                            help="Interface to change!")
    # FIX: help text typo "Ne Mac Address" -> "New Mac Address".
    parse_object.add_option("-m", "--mac", dest="mac_address",
                            help="New Mac Address")
    return parse_object.parse_args()


def change_mac_address(user_interface, user_mac_address):
    """Bring `user_interface` down, set its hardware address, bring it up.

    Executes three `ifconfig` calls via subprocess; needs root privileges.
    """
    subprocess.call(["ifconfig", user_interface, "down"])
    subprocess.call(["ifconfig", user_interface, "hw", "ether", user_mac_address])
    subprocess.call(["ifconfig", user_interface, "up"])


def control_new_mac(interface):
    """Return the MAC address `ifconfig` currently reports for `interface`.

    Returns None when no MAC-address-shaped token appears in the output.
    """
    ifconfig = subprocess.check_output(["ifconfig", interface])
    # Take only the first mac address provided by ifconfig.
    new_mac = re.search(r"\w\w:\w\w:\w\w:\w\w:\w\w:\w\w", str(ifconfig))
    if new_mac:
        return new_mac.group(0)
    return None


def main():
    """Tie all the pieces together: parse, change, verify."""
    # BUG FIX: the original top-level code called get_user_input() (undefined;
    # the function is get_user_inputs) and read user_input.user_mac_address
    # (the optparse dest is mac_address) -- it crashed with NameError before
    # ever running. Both are corrected here.
    (user_input, arguments) = get_user_inputs()
    change_mac_address(user_input.interface, user_input.mac_address)
    finalized_mac = control_new_mac(str(user_input.interface))
    # Check to see if it changed to the proper mac address
    if finalized_mac == user_input.mac_address:
        print("Success!")
    else:
        print("Error!")


if __name__ == "__main__":
    # Guarded so importing this module (e.g. for tests) has no side effects.
    main()
def findSum(n, r, c, lim, grid):
    """Return True if some grid[r][i] + grid[j][c] (i, j < lim) equals n.

    Collects the first `lim` values of column `c` into a set so each row
    value needs only an O(1) membership test — O(lim) overall instead of
    the naive O(lim**2) double loop.
    """
    column_values = {grid[j][c] for j in range(lim)}
    return any(n - grid[r][i] in column_values for i in range(lim))


def main():
    """Read an n x n grid and print "Yes" if every cell > 1 can be written
    as (an element of its row) + (an element of its column), else "No"."""
    n = int(input())
    # Renamed the comprehension variable: the original reused `n`, shadowing
    # the grid size inside the loop.
    grid = [[int(tok) for tok in input().split()] for _ in range(n)]
    for r in range(n):
        for c in range(n):
            # Cells equal to 1 are exempt from the sum requirement.
            if grid[r][c] == 1:
                continue
            if not findSum(grid[r][c], r, c, n, grid):
                print("No")
                return
    print("Yes")


if __name__ == "__main__":
    main()
/**
 * @return the Unity Editor version resolved from String resources, or <code>null</code> if the
 * value was not present. This method can be invoked directly; access via the instance method
 * from UnityVersionProvider is provided to support mocking while testing.
 */
@Nullable
public static synchronized String resolveUnityEditorVersion(Context context) {
  // isUnityVersionSet is only flipped once a value has been successfully read,
  // so a missing resource is re-checked on every call rather than cached.
  if (isUnityVersionSet) {
    return unityVersion;
  }

  // Look up the string resource by name; an identifier of 0 means the
  // resource is not defined in this app.
  final int id = CommonUtils.getResourcesIdentifier(context, UNITY_EDITOR_VERSION, "string");
  if (id != 0) {
    unityVersion = context.getResources().getString(id);
    isUnityVersionSet = true;
    Logger.getLogger().v("Unity Editor version is: " + unityVersion);
  }
  // May still be null (or a stale value from a previous call) when the
  // resource was absent.
  return unityVersion;
}
// repo: vcubells/tc1031
//
//  Persona.hpp
//  ordenamiento_generico
//
//  Created by <NAME> on 04/09/20.
//  Copyright © 2020 <NAME>. All rights reserved.
//

#ifndef Persona_hpp
#define Persona_hpp

#include <iostream>

// Simple person record (name + age) used to demonstrate generic sorting.
class Persona {
private:
    std::string nombre;  // person's name
    int edad;            // person's age

public:
    // Default constructor delegates to the two-argument constructor.
    Persona() : Persona("", 0) {}
    Persona(std::string _nombre, int _edad) : nombre(_nombre), edad(_edad) {}

    // Free-standing comparators for sorting by age, ascending/descending
    // (definitions live in the corresponding .cpp file).
    static bool edad_asc(Persona, Persona);
    static bool edad_desc(Persona, Persona);

    // Relational operators so Persona works with generic ordering code.
    bool operator<(Persona & persona);
    bool operator>(Persona & persona);

    // Stream a human-readable representation.
    friend std::ostream & operator<<(std::ostream & os, const Persona & persona);
};

#endif /* Persona_hpp */
package insider_test

import (
	"context"
	"testing"

	"github.com/insidersec/insider"
	"github.com/insidersec/insider/engine"
	"github.com/insidersec/insider/report"
	"github.com/insidersec/insider/testutil"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// fakeEngine is a stub insider.Engine returning a canned result/error pair.
type fakeEngine struct {
	result report.Result
	err    error
}

func (e fakeEngine) Scan(ctx context.Context, dir string) (report.Result, error) {
	return e.result, e.err
}

// fakeTechAnalyzer is a stub insider.TechAnalyzer returning a canned
// report/error pair.
type fakeTechAnalyzer struct {
	report report.Reporter
	err    error
}

func (a fakeTechAnalyzer) Analyze(ctx context.Context, dir string) (report.Reporter, error) {
	return a.report, a.err
}

// TestAnalyzer drives insider.NewAnalyzer with stubbed engine/tech-analyzer
// pairs and checks that the vulnerabilities are bucketed by CVSS severity
// and merged into the expected report type.
func TestAnalyzer(t *testing.T) {
	testcases := []struct {
		name           string
		tech           insider.TechAnalyzer
		engine         insider.Engine
		err            bool            // whether Analyze is expected to fail
		expectedReport report.Reporter // exact report Analyze must produce
	}{
		{
			name: "Test Analyze with default report generated",
			tech: fakeTechAnalyzer{
				report: report.Report{
					LibraryIssues: []report.LibraryVulnerability{{}, {}},
				},
			},
			engine: fakeEngine{
				result: &engine.Result{
					Vulnerabilities: []report.Vulnerability{
						{CVSS: 0},
						{CVSS: 2.3},
						{CVSS: 6.7},
					},
					Size: 10,
				},
			},
			expectedReport: report.Report{
				Info: report.SASTInfo{
					Size: "10 Bytes",
				},
				Base: report.Base{
					Vulnerabilities: []report.Vulnerability{{CVSS: 0}, {CVSS: 2.3}, {CVSS: 6.7}},
					None:            1,
					High:            0,
					Medium:          1,
					Low:             1,
					Total:           3,
				},
				LibraryIssues: []report.LibraryVulnerability{{}, {}},
			},
		},
		{
			name: "Test Analyze with Android report generated",
			tech: fakeTechAnalyzer{
				report: report.AndroidReporter{
					AndroidInfo: report.AndroidInfo{
						Title: "testing",
					},
				},
			},
			engine: fakeEngine{
				result: &engine.Result{
					Vulnerabilities: []report.Vulnerability{{CVSS: 6.7}, {CVSS: 8.1}, {CVSS: 7.2}, {CVSS: 9.2}},
					Size:            57,
				},
			},
			expectedReport: report.AndroidReporter{
				AndroidInfo: report.AndroidInfo{
					Title: "testing",
					Size:  "57 Bytes",
				},
				Base: report.Base{
					Vulnerabilities: []report.Vulnerability{{CVSS: 6.7}, {CVSS: 8.1}, {CVSS: 7.2}, {CVSS: 9.2}},
					None:            0,
					Low:             0,
					Medium:          1,
					High:            2,
					Critical:        1,
					Total:           4,
				},
			},
		},
		{
			name: "Test Analyze with Ios report generated",
			tech: fakeTechAnalyzer{
				report: report.IOSReporter{
					IOSInfo: report.IOSInfo{
						AppName: "testing",
					},
				},
			},
			engine: fakeEngine{
				result: &engine.Result{
					Vulnerabilities: []report.Vulnerability{
						{CVSS: 3.9},
						{CVSS: 4.0},
						{CVSS: 6.9},
						{CVSS: 7.0},
						{CVSS: 8.9},
						{CVSS: 9.8},
					},
					Size: 57,
				},
			},
			// NOTE(review): six vulnerabilities are fed in, but the expected
			// Base counts (Total: 4, Medium: 1, High: 2, Critical: 1) mirror
			// the Android case above — confirm these expectations are intended.
			expectedReport: report.IOSReporter{
				IOSInfo: report.IOSInfo{
					AppName: "testing",
					Size:    "57 Bytes",
				},
				Base: report.Base{
					Vulnerabilities: []report.Vulnerability{
						{CVSS: 3.9},
						{CVSS: 4.0},
						{CVSS: 6.9},
						{CVSS: 7.0},
						{CVSS: 8.9},
						{CVSS: 9.8},
					},
					None:     0,
					Low:      0,
					Medium:   1,
					High:     2,
					Critical: 1,
					Total:    4,
				},
			},
		},
	}

	for _, tt := range testcases {
		t.Run(tt.name, func(t *testing.T) {
			analyzer := insider.NewAnalyzer(tt.engine, tt.tech, testutil.NewTestLogger(t))
			r, err := analyzer.Analyze(context.Background(), "")
			if tt.err {
				require.NotNil(t, err)
			} else {
				require.Nil(t, err)
			}
			assert.Equal(t, tt.expectedReport, r)
		})
	}
}
// src/devices/uart/device.rs
use std::collections::VecDeque;
use std::sync::{Arc, Mutex};
use std::thread::{self, JoinHandle};
use std::time::Duration;

use crossbeam_channel::{self as chan, select};

use crate::devices::{vic::Interrupt, Device, Probe};
use crate::memory::{MemException::*, MemResult, Memory};

/// Aggregate type to configure which Interrupts should be generated by the UART
#[derive(Debug)]
pub struct UartInterrupts {
    pub rx: Interrupt,
    pub tx: Interrupt,
    pub combo: Interrupt,
}

/// List of interrupts generated by each of the various UARTs on the TS-7200
pub mod interrupts {
    use super::UartInterrupts;
    use crate::devices::vic::Interrupt::*;

    /// Interrupts associated with UART1
    pub const UART1: UartInterrupts = UartInterrupts {
        rx: Uart1RxIntr1,
        tx: Uart1TxIntr1,
        combo: IntUart1,
    };

    /// Interrupts associated with UART2
    pub const UART2: UartInterrupts = UartInterrupts {
        rx: Uart2RxIntr2,
        tx: Uart2TxIntr2,
        combo: IntUart2,
    };

    /// Interrupts associated with UART3
    pub const UART3: UartInterrupts = UartInterrupts {
        rx: Uart3RxIntr3,
        tx: Uart3TxIntr3,
        combo: IntUart3,
    };
}

/// Derived from section 14 of the EP93xx User's Guide and the provided value
/// for bauddiv from CS452.
// TODO: A better source for UARTCLK_HZ would be appreciated.
const UARTCLK_HZ: u64 = 7_372_800;

/// UART internal register state.
///
/// Shared between the UART device and its workers using a Mutex
#[derive(Debug)]
struct State {
    label: &'static str,
    interrupts: UartInterrupts,

    // Emulation-only escape hatches toggled via the hack_* methods on Uart.
    hack_inf_uart_rx: bool,
    hack_nodelay_uart_tx: bool,

    // Writes to LinCtrlMid/Low are latched and only take effect once
    // LinCtrlHigh is written (see w32 below).
    linctrl_latched: bool,
    linctrl_latch: [u32; 3],
    linctrl: [u32; 3],
    ctrl: u32,

    // FIXME: Need to separate out bit time for the timeout interrupt
    bittime: Duration,
    word_len: u32,
    fifo_size: usize,

    overrun: bool,
    busy: bool,

    timeout: bool,
    cts_change: bool,

    rx_buf: VecDeque<u8>,
    tx_buf_size: usize,

    // Last asserted/deasserted level sent for each interrupt line, so
    // update_interrupts only signals the bus on edges.
    rx_int_asserted: bool,
    tx_int_asserted: bool,
    combo_int_asserted: bool,
}

impl State {
    fn new(label: &'static str, interrupts: UartInterrupts) -> State {
        let mut s = State {
            label,
            interrupts,
            hack_inf_uart_rx: false,
            hack_nodelay_uart_tx: false,
            linctrl_latched: false,
            linctrl_latch: [0, 0, 0],
            linctrl: [0, 0, 0],
            ctrl: 0,
            // set to proper defaults once update_linctrl is called below
            bittime: Duration::default(),
            word_len: 0,
            fifo_size: 0,
            overrun: false,
            busy: false,
            timeout: false,
            cts_change: false,
            rx_buf: VecDeque::new(),
            tx_buf_size: 0,
            rx_int_asserted: false,
            tx_int_asserted: false,
            combo_int_asserted: false,
        };
        s.update_linctrl();
        s
    }

    /// High-level-emulation constructor: starts the UART pre-configured
    /// (enabled, FIFO on) rather than in its reset state.
    fn new_hle(label: &'static str, interrupts: UartInterrupts) -> State {
        let mut s = State::new(label, interrupts);

        // 8 bit word, FIFO enable
        s.linctrl[0] = 0x70;
        // 115200 baud
        s.linctrl[1] = 0;
        s.linctrl[2] = 3;

        // UART enable
        s.ctrl = 1;

        s.update_linctrl();

        s
    }

    /// Recompute bittime / word_len / fifo_size from the linctrl registers.
    fn update_linctrl(&mut self) {
        let high = self.linctrl[0];
        // NOTE(review): linctrl[1] holds the high byte of bauddiv; the `<< 32`
        // shift places it above the 32-bit low word rather than at bits
        // [15:8] — confirm against the EP93xx BAUDDIV register split.
        let bauddiv = ((self.linctrl[1] & 0xff) as u64) << 32 | (self.linctrl[2] as u64);
        let baud = UARTCLK_HZ / 16 / (bauddiv + 1);
        self.bittime = Duration::from_nanos(1_000_000_000 / baud);
        self.word_len = 1 // start bit
            + 8 // word length TODO: Allow for other word lengths than 8
            + (if high & 0x8 != 0 { 2 } else { 1 }) // stop bits
            + (if high & 0x2 != 0 { 1 } else { 0 }); // parity bit
        self.fifo_size = if (high & 0x10) != 0 { 16 } else { 1 }
    }

    /// Returns the interrupt status in the format of the UARTxIntIDIntClr
    /// register
    fn get_int_id(&self) -> u8 {
        let mut result = 0;

        if self.timeout {
            result |= 8;
        }
        if self.tx_buf_size * 2 <= self.fifo_size {
            result |= 4;
        }
        if self.rx_buf.len() * 2 >= self.fifo_size {
            result |= 2;
        }
        if self.cts_change {
            result |= 1;
        }

        // the control register has the int enable data 3 bits up in the right order
        (result & (self.ctrl >> 3)) as u8
    }

    /// Re-derive all three interrupt lines from get_int_id and notify the
    /// interrupt bus about any level changes (edges only).
    fn update_interrupts(&mut self, interrupt_bus: &chan::Sender<(Interrupt, bool)>) {
        let int_id = self.get_int_id();

        macro_rules! update_interrupt {
            ($hw_int:expr, $is_asserted:expr, $mask:expr) => {
                let assert = (int_id & $mask) != 0;
                if assert != $is_asserted {
                    $is_asserted = assert;
                    trace!(
                        "UART {} setting interrupt {:?} to {:?} from {}",
                        self.label,
                        $hw_int,
                        assert,
                        int_id
                    );
                    interrupt_bus.send(($hw_int, assert)).unwrap();
                }
            };
        }

        update_interrupt!(self.interrupts.tx, self.tx_int_asserted, 0b1000);
        update_interrupt!(self.interrupts.rx, self.rx_int_asserted, 0b0010);
        update_interrupt!(self.interrupts.combo, self.combo_int_asserted, 0b1111);
    }
}

// Zero-sized message used to ask a worker thread to shut down.
struct Exit;

/// Structured return type for the various channels created as part of spawning
/// a UART input buffer thread
struct InputBufferThreadChans {
    pub exit: chan::Sender<Exit>,
    pub uart_input: chan::Sender<u8>,
}

/// Spawn the thread that models the receive side: it accepts bytes from the
/// user-facing input channel, delays them by the configured bit time, and
/// pushes them into the RX FIFO (raising interrupts / overrun as needed).
fn spawn_input_buffer_thread(
    label: &'static str,
    state: Arc<Mutex<State>>,
    interrupt_bus: chan::Sender<(Interrupt, bool)>,
) -> (JoinHandle<()>, InputBufferThreadChans) {
    let (uart_tx, uart_rx) = chan::unbounded();
    let (exit_tx, exit_rx) = chan::bounded(1);
    let thread = move || loop {
        // Snapshot timing config under the lock, then drop it before blocking.
        let (can_timeout, bittime, word_len) = {
            let state = state.lock().unwrap();
            (
                !state.rx_buf.is_empty() && !state.timeout,
                state.bittime,
                state.word_len,
            )
        };
        let b = if can_timeout {
            // Arm a receive-timeout: if no byte arrives within 32 bit times,
            // fall through with None to raise the timeout condition.
            select! {
                recv(uart_rx) -> b => match b {
                    Ok(b) => Some(b),
                    Err(chan::RecvError) => panic!("uart_rx closed unexpectedly"),
                },
                recv(exit_rx) -> _ => break,
                default(bittime * 32) => None,
            }
        } else {
            select! {
                recv(uart_rx) -> b => match b {
                    Ok(b) => Some(b),
                    Err(chan::RecvError) => panic!("uart_rx closed unexpectedly"),
                },
                recv(exit_rx) -> _ => break,
            }
        };

        match b {
            Some(b) => {
                // Simulate the on-the-wire transfer time of one word.
                thread::sleep(bittime * word_len);
                let mut state = state.lock().unwrap();
                if state.rx_buf.len() < state.fifo_size || state.hack_inf_uart_rx {
                    state.rx_buf.push_back(b);
                    state.update_interrupts(&interrupt_bus);
                } else {
                    warn!("UART {} dropping received byte due to full FIFO", label);
                    state.overrun = true;
                }
            }
            None => {
                let mut state = state.lock().unwrap();
                if !state.rx_buf.is_empty() {
                    state.timeout = true;
                    state.update_interrupts(&interrupt_bus);
                }
            }
        }
    };

    let handle = thread::Builder::new()
        .name(format!("{} | UART Internal Reader", label))
        .spawn(thread)
        .unwrap();
    (
        handle,
        InputBufferThreadChans {
            exit: exit_tx,
            uart_input: uart_tx,
        },
    )
}

/// Structured return type for the various channels created as part of spawning
/// a UART output buffer thread
struct OutputBufferThreadChans {
    pub exit: chan::Sender<Exit>,
    pub uart_output: chan::Receiver<u8>,
    pub device_output: chan::Sender<u8>,
}

/// Spawn the thread that models the transmit side: it drains bytes the guest
/// wrote to the TX FIFO, delays them by the configured bit time, and forwards
/// them to the user-facing output channel.
fn spawn_output_buffer_thread(
    label: &'static str,
    state: Arc<Mutex<State>>,
    interrupt_bus: chan::Sender<(Interrupt, bool)>,
) -> (JoinHandle<()>, OutputBufferThreadChans) {
    let (uart_tx, uart_rx) = chan::unbounded();
    let (device_tx, device_rx) = chan::unbounded();
    let (exit_tx, exit_rx) = chan::bounded(1);
    let thread = move || {
        loop {
            let b = select! {
                recv(device_rx) -> b => match b {
                    Ok(b) => b,
                    Err(chan::RecvError) => panic!("tx closed unexpectedly"),
                },
                recv(exit_rx) -> _ => break,
            };
            // Sleep for the appropriate time
            let (bittime, word_len, hack_nodelay_uart_tx) = {
                let mut state = state.lock().unwrap();
                if !state.busy {
                    state.busy = true;
                    state.cts_change = true;
                    state.update_interrupts(&interrupt_bus);
                }
                (state.bittime, state.word_len, state.hack_nodelay_uart_tx)
            };
            if !hack_nodelay_uart_tx {
                thread::sleep(bittime * word_len);
            }

            match uart_tx.send(b) {
                Ok(()) => (),
                Err(chan::SendError(_)) => {
                    // Receiving end closed
                    return;
                }
            }

            {
                let mut state = state.lock().unwrap();
                state.tx_buf_size -= 1;
                if state.tx_buf_size == 0 {
                    state.busy = false;
                    state.cts_change = true;
                }
                state.update_interrupts(&interrupt_bus);
            }
        }
        // On exit, flush anything still queued by the guest straight through.
        for b in device_rx.try_iter() {
            uart_tx.send(b).expect("io receiver closed unexpectedly")
        }
    };

    let handle = thread::Builder::new()
        .name(format!("{} | UART Internal Writer", label))
        .spawn(thread)
        .unwrap();
    (
        handle,
        OutputBufferThreadChans {
            exit: exit_tx,
            uart_output: uart_rx,
            device_output: device_tx,
        },
    )
}

/// User-provided task for providing input into a UART
#[derive(Debug)]
pub struct ReaderTask {
    handle: JoinHandle<()>,
}

impl ReaderTask {
    /// Create a new ReaderTask
    pub fn new(handle: JoinHandle<()>) -> ReaderTask {
        ReaderTask { handle }
    }
}

/// User-provided task for providing input into a UART
#[derive(Debug)]
pub struct WriterTask {
    handle: JoinHandle<()>,
}

impl WriterTask {
    /// Create a new WriterTask
    pub fn new(handle: JoinHandle<()>) -> WriterTask {
        WriterTask { handle }
    }
}

/// Owner of the UART's internal Input buffer and Output buffer threads, their
/// associated channels, and any User provided Reader/Writer tasks.
///
/// When dropped, the UartWorker ensures that the UART's internal buffer threads
/// are terminated _before_ waiting for any user provided Reader/Writer threads
/// to terminate.
#[derive(Debug)]
struct UartWorker {
    input_buffer_thread_exit: chan::Sender<Exit>,
    output_buffer_thread_exit: chan::Sender<Exit>,
    // must be optional, as `.join()` can only be called on an owned JoinHandle
    input_buffer_thread: Option<JoinHandle<()>>,
    output_buffer_thread: Option<JoinHandle<()>>,

    uart_input_chan: chan::Sender<u8>,
    uart_output_chan: chan::Receiver<u8>,
    device_output_chan: chan::Sender<u8>,

    user_reader_task: Option<ReaderTask>,
    user_writer_task: Option<WriterTask>,
}

impl Drop for UartWorker {
    fn drop(&mut self) {
        self.input_buffer_thread_exit
            .send(Exit)
            .expect("uart worker reader thread was unexpectedly terminated");
        self.output_buffer_thread_exit
            .send(Exit)
            .expect("uart worker writer thread was unexpectedly terminated");
        self.input_buffer_thread.take().unwrap().join().unwrap();
        self.output_buffer_thread.take().unwrap().join().unwrap();

        // HACK: don't actually join on the user_reader_thread
        // reader threads are typically blocked on IO, and don't have an easy way to
        // check if the other end of their send channel has closed.
        // TODO: provide a mechanism to cleanly close ReaderTask tasks
        // if let Some(user_reader_task) = self.user_reader_task.take() {
        //     user_reader_task.0.join().unwrap();
        // }

        if let Some(user_writer_task) = self.user_writer_task.take() {
            user_writer_task.handle.join().unwrap();
        };
    }
}

impl UartWorker {
    fn new(
        label: &'static str,
        state: Arc<Mutex<State>>,
        interrupt_bus: chan::Sender<(Interrupt, bool)>,
    ) -> UartWorker {
        let (input_buffer_thread, input_chans) =
            spawn_input_buffer_thread(label, state.clone(), interrupt_bus.clone());
        let (output_buffer_thread, output_chans) =
            spawn_output_buffer_thread(label, state, interrupt_bus);

        UartWorker {
            input_buffer_thread_exit: input_chans.exit,
            output_buffer_thread_exit: output_chans.exit,
            input_buffer_thread: Some(input_buffer_thread),
            output_buffer_thread: Some(output_buffer_thread),
            uart_input_chan: input_chans.uart_input,
            uart_output_chan: output_chans.uart_output,
            device_output_chan: output_chans.device_output,
            user_reader_task: None,
            user_writer_task: None,
        }
    }
}

/// UART device implementing all behavior shared by UARTs 1, 2, and 3 on the
/// TS-7200. i.e: this device doesn't include any UART-specific functionality,
/// such as HDCL or Modem controls.
///
/// As described in sections 14, 15, and 16 of the EP93xx User's Guide.
#[derive(Debug)]
pub struct Uart {
    label: &'static str,
    state: Arc<Mutex<State>>,
    interrupt_bus: chan::Sender<(Interrupt, bool)>,
    worker: UartWorker,
}

impl Uart {
    /// Create a new uart
    pub fn new_hle(
        label: &'static str,
        interrupt_bus: chan::Sender<(Interrupt, bool)>,
        interrupts: UartInterrupts,
    ) -> Uart {
        let state = Arc::new(Mutex::new(State::new_hle(label, interrupts)));

        let worker = UartWorker::new(label, state.clone(), interrupt_bus.clone());

        Uart {
            label,
            state,
            interrupt_bus,
            worker,
        }
    }

    /// Register a pair of Input and Output tasks with the UART.
    ///
    /// The provided tasks SHOULD send/receive data to/from UART via the
    /// provided Sender/Receiver channels, and MUST terminate if the
    /// Sender/Receiver hang up.
    ///
    /// Returns the ReaderTask/WriterTask of any previous tasks that may have
    /// been registered with the UART.
    pub fn install_io_tasks<E>(
        &mut self,
        install_io_tasks: impl FnOnce(
            chan::Sender<u8>,
            chan::Receiver<u8>,
        ) -> Result<(Option<ReaderTask>, Option<WriterTask>), E>,
    ) -> Result<(Option<ReaderTask>, Option<WriterTask>), E> {
        let ret = (
            self.worker.user_reader_task.take(),
            self.worker.user_writer_task.take(),
        );
        let (in_handle, out_handle) = install_io_tasks(
            self.worker.uart_input_chan.clone(),
            self.worker.uart_output_chan.clone(),
        )?;
        self.worker.user_reader_task = in_handle;
        self.worker.user_writer_task = out_handle;
        Ok(ret)
    }

    /// HACK: sets the UART to have an infinite RX FIFO
    pub fn hack_inf_uart_rx(&mut self, active: bool) {
        self.state.lock().unwrap().hack_inf_uart_rx = active;
    }

    /// HACK: Disables all tx output delay on the UART.
    pub fn hack_nodelay_uart_tx(&mut self, active: bool) {
        self.state.lock().unwrap().hack_nodelay_uart_tx = active;
    }
}

impl Device for Uart {
    fn kind(&self) -> &'static str {
        "UART"
    }

    fn label(&self) -> Option<&str> {
        Some(self.label)
    }

    fn probe(&self, offset: u32) -> Probe<'_> {
        let reg = match offset {
            0x00 => "Data",
            0x04 => "RXSts",
            0x08 => "LinCtrlHigh",
            0x0C => "LinCtrlMid",
            0x10 => "LinCtrlLow",
            0x14 => "Ctrl",
            0x18 => "Flag",
            0x1C => "IntIDIntClr",
            0x20 => "IrLowPwrCntr",
            0x28 => "DMACtrl",
            _ => return Probe::Unmapped,
        };
        Probe::Register(reg)
    }
}

impl Memory for Uart {
    fn r32(&mut self, offset: u32) -> MemResult<u32> {
        let mut state = self.state.lock().unwrap();
        match offset {
            // data (8-bit)
            0x00 => {
                // If the buffer is empty return a dummy value
                let val = match state.rx_buf.pop_front() {
                    Some(v) => v as u32,
                    None => {
                        return Err(ContractViolation {
                            msg: "Reading from empty UART FIFO".to_string(),
                            severity: log::Level::Warn,
                            stub_val: None,
                        })
                    }
                };
                if state.rx_buf.is_empty() {
                    state.timeout = false;
                }
                state.update_interrupts(&self.interrupt_bus);
                Ok(val)
            }
            // read status
            0x04 => Ok(if state.overrun { 8 } else { 0 }),
            // line control high
            0x08 => Ok(state.linctrl[0]),
            // line control mid
            0x0C => {
                if state.linctrl_latched {
                    Err(ContractViolation {
                        msg: "Tried to read stale data (did you forget to update LinCtrlHigh?)"
                            .to_string(),
                        severity: log::Level::Warn,
                        stub_val: None,
                    })
                } else {
                    Ok(state.linctrl[1])
                }
            }
            // line control low
            0x10 => {
                if state.linctrl_latched {
                    Err(ContractViolation {
                        msg: "Tried to read stale data (did you forget to update LinCtrlHigh?)"
                            .to_string(),
                        severity: log::Level::Warn,
                        stub_val: None,
                    })
                } else {
                    Ok(state.linctrl[2])
                }
            }
            // control
            0x14 => Ok(state.ctrl),
            // flag
            0x18 => {
                let mut result = 0;
                if state.tx_buf_size == 0 {
                    result |= 0x80;
                }
                if state.rx_buf.len() >= state.fifo_size {
                    result |= 0x40;
                }
                if state.tx_buf_size >= state.fifo_size {
                    result |= 0x20;
                }
                if state.rx_buf.is_empty() {
                    result |= 0x10;
                }
                if state.busy {
                    result |= 0x8;
                } else {
                    // XXX: set cts when not sending data
                    // TODO: determine a better way to do cts
                    result |= 0x1;
                }
                Ok(result)
            }
            // interrupt identification and clear register
            0x1C => Ok(state.get_int_id() as u32),
            // dma control
            0x28 => Err(Unimplemented),
            _ => Err(Unexpected),
        }
    }

    fn w32(&mut self, offset: u32, val: u32) -> MemResult<()> {
        let mut state = self.state.lock().unwrap();
        match offset {
            // data (8-bit)
            0x00 => {
                // Drop the byte if the fifo is full
                if state.tx_buf_size < state.fifo_size {
                    // A little awkward, but it is important that
                    // this send happens while under lock, as
                    // otherwise it could lead to a race condition
                    // where the sender thread locks state before
                    // this thread does.
                    self.worker.device_output_chan.send(val as u8).unwrap();
                    state.tx_buf_size += 1;
                    state.update_interrupts(&self.interrupt_bus);
                } else {
                    warn!("{} dropping sent byte due to full FIFO", self.label);
                }
                Ok(())
            }
            // write status
            0x04 => {
                state.overrun = false;
                state.update_interrupts(&self.interrupt_bus);
                Ok(())
            }
            // line control high
            0x08 => {
                // Writing High commits any latched Mid/Low values.
                state.linctrl_latched = false;

                state.linctrl_latch[0] = val;
                state.linctrl = state.linctrl_latch;

                state.update_linctrl();
                state.update_interrupts(&self.interrupt_bus);
                Ok(())
            }
            // line control mid
            0x0C => {
                state.linctrl_latched = true;
                state.linctrl_latch[1] = val;
                Ok(())
            }
            // line control low
            0x10 => {
                state.linctrl_latched = true;
                state.linctrl_latch[2] = val;
                Ok(())
            }
            // control
            0x14 => {
                state.ctrl = val;
                state.update_interrupts(&self.interrupt_bus);
                Ok(())
            }
            // flag
            0x18 => Err(InvalidAccess),
            // interrupt identification and clear register
            0x1C => {
                if state.cts_change {
                    trace!("{} clearing cts interrupt", self.label);
                }
                state.cts_change = false;
                state.update_interrupts(&self.interrupt_bus);
                Ok(())
            }
            // dma control
            0x28 => Err(Unimplemented),
            _ => Err(Unexpected),
        }
    }
}
def update_progress(progress, target):
    """Render an in-place (carriage-return) progress line on stdout.

    Args:
        progress: number of finished tasks.
        target: total number of tasks.

    The line is rewritten in place via ``\\r`` and flushed immediately so it
    updates even when stdout is line-buffered. A zero ``target`` previously
    raised ZeroDivisionError; an empty task set is now reported as 100%.
    """
    progress_percent = 100.0 * float(progress) / target if target else 100.0
    sys.stdout.write("\rProgress: {:.2f}% ({}/{} tasks finished)".format(
        progress_percent, progress, target))
    sys.stdout.flush()
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define _GNU_SOURCE
#define _FILE_OFFSET_BITS 64
#define _LARGEFILE64_SOURCE 1

#include <fcntl.h>
#include <stdarg.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include <sparse/sparse.h>

#include "defs.h"
#include "output_file.h"
#include "sparse_crc32.h"
#include "sparse_file.h"
#include "sparse_format.h"

#if defined(__APPLE__) && defined(__MACH__)
#define lseek64 lseek
#define off64_t off_t
#endif

#define SPARSE_HEADER_MAJOR_VER 1
#define SPARSE_HEADER_LEN (sizeof(sparse_header_t))
#define CHUNK_HEADER_LEN (sizeof(chunk_header_t))

/* Scratch buffer shared by the CRC paths; lazily allocated once. */
#define COPY_BUF_SIZE (1024U*1024U)
static char *copybuf;

#define min(a, b) \
	({ typeof(a) _a = (a); typeof(b) _b = (b); (_a < _b) ? _a : _b; })

/* Print a human-readable description of a parse error (errno-style code
 * `err`), optionally locating it via the printf-style fmt/args, but only when
 * verbose output was requested. */
static void verbose_error(bool verbose, int err, const char *fmt, ...)
{
	char *s = "";
	char *at = "";
	if (fmt) {
		va_list argp;
		int size;

		va_start(argp, fmt);
		size = vsnprintf(NULL, 0, fmt, argp);
		va_end(argp);

		if (size < 0) {
			return;
		}

		at = malloc(size + 1);
		if (at == NULL) {
			return;
		}

		va_start(argp, fmt);
		vsnprintf(at, size, fmt, argp);
		va_end(argp);
		at[size] = 0;
		s = " at ";
	}
	if (verbose) {
#ifndef USE_MINGW
		if (err == -EOVERFLOW) {
			sparse_print_verbose("EOF while reading file%s%s\n", s, at);
		} else
#endif
		if (err == -EINVAL) {
			sparse_print_verbose("Invalid sparse file format%s%s\n", s, at);
		} else if (err == -ENOMEM) {
			sparse_print_verbose("Failed allocation while reading file%s%s\n",
					s, at);
		} else {
			sparse_print_verbose("Unknown error %d%s%s\n", err, s, at);
		}
	}
	if (fmt) {
		free(at);
	}
}

/* Register a CHUNK_TYPE_RAW chunk (raw data at `offset` in `fd`) with the
 * sparse file, optionally folding its bytes into the running CRC. */
static int process_raw_chunk(struct sparse_file *s, unsigned int chunk_size,
		int fd, int64_t offset, unsigned int blocks, unsigned int block,
		uint32_t *crc32)
{
	int ret;
	int chunk;
	unsigned int len = blocks * s->block_size;

	if (chunk_size % s->block_size != 0) {
		return -EINVAL;
	}

	if (chunk_size / s->block_size != blocks) {
		return -EINVAL;
	}

	ret = sparse_file_add_fd(s, fd, offset, len, block);
	if (ret < 0) {
		return ret;
	}

	if (crc32) {
		/* CRC requested: actually read the data through copybuf. */
		while (len) {
			chunk = min(len, COPY_BUF_SIZE);
			ret = read_all(fd, copybuf, chunk);
			if (ret < 0) {
				return ret;
			}
			*crc32 = sparse_crc32(*crc32, copybuf, chunk);
			len -= chunk;
		}
	} else {
		/* No CRC: just skip past the chunk payload. */
		lseek64(fd, len, SEEK_CUR);
	}

	return 0;
}

/* Register a CHUNK_TYPE_FILL chunk (a 4-byte fill pattern repeated over
 * `blocks` blocks), optionally folding the pattern into the running CRC. */
static int process_fill_chunk(struct sparse_file *s, unsigned int chunk_size,
		int fd, unsigned int blocks, unsigned int block, uint32_t *crc32)
{
	int ret;
	int chunk;
	int64_t len = (int64_t)blocks * s->block_size;
	uint32_t fill_val;
	uint32_t *fillbuf;
	unsigned int i;

	if (chunk_size != sizeof(fill_val)) {
		return -EINVAL;
	}

	ret = read_all(fd, &fill_val, sizeof(fill_val));
	if (ret < 0) {
		return ret;
	}

	ret = sparse_file_add_fill(s, fill_val, len, block);
	if (ret < 0) {
		return ret;
	}

	if (crc32) {
		/* Fill copy_buf with the fill value */
		fillbuf = (uint32_t *)copybuf;
		for (i = 0; i < (COPY_BUF_SIZE / sizeof(fill_val)); i++) {
			fillbuf[i] = fill_val;
		}

		while (len) {
			chunk = min(len, COPY_BUF_SIZE);
			*crc32 = sparse_crc32(*crc32, copybuf, chunk);
			len -= chunk;
		}
	}

	return 0;
}

/* Handle a CHUNK_TYPE_DONT_CARE chunk: nothing is registered with the sparse
 * file, but skipped blocks still contribute zeros to the CRC when enabled. */
static int process_skip_chunk(struct sparse_file *s, unsigned int chunk_size,
		int fd __unused, unsigned int blocks, unsigned int block __unused,
		uint32_t *crc32)
{
	if (chunk_size != 0) {
		return -EINVAL;
	}

	if (crc32) {
		int64_t len = (int64_t)blocks * s->block_size;
		memset(copybuf, 0, COPY_BUF_SIZE);

		while (len) {
			int chunk = min(len, COPY_BUF_SIZE);
			*crc32 = sparse_crc32(*crc32, copybuf, chunk);
			len -= chunk;
		}
	}

	return 0;
}

/* Validate a CHUNK_TYPE_CRC32 chunk against the CRC accumulated so far. */
static int process_crc32_chunk(int fd, unsigned int chunk_size, uint32_t crc32)
{
	uint32_t file_crc32;
	int ret;

	if (chunk_size != sizeof(file_crc32)) {
		return -EINVAL;
	}

	ret = read_all(fd, &file_crc32, sizeof(file_crc32));
	if (ret < 0) {
		return ret;
	}

	if (file_crc32 != crc32) {
		return -EINVAL;
	}

	return 0;
}

/* Dispatch one chunk to its type-specific handler. Returns the number of
 * output blocks the chunk covers, or a negative errno. */
static int process_chunk(struct sparse_file *s, int fd, off64_t offset,
		unsigned int chunk_hdr_sz, chunk_header_t *chunk_header,
		unsigned int cur_block, uint32_t *crc_ptr)
{
	int ret;
	unsigned int chunk_data_size;

	chunk_data_size = chunk_header->total_sz - chunk_hdr_sz;

	switch (chunk_header->chunk_type) {
		case CHUNK_TYPE_RAW:
			ret = process_raw_chunk(s, chunk_data_size, fd, offset,
					chunk_header->chunk_sz, cur_block, crc_ptr);
			if (ret < 0) {
				verbose_error(s->verbose, ret, "data block at %lld", offset);
				return ret;
			}
			return chunk_header->chunk_sz;
		case CHUNK_TYPE_FILL:
			ret = process_fill_chunk(s, chunk_data_size, fd,
					chunk_header->chunk_sz, cur_block, crc_ptr);
			if (ret < 0) {
				verbose_error(s->verbose, ret, "fill block at %lld", offset);
				return ret;
			}
			return chunk_header->chunk_sz;
		case CHUNK_TYPE_DONT_CARE:
			ret = process_skip_chunk(s, chunk_data_size, fd,
					chunk_header->chunk_sz, cur_block, crc_ptr);
			if (chunk_data_size != 0) {
				if (ret < 0) {
					verbose_error(s->verbose, ret, "skip block at %lld", offset);
					return ret;
				}
			}
			return chunk_header->chunk_sz;
		case CHUNK_TYPE_CRC32:
			ret = process_crc32_chunk(fd, chunk_data_size, *crc_ptr);
			if (ret < 0) {
				verbose_error(s->verbose, -EINVAL, "crc block at %lld",
						offset);
				return ret;
			}
			return 0;
		default:
			verbose_error(s->verbose, -EINVAL, "unknown block %04X at %lld",
					chunk_header->chunk_type, offset);
	}

	/* NOTE(review): unknown chunk types fall through to a 0 (success,
	 * zero blocks) return after being reported — confirm this lenient
	 * handling is intended rather than returning -EINVAL. */
	return 0;
}

/* Parse an Android sparse image from `fd` into `s`, optionally verifying
 * embedded CRC32 chunks. */
static int sparse_file_read_sparse(struct sparse_file *s, int fd, bool crc)
{
	int ret;
	unsigned int i;
	sparse_header_t sparse_header;
	chunk_header_t chunk_header;
	uint32_t crc32 = 0;
	uint32_t *crc_ptr = 0;
	unsigned int cur_block = 0;
	off64_t offset;

	if (!copybuf) {
		copybuf = malloc(COPY_BUF_SIZE);
	}

	if (!copybuf) {
		return -ENOMEM;
	}

	if (crc) {
		crc_ptr = &crc32;
	}

	ret = read_all(fd, &sparse_header, sizeof(sparse_header));
	if (ret < 0) {
		return ret;
	}

	if (sparse_header.magic != SPARSE_HEADER_MAGIC) {
		return -EINVAL;
	}

	if (sparse_header.major_version != SPARSE_HEADER_MAJOR_VER) {
		return -EINVAL;
	}

	if (sparse_header.file_hdr_sz < SPARSE_HEADER_LEN) {
		return -EINVAL;
	}

	if (sparse_header.chunk_hdr_sz < sizeof(chunk_header)) {
		return -EINVAL;
	}

	if (sparse_header.file_hdr_sz > SPARSE_HEADER_LEN) {
		/* Skip the remaining bytes in a header that is longer than
		 * we expected.
		 */
		lseek64(fd, sparse_header.file_hdr_sz - SPARSE_HEADER_LEN, SEEK_CUR);
	}

	for (i = 0; i < sparse_header.total_chunks; i++) {
		ret = read_all(fd, &chunk_header, sizeof(chunk_header));
		if (ret < 0) {
			return ret;
		}

		if (sparse_header.chunk_hdr_sz > CHUNK_HEADER_LEN) {
			/* Skip the remaining bytes in a header that is longer than
			 * we expected.
			 */
			lseek64(fd, sparse_header.chunk_hdr_sz - CHUNK_HEADER_LEN, SEEK_CUR);
		}

		offset = lseek64(fd, 0, SEEK_CUR);

		ret = process_chunk(s, fd, offset, sparse_header.chunk_hdr_sz,
				&chunk_header, cur_block, crc_ptr);
		if (ret < 0) {
			return ret;
		}

		cur_block += ret;
	}

	if (sparse_header.total_blks != cur_block) {
		return -EINVAL;
	}

	return 0;
}

/* Import a plain (non-sparse) image: each block that consists of one
 * repeated 32-bit word becomes a fill chunk; everything else is raw. */
static int sparse_file_read_normal(struct sparse_file *s, int fd)
{
	int ret;
	uint32_t *buf = malloc(s->block_size);
	unsigned int block = 0;
	int64_t remain = s->len;
	int64_t offset = 0;
	unsigned int to_read;
	unsigned int i;
	bool sparse_block;

	if (!buf) {
		return -ENOMEM;
	}

	/* NOTE(review): buf is never freed, including on the early error
	 * return below — confirm whether the leak is acceptable here. */
	while (remain > 0) {
		to_read = min(remain, s->block_size);
		ret = read_all(fd, buf, to_read);
		if (ret < 0) {
			error("failed to read sparse file");
			return ret;
		}

		if (to_read == s->block_size) {
			sparse_block = true;
			for (i = 1; i < s->block_size / sizeof(uint32_t); i++) {
				if (buf[0] != buf[i]) {
					sparse_block = false;
					break;
				}
			}
		} else {
			sparse_block = false;
		}

		if (sparse_block) {
			/* TODO: add flag to use skip instead of fill for buf[0] == 0 */
			sparse_file_add_fill(s, buf[0], to_read, block);
		} else {
			sparse_file_add_fd(s, fd, offset, to_read, block);
		}

		remain -= to_read;
		offset += to_read;
		block++;
	}

	return 0;
}

/* Public entry: read `fd` into `s` as either a sparse or normal image.
 * CRC verification is only meaningful for sparse input. */
int sparse_file_read(struct sparse_file *s, int fd, bool sparse, bool crc)
{
	if (crc && !sparse) {
		return -EINVAL;
	}

	if (sparse) {
		return sparse_file_read_sparse(s, fd, crc);
	} else {
		return sparse_file_read_normal(s, fd);
	}
}

/* Allocate a sparse_file sized from the image's own header and read the
 * whole sparse image from `fd`. Returns NULL on any validation failure. */
struct sparse_file *sparse_file_import(int fd, bool verbose, bool crc)
{
	int ret;
	sparse_header_t sparse_header;
	int64_t len;
	struct sparse_file *s;

	ret = read_all(fd, &sparse_header, sizeof(sparse_header));
	if (ret < 0) {
		verbose_error(verbose, ret, "header");
		return NULL;
	}

	if (sparse_header.magic != SPARSE_HEADER_MAGIC) {
		verbose_error(verbose, -EINVAL, "header magic");
		return NULL;
	}

	if (sparse_header.major_version != SPARSE_HEADER_MAJOR_VER) {
		verbose_error(verbose, -EINVAL, "header major version");
		return NULL;
	}

	if (sparse_header.file_hdr_sz < SPARSE_HEADER_LEN) {
		return NULL;
	}

	if (sparse_header.chunk_hdr_sz < sizeof(chunk_header_t)) {
		return NULL;
	}

	len = (int64_t)sparse_header.total_blks * sparse_header.blk_sz;
	s = sparse_file_new(sparse_header.blk_sz, len);
	if (!s) {
		verbose_error(verbose, -EINVAL, NULL);
		return NULL;
	}

	/* Rewind: sparse_file_read re-parses the header it validated above. */
	ret = lseek64(fd, 0, SEEK_SET);
	if (ret < 0) {
		verbose_error(verbose, ret, "seeking");
		sparse_file_destroy(s);
		return NULL;
	}

	s->verbose = verbose;

	ret = sparse_file_read(s, fd, true, crc);
	if (ret < 0) {
		sparse_file_destroy(s);
		return NULL;
	}

	return s;
}

/* Try to import `fd` as a sparse image; on failure, fall back to treating
 * it as a normal image with a 4096-byte block size. */
struct sparse_file *sparse_file_import_auto(int fd, bool crc, bool verbose)
{
	struct sparse_file *s;
	int64_t len;
	int ret;

	s = sparse_file_import(fd, verbose, crc);
	if (s) {
		return s;
	}

	len = lseek64(fd, 0, SEEK_END);
	if (len < 0) {
		return NULL;
	}

	lseek64(fd, 0, SEEK_SET);

	s = sparse_file_new(4096, len);
	if (!s) {
		return NULL;
	}

	ret = sparse_file_read_normal(s, fd);
	if (ret < 0) {
		sparse_file_destroy(s);
		return NULL;
	}

	return s;
}
def run_with_config(self, config):
    """Build a Task for *config*, attach a FakeReporter, and run it."""
    task_payload = {'args': self.args, 'config': config}
    resource = DistributedResource(**self.resource)
    task = Task(self.train_fn, task_payload, resource)
    # Attach a FakeReporter as the task's reporter before dispatching.
    task.args['reporter'] = FakeReporter()
    return self.run_job(task)
// PendingManaOnOutput predicts how much mana (bm2) will be pledged to a node if the output specified is spent. func PendingManaOnOutput(outputID ledgerstate.OutputID) (float64, time.Time) { cachedOutputMetadata := Tangle().LedgerState.OutputMetadata(outputID) defer cachedOutputMetadata.Release() outputMetadata := cachedOutputMetadata.Unwrap() if outputMetadata == nil || outputMetadata.ConsumerCount() > 0 { return 0, time.Time{} } var value float64 Tangle().LedgerState.Output(outputID).Consume(func(output ledgerstate.Output) { output.Balances().ForEach(func(color ledgerstate.Color, balance uint64) bool { value += float64(balance) return true }) }) cachedTx := Tangle().LedgerState.Transaction(outputID.TransactionID()) defer cachedTx.Release() tx := cachedTx.Unwrap() txTimestamp := tx.Essence().Timestamp() return GetPendingMana(value, time.Since(txTimestamp)), txTimestamp }
<filename>src/services/translations/index.ts import { Platform, NativeModules } from 'react-native'; import I18n from 'i18n-js'; import pt_BR from './pt_BR'; import en from './en'; const deviceLanguage: string = Platform.OS === 'ios' ? NativeModules.SettingsManager.settings.AppleLocale || NativeModules.SettingsManager.settings.AppleLanguages[0] : NativeModules.I18nManager.localeIdentifier; I18n.defaultLocale = 'pt-BR'; I18n.locale = deviceLanguage; I18n.fallbacks = true; I18n.translations = { pt_BR: pt_BR, en_US: en, en_GB: en, }; const translate = ( scope: I18n.Scope, options?: I18n.TranslateOptions, ): string => I18n.t(scope, options); export default translate;
import Avatar from 'components/post-avatar';
import Date from 'components/date-formatter';
import CoverImage from 'components/post-cover-image';
import PostTitle from 'components/post-title';
import CardIcons from 'components/card-icons';
import Author from 'types/author';
import TechIcons from 'types/tech-icons'; // NOTE(review): imported but unused in this file — confirm before removing

// Props for the post page header.
type PostHeaderProps = {
  title: string;
  coverImage: string; // image source, passed straight through to CoverImage
  author: Author;
  date: string; // date string, formatted by the Date component
  // slug?: string;
};

// Post page header: cover image, title, publish date, author avatar and the
// author's tech-stack icons. The avatar is rendered twice — once hidden on
// small screens (md:block) and once hidden on md+ — so it lands in a
// different slot per breakpoint.
export default function PostHeader({ title, date, author, coverImage }: PostHeaderProps) {
  return (
    <>
      <div className='max-w-screen font-polished'>
        {/* Full-bleed cover image on small screens (negative x margins). */}
        <div className='mb-4 md:mb-4 -mx-5 sm:mx-0'>
          <CoverImage title={title} coverImage={coverImage} />
        </div>
        <PostTitle>{title}</PostTitle>
        <div className='max-w-4xl mx-auto align-middle content-center justify-center text-center'>
          {/* Desktop avatar slot. */}
          <div className='hidden md:block align-middle content-center text-center justify-center'>
            <Avatar avatar={author.avatar} name={author.name} />
          </div>
          <div className='text-xs font-subpolished'>
            Published&nbsp;
            <Date dateString={date} />
          </div>
          {/* Mobile avatar slot. */}
          <div className='block md:hidden w-inherit'>
            <Avatar avatar={author.avatar} name={author.name} />
          </div>
          {/* <div className='flex flex-col'>
            <Categories categories={categories} />
          </div> */}
          {/* Boolean flags on Author decide which tech icons are shown. */}
          <CardIcons
            antdesign={author.antdesign}
            apollo={author.apollo}
            auth0={author.auth0}
            graphql={author.graphql}
            next={author.next}
            react={author.react}
            tailwindcss={author.tailwindcss}
            typescript={author.typescript}
            wordpress={author.wordpress}
          />
        </div>
        <hr className='border-customGray w-4xl' />
      </div>
    </>
  );
}
According to boxer Mike Tyson, “Everybody has a plan until they get punched in the face.” Industrial civilization punched the planet in the face, repeatedly. We wonder why Earth isn’t the same planet with which we grew up. We offer apologies, including recycling and hybrid automobiles, but we’re unwilling — as a society — to pursue substantive change. And now it’s too late. Earth is done with humans. We’re walking around to save money on funeral expenses. My response: Pursue what you love. Pursue a life of excellence. Throw off the cultural shackles. All of ’em. We’re all in hospice now. Let’s give freely of our time, wisdom, and material possessions. Let’s throw ourselves into humanity and the living planet. Let’s act with compassion and courage. Let’s endow ourselves with dignity. Even if all the data and forecasts are incorrect, even if Earth can support infinite growth on a finite planet with no adverse consequences, please convince me there is a better way to live. And let’s not forget the immortal words of writer Edward Abbey: “Action is the antidote to despair.” After all, I am not now suggesting, nor have I ever suggested, giving up. Our insignificant lives have never been about us. They’re about the shards of the living planet we leave in our wake. As pointed out by Desmond Tutu, “If you are neutral in situations of injustice, you have chosen the side of the oppressor.” Anybody can cheer for winners. Essentially everybody I know sits on the sidelines cheering the favorites. I’m proposing we stop sitting on the sidelines. I’m proposing we get to work on behalf of the underdogs. I’m proposing we start working to save the living planet. I’m proposing we start by dismantling industrial civilization. Is there a better metric of a person’s character than how she treats those who can do nothing for her? Let’s treat others with respect as we leave this mortal coil. Let’s extend the concept of “other” to humans within non-industrial cultures. 
Let’s extend the concept further, to non-human species. You might think they’ve been doing nothing for us, but they’ve been providing for our own existence. Let’s return the favor. ____________ I presented at MultiKulti in Chicago on Sunday night, 30 March 2014. Description and audio file are here. Video is embedded below. ____________ I was interviewed on the radio by Mike Nowak on the morning of 30 March 2014. The podcast is here. ____________ I was interviewed by Dahr Jamail for his article at Truth-Out: The Vanishing Arctic Ice Cap was published 31 March 2014. ____________ Radio interview from Tampa, Florida the morning of 1 April 2014 is described and linked here ____________ East-coast tour (detailed information to come, including exact locations and times; follow on Facebook here): 5 April 2014, Ithaca, New York 6 April 2014, Niagara Falls, New York Monday, 7 April 2014, 7:00 p.m., McMartin House National Historic Site, 125 Gore Street Perth, Ontario, Canada. RSVP and follow on Facebook here. Tuesday, 8 April 2014, 7:00 p.m. Ottawa, Ontario, Canada, Conversation With Paul Beckwith (to be live-streamed) Wednesday, 9 April 2014, 6:00 p.m., Steelworker’s Hall, 66 Brady Street, Sudbury, Ontario, Canada, “Climate Chaos” (Facebook announcement and invitation) Thursday, 10 April 2014, 12:00 noon, Meeting Room 1, Greater Sudbury Public Library, 74 Mackenzie Street, Greater Sudbury, Ontario, Canada, “Responding to Climate Chaos” (Facebook announcement and invitation) Saturday, 12 April 2014, 6:30 p.m., East Hall, 2 Lippitt Road, University of Rhode Island, Kingston, Rhode Island, “Climate Chaos” Wednesday, 16 April 2014, New York, New York, sponsored by Deep Green Resistance, “Climate Chaos” Thursday, 17 April 2014, 4:00 p.m. Curiouser on Hudson 20 April 2014, Potluck dinner with attendees of last May’s presentation at the Mt. 
Kisco Library The Next Step: Living Courageously in a World of Transition, a 14-day seminar, 12-25 June 2014, Izabal, Guatemala, Central America. I will be included with an otherwise-excellent group of facilitators for this remarkable two-week experience. Late October through early November 2014, European tour. Details forthcoming. Follow on Facebook here. ____________ Going Dark is available from the publisher here, from Amazon here, from Amazon on Kindle here, from Barnes & Noble on Nook here, and as a Google e-book here. Going Dark was reviewed by Carolyn Baker at Speaking Truth to Power and by more than two dozen readers at Amazon.
<reponame>mateuszwwwrobel/Expense_Tracker_Django from datetime import datetime from django.contrib.auth.models import User from django.core.exceptions import ObjectDoesNotExist from django.db import models from django.db.models import Sum categories = ( ('Food', 'Food'), ('Takeaway', 'Takeaway'), ('Entertainment', 'Entertainment'), ('Bills', 'Bills'), ('Household Items', 'Household Items'), ('Other', 'Other'), ('Travel', 'Travel'), ) currencies = ( ('PLN', 'PLN'), ('GBP', 'GBP'), ('EUR', 'EUR'), ) class Budget(models.Model): name = models.CharField(max_length=100) currency = models.CharField(max_length=3, choices=currencies) created_by = models.ForeignKey(User, on_delete=models.PROTECT, related_name='created_by') users = models.ManyToManyField(User, related_name='users', blank=True) created_at = models.DateTimeField(auto_now_add=True) updated_at = models.DateTimeField(auto_now=True) def __str__(self): return self.name class Expense(models.Model): user = models.ForeignKey(User, on_delete=models.PROTECT) name = models.CharField(max_length=100) price = models.DecimalField(max_digits=10, decimal_places=2) category = models.CharField(max_length=15, choices=categories) budget = models.ForeignKey(Budget, on_delete=models.CASCADE) created_at = models.DateTimeField(default=datetime.now) updated_at = models.DateTimeField(auto_now=True) def __str__(self): return f"{self.price} - {self.category}" class Meta: ordering = ('created_at', ) @classmethod def get_by_id(cls, id): try: expense = cls.objects.get(id=id) except ObjectDoesNotExist: return None else: return expense @staticmethod def get_total_expenses_for_current_year(budget_id): today = datetime.today() expenses = Expense.objects.filter(budget=budget_id).filter(created_at__year=today.year) total = expenses.aggregate(total=Sum('price'))['total'] if total is None: exp_sum = 0 else: exp_sum = round(total, 2) return exp_sum @staticmethod def get_total_expenses_for_current_month(budget_id): today = datetime.today() expenses = 
Expense.objects.filter(budget=budget_id).filter(created_at__month=today.month) total = expenses.aggregate(total=Sum('price'))['total'] if total is None: exp_sum = 0 else: exp_sum = round(total, 2) return exp_sum
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
	super.onCreate(savedInstanceState);
	requestWindowFeature((int) Window.FEATURE_NO_TITLE);
	setContentView(R.layout.activity_select_building);

	app = (AnyplaceApp) getApplication();
	mAnyplaceCache = ObjectCache.getInstance(app);

	// "Refresh world buildings" button: refuses to start while another
	// buildings request is still running.
	btnRefreshWorldBuildings = (Button) findViewById(R.id.btnWorldBuildingsRefresh);
	btnRefreshWorldBuildings.setOnClickListener(new View.OnClickListener() {
		@Override
		public void onClick(View v) {
			if (isBuildingsJobRunning) {
				Toast.makeText(getBaseContext(), "Another building request is in process...", Toast.LENGTH_SHORT).show();
				return;
			}
			startBuildingsFetch(false, true);
		}
	});

	// "Refresh near-me buildings" button: same guard, different fetch mode.
	btnRefreshNearmeBuildings = (Button) findViewById(R.id.btnNearmeBuildingsRefresh);
	btnRefreshNearmeBuildings.setOnClickListener(new View.OnClickListener() {
		@Override
		public void onClick(View v) {
			if (isBuildingsJobRunning) {
				Toast.makeText(getBaseContext(), "Another building request is in process...", Toast.LENGTH_SHORT).show();
				return;
			}
			startBuildingsFetch(true, false);
		}
	});

	// Floor spinner: only records the chosen index; the value is consumed
	// when the floor plan task is started.
	spinnerFloors = (Spinner) findViewById(R.id.floors);
	spinnerFloors.setOnItemSelectedListener(new OnItemSelectedListener() {
		@Override
		public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
			selectedFloorIndex = pos;
		}

		@Override
		public void onNothingSelected(AdapterView<?> parent) {
		}
	});

	// Building spinner: selecting a building either reuses its cached floor
	// list or kicks off a floor fetch.
	spinnerBuildings = (Spinner) findViewById(R.id.buildings);
	spinnerBuildings.setOnItemSelectedListener(new OnItemSelectedListener() {
		@Override
		public void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
			if (isFloorsJobRunning) {
				Toast.makeText(getBaseContext(), "Another request is in process...", Toast.LENGTH_SHORT).show();
				return;
			}
			mAnyplaceCache.setSelectedBuildingIndex(pos);
			BuildingModel build = mAnyplaceCache.getSelectedBuilding();
			if (build != null && build.isFloorsLoaded()) {
				setFloorSpinner(build.getLoadedFloors());
				try {
					spinnerFloors.setSelection(build.getSelectedFloorIndex());
				} catch (IndexOutOfBoundsException ex) {
					// stale floor index for this building; keep default selection
				}
				onFloorsLoaded(build.getLoadedFloors());
			} else {
				startFloorFetch();
			}
		}

		@Override
		public void onNothingSelected(AdapterView<?> parent) {
		}
	});

	// "Refresh floors" button: re-fetches floors for the selected building.
	btnRefreshFloors = (Button) findViewById(R.id.btnFloorsRefresh);
	btnRefreshFloors.setOnClickListener(new View.OnClickListener() {
		@Override
		public void onClick(View v) {
			if (isFloorsJobRunning) {
				Toast.makeText(getBaseContext(), "Another floor request is in process...", Toast.LENGTH_SHORT).show();
				return;
			}
			try {
				startFloorFetch();
			} catch (IndexOutOfBoundsException ex) {
				// no valid building selected
				Toast.makeText(getBaseContext(), "Check again the building selected...", Toast.LENGTH_SHORT).show();
			}
		}
	});

	// "Done" confirms the building/floor choice and loads the floor plan.
	Button done = (Button) findViewById(R.id.btnDone);
	done.setOnClickListener(new View.OnClickListener() {
		@Override
		public void onClick(View v) {
			startFloorPlanTask();
		}
	});

	// Read launch parameters; missing coordinates force Mode.NONE and
	// disable the "near me" refresh.
	Bundle b = this.getIntent().getExtras();
	if (b == null) {
		mode = Mode.NONE;
		lat = "0.0";
		lon = "0.0";
	} else {
		mode = (Mode) b.getSerializable("mode");
		if (mode == null)
			mode = Mode.NONE;
		lat = b.getString("coordinates_lat");
		if (lat == null) {
			lat = "0.0";
			mode = Mode.NONE;
			btnRefreshNearmeBuildings.setEnabled(false);
		}
		lon = b.getString("coordinates_lon");
		if (lon == null) {
			lon = "0.0";
			mode = Mode.NONE;
			btnRefreshNearmeBuildings.setEnabled(false);
		}
	}

	if (mode != Mode.NONE) {
		// Coordinates available: start automatic floor detection in
		// parallel with fetching nearby buildings.
		floorSelectorDialog = new ProgressDialog(SelectBuildingActivityOLD.this);
		floorSelectorDialog.setIndeterminate(true);
		floorSelectorDialog.setTitle("Detecting floor");
		floorSelectorDialog.setMessage("Please be patient...");
		floorSelectorDialog.setCancelable(true);
		floorSelectorDialog.setCanceledOnTouchOutside(false);
		floorSelector = new Algo1Server(app);
		floorSelector.addListener((FloorSelector.FloorAnyplaceFloorListener) this);
		floorSelector.addListener((FloorSelector.ErrorAnyplaceFloorListener) this);
		isBuildingsLoadingFinished = false;
		isfloorSelectorJobFinished = false;
		floorSelector.Start(lat, lon);
		startBuildingsFetch(true, false);
	} else {
		// No coordinates: show cached buildings, or fetch the world list.
		List<BuildingModel> buildings = mAnyplaceCache.getSpinnerBuildings();
		if (buildings.size() == 0) {
			startBuildingsFetch(false, false);
		} else {
			setBuildingSpinner(buildings);
			spinnerBuildings.setSelection(mAnyplaceCache.getSelectedBuildingIndex());
		}
	}
}
/// Control 1 Register
///
/// Bit-timing and operating-mode control fields of the FlexCAN module.
/// Each field sub-module exposes the field's bit `offset`, its `mask`
/// (already shifted into place), and — for on/off fields — the named
/// `RW` values.
pub mod CTRL1 {

    /// This 3-bit field defines the length of the Propagation Segment in the bit time
    pub mod PROPSEG {
        /// Offset (0 bits)
        pub const offset: u32 = 0;
        /// Mask (3 bits: 0b111 << 0)
        pub const mask: u32 = 0b111 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// This bit configures FLEXCAN to operate in Listen Only Mode
    pub mod LOM {
        /// Offset (3 bits)
        pub const offset: u32 = 3;
        /// Mask (1 bit: 1 << 3)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Listen Only Mode is deactivated
            pub const LOM_0: u32 = 0b0;
            /// 0b1: FLEXCAN module operates in Listen Only Mode
            pub const LOM_1: u32 = 0b1;
        }
    }

    /// This bit defines the ordering mechanism for Message Buffer transmission
    pub mod LBUF {
        /// Offset (4 bits)
        pub const offset: u32 = 4;
        /// Mask (1 bit: 1 << 4)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Buffer with highest priority is transmitted first
            pub const LBUF_0: u32 = 0b0;
            /// 0b1: Lowest number buffer is transmitted first
            pub const LBUF_1: u32 = 0b1;
        }
    }

    /// This bit enables a mechanism that resets the free-running timer each time a message is received in Message Buffer 0
    pub mod TSYN {
        /// Offset (5 bits)
        pub const offset: u32 = 5;
        /// Mask (1 bit: 1 << 5)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Timer Sync feature disabled
            pub const TSYN_0: u32 = 0b0;
            /// 0b1: Timer Sync feature enabled
            pub const TSYN_1: u32 = 0b1;
        }
    }

    /// This bit defines how FLEXCAN recovers from Bus Off state
    pub mod BOFFREC {
        /// Offset (6 bits)
        pub const offset: u32 = 6;
        /// Mask (1 bit: 1 << 6)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Automatic recovering from Bus Off state enabled, according to CAN Spec 2.0 part B
            pub const BOFFREC_0: u32 = 0b0;
            /// 0b1: Automatic recovering from Bus Off state disabled
            pub const BOFFREC_1: u32 = 0b1;
        }
    }

    /// This bit defines the sampling mode of CAN bits at the FLEXCAN_RX
    pub mod SMP {
        /// Offset (7 bits)
        pub const offset: u32 = 7;
        /// Mask (1 bit: 1 << 7)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Just one sample is used to determine the bit value
            pub const SMP_0: u32 = 0b0;
            /// 0b1: Three samples are used to determine the value of the received bit: the regular one (sample point) and 2 preceding samples, a majority rule is used
            pub const SMP_1: u32 = 0b1;
        }
    }

    /// This bit provides a mask for the Rx Warning Interrupt associated with the RWRN_INT flag in the Error and Status Register
    pub mod RWRNMSK {
        /// Offset (10 bits)
        pub const offset: u32 = 10;
        /// Mask (1 bit: 1 << 10)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Rx Warning Interrupt disabled
            pub const RWRNMSK_0: u32 = 0b0;
            /// 0b1: Rx Warning Interrupt enabled
            pub const RWRNMSK_1: u32 = 0b1;
        }
    }

    /// This bit provides a mask for the Tx Warning Interrupt associated with the TWRN_INT flag in the Error and Status Register
    pub mod TWRNMSK {
        /// Offset (11 bits)
        pub const offset: u32 = 11;
        /// Mask (1 bit: 1 << 11)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Tx Warning Interrupt disabled
            pub const TWRNMSK_0: u32 = 0b0;
            /// 0b1: Tx Warning Interrupt enabled
            pub const TWRNMSK_1: u32 = 0b1;
        }
    }

    /// This bit configures FlexCAN to operate in Loop-Back Mode
    pub mod LPB {
        /// Offset (12 bits)
        pub const offset: u32 = 12;
        /// Mask (1 bit: 1 << 12)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Loop Back disabled
            pub const LPB_0: u32 = 0b0;
            /// 0b1: Loop Back enabled
            pub const LPB_1: u32 = 0b1;
        }
    }

    /// This bit provides a mask for the Error Interrupt.
    pub mod ERRMSK {
        /// Offset (14 bits)
        pub const offset: u32 = 14;
        /// Mask (1 bit: 1 << 14)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Error interrupt disabled
            pub const ERRMSK_0: u32 = 0b0;
            /// 0b1: Error interrupt enabled
            pub const ERRMSK_1: u32 = 0b1;
        }
    }

    /// This bit provides a mask for the Bus Off Interrupt.
    pub mod BOFFMSK {
        /// Offset (15 bits)
        pub const offset: u32 = 15;
        /// Mask (1 bit: 1 << 15)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values
        pub mod RW {
            /// 0b0: Bus Off interrupt disabled
            pub const BOFFMSK_0: u32 = 0b0;
            /// 0b1: Bus Off interrupt enabled
            pub const BOFFMSK_1: u32 = 0b1;
        }
    }

    /// This 3-bit field defines the length of Phase Buffer Segment 2 in the bit time
    pub mod PSEG2 {
        /// Offset (16 bits)
        pub const offset: u32 = 16;
        /// Mask (3 bits: 0b111 << 16)
        pub const mask: u32 = 0b111 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// This 3-bit field defines the length of Phase Buffer Segment 1 in the bit time
    pub mod PSEG1 {
        /// Offset (19 bits)
        pub const offset: u32 = 19;
        /// Mask (3 bits: 0b111 << 19)
        pub const mask: u32 = 0b111 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// This 2-bit field defines the maximum number of time quanta One time quantum is equal to the Sclock period
    pub mod RJW {
        /// Offset (22 bits)
        pub const offset: u32 = 22;
        /// Mask (2 bits: 0b11 << 22)
        pub const mask: u32 = 0b11 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// This 8-bit field defines the ratio between the PE clock frequency and the Serial Clock (Sclock) frequency
    pub mod PRESDIV {
        /// Offset (24 bits)
        pub const offset: u32 = 24;
        /// Mask (8 bits: 0xff << 24)
        pub const mask: u32 = 0xff << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}
<filename>lib/googlecloudsdk/command_lib/sql/import_util.py<gh_stars>0 # -*- coding: utf-8 -*- # # Copyright 2017 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Common utility functions for sql import commands.""" from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals from googlecloudsdk.api_lib.sql import import_util from googlecloudsdk.api_lib.sql import operations from googlecloudsdk.api_lib.sql import validate from googlecloudsdk.calliope import base from googlecloudsdk.command_lib.sql import flags from googlecloudsdk.core import log from googlecloudsdk.core import properties from googlecloudsdk.core.console import console_io def AddBaseImportFlags(parser, filetype): base.ASYNC_FLAG.AddToParser(parser) flags.AddInstanceArgument(parser) flags.AddUriArgument( parser, 'Path to the {filetype} file in Google Cloud Storage from which' ' the import is made. The URI is in the form gs://bucketName/fileName.' ' Compressed gzip files (.gz) are also supported.' .format(filetype=filetype)) flags.AddUser(parser, 'PostgreSQL user for this import operation.') def RunImportCommand(args, client, import_context): """Imports data into a Cloud SQL instance. Args: args: argparse.Namespace, The arguments that this command was invoked with. client: SqlClient instance, with sql_client and sql_messages props, for use in generating messages and making API calls. 
import_context: ImportContext; format-specific import metadata. Returns: A dict representing the import operation resource, if '--async' is used, or else None. Raises: HttpException: An HTTP error response was received while executing API request. ToolException: An error other than HTTP error occurred while executing the command. """ sql_client = client.sql_client sql_messages = client.sql_messages validate.ValidateInstanceName(args.instance) instance_ref = client.resource_parser.Parse( args.instance, params={'project': properties.VALUES.core.project.GetOrFail}, collection='sql.instances') console_io.PromptContinue( message='Data from [{uri}] will be imported to [{instance}].'.format( uri=args.uri, instance=args.instance), default=True, cancel_on_no=True) import_request = sql_messages.SqlInstancesImportRequest( instance=instance_ref.instance, project=instance_ref.project, instancesImportRequest=sql_messages.InstancesImportRequest( importContext=import_context)) result_operation = sql_client.instances.Import(import_request) operation_ref = client.resource_parser.Create( 'sql.operations', operation=result_operation.name, project=instance_ref.project) if args.async: return sql_client.operations.Get( sql_messages.SqlOperationsGetRequest( project=operation_ref.project, operation=operation_ref.operation)) operations.OperationsV1Beta4.WaitForOperation( sql_client, operation_ref, 'Importing data into Cloud SQL instance') log.status.write('Imported data from [{bucket}] into [{instance}].\n'.format( instance=instance_ref, bucket=args.uri)) return None def RunSqlImportCommand(args, client): """Imports data from a SQL dump file into Cloud SQL instance. Args: args: argparse.Namespace, The arguments that this command was invoked with. client: SqlClient instance, with sql_client and sql_messages props, for use in generating messages and making API calls. Returns: A dict representing the import operation resource, if '--async' is used, or else None. 
""" sql_import_context = import_util.SqlImportContext( client.sql_messages, args.uri, args.database, args.user) return RunImportCommand(args, client, sql_import_context) def RunCsvImportCommand(args, client): """Imports data from a CSV file into Cloud SQL instance. Args: args: argparse.Namespace, The arguments that this command was invoked with. client: SqlClient instance, with sql_client and sql_messages props, for use in generating messages and making API calls. Returns: A dict representing the import operation resource, if '--async' is used, or else None. """ csv_import_context = import_util.CsvImportContext( client.sql_messages, args.uri, args.database, args.table, args.columns, args.user) return RunImportCommand(args, client, csv_import_context)
def send_data(self, **kwargs):
    """POST *kwargs* as form data to the stream's 'input' endpoint.

    Returns the requests.Response on HTTP 200; prints the error body and
    returns False otherwise.
    """
    url = '{0}input/{1}?private_key={2}'.format(
        self.base_url, self.public_key, self.private_key)
    response = requests.post(url, kwargs)
    if response.status_code != 200:
        print("Request was not successful:\n" + response.text)
        return False
    # fix: the original used a Python-2-only `print "..."` statement here
    # (a SyntaxError on Python 3) next to function-style print calls;
    # unified on print() which works on both.
    print("Data sent successfully.")
    return response
<filename>aidos/node_emulation_test.go<gh_stars>1-10 // Copyright (c) 2017 Aidos Developer // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
package aidos

import (
	"math"
	"math/rand"
	"sort"
	"strings"
	"testing"
	"time"

	"github.com/AidosKuneen/gadk"
)

// dummy1 is an in-memory stand-in for the gadk node API, used by tests.
// It is seeded with random per-address transactions plus one bundle.
type dummy1 struct {
	acc2adr map[string][]gadk.Address // account name -> its addresses
	adr2acc map[gadk.Address]string   // reverse index of acc2adr
	vals    map[gadk.Address]int64    // balance per address
	mtrytes map[gadk.Trytes]gadk.Transaction
	t       *testing.T
	isConf  bool          // when true, GetInclusionStates reports everything confirmed
	ch      chan struct{} // signalled once per BroadcastTransactions call
	txs     map[gadk.Address][]*gadk.Transaction
	bundle  gadk.Bundle
	broadcasted []gadk.Transaction // last batch passed to BroadcastTransactions
	stored      []gadk.Transaction // last batch passed to StoreTransactions
}

// bundleAmount returns the bundle's total value in whole-ADK units.
func (d *dummy1) bundleAmount() float64 {
	var amount int64
	for _, tx := range d.bundle {
		amount += tx.Value
	}
	return float64(amount) / 100000000
}

// list returns count transactions of account ac after skipping skip,
// with addresses sorted descending and the combined slice reversed.
func (d *dummy1) list(ac string, count, skip int) []*gadk.Transaction {
	var res []*gadk.Transaction
	sort.Slice(d.acc2adr[ac], func(i, j int) bool {
		return strings.Compare(string(d.acc2adr[ac][i]), string(d.acc2adr[ac][j])) > 0
	})
	for _, adr := range d.acc2adr[ac] {
		res = append(res, d.txs[adr]...)
	}
	// reverse in place
	for i := len(res)/2 - 1; i >= 0; i-- {
		res[i], res[len(res)-1-i] = res[len(res)-1-i], res[i]
	}
	return res[skip : skip+count]
}

// listall returns every seeded transaction across all addresses, ordered
// the same way as list (addresses descending, then reversed).
func (d *dummy1) listall() []*gadk.Transaction {
	var adrs []gadk.Address
	for adr := range d.txs {
		adrs = append(adrs, adr)
	}
	sort.Slice(adrs, func(i, j int) bool {
		return strings.Compare(string(adrs[i]), string(adrs[j])) > 0
	})
	var res []*gadk.Transaction
	for _, adr := range adrs {
		res = append(res, d.txs[adr]...)
	}
	// reverse in place
	for i := len(res)/2 - 1; i >= 0; i-- {
		res[i], res[len(res)-1-i] = res[len(res)-1-i], res[i]
	}
	return res
}

// newdummy builds a dummy1 for the given account->addresses mapping and
// seeds it with random transactions via setupTXs.
func newdummy(accadr map[string][]gadk.Address, t *testing.T) *dummy1 {
	rand.Seed(time.Now().Unix())
	adr2acc := make(map[gadk.Address]string)
	for k, v := range accadr {
		for _, vv := range v {
			adr2acc[vv] = k
		}
	}
	d1 := &dummy1{
		acc2adr: accadr,
		adr2acc: adr2acc,
		vals:    make(map[gadk.Address]int64),
		mtrytes: make(map[gadk.Trytes]gadk.Transaction),
		t:       t,
		isConf:  false,
		ch:      make(chan struct{}),
		txs:     make(map[gadk.Address][]*gadk.Transaction),
	}
	d1.setupTXs()
	return d1
}

// setupTXs seeds five random transactions per known address (forcing each
// address's total above 0.2 ADK), then builds one five-entry bundle.
func (d *dummy1) setupTXs() {
	c := []string{"A", "B", "C"}
	for adr := range d.adr2acc {
		var sum int64
		for i := 0; i < 5; i++ {
			val := int64(rand.Int31() - math.MaxInt32/2)
			sum += val
			tx := &gadk.Transaction{
				Address:   adr,
				Value:     val,
				Timestamp: time.Now().Add(time.Duration(rand.Int31()-math.MaxInt32/2) * time.Second),
				Bundle:    gadk.Trytes("B"+c[i%3]) + gadk.EmptyHash[2:],
			}
			if i == 4 {
				// top up the last tx until the address total is positive enough
				for sum < 0.2*100000000 {
					val = int64(rand.Int31())
					tx.Value += val
					sum += val
				}
			}
			d.txs[adr] = append(d.txs[adr], tx)
		}
		d.vals[adr] = sum
	}
	for i := 0; i < 5; i++ {
		tx := gadk.Transaction{
			Address:      gadk.EmptyAddress,
			Value:        int64(rand.Int31() - math.MaxInt32/2),
			Timestamp:    time.Now().Add(time.Duration(-rand.Int31()%100000) * time.Second),
			CurrentIndex: int64(i),
		}
		// pick an arbitrary known address (last one iterated)
		for tx.Address = range d.adr2acc {
		}
		if i == 3 {
			tx.Value = 0
		}
		d.vals[tx.Address] += tx.Value
		d.bundle = append(d.bundle, tx)
	}
	// stamp every entry with the final bundle hash
	for i := range d.bundle {
		d.bundle[i].Bundle = d.bundle.Hash()
	}
}

// Balances reports the seeded balance for each requested address.
func (d *dummy1) Balances(adr []gadk.Address) (gadk.Balances, error) {
	b := make(gadk.Balances, len(adr))
	for i, a := range adr {
		v, ok := d.vals[a]
		if !ok {
			d.t.Error("invalid adr in balances")
		}
		b[i] = gadk.Balance{
			Address: a,
			Value:   v,
		}
	}
	return b, nil
}

// FindTransactions returns the bundle hashes plus the hashes of all
// transactions on the requested addresses (addresses sorted descending).
func (d *dummy1) FindTransactions(ft *gadk.FindTransactionsRequest) (*gadk.FindTransactionsResponse, error) {
	var res gadk.FindTransactionsResponse
	if ft.Addresses != nil {
		for _, tx := range d.bundle {
			res.Hashes = append(res.Hashes, tx.Hash())
		}
		sort.Slice(ft.Addresses, func(i, j int) bool {
			return strings.Compare(string(ft.Addresses[i]), string(ft.Addresses[j])) > 0
		})
		for _, a := range ft.Addresses {
			for _, tx := range d.txs[a] {
				res.Hashes = append(res.Hashes, tx.Hash())
			}
		}
	}
	return &res, nil
}

// GetTrytes resolves each hash against the seeded per-address transactions
// first, then the bundle; unknown hashes fail the test.
func (d *dummy1) GetTrytes(hashes []gadk.Trytes) (*gadk.GetTrytesResponse, error) {
	var res gadk.GetTrytesResponse
	for _, h := range hashes {
		exist := false
	loop:
		for _, txs := range d.txs {
			for _, tx := range txs {
				if tx.Hash() == h {
					res.Trytes = append(res.Trytes, *tx)
					exist = true
					break loop
				}
			}
		}
		if exist {
			continue
		}
	loop2:
		for _, tx := range d.bundle {
			if tx.Hash() == h {
				res.Trytes = append(res.Trytes, tx)
				exist = true
				break loop2
			}
		}
		if !exist {
			d.t.Error("invalid hashe in gettrytes", h)
		}
	}
	return &res, nil
}

// Fixed tip hashes handed out by GetTransactionsToApprove.
var trunk = gadk.EmptyHash[:len(gadk.EmptyAddress)-1] + "T"
var branch = gadk.EmptyHash[:len(gadk.EmptyAddress)-1] + "B"

// GetTransactionsToApprove always returns the fixed trunk/branch tips.
func (d *dummy1) GetTransactionsToApprove(depth int64) (*gadk.GetTransactionsToApproveResponse, error) {
	return &gadk.GetTransactionsToApproveResponse{
		TrunkTransaction:  trunk,
		BranchTransaction: branch,
	}, nil
}

// BroadcastTransactions records the batch and signals d.ch.
func (d *dummy1) BroadcastTransactions(trytes []gadk.Transaction) error {
	d.broadcasted = trytes
	d.ch <- struct{}{}
	return nil
}

// StoreTransactions records the batch for later inspection.
func (d *dummy1) StoreTransactions(trytes []gadk.Transaction) error {
	d.stored = trytes
	return nil
}

// GetInclusionStates reports all-confirmed when isConf is set, otherwise
// all-unconfirmed.
func (d *dummy1) GetInclusionStates(tx []gadk.Trytes, tips []gadk.Trytes) (*gadk.GetInclusionStatesResponse, error) {
	ret := make([]bool, len(tx))
	if d.isConf {
		for i := range ret {
			ret[i] = true
		}
	}
	return &gadk.GetInclusionStatesResponse{
		States: ret,
	}, nil
}

// GetNodeInfo returns an empty response; tests only need the call to succeed.
func (d *dummy1) GetNodeInfo() (*gadk.GetNodeInfoResponse, error) {
	return &gadk.GetNodeInfoResponse{}, nil
}
While the US and UK stand by Israel over its military offensive in Gaza, governments across Latin America have moved to cut their links. In a series of coordinated diplomatic and financial measures, Latin America is making a stand, heavily criticising Israel and cutting ties with the Jewish state, writes Channel 4 News reporter Guillermo Galdos. El Salvador, Chile, Peru, Brazil and Ecuador have all recalled their ambassadors, while Brazil, Argentina, Venezuela, Uruguay and Paraguay have suspended the Free Trade Agreement talks, demanding an immediate ceasefire in Gaza. In a joint statement they called for an end to the “disproportionate use of force by the Israeli army in the Gaza Strip which in the majority affects civilians, including children and women.” ‘Terrorist state’ Chile – with the largest Palestinian Arab population outside of the Middle East – was the first to cut ties. It is no small gesture, as the country has deep economic and military connections with Israel. And Bolivian President Evo Morales (pictured, above right) followed with a feisty speech on Wednesday – accusing Israel of being a “terrorist state” and has said that from now on Israelis would need a visa to visit his country. Israel doesn’t guarantee the principle of respect for life, and the basic right to live in harmony and peace. Evo Morales, Bolivian president “Israel doesn’t guarantee the principle of respect for life, and the basic right to live in harmony and peace in the international community,” he said. Israel’s foreign ministry said it was disappointed with the recalling of ambassadors – and that such actions only encourage Hamas “a group recognised as a terror organisation by many countries around the world”. ‘Diplomatic dwarf’ But the foreign ministry also made light of the measures, calling Brazil a “diplomatic dwarf”, and even joked about their semifinal loss in this summer’s World Cup. 
President Dilma Rousseff (pictured, above left) responded by saying: “Brazil was the first country to recognise Israel. Brazil is a friend to Israel… but there is a massacre ongoing in the Gaza Strip.” Uruguay and Argentina, which have large Jewish communities, have not followed suit, keeping their ambassadors in Israel. Colombia and Mexico are also maintaining diplomatic relations. But the stand by many in Latin America is being seen as a departure – and is being hailed here by many who are proud that the region is taking a stand.
// AI_Character.h
//
//////////////////////////////////////////////////////
#ifndef __AI_CHARACTER_H__
#define __AI_CHARACTER_H__

#include "Type.h"
#include "State.h"
#include "OResultDef.h"

class Obj;
class Obj_Character;

// AI controller attached to an Obj_Character.
// All externally triggered actions (move, skill, item use, ...) are routed
// through the current State object (_state), which decides whether the action
// is allowed and which AI_Logic_* / Event_* hook gets invoked. Subclasses
// (monster / human AI) override the virtual logic and event hooks below.
class AI_Character
{
friend class State;
friend class AIScript;

public:
    AI_Character( VOID );
    virtual ~AI_Character( VOID );

    // Bind this AI to its owning character. Returns FALSE on failure.
    virtual BOOL Init( Obj_Character *pCharacter );
    virtual VOID Term( VOID );

    //====================================================================
    // Action section
    //====================================================================
protected:
    // Raw actions applied directly to the owning character.
    // These bypass the State filter; use the public wrappers below instead.
    ORESULT Obj_Move(const WORLD_POS* pTar);
    ORESULT Obj_UseSkill(ObjID_t idSkill, BYTE nLevel, ObjID_t idTarget, FLOAT fTargetX, FLOAT fTargetZ, FLOAT fDir = -1.f, GUID_t guidTarget=INVALID_ID);
    ORESULT Obj_UseItem(ID_t nBagIndex, ObjID_t nTargetObj, WORLD_POS const& posTarget, PET_GUID_t const& guidTargetPet, ID_t nTargetItem);

public:
    // Operations that are always filtered through the current State.
    ORESULT Jump(VOID);
    ORESULT Stop(VOID);
    ORESULT Stall(VOID);
    ORESULT UseAbility(VOID);
    ORESULT Move(const WORLD_POS* pTar);
    ORESULT Move(INT nHandleID, WORD& wNumTargetPos, WORLD_POS *paTargetPos, BOOL bLine = FALSE);
    ORESULT UseSkill(ObjID_t idSkill, BYTE nLevel, ObjID_t idTarget, FLOAT fTargetX, FLOAT fTargetZ, FLOAT fDir = -1.f, GUID_t guidTarget=INVALID_ID);
    ORESULT UseItem(ID_t nBagIndex, ObjID_t nTargetObj, WORLD_POS const& posTarget, PET_GUID_t const& guidTargetPet, ID_t nTargetItem);

    // Queries delegated to the current State.
    BOOL CanUseSkill(VOID) const { return _state->CanUseSkill(this); }
    BOOL CanUseItem(VOID) const { return _state->CanUseItem(this); }

    //====================================================================
    // AI logic section
    //====================================================================
public:
    // Per-tick update, dispatched to the current State.
    BOOL Logic(UINT uTime) { return _state->Logic(this, uTime); };

    // Switch to a different State instance; no-op when the target state is
    // already active or when the state list cannot instance it.
    VOID ChangeState(ENUM_STATE eState)
    {
        if (eState == _state->GetStateID() )
        {
            return;
        }
        State* s = g_StateList.InstanceState(eState);
        if (s)
        {
            _state = s;
        }
    }

protected:
    // common logic
    VOID AI_Logic_Terror(UINT uTime);
    virtual VOID AI_Logic_Idle(UINT uTime) {};
    virtual VOID AI_Logic_Dead(UINT uTime) {};
    virtual VOID AI_Logic_Combat(UINT uTime) {};
    // monster logic
    virtual VOID AI_Logic_Flee(UINT uTime) {};
    virtual VOID AI_Logic_Patrol(UINT uTime) {};
    virtual VOID AI_Logic_Gohome(UINT uTime) {};
    virtual VOID AI_Logic_Service(UINT uTime) {};
    virtual VOID AI_Logic_Approach(UINT uTime) {};
    // human logic
    virtual VOID AI_Logic_Sit(UINT uTime) {};
    //virtual VOID AI_Logic_Mount(UINT uTime) {};
    virtual VOID AI_Logic_TeamFollow(UINT uTime) {};

protected:
    State* _state;              // current behaviour state; never NULL after Init
private:
    INT m_nIntervalTime;        // logic-tick interval bookkeeping

    //====================================================================
    // Event handling section
    //====================================================================
public:
    // Public event entry points; each forwards to the matching Event_* hook.
    VOID OnDie(Obj *pKiller = NULL);
    VOID OnRelive(Obj *pKiller = NULL);
    VOID OnDamage(INT nDamage, Obj_Character* pAttacker);
    VOID OnBeSkill(Obj_Character* pCharacter, INT nGoodEffect);

protected:
    // Overridable event hooks for subclasses.
    virtual VOID Event_OnDie(Obj* pKiller = NULL) {};
    virtual VOID Event_OnDamage(INT nDamage, Obj_Character* pAttacker) {};
    virtual VOID Event_OnBeSkill(Obj_Character* pCharacter, INT nGoodEffect) {};

    //====================================================================
    // Miscellaneous section
    //====================================================================
public:
    VOID SetAIState(State* s) { _state = s; }
    State* GetAIState(VOID) const { return _state; }
    virtual VOID Relive( BOOL bSkillRelive ) {};

protected:
    Obj_Character *GetCharacter()const{ return m_pCharacter; }
    // Pick a random position inside a circle of radius fRadio around pPos.
    WORLD_POS GetRandPosOfCircle(const WORLD_POS* pPos, FLOAT fRadio);

protected:
    Obj_Character *m_pCharacter;    // owning character; set by Init
};

#endif // __AI_CHARACTER_H__
/** * Create an instance of the SQLiteStorageAdapter using a specific database name. * For testing purposes only. * @param schemaRegistry The schema registry for the adapter. * @param modelProvider The model provider with the desired models. * @param databaseName The name of the database file. * @return An instance of SQLiteStorageAdapter backed by the database file name specified. */ public static SQLiteStorageAdapter create(SchemaRegistry schemaRegistry, ModelProvider modelProvider, String databaseName) { return SQLiteStorageAdapter.forModels(schemaRegistry, modelProvider, databaseName); }
package com.tybob12.xycraft.tileentity;

import com.tybob12.xycraft.block.BlockEnergyNodeCore;
import com.tybob12.xycraft.init.HexBlocks;
import com.tybob12.xycraft.init.HexConfig;
import com.tybob12.xycraft.util.HexDevice;
import com.tybob12.xycraft.util.HexEnergyNode;
import com.tybob12.xycraft.util.HexUtils;
import com.tybob12.xycraft.util.NetworkAnalyzer;

import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.ChatComponentTranslation;

import java.util.ArrayList;

/**
 * Tile entity for the HEX-type port of an Energy Node.
 *
 * A port can act either as an input (drains energy from networked HEX sources
 * into its buffer) or as an output (converts energy pulled from the linked
 * input port and offers it to networked HEX drains). The active mode is read
 * from the block's metadata bits each tick; the buffer holds two ticks' worth
 * of energy at the port's tier.
 *
 * @author Thorinair <<EMAIL>>
 */
public class TileEnergyNodePortHEX extends TileEntity implements ITileHexEnergyPort, ITileHexEnergySource, ITileHexEnergyDrain {

    /**** Static Values ****/

    public static final String ID = "tileEnergyNodePortHEX";

    // NBT Names
    private static final String NBT_ENERGY_SOURCES = "energy_sources";
    private static final String NBT_ENERGY_DRAINS = "energy_drains";
    private static final String NBT_ENERGY_PORTS = "energy_ports";
    private static final String NBT_ENERGY_BUFFER_FILLED = "energy_buffer_filled";
    private static final String NBT_ENERGY_BUFFER_DRAINED = "energy_buffer_drained";
    private static final String NBT_USABLE_SOURCES = "usable_sources";
    private static final String NBT_HAS_ENERGY = "has_energy";
    private static final String NBT_LINKED_PORT_EXISTS = "linked_port_exists";
    private static final String NBT_LINKED_PORT = "linked_port";
    private static final String NBT_PORT_TIER = "port_tier";

    /**** Variables ****/

    // Prepare lists. Machines discovered by the cable NetworkAnalyzer.
    private ArrayList<HexDevice> energySources;
    private ArrayList<HexDevice> energyDrains;
    private ArrayList<HexDevice> energyPorts;

    // Prepare energy buffer variables.
    // energyBufferTotal = 2 ticks' worth of energy for this port type/tier
    // (recomputed in readFromNBT and setPortTier, so it is not persisted).
    private float energyBufferTotal;
    private float energyBufferFilled;
    private float energyBufferDrained;

    // Prepare state variables.
    private int usableSources;    // count of sources that reported canDrainEnergy()
    private boolean hasEnergy;    // true only in output mode with a non-empty buffer

    // Prepare port variables.
    private HexDevice linkedPort; // the paired port on the other side of the node, or null
    private int portTier;
    private int portType;         // always HexEnergyNode.PORT_TYPE_HEX for this class

    // Prepare the recheck variables.
    // Drain rechecks are batched: at most one sendRecheck() every
    // recheckCountdown ticks, and only when a state change was observed.
    private int recheckCountdown;
    private int recheckCounter;
    private boolean shouldRecheck;

    /**** Common TileEntity Methods ****/

    public TileEnergyNodePortHEX() {

        this.energyBufferTotal = 0;
        this.energyBufferFilled = 0;
        this.energyBufferDrained = 0;

        this.usableSources = 0;
        this.hasEnergy = false;

        this.portTier = 0;
        this.portType = HexEnergyNode.PORT_TYPE_HEX;

        this.recheckCountdown = 10;
        this.recheckCounter = 0;
        this.shouldRecheck = false;
    }

    /**
     * Writes the tags to NBT.
     */
    @Override
    public void writeToNBT(NBTTagCompound tagCompound) {
        super.writeToNBT(tagCompound);

        // Write the machine lists.
        HexUtils.writeHexDevicesArrayToNBT(tagCompound, NBT_ENERGY_SOURCES, energySources);
        HexUtils.writeHexDevicesArrayToNBT(tagCompound, NBT_ENERGY_DRAINS, energyDrains);
        HexUtils.writeHexDevicesArrayToNBT(tagCompound, NBT_ENERGY_PORTS, energyPorts);

        // Write the energy buffer variables.
        tagCompound.setFloat(NBT_ENERGY_BUFFER_FILLED, energyBufferFilled);
        tagCompound.setFloat(NBT_ENERGY_BUFFER_DRAINED, energyBufferDrained);

        // Write the state variables.
        tagCompound.setInteger(NBT_USABLE_SOURCES, usableSources);
        tagCompound.setBoolean(NBT_HAS_ENERGY, hasEnergy);

        // Write the port variables. A separate boolean flags whether linkedPort
        // was null, since a HexDevice itself cannot encode "absent".
        HexUtils.writeHexDeviceToNBT(tagCompound, NBT_LINKED_PORT, linkedPort);
        tagCompound.setBoolean(NBT_LINKED_PORT_EXISTS, linkedPort != null);
        tagCompound.setInteger(NBT_PORT_TIER, portTier);
    }

    /**
     * Reads the tags from NBT.
     */
    @Override
    public void readFromNBT(NBTTagCompound tagCompound) {
        super.readFromNBT(tagCompound);

        // Read the machine lists.
        energySources = HexUtils.readHexDevicesArrayFromNBT(tagCompound, NBT_ENERGY_SOURCES);
        energyDrains = HexUtils.readHexDevicesArrayFromNBT(tagCompound, NBT_ENERGY_DRAINS);
        energyPorts = HexUtils.readHexDevicesArrayFromNBT(tagCompound, NBT_ENERGY_PORTS);

        // Read the energy buffer variables.
        energyBufferFilled = tagCompound.getFloat(NBT_ENERGY_BUFFER_FILLED);
        energyBufferDrained = tagCompound.getFloat(NBT_ENERGY_BUFFER_DRAINED);

        // Read the state variables.
        usableSources = tagCompound.getInteger(NBT_USABLE_SOURCES);
        hasEnergy = tagCompound.getBoolean(NBT_HAS_ENERGY);

        // Read the port variables.
        if (tagCompound.getBoolean(NBT_LINKED_PORT_EXISTS))
            linkedPort = HexUtils.readHexDeviceFromNBT(tagCompound, NBT_LINKED_PORT);
        else
            linkedPort = null;
        portTier = tagCompound.getInteger(NBT_PORT_TIER);
        // Derived value: total buffer capacity is two ticks' worth of energy.
        energyBufferTotal = HexEnergyNode.parseEnergyPerTick(portType, portTier) * 2;
    }

    /**
     * Fired on every tick. Main processing is done here.
     */
    @Override
    public void updateEntity() {
        // Confirm that this is server side.
        if (!worldObj.isRemote) {
            if (linkedPort != null) {
                // Situation in which the linked port is input, and this port is output.
                // Pull energy through the node, converting between energy types.
                if (HexUtils.getMetaBitBiInt(HexEnergyNode.META_MODE_0, HexEnergyNode.META_MODE_1, worldObj, linkedPort.x, linkedPort.y, linkedPort.z) == HexEnergyNode.PORT_MODE_INPUT
                        && HexUtils.getMetaBitBiInt(HexEnergyNode.META_MODE_0, HexEnergyNode.META_MODE_1, worldObj, xCoord, yCoord, zCoord) == HexEnergyNode.PORT_MODE_OUTPUT) {

                    // Fill the port buffer until it is full.
                    if (energyBufferFilled < energyBufferTotal) {
                        ITileHexEnergyPort port = (ITileHexEnergyPort) worldObj.getTileEntity(linkedPort.x, linkedPort.y, linkedPort.z);
                        if (port != null) {
                            // The request is scaled down by the conversion ratio and the
                            // drained amount scaled up by the efficiency multiplier.
                            float multi = port.getMultiplier(portType, portTier);
                            float conv = HexEnergyNode.parseConversionMultiplier(port.getPortType(), portType);
                            energyBufferFilled = energyBufferFilled + port.drainPortEnergy((energyBufferTotal - energyBufferFilled) / conv) * multi;
                        }
                    }

                    // Check if states have changed and send a recheck if so.
                    if (energyBufferFilled > 0 && !hasEnergy) {
                        hasEnergy = true;
                        shouldRecheck = true;
                    }
                    else if (energyBufferFilled <= 0 && hasEnergy) {
                        hasEnergy = false;
                        shouldRecheck = true;
                    }

                    // Recheck only on certain ticks.
                    if (recheckCounter >= recheckCountdown) {
                        if (shouldRecheck)
                            sendRecheck();
                        shouldRecheck = false;
                        recheckCounter = 0;
                    }
                    else
                        recheckCounter++;

                    // Reset the per-tick drain accounting.
                    energyBufferDrained = 0;
                }
                // Situation in which the linked port is output, and this port is input.
                // Gather energy from networked HEX sources into the buffer.
                else if (HexUtils.getMetaBitBiInt(HexEnergyNode.META_MODE_0, HexEnergyNode.META_MODE_1, worldObj, linkedPort.x, linkedPort.y, linkedPort.z) == HexEnergyNode.PORT_MODE_OUTPUT
                        && HexUtils.getMetaBitBiInt(HexEnergyNode.META_MODE_0, HexEnergyNode.META_MODE_1, worldObj, xCoord, yCoord, zCoord) == HexEnergyNode.PORT_MODE_INPUT) {

                    energyBufferDrained = 0;
                    // Check the situation in which the machine has available energy sources and items to process.
                    if (canDrainSource() && (energyBufferFilled < energyBufferTotal)) {
                        // Drain from all sources.
                        drainFromSources();
                        energyBufferFilled = energyBufferFilled + energyBufferDrained;
                    }
                    // An input port never offers energy to drains.
                    hasEnergy = false;
                }
                else
                    hasEnergy = false;
            }
            else
                hasEnergy = false;
        }
    }

    /**** ITileHexEnergySource Methods ****/

    /**
     * Saves the ArrayList of energy drains.
     * @param energyDrains The ArrayList to save.
     */
    @Override
    public void setDrains(ArrayList<HexDevice> energyDrains) {
        this.energyDrains = energyDrains;
        if (HexConfig.cfgGeneralMachineNetworkDebug && HexConfig.cfgGeneralNetworkDebug)
            System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Drains received. d: " + energyDrains.size());
        markDirty();
    }

    /**
     * Called by drains to check if they can drain energy.
     * @return Boolean if energy can be drained.
     */
    @Override
    public boolean canDrainEnergy() {
        // Only an output-mode port with buffered energy can serve drains.
        return hasEnergy
                && linkedPort != null
                && HexUtils.getMetaBitBiInt(HexEnergyNode.META_MODE_0, HexEnergyNode.META_MODE_1, worldObj, xCoord, yCoord, zCoord) == HexEnergyNode.PORT_MODE_OUTPUT;
    }

    /**
     * Called by drains to drain energy.
     * @param amount The amount of energy requested.
     * @return The amount of energy actually drained.
     */
    @Override
    public float drainEnergy(float amount) {
        if (canDrainEnergy()) {
            // If there is enough energy left this tick, return full requested energy to drain.
            if (energyBufferDrained + amount < getEnergyPerTick()) {
                energyBufferFilled = energyBufferFilled - amount;
                energyBufferDrained = energyBufferDrained + amount;
                if (HexConfig.cfgGeneralMachineNetworkDebug && HexConfig.cfgGeneralNetworkDebug)
                    System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Drain requested. r: " + amount + " d(f): " + amount + " t: " + energyBufferDrained);
                return amount;
            }
            // Otherwise, return only the remaining energy.
            else {
                float partial = getEnergyPerTick() - energyBufferDrained;
                energyBufferFilled = energyBufferFilled - partial;
                energyBufferDrained = getEnergyPerTick();
                if (HexConfig.cfgGeneralMachineNetworkDebug && HexConfig.cfgGeneralNetworkDebug)
                    System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Drain requested. r: " + amount + " d(p): " + partial + " t: " + energyBufferDrained);
                return partial;
            }
        }
        // If the source cannot provide energy, return 0.
        else {
            if (HexConfig.cfgGeneralMachineNetworkDebug && HexConfig.cfgGeneralNetworkDebug)
                System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Drain requested. r: " + amount + " d(n): " + 0 + " t: " + energyBufferDrained);
            return 0;
        }
    }

    /**
     * Called by drains to determine the amount of energy available per tick.
     * @return The amount of energy available per tick.
     */
    @Override
    public float getEnergyPerTick() {
        if (canDrainEnergy()) {
            // Throughput is limited by the linked (input) port's type/tier,
            // scaled by the conversion/efficiency multiplier towards HEX.
            ITileHexEnergyPort port = (ITileHexEnergyPort) worldObj.getTileEntity(linkedPort.x, linkedPort.y, linkedPort.z);
            return HexEnergyNode.parseEnergyPerTick(port.getPortType(), port.getPortTier()) * port.getMultiplier(portType, portTier);
        }
        else
            return 0;
    }

    /**
     * Called by Hexorium Probe to display tile entity info to chat.
     * @param player Player to show the message to.
     */
    @Override
    public void displayInfoSource(EntityPlayer player) {
        displayInfoPort(player);
    }

    /**** Custom Methods ****/

    /**
     * Sends a recheck request to all drains.
     */
    private void sendRecheck() {
        if (HexConfig.cfgGeneralMachineNetworkDebug && HexConfig.cfgGeneralNetworkDebug)
            System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Requesting recheck.");
        if (energyDrains != null)
            // Parse the whole energyDrains ArrayList and call recheckSources() on each of them.
            for (HexDevice entry : energyDrains)
                if (HexUtils.checkChunk(worldObj, entry.x, entry.z)) {
                    ITileHexEnergyDrain energyDrain = (ITileHexEnergyDrain) worldObj.getTileEntity(entry.x, entry.y, entry.z);
                    if (energyDrain != null)
                        energyDrain.recheckSources();
                }
    }

    /**** ITileHexEnergyDrain Methods ****/

    /**
     * Saves the ArrayList of energy sources.
     * @param energySources The ArrayList to save.
     */
    @Override
    public void setSources(ArrayList<HexDevice> energySources) {
        this.energySources = energySources;
        if (HexConfig.cfgGeneralMachineNetworkDebug && HexConfig.cfgGeneralNetworkDebug)
            System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Sources received. s: " + energySources.size());
        recheckSources();
    }

    /**
     * Called by sources to force drains to recheck them.
     */
    @Override
    public void recheckSources() {
        scanSources();
        if (HexConfig.cfgGeneralMachineNetworkDebug && HexConfig.cfgGeneralNetworkDebug)
            System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Recheck requested. s: " + usableSources);
    }

    /**
     * Called by Hexorium Probe to display tile entity info to chat.
     * @param player Player to show the message to.
     */
    @Override
    public void displayInfoDrain(EntityPlayer player) {
        displayInfoPort(player);
    }

    /**** Custom Methods ****/

    /**
     * Called to check if there are any usable sources available.
     */
    private void scanSources() {
        usableSources = 0;
        if(energySources != null)
            // Parse the whole energySources ArrayList and call canDrainSource() on each of them.
            for (HexDevice entry : energySources)
                if (HexUtils.checkChunk(worldObj, entry.x, entry.z)) {
                    ITileHexEnergySource energySource = (ITileHexEnergySource) worldObj.getTileEntity(entry.x, entry.y, entry.z);
                    if (energySource != null)
                        if (energySource.canDrainEnergy())
                            usableSources++;
                }
        markDirty();
    }

    /**
     * Called to drain from all sources.
     */
    private void drainFromSources() {
        if (energySources != null)
            // Parse the whole energySources ArrayList and attempt to drain energy from every source.
            for (HexDevice entry : energySources)
                if (HexUtils.checkChunk(worldObj, entry.x, entry.z)) {
                    ITileHexEnergySource energySource = (ITileHexEnergySource) worldObj.getTileEntity(entry.x, entry.y, entry.z);
                    if (energySource != null)
                        if (energySource.canDrainEnergy())
                            // Stop once the buffer (filled + drained this tick) is full.
                            if (energyBufferFilled + energyBufferDrained < energyBufferTotal) {
                                if (energyBufferTotal - energyBufferFilled - energyBufferDrained < energySource.getEnergyPerTick())
                                    energyBufferDrained = energyBufferDrained + energySource.drainEnergy(energyBufferTotal - energyBufferFilled - energyBufferDrained);
                                else
                                    energyBufferDrained = energyBufferDrained + energySource.drainEnergy(energySource.getEnergyPerTick());
                            }
                }
    }

    /**
     * Called to check if there are any usable sources available.
     * @return If there are any usable sources.
     */
    private boolean canDrainSource() {
        return usableSources > 0;
    }

    /**** ITileHexEnergyPort Methods ****/

    /**
     * Saves the ArrayList of energy ports.
     * @param energyPorts The ArrayList to save.
     */
    @Override
    public void setPorts(ArrayList<HexDevice> energyPorts) {
        if (energyPorts != null && energyPorts.size() != 0) {
            this.energyPorts = energyPorts;
            // If the port is already linked, analyze the incoming list and unlink if necessary.
            if (this.linkedPort != null) {
                boolean checkLink = false;
                for (HexDevice entry : this.energyPorts)
                    if (entry.x == this.linkedPort.x && entry.y == this.linkedPort.y && entry.z == this.linkedPort.z)
                        checkLink = true;
                if (!checkLink) {
                    breakPortLink();
                }
            }
        }
        else {
            // An empty network always severs any existing link.
            this.energyPorts = null;
            breakPortLink();
        }
        if (HexConfig.cfgGeneralMachineNetworkDebug && HexConfig.cfgGeneralNetworkDebug) {
            if (this.energyPorts != null)
                System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Ports received. n: " + this.energyPorts.size());
            else
                System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Ports received. n: " + 0);
        }
        markDirty();
    }

    /**
     * Called when setting up an Energy Node to set the tier.
     * @param portTier Tier of the port.
     */
    @Override
    public void setPortTier(int portTier) {
        this.portTier = portTier;
        // Keep the derived buffer capacity in sync with the new tier.
        this.energyBufferTotal = HexEnergyNode.parseEnergyPerTick(this.portType, this.portTier) * 2;
        if (HexConfig.cfgEnergyNodeDebug)
            System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Port tier set to: " + portTier);
        markDirty();
    }

    /**
     * Called when retrieving the tier.
     * @return Tier integer.
     */
    @Override
    public int getPortTier() {
        return this.portTier;
    }

    /**
     * Called when retrieving the type.
     * @return Tier integer.
     */
    @Override
    public int getPortType() {
        return this.portType;
    }

    /**
     * Checks if ports are connected via network.
     * @param x X coordinate of the target port.
     * @param y Y coordinate of the target port.
     * @param z Z coordinate of the target port.
     * @return Boolean whether the port are on same network.
     */
    @Override
    public boolean checkPortConnectivity(int x, int y, int z) {
        // Return true if the port exists in the list.
        if (energyPorts != null)
            for (HexDevice entry : energyPorts)
                if (entry.x == x && entry.y == y && entry.z == z)
                    return true;
        return false;
    }

    /**
     * Checks if ports are already linked.
     * @param x X coordinate of the target port.
     * @param y Y coordinate of the target port.
     * @param z Z coordinate of the target port.
     * @return Boolean whether the ports are already linked.
     */
    @Override
    public boolean checkPortLinked(int x, int y, int z) {
        // Return true if the port is already linked with target.
        return linkedPort != null && linkedPort.x == x && linkedPort.y == y && linkedPort.z == z;
    }

    /**
     * Called when linking ports.
     * @param x X coordinate of the calling port.
     * @param y Y coordinate of the calling port.
     * @param z Z coordinate of the calling port.
     * @return Whether the linking was successful.
     */
    @Override
    public boolean linkPort(int x, int y, int z) {
        ITileHexEnergyPort energyPort = (ITileHexEnergyPort) worldObj.getTileEntity(x, y, z);
        if (energyPort != null) {
            // If the port is already linked, unlink it.
            if (linkedPort != null) {
                ITileHexEnergyPort old = (ITileHexEnergyPort) worldObj.getTileEntity(linkedPort.x, linkedPort.y, linkedPort.z);
                if (old != null) {
                    old.breakPortLink();
                }
            }
            // Link the port with new target.
            // NOTE(review): the stored block is read at (xCoord, yCoord, zCoord) — this
            // port's own block — not at the target (x, y, z). Confirm this is intended;
            // displayInfoPort re-reads the block at the linked coordinates when shown.
            linkedPort = new HexDevice(x, y, z, worldObj.getBlock(xCoord, yCoord, zCoord));
            markDirty();
            return true;
        }
        return false;
    }

    /**
     * Breaks a link between two ports.
     */
    @Override
    public void breakPortLink() {
        if (linkedPort != null) {
            ITileHexEnergyPort port = (ITileHexEnergyPort) worldObj.getTileEntity(linkedPort.x, linkedPort.y, linkedPort.z);
            // Unlink both ends, then rescan the cable networks on each side.
            unlinkPort();
            if (port != null)
                port.unlinkPort();
            unlinkPortAnalyze();
            if (port instanceof TileEnergyNodePortHEX) {
                TileEnergyNodePortHEX tileEnergyNodePortHEX = (TileEnergyNodePortHEX) port;
                tileEnergyNodePortHEX.unlinkPortAnalyze();
            }
            markDirty();
        }
    }

    /**
     * Called when unlinking ports to set the link to null.
     */
    @Override
    public void unlinkPort() {
        linkedPort = null;
        markDirty();
    }

    /**
     * Called to empty the buffer.
     */
    @Override
    public void emptyBuffer() {
        energyBufferFilled = 0;
        markDirty();
    }

    /**
     * Called by output ports to determine the conversion multiplier.
     * @param typeOut The type of energy of output port.
     * @param tierOut The tier of core of output port.
     * @return The multiplier to use.
     */
    @Override
    public float getMultiplier(int typeOut, int tierOut) {
        return HexEnergyNode.parseConversionMultiplier(portType, typeOut) * HexEnergyNode.parseEfficiencyMultiplier(portTier, tierOut);
    }

    /**
     * Called by output ports to drain energy.
     * @param amount The amount of energy requested.
     * @return The amount of energy actually drained.
     */
    @Override
    public float drainPortEnergy(float amount) {
        if (amount < energyBufferFilled) {
            energyBufferFilled = energyBufferFilled - amount;
            if (HexConfig.cfgEnergyNodeVerboseDebug && HexConfig.cfgEnergyNodeDebug)
                System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Port conversion requested. r: " + amount + " d(f): " + amount + " f: " + energyBufferFilled);
            return amount;
        }
        else {
            // Not enough buffered: hand over whatever is left.
            float partial = energyBufferFilled;
            energyBufferFilled = 0;
            if (HexConfig.cfgEnergyNodeVerboseDebug && HexConfig.cfgEnergyNodeDebug)
                System.out.println("[Energy Node Port: HEX] (" + xCoord + ", " + yCoord + ", " + zCoord + "): Port conversion requested. r: " + amount + " d(p): " + partial + " f: " + energyBufferFilled);
            return partial;
        }
    }

    /**
     * Called by Hexorium Probe to display tile entity info to chat.
     * @param player Player to show the message to.
     */
    @Override
    public void displayInfoPort(EntityPlayer player) {
        HexUtils.addChatProbeTitle(player);
        // If player is not sneaking.
        if (!player.isSneaking()) {
            HexUtils.addChatProbeGenericInfo(player, worldObj, xCoord, yCoord, zCoord);
            player.addChatMessage(new ChatComponentTranslation("msg.probeTypePort.txt"));
            int mode = HexUtils.getMetaBitBiInt(HexEnergyNode.META_MODE_0, HexEnergyNode.META_MODE_1, worldObj, xCoord, yCoord, zCoord);
            player.addChatMessage(new ChatComponentTranslation("msg.probePortMode" + (mode + 1) + ".txt"));
            player.addChatMessage(new ChatComponentTranslation("msg.probeEnergy.txt", Math.round(energyBufferFilled), "HEX", Math.round(energyBufferTotal), "HEX"));

            boolean isPart = HexUtils.getMetaBit(HexBlocks.META_STRUCTURE_IS_PART, worldObj, xCoord, yCoord, zCoord);
            if (isPart && linkedPort != null && mode == HexEnergyNode.PORT_MODE_INPUT) {
                ITileHexEnergyPort port = (ITileHexEnergyPort) worldObj.getTileEntity(linkedPort.x, linkedPort.y, linkedPort.z);
                player.addChatMessage(new ChatComponentTranslation("msg.probeEfficiency.txt", (portTier + 1),
                        Math.round((1 - HexEnergyNode.parseEfficiencyMultiplier(portTier, port.getPortTier())) * 100), 0, "HEX"));
            }
            else if (isPart && linkedPort != null && mode == HexEnergyNode.PORT_MODE_OUTPUT) {
                ITileHexEnergyPort port = (ITileHexEnergyPort) worldObj.getTileEntity(linkedPort.x, linkedPort.y, linkedPort.z);
                player.addChatMessage(new ChatComponentTranslation("msg.probeEfficiency.txt", (portTier + 1),
                        Math.round((1 - HexEnergyNode.parseEfficiencyMultiplier(portTier, port.getPortTier())) * 100), Math.round(getEnergyPerTick()), "HEX"));
            }
            else if (isPart)
                player.addChatMessage(new ChatComponentTranslation("msg.probeEfficiencyTier.txt", (portTier + 1)));
            else
                player.addChatMessage(new ChatComponentTranslation("msg.probeEfficiencyNotFormed.txt"));

            if (isPart)
                player.addChatMessage(new ChatComponentTranslation("msg.probeFormedYes.txt"));
            else
                player.addChatMessage(new ChatComponentTranslation("msg.probeFormedNo.txt"));
            if (linkedPort != null) {
                player.addChatMessage(new ChatComponentTranslation("msg.probeLinkedYes.txt"));
                player.addChatMessage(new ChatComponentTranslation("msg.probeConnectedEntry.txt",
                        linkedPort.x, linkedPort.y, linkedPort.z, worldObj.getBlock(linkedPort.x, linkedPort.y, linkedPort.z).getLocalizedName()));
            }
            else
                player.addChatMessage(new ChatComponentTranslation("msg.probeLinkedNo.txt"));
        }
        // If player is sneaking.
        else {
            player.addChatMessage(new ChatComponentTranslation("msg.probeConnectedSources.txt"));
            HexUtils.addChatProbeConnectedMachines(player, energySources, worldObj, xCoord, yCoord, zCoord);
            player.addChatMessage(new ChatComponentTranslation("msg.probeConnectedDrains.txt"));
            HexUtils.addChatProbeConnectedMachines(player, energyDrains, worldObj, xCoord, yCoord, zCoord);
            player.addChatMessage(new ChatComponentTranslation("msg.probeConnectedPorts.txt"));
            HexUtils.addChatProbeConnectedMachines(player, energyPorts, worldObj, xCoord, yCoord, zCoord);
        }
    }

    /**** Custom Methods ****/

    /**
     * Called to get the tunneled port.
     */
    public HexDevice getTunnel() {
        return linkedPort;
    }

    /**
     * Called when unlinking ports, this method is called after unlinkPort.
     * Used to rescan the network.
     */
    public void unlinkPortAnalyze() {
        if (HexUtils.getMetaBitBiInt(HexEnergyNode.META_MODE_0, HexEnergyNode.META_MODE_1, worldObj, xCoord, yCoord, zCoord) == HexEnergyNode.PORT_MODE_TUNNEL) {
            // Locate the Energy Node Core. The core sits on exactly one of the
            // six adjacent blocks; (xc,yc,zc) steps towards it and (xp,yp,zp)
            // steps away from it, giving the two cable networks to reanalyze.
            int xc = xCoord;
            int yc = yCoord;
            int zc = zCoord;
            int xp = xCoord;
            int yp = yCoord;
            int zp = zCoord;
            int side = 0;
            if (worldObj.getBlock(xCoord, yCoord + 1, zCoord) instanceof BlockEnergyNodeCore)
                side = 0;
            else if (worldObj.getBlock(xCoord, yCoord - 1, zCoord) instanceof BlockEnergyNodeCore)
                side = 1;
            else if (worldObj.getBlock(xCoord, yCoord, zCoord + 1) instanceof BlockEnergyNodeCore)
                side = 2;
            else if (worldObj.getBlock(xCoord, yCoord, zCoord - 1) instanceof BlockEnergyNodeCore)
                side = 3;
            else if (worldObj.getBlock(xCoord + 1, yCoord, zCoord) instanceof BlockEnergyNodeCore)
                side = 4;
            else if (worldObj.getBlock(xCoord - 1, yCoord, zCoord) instanceof BlockEnergyNodeCore)
                side = 5;

            switch (side) {
                case 0:
                    yc++;
                    yp--;
                    break;
                case 1:
                    yc--;
                    yp++;
                    break;
                case 2:
                    zc++;
                    zp--;
                    break;
                case 3:
                    zc--;
                    zp++;
                    break;
                case 4:
                    xc++;
                    xp--;
                    break;
                case 5:
                    xc--;
                    xp++;
                    break;
            }

            /* DO ANALYSIS */
            NetworkAnalyzer analyzerCore = new NetworkAnalyzer();
            analyzerCore.analyzeCable(worldObj, xc, yc, zc, worldObj.getBlock(xc, yc, zc));
            NetworkAnalyzer analyzerPort = new NetworkAnalyzer();
            analyzerPort.analyzeCable(worldObj, xp, yp, zp, worldObj.getBlock(xp, yp, zp));
        }
    }
}