# repo: Arcensoth/pyckaxe
import re
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Coroutine, Dict, Generic, List, TypeVar

from pyckaxe.lib.pack.abc.resource import Resource
from pyckaxe.lib.pack.abc.resource_deserializer import ResourceDeserializer
from pyckaxe.lib.pack.physical_resource_location import PhysicalResourceLocation
from pyckaxe.lib.pack.resource_loader.errors import (
    DuplicateResourceError,
    FailedToLoadResourceError,
    NoSuchResourceError,
    ResourceLoaderError,
)
from pyckaxe.utils import DEFAULT

__all__ = ("CommonResourceLoader",)

ResourceType = TypeVar("ResourceType", bound=Resource)
RawType = TypeVar("RawType")


# @implements ResourceLoader
@dataclass
class CommonResourceLoader(ABC, Generic[ResourceType, RawType]):
    """
    Loads a resource from an absolute resource location.

    This class is responsible for loading a file of a certain data format (such as
    NBT, JSON, or plain text) from a `PhysicalResourceLocation`, and then converting
    the raw data of type `RawType` into an instance of the corresponding
    `ResourceType`.

    A new implementation is typically only required when a new type of file format
    or a new loading mechanism needs to be supported.

    Attributes
    ----------
    deserializer
        Turns raw data into a resource.
    options
        Arbitrary options to pass to the underlying loading mechanism.
    """

    deserializer: ResourceDeserializer[ResourceType, RawType]
    options: Dict[str, Any] = field(default=DEFAULT)

    @abstractmethod
    async def _load_raw(self, location: PhysicalResourceLocation) -> RawType: ...

    @classmethod
    def default_options_factory(cls) -> Dict[str, Any]:
        return dict()

    def __post_init__(self):
        if self.options is DEFAULT:
            self.options = self.default_options_factory()

    async def _get_matching_paths(
        self, location: PhysicalResourceLocation
    ) -> List[Path]:
        """Get all file paths matching `location`."""
        # Since glob isn't expressive enough, we need to do a second pass with regex.
        # TODO Should glob be async? #async-file-io
        pattern = re.compile(r"^" + location.path.name + r"(?:\.[^\.]*)?$")
        all_paths = [path for path in location.path.parent.glob("*") if path.is_file()]
        matching_paths = [p for p in all_paths if pattern.match(p.name)]
        return matching_paths

    async def _get_path_to_load(self, location: PhysicalResourceLocation) -> Path:
        """
        Get the first file path matching `location`.

        Raises
        ------
        NoSuchResourceError
            If no file paths matching `location` are found.
        """
        paths = await self._get_matching_paths(location)
        if len(paths) < 1:
            raise NoSuchResourceError(location.path)
        elif len(paths) > 1:
            raise DuplicateResourceError(location.path, paths)
        return paths[0]

    async def load(self, location: PhysicalResourceLocation) -> ResourceType:
        """Load a `Resource` from `location`."""
        try:
            # Load the raw data from file.
            raw = await self._load_raw(location)
            # Deserialize the raw data into an object.
            resource = self.deserializer(raw)
            return resource
        except ResourceLoaderError:
            raise
        except Exception as ex:
            raise FailedToLoadResourceError(location.path) from ex

    # @implements ResourceLoader
    def __call__(
        self, location: PhysicalResourceLocation
    ) -> Coroutine[None, None, ResourceType]:
        return self.load(location)
New record of Phyllodiaptomus (Ctenodiaptomus) praedictus sulawensis Alekseev & Vaillant, 2013 (Hexanauplia, Copepoda, Calanoida, Diaptomidae) in the Philippines (Luzon Island)

A study originally intended to update the taxonomy and distribution of calanoid copepods in selected freshwater ecosystems of Central Luzon has led to the discovery of a new record of Phyllodiaptomus Kiefer, 1936 in Candaba Swamp, Pampanga. Since 1979, the only calanoid copepods recorded from this area have been Filipinodiaptomus insulanus (Wright S., 1928) and Tropodiaptomus australis Kiefer, 1936; no subsequent studies of calanoid copepods have been carried out in the region. Analyses of the pertinent key morphological characters revealed that the specimens at hand belong to Phyllodiaptomus (Ctenodiaptomus) praedictus sulawensis Alekseev & Vaillant, 2013, a freshwater diaptomid calanoid copepod subspecies previously known only from Indonesia, where it was thought to be endemic. This paper provides baseline information on the morphological characters of the Philippine members of the subspecies, accompanied by line drawings, as well as a comparison between the morphological data recorded by Alekseev, Haffner, Vaillant & Yusoff (2013) and the current dataset to support the identification of the specimens. The discovery of P. (C.) praedictus sulawensis in the Philippines constitutes a new record of this subspecies in the country and the first such record outside its country of origin.
async def multi(self, ctx):
    # Toggle whether polls on this guild allow multiple-choice voting.
    # Partial snippet from a strawpoll cog: it assumes a surrounding Cog class
    # with a `_strawpoll` Config object and a `save_db` helper.
    settings = await self._strawpoll.guild(ctx.guild).all()
    if settings["multi"] == "true":
        settings["multi"] = "false"
        await ctx.send("Multiple choice is no longer available in the poll.")
    else:
        settings["multi"] = "true"
        await ctx.send("Multiple choice is now available on the polls.")
    await self.save_db(ctx, settings)
// sds-biz-suite/bizcore/WEB-INF/caf_core_src/com/terapico/caf/InvocationBase.java
package com.terapico.caf;

public class InvocationBase {

}
def crop_mtx(csv_mtx, start_pos, end_pos):
    """Crop a rectangular region of interest out of a 2D matrix.

    Note the unusual convention as written: rows are taken from end_pos[0]
    through end_pos[1], and columns from start_pos[0] through start_pos[1]
    (both ranges inclusive), i.e. start_pos holds the column bounds and
    end_pos holds the row bounds.
    """
    roi_mtx = []
    for yi in range(end_pos[0], end_pos[1] + 1):
        roi_mtx.append(csv_mtx[yi][start_pos[0]:start_pos[1] + 1])
    return roi_mtx
// common/src/services/registration/directRegistrationService.ts
import { ServiceFactory } from "../../factories/serviceFactory";
import { IRegistrationManagementService } from "../../models/services/IRegistrationManagementService";
import { IRegistrationService } from "../../models/services/IRegistrationService";
import { IRegistration } from "../../models/services/registration/IRegistration";

/**
 * Service to handle the storage directly with the management service.
 */
export class DirectRegistrationService implements IRegistrationService {
    /**
     * The registration management service.
     */
    private readonly _registrationManagementService: IRegistrationManagementService;

    /**
     * Create a new instance of DirectRegistrationService.
     * @param registrationManagementServiceName The name of the registration management service to use.
     */
    constructor(registrationManagementServiceName: string) {
        this._registrationManagementService =
            ServiceFactory.get<IRegistrationManagementService>(registrationManagementServiceName);
    }

    /**
     * Create a new registration.
     * @param registrationId The registration id of the item.
     * @param itemName Name of the item.
     * @param itemType The type of the item.
     * @param root The initial root for the mam channel.
     * @param sideKey The private key for the mam channel.
     * @returns The response from the request.
     */
    public async register(
        registrationId: string,
        itemName?: string,
        itemType?: string,
        root?: string,
        sideKey?: string): Promise<{
            /**
             * The root used for the channel from the registration.
             */
            root?: string;
            /**
             * The private key used for decoding the channel.
             */
            sideKey?: string;
        }> {
        const registration: IRegistration = {
            id: registrationId,
            created: Date.now(),
            itemName: itemName,
            itemType: itemType
        };

        await this._registrationManagementService.addRegistration(registration, root, sideKey);

        return {
            sideKey: registration.returnMamChannel && registration.returnMamChannel.sideKey,
            root: registration.returnMamChannel && registration.returnMamChannel.initialRoot
        };
    }

    /**
     * Remove a registration.
     * @param registrationId The registration id of the item.
     * @param sideKey The client mam channel side key used for remove validation.
     */
    public async unregister(registrationId: string, sideKey: string): Promise<void> {
        await this._registrationManagementService.removeRegistration(registrationId, sideKey);
    }
}
/**
 * (C) Copyright IBM Corp. 2010, 2015
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.ibm.bi.dml.runtime.instructions.spark;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.spark.api.java.JavaPairRDD;

import com.ibm.bi.dml.parser.Expression.DataType;
import com.ibm.bi.dml.parser.Expression.ValueType;
import com.ibm.bi.dml.runtime.DMLRuntimeException;
import com.ibm.bi.dml.runtime.DMLUnsupportedOperationException;
import com.ibm.bi.dml.runtime.controlprogram.caching.MatrixObject;
import com.ibm.bi.dml.runtime.controlprogram.context.ExecutionContext;
import com.ibm.bi.dml.runtime.controlprogram.context.SparkExecutionContext;
import com.ibm.bi.dml.runtime.instructions.Instruction;
import com.ibm.bi.dml.runtime.instructions.InstructionUtils;
import com.ibm.bi.dml.runtime.instructions.cp.CPOperand;
import com.ibm.bi.dml.runtime.instructions.spark.utils.RDDConverterUtils;
import com.ibm.bi.dml.runtime.matrix.MatrixCharacteristics;
import com.ibm.bi.dml.runtime.matrix.MatrixFormatMetaData;
import com.ibm.bi.dml.runtime.matrix.data.InputInfo;
import com.ibm.bi.dml.runtime.matrix.data.MatrixBlock;
import com.ibm.bi.dml.runtime.matrix.data.MatrixIndexes;
import com.ibm.bi.dml.runtime.matrix.operators.Operator;

public class CSVReblockSPInstruction extends UnarySPInstruction {
    @SuppressWarnings("unused")
    private static final String _COPYRIGHT = "Licensed Materials - Property of IBM\n(C) Copyright IBM Corp. 2010, 2015\n"
            + "US Government Users Restricted Rights - Use, duplication disclosure restricted by GSA ADP Schedule Contract with IBM Corp.";

    private int brlen;
    private int bclen;
    private boolean hasHeader;
    private String delim;
    private boolean fill;
    private double missingValue;

    public CSVReblockSPInstruction(Operator op, CPOperand in, CPOperand out,
            int br, int bc, boolean hasHeader, String delim, boolean fill,
            double missingValue, String opcode, String instr) {
        super(op, in, out, opcode, instr);
        brlen = br;
        bclen = bc;
        this.hasHeader = hasHeader;
        this.delim = delim;
        this.fill = fill;
        this.missingValue = missingValue;
    }

    public static Instruction parseInstruction(String str)
            throws DMLRuntimeException {
        String opcode = InstructionUtils.getOpCode(str);
        if (opcode.compareTo("csvrblk") != 0) {
            throw new DMLRuntimeException(
                    "Incorrect opcode for CSVReblockSPInstruction:" + opcode);
        }

        // Example parts of CSVReblockSPInstruction:
        // [csvrblk, pREADmissing_val_maps·MATRIX·DOUBLE, _mVar37·MATRIX·DOUBLE,
        // 1000, 1000, false, ,, true, 0.0]
        String parts[] = InstructionUtils.getInstructionPartsWithValueType(str);

        CPOperand in = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);
        CPOperand out = new CPOperand("", ValueType.UNKNOWN, DataType.UNKNOWN);
        in.split(parts[1]);
        out.split(parts[2]);

        int brlen = Integer.parseInt(parts[3]);
        int bclen = Integer.parseInt(parts[4]);
        boolean hasHeader = Boolean.parseBoolean(parts[5]);
        String delim = parts[6];
        boolean fill = Boolean.parseBoolean(parts[7]);
        double missingValue = Double.parseDouble(parts[8]);

        Operator op = null; // no operator for ReblockSPInstruction
        return new CSVReblockSPInstruction(op, in, out, brlen, bclen,
                hasHeader, delim, fill, missingValue, opcode, str);
    }

    @Override
    @SuppressWarnings("unchecked")
    public void processInstruction(ExecutionContext ec)
            throws DMLRuntimeException, DMLUnsupportedOperationException {
        SparkExecutionContext sec = (SparkExecutionContext) ec;

        // sanity check input info
        MatrixObject mo = sec.getMatrixObject(input1.getName());
        MatrixFormatMetaData iimd = (MatrixFormatMetaData) mo.getMetaData();
        if (iimd.getInputInfo() != InputInfo.CSVInputInfo) {
            throw new DMLRuntimeException("The given InputInfo is not implemented for "
                    + "CSVReblockSPInstruction:" + iimd.getInputInfo());
        }

        // set output characteristics
        MatrixCharacteristics mcIn = sec.getMatrixCharacteristics(input1.getName());
        MatrixCharacteristics mcOut = sec.getMatrixCharacteristics(output.getName());
        mcOut.set(mcIn.getRows(), mcIn.getCols(), brlen, bclen);

        // check jdk version (prevent double.parseDouble contention on <jdk8)
        sec.checkAndRaiseValidationWarningJDKVersion();

        // check input rdd
        JavaPairRDD<LongWritable, Text> lines = (JavaPairRDD<LongWritable, Text>)
                sec.getRDDHandleForVariable(input1.getName(), iimd.getInputInfo());

        // reblock csv to binary block
        JavaPairRDD<MatrixIndexes, MatrixBlock> out = RDDConverterUtils.csvToBinaryBlock(
                sec.getSparkContext(), lines, mcOut, hasHeader, delim, fill, missingValue);

        // put output RDD handle into symbol table
        sec.setRDDHandleForVariable(output.getName(), out);
        sec.addLineageRDD(output.getName(), input1.getName());
    }
}
-- repo: alexkalderimis/tech-posts
--------------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
import Hakyll
import qualified Hakyll.Core.Metadata as MD

import Site.Contexts
import Site.Feed
import Site.Pandoc (pandocFeedCompiler, tocCompiler)

--------------------------------------------------------------------------------
main :: IO ()
main = hakyll $ do
    let postsPattern = "posts/*"

    matchMetadata postsPattern published compileFeedEntry
    matchMetadata postsPattern published compilePost

    match "images/*" $ do
        route idRoute
        compile copyFileCompiler

    match "css/*" $ do
        route idRoute
        compile compressCssCompiler

    match "scripts/*" $ do
        route idRoute
        compile copyFileCompiler

    match (fromList ["about.md", "contact.markdown"]) $ do
        route $ setExtension "html"
        compile $ pandocCompiler
            >>= loadAndApplyTemplate "templates/default.html" baseContext
            >>= relativizeUrls

    create ["archive.html"] $ do
        route idRoute
        let ctx = archiveCtx postsPattern
        compile $ makeItem ""
            >>= loadAndApplyTemplate "templates/archive.html" ctx
            >>= loadAndApplyTemplate "templates/default.html" ctx
            >>= relativizeUrls

    create ["404.html"] $ do
        let fields = customTitleField "Not Found" <> baseContext
        route idRoute
        compile $ makeItem ""
            >>= loadAndApplyTemplate "templates/404.html" baseContext
            >>= loadAndApplyTemplate "templates/default.html" fields

    match "index.html" $ do
        route idRoute
        compile $ do
            posts <- recentFirst =<< loadAll (postsPattern .&&. hasNoVersion)
            let indexCtx = listField "posts" postCtx (return posts) <> baseContext
            getResourceBody
                >>= applyAsTemplate indexCtx
                >>= loadAndApplyTemplate "templates/default.html" indexCtx
                >>= relativizeUrls

    match "templates/*" $ compile templateBodyCompiler

    createFeed postsPattern

--------------------------------------------------------------------------------
-- helpers:

compileFeedEntry :: Rules ()
compileFeedEntry = version "feed" $ compile pandocFeedCompiler

compilePost :: Rules ()
compilePost = do
    route $ setExtension "html"
    compile $ tocCompiler
        >>= (\(doc, toc) -> loadAndApplyTemplate "templates/post.html" (toc <> postCtx) doc)
        >>= loadAndApplyTemplate "templates/default.html" postCtx
        >>= relativizeUrls

published :: MD.Metadata -> Bool
published = (/= Just "true") . MD.lookupString "draft"
/*****************************************************************************
 *
 * Floating-point division external interface
 *
 *****************************************************************************/

void
Expand_Float_Divide(TN *result, TN *src1, TN *src2, TYPE_ID mtype, OPS *ops)
{
  FmtAssert(FALSE, ("Not Yet Implemented"));
}
Story highlights
- The office of Attorney General Roy Cooper declines to comment
- A group of clergy argues the ban violates the First and 14th Amendments
- North Carolina approved a constitutional amendment to outlaw same-sex marriage
- Same-sex marriage is legal in 17 U.S. states and the District of Columbia

A group of clergy in North Carolina on Monday filed a federal lawsuit seeking to overturn the state's ban on same-sex marriage. The group argues the ban violates the First and 14th Amendments and stigmatizes same-sex couples and the people and institutions that would support them.

"Marriage between two loving individuals is both a fundamental legal right and a cornerstone of almost every religion," says the lawsuit, which was filed in U.S. District Court for the Western District of North Carolina in Charlotte. "By depriving the Plaintiffs of the freedom to perform religious marriage ceremonies or to marry, North Carolina stigmatizes Plaintiffs and their religious beliefs, and the State relegates the Couple Plaintiffs to second-class status."

The lawsuit was filed by the United Church of Christ, a Protestant religious denomination with some 1.1 million members; various religious leaders, including a rabbi; and some same-sex couples. Among the defendants are North Carolina Attorney General Roy Cooper, several county district attorneys, and registers of deeds. Noelle Talley, a spokeswoman for Cooper, said that as a rule his office does not comment on pending litigation. The state has received the lawsuit and is currently reviewing it.

"By denying same-sex couples the right to marry and by prohibiting religious denominations even from performing marriage ceremonies for same-sex couples, the State of North Carolina stigmatizes same-sex couples, as well as the religious institutions and clergy that believe in equal rights," the suit says.

North Carolina voted in 2012 to outlaw same-sex marriage, which was already prohibited in the state. Supporters pushed for -- and won -- a constitutional amendment that defines marriage as solely between a man and a woman. Voters approved the amendment by a large margin.

Same-sex marriage is legal in 17 U.S. states and the District of Columbia: California, Connecticut, Delaware, Hawaii, Illinois, Iowa, Maine, Maryland, Massachusetts, Minnesota, New Hampshire, New Jersey, New Mexico, New York, Rhode Island, Vermont and Washington.

Besides North Carolina, same-sex marriage is banned by state constitutional amendment or state law in Alabama, Alaska, Arizona, Arkansas, Colorado, Florida, Georgia, Idaho, Indiana, Kansas, Kentucky, Louisiana, Mississippi, Missouri, Montana, Nebraska, Nevada, North Dakota, Ohio, Oregon, Pennsylvania, South Carolina, South Dakota, Tennessee, Virginia, West Virginia, Wisconsin and Wyoming.

Worldwide, 16 other countries -- and parts of Mexico -- also have laws allowing same-sex marriage and domestic partnerships. Most of these are in Europe and South America.
Ordinary Differential Equations and Dynamical Systems

This book provides a self-contained introduction to ordinary differential equations and dynamical systems suitable for beginning graduate students.

The first part begins with some simple examples of explicitly solvable equations and a first glance at qualitative methods. Then the fundamental results concerning the initial value problem are proved: existence, uniqueness, extensibility, dependence on initial conditions. Furthermore, linear equations are considered, including the Floquet theorem, and some perturbation results. As somewhat independent topics, the Frobenius method for linear equations in the complex domain is established and Sturm-Liouville boundary value problems, including oscillation theory, are investigated.

The second part introduces the concept of a dynamical system. The Poincaré-Bendixson theorem is proved, and several examples of planar systems from classical mechanics, ecology, and electrical engineering are investigated. Moreover, attractors, Hamiltonian systems, the KAM theorem, and periodic solutions are discussed. Finally, stability is studied, including the stable manifold and the Hartman-Grobman theorem for both continuous and discrete systems.

The third part introduces chaos, beginning with the basics for iterated interval maps and ending with the Smale-Birkhoff theorem and the Melnikov method for homoclinic orbits.

The text contains almost three hundred exercises. Additionally, the use of mathematical software systems is incorporated throughout, showing how they can help in the study of differential equations.
// src/rxjs/operators/transforming/window-toggle.ts
// RxJS v6+
import { interval, timer } from 'rxjs';
import { mergeAll, take, tap, windowToggle } from 'rxjs/operators';

// emit immediately then every 1s
const source = timer(0, 1000);
// toggle window on every 5s
const toggle = interval(5000);
const example = source.pipe(
  // turn window on every 5s
  windowToggle(toggle, val => interval((val + 1) * 1000)),
  tap(_ => console.log('NEW WINDOW!'))
);

example
  .pipe(
    // window emits nested observable
    mergeAll(),
    take(10)
  )
  .subscribe(val => console.log(val));
# repo: karpierz/libcurl
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2020, <NAME>, <<EMAIL>>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
#***************************************************************************

"""
Download a given URL into a local file named page.out.
"""

import sys
import ctypes as ct
from pathlib import Path

import libcurl as lcurl
from curltestutils import *  # noqa

here = Path(__file__).resolve().parent

PAGE_FILENAME = here/"page.out"


@lcurl.write_callback
def write_function(buffer, size, nitems, outstream):
    file = lcurl.from_oid(outstream)
    buffer_size = size * nitems
    if buffer_size == 0:
        return 0
    bwritten = bytes(buffer[:buffer_size])
    nwritten = file.write(bwritten)
    return nwritten


def main(argv=sys.argv[1:]):

    if len(argv) < 1:
        print("Usage: %s <URL>" % sys.argv[0])
        return 1

    url: bytes = argv[0].encode("utf-8")

    lcurl.global_init(lcurl.CURL_GLOBAL_ALL)

    # init the curl session
    curl: ct.POINTER(lcurl.CURL) = lcurl.easy_init()

    with curl_guard(True, curl):
        # set URL to get here
        lcurl.easy_setopt(curl, lcurl.CURLOPT_URL, url)
        if defined("SKIP_PEER_VERIFICATION"):
            lcurl.easy_setopt(curl, lcurl.CURLOPT_SSL_VERIFYPEER, 0)
        # Switch on full protocol/debug output while testing
        lcurl.easy_setopt(curl, lcurl.CURLOPT_VERBOSE, 1)
        # disable progress meter, set to 0L to enable it
        lcurl.easy_setopt(curl, lcurl.CURLOPT_NOPROGRESS, 1)
        # send all data to this function
        lcurl.easy_setopt(curl, lcurl.CURLOPT_WRITEFUNCTION, write_function)
        # open the file
        with PAGE_FILENAME.open("wb") as page_file:
            # write the page body to this file handle
            lcurl.easy_setopt(curl, lcurl.CURLOPT_WRITEDATA, id(page_file))
            # get it!
            lcurl.easy_perform(curl)

    return 0


sys.exit(main())
Great SCO2T! Rapid tool for carbon sequestration science, engineering, and economics

CO2 capture and storage (CCS) technology is likely to be widely deployed in coming decades in response to major climate and economic drivers: CCS is part of every clean energy pathway that limits global warming to 2°C or less and receives significant CO2 tax credits in the United States. These drivers are likely to stimulate capture, transport, and storage of hundreds of millions or billions of tonnes of CO2 annually. A key part of the CCS puzzle will be identifying and characterizing suitable storage sites for vast amounts of CO2. We introduce a new software tool called SCO2T (Sequestration of CO2 Tool, pronounced "Scott") to rapidly characterize saline storage reservoirs. The tool is designed to rapidly screen hundreds of thousands of reservoirs, perform sensitivity and uncertainty analyses, and link sequestration engineering (injection rates, reservoir capacities, plume dimensions) to sequestration economics (costs constructed from around 70 separate economic inputs). We describe the novel science developments supporting SCO2T, including a new approach to estimating CO2 injection rates and CO2 plume dimensions, as well as key advances linking sequestration engineering with economics. Next, we perform a sensitivity and uncertainty analysis of geology combinations (including formation depth, thickness, permeability, porosity, and temperature) to understand their impact on carbon sequestration. Through the sensitivity analysis we show that increasing depth and permeability can both lead to increased CO2 injection rates, increased storage potential, and reduced costs, while increasing porosity reduces costs without impacting the injection rate (CO2 is injected at a constant pressure in all cases) by increasing the reservoir capacity.

INTRODUCTION

CO2 capture and storage (CCS) technology is likely to be widely deployed in coming decades due to major climate drivers (CCS is part of every major climate policy that limits global warming to 2°C) and economic drivers (significant CO2 tax credits in the United States). These drivers are expected to stimulate capture, transport, and storage of hundreds of millions or billions of tonnes of CO2 annually (100s MtCO2/yr to 1+ GtCO2/yr). Sequestering large amounts of CO2 requires identifying hundreds or thousands of potential storage sites and understanding how geological characteristics (such as formation depth, thickness, permeability, porosity, and temperature) and logistical parameters (such as well patterns/spacing and brine treatment/disposal) will impact sequestration potential and associated costs. Despite this, the science and approach needed to identify adequate sequestration sites on this scale (billions of tonnes of CO2 annually) do not exist. To meet this challenge, we introduce a fast-running tool called SCO2T (Sequestration of CO2 Tool, pronounced "Scott"). The tool can rapidly screen hundreds of thousands of reservoirs, perform sensitivity and uncertainty analysis, and estimate geologic CO2 storage costs. SCO2T is the first tool that comprehensively and directly links key outputs from sequestration simulations with detailed sequestration economics. Consequently, the tool can explore complex and potentially counterintuitive relationships among reservoir characteristics, such as the impact of increasing depth, thickness, permeability, porosity, and temperature on sequestration engineering and costs.
In addition to providing an approach to understanding how sequestration costs vary across storage site parameters, including geology and logistics, SCO2T estimates site-wide reservoir capacities by calculating the amount of CO2 that can realistically be injected and stored in a given 3D geological block. This takes into account injection rates, the number of wells and well patterns, and plume dimensions, which together determine how much CO2 can actually be stored. By contrast, other site-wide approaches, such as the FE/NETL CO2 Saline Storage Cost Model, typically use a storage efficiency factor and other calculations to estimate storage capacity and do not focus on the impact of injection rates and plume characteristics. SCO2T is underlain by full-physics sequestration simulations, including pressure- and temperature-dependent multiphase flow (FEHM), which allows it to rapidly calculate outputs without sacrificing detailed accuracy.

This paper proceeds as follows. First, in Background, we present a literature review of the research from which our work has grown. Second, in Approach, we introduce the SCO2T framework, including background information on SCO2T inputs and outputs, the modified ROMster approach, and a detailed overview of the economic inputs and calculations. The Approach section also highlights some of the novel scientific advances required to build SCO2T, as well as its validation. Third, in Results and Discussion, we present a sensitivity analysis and an uncertainty analysis of multiple carbon sequestration scenarios. Fourth, in Conclusions and Future Research, we highlight the major findings from our study and outline future research directions. All data used in this study (model runs and data outputs for the figures) are included in a supporting Microsoft Excel file, along with all original figures.

BACKGROUND

In recent years, multiple approaches, models, and tools have been developed for performance assessment of geologic CO2 sequestration (GCS). Here, we summarize the most relevant works to put the SCO2T tool and its supporting science in context. Stauffer et al. introduced a system model called CO2-PENS (Predicting Engineered Natural Systems) for sequestering CO2 in geologic reservoirs based on the GoldSim platform. GoldSim is a system-modeling package designed for stochastic modeling of engineered geologic systems (such as GCS), particularly those with large uncertainties. CO2-PENS was designed to conduct probabilistic simulations of CO2 capture, transportation, and injection in different geologic formations. It can be used to explore relationships between uncertain variables and can help to distinguish the likely performance of potential sequestration sites. In addition, CO2-PENS has the capability to link an injection module with a simple economic module, and it was modified for use in the SimCCS CCS decision support tool. Zhang et al. developed a system-level model for GCS including CO2 capture, compression, transportation, and injection, also based on GoldSim. Oldenburg et al. developed a certification framework (CF) for certifying the safety and effectiveness of GCS sites; the CF relates effective CO2 trapping to leakage risk. Through its generality and flexibility, the CF approach can help with the assessment of CO2/brine leakage risk as part of the certification process for permitting GCS sites.
Metcalfe et al. developed a generic system model using Quintessa's QPAC software, and the model was then adapted and applied to the demonstration of CO2 storage at Krechba, near In Salah in central Algeria. Most recently, the US DOE-funded National Risk Assessment Partnership (NRAP) developed an integrated assessment model (NRAP-IAM-CS) that can be used to simulate CO2 injection, migration, and associated impacts (e.g., potential geochemical impacts to groundwater) at GCS sites. NRAP-IAM-CS incorporates a system-modeling-based approach which accounts for the full subsurface system, from the storage reservoir to groundwater aquifers and the atmosphere. NRAP has recently released an open-source IAM called NRAP-Open-IAM, which provides similar functionality to NRAP-IAM-CS but draws on a larger set of model analysis tools, is cross-platform, and can execute simulations concurrently on parallel computational resources. Except for CO2-PENS, none of the above approaches or models can directly link key outputs (CO2 injection rates, plume dimensions, etc.) from sequestration simulations with sequestration costs or economics.

APPROACH

SCO2T uses a set of reduced-order models (ROMs) to calculate two key sequestration engineering outputs for any given formation: CO2 injection rate and CO2 plume area. These engineering outputs are then used with other inputs to calculate the annualized cost of geologic CO2 storage. Because SCO2T is a rapid sequestration science and screening tool, it is not designed to replace detailed sequestration modeling of individual sites, where fine-scale reservoir heterogeneity and fluid characteristics have substantial impacts on storage estimates and on injection well patterns and designs. Consequently, SCO2T makes several high-level assumptions, including reservoir homogeneity, and does not consider issues such as leakage or reservoir fluid composition.

Inputs and outputs

SCO2T requires five pieces of geologic information to characterize a reservoir for any one realization: formation depth, thickness, permeability, porosity, and geothermal gradient or temperature. These are described in more depth in the supporting information (SI). These five pieces of geologic information are used as inputs to the reduced-order models described in the next section. SCO2T needs further information to calculate site-wide sequestration engineering and economics: the reservoir 2D area (km²); the maximum injection pressure as a fraction of lithostatic pressure (currently fixed at a default value of 0.8), based on a grain density of 2650 kg/m³ and a porosity of 0.15 (fraction) for the rocks overlying the storage formation; the maximum well injection rate (currently fixed at an assumed 1 MtCO2/yr per well); and the injection period (in years; the current version only allows a 30-year period). SCO2T also requires additional economic inputs: a capital charge factor; the cost of treating/disposing of a cubic meter of brine ($/m³; the value can be zero); the number/proportion of brine extraction wells that need to be drilled for each injection well (can be zero); whether or not a pump is needed for each CO2 injection well (if the CO2 arrives at the site at sufficient pipeline pressure, a pump may not be required); and the well construction design.
Well design options include whether fractional wells can be drilled (useful for sensitivity analysis, where one could place, say, 1.1 wells in a small area), square (aligned) versus hexagonal (staggered) well spacing (hexagonal is the default), and whether CO2 plumes can overlap.

SCO2T uses the five geologic inputs in combination with the calculated maximum injection pressure to calculate the maximum injection rate (MtCO2/yr), plume radius (km), and plume volume (million cubic meters, or Mm³) for an individual injection well. Here, plume volume is used interchangeably with "injected CO2 volume" and does not take into account rock pore space (i.e., plume volume is the mass of CO2 divided by the CO2 density). For cases where the maximum injection rate exceeds 1 MtCO2/yr, SCO2T recalculates the plume radius and volume using the maximum injection rate of 1 MtCO2/yr. Even though many combinations of geologic parameters could conceivably exceed 1 MtCO2/yr, typical well construction, including standard casing and drilling technology, does not generally permit flow rates in excess of 1 MtCO2/yr.

These individual well outputs are then translated into site-wide outputs (a simplified sketch of this translation follows below). First, the site-wide area is divided by the individual plume areas to calculate how many wells can be placed at the site; this considers whether wells are hexagonally or squarely spaced and whether plumes can overlap. Second, the injection rate and number of wells are combined to calculate a site-wide reservoir capacity. Third, the number of wells and site-wide values are translated into a detailed series of individual economic estimates that are integrated to generate a final sequestration cost ($/tCO2).
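To make the per-well to site-wide translation concrete, below is a minimal sketch in Python (SCO2T itself is written in VBA; this code is illustrative only). The ideal circle-packing densities, the floor on the well count, and the treatment of the overburden as a uniform column are assumptions of this sketch, not SCO2T's exact logic, which is described in the SI.

import math

def max_injection_pressure_mpa(depth_m, frac_lithostatic=0.8,
                               grain_density=2650.0, porosity=0.15,
                               water_density=1000.0, g=9.81):
    # Bulk overburden density mixes grain and pore-water density by porosity;
    # 0.8, 2650 kg/m3, and 0.15 are the SCO2T defaults quoted above. Treating
    # the overburden as uniform is an assumption of this sketch.
    bulk_density = grain_density * (1.0 - porosity) + water_density * porosity
    lithostatic_pa = bulk_density * g * depth_m
    return frac_lithostatic * lithostatic_pa / 1.0e6  # MPa

def site_wide_outputs(site_area_km2, plume_radius_km, well_rate_mt_yr,
                      injection_years=30, hexagonal=True, allow_overlap=False):
    # Number of wells: site area divided by the per-well plume area. When
    # plumes may not overlap, ideal circle-packing densities are assumed
    # (hexagonal ~0.9069, square ~0.7854).
    plume_area_km2 = math.pi * plume_radius_km ** 2
    packing = 1.0 if allow_overlap else (0.9069 if hexagonal else 0.7854)
    n_wells = math.floor(site_area_km2 * packing / plume_area_km2)
    capacity_mt = n_wells * well_rate_mt_yr * injection_years
    return n_wells, capacity_mt

# Example: a 100 km2 site, 1.6 km plume radius, wells capped at 1 MtCO2/yr.
print(max_injection_pressure_mpa(2000.0))  # ~37.7 MPa at 2 km depth
print(site_wide_outputs(100.0, 1.6, 1.0))  # (11, 330): 11 wells, 330 MtCO2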
Reduced order models (ROMs)

SCO2T uses a set of reduced-order models (ROMs) to calculate two key sequestration engineering outputs for any given formation: (1) CO2 injection rate per well and (2) CO2 plume area (results are often reported as a radius in this paper). The plume area can also be calculated using the Nordbotten analytical solutions rather than the ROMs; this is largely available for reference purposes. ROMs are a widely used and powerful approach to reducing the complexity of predictive physics-based numerical simulations in GCS. They allow fast computations of entire system performance, even for periods of hundreds to thousands of years. Traditional approaches generate a single ROM for each simulated response (e.g., CO2 injection rate) based on a set of training simulations. Chen et al. demonstrated that a single ROM can display excellent overall predictive statistics yet still produce predictions that dramatically and unacceptably deviate from simulator responses. To avoid this potential pitfall of traditional ROM development, Chen et al. developed a ROMster approach and showed that it can reduce the average absolute error from 200% to only 4% in their study. The current version of SCO2T calculates the injected CO2 volume by dividing the injected mass by the density of the CO2 in the reservoir (i.e., plume volume is the same as injected CO2 volume). Injected volume is used to estimate how much brine might be displaced during CO2 injection and storage and, subsequently, how much could be extracted and disposed of or treated. The SCO2T ROMs were trained using a set range for the five geologic inputs (formation depth, thickness, permeability, porosity, and geothermal gradient) based on the reasonably expected range for potential sequestration sites (Table 1). ROMs, particularly those using spline fitting, have no guarantee of performing well outside their trained range and often have difficulties when approaching the bounds of that range. Consequently, the ROMs were trained on a ±10% range beyond the "operational" range of each geologic input (with slightly larger lower bounds for thickness and porosity). Training ranges and user ranges are listed in Table 1. Full details of how the injection rate and plume area ROMs were developed are presented in the SI.

Economics

Sequestration economics is perhaps the ultimate driver for identifying suitable CO2 storage sites; issues such as environmental impact and risk are vitally important, but economics can be a showstopper for sequestration feasibility. For example, a site with high expected sequestration costs would arguably not pass the financing stage. Typically, sequestration costs are only calculated on a site-by-site basis within individual projects. There is less broad understanding of how sequestration costs vary across storage site parameters, including geology (e.g., formation depth, thickness, permeability, porosity, and temperature) and logistics, such as well spacing and brine treatment/disposal costs. Well spacing is described in depth in the SI. This broad understanding is critical for site screening, which is often the first step in identifying the storage potential of different geographical regions or reservoirs. Screening might involve examining hundreds or even many thousands of potential sites to down-select suitable candidates for deeper exploration. No tool currently exists for this down-selection process, which represents a gap in CO2 sequestration science and engineering. Screening also often involves sensitivity analysis, whereby a user seeks to understand the impact of one or more parameters on sequestration potential. SCO2T addresses both of these challenges. SCO2T directly links key outputs from sequestration simulations (CO2 injection rates, plume dimensions, displaced brine volumes, etc.) with comprehensive sequestration costs. Although other tools, such as CO2-PENS, have done this too, SCO2T is the first approach to take realistic physics-based estimates of CO2 injection and storage and link them with detailed economics. Consequently, the tool can explore complex and potentially counterintuitive relationships among reservoir characteristics, such as the impact of increasing depth, thickness, permeability, porosity, and temperature on engineering and costs. The SCO2T approach of connecting sequestration simulations and engineering with detailed economics was first developed in Middleton et al., and preliminary versions were used in subsequent studies. These preliminary versions were used to parametrize datasets for the CCS infrastructure model SimCCS and to conduct studies of the impact of geology on sequestration costs. For example, Middleton and Yaw examined 20 potential storage reservoirs in Alberta, Canada, identifying the impact of uncertainty (in formation thickness, permeability, and porosity) on storage costs, including a threshold below which injectivities under 0.25 MtCO2/yr lead to exponentially rising costs. Further details on the economics approach are provided in the SI.
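As a rough illustration of how such a cost calculation is structured, the sketch below annualizes capital with a capital charge factor, adds O&M and brine handling, and divides by the annual tonnage. SCO2T's actual model builds costs from around 70 separate economic inputs; the aggregated parameters here are hypothetical stand-ins, not SCO2T's.

def storage_cost_usd_per_tco2(n_injection_wells, capex_per_well_usd,
                              site_fixed_capex_usd, annual_om_usd,
                              capital_charge_factor, annual_co2_mt,
                              brine_wells_per_injector=0.0,
                              brine_m3_per_tco2=0.0, brine_cost_usd_per_m3=0.0):
    # Annualize total capital with the capital charge factor, add annual O&M
    # and brine treatment/disposal, and divide by annual tonnes stored.
    # All parameter names are illustrative aggregates, not SCO2T inputs.
    n_wells = n_injection_wells * (1.0 + brine_wells_per_injector)
    capex = site_fixed_capex_usd + n_wells * capex_per_well_usd
    annual_capital = capital_charge_factor * capex
    annual_tonnes = annual_co2_mt * 1.0e6
    annual_brine = brine_cost_usd_per_m3 * brine_m3_per_tco2 * annual_tonnes
    return (annual_capital + annual_om_usd + annual_brine) / annual_tonnes

# Example: 11 wells at $8M each, $20M site capex, $5M/yr O&M, CCF of 0.1,
# 11 MtCO2/yr stored, no brine extraction (all numbers illustrative).
print(storage_cost_usd_per_tco2(11, 8e6, 20e6, 5e6, 0.1, 11.0))  # ~$1.44/tCO2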
SCO2T framework

SCO2T (Figure 1) is developed in Microsoft Excel, with the code (2000+ lines) written in Visual Basic for Applications (VBA). Excel was chosen for several key reasons: the vast majority of users already have access to Excel; no installation is required; it is usable on PC and Mac systems; Excel encapsulates the data in an easy-to-read/enter format together with the open-source coding language within a single platform; and it has built-in capability to visualize outputs (each execution of SCO2T produces 81 individual charts exploring outputs). SCO2T is also fast running: even though VBA is an uncompiled language, SCO2T typically performs 10,000 separate realizations per second on a single processor of a typical modern PC or Mac. For comparison, the original 10,000 FEHM training simulations used to train SCO2T's ROMs took on average five hours each, with many taking several days. This fast-running capability is important for site screening: for example, SCO2T could perform a single simulation of all 186,675 10x10 km grid cells in the NATCARB Atlas in less than 30 seconds. Speed also matters for sensitivity and uncertainty analysis, where hundreds of thousands or millions of simulations can be performed in minutes. In addition to printing output to the SCO2T tool itself (i.e., the main worksheet), SCO2T can export the data to a text file or directly into a format read by SimCCS. SCO2T is being publicly released with this manuscript.

Validation

SCO2T utilizes the novel ROMster approach to predict CO2 injection rate and plume area given formation depth, thickness, permeability, porosity, and temperature. Figure 2 illustrates the success of the ROMster approach for the six CO2 injection ROMs across the entire injection space defined by the 10,000 FEHM simulations. None of the six ROMs performs well over large parts of the injection space (Figure 2a/b), let alone the whole range, though each ROM performs well in its trained range (Table S1 in the SI). As ROMs move outside their trained injection range, their prediction success rapidly deteriorates, with errors reaching two or more orders of magnitude. For example, in Figure 2a it can be seen that ROM A (red dots) has very high accuracy in its trained range (it is trained on FEHM runs with injection rates of 0-0.1 MtCO2/yr), as shown by the 1:1 line between the FEHM output (x-axis) and the SCO2T-predicted injection rate (y-axis). However, when ROM A is used with combinations of depth, thickness, permeability, porosity, and temperature that lead to an FEHM injection rate greater than 0.1 MtCO2/yr, its performance dramatically pulls away from the 1:1 relationship. The same is true for the other five ROMs. For example, ROM B (green dots, trained on 0.05 to 0.5 MtCO2/yr injection rates) performs poorly both below 0.05 MtCO2/yr (both over- and under-predicting injection rates) and above 0.5 MtCO2/yr (under-predicting injection rates). These errors are particularly pronounced in the key injection range of 0.25-1 MtCO2/yr. This range is key because injection rates below 0.25 MtCO2/yr are likely to lead to prohibitively high injection costs, and injection rates are capped at 1 MtCO2/yr based on well dimensions; that is, real-world geologic carbon sequestration is likely to happen in this range. Figure 2b plots combinations of ROM performance for the FEHM-predicted range 0-1.5 MtCO2/yr (x-axis) and the SCO2T-predicted range -1.5 to 1.5 MtCO2/yr (y-axis).
Focusing on this key injection range shows that the ROMs quickly perform badly outside their ranges, even to the extent that all ROMs apart from ROM A (trained on the range 0-0.1 MtCO2/yr) can predict negative CO2 injection rates! Using the ROMster approach, the six ROMs can be merged to provide an excellent prediction of CO2 injection rates across the entire range (Figure 2c), with an R² of 0.9989. Even with such high predictive power, there can be minor gaps between ROMs. For example, a sensitivity analysis of increasing depth from 1000 m to 5000 m means SCO2T has to jump from making predictions with ROM A to ROM B. Consequently, SCO2T lets the user either always use one discrete ROM for the prediction or, for injectivities that fall between any two ROMs, use a weighted average of their predictions (the suggested and default approach).

Figure 3 illustrates the validation of the CO2 plume area ROMs. The blue points and linear regression equation in Figure 3 (left) indicate that the ROMs reproduce the FEHM outputs with high accuracy (note the R² value and the 1:1 relationship). The ROMs are not colored separately for each of the three plume area ROMs because, unlike for injection rate, the ROMs all overlap (e.g., a large plume area could indicate a high injection rate in a thick formation or a low injection rate in a thin formation). The red dots refer to the Nordbotten analytical solution for plume evolution that formed the basis for the plume radius originally used in CO2-PENS. SCO2T allows the user to calculate the plume area using either the Nordbotten analytical solution or the ROMs. Even though the Nordbotten approach uses simplifying assumptions in a first-principles derivation, the analytical solution performs fairly well compared with SCO2T for unlimited injection rates. Figure 3 (right) shows the validation of the SCO2T and Nordbotten plume area approaches for the rate-controlled simulations. The Nordbotten solutions generally show much greater scatter from the 1:1 reference and, due to their simplifying assumptions, are not able to capture the complexity of interactions between the five major geologic parameters.

Currently, SCO2T estimates the injected CO2 volume by dividing the total injected mass of CO2 by the density of the stored CO2. Future versions may introduce a separate set of CO2 plume volume ROMs. The plume volume is used to calculate the amount of brine produced (taking into account CO2 and brine density) to keep the reservoir at hydrostatic pressure. The volume of the injected CO2 is also required for Nordbotten's plume area calculation, along with CO2 and water viscosity. CO2 and water density and viscosity lookup tables are included with the SCO2T tool for temperatures ranging from 0.5°C to 300°C (every 0.5°C) and pressures ranging from 0.25 to 60 MPa (every 0.25 MPa); these were specifically developed for SCO2T using the NIST Chemistry WebBook. Increasing temperature reduces CO2 density and generally decreases viscosity (though not always; see Figure S6), while increasing pressure increases both CO2 density and viscosity. To find the density and viscosity of CO2/water, typical approaches bilinearly interpolate between temperatures and pressures in the lookup table (a sketch of this standard lookup follows below). However, CO2 density and viscosity both respond nonlinearly to changes in temperature and pressure, so linear averages introduce errors (see SI).
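For reference, here is a minimal sketch of the standard bilinear lookup just described, on a grid matching SCO2T's 0.5°C by 0.25 MPa tables; the function and parameter names are illustrative, and SCO2T's polynomial weighting (described next) is not reproduced here.

def bilinear_lookup(table, t_c, p_mpa, t0=0.5, dt=0.5, p0=0.25, dp=0.25):
    # table[i][j] holds the property (e.g., CO2 density) at temperature
    # t0 + i*dt (deg C) and pressure p0 + j*dp (MPa), mirroring the
    # 0.5 deg C x 0.25 MPa grid of SCO2T's lookup tables. Inputs must lie
    # strictly inside the tabulated range so that i+1 and j+1 exist.
    i = int((t_c - t0) // dt)
    j = int((p_mpa - p0) // dp)
    ft = (t_c - (t0 + i * dt)) / dt   # fractional position in the cell, 0..1
    fp = (p_mpa - (p0 + j * dp)) / dp
    return ((1 - ft) * (1 - fp) * table[i][j]
            + ft * (1 - fp) * table[i + 1][j]
            + (1 - ft) * fp * table[i][j + 1]
            + ft * fp * table[i + 1][j + 1])

# Toy 2x2 table spanning one grid cell: value varies with both T and P.
toy = [[700.0, 710.0], [690.0, 701.0]]
print(bilinear_lookup(toy, 0.75, 0.375))  # midpoint of the cell -> 700.25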
To address this nonlinearity, SCO2T offers the option of a polynomial weighting that almost entirely removes the inaccuracy (see Figure S7 and the explanation in the SI); this is the suggested and default option.

Sensitivity analysis

The following two sections summarize sensitivity and uncertainty analyses that demonstrate the power of SCO2T and identify key CO2 sequestration issues, as well as their impact on economics. The five geologic inputs (depth, thickness, permeability, porosity, and temperature) each affect CO2 injection rates, plume dimensions, storage effectiveness, and extracted brine in different ways, and therefore have differing and sometimes counterintuitive impacts on economics. For the sensitivity analysis, four of the five variables are held constant while the remaining variable is varied to understand its impact. This is a local or transect sensitivity analysis rather than a full global sensitivity analysis.

Figure 4 illustrates the impact of varying depth while holding the other parameters constant, down to the total injection and storage cost (Figure 4f). Storage area density is a measure of the mass of CO2 that can be effectively stored in the reservoir, normalized by the reservoir's area (MtCO2/km²). Unlike the storage capacity of an entire reservoir, which is simply based on the overall pore space without considering the actual ability to utilize that space, it is a good measure of storage potential given the geologic inputs. Storage area density changes with the plume mass, radius (or area), and CO2 density. The sensitivity analysis of depth generates generally intuitive results. The increase in CO2 density with increasing pressure outweighs the decrease in CO2 density with increasing temperature, leading to an overall increase in CO2 density with depth. Given the same reservoir dimensions, porosity, and permeability, this allows more CO2 to penetrate the reservoir with increasing depth, leading to higher injection rates and higher reservoir capacity, with fewer wells and less space needed to achieve them. As a result, the overall injection and storage cost falls with increasing depth because fewer wells are required, each well can inject more CO2, and the reservoir capacity increases. Similar results have been shown previously using simpler tools such as CO2-PENS.

Figure 5 shows the corresponding analysis for permeability, including the total injection and storage cost (Figure 5f). Changing the permeability of the reservoir has a relatively major effect on the injection rate, an intuitive response given that permeability has the largest impact on a fluid's ability to flow through a reservoir. When varying permeability, plume radius and injected CO2 volume largely follow a path similar to the injection rate, since increasing permeability makes it more preferential for the plume to expand than to increase pressure in the pore space. It is no surprise that storage area density and reservoir capacity follow similar paths, as lower permeability values make it difficult for the CO2 to penetrate the entirety of the reservoir. The sudden increase in storage area density and reservoir capacity between 10 and 15 mD may be explained by the point at which the CO2 plume is able to effectively reach the majority of the reservoir pore space. Injection and storage costs fall with rising permeability because fewer wells are needed while the reservoir storage capacity increases. Reservoir storage capacity in SCO2T is based on how much CO2 an operator can actually inject and store in a given time period, not a static or arbitrary calculation based on available pore space.
Figure 6 illustrates the impact of holding depth, thickness, permeability, and geothermal gradient constant while varying porosity from 0.05 to 0.4. These changes can impact CO2 injection rate (Figure 6a), CO2 plume radius and volume (Figure 6b), storage area density (Figure 6c), number of wells (Figure 6d), reservoir capacity (Figure 6e), and total injection and storage cost (Figure 6f). Increasing porosity has no effect on the injection rate (Figure 6a) and therefore no impact on the injected volume (Figure 6b; secondary y-axis). However, the plume radius does fall as porosity increases, since the same volume of CO2 occupies a decreasing proportion of the formation (Figure 6b; primary y-axis). Accordingly, the storage area density increases because the same amount of CO2 needs a smaller 2D footprint (Figure 6c). Because the plume radius decreases while the injection rate is steady, more wells can be placed in the 2D footprint (Figure 6d), and thus overall reservoir capacity also increases (Figure 6e). Although the number of wells across the site increases, raising the upfront fixed capital costs, the increased storage area density means that injection and storage costs fall (Figure 6f).

Uncertainty analysis

In addition to examining the sensitivity of individual geologic parameters across a set range, users can vary multiple parameters at the same time through probabilistic distributions of input parameters. This is shown in Figure 7, where formation thickness, permeability, and porosity are randomly sampled using a normal distribution with a standard deviation equal to 10% of the mean value. By default, SCO2T produces scatterplots with up to 10,000 points; users can increase or reduce that limit. In this case, users can visualize the cloud of possible outcomes for injection rates (top row in Figure 7), plume radius (middle row), and injection and storage costs (bottom row). In combination with permeability and thickness, the porosity signal can still be seen in its impact on plume radius (Figure 7f) but not in the injection rate (Figure 7c) or final costs (Figure 7i). The total economic impact of randomly varying thickness, permeability, and porosity at the same time can be seen in Figure 8. The histogram highlights the likelihood that the economic performance of a reservoir will fall within a certain range. Unsurprisingly, given that the input variables were assumed to vary with a normal distribution, the output injection and storage costs are largely normally distributed too. SCO2T also allows users to assume a uniform or log-normal distribution for any variable (analysis not shown here).
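A minimal sketch of this sampling scheme follows: each uncertain input is drawn from a normal distribution whose standard deviation is 10% of its mean, and each joint sample would then be evaluated by SCO2T's ROMs and economic model. The function names and mean values are illustrative only.

import random

def sample_inputs(mean_thickness_m, mean_perm_md, mean_porosity,
                  n=10000, rel_sd=0.10, seed=42):
    # Draw n joint samples; each parameter is normal with a standard
    # deviation of 10% of its mean, matching the Figure 7/8 setup.
    rng = random.Random(seed)
    samples = []
    for _ in range(n):
        samples.append({
            "thickness_m": rng.gauss(mean_thickness_m, rel_sd * mean_thickness_m),
            "permeability_md": rng.gauss(mean_perm_md, rel_sd * mean_perm_md),
            "porosity": rng.gauss(mean_porosity, rel_sd * mean_porosity),
        })
    return samples

# Hypothetical means; each sample would then be run through SCO2T's ROMs and
# economic model to build the cost histogram of Figure 8.
samples = sample_inputs(100.0, 50.0, 0.15)
print(len(samples), samples[0])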
CONCLUSIONS AND FUTURE RESEARCH

CCS technology is likely to be widely deployed in coming decades due to both climate and economic drivers. This will require identifying and characterizing suitable storage sites for vast volumes of CO2, including their sequestration costs. The SCO2T tool has been developed to address this need: it can analyze hundreds of thousands or millions of reservoir combinations in seconds or minutes, and it couples economics with sequestration engineering. Through the sensitivity analysis we showed that increasing depth and permeability can both lead to increased CO2 injection rates, increased storage potential, and reduced costs, while increasing porosity reduces costs without impacting the injection rate (CO2 is injected at a constant pressure in all cases) by increasing the reservoir capacity. Through uncertainty analysis, where formation thickness, permeability, and porosity were randomly sampled, we showed that final sequestration costs are normally distributed, with upper-bound costs around 50% higher than the lower-bound costs. While site selection decisions will ultimately require detailed site characterization and permitting, SCO2T provides an inexpensive screening tool that can help prioritize projects based on the complex interplay of reservoir, infrastructure (e.g., proximity to pipelines), and other (e.g., land use, legal) constraints on the suitability of certain regions for CCS. Future work will focus on further developing SCO2T and applying it to local-to-national scenarios. Future developments will include adding functionality to explore injection and storage over multiple timeframes as well as over more complex geologies, along with other parameters including heterogeneity, different fluid properties, different depositional environments, and the ability to fine-tune parameters such as residual water and relative endpoint permeabilities. Because SCO2T is so fast running, we also intend to apply it to national carbon sequestration problems, including the spatial coverage of databases such as NATCARB.
def log_handler(self, msg): def missing_config_message(source): _variables = messages["missing_configs"][source] _message = "The following config variable{} need{} to be set:\n • {}".format( *("s", "") if len(_variables) > 1 else ("", "s"), "\n • ".join(_variables)) self.messages["error"].append((source, _message)) def missing_binary_message(source): _binaries = messages["missing_binaries"][source] _message = "The following executable{} {} needed but could not be found:\n • {}".format( *("s", "are") if len(_binaries) > 1 else ("", "is"), "\n • ".join(_binaries)) self.messages["error"].append((source, _message)) def missing_class_message(source, classes=None): _variables = messages["missing_classes"][source] if not _variables: _variables = classes _message = "The following class{} need{} to be set:\n • {}".format( *("es", "") if len(_variables) > 1 else ("", "s"), "\n • ".join(_variables)) if "text" in _variables: _message += "\n\nNote: The 'text' class can also be set using the configuration variable " \ "'import.document_annotation', but only if it refers to an annotation from the " \ "source files." self.messages["error"].append((source, _message)) level = msg["level"] if level == "run_info" and self.use_progressbar: lines = msg["msg"].splitlines()[2:] total_jobs = lines[-1].strip() self.jobs = {} for j in lines[:-1]: _, count, job = j.split("\t") self.jobs[job.replace("::", ":")] = int(count) if self.bar is None: if total_jobs.isdigit(): self.setup_bar(int(total_jobs)) elif level == "progress": if self.use_progressbar: if self.bar is None: self.setup_bar(msg["total"]) self.progress.advance(self.bar) if not console.is_terminal: percentage = (100 * msg["done"]) // msg["total"] if percentage > self.last_percentage: self.last_percentage = percentage print(f"Progress: {percentage}%") if msg["done"] == msg["total"]: self.stop() elif level == "job_info" and self.use_progressbar: if msg["msg"] and self.bar is not None: self.progress.update(self.bar, text=msg["msg"]) elif level == "info": if msg["msg"] == "Nothing to be done.": self.info(msg["msg"]) elif level == "error": handled = False if "SparvErrorMessage" in msg["msg"]: message = re.search( r"{}([^\n]*)\n([^\n]*)\n(.*?){}".format(SparvErrorMessage.start_marker, SparvErrorMessage.end_marker), msg["msg"], flags=re.DOTALL) if message: module, function, error_message = message.groups() error_source = ":".join((module, function)) if module and function else None self.messages["error"].append((error_source, error_message)) handled = True elif "exit status 123" in msg["msg"] or ("SystemExit" in msg["msg"] and "123" in msg["msg"]): handled = True elif "MissingInputException" in msg["msg"] or "MissingOutputException" in msg["msg"]: msg_contents = re.search(r" for rule (\S+):\n(.+)", msg["msg"]) rule_name, filelist = msg_contents.groups() rule_name = rule_name.replace("::", ":") if self.missing_configs_re.search(filelist): handled = True missing_config_message(rule_name) elif self.missing_binaries_re.search(filelist): handled = True missing_binary_message(rule_name) elif self.missing_classes_re.search(filelist): handled = True missing_class_message(rule_name, self.missing_classes_re.findall(filelist)) if not handled: self.messages["unhandled_error"].append(msg) else: self.handled_error = True elif level in ("warning", "job_error"): self.messages["unhandled_error"].append(msg) elif level == "dag_debug" and "job" in msg: if self.missing_configs_re is None: all_configs = set([v for varlist in messages["missing_configs"].values() for v in varlist]) 
self.missing_configs_re = re.compile(r"\[({})]".format("|".join(all_configs))) if self.missing_binaries_re is None: all_binaries = set([b for binlist in messages["missing_binaries"].values() for b in binlist]) self.missing_binaries_re = re.compile(r"^({})$".format("|".join(all_binaries)), flags=re.MULTILINE) if self.missing_classes_re is None: all_classes = set([v for varlist in messages["missing_classes"].values() for v in varlist]) self.missing_classes_re = re.compile(r"<({})>".format("|".join(all_classes))) if msg["status"] == "selected": job_name = str(msg["job"]).replace("::", ":") if job_name in messages["missing_configs"]: missing_config_message(job_name) self.handled_error = True raise BrokenPipeError()
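The SparvErrorMessage branch above extracts structured errors from free-form log text by bracketing them with sentinel markers. A minimal standalone sketch of that pattern, with made-up marker strings (the real ones are defined on the SparvErrorMessage class):

import re

# Assumed sentinel values; the real ones live on SparvErrorMessage.
START, END = "<<<SPARV_ERROR>>>", "<<<END_SPARV_ERROR>>>"

def extract_error(log_text):
    """Pull (source, message) out of a log blob, or None if no marker pair."""
    m = re.search(
        r"{}([^\n]*)\n([^\n]*)\n(.*?){}".format(re.escape(START), re.escape(END)),
        log_text,
        flags=re.DOTALL,
    )
    if m is None:
        return None
    module, function, message = m.groups()
    source = ":".join((module, function)) if module and function else None
    return source, message

log = f"traceback...\n{START}import\nparse\nConfig variable missing\n{END}"
print(extract_error(log))  # ('import:parse', 'Config variable missing\n')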
/**
 * Serializes and saves a scenario.
 *
 * @param scenario the scenario to save
 *
 * @throws FileNotFoundException if the target file cannot be opened
 * @throws IOException if an I/O error occurs while writing
 */
public static void saveScenario(Scenario scenario) throws FileNotFoundException, IOException {
	XStream xs = new XStream();
	xs.alias("scenario", Scenario.class);
	xs.alias("excel", Spreadsheet_2_5.class);
	xs.alias("surfaceNode", SurfaceNode.class);
	xs.alias("orbitalNode", OrbitalNode.class);
	xs.alias("lagrangeNode", LagrangeNode.class);
	xs.alias("spaceEdge", SpaceEdge.class);
	xs.alias("surfaceEdge", SurfaceEdge.class);
	xs.alias("flightEdge", FlightEdge.class);
	xs.alias("burn", Burn.class);
	// try-with-resources ensures the stream is closed even if toXML throws
	try (FileOutputStream fos = new FileOutputStream(scenario.getFilePath())) {
		xs.toXML(scenario, fos);
	}
}
def show_git_status(style): gs = git_status() if gs is None: return '' p = style('@') if not gs.bare and gs.stashed: p += style('+', fg=Color.LIGHT_YELLOW, bold=True) head_color = Color.LIGHT_BLUE if gs.detached else Color.LIGHT_GREEN p += style(gs.head, fg=head_color) if gs.ahead: p += style('+{}'.format(gs.ahead), fg=Color.GREEN) if gs.behind: p += style(',') if gs.behind: p += style('-{}'.format(gs.behind), fg=Color.RED) if not gs.bare: if gs.staged and gs.unstaged: p += style('*', fg=Color.LIGHT_YELLOW, bold=True) elif gs.staged: p += style('*', fg=Color.GREEN) elif gs.unstaged: p += style('*', fg=Color.RED) if gs.untracked: p += style('+', fg=Color.RED, bold=True) if gs.state is not None: p += style('[' + gs.state.value + ']', fg=Color.MAGENTA) if gs.conflict: p += style('!', fg=Color.RED, bold=True) return p
package cli

import (
	"log"
	"os"
	"strings"

	"github.com/blackcrw/wafi/cli/cmd"
	"github.com/blackcrw/wafi/internal"
	"github.com/blackcrw/wafi/pkg/nettools"

	"github.com/spf13/cobra"
)

var root = &cobra.Command{
	Use:     "wafi",
	Short:   "WAFI",
	Long:    internal.TextBanner() + `WAFI (Web Application Firewall / Intrusion)`,
	Run:     cmd.RootCMDRun,
	PostRun: cmd.RootCMDPostRun,
}

func init() {
	cobra.OnInitialize(checks_lists)

	root.PersistentFlags().StringP("url", "u", "", "Target URL (Ex: http(s)://example.com/). ")
	root.MarkPersistentFlagRequired("url")
}

func checks_lists() {
	var target, _ = root.Flags().GetString("url")

	internal.SimpleBanner()

	// Normalize the target so later path handling is predictable.
	if !strings.HasSuffix(target, "/") {
		target = target + "/"
	}

	if !nettools.NetTools_URLValidate(target) {
		log.Fatalln("The URL is not valid")
	}
}

func Execute() {
	if err := root.Execute(); err != nil {
		// Exit non-zero so callers can detect the failure.
		os.Exit(1)
	}
}
#include "module_system_part3.h" extern "C" { #include "system_part3_loader.c" } /** * Declares the master dynamic link table, that exports individual dynamic link libraries. */ DYNALIB_TABLE_EXTERN(system_module_part3); DYNALIB_TABLE_EXTERN(hal_usb); DYNALIB_TABLE_EXTERN(hal_cellular); DYNALIB_TABLE_EXTERN(hal_socket); DYNALIB_TABLE_EXTERN(hal_bootloader); /** * The order of these declarations MUST MATCH the order of declarations in * the module_system_part3_exports.ld */ extern "C" __attribute__((externally_visible)) const void* const system_part3_module[] = { DYNALIB_TABLE_NAME(system_module_part3), DYNALIB_TABLE_NAME(hal_usb), DYNALIB_TABLE_NAME(hal_cellular), DYNALIB_TABLE_NAME(hal_socket), DYNALIB_TABLE_NAME(hal_bootloader) };
import logging import pathlib import joblib from tqdm import tqdm import pandas as pd from util import init, reduce_mem_usage, AGGREGATION_LEVELS def dump(df, name): df = reduce_mem_usage(df) save_dir = pathlib.Path('../data/04_agg') if not save_dir.exists(): save_dir.mkdir(parents=True) joblib.dump(df, save_dir / f'{name}.joblib', compress=True) def main(run_name): v_sales = joblib.load('../data/01_readcsv/v_sales.joblib') calendar = joblib.load('../data/02_fe/calendar.joblib') prices = joblib.load('../data/02_fe/prices.joblib') snap = joblib.load('../data/02_fe/snap.joblib') v_sales = v_sales.merge(calendar) v_sales = v_sales.merge(snap) v_sales = v_sales.merge(prices) aggregation_functions = [ {}, {}, {'state_id': 'first'}, {}, {'cat_id': 'first'}, {}, {'cat_id': 'first'}, {'state_id': 'first'}, {'state_id': 'first', 'cat_id': 'first'}, {'cat_id': 'first', 'dept_id': 'first'}, {'cat_id': 'first', 'dept_id': 'first'}, {} ] common_functions = { 'sales': 'sum', 'release_ago': 'mean', 'wm_yr_wk': 'first', 'weekday': 'first', 'wday': 'first', 'month': 'first', 'year': 'first', 'event_name_1': 'first', 'event_type_1': 'first', 'event_name_2': 'first', 'event_type_2': 'first', 'snap': 'first', 'day': 'first', 'week': 'first', 'year_delta': 'first', 'week_of_month': 'first', 'day_of_week': 'first', 'weekend': 'first', 'holiday': 'first', 'holiday_in_weekday': 'first', 'christmas_day': 'first', 'sell_price': 'mean', 'diff_price': 'mean', 'price_max': 'mean', 'price_min': 'mean', 'price_std': 'mean', 'price_mean': 'mean', 'price_trend': 'mean', 'price_norm': 'mean', 'diff_price_norm': 'mean', 'price_nunique': 'mean', 'dept_max': 'mean', 'dept_min': 'mean', 'dept_std': 'mean', 'dept_mean': 'mean', 'price_in_dept': 'mean', 'mean_in_dept': 'mean', 'cat_max': 'mean', 'cat_min': 'mean', 'cat_std': 'mean', 'cat_mean': 'mean', 'price_in_cat': 'mean', 'mean_in_cat': 'mean', 'price_in_month': 'mean', 'price_in_year': 'mean', } for a in aggregation_functions: a.update(common_functions) aggregated_dfs = [] for i, level in tqdm(enumerate(AGGREGATION_LEVELS), total=len(AGGREGATION_LEVELS)): logging.info(f'aggregate level = {i}: {level}') if i == 11: # no aggregation df_agg = v_sales.copy() else: df_agg = v_sales.groupby(level + ['d']).agg(aggregation_functions[i]).reset_index() df_agg['sort_key'] = df_agg['d'].str[2:].astype(int) df_agg = df_agg.sort_values(level + ['sort_key']).reset_index(drop=True) if i == 0: df_agg.insert(0, 'id', 'Total_X_evaluation') elif len(level) == 1: id1 = level[0] df_agg.insert(0, 'id', (df_agg[id1] + '_X_evaluation')) else: id1, id2 = level[0], level[1] df_agg.insert(0, 'id', (df_agg[id1] + '_' + df_agg[id2] + '_evaluation')) df_agg.insert(0, 'aggregation_level', i) aggregated_dfs.append(df_agg) dump(pd.concat(aggregated_dfs[:9]), 'v_sales_agg') dump(pd.concat(aggregated_dfs[9:12]), 'v_sales_each') if __name__ == "__main__": run_name = init(__file__) try: main(run_name) except: logging.exception('exception') finally: logging.info('end')
import { ITranslateRequest, TranslateData, ITranslationObject, NgxTranslateData } from './models'; import * as fs from 'fs'; import { clone, eachObj, objValue, setObjValue } from './json-util'; export class Ngx { constructor(public workDir: string, private srcLang = 'en') { } readData(): Promise<TranslateData> { const srcFile = `${this.srcLang}.json`; return new Promise((resolve) => { const files = fs.readdirSync(this.workDir); if (files.indexOf(srcFile) < 0) { // not an ngx-translate i18n dir return resolve(null); } const result: TranslateData = new NgxTranslateData(); const srcLang = JSON.parse(fs.readFileSync(`${this.workDir}/${srcFile}`).toString()); files.forEach((file: string) => { if (file !== srcFile) { const lang = JSON.parse(fs.readFileSync(`${this.workDir}/${file}`).toString()); const object: ITranslationObject = { object: lang, file: file, isChange: false }; result.objects.push(object); const langKey = file.replace('.json', ''); eachObj(srcLang, [], (path, key, value) => { const original = value; const translation = objValue(lang, [...path, key]); // console.log(path, key, original, translation); if (!translation) { object.isChange = true; result.requests.push({ srcLang: this.srcLang, lang: langKey, object: lang, path: [...path, key], original: original }); } }); } }); resolve(result); }); } writeData(data: TranslateData): Promise<any> { return new Promise((resolve) => { data.objects.forEach((t: ITranslationObject) => { if (t.isChange) { console.log(`Writing '${this.workDir}/${t.file}' translation file`); fs.writeFileSync(`${this.workDir}/${t.file}`, JSON.stringify(t.object, null, 2)) } }); resolve(); }); } }
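The readData walk above leans on recursive helpers from json-util. A rough Python sketch of the same idea, for reference (the names here are my own illustration, not this codebase's API): walk the source-language JSON, look up each leaf path in a target-language JSON, and collect the paths that still need translation.

def walk(obj, path=()):
    """Yield (path, value) for every leaf in a nested dict."""
    for key, value in obj.items():
        if isinstance(value, dict):
            yield from walk(value, path + (key,))
        else:
            yield path + (key,), value

def lookup(obj, path):
    """Return the value at path, or None if any segment is missing."""
    for key in path:
        if not isinstance(obj, dict) or key not in obj:
            return None
        obj = obj[key]
    return obj

src = {"menu": {"save": "Save", "open": "Open"}}
de = {"menu": {"save": "Speichern"}}

missing = [(path, original) for path, original in walk(src)
           if lookup(de, path) is None]
print(missing)  # [(('menu', 'open'), 'Open')]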
# Assumes, at module level:
#   from dataclasses import fields, is_dataclass
#   from typing import get_args, get_origin
def __from_dictionary(cls, c, d):
    # Dataclass: rebuild each field recursively using its declared type.
    if is_dataclass(c):
        fieldtypes = {f.name: f.type for f in fields(c)}
        return c(**{f: cls.__from_dictionary(fieldtypes[f], d[f]) for f in d})
    # List[T]: recurse into each element with the element type T. (The
    # original called a dataclass-only inner helper here, which failed for
    # lists of non-dataclass elements or nested lists.)
    if get_origin(c) is list:
        return [cls.__from_dictionary(get_args(c)[0], i) for i in d]
    # Anything else is treated as a plain value.
    return d
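A self-contained usage sketch of the same recursion, with the helper exposed under an assumed public name from_dict (the name-mangled private method above is not callable from outside its class):

from dataclasses import dataclass, fields, is_dataclass
from typing import List, get_args, get_origin

def from_dict(c, d):
    """Standalone version of the helper above (assumed public wrapper)."""
    if is_dataclass(c):
        fieldtypes = {f.name: f.type for f in fields(c)}
        return c(**{f: from_dict(fieldtypes[f], d[f]) for f in d})
    if get_origin(c) is list:
        return [from_dict(get_args(c)[0], i) for i in d]
    return d

@dataclass
class Point:
    x: int
    y: int

@dataclass
class Polygon:
    name: str
    points: List[Point]

poly = from_dict(Polygon, {"name": "tri",
                           "points": [{"x": 0, "y": 0}, {"x": 1, "y": 2}]})
print(poly)  # Polygon(name='tri', points=[Point(x=0, y=0), Point(x=1, y=2)])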
// <NAME> 2013
#include "GLContext.h"
#include <IndyCore/CoreDefines.h>
#include "GL/wglew.h"

namespace Indy
{
	// static members
	std::vector<GLContext*> GLContext::m_contexts;
	GLContext* GLContext::m_currentGLContextBound = NULL;

	GLContext::GLContext( const OpenGLInfo& openGLInfo) : m_openGLInfo(openGLInfo)
	{
		m_contexts.push_back( this);
	}

	GLContext::~GLContext( void)
	{
		if( isThisTheCurrentGLContext())
			BREAKPOINT( GLContext is still bound while trying to be deleted!);

		// remove this from m_contexts
		bool found = false;
		unsigned int numContexts = m_contexts.size();
		unsigned int i = 0;
		for( ; i < numContexts; ++i)
		{
			if( m_contexts[i] == this)
			{
				found = true;
				break; // stop so that i still indexes this context
			}
		}

		if( found)
			m_contexts.erase( m_contexts.begin() + i); // was "begin() + i - 1", which erased the wrong entry
		else
			BREAKPOINT(GLContext was not registered);
	}

	void GLContext::Enable ( const GLenum param)
	{
		if ( !isThisTheCurrentGLContext())
			BREAKPOINT(GLEnable is called through context that is currently not bound!);
		glEnable(param);
	}

	void GLContext::Disable( const GLenum param)
	{
		if ( !isThisTheCurrentGLContext())
			BREAKPOINT(GLDisable is called through context that is currently not bound!);
		glDisable(param);
	}

	void GLContext::EnableDepthBuffer( void) { Enable(GL_DEPTH_TEST); }
	void GLContext::DisableDepthBuffer( void) { Disable(GL_DEPTH_TEST); }

	void GLContext::EnableDepthWrite( void)
	{
		if ( !isThisTheCurrentGLContext())
			BREAKPOINT(EnableDepthWrite is called through context that is currently not bound!);
		glDepthMask( true);
	}

	void GLContext::DisableDepthWrite( void)
	{
		if ( !isThisTheCurrentGLContext())
			BREAKPOINT(DisableDepthWrite is called through context that is currently not bound!);
		glDepthMask( false);
	}

	void GLContext::EnableCulling( void) { Enable(GL_CULL_FACE); }
	void GLContext::DisableCulling( void) { Disable(GL_CULL_FACE); }

	void GLContext::SetCullFace( const GLenum cullFace)
	{
		if ( !isThisTheCurrentGLContext())
			BREAKPOINT(SetCullFace is called through context that is currently not bound!);
		glCullFace(cullFace);
	}

	void GLContext::EnableAlphaBlending( void) { Enable(GL_BLEND); }
	void GLContext::DisableAlphaBlending( void) { Disable(GL_BLEND); }

	void GLContext::SetBlendFunc( const GLenum source, const GLenum dest)
	{
		if ( !isThisTheCurrentGLContext())
			BREAKPOINT(SetBlendFunc is called through context that is currently not bound!);
		glBlendFunc(source, dest);
	}

	void GLContext::ResizeViewport( const GLint x, const GLint y, const GLsizei width, const GLsizei height)
	{
		if ( !isThisTheCurrentGLContext())
			BREAKPOINT(ResizeViewport is called through context that is currently not bound!);
		glViewport( x, y, width, height);
	}

	void GLContext::ClearBuffers( const GLulong color /*= 0x0*/, const GLbitfield buffersToClear /*= GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT*/)
	{
		if ( !isThisTheCurrentGLContext())
			BREAKPOINT(Clear is called through context that is currently not bound!);

		GLclampf r = (GLclampf)((color >> 24) & 0xff) / 255.0f;
		GLclampf g = (GLclampf)((color >> 16) & 0xff) / 255.0f;
		GLclampf b = (GLclampf)((color >> 8) & 0xff) / 255.0f;
		GLclampf a = (GLclampf)((color) & 0xff) / 255.0f; // was "&& 0xff", a logical AND that broke the alpha channel

		glClearColor( r, g, b, a);
		glClear( buffersToClear);
	}

	bool GLContext::HasGLError( void)
	{
		if ( !isThisTheCurrentGLContext())
			BREAKPOINT(HasGLError is called through context that is currently not bound!);

#ifdef DEBUG
		{
			GLenum error = glGetError();
			if( error == GL_NO_ERROR)
				return false;

			switch (error)
			{
			case GL_INVALID_ENUM: printf("OpenGL Invalid Enum \n"); break;
			case GL_INVALID_VALUE: printf("OpenGL Invalid Value \n"); break;
			case GL_INVALID_OPERATION: printf("OpenGL Invalid Operation \n"); break;
			case GL_STACK_OVERFLOW: printf("OpenGL Stack Overflow \n"); break;
			case GL_STACK_UNDERFLOW: printf("OpenGL Stack Underflow \n"); break;
			case GL_OUT_OF_MEMORY: printf("OpenGL Out of Mem \n"); break;
			}
			return true;
		}
#endif
		return false;
	}

	/* --- Private functions --- */

	bool GLContext::isThisTheCurrentGLContext( void)
	{
		return this == m_currentGLContextBound;
	}
}
import { AppConfig } from "./index";

const getLanguage = (state: AppConfig) => state.language;
const getTheme = (state: AppConfig) => state.theme;

export default {
  getLanguage,
  getTheme
};
#include <iostream>
#include <vector>
#include <string>
using namespace std;

const int INF = 0x3f3f3f3f;
typedef long long ll;

string str1, str2;
int a, b;
vector<int> vi, vii;

int main(){
    ios::sync_with_stdio(false);
    while(cin >> a >> b){
        cin >> str1 >> str2;
        vii.clear();
        ll ans = INF;
        // Slide str1 across str2 and count mismatches at each offset,
        // remembering the offset with the fewest mismatched positions.
        // (The loop bound avoids the unsigned underflow the original
        // "str2.length()-str1.length()+1" had when str1 is longer.)
        for(size_t i = 0; i + str1.length() <= str2.length(); i++){
            ll t = 0;
            for(size_t j = 0; j < str1.length(); j++){
                if(str2[i + j] != str1[j]){
                    t++;
                    vi.push_back(j + 1); // mismatch positions are 1-based
                }
            }
            if(t < ans){
                vii = vi;
                ans = t;
            }
            vi.clear();
        }
        cout << ans << endl;
        for(size_t i = 0; i < vii.size(); i++){
            cout << vii[i] << (i + 1 == vii.size() ? "\n" : " ");
        }
    }
    return 0;
}
Chained to the Rocks: A short overview of what Mark Rosewater (MaRo) said: Prometheus gave fire to humans, and the gods punished him by chaining him to a rock (or rocks) and having an eagle eat his liver every day, the liver growing back each night, ad infinitum. Here is some additional background information: The eagle is the chosen bird as it is the symbol of Zeus, and the act of stealing fire was a direct affront to Zeus (he had taken fire away from humans). The mountain upon which Prometheus was chained is Kazbek, a part of the Caucasus which is considered one of the pillars supporting the world.

Gift of Immortality: MaRo went over this card as well, but covered the flavor only so far as saying it happened in Greek myth. Here are some of the details: The Greeks believed in everlasting souls that lingered in Hades' realm but did not consider that immortality. Immortality to them was centered on physical immortality and thus only counted if it included the body. Achilles is perhaps the most famous person with immortality. At birth he was either dipped in the river Styx (held by his heel, thus his weakness) or placed over a fire to burn the mortality away (his mother was interrupted in this process). Even after he is killed, Thetis (his mother) takes his body and resurrects him, granting him an eternal existence in either the Elysian plains or the Islands of the Blessed. Others to gain immortality: Amphiaraus, Ganymede, Ino, Iphigenia, Menelaus, Peleus, Alcmene, Castor, Heracles, and Melicertes. Flavorfully, I wish they had used "When enchanted creature dies or is exiled," as I feel that would have captured immortality much better. Interestingly, it fits very well with the theme, common in the stories, of dying and being resurrected before attaining immortality.

Phalanx Leader: Although not a part of Greek "mythology," the flavor captured seems worthy of mentioning. The phalanx is fairly well known and has come to mean more than just the original Greek military formation. Part of the formation's strength was in the unison movement of the group and the cooperation of the unit. The rear ranks of the phalanx would use the back end of their spears to finish off opponents as the unit marched over them, for instance. Having the card buff your entire army is a very fitting ability for a phalanx leader.

Curse of the Swine: MaRo touched on this card, but flavor-wise just said it had to be made and that Odysseus' men were changed into swine. The card really only covers a third to half of the story: Circe welcomes Odysseus' men into her palace filled with docile lions and wolves. She prepares a large feast full of foods they were familiar with. All the men except Eurylochus gorge themselves and consume a potion added to the food. The potion allows Circe to use her wand to transform them into swine, and Eurylochus escapes. Eurylochus makes it to where Odysseus and some of his men are watching the boats. Odysseus goes to free his men but is intercepted by Hermes. Hermes tells Odysseus how to free his men and not fall under Circe's spell. After freeing his men, Odysseus and his crew stay and feast for a year while Odysseus gets advice on the remainder of his journey from Circe.

Lost in a Labyrinth: The labyrinth in Greek myth was constructed by the legendary artificer Daedalus at the behest of King Minos of Crete. King Minos' wife had given birth to the Minotaur, and they needed a place to keep him, as she did not want him killed. The design of the labyrinth was so clever that Daedalus himself barely managed to find a way out.
Eventually Theseus was given a quest to kill the Minotaur. Ariadne aided Theseus by giving him thread so he could find his way out again. Personally I think the card would have worked much better flavorfully as a pacifism effect, but losing power has been associated with being disoriented in past cards.

Insatiable Harpy: Phineus was a king with the gift of prophecy. He used his gift liberally, and this eventually upset Zeus, who felt he revealed too much. Zeus decided to punish him by blinding him and placing him on an island. Once there, Zeus placed a large buffet of food. Every time Phineus would reach for the food, a harpy would come down and steal some, then befoul the remainder. This process would continue in a loop. Although a rather innocuous card, I think the flavor here is hit very well; the name, creature type, flavor text, and art all resonate very well with the story.

Sip of Hemlock: The execution of Socrates is fairly well known, though it is somewhat contested whether it actually happened as depicted by Plato. Ancient Greece had used Conium (hemlock is the species native to the Mediterranean) for executions for some period. Socrates was put on trial for impiety and corrupting the youth. The jury convicted him and determined hemlock execution would be the punishment. Socrates had the chance to flee but did not do so, and in the end drank the liquid himself, as he stood by the principle of obedience to the law. Given the knowledge of its deadly properties, it would not be hard to believe that hemlock was used as an assassination method. The card itself goes more toward that theory and away from the Socrates story. Granted, depicting a suicide on a Magic card would be difficult.

Stormbreath Dragon: Sam Stoddard wrote an article on the process this card went through, but all that is mentioned flavor-wise is that it is modeled after Typhon. Typhon is the last son of Gaia and known as the Father of All Monsters. He was considered the largest and most fearsome of all creatures as well as the deadliest monster. His upper half was so large that it could reach the stars. Various depictions have him with a human upper half but, in place of a head, 100 dragon heads erupting from his neck and shoulders, while some give him a human head with dragon heads for fingertips. He was feared even by the Olympians. Gaia asked Typhon to kill Zeus, and during the first battle he stole Zeus' sinews. Hermes recovered them, and during the second battle Zeus trapped Typhon under Mount Etna. Considering the original aim, I feel the card itself is very far from Typhon, with nothing really recognizable. The art is even missing anything to give a sense of scale, leaving no indication of how large the dragon is.

Satyr Hedonist: I didn't include Satyrs in my previous section on monsters, as I don't think they really qualify as monsters in most senses. Satyrs in early Greek myth were always male; it was not until later that poets added female Satyrs. They were companions of Pan and Dionysus. Interestingly, the original depictions of Satyrs were not half-goat, half-man as we currently think of them. Originally they were men with some goat features (tail, ears, and occasionally a phallus). It was when Roman mythology began to influence the Greek versions that the half-goat, half-man depictions started. Satyrs are described as carefree and very musically inclined. There were also Satyr plays, which would follow a trio of tragedies during celebrations for Dionysus. Only one such play remains in existence (Cyclops by Euripides).
In Theros the Satyrs are flavorfully quite similar. I chose Satyr Hedonist to represent them as the card on the whole (art, name, flavor text, and to an extent mechanics) is quite apropos.

Anax and Cymede: The most obvious inspiration for these two is Leonidas I and Gorgo, King and Queen of Sparta. Akros, the polis that Anax and Cymede lead, is clearly the Sparta-influenced Theros polis. Leonidas is most famous for his stand at the Battle of Thermopylae. He was the second son of the first wife of Anaxandridas [the obvious source of Anax's name] and had another brother, from Anaxandridas' second wife, who was the eldest of the three. Leonidas was not heir to the throne and thus went through the rigorous training all Spartan boys did. Leonidas ascended to the throne after his full brother left in disgust (because their half-brother was chosen to rule) and his half-brother was deposed due to supposed insanity. It is not known how much animosity there was between Leonidas and his brothers, but his two brothers did have animosity between each other.

Anax is heir to the throne but, from early childhood until presumably his teens, was not physically what was expected of a king. There is a clear animosity within the family. Eventually Anax trains to compete in the games and defeats and humiliates his brother, winning over the people.

Gorgo's father was Cleomenes, Leonidas' half-brother. She was of noble birth and was among the few women in the classical period to travel and actively participate in society. We know very little of Cymede other than that she is beautiful and from a powerful family.

The card itself hits in some ways but misses in others. A card like this would suggest some of the abilities represent Anax and some Cymede, but which is which? The flavor text could allude to a story of Leonidas: he defended his right and ability to rule in some part on the basis of his military prowess and physical ability.

Ashen Rider: In ancient Greece, Archons were important political figures and the chief magistrates of many of the Greek city-states. In Athens the role of Archon was split into several parts. Over time the Archons' power and influence waned. They were given a lifetime membership to a council, though it was a token gesture because the council in question had little real power.

In Theros the Archons are "a race of mysterious conquerors" who once ruled over the plane. During that time they dispensed justice and ruled with an iron fist. Over time their hold lessened, and one by one they began to fall. Their holdings eventually coalesced into the poleis of Theros. I chose Ashen Rider to represent the Archons as its ability and flavor text fit the descriptions fairly well.

Shipwreck Singer: The sirens in Greek myth numbered from 2 to 5 and did not always have wings. They would lure men to their islands with their beautiful songs (not always "happy," just beautiful), and the men's ships would then wreck upon the rocks surrounding the islands. They were destined to die when a mortal passed by, heard their song, and did not stop. Thus when Odysseus' crew passed by, the sirens apparently perished afterward (Odysseus had all his men fill their ears with beeswax and then tie him tightly to the mast so he could not jump into the sea).

The Theros sirens are much more hostile and monstrous. They actively attack humans passing by and feed on their flesh. The card does a very good job of displaying flavor through mechanics. The first ability is singing the song and the second is attacking those lured in.
def from_wav(cls, fps):
    """Build an instance from one or more WAV files sharing a sample rate.

    Assumes scipy.io.wavfile (as ``wavfile``), numpy (as ``np``), and the
    module's ``rescale`` helper are in scope.
    """
    fpi = iter(fps)
    # Take the sample rate from the first file; the rest are assumed to match.
    fs, data = wavfile.read(next(fpi))
    hlist = [data] + [wavfile.read(fp)[1] for fp in fpi]
    h = np.array(hlist)
    # Integer PCM input: rescale to floating point in [-1, 1].
    if data.dtype in [np.uint8, np.int16, np.int32]:
        lim_orig = (np.iinfo(data.dtype).min, np.iinfo(data.dtype).max)
        lim_new = (-1.0, 1.0)
        h = rescale(h, lim_orig, lim_new).astype(np.double)
    return cls.from_time(fs, h)
def resolve():
    import math

    X = int(input())
    # Smallest K such that K*X is a multiple of 360 degrees:
    # K = lcm(X, 360) / X = 360 / gcd(X, 360).
    print(360 // math.gcd(X, 360))


resolve()
def plotResults(self, boolShow=False):
    # Global style settings for publication-quality output.
    mpl.rcParams.update({
        'xtick.labelsize': 26,
        'ytick.labelsize': 26,
        'ps.useafm': True,
        'pdf.use14corefonts': True,
        'text.usetex': True,
    })
    plt.rc('axes', labelsize=26)
    plt.rc('legend', fontsize=18)

    print("### Plotting the spectrogram.")
    plt.figure(figsize=(22, 12))
    rArray = self.rArray
    thetaArray = self.thetaArray
    extent = [rArray[0], rArray[-1], thetaArray[-1], thetaArray[0]]
    ax = plt.gca()
    pl = ax.imshow(np.abs(self.spectrogram), interpolation='nearest',
                   aspect="auto", cmap="viridis", extent=extent)
    # Overlay the detected zeros of the transform.
    for x, y in self.zerosPolar:
        ax.plot(x, y, 'o', color="white")
    ax.set_xlim(rArray[0], rArray[-1])
    ax.set_ylim(thetaArray[0], thetaArray[-1])
    plt.yticks(-np.pi + np.pi * np.arange(5) / 2.,
               [r"$-\pi$", r"$-\pi/2$", r"$0$", r"$\pi/2$", r"$\pi$"])
    ax.set_xlabel(r"$r$")
    ax.set_ylabel(r"$\theta$")
    plt.colorbar(pl, orientation='horizontal')
    base = "spectrogram_kravchuk_" + self.expId + "_p=" + str(self.p)
    plt.savefig(base + ".pdf")
    plt.savefig(base + ".eps")
    if boolShow:
        plt.show()
// TestNG imports (inferred from the @BeforeTest/@Test annotations and
// dependsOnMethods usage below).
import org.testng.Assert;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

/**
 * Custom Runtime test class.
 *
 * @since 5.0.0
 */
public class CustomRuntimeTest {

    CustomRuntime customRuntime;

    @BeforeTest
    public void setup() throws RuntimeServiceException {
        customRuntime = new CustomRuntime();
    }

    @Test
    public void testInitRuntime() {
        customRuntime.init();
        Assert.assertEquals(customRuntime.getState(), RuntimeState.INACTIVE);
    }

    @Test(dependsOnMethods = {"testInitRuntime"})
    public void testRuntimeStart() throws RuntimeServiceException {
        customRuntime.start();
        Assert.assertEquals(customRuntime.getState(), RuntimeState.ACTIVE);
    }

    @Test(dependsOnMethods = {"testRuntimeStart"})
    public void testRuntimeStartMaintenance() throws RuntimeServiceException {
        customRuntime.beginMaintenance();
        Assert.assertEquals(customRuntime.getState(), RuntimeState.MAINTENANCE);
    }

    @Test(dependsOnMethods = {"testRuntimeStartMaintenance"})
    public void testRuntimeStopMaintenance() throws RuntimeServiceException {
        customRuntime.endMaintenance();
        Assert.assertEquals(customRuntime.getState(), RuntimeState.INACTIVE);
    }

    @Test(dependsOnMethods = {"testRuntimeStopMaintenance"})
    public void testRuntimeStop() throws RuntimeServiceException {
        customRuntime.stop();
        Assert.assertEquals(customRuntime.getState(), RuntimeState.INACTIVE);
    }
}
// Returns command line for child GTest process based on the command line // of current process. |test_names| is a vector of test full names // (e.g. "A.B"), |output_file| is path to the GTest XML output file. CommandLine GetCommandLineForChildGTestProcess( const std::vector<std::string>& test_names, const base::FilePath& output_file) { CommandLine new_cmd_line(*CommandLine::ForCurrentProcess()); new_cmd_line.AppendSwitchPath(switches::kTestLauncherOutput, output_file); new_cmd_line.AppendSwitchASCII(kGTestFilterFlag, JoinString(test_names, ":")); new_cmd_line.AppendSwitch(kSingleProcessTestsFlag); return new_cmd_line; }
def Args(parser): arg_support.AddEndpointNameArg(parser) arg_support.AddAsyncArg(parser)
Santa Clara County prosecutors have found the owners of two cats that were wearing collars found in the car of an accused serial cat killer. The collars are critical pieces of evidence in the prosecution's case.

Now that prosecutors know who the owners of the cats that wore those collars are, they can add more counts to the charges against Robert Farmer. His main charge is animal cruelty.

ABC7 News first broke the story in late September when a man, who police say is Farmer, was caught on surveillance video snatching a cat from the lawn of a home in the Cambrian District of San Jose. Neighbors feared there was a serial catnapper, and others came forward saying they too had cats missing or found dead of blunt force trauma.

Last week, Santa Clara County prosecutors asked for the public's help in finding the owners of two cats, based on the collars the cats were wearing when they were snatched. Police found the collars, along with a dead cat, in Farmer's car when the 24-year-old was arrested in October.

The pink collar belongs to Thumper, a cat owned by Leo Martinez and his wife. His remains were found in a parking lot near their home. Leo saw the collar on TV last week and called the district attorney. "It just brought some really bad feelings about this gentleman as to what he did to our animal," said an emotional Leo. "I'm sorry but, you know, she was part of our family."

June Rovai's cat, named Traveller, disappeared at the end of September. She then heard about Farmer's arrest and wondered if her beloved feline could have been one of his victims. Rovai's fears were confirmed when she too saw her cat's collar on the news. "Every day I looked for him until I saw his collar," she said. "The moment I saw his collar I knew for sure that he wasn't coming home."

Farmer has been charged with animal cruelty and other related counts. He'll be back in court Tuesday to enter a plea.
/**
 * Provides customer related functionality.
 *
 * @author <NAME>
 *
 */
module opus.customer {
	requires opus.address;

	uses com.opus.address.AddressRepository;
}
def tree_force(snap, eps): positions = snap['pos'] masses = snap['mass'] unit_length = u.kpc unit_mass = u.Msun unit_accel = const.G * unit_mass / (unit_length**2) desired_accel_unit = u.km / u.s / u.Myr posarray = positions.to(unit_length).value massarray = masses.to(unit_mass).value eps_in_units = eps.to(unit_length).value forcearray = _jbgrav.tree_force(posarray, massarray, eps_in_units) return forcearray * unit_accel.to(desired_accel_unit)
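A hypothetical usage sketch for tree_force, assuming astropy is installed and the module's _jbgrav extension is built; the snapshot dict here is invented for illustration, not data from any real simulation:

import numpy as np
import astropy.units as u

# Hypothetical snapshot: 1000 equal-mass particles in a ~10 kpc cube.
rng = np.random.default_rng(0)
snap = {
    'pos': rng.uniform(-5.0, 5.0, (1000, 3)) * u.kpc,
    'mass': np.full(1000, 1e6) * u.Msun,
}

accel = tree_force(snap, eps=0.1 * u.kpc)  # softening length of 0.1 kpc
print(accel.shape, accel.unit)             # (1000, 3) km / (Myr s)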
/* * Copyright (c) 2013-2017 Intel Corporation. All rights reserved. * * This software is available to you under a choice of one of two * licenses. You may choose to be licensed under the terms of the GNU * General Public License (GPL) Version 2, available from the file * COPYING in the main directory of this source tree, or the * BSD license below: * * Redistribution and use in source and binary forms, with or * without modification, are permitted provided that the following * conditions are met: * * - Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. * * - Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ #include "ofi_atomic.h" #ifndef UNREFERENCED_PARAMETER #define OFI_UNUSED(var) (void)var #else #define OFI_UNUSED UNREFERENCED_PARAMETER #endif /* * Basic atomic operations */ #define OFI_OP_MIN(type,dst,src) if ((dst) > (src)) (dst) = (src) #define OFI_OP_MAX(type,dst,src) if ((dst) < (src)) (dst) = (src) #define OFI_OP_SUM(type,dst,src) (dst) += (src) #define OFI_OP_PROD(type,dst,src) (dst) *= (src) #define OFI_OP_LOR(type,dst,src) (dst) = (dst) || (src) #define OFI_OP_LAND(type,dst,src) (dst) = (dst) && (src) #define OFI_OP_BOR(type,dst,src) (dst) |= (src) #define OFI_OP_BAND(type,dst,src) (dst) &= (src) #define OFI_OP_LXOR(type,dst,src) (dst) = ((dst) && !(src)) || (!(dst) && (src)) #define OFI_OP_BXOR(type,dst,src) (dst) ^= (src) #define OFI_OP_READ(type,dst,src) /* src unused, dst is written to result */ #define OFI_OP_WRITE(type,dst,src) (dst) = (src) #define OFI_OP_CSWAP_EQ(type,dst,src,cmp) if ((cmp) == (dst)) (dst) = (src) #define OFI_OP_CSWAP_NE(type,dst,src,cmp) if ((cmp) != (dst)) (dst) = (src) #define OFI_OP_CSWAP_LE(type,dst,src,cmp) if ((cmp) <= (dst)) (dst) = (src) #define OFI_OP_CSWAP_LT(type,dst,src,cmp) if ((cmp) < (dst)) (dst) = (src) #define OFI_OP_CSWAP_GE(type,dst,src,cmp) if ((cmp) >= (dst)) (dst) = (src) #define OFI_OP_CSWAP_GT(type,dst,src,cmp) if ((cmp) > (dst)) (dst) = (src) #define OFI_OP_MSWAP(type,dst,src,cmp) (dst) = (((src) & (cmp)) | \ ((dst) & ~(cmp))) /* Need special handlers for complex datatypes for portability */ #define OFI_OP_SUM_COMPLEX(type,dst,src) (dst) = ofi_complex_sum_##type(dst,src) #define OFI_OP_PROD_COMPLEX(type,dst,src) (dst) = ofi_complex_prod_##type(dst,src) #define OFI_OP_LOR_COMPLEX(type,dst,src) (dst) = ofi_complex_lor_##type(dst,src) #define OFI_OP_LAND_COMPLEX(type,dst,src) (dst) = ofi_complex_land_##type(dst,src) #define OFI_OP_LXOR_COMPLEX(type,dst,src) (dst) = ofi_complex_lxor_##type(dst,src) #define OFI_OP_READ_COMPLEX OFI_OP_READ #define OFI_OP_WRITE_COMPLEX OFI_OP_WRITE #define OFI_OP_CSWAP_EQ_COMPLEX(type,dst,src,cmp) \ if (ofi_complex_eq_##type(dst,cmp)) (dst) = (src) #define OFI_OP_CSWAP_NE_COMPLEX(type,dst,src,cmp) \ if (!ofi_complex_eq_##type(dst,cmp)) (dst) = (src) /******************************** * ATOMIC 
TYPE function templates ********************************/ #define OFI_DEF_NOOP_NAME NULL, #define OFI_DEF_NOOP_FUNC /* * WRITE */ #define OFI_DEF_WRITE_NAME(op, type) ofi_write_## op ##_## type, #define OFI_DEF_WRITE_COMPLEX_NAME(op, type) ofi_write_## op ##_## type, #define OFI_DEF_WRITE_FUNC(op, type) \ static void ofi_write_## op ##_## type \ (void *dst, const void *src, size_t cnt) \ { \ size_t i; \ type *d = (dst); \ const type *s = (src); \ for (i = 0; i < cnt; i++) \ op(type, d[i], s[i]); \ } #define OFI_DEF_WRITE_COMPLEX_FUNC(op, type) \ static void ofi_write_## op ##_## type \ (void *dst, const void *src, size_t cnt) \ { \ size_t i; \ ofi_complex_##type *d = (dst); \ const ofi_complex_##type *s = (src); \ for (i = 0; i < cnt; i++) \ op(type, d[i], s[i]); \ } /* * READ (fetch) */ #define OFI_DEF_READ_NAME(op, type) ofi_read_## op ##_## type, #define OFI_DEF_READ_COMPLEX_NAME(op, type) ofi_read_## op ##_## type, #define OFI_DEF_READ_FUNC(op, type) \ static void ofi_read_## op ##_## type \ (void *dst, const void *src, void *res, size_t cnt) \ { \ size_t i; \ type *d = (dst); \ type *r = (res); \ OFI_UNUSED(src); \ for (i = 0; i < cnt; i++) \ r[i] = d[i]; \ } #define OFI_DEF_READ_COMPLEX_FUNC(op, type) \ static void ofi_read_## op ##_## type \ (void *dst, const void *src, void *res, size_t cnt) \ { \ size_t i; \ ofi_complex_##type *d = (dst); \ ofi_complex_##type *r = (res); \ OFI_UNUSED(src); \ for (i = 0; i < cnt; i++) \ r[i] = d[i]; \ } /* * READWRITE (fetch-write) */ #define OFI_DEF_READWRITE_NAME(op, type) ofi_readwrite_## op ##_## type, #define OFI_DEF_READWRITE_COMPLEX_NAME(op, type) ofi_readwrite_## op ##_## type, #define OFI_DEF_READWRITE_FUNC(op, type) \ static void ofi_readwrite_## op ##_## type \ (void *dst, const void *src, void *res, size_t cnt) \ { \ size_t i; \ type *d = (dst); \ const type *s = (src); \ type *r = (res); \ for (i = 0; i < cnt; i++) { \ r[i] = d[i]; \ op(type, d[i], s[i]); \ } \ } #define OFI_DEF_READWRITE_COMPLEX_FUNC(op, type) \ static void ofi_readwrite_## op ##_## type \ (void *dst, const void *src, void *res, size_t cnt) \ { \ size_t i; \ ofi_complex_##type *d = (dst); \ const ofi_complex_##type *s = (src); \ ofi_complex_##type *r = (res); \ for (i = 0; i < cnt; i++) { \ r[i] = d[i]; \ op(type, d[i], s[i]); \ } \ } /* * CSWAP */ #define OFI_DEF_CSWAP_NAME(op, type) ofi_cswap_## op ##_## type, #define OFI_DEF_CSWAP_COMPLEX_NAME(op, type) ofi_cswap_## op ##_## type, #define OFI_DEF_CSWAP_FUNC(op, type) \ static void ofi_cswap_## op ##_## type \ (void *dst, const void *src, const void *cmp, \ void *res, size_t cnt) \ { \ size_t i; \ type *d = (dst); \ const type *s = (src); \ const type *c = (cmp); \ type *r = (res); \ for (i = 0; i < cnt; i++) { \ r[i] = d[i]; \ op(type, d[i], s[i], c[i]); \ } \ } #define OFI_DEF_CSWAP_COMPLEX_FUNC(op, type) \ static void ofi_cswap_## op ##_## type \ (void *dst, const void *src, const void *cmp, \ void *res, size_t cnt) \ { \ size_t i; \ ofi_complex_##type *d = (dst); \ const ofi_complex_##type *s = (src); \ const ofi_complex_##type *c = (cmp); \ ofi_complex_##type *r = (res); \ for (i = 0; i < cnt; i++) { \ r[i] = d[i]; \ op(type, d[i], s[i], c[i]); \ } \ } #ifdef HAVE___INT128 /* If __int128 is supported, the existing macros work. 
*/ #define OFI_DEF_WRITE_INT128_NAME(op, type) OFI_DEF_WRITE_NAME(op, type) #define OFI_DEF_WRITE_INT128_FUNC(op, type) OFI_DEF_WRITE_FUNC(op, type) #define OFI_DEF_READ_INT128_NAME(op, type) OFI_DEF_READ_NAME(op, type) #define OFI_DEF_READ_INT128_FUNC(op, type) OFI_DEF_READ_FUNC(op, type) #define OFI_DEF_READWRITE_INT128_NAME(op, type) OFI_DEF_READWRITE_NAME(op, type) #define OFI_DEF_READWRITE_INT128_FUNC(op, type) OFI_DEF_READWRITE_FUNC(op, type) #define OFI_DEF_CSWAP_INT128_NAME(op, type) OFI_DEF_CSWAP_NAME(op, type) #define OFI_DEF_CSWAP_INT128_FUNC(op, type) OFI_DEF_CSWAP_FUNC(op, type) #else /* HAVE___INT128 */ /* If __int128 is not supported, verfication not done. */ #define OFI_DEF_WRITE_INT128_NAME(op, type) NULL, #define OFI_DEF_WRITE_INT128_FUNC(op, type) #define OFI_DEF_READ_INT128_NAME(op, type) NULL, #define OFI_DEF_READ_INT128_FUNC(op, type) #define OFI_DEF_READWRITE_INT128_NAME(op, type) NULL, #define OFI_DEF_READWRITE_INT128_FUNC(op, type) #define OFI_DEF_CSWAP_INT128_NAME(op, type) NULL, #define OFI_DEF_CSWAP_INT128_FUNC(op, type) #endif /* HAVE___INT128 */ /********************************************************************* * Macros create atomic functions for each operation for each datatype *********************************************************************/ /* * Define all handlers in order to populate the dispatch table correctly. * * ATOMICTYPE - WRITE, READ, READWRITE, CSWAP, MSWAP * FUNCNAME - Define function or simply generate function name * The latter is needed to populate the dispatch table * op - OFI_OP_XXX function should perform (e.g. OFI_OP_MIN) */ #define OFI_DEFINE_ALL_HANDLERS(ATOMICTYPE, FUNCNAME, op) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int8_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint8_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int16_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint16_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int32_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint32_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int64_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint64_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, float) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, double) \ OFI_DEF_##ATOMICTYPE##_COMPLEX_##FUNCNAME(op ##_COMPLEX, float) \ OFI_DEF_##ATOMICTYPE##_COMPLEX_##FUNCNAME(op ##_COMPLEX, double)\ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, long_double) \ OFI_DEF_##ATOMICTYPE##_COMPLEX_##FUNCNAME(op ##_COMPLEX, long_double) \ OFI_DEF_##ATOMICTYPE##_INT128_##FUNCNAME(op, ofi_int128_t) \ OFI_DEF_##ATOMICTYPE##_INT128_##FUNCNAME(op, ofi_uint128_t) #define OFI_DEFINE_REALNO_HANDLERS(ATOMICTYPE, FUNCNAME, op) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int8_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint8_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int16_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint16_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int32_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint32_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int64_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint64_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, float) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, double) \ OFI_DEF_NOOP_##FUNCNAME \ OFI_DEF_NOOP_##FUNCNAME \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, long_double) \ OFI_DEF_NOOP_##FUNCNAME \ OFI_DEF_##ATOMICTYPE##_INT128_##FUNCNAME(op, ofi_int128_t) \ OFI_DEF_##ATOMICTYPE##_INT128_##FUNCNAME(op, ofi_uint128_t) #define OFI_DEFINE_INT_HANDLERS(ATOMICTYPE, FUNCNAME, op) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int8_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint8_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int16_t) \ 
OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint16_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int32_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint32_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, int64_t) \ OFI_DEF_##ATOMICTYPE##_##FUNCNAME(op, uint64_t) \ OFI_DEF_NOOP_##FUNCNAME \ OFI_DEF_NOOP_##FUNCNAME \ OFI_DEF_NOOP_##FUNCNAME \ OFI_DEF_NOOP_##FUNCNAME \ OFI_DEF_NOOP_##FUNCNAME \ OFI_DEF_NOOP_##FUNCNAME \ OFI_DEF_##ATOMICTYPE##_INT128_##FUNCNAME(op, ofi_int128_t) \ OFI_DEF_##ATOMICTYPE##_INT128_##FUNCNAME(op, ofi_uint128_t) /********************** * Write dispatch table **********************/ OFI_DEFINE_REALNO_HANDLERS(WRITE, FUNC, OFI_OP_MIN) OFI_DEFINE_REALNO_HANDLERS(WRITE, FUNC, OFI_OP_MAX) OFI_DEFINE_ALL_HANDLERS(WRITE, FUNC, OFI_OP_SUM) OFI_DEFINE_ALL_HANDLERS(WRITE, FUNC, OFI_OP_PROD) OFI_DEFINE_ALL_HANDLERS(WRITE, FUNC, OFI_OP_LOR) OFI_DEFINE_ALL_HANDLERS(WRITE, FUNC, OFI_OP_LAND) OFI_DEFINE_INT_HANDLERS(WRITE, FUNC, OFI_OP_BOR) OFI_DEFINE_INT_HANDLERS(WRITE, FUNC, OFI_OP_BAND) OFI_DEFINE_ALL_HANDLERS(WRITE, FUNC, OFI_OP_LXOR) OFI_DEFINE_INT_HANDLERS(WRITE, FUNC, OFI_OP_BXOR) OFI_DEFINE_ALL_HANDLERS(WRITE, FUNC, OFI_OP_WRITE) /* 5 per line to be easily counted by inspection. */ #define OFI_OP_NOT_SUPPORTED(op) \ NULL, NULL, NULL, NULL, NULL, \ NULL, NULL, NULL, NULL, NULL, \ NULL, NULL, NULL, NULL, NULL, \ NULL void (*ofi_atomic_write_handlers[OFI_WRITE_OP_CNT][OFI_DATATYPE_CNT]) (void *dst, const void *src, size_t cnt) = { { OFI_DEFINE_REALNO_HANDLERS(WRITE, NAME, OFI_OP_MIN) }, { OFI_DEFINE_REALNO_HANDLERS(WRITE, NAME, OFI_OP_MAX) }, { OFI_DEFINE_ALL_HANDLERS(WRITE, NAME, OFI_OP_SUM) }, { OFI_DEFINE_ALL_HANDLERS(WRITE, NAME, OFI_OP_PROD) }, { OFI_DEFINE_ALL_HANDLERS(WRITE, NAME, OFI_OP_LOR) }, { OFI_DEFINE_ALL_HANDLERS(WRITE, NAME, OFI_OP_LAND) }, { OFI_DEFINE_INT_HANDLERS(WRITE, NAME, OFI_OP_BOR) }, { OFI_DEFINE_INT_HANDLERS(WRITE, NAME, OFI_OP_BAND) }, { OFI_DEFINE_ALL_HANDLERS(WRITE, NAME, OFI_OP_LXOR) }, { OFI_DEFINE_INT_HANDLERS(WRITE, NAME, OFI_OP_BXOR) }, /* no-op: FI_ATOMIC_READ */ { OFI_OP_NOT_SUPPORTED(READ) }, { OFI_DEFINE_ALL_HANDLERS(WRITE, NAME, OFI_OP_WRITE) }, }; /*************************** * Read-write dispatch table ***************************/ OFI_DEFINE_REALNO_HANDLERS(READWRITE, FUNC, OFI_OP_MIN) OFI_DEFINE_REALNO_HANDLERS(READWRITE, FUNC, OFI_OP_MAX) OFI_DEFINE_ALL_HANDLERS(READWRITE, FUNC, OFI_OP_SUM) OFI_DEFINE_ALL_HANDLERS(READWRITE, FUNC, OFI_OP_PROD) OFI_DEFINE_ALL_HANDLERS(READWRITE, FUNC, OFI_OP_LOR) OFI_DEFINE_ALL_HANDLERS(READWRITE, FUNC, OFI_OP_LAND) OFI_DEFINE_INT_HANDLERS(READWRITE, FUNC, OFI_OP_BOR) OFI_DEFINE_INT_HANDLERS(READWRITE, FUNC, OFI_OP_BAND) OFI_DEFINE_ALL_HANDLERS(READWRITE, FUNC, OFI_OP_LXOR) OFI_DEFINE_INT_HANDLERS(READWRITE, FUNC, OFI_OP_BXOR) OFI_DEFINE_ALL_HANDLERS(READ, FUNC, OFI_OP_READ) OFI_DEFINE_ALL_HANDLERS(READWRITE, FUNC, OFI_OP_WRITE) void (*ofi_atomic_readwrite_handlers[OFI_READWRITE_OP_CNT][OFI_DATATYPE_CNT]) (void *dst, const void *src, void *res, size_t cnt) = { { OFI_DEFINE_REALNO_HANDLERS(READWRITE, NAME, OFI_OP_MIN) }, { OFI_DEFINE_REALNO_HANDLERS(READWRITE, NAME, OFI_OP_MAX) }, { OFI_DEFINE_ALL_HANDLERS(READWRITE, NAME, OFI_OP_SUM) }, { OFI_DEFINE_ALL_HANDLERS(READWRITE, NAME, OFI_OP_PROD) }, { OFI_DEFINE_ALL_HANDLERS(READWRITE, NAME, OFI_OP_LOR) }, { OFI_DEFINE_ALL_HANDLERS(READWRITE, NAME, OFI_OP_LAND) }, { OFI_DEFINE_INT_HANDLERS(READWRITE, NAME, OFI_OP_BOR) }, { OFI_DEFINE_INT_HANDLERS(READWRITE, NAME, OFI_OP_BAND) }, { OFI_DEFINE_ALL_HANDLERS(READWRITE, NAME, OFI_OP_LXOR) }, { 
OFI_DEFINE_INT_HANDLERS(READWRITE, NAME, OFI_OP_BXOR) }, { OFI_DEFINE_ALL_HANDLERS(READ, NAME, OFI_OP_READ) }, { OFI_DEFINE_ALL_HANDLERS(READWRITE, NAME, OFI_OP_WRITE) }, }; /***************************** * Compare-swap dispatch table *****************************/ OFI_DEFINE_ALL_HANDLERS(CSWAP, FUNC, OFI_OP_CSWAP_EQ) OFI_DEFINE_ALL_HANDLERS(CSWAP, FUNC, OFI_OP_CSWAP_NE) OFI_DEFINE_REALNO_HANDLERS(CSWAP, FUNC, OFI_OP_CSWAP_LE) OFI_DEFINE_REALNO_HANDLERS(CSWAP, FUNC, OFI_OP_CSWAP_LT) OFI_DEFINE_REALNO_HANDLERS(CSWAP, FUNC, OFI_OP_CSWAP_GE) OFI_DEFINE_REALNO_HANDLERS(CSWAP, FUNC, OFI_OP_CSWAP_GT) OFI_DEFINE_INT_HANDLERS(CSWAP, FUNC, OFI_OP_MSWAP) void (*ofi_atomic_swap_handlers[OFI_SWAP_OP_CNT][OFI_DATATYPE_CNT]) (void *dst, const void *src, const void *cmp, void *res, size_t cnt) = { { OFI_DEFINE_ALL_HANDLERS(CSWAP, NAME, OFI_OP_CSWAP_EQ) }, { OFI_DEFINE_ALL_HANDLERS(CSWAP, NAME, OFI_OP_CSWAP_NE) }, { OFI_DEFINE_REALNO_HANDLERS(CSWAP, NAME, OFI_OP_CSWAP_LE) }, { OFI_DEFINE_REALNO_HANDLERS(CSWAP, NAME, OFI_OP_CSWAP_LT) }, { OFI_DEFINE_REALNO_HANDLERS(CSWAP, NAME, OFI_OP_CSWAP_GE) }, { OFI_DEFINE_REALNO_HANDLERS(CSWAP, NAME, OFI_OP_CSWAP_GT) }, { OFI_DEFINE_INT_HANDLERS(CSWAP, NAME, OFI_OP_MSWAP) }, };
/*
 * NOTE:
 *
 * Transformers are deprecated. For the kinds of mappings they were
 * used for, they turned out to be too complicated.
 *
 * They have been superseded by the simpler PixelMapper, see pixel-mapper.h
 */
#include "deprecated-transformer.h"
def allreduce_gradients(gradients, current_device, all_devices, comm_index, mean=False, name="GRADIENT_REDUCE"): comm_name = name + "_{}" with ops.device(current_device): comm = create_communicator(name=comm_name.format(comm_index), devices=all_devices) return comm.batch_allreduce(gradients, mean=mean)
/**
 * Incrementally maintains and updates sum and sum of squares of a <i>weighted</i> data sequence.
 *
 * Assume we have already recorded some data sequence elements
 * and know their sum and sum of squares.
 * Assume further, we are to record some more elements
 * and to derive updated values of sum and sum of squares.
 * <p>
 * This method computes those updated values without needing to know the already recorded elements.
 * This is interesting for interactive online monitoring and/or applications that cannot keep the entire huge data sequence in memory.
 * <p>
 * <br>Definition of sum: <tt>sum = Sum ( data[i] * weights[i] )</tt>.
 * <br>Definition of sumOfSquares: <tt>sumOfSquares = Sum ( data[i] * data[i] * weights[i] )</tt>.
 *
 * @param data the additional elements to be incorporated into the sums.
 * @param weights the weight of each element within <tt>data</tt>.
 * @param from the index of the first element within <tt>data</tt> (and <tt>weights</tt>) to consider.
 * @param to the index of the last element within <tt>data</tt> (and <tt>weights</tt>) to consider.
 *        The method incorporates elements <tt>data[from], ..., data[to]</tt>.
 * @param inOut the old values in the following format:
 * <ul>
 * <li><tt>inOut[0]</tt> is the old sum.
 * <li><tt>inOut[1]</tt> is the old sum of squares.
 * </ul>
 * If no data sequence elements have so far been recorded set the values as follows
 * <ul>
 * <li><tt>inOut[0] = 0.0</tt> as the old sum.
 * <li><tt>inOut[1] = 0.0</tt> as the old sum of squares.
 * </ul>
 * The updated values are returned by overwriting the <tt>inOut</tt> array (the method itself is void).
 */
public static void incrementalWeightedUpdate(DoubleArrayList data, DoubleArrayList weights, int from, int to, double[] inOut) {
	int dataSize = data.size();
	checkRangeFromTo(from, to, dataSize);
	if (dataSize != weights.size())
		throw new IllegalArgumentException("from=" + from + ", to=" + to + ", data.size()=" + dataSize + ", weights.size()=" + weights.size());

	double sum = inOut[0];
	double sumOfSquares = inOut[1];
	double[] elements = data.elements();
	double[] w = weights.elements();

	for (int i = from; i <= to; i++) {
		double element = elements[i];
		double weight = w[i];
		double prod = element * weight;

		sum += prod;
		sumOfSquares += element * prod;
	}

	inOut[0] = sum;
	inOut[1] = sumOfSquares;
}
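To make the update rule concrete, here is a small Python check (Python for brevity; the Java above is the actual implementation) that incrementally folding a new batch into (sum, sumOfSquares) matches recomputing both from scratch:

data = [2.0, 5.0, 3.0, 7.0, 1.0]
weights = [1.0, 0.5, 2.0, 1.0, 3.0]

def batch(values, w):
    """Recompute both statistics directly from their definitions."""
    s = sum(v * wi for v, wi in zip(values, w))
    ss = sum(v * v * wi for v, wi in zip(values, w))
    return s, ss

# Record the first three elements, then incrementally add the rest.
s, ss = batch(data[:3], weights[:3])
for v, wi in zip(data[3:], weights[3:]):
    prod = v * wi
    s += prod        # sum += data[i] * weights[i]
    ss += v * prod   # sumOfSquares += data[i] * data[i] * weights[i]

assert (s, ss) == batch(data, weights)
print(s, ss)  # 20.5 86.5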
N = int(input()) A = list(map(int, input().split())) money = [[1000, 0]] ans = 1000 B = [1 for _ in range(N)] #-1 -> buy 1 ->sell 0 -> nothing for k in range(N-1): if A[k] < A[k+1]: B[k] = -1 elif A[k] == A[k+1]: B[k] = 0 for k in range(N): if B[k] == -1: new = [] for j in range(len(money)): if A[k] <= money[j][0]: new.append([money[j][0]%A[k], money[j][1] + money[j][0]//A[k]]) money += new if B[k] == 1: ans = 0 for j in range(len(money)): ans = max(money[j][0] + money[j][1]*A[k], ans) money[0] = [ans, 0] print(ans)
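The state-enumeration above gets the right answer, but for this setup (fixed starting cash, known future prices, integer shares) the standard observation is that it is always optimal to go all-in whenever tomorrow's price is higher. A compact equivalent under that assumption (my rewrite, not the original author's code):

N = 5
A = [100, 130, 130, 70, 96]  # example prices

money = 1000
for k in range(N - 1):
    if A[k] < A[k + 1]:
        # Buy as many shares as possible today, sell them all tomorrow.
        shares, money = divmod(money, A[k])
        money += shares * A[k + 1]
print(money)  # 1768 for the example above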
use rusty_sword_arena::game::{Color, PlayerState}; use rusty_sword_arena::gfx::{Shape, Window}; #[derive(Debug)] pub struct Player { pub state: PlayerState, pub body: Shape, pub sword: Shape, } impl Player { pub fn new(state: PlayerState, window: &Window) -> Self { let body = Shape::new_circle( window, state.radius, state.pos, state.direction, state.color, ); let sword = Shape::new_ring( window, state.weapon.radius, state.pos, state.direction, Color::new(1.0, 0.0, 0.0), ); Self { state, body, sword, } } pub fn update_state(&mut self, state: PlayerState) { self.body.pos = state.pos; self.body.direction = state.direction; self.sword.pos = state.pos; self.sword.direction = state.direction; self.state = state; } }
package slayout import ( "fyne.io/fyne/v2" ) type minLayout struct { minWidth, minHeight float32 } // NewMinWidthLayout creates a new layout with a minimum width. func NewMinWidthLayout(minWidth float32) fyne.Layout { return &minLayout{minWidth: minWidth} } // NewMinHeightLayout creates a new layout with a minimum height. func NewMinHeightLayout(minHeight float32) fyne.Layout { return &minLayout{minHeight: minHeight} } // Layout is called to pack all child objects into a specified size. func (l *minLayout) Layout(objects []fyne.CanvasObject, size fyne.Size) { topLeft := fyne.NewPos(0, 0) for _, child := range objects { child.Move(topLeft) child.Resize(size) } } // MinSize finds the smallest size that satisfies all the child objects. func (l *minLayout) MinSize(objects []fyne.CanvasObject) fyne.Size { min := fyne.NewSize(0, 0) for _, child := range objects { if !child.Visible() { continue } min = min.Max(child.MinSize()) } min = min.Max(fyne.NewSize(l.minWidth, l.minHeight)) return min }
Fermentation is a process that I found intriguing but intimidating, and it took several months before I gathered enough courage to begin trying it for myself. First I read Michael Pollan's Cooked, next was Sally Fallon's Nourishing Traditions, and then I dug even deeper into Sandor Katz's The Art of Fermentation. Still, the idea of leaving food out at room temperature for days, even weeks, seemed frightening to me. Finally one of my best friends got me motivated to give it a try. She has four kids and homeschools but has somehow managed to incorporate fermentation into her regular kitchen routine. After having tried several batches of ferments myself now, I've been pleasantly surprised to discover the relative simplicity and ease of the whole process. Since our CSA share has exploded lately with all kinds of fresh produce, fermenting seems to be the perfect solution when there's a bit more than needed for the week.

So far our favorite ferment has been kimchi, usually made with napa cabbage, but this time I used kale and radishes from our CSA share. It turns out kale and radishes make a wonderful kimchi as well. Like a good sriracha sauce, kimchi seems to go with everything and makes a great condiment to accompany a wide variety of dishes.

There's been a lot of discussion about what type of jars or containers are best for fermentation, and the controversy is beyond the scope of this blog. I have successfully used Weck jars and Le Parfait canning jars, but the type of vessel is not as important as making sure the vegetables are completely submerged under the brine. The photo above shows the kale and radishes prior to being fermented so you can see how much brine was created just from the vegetables after adding salt. To keep the vegetables submerged, I usually place a small glass prep bowl filled with water as a weight directly on top of the vegetables. A plastic bag filled with water and sealed can also be used.

There's so much to be said about fermentation, its history, methods, and benefits, and the above-mentioned books are excellent sources of information. Fermentation is really not as hard or as strange as it might seem, and while it's helpful to research and read, there's a lot to be learned simply by doing and practicing.
CMS leaders vote to allow miniature horses inside the classroom

CHARLOTTE, N.C. - Charlotte-Mecklenburg Schools officials voted to allow miniature horses to be used as service animals for students in the classroom. The policy brought the district in line with the Americans with Disabilities Act.

Board members said the miniature horses would be just like service dogs. They would guide their student everywhere, from the cafeteria to hallways.

"I think that can be a bit distracting," parent Bambi Ahad said.

Even school board members think the distractions caused by a miniature horse would be anything but miniature. "You try to visualize in your mind, a miniature horse coming into your school building and your classroom, and the uproar that especially little kids would have," chair Mary McCray said.

Judy Kidd, with the Classroom Teachers Association, said it's absurd and had no further comments.

The school board updated their policy to match the Americans with Disabilities Act, which lists miniature horses as service animals.

"I think it's a great idea, and I think children are flexible and would eventually adapt," teacher Linda Havrun said. "However, I don't think the classrooms, we don't have the room for it."

Miniature horses are being used as service animals because dogs are considered unclean in some religious cultures.

"They're trained to do what they're doing and typically, you know, you would not want people petting them and talking to them," school board member Paul Bailey said.

To minimize distractions, students and teachers would have to go through specialized training before interacting with the horses. The school board said the jury is still out on how they would handle other issues, like when the horses would eat and go outside.

There's no word yet on whether any students have applied to use a miniature horse as a service animal this school year.
/** * Copyright (c) 2013, <NAME> and <NAME> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * The names <NAME> and <NAME> may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.github.gwtd3.api; import com.github.gwtd3.api.arrays.Array; import com.github.gwtd3.api.core.Color; import com.github.gwtd3.api.core.HSLColor; import com.github.gwtd3.api.core.RGBColor; import com.github.gwtd3.api.core.Transform; import com.github.gwtd3.api.core.Value; import com.github.gwtd3.api.interpolators.Interpolator; import com.github.gwtd3.api.interpolators.InterpolatorFactory; import com.github.gwtd3.api.interpolators.JSNIInterpolatorFactory; import com.github.gwtd3.api.interpolators.JavascriptFunctionInterpolator; import com.github.gwtd3.api.interpolators.JavascriptFunctionInterpolatorDecorator; import com.google.gwt.core.client.JavaScriptObject; import com.google.gwt.core.client.UnsafeNativeLong; /** * Provide access to the d3.interpolat* methods. * <p> * * @author <a href="mailto:<EMAIL>"><NAME></a> * */ public class Interpolators { /** * Returns a string interpolator between the two strings a and b. * <p> * The string interpolator finds numbers embedded in a and b, where each * number is of the form: * * <pre> * {@code * /[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g * } * </pre> * <p> * For each number embedded in b, the interpolator will attempt to find a * corresponding number in a. If a corresponding number is found, a numeric * interpolator is created using interpolateNumber. The remaining parts of * the string b are used as a template: the static parts of the string b * remain constant for the interpolation, with the interpolated numeric * values embedded in the template. * <p> * For example, if a is "300 12px sans-serif", and b is * "500 36px Comic-Sans", two embedded numbers are found. The remaining * static parts of the string are a space between the two numbers (" "), and * the suffix ("px Comic-Sans"). * * The result of the interpolator at t = .5 is "400 24px Comic-Sans". 
* * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<String> interpolateString(final String a, final String b) { return new JavascriptFunctionInterpolatorDecorator<String>( interpolate0(a, b)); } /** * Returns an RGB color space interpolator between the two colors a and b. * <p> * The colors a and b need not be in RGB, but they will be converted to RGB * using {@link D3#rgb(String)}. * <p> * The red, green and blue channels are interpolated linearly in a manner * equivalent to interpolateRound, as fractional channel values are not * allowed. * <p> * Also note that the interpolator modifies the returned color instance, and * thus a copy must be made if you wish to avoid modification. * * @param a * the start color * @param b * the end color * @return the interpolator */ public static final Interpolator<RGBColor> interpolateRgb(final String a, final String b) { return new JavascriptFunctionInterpolatorDecorator<RGBColor>( interpolateRgb0(Colors.rgb(a), Colors.rgb(b))) { @Override public RGBColor cast(final Value v) { return Colors.rgb(v.asString()); } }; } /** * Returns an RGB color space interpolator between the two colors a and b. * <p> * The colors a and b need not be in RGB, but they will be converted to RGB * using {@link D3#rgb(String)}. * <p> * The red, green and blue channels are interpolated linearly in a manner * equivalent to interpolateRound, as fractional channel values are not * allowed. * <p> * Also note that the interpolator modifies the returned color instance, and * thus a copy must be made if you wish to avoid modification. * * @param a * the start color * @param b * the end color * @return the interpolator */ public static final Interpolator<RGBColor> interpolateRgb(final Color a, final Color b) { return new JavascriptFunctionInterpolatorDecorator<RGBColor>( interpolateRgb0(a, b)) { @Override public RGBColor cast(final Value v) { return Colors.rgb(v.asString()); } }; } /** * Returns an HSL color space interpolator between the two colors a and b. * <p> * The colors a and b need not be in HSL, but they will be converted to HSL * using {@link D3#hsl(String)}. * <p> * The hue, saturation and lightness are interpolated linearly in a manner * equivalent to interpolateNumber. (The shortest path between the start and * end hue is used.) * <p> * Also note that the interpolator modifies the returned color instance, and * thus a copy must be made if you wish to avoid modification. * * @param a * the start color * @param b * the end color * @return the interpolator */ public static final Interpolator<HSLColor> interpolateHsl(final String a, final String b) { return new JavascriptFunctionInterpolatorDecorator<HSLColor>( interpolateHsl0(Colors.hsl(a), Colors.hsl(b))) { @Override public HSLColor cast(final Value v) { return Colors.hsl(v.asString()); } }; } /** * Returns an RGB color space interpolator between the two colors a and b. * <p> * The colors a and b need not be in RGB, but they will be converted to RGB * using {@link D3#rgb(String)}. * <p> * The red, green and blue channels are interpolated linearly in a manner * equivalent to interpolateRound, as fractional channel values are not * allowed. * <p> * Also note that the interpolator modifies the returned color instance, and * thus a copy must be made if you wish to avoid modification. 
* * @param a * the start color * @param b * the end color * @return the interpolator */ public static final Interpolator<HSLColor> interpolateHsl(final Color a, final Color b) { return new JavascriptFunctionInterpolatorDecorator<HSLColor>( interpolateHsl0(a, b)) { @Override public HSLColor cast(final Value v) { return Colors.hsl(v.asString()); } }; } /** * Returns a numeric interpolator between the two numbers a and b. The * returned interpolator is equivalent to: * * <pre> * {@code * function interpolate(t) { * return a * (1 - t) + b * t; * } * } * </pre> * <p> * Caution: avoid interpolating to or from the number zero when the * interpolator is used to generate a string (such as with attr). Very small * values, when stringified, may be converted to scientific notation and * cause a temporarily invalid attribute or style property value. For * example, the number 0.0000001 is converted to the string "1e-7". This is * particularly noticeable when interpolating opacity values. To avoid * scientific notation, start or end the transition at 1e-6, which is the * smallest value that is not stringified in exponential notation. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Double> interpolateNumber(final double a, final double b) { return new JavascriptFunctionInterpolatorDecorator<Double>( interpolateNumber0(a, b)) { @Override public Double cast(final Value v) { return new Double(v.asDouble()); } }; } /** * Returns an array interpolator between the two arrays a and b. * <p> * Internally, an array template is created that is the same length in b. * * For each element in b, if there exists a corresponding element in a, a * generic interpolator is created for the two elements using interpolate. * * If there is no such element, the static value from b is used in the * template. * * Then, for the given parameter t, the template's embedded interpolators * are evaluated. The updated array template is then returned. * <p> * For example, if a is the array [0, 1] and b is the array [1, 10, 100], * * then the result of the interpolator for t = .5 is the array [.5, 5.5, * 100]. * * <p> * Note: no defensive copy of the template array is created; modifications * of the returned array may adversely affect subsequent evaluation of the * interpolator. No copy is made because interpolators should be fast, as * they are part of the inner loop of animation. * * @param a * the array a * @param b * the array b * @return the interpolator */ public static final Interpolator<Array<?>> interpolateArray( final Array<?> a, final Array<?> b) { return new JavascriptFunctionInterpolatorDecorator<Array<?>>( interpolateArray0(a, b)) { @Override public Array<?> cast(final Value v) { return v.as(); } }; } /** * Returns an object interpolator between the two objects a and b. * <p> * Internally, an object template is created that has the same properties as * b. * <p> * For each property in b, if there exists a corresponding property in a, a * generic interpolator is created for the two elements using interpolate. * <p> * If there is no such property, the static value from b is used in the * template. * <p> * Then, for the given parameter t, the template's embedded interpolators * are evaluated and the updated object template is then returned. * <p> * For example, if a is the object {x: 0, y: 1} and b is the object {x: 1, * y: 10, z: 100}, the result of the interpolator for t = .5 is the object * {x: .5, y: 5.5, z: 100}. 
* <p> * Object interpolation is particularly useful for dataspace interpolation, * where data is interpolated rather than attribute values. For example, you * can interpolate an object which describes an arc in a pie chart, and then * use d3.svg.arc to compute the new SVG path data. * <p> * Note: no defensive copy of the template object is created; modifications * of the returned object may adversely affect subsequent evaluation of the * interpolator. No copy is made because interpolators should be fast, as * they are part of the inner loop of animation. * * @param a * the object a * @param b * the object b * @return the interpolator */ public static final <T extends JavaScriptObject> Interpolator<T> interpolateObject( final T a, final T b) { return new JavascriptFunctionInterpolatorDecorator<T>( interpolateObject0(a, b)) { @Override public T cast(final Value v) { return v.<T> as(); } }; } /** * Returns an interpolator between the two 2D affine transforms represented * by a and b. Each transform is decomposed to a standard representation of * translate, rotate, x-skew and scale; these component transformations are * then interpolated. This behavior is standardized by CSS: see matrix * decomposition for animation. * * @param a * the object a * @param b * the object b * @return the interpolator */ public static final Interpolator<Transform> interpolateTransform( final Transform a, final Transform b) { return new JavascriptFunctionInterpolatorDecorator<Transform>( interpolateTransform0(a, b)) { @Override public Transform cast(final Value v) { return Transform.parse(v.asString()); } }; } /** * See {@link #interpolateNumber(double, double)}. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Double> interpolateNumber(final int a, final int b) { return interpolateNumber((double) a, (double) b); } /** * See {@link #interpolate(double, double)}. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Double> interpolateNumber(final byte a, final byte b) { return interpolateNumber((double) a, (double) b); } /** * See {@link #interpolate(double, double)}. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Double> interpolateNumber(final float a, final float b) { return interpolateNumber((double) a, (double) b); } /** * See {@link #interpolate(double, double)}. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Double> interpolateNumber(final long a, final long b) { return interpolateNumber((double) a, (double) b); } /** * See {@link #interpolate(double, double)}. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Double> interpolateNumber(final short a, final short b) { return interpolateNumber((double) a, (double) b); } /** * Returns a numeric interpolator between the two numbers a and b; the * interpolator is similar to {@link #interpolate(double, double)}, except * it will round the resulting value to the nearest integer. * <p> * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Long> interpolateRound(final double a, final double b) { return new JavascriptFunctionInterpolatorDecorator<Long>( interpolateRound0(a, b)) { @Override public Long cast(final Value v) { return new Long((long) v.asDouble()); } }; } /** * See {@link #interpolateRound(double, double)}. 
* * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Byte> interpolateRound(final byte a, final byte b) { return new JavascriptFunctionInterpolatorDecorator<Byte>( interpolateRound0(a, b)) { @Override public Byte cast(final Value v) { return new Byte(v.asByte()); } }; } /** * See {@link #interpolateRound(double, double)}. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Character> interpolateRound(final char a, final char b) { return new JavascriptFunctionInterpolatorDecorator<Character>( interpolateRound0(a, b)) { @Override public Character cast(final Value v) { return new Character(v.asChar()); } }; } /** * See {@link #interpolateRound(double, double)}. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Integer> interpolateRound(final int a, final int b) { return new JavascriptFunctionInterpolatorDecorator<Integer>( interpolateRound0(a, b)) { @Override public Integer cast(final Value v) { return new Integer(v.asInt()); } }; } /** * See {@link #interpolateRound(double, double)}. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Long> interpolateRound(final long a, final long b) { return new JavascriptFunctionInterpolatorDecorator<Long>( interpolateRound0(a, b)) { @Override public Long cast(final Value v) { // this will not work !!! // see // https://developers.google.com/web-toolkit/doc/latest/DevGuideCodingBasicsJSNI#important // v.asLong() return new Long((long) v.asDouble()); } }; } /** * See {@link #interpolateRound(double, double)}. * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Short> interpolateRound(final short a, final short b) { return new JavascriptFunctionInterpolatorDecorator<Short>( interpolateRound0(a, b)) { @Override public Short cast(final Value v) { return new Short(v.asShort()); } }; } /** * Returns a smooth interpolator between the two views a and b of a * two-dimensional plane, based on “Smooth and efficient zooming and * panning” by <NAME> and <NAME>. * <p> * Each view is defined as an array of three numbers: cx, cy and width. The * first two coordinates cx, cy represent the center of the viewport; the * last coordinate width represents the size of the viewport. * <p> * The returned interpolator also has a duration property which encodes the * recommended transition duration in milliseconds. This duration is based * on the path length of the curved trajectory through x,y space. If you * want to a slower or faster transition, multiply this by an arbitrary * scale factor (V as described in the original paper). 
* <p> * * * @param a * the start * @param b * the end * @return the interpolator */ public static final Interpolator<Array<Double>> interpolateZoom( final Array<Double> a, final Array<Double> b) { return new JavascriptFunctionInterpolatorDecorator<Array<Double>>( interpolateZoom0(a, b)) { @Override public Array<Double> cast(final Value v) { return v.as(); } }; } // public static final <T> Interpolator<T> interpolate(final T a, final T b) // { // return new JavascriptFunctionInterpolatorDecorator<T>(interpolate0(a, b)) // { // @Override // public T interpolate(final double t) { // return delegate.interpolate(t).as(); // } // }; // } /** * Actual JSNI implementation; the result is auto-casted to a * {@link JavascriptFunctionInterpolator} and can be used by more specific * versions of the * * @param a * @param b * @return */ @UnsafeNativeLong private static final native <T> JavascriptFunctionInterpolator interpolate0( T a, T b) /*-{ var result = $wnd.d3.interpolate(a, b); return result; }-*/; @UnsafeNativeLong private static final native JavascriptFunctionInterpolator interpolateNumber0( double a, double b) /*-{ return $wnd.d3.interpolateNumber(a, b); }-*/; @UnsafeNativeLong private static final native JavascriptFunctionInterpolator interpolateRound0( double a, double b) /*-{ return $wnd.d3.interpolateRound(a, b); }-*/; @UnsafeNativeLong private static final native JavascriptFunctionInterpolator interpolateZoom0( Array<Double> a, Array<Double> b) /*-{ return $wnd.d3.interpolateZoom(a, b); }-*/; @UnsafeNativeLong private static final native JavascriptFunctionInterpolator interpolateRgb0( Color a, Color b) /*-{ return $wnd.d3.interpolateRgb(a, b); }-*/; private static final native JavascriptFunctionInterpolator interpolateHsl0( Color a, Color b) /*-{ return $wnd.d3.interpolateHsl(a, b); }-*/; // private static final native JavascriptFunctionInterpolator // interpolateHcl0(String a, String b) /*-{ // return $wnd.d3.interpolateHcl(a, b); // }-*/; // // private static final native JavascriptFunctionInterpolator // interpolateLab0(String a, String b) /*-{ // return $wnd.d3.interpolateLab(a, b); // }-*/; private static final native JavascriptFunctionInterpolator interpolateObject0( JavaScriptObject a, JavaScriptObject b) /*-{ return $wnd.d3.interpolateObject(a, b); }-*/; private static final native JavascriptFunctionInterpolator interpolateArray0( Array<?> a, Array<?> b) /*-{ return $wnd.d3.interpolateArray(a, b); }-*/; private static final native JavascriptFunctionInterpolator interpolateTransform0( Transform a, Transform b) /*-{ return $wnd.d3.interpolateTransform(a, b); }-*/; // FIXME access to interpolators does not work as expected // see issue #42 // private static final native JavascriptFunctionInterpolator // interpolateTransform0(String a, String b) /*-{ // return $wnd.d3.interpolateTransform(a, b); // }-*/; /** * The interpolator factory used by * {@link #interpolateNumber(double, double)} */ // public static final InterpolatorFactory<Double> interpolateNumber = // interpolateNumberFactory(); /** * The interpolator factory used by * {@link #interpolateRound(double, double)} */ // public static final InterpolatorFactory<Long> interpolateRound = // interpolateRoundFactory(); /** * The interpolator factory used by * {@link #interpolateString(String, String)} */ // public static final InterpolatorFactory<String> interpolateString = // interpolateStringFactory(); /** * The interpolator factory used by {@link #interpolateRgb(Color, Color)} */ // FIXME: providing access to the interpolator 
factory does not work as is // since it // public static final InterpolatorFactory<RGBColor> interpolateRgb = // interpolateRgbFactory(); // public static final InterpolatorFactory<HCLColor> interpolateHcl = // interpolateHclFactory(); /** * The interpolator factory used by {@link #interpolateHsl(Color, Color)} */ // public static final InterpolatorFactory<HSLColor> interpolateHsl = // interpolateHslFactory(); // public static final InterpolatorFactory<LabColor> interpolateLab = // interpolateLabFactory(); /** * The interpolator factory used by {@link #interpolateArray(Array, Array)} */ // public static final InterpolatorFactory<Array<?>> interpolateArray = // interpolateArrayFactory(); /** * The interpolator factory used by * {@link #interpolateObject(JavaScriptObject, JavaScriptObject)} */ // public static final InterpolatorFactory<JavaScriptObject> // interpolateObject = interpolateObjectFactory(); // public static final InterpolatorFactory<String> interpolateTransform = // interpolateTransformFactory(); private static final native JSNIInterpolatorFactory<Double> interpolateNumberFactory()/*-{ return $wnd.d3.interpolateNumber; }-*/; // // private static final native JSNIInterpolatorFactory<Long> // interpolateRoundFactory()/*-{ // return $wnd.d3.interpolateRound; // }-*/; // // private static final native JSNIInterpolatorFactory<String> // interpolateStringFactory()/*-{ // return $wnd.d3.interpolateString; // }-*/; // // private static final native JSNIInterpolatorFactory<RGBColor> // interpolateRgbFactory()/*-{ // return $wnd.d3.interpolateRgb; // }-*/; // // private static final native JSNIInterpolatorFactory<HSLColor> // interpolateHslFactory()/*-{ // return $wnd.d3.interpolateHsl; // }-*/; // private static final native JSNIInterpolatorFactory<HCLColor> // interpolateHclFactory()/*-{ // return $wnd.d3.interpolateHcl; // }-*/; // // private static final native JSNIInterpolatorFactory<LabColor> // interpolatLabFactory()/*-{ // return $wnd.d3.interpolateLab; // }-*/; // private static final native JSNIInterpolatorFactory<Array<?>> // interpolateArrayFactory()/*-{ // return $wnd.d3.interpolateArray; // }-*/; // // private static final native JSNIInterpolatorFactory<JavaScriptObject> // interpolateObjectFactory()/*-{ // return $wnd.d3.interpolateObject; // }-*/; // private static final native JSNIInterpolatorFactory<Transform> // interpolateTransformFactory()/*-{ // return $wnd.d3.interpolateTransform; // }-*/; /** * The array of built-in interpolator factories, as used by #interpolate(). * <p> * Additional interpolator factories may be pushed onto the end of this * array. * <p> * Each factory may return an interpolator, if it supports interpolating the * two specified input values; otherwise, the factory should return a falsey * value and other interpolators will be tried. * * @return the array of interpolator factories */ public static final native Array<InterpolatorFactory<?>> interpolators()/*-{ return $wnd.d3.interpolators; }-*/; }
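As a brief illustration of how the interpolators above are consumed, here is a hedged usage sketch. It assumes GWT client code with d3.js loaded on the page, and that Interpolator<T> exposes an interpolate(double t) method returning T, as the commented-out decorator code suggests; it is not part of the file above.

// Hypothetical client-side usage of the Interpolators facade; assumes d3.js
// is present and Interpolator<T> exposes interpolate(double t).
Interpolator<String> font = Interpolators.interpolateString(
        "300 12px sans-serif", "500 36px Comic-Sans");
// Numeric tokens interpolate, static text is kept: expected "400 24px Comic-Sans".
String mid = font.interpolate(0.5);

// Starting at 1e-6 rather than 0 avoids the "1e-7" stringification pitfall
// described in the interpolateNumber javadoc.
Interpolator<Double> opacity = Interpolators.interpolateNumber(1e-6, 1.0);
Double quarter = opacity.interpolate(0.25);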
[Update] Here are the 10,000+ Songs that Google Pixel 2's Now Playing Feature can Recognize

UPDATE 10/19/2017: Google reached out to us to inform us that the database is updated weekly, is regional, and can recognize tens of thousands of songs. Please read this follow-up article for more details.

One of the Google Pixel 2's newest features is called Now Playing: it automatically detects songs playing in the background and displays information about them on the lock screen. Google says the ambient-music recognition feature can work offline and does not need to offload any data to its servers to aid in song recognition. Furthermore, the company states that its database can match over 10,000 tunes and that this database can be updated with support for recognizing more songs in the future. But exactly which songs has Google chosen for its initial Now Playing recognition database?

After some digging, we can now share the full, 10,000+ list of songs that the Google Pixel 2's Now Playing feature can recognize. We achieved this by pulling the 53MB matcher.leveldb file located in /system/etc/ambient. LevelDB is a key-value storage library, so we presumed this file contained the list of songs for the Now Playing feature. We sent the file to Kieron Quinn, known on our forums as XDA Recognized Contributor Quinny899, who confirmed that it was indeed the database needed by the Pixel Ambient Services app (which provides the Now Playing feature). When trying to run the app, it would crash stating that it "could not locate music recognizer core shard." With the help of APKTool, Quinny899 was able to find the code where this error message was thrown. Lo and behold, the file that Pixel Ambient Services was looking for is the matcher.leveldb file.

After confirming this, Quinny899 ran a script to dump the contents of the database, then another script of his that parsed the result to fix the formatting. The result is the "Google Pixel Ambient Song List," a table of 17,300 songs containing the song name and artist of every tune that Now Playing can recognize. Why 17,300? Quinny899 isn't sure if this is all of the songs, as it's possible that the script did not dump all of them. Some songs appear more than once, too, but we doubt there are thousands of duplicates in there.

Keep in mind that while this Now Playing song list is most likely comprehensive for now, it may not be in the future. This is because, as mentioned previously, Google will be updating its database. It's unclear whether updating the database will require an OTA update or if the Pixel Ambient Services app can update the database on its own, though.

Update on "AmbientSense"

We earlier believed this feature to be related to previous research on a technology called "AmbientSense," given the matching name and topic, but Google reached out to us to state that their Now Playing feature is not based on AmbientSense. Presumably, this means that the app's package name matching that AmbientSense paper is not relevant. We've reached out to Google for further information on the Now Playing feature and will update our articles when we hear back.
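For readers who want to reproduce the dump step, the sketch below shows one way to iterate over a LevelDB database in Python using the plyvel bindings. The path and the value decoding are assumptions for illustration only: LevelDB databases are normally directories, the internal key/value encoding of matcher.leveldb is undocumented, and Quinny899's actual scripts were not published in this article.

# Minimal LevelDB dump sketch (pip install plyvel). The path and the decoding
# are placeholders; the real matcher.leveldb encoding is not documented here.
import plyvel

db = plyvel.DB("matcher.leveldb", create_if_missing=False)
for key, value in db:
    # Keys and values are raw bytes; decode defensively since the format is unknown.
    print(key.hex(), value[:60].decode("utf-8", errors="replace"))
db.close()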
package net.minecraft.block; import java.util.Random; import net.minecraft.block.Block; import net.minecraft.block.BlockContainer; import net.minecraft.block.material.Material; import net.minecraft.block.properties.IProperty; import net.minecraft.block.properties.PropertyDirection; import net.minecraft.block.properties.PropertyInteger; import net.minecraft.block.state.BlockState; import net.minecraft.block.state.IBlockState; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.init.Items; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.tileentity.TileEntity; import net.minecraft.tileentity.TileEntityBanner; import net.minecraft.util.AxisAlignedBB; import net.minecraft.util.BlockPos; import net.minecraft.util.EnumFacing; import net.minecraft.world.IBlockAccess; import net.minecraft.world.World; public class BlockBanner extends BlockContainer { public static final PropertyDirection field_176449_a = PropertyDirection.func_177712_a("facing", EnumFacing.Plane.HORIZONTAL); public static final PropertyInteger field_176448_b = PropertyInteger.func_177719_a("rotation", 0, 15); private static final String __OBFID = "CL_00002143"; protected BlockBanner() { super(Material.field_151575_d); float var1 = 0.25F; float var2 = 1.0F; this.func_149676_a(0.5F - var1, 0.0F, 0.5F - var1, 0.5F + var1, var2, 0.5F + var1); } public AxisAlignedBB func_180640_a(World p_180640_1_, BlockPos p_180640_2_, IBlockState p_180640_3_) { return null; } public AxisAlignedBB func_180646_a(World p_180646_1_, BlockPos p_180646_2_) { this.func_180654_a(p_180646_1_, p_180646_2_); return super.func_180646_a(p_180646_1_, p_180646_2_); } public boolean func_149686_d() { return false; } public boolean func_176205_b(IBlockAccess p_176205_1_, BlockPos p_176205_2_) { return true; } public boolean func_149662_c() { return false; } public TileEntity func_149915_a(World p_149915_1_, int p_149915_2_) { return new TileEntityBanner(); } public Item func_180660_a(IBlockState p_180660_1_, Random p_180660_2_, int p_180660_3_) { return Items.field_179564_cE; } public Item func_180665_b(World p_180665_1_, BlockPos p_180665_2_) { return Items.field_179564_cE; } public void func_180653_a(World p_180653_1_, BlockPos p_180653_2_, IBlockState p_180653_3_, float p_180653_4_, int p_180653_5_) { TileEntity var6 = p_180653_1_.func_175625_s(p_180653_2_); if(var6 instanceof TileEntityBanner) { ItemStack var7 = new ItemStack(Items.field_179564_cE, 1, ((TileEntityBanner)var6).func_175115_b()); NBTTagCompound var8 = new NBTTagCompound(); var6.func_145841_b(var8); var8.func_82580_o("x"); var8.func_82580_o("y"); var8.func_82580_o("z"); var8.func_82580_o("id"); var7.func_77983_a("BlockEntityTag", var8); func_180635_a(p_180653_1_, p_180653_2_, var7); } else { super.func_180653_a(p_180653_1_, p_180653_2_, p_180653_3_, p_180653_4_, p_180653_5_); } } public void func_180657_a(World p_180657_1_, EntityPlayer p_180657_2_, BlockPos p_180657_3_, IBlockState p_180657_4_, TileEntity p_180657_5_) { if(p_180657_5_ instanceof TileEntityBanner) { ItemStack var6 = new ItemStack(Items.field_179564_cE, 1, ((TileEntityBanner)p_180657_5_).func_175115_b()); NBTTagCompound var7 = new NBTTagCompound(); p_180657_5_.func_145841_b(var7); var7.func_82580_o("x"); var7.func_82580_o("y"); var7.func_82580_o("z"); var7.func_82580_o("id"); var6.func_77983_a("BlockEntityTag", var7); func_180635_a(p_180657_1_, p_180657_3_, var6); } else { super.func_180657_a(p_180657_1_, p_180657_2_, p_180657_3_, 
p_180657_4_, (TileEntity)null); } } public static class BlockBannerHanging extends BlockBanner { private static final String __OBFID = "CL_00002140"; public BlockBannerHanging() { this.func_180632_j(this.field_176227_L.func_177621_b().func_177226_a(field_176449_a, EnumFacing.NORTH)); } public void func_180654_a(IBlockAccess p_180654_1_, BlockPos p_180654_2_) { EnumFacing var3 = (EnumFacing)p_180654_1_.func_180495_p(p_180654_2_).func_177229_b(field_176449_a); float var4 = 0.0F; float var5 = 0.78125F; float var6 = 0.0F; float var7 = 1.0F; float var8 = 0.125F; this.func_149676_a(0.0F, 0.0F, 0.0F, 1.0F, 1.0F, 1.0F); switch(BlockBanner.SwitchEnumFacing.field_180370_a[var3.ordinal()]) { case 1: default: this.func_149676_a(var6, var4, 1.0F - var8, var7, var5, 1.0F); break; case 2: this.func_149676_a(var6, var4, 0.0F, var7, var5, var8); break; case 3: this.func_149676_a(1.0F - var8, var4, var6, 1.0F, var5, var7); break; case 4: this.func_149676_a(0.0F, var4, var6, var8, var5, var7); } } public void func_176204_a(World p_176204_1_, BlockPos p_176204_2_, IBlockState p_176204_3_, Block p_176204_4_) { EnumFacing var5 = (EnumFacing)p_176204_3_.func_177229_b(field_176449_a); if(!p_176204_1_.func_180495_p(p_176204_2_.func_177972_a(var5.func_176734_d())).func_177230_c().func_149688_o().func_76220_a()) { this.func_176226_b(p_176204_1_, p_176204_2_, p_176204_3_, 0); p_176204_1_.func_175698_g(p_176204_2_); } super.func_176204_a(p_176204_1_, p_176204_2_, p_176204_3_, p_176204_4_); } public IBlockState func_176203_a(int p_176203_1_) { EnumFacing var2 = EnumFacing.func_82600_a(p_176203_1_); if(var2.func_176740_k() == EnumFacing.Axis.Y) { var2 = EnumFacing.NORTH; } return this.func_176223_P().func_177226_a(field_176449_a, var2); } public int func_176201_c(IBlockState p_176201_1_) { return ((EnumFacing)p_176201_1_.func_177229_b(field_176449_a)).func_176745_a(); } protected BlockState func_180661_e() { return new BlockState(this, new IProperty[]{field_176449_a}); } } public static class BlockBannerStanding extends BlockBanner { private static final String __OBFID = "CL_00002141"; public BlockBannerStanding() { this.func_180632_j(this.field_176227_L.func_177621_b().func_177226_a(field_176448_b, Integer.valueOf(0))); } public void func_176204_a(World p_176204_1_, BlockPos p_176204_2_, IBlockState p_176204_3_, Block p_176204_4_) { if(!p_176204_1_.func_180495_p(p_176204_2_.func_177977_b()).func_177230_c().func_149688_o().func_76220_a()) { this.func_176226_b(p_176204_1_, p_176204_2_, p_176204_3_, 0); p_176204_1_.func_175698_g(p_176204_2_); } super.func_176204_a(p_176204_1_, p_176204_2_, p_176204_3_, p_176204_4_); } public IBlockState func_176203_a(int p_176203_1_) { return this.func_176223_P().func_177226_a(field_176448_b, Integer.valueOf(p_176203_1_)); } public int func_176201_c(IBlockState p_176201_1_) { return ((Integer)p_176201_1_.func_177229_b(field_176448_b)).intValue(); } protected BlockState func_180661_e() { return new BlockState(this, new IProperty[]{field_176448_b}); } } // $FF: synthetic class static final class SwitchEnumFacing { // $FF: synthetic field static final int[] field_180370_a = new int[EnumFacing.values().length]; private static final String __OBFID = "CL_00002142"; static { try { field_180370_a[EnumFacing.NORTH.ordinal()] = 1; } catch (NoSuchFieldError var4) { ; } try { field_180370_a[EnumFacing.SOUTH.ordinal()] = 2; } catch (NoSuchFieldError var3) { ; } try { field_180370_a[EnumFacing.WEST.ordinal()] = 3; } catch (NoSuchFieldError var2) { ; } try { field_180370_a[EnumFacing.EAST.ordinal()] = 
4; } catch (NoSuchFieldError var1) { ; } } } }
Few 18th-century authors have captured contemporary imaginations so thoroughly as the novelist Jane Austen, who died 200 years ago this year. Among British authors, probably only Shakespeare gets as much attention in film, theater and popular culture. And the Austen craze shows no signs of waning — especially not in an anniversary year, when fans are expected to descend en masse on Hampshire, the southern English county where the novelist lived and wrote. Special Austen-related exhibitions, performances, lectures and guided walks will be on offer throughout 2017 in both the historic city of Winchester, where Austen died, and the nearby towns where she composed “Emma,” “Persuasion” and other beloved works. If there’s a Jewish angle here, it eludes me. But for Austen fans of any (ahem) persuasion, the anniversary activity makes a case for exploring both the rich Jewish heritage and the unspoiled charm of England’s provincial south. Winchester is an easy side trip from London; it’s less than two hours by car or train, tucked inland between the larger coastal cities of Southampton and Brighton. Today Winchester is a picturesque backwater of 40,000 … but a millennium ago, it was the capital of England and a major hub of Jewish life. Why Winchester? According to laws of the time, Jewish activities had to be registered in certain cities; Winchester was one. Much of that activity was money-lending, so Jews became prominent in finance (their earliest recorded presence was one Jewess’ payment of a 15-pound fine in order to avoid marriage). In addition to hosting a royal castle for King Henry III, who took Jews under his protection, Winchester was on a major trade route and hosted fairs that drew merchants from around England. By the mid-1200s, Winchester had a Jewish mayor — wool merchant Simon Le Draper — and three financial dynasties headed by Jewish women, including the notorious Licoricia, who controlled vast sums but was eventually murdered during a robbery. Modern scholars think Licoricia is probably buried along with generations of medieval Jews in the historic Winchester cemetery. Jewry Street, which runs through the center of Winchester, was the site of a medieval synagogue and Jewish merchants’ homes. But no real Jewish presence remains; the community faced persecution before England expelled its Jews in 1290, and the nearest Jewish congregations today are in Southampton. Henry’s castle fared a little better. You can tour the remaining Great Hall, a gloomy stone edifice whose arches and stained glass windows give it a churchy feel. The town itself is endlessly picturesque, with its old wooden mill, burbling river and cobblestoned lanes. Jane Austen is buried at Winchester Cathedral; a permanent exhibition on her life comes with a tour (followed, of course, by tea). Austen arrived in Winchester a few months before dying at (gulp) 41, but despite her brief tenure, the city will host three exhibitions beginning in late spring. Two explore Austen’s medical travails: “Jane and her Alton Apothecary” is at the Allen Gallery, a charming, vintage space with displays of English porcelain, while “Jane’s Winchester: Malady and Medicine” traces the author’s last months as a patient and the role of medicine in her life and books.
The latter will be on view in the City Space at Winchester Discovery Center, alongside “The Mysterious Miss Austen,” which features portraits of Jane together with manuscripts, silk coats and purses, and other memorabilia in partnership with the Jane Austen House Museum. True Austen fans will want to make the half-hour pilgrimage through rolling English countryside to the village of Chawton, where the House Museum occupies a graceful brick manse. Austen lived here with her mother and sister for much of her adult life, composing or revising her best-known works at a small round table with views of her verdant gardens and the village green. That writing table is still on view, along with her pianoforte, portraits and turquoise jewelry. For the anniversary year, the House Museum has unveiled the exhibition “Jane Austen in 41 Objects” (through December). Other Austen-related events take place at the ports of Gosport and Southampton. But first-time area visitors should consider a visit to the nearby village of Selborne, where the Gilbert White and Oates estate offers a glimpse into England’s age of exploration. Set on 25 acres of gardens, the Rev. White’s manse greets visitors with a tea parlor and exhibits dedicated to the two Oates explorers — Frank, who sailed to Africa and the Americas in the 1800s, and Captain Lawrence, who journeyed to the South Pole in 1911 and memorably headed to his death in an Antarctic blizzard with the quintessentially British last words, “I am just going outside and may be some time.”
import matplotlib
matplotlib.use('Agg')  # select a non-interactive backend before importing pyplot
from matplotlib import pyplot as plt
import seaborn as sns
import pandas as pd
from scipy import stats

biome = pd.read_table('cold/biome.txt', squeeze=True, index_col=0)
computed = pd.read_table('cold/sample.computed.tsv', index_col=0)
total = computed.insertsHQ
print("Loaded basic")

meta = pd.read_table('/g/bork1/coelho/DD_DeCaF/genecats.cold/selected-cleaned-metadata-100.tsv', index_col=0)
marine_index = biome.index[biome == 'marine']
meta_marine = meta.loc[marine_index]

csamples = pd.read_table('tables/cogs.counts.txt', index_col=0)
scotu = csamples.iloc[:, 1:].mean(1)
gene = csamples['0']
gene_pc = gene / scotu

# Marine samples: one density curve per environmental feature with >= 20 samples.
groups = []
fig, ax = plt.subplots()
for f in set(meta_marine.env_feature):
    sel = meta_marine.index[meta_marine.env_feature == f]
    if len(sel) < 20:
        continue
    values = gene_pc[sel].values
    values.sort()
    # Drop the lowest and highest sample before plotting the density.
    sns.distplot(values[1:-1], label=f, hist=False, ax=ax)
    groups.append(values)
ax.legend(loc='best')
ax.set_xlabel('Number of conspecific genes')
ax.set_ylabel('Density')
fig.tight_layout()
fig.savefig('plots/gpscotu-marine-sub.svg')
print(stats.kruskal(*groups))

# Soil samples from study ERP009498, grouped by local soil classification.
meta_soil = meta.loc[meta.study_accession == 'ERP009498']
groups = []
fig, ax = plt.subplots()
for f in set(meta_soil['soil_taxonomic/local classification']):
    sel = meta_soil['soil_taxonomic/local classification'] == f
    sel = sel.index[sel]
    if len(sel) < 20:
        continue
    values = gene_pc[sel].values
    values.sort()
    sns.distplot(values[1:-1], label=f, hist=False, ax=ax)
    groups.append(values)
ax.legend(loc='best')
ax.set_xlabel('Number of conspecific genes')
ax.set_ylabel('Density')
fig.tight_layout()
fig.savefig('plots/gpscotu-soil-sub.svg')
print(stats.kruskal(*groups))
/**
 * Called when the last forwarded number changes for a call. With IMS, the last forwarded
 * number changes due to a supplemental service notification, so it is not present at the
 * start of the call.
 *
 * @param call The call.
 */
public void onLastForwardedNumberChange(Call call) {
    final List<CallUpdateListener> listeners = mCallUpdateListenerMap.get(call.getId());
    if (listeners != null) {
        for (CallUpdateListener listener : listeners) {
            listener.onLastForwardedNumberChange();
        }
    }
}
package org.firstinspires.ftc.teamcode;

import com.qualcomm.robotcore.eventloop.opmode.Autonomous;

@Autonomous(name="Test Auto.")
public class testAutonomous extends MetaAutomation {

    // Not referenced in this op mode; kept from an earlier detection approach.
    enum GoldView { ONE, TWO, OUT }

    @Override
    public void runOpMode() {
        initVuforia();
        initTfod();
        waitForStart();

        // Sample the minerals for up to 10 seconds and report the gold position.
        GoldPosition mineralPosition = enhancedMineralDetection(10000);
        switch (mineralPosition) {
            case left:
                telemetry.addData("Final Decision", "LEFT");
                break;
            case center:
                telemetry.addData("Final Decision", "CENTER");
                break;
            case right:
                telemetry.addData("Final Decision", "RIGHT");
                break;
            default:
                telemetry.addData("Final Decision", "CENTER -> undecided");
                break;
        }
        telemetry.update();
        sleep(10000);
        tfod.shutdown();
    }
}
package ecobee

import (
	"fmt"
)

// Selections mirrors the ecobee API "selection" object. Each Include* field
// holds the literal string "true" or "false", which BuildSelections splices
// unquoted into the JSON request body.
type Selections struct {
	IncludeRuntime              string `json:"includeRuntime"`
	IncludeExtendedRuntime      string `json:"includeExtendedRuntime"`
	IncludeElectricity          string `json:"includeElectricity"`
	IncludeSettings             string `json:"includeSettings"`
	IncludeLocation             string `json:"includeLocation"`
	IncludeProgram              string `json:"includeProgram"`
	IncludeEvents               string `json:"includeEvents"`
	IncludeDevice               string `json:"includeDevice"`
	IncludeTechnician           string `json:"includeTechnician"`
	IncludeUtility              string `json:"includeUtility"`
	IncludeAlerts               string `json:"includeAlerts"`
	IncludeWeather              string `json:"includeWeather"`
	IncludeOemConfig            string `json:"includeOemConfig"`
	IncludeEquipmentStatus      string `json:"includeEquipmentStatus"`
	IncludeNotificationSettings string `json:"includeNotificationSettings"`
	IncludePrivacy              string `json:"includePrivacy"`
	IncludeVersion              string `json:"includeVersion"`
	IncludeSecuritySettings     string `json:"includeSecuritySettings"`
	IncludeSensors              string `json:"includeSensors"`
}

// BuildSelections renders the JSON request body for a "registered" selection.
func (s *Selections) BuildSelections() string {
	return fmt.Sprintf(`{"selection":{"selectionType":"registered","selectionMatch":"","includeRuntime":%s,"includeExtendedRuntime":%s,"includeElectricity":%s,"includeSettings":%s,"includeLocation":%s,"includeProgram":%s,"includeEvents":%s,"includeDevice":%s,"includeTechnician":%s,"includeUtility":%s,"includeAlerts":%s,"includeWeather":%s,"includeOemConfig":%s,"includeEquipmentStatus":%s,"includeNotificationSettings":%s,"includePrivacy":%s,"includeVersion":%s,"includeSecuritySettings":%s,"includeSensors":%s}}`,
		s.IncludeRuntime,
		s.IncludeExtendedRuntime,
		s.IncludeElectricity,
		s.IncludeSettings,
		s.IncludeLocation,
		s.IncludeProgram,
		s.IncludeEvents,
		s.IncludeDevice,
		s.IncludeTechnician,
		s.IncludeUtility,
		s.IncludeAlerts,
		s.IncludeWeather,
		s.IncludeOemConfig,
		s.IncludeEquipmentStatus,
		s.IncludeNotificationSettings,
		s.IncludePrivacy,
		s.IncludeVersion,
		s.IncludeSecuritySettings,
		s.IncludeSensors,
	)
}
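Hand-building JSON with fmt.Sprintf is fragile: a missing comma or a stray value silently corrupts the request. A minimal alternative sketch follows, letting encoding/json build the same envelope. The selectionBody type, its bool representation, and the field subset are hypothetical, not part of this package; the real package stores the flags as the strings "true"/"false" instead.

package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical shape of the "selection" payload with bool flags.
type selectionBody struct {
	SelectionType  string `json:"selectionType"`
	SelectionMatch string `json:"selectionMatch"`
	IncludeRuntime bool   `json:"includeRuntime"`
	IncludeAlerts  bool   `json:"includeAlerts"`
	// ... the remaining include* flags would follow the same pattern.
}

type selectionEnvelope struct {
	Selection selectionBody `json:"selection"`
}

func main() {
	body, err := json.Marshal(selectionEnvelope{Selection: selectionBody{
		SelectionType:  "registered",
		IncludeRuntime: true,
	}})
	if err != nil {
		panic(err)
	}
	// Prints: {"selection":{"selectionType":"registered","selectionMatch":"","includeRuntime":true,"includeAlerts":false}}
	fmt.Println(string(body))
}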
// The polygon should exist entirely within the thick bands around all original roads -- it just // carves up part of that space, doesn't reach past it. // Also returns a list of labeled polygons for debugging. pub fn intersection_polygon( i: &Intersection, roads: &mut BTreeMap<OriginalRoad, Road>, timer: &mut Timer, ) -> Result<(Polygon, Vec<(String, Polygon)>), String> { if i.roads.is_empty() { panic!("{} has no roads", i.id); } // Turn all of the incident roads into two PolyLines (the "forwards" and "backwards" borders of // the road, if the roads were oriented to both be incoming to the intersection), both ending // at the intersection, and the last segment of the center line. // TODO Maybe express the two incoming PolyLines as the "right" and "left" let mut lines: Vec<(OriginalRoad, Line, PolyLine, PolyLine)> = Vec::new(); for id in &i.roads { let r = &roads[id]; let pl = if r.src_i == i.id { r.trimmed_center_pts.reversed() } else if r.dst_i == i.id { r.trimmed_center_pts.clone() } else { panic!("Incident road {} doesn't have an endpoint at {}", id, i.id); }; let pl_normal = pl.shift_right(r.half_width)?; let pl_reverse = pl.shift_left(r.half_width)?; lines.push((*id, pl.last_line(), pl_normal, pl_reverse)); } // Sort the polylines by the angle their last segment makes to the common point. let intersection_center = lines[0].1.pt2(); lines.sort_by_key(|(_, l, _, _)| { l.pt1().angle_to(intersection_center).normalized_degrees() as i64 }); if lines.len() == 1 { return deadend(roads, i.id, &lines); } let rollback = lines .iter() .map(|(r, _, _, _)| (*r, roads[r].trimmed_center_pts.clone())) .collect::<Vec<_>>(); if let Some(result) = on_off_ramp(roads, i.id, lines.clone()) { Ok(result) } else { for (r, trimmed_center_pts) in rollback { roads.get_mut(&r).unwrap().trimmed_center_pts = trimmed_center_pts; } generalized_trim_back(roads, i.id, &lines, timer) } }
def value_from_object(self, obj):
    # Return the field's value serialized as YAML; empty or falsy values pass
    # through unchanged so blank fields stay blank.
    value = getattr(obj, self.attname)
    if not value or value == "":
        return value
    return yaml.dump(
        value, Dumper=DjangoSafeDumper, default_flow_style=False
    )
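A small standalone sketch of the method above, outside Django. It assumes yaml is PyYAML and substitutes yaml.SafeDumper for Django's DjangoSafeDumper (which, in real projects, lives in django.core.serializers.pyyaml) so the snippet runs on its own; the Obj and FakeField classes are hypothetical stand-ins for a model instance and a field.

import yaml

class Obj:
    data = {"name": "sensor-1", "thresholds": [1, 2, 3]}

class FakeField:
    attname = "data"

    def value_from_object(self, obj):
        value = getattr(obj, self.attname)
        if not value or value == "":
            return value
        # SafeDumper stands in for DjangoSafeDumper in this standalone sketch.
        return yaml.dump(value, Dumper=yaml.SafeDumper, default_flow_style=False)

dumped = FakeField().value_from_object(Obj())
print(dumped)
assert yaml.safe_load(dumped) == Obj.data  # the value round-trips cleanly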
def apply_to_miz(self, source_miz_file: str, out_miz_file: str, overwrite: bool = False) -> None: _source_miz_file_path = Path(source_miz_file).absolute() _out_miz_file_path = Path(out_miz_file).absolute() if not _source_miz_file_path.exists(): raise MizFileNotFoundError(str(_source_miz_file_path)) if _out_miz_file_path.exists() and not overwrite: raise MizFileAlreadyExistsError(str(_out_miz_file_path)) with Miz(str(_source_miz_file_path)) as miz: LOGGER.debug('applying time to miz: %s', self.iso_format) miz.mission.day = self.day miz.mission.month = self.month miz.mission.year = self.year miz.mission.mission_start_time = self.mission_start_time miz.zip(str(_out_miz_file_path))
/**
 * Created by Administrator on 2016/3/16.
 */
import android.content.Intent;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.json.JSONArray;
import org.json.JSONException;

// CaptureActivity is provided by the bundled barcode scanner library
// (commonly ZXing); its import depends on how that library is packaged.
public class BarcodePlugin extends CordovaPlugin {

    // Kept static so the scan activity can deliver its result back to the web view.
    public static CallbackContext cbContext = null;

    @Override
    public boolean execute(String action, final JSONArray args, CallbackContext callbackContext)
            throws JSONException {
        cbContext = callbackContext;
        if (action.equals("startScan")) {
            this.cordova.getActivity().startActivity(new Intent(
                    this.cordova.getActivity().getApplicationContext(), CaptureActivity.class));
        }
        return true;
    }
}
/** A runnable that spins in a loop asking for tasks to launch and launching them. */ private class TaskLaunchRunnable implements Runnable { /** Client to use to communicate with each scheduler (indexed by scheduler hostname). */ private HashMap<String, GetTaskService.Client> schedulerClients = Maps.newHashMap(); /** Client to use to communicate with each application backend (indexed by backend address). */ private HashMap<InetSocketAddress, BackendService.Client> backendClients = Maps.newHashMap(); @Override public void run() { while (true) { TaskSpec task = scheduler.getNextTask(); // blocks until task is ready List<TTaskLaunchSpec> taskLaunchSpecs = executeGetTaskRpc(task); AUDIT_LOG.info(Logging.auditEventString("node_monitor_get_task_complete", task.requestId, nodeMonitorInternalAddress.getHost())); if (taskLaunchSpecs.isEmpty()) { LOG.debug("Didn't receive a task for request " + task.requestId); scheduler.noTaskForReservation(task); continue; } if (taskLaunchSpecs.size() > 1) { LOG.warn("Received " + taskLaunchSpecs + " task launch specifications; ignoring all but the first one."); } task.taskSpec = taskLaunchSpecs.get(0); LOG.info("Received task for request " + task.requestId + ", task " + task.taskSpec.getTaskId()); // Launch the task on the backend. AUDIT_LOG.info(Logging.auditEventString("node_monitor_task_launch", System.currentTimeMillis(), task.requestId, nodeMonitorInternalAddress.getHost(), task.taskSpec.getTaskId(), task.previousRequestId, task.previousTaskId)); executeLaunchTaskRpc(task); LOG.info("Launched task " + task.taskSpec.getTaskId() + " for request " + task.requestId + " on application backend at system time " + System.currentTimeMillis()); } } /** Uses a getTask() RPC to get the task specification from the appropriate scheduler. */ private List<TTaskLaunchSpec> executeGetTaskRpc(TaskSpec task) { String schedulerAddress = task.schedulerAddress.getAddress().getHostAddress(); if (!schedulerClients.containsKey(schedulerAddress)) { try { schedulerClients.put(schedulerAddress, TClients.createBlockingGetTaskClient( task.schedulerAddress.getAddress().getHostAddress(), SchedulerThrift.DEFAULT_GET_TASK_PORT)); } catch (IOException e) { LOG.error("Error creating thrift client: " + e.getMessage()); List<TTaskLaunchSpec> emptyTaskLaunchSpecs = Lists.newArrayList(); return emptyTaskLaunchSpecs; } } GetTaskService.Client getTaskClient = schedulerClients.get(schedulerAddress); long startTimeMillis = System.currentTimeMillis(); long startGCCount = Logging.getGCCount(); LOG.debug("Attempting to get task for request " + task.requestId); AUDIT_LOG.debug(Logging.auditEventString("node_monitor_get_task_launch", task.requestId, nodeMonitorInternalAddress.getHost())); List<TTaskLaunchSpec> taskLaunchSpecs; try { taskLaunchSpecs = getTaskClient.getTask(task.requestId, nodeMonitorInternalAddress, task.originalNodeMonitorAddress); } catch (TException e) { LOG.error("Error when launching getTask RPC:" + e.getMessage()); List<TTaskLaunchSpec> emptyTaskLaunchSpecs = Lists.newArrayList(); return emptyTaskLaunchSpecs; } long rpcTime = System.currentTimeMillis() - startTimeMillis; long numGarbageCollections = Logging.getGCCount() - startGCCount; LOG.debug("GetTask() RPC for request " + task.requestId + " completed in " + rpcTime + "ms (" + numGarbageCollections + "GCs occured during RPC)"); return taskLaunchSpecs; } /** Executes an RPC to launch a task on an application backend. 
*/ private void executeLaunchTaskRpc(TaskSpec task) { if (!backendClients.containsKey(task.appBackendAddress)) { try { backendClients.put(task.appBackendAddress, TClients.createBlockingBackendClient(task.appBackendAddress)); } catch (IOException e) { LOG.error("Error creating thrift client: " + e.getMessage()); return; } } BackendService.Client backendClient = backendClients.get(task.appBackendAddress); THostPort schedulerHostPort = Network.socketAddressToThrift(task.schedulerAddress); TFullTaskId taskId = new TFullTaskId(task.taskSpec.getTaskId(), task.requestId, task.appId, schedulerHostPort); try { backendClient.launchTask(task.taskSpec.bufferForMessage(), taskId, task.user); } catch (TException e) { LOG.error("Unable to launch task on backend " + task.appBackendAddress + ":" + e); } } }
The Role of Exosomes in Breast Cancer Diagnosis

The importance of molecular re-characterization of metastatic disease with the purpose of monitoring tumor evolution has been acknowledged in numerous clinical guidelines for the management of advanced malignancies. In this context, an attractive alternative to overcome the limitations of repeated tissue sampling is represented by the analysis of peripheral blood samples as a ‘liquid biopsy’. In recent years, liquid biopsies have been studied for the early diagnosis of cancer, the monitoring of tumor burden, tumor heterogeneity and the emergence of molecular resistance, along with the detection of minimal residual disease. Interestingly, liquid biopsy permits the analysis of circulating tumor cells, circulating tumor DNA and extracellular vesicles (EVs). In particular, EVs play a crucial role in cell communication, carrying transmembrane and nonmembrane proteins, as well as metabolites, lipids and nucleic acids. Of all EVs, exosomes mirror the biological fingerprints of the parental cells from which they originate, and therefore, are considered one of the most promising predictors of early cancer diagnosis and treatment response. The present review discusses current knowledge on the possible applications of exosomes in breast cancer (BC) diagnosis, with a focus on patients at higher risk.

Liquid Biopsy and Extracellular Vesicles

Cancer is a dynamic and heterogeneous entity following the principles of clonal evolution. Different areas of the same primary tumor show different genomic profiles, while metastases acquire new molecular aberrations compared to primary tumors. Therapy-related biomarkers may change throughout cancer progression 'in time and space'. As a result, the measurement of the biomarker of interest at multiple time points and different sites of the tumor may provide crucial information for patient management. On these grounds, precision oncology has highlighted the need to provide the most appropriate and effective treatment to each cancer patient, assuming that inter- and intra-tumor genetic heterogeneity could explain sensitivity or resistance to anticancer agents. The primary goal of precision oncology is, therefore, to discover molecular biomarkers predicting prognosis and response to specific therapies, helping to anticipate the emergence of unexplained drug resistance. Nevertheless, obtaining serial samples of tumor tissue is impractical and complicated by spatial heterogeneity and sampling bias. Indeed, more comprehensive and accessible tumor genome information is needed to provide an accurate account of the whole tumor than that obtained through a single biopsy. Interestingly, an attractive alternative to overcome the limitation of repeated tissue sampling is provided by the analysis of peripheral blood samples as 'liquid biopsy'. Liquid biopsy is being developed as a promising new technique in the field of precision oncology. It is a minimally invasive prognostic and diagnostic tool that could overcome the limits of surgical biopsy. Blood draws can easily be performed serially. Thus, blood is an ideal compartment for the detection of prognostic and predictive biomarkers. Moreover, liquid biopsy has several potential clinical applications. These include early tumor diagnosis, the monitoring of tumor burden, tumor heterogeneity and the emergence of molecular resistance, and the detection of minimal residual disease.
In particular, liquid biopsy mainly targets materials pulling away from tumor edges and swept away by the bloodstream, including circulating tumor cells, circulating tumor DNA and extracellular vesicles (EVs). It is well-known that nucleic acids are present in the biological fluids of healthy subjects in stable low concentrations and are immunologically inactive; however, they change dramatically in cancer and autoimmune disorders. The circulating DNA is also internalized in EVs, which protect it from nuclease degradation or recognition as dangerous by immune cells and provide its effective clearance. The features of circulating DNA and its packaging in vesicles reflect the state of the cell of origin, such as apoptosis, necrosis, phagocytosis or active secretion.

EVs are small lipid bilayer-enclosed vesicles, actively released by all viable cells, that play a vital role in cell communication. They carry transmembrane and nonmembrane proteins as well as metabolites, lipids, messenger RNAs, microRNAs, long-noncoding RNA, and DNA. In recent years, the interest in EVs has rapidly increased and several studies have demonstrated their potential use as diagnostic, prognostic and therapeutic agents in clinical settings. In 2014, the International Society for Extracellular Vesicles (ISEV) board members provided a list of minimal information regarding EVs, updated in 2018. According to ISEV guidelines, the term EVs includes three types of vesicles, namely exosomes, microvesicles, and apoptotic bodies, based on origin and size of diameter. In detail, exosomes are defined as intra-luminal vesicles with a diameter ranging from 30 to 150 nm derived from the multi-vesicular (MV) bodies, formed by budding of the endosomal membranes and secreted in the extracellular space upon fusion of late endocytic compartments with the plasma membrane. Microvesicles include different populations of vesicles, which are in the nano-range of 50-200 nm, and larger vesicles up to 1 µm, which include the pre-apoptotic vesicles. They are generated by plasma membrane budding and are shed in the extracellular space. Apoptotic bodies, with a diameter ranging from 1 to 5 µm, are a class of vesicles released by cells exclusively during apoptotic cell death, and their cargo is mainly enriched with nuclear fragments (Figure 1A).

In particular, exosomes are extremely abundant in all biological fluids, including serum, cerebrospinal fluid, plasma, saliva, breast milk and urine. When exosomes were discovered in 1983, they were first believed to operate as cellular garbage disposal. Since then, several researchers have investigated their biological roles. These include, but are not limited to, antigen presentation, immune regulation, apoptosis evasion, drug resistance and immune surveillance escape. Moreover, exosomes derived from cancer cells have been demonstrated to play a key role in facilitating tumorigenesis by regulating angiogenesis, immunity, and metastasis (Figure 1B). By way of example, Peinado et al. observed how melanoma-derived exosomes increase the metastatic behavior of primary tumors by permanently "educating" bone marrow progenitors via the MET receptor. Besides, melanoma-derived exosomes induce vascularization at pre-metastatic sites and reprogram bone marrow progenitors towards a pro-vasculogenic phenotype. Al-Nedawi et al.
demonstrated that the transmission of the constitutively active EGFRvIII via EVs not only transfers oncogenic activity among cancer cells but also activates autocrine VEGF signaling in endothelial cells, stimulating tumor angiogenesis. Finally, another key example of the role of exosomes in metastatization has been shown in pancreatic cancer, where EVs promote pre-metastatic niche formation in the liver through macrophage inhibitory factor signaling and a consequent fibrotic liver environment.

Focusing on Exosome Isolation and Characterization

Despite growing interest in this field, our understanding of the biogenesis, release, uptake and function of EVs remains limited. A key limitation to the specific characterization of EV subpopulations has been the technical difficulty in isolating and characterizing pure populations of specific subtypes. This appears to be the case because the methods currently available lead to the systematic co-isolation of EVs of distinct subcellular origins. Within the EV population, exosomes have sparked great scientific interest in recent years. This is due to biological fingerprints practically mirroring those of the parental cells from which they originate. Exosomes are identified primarily by their size, which, however, has not been universally defined. Typically, exosomes are considered as vesicles of 30-150 nm in diameter, up to 200 nm. However, based on their biogenesis, recent studies have identified three transmembrane proteins belonging to the tetraspanin family (CD9, CD81 and CD63). These are commonly found in exosomes and are often enriched in the vesicles compared to cell lysate (Figure 1C).

The key role of exosomes in cell-to-cell communication and tumorigenesis has made their isolation and quantification major challenges in both basic research and clinical applications. The need for standard, reproducible exosome characterization methodologies became imperative to allow for the use of exosomes as potential biotools in the diagnosis and treatment of various diseases. As the first step toward improving knowledge in this field and developing exosome-based assays, exosomes have to be reliably and efficiently isolated from several body fluids. To date, five groups of exosome isolation techniques have been developed. These are differential ultracentrifugation-based techniques, size-based techniques, immunoaffinity capture-based techniques, exosome precipitation and microfluidics-based techniques. Differential ultracentrifugation, or commercial kits allowing for the precipitation of smaller EVs, are among the most common methods for isolating exosomes from serum or plasma. Exosome precipitation is easy to use and does not require any specialized equipment, allowing for easy integration into clinical usage.
Alongside isolation techniques, the characterization of exosomes is also critical to developing exosome-based assays. Characterization methods are categorized into biophysical and molecular methods. Biophysical methods, of which the most common are nanoparticle tracking analysis (NTA) and transmission electron microscopy (TEM), are normally used to determine the size distribution of exosomes in samples; however, they do not provide molecular information. Molecular methods such as flow cytometry allow for the identification of molecular markers, e.g., surface receptors and membrane proteins present on exosomes. Moreover, since exosomes are of intracellular origin, they are packaged with a bio-macromolecular cargo of DNA, RNA and proteins (Figure 1C). The exosome cargo therefore represents a molecular fingerprint of the cell of origin and is important in initiating or suppressing various signaling pathways in recipient cells, including those responsible for metastasis and drug resistance.

Clinical Applications of Exosome Research

Cancer cells may secrete a larger number of exosomes relative to normal cells, carrying a number of cancer-specific biomarkers. As they are easily accessible and stable in vitro, exosomes have been considered to be one of the most promising predictors of early cancer diagnosis and treatment response. Based on increasing awareness of the importance of exosomal content, two test kits based on the liquid biopsy approach have been made commercially available since 2016 to detect prostate and lung cancer markers. These are ExoDx® Prostate (IntelliScore) and ExoDx® Lung(ALK). ExoDx® Prostate (IntelliScore) is a urine exosome gene expression assay suitable for men over 50 years of age with a prostate-specific antigen (PSA) of 2-10 ng/mL, i.e., a PSA in the "gray zone", who are considering an initial biopsy. ExoDx® Prostate (IntelliScore) returns a risk score that determines a patient's risk of clinically significant prostate cancer (Gleason Score ≥ 7) on prostate biopsy. A score above the validated cutpoint of 15.6 is associated with an increased likelihood of Gleason Score ≥ 7 prostate cancer on biopsy (a toy illustration of this cutpoint logic is sketched at the end of this section). Similarly, ExoDx® Lung (ALK), validated in the Exosome Diagnostics CLIA laboratory, isolates and analyzes exosomal RNA contained in blood specimens for the purpose of detecting EML4-ALK fusion transcripts in the plasma of lung cancer patients whose primary tumors carry this type of mutation. The ExoDx® Lung (ALK) test can be used both at baseline, to help guide treatment choice, and longitudinally, to monitor patient progress during therapy.

The search for circulating tumor materials is emerging as a novel method for breast cancer (BC) diagnosis as well. Since stage at diagnosis remains the main prognostic factor, accurate blood tests matching the sensitivity and specificity of mammographic screening would be helpful in early detection. Against this backdrop, a large number of researchers are studying exosomes due to their potential as a highly accessible source of detailed information on tumor biological features (proteins and nucleic acids) obtained through liquid biopsy. The present review discusses current knowledge on the possible applications of exosomes in early BC diagnosis, with a focus on patients at higher lifetime BC risk.
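As flagged above, cutpoint-based reporting of the ExoDx® Prostate (IntelliScore) type can be illustrated with a few lines of code. The sketch below is purely hypothetical: only the 15.6 cutpoint and the Gleason Score ≥ 7 interpretation come from the text, and the function is in no way the vendor's actual algorithm.

# Toy illustration of cutpoint-based risk reporting; not the vendor's algorithm.
def interpret_intelliscore(score: float, cutpoint: float = 15.6) -> str:
    """Interpret a risk score against the validated 15.6 cutpoint quoted above."""
    if score > cutpoint:
        return (f"score {score:.1f} > {cutpoint}: associated with increased "
                "likelihood of Gleason Score >= 7 cancer on biopsy")
    return (f"score {score:.1f} <= {cutpoint}: lower likelihood of "
            "clinically significant (Gleason >= 7) cancer on biopsy")

print(interpret_intelliscore(22.4))
print(interpret_intelliscore(9.8))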
Exosomal Proteins in Breast Cancer Diagnosis

Proteins located on the surface of, as well as within, exosomes may also be used as cancer biomarkers. As shown by the proteomic results available in the ExoCarta and EVPedia databases, exosomes exhibit specific protein profiles according to their cellular origin. As previously mentioned, tetraspanins are abundantly expressed in exosomes. These are a protein superfamily that interacts with a large variety of transmembrane and cytosolic signaling proteins. In particular, tetraspanin CD9, together with metalloprotease ADAM10, heat-shock protein HSP70 and Annexin-1, is a general marker protein detected in serum and pleural effusion-derived exosomes from patients with BC or from BC cell lines. Interestingly, Wang and colleagues recently showed that the level of exosomal tetraspanin CD82 was significantly higher in the serum of BC patients compared to healthy controls, and that CD82 expression increased significantly with malignant BC progression. Furthermore, the combined expression of urinary exosomal tetraspanin CD63 and miR-21 had a 95% sensitivity for early BC detection, although neither marker is specific to BC. Rupp et al. reported that the epithelial cell adhesion molecules EpCAM and CD24 could be used as markers to specifically identify cancer-derived exosomes in ascites and pleural effusions from BC and ovarian cancer. In the same period, Moon and colleagues found that plasma levels of both developmental endothelial locus-1 protein (Del-1) and fibronectin expressed by circulating exosomes were significantly higher in patients with BC than in controls. Moreover, they almost returned to normal after tumor removal, proving to be closely related to tumor presence. Additionally, Khan et al. demonstrated that exosomal Survivin, particularly Survivin-2B, may be employed as a diagnostic and/or prognostic marker in early BC patients.

Interestingly, exosomes from gastric, breast and pancreatic cancer carry members of the human epidermal growth factor receptor (HER) family. In HER2-overexpressing BC cell lines, HER2-positive exosomes modulate sensitivity to Trastuzumab and, consequently, HER2-driven tumor aggressiveness. Although not specific to early BC diagnosis, HER2 could be a useful biomarker for anticipating drug resistance during treatment, which represents the principal factor limiting cures in cancer patients. Additionally, Melo and colleagues identified a cell surface proteoglycan, glypican-1 (GPC1), specifically enriched on cancer cell-derived exosomes. They observed that GPC1-positive circulating exosomes were specifically and sensitively detectable in the serum of patients with pancreatic cancer. Elevated GPC1 levels have also been observed on exosomes from BC cells, suggesting a possible use of this exosomal biomarker to identify BC at an early stage. More recently, Kibria et al. used an automated micro flow cytometer to profile the protein expression of exosomes isolated from cell lines and from the blood of BC patients and healthy controls. They observed a significant reduction in CD47 expression in circulating exosomes from BC patients compared to controls. Notably, CD47 is a cancer-related surface protein whose expression prevents recognition of cancer cells by the innate immune system, thus facilitating tumor progression. Finally, other studies demonstrated higher expression of serum exosomal annexin A2 (exo-AnxA2) in BC patients compared to non-cancer females, especially in triple-negative BC (TNBC) rather than in luminal and HER2-positive BC.
Moreover, high exo-AnxA2 levels in BC were significantly associated with tumor grade, poor overall survival and poor disease-free survival. This study also showed that exo-AnxA2 promotes angiogenesis. Therefore, exo-AnxA2 represents a potential prognostic biomarker and therapeutic target in TNBC.

Exosomal MicroRNAs in Breast Cancer Diagnosis

MicroRNAs (miRNAs) are short, noncoding single-stranded RNAs that regulate gene expression at the post-transcriptional level by binding to the 3′ untranslated region of their target mRNAs, leading to translational inhibition or mRNA degradation. Exosomes contain plenty of miRNAs, and several studies have investigated the role of exosomal miRNA expression in mediating biological effects in recipient cells. In particular, miRNAs exist stably in body fluids by virtue of their packaging in exosomes, which protects them from degradation. Interestingly, exosomal miRNAs can act as ideal novel biomarkers in BC, because their expression profile correlates with tumorigenesis and tumor progression.

In 2016, Hannafon et al. showed that plasma levels of exosomal miR-21 and miR-1246 were markedly higher in BC patients than in healthy subjects. This suggests their potential use as biomarkers in BC, although miR-21 and miR-1246 are ubiquitous in human exosomes. These data are in keeping with other studies that described high levels of these miRNAs in serum or plasma from BC patients. In detail, Shimomura and colleagues evaluated serum miRNA expression profiles using highly sensitive microarray analysis, discovering a combination of five miRNAs (miR-1246, miR-1307-3p, miR-4634, miR-6861-5p and miR-6875-5p) able to detect BC with high sensitivity, specificity and accuracy, even in the case of ductal carcinoma in situ (DCIS). Additionally, Fu et al. found that miR-382-3p and miR-1246 were significantly upregulated in the serum of BC patients, while miR-598-3p and miR-184 were significantly downregulated. Finally, a meta-analysis by Li and colleagues suggested that miR-21, being significantly up-regulated in BC, is a potential biomarker for early diagnosis with high sensitivity and specificity.

Although miR-145, miR-155, and miR-382 have been proposed as non-invasive biomarkers to distinguish BC patients from healthy individuals, in 2019 Gonzalez-Villasana et al. isolated these miRNAs in serum exosomes from both BC patients and healthy donors. However, this study confirmed significantly higher concentrations of exosomes in BC patients compared to healthy donors, supporting the hypothesis of an association between exosome concentration and the presence of BC. In another study of 50 BC cases and 12 healthy controls, Eichelser and colleagues reported that exosomal miR-101 and miR-372 were BC-specific, as confirmed by significantly higher serum levels in BC patients than in the control group. Moreover, Yoshikawa et al. showed that plasma exosome-encapsulated miR-223-3p levels may be a useful preoperative biomarker to identify invasive lesions in patients diagnosed with DCIS by biopsy. In particular, the exosomal miR-223-3p level was significantly increased in BC patients compared to healthy controls and showed a significant correlation with histological type, pT stage, pN stage, pathological stage, lymphatic invasion and nuclear grade.
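Most of the biomarker studies cited in this section report performance as sensitivity and specificity. As a quick worked example of what those figures mean, the sketch below computes both from a confusion matrix; the counts are invented for illustration and are not taken from any cited study.

# Hypothetical counts, for illustration only.
true_positives  = 68   # BC patients correctly flagged by the biomarker panel
false_negatives = 1    # BC patients missed by the panel
true_negatives  = 40   # healthy controls correctly cleared
false_positives = 0    # healthy controls incorrectly flagged

sensitivity = true_positives / (true_positives + false_negatives)
specificity = true_negatives / (true_negatives + false_positives)

print(f"sensitivity = {sensitivity:.1%}")  # fraction of cancers detected
print(f"specificity = {specificity:.1%}")  # fraction of healthy correctly cleared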
In 2019, in order to investigate the enrichment of exosomal miRNAs in the pathogenesis of BC and DCIS, Ni et al. discovered an increase in exosomal miR-16 levels in the plasma of BC and DCIS patients compared to healthy women, especially in cases of luminal tumors. Moreover, lower levels of exosomal miR-30b were associated with recurrence, and exosomal miR-93 was upregulated in DCIS patients. Interestingly, Li and colleagues demonstrated that serum exosomal miR-148a levels were significantly downregulated in patients with BC as compared to healthy subjects and patients with benign breast tumors. Moreover, the downregulation of serum exosomal miR-148a was closely associated with staging at diagnosis and disease relapse, indicating that it might be a promising non-invasive diagnostic and prognostic biomarker for BC. On the other hand, Rodriguez-Martinez and colleagues investigated the use of serum exosomal miRNAs as diagnostic biomarkers in 53 patients initially diagnosed with locally advanced BC. They discovered that, before neoadjuvant therapy, exosomal miR-21 and miR-105 expression levels were higher in metastatic versus non-metastatic patients and healthy controls. Based on these results, the authors suggested adding miR-21 and miR-105 analysis to mammogram tests, in order to identify those patients with metastatic disease who are misdiagnosed as non-metastatic by current clinical methods. Interestingly, based on a case-control study of 69 BC patients versus 40 healthy controls, Hirschfeld and colleagues have recently identified a specific panel of four urinary microRNAs (miR-424, miR-423, miR-660, and let7-i) as a highly specific combinatory biomarker tool discriminating BC patients from healthy controls, with 98.6% sensitivity and 100% specificity. Studies of exosomal miRNAs detected in the serum and plasma of BC patients and potentially useful for early diagnosis are summarized in Table 1.

To date, numerous studies on exosomal miRNAs linked to tumors, and to BC in particular, have been published. This number is destined to increase, given the growing attention toward this new potential diagnostic and prognostic tool. However, further research is needed in order to identify the most focused and promising set of miRNAs.

Exosomal MicroRNAs for Early Diagnosis in High-Risk Patients

For women in Western countries, the average lifetime risk of developing BC is approximately 13% (i.e., 1 in 8 to 1 in 7 women). For the purpose of screening recommendations, women are stratified into two categories: average risk and increased risk. According to National Comprehensive Cancer Network (NCCN) guidelines, increased risk is determined by one of the following factors: (1) prior history of BC; (2) age ≥ 35 years with a 5-year risk of invasive BC ≥ 1.7% (Gail model); (3) lifetime risk of BC > 20% based on a history of mammary lesions of uncertain malignant potential (lobular carcinoma in situ, atypical ductal or lobular hyperplasia); (4) lifetime risk of BC > 20% based on models dependent on family history; (5) thoracic irradiation before 30 years of age (e.g., mantle irradiation); and (6) known genetic predisposition to BC (a schematic translation of these criteria into code is sketched below). Women at increased risk of developing BC undergo more intensive screening that includes semestral to annual clinical examination and periodic breast imaging, often starting at an earlier age than in the rest of the population.
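As flagged above, the NCCN-style triage can be rendered schematically in code. The sketch below is a loose paraphrase of this review's summary, not NCCN's wording: the field names and structure are assumptions, and real eligibility assessment requires validated risk models and clinical judgment.

# Minimal sketch of the "increased risk" triage summarized above.
# Field names are this review's paraphrase, not NCCN's wording.
from dataclasses import dataclass

@dataclass
class RiskProfile:
    prior_bc_history: bool
    age: int
    five_year_gail_risk_pct: float   # Gail model 5-year invasive BC risk (%)
    lifetime_risk_pct: float         # from lesion history or family-history models
    thoracic_rt_before_30: bool      # e.g., mantle irradiation
    known_genetic_predisposition: bool

def is_increased_risk(p: RiskProfile) -> bool:
    """Return True if any of the six criteria quoted above is met."""
    return (p.prior_bc_history
            or (p.age >= 35 and p.five_year_gail_risk_pct >= 1.7)
            or p.lifetime_risk_pct > 20.0
            or p.thoracic_rt_before_30
            or p.known_genetic_predisposition)

profile = RiskProfile(False, 42, 2.1, 15.0, False, False)
print(is_increased_risk(profile))  # True: Gail 5-year risk >= 1.7% at age >= 35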
Screening programs are associated with the risk of false-positive results and consequent over-diagnosis and overtreatment. In addition, they are associated with the possibility of false-negative results, which must be taken into account in cases of elevated BC risk, especially in the presence of a familial or known genetic predisposition to BC. These aspects contribute to increasing anxiety and distress in high-risk women, who would benefit from less invasive and more accurate diagnostic strategies.

Considering hereditary BC, most cases are due to germline mutations in the breast cancer genes BRCA1/2. In this regard, the frequency of BRCA1/2 pathogenic variants in the population has been estimated at one in 400-500. Women's risk of developing BC before 70 years of age is 46-87% for BRCA1 mutation carriers and 38-84% for BRCA2 mutation carriers. While BRCA2-associated BCs are quite similar to sporadic tumors, BRCA1-related BCs have a triple-negative phenotype in 68% of cases. In women undergoing regular BC screening, TNBC usually presents as an interval cancer (between two mammograms). Even in cases of more intensive screening programs, patients often present with a palpable tumor because of the high proliferation index of TNBC. Moreover, despite their large size at diagnosis, TNBCs can be occult on initial mammography. Therefore, a more sensitive and less invasive screening method that would allow for early diagnosis is sorely needed in this setting.

The analysis of exosomal miRNAs gathered through liquid biopsy could be a promising tool. Given the high percentage of TNBC in BRCA1 mutation carriers, the isolation of TNBC-specific exosomal miRNAs might be a valid approach. As mentioned above, serum and/or plasma levels of exosomal annexin A2 and of several exosomal miRNAs have been shown to be altered in TNBC. Some studies on tumor specimens and cell lines discovered different miRNAs selectively expressed in TNBC and possibly detectable through liquid biopsy. In detail, miR-155 is upregulated in tumors with BRCA1 loss of function. Moreover, miR-210 is upregulated in TNBC compared to luminal BC, and it is highly expressed in familial BCs compared to non-familial ones. In addition, miR-221/222 are basal-like-specific miRNAs that promote cell migration and invasion, while miR-34a levels are more than three-fold lower in TNBC cell lines than in normal and HER2-positive cell lines.

Until now, the potential role of exosomal miRNAs in early BC diagnosis in high-risk patients has not been investigated. Nevertheless, in BRCA1 mutation carriers, a panel of multiple TNBC-specific exosomal miRNAs on serum or plasma, performed more frequently than breast imaging, might help to anticipate TNBC diagnosis. This would be instrumental in reducing related morbidity and mortality and should therefore be further studied in this setting.

Conclusions

The clinical benefit of exosomes as diagnostic biomarkers in BC requires further data from large clinical trials, as most of the existing evidence is based on small cohort studies. Periodic assessment of several BC-specific exosomal miRNAs detected in the studies mentioned above may be useful to anticipate radiological diagnosis, particularly in women at increased risk of developing BC, thus overcoming the limits of present screening programs.

Conflicts of Interest: The authors declare no conflict of interest associated with this manuscript.
from typing import Callable

def register_listener(self, callback: Callable) -> None:
    """Subscribe a callback to this object's event notifications."""
    self.__listeners.append(callback)
package main

import (
	"fmt"
	"syscall/js"
)

// Desired functionality: Ability to position elements relatively and absolutely,
// insert/add new divs and elements and such, edit innerHTML.

type Page struct {
	Title       string
	Description string
}

type Widget struct {
	Tag           string
	Id            string
	Type          string
	Functionality map[string]JsFunction
	Associations  map[string]string
	Subs          []*Widget
}

type JsFunction struct {
	Name     string
	Function func(this js.Value, val []js.Value) (i interface{})
}

var FUNC_TABLE = make(map[string]func(this js.Value, val []js.Value) (i interface{}))

func (j *JsFunction) Init(name string, f func(this js.Value, val []js.Value) (i interface{})) {
	j.Name = name
	j.Function = f
}

// Expose registers the function in the JS global scope and in FUNC_TABLE.
func (j *JsFunction) Expose(GlobalScope js.Value) {
	GlobalScope.Set(j.Name, js.FuncOf(j.Function))
	FUNC_TABLE[j.Name] = j.Function
}

// CreateAssociations renders the widget's attribute associations.
// TODO: iterate over w.Associations; not yet implemented.
func (w *Widget) CreateAssociations() (retString string) {
	return retString
}

// CreateFunctionality renders the widget's event-handler attributes.
// TODO: iterate over w.Functionality; not yet implemented.
func (w *Widget) CreateFunctionality() (retString string) {
	return retString
}

// Create renders the widget as an HTML element string. The closing tag
// reuses w.Tag, so the verb count now matches the argument count.
func (w *Widget) Create() (retString string) {
	retString = fmt.Sprintf("<%s id=%q type=%q %s %s></%s>",
		w.Tag, w.Id, w.Type, w.CreateAssociations(), w.CreateFunctionality(), w.Tag)
	return
}

func main() {
	// Keep the WASM module alive so exposed functions remain callable from JS.
	select {}
}
#ifndef AWS_NITRO_ENCLAVES_ATTESTATION_H
#define AWS_NITRO_ENCLAVES_ATTESTATION_H

/**
 * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 * SPDX-License-Identifier: Apache-2.0.
 */

#include <aws/nitro_enclaves/exports.h>
#include <aws/nitro_enclaves/kms.h>

#include <aws/common/allocator.h>
#include <aws/common/byte_buf.h>

AWS_EXTERN_C_BEGIN

enum aws_rsa_key_size {
    AWS_RSA_2048 = 2048,
    AWS_RSA_3072 = 3072,
    AWS_RSA_4096 = 4096,
};

struct aws_rsa_keypair {
    struct aws_allocator *allocator;
    void *key_impl;
};

/**
 * Generates an RSA key pair used for attestation.
 *
 * @param[in] allocator The allocator to use.
 * @param[in] key_size The RSA keypair size.
 *
 * @return The generated keypair.
 */
AWS_NITRO_ENCLAVES_API
struct aws_rsa_keypair *aws_attestation_rsa_keypair_new(
    struct aws_allocator *allocator,
    enum aws_rsa_key_size key_size);

/**
 * Cleans up internal structures for a previously generated RSA keypair.
 *
 * @param[in] keypair The RSA keypair previously allocated via @aws_attestation_rsa_keypair_new.
 */
AWS_NITRO_ENCLAVES_API
void aws_attestation_rsa_keypair_destroy(struct aws_rsa_keypair *keypair);

/**
 * Generates attestation data.
 *
 * @param[in] allocator The allocator to use.
 * @param[in] keypair The keypair whose public key is included in the attestation.
 * @param[out] attestation_doc The resulting attestation document.
 *
 * @return Returns the error code. If SUCCESS, then attestation_doc is populated.
 */
AWS_NITRO_ENCLAVES_API
int aws_attestation_request(
    struct aws_allocator *allocator,
    struct aws_rsa_keypair *keypair,
    struct aws_byte_buf *attestation_doc);

/**
 * Decrypts the provided ciphertext data using the specified private key.
 *
 * @param[in] allocator The allocator used to initialize plaintext.
 * @param[in] keypair The keypair used to decrypt.
 * @param[in] ciphertext The ciphertext to decrypt.
 * @param[out] plaintext The decrypted ciphertext.
 *
 * @return The result of the operation. On SUCCESS, the result will be placed in plaintext.
 */
AWS_NITRO_ENCLAVES_API
int aws_attestation_rsa_decrypt(
    struct aws_allocator *allocator,
    struct aws_rsa_keypair *keypair,
    struct aws_byte_buf *ciphertext,
    struct aws_byte_buf *plaintext);

AWS_EXTERN_C_END

#endif /* AWS_NITRO_ENCLAVES_ATTESTATION_H */
United Talent Agency, one of the half-dozen major talent agencies in Hollywood, has canceled its annual Oscar party and will instead host a rally in support of refugees at its Los Angeles office, according to a report. According to the Hollywood Reporter, UTA CEO Jeremy Zimmer informed agency staff of the change in a letter this week. The company — home to big-name clients like Gwyneth Paltrow, Angelina Jolie, Mariah Carey, Toby Keith and dozens of others — also plans to donate $250,000 to the American Civil Liberties Union, which has worked to oppose President Donald Trump’s executive order temporarily prohibiting immigration from seven countries, and to the International Rescue Committee. “This is a moment that demands our generosity, awareness and restlessness,” Zimmer reportedly wrote to his staff. “Our world is a better place for the free exchange of artists, ideas and creative expression. If our nation ceases to be the place where artists the world over can come to express themselves freely, then we cease, in my opinion, to be America.” The move to cancel the glitzy annual Oscars party comes as Hollywood has become increasingly vocal about Trump’s policy proposals and executive actions, particularly his action on immigration. At the Screen Actors Guild Awards last month, several film and television stars used their acceptance speeches to speak out against Trump’s order; actress Sarah Paulson specifically called for donations to the ACLU. THR further reported that Ari Emanuel, co-CEO of mega-agency and UTA competitor WME-IMG, sent an email to staff Wednesday notifying them that the company would be forming a political action committee, though it wasn’t immediately clear on what the PAC would focus. “This company’s greatest asset is the diversity of our backgrounds and beliefs,” Emanuel wrote to the agency. “Please know that we will do everything in our power to support and protect this diversity now and in the months and years ahead.” Follow Daniel Nussbaum on Twitter: @dznussbaum
// hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestXAttrWithSnapshot.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode.snapshot;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

import java.util.EnumSet;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.SnapshotAccessControlException;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.ToolRunner;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

/**
 * Tests interaction of XAttrs with snapshots.
 */
public class TestXAttrWithSnapshot {

  private static MiniDFSCluster cluster;
  private static Configuration conf;
  private static DistributedFileSystem hdfs;
  private static int pathCount = 0;
  private static Path path, snapshotPath, snapshotPath2, snapshotPath3;
  private static String snapshotName, snapshotName2, snapshotName3;
  private final int SUCCESS = 0;
  // XAttrs
  private static final String name1 = "user.a1";
  private static final byte[] value1 = { 0x31, 0x32, 0x33 };
  private static final byte[] newValue1 = { 0x31, 0x31, 0x31 };
  private static final String name2 = "user.a2";
  private static final byte[] value2 = { 0x37, 0x38, 0x39 };

  @Rule
  public ExpectedException exception = ExpectedException.none();

  @BeforeClass
  public static void init() throws Exception {
    conf = new Configuration();
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_XATTRS_ENABLED_KEY, true);
    initCluster(true);
  }

  @AfterClass
  public static void shutdown() throws Exception {
    IOUtils.cleanup(null, hdfs);
    if (cluster != null) {
      cluster.shutdown();
    }
  }

  @Before
  public void setUp() {
    ++pathCount;
    path = new Path("/p" + pathCount);
    snapshotName = "snapshot" + pathCount;
    snapshotName2 = snapshotName + "-2";
    snapshotName3 = snapshotName + "-3";
    snapshotPath = new Path(path, new Path(".snapshot", snapshotName));
    snapshotPath2 = new Path(path, new Path(".snapshot", snapshotName2));
    snapshotPath3 = new Path(path, new Path(".snapshot", snapshotName3));
  }

  /**
   * Tests modifying xattrs on a directory that has been snapshotted
   */
  @Test(timeout = 120000)
  public void testModifyReadsCurrentState() throws Exception {
    // Init
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
    hdfs.setXAttr(path, name1, value1);
    hdfs.setXAttr(path, name2, value2);

    // Verify that current path reflects xattrs, snapshot doesn't
    Map<String, byte[]> xattrs = hdfs.getXAttrs(path);
    assertEquals(xattrs.size(), 2);
    assertArrayEquals(value1, xattrs.get(name1));
    assertArrayEquals(value2, xattrs.get(name2));
    xattrs = hdfs.getXAttrs(snapshotPath);
    assertEquals(xattrs.size(), 0);

    // Modify each xattr and make sure it's reflected
    hdfs.setXAttr(path, name1, value2, EnumSet.of(XAttrSetFlag.REPLACE));
    xattrs = hdfs.getXAttrs(path);
    assertEquals(xattrs.size(), 2);
    assertArrayEquals(value2, xattrs.get(name1));
    assertArrayEquals(value2, xattrs.get(name2));

    hdfs.setXAttr(path, name2, value1, EnumSet.of(XAttrSetFlag.REPLACE));
    xattrs = hdfs.getXAttrs(path);
    assertEquals(xattrs.size(), 2);
    assertArrayEquals(value2, xattrs.get(name1));
    assertArrayEquals(value1, xattrs.get(name2));

    // Paranoia checks
    xattrs = hdfs.getXAttrs(snapshotPath);
    assertEquals(xattrs.size(), 0);

    hdfs.removeXAttr(path, name1);
    hdfs.removeXAttr(path, name2);
    xattrs = hdfs.getXAttrs(path);
    assertEquals(xattrs.size(), 0);
  }

  /**
   * Tests removing xattrs on a directory that has been snapshotted
   */
  @Test(timeout = 120000)
  public void testRemoveReadsCurrentState() throws Exception {
    // Init
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
    hdfs.setXAttr(path, name1, value1);
    hdfs.setXAttr(path, name2, value2);

    // Verify that current path reflects xattrs, snapshot doesn't
    Map<String, byte[]> xattrs = hdfs.getXAttrs(path);
    assertEquals(xattrs.size(), 2);
    assertArrayEquals(value1, xattrs.get(name1));
    assertArrayEquals(value2, xattrs.get(name2));
    xattrs = hdfs.getXAttrs(snapshotPath);
    assertEquals(xattrs.size(), 0);

    // Remove xattrs and verify one-by-one
    hdfs.removeXAttr(path, name2);
    xattrs = hdfs.getXAttrs(path);
    assertEquals(xattrs.size(), 1);
    assertArrayEquals(value1, xattrs.get(name1));

    hdfs.removeXAttr(path, name1);
    xattrs = hdfs.getXAttrs(path);
    assertEquals(xattrs.size(), 0);
  }

  /**
   * 1) Save xattrs, then create snapshot. Assert that inode of original and
   * snapshot have same xattrs. 2) Change the original xattrs, assert snapshot
   * still has old xattrs.
   */
  @Test
  public void testXAttrForSnapshotRootAfterChange() throws Exception {
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    hdfs.setXAttr(path, name1, value1);
    hdfs.setXAttr(path, name2, value2);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);

    // Both original and snapshot have same XAttrs.
    Map<String, byte[]> xattrs = hdfs.getXAttrs(path);
    Assert.assertEquals(xattrs.size(), 2);
    Assert.assertArrayEquals(value1, xattrs.get(name1));
    Assert.assertArrayEquals(value2, xattrs.get(name2));
    xattrs = hdfs.getXAttrs(snapshotPath);
    Assert.assertEquals(xattrs.size(), 2);
    Assert.assertArrayEquals(value1, xattrs.get(name1));
    Assert.assertArrayEquals(value2, xattrs.get(name2));

    // Original XAttrs have changed, but snapshot still has old XAttrs.
    hdfs.setXAttr(path, name1, newValue1);

    doSnapshotRootChangeAssertions(path, snapshotPath);
    restart(false);
    doSnapshotRootChangeAssertions(path, snapshotPath);
    restart(true);
    doSnapshotRootChangeAssertions(path, snapshotPath);
  }

  private static void doSnapshotRootChangeAssertions(Path path,
      Path snapshotPath) throws Exception {
    Map<String, byte[]> xattrs = hdfs.getXAttrs(path);
    Assert.assertEquals(xattrs.size(), 2);
    Assert.assertArrayEquals(newValue1, xattrs.get(name1));
    Assert.assertArrayEquals(value2, xattrs.get(name2));

    xattrs = hdfs.getXAttrs(snapshotPath);
    Assert.assertEquals(xattrs.size(), 2);
    Assert.assertArrayEquals(value1, xattrs.get(name1));
    Assert.assertArrayEquals(value2, xattrs.get(name2));
  }

  /**
   * 1) Save xattrs, then create snapshot. Assert that inode of original and
   * snapshot have same xattrs. 2) Remove some original xattrs, assert snapshot
   * still has old xattrs.
   */
  @Test
  public void testXAttrForSnapshotRootAfterRemove() throws Exception {
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    hdfs.setXAttr(path, name1, value1);
    hdfs.setXAttr(path, name2, value2);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);

    // Both original and snapshot have same XAttrs.
    Map<String, byte[]> xattrs = hdfs.getXAttrs(path);
    Assert.assertEquals(xattrs.size(), 2);
    Assert.assertArrayEquals(value1, xattrs.get(name1));
    Assert.assertArrayEquals(value2, xattrs.get(name2));
    xattrs = hdfs.getXAttrs(snapshotPath);
    Assert.assertEquals(xattrs.size(), 2);
    Assert.assertArrayEquals(value1, xattrs.get(name1));
    Assert.assertArrayEquals(value2, xattrs.get(name2));

    // Original XAttrs have been removed, but snapshot still has old XAttrs.
    hdfs.removeXAttr(path, name1);
    hdfs.removeXAttr(path, name2);

    doSnapshotRootRemovalAssertions(path, snapshotPath);
    restart(false);
    doSnapshotRootRemovalAssertions(path, snapshotPath);
    restart(true);
    doSnapshotRootRemovalAssertions(path, snapshotPath);
  }

  private static void doSnapshotRootRemovalAssertions(Path path,
      Path snapshotPath) throws Exception {
    Map<String, byte[]> xattrs = hdfs.getXAttrs(path);
    Assert.assertEquals(0, xattrs.size());

    xattrs = hdfs.getXAttrs(snapshotPath);
    Assert.assertEquals(2, xattrs.size());
    Assert.assertArrayEquals(value1, xattrs.get(name1));
    Assert.assertArrayEquals(value2, xattrs.get(name2));
  }

  /**
   * Test successive snapshots in between modifications of XAttrs.
   * Also verify that snapshot XAttrs are not altered when a
   * snapshot is deleted.
   */
  @Test
  public void testSuccessiveSnapshotXAttrChanges() throws Exception {
    // First snapshot
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    hdfs.setXAttr(path, name1, value1);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
    Map<String, byte[]> xattrs = hdfs.getXAttrs(snapshotPath);
    Assert.assertEquals(1, xattrs.size());
    Assert.assertArrayEquals(value1, xattrs.get(name1));

    // Second snapshot
    hdfs.setXAttr(path, name1, newValue1);
    hdfs.setXAttr(path, name2, value2);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName2);
    xattrs = hdfs.getXAttrs(snapshotPath2);
    Assert.assertEquals(2, xattrs.size());
    Assert.assertArrayEquals(newValue1, xattrs.get(name1));
    Assert.assertArrayEquals(value2, xattrs.get(name2));

    // Third snapshot
    hdfs.setXAttr(path, name1, value1);
    hdfs.removeXAttr(path, name2);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName3);
    xattrs = hdfs.getXAttrs(snapshotPath3);
    Assert.assertEquals(1, xattrs.size());
    Assert.assertArrayEquals(value1, xattrs.get(name1));

    // Check that the first and second snapshots'
    // XAttrs have stayed constant
    xattrs = hdfs.getXAttrs(snapshotPath);
    Assert.assertEquals(1, xattrs.size());
    Assert.assertArrayEquals(value1, xattrs.get(name1));
    xattrs = hdfs.getXAttrs(snapshotPath2);
    Assert.assertEquals(2, xattrs.size());
    Assert.assertArrayEquals(newValue1, xattrs.get(name1));
    Assert.assertArrayEquals(value2, xattrs.get(name2));

    // Remove the second snapshot and verify the first and
    // third snapshots' XAttrs have stayed constant
    hdfs.deleteSnapshot(path, snapshotName2);
    xattrs = hdfs.getXAttrs(snapshotPath);
    Assert.assertEquals(1, xattrs.size());
    Assert.assertArrayEquals(value1, xattrs.get(name1));
    xattrs = hdfs.getXAttrs(snapshotPath3);
    Assert.assertEquals(1, xattrs.size());
    Assert.assertArrayEquals(value1, xattrs.get(name1));

    hdfs.deleteSnapshot(path, snapshotName);
    hdfs.deleteSnapshot(path, snapshotName3);
  }

  /**
   * Assert exception of setting xattr on read-only snapshot.
   */
  @Test
  public void testSetXAttrSnapshotPath() throws Exception {
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
    exception.expect(SnapshotAccessControlException.class);
    hdfs.setXAttr(snapshotPath, name1, value1);
  }

  /**
   * Assert exception of removing xattr on read-only snapshot.
   */
  @Test
  public void testRemoveXAttrSnapshotPath() throws Exception {
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    hdfs.setXAttr(path, name1, value1);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
    exception.expect(SnapshotAccessControlException.class);
    hdfs.removeXAttr(snapshotPath, name1);
  }

  /**
   * Test that users can copy a snapshot while preserving its xattrs.
   */
  @Test(timeout = 120000)
  public void testCopySnapshotShouldPreserveXAttrs() throws Exception {
    FileSystem.mkdirs(hdfs, path, FsPermission.createImmutable((short) 0700));
    hdfs.setXAttr(path, name1, value1);
    hdfs.setXAttr(path, name2, value2);
    SnapshotTestHelper.createSnapshot(hdfs, path, snapshotName);
    Path snapshotCopy = new Path(path.toString() + "-copy");
    String[] argv = new String[] { "-cp", "-px",
        snapshotPath.toUri().toString(), snapshotCopy.toUri().toString() };
    int ret = ToolRunner.run(new FsShell(conf), argv);
    assertEquals("cp -px is not working on a snapshot", SUCCESS, ret);

    Map<String, byte[]> xattrs = hdfs.getXAttrs(snapshotCopy);
    assertArrayEquals(value1, xattrs.get(name1));
    assertArrayEquals(value2, xattrs.get(name2));
  }

  /**
   * Initialize the cluster, wait for it to become active, and get FileSystem
   * instances for our test users.
   *
   * @param format if true, format the NameNode and DataNodes before starting up
   * @throws Exception if any step fails
   */
  private static void initCluster(boolean format) throws Exception {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).format(format)
        .build();
    cluster.waitActive();
    hdfs = cluster.getFileSystem();
  }

  /**
   * Restart the cluster, optionally saving a new checkpoint.
   *
   * @param checkpoint boolean true to save a new checkpoint
   * @throws Exception if restart fails
   */
  private static void restart(boolean checkpoint) throws Exception {
    NameNode nameNode = cluster.getNameNode();
    if (checkpoint) {
      NameNodeAdapter.enterSafeMode(nameNode, false);
      NameNodeAdapter.saveNamespace(nameNode);
    }
    shutdown();
    initCluster(false);
  }
}
The Continuing Disaster Of Open Government In Germany

from the surprising-failures dept

Recently, Techdirt noted that the European "database right" could pose a threat to releasing public data there. But that assumes that central governments are at least trying to open things up. A splendid piece by Sebastian Haselbeck on the Open Gov Germany blog, with the self-explanatory title "German government screws up open data," underlines that things can fail because the government itself sabotages transparency moves.

As he recounts, things began so well. The German government commissioned a study on open data, which was published in August last year. It's a massive, 572-page document put together with commendable thoroughness. A key section is the introduction from the German Interior Minister, Hans-Peter Friedrich, who made the following comments:

The [German] federal government has set itself the goal of a more open government and administration. The basis for this is freely available data and information that must be available for others and in standardized formats.

That all sounds great -- open data made freely available in standard formats to promote open government. But then things started to go downhill, culminating in the horribly symbolic decision to name a new site not "Open Government Portal Germany" as originally planned, but just "GovData -- the data portal for Germany". In other words, some data, but without the openness. As Haselbeck comments:

Following the development the last few weeks it seemed clear that the conservative elements in the higher echelons either just did not get what it means to finally go "open government" in the data dimension, or they were just too scared to follow through. … Experts can but shake their heads, and sigh at the squandered opportunities of this government, which would love to be very innovative in economic dimensions, but is actually a very backwards cabinet with lots of conservatives in key positions and a liberal coalition partner that is mostly occupied with its own ultra-low poll numbers.

What's worrying is that this high-profile retreat is part of a larger failure to improve transparency in Germany. Haselbeck explains:

All this adds to a series of disasters in open government in Germany. One is the stubborn denial to join the Open Government Partnership (OGP), along with the partners in crime Austria, Switzerland and Liechtenstein. As an act of spite, they formed the "DACHLi" (the acronym for the countries' licence plate IDs) initiative, a series of workshops and cooperation agreements to mostly push information exchange and open data cooperation in a way that they have nothing to fear from it, and provide ample platforms for lobbyists to talk CIOs into purchasing proprietary IT solutions for "open" government. All the while, you can count the actual attempts for more cultural and managerial change towards openness with one hand.

Another of those disasters is the government's battle against a community-built Freedom of Information platform (fragdenstaat.de) and its failure to make publicly accessible the studies produced as part of the parliamentary research service (after all, paid by the taxpayer). A third thing comes to mind: the failure to ratify the UN Convention against Corruption, along with a handful of other rogue states, because it would require reform of the federal criminal code that would tighten rules for politicians' leeway to accept campaign donations and stricter transparency on their side-jobs. Look it up, Germany is in good company there, even Myanmar is ratifying the convention.

It's sad to see such a generally tech-savvy nation fall behind here, as other countries open up their government and its data ever more deeply. That's a loss not just for German society, through diminished political transparency, but also for the burgeoning number of digital start-ups in cities like Berlin, which are deprived of a key 21st-century raw material: government data.

Follow me @glynmoody on Twitter or identi.ca, and on Google+

Filed Under: germany, open government
//**********************************************************************************
//
// © 2015 Arizona Board of Regents on behalf of the University of Arizona with rights
// granted for USDOT OSADP distribution with the Apache 2.0 open source license.
//
//**********************************************************************************

#pragma once

#include <fstream>
#include <iostream>

using namespace std;

class IDMSGcnt
{
public:
    int TempID;
    int MSGcnt;  // 0-127

public:
    IDMSGcnt()
    {
        TempID = 0;
        MSGcnt = 0;
    }
    IDMSGcnt(int id, int msgcnt)
    {
        TempID = id;
        MSGcnt = msgcnt;
    }
    IDMSGcnt(const IDMSGcnt& that)
    {
        TempID = that.TempID;
        MSGcnt = that.MSGcnt;
    }

public:
    ~IDMSGcnt() {}

    friend ostream& operator<<(ostream& stream, IDMSGcnt e)
    {
        stream << "TempID is: " << e.TempID << ", MSGcnt is: " << e.MSGcnt << endl;
        return stream;
    }
};
module Category.TypedGraph.FinalPullbackComplement where

import           Abstract.Category.FinalPullbackComplement
import           Abstract.Category.FinitaryCategory
import           Category.TypedGraph.Cocomplete ()
import           Data.Graphs as G
import qualified Data.Graphs.Morphism as GM
import           Data.TypedGraph.Morphism

instance FinalPullbackComplement (TypedGraphMorphism a b) where

  -- @
  --       l
  --    K──────▶L
  --    │       V
  --  k │  (1)  │ m
  --    ▼       ▼
  --    D──────▶A
  --       l'
  -- @
  --
  -- This function receives m and l, it creates (k,l') as the
  -- the final pullback complement on (1).
  --
  -- __morphism m must be injective__
  --
  -- The algorithm follows Construction 6 of Sesqui-pushout rewriting.
  -- Available on:
  -- http://www.ti.inf.uni-due.de/publications/koenig/icgt06b.pdf
  --
  -- It is a naive implementation focused on correction and not performance.
  -- Performance may be reasonable for epi pairs rewrite, but poor when large contexts.
  --
  -- The resulting graph D contains a copy of K, a copy of the largest
  -- subgraph of A which is not in the image of m, and a suitable number
  -- of copies of each edge of A incident to a node in m(l(K)):
  -- this has the effect of "cloning" part of A.
  --
  -- This function is divided in four steps,
  -- first two for nodes and the lasts for edges.
  calculateFinalPullbackComplement m l = step4
    where
      typedGraphK = domain l
      typedGraphA = codomain m
      graphK = domain typedGraphK
      graphA = domain typedGraphA
      edgeTypeInK = GM.applyEdgeIdUnsafe typedGraphK
      edgeTypeInA = GM.applyEdgeIdUnsafe typedGraphA
      nodeTypeInK = GM.applyNodeIdUnsafe typedGraphK
      nodeTypeInA = GM.applyNodeIdUnsafe typedGraphA
      typeGraph = codomain typedGraphK

      -- Inits (k:K->D, l':D->A) with D as empty.
      initD = GM.empty empty typeGraph
      initK = buildTypedGraphMorphism typedGraphK initD (GM.empty graphK empty)
      initL' = buildTypedGraphMorphism initD typedGraphA (GM.empty empty graphA)

      -- Step1 adds in D a copy of the nodes of K.
      step1 = foldr updateNodesFromK (initK,initL') nodesAddFromK
      nodesAddFromK = zip (nodeIdsFromDomain l) ([0..]::[Int])
      updateNodesFromK (n,newId) (k,l') = (updatedK2,updatedL')
        where
          newNode = NodeId newId
          typeN = nodeTypeInK n
          appliedL = applyNodeIdUnsafe l n
          appliedA = applyNodeIdUnsafe m appliedL
          updatedK = createNodeOnCodomain newNode typeN k
          updatedK2 = untypedUpdateNodeRelation n newNode updatedK
          updatedL' = createNodeOnDomain newNode typeN appliedA l'

      -- Step2 adds in D the nodes out of the image of m.
      step2 = foldr updateNodesFromA step1 nodesAddFromMatch
      nodesAddFromMatch = zip (orphanTypedNodeIds m) ([(length nodesAddFromK)..]::[Int])
      updateNodesFromA (n,newId) (k,l') = (updatedK,updatedL')
        where
          newNode = NodeId newId
          typeN = nodeTypeInA n
          updatedK = createNodeOnCodomain newNode typeN k
          updatedL' = createNodeOnDomain newNode typeN n l'

      -- Step3 adds in D a copy of the edges of K.
      step3@(_,edgesL') = foldr updateEdgesFromK step2 edgesAddFromK
      edgesAddFromK = zip (edgesFromDomain l) ([0..]::[Int])
      updateEdgesFromK (e,newId) (k,l') = (updatedK2,updatedL')
        where
          newEdge = EdgeId newId
          appliedL = applyEdgeIdUnsafe l (edgeId e)
          appliedA = applyEdgeIdUnsafe m appliedL
          typeE = edgeTypeInK (edgeId e)
          src = applyNodeIdUnsafe k (sourceId e)
          tgt = applyNodeIdUnsafe k (targetId e)
          updatedK = createEdgeOnCodomain newEdge src tgt typeE k
          updatedK2 = updateEdgeRelation (edgeId e) newEdge updatedK
          updatedL' = createEdgeOnDomain newEdge src tgt typeE appliedA l'

      -- Step4 adds in D a replication of edges out of the image of m,
      -- where source and target nodes may have been cloned in D.
      step4 = foldr updateEdgesFromA step3 edgesAddFromMatch
      edgesAddFromMatch = zip edgesFromA ([(length edgesAddFromK)..]::[Int])
        where
          edgesFromA = [(edgeId e, u, v) |
                         e <- orphanTypedEdges m,
                         u <- nodeIdsFromDomain edgesL',
                         v <- nodeIdsFromDomain edgesL',
                         sourceId e == applyNodeIdUnsafe edgesL' u,
                         targetId e == applyNodeIdUnsafe edgesL' v]
      updateEdgesFromA ((e,u,v),newId) (k,l') = (updatedK,updatedL')
        where
          newEdge = EdgeId newId
          typeE = edgeTypeInA e
          updatedK = createEdgeOnCodomain newEdge u v typeE k
          updatedL' = createEdgeOnDomain newEdge u v typeE e l'

  hasFinalPullbackComplement (Monomorphism, _) _ = True
  hasFinalPullbackComplement _ _ =
    error "Final pullback complement is not implemented for non monomorphic matches"
#include <cstdio>

bool tbl[1000000];
int d[100000];

// 10^a, computed recursively.
int pow10(int a) {
    if (!a) return 1;
    return 10 * pow10(a - 1);
}

int main(void) {
    int n;
    scanf("%d", &n);
    for (int i = 0; i < n; i++) scanf("%d", &d[i]);
    for (int k = 0; ; k++) {  // k+1 digits
        // Mark every (k+1)-digit value that occurs as a contiguous
        // run of digits in the input sequence.
        for (int i = 0; i + k < n; i++) {
            int t = 0;
            for (int j = i; j - i <= k; j++) {
                t *= 10;
                t += d[j];
            }
            tbl[t] = true;
        }
        // The answer is the smallest value that never occurs.
        for (int i = 0; i < pow10(k + 1); i++) {
            if (!tbl[i]) {
                printf("%d\n", i);
                return 0;
            }
        }
    }
    return 0;
}
package com.example.im.handler.filter;

import lombok.Data;
import org.tio.core.ChannelContext;
import org.tio.core.ChannelContextFilter;

@Data
public class ExculdeMineChannelContextFilter implements ChannelContextFilter {

    private ChannelContext currentContext;

    @Override
    public boolean filter(ChannelContext channelContext) {
        // Filter out the current user; the message should not be sent back to them.
        if (currentContext.userid.equals(channelContext.userid)) {
            return false;
        }
        return true;
    }
}
// match checks if a given log matches the filter
func (l *LogFilter) match(log *iotextypes.Log) bool {
    addrMatch := len(l.pbFilter.Address) == 0
    if !addrMatch {
        for _, e := range l.pbFilter.Address {
            if e == log.ContractAddress {
                addrMatch = true
                break
            }
        }
    }
    if !addrMatch {
        return false
    }
    if len(l.pbFilter.Topics) > len(log.Topics) {
        return false
    }
    if len(l.pbFilter.Topics) == 0 {
        return true
    }
    for i, e := range l.pbFilter.Topics {
        if e == nil || len(e.Topic) == 0 {
            continue
        }
        target := log.Topics[i]
        match := false
        for _, v := range e.Topic {
            if bytes.Equal(v, target) {
                match = true
                break
            }
        }
        if !match {
            return false
        }
    }
    return true
}
// Go runs the Transaction created by Gem.Begin.
// Running Go will defer control of Rollback and
// Commit functionality to the system.
// If required a user can manually run a transaction using the
// *sql.DB connection.
// Returns the result and whether the transaction was successful.
func (o *Txn) Go() (result Result, success bool) {
    o.Tx, o.result.Error = o.gem.DB.Begin()
    if o.result.Error != nil {
        return o.result, false
    }

    o.gem.txMu.Lock()
    o.gem.tx = o

    o.result = o.funcAction(Transaction{o})

    // Close and clear any statements prepared during the transaction.
    for key, txStmt := range o.gem.txPreparedStatements {
        txStmt.Close()
        o.gem.txPreparedStatements[key] = nil
    }

    if o.result.Error != nil {
        goto rollback
    } else {
        goto commit
    }

rollback:
    {
        o.result.Error = o.Rollback()
        result = o.result
        success = false
        goto done
    }

commit:
    {
        o.result.Error = o.Commit()
        if o.result.Error != nil {
            goto rollback
        }
        result = o.result
        success = true
    }

done:
    o.gem.tx = nil
    o.gem.txMu.Unlock()
    return result, success
}
#!/usr/bin/env python3
#
# Copyright (c) 2019, Arista Networks, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#   Redistributions of source code must retain the above copyright notice,
#   this list of conditions and the following disclaimer.
#
#   Redistributions in binary form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in the
#   documentation and/or other materials provided with the distribution.
#
#   Neither the name of Arista Networks nor the names of its
#   contributors may be used to endorse or promote products derived from
#   this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# 'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#

__author__ = '<NAME>'

# various tools to support data translation

import yaml
import sys
import csv
import os
from time import time as time
from datetime import datetime as datetime

import translate  # this one is used by eval()


def load_yaml(filename, ignore_error=False, debug=True):
    if debug:  # to debug load time for big YAMLs
        print('Loading %s started at: ' % filename, time_stamp())
    try:
        file = open(filename, mode='r')
    except Exception as _:
        if ignore_error:
            pass
        else:
            sys.exit('Can not open %s\nERROR: %s' % (filename, _))
    else:
        try:
            yaml_data = yaml.load(file, Loader=yaml.FullLoader)
            file.close()
        except Exception as _:
            return False
        else:
            if debug:  # to debug load time for big YAMLs
                print('%s was loaded successfully at: ' % filename, time_stamp())
            return yaml_data


class NoAliasDumper(yaml.SafeDumper):
    def ignore_aliases(self, data):
        return True


def write_yaml(filename, d):
    try:
        with open(filename, 'w') as file:
            yaml.SafeDumper = NoAliasDumper
            yaml.safe_dump(d, file, default_flow_style=False)
    except Exception as _:
        sys.exit('Can not create %s\nERROR: %s' % (filename, _))


def time_stamp():
    """
    time_stamp function can be used for debugging or to display timestamp
    for specific event to a user
    :return: returns current system time as a string in Y-M-D H-M-S format
    """
    time_not_formatted = time()
    time_formatted = datetime.fromtimestamp(time_not_formatted).strftime('%Y-%m-%d:%H:%M:%S.%f')
    return time_formatted


def get_realpath(file_or_dir_name, parent_dir=''):
    if os.path.isdir(file_or_dir_name) or os.path.isdir(os.path.dirname(file_or_dir_name)):
        return file_or_dir_name
    else:
        parent_dir_realpath = ''
        try:
            parent_dir_realpath = os.path.realpath(parent_dir)
        except Exception as _:
            pass  # ignore exception
        if parent_dir_realpath:
            realpath = os.path.join(parent_dir_realpath, file_or_dir_name)
        else:
            script_realpath = os.path.realpath(__file__)
            script_dir = os.path.dirname(script_realpath)
            if os.path.isdir(os.path.join(script_dir, parent_dir)):
                realpath = os.path.join(
                    os.path.join(script_dir, parent_dir),
                    file_or_dir_name
                )
            else:
                realpath = os.path.join(script_dir, file_or_dir_name)
        return realpath


def read_from_csv(csv_filename):
    # this function reads CSV and returns list of dictionaries
    row_list = list()
    header_row = list()
    with open(csv_filename) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        for index, row in enumerate(csv_reader):
            if index == 0:
                header_row = row
            if index > 0:
                d = dict()
                for e_index, element in enumerate(row):
                    try:
                        key = header_row[e_index]
                    except Exception as _:
                        key = 'Column_number_{}'.format(e_index)
                    d.update({key: element})
                row_list.append(d)
    return row_list


def run_translation(cfg_yaml_filename):
    cfg = load_yaml(cfg_yaml_filename)['translate_data_config']
    expected_src_prefix = ''
    expected_dst_prefix = ''
    test_src_prefix = ''
    test_dst_prefix = ''
    # if location prefix was changed, use new prefix for later cases
    for translate_case in cfg['translate_cases']:
        if 'expected_src_prefix' in translate_case.keys():
            expected_src_prefix = translate_case['expected_src_prefix']
        if 'expected_dst_prefix' in translate_case.keys():
            expected_dst_prefix = translate_case['expected_dst_prefix']
        if 'test_src_prefix' in translate_case.keys():
            test_src_prefix = translate_case['test_src_prefix']
        if 'test_dst_prefix' in translate_case.keys():
            test_dst_prefix = translate_case['test_dst_prefix']
        if 'mode' in translate_case.keys():
            mode = translate_case['mode']
            if 'expected' in mode:
                src_filename = get_realpath(translate_case['src'], expected_src_prefix)  # used by eval()
                dst_filename = get_realpath(translate_case['dst'], expected_dst_prefix)  # used by eval()
            else:
                src_filename = get_realpath(translate_case['src'], test_src_prefix)  # used by eval()
                dst_filename = get_realpath(translate_case['dst'], test_dst_prefix)  # used by eval()
            # run the translation case
            if mode.startswith('translate/'):  # simple way to keep eval() safe
                eval(mode.replace('/', '.') + '(src_filename, dst_filename)')
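For reference, a minimal sketch of the YAML shape run_translation() consumes, inferred from the keys it reads; the config file name, the case fields, and the translate/some_case entry (which eval() resolves to translate.some_case) are all hypothetical, not taken from the original repository.

# Hypothetical usage sketch for run_translation(); assumes this module is
# importable as `tools` and that translate.some_case(src, dst) exists.
import tools

config_yaml = """
translate_data_config:
  translate_cases:
    - test_src_prefix: input
      test_dst_prefix: output
    - mode: translate/some_case   # eval'd as translate.some_case(src, dst)
      src: devices.csv
      dst: devices.yml
"""

with open('translate_config.yml', 'w') as f:
    f.write(config_yaml)

tools.run_translation('translate_config.yml')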
use proconio::input;

#[allow(clippy::unreadable_literal)]
fn main() {
    input! {
        n: usize,
        mut r#as: [u128; n],
    }

    r#as.sort();
    r#as.push(0xdeadbeef);
    if let Some((prod, _)) = itertools::iterate((1, 0), |&(p, i)| (p * r#as[i], i + 1))
        .take_while(|&(p, _)| p <= 10u128.pow(18))
        .nth(n)
    {
        println!("{}", prod);
    } else {
        println!("-1");
    }
}
def dashboard(request):
    user = request.user
    tables = CafeTable.objects.filter(
        university=user.university,
        table_id__in=user.cafe_table_ids.values_list('table_id', flat=True)
    )
    notifications = Notification.objects.filter(table_id__in=tables)[:9]
    notifications = sorted(notifications, key=attrgetter("date"), reverse=True)
    for notif in notifications:
        notif.date = pytz.utc.localize(notif.date).isoformat()
    users = CoffeeUser.objects.filter(is_staff=False)
    sorted_users = sorted(users, key=attrgetter("points"), reverse=True)
    if len(sorted_users) > 10:
        sorted_users = sorted_users[:9]

    if user.is_staff is False:
        if request.method == 'POST':
            form = StudyBreaksForm(request.POST)
            if form.is_valid():
                mins = form.cleaned_data.get('minutes_studying_for')
                break_time = datetime.datetime.now() + \
                    datetime.timedelta(minutes=mins)
                user.studying_until = break_time
                user.save()

        current_user_points = user.points
        points_level = check_points_treshold(current_user_points)
        link_img = list_coffee_link[int(points_level)]
        name_coffee = list_coffee_name[int(points_level)]

        previous_collectables = []
        index_list = 0
        while index_list < int(points_level):
            previous_collectables.append(list_coffee_link[index_list])
            index_list += 1

        points_to_go_next_collectable = int(how_much_to_go(points_level))
        if (0 < points_to_go_next_collectable < 10):
            n_text = user.first_name + " " + user.last_name + \
                " has less than 10 points to go until their next collectable!"
            notif = Notification(table_id=tables[0], notification_type=1,
                                 text_preview=n_text)
            notif.save()

        if user.studying_until:
            if user.studying_until <= datetime.datetime.now():
                user.studying_until = None
                user.save()
                studying = False
            else:
                studying = True
        else:
            studying = False
    else:
        link_img = ''
        points_to_go_next_collectable = 0
        name_coffee = ''
        previous_collectables = []
        studying = False

    can_set_tasks = True
    if user.tasks_set_today >= 2 and not user.is_staff and \
            user.next_possible_set > datetime.date.today():
        can_set_tasks = False

    tz_date = pytz.utc.localize(user.date_joined).isoformat()
    context = {
        'firstName': user.first_name,
        'lastName': user.last_name,
        'email': user.email,
        'university': user.get_university_display(),
        'dateJoined': tz_date,
        'points': user.points,
        'users': sorted_users,
        'collectable': link_img,
        'pointsToGo': points_to_go_next_collectable,
        'nameCollectable': name_coffee,
        'previousCollectables': previous_collectables,
        'listOfCoffeeLink': list_coffee_link,
        'num_users': get_number_current_users(),
        'break_form': StudyBreaksForm(),
        'studying': studying,
        'pk': user.pk,
        'staff': user.is_staff,
        'can_set_tasks': can_set_tasks,
        'notifications': notifications,
    }
    return render(request, "dashboard.html", context)
""" Model for making Neural Network models for classification of source code. The module provides factory class for making Neural Network models """ import sys import tensorflow from tensorflow.keras import models from tensorflow.keras import layers from tensorflow.keras import initializers from ModelUtils import * class SeqModelFactory(SeqBlockFactory): """ Factory of DNNs for code classification and similarity analysis Provides functions for creating DNNs of different types If the number of lables > 1 the constructed DNN uses; - softmax for classification - sparse_categorical_crossentropy for loss function othervise constructed DNN uses: - sigmoid for classification - binary_crossentropy for loss function """ def __init__(self, tokens, labels): """ Initialize factory for DNNs Parameters: - tokens -- number of types of tokens Actually it is the size (dimensionality) of sequence vectors. For similarity analysis it is twice the number of tokens - labels -- number of labels (classes) """ self.n_labels = labels super(SeqModelFactory, self).__init__(tokens) def denseDNN(self, dense, bias = True, regular = None, optimizer = "adam"): """ Make DNN with dense (fully connected) layers Parameters: - dense -- list of hidden layers widths for all layers except the last one If w_hidden is none or empty list, Linear classifier is constructed - bias -- use or do not use bias on dense layers - regular -- regularizer as a pair (l1, l2) or None if no regularization is applied. Regularization is applied dense layers - optimizer -- training optimizer """ _regularizer = self.computeRegularizer(regular) _dnn = models.Sequential() _dnn.add(layers.Input(shape=(self.n_tokens))) self.addDense(_dnn, dense, bias = bias, regularizer = _regularizer) self.addClassifier(_dnn, self.n_labels, bias = bias, regularizer = _regularizer) return self.compileDNN(_dnn, self.compLoss(self.n_labels), optimizer = optimizer) def cnnDNN(self, convolutions, dense, pool = 'max', conv_act = None, conv_bias = True, dense_bias = True, input_type = "categorical", regular = None, optimizer = "rmsprop", embedding_dim = None, dropout_rate = 0, regul_dense_only = True): """ Make convolutional model Parameters: - convolutions -- specification of convolutional layers as list of tuples: either (kernel width, n filters, stride) or (kernel width, n filters) if stride = 1 - dense -- specification of dense layers as list of their width except the last one - pool -- pooling operation either: 'max' for maximum or 'aver' for average - conv_act -- activation of convolutional layers - conv_bias -- use bias for convolutional layers - dense_bias -- use bias for dense layers - input_type -- type of input layer: - one_hot - one hot coding of input samples It also used for predefined input vectors, even if they are not one-hot. - categorical - categorical coding decoded by preprocessing layer - trainable - categorical coding processed by trainable embedding layer - regular -- regularizer as a pair (l1, l2) or None if no regularization is applied. Regularization is applied to both convolution and dense layers - embedding_dim -- size of embedding vectors It effects trainable embedding only, i.e. 
input_type == "trainable" - dropout_rate -- dropout rate for inserted dropout layer If it is 0 or None no droput layer is inserted - optimizer -- training optimizer - regul_dense_only -- flag to regulirize only dense layers Returns: - compiled dnn """ _regularizer = self.computeRegularizer(regular) _dnn = self.convBlock( convolutions, dense = dense, pool = pool, conv_act =conv_act, conv_bias = conv_bias, dense_bias = dense_bias, input_type = input_type, regularizer = _regularizer, embedding_dim = embedding_dim, dropout_rate = dropout_rate, regul_dense_only = regul_dense_only) self.addClassifier(_dnn, self.n_labels, bias = dense_bias, regularizer = _regularizer) return self.compileDNN(_dnn, self.compLoss(self.n_labels), optimizer = optimizer) #---------------- End of class SeqModelFactory ----------------------------
from typing import List

import numpy as np
from numpy import ndarray
from numpy.linalg import multi_dot

# BaseJoint is assumed to be provided by the surrounding robotics package.


def forward_kinematics(config: List[BaseJoint], joint_coordinates: List[float],
                       subtract_offset=False) -> ndarray:
    if len(config) == len(joint_coordinates):
        # Update each joint's transformation matrix in place.
        for joint, coordinate_value in zip(config, joint_coordinates):
            if subtract_offset:
                joint.mul(joint_value=coordinate_value - joint.zero_offset)
            else:
                joint.mul(joint_value=coordinate_value)
        if len(config) > 1:
            # Chain the per-joint homogeneous transforms.
            return multi_dot([joint.matrix for joint in config])
        return np.array(config[0].matrix)
    raise ValueError('Joint coordinates and joints must be of same length.')
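A minimal sketch of calling this function with a hypothetical stand-in joint; the real BaseJoint interface (mul(), matrix, zero_offset) is only inferred from the call sites above, so FakeRevoluteJoint is an assumption, not the project's joint class.

import numpy as np

class FakeRevoluteJoint:
    """Planar revolute joint: rotation about z followed by a fixed x-offset."""
    def __init__(self, link_length, zero_offset=0.0):
        self.link_length = link_length
        self.zero_offset = zero_offset
        self.matrix = np.eye(4)

    def mul(self, joint_value):
        c, s = np.cos(joint_value), np.sin(joint_value)
        # Rz(theta) followed by a translation of link_length along local x.
        self.matrix = np.array([
            [c, -s, 0, self.link_length * c],
            [s,  c, 0, self.link_length * s],
            [0,  0, 1, 0],
            [0,  0, 0, 1],
        ])

chain = [FakeRevoluteJoint(1.0), FakeRevoluteJoint(0.5)]
tcp_pose = forward_kinematics(chain, [np.pi / 2, -np.pi / 2])
print(tcp_pose[:3, 3])  # end-effector position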
def generate_python_module(model_name: str, model_data: str, ul: UserLevel,
                           use_async: bool) -> str:
    model = SystemModel()
    model.build_from_string(model_data)
    result = generate_header(model_name, use_async)
    typenames = infer_type_names(model, ul)
    result += generate_typedefs(model, typenames, ul, use_async)
    result += generate_device_class(model, typenames, ul, use_async)
    return result
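A hypothetical invocation of this generator; SystemModel, UserLevel and the generate_* helpers live elsewhere in the code base and their exact shapes (including the UserLevel.EXPERT member and the model file name) are assumptions for illustration.

# Sketch only: read a serialized device model, emit a Python module.
with open('device_model.xml') as f:
    model_xml = f.read()

source = generate_python_module('MyDevice', model_xml,
                                ul=UserLevel.EXPERT, use_async=False)
with open('mydevice.py', 'w') as f:
    f.write(source)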
In case the Heartbleed bug's name hasn't already convinced you of the seriousness of this security vulnerability, allow me to make this clear: you should change the password you use for every website you've visited in the last two years.

That message has been repeated ad nauseam since the vulnerability was first revealed earlier this week. Tumblr has asked its users to change their passwords. Mozilla has advised Firefox users who rely on the same password for multiple sites to do the same. So have the New York Times, the Wire, and countless other news sites. Again, in case the bleeding heart metaphor wasn't enough to convince you that this is a real problem: change all of your passwords. Now.

That's easier said than done, of course. While there are various tools that can generate strong passwords and keep them in sync across multiple platforms, there isn't an "Oh shit!" button that can automatically reset all of those passwords when something like this happens. It's up to you to remember all of the websites you've visited, the passwords you used for those sites, and to create new passwords that anyone knowing your old ones won't be able to guess.

That's not necessarily a bad thing: having to manually change the passwords could help protect against any potential flaws hiding in the generators used by tools like 1Password or LastPass. (Note: I'm not saying the tools have flaws, I'm just saying they hypothetically could, company representatives.)

The good news is that passwords for services like Facebook and Gmail can be changed. It would be much harder to protect against compromised biometric security measures -- what are you gonna do, burn your fingertips and tattoo some new patterns onto them? Having to change all of your passwords sucks. Not being able to adapt to compromises in the security measures that protect all of your personal information, however, would be even worse.

Reactions from around the Web

The Globe and Mail reports that many companies can't know if their information has been compromised:

'I don't think anyone that had been using this technology is in a position to definitively say they weren't compromised,' [Codenomicon CEO David] Chartier said. Chartier and other computer security experts are advising people to consider changing all their online passwords. 'I would change every password everywhere because it's possible something was sniffed out,' said Wolfgang Kandek, chief technology officer for Qualys, a maker of security-analysis software. 'You don't know because an attack wouldn't have left a distinct footprint.'

The Los Angeles Times explains why you should change all passwords instead of waiting for a company to say that their servers were compromised:

The Heartbleed bug affects only one version of OpenSSL, and a fix for the problem has already been issued. But the vulnerability was only recently discovered, and the affected version of OpenSSL has been around for two years. It is also impossible to trace whether a hacker has taken advantage of the bug to steal data from any websites and online services that were using the vulnerable version of OpenSSL.

Vox.com notes that changing your passwords only helps if you don't use the same password on vulnerable sites:

Unfortunately, there's nothing users can do to protect themselves if they visit a vulnerable website. The administrators of vulnerable websites will need to upgrade their software before users will be protected. However, once an affected website has fixed the problem on their end, users can protect themselves by changing their passwords. Attackers might have intercepted user passwords in the meantime, and Felten says there's probably no way for users to tell whether anyone intercepted their passwords.

The Wire imagines the two types of people who might be interested in exploiting this bug:

You know, anyone with basic programming skills who might want some sensitive user data at their fingertips. Or, as many have suggested, there are some government agencies known to have a fondness for collecting user information and web traffic in bulk. If they knew about it before its exposure, Heartbleed could have been a big Christmas present to those efforts.

Pando's David Sirota wrote about the dangers of biometric security measures when Apple announced TouchID last year:

However, when the success of the iPhone inevitably leads to a future in which lots of different technologies in your life are locked and unlocked by a finite number of biometrics, then far more than your phone is at risk. The scale of such biometric security systems would mean your whole life could be held hostage because the locks and keys have been fundamentally changed. Think about it in practical terms. Whereas in today's password-based system you can protect yourself after a security breach with a simple password change, in tomorrow's biometric-based system, you have far fewer - if any - ways to protect yourself after a security breach. That's because you cannot so easily change your fingers, your eyes or your face. They are basically permanent. Yes, it's true - security-wise, those biological characteristics may (and I stress "may") be less vulnerable to a hack than a password. But if and when they are hacked in a society reorganized around biometric security systems, those systems allow for far less damage control than does a password-based system. In effect, your physical identity is stolen - and you can't get it back.

[Image via Wikimedia
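Since the article repeatedly tells readers to create strong new passwords, a minimal sketch of a generator using only the Python standard library, not affiliated with 1Password or LastPass mentioned above and not from the original article:

# Random-password generator using the cryptographically secure `secrets`
# module (Python 3.6+); length and alphabet are arbitrary choices.
import secrets
import string

def generate_password(length=20):
    alphabet = string.ascii_letters + string.digits + string.punctuation
    return ''.join(secrets.choice(alphabet) for _ in range(length))

print(generate_password())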
from time import sleep
from typing import TYPE_CHECKING, Dict, List, Optional

import boto3
from botocore.config import Config
from mypy_boto3_autoscaling import AutoScalingClient
from mypy_boto3_ec2 import EC2Client
from mypy_boto3_ec2.type_defs import NetworkInterfaceTypeDef
from mypy_boto3_logs import CloudWatchLogsClient

from opta.exceptions import UserErrors
from opta.module_processors.base import ModuleProcessor
from opta.utils import logger

if TYPE_CHECKING:
    from opta.layer import Layer
    from opta.module import Module


class AwsEksProcessor(ModuleProcessor):
    def __init__(self, module: "Module", layer: "Layer"):
        if (module.aliased_type or module.type) != "aws-eks":
            raise Exception(
                f"The module {module.name} was expected to be of type aws eks"
            )
        super(AwsEksProcessor, self).__init__(module, layer)

    def post_delete(self, module_idx: int) -> None:
        providers = self.layer.gen_providers(0)
        region = providers["provider"]["aws"]["region"]
        self.cleanup_cloudwatch_log_group(region)
        self.cleanup_dangling_enis(region)

    def cleanup_cloudwatch_log_group(self, region: str) -> None:
        logger.info(
            "Seeking dangling cloudwatch log group for k8s cluster just destroyed."
        )
        client: CloudWatchLogsClient = boto3.client(
            "logs", config=Config(region_name=region)
        )
        log_group_name = f"/aws/eks/opta-{self.layer.name}/cluster"
        log_groups = client.describe_log_groups(logGroupNamePrefix=log_group_name)
        if len(log_groups["logGroups"]) == 0:
            return
        logger.info(
            f"Found dangling cloudwatch log group {log_group_name}. Deleting it now"
        )
        client.delete_log_group(logGroupName=log_group_name)
        sleep(3)
        log_groups = client.describe_log_groups(logGroupNamePrefix=log_group_name)
        if len(log_groups["logGroups"]) != 0:
            logger.warning(
                f"Cloudwatch Log group {log_group_name} has recreated itself. "
                "Not stopping the destroy, but you will wanna check this out."
            )

    def cleanup_dangling_enis(self, region: str) -> None:
        client: EC2Client = boto3.client("ec2", config=Config(region_name=region))
        vpcs = client.describe_vpcs(
            Filters=[
                {"Name": "tag:layer", "Values": [self.layer.name]},
                {"Name": "tag:opta", "Values": ["true"]},
            ]
        )["Vpcs"]
        if len(vpcs) == 0:
            logger.debug(f"Opta vpc for layer {self.layer.name} not found")
            return
        elif len(vpcs) > 1:
            logger.debug(
                f"Weird, found multiple vpcs for layer {self.layer.name}: "
                f"{[x['VpcId'] for x in vpcs]}"
            )
            return
        vpc = vpcs[0]
        vpc_id = vpc["VpcId"]
        dangling_enis: List[NetworkInterfaceTypeDef] = []
        next_token = None
        logger.info("Seeking dangling enis from k8s cluster just destroyed")
        while True:
            if next_token is None:
                describe_enis = client.describe_network_interfaces(
                    Filters=[{"Name": "vpc-id", "Values": [vpc_id]}]
                )
            else:
                describe_enis = client.describe_network_interfaces(  # type: ignore
                    Filters=[{"Name": "vpc-id", "Values": [vpc_id]}],
                    NextToken=next_token,
                )
            for eni in describe_enis["NetworkInterfaces"]:
                if eni["Description"] == f"Amazon EKS opta-{self.layer.name}" or (
                    eni["Description"].startswith("aws-K8S")
                    and eni["Status"] == "available"
                ):
                    logger.info(
                        f"Identified dangling EKS network interface "
                        f"{eni['NetworkInterfaceId']}"
                    )
                    dangling_enis.append(eni)
            next_token = describe_enis.get("NextToken", None)
            if next_token is None:
                break
        for eni in dangling_enis:
            logger.info(
                f"Now deleting dangling network interface {eni['NetworkInterfaceId']}"
            )
            client.delete_network_interface(
                NetworkInterfaceId=eni["NetworkInterfaceId"]
            )

    def post_hook(self, module_idx: int, exception: Optional[Exception]) -> None:
        if exception is not None or not self.module.data.get("enable_metrics", False):
            logger.debug(
                "Not enabling metrics for default node group's autoscaling group"
            )
            return
        providers = self.layer.gen_providers(0)
        region = providers["provider"]["aws"]["region"]
        autoscaling_client: AutoScalingClient = boto3.client(
            "autoscaling", config=Config(region_name=region)
        )
        kwargs: Dict[str, str] = {}
        while True:
            response = autoscaling_client.describe_auto_scaling_groups(
                **kwargs  # type: ignore
            )
            current_token = response.get("NextToken", "")
            kwargs["NextToken"] = current_token
            current_groups = response["AutoScalingGroups"]
            cluster_name = f"opta-{self.layer.root().name}"
            for group in current_groups:
                tag_dict = {x["Key"]: x["Value"] for x in group["Tags"]}
                if tag_dict.get(
                    f"kubernetes.io/cluster/{cluster_name}"
                ) == "owned" and tag_dict.get("eks:nodegroup-name", "").startswith(
                    f"{cluster_name}-default"
                ):
                    group_name = group["AutoScalingGroupName"]
                    logger.debug(
                        f"Enabling metrics for autoscaling group {group_name}"
                    )
                    autoscaling_client.enable_metrics_collection(
                        AutoScalingGroupName=group_name, Granularity="1Minute"
                    )
                    return None
            if current_token == "":  # nosec
                break

    def process(self, module_idx: int) -> None:
        aws_base_modules = self.layer.get_module_by_type("aws-base", module_idx)
        if len(aws_base_modules) == 0:
            raise UserErrors(
                "Could not find aws base module in this opta yaml-- "
                "you need to have it for eks to work"
            )
        aws_base_module = aws_base_modules[0]
        self.module.data[
            "private_subnet_ids"
        ] = f"${{{{module.{aws_base_module.name}.private_subnet_ids}}}}"
        self.module.data[
            "kms_account_key_arn"
        ] = f"${{{{module.{aws_base_module.name}.kms_account_key_arn}}}}"
        super(AwsEksProcessor, self).process(module_idx)
// Accepts one 10 ms block of input audio (i.e., sample_rate_hz() / 100 *
// num_channels() samples). Multi-channel audio must be sample-interleaved.
// If successful, the encoder produces zero or more bytes of output in
// |encoded|, and provides the number of encoded bytes in |encoded_bytes|.
// In case of error, false is returned, otherwise true. It is an error for the
// encoder to attempt to produce more than |max_encoded_bytes| bytes of
// output.
bool Encode(uint32_t timestamp,
            const int16_t* audio,
            size_t num_samples_per_channel,
            size_t max_encoded_bytes,
            uint8_t* encoded,
            EncodedInfo* info) {
  CHECK_EQ(num_samples_per_channel,
           static_cast<size_t>(sample_rate_hz() / 100));
  bool ret = EncodeInternal(timestamp, audio, max_encoded_bytes, encoded, info);
  CHECK_LE(info->encoded_bytes, max_encoded_bytes);
  return ret;
}
package ekiden

import (
    "context"
    "errors"

    api "github.com/oasislabs/oasis-gateway/ekiden/grpc"
    "google.golang.org/grpc"
)

type Runtime struct {
    conn *grpc.ClientConn
}

func DialRuntimeContext(ctx context.Context, url string) (*Runtime, error) {
    transport := grpc.WithInsecure()
    conn, err := grpc.DialContext(ctx, url, transport)
    if err != nil {
        return nil, err
    }

    return &Runtime{conn: conn}, nil
}

// Submit a transaction to the ekiden node and handle the response
func (r *Runtime) Submit(ctx context.Context, req *SubmitRequest) (*SubmitResponse, error) {
    p, err := MarshalRequest(&RequestPayload{
        Method: req.Method,
        Args:   req.Data,
    })
    if err != nil {
        return nil, err
    }

    runtime := api.NewRuntimeClient(r.conn)
    res, err := runtime.SubmitTx(ctx, &api.SubmitTxRequest{
        RuntimeId: req.RuntimeID,
        Data:      p,
    })
    if err != nil {
        return nil, err
    }

    var payload ResponsePayload
    if err := UnmarshalResponse(res.Result, &payload); err != nil {
        return nil, err
    }
    if len(payload.Error) > 0 {
        return nil, errors.New(payload.Error)
    }

    return &SubmitResponse{Result: payload}, nil
}

// EthereumTransaction submits an ethereum transaction to the ekiden node
// via Submit and handles the response
func (r *Runtime) EthereumTransaction(
    ctx context.Context,
    req *EthereumTransactionRequest,
) (*EthereumTransactionResponse, error) {
    res, err := r.Submit(ctx, &SubmitRequest{
        Method:    "ethereum_transaction",
        RuntimeID: req.RuntimeID,
        Data:      req.Data,
    })
    if err != nil {
        return nil, err
    }

    return &EthereumTransactionResponse{Result: res.Result}, nil
}
/**
 * Instructs the receiver to write its data to the adaptor for external storage.
 * @param adaptor The data adaptor corresponding to this object's data node.
 */
public void write( final DataAdaptor adaptor ) {
    adaptor.writeNodes( RULES );
    adaptor.writeNode( FILE_WATCHER );
    adaptor.writeNode( LAUNCHER );
}
class SessionProvider:
    """A session provider is used to obtain boto3 Sessions for accounts.

    This is an abstract base class and cannot be instantiated directly.

    Implementors must ensure that the boto3 Sessions returned have the
    necessary credentials included. The Session object should be ready
    to use by the user upon request.
    """

    def session(self, acct_id):
        """Returns a boto3 Session with credentials for the requested account.

        The `acct_id` is a string containing the AWS account ID.

        The returned boto3 Session object is ready to use and loaded
        with the requested credentials.
        """
        raise NotImplementedError
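One possible implementation, not taken from the original source: assume an IAM role in the target account via STS. The role name and session name below are made-up defaults.

import boto3

class AssumeRoleSessionProvider(SessionProvider):
    """SessionProvider sketch that assumes a role in each target account."""

    def __init__(self, role_name="OrganizationAccountAccessRole"):
        self.role_name = role_name  # hypothetical role present in each account
        self._sts = boto3.client("sts")

    def session(self, acct_id):
        resp = self._sts.assume_role(
            RoleArn=f"arn:aws:iam::{acct_id}:role/{self.role_name}",
            RoleSessionName="session-provider",
        )
        creds = resp["Credentials"]
        # Build a Session preloaded with the temporary credentials.
        return boto3.Session(
            aws_access_key_id=creds["AccessKeyId"],
            aws_secret_access_key=creds["SecretAccessKey"],
            aws_session_token=creds["SessionToken"],
        )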
// IsDisjoint returns true if s shares no elements with other.
// Note that the empty set is disjoint with everything.
func (s String) IsDisjoint(other String) bool {
    for k := range s {
        if _, ok := other[k]; ok {
            return false
        }
    }
    return true
}
// Initialize `PrettyChartNameList` for usage information.
func init() {
    chartTemplateNames := make([]string, 0, len(ChartTemplates))
    for name := range ChartTemplates {
        chartTemplateNames = append(chartTemplateNames, name)
    }
    PrettyChartNameList = MakePrettyChartNameList(chartTemplateNames...)
}
package utils

import "regexp"

// IsImage reports whether the given MIME type denotes an image.
func IsImage(mimeType string) bool {
    isMatch, _ := regexp.MatchString("^image/[A-Za-z]{3,4}$", mimeType)
    return isMatch
}
Watershed Processes and Aquatic Resources: A Literature Review

The ways that energy and materials are produced, stored, and moved across the land are naturally occurring processes, and they have been the primary drivers shaping the development of habitat. In turn, human development of the land can significantly alter the magnitude and timing of the processes themselves, and these alterations can cause changes in habitat formation and stability downstream and downslope from the original disturbance. Most commonly, this effect is mediated by hydraulically driven processes: the delivery and routing of water, sediment, large and small wood, nutrients, and toxicants.
// Tests that ContentScriptTracker detects content scripts injected via
// <webview> (aka GuestView) APIs.  This test covers a scenario where the
// `addContentScripts` API is called in the middle of the test - after
// a matching guest content has already loaded (no content scripts there)
// but before a matching about:blank guest navigation happens (need to detect
// content scripts there).
IN_PROC_BROWSER_TEST_F(ContentScriptTrackerAppBrowserTest,
                       WebViewContentScriptForLateAboutBlank) {
  TestExtensionDir dir;
  const char kManifest[] = R"(
      {
        "name": "ContentScriptTrackerBrowserTest - App",
        "version": "1.0",
        "manifest_version": 2,
        "permissions": ["*://*/*", "webview"],
        "app": {
          "background": {
            "scripts": ["background_script.js"]
          }
        }
      } )";
  dir.WriteManifest(kManifest);
  const char kBackgroundScript[] = R"(
      chrome.app.runtime.onLaunched.addListener(function() {
        chrome.app.window.create('page.html', {}, function () {});
      });
  )";
  dir.WriteFile(FILE_PATH_LITERAL("background_script.js"), kBackgroundScript);
  const char kPage[] = R"(
      <div id="webview-tag-container"></div>
  )";
  dir.WriteFile(FILE_PATH_LITERAL("page.html"), kPage);
  const Extension* app = LoadAndLaunchApp(dir.UnpackedPath());
  ASSERT_TRUE(app);
  content::WebContents* app_contents = GetFirstAppWindowWebContents();
  ASSERT_TRUE(content::WaitForLoadStop(app_contents));

  content::WebContents* guest_contents = nullptr;
  {
    const char kWebViewInjectionScriptTemplate[] = R"(
        document.querySelector('#webview-tag-container').innerHTML =
            '<webview style="width: 100px; height: 100px;"></webview>';
        var webview = document.querySelector('webview');
        webview.src = $1;
    )";
    GURL guest_url1(embedded_test_server()->GetURL("foo.com", "/title1.html"));
    content::WebContentsAddedObserver guest_contents_observer;
    ASSERT_TRUE(ExecuteScript(
        app_contents,
        content::JsReplace(kWebViewInjectionScriptTemplate, guest_url1)));
    guest_contents = guest_contents_observer.GetWebContents();
    EXPECT_TRUE(WaitForLoadStop(guest_contents));
  }

  content::RenderProcessHost* guest_process =
      guest_contents->GetMainFrame()->GetProcess();
  EXPECT_FALSE(ContentScriptTracker::DidProcessRunContentScriptFromExtension(
      *guest_process, app->id()));

  {
    const char kContentScriptDeclarationScriptTemplate[] = R"(
        var webview = document.querySelector('webview');
        webview.addContentScripts([{
            name: 'rule',
            all_frames: true,
            match_about_blank: true,
            matches: ['*://foo.com/*'],
            js: { code: $1 },
            run_at: 'document_end'}]);
    )";
    const char kContentScript[] = R"(
        chrome.test.sendMessage("Hello from content script!");
    )";
    std::string script = content::JsReplace(
        kContentScriptDeclarationScriptTemplate, kContentScript);

    UserScriptManager* user_script_manager =
        ExtensionSystem::Get(guest_process->GetBrowserContext())
            ->user_script_manager();
    ExtensionUserScriptLoader* user_script_loader =
        user_script_manager->GetUserScriptLoaderForExtension(app->id());
    ContentScriptLoadWaiter content_script_load_waiter(user_script_loader);
    content::ExecuteScriptAsync(app_contents, script);
    content_script_load_waiter.Wait();
  }

  {
    ExtensionTestMessageListener listener("Hello from content script!", false);
    content::TestNavigationObserver nav_observer(guest_contents);
    const char kAboutBlankScript[] = R"(
        var f = document.createElement('iframe');
        f.src = 'about:blank';
        document.body.appendChild(f);
    )";
    content::ExecuteScriptAsync(guest_contents, kAboutBlankScript);
    nav_observer.Wait();
    ASSERT_TRUE(listener.WaitUntilSatisfied());
  }

  EXPECT_TRUE(ContentScriptTracker::DidProcessRunContentScriptFromExtension(
      *guest_process, app->id()));
}
/*---------------------------------------------------------------------------------------------
 *  Copyright (c) Jinan Tony Robotics Co., Ltd. All rights reserved.
 *  Author: <NAME>
 *--------------------------------------------------------------------------------------------*/

import { TPromise } from 'vs/base/common/winjs.base';
import fs = require('fs');

export interface Distro {
    id: number;
    name: string;
    url: string;
}

export interface Package {
    id: number;
    isMetaPackage: boolean;
    name: string;
    version: string;
    distro: string;
    url: string;
    status: string;
    isInstalled: boolean;
    description: string;
    homepage: string;
    isAvailable: boolean;
}

export class PackageDb {
    private filename: string;
    private data: any;

    constructor(filename: string) {
        this.filename = filename;
        try {
            this.data = JSON.parse(fs.readFileSync(filename, 'utf8'));
        } catch (e) {
            this.data = [];
        }
    }

    private initPackage(row: any): Package {
        return {
            id: row.id,
            isMetaPackage: row.isMetaPackage,
            name: row.name,
            version: row.version,
            distro: row.distro,
            url: row.url,
            status: row.status,
            isInstalled: row.isInstalled,
            description: row.description,
            homepage: row.homepage,
            isAvailable: row.isAvailable
        };
    }

    public getMetaPackagesByLikename(partition: number, distro: string, name: string): TPromise<Array<Package>> {
        var self = this;
        var reg = new RegExp(name.replace(/\\/g, '\\\\'));
        return new TPromise<Array<Package>>((complete, fail) => {
            var result = [];
            for (var item of self.data) {
                if (reg.test(String(item.name)) && item.isMetaPackage) {
                    result.push(self.initPackage(item));
                }
            }
            complete(result);
        });
    }

    public getPackagesByLikename(partition: number, distro: string, name: string): TPromise<Array<Package>> {
        var self = this;
        var reg = new RegExp(name.replace(/\\/g, '\\\\'));
        return new TPromise<Array<Package>>((complete, fail) => {
            var result = [];
            for (var item of self.data) {
                if (reg.test(String(item.name))) {
                    result.push(self.initPackage(item));
                }
            }
            complete(result);
        });
    }

    public updatePackageIsInstalled(id: number, isInstalled: boolean): TPromise<void> {
        var self = this;
        return new TPromise<void>((complete, fail) => {
            for (var item of self.data) {
                if (item.id === id) {
                    item.isInstalled = isInstalled;
                    try {
                        fs.writeFileSync(self.filename, JSON.stringify(self.data, null, 2));
                    } catch (e) {
                    }
                    break;
                }
            }
        });
    }
}
import random


def random_with_bias(messages: list, word: str):
    # Split messages into those whose final word matches `word`
    # and those whose final word does not.
    last_word_messages = []
    non_last_word_messages = []
    for m in messages:
        words = m.split()
        if words[-1].lower() == word:
            last_word_messages.append(m)
        else:
            non_last_word_messages.append(m)
    if not last_word_messages:
        return random.choice(non_last_word_messages)
    elif not non_last_word_messages:
        return random.choice(last_word_messages)
    else:
        # Pick from the matching bucket only about 1 time in 6.
        return random.choice(last_word_messages
                             if random.randint(0, 5) == 0
                             else non_last_word_messages)
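A quick usage sketch with made-up messages: entries ending in the target word are chosen only about one time in six.

msgs = ["I like my cat", "dogs are great", "look at the dog"]
print(random_with_bias(msgs, "cat"))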
def cache(self, con):
    """Return a connection to the pool's idle cache."""
    try:
        if self._reset == 2:
            # Hard reset: roll back any open transaction unconditionally.
            con.reset()
        else:
            # Soft reset: roll back only if requested or a transaction
            # is still open.
            if self._reset or con._transaction:
                try:
                    con.rollback()
                except Exception:
                    pass
        self._cache.put(con, 0)
    except Full:
        # The idle cache is full, so discard the connection.
        con.close()
    if self._connections:
        # Signal that a connection slot has become available again.
        self._connections.release()
from typing import Optional, Collection

from injectable import InjectionContainer
from injectable.container.injectable import Injectable
from injectable.constants import DEFAULT_NAMESPACE


def register_injectables(
    injectables: Collection[Injectable],
    klass: Optional[type] = None,
    qualifier: Optional[str] = None,
    namespace: str = DEFAULT_NAMESPACE,
    propagate: bool = False,
):
    """
    Utility function to manually register injectables in a given namespace
    for the provided class and/or qualifier.

    At least one of ``klass`` or ``qualifier`` parameters need to be defined.
    Otherwise a :class:`ValueError` will be raised.

    :param injectables: a collection of injectables to register.
    :param klass: (optional) the class for which the injectables will be
            registered. This parameter is optional as long as ``qualifier`` is
            provided. Injectables registering won't be propagated to base
            classes unless otherwise specified by the ``propagate`` parameter.
            Defaults to None.
    :param qualifier: (optional) the qualifier for which the injectables will
            be registered. This parameter is optional as long as ``klass`` is
            provided. Defaults to None.
    :param namespace: (optional) namespace in which the injectable will be
            registered. Defaults to
            :const:`injectable.constants.DEFAULT_NAMESPACE`.
    :param propagate: (optional) When True injectables registering will be
            propagated to base classes of ``klass`` recursively. Setting this
            parameter to True and not specifying the parameter ``klass`` will
            raise a :class:`ValueError`. Defaults to False.

    Usage::

      >>> from injectable import Injectable
      >>> from injectable.testing import register_injectables
      >>> injectable = Injectable(constructor=lambda: 42)
      >>> register_injectables({injectable}, qualifier="foo")

    .. versionadded:: 3.3.0
    """
    if not klass and not qualifier:
        raise ValueError(
            "At least one of 'klass' or 'qualifier' parameters must to be defined"
        )
    if propagate and not klass:
        raise ValueError(
            "When 'propagate' is True the parameter 'klass' must be defined"
        )
    namespace = InjectionContainer._get_namespace_entry(namespace)
    for injectable in injectables:
        namespace.register_injectable(injectable, klass, qualifier, propagate)
def get_directions(t1, t2):
    # Compass direction of the step from point t1 to point t2
    # (the path is axis-aligned, so one coordinate delta is zero).
    xx = t2[0] - t1[0]
    yy = t2[1] - t1[1]
    if xx == 0:
        if yy > 0:
            return 'north'
        else:
            return 'south'
    else:
        if xx > 0:
            return 'east'
        else:
            return 'west'


def tell_ture_false(d1, d2):
    # True if going from heading d1 to heading d2 is a left turn,
    # False if it is a right turn.
    if d1 == 'north':
        if d2 == 'west':
            return True
        elif d2 == 'east':
            return False
    elif d1 == 'south':
        if d2 == 'west':
            return False
        elif d2 == 'east':
            return True
    elif d1 == 'east':
        if d2 == 'north':
            return True
        elif d2 == 'south':
            return False
    elif d1 == 'west':
        if d2 == 'south':
            return True
        elif d2 == 'north':
            return False


n = int(raw_input())
direction_list = []
ans = 0
for i in xrange(n):
    x, y = map(int, raw_input().split())
    direction_list.append((x, y))

# Count the left turns along the polyline.
for i in xrange(n - 2):
    d1 = get_directions(direction_list[i], direction_list[i + 1])
    d2 = get_directions(direction_list[i + 1], direction_list[i + 2])
    if tell_ture_false(d1, d2) == True:
        ans += 1
print ans
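A small worked example (kept in Python 2 to match the script above): walking three sides of a unit square counter-clockwise produces a left turn at each interior corner.

# Reuses get_directions / tell_ture_false defined above.
points = [(0, 0), (1, 0), (1, 1), (0, 1)]
turns = 0
for i in xrange(len(points) - 2):
    d1 = get_directions(points[i], points[i + 1])
    d2 = get_directions(points[i + 1], points[i + 2])
    if tell_ture_false(d1, d2):
        turns += 1
print turns  # 2 left turns: east->north and north->west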
/** An object defining a single tokenizaion rule. */ public class TokenDictRule extends GenericModel { protected String text; protected List<String> tokens; protected List<String> readings; @SerializedName("part_of_speech") protected String partOfSpeech; /** Builder. */ public static class Builder { private String text; private List<String> tokens; private List<String> readings; private String partOfSpeech; private Builder(TokenDictRule tokenDictRule) { this.text = tokenDictRule.text; this.tokens = tokenDictRule.tokens; this.readings = tokenDictRule.readings; this.partOfSpeech = tokenDictRule.partOfSpeech; } /** Instantiates a new builder. */ public Builder() {} /** * Instantiates a new builder with required properties. * * @param text the text * @param tokens the tokens * @param partOfSpeech the partOfSpeech */ public Builder(String text, List<String> tokens, String partOfSpeech) { this.text = text; this.tokens = tokens; this.partOfSpeech = partOfSpeech; } /** * Builds a TokenDictRule. * * @return the tokenDictRule */ public TokenDictRule build() { return new TokenDictRule(this); } /** * Adds an tokens to tokens. * * @param tokens the new tokens * @return the TokenDictRule builder */ public Builder addTokens(String tokens) { com.ibm.cloud.sdk.core.util.Validator.notNull(tokens, "tokens cannot be null"); if (this.tokens == null) { this.tokens = new ArrayList<String>(); } this.tokens.add(tokens); return this; } /** * Adds an readings to readings. * * @param readings the new readings * @return the TokenDictRule builder */ public Builder addReadings(String readings) { com.ibm.cloud.sdk.core.util.Validator.notNull(readings, "readings cannot be null"); if (this.readings == null) { this.readings = new ArrayList<String>(); } this.readings.add(readings); return this; } /** * Set the text. * * @param text the text * @return the TokenDictRule builder */ public Builder text(String text) { this.text = text; return this; } /** * Set the tokens. Existing tokens will be replaced. * * @param tokens the tokens * @return the TokenDictRule builder */ public Builder tokens(List<String> tokens) { this.tokens = tokens; return this; } /** * Set the readings. Existing readings will be replaced. * * @param readings the readings * @return the TokenDictRule builder */ public Builder readings(List<String> readings) { this.readings = readings; return this; } /** * Set the partOfSpeech. * * @param partOfSpeech the partOfSpeech * @return the TokenDictRule builder */ public Builder partOfSpeech(String partOfSpeech) { this.partOfSpeech = partOfSpeech; return this; } } protected TokenDictRule(Builder builder) { com.ibm.cloud.sdk.core.util.Validator.notNull(builder.text, "text cannot be null"); com.ibm.cloud.sdk.core.util.Validator.notNull(builder.tokens, "tokens cannot be null"); com.ibm.cloud.sdk.core.util.Validator.notNull( builder.partOfSpeech, "partOfSpeech cannot be null"); text = builder.text; tokens = builder.tokens; readings = builder.readings; partOfSpeech = builder.partOfSpeech; } /** * New builder. * * @return a TokenDictRule builder */ public Builder newBuilder() { return new Builder(this); } /** * Gets the text. * * <p>The string to tokenize. * * @return the text */ public String text() { return text; } /** * Gets the tokens. * * <p>Array of tokens that the `text` field is split into when found. * * @return the tokens */ public List<String> tokens() { return tokens; } /** * Gets the readings. * * <p>Array of tokens that represent the content of the `text` field in an alternate character * set. 
* * @return the readings */ public List<String> readings() { return readings; } /** * Gets the partOfSpeech. * * <p>The part of speech that the `text` string belongs to. For example `noun`. Custom parts of * speech can be specified. * * @return the partOfSpeech */ public String partOfSpeech() { return partOfSpeech; } }
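For orientation, a minimal sketch of how this builder is meant to be used. The rule text, tokens, and reading below are invented example values, and the snippet assumes TokenDictRule is on the classpath:

import java.util.Arrays;

public class TokenDictRuleExample {
  public static void main(String[] args) {
    // Build a rule that splits a (hypothetical) compound term into two
    // tokens and attaches one alternate reading.
    TokenDictRule rule =
        new TokenDictRule.Builder("suplantern", Arrays.asList("sup", "lantern"), "noun")
            .addReadings("sup lantern")
            .build();

    System.out.println(rule.text());         // suplantern
    System.out.println(rule.tokens());       // [sup, lantern]
    System.out.println(rule.partOfSpeech()); // noun
  }
}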
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintWriter;

/**
 * Reads an n x m character grid and prints the smallest axis-aligned
 * sub-rectangle that contains every '*' cell.
 */
public class Driver {
    public static void main(String[] args) {
        try {
            BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
            PrintWriter pw = new PrintWriter(System.out);

            String[] str = br.readLine().trim().split(" ");
            int n = Integer.parseInt(str[0]);
            int m = Integer.parseInt(str[1]);

            String[] out = new String[n];
            for(int i = 0; i < n; i++){
                out[i] = br.readLine().trim();
            }

            // Track the bounding box of all '*' cells.
            int leftMin = m - 1;
            int rightMax = 0;
            int topMin = n - 1;
            int bottomMax = 0;
            for(int i = 0; i < n; i++){
                for(int j = 0; j < out[i].length(); j++){
                    if(out[i].charAt(j) == '*'){
                        if(j < leftMin){
                            leftMin = j;
                        }
                        if(j > rightMax){
                            rightMax = j;
                        }
                        if(i < topMin){
                            topMin = i;
                        }
                        if(i > bottomMax){
                            bottomMax = i;
                        }
                    }
                }
            }

            // Print only the rows and columns inside the bounding box.
            for(int i = 0; i < n; i++){
                if(i >= topMin && i <= bottomMax){
                    StringBuilder b = new StringBuilder();
                    for(int j = 0; j < out[i].length(); j++){
                        if(j >= leftMin && j <= rightMax){
                            b.append(out[i].charAt(j));
                        }
                    }
                    pw.println(b);
                }
            }
            pw.close();
        } catch(Exception ex) {
            System.out.println("ERROR");
        }
    }
}
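A small worked example (grid invented for illustration): the '*' cells below span rows 1-2 and columns 1-3, so only that sub-rectangle is printed.

Input:   4 5
         .....
         .***.
         ...*.
         .....

Output:  ***
         ..*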
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Simple echo test server to test a qemu-backed connection
package main

import (
	"flag"
	"fmt"
	"io"
	"log"
	"os"

	"github.com/google/waterfall/golang/net/qemu"
	"golang.org/x/net/context"
	"golang.org/x/sync/errgroup"
)

var (
	conns   int
	recN    int
	logFile string
)

func init() {
	flag.IntVar(&conns, "conns", 1, "number of concurrent connections to accept")
	flag.IntVar(&recN, "rec_n", 4*1024*1024, "read rec_n before closing the connection")
	flag.StringVar(&logFile, "log_file", "", "write logs to file instead of stdout")
}

func main() {
	flag.Parse()

	if logFile != "" {
		f, err := os.Create(logFile)
		if err != nil {
			log.Fatal(err)
		}
		defer f.Close()
		log.SetOutput(f)
	}

	lis, err := qemu.MakePipe("sockets/h2o")
	if err != nil {
		log.Fatalf("error opening pipe: %v", err)
	}
	defer lis.Close()

	eg, _ := errgroup.WithContext(context.Background())

	log.Printf("Running %d conns\n", conns)
	for i := 0; i < conns; i++ {
		// Let's avoid variable aliasing
		func() {
			log.Println("Accepting connection...")
			c, err := lis.Accept()
			if err != nil {
				log.Fatalf("error accepting connection: %v", err)
			}
			log.Println("Accepted connection...")

			r, w := io.Pipe()
			eg.Go(func() error {
				defer w.Close()
				log.Println("Reading from conn...")
				n, err := io.Copy(w, c)
				if err != nil && err != io.EOF {
					return err
				}
				if n != int64(recN) {
					return fmt.Errorf("read %d bytes but was supposed to read %d", n, recN)
				}
				log.Println("Done reading from conn...")
				return nil
			})

			eg.Go(func() error {
				defer c.Close()
				log.Println("Writing to conn...")
				defer r.Close()
				n, err := io.Copy(c, r)
				if err != nil && err != io.EOF {
					return err
				}
				if n != int64(recN) {
					return fmt.Errorf("wrote %d bytes but was supposed to write %d", n, recN)
				}
				log.Println("Done Writing to conn...")
				return nil
			})
		}()
	}

	if err := eg.Wait(); err != nil {
		log.Fatalf("got error: %v", err)
	}
	log.Println("Dying...")
}
// Verify that the texture is correctly cleared to 0 before its first usage as a read-only storage // texture in a render pass. TEST_P(StorageTextureZeroInitTests, ReadonlyStorageTextureClearsToZeroInRenderPass) { DAWN_SKIP_TEST_IF(IsOpenGLES() && IsBackendValidationEnabled()); wgpu::Texture readonlyStorageTexture = CreateTexture(wgpu::TextureFormat::R32Uint, wgpu::TextureUsage::Storage); const char* kVertexShader = kSimpleVertexShader; const std::string kFragmentShader = std::string(R"( [[group(0), binding(0)]] var<uniform_constant> srcImage : [[access(read)]] texture_storage_2d<r32uint>; [[location(0)]] var<out> o_color : vec4<f32>; )") + kCommonReadOnlyZeroInitTestCode + R"( [[stage(fragment)]] fn main() -> void { if (doTest()) { o_color = vec4<f32>(0.0, 1.0, 0.0, 1.0); } else { o_color = vec4<f32>(1.0, 0.0, 0.0, 1.0); } })"; CheckDrawsGreen(kVertexShader, kFragmentShader.c_str(), readonlyStorageTexture); }
/** * Master recovery when the znode already exists. Internally, this * test differs from {@link #testMasterSessionExpired} because here * the master znode will exist in ZK. */ @Test(timeout=60000) public void testMasterZKSessionRecoveryFailure() throws Exception { MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster(); HMaster m = cluster.getMaster(); m.abort("Test recovery from zk session expired", new KeeperException.SessionExpiredException()); assertFalse(m.isStopped()); testSanity(); }
#include <Arduino.h>
#include <ESP8266WiFi.h>
#include <UniversalTelegramBot.h>
#include <WiFiClientSecure.h>
#include <EEPROM.h>

#define BOTtoken "<KEY>:<KEY>"

const char* ssid = "";
const char* password = "";

int eeprom_first = 12;
int Bot_mtbs = 1000;             // mean time between message scans (ms)
unsigned long Bot_lasttime = 0;  // last time a message scan was done
int first_run;
bool new_chat_id_reg = false;
int wifiStatus;
String authorized_chat_id = "";

WiFiClientSecure client;
UniversalTelegramBot bot(BOTtoken, client);

///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
void setup() {
  Serial.begin(115200);

  /*
  EEPROM.begin(512);
  for (int i = 0; i < 100; i++) {
    EEPROM.write(i,'0');
  }
  EEPROM.commit();
  */
  /*
  Serial.println(EEPROM.read(eeprom_first));
  if (EEPROM.read(eeprom_first) == 0) {
    new_chat_id_reg = true;
    Serial.println("new_reg");
  } else {
    String read_data = "000000000";
    for (int i = 0; i < 9; i++) {
      read_data[i]=EEPROM.read(i);
    }
    Serial.println("load_auth");
    authorized_chat_id = read_data;
  }
  */

  ////////////////////////////////////////////////////////////////////////////
  // Wifi connecting
  delay(200);
  WiFi.begin(ssid, password);
  while (WiFi.status() != WL_CONNECTED) {
    delay(500);
    Serial.print(".");
  }
  Serial.println("Wifi connected");
  // Wifi connecting

  bot.sendMessage(authorized_chat_id, "Device connected to internet!", "Markdown");
  delay(3000);
  bot.sendMessage(authorized_chat_id, "Rufus Security System is running....", "Markdown");
}

///////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
void handleNewMessages(int numNewMessages) {
  Serial.println("handleNewMessages");
  Serial.println(String(numNewMessages));

  for (int i = 0; i < numNewMessages; i++) {
    String chat_id = String(bot.messages[i].chat_id);

    /*
    //Registration
    if(new_chat_id_reg == true){
      Serial.println("ChatId registration is running!");
      EEPROM.begin(512);
      for (int i = 0; i < chat_id.length(); i++) {
        EEPROM.write(i,chat_id[i]);
      }
      EEPROM.commit();
      EEPROM.begin(512);
      EEPROM.write(eeprom_first,1);
      EEPROM.commit();
      new_chat_id_reg= false;
      authorized_chat_id=chat_id;
      Serial.print("Registration successful!");
    }
    //Registration
    */

    // Only the authorized chat may issue commands.
    if (chat_id == authorized_chat_id) {
      String text = bot.messages[i].text;

      //-------------------------------------------
      if (text == "/help") {
        String welcome = "Welcome to Rufus Security System!\n";
        welcome = welcome + "/menu \n";
        welcome = welcome + "/status : \n";
        welcome = welcome + "/options : \n";
        bot.sendMessage(chat_id, welcome, "Markdown");
      }
      //--------------------------------------------

      //-----------------------------------------------
      if (text == "/asd") {
        String asd = "Welcome";
        bot.sendMessage(chat_id, asd, "Markdown");
      }
      //------------------------------------------------

      if (text == "/menu") {
        String menu = "Menu";
        bot.sendMessage(chat_id, menu, "Markdown");
      }
      //-------------------------------------------------

      if (text == "/status") {
        String status = "Status\n";
        status = status + "Authorized ChatId is \n" + authorized_chat_id;
        bot.sendMessage(chat_id, status, "Markdown");
      }
      //----------------------------------------------------

      if (text == "/options") {
        String options = "Options";
        bot.sendMessage(chat_id, options, "Markdown");
      }
      //-----------------------------------------------------
    } else {
      bot.sendMessage(chat_id, "Unauthorized access!!!!", "Markdown");
      bot.sendMessage(authorized_chat_id, "Unauthorized access attempt!!! ChatId: " + chat_id, "Markdown");
    }
  }
}

///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////
void loop() {
  // Overflow-safe scheduling: unsigned subtraction keeps working even
  // after millis() wraps around (~49 days).
  if (millis() - Bot_lasttime > (unsigned long)Bot_mtbs) {
    int numNewMessages = bot.getUpdates(bot.last_message_received + 1);

    while (numNewMessages) {
      Serial.println("got response");
      handleNewMessages(numNewMessages);
      numNewMessages = bot.getUpdates(bot.last_message_received + 1);
    }

    Bot_lasttime = millis();
  }
}
import type { JsonTextComponent } from '@arguments'

export class JsonTextComponentClass {
  jsonTextComponent: JsonTextComponent

  constructor(jsonTextComponent: JsonTextComponent) {
    this.jsonTextComponent = jsonTextComponent
  }

  toString() {
    // We want a compact output
    return JSON.stringify(this.jsonTextComponent, function (key: string, value: any) {
      /*
       * If we are in an array, our component could be a custom object (like a Selector) that is directly used as a chat component.
       * Therefore, we must try to transform it into a chat component, or a JSON object.
       * If not possible, we fall back on the original value.
       */
      if (Array.isArray(this)) {
        /*
         * The value given is not the real original value, but sometimes the stringified value.
         * Therefore, we must get back the real one.
         */
        const realValue = this[parseInt(key, 10)]

        // Optional chaining guards against null/undefined entries, which
        // would otherwise throw on property access.
        return realValue?._toChatComponent?.() ?? realValue?.toJSON?.() ?? realValue
      }

      return value?._toChatComponent?.() ?? value?.toJSON?.() ?? value
    }, 0)
  }

  toJSON() {
    return JSON.parse(this.toString())
  }
}
/**
 * This implements the 'in' command. Returns the rows selected from table1.
 * <p>
 * <strong>NOTE:</strong> This is actually an incorrect implementation. We
 * only keep it for compatibility with the DQL system. It may return
 * duplicate rows from 'table1'.
 */
static IntegerVector origIn(Table table1, Table table2, int column1, int column2) {

  // Iterate over the rows of the smaller table and probe the larger one,
  // so the per-value selects run against the cheaper side.
  Table small_table;
  Table large_table;
  int small_column;
  int large_column;

  if (table1.getRowCount() < table2.getRowCount()) {
    small_table = table1;
    large_table = table2;
    small_column = column1;
    large_column = column2;
  }
  else {
    small_table = table2;
    large_table = table1;
    small_column = column2;
    large_column = column1;
  }

  IntegerVector result_rows = new IntegerVector();
  RowEnumeration e = small_table.rowEnumeration();
  Operator EQUALSOP = Operator.get("=");

  while (e.hasMoreRows()) {
    int small_row_index = e.nextRowIndex();
    TObject cell = small_table.getCellContents(small_column, small_row_index);
    // Select every row of the large table whose value equals this cell.
    IntegerVector selected_set = large_table.selectRows(large_column, EQUALSOP, cell);

    if (selected_set.size() > 0) {
      // Record the matching rows that belong to table1.
      if (large_table == table1) {
        result_rows.append(selected_set);
      }
      else {
        result_rows.addInt(small_row_index);
      }
    }
  }

  return result_rows;
}
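The routine above is essentially a semijoin driven from the smaller table. For comparison, here is a self-contained sketch of the corrected idea over plain Java collections; the names and types are illustrative, not the engine's actual Table/IntegerVector API, and this version deduplicates by construction:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

final class SemijoinSketch {
    /**
     * Returns the indices of rows in {@code left} whose key also occurs in
     * {@code right} -- the semijoin that origIn approximates. Unlike origIn,
     * this never returns the same index twice.
     */
    static List<Integer> semijoin(List<String> left, List<String> right) {
        Set<String> keys = new HashSet<>(right);
        List<Integer> result = new ArrayList<>();
        for (int i = 0; i < left.size(); i++) {
            if (keys.contains(left.get(i))) {
                result.add(i);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        // Rows 1, 2, and 3 of the left list carry values present on the right.
        System.out.println(semijoin(
            List.of("a", "b", "c", "b"),
            List.of("b", "c")));  // [1, 2, 3]
    }
}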