// internal/sli/dashboard/custom_charting_tile_processing.go

package dashboard

import (
	"fmt"
	"strconv"
	"strings"
	"time"

	"github.com/keptn-contrib/dynatrace-service/internal/adapter"
	"github.com/keptn-contrib/dynatrace-service/internal/common"
	"github.com/keptn-contrib/dynatrace-service/internal/dynatrace"
	"github.com/keptn-contrib/dynatrace-service/internal/sli/metrics"
	keptnv2 "github.com/keptn/go-utils/pkg/lib/v0_2_0"
	log "github.com/sirupsen/logrus"
)

type CustomChartingTileProcessing struct {
	client        dynatrace.ClientInterface
	eventData     adapter.EventContentAdapter
	customFilters []*keptnv2.SLIFilter
	startUnix     time.Time
	endUnix       time.Time
}

func NewCustomChartingTileProcessing(client dynatrace.ClientInterface, eventData adapter.EventContentAdapter, customFilters []*keptnv2.SLIFilter, startUnix time.Time, endUnix time.Time) *CustomChartingTileProcessing {
	return &CustomChartingTileProcessing{
		client:        client,
		eventData:     eventData,
		customFilters: customFilters,
		startUnix:     startUnix,
		endUnix:       endUnix,
	}
}

func (p *CustomChartingTileProcessing) Process(tile *dynatrace.Tile, dashboardFilter *dynatrace.DashboardFilter) []*TileResult {
	tileTitle := tile.Title()

	// First, figure out whether this tile should be included in SLI validation:
	// we parse the title and look for "sli=sliname".
	sloDefinition := common.ParsePassAndWarningWithoutDefaultsFrom(tileTitle)
	if sloDefinition.SLI == "" {
		log.WithField("tileTitle", tileTitle).Debug("Tile not included as title doesn't include sli=SLINAME")
		return nil
	}

	log.WithFields(
		log.Fields{
			"tileTitle":         tileTitle,
			"baseIndicatorName": sloDefinition.SLI,
		}).Debug("Processing custom chart")

	// Get the tile-specific management zone filter that might be needed by different tile processors.
	// A tile management zone filter overrides the dashboard management zone filter.
	tileManagementZoneFilter := NewManagementZoneFilter(dashboardFilter, tile.TileFilter.ManagementZone)

	if tile.FilterConfig == nil {
		return nil
	}

	var tileResults []*TileResult

	// We can potentially have multiple series on that chart.
	for _, series := range tile.FilterConfig.ChartConfig.Series {
		// First, generate the query and extract all important metric information needed for generating SLIs & SLOs.
		metricQuery, err := p.generateMetricQueryFromChart(series, tileManagementZoneFilter, tile.FilterConfig.FiltersPerEntityType, p.startUnix, p.endUnix)

		// If there was an error, skip this series; otherwise generate the SLI & SLO definition.
		if err != nil {
			log.WithError(err).Warn("generateMetricQueryFromChart returned an error, SLI will not be used")
			continue
		}

		results := NewMetricsQueryProcessing(p.client).Process(len(series.Dimensions), sloDefinition, metricQuery)
		tileResults = append(tileResults, results...)
	}

	return tileResults
}

// generateMetricQueryFromChart looks at the ChartSeries configuration of a regular chart and generates the metrics query.
//
// Returns a queryComponents object:
//   - metricId, e.g.: built-in:mymetric
//   - metricUnit, e.g.: MilliSeconds
//   - metricQuery, e.g.: metricSelector=metric&filter...
//   - fullMetricQuery, e.g.: metricQuery&from=123213&to=2323
//   - entitySelectorSLIDefinition, e.g.: ,entityid(FILTERDIMENSIONVALUE)
//   - filterSLIDefinitionAggregator, e.g.: ,filter(eq(Test Step,FILTERDIMENSIONVALUE))
func (p *CustomChartingTileProcessing) generateMetricQueryFromChart(series dynatrace.Series, tileManagementZoneFilter *ManagementZoneFilter, filtersPerEntityType map[string]map[string][]string, startUnix time.Time, endUnix time.Time) (*queryComponents, error) {
	// Query the metric definition, as we need to know how many dimensions the metric has.
	metricDefinition, err := dynatrace.NewMetricsClient(p.client).GetByID(series.Metric)
	if err != nil {
		log.WithError(err).WithField("metric", series.Metric).Debug("Error retrieving metric description")
		return nil, err
	}

	// Build the merge aggregator string, e.g.: merge("dt.entity.disk"):merge("dt.entity.host") - or merge("dt.entity.service")
	// TODO: 2021-09-20: Check for redundant code after update to use dimension keys rather than indexes
	metricDimensionCount := len(metricDefinition.DimensionDefinitions)
	metricAggregation := metricDefinition.DefaultAggregation.Type
	mergeAggregator := ""
	filterAggregator := ""
	filterSLIDefinitionAggregator := ""
	entitySelectorSLIDefinition := ""

	// We need to merge all the dimensions that are not part of series.Dimensions, e.g.: if the metric has
	// two dimensions but only one dimension is used in the chart, we need to merge the others.
	// Multiple merges are possible, but as they are executed in sequence we have to use the right index.
	for metricDimIx := metricDimensionCount - 1; metricDimIx >= 0; metricDimIx-- {
		doMergeDimension := true
		metricDimIxAsString := strconv.Itoa(metricDimIx)
		// Check whether this dimension is in the chart.
		for _, seriesDim := range series.Dimensions {
			log.WithFields(
				log.Fields{
					"seriesDim.id": seriesDim.ID,
					"metricDimIx":  metricDimIxAsString,
				}).Debug("check")
			if strings.Compare(seriesDim.ID, metricDimIxAsString) == 0 {
				// This is a dimension we want to keep and not merge.
				log.WithField("dimension", metricDefinition.DimensionDefinitions[metricDimIx].Name).Debug("not merging dimension")
				doMergeDimension = false

				// Check whether we need to apply a dimension filter.
				// TODO: support multiple filters - right now we only support 1
				if len(seriesDim.Values) > 0 {
					filterAggregator = fmt.Sprintf(":filter(eq(%s,%s))", seriesDim.Name, seriesDim.Values[0])
				} else {
					// We need this for the generation of the SLI for each individual dimension value.
					// If the dimension is a dt.entity we have to add an additional entityId to the
					// entitySelector - otherwise we add a filter for the dimension.
					if strings.HasPrefix(seriesDim.Name, "dt.entity.") {
						entitySelectorSLIDefinition = ",entityId(FILTERDIMENSIONVALUE)"
					} else {
						filterSLIDefinitionAggregator = fmt.Sprintf(":filter(eq(%s,FILTERDIMENSIONVALUE))", seriesDim.Name)
					}
				}
			}
		}

		if doMergeDimension {
			// This is a dimension we want to merge as it is not split by in the chart.
			log.WithField("dimension", metricDefinition.DimensionDefinitions[metricDimIx].Name).Debug("merging dimension")
			mergeAggregator = mergeAggregator + fmt.Sprintf(":merge(\"%s\")", metricDefinition.DimensionDefinitions[metricDimIx].Key)
		}
	}

	// Handle aggregation. If "NONE" is specified we fall back to the default aggregation.
	if series.Aggregation != "NONE" {
		metricAggregation = series.Aggregation
	}
	// For percentile we need to specify the percentile itself.
	if metricAggregation == "PERCENTILE" {
		metricAggregation = fmt.Sprintf("%s(%f)", metricAggregation, series.Percentile)
	}
	// For rate measures such as failure rate we take the average if it is "OF_INTEREST_RATIO".
	if metricAggregation == "OF_INTEREST_RATIO" {
		metricAggregation = "avg"
	}
	// For rate measures charting also provides the "OTHER_RATIO" option, which is the inverse.
	// TODO: not supported via API - so we default to avg
	if metricAggregation == "OTHER_RATIO" {
		metricAggregation = "avg"
	}

	// TODO: handle aggregation rates -> probably doesn't make sense as we always evaluate a short timeframe
	// if series.AggregationRate

	// Get the true entity type, as the one in the dashboard might not be accurate, e.g.: IOT might be
	// used instead of CUSTOM_DEVICE. So, if the metric definition has EntityTypes defined, we take the first one.
	entityType := series.EntityType
	if len(metricDefinition.EntityType) > 0 {
		entityType = metricDefinition.EntityType[0]
	}

	// Handle chart filters per entity type, e.g.: it's possible that a chart has a filter on entities or tags.
	// Check whether we have a FiltersPerEntityType entry for the tile's entity type.
	entityTileFilter := getEntitySelectorFromEntityFilter(filtersPerEntityType, entityType)

	// Create the metricSelector and entitySelector.
	// ATTENTION: adding :names so we also get the names of the dimensions and not just the entities.
	// This means we get two values for each dimension.
	metricQuery := fmt.Sprintf("metricSelector=%s%s%s:%s:names&entitySelector=type(%s)%s%s",
		series.Metric, mergeAggregator, filterAggregator, strings.ToLower(metricAggregation),
		entityType, entityTileFilter, tileManagementZoneFilter.ForEntitySelector())

	// Build the Dynatrace API metrics query for the proposed timeframe and additional filters.
	fullMetricQuery, metricID, err := metrics.NewQueryBuilder(p.eventData, p.customFilters).Build(metricQuery, startUnix, endUnix)
	if err != nil {
		return nil, err
	}

	return &queryComponents{
		metricID:                      metricID,
		metricUnit:                    metricDefinition.Unit,
		metricQuery:                   metricQuery,
		fullMetricQueryString:         fullMetricQuery,
		entitySelectorSLIDefinition:   entitySelectorSLIDefinition,
		filterSLIDefinitionAggregator: filterSLIDefinitionAggregator,
	}, nil
}

// getEntitySelectorFromEntityFilter parses the filtersPerEntityType dashboard definition and returns
// the entitySelector query filter. The return value always starts with a , (comma).
// Return example: ,entityId("ABAD-222121321321")
func getEntitySelectorFromEntityFilter(filtersPerEntityType map[string]map[string][]string, entityType string) string {
	entityTileFilter := ""
	if filtersPerEntityType, containsEntityType := filtersPerEntityType[entityType]; containsEntityType {
		// Check for SPECIFIC_ENTITIES - if we have an array then we filter for each entity.
		if entityArray, containsSpecificEntities := filtersPerEntityType["SPECIFIC_ENTITIES"]; containsSpecificEntities {
			for _, entityId := range entityArray {
				entityTileFilter = entityTileFilter + "," + fmt.Sprintf("entityId(\"%s\")", entityId)
			}
		}
		// Check for AUTO_TAGS - if we have an array then we filter for each tag.
		if tagArray, containsAutoTags := filtersPerEntityType["AUTO_TAGS"]; containsAutoTags {
			for _, tag := range tagArray {
				entityTileFilter = entityTileFilter + "," + fmt.Sprintf("tag(\"%s\")", tag)
			}
		}
	}
	return entityTileFilter
}
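To make the selector assembly above concrete, here is a small illustrative Python sketch (not part of the service; the metric id and dimension keys below are made up): dimensions the chart does not split by get a `:merge(...)`, a kept dimension may get a `:filter(...)`, and the lowercased aggregation plus the `:names` transformation are appended last.

```python
# Illustrative sketch only - mirrors the composition order used by
# generateMetricQueryFromChart: merges, then filter, then aggregation, then :names.
def build_metric_selector(metric, all_dims, kept_dims, aggregation, dim_filter=None):
    selector = metric
    for key in all_dims:
        if key not in kept_dims:          # not split by in the chart -> merge it away
            selector += f':merge("{key}")'
    if dim_filter:                        # optional filter on a kept dimension
        name, value = dim_filter
        selector += f":filter(eq({name},{value}))"
    return f"{selector}:{aggregation.lower()}:names"

# Example: a two-dimension metric charted only by service (names are assumptions)
print(build_metric_selector(
    "builtin:service.response.time",
    all_dims=["dt.entity.service", "dt.entity.service_method"],
    kept_dims=["dt.entity.service"],
    aggregation="AVG",
))
# -> builtin:service.response.time:merge("dt.entity.service_method"):avg:names
```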
/**
 * Publish a pin message to the pin topic.
 */
public void publishPin(String time, String name) {
    pin = StringUtil.getRandomNumberString();
    String payload = pin + "-" + time + "-" + name;
    byte[] encodedPayload = new byte[0];
    try {
        encodedPayload = payload.getBytes("UTF-8");
        MqttMessage message = new MqttMessage(encodedPayload);
        client.publish(ShiftrIO.PIN_TOPIC, message);
    } catch (UnsupportedEncodingException | MqttException e) {
        e.printStackTrace();
    }
}
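For comparison, the same publish flow in Python using the paho-mqtt client might look like the sketch below; the broker host and topic name are placeholders, not taken from the Java code.

```python
# A hedged sketch of the same publish flow with paho-mqtt.
# Broker host/port and topic name are assumptions for illustration.
import random
import paho.mqtt.client as mqtt

PIN_TOPIC = "pin"  # assumed topic name

client = mqtt.Client()
client.connect("broker.example.com", 1883)  # placeholder broker

def publish_pin(time_str: str, name: str) -> None:
    pin = f"{random.randint(0, 9999):04d}"          # random pin, like getRandomNumberString()
    payload = f"{pin}-{time_str}-{name}".encode("utf-8")
    client.publish(PIN_TOPIC, payload)
```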
/*
 * Copyright 2016 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.jadecy.graph;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import net.jadecy.utils.ArgsUtils;

/**
 * Implements the cycle detection algorithm (not the ranking part) described in the paper
 * "Efficient Retrieval and Ranking of Undesired Package Cycles in Large Software Systems",
 * by <NAME>, <NAME>, <NAME>, and <NAME>,
 * published in "Journal of Object Technology":
 * http://www.jot.fm/issues/issue_2012_04/article4.pdf
 * modulo:
 * - handling of cycles of size 1,
 * - possibility to ignore cycles above a certain size.
 *
 * Serves as reference, to make sure our optimized algorithm is at least as good
 * in computing shortest cycles.
 */
class RefShortestCyclesComputer {

    /*
     * The basic idea of the paper's algorithm: "the decomposition of an SCC in
     * multiple short cycles covering all dependencies of the SCC",
     * by iterating on vertices, and for each of them, pushing a
     * Breadth-First Search (BFS) until all predecessors have been reached,
     * and processing corresponding cycles if not processed already.
     */

    /*
     * This implementation provides cycles in their normalized form, but only
     * accidentally, due to normalizing work vertices cycles to make sure
     * a same cycle is only processed once, and SCCs computation treatment
     * providing vertices in their natural order, which is preserved when
     * creating the work graph.
     * The spec indicates that cycles might not be in their normalized form,
     * to allow for eventual optimizations in that regard.
     */

    //--------------------------------------------------------------------------
    // PRIVATE CLASSES
    //--------------------------------------------------------------------------

    private static class MySccProcessor implements InterfaceVertexCollProcessor {
        private final int maxSize;
        private final InterfaceVertexCollProcessor processor;
        private final ArrayList<InterfaceVertex> tmpScc = new ArrayList<InterfaceVertex>();
        public MySccProcessor(
                int maxSize,
                InterfaceVertexCollProcessor processor) {
            this.maxSize = maxSize;
            this.processor = processor;
        }
        @Override
        public void processCollBegin() {
            this.tmpScc.clear();
        }
        @Override
        public void processCollVertex(InterfaceVertex vertex) {
            this.tmpScc.add(vertex);
        }
        @Override
        public boolean processCollEnd() {
            return computeShortestCycles_onScc(
                    this.tmpScc,
                    this.maxSize,
                    this.processor);
        }
    }

    //--------------------------------------------------------------------------
    // PUBLIC METHODS
    //--------------------------------------------------------------------------

    /**
     * Computes a set of cycles that cover all dependencies of each SCC, doing
     * best effort in making this set and these cycles as small as possible,
     * i.e. some processed cycles might only cover dependencies already covered
     * by other processed cycles.
     * Cycles of size 1 are also processed.
     *
     * For a same input, and regardless of vertices hash codes,
     * this method always produces the same output.
     *
     * Calls to processor.processCollVertex(...) are ordered according to the order
     * in which elements appear in the cycle.
     * For performance purposes, the first call is not necessarily done on the
     * lowest element, i.e. the specified cycle is not necessarily in normalized
     * form.
     *
     * @param graph Graph of which shortest cycles must be computed.
     *        Must be consistent, i.e. not contain duplicates, and contain all
     *        vertices reachable from contained ones.
     * @param maxSize Max size of processed cycles. If < 0, no limit.
     * @param processor Processor to process the cycles with.
     */
    public static void computeShortestCycles(
            Collection<? extends InterfaceVertex> graph,
            int maxSize,
            InterfaceVertexCollProcessor processor) {
        ArgsUtils.requireNonNull(processor);
        // Implicit null check.
        if ((graph.size() == 0)
                || (maxSize == 0)) {
            return;
        }
        final MySccProcessor sccProcessor = new MySccProcessor(
                maxSize,
                processor);
        SccsComputer.computeSccs(graph, sccProcessor);
    }

    //--------------------------------------------------------------------------
    // PRIVATE METHODS
    //--------------------------------------------------------------------------

    private RefShortestCyclesComputer() {
    }

    /*
     *
     */

    /**
     * @param maxSize Must be != 0.
     * @return True if must stop, false otherwise.
     */
    private static boolean computeShortestCycles_onScc(
            ArrayList<InterfaceVertex> scc,
            int maxSize,
            InterfaceVertexCollProcessor processor) {
        if (maxSize == 0) {
            throw new AssertionError();
        }

        /*
         * Using a work graph, not so much to have predecessors (which could
         * easily be computed by iterating on successors of each vertex), as
         * not to have edges dangling outside the SCC, and to be able to remove
         * cycles of size 1, to make things simpler.
         */

        final ArrayList<WorkVertex> workScc =
                WorkGraphUtilz.newWorkGraphAsArrayList(
                        scc);

        // Modif/paper: taking care of elementary circuits, and removing them.
        for (WorkVertex v : workScc) {
            if (WorkGraphUtilz.removeEdge(v, v)) {
                if (processOneVertexCycle(v, processor)) {
                    return true;
                }
            }
        }

        if (maxSize == 1) {
            // No need to go further here.
            return false;
        }

        /*
         * name_in_paper/name_in_code:
         *
         * parent: predecessor (parent/child more suited to tree-like graphs)
         * C: normCycleSet (set of cycles)
         * x: x (vertex)
         * V: visitedSet (set of vertices)
         * A: predToVisitSet (set of vertices)
         * Q: qList (FIFO of vertices) + qSet (for contains(...))
         * p: v (vertex)
         * y: succ (Vertex)
         * c: cycle
         * B: v.successors() (set of vertices)
         */

        final Set<List<WorkVertex>> normCycleSet = new HashSet<List<WorkVertex>>();

        for (WorkVertex x : workScc) {
            final Set<WorkVertex> visitedSet = new HashSet<WorkVertex>();
            final Set<WorkVertex> predToVisitSet = new HashSet<WorkVertex>(
                    x.predecessors());

            setBfsPred(x, null);

            final LinkedList<WorkVertex> qList = new LinkedList<WorkVertex>();
            // For fast contains(...).
            final HashSet<WorkVertex> qSet = new HashSet<WorkVertex>();
            push(qList, qSet, x);

            while (predToVisitSet.size() != 0) {
                // Queue must not be empty, as long as we work on an SCC.
                final WorkVertex v = pop(qList, qSet);
                for (WorkVertex succ : v.successors()) {
                    if ((!visitedSet.contains(succ))
                            && (!qSet.contains(succ))) {
                        setBfsPred(succ, v);
                        push(qList, qSet, succ);
                    }
                    if (predToVisitSet.remove(succ)) {
                        /*
                         * Found a cycle, new or not.
                         */
                        final ArrayList<WorkVertex> cycle = newReversedCycle(
                                workScc.size(),
                                succ);
                        final boolean smallEnoughForProcess =
                                (maxSize < 0)
                                || (cycle.size() <= maxSize);
                        if (smallEnoughForProcess) {
                            dereverse(cycle);
                            normalize(cycle);
                            if (normCycleSet.add(cycle)) {
                                if (processCycle(cycle, processor)) {
                                    return true;
                                }
                            }
                        }
                    }
                }
                visitedSet.add(v);
            }
        }

        return false;
    }

    /*
     *
     */

    /**
     * @return The reversed cycle.
     */
    private static ArrayList<WorkVertex> newReversedCycle(
            int sccSize,
            WorkVertex succ) {
        final ArrayList<WorkVertex> reversedCycle = new ArrayList<WorkVertex>();
        {
            WorkVertex tmpV = succ;
            // Building the cycle.
            while (tmpV != null) {
                if (reversedCycle.size() == sccSize) {
                    // If this happens, it means there is a bug somewhere.
                    throw new AssertionError("infinite loop");
                }
                reversedCycle.add(tmpV);
                tmpV = getBfsPred(tmpV);
            }
        }
        return reversedCycle;
    }

    /**
     * @param cycle (in,out) In: reversed cycle. Out: de-reversed cycle.
     */
    private static void dereverse(ArrayList<WorkVertex> cycle) {
        final int nMinus1 = cycle.size() - 1;
        // (size-1)/2
        final int maxI = (nMinus1 >> 1);
        for (int i = 0; i <= maxI; i++) {
            final int j = nMinus1 - i;
            final WorkVertex v = cycle.get(i);
            cycle.set(i, cycle.get(j));
            cycle.set(j, v);
        }
    }

    /**
     * Normalizes (according to work vertices ordering).
     *
     * @param cycle (in,out) In: cycle. Out: normalized cycle.
     */
    private static void normalize(ArrayList<WorkVertex> cycle) {
        final WorkVertex[] cycleAsArr = cycle.toArray(
                new WorkVertex[cycle.size()]);
        CyclesUtils.normalizeCycle(cycleAsArr);
        cycle.clear();
        for (WorkVertex v : cycleAsArr) {
            cycle.add(v);
        }
    }

    /*
     *
     */

    private static boolean processOneVertexCycle(
            WorkVertex vertex,
            InterfaceVertexCollProcessor processor) {
        processor.processCollBegin();
        processor.processCollVertex(vertex.backingVertex());
        return processor.processCollEnd();
    }

    private static boolean processCycle(
            ArrayList<WorkVertex> cycle,
            InterfaceVertexCollProcessor processor) {
        final int size = cycle.size();
        processor.processCollBegin();
        for (int i = 0; i < size; i++) {
            processor.processCollVertex(cycle.get(i).backingVertex());
        }
        return processor.processCollEnd();
    }

    /*
     *
     */

    private static void setBfsPred(WorkVertex v, WorkVertex bfsPred) {
        v.setData(bfsPred);
    }

    private static WorkVertex getBfsPred(WorkVertex v) {
        return (WorkVertex) v.getData();
    }

    /*
     * FIFO
     */

    private static void push(
            LinkedList<WorkVertex> qList,
            HashSet<WorkVertex> qSet,
            WorkVertex vertex) {
        if (!qSet.add(vertex)) {
            throw new AssertionError();
        }
        qList.addLast(vertex);
    }

    private static WorkVertex pop(
            LinkedList<WorkVertex> qList,
            HashSet<WorkVertex> qSet) {
        final WorkVertex vertex = qList.removeFirst();
        if (!qSet.remove(vertex)) {
            throw new AssertionError();
        }
        return vertex;
    }
}
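The core of the paper's algorithm above - for each vertex x, run a BFS until all of x's predecessors have been reached, each reached predecessor w closing the cycle x -> ... -> w -> x - can be sketched compactly. The following is an illustrative Python rendition of that idea (assuming hashable, orderable vertex labels), not the Java implementation itself:

```python
from collections import deque

def short_cycles_covering_edges(succs):
    """succs: dict vertex -> set of successors; assumed to form one SCC.
    Yields one short cycle (as a normalized tuple) per distinct cycle found."""
    # Build the predecessor map once.
    preds = {v: set() for v in succs}
    for v, outs in succs.items():
        for w in outs:
            preds[w].add(v)

    seen_cycles = set()
    for x in succs:
        to_visit = set(preds[x])   # predecessors of x still to reach
        bfs_pred = {x: None}       # BFS parent pointers (doubles as "enqueued" set)
        q = deque([x])
        while to_visit and q:
            v = q.popleft()
            for w in succs[v]:
                if w not in bfs_pred:
                    bfs_pred[w] = v
                    q.append(w)
                if w in to_visit:
                    to_visit.discard(w)
                    # Walk parents back from w to x: path x..w plus edge w->x is a cycle.
                    cycle = []
                    t = w
                    while t is not None:
                        cycle.append(t)
                        t = bfs_pred[t]
                    cycle.reverse()
                    # Normalize the rotation so each cycle is reported only once.
                    i = cycle.index(min(cycle))
                    norm = tuple(cycle[i:] + cycle[:i])
                    if norm not in seen_cycles:
                        seen_cycles.add(norm)
                        yield norm

# Tiny example: a 3-cycle plus a chord produces two short cycles.
g = {"a": {"b"}, "b": {"c"}, "c": {"a", "b"}}
print(sorted(short_cycles_covering_edges(g)))
# -> [('a', 'b', 'c'), ('b', 'c')]
```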
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <errno.h>
#include <fcntl.h>
#include <stdlib.h>
#include <unistd.h>

#include <fbl/algorithm.h>
#include <fbl/string.h>
#include <fbl/unique_fd.h>
#include <fuchsia/sysinfo/c/fidl.h>
#include <lib/devmgr-integration-test/fixture.h>
#include <lib/fdio/directory.h>
#include <lib/fdio/fd.h>
#include <lib/fdio/fdio.h>
#include <zircon/status.h>
#include <zxtest/base/log-sink.h>
#include <zxtest/zxtest.h>

namespace {

using devmgr_integration_test::RecursiveWaitForFile;

fbl::String GetTestFilter() {
  constexpr char kSysInfoPath[] = "/dev/misc/sysinfo";
  fbl::unique_fd sysinfo(open(kSysInfoPath, O_RDWR));
  if (!sysinfo) {
    return "Unknown";
  }
  zx::channel channel;
  if (fdio_get_service_handle(sysinfo.release(), channel.reset_and_get_address()) != ZX_OK) {
    return "Unknown";
  }

  char board_name[fuchsia_sysinfo_SYSINFO_BOARD_NAME_LEN + 1];
  zx_status_t status;
  size_t actual_size;
  zx_status_t fidl_status = fuchsia_sysinfo_DeviceGetBoardName(
      channel.get(), &status, board_name, sizeof(board_name), &actual_size);
  if (fidl_status != ZX_OK || status != ZX_OK) {
    return "Unknown";
  }
  board_name[actual_size] = '\0';

  printf("Found board %s\n", board_name);

  if (!strcmp(board_name, "qemu")) {
    return "*Qemu*";
  } else if (!strcmp(board_name, "vim2")) {
    return "*Vim2*";
  } else if (!strcmp(board_name, "astro")) {
    return "*Astro*";
  } else if (!strcmp(board_name, "cleo")) {
    return "*Cleo*";
  } else if (!strcmp(board_name, "sherlock")) {
    return "*Sherlock*";
  } else if (!strcmp(board_name, "mt8167s_ref")) {
    return "*Mt8167sRef*";
  } else if (!strcmp(board_name, "msm8x53-som")) {
    return "*Msm8x53Som*";
  } else if (!strcmp(board_name, "as370")) {
    return "*As370*";
  } else if (!strcmp(board_name, "visalia")) {
    return "*Visalia*";
  } else if (!strcmp(board_name, "hikey960")) {
    return "*Hikey960*";
  }

  return "Unknown";
}

class DeviceEnumerationTest : public zxtest::Test {
 protected:
  void TestRunner(const char** device_paths, size_t paths_num) {
    fbl::unique_fd devfs_root(open("/dev", O_RDWR));
    ASSERT_TRUE(devfs_root);

    fbl::unique_fd fd;
    for (size_t i = 0; i < paths_num; ++i) {
      printf("Checking %s\n", device_paths[i]);
      EXPECT_OK(RecursiveWaitForFile(devfs_root, device_paths[i], &fd), "%s", device_paths[i]);
    }
  }
};

TEST_F(DeviceEnumerationTest, QemuTest) {
  static const char* kDevicePaths[] = {
      "sys/platform/qemu-bus",
      "sys/platform/00:00:6/rtc",
      "sys/pci/00:00.0",
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

TEST_F(DeviceEnumerationTest, Vim2Test) {
  static const char* kDevicePaths[] = {
      "sys/platform/vim",
      "sys/platform/00:00:1b/sysmem",
      "sys/platform/05:02:1/aml-gxl-gpio",
      "sys/platform/05:00:2/aml-i2c",
      "sys/platform/05:02:4/clocks",
      "sys/platform/05:00:10/aml-canvas",
      // TODO(ZX-4069): Test SDMMC binding.
      // "sys/platform/05:00:3/aml-uart/serial/bt-transport-uart/bcm-hci",
      // "sys/platform/05:00:6/aml-sd-emmc/sdio",
      "sys/platform/05:00:2/aml-i2c/i2c/i2c-1-81/rtc",
      "sys/platform/05:00:2/aml-i2c/i2c/i2c-0-70/led2472g",
      "sys/platform/00:00:2/xhci/usb-bus",
      "sys/platform/05:02:17/aml-gpu",
      "dwmac/eth_phy/phy_null_device",
      "dwmac/Designware MAC/ethernet",
      "display/vim2-display/display-controller",
      "vim-thermal/vim-thermal",
      "gpio-light/gpio-light",
      "wifi/broadcom-wlanphy",
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

TEST_F(DeviceEnumerationTest, AstroTest) {
  static const char* kDevicePaths[] = {
      "sys/platform/aml-bus",
      "sys/platform/05:03:1/aml-axg-gpio",
      "sys/platform/00:00:13/hid-buttons/hid-device-000",
      "sys/platform/05:00:2/aml-i2c",
      "sys/platform/05:03:17/aml-gpu",
      "sys/platform/05:00:18/aml-usb-phy-v2",
      // XHCI driver will not be loaded if we are in USB peripheral mode.
      // "xhci/xhci/usb-bus",
      // TODO(ZX-4087): Astro can have one of two possible touch screens
      // so we can't just test that one of them is bound. That is why the
      // following test is disabled.
      // "sys/platform/03:03:5/gt92xx HidDevice/hid-device-000",
      "sys/platform/05:00:2/aml-i2c/i2c/i2c-2-44/ti-lp8556",
      "display/astro-display/display-controller",
      "sys/platform/05:00:10/aml-canvas",
      "sys/platform/00:00:e/optee-tz",
      "sys/platform/05:03:e/amlogic_video",
      "sys/platform/00:00:f/fallback-rtc",
      "sys/platform/05:00:f/aml-raw_nand/nand/tpl/skip-block",
      "sys/platform/05:00:f/aml-raw_nand/nand/fts/skip-block",
      "sys/platform/05:00:f/aml-raw_nand/nand/factory/skip-block",
      "sys/platform/05:00:f/aml-raw_nand/nand/zircon-b/skip-block",
      "sys/platform/05:00:f/aml-raw_nand/nand/zircon-a/skip-block",
      "sys/platform/05:00:f/aml-raw_nand/nand/zircon-r/skip-block",
      "sys/platform/05:00:f/aml-raw_nand/nand/fvm/ftl/block/fvm/blobfs-p-1/block",
      "sys/platform/05:00:f/aml-raw_nand/nand/fvm/ftl/block/fvm/minfs-p-2/block/zxcrypt/unsealed/block",
      "sys/platform/05:00:f/aml-raw_nand/nand/sys-config/skip-block",
      "sys/platform/05:00:f/aml-raw_nand/nand/migration/skip-block",
      "sys/platform/05:00:7/aml-sd-emmc/sdmmc",
      "wifi",
      "tcs3400-light/tcs-3400/hid-device-000",
      "sys/platform/05:03:11/clocks",
      "sys/platform/05:03:a/thermal",
      "AstroAudio/astro-audio-out",
      "sys/platform/05:03:13/astro-audio-in",
      //"sys/platform/05:05:3/aml-uart/serial/bt-transport-uart/bcm-hci",
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

TEST_F(DeviceEnumerationTest, CleoTest) {
  static const char* kDevicePaths[] = {
      "sys/platform/mt8167s_ref",
      "sys/platform/0d:00:1/mt8167-gpio",
      "sys/platform/0d:00:6/mt8167-i2c",
      "sys/platform/0d:00:8/mtk-clk",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-000/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-001/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-002/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-003/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-004/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-005/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-006/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-007/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-008/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-009/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-010/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-011/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-012/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-013/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-014/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-015/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-016/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-017/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-018/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-019/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-020/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-021/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-022/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-023/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-024/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-025/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-026/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-027/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-028/block",
      "sys/platform/0d:00:4/mtk-sdmmc",
      "sys/platform/00:00:13/hid-buttons/hid-device-000",
      "sys/platform/0d:00:14/mt-usb/usb-peripheral/function-000/cdc-eth-function/ethernet",
      "touch/focaltouch HidDevice/hid-device-000",
      "sys/platform/0d:00:9/mtk-thermal",
      "sys/platform/0d:00:6/mt8167-i2c/i2c/i2c-0-83/ltr-578als/hid-device-000",
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

TEST_F(DeviceEnumerationTest, SherlockTest) {
  static const char* kDevicePaths[] = {
      "sys/platform/sherlock",
      "sys/platform/05:04:1/aml-axg-gpio",
      "sys/platform/05:00:14/clocks",
      "sys/platform/05:00:2/aml-i2c",
      "sys/platform/05:00:10/aml-canvas",
      "sys/platform/05:03:a/thermal",
      "sys/platform/00:00:1e/dw-dsi",
      "display/astro-display/display-controller",
      "sys/platform/05:00:18/aml-usb-phy-v2",
      // XHCI driver will not be loaded if we are in USB peripheral mode.
      // "xhci/xhci/usb-bus",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-000/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-002/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-003/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-004/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-005/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-006/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-007/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-008/block/fvm/blobfs-p-1/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-008/block/fvm/minfs-p-2/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-009/block",
      "sys/platform/05:00:8/aml-sd-emmc/sdmmc/sdmmc-mmc/block/part-010/block",
      "sys/platform/05:00:6/aml-sd-emmc/sdmmc/sdmmc-sdio/sdmmc-sdio-1",
      "sys/platform/05:00:6/aml-sd-emmc/sdmmc/sdmmc-sdio/sdmmc-sdio-2",
      "wifi",
      "sys/platform/05:04:15/aml-mipi",
      "sys/platform/12:02:2/gdc",
      "imx227-sensor",
      "isp",
      "sys/platform/05:04:e/amlogic_video",
      "sys/platform/05:04:17/aml-gpu",
      "sys/platform/05:04:16/sherlock-audio-in",
      "SherlockAudio",
      "ft5726-touch",
      "sys/platform/00:00:e/optee-tz",
      "sys/platform/00:00:f/fallback-rtc",
      "spi/aml-spi-0/spi/spi-0-0"
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

TEST_F(DeviceEnumerationTest, Mt8167sRefTest) {
  static const char* kDevicePaths[] = {
      "sys/platform/mt8167s_ref",
      "sys/platform/0d:00:1/mt8167-gpio",
      "sys/platform/0d:00:6/mt8167-i2c",
      "sys/platform/0d:00:8/mtk-clk",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-000/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-001/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-002/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-003/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-004/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-005/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-006/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-007/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-008/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-009/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-010/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-011/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-012/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-013/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-014/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-015/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-016/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-017/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-018/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-019/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-020/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-021/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-022/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-023/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-024/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-025/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-026/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-027/block",
      "sys/platform/0d:00:2/mtk-sdmmc/sdmmc/sdmmc-mmc/block/part-028/block",
      "sys/platform/0d:00:4/mtk-sdmmc",
      "sys/platform/00:00:13/hid-buttons/hid-device-000",
      "sys/platform/0d:00:14/mt-usb/usb-peripheral/function-000/cdc-eth-function/ethernet",
      "sys/platform/0d:00:9/mtk-thermal",
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

TEST_F(DeviceEnumerationTest, Msm8x53SomTest) {
  static const char* kDevicePaths[] = {
      "sys/platform/msm8x53",
      "sys/platform/13:01:1",
      "sys/platform/13:00:3/msm8x53-sdhci",
      "sys/platform/13:00:2/qcom-pil",
      "sys/platform/13:01:4/msm-clk",
      "sys/platform/13:01:5/msm8x53-power",
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

TEST_F(DeviceEnumerationTest, As370Test) {
  static const char* kDevicePaths[] = {
      "sys/platform/as370",
      "sys/platform/14:01:1",
      "sys/platform/14:01:1/as370-gpio",
      "sys/platform/00:00:9",
      "sys/platform/00:00:9/dw-i2c",
      "sys/platform/14:01:2/as370-usb-phy",
      "dwc2-usb",
      "audio-max98373",
      "as370-audio-out",
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

TEST_F(DeviceEnumerationTest, VisaliaTest) {
  static const char* kDevicePaths[] = {
      "sys/platform/as370",
      "sys/platform/14:01:1",
      "sys/platform/14:01:1/as370-gpio",
      "sys/platform/00:00:9",
      "sys/platform/00:00:9/dw-i2c",
      "sys/platform/14:01:2/as370-usb-phy",
      "dwc2-usb",
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

TEST_F(DeviceEnumerationTest, Hikey960Test) {
  static const char* kDevicePaths[] = {
      "sys/platform/hikey960",
      "sys/platform/02:00:6/hi3660-gpio",
      "sys/platform/00:00:9/dw-i2c",
      "sys/platform/02:00:2/hi3660-clk",
      "hikey-usb/dwc3",
      "dwc3/dwc3/usb-peripheral",
  };

  ASSERT_NO_FATAL_FAILURES(TestRunner(kDevicePaths, fbl::count_of(kDevicePaths)));
}

}  // namespace

int main(int argc, char** argv) {
  fbl::Vector<fbl::String> errors;
  auto options = zxtest::Runner::Options::FromArgs(argc, argv, &errors);
  zxtest::LogSink* log_sink = zxtest::Runner::GetInstance()->mutable_reporter()->mutable_log_sink();

  if (!errors.is_empty()) {
    for (const auto& error : errors) {
      log_sink->Write("%s\n", error.c_str());
    }
    options.help = true;
  }

  options.filter = fbl::StringPrintf("%s:%s", GetTestFilter().c_str(), options.filter.c_str());

  // Errors will always set help to true.
  if (options.help) {
    zxtest::Runner::Options::Usage(argv[0], log_sink);
    return errors.is_empty();
  }

  if (options.list) {
    zxtest::Runner::GetInstance()->List(options);
    return 0;
  }

  return zxtest::Runner::GetInstance()->Run(options);
}
import React from 'react';
import { shallow } from 'enzyme';
import toJson from 'enzyme-to-json';

import IconButton from './IconButton';

describe('IconButton', () => {
  const icon = (
    <svg viewBox="0 0 64 64">
      <path d="M58.245 12.152C54.943 5.81 49.81 3.455 44.231 4.91c-5.098 1.33-9.79 5.742-11.762 11.094L32 17.277l-.47-1.274c-1.97-5.352-6.662-9.765-11.76-11.093-5.578-1.454-10.712.9-14.014 7.243C1.012 21.262 9.709 37.021 32 59.294 54.291 37.02 62.988 21.263 58.245 12.152z" />
    </svg>
  );

  it('renders', () => {
    const onClick = jest.fn();
    const wrapper = shallow(
      <IconButton name="Test" icon={icon} onClick={onClick} />
    );
    expect(toJson(wrapper)).toMatchSnapshot();
    wrapper.find('button').simulate('click');
    expect(onClick).toHaveBeenCalled();
  });
});
Coming to a Laundromat Near You: Video Game-controlled Washing Machines!

Doing laundry may no longer be the boring, meaningless task it's been for centuries. In fact, successfully getting your digs clean in the future might depend on how well coordinated you are… and a handful of extra laundry change.

Introducing the "arcade washing machine" by Lee Wei Chen, a Taiwanese graduate student at Kingston University, London. Chen's creation combines a traditional standup arcade game with a front-loading coin-operated washing machine, adding both fun and a real challenge to the menial task of washing clothes. What used to be as easy as the push of a button now requires manipulating a steering wheel, a gear shifter and two buttons.

Chen, 27, came up with the idea after contemplating a more productive use for the "wasted but enjoyable time" he spent playing video games. After realizing the skills he had acquired in the virtual world were impractical in real life, he embarked on a quest to make them useful. By linking the circuitry of the two devices, the washing cycle is made dependent on the user's gaming skills. Thus, if the gamer sucks at video games, he or she will have to insert more coins in order to complete the cycle.

This may not be the most ingenious machine ever created, but I dare think it won't be as hard any longer for wives to get their husbands and the kids off the sofa to lend a hand in the laundry room. (Link)
// src/ADempiere/modules/dashboard/store/Dashboard/actions.ts

import { ActionContext, ActionTree } from 'vuex'
import {
  requestListDashboards,
  DashboardState
} from '@/ADempiere/modules/dashboard'
import { IRootState } from '@/store'
import { getCurrentRole } from '@/ADempiere/shared/utils/auth'
import {
  IDashboardDataExtended,
  IListDashboardsResponse
} from '../../DashboardType'

type DashboardActionTree = ActionTree<DashboardState, IRootState>
type DashboardActionContext = ActionContext<DashboardState, IRootState>

export const actions: DashboardActionTree = {
  // refreshDashboard(context: DashboardActionContext, parameters: any) {
  //   context.commit('notifyDashboardRefresh', parameters)
  // },
  listDashboard(
    context: DashboardActionContext,
    payload: {
      roleId: number
      roleUuid: string
    }
  ) {
    // Use the provided role UUID; otherwise fall back to the store,
    // then to the current session role.
    if (!payload.roleUuid) {
      payload.roleUuid = context.rootGetters.getRoleUuid
      if (!payload.roleUuid) {
        payload.roleUuid = getCurrentRole()!
      }
    }

    return new Promise<IDashboardDataExtended[]>(resolve => {
      requestListDashboards({
        roleId: payload.roleId,
        roleUuid: payload.roleUuid
      })
        .then((dashboardResponse: IListDashboardsResponse) => {
          // TODO: verify it with uuid
          const roleDashboards: IDashboardDataExtended[] = dashboardResponse.list.map(item => {
            return {
              ...item,
              roleUuid: payload.roleUuid
            }
          })
          context.commit('addDashboard', roleDashboards)
          resolve(roleDashboards)
        })
        .catch(error => {
          console.warn(
            `Error getting List Dashboards: ${error.message}. Code: ${error.code}.`
          )
        })
    })
  }
}
What are the factors affecting survival after autologous stem cell transplantation in patients with multiple myeloma?

Introduction: High-dose chemotherapy (HDC) and autologous stem cell transplantation (ASCT) still retain their place in the treatment of myeloma patients, even in the era of new agents.

Materials and Methods: We analysed the prognostic effect of pretransplant characteristics and transplant modalities on response in 150 autologous transplants of 144 multiple myeloma (MM) patients who were transplanted in our centre between 2008 and 2017. We evaluated the effect of age, type of MM, previous treatment regimens, status pre- and post-transplantation, time of ASCT, neutrophil and platelet engraftment days, dose of reinfused CD34+ cells, plasma cell infiltration, International Staging System (ISS) stage and Durie-Salmon stage at diagnosis. We examined the effect of these factors on overall survival (OS) and event-free survival (EFS).

Results: The median OS and EFS after transplantation were 41 and 28 months, respectively. Median OS after diagnosis was 57 months. Transplant-related mortality was 3.3%. We found that lower β2-microglobulin levels, lower ISS stage, lower plasma cell infiltration and achievement of a good response at day +100 post transplant were statistically significant independent predictors of longer EFS and OS. Patients given a bortezomib-containing regimen before transplantation showed a better response rate. The use of bortezomib before transplantation was associated with EFS (P = 0.017), but not with OS.

Conclusions: Our analysis confirms HDC-ASCT as an effective and safe therapeutic strategy in multiple myeloma patients. These results were independent of age, first-line treatment regimens and renal insufficiency. Patients with a high ISS stage were found to have shorter survival (P = 0.002). However, EFS and OS were longer in patients with a good response at day +100 after transplantation (P = 0.002, P = 0.02).
R + C Factors and Sacro Occipital Technique Orthopedic Blocking: a pilot study using pre and post VAS assessment.

INTRODUCTION The concept of a systematic or predictive relationship between distant vertebral levels, distinct from accumulative functional compensatory mechanisms such as in scoliosis, has been perpetuated within chiropractic technique systems based on clinical observation and experience. This study seeks to investigate this relationship between the cervical and lumbar vertebrae.

METHODS Patients (experimental group n=26 and control group n=12) were selected from the patient base of one office, and were limited to patients who had sensitivity at specific cervical reflex points. Using pre and post outcome measurements and sacro occipital technique R + C protocols, the related lumbar vertebra was adjusted in the direction indicated by the cervical vertebral sensitivity.

RESULTS Statistical analysis revealed a statistically significant difference between pre- and post-VAS measurements, and the mean change in VAS scores differed significantly between the experimental and control groups (p < .001).

CONCLUSION The findings of this study suggest that further research into cervical and lumbar vertebral interrelationships, and the efficacy of orthopedic block treatment, may be warranted. Further studies are needed to confirm whether a causal relationship exists between lumbar manipulation and decreased cervical spine sensitivity.
/*
   Copyright 2015 <NAME> L.P.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */

#include <poll.h>
#include <list.h>

#include "comdb2.h"
#include "sql.h"
#include "osql_srs.h"
#include "sqloffload.h"
#include "comdb2uuid.h"
#include "debug_switches.h"

extern int gbl_osql_verify_retries_max;

typedef struct srs_tran_query {
    int iscommit;
    void *stmt;
    LINKC_T(struct srs_tran_query) lnk; /* next query */
} srs_tran_query_t;

static void *save_stmt(struct sqlclntstate *clnt)
{
    return clnt->plugin.save_stmt(clnt, NULL);
}

static void restore_stmt(struct sqlclntstate *clnt, srs_tran_query_t *item)
{
    clnt->plugin.restore_stmt(clnt, item->stmt);
}

static void destroy_stmt(struct sqlclntstate *clnt, srs_tran_query_t *item)
{
    clnt->plugin.destroy_stmt(clnt, item->stmt);
}

static char *print_stmt(struct sqlclntstate *clnt, srs_tran_query_t *item)
{
    return clnt->plugin.print_stmt(clnt, item->stmt);
}

typedef struct srs_tran {
    LISTC_T(struct srs_tran_query) lst; /* list of queries up to this point */
} srs_tran_t;

/**
 * Free a statement entry.
 */
static void srs_free_tran_entry(struct sqlclntstate *clnt,
                                srs_tran_query_t *item)
{
    if (item == NULL)
        return;
    destroy_stmt(clnt, item);
    free(item);
}

/**
 * Create a history of sql for this transaction
 * that will allow replay in the case of verify errors
 */
int srs_tran_create(struct sqlclntstate *clnt)
{
    osqlstate_t *osql = &clnt->osql;
    int rc = 0;

    if (osql->history) {
        logmsg(LOGMSG_ERROR, "%s: the history should be empty!\n", __func__);
        cheap_stack_trace();
        rc = srs_tran_destroy(clnt);
        if (rc)
            return rc;
    }

    osql->history = (srs_tran_t *)calloc(1, sizeof(srs_tran_t));
    if (!osql->history) {
        logmsg(LOGMSG_ERROR, "malloc %zu\n", sizeof(srs_tran_t));
        return -1;
    }
#if 0
    printf ("%d Creating history %p\n", pthread_self(), osql->history);
#endif

    listc_init(&osql->history->lst, offsetof(struct srs_tran_query, lnk));

    return 0;
}

/* set replay status
 * at the end of it all, we may replay the entire list
 * of statements in a transaction from srs_tran_replay,
 * depending on the replay status
 */
void osql_set_replay(const char *file, int line, struct sqlclntstate *clnt,
                     int replay)
{
    osqlstate_t *osql = &clnt->osql;
    osql->replay_file = (char *)file;
    osql->replay_line = line;
    osql->last_replay = osql->replay;
    /* logmsg(LOGMSG_ERROR, "Replay set from %s:%d clnt %p from %d to %d\n",
       file, line, clnt, osql->replay, replay); */
    osql->replay = replay;
}

/**
 * Destroy the sql transaction history
 *
 */
int srs_tran_destroy(struct sqlclntstate *clnt)
{
    osqlstate_t *osql = &clnt->osql;
#if 0
    printf ("%d Destroying history %p \n", pthread_self(), osql->history);
#endif
    if (!osql->history)
        goto done;

    srs_tran_empty(clnt);
    free(osql->history);
    osql->history = NULL;

done:
    if (osql->replay != OSQL_RETRY_NONE) {
        logmsg(LOGMSG_ERROR,
               "%s: cleaned history but state is wrong %d, fixing\n", __func__,
               osql->replay);
        cheap_stack_trace();
        osql_set_replay(__FILE__, __LINE__, clnt, OSQL_RETRY_NONE);
    }
    return 0;
}

int srs_tran_del_last_query(struct sqlclntstate *clnt)
{
    if (clnt->osql.history == NULL)
        return 0;
    srs_tran_query_t *item = NULL;
    item = listc_rbl(&clnt->osql.history->lst);
    srs_free_tran_entry(clnt, item);
    return 0;
}

/**
 * Add a new query to the transaction
 *
 */
int srs_tran_add_query(struct sqlclntstate *clnt)
{
    osqlstate_t *osql = &clnt->osql;
    srs_tran_query_t *item = NULL;

    if (clnt->verifyretry_off || clnt->isselect || clnt->dbtran.trans_has_sp ||
        clnt->has_recording) {
        return 0;
    }

    /* don't grow session when the transaction is simply repeated */
    if (osql->replay != OSQL_RETRY_NONE) {
        if (!osql->history) {
            logmsg(LOGMSG_ERROR, "%s: state is %d but no history???\n",
                   __func__, osql->replay);
            cheap_stack_trace();
            return -1;
        }
        return 0;
    }

    if (!osql->history) {
        if (srs_tran_create(clnt))
            return -1;
    }

    item = malloc(sizeof(srs_tran_query_t));
    item->stmt = save_stmt(clnt);
    item->iscommit = strcasecmp("commit", clnt->sql) == 0;
    listc_abl(&osql->history->lst, item);
    clnt->added_to_hist = 1;

    /* if the previous item is a 'commit', abort for debugging */
    item = item->lnk.prev;
    if (item && item->iscommit) {
        abort();
    }
    return 0;
}

/**
 * Empty the context of the transaction
 *
 */
int srs_tran_empty(struct sqlclntstate *clnt)
{
    osqlstate_t *osql = &clnt->osql;
    srs_tran_query_t *item = NULL, *tmp = NULL;

    LISTC_FOR_EACH_SAFE(&osql->history->lst, item, tmp, lnk)
    {
        listc_rfl(&osql->history->lst, item);
        srs_free_tran_entry(clnt, item);
    }
    return 0;
}

long long gbl_verify_tran_replays = 0;

/**
 * Replay transaction using the current history
 *
 */
static int srs_tran_replay_int(struct sqlclntstate *clnt,
                               int(dispatch_fn)(struct sqlclntstate *))
{
    osqlstate_t *osql = &clnt->osql;
    srs_tran_query_t *item = 0;
    int rc = 0;
    int nq = 0;
    int tnq = 0;

    clnt->verify_retries = 0;

    if (!osql->history) {
        logmsg(LOGMSG_ERROR, "Trying to replay, but no history?\n");
        cheap_stack_trace();
        return -1;
    }

    do {
        reset_query_effects(clnt); /* Reset it for each retry */

        if (!osql->history) {
            logmsg(LOGMSG_ERROR, "Trying to replay, but no history?\n");
            abort();
        }

        clnt->verify_retries++;
        gbl_verify_tran_replays++;

        if (clnt->verify_retries % 10 == 0) {
            // whoah slow down there pal
            int ms = clnt->verify_retries / 10;
            poll(NULL, 0, 10 * ms);
        }

        /* Replays for SERIAL or SNAPISOL will never have select or selectv */
        if (clnt->dbtran.mode == TRANLEVEL_RECOM) {
            /* we need to free all the shadows but the selectv table (recgenid) */
            rc = osql_shadtbl_reset_for_selectv(clnt);
            if (rc) {
                logmsg(LOGMSG_ERROR,
                       "Failed to reset selectv in read committed\n");
                abort();
                cheap_stack_trace();
                return -1;
            }
        } else {
            osql_shadtbl_close(clnt);
        }

        if (clnt->verify_retries == gbl_osql_verify_retries_max + 1) {
            osql_set_replay(__FILE__, __LINE__, clnt, OSQL_RETRY_LAST);
        }

        if (0 /*!bdb_am_i_coherent(thedb->bdb_env)*/) {
            logmsg(LOGMSG_ERROR,
                   "Cannot replay, I am incoherent id=%d retries=%d\n",
                   clnt->queryid, clnt->verify_retries);
            rc = CDB2ERR_VERIFY_ERROR;
            break;
        }

        nq = 0;
        clnt->start_gen = bdb_get_rep_gen(thedb->bdb_env);
        LISTC_FOR_EACH(&osql->history->lst, item, lnk)
        {
            restore_stmt(clnt, item);
            if ((rc = dispatch_fn(clnt)) != 0)
                break;
            if (!osql->history)
                break;
            nq++;
        }
        if (rc == 0)
            tnq = nq;

        /* don't repeat if we fail with an inexplicable error, i.e. not a
         * logical error */
        if (rc < 0) {
            if (clnt->osql.replay != OSQL_RETRY_NONE) {
                logmsg(LOGMSG_ERROR,
                       "%p Replaying failed abnormally, calling abort, nq=%d tnq=%d\n",
                       clnt, nq, tnq);
                if (debug_switch_osql_verbose_history_replay()) {
                    if (osql->history) {
                        LISTC_FOR_EACH(&osql->history->lst, item, lnk)
                        {
                            logmsg(LOGMSG_DEBUG, "\"%s\"\n",
                                   print_stmt(clnt, item));
                        }
                    }
                }
                int type = tran2req(clnt->dbtran.mode);
                osql_sock_abort(clnt, type);
            }
            break;
        }
    } while (clnt->osql.replay == OSQL_RETRY_DO &&
             clnt->verify_retries <= gbl_osql_verify_retries_max);

    if (clnt->verify_retries >= gbl_osql_verify_retries_max) {
        uuidstr_t us;
        logmsg(LOGMSG_ERROR,
               "transaction %llx %s failed %d times with verify errors\n",
               clnt->osql.rqid, comdb2uuidstr(clnt->osql.uuid, us),
               clnt->verify_retries);
        /* Set to NONE to suppress the error from srs_tran_destroy(). */
        osql_set_replay(__FILE__, __LINE__, clnt, OSQL_RETRY_NONE);
    }

    /* replayed, free the session */
    if (srs_tran_destroy(clnt)) {
        logmsg(LOGMSG_ERROR, "%s Fail to destroy transaction replay session\n",
               __func__);
    }

    if (rc && clnt->verify_retries < gbl_osql_verify_retries_max) {
        logmsg(LOGMSG_ERROR,
               "Uncommitable transaction %d retried %d times, "
               "rc=%d [global retr=%lld] nq=%d tnq=%d\n",
               clnt->queryid, clnt->verify_retries, rc, gbl_verify_tran_replays,
               nq, tnq);
    }

    osql_set_replay(__FILE__, __LINE__, clnt, OSQL_RETRY_NONE);

    return rc;
}

static int run_sql_query(struct sqlclntstate *clnt)
{
    sqlengine_work_appsock(clnt->thd, clnt);
    return 0;
}

int srs_tran_replay_inline(struct sqlclntstate *clnt)
{
    return srs_tran_replay_int(clnt, run_sql_query);
}

int srs_tran_replay(struct sqlclntstate *clnt)
{
    return srs_tran_replay_int(clnt, dispatch_sql_query);
}
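The retry policy in srs_tran_replay_int can be summarized compactly: re-run the saved statements; on a verify error (rc > 0) retry, backing off a little more every ten attempts; stop on success, on an abnormal failure (rc < 0), or when the retry budget is exhausted. A Python sketch of just that control flow follows; the names and the retry cap are illustrative stand-ins, not the comdb2 API:

```python
# Illustrative sketch of the replay/retry control flow only.
import time

MAX_VERIFY_RETRIES = 16  # stand-in for gbl_osql_verify_retries_max

def replay(history, dispatch):
    """Re-run saved statements until success (0), hard failure (< 0),
    or the verify-retry budget runs out."""
    retries = 0
    while True:
        retries += 1
        if retries % 10 == 0:
            # "whoah slow down there pal": back off more every 10 retries,
            # mirroring poll(NULL, 0, 10 * ms) above.
            time.sleep(0.010 * (retries // 10))
        rc = 0
        for stmt in history:
            rc = dispatch(stmt)
            if rc != 0:
                break
        if rc <= 0 or retries > MAX_VERIFY_RETRIES:
            return rc  # success, abnormal failure, or out of retries
```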
from typing import Optional

# Assumption: `inspect` here is SQLAlchemy's inspection helper; Files, Projects,
# FilePrivileges and ProjectCalculator are the application's own models/helpers.
from sqlalchemy import inspect


class FileHierarchy:
    """
    This class can be used to populate the calculated fields on the Files model
    (like project, privileges, etc.).

    This class uses the query results returned by :func:`build_file_hierarchy_query`
    that are returned in a specific way (aka has the right queried columns).
    """

    def __init__(self, results, file_table, project_table):
        self.results = [row._asdict() for row in results]
        self.file_table = file_table
        self.project_table = project_table
        self.file_key = inspect(self.file_table).name
        self.project_key = inspect(self.project_table).name
        self.project_calculator = ProjectCalculator(results[0], self.project_table)
        if self.project_key is None or self.file_key is None:
            raise RuntimeError("the file_table or project_table need to be aliased")

    @property
    def project(self) -> Projects:
        return self.results[0][self.project_key]

    @property
    def file(self) -> Files:
        return self.results[0][self.file_key]

    def calculate_properties(self, user_ids):
        self.project_calculator.calculate_privileges(user_ids)
        self.file.calculated_project = self.project_calculator.project

        parent_deleted = False
        parent_recycled = False
        for row in reversed(self.results):
            file: Files = row[self.file_key]
            file.calculated_parent_deleted = parent_deleted
            file.calculated_parent_recycled = parent_recycled
            parent_deleted = parent_deleted or file.deleted
            parent_recycled = parent_recycled or file.recycled

    def calculate_privileges(self, user_ids):
        parent_file: Optional[Files] = None

        # We need to iterate through the files from parent to child because
        # permissions are inherited and must be calculated in that order
        for row in reversed(self.results):
            file: Files = row[self.file_key]

            for user_id in user_ids:
                project_manageable = row[f'has_project-admin_{user_id}']
                project_readable = row[f'has_project-read_{user_id}']
                project_writable = row[f'has_project-write_{user_id}']
                file_readable = row[f'has_file-read_{user_id}']
                file_writable = row[f'has_file-write_{user_id}']
                file_commentable = row[f'has_file-comment_{user_id}']
                parent_privileges = parent_file.calculated_privileges[
                    user_id] if parent_file else None

                commentable = any([
                    project_manageable,
                    project_readable and project_writable,
                    file_commentable,
                    parent_privileges and parent_privileges.commentable,
                ])
                writable = any([
                    project_manageable,
                    project_writable,
                    file_writable,
                    parent_privileges and parent_privileges.writable,
                ])
                readable = commentable or writable or any([
                    project_manageable,
                    project_readable,
                    file_readable,
                    file.public,
                    parent_privileges and parent_privileges.readable,
                ])
                commentable = commentable or writable

                privileges = FilePrivileges(
                    readable=readable,
                    writable=writable,
                    commentable=commentable,
                )
                file.calculated_privileges[user_id] = privileges

            parent_file = file
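The inheritance rule in calculate_privileges is worth seeing in isolation: walking from the root folder down to the file, each level ORs its own grants with whatever the parent already had, so a grant high in the tree flows down to every descendant. A toy sketch (field names are simplified stand-ins, and the project-level grants are omitted for brevity):

```python
# Toy illustration of cascading privileges from root to leaf.
def cascade(levels):
    """levels: list of dicts, root first, each holding direct boolean grants."""
    parent = {"readable": False, "writable": False, "commentable": False}
    for lv in levels:
        writable = lv.get("file_writable", False) or parent["writable"]
        commentable = (writable
                       or lv.get("file_commentable", False)
                       or parent["commentable"])
        readable = (commentable
                    or lv.get("file_readable", False)
                    or lv.get("public", False)
                    or parent["readable"])
        parent = {"readable": readable, "writable": writable,
                  "commentable": commentable}
    return parent

# A write grant on the top-level folder makes every descendant writable:
print(cascade([{"file_writable": True}, {}, {}]))
# -> {'readable': True, 'writable': True, 'commentable': True}
```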
import sys

# `commands` (argument parsing) and `convert` are defined elsewhere in this module.


def main():
    sys.tracebacklimit = 0
    args = commands()
    for filename in args.filenames:
        convert(filename, args.dos)
import urllib.parse
from pathlib import PurePosixPath
from typing import Union

# BucketConfig, generate_bucketfs_http_url, _make_path_relative and
# _encode_url_part are defined elsewhere in this package.


def generate_bucket_http_url(bucket_config: BucketConfig,
                             path_in_bucket: Union[None, str, PurePosixPath],
                             with_credentials: bool = False) -> urllib.parse.ParseResult:
    url = generate_bucketfs_http_url(bucket_config.bucketfs_config, with_credentials)
    if path_in_bucket is not None:
        path_in_bucket = _make_path_relative(path_in_bucket)
    else:
        path_in_bucket = ""
    encoded_bucket_and_path_in_bucket = \
        "/".join(
            _encode_url_part(part)
            for part in PurePosixPath(bucket_config.bucket_name, path_in_bucket).parts)
    url = urllib.parse.urljoin(url.geturl(), encoded_bucket_and_path_in_bucket)
    urlparse = urllib.parse.urlparse(url)
    return urlparse
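The function leans on two standard-library behaviors: each path component is percent-encoded individually, and urljoin resolves the encoded relative path against the base URL. A quick standalone check of that combination (approximating `_encode_url_part` with `urllib.parse.quote`; the host and file names are made up):

```python
import urllib.parse
from pathlib import PurePosixPath

base = "http://192.168.1.2:6583/"  # placeholder BucketFS base URL
# PurePosixPath splits into components; quote() encodes each one separately.
parts = PurePosixPath("mybucket", "sub dir/model.pkl").parts
encoded = "/".join(urllib.parse.quote(p) for p in parts)
print(urllib.parse.urljoin(base, encoded))
# -> http://192.168.1.2:6583/mybucket/sub%20dir/model.pkl
```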
Metformin in the treatment of polycystic ovary syndrome.

Polycystic ovary syndrome (PCOS) is one of the most frequent diseases affecting women of reproductive age. The heterogeneity of PCOS makes not only the diagnosis but also the choice of an adequate treatment difficult. The biguanide metformin (N,N'-dimethylbiguanide) is an antidiabetic drug that increases glucose utilization in insulin-sensitive tissues and is useful in reducing both insulin resistance and circulating androgens, as well as in restoring ovulation. However, metformin is being used clinically without a complete understanding of the mechanisms involved. The present review explores some of the actions and the efficacy of metformin in the treatment of PCOS during different reproductive periods.
import { createStore } from "vuex";
import { Coin } from "@/types";

const allCoins = [
  new Coin("bitcoin", "BTC", "Bitcoin", "icons/btc.png"),
  new Coin("ethereum", "ETH", "Ethereum", "icons/eth.png"),
  new Coin("cardano", "ADA", "Cardano", "icons/ada.jpg"),
  new Coin("binancecoin", "BNB", "Binance Coin", "icons/bnb.png"),
  new Coin("polkadot", "DOT", "Polkadot", "icons/dot.png", new Date("2020-08-20")),
  new Coin("solana", "SOL", "Solana", "icons/sol.jpeg"),
  new Coin("cosmos", "ATOM", "Cosmos", "icons/atom.png"),
  new Coin("matic-network", "MATIC", "Polygon", "icons/matic-polygon.png"),
  new Coin("dogecoin", "DOGE", "Dogecoin", "icons/doge.jpeg"),
  new Coin("terra-luna", "LUNA", "Terra", "icons/luna.png"),
  new Coin("thorchain", "RUNE", "THORChain", "icons/rune.jpeg"),
  new Coin("uniswap", "UNI", "Uniswap", "icons/uni.png", new Date("2020-09-18")),
  new Coin("chainlink", "LINK", "Chainlink", "icons/link.png"),
  new Coin("aave", "AAVE", "Aave", "icons/aave.png"),
  new Coin(
    "pancakeswap-token",
    "CAKE",
    "PancakeSwap",
    "icons/cake.png",
    new Date("2020-09-30")
  ),
  new Coin("maker", "MKR", "Maker", "icons/mkr.png"),
  new Coin("havven", "SNX", "Synthetix", "icons/snx.png"),
  new Coin(
    "compound-governance-token",
    "COMP",
    "Compound",
    "icons/comp.png",
    new Date("2020-06-17")
  ),
  new Coin("sushi", "SUSHI", "Sushi", "icons/sushi.png", new Date("2020-08-30")),
  new Coin(
    "yearn-finance",
    "YFI",
    "Yearn Finance",
    "icons/yfi.png",
    new Date("2020-07-19")
  ),
  new Coin("the-graph", "GRT", "The Graph", "icons/grt.png", new Date("2020-12-18")),
  new Coin("bancor", "BNT", "Bancor", "icons/bnt.png"),
  new Coin("uma", "UMA", "UMA", "icons/uma.png"),
  new Coin("0x", "ZRX", "0x", "icons/zrx.png"),
  new Coin("nxm", "NXM", "Nexus Mutual", "icons/nxm.jpg", new Date("2020-07-13")),
  new Coin("curve-dao-token", "CRV", "Curve", "icons/crv.png", new Date("2020-08-15")),
  new Coin("1inch", "1INCH", "1inch", "icons/1inch.png", new Date("2020-12-26")),
  new Coin("loopring", "LRC", "Loopring", "icons/lrc.png"),
  new Coin("republic-protocol", "REN", "REN", "icons/ren.png"),
  new Coin("venus", "XVS", "Venus", "icons/xvs.png", new Date("2020-10-07")),
  new Coin("balancer", "BAL", "Balancer", "icons/bal.png", new Date("2020-06-24")),
  new Coin(
    "mirror-protocol",
    "MIR",
    "Mirror Protocol",
    "icons/mir.png",
    new Date("2020-12-05")
  ),
  new Coin("band-protocol", "BAND", "Band Protocol", "icons/band.png"),
  new Coin(
    "alpha-finance",
    "ALPHA",
    "Alpha Finance",
    "icons/alpha.png",
    new Date("2020-10-11")
  ),
  new Coin("alchemix", "ALCX", "Alchemix", "icons/alcx.png", new Date("2021-03-01")),
  new Coin("serum", "SRM", "Serum", "icons/srm.png", new Date("2020-08-12")),
  new Coin(
    "anchor-protocol",
    "ANC",
    "Anchor Protocol",
    "icons/anc.png",
    new Date("2021-03-19")
  ),
  new Coin("raydium", "RAY", "Raydium", "icons/ray.png", new Date("2021-02-22")),
  new Coin("rocket-pool", "RPL", "Rocket Pool", "icons/rpl.png"),
  new Coin(
    "perpetual-protocol",
    "PERP",
    "Perpetual Protocol",
    "icons/perp.jpg",
    new Date("2020-09-10")
  ),
  new Coin("badger-dao", "BADGER", "Badger", "icons/badger.png", new Date("2020-12-05")),
  new Coin("rook", "ROOK", "KeeperDAO", "icons/rook.png", new Date("2020-11-09")),
  new Coin("barnbridge", "BOND", "BarnBridge", "icons/bond.png", new Date("2020-10-27")),
  new Coin("cream-2", "CREAM", "C.R.E.A.M.", "icons/cream.png", new Date("2020-08-06")),
  new Coin("hegic", "HEGIC", "Hegic", "icons/hegic.png", new Date("2020-09-13")),
  new Coin("lido-dao", "LDO", "Lido", "icons/ldo.png", new Date("2021-01-06")),
  new Coin("akash-network", "AKT", "Akash", "icons/akt.png", new Date("2020-10-17")),
  new Coin("secret", "SCRT", "Secret Network", "icons/scrt.png", new Date("2020-10-02")),
  new Coin("reserve-rights-token", "RSR", "Reserve Rights Token", "icons/rsr.png"),
  new Coin("tribe-2", "TRIBE", "Tribe", "icons/tribe.png", new Date("2021-04-05")),
  new Coin(
    "injective-protocol",
    "INJ",
    "Injective Protocol",
    "icons/inj.jpeg",
    new Date("2020-10-22")
  ),
  new Coin(
    "harvest-finance",
    "FARM",
    "Harvest Finance",
    "icons/farm.png",
    new Date("2020-09-03")
  ),
  new Coin(
    "rari-governance-token",
    "RGT",
    "Rari Capital",
    "icons/rgt.jpeg",
    new Date("2020-10-23")
  ),
  new Coin("meta", "MTA", "mStable", "icons/mta.png", new Date("2020-07-19")),
  new Coin("haven", "XHV", "Haven", "icons/xhv.png"),
  new Coin("kusama", "KSM", "Kusama", "icons/ksm.jpeg"),
  new Coin("fantom", "FTM", "Fantom", "icons/ftm.png"),
  // new Coin("olympus", "OHM", "Olympus", "icons/ohm.png", new Date("")),
  new Coin(
    "wootrade-network",
    "WOO",
    "Wootrade Network",
    "icons/woo.jpeg",
    new Date("2020-10-31")
  ),
  new Coin("gitcoin", "GTC", "Gitcoin", "icons/gtc.png", new Date("2021-05-27")),
  new Coin("api3", "API3", "API3", "icons/api3.jpeg", new Date("2020-12-01")),
];

console.log("Total number of coins:", allCoins.length);

export default createStore({
  state: {
    overlayShown: false,
    sidebarShown: true,
    currency: {
      id: "",
      symbol: "",
    },
    startDate: new Date(),
    selectedCoins: [] as Coin[],
    allCoins,
    counter: 0,
  },
});
///////////////////////////////////////////////////////////////////////////// // Name: src/palmos/menu.cpp // Purpose: wxMenu, wxMenuBar, wxMenuItem // Author: <NAME> - minimal working wxPalmOS port // Modified by: // Created: 10/12/04 // RCS-ID: $Id: menu.cpp,v 1.6 2005/07/01 19:36:56 ABX Exp $ // Copyright: (c) <NAME> // Licence: wxWindows licence ///////////////////////////////////////////////////////////////////////////// // =========================================================================== // declarations // =========================================================================== // --------------------------------------------------------------------------- // headers // --------------------------------------------------------------------------- #if defined(__GNUG__) && !defined(NO_GCC_PRAGMA) #pragma implementation "menu.h" #endif // For compilers that support precompilation, includes "wx.h". #include "wx/wxprec.h" #ifdef __BORLANDC__ #pragma hdrstop #endif #if wxUSE_MENUS #ifndef WX_PRECOMP #include "wx/frame.h" #include "wx/menu.h" #include "wx/utils.h" #include "wx/intl.h" #include "wx/log.h" #endif #if wxUSE_OWNER_DRAWN #include "wx/ownerdrw.h" #endif #include <Loader.h> #include <Form.h> #include <Menu.h> // ---------------------------------------------------------------------------- // global variables // ---------------------------------------------------------------------------- extern wxMenu *wxCurrentPopupMenu; // ---------------------------------------------------------------------------- // constants // ---------------------------------------------------------------------------- // the (popup) menu title has this special id static const int idMenuTitle = -3; // ---------------------------------------------------------------------------- // private functions // ---------------------------------------------------------------------------- // ============================================================================ // implementation // ============================================================================ #include <wx/listimpl.cpp> WX_DEFINE_LIST( wxMenuInfoList ) ; #if wxUSE_EXTENDED_RTTI WX_DEFINE_FLAGS( wxMenuStyle ) wxBEGIN_FLAGS( wxMenuStyle ) wxFLAGS_MEMBER(wxMENU_TEAROFF) wxEND_FLAGS( wxMenuStyle ) IMPLEMENT_DYNAMIC_CLASS_XTI(wxMenu, wxEvtHandler,"wx/menu.h") wxCOLLECTION_TYPE_INFO( wxMenuItem * , wxMenuItemList ) ; template<> void wxCollectionToVariantArray( wxMenuItemList const &theList, wxxVariantArray &value) { wxListCollectionToVariantArray<wxMenuItemList::compatibility_iterator>( theList , value ) ; } wxBEGIN_PROPERTIES_TABLE(wxMenu) wxEVENT_PROPERTY( Select , wxEVT_COMMAND_MENU_SELECTED , wxCommandEvent) wxPROPERTY( Title, wxString , SetTitle, GetTitle, wxString(), 0 /*flags*/ , wxT("Helpstring") , wxT("group") ) wxREADONLY_PROPERTY_FLAGS( MenuStyle , wxMenuStyle , long , GetStyle , EMPTY_MACROVALUE , 0 /*flags*/ , wxT("Helpstring") , wxT("group")) // style wxPROPERTY_COLLECTION( MenuItems , wxMenuItemList , wxMenuItem* , Append , GetMenuItems , 0 /*flags*/ , wxT("Helpstring") , wxT("group")) wxEND_PROPERTIES_TABLE() wxBEGIN_HANDLERS_TABLE(wxMenu) wxEND_HANDLERS_TABLE() wxDIRECT_CONSTRUCTOR_2( wxMenu , wxString , Title , long , MenuStyle ) WX_DEFINE_FLAGS( wxMenuBarStyle ) wxBEGIN_FLAGS( wxMenuBarStyle ) wxFLAGS_MEMBER(wxMB_DOCKABLE) wxEND_FLAGS( wxMenuBarStyle ) // the negative id would lead the window (its superclass !) 
to veto streaming out otherwise bool wxMenuBarStreamingCallback( const wxObject *WXUNUSED(object), wxWriter * , wxPersister * , wxxVariantArray & ) { return true ; } IMPLEMENT_DYNAMIC_CLASS_XTI_CALLBACK(wxMenuBar, wxWindow ,"wx/menu.h",wxMenuBarStreamingCallback) IMPLEMENT_DYNAMIC_CLASS_XTI(wxMenuInfo, wxObject , "wx/menu.h" ) wxBEGIN_PROPERTIES_TABLE(wxMenuInfo) wxREADONLY_PROPERTY( Menu , wxMenu* , GetMenu , EMPTY_MACROVALUE , 0 /*flags*/ , wxT("Helpstring") , wxT("group")) wxREADONLY_PROPERTY( Title , wxString , GetTitle , wxString() , 0 /*flags*/ , wxT("Helpstring") , wxT("group")) wxEND_PROPERTIES_TABLE() wxBEGIN_HANDLERS_TABLE(wxMenuInfo) wxEND_HANDLERS_TABLE() wxCONSTRUCTOR_2( wxMenuInfo , wxMenu* , Menu , wxString , Title ) wxCOLLECTION_TYPE_INFO( wxMenuInfo * , wxMenuInfoList ) ; template<> void wxCollectionToVariantArray( wxMenuInfoList const &theList, wxxVariantArray &value) { wxListCollectionToVariantArray<wxMenuInfoList::compatibility_iterator>( theList , value ) ; } wxBEGIN_PROPERTIES_TABLE(wxMenuBar) wxPROPERTY_COLLECTION( MenuInfos , wxMenuInfoList , wxMenuInfo* , Append , GetMenuInfos , 0 /*flags*/ , wxT("Helpstring") , wxT("group")) wxEND_PROPERTIES_TABLE() wxBEGIN_HANDLERS_TABLE(wxMenuBar) wxEND_HANDLERS_TABLE() wxCONSTRUCTOR_DUMMY( wxMenuBar ) #else IMPLEMENT_DYNAMIC_CLASS(wxMenu, wxEvtHandler) IMPLEMENT_DYNAMIC_CLASS(wxMenuBar, wxWindow) IMPLEMENT_DYNAMIC_CLASS(wxMenuInfo, wxObject) #endif const wxMenuInfoList& wxMenuBar::GetMenuInfos() const { wxMenuInfoList* list = const_cast< wxMenuInfoList* >( &m_menuInfos ) ; WX_CLEAR_LIST( wxMenuInfoList , *list ) ; for( size_t i = 0 ; i < GetMenuCount() ; ++i ) { wxMenuInfo* info = new wxMenuInfo() ; info->Create( const_cast<wxMenuBar*>(this)->GetMenu(i) , GetLabelTop(i) ) ; list->Append( info ) ; } return m_menuInfos ; } // --------------------------------------------------------------------------- // wxMenu construction, adding and removing menu items // --------------------------------------------------------------------------- // Construct a menu with optional title (then use append) void wxMenu::Init() { } // The wxWindow destructor will take care of deleting the submenus. wxMenu::~wxMenu() { } void wxMenu::Break() { } void wxMenu::Attach(wxMenuBarBase *menubar) { wxMenuBase::Attach(menubar); } #if wxUSE_ACCEL int wxMenu::FindAccel(int id) const { return wxNOT_FOUND; } void wxMenu::UpdateAccel(wxMenuItem *item) { } #endif // wxUSE_ACCEL // append a new item or submenu to the menu bool wxMenu::DoInsertOrAppend(wxMenuItem *pItem, size_t pos) { if ( IsAttached() && GetMenuBar()->IsAttached() ) { // Regenerate the menu resource GetMenuBar()->Refresh(); } return true; } void wxMenu::EndRadioGroup() { } wxMenuItem* wxMenu::DoAppend(wxMenuItem *item) { wxCHECK_MSG( item, NULL, _T("NULL item in wxMenu::DoAppend") ); if(!wxMenuBase::DoAppend(item) || !DoInsertOrAppend(item)) { return NULL; } else if(IsAttached() && GetMenuBar()->IsAttached()) { // Regenerate the menu resource GetMenuBar()->Refresh(); } return item; } wxMenuItem* wxMenu::DoInsert(size_t pos, wxMenuItem *item) { if (wxMenuBase::DoInsert(pos, item) && DoInsertOrAppend(item, pos)) return item; else return NULL; } wxMenuItem *wxMenu::DoRemove(wxMenuItem *item) { // we need to find the item's position in the child list size_t pos; wxMenuItemList::compatibility_iterator node = GetMenuItems().GetFirst(); for ( pos = 0; node; pos++ ) { if ( node->GetData() == item ) break; node = node->GetNext(); } // DoRemove() (unlike Remove) can only be called for an existing item! 
wxCHECK_MSG( node, NULL, wxT("bug in wxMenu::Remove logic") ); // remove the item from the menu wxMenuItem *ret=wxMenuBase::DoRemove(item); if ( IsAttached() && GetMenuBar()->IsAttached() ) { // Regenerate the menu resource GetMenuBar()->Refresh(); } return ret; } // --------------------------------------------------------------------------- // accelerator helpers // --------------------------------------------------------------------------- #if wxUSE_ACCEL // create the wxAcceleratorEntries for our accels and put them into provided // array - return the number of accels we have size_t wxMenu::CopyAccels(wxAcceleratorEntry *accels) const { size_t count = GetAccelCount(); for ( size_t n = 0; n < count; n++ ) { *accels++ = *m_accels[n]; } return count; } #endif // wxUSE_ACCEL // --------------------------------------------------------------------------- // set wxMenu title // --------------------------------------------------------------------------- void wxMenu::SetTitle(const wxString& label) { m_title = label; if ( IsAttached() && GetMenuBar()->IsAttached() ) { // Regenerate the menu resource GetMenuBar()->Refresh(); } } // --------------------------------------------------------------------------- // event processing // --------------------------------------------------------------------------- bool wxMenu::PalmCommand(WXUINT WXUNUSED(param), WXWORD id) { return false; } // --------------------------------------------------------------------------- // other // --------------------------------------------------------------------------- wxWindow *wxMenu::GetWindow() const { return NULL; } // --------------------------------------------------------------------------- // Menu Bar // --------------------------------------------------------------------------- void wxMenuBar::Init() { } wxMenuBar::wxMenuBar() { } wxMenuBar::wxMenuBar( long WXUNUSED(style) ) { } wxMenuBar::wxMenuBar(size_t count, wxMenu *menus[], const wxString titles[], long WXUNUSED(style)) { } wxMenuBar::~wxMenuBar() { } // --------------------------------------------------------------------------- // wxMenuBar helpers // --------------------------------------------------------------------------- void wxMenuBar::Refresh() { wxCHECK_RET( IsAttached(), wxT("can't refresh unattached menubar") ); // Regenerate the menu resource LoadMenu(); } WXHMENU wxMenuBar::Create() { return NULL; } int wxMenuBar::PalmPositionForWxMenu(wxMenu *menu, int wxpos) { return -1; } // --------------------------------------------------------------------------- // wxMenuBar functions to work with the top level submenus // --------------------------------------------------------------------------- void wxMenuBar::EnableTop(size_t pos, bool enable) { // Palm OS does not have support for grayed or disabled items } void wxMenuBar::SetLabelTop(size_t pos, const wxString& label) { wxCHECK_RET( pos < GetMenuCount(), wxT("invalid menu index") ); m_titles[pos]=wxStripMenuCodes(label); if ( !IsAttached() ) { return; } // Regenerate the menu resource Refresh(); } wxString wxMenuBar::GetLabelTop(size_t pos) const { wxCHECK_MSG( pos < GetMenuCount(), wxEmptyString, wxT("invalid menu index in wxMenuBar::GetLabelTop") ); return wxMenuItem::GetLabelFromText(m_titles[pos]); } // --------------------------------------------------------------------------- // wxMenuBar construction // --------------------------------------------------------------------------- wxMenu *wxMenuBar::Replace(size_t pos, wxMenu *menu, const wxString& title) { wxMenu *menuOld = 
wxMenuBarBase::Replace(pos, menu, title); if ( !menuOld ) return NULL; m_titles[pos]=wxStripMenuCodes(title); if ( IsAttached() ) { // Regenerate the menu resource Refresh(); } return menuOld; } bool wxMenuBar::Insert(size_t pos, wxMenu *menu, const wxString& title) { if ( !wxMenuBarBase::Insert(pos, menu, title) ) return false; m_titles.Insert(wxStripMenuCodes(title), pos); if ( IsAttached() ) { // Regenerate the menu resource Refresh(); } return true; } bool wxMenuBar::Append(wxMenu *menu, const wxString& title) { if ( !wxMenuBarBase::Append(menu, title) ) return false; m_titles.Add(wxStripMenuCodes(title)); if(IsAttached()) { // Regenerate the menu resource Refresh(); } return true; } wxMenu *wxMenuBar::Remove(size_t pos) { wxMenu *menu = wxMenuBarBase::Remove(pos); if ( !menu ) return NULL; m_titles.RemoveAt(pos); if (IsAttached()) { // Regenerate the menu resource Refresh(); } return menu; } #if wxUSE_ACCEL void wxMenuBar::RebuildAccelTable() { } #endif // wxUSE_ACCEL int wxMenuBar::ProcessCommand(int ItemID) { if(!IsAttached()) return -1; int MenuNum=(ItemID/1000)-1; int ItemNum=(ItemID-(1000*(MenuNum+1))); // Should never happen, but it doesn't hurt to check anyway. if(MenuNum>=GetMenuCount()) return -1; // Get the menu wxMenu *ActiveMenu=GetMenu(MenuNum); // Make sure this is a valid item (positions run from 0 to count-1). if(ItemNum>=ActiveMenu->GetMenuItemCount()) return -1; // Get the item wxMenuItem *ActiveItem=ActiveMenu->FindItemByPosition(ItemNum); int ActiveID=ActiveItem->GetId(); return ActiveID; } /* Palm OS does not have good dynamic menu support. About all you can do with * the standard API calls is to add new items to an existing drop-down menu and * hide/show items in a drop-down menu. It is impossible to add, hide, or * change the label on a drop-down menu. * * The easiest and simplest way around this limitation is to modify the Palm OS * MenuBarType structure directly. This gives limited ability to change the * label on a drop-down menu. I have not been able to find a safe way to add, * delete, or resize drop-down menus in OS 6. * * The following routines attempt to work around these limitations in the * Palm OS API to provide limited dynamic menu support. This solution is far * from perfect, but the only other option is to wait for PalmSource to add full * dynamic menu support, or to recreate the Palm OS menu system from scratch. * * This system is limited in that no more than 4 drop-down menus are allowed per * menu bar, and the label for each drop-down menu is limited to 8 characters of * text. However, this menu system should work for most applications. * * Basically the menu routines select one of four menu bars, depending on * whether or not the requested menu bar has one, two, three, or four drop-down * menus. * * These four "template" menu bars contain one, two, three, or four drop-down * menus. Each menu has a dummy menu item attached to it to allow the Palm OS * MenuAddItem function to add the real items. * * The labels on the drop-down menus are then replaced with the labels of the * real menus. * * The menu is then attached to the active window and the MenuAddItem API * function is called to add the items to each drop-down menu. Finally, * MenuHideItem is called to remove the dummy items from each drop-down menu. */ void wxMenuBar::LoadMenu() { int i=0; int j=0; // Handle to the currently running application database DmOpenRef AppDB; // Get app database reference - needed for some Palm OS Menu API calls. 
SysGetModuleDatabase(SysGetRefNum(), NULL, &AppDB); // Get the number of menus int NumMenus=GetMenuCount(); // Set up the pointers and handles char *PalmOSMenuBarPtr; MemHandle PalmOSMenuBar; // Load the menu template and set up the menu pointers if(NumMenus==1) { PalmOSMenuBar=DmGetResource(AppDB,'MBAR',1000); PalmOSMenuBarPtr=(char *)MemHandleLock(PalmOSMenuBar); PalmOSMenuBarPtr+=74; } else if(NumMenus==2) { PalmOSMenuBar=DmGetResource(AppDB,'MBAR',2000); PalmOSMenuBarPtr=(char *)MemHandleLock(PalmOSMenuBar); PalmOSMenuBarPtr+=116; } else if(NumMenus==3) { PalmOSMenuBar=DmGetResource(AppDB,'MBAR',3000); PalmOSMenuBarPtr=(char *)MemHandleLock(PalmOSMenuBar); PalmOSMenuBarPtr+=158; } else { // We support a maximum of 4 menus, so make sure that we do not create // more than we can handle. NumMenus=4; PalmOSMenuBar=DmGetResource(AppDB,'MBAR',4000); PalmOSMenuBarPtr=(char *)MemHandleLock(PalmOSMenuBar); PalmOSMenuBarPtr+=200; } // Set the proper names for the drop-down triggers. for(i=0;i<NumMenus;i++) { // Clear out the old label char buffer[8]={' ',' ',' ',' ',' ',' ',' ',' '}; MemMove(PalmOSMenuBarPtr,buffer,8); wxString MenuTitle=m_titles.Item(i); // Make sure we don't copy more than 8 bytes for the label int LengthToCopy=MenuTitle.length(); if(LengthToCopy>8) LengthToCopy=8; MemMove(PalmOSMenuBarPtr,MenuTitle,LengthToCopy); PalmOSMenuBarPtr+=11; } // We are done with the menu pointer. MemHandleUnlock(PalmOSMenuBar); DmReleaseResource(PalmOSMenuBar); // We must make the menu active before we can add items to the drop-down // triggers. FrmSetMenu(FrmGetActiveForm(),AppDB,NumMenus*1000); /* Add the menu items to the drop-down triggers. This must be done after * setting the triggers, because setting the names of drop-down triggers * that have a variable number of items requires careful calculation of * the offsets in the MenuBarType structure. Setting the triggers first * avoids this. */ for(i=0;i<NumMenus;i++) { wxMenu *CurrentMenu=GetMenu(i); for(j=0;j<CurrentMenu->GetMenuItemCount();j++) { wxMenuItem *CurrentItem=CurrentMenu->FindItemByPosition(j); wxString ItemLabel=CurrentItem->GetLabel(); if(CurrentItem->IsSeparator()==true) { char Separator=MenuSeparatorChar; if(j==0) MenuAddItem(9000+i,((i*1000)+1000)+j,0x00,&Separator); else MenuAddItem(((i*1000)+1000)+j-1,((i*1000)+1000)+j,0x00,&Separator); } else { if(j==0) MenuAddItem(9000+i,((i*1000)+1000)+j,0x00,ItemLabel); else MenuAddItem(((i*1000)+1000)+j-1,((i*1000)+1000)+j,0x00,ItemLabel); } } // Hide the dummy menu item, since we don't need it anymore. MenuHideItem(9000+i); } } void wxMenuBar::Attach(wxFrame *frame) { // before attaching preprocess menus to not include wxID_EXIT item // as PalmOS guidelines suggest wxMenuItem *item; wxMenu *menu; int i; while( item = FindItem(wxID_EXIT) ) { menu = item->GetMenu(); if( !menu ) break; // something broken ? size_t count = menu->GetMenuItemCount(); if( count == 0 ) break; // something broken ? // if EXIT is last item in menu if( menu->FindItemByPosition( count - 1 ) == item ) { menu->Destroy( item ); // was more than one item? // was previous separator ? if( count > 2 ) { item = menu->FindItemByPosition( count - 2 ); if(item && item->IsSeparator()) menu->Destroy( item ); } } // if EXIT is first item in menu else if( menu->FindItemByPosition( 0 ) == item ) { menu->Destroy( item ); // was more than one item? // was previous separator ? 
if( count > 2 ) { item = menu->FindItemByPosition( 0 ); if(item && item->IsSeparator()) menu->Destroy( item ); } } // if EXIT is in the middle but before and after are selectors else { i = 1; // 0 case already done while ( (i < count) && (menu->FindItemByPosition( i ) != item) ) { i++; } if (i >= count) break; if (menu->FindItemByPosition( i ) != item) break; menu->Destroy( item ); item = menu->FindItemByPosition( i ); if ( item && item->IsSeparator() && menu->FindItemByPosition( i-1 )->IsSeparator() ) { // no need for two neighbouring separators menu->Destroy( item ); } } } // check if we received any empty menu! i = 0; while(i < GetMenuCount()) { menu = GetMenu(i); if( menu && (menu->GetMenuItemCount()==0) ) { menu = Remove( i ); delete menu; } else i++; } wxMenuBarBase::Attach(frame); LoadMenu(); } void wxMenuBar::Detach() { wxMenuBarBase::Detach(); } #endif // wxUSE_MENUS
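The ID arithmetic shared by LoadMenu and ProcessCommand above packs a menu index and an item index into one command ID. A minimal Python sketch of that encoding and its inverse, assuming the same 1000-per-menu scheme as the code (the function names are mine):

# Sketch of the Palm OS menu ID scheme used above:
# ID = (menu_index + 1) * 1000 + item_index.

def encode_item_id(menu_index, item_index):
    """Pack a menu/item pair into a single command ID."""
    return (menu_index + 1) * 1000 + item_index

def decode_item_id(item_id):
    """Recover (menu_index, item_index) from a command ID."""
    menu_index = item_id // 1000 - 1
    item_index = item_id - (menu_index + 1) * 1000
    return menu_index, item_index

assert decode_item_id(encode_item_id(2, 7)) == (2, 7)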
'You don't need to look far to research' terrorists, Cumberbatch tells MTV News about playing the 'Into Darkness' villain. Is he Khan? Is he Gary Mitchell? For now, he's John Harrison — and beyond that "name," the only thing we really know about Benedict Cumberbatch's "Star Trek Into Darkness" villain is that he is a ruthless and effective terrorist. So, while we can't ask Cumberbatch if he studied up on old "Star Trek" episodes and movies to research the character he's playing in J.J. Abrams' upcoming sequel — that would be tipping his hand too much, after all — we can ask the fan-favorite "Sherlock" actor about where else in the world he found inspiration for the horrific Harrison. When faced with that question, MTV News learned that Cumberbatch didn't look much further than Harrison's own vocation to fuel his performance. "[Look at] real social history and present history, everything that's going on: uprisings, people who are trying to spread democracy or fight their cause, not necessarily through political means ... he is a terrorist, and sadly, that's part of the fabric of our modern world," he explained to MTV when we spoke at a "Star Trek" event in late 2012. "You don't need to look far to research that one." Cumberbatch continued, saying he looked towards "certain terrorist groups in the past" to see how they operated. But he wasn't only concerned with real-life terrorism. The actor emphasized the importance of turning towards his character's roots within the "Star Trek" universe itself, saying, "It was important to me to ground him in a reality that's based more on his story than, say, a parallel in the real world." "What should certainly be chilling are the parallels to the modern world," he added cryptically. Are you excited or terrified to behold Cumberbatch's "Trek" villain? Let us know what you think in the comments section or let me know on Twitter @roundhoward! Check out everything we've got on "Star Trek Into Darkness."
#include <cstdio>
#include <iostream>
#include <algorithm> // std::max

using namespace std;

typedef long long int Lint;

const int MAXN = 100100;

int N, res;
Lint T;
Lint sum[MAXN]; // prefix sums: sum[i] = a[1] + ... + a[i]

int main() {
    cin >> N >> T;
    for (int a, i = 1; i <= N; i++) {
        scanf(" %d", &a);
        sum[i] = sum[i - 1] + a;
    }

    // Sliding window [b, e]: extend e to the right, shrink b from the left
    // while the window sum exceeds T; track the longest valid window.
    int b = 1, e = 1;
    while (e <= N) {
        if (sum[e] - sum[b - 1] <= T)
            res = max(res, e - b + 1);
        if (e == N)
            break;
        while (sum[e + 1] - sum[b - 1] > T && b <= e)
            b++;
        e++;
    }
    printf("%d\n", res);
    return 0;
}
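The loop above is a classic two-pointer sliding window over prefix sums. For reference, a standalone Python sketch of the same invariant without prefix sums (function and variable names are hypothetical):

def longest_subarray_within_budget(values, budget):
    """Longest contiguous run of non-negative values whose sum is <= budget."""
    best = 0
    window_sum = 0
    left = 0
    for right, v in enumerate(values):
        window_sum += v
        # Shrink from the left until the window fits the budget again.
        while window_sum > budget and left <= right:
            window_sum -= values[left]
            left += 1
        best = max(best, right - left + 1)
    return best

assert longest_subarray_within_budget([3, 1, 2, 1], 4) == 3  # [1, 2, 1]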
import os

def django_settings(default_settings_module='settings'):
    # Note: `dot_env` and the global `env` (Fabric-style) are assumed to be
    # provided by the surrounding module; only `os` is imported here.
    denv = dot_env()
    env.django_settings_module = denv.get('DJANGO_SETTINGS_MODULE',
                                          default_settings_module)
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', env.django_settings_module)
    from django.conf import settings
    import django
    django.setup()
    return settings
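A usage sketch, assuming the helper sits in a fabfile where Fabric's global env and the dot_env() helper are already in scope (both names come from the snippet itself, not from a verified API; the settings module path is a placeholder):

# Hypothetical fabfile task using the helper above.
def print_debug_flag():
    settings = django_settings('myproject.settings')  # placeholder module path
    print('DEBUG =', settings.DEBUG)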
/**
 * Parses the move string into int representation, with corresponding
 * en passant, pawn start, castle, capture and promotion bits set.
 *
 * @param boardStructure
 * @param moveStr
 * @return int representation of move, or BoardUtils.NO_MOVE if the string
 *         is malformed or does not match a legal move
 * @throws NullPointerException if the BoardStructure is null
 * @throws IllegalArgumentException if the move string is empty
 */
protected static int parseMove(BoardStructure boardStructure, String moveStr) {
    if (boardStructure == null)
        throw new NullPointerException("null BoardStructure");
    if (moveStr.length() == 0)
        throw new IllegalArgumentException("empty move string");

    // A coordinate move needs at least four characters (e.g. "e2e4").
    if (moveStr.length() < 4)
        return BoardUtils.NO_MOVE;

    if (moveStr.charAt(1) > '8' || moveStr.charAt(1) < '1' ||
        moveStr.charAt(3) > '8' || moveStr.charAt(3) < '1' ||
        moveStr.charAt(0) > 'h' || moveStr.charAt(0) < 'a' ||
        moveStr.charAt(2) > 'h' || moveStr.charAt(2) < 'a')
        return BoardUtils.NO_MOVE;

    int from = BoardUtils.convertFileRankToSqr(moveStr.charAt(0) - 'a',
            moveStr.charAt(1) - '1');
    int to = BoardUtils.convertFileRankToSqr(moveStr.charAt(2) - 'a',
            moveStr.charAt(3) - '1');

    // Generate all moves and look for one matching from/to (and, for
    // promotions, the trailing promotion-piece letter).
    MoveList moveList = new MoveList();
    MoveGenerator.generateAllMoves(boardStructure, moveList);

    int move, promotedPiece;
    for (int i = 0; i < moveList.getCount(); i++) {
        move = moveList.getMove(i).getMove();
        if (from(move) == from && to(move) == to) {
            promotedPiece = promoted(move);
            if (promotedPiece != BoardPiece.EMPTY.value) {
                // A promotion move needs a fifth character, e.g. "e7e8q".
                if (moveStr.length() < 5)
                    continue;
                if (BoardUtils.isPieceRookOrQueen(promotedPiece)
                        && !BoardUtils.isPieceBishopOrQueen(promotedPiece)
                        && moveStr.charAt(4) == 'r')
                    return move;
                else if (!BoardUtils.isPieceRookOrQueen(promotedPiece)
                        && BoardUtils.isPieceBishopOrQueen(promotedPiece)
                        && moveStr.charAt(4) == 'b')
                    return move;
                else if (BoardUtils.isPieceRookOrQueen(promotedPiece)
                        && BoardUtils.isPieceBishopOrQueen(promotedPiece)
                        && moveStr.charAt(4) == 'q')
                    return move;
                else if (BoardUtils.isPieceKnight(promotedPiece)
                        && moveStr.charAt(4) == 'n')
                    return move;
                continue;
            }
            return move;
        }
    }
    return BoardUtils.NO_MOVE;
}
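The from/to computation above maps algebraic coordinates through file and rank offsets. A Python sketch of that mapping for a plain 0..63 board; note the Java code delegates to BoardUtils.convertFileRankToSqr, whose internal layout (for example a 120-square mailbox) may differ:

def square_index(algebraic):
    """Map 'a1'..'h8' to 0..63 (file-major within each rank)."""
    file = ord(algebraic[0]) - ord('a')   # 0..7
    rank = ord(algebraic[1]) - ord('1')   # 0..7
    return rank * 8 + file

move = "e2e4"
frm, to = square_index(move[:2]), square_index(move[2:4])
assert (frm, to) == (12, 28)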
import json

def _get_json_pile(fn, container):
    # Each line of the file is a JSON array; its elements become the
    # positional arguments of `container` (e.g. a namedtuple class).
    with open(fn, encoding='ascii') as f:
        for line in f:
            entry = container(*json.loads(line))
            yield entry
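A usage sketch, assuming each line of the file is a JSON array whose elements fill the container positionally (a namedtuple fits); the file name and field names are made up for illustration:

from collections import namedtuple

Entry = namedtuple('Entry', ['user', 'score'])

# pile.jsonl (hypothetical), one JSON array per line:
#   ["alice", 10]
#   ["bob", 7]
for entry in _get_json_pile('pile.jsonl', Entry):
    print(entry.user, entry.score)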
/// Use big-endian to ensure byte ordering.
impl From<Height> for HeightKey {
    fn from(height: Height) -> HeightKey {
        HeightKey(u64::to_be_bytes(height.get()))
    }
}
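The doc comment is about sort order: a key-value store that compares keys as raw bytes iterates heights in numeric order only if the integer is stored big-endian. A quick Python check of that property:

import struct

# Big-endian u64 keys sort byte-wise in the same order as the numbers.
assert struct.pack('>Q', 1) < struct.pack('>Q', 256)

# Little-endian breaks this: 256 encodes as 00 01 ... and sorts before 1.
assert struct.pack('<Q', 256) < struct.pack('<Q', 1)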
export { treatments } from './treatments-number';
export { mostInvolvedTeeth } from './most-involved-teeth';
export { genderPie } from './gender-pie';
export { ageBar } from './age-bar';
export { mostAppliedTreatments } from './most-applied-treatments';
export { appointmentsByDate } from './appointments-by-day';
export { financesByDate } from './finances-by-date';
export { treatmentsByGender } from './treatments-gender';
package com.stylefeng.guns.rest.modular.education.requester;

import com.stylefeng.guns.modular.system.model.Attachment;
import com.stylefeng.guns.rest.core.SimpleRequester;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Pattern;
import javax.validation.constraints.Size;

/**
 * Created by 罗华.
 */
@ApiModel(value = "StudentAddRequester", description = "Add student")
public class StudentAddRequester extends SimpleRequester {

    private static final long serialVersionUID = -1976690924147728427L;

    @ApiModelProperty(name = "userName", value = "User name", required = true, position = 0, example = "18580255110")
    @NotBlank(message = "User name must not be empty")
    private String userName;

    @ApiModelProperty(name = "name", value = "Student name", required = true, position = 1, example = "小明")
    @NotBlank(message = "Student name must not be empty")
    @Size(min = 1, max = 8, message = "Name length must be 1 - 8 characters")
    private String name;

    @ApiModelProperty(name = "gender", value = "Student gender", required = true, position = 2, example = "1")
    @NotNull(message = "Student gender must not be empty")
    private Integer gender;

    @ApiModelProperty(name = "grade", value = "Current grade", required = true, position = 3, example = "4")
    @NotNull(message = "Current grade must not be empty")
    private Integer grade;

    @ApiModelProperty(name = "school", value = "Current school", position = 4, example = "重庆谢家湾小学")
    @Size(max = 64, message = "Current school name is too long")
    private String school;

    @ApiModelProperty(name = "targetSchool", value = "Target school", position = 5, example = "重庆三中")
    @Size(max = 64, message = "Target school name is too long")
    private String targetSchool;

    @ApiModelProperty(name = "avator", value = "Avatar", position = 6)
    private Long avatorId;

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Integer getGender() {
        return gender;
    }

    public void setGender(Integer gender) {
        this.gender = gender;
    }

    public Integer getGrade() {
        return grade;
    }

    public void setGrade(Integer grade) {
        this.grade = grade;
    }

    public String getSchool() {
        return school;
    }

    public void setSchool(String school) {
        this.school = school;
    }

    public String getTargetSchool() {
        return targetSchool;
    }

    public void setTargetSchool(String targetSchool) {
        this.targetSchool = targetSchool;
    }

    public Long getAvatorId() {
        return avatorId;
    }

    public void setAvatorId(Long avatorId) {
        this.avatorId = avatorId;
    }

    @Override
    public boolean checkValidate() {
        return false;
    }
}
// export * from "./shared-directives/dynamic-component-load.directive";
// export * from "./shared-directives/template-reference.directive";
// export * from "./shared-directives/validation.directive";
// export * from "./shared-services/shared-filter.service";
// export * from "./shared.module";
from typing import List


class Solution:
    def longestCommonPrefix(self, strs: List[str]) -> str:
        n = len(strs)

        # Edge cases: no strings, or a single string.
        if n == 0:
            return ""
        elif n == 1:
            return strs[0]

        # Trim the candidate prefix against each string in turn.
        ans = strs[0]
        ix = 1
        while ix < n:
            jx = 0
            while jx < len(strs[ix]) and jx < len(ans) and strs[ix][jx] == ans[jx]:
                jx += 1
            if jx == 0:
                return ""
            ans = ans[:jx]
            ix += 1
        return ans
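A few sanity checks for the prefix-trimming approach, assuming the class above is in scope:

sol = Solution()
assert sol.longestCommonPrefix(["flower", "flow", "flight"]) == "fl"
assert sol.longestCommonPrefix(["dog", "racecar", "car"]) == ""
assert sol.longestCommonPrefix(["alone"]) == "alone"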
// Connected flags a vehicle as connected.
func (v vehicleList) Connected(vehicleID string) {
	db.SetWithTTL(v.connectionKey(vehicleID), true, time.Second*5)
	v.Publish(nil)
}
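The pattern here is a presence flag with a short TTL: the vehicle reads as connected only while it keeps reconfirming within the window, and it expires on its own otherwise. A minimal in-memory Python sketch of the same idea (the Go snippet's db, Publish and key scheme are not modeled):

import time

class PresenceFlags:
    """Presence flags that expire unless refreshed within ttl seconds."""
    def __init__(self, ttl=5.0):
        self.ttl = ttl
        self._seen = {}  # id -> last-confirmed timestamp

    def connected(self, vehicle_id):
        self._seen[vehicle_id] = time.monotonic()

    def is_connected(self, vehicle_id):
        last = self._seen.get(vehicle_id)
        return last is not None and time.monotonic() - last < self.ttl

flags = PresenceFlags(ttl=5.0)
flags.connected("veh-1")
assert flags.is_connected("veh-1")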
<filename>src/main/java/com/ebstrada/formreturn/manager/ui/frame/ErrorLogFrame.java package com.ebstrada.formreturn.manager.ui.frame; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Insets; import java.awt.Toolkit; import java.awt.datatransfer.Clipboard; import java.awt.datatransfer.ClipboardOwner; import java.awt.datatransfer.StringSelection; import java.awt.datatransfer.Transferable; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.TimeZone; import javax.swing.*; import javax.swing.JButton; import javax.swing.JFrame; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTextPane; import javax.swing.ListModel; import javax.swing.border.EmptyBorder; import org.apache.log4j.spi.LoggingEvent; import com.ebstrada.formreturn.manager.gef.util.Localizer; import com.ebstrada.formreturn.manager.log4j.ListAppenderScrollPane; import com.ebstrada.formreturn.manager.log4j.LoggingEventModel; import com.ebstrada.formreturn.manager.ui.Main; public class ErrorLogFrame extends JPanel implements ClipboardOwner { private static final long serialVersionUID = 1L; private LoggingEventModel loggingEventModel; private LoggingEvent selectedEvent; private JFrame mainFrame; public ErrorLogFrame(JFrame frame) { mainFrame = frame; initComponents(); setName(Localizer.localize("UI", "ErrorLogFrameTitle")); messageSplitPane.setDividerLocation(100); scrollPane1.getHorizontalScrollBar().setValue(0); scrollPane2.getHorizontalScrollBar().setValue(0); } private void button2ActionPerformed(ActionEvent e) { JScrollPane jsp = ((Main) mainFrame).getSystemConsoleScrollPane(); JList jlist = ((ListAppenderScrollPane) jsp).getList(); if (jlist.getSelectedIndex() == -1) { loggingEventModel.removeElementAt(0); } else { loggingEventModel.removeElementAt(jlist.getSelectedIndex()); } Main.getInstance().deactivateErrorLogFrame(); } private void button1ActionPerformed(ActionEvent e) { Main.getInstance().deactivateErrorLogFrame(); } private void copyToClipboardButtonActionPerformed(ActionEvent e) { String propertiesString = ""; propertiesString += textPane2.getText() + "\n\n"; String lineEnding = System.getProperty("line.separator"); Object[] keys = System.getProperties().keySet().toArray(); Object[] values = System.getProperties().values().toArray(); for (int i = 0; i < keys.length; i++) { propertiesString += keys[i] + " = " + values[i] + lineEnding; } setClipboardContents(propertiesString); } public void setClipboardContents(String aString) { StringSelection stringSelection = new StringSelection(aString); Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard(); clipboard.setContents(stringSelection, this); } private void initComponents() { // JFormDesigner - Component initialization - DO NOT MODIFY // //GEN-BEGIN:initComponents panel3 = new JPanel(); deleteMessageButton = new JButton(); copyToClipboardButton = new JButton(); closeButton = new JButton(); panel2 = new JPanel(); messageTypeLabel = new JLabel(); errorType = new JLabel(); messageTimeLabel = new JLabel(); errorTime = new JLabel(); panel1 = new JPanel(); messageLabel = new JLabel(); messageSplitPane = new JSplitPane(); scrollPane2 = new JScrollPane(); textPane1 = new JTextPane(); scrollPane1 = new JScrollPane(); textPane2 = new JTextPane(); //======== this ======== setLayout(new 
GridBagLayout()); ((GridBagLayout) getLayout()).columnWidths = new int[] {0, 0}; ((GridBagLayout) getLayout()).rowHeights = new int[] {0, 0, 0, 0}; ((GridBagLayout) getLayout()).columnWeights = new double[] {1.0, 1.0E-4}; ((GridBagLayout) getLayout()).rowWeights = new double[] {0.0, 1.0, 0.0, 1.0E-4}; //======== panel3 ======== { panel3.setBorder(new EmptyBorder(8, 8, 8, 8)); panel3.setLayout(new GridBagLayout()); ((GridBagLayout) panel3.getLayout()).columnWidths = new int[] {0, 0, 0, 0, 0}; ((GridBagLayout) panel3.getLayout()).rowHeights = new int[] {0, 0}; ((GridBagLayout) panel3.getLayout()).columnWeights = new double[] {1.0, 0.0, 0.0, 0.0, 1.0E-4}; ((GridBagLayout) panel3.getLayout()).rowWeights = new double[] {0.0, 1.0E-4}; //---- deleteMessageButton ---- deleteMessageButton.setFont(UIManager.getFont("Button.font")); deleteMessageButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { button2ActionPerformed(e); } }); deleteMessageButton .setText(Localizer.localize("UI", "ErrorLogFrameDeleteMessageButtonText")); panel3.add(deleteMessageButton, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 5), 0, 0)); //---- copyToClipboardButton ---- copyToClipboardButton.setFont(UIManager.getFont("Button.font")); copyToClipboardButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { copyToClipboardButtonActionPerformed(e); } }); copyToClipboardButton.setText(Localizer.localize("UI", "CopyToClipboardButtonText")); panel3.add(copyToClipboardButton, new GridBagConstraints(2, 0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 5), 0, 0)); //---- closeButton ---- closeButton.setFont(UIManager.getFont("Button.font")); closeButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { button1ActionPerformed(e); } }); closeButton.setText(Localizer.localize("UI", "CloseButtonText")); panel3.add(closeButton, new GridBagConstraints(3, 0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); } add(panel3, new GridBagConstraints(0, 2, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); //======== panel2 ======== { panel2.setBorder(new EmptyBorder(8, 8, 8, 8)); panel2.setLayout(new GridBagLayout()); ((GridBagLayout) panel2.getLayout()).columnWidths = new int[] {0, 0, 15, 0, 0, 0, 0}; ((GridBagLayout) panel2.getLayout()).rowHeights = new int[] {0, 0}; ((GridBagLayout) panel2.getLayout()).columnWeights = new double[] {0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0E-4}; ((GridBagLayout) panel2.getLayout()).rowWeights = new double[] {0.0, 1.0E-4}; //---- messageTypeLabel ---- messageTypeLabel.setFont(UIManager.getFont("Label.font")); messageTypeLabel.setText(Localizer.localize("UI", "ErrorLogFrameMessageTypeLabel")); panel2.add(messageTypeLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.VERTICAL, new Insets(0, 0, 0, 5), 0, 0)); panel2.add(errorType, new GridBagConstraints(1, 0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 5), 0, 0)); //---- messageTimeLabel ---- messageTimeLabel.setFont(UIManager.getFont("Label.font")); messageTimeLabel.setText(Localizer.localize("UI", "ErrorLogFrameMessageTimeLabel")); panel2.add(messageTimeLabel, new GridBagConstraints(3, 0, 1, 1, 0.0, 0.0, GridBagConstraints.EAST, GridBagConstraints.VERTICAL, new 
Insets(0, 0, 0, 5), 0, 0)); //---- errorTime ---- errorTime.setFont(UIManager.getFont("Label.font")); panel2.add(errorTime, new GridBagConstraints(4, 0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 5), 0, 0)); } add(panel2, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); //======== panel1 ======== { panel1.setBorder(new EmptyBorder(8, 8, 8, 8)); panel1.setLayout(new GridBagLayout()); ((GridBagLayout) panel1.getLayout()).columnWidths = new int[] {0, 0}; ((GridBagLayout) panel1.getLayout()).rowHeights = new int[] {0, 0, 0}; ((GridBagLayout) panel1.getLayout()).columnWeights = new double[] {1.0, 1.0E-4}; ((GridBagLayout) panel1.getLayout()).rowWeights = new double[] {0.0, 1.0, 1.0E-4}; //---- messageLabel ---- messageLabel.setFont(UIManager.getFont("TitledBorder.font")); messageLabel.setText(Localizer.localize("UI", "ErrorLogFrameMessageLabel")); panel1.add(messageLabel, new GridBagConstraints(0, 0, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 5, 0), 0, 0)); //======== messageSplitPane ======== { messageSplitPane.setOrientation(JSplitPane.VERTICAL_SPLIT); messageSplitPane.setOneTouchExpandable(true); //======== scrollPane2 ======== { //---- textPane1 ---- textPane1.setFont(UIManager.getFont("TextPane.font")); scrollPane2.setViewportView(textPane1); } messageSplitPane.setTopComponent(scrollPane2); //======== scrollPane1 ======== { //---- textPane2 ---- textPane2.setFont(UIManager.getFont("TextPane.font")); scrollPane1.setViewportView(textPane2); } messageSplitPane.setBottomComponent(scrollPane1); } panel1.add(messageSplitPane, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); } add(panel1, new GridBagConstraints(0, 1, 1, 1, 0.0, 0.0, GridBagConstraints.CENTER, GridBagConstraints.BOTH, new Insets(0, 0, 0, 0), 0, 0)); // //GEN-END:initComponents } public void setErrorLogModel(ListModel lem) { loggingEventModel = (LoggingEventModel) lem; } public LoggingEventModel getErrorLogModel() { return loggingEventModel; } public void setErrorObject(Object obj) { selectedEvent = (LoggingEvent) obj; refresh(); } public LoggingEvent getErrorObject() { return selectedEvent; } public void refresh() { if (selectedEvent != null) { try { Throwable ex = selectedEvent.getThrowableInformation().getThrowable(); if (ex != null) { textPane1.setText(ex.getMessage() + "\n\n"); } else { textPane1.setText(""); } ByteArrayOutputStream baos = new ByteArrayOutputStream(); ex.printStackTrace(new PrintStream(baos, true)); textPane2.setText(baos.toString()); errorType.setText(selectedEvent.getLevel().toString()); Calendar gpsTime = new GregorianCalendar(TimeZone.getTimeZone("GMT")); gpsTime.setTimeInMillis(selectedEvent.timeStamp); errorTime.setText(gpsTime.getTime().toString()); } catch (Exception e) { // TODO: handle exception } scrollPane1.getHorizontalScrollBar().setValue(0); scrollPane2.getHorizontalScrollBar().setValue(0); } } // JFormDesigner - Variables declaration - DO NOT MODIFY // //GEN-BEGIN:variables private JPanel panel3; private JButton deleteMessageButton; private JButton copyToClipboardButton; private JButton closeButton; private JPanel panel2; private JLabel messageTypeLabel; private JLabel errorType; private JLabel messageTimeLabel; private JLabel errorTime; private JPanel panel1; private JLabel messageLabel; private JSplitPane messageSplitPane; private JScrollPane 
scrollPane2; private JTextPane textPane1; private JScrollPane scrollPane1; private JTextPane textPane2; // JFormDesigner - End of variables declaration //GEN-END:variables public void lostOwnership(Clipboard clipboard, Transferable contents) { // do nothing } }
use crate::core::{
    integrator::{Integrator, SamplerIntegrator, SamplerIntegratorBase},
    geometry::{Bounds2i, Ray, Vector3f, Normal3},
    camera::Camera,
    sampler::Sampler,
    profiler::Profiler,
    scene::Scene,
    pbrt::{Spectrum, Float},
    interaction::{Interaction, SurfaceInteraction},
    material::TransportMode,
    sampling::{cosine_sample_hemisphere, cosine_hemisphere_pdf,
               uniform_sample_hemisphere, uniform_hemisphere_pdf}
};
use std::sync::Arc;
use obstack::Obstack;

pub struct AOIntegrator {
    base: SamplerIntegratorBase,
    cos_sample: bool,
    n_samples: u32
}

impl AOIntegrator {
    pub fn new(
        cos_sample: bool,
        ns: usize,
        camera: Arc<dyn Camera + Send + Sync>,
        sampler: Arc<dyn Sampler + Send + Sync>,
        pixel_bounds: Bounds2i
    ) -> AOIntegrator {
        let n_samples = sampler.round_count(ns);

        // Warn when the sampler rounds the requested count. (The original
        // compared n_samples with itself, so the warning could never fire.)
        if n_samples != ns {
            warn!("Taking {} samples, not {} as specified", n_samples, ns);
        }

        sampler.request_2d_array(n_samples);

        AOIntegrator {
            base: SamplerIntegratorBase::new(camera, sampler, pixel_bounds),
            cos_sample,
            n_samples: n_samples as u32
        }
    }
}

impl SamplerIntegrator for AOIntegrator {
    fn li(&self, ray: &Ray, scene: &Scene, sampler: Box<dyn Sampler>, arena: &mut Obstack, depth: i32) -> Spectrum {
        // Keep the profiling guard alive for the duration of this call.
        let _p = Profiler::instance().profile("SamplerIntegrator::li()");
        let mut l = Spectrum::new(0.0);
        let mut ray = ray.clone();

        loop {
            // Intersect _ray_ with scene and store intersection in _isect_.
            if let Some(si) = scene.intersect(&ray) {
                let mut isect = si;
                isect.compute_scattering_functions(&ray, arena, true, TransportMode::Radiance);

                if isect.get_bsdf().is_none() {
                    vlog!(2, "Skipping intersection due to None bsdf");
                    ray = isect.spawn_ray(&ray.d);
                    continue;
                }

                // Compute coordinate frame based on true geometry, not shading
                // geometry.
                let n = isect.n.face_forward(&Normal3{ x: -ray.d.x, y: -ray.d.y, z: -ray.d.z });
                let s = isect.dpdu.normalize();
                let t = Vector3f::cross(&isect.n.into(), &s);

                if let Some(u) = sampler.get_2d_array(self.n_samples as usize) {
                    for i in 0..self.n_samples as usize {
                        let (mut wi, pdf) = if self.cos_sample {
                            let wi = cosine_sample_hemisphere(&u[i]);
                            (wi, cosine_hemisphere_pdf(wi.z.abs()))
                        } else {
                            let wi = uniform_sample_hemisphere(&u[i]);
                            (wi, uniform_hemisphere_pdf())
                        };

                        // Transform wi from local frame to world space.
                        wi = Vector3f::new(
                            s.x * wi.x + t.x * wi.y + n.x * wi.z,
                            s.y * wi.x + t.y * wi.y + n.y * wi.z,
                            s.z * wi.x + t.z * wi.y + n.z * wi.z,
                        );

                        // Count unoccluded directions, weighted by cos / pdf.
                        if !scene.intersect_p(&isect.spawn_ray(&wi)) {
                            l += Spectrum::new(wi.dot(&n.into()) / (pdf * self.n_samples as Float));
                        }
                    }
                }
            }
            return l;
        }
    }
}

impl Integrator for AOIntegrator {
    fn render(&self, scene: &Scene) {
        self.base.render(self, scene);
    }

    fn specular_reflect(
        &self,
        ray: &Ray,
        isect: &SurfaceInteraction,
        scene: &Scene,
        sampler: Box<dyn Sampler>,
        arena: &mut Obstack,
        depth: i32
    ) -> Spectrum {
        self.base.specular_reflect(self, ray, isect, scene, sampler, arena, depth)
    }
}
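Both samplers used in li() are textbook hemisphere samplers. A Python sketch of the cosine-weighted variant and its PDF using the classic square-root mapping; this mirrors the math, not necessarily the exact concentric-disk implementation behind cosine_sample_hemisphere:

import math, random

def cosine_sample_hemisphere(u1, u2):
    """Cosine-weighted direction on the +z hemisphere; pdf = cos(theta) / pi."""
    r = math.sqrt(u1)
    phi = 2.0 * math.pi * u2
    x = r * math.cos(phi)
    y = r * math.sin(phi)
    z = math.sqrt(max(0.0, 1.0 - u1))  # cos(theta)
    return x, y, z

def cosine_hemisphere_pdf(cos_theta):
    return cos_theta / math.pi

wi = cosine_sample_hemisphere(random.random(), random.random())
assert abs(sum(c * c for c in wi) - 1.0) < 1e-9  # unit vector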
#pragma once

#include "../api.h"
#include "../protos.h"
#include "../typedefs.h"

#include "shader.hpp"

namespace natus
{
    namespace graphics
    {
        class NATUS_GRAPHICS_API shader_set
        {
            natus_this_typedefs( shader_set ) ;

        private:

            natus::graphics::shader_t _vs ;
            natus::graphics::shader_t _ps ;
            natus::graphics::shader_t _gs ;

        public:

            shader_set( void_t ) {}
            shader_set( this_cref_t rhv ) noexcept { *this = rhv ; }
            shader_set( this_rref_t rhv ) noexcept { *this = ::std::move( rhv ) ; }

            this_ref_t operator = ( this_cref_t rhv ) noexcept
            {
                _vs = rhv._vs ;
                _ps = rhv._ps ;
                _gs = rhv._gs ;
                return *this ;
            }

            this_ref_t operator = ( this_rref_t rhv ) noexcept
            {
                _vs = ::std::move( rhv._vs ) ;
                _ps = ::std::move( rhv._ps ) ;
                _gs = ::std::move( rhv._gs ) ;
                return *this ;
            }

        public:

            this_ref_t set_vertex_shader( natus::graphics::shader_in_t s ) noexcept { _vs = s ; return *this ; }
            this_ref_t set_pixel_shader( natus::graphics::shader_t s ) noexcept { _ps = s ; return *this ; }
            this_ref_t set_geometry_shader( natus::graphics::shader_t s ) noexcept { _gs = s ; return *this ; }

            natus::graphics::shader_cref_t vertex_shader( void_t ) const noexcept { return _vs ; }
            natus::graphics::shader_cref_t pixel_shader( void_t ) const noexcept { return _ps ; }
            natus::graphics::shader_cref_t geometry_shader( void_t ) const noexcept { return _gs ; }

            bool_t has_vertex_shader( void_t ) const noexcept { return natus::core::is_not( _vs.code().empty() ) ; }
            bool_t has_geometry_shader( void_t ) const noexcept { return natus::core::is_not( _gs.code().empty() ) ; }
            bool_t has_pixel_shader( void_t ) const noexcept { return natus::core::is_not( _ps.code().empty() ) ; }
        };
        natus_typedef( shader_set ) ;
    }
}
/* eslint-disable no-unused-expressions */ import { exec, ExecOptions } from 'child_process'; import { Uri } from 'vscode'; import { GnuPGGlobal } from './gnupgglobal'; import { GnuPGKey } from './gnupgkey'; import { i18n } from './i18n'; import { call, callStreaming, decrypt, decryptToFile, encrypt } from './lib/gpg'; import { getWorkspaceUri, isKeyRingDirectory } from './utils'; export async function asyncCheckVersion(): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['--version']); call('', args, (err?: Error, stdout?: Buffer) => { if (err) { reject(getLastGnuPGError(err)); } else { resolve(stdout); } }); }); } export async function asyncCheckWorkspaceAsHomeDir(): Promise<string | undefined> { return new Promise<string | undefined>((resolve) => { let path = getWorkspaceUri(); if (path) { let iskeyring = isKeyRingDirectory(path); if (iskeyring) { resolve(path.fsPath); } } resolve(undefined); }); } export async function asyncListPublicKeys(): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['-k', '--with-colons', '--fingerprint']); call('', args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export async function asyncListSecretKeys(): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['-K', '--with-colons', '--fingerprint']); call('', args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export async function asyncEncryptAsymBuffer(content: Buffer, keys?: { fingerprint: string }[]): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['--armor']); if (keys !== undefined) { keys.forEach((recipient) => { args = args.concat(['--recipient', recipient.fingerprint]); }); } encrypt(content, args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export async function asyncEncryptSymBuffer(content: Buffer): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['--armor', '--symmetric']); call(content, args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export async function asyncEncryptAsymUri(uri: Uri, keys?: { fingerprint: string }[]): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--armor']); if (keys !== undefined) { keys.forEach((recipient) => { args = args.concat(['--recipient', recipient.fingerprint]); }); } args = args.concat(['--encrypt', uri.fsPath]); callStreaming(uri.fsPath, uri.fsPath + '.asc', args, (err?: Error, stdout?: Buffer) => { err ? 
reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export function argsEncryptSymUri(uri: Uri): string[] { let args: string[] = []; switch (GnuPGGlobal.majorVersion) { case 1: args = GnuPGGlobal.homedirArg; args = args.concat(['--armor']); args = args.concat(['--output', uri.fsPath + '.asc']); args = args.concat(['--symmetric', uri.fsPath]); break; case 2: args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--armor']); args = args.concat(['--symmetric', uri.fsPath]); break; default: } return args; } export async function asyncEncryptSymUri(uri: Uri): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = argsEncryptSymUri(uri); callStreaming(uri.fsPath, uri.fsPath + '.asc', args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export async function asyncDecryptBuffer(content: Buffer): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; decrypt(content, args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export function argsDecryptUri(uri: Uri): string[] { let args: string[] = []; switch (GnuPGGlobal.majorVersion) { case 1: let dest = ''; if (uri.fsPath.match(/.*\.(asc|gpg)$/i)) { dest = uri.fsPath.slice(0, -'.asc'.length); } else { dest = uri.fsPath + '.decrypted'; } args = GnuPGGlobal.homedirArg; args = args.concat(['--output', dest, '--decrypt', uri.fsPath]); break; case 2: args = GnuPGGlobal.homedirArg; break; default: } return args; } export async function asyncDecryptUri(uri: Uri): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let dest = ''; if (uri.fsPath.match(/.*\.(asc|gpg)$/i)) { dest = uri.fsPath.slice(0, -'.asc'.length); } else { dest = uri.fsPath + '.decrypted'; } let args = argsDecryptUri(uri); decryptToFile({ source: uri.fsPath, dest: dest }, args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export async function asyncShowSmartcard(): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--card-status']); call('', args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export async function asyncKillGpgAgent(): Promise<void> { //https://www.gnupg.org/documentation/manuals/gnupg/Invoking-GPG_002dAGENT.html //https://www.gnupg.org/documentation/manuals/gnupg/Controlling-gpg_002dconnect_002dagent.html#Controlling-gpg_002dconnect_002dagent //gpgconf --kill gpg-agent: works on Windows //gpg-connect-agent killagent /bye switch (GnuPGGlobal.majorVersion) { case 1: return new Promise((resolve) => resolve()); case 2: // gpg-connect-agent since v2 asyncExec('gpg-connect-agent killagent /bye', {}); if (GnuPGGlobal.homedir) { let homedir = '--homedir ' + GnuPGGlobal.homedir; asyncExec('gpg-connect-agent ' + homedir + ' killagent /bye', {}); } break; default: return new Promise((resolve) => resolve()); } } export async function asyncExec(cmd: string, opts: ExecOptions): Promise<void> { return new Promise((resolve, reject) => { exec( cmd, opts, (err) => err ? 
reject(err) : // : resolve({ -> Promise<{ stdout: string; stderr: string }> // stdout: stdout, // stderr: stderr // }) resolve() // -> Promise<void> ); }); } export function argsSign(uri: Uri, key?: { fingerprint: string }): string[] { let args: string[] = []; //v1,2 args = GnuPGGlobal.homedirArg; args = args.concat(['--yes', '--armor']); args = args.concat(['--output', uri.fsPath + '.sig']); if (key !== undefined) { args = args.concat(['--local-user', key.fingerprint]); } args = args.concat(['--detach-sign']); args = args.concat([uri.fsPath]); return args; } export async function asyncSign(uri: Uri, key?: { fingerprint: string }): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = argsSign(uri, key); call('', args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export function argsClearSign(uri: Uri, key?: { fingerprint: string }): string[] { let args: string[] = []; switch (GnuPGGlobal.majorVersion) { case 1: args = GnuPGGlobal.homedirArg; args = args.concat(['--yes', '--armor']); if (key !== undefined) { args = args.concat(['--local-user', key.fingerprint]); } args = args.concat(['--clearsign']); //<--v1 args = args.concat([uri.fsPath]); break; case 2: args = GnuPGGlobal.homedirArg; args = args.concat(['--yes', '--armor']); if (key !== undefined) { args = args.concat(['--local-user', key.fingerprint]); } args = args.concat(['--clear-sign']); //<--v2 args = args.concat([uri.fsPath]); break; default: } return args; } export async function asyncClearSign(uri: Uri, key?: { fingerprint: string }): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = argsClearSign(uri, key); call('', args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); }); } export async function asyncVerify(uri: Uri): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { // GnuPG prints (at least some of) this output to stderr, not stdout. //for detached or clear-signed file ... let args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--verify', uri.fsPath]); //when detached signed file, add fsPath of data file if (uri.fsPath.endsWith('.sig')) { args = args.concat([uri.fsPath.slice(0, -'.sig'.length)]); } call('', args, (err?: Error, stdout?: Buffer, stderr?: Buffer, data?: string) => { err ? reject(getLastGnuPGError(err)) : resolve(new Buffer(data || '')); }); }); } export async function asyncImportKeys(uri: Uri): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--import']); args = args.concat([uri.fsPath]); call('', args, (err?: Error) => { err ? reject(getLastGnuPGError(err)) : resolve(new Buffer(i18n().GnuPGKeyImportSuccessfully)); }); }); } export async function asyncExportPublicKeys( uri: Uri, key?: { label: string; description: string; detail: string; fingerprint: string; } ): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--armor', '--output', uri.fsPath]); args = args.concat('--export'); args = args.concat(key ? key.fingerprint : ''); call('', args, (err?: Error) => { err ? 
reject(getLastGnuPGError(err)) : resolve(new Buffer(i18n().GnuPGKeyExportSuccessfully)); }); }); } export async function asyncExportSecretKeys( uri: Uri, key?: { label: string; description: string; detail: string; fingerprint: string; } ): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--armor', '--output', uri.fsPath]); args = args.concat('--export-secret-keys'); args = args.concat(key ? key.fingerprint : ''); call('', args, (err?: Error) => { err ? reject(getLastGnuPGError(err)) : resolve(new Buffer(i18n().GnuPGKeyExportSuccessfully)); }); }); } export async function asyncExportSecretSubKeys( uri: Uri, key?: { label: string; description: string; detail: string; fingerprint: string; } ): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { let args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--armor', '--output', uri.fsPath]); args = args.concat('--export-secret-subkeys'); args = args.concat(key ? key.fingerprint : ''); call('', args, (err?: Error) => { err ? reject(getLastGnuPGError(err)) : resolve(new Buffer(i18n().GnuPGKeyExportSuccessfully)); }); }); } export function argsGenerateKey() { let args = GnuPGGlobal.homedirArg; switch (GnuPGGlobal.majorVersion) { case 1: args = args.concat(['--gen-key']); break; case 2: args = args.concat(['--full-generate-key']); break; default: } return args; } export function argsEditKey(key?: { fingerprint: string; userId: string }) { let args: string[] = []; //v1,2 if (key) { args = GnuPGGlobal.homedirArg; args = args.concat(['--edit-key', key.fingerprint]); } return args; } export function argsDeletePublicKey(key?: { fingerprint: string; userId: string }): string[] { let args: string[] = []; //v1,2 if (key) { args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--delete-keys']); args = args.concat([key.fingerprint]); } return args; } export function argsGitSetUserSigningKey(key?: { fingerprint: string; userId: string }) { let args: string[] = []; ////git config --global user.signingkey <keyid> if (key) { args = GnuPGGlobal.homedirArg; args = args.concat(['config', '--global', 'user.signingkey', key.fingerprint]); } return args; } export function argsGitUnsetUserSigningKey() { let args: string[] = []; //git config --global --unset user.signingkey <keyid> args = args.concat(['config', '--global', '--unset', 'user.signingkey']); return args; } export async function asyncDeletePublicKey(key?: { fingerprint: string; userId: string }): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { if (key) { let args = argsDeletePublicKey(key); call('', args, (err?: Error, stdout?: Buffer) => { err ? reject(getLastGnuPGError(err)) : resolve(stdout); }); } }); } export function argsDeleteSecretKey(key?: { fingerprint: string; userId: string }): string[] { let args: string[] = []; //v1,2 if (key) { args = GnuPGGlobal.homedirArg; args = args.concat(['--batch', '--yes', '--delete-secret-keys']); args = args.concat([key.fingerprint]); } return args; } export async function asyncDeleteSecretKey(key?: { fingerprint: string; userId: string }): Promise<Buffer> { return new Promise<Buffer>((resolve, reject) => { if (key) { let args = argsDeleteSecretKey(key); call('', args, (err?: Error, stdout?: Buffer) => { err ? 
reject(getLastGnuPGError(err)) : resolve(stdout); }); } }); } export function parseKeys(stdout: Buffer): Map<string, GnuPGKey> { //see source: gnupg-2.2.12\doc\DETAILS //https://github.com/gpg/gnupg/blob/master/doc/DETAILS let lines = stdout .toString() .trim() .split(/(\r\n|\n)/g); let keys = new Map<string, GnuPGKey>(); let key: GnuPGKey | null = null; for (var i = 0; i < lines.length; i++) { const line = lines[i].trim(); const record = line.split(':'); switch (GnuPGGlobal.majorVersion) { case 1: //#region Records //pub:u:2048:1:6766822C85E0F4E6:2019-09-04:::u:Delete Me (weg) <<EMAIL>>::scSC: //pub :: Public key //crt :: X.509 certificate //crs :: X.509 certificate and private key available //sub :: Subkey (secondary key) //sec :: Private key //ssb :: Private subkey (secondary key) //uid :: User id //uat :: User attribute (same as user id except for field 10). //sig :: Signature //rev :: Revocation signature //rvs :: Revocation signature (standalone) [since 2.2.9] //fpr :: Fingerprint (fingerprint is in field 10) //pkd :: Public key data [*] //grp :: Keygrip //rvk :: Revocation key //tfs :: TOFU statistics [*] //tru :: Trust database information [*] //spk :: Signature subpacket [*] //cfg :: Configuration data [*] //#endregion switch (record[0]) { case 'pub': case 'sec': //#region Details pub Record: //pub:u:4096:1:069E5389FFEE0B51:2019-09-04:::u:<NAME> (The real Donald ...) <<EMAIL>>::scSC: //record[0]: Type of record //record[1]: Validity //record[2]: Key length //record[3]: Public key algorithm //record[4]: KeyID //record[5]: Creation date //record[6]: Expiration date //record[7]: Certificate S/N, UID hash, trust signature info //record[8]: Ownertrust //record[9]: User-ID //record[10]: Signature class //record[11]: Key capabilities //record[12]: Issuer certificate fingerprint or other info //record[13]: Flag field //record[14]: S/N of a token //record[15]: Hash algorithm //record[16]: Curve name //record[17]: Compliance flags //record[18]: Last update //record[19]: Origin //record[20]: Comment //#endregion // Add previous key, if exists if (key !== null && !keys.has(key.keyId)) { keys.set(key.keyId, key); } //create new key key = new GnuPGKey(); (key.keyId = record[4]), (key.expiration = record[5]), (key.capabilities = record[11]), (key.validity = record[1]); // v1 key.addUserId(record[9]); break; case 'fpr': //#region Details fpr Record //record[9]: fingerprint //#endregion // Fingerprint contains keyId if (key !== null && record[9].endsWith(key.keyId)) { key.fingerprint = record[9]; } break; case 'uid': //#region Details uid Record //record[9]: userid //#endregion // User Id: name email if (key !== null) { key.addUserId(record[9]); } break; } break; case 2: //#region Records //pub :: Public key //crt :: X.509 certificate //crs :: X.509 certificate and private key available //sub :: Subkey (secondary key) //sec :: Private key //ssb :: Private subkey (secondary key) //uid :: User id //uat :: User attribute (same as user id except for field 10). 
//sig :: Signature //rev :: Revocation signature //rvs :: Revocation signature (standalone) [since 2.2.9] //fpr :: Fingerprint (fingerprint is in field 10) //pkd :: Public key data [*] //grp :: Keygrip //rvk :: Revocation key //tfs :: TOFU statistics [*] //tru :: Trust database information [*] //spk :: Signature subpacket [*] //cfg :: Configuration data [*] //#endregion switch (record[0]) { case 'pub': case 'sec': //#region Details pub Record: //pub:u:4096:1:8E04619523BDC7D4:1577740263:::u:::scESCA::::::23::0: //uid:u::::1577740263::19196D19B299EFD63210E523D1F0AC5BFF893FB0::<NAME> <<EMAIL>>::::::::::0 //record[0]: Type of record //record[1]: Validity //record[2]: Key length //record[3]: Public key algorithm //record[4]: KeyID //record[5]: Creation date //record[6]: Expiration date //record[7]: Certificate S/N, UID hash, trust signature info //record[8]: Ownertrust //record[9]: User-ID //record[10]: Signature class //record[11]: Key capabilities //record[12]: Issuer certificate fingerprint or other info //record[13]: Flag field //record[14]: S/N of a token //record[15]: Hash algorithm //record[16]: Curve name //record[17]: Compliance flags //record[18]: Last update //record[19]: Origin //record[20]: Comment //#endregion // Add previous key, if exists if (key !== null && !keys.has(key.keyId)) { keys.set(key.keyId, key); } //create new key key = new GnuPGKey(); (key.keyId = record[4]), (key.expiration = record[6]), (key.capabilities = record[11]), (key.validity = record[1]); break; case 'fpr': //#region Details fpr Record //record[9]: fingerprint //#endregion // Fingerprint contains keyId if (key !== null && record[9].endsWith(key.keyId)) { key.fingerprint = record[9]; } break; case 'uid': //#region Details uid Record //record[9]: userid //#endregion // User Id: name email if (key !== null) { key.addUserId(record[9]); } break; } break; default: break; } // Add last key if (key !== null && !keys.has(key.keyId)) { keys.set(key.keyId, key); } } return keys; } function getLastGnuPGError(err?: Error): string { let gpgerror = ''; if (err && err.stack) { err.stack.split(/(\r|)\n/).forEach((entry: string) => { if (gpgerror.length === 0) { gpgerror = entry; } if (entry.match(/gpg:/)) { gpgerror = entry; } }); } return gpgerror; }
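The parseKeys routine above is driven by the colon-delimited records that `gpg --with-colons --list-keys` emits, documented in gnupg's doc/DETAILS (quoted in the comments). A minimal Python sketch of the same field positions; the sample records and key material below are made up for illustration, not taken from a real keyring:

# Field positions follow doc/DETAILS: for pub/sec records the key ID is
# field 4 (and, for GnuPG 1.x, the user ID is field 9); fpr and uid
# records both carry their payload in field 9.
sample = """pub:u:2048:1:6766822C85E0F4E6:2019-09-04:::u:Example User <user@example.org>::scSC:
fpr:::::::::41B1AF38C3B6E532BB14D6B16766822C85E0F4E6:
uid:u::::::::Second Id <second@example.org>:"""

keys = {}
current = None
for line in sample.splitlines():
    rec = line.split(':')
    if rec[0] in ('pub', 'sec'):
        current = {'keyid': rec[4], 'fpr': None,
                   'uids': [rec[9]] if rec[9] else []}
        keys[rec[4]] = current
    elif rec[0] == 'fpr' and current and rec[9].endswith(current['keyid']):
        current['fpr'] = rec[9]          # the fingerprint ends with the key ID
    elif rec[0] == 'uid' and current:
        current['uids'].append(rec[9])

print(keys['6766822C85E0F4E6']['fpr'])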
<reponame>Muflhi01/apex<filename>apex/transformer/_data/__init__.py from apex.transformer._data._batchsampler import MegatronPretrainingRandomSampler from apex.transformer._data._batchsampler import MegatronPretrainingSampler __all__ = [ "MegatronPretrainingRandomSampler", "MegatronPretrainingSampler", ]
import React from 'react'; import H2 from '@govtnz/ds/build/react-ts/H2'; import P from '@govtnz/ds/build/react-ts/P'; import A from '@govtnz/ds/build/react-ts/A'; import './color-swatch.scss'; type Props = { title: string; }; const GetInTouch = ({ title }: Props) => ( <div className="g-inset-text g-inset-text--borderless"> <H2 styleSize="medium">Get in touch</H2> <P> If you’ve got a question, idea, or suggestion, email the Design System (DS) team at{' '} <A href="mailto:<EMAIL>"><EMAIL></A>, use our{' '} <A href="https://nz-aog-design-system.slack.com">NZGDS Slack app</A>, or{' '} <A href="https://github.com/GOVTNZ/govtnz-design-system/issues"> discuss {title || ''} on 'GitHub issues'. </A> </P> </div> ); export default GetInTouch;
<reponame>netzeln/EatenList-CR<filename>app/calorie-calculate.component.ts import {Component, EventEmitter} from 'angular2/core'; import {Eaten} from './eaten.model'; import {EatenDetailsComponent} from './eaten-details.component'; @Component({ selector: 'calorie-calc', inputs: ['eatenFoods'], // outputs:['countedCalories'], template: ` <h2> Current Caloric Consumption: {{totalCalories}}</h2> <button (click)="countCalories(eatenFoods)">Calculate</button> ` }) export class CalorieCalculateComponent{ public eaten: Eaten[]; // public calorieArray: number[]; // public countedCalories: EventEmitter<number[]>; // public countedCalories: number; public totalCalories: number = 0; constructor(){ // this.countedCalories = new EventEmitter(); // this.countedCalories = 0; }; countCalories(foodsEaten){ this.totalCalories = 0; var foods = foodsEaten; for(var food of foods){ this.totalCalories = this.totalCalories + food.calories; } // this.countedCalories = totalCalories; // console.log(this.countedCalories); console.log(this.totalCalories); // this.countedCalories.emit([this.totalCalories]); // return this.countedCalories; } }
//search through the specified EB, find the last PP that was written to, then get the latest PPOmap[] from it and return the latest LSImarker value static unsigned short readmap_PPO(unsigned short EBI) { unsigned short LSImarker; unsigned short PageAddr; unsigned char i; unsigned char SplitSize = 32; if(EBI > 1023) return 0x3FFF; PageAddr = (64 * EBI) + SplitSize; for(i=0; i<6; i++) { read_PP(PageAddr); read_buffer((unsigned char*) &LSImarker, 2048 + 2, 2); if((LSImarker & 0x3FFF) < 880) PageAddr = PageAddr + ((SplitSize + 0) / 2); else PageAddr = PageAddr - ((SplitSize + 1) / 2); SplitSize = SplitSize / 2; } if(DiskInfo.BufferPageAddr != PageAddr) { read_PP(PageAddr); read_buffer((unsigned char*) &LSImarker, 2048 + 2, 2); } read_buffer(PPOmap, 2048 + 4, 56); DiskInfo.PPOmapValidEBI = EBI; if((LSImarker & 0xC000) == 0xC000) DiskInfo.PPOmapLastPPO = 0xFF; else DiskInfo.PPOmapLastPPO = PageAddr % 64; return LSImarker; }
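readmap_PPO() above homes in on the last written physical page of a 64-page erase block with a six-step binary split: pages are written in order, and the LSI marker of an erased page reads back as all ones (0x3FFF after masking), so each block is a prefix of written pages followed by erased ones. A minimal Python sketch of that boundary search; the 64-page geometry and the written-page predicate are assumptions lifted from the C code, not a verified flash layout:

# Binary search for the last written page in a block where written pages
# form a prefix. page_is_written(p) stands in for reading page p and
# checking (LSImarker & 0x3FFF) < 880, as the C code does.
def find_last_written_page(page_is_written, pages_per_block=64):
    lo, hi = 0, pages_per_block - 1
    last = -1
    while lo <= hi:
        mid = (lo + hi) // 2
        if page_is_written(mid):
            last = mid      # mid is written; the boundary is at mid or later
            lo = mid + 1
        else:
            hi = mid - 1    # mid is erased; the boundary is earlier
    return last

# Example: the first 10 pages of the block have been written.
print(find_last_written_page(lambda p: p < 10))  # -> 9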
/**
 * Frame input buffer for HTTP/2 blocking connections.
 *
 * @since 5.0
 */
public final class FrameInputBuffer {

    private final BasicH2TransportMetrics metrics;
    private final int maxFramePayloadSize;
    private final byte[] buffer;

    private int off;
    private int dataLen;

    FrameInputBuffer(final BasicH2TransportMetrics metrics, final int bufferLen, final int maxFramePayloadSize) {
        Args.notNull(metrics, "HTTP2 transport metrics");
        Args.positive(maxFramePayloadSize, "Maximum payload size");
        this.metrics = metrics;
        this.maxFramePayloadSize = maxFramePayloadSize;
        this.buffer = new byte[bufferLen];
        this.dataLen = 0;
    }

    public FrameInputBuffer(final BasicH2TransportMetrics metrics, final int maxFramePayloadSize) {
        this(metrics, FrameConsts.HEAD_LEN + maxFramePayloadSize, maxFramePayloadSize);
    }

    public FrameInputBuffer(final int maxFramePayloadSize) {
        this(new BasicH2TransportMetrics(), maxFramePayloadSize);
    }

    boolean hasData() {
        return this.dataLen > 0;
    }

    void fillBuffer(final InputStream inStream, final int requiredLen) throws IOException {
        while (dataLen < requiredLen) {
            if (off > 0) {
                System.arraycopy(buffer, off, buffer, 0, dataLen);
                off = 0;
            }
            final int bytesRead = inStream.read(buffer, off + dataLen, buffer.length - dataLen);
            if (bytesRead == -1) {
                if (dataLen > 0) {
                    throw new H2CorruptFrameException("Corrupt or incomplete HTTP2 frame");
                }
                throw new ConnectionClosedException();
            }
            dataLen += bytesRead;
            this.metrics.incrementBytesTransferred(bytesRead);
        }
    }

    public RawFrame read(final InputStream inStream) throws IOException {
        fillBuffer(inStream, FrameConsts.HEAD_LEN);
        final int payloadOff = FrameConsts.HEAD_LEN;

        final int payloadLen = (buffer[off] & 0xff) << 16 | (buffer[off + 1] & 0xff) << 8 | (buffer[off + 2] & 0xff);
        final int type = buffer[off + 3] & 0xff;
        final int flags = buffer[off + 4] & 0xff;
        // Mask each octet before shifting ('&' binds more loosely than '<<' in Java,
        // so '& 0xff << 16' would mask with 0xff0000) and clear the reserved high
        // bit of the stream identifier.
        final int streamId = (buffer[off + 5] & 0x7f) << 24 | (buffer[off + 6] & 0xff) << 16 |
                (buffer[off + 7] & 0xff) << 8 | (buffer[off + 8] & 0xff);
        if (payloadLen > maxFramePayloadSize) {
            throw new H2ConnectionException(H2Error.FRAME_SIZE_ERROR, "Frame size exceeds maximum");
        }
        final int frameLen = payloadOff + payloadLen;
        fillBuffer(inStream, frameLen);

        if ((flags & FrameFlag.PADDED.getValue()) > 0) {
            if (payloadLen == 0) {
                throw new H2ConnectionException(H2Error.PROTOCOL_ERROR, "Inconsistent padding");
            }
            final int padding = buffer[off + FrameConsts.HEAD_LEN] & 0xff;
            if (payloadLen < padding + 1) {
                throw new H2ConnectionException(H2Error.PROTOCOL_ERROR, "Inconsistent padding");
            }
        }

        final ByteBuffer payload = payloadLen > 0 ? ByteBuffer.wrap(buffer, off + payloadOff, payloadLen) : null;
        final RawFrame frame = new RawFrame(type, flags, streamId, payload);

        off += frameLen;
        dataLen -= frameLen;

        this.metrics.incrementFramesTransferred();

        return frame;
    }

    public H2TransportMetrics getMetrics() {
        return metrics;
    }

}
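The bit manipulation in read() decodes the fixed 9-octet HTTP/2 frame header (RFC 7540, Section 4.1): a 24-bit payload length, an 8-bit type, an 8-bit flags field, and one reserved bit followed by a 31-bit stream identifier. A standalone sketch of the same decode, independent of the class above:

# Decode an HTTP/2 frame header from its first 9 octets.
def parse_frame_header(head: bytes):
    assert len(head) >= 9
    length = head[0] << 16 | head[1] << 8 | head[2]
    frame_type = head[3]
    flags = head[4]
    # The high bit of octet 5 is reserved and must be ignored.
    stream_id = (head[5] & 0x7F) << 24 | head[6] << 16 | head[7] << 8 | head[8]
    return length, frame_type, flags, stream_id

# Example: an empty SETTINGS frame (type 0x04) on stream 0.
print(parse_frame_header(bytes([0, 0, 0, 0x04, 0, 0, 0, 0, 0])))
# -> (0, 4, 0, 0)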
package ca.corefacility.bioinformatics.irida.ria.integration.analysis; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.test.context.support.WithMockUser; import org.springframework.security.test.context.support.WithSecurityContextTestExcecutionListener; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.TestExecutionListeners; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import org.springframework.test.context.support.DependencyInjectionTestExecutionListener; import org.springframework.test.context.web.AnnotationConfigWebContextLoader; import org.springframework.test.context.web.WebAppConfiguration; import ca.corefacility.bioinformatics.irida.config.data.IridaApiJdbcDataSourceConfig; import ca.corefacility.bioinformatics.irida.config.IridaWebTestScopeConfig; import ca.corefacility.bioinformatics.irida.config.services.IridaApiPropertyPlaceholderConfig; import ca.corefacility.bioinformatics.irida.config.services.IridaApiServicesConfig; import ca.corefacility.bioinformatics.irida.config.web.IridaUIWebConfig; import ca.corefacility.bioinformatics.irida.model.joins.Join; import ca.corefacility.bioinformatics.irida.model.project.Project; import ca.corefacility.bioinformatics.irida.model.sample.Sample; import ca.corefacility.bioinformatics.irida.ria.web.analysis.CartController; import ca.corefacility.bioinformatics.irida.service.ProjectService; import ca.corefacility.bioinformatics.irida.service.sample.SampleService; import com.github.springtestdbunit.DbUnitTestExecutionListener; import com.github.springtestdbunit.annotation.DatabaseSetup; import com.github.springtestdbunit.annotation.DatabaseTearDown; import com.google.common.collect.Sets; @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(loader = AnnotationConfigWebContextLoader.class, classes = { IridaApiJdbcDataSourceConfig.class, IridaApiPropertyPlaceholderConfig.class, IridaApiServicesConfig.class, IridaUIWebConfig.class, IridaWebTestScopeConfig.class }) @TestExecutionListeners({ DependencyInjectionTestExecutionListener.class, DbUnitTestExecutionListener.class, WithSecurityContextTestExcecutionListener.class }) @ActiveProfiles("it") @WebAppConfiguration @DatabaseSetup("/ca/corefacility/bioinformatics/irida/ria/web/analysis/CartControllerIT.xml") @DatabaseTearDown("classpath:/ca/corefacility/bioinformatics/irida/test/integration/TableReset.xml") public class CartControllerIT { @Autowired CartController controller; @Autowired ProjectService projectService; @Autowired SampleService sampleService; @Before public void setUp() { controller.addProject(1L); } @After public void teardown() { controller.clearCart(); } @Test @WithMockUser(username = "mrtest", roles = "ADMIN") public void testAddProjectSample() { Long projectId = 2L; Project project = projectService.read(projectId); Set<Long> sampleIds = Sets.newHashSet(4L); Map<String, Object> addProjectSample = controller.addProjectSample(projectId, sampleIds, Locale.US); assertEquals("Should be 1 sample in the cart", "1 sample was added to the cart from project2.", addProjectSample.get("message")); Set<Sample> 
selectedSamplesForProject = controller.getSelected().get(project); for (Sample s : selectedSamplesForProject) { assertTrue(sampleIds.contains(s.getId())); } } @Test @WithMockUser(username = "mrtest", roles = "ADMIN") public void testAddProject() { Long projectId = 2L; Project project = projectService.read(projectId); Map<String, Object> addProjectSample = controller.addProject(projectId); assertTrue((boolean) addProjectSample.get("success")); List<Join<Project, Sample>> samplesForProject = sampleService.getSamplesForProject(project); Set<Sample> selectedSamplesForProject = controller.getSelected().get(project); for (Join<Project, Sample> j : samplesForProject) { assertTrue(selectedSamplesForProject.contains(j.getObject())); } } @Test @WithMockUser(username = "mrtest", roles = "ADMIN") public void testClearCart(){ Map<String, Object> clearCart = controller.clearCart(); assertTrue((boolean) clearCart.get("success")); } }
<filename>benches/benchmark.rs use criterion::{criterion_group, criterion_main, Criterion, BenchmarkId, Throughput, black_box}; extern crate magnet_url; use magnet_url::Magnet; fn criterion_benchmark(c: &mut Criterion) { let magnet = black_box("magnet:?xt=urn:btih:08ada5a7a6183aae1e09d831df6748d566095a10&dn=Sintel&tr=udp%3A%2F%2Fexplodie.org%3A6969&tr=udp%3A%2F%2Ftracker.coppersurfer.tk%3A6969&tr=udp%3A%2F%2Ftracker.empire-js.us%3A1337&tr=udp%3A%2F%2Ftracker.leechers-paradise.org%3A6969&tr=udp%3A%2F%2Ftracker.opentrackr.org%3A1337&tr=wss%3A%2F%2Ftracker.btorrent.xyz&tr=wss%3A%2F%2Ftracker.fastcast.nz&tr=wss%3A%2F%2Ftracker.openwebtorrent.com&ws=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2F&xs=https%3A%2F%2Fwebtorrent.io%2Ftorrents%2Fsintel.torrent"); let mut group = c.benchmark_group("sintel"); group.throughput(Throughput::Elements(1)); group.bench_with_input(BenchmarkId::new("sintel", magnet), &magnet, |b, &s| { b.iter(|| Magnet::new(s)); }); group.finish(); } criterion_group!(benches, criterion_benchmark); criterion_main!(benches);
import numpy as np


def calculateC_sam(C0_sam, B):
    """Scale the base coefficient vector C0_sam by the per-layer factors in B."""
    layers = B.shape[0]
    C_all = np.zeros((layers, 4), dtype='complex')
    for l in range(layers):
        C_all[l] = B[l] * C0_sam
    return C_all
def path_order(x, y):
    """cmp-style comparator (-1/0/1) for ordering properties in a path:
    properties without grist sort before gristed ones; within a group,
    values are compared after subfeature expansion."""
    if x == y:
        return 0

    xg = get_grist(x)
    yg = get_grist(y)

    if yg and not xg:
        return -1
    elif xg and not yg:
        return 1
    else:
        if not xg:
            x = feature.expand_subfeatures([x])
            y = feature.expand_subfeatures([y])

        if x < y:
            return -1
        elif x > y:
            return 1
        else:
            return 0
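Since path_order is a cmp-style comparator, it cannot be passed to sorted() directly on Python 3; it has to go through functools.cmp_to_key. A hedged sketch, assuming path_order and its helpers (get_grist and the feature module) are importable from the surrounding package; the property names are made-up placeholders:

from functools import cmp_to_key

props = ['<toolset>gcc', '<variant>debug', 'release', 'speed']
ordered = sorted(props, key=cmp_to_key(path_order))
# Ungristed values sort before gristed ones; ties within each group are
# broken after subfeature expansion.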
// FromCtx retrieves the current span from the context. func FromCtx(ctx context.Context) *Span { span := opentracing.SpanFromContext(ctx) if span != nil { if sp, ok := span.(*Span); ok { return sp } return NewSpan(span) } return nil }
/**
 * @file parms_viewer.h
 * @author Zhongze Li
 * @date Tue Oct 17 12:01:47 2006
 *
 * @brief Functions related to parms_Viewer objects.
 *
 */
#ifndef _PARMS_VIEWER_H_
#define _PARMS_VIEWER_H_

#include <stdio.h>
#include <string.h>
#include "parms_sys.h"

PARMS_CXX_BEGIN

typedef struct parms_Viewer_ *parms_Viewer;

/**
 * Create a parms_Viewer object.
 *
 * If PARMS_COUT and PARMS_CERR are input as fname, they stand for
 * standard output and standard error, respectively. Otherwise, each
 * PE creates a file "fnameID.dat", where ID stands for the ID of the PE.
 *
 * @param self  A pointer to the parms_Viewer object.
 * @param fname A file name to store data.
 *
 * @return 0 on success.
 */
extern int parms_ViewerCreate(parms_Viewer *self, char *fname);

/**
 * Free the memory of the parms_Viewer object.
 *
 * @param self A pointer to the parms_Viewer object.
 *
 * @return 0 on success.
 */
extern int parms_ViewerFree(parms_Viewer *self);

/**
 * Get a pointer to the file pointer.
 *
 * @param self A parms_Viewer object.
 * @param fp   A pointer to the file pointer.
 *
 * @return 0 on success.
 */
extern int parms_ViewerGetFP(parms_Viewer self, FILE **fp);

/**
 * Store fp in the parms_Viewer object.
 *
 * @param self A parms_Viewer object.
 * @param fp   A file pointer.
 *
 * @return 0 on success.
 */
extern int parms_ViewerStoreFP(parms_Viewer self, FILE *fp);

/**
 * Retrieve the file name.
 *
 * @param self  A parms_Viewer object.
 * @param fname The file name retrieved.
 *
 * @return 0 on success.
 */
extern int parms_ViewerGetFname(parms_Viewer self, char **fname);

/*
 *
 * Fortran Wrapper Functions
 *
 */

extern void parms_viewercreate_(parms_Viewer *self, char *fname, int *ierr, int len);
extern void parms_viewerfree_(parms_Viewer *self, int *ierr);
extern void parms_viewergetfp_(parms_Viewer *self, FILE **fp, int *ierr);
extern void parms_viewergetfname_(parms_Viewer *self, char **fname, int *ierr);
extern void parms_viewerstorefp_(parms_Viewer *self, FILE *fp, int *ierr);

/*
 *
 * End Fortran Wrapper Functions
 *
 */

PARMS_CXX_END

#endif
<filename>3.7.0/lldb-3.7.0.src/test/functionalities/command_script/import/thepackage/TPunitB.py def command(debugger, command, result, internal_dict): result.PutCString(u"hello world B") return None
# Read n, m (board size) and a (tile size); count how many a-by-a tiles are
# needed to cover the n-by-m board, i.e. ceil(n/a) * ceil(m/a).
# For example, n = 6, m = 6, a = 4 gives 2 * 2 = 4 tiles.
a = input().split()
x = int(a[0]) // int(a[2]) + (int(a[0]) % int(a[2]) != 0)  # ceil(n/a)
y = int(a[1]) // int(a[2]) + (int(a[1]) % int(a[2]) != 0)  # ceil(m/a)
print(x * y)
/// Remove the marble at the given position relative to the active marble. /// /// Positive offsets represent the clockwise direction /// Negative offsets represent the counter-clockwise direction fn remove(&mut self, offset: isize) -> usize { assert!(!self.nodes.is_empty()); let to_remove = self.walk(self.active_idx, offset); let prev = self.walk(to_remove, -1); let next = self.walk(to_remove, 1); self.nodes[prev].next = next; self.nodes[next].prev = prev; to_remove }
package nssac.utility; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class OutputProcessor { public static void main(String[] args) { OutputProcessor outProcessor = new OutputProcessor(); outProcessor.combineReplicates(); } private void combineReplicates() { // replicate filename: M11-rappahannockOut13.csv // probability filename: M11-rappahannockOutProb78.csv -- not processing in this function int noReplicates = 100; int ticks = 10; String modelVersion = "M11"; String region = "rappahannock"; String outFile = "D://SEEDS2//DiffusionModel//NotInGitFiles//output-data//<region>//<modelVersion>//<modelVersion>-<region>OutConsolidated.csv"; outFile = outFile.replaceAll("<modelVersion>", modelVersion); outFile = outFile.replaceAll("<region>", region); String inFile = "D://SEEDS2//DiffusionModel//NotInGitFiles//output-data//<region>//<modelVersion>//<modelVersion>-<region>Out##.csv"; Map<Integer, List<Integer>> out = new HashMap<Integer, List<Integer>>(ticks+1); try { for(int i=1; i<=noReplicates; i++) { String tFile = inFile.replace("##", ""+i); tFile = tFile.replaceAll("<modelVersion>", modelVersion); tFile = tFile.replaceAll("<region>", region); Files.readAllLines(Paths.get(tFile)).stream().filter(row -> !row.startsWith("Replicate")).forEach(row -> { String[] points = row.split(","); int tickNo = Integer.parseInt(points[1]); int count = Integer.parseInt(points[2]); if(!out.containsKey(tickNo)) { out.put(tickNo, new ArrayList<Integer>(noReplicates+1)); } List<Integer> countList = out.get(tickNo); countList.add(count); out.put(tickNo, countList); }); } BufferedWriter bw = new BufferedWriter(new FileWriter(outFile)); String separator = ","; String header = "Tick-Replicate"; for(int k=1; k<=noReplicates; k++) { header = header + separator + k; } header = header + separator + "Average" + separator + "Min" + separator + "Max" + separator + "StdDev" + "\n"; bw.write(header); for(Integer tick : out.keySet()) { List<Integer> clist = out.get(tick); int avg = (int) Math.ceil(clist.stream().mapToInt(Integer::intValue).average().getAsDouble()); int min = clist.stream().mapToInt(Integer::intValue).min().getAsInt(); int max = clist.stream().mapToInt(Integer::intValue).max().getAsInt(); int stdDev = calculateStdDev(clist, avg); String val = clist.toString(); val = val.substring(1, val.length()-1); bw.write(tick+separator+val+separator+avg+separator+min+separator+max+separator+stdDev+"\n"); } bw.flush(); bw.close(); } catch (IOException e) { e.printStackTrace(); } } private int calculateStdDev(List<Integer> clist, int avg) { double stdDev = 0; for(int i=0; i<clist.size(); i++) { stdDev = stdDev + Math.pow((clist.get(i)-avg), 2); } stdDev = stdDev/clist.size(); stdDev= Math.sqrt(stdDev); return (int) Math.round(stdDev); } }
// GetNode retrieves a nodegroup that represents all inherited values for a node func (enc *ENC) GetNode(nodeName string) (*Nodegroup, error) { var ( matchedNodegroups []*Nodegroup ) chains, err := enc.GetChains(nodeName) errCheck(err) commonChain, alteredChains := enc.findCommonChain(chains) masterNodegroup := &Nodegroup{} for _, chain := range alteredChains { if chain != "" { matchedNodegroups = append(matchedNodegroups, enc.getMergedChainNodegroup(chain)) } } if len(matchedNodegroups) > 1 { masterNodegroup, err = enc.ConflictMerge(matchedNodegroups) errCheck(err) } masterNodegroup = enc.mergeNodegroups(enc.getMergedChainNodegroup(commonChain), masterNodegroup) return masterNodegroup, nil }
You can now cast a ballot for your favourite type of marijuana — medical, that is. Voting is now open for the first annual Canadian Cannabis Awards. The contest, the first of its kind in Canada, wants members of the marijuana community to vote on their favourite smoke shop, marijuana strains and weed-related social media accounts. The awards are run by the organization Lift, a platform created for the medical marijuana community. Lift aims to “promote the production and consumption of high-quality and ethical Canadian marijuana.” Lift also sells products such as vaporizers and apparel. According to Lift, the awards are meant to “create an opportunity for the Canadian medical marijuana community to recognize leaders in the field for innovation, creativity and for pushing the industry forward.” Top cannabis crusader sought The contest features a special award for the 2014 Cannabis Crusader — an individual who has made a difference in the medical marijuana community in Canada. There are eight nominees, including Vancouver's Jodie Emery, who hopes to run for the Liberals federally, and is the wife of Marc Emery, Canada’s self-proclaimed Prince of Pot. Other candidates for Cannabis Crusader include: Dr. Mark Ware, a professor at McGill University in Montreal. John Conroy, a B.C. lawyer with a background in medical marijuana cases. Voting for the awards opened on Tuesday, the same day Marc Emery returned to Canada after serving a five-year prison sentence in the U.S. for selling cannabis seeds. Emery arrived in Windsor, Ont., at roughly 4:20 p.m. ET, a perhaps cheeky ode to the Code 420, which is frequently associated with the cannabis subculture. Lift hopes the awards will unite the industry and recognize leaders in the medical marijuana community who are helping to drive it forward in Canada. Canadians can vote at the contest's website here until Sept. 23. The winners will be announced Oct. 1.
/** * Collection of settings for the fuzzing process */ public class FuzzerParam extends AbstractParam { private static final String DEFAULT_CATEGORY = "fuzzer.defaultCategory"; private static final String THREADS_PER_SCAN = "fuzzer.threadPerScan"; private static final String DELAY_IN_MS = "fuzzer.delayInMs"; private static final String LAST_SELECTED_DIRECTORY_ADD_CUSTOM_FILE_KEY = "fuzzer.lastSelectedDirectoryAddCustomFile"; private String defaultCategory = "XSS"; private int threadPerScan = 5; private int delayInMs = 0; private File lastSelectedDirectory = new File(""); public FuzzerParam() { } @Override protected void parse() { try { setThreadPerScan(getConfig().getInt(THREADS_PER_SCAN, 1)); } catch (Exception e) { } try { setDefaultCategory(getConfig().getString(DEFAULT_CATEGORY, "XSS")); } catch (Exception e) { } try { setDelayInMs(getConfig().getInt(DELAY_IN_MS, 0)); } catch (Exception e) { } setLastSelectedDirectory(getConfig().getString( LAST_SELECTED_DIRECTORY_ADD_CUSTOM_FILE_KEY, "")); } public void setDelayInMs(int delayInMs) { this.delayInMs = delayInMs; getConfig().setProperty(DELAY_IN_MS, Integer.toString(this.delayInMs)); } public int getDelayInMs() { return delayInMs; } public String getDefaultCategory() { return defaultCategory; } public void setDefaultCategory(String defaultCategory) { this.defaultCategory = defaultCategory; getConfig().setProperty(DEFAULT_CATEGORY, this.defaultCategory); } public int getThreadPerScan() { return threadPerScan; } public void setThreadPerScan(int threadPerScan) { this.threadPerScan = threadPerScan; getConfig().setProperty(THREADS_PER_SCAN, Integer.toString(this.threadPerScan)); } public File getLastSelectedDirectory() { return lastSelectedDirectory; } public void setLastSelectedDirectory(File directory) { if (directory == null) { throw new IllegalArgumentException( "Parameter directory must not be null."); } if (!directory.isDirectory()) { throw new IllegalArgumentException( "Parameter directory must be a directory."); } if (lastSelectedDirectory.equals(directory)) { return; } lastSelectedDirectory = directory; getConfig().setProperty(LAST_SELECTED_DIRECTORY_ADD_CUSTOM_FILE_KEY, lastSelectedDirectory.getAbsolutePath()); } private void setLastSelectedDirectory(String pathDirectory) { final File directory = new File(pathDirectory); if (!directory.isDirectory()) { return; } lastSelectedDirectory = directory; } }
Exploring AI & Machine Intelligence With Google, Facebook, MIT & More By Yulia Ivanova, Summit Creator - RE•WORK August 14, 2016 Following the success of the inaugural RE•WORK Machine Intelligence Summit in Berlin, the summit is now coming to New York! On, over 200 attendees will come together in New York to hear keynote presentations and panel discussions, explore a startup showcase area, and participate in open mic Q&As, all focusing on the latest developments in machine intelligence. Machine learning and artificial intelligence tools are solving challenges such as natural language processing, data analytics and pattern recognition in industries including finance, communications, transport, retail and healthcare. Machine Intelligence is creating a significant impact on the speed at which large amounts of data are processed and analysed, allowing businesses and organisations to solve problems such as disease detection, security breaches and customer engagement. At the summit, global experts from the field will look at use cases and applications for these technologies, as well as exploring how these tools and techniques work, providing a broad view and understanding of machine intelligence, and discussing where business and society will feel its impact. Confirmed speakers include: Siddhartha Dalal, Chief Data Scientist, AIG Tara Sainath, Senior Research Scientist, Google Antoine Bordes, AI Researcher, Facebook Nicholas Roy, Associate Professor of Robotics, MIT Ross Goodwin, AI Researcher, NYU Clement Farabet, Senior Software Engineer, Twitter Top 5 topics not to be missed at the summit: Computer Vision, used in areas such as video analysis, allows machines to gain high-level understanding from digital images, and make decisions based on the data extracted. This technology is widely used for industrial automation and autonomous vehicles. Natural Language Processing capabilities allow machines to understand spoken and written human speech. Though primarily applied to communications and customer service, it can also be used to process data relating to many other areas, including product design and improvement. Computational Creativity is an emerging area within machine intelligence. Whether composing music, creating new cooking recipes or writing screen-plays, machines are proving that they too can be creative. Machine Learning in Healthcare is enabling huge leaps forward in drug discovery, gene identification and diagnostics, as well showing promise in areas such as doctor time and efficiency, surgical robotics and aiding mental health. Deep Learning Algorithms can help content-based businesses build adaptive systems. This is set to impact everything from healthcare to entertainment, in technologies such as personalised medicine, and recommendation systems for video and music services. View the full schedule here. Who you will meet: The Machine Intelligence Summit is a unique opportunity to meet leading academics, industry pioneers, influential technologists and cutting-edge startups. Interact with and learn from experts in machine learning, computer vision, IoT, data mining, analytics and predictive intelligence, to share best practices to advance the impact and opportunities of AI. View a video summary for the Machine Intelligence Summit in Berlin, June 2016: Tickets & Registration Early Bird passes are available until 16 September! Book now to receive a discounted rate on your ticket. 
For further information and to register, go to: re-work.co/events/machine-intelligence-summit-usa-2016

Students and Groups can receive further discounts - please email [email protected] for more information.

For media enquiries, interviews and images, please email Sophie at [email protected]
import json import os import unittest from monero.backends.offline import OfflineWallet from monero.wallet import Wallet from tests.utils import classproperty class Tests(object): @classproperty def __test__(cls): return issubclass(cls, unittest.TestCase) def setUp(self): self.subaddresses = json.load(open(os.path.join( os.path.dirname(__file__), 'data', '{}-subaddrs.json'.format(self.net)))) self.wallet = Wallet(OfflineWallet(self.addr, view_key=self.svk)) def test_subaddresses(self): major = 0 for acc in self.subaddresses: minor = 0 for subaddr in acc: self.assertEqual( self.wallet.get_address(major, minor), subaddr, msg='major={}, minor={}'.format(major,minor)) minor += 1 major += 1 class AddressTestCase(Tests, unittest.TestCase): addr = '<KEY>' ssk = 'e0fe01d5794e240a26609250c0d7e01673219eececa3f499d5cfa20a75739b0a' svk = '<KEY>' net = 'mainnet' def test_subaddress_out_of_range(self): self.assertRaises(ValueError, self.wallet.get_address, 0, -1) self.assertRaises(ValueError, self.wallet.get_address, -1, 0) self.assertRaises(ValueError, self.wallet.get_address, 1, 2**32) self.assertRaises(ValueError, self.wallet.get_address, 2**32, 1) class TestnetAddressTestCase(Tests, unittest.TestCase): addr = '<KEY>' ssk = '<KEY>' svk = '<KEY>' net = 'testnet' class StagenetAddressTestCase(Tests, unittest.TestCase): addr = '<KEY>' ssk = 'a8733c61797115db4ec8a5ce39fb811f81dd4ec163b880526683e059c7e62503' svk = 'fd5c0d25f8f994268079a4f7844274dc870a7c2b88fbfc24ba318375e1d9430f' net = 'stagenet'
// String returns the name of the primary environment variable for the // directory. func (d Dir) String() string { if d.env == "" { panic("xdgdir.Dir.String() on zero Dir") } return d.env }
""" The GUI of matlego 1.5. """ from PyQt5 import QtCore, QtGui, QtWidgets import pyqtgraph.opengl as gl from PyQt5.QtCore import pyqtSignal import copy class mytreewidget(QtWidgets.QTreeWidget): """ To rewrite the Qtwidgets.QTreeWidget. """ def __init__(self): super(mytreewidget, self).__init__() self.setAnimated(True) self.setDragDropMode(QtGui.QAbstractItemView.InternalMove) self.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows) class Myviewwidget(gl.GLViewWidget): # 重写glviewwidget """ To rewrite the gl.GLViewwidget to achieve the drawrectangle function of self.view_widget. """ num1 = 0 signal_release = pyqtSignal(tuple, tuple) def __init__(self): super(Myviewwidget, self).__init__() self.current_pos = None self.init_pos = None def wheelEvent(self, event): if self.num1 == 1: ... elif self.num1 == 0: return super().wheelEvent(event) def mousePressEvent(self, event): try: if self.num1 == 1: if event.button() == 1: # 鼠标左键按下 self.init_pos = (event.pos().x(), event.pos().y()) elif event.button() != 1: # 不是鼠标左键按下 self.init_pos = None elif self.num1 == 0: return super().mousePressEvent(event) except Exception as e: print(e) def mouseReleaseEvent(self, event): try: if self.num1 == 1: self.signal_release.emit(self.init_pos, self.current_pos) self.current_pos = copy.deepcopy(self.init_pos) self.update() self.num1 = 0 self.repaint() self.num1 = 1 self.init_pos == None self.current_pos == None elif self.num1 == 0: return super().mousePressEvent(event) except Exception as e: print(e) def focusInEvent(self, event): if self.num1 == 1: print('focusInEvent') elif self.num1 == 0: return super().focusInEvent(event) def focusOutEvent(self, event): if self.num1 == 1: print('focusOutEvent') elif self.num1 == 0: return super().focusOutEvent(event) def mouseMoveEvent(self, event): try: if self.num1 == 1: print('mousemoveEvent') self.current_pos = (event.pos().x(), event.pos().y()) self.update() # 调用paintevent self.num1 = 0 self.repaint() self.num1 = 1 elif self.num1 == 0: return super().mouseMoveEvent(event) except Exception as e: print(e) def paintEvent(self, event): try: if self.num1 == 1: self.q = QtGui.QPainter(self) self.q.begin(self) self.drawRect(self.q) self.q.end() elif self.num1 == 0: return super().paintEvent(event) except Exception as e: print(e) def drawRect(self, qp): try: qp.setPen(QtGui.QColor(255, 255, 255)) self.recwidth = self.current_pos[0] - self.init_pos[0] self.recheight = self.current_pos[1] - self.init_pos[1] self.num1 = 0 self.repaint() qp.drawRect(self.init_pos[0], self.init_pos[1], self.recwidth, self.recheight) self.num1 = 1 except Exception as e: print(e) def enterEvent(self, event): # 鼠标移入label if self.num1 == 1: print('enterEvent') elif self.num1 == 0: return super().enterEvent(event) class Ui_MainWindow(object): # 主窗口 """ The layout of the mainwindow. 
""" def setupUi(self, MainWindow): MainWindow.setObjectName("MainWindow") MainWindow.showFullScreen() font = QtGui.QFont() font.setPointSize(9) MainWindow.setFont(font) # toolbar self.toolbarBox1 = QtGui.QToolBar(self) self.toolbarBox2 = QtGui.QToolBar(self) self.addToolBar(self.toolbarBox1) self.addToolBar(self.toolbarBox2) self.sonwindow = QtWidgets.QWidget() MainWindow.setCentralWidget(self.sonwindow) layout = QtWidgets.QHBoxLayout() self.sonwindow.setLayout(layout) splitter1 = QtWidgets.QSplitter(QtCore.Qt.Horizontal) # 水平 splitter2 = QtWidgets.QSplitter(QtCore.Qt.Vertical) # 垂直 # parametertree self.tree = mytreewidget() self.tree.setColumnCount(4) self.tree.setHeaderLabels(['Files', 'Cell Formula', 'Heterostructure', 'Conductivity']) self.tree.setSelectionMode(QtGui.QAbstractItemView.ExtendedSelection) self.tree.setStyleSheet(""" QTreeView { background-color: rgb(35, 38, 41); alternate-background-color: rgb(200, 200, 200); color: rgb(238, 238, 238); } QLabel { color: rgb(238, 238, 238); } QTreeView::item:has-children { background-color: '#212627'; color: rgb(233, 185, 110); } QTreeView::item:selected { background-color: rgb(92, 53, 102); }""") self.tab_tree = QtWidgets.QTabWidget() # self.tab_tree.setMaximumSize(500, 1000) self.tab_tree_project = QtWidgets.QWidget() self.tab_tree.addTab(self.tab_tree_project, "Project") tab_tree_widgetlayout = QtWidgets.QVBoxLayout() tab_tree_Filterlayout = QtWidgets.QHBoxLayout() # 给FileTab添加一个过滤器 filterlabel1 = QtWidgets.QLabel("Filter from") self.filterLineEdit1 = QtWidgets.QLineEdit() self.filterLineEdit1.setMaximumWidth(200) self.filterLineEdit2 = QtWidgets.QLineEdit() self.filterLineEdit2.setMaximumWidth(200) filterlabel2 = QtWidgets.QLabel("to") self.filterbutton = QtWidgets.QPushButton('Ok') tab_tree_Filterlayout.addWidget(filterlabel1) tab_tree_Filterlayout.addWidget(self.filterLineEdit1) tab_tree_Filterlayout.addWidget(filterlabel2) tab_tree_Filterlayout.addWidget(self.filterLineEdit2) tab_tree_Filterlayout.addWidget(self.filterbutton) tab_tree_widgetlayout.addLayout(tab_tree_Filterlayout) tab_tree_widgetlayout.addWidget(self.tree) # 给projectTab设置一个搜索框 searchlayout = QtWidgets.QHBoxLayout() self.searchLineEdit = QtWidgets.QLineEdit() self.searchLineEdit.setPlaceholderText("Search...") searchlayout.addWidget(self.searchLineEdit) tab_tree_widgetlayout.addLayout(searchlayout) self.tab_tree_project.setLayout(tab_tree_widgetlayout) # 左边控件 - object Verticallayout = QtWidgets.QVBoxLayout() self.calculate_distance = QtWidgets.QPushButton('Distance') self.calculate_degree = QtWidgets.QPushButton('Degree') self.calculate_vector = QtWidgets.QPushButton('Vector') Horizonlayout = QtWidgets.QHBoxLayout() Horizonlayout.addWidget(self.calculate_distance) Horizonlayout.addWidget(self.calculate_vector) Horizonlayout.addWidget(self.calculate_degree) Verticallayout.addLayout(Horizonlayout) Splitter_object = QtWidgets.QSplitter(QtCore.Qt.Vertical) self.tablewidget = QtWidgets.QTableWidget() self.tablewidget.setStyleSheet("color: rgb(205, 205, 205);background-color: rgb(35, 38, 41);") self.tablewidget.setColumnCount(3) self.tablewidget.setHorizontalHeaderLabels(["Atom", 'Color', 'Atom radii(Å)']) self.tablewidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers) self.tablewidget.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows) self.tablewidget.setColumnWidth(1, 70) Splitter_object.addWidget(self.tablewidget) self.objectTextwidget = QtWidgets.QTextEdit() self.objectTextwidget.setStyleSheet("color: rgb(205, 205, 205);background-color: rgb(35, 
        Splitter_object.addWidget(self.objectTextwidget)
        Verticallayout.addWidget(Splitter_object)
        self.ballfilling_button = QtWidgets.QPushButton('Space filling')
        self.ball_stick_button = QtWidgets.QPushButton('Ball-and-stick')
        Horizonlayout1 = QtWidgets.QHBoxLayout()
        Horizonlayout1.addWidget(self.ballfilling_button)
        Horizonlayout1.addWidget(self.ball_stick_button)
        Verticallayout.addLayout(Horizonlayout1)
        self.tab_tree_Object = QtWidgets.QWidget()
        self.tab_tree.addTab(self.tab_tree_Object, "Object")
        self.tab_tree_Object.setLayout(Verticallayout)
        splitter1.addWidget(self.tab_tree)
        ## right-hand panel
        self.view_widget = Myviewwidget()
        self.view_widget.setSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        self.view_widget.setBackgroundColor((35, 38, 41, 0))
        self.widget_view_widget = QtWidgets.QWidget()
        view_widget_vertical_layout = QtWidgets.QVBoxLayout()
        view_widget_vertical_layout.addWidget(self.view_widget)
        self.widget_view_widget.setLayout(view_widget_vertical_layout)
        self.splitterview_widget = QtWidgets.QSplitter(QtCore.Qt.Horizontal)
        # self.splitterview_widget.addWidget(self.view_widget)
        self.splitterview_widget.addWidget(self.widget_view_widget)
        self.vertical_widget = QtWidgets.QWidget()
        database_vertical_layout = QtWidgets.QVBoxLayout()
        self.vertical_widget.setLayout(database_vertical_layout)
        self.datatable = QtWidgets.QTableWidget()
        self.datatable.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        # self.datatable.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)  # allow a right-click context menu
        # self.datatable.customContextMenuRequested.connect(self.generateMenu)  # right-click menu
        self.datatable.setColumnCount(9)
        # self.datatable.setHorizontalHeaderLabels(['Formula', 'a(Å)', 'b(Å)', 'c(Å)', 'α(°)', 'β(°)', 'γ(°)', 'Volume'])
        self.datatable.setHorizontalHeaderLabels(['ID', 'Formula', 'a(Å)', 'b(Å)', 'c(Å)', 'α(°)', 'β(°)', 'γ(°)', 'Volume'])
        self.show_image_table = QtWidgets.QTableWidget()
        self.show_image_table.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.tab_view_widget = QtWidgets.QTabWidget()
        self.tab_view_widget_init = QtWidgets.QWidget()
        self.tab_view_widget.addTab(self.tab_view_widget_init, "MatLego")
        tab_view_widgetlayout = QtWidgets.QVBoxLayout()
        # search box in the database panel
        searchdatabase_layout = QtWidgets.QHBoxLayout()
        self.searchdatabase_LineEdit = QtWidgets.QLineEdit()
        self.searchdatabase_LineEdit.setPlaceholderText("Search...")
        searchdatabase_layout.addWidget(self.searchdatabase_LineEdit)
        # database radio buttons
        Hlayout0 = QtWidgets.QHBoxLayout()
        self.btn1 = QtWidgets.QRadioButton("Crystal")
        self.btn1.setChecked(True)
        Hlayout0.addWidget(self.btn1)
        self.btn2 = QtWidgets.QRadioButton("Hetero-junction")
        self.btn2.setChecked(False)
        Hlayout0.addWidget(self.btn2)
        self.btn3 = QtWidgets.QRadioButton("Device")
        self.btn3.setChecked(False)
        Hlayout0.addWidget(self.btn3)
        database_vertical_layout.addLayout(Hlayout0)
        database_vertical_layout.addLayout(searchdatabase_layout)
        splitter_data_image = QtWidgets.QSplitter(QtCore.Qt.Vertical)
        splitter_data_image.addWidget(self.datatable)
        splitter_data_image.addWidget(self.show_image_table)
        database_vertical_layout.addWidget(splitter_data_image)
        self.splitterview_widget.addWidget(self.vertical_widget)
        tab_view_widgetlayout.addWidget(self.splitterview_widget)
        self.tab_view_widget_init.setLayout(tab_view_widgetlayout)
        splitter2.addWidget(self.tab_view_widget)
        # bottom right
        self.tab = QtWidgets.QTabWidget()
        self.tab1 = QtWidgets.QWidget()
        self.tab2 = QtWidgets.QWidget()
        self.tab3 = QtWidgets.QWidget()
        self.tab.addTab(self.tab1, "Crystal information")
        self.tab.addTab(self.tab2, "Atom information")
        self.tab.addTab(self.tab3, "Hetero-junction/Device information")
        # tab1 layout
        self.text_widgt = QtWidgets.QTextEdit()
        self.text_widgt.setReadOnly(True)
        self.text_widgt.resize(24, 20)
        self.text_widgt.setStyleSheet("color: rgb(205, 205, 205);background-color: rgb(35, 38, 41);")
        # self.text_widgt.setStyleSheet("color: rgb(205, 205, 205);")
        tab1layout = QtWidgets.QVBoxLayout()
        tab1layout.addWidget(self.text_widgt)
        self.tab1.setLayout(tab1layout)
        # tab2 layout
        self.text_widgt2 = QtWidgets.QTextEdit()
        self.text_widgt2.setReadOnly(True)
        self.text_widgt2.resize(24, 20)
        self.text_widgt2.setStyleSheet("color: rgb(205, 205, 205);background-color: rgb(35, 38, 41);")
        # self.text_widgt2.setStyleSheet("color: rgb(205, 205, 205);background-color: rgb(0, 0, 0);")
        tab2layout = QtWidgets.QVBoxLayout()
        tab2layout.addWidget(self.text_widgt2)
        self.tab2.setLayout(tab2layout)
        # tab3 layout
        self.tab3_table_widget1 = QtWidgets.QTableWidget()
        self.tab3_table_widget1.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.tab3_table_widget1.setStyleSheet("color: rgb(255, 255, 255);background-color: rgb(35, 38, 41);")
        self.tab3_table_widget1.setColumnCount(5)
        self.tab3_table_widget1.setHorizontalHeaderLabels(['Hetero-junction', 'Optimal Match', '#Layer', 'Binding energy (eV/MoS2)', 'Schottky barrier (eV)'])
        self.tab3_table_widget1.resizeColumnsToContents()
        self.tab3_table_widget2 = QtWidgets.QTableWidget()
        self.tab3_table_widget2.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.tab3_table_widget2.setStyleSheet("color: rgb(255, 255, 255);background-color: rgb(35, 38, 41);")
        self.tab3_table_widget2.setColumnCount(5)
        self.tab3_table_widget2.setHorizontalHeaderLabels(['Device', 'Optimal Match', '#Layer', 'Schottky barrier (eV)', 'I-V curve (Theory)'])
        self.tab3_table_widget2.resizeColumnsToContents()
        tab3layout = QtWidgets.QVBoxLayout()
        tab3splitter = QtWidgets.QSplitter(QtCore.Qt.Horizontal)  # horizontal
        tab3splitter.addWidget(self.tab3_table_widget1)
        tab3splitter.addWidget(self.tab3_table_widget2)
        tab3layout.addWidget(tab3splitter)
        self.tab3.setLayout(tab3layout)
        # splitter2.addWidget(self.text_widgt)
        splitter2.addWidget(self.tab)
        splitter2.setStretchFactor(0, 5)
        splitter2.setStretchFactor(1, 2)  # make the split panes resizable by dragging
        splitter1.addWidget(splitter2)
        splitter1.setStretchFactor(0, 2)
        splitter1.setStretchFactor(1, 8)
        # everything above is the main-window layout
        self.view_widget.setWindowTitle('view_cell')
        self.view_widget.setCameraPosition(distance=15, azimuth=-90)
        layout.addWidget(splitter1)
        self.menuBar = QtWidgets.QMenuBar(MainWindow)
        # self.menuBar.setGeometry(QtCore.QRect(0, 0, 1274, 30))
        self.document_menu = QtWidgets.QMenu(self.menuBar)
        # self.document_menu.setGeometry(QtCore.QRect(0, 0, 800, 300))
        self.function_menu = QtWidgets.QMenu(self.menuBar)
        self.view_menu = QtWidgets.QMenu(self.menuBar)
        self.database_menu = QtWidgets.QMenu(self.menuBar)
        self.Setting_menu = QtWidgets.QMenu(self.menuBar)
        self.help_menu = QtWidgets.QMenu(self.menuBar)
        MainWindow.setMenuBar(self.menuBar)
        self.mainToolBar = QtWidgets.QToolBar(MainWindow)
        MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.mainToolBar)
        # New
        self.actionNew = QtWidgets.QAction(MainWindow)
        self.actionNewToolBar = QtWidgets.QAction(QtGui.QIcon(r"..\resources\New.png"), "New", MainWindow)
        self.toolbarBox1.addAction(self.actionNewToolBar)
        # Open
        self.actionOpen = QtWidgets.QAction(MainWindow)
        self.actionOpenToolBar = QtWidgets.QAction(QtGui.QIcon(r"..\resources\open1.png"), "Open", MainWindow)
        self.toolbarBox1.addAction(self.actionOpenToolBar)
        # Save
self.actionSave = QtWidgets.QAction(MainWindow) # exit self.actionExit = QtWidgets.QAction(MainWindow) # Cut self.actionCut = QtWidgets.QAction(MainWindow) self.actionTraversal_Cut = QtWidgets.QAction(MainWindow) # Cut ToolBar self.actionCutToolBar = QtWidgets.QAction(QtGui.QIcon(r"..\resources\Cut7.png"), "Exfoliate", MainWindow) self.toolbarBox1.addAction(self.actionCutToolBar) # Judge3layer self.actionJudge2layer = QtWidgets.QAction(MainWindow) self.actionJudge3layer = QtWidgets.QAction(MainWindow) self.actionJudgecustomize = QtWidgets.QAction(MainWindow) # self.actionGuide = QtWidgets.QAction(MainWindow) self.action_setting_atom = QtWidgets.QAction(MainWindow) # Stack self.actionStack = QtWidgets.QAction(MainWindow) self.actionStackToolbar = QtWidgets.QAction(QtGui.QIcon(r"..\resources\Stack2.png"), "Stack", MainWindow) self.toolbarBox1.addAction(self.actionStackToolbar) # Rotate self.actionRotate = QtWidgets.QAction(MainWindow) self.actionRotate_cutomize = QtWidgets.QAction(MainWindow) # drag_rectangle self.actiondrag_rectangle = QtWidgets.QAction(QtGui.QIcon(r"..\resources\dragrectangle0.png"), "Drag rectangle", MainWindow) self.actionNormalchoose = QtWidgets.QAction(QtGui.QIcon(r"..\resources\drag6.png"), "Drag", MainWindow) self.actionSetlayer = QtWidgets.QAction(QtGui.QIcon(r"..\resources\setlayer.png"), "Set layer", MainWindow) self.actionMovelayer = QtWidgets.QAction(QtGui.QIcon(r"..\resources\movelayer.png"), "Movelayer", MainWindow) self.actionleft_screen = QtWidgets.QAction(QtGui.QIcon(r"..\resources\left.png"), "left screen", MainWindow) self.actionright_screen = QtWidgets.QAction(QtGui.QIcon(r"..\resources\right.png"), "right screen", MainWindow) self.actionup_screen = QtWidgets.QAction(QtGui.QIcon(r"..\resources\up.png"), "up screen", MainWindow) self.actiondown_screen = QtWidgets.QAction(QtGui.QIcon(r"..\resources\down.png"), "down screen", MainWindow) self.actionmiddle_screen = QtWidgets.QAction(QtGui.QIcon(r"..\resources\middle.png"), "middle screen", MainWindow) self.toolbarBox1.addAction(self.actiondrag_rectangle) self.toolbarBox1.addAction(self.actionNormalchoose) self.toolbarBox1.addAction(self.actionSetlayer) self.toolbarBox1.addAction(self.actionMovelayer) self.toolbarBox2.addAction(self.actionleft_screen) self.toolbarBox2.addAction(self.actionright_screen) self.toolbarBox2.addAction(self.actionup_screen) self.toolbarBox2.addAction(self.actiondown_screen) self.toolbarBox2.addAction(self.actionmiddle_screen) self.actionTraversal_rotate = QtWidgets.QAction(MainWindow) self.actionTraversal_Stack1 = QtWidgets.QAction(MainWindow) self.actionTraversal_Stack2 = QtWidgets.QAction(MainWindow) self.actionClassification = QtWidgets.QAction(MainWindow) self.actionImport = QtWidgets.QAction(MainWindow) self.actionExport = QtWidgets.QAction(MainWindow) self.actiona = QtWidgets.QAction(MainWindow) self.actionb = QtWidgets.QAction(MainWindow) self.actionc = QtWidgets.QAction(MainWindow) self.actionViewCell = QtWidgets.QAction(QtGui.QIcon(r"..\resources\dot1.png"), "View cell", MainWindow) self.actionViewCell.setIconVisibleInMenu(True) self.actionViewCoordinate_System = QtWidgets.QAction(MainWindow) self.actionViewCoordinate_cell = QtWidgets.QAction(MainWindow) self.actionViewGrid = QtWidgets.QAction(MainWindow) # view tool window self.actionViewDatabase = QtWidgets.QAction(MainWindow) self.actionViewProject = QtWidgets.QAction(MainWindow) self.actionViewText = QtWidgets.QAction(MainWindow) self.actionPerspective = QtWidgets.QAction(MainWindow) self.actionOrthogonal = 
QtWidgets.QAction(MainWindow) # document menu self.document_menu.addAction(self.actionNew) self.document_menu.addAction(self.actionOpen) self.document_menu.addSeparator() self.document_menu.addAction(self.actionSave) self.document_menu.addSeparator() self.document_menu.addAction(self.actionImport) self.document_menu.addAction(self.actionExport) self.document_menu.addSeparator() self.document_menu.addAction(self.actionExit) # function menu self.function_menu.addAction(self.actionCut) # self.function_menu.addSeparator() self.function_menu.addAction(self.actionStack) self.function_menu.addAction(self.actionRotate) self.function_menu.addAction(self.actionRotate_cutomize) self.function_menu.addSeparator() self.function_menu.addAction(self.actionTraversal_rotate) self.function_menu.addAction(self.actionTraversal_Stack1) self.function_menu.addAction(self.actionTraversal_Stack2) self.function_menu.addAction(self.actionTraversal_Cut) self.Judge = self.function_menu.addMenu("Judge") self.Judge.addAction(self.actionJudge2layer) self.Judge.addAction(self.actionJudge3layer) self.Judge.addAction(self.actionJudgecustomize) self.function_menu.addSeparator() self.function_menu.addAction(self.actionClassification) self.function_menu.addSeparator() # view menu self.toolwindow_menu = self.view_menu.addMenu("Tool Window") self.toolwindow_menu.addAction(self.actionViewDatabase) self.toolwindow_menu.addAction(self.actionViewProject) self.toolwindow_menu.addAction(self.actionViewText) # self.viewport_menu = self.view_menu.addMenu("Viewport") self.viewport_menu.addAction(self.actionPerspective) self.viewport_menu.addAction(self.actionOrthogonal) self.view_from = self.view_menu.addMenu("view from") self.view_from.addAction(self.actiona) self.view_from.addAction(self.actionb) self.view_from.addAction(self.actionc) self.view_menu.addAction(self.actionViewCell) self.coordinate_system_menu = self.view_menu.addMenu("Coordinate System") self.coordinate_system_menu.addAction(self.actionViewCoordinate_System) self.coordinate_system_menu.addAction(self.actionViewCoordinate_cell) self.view_menu.addAction(self.actionViewGrid) # help menu self.help_menu.addAction(self.actionGuide) # Setting menu self.Setting_menu.addAction(self.action_setting_atom) # database_menu self.action_db_connect = QtWidgets.QAction(MainWindow) self.action_add_data = QtWidgets.QAction(MainWindow) self.action_cif_to_db = QtWidgets.QAction(MainWindow) self.action_db_to_cifs = QtWidgets.QAction(MainWindow) self.database_menu.addAction(self.action_db_connect) self.database_menu.addAction(self.action_add_data) self.database_menu.addAction(self.action_cif_to_db) self.database_menu.addAction(self.action_db_to_cifs) # self.menuBar.addAction(self.document_menu.menuAction()) self.menuBar.addAction(self.function_menu.menuAction()) self.menuBar.addAction(self.view_menu.menuAction()) self.menuBar.addAction(self.help_menu.menuAction()) self.menuBar.addAction(self.Setting_menu.menuAction()) self.menuBar.addAction(self.database_menu.menuAction()) self.init_database() # calculate menu self.retranslateUi(MainWindow) QtCore.QMetaObject.connectSlotsByName(MainWindow) def retranslateUi(self, MainWindow): _translate = QtCore.QCoreApplication.translate MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow")) self.document_menu.setTitle(_translate("MainWindow", "File")) self.function_menu.setTitle(_translate("MainWindow", "Function")) self.view_menu.setTitle(_translate("MainWindow", "View")) self.database_menu.setTitle(_translate("MainWindow", "Database")) 
        self.help_menu.setTitle(_translate("MainWindow", "Help"))
        self.Setting_menu.setTitle(_translate("MainWindow", "Settings"))
        # Judge2layer
        self.actionJudge2layer.setText(_translate("MainWindow", "Two - layer electronic device"))
        self.actionJudge3layer.setText(_translate("MainWindow", "Three - layer electronic device"))
        self.actionJudgecustomize.setText(_translate("MainWindow", "Customize"))
        # actionOpen
        self.actionNew.setText(_translate("MainWindow", "New"))
        self.actionNew.setShortcut('Ctrl+N')
        self.actionOpen.setText(_translate("MainWindow", "Open"))
        self.actionOpen.setShortcut('Ctrl+O')
        # self.actionOpen.setStatusTip("Open")  # shown in the status bar
        # actionsave
        self.actionSave.setText(_translate("MainWindow", "Save"))
        # actionExit
        self.actionExit.setText(_translate("MainWindow", "Exit"))
        # actionRotate
        self.actionRotate.setText(_translate("MainWindow", "Rotate"))
        self.actionRotate_cutomize.setText(_translate("MainWindow", "Rotate(customize)"))
        # actionCut
        self.actionCut.setText(_translate("MainWindow", "Exfoliate"))
        # actionGuide
        self.actionGuide.setText(_translate("MainWindow", "Guide"))
        # Setting
        self.action_setting_atom.setText(_translate("MainWindow", "Atom parameter"))
        # actionStack
        self.actionStack.setText(_translate("MainWindow", "Stack"))
        # actionTraversal
        self.actionTraversal_rotate.setText(_translate("MainWindow", "Traversal-Rotate"))
        self.actionTraversal_Stack1.setText(_translate("MainWindow", "Traversal-Stack(1)"))
        self.actionTraversal_Stack2.setText(_translate("MainWindow", "Traversal-Stack(2)"))
        # TraversalCut
        self.actionTraversal_Cut.setText(_translate("MainWindow", "Traversal-Cut"))
        # actionClassification
        self.actionClassification.setText(_translate("MainWindow", "Classification"))
        # actionImport
        self.actionImport.setText(_translate("MainWindow", "Import"))
        # actionExport
        self.actionExport.setText(_translate("MainWindow", "Export"))
        self.actionViewProject.setText(_translate("MainWindow", "Project"))
        self.actiona.setText(_translate("MainWindow", "a-axis"))
        self.actionb.setText(_translate("MainWindow", "b-axis"))
        self.actionc.setText(_translate("MainWindow", "c-axis"))
        self.actionViewCell.setText(_translate("MainWindow", "Cell"))
        self.actionViewCoordinate_System.setText(_translate("MainWindow", "Cartesian"))
        self.actionViewCoordinate_cell.setText(_translate("MainWindow", "Cell Coordinate"))
        self.actionViewGrid.setText(_translate("MainWindow", "Grid"))
        self.actionViewDatabase.setText(_translate("MainWindow", "Database"))
        self.actionViewText.setText(_translate("MainWindow", "Text"))
        self.actionOrthogonal.setText(_translate("MainWindow", "Orthogonal viewport"))
        self.actionPerspective.setText(_translate("MainWindow", "Perspective viewport"))
        self.action_db_connect.setText(_translate("MainWindow", "Connect"))
        self.action_add_data.setText(_translate("MainWindow", "Add CIF"))
        self.action_cif_to_db.setText(_translate("MainWindow", "Create a database"))
        self.action_db_to_cifs.setText(_translate("MainWindow", "Composite CIF file"))

    def init_database(self):
        self.datatable.setRowCount(10)  # add table rows
        self.datatable.setShowGrid(True)
        self.vertical_widget.setMaximumWidth(1200)
        # self.vertical_widget.(1000, 550)
Systematic Analysis of Cluster Similarity Indices: How to Validate Validation Measures

Many cluster similarity indices are used to evaluate clustering algorithms, and choosing the best one for a particular task remains an open problem. We demonstrate that this problem is crucial: there are many disagreements among the indices, these disagreements do affect which algorithms are preferred in applications, and this can lead to degraded performance in real-world systems. We propose a theoretical framework to tackle this problem: we develop a list of desirable properties and conduct an extensive theoretical analysis to verify which indices satisfy them. This allows for making an informed choice: given a particular application, one can first select properties that are desirable for the task and then identify indices satisfying these. Our work unifies and considerably extends existing attempts at analyzing cluster similarity indices: we introduce new properties, formalize existing ones, and mathematically prove or disprove each property for an extensive list of validation indices. This broader and more rigorous approach leads to recommendations that considerably differ from how validation indices are currently being chosen by practitioners. Some of the most popular indices are even shown to be dominated by previously overlooked ones.

Introduction

Clustering is an unsupervised machine learning problem, where the task is to group objects that are similar to each other. In network analysis, a related problem is called community detection: groupings are based on relations between items (links), and the obtained clusters are expected to be densely interconnected. Clustering is used across various applications, including text mining, online advertisement, anomaly detection, and many others (Xu & Tian, 2015; Allahyari et al., 2017). To measure the quality of a clustering algorithm, one can use either internal or external measures. Internal measures evaluate the consistency of the clustering result with the data being clustered, e.g., the Silhouette, Hubert-Gamma, and Dunn indices, or modularity in network analysis (Newman & Girvan, 2004). Unfortunately, it is often unclear whether optimizing any of these measures would translate into improved quality in practical applications. External (cluster similarity) measures compare the candidate partition with a reference one (obtained, e.g., by human assessors). A comparison with such a gold-standard partition, when it is available, is more reliable. There are many tasks where external evaluation is applicable: text clustering (Amigó et al., 2009), topic modeling (Virtanen & Girolami, 2019), Web categorization (Wibowo & Williams, 2002), face clustering (Wang et al., 2019), news aggregation (see Section 3), and others. Often, when there is no reference partition available, it is possible to let a group of experts annotate a subset of items and compare the algorithms on this subset. Dozens of cluster similarity measures exist, and which one should be used is a subject of debate (Lei et al., 2017). In this paper, we systematically analyze the problem of choosing the best cluster similarity index. We start with a series of experiments demonstrating the importance of the problem (Section 3). First, we construct simple examples showing the inconsistency of all pairs of different similarity indices. Then, we demonstrate that such disagreements often occur in practice when well-known clustering algorithms are applied to real datasets.
Finally, we illustrate how an improper choice of a similarity index can affect the performance of production systems. So, the question is: how can we compare cluster similarity indices and decide which one is best for a particular application? Ideally, we would want to choose an index for which good similarity scores translate into good real-world performance. However, opportunities to experimentally perform such a validation of validation indices are rare, typically expensive, and do not generalize to other applications. In contrast, we suggest a theoretical approach: we formally define properties that are desirable across various applications, discuss their importance, and formally analyze which similarity indices satisfy them (Section 4). This theoretical framework allows practitioners to choose the best index based on the properties relevant to their applications. In Section 5, we show how this choice can be made and discuss indices that are expected to be suitable across various applications. Among the considered properties, constant baseline is arguably the most important and non-trivial one. Informally, a sensible index should not prefer one candidate partition over another just because it has too large or too small clusters. Constant baseline is a particular focus of the current research, and we develop a rigorous theoretical framework for analyzing this property. In this respect, our work improves over the previous (mostly empirical) research on the constant baseline of particular indices (Strehl, 2002; Albatineh et al., 2006; Vinh et al., 2009; 2010; Lei et al., 2017). While the ideas discussed in the paper can be applied to all similarity indices, we provide an additional theoretical characterization of pair-counting ones (e.g., Rand and Jaccard), which gives an analytical background for further studies of pair-counting indices. We formally prove that among dozens of known indices, only two have all the properties except for being a distance: the Correlation Coefficient and Sokal & Sneath's first index (Lei et al., 2017). Surprisingly, both indices are rarely used for cluster evaluation. The Correlation Coefficient has the additional advantage of being easily convertible to a distance measure via the arccosine function. The obtained index has all the properties except constant baseline, which is still satisfied asymptotically. To sum up, our main contributions are the following:
• We formally define properties that are desirable across various applications. We analyze an extensive list of cluster similarity indices and mathematically prove or disprove all properties for each of them (Tables 3, 4).
• We provide a methodology for choosing a suitable validation index for a particular application. In particular, we identify previously overlooked indices that dominate the most popular ones (Section 5).
• We formalize the notion of constant baseline and provide a framework for its analysis; for pair-counting indices, we introduce the notion of asymptotic constant baseline (Section 4.6). We also provide a definition of monotonicity that unifies and extends previous attempts; for pair-counting indices, we introduce a strengthening of monotonicity (Section 4.5).
We believe that our unified and extensive analysis provides a useful tool for researchers and practitioners because research outcomes and application performance are highly dependent on the chosen validation index.
Comparison with prior work

While there are previous attempts to analyze cluster similarity indices, our work unifies and significantly extends them. In particular, Lei et al. (2017) only consider biases of pair-counting indices, Meilă (2007) analyzes properties of Variation of Information, and Vinh et al. (2010) analyze information-theoretic indices. Amigó et al. (2009) consider properties desirable for text clustering and mostly focus on monotonicity. Most importantly, Amigó et al. (2009) do not consider constant baseline (the absence of preference towards specific cluster sizes), which we found to be extremely important. In contrast, the problem of indices favoring clusterings with smaller or larger clusters has been identified by, e.g., Albatineh et al. (2006), Lei et al. (2017), and Vinh et al. (2009; 2010). This problem is typically addressed by modifying a particular index (or family of indices) such that the obtained measure does not suffer from it. However, as we show in this paper, these modifications often lead to other important properties not being satisfied. We refer to Appendix A for a more detailed comparison to related research. In the current paper, we introduce new properties, formalize existing ones, and mathematically prove or disprove each property for an extensive list of validation indices. This broader and more rigorous approach leads to conclusions that considerably differ from how validation indices are currently being chosen.

Cluster Similarity Indices

We consider clustering n elements numbered from 1 to n, so that a clustering can be represented by a partition of {1, ..., n} into disjoint subsets. Capital letters A, B, C will be used to name the clusterings, and we will represent them as A = {A_1, ..., A_{k_A}}, where A_i is the set of elements belonging to the i-th cluster. If a pair of elements v, w ∈ V lies in the same cluster of A, we refer to them as an intra-cluster pair of A; otherwise, they form an inter-cluster pair. The total number of pairs is denoted by N = n(n − 1)/2. The value that an index V assigns to the similarity between partitions A and B will be denoted by V(A, B). We now define some of the indices used throughout the paper. A more comprehensive list, together with formal definitions, is given in Appendices B.1 and B.2. Pair-counting indices consider clusterings to be similar if they agree on many pairs. Formally, let A be the N-dimensional vector indexed by the set of element-pairs, where the entry corresponding to (v, w) equals 1 if (v, w) is an intra-cluster pair and 0 otherwise. Let M_AB be the N × 2 matrix that results from concatenating the two (column) vectors A and B. Each row of M_AB is either 11, 10, 01, or 00. Let the pair-counts N_11, N_10, N_01, N_00 denote the number of occurrences of each of these rows in M_AB.

Definition 1. A pair-counting index is a similarity index that can be expressed as a function of the pair-counts N_11, N_10, N_01, N_00.

Some popular pair-counting indices are Rand and Jaccard:
R(A, B) = (N_11 + N_00) / N,   J(A, B) = N_11 / (N_11 + N_10 + N_01).
Adjusted Rand (AR) is an adaptation of Rand ensuring that when B is random, we have AR(A, B) = 0 in expectation. A less widely used index is the Pearson Correlation Coefficient (CC) between the binary incidence vectors A and B. (Note that Spearman and Pearson correlation are equal when comparing binary vectors, and that Kendall rank correlation for binary vectors coincides with the Hubert index, which is linearly equivalent to Rand.)
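To make these definitions concrete, the following minimal Python sketch (illustrative only; the helper names and the toy partitions are our own, not the authors' code) computes the pair-counts of two labelings by brute force and evaluates Rand, Jaccard, and the Pearson Correlation Coefficient on two candidates against a reference. Printing both candidates' scores side by side also makes it easy to spot inconsistent index pairs of the kind discussed in the next section.

import numpy as np
from itertools import combinations

def pair_counts(labels_a, labels_b):
    # Classify every element-pair as intra/inter in each clustering.
    n11 = n10 = n01 = n00 = 0
    for v, w in combinations(range(len(labels_a)), 2):
        same_a = labels_a[v] == labels_a[w]
        same_b = labels_b[v] == labels_b[w]
        if same_a and same_b:
            n11 += 1
        elif same_a:
            n10 += 1
        elif same_b:
            n01 += 1
        else:
            n00 += 1
    return n11, n10, n01, n00

def rand_index(n11, n10, n01, n00):
    return (n11 + n00) / (n11 + n10 + n01 + n00)

def jaccard_index(n11, n10, n01, n00):
    return n11 / (n11 + n10 + n01)

def correlation_coefficient(n11, n10, n01, n00):
    # Pearson correlation between the two binary pair-incidence vectors.
    N = n11 + n10 + n01 + n00
    m_a, m_b = n11 + n10, n11 + n01
    return (N * n11 - m_a * m_b) / np.sqrt(m_a * (N - m_a) * m_b * (N - m_b))

A  = [0, 0, 0, 1, 1, 2]   # reference partition of 6 elements
B1 = [0, 0, 1, 1, 1, 2]   # candidate 1
B2 = [0, 0, 0, 1, 2, 2]   # candidate 2
for index in (rand_index, jaccard_index, correlation_coefficient):
    print(index.__name__, index(*pair_counts(A, B1)), index(*pair_counts(A, B2)))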
Another index, which we discuss in more detail further on, is the Correlation Distance, CD(A, B) := (1/π) arccos CC(A, B). Among information-theoretic indices, Normalized Mutual Information (NMI) normalizes the mutual information MI(A, B) by the average (or the maximum) of the entropies H(A) and H(B). NMI is known to be biased towards smaller clusters, and several modifications try to mitigate this bias: Adjusted Mutual Information (AMI) and Standardized Mutual Information (SMI) subtract the expected mutual information from MI(A, B) and normalize the obtained value (Vinh et al., 2009), while Fair NMI (FNMI) multiplies NMI by a penalty factor e^(−|k_A − k_B| / k_A) (Amelio & Pizzuti, 2015).

Motivating Experiments

Evidently, many different cluster similarity indices are used by researchers and practitioners. A natural question is: how to choose the best one? Before trying to answer this question, it is important to understand whether the problem is relevant. Indeed, if the indices are very similar to each other and agree in most practical applications, then one can safely use any of them.

First, we illustrate the inconsistency of all indices. We say that two indices V_1 and V_2 are inconsistent for a triplet of partitions (A, B_1, B_2) if V_1(A, B_1) > V_1(A, B_2) while V_2(A, B_1) < V_2(A, B_2). We took 15 popular cluster similarity measures and constructed just four triplets such that each pair of indices is inconsistent for at least one triplet. One such triplet is shown in Figure 1: for this simple example, about half of the indices prefer the left candidate, while the others prefer the right one. Other examples can be found in Appendix F.1.

Thus, we see that the indices differ. But can this affect conclusions obtained in experiments on real data? To check that, we ran 8 well-known clustering algorithms (Scikit-learn, 2020) on 16 real-world datasets from the UCI machine learning repository (Dua & Graff, 2017). Each dataset, together with a pair of algorithms, gives a triplet of partitions (A, B_1, B_2), where A is a reference partition and B_1, B_2 are provided by two algorithms. For a given pair of indices and all such triplets, we look at whether the indices are consistent. Table 1 shows the relative inconsistency for several popular indices. (The extended table, together with a detailed description of the experimental setup and further analysis, is given in Appendix F.2.) The inconsistency rate is significant: e.g., the popular measures Adjusted Rand and Variation of Information disagree in almost 40% of the cases. Importantly, the best agreeing indices are S&S and CC, which satisfy most of our properties, as shown in the next section.

To demonstrate that the choice of similarity index may affect the final performance in a real production scenario, we conducted an experiment within a major news aggregator system. The system groups news articles into events and shows the list of the most important events to users. For grouping, a clustering algorithm is used, and the quality of this algorithm affects the user experience: merging different clusters may lead to not showing an important event, while too much splitting may cause duplicate events. When comparing several candidate clustering algorithms, it is important to determine which one is best for the system. Online experiments are expensive and can be used only for the best candidates; thus, we need a tool for an offline comparison. For this purpose, we manually created a reference partition on a small fraction of news articles to evaluate the candidates.
We performed such an offline comparison for two candidate algorithms A_1 and A_2 and observed that different indices preferred different algorithms (see Table 2). In particular, the well-known FNMI, AMI, and Rand prefer A_1, which disagrees with most of the other indices. Then, we launched an online user experiment and verified that the candidate A_2 is better for the system according to user preferences. This shows the importance of choosing the right index for offline comparisons. See Appendix F.3 for a more detailed description of this experiment.

Analysis of Cluster Similarity Indices

In this section, we motivate and formally define properties that are desirable for cluster similarity indices. We start with simple and intuitive ones that can be useful in some applications but are not always necessary. Then, we discuss more complicated properties, ending with constant baseline, which is extremely important but least trivial. In Tables 3 and 4, indices of particular interest are listed along with the properties they satisfy. In Appendix C, we give the proofs for all entries of these tables. For pair-counting indices, we perform a more detailed analysis and define additional properties. For such indices, we interchangeably use the notation V(A, B) and V(N_11, N_10, N_01, N_00).

Some of the indices have slight variants that are essentially the same. For example, the Hubert index (Hubert, 1977) is a linear transformation of the Rand index: H = 2R − 1. All the properties defined in this paper are invariant under linear transformations and under interchanging A and B. Hence, we define the following linear equivalence relation on similarity indices and check the properties for at most one representative of each equivalence class.

Definition 2. Similarity indices V_1 and V_2 are linearly equivalent if there exists a non-constant linear function f such that either V_1(A, B) = f(V_2(A, B)) for all A, B, or V_1(A, B) = f(V_2(B, A)) for all A, B.

This allows us to conveniently restrict to indices for which higher numerical values indicate higher similarity of partitions. Table 6 in the Appendix lists equivalences among indices.

Property 1: Maximal Agreement

The numerical value that an index assigns to a similarity must be easily interpretable. In particular, it should be easy to see whether the candidate clustering is maximally similar to (i.e., coincides with) the reference clustering. Formally, we require that V(A, A) = c_max is constant and is a strict upper bound for V(A, B) for all A ≠ B. The equivalence from Definition 2 allows us to assume w.l.o.g. that V(A, A) is a maximum. This property is easy to check, and it is satisfied by almost all indices, except for SMI and Wallace.

Property 1′: Minimal Agreement

The maximal agreement property makes the upper range of the index interpretable. Similarly, a numerical value for low agreement would make the lower range interpretable. A minimal agreement is not well defined for general partitions: it is unclear which partition is most dissimilar to a given one. However, by Lemma 1 in Appendix B.3, pair-counting indices form a subclass of graph similarity indices. For a graph with edge-set E, it is clear that the most dissimilar graph is its complement (i.e., the graph with edge-set E^C). Comparing a graph to its complement results in pair-counts N_11 = N_00 = 0 and N_10 + N_01 = N. This motivates the following definition:

Definition 3. A pair-counting index V satisfies minimal agreement if V(0, N_10, N_01, 0) = c_min is constant and is a strict lower bound for V(A, B) whenever N_11 + N_00 > 0.

Property 2: Symmetry

Similarity is intuitively understood as a symmetric concept. Therefore, a good similarity index is expected to be symmetric, i.e., V(A, B) = V(B, A) for all partitions A, B.
Tables 3 and 4 show that most indices are symmetric. The asymmetric ones are precision and recall (Wallace) and FNMI (Amelio & Pizzuti, 2015), which is a product of NMI and an asymmetric penalty factor.

Property 3: Linear Complexity

For clustering tasks on large datasets, running time is crucial, and algorithms with superlinear time can be infeasible. In these cases, a validation index with superlinear running time would be a significant bottleneck. Furthermore, computationally heavy indices also tend to be complicated and hard to interpret intuitively. We say that an index has linear complexity when its worst-case running time is O(n). In Appendix C.2, we prove that any pair-counting index has O(n) complexity. Many general indices have this property as well, with the exceptions of SMI and AMI.

Property 4: Distance

For some applications, a distance interpretation of dissimilarity may be desirable: whenever A is similar to B and B is similar to C, then A should also be somewhat similar to C. For example, assume that the reference clustering (e.g., labeled by experts) is an approximation of the ground truth. In such situations, it may be reasonable to argue that the reference clustering is at most a distance ε from the true one, so that the triangle inequality bounds the dissimilarity of the candidate clustering to the unknown true clustering. A function d is a distance metric if it satisfies three distance axioms: 1) symmetry (d(A, B) = d(B, A)); 2) positive-definiteness (d(A, B) ≥ 0 with equality iff A = B); 3) the triangle inequality (d(A, C) ≤ d(A, B) + d(B, C)). We say that V is linearly transformable to a distance metric if there exists a linearly equivalent index that satisfies these three distance axioms. Note that all three axioms are invariant under rescaling of d. We have already imposed symmetry as a separate property, and positive-definiteness is equivalent to the maximal agreement property. Therefore, whenever V has these two properties, it satisfies the distance property iff d(A, B) = c_max − V(A, B) satisfies the triangle inequality, for c_max as defined in Section 4.1. Examples of popular indices having this property are Variation of Information and the Mirkin metric. In Vinh et al. (2010), it is proved that when Mutual Information is normalized by the maximum of entropies, the resulting NMI is equivalent to a distance metric. A proof that the Jaccard index is equivalent to a distance is given in Kosub (2019). See Appendix C.1 for all the proofs.

Correlation Distance

Among all the considered indices, there are two pair-counting ones having all the properties except for being a distance: Sokal & Sneath and the Correlation Coefficient. However, the Correlation Coefficient can be transformed to a distance metric via a non-linear transformation. We define the Correlation Distance (CD) as CD(A, B) := (1/π) arccos CC(A, B), where CC is the Pearson correlation coefficient and the factor 1/π scales the index to [0, 1]. To the best of our knowledge, this Correlation Distance has never before been used as a similarity index for comparing clusterings in the literature.

Theorem 1. The Correlation Distance is indeed a distance.

Proof. A proof is given in (Van Dongen & Enright, 2012). We give an alternative proof that allows for a geometric interpretation.
First, we map each partition A to an N-dimensional vector on the unit sphere by
u_A := (A − (m_A / N) · 1) / ‖A − (m_A / N) · 1‖,
where 1 is the N-dimensional all-one vector, A is the binary vector representation of a partition introduced in Section 2, and m_A = N_11 + N_10 is the number of intra-cluster pairs of A. A straightforward computation gives ‖A − (m_A / N) · 1‖^2 = m_A (1 − m_A / N) and, for the standard inner product, ⟨u_A, u_B⟩ = (N_11 − m_A m_B / N) / √(m_A (1 − m_A / N) · m_B (1 − m_B / N)), so that the inner product indeed corresponds to CC: ⟨u_A, u_B⟩ = CC(A, B). It is a well-known fact that the inner product of two vectors of unit length equals the cosine of their angle. Hence, taking the arccosine gives the angle, and the angle between unit vectors corresponds to the geodesic distance along the unit hypersphere, which satisfies the triangle inequality. As u is an injection from the set of partitions to points on the unit sphere, we conclude that CD is indeed a distance on the set of partitions.

In Section 4.6, we show that the distance property of Correlation Distance is achieved at the cost of not having the exact constant baseline, though the latter is still satisfied asymptotically.

Property 5: Monotonicity

When one clustering is changed such that it resembles the other clustering more, the similarity score ought to improve. Hence, we require an index to be monotone w.r.t. changes that increase the similarity. This can be formalized via the following definitions.

Definition 4. A clustering B′ is an A-consistent improvement of B if B′ ≠ B and every pair of elements on which B agrees with A (i.e., the pair is intra-cluster in both or inter-cluster in both) is also a pair on which B′ agrees with A.

Definition 5. An index V is monotone if V(A, B′) > V(A, B) for every clustering A with 1 < k_A < n and every A-consistent improvement B′ of B.

The trivial cases k_A = 1 and k_A = n were excluded to avoid inconsistencies with the constant baseline property defined in Section 4.6. To look at monotonicity from a different perspective, we define the following operations:
• a perfect split splits a cluster of B in two without separating any intra-cluster pair of A;
• a perfect merge merges two clusters of B such that every pair of elements from the two clusters is an intra-cluster pair of A.
The following theorem gives an alternative characterization of monotonicity and is proven in Appendix E.1.

Theorem 2. B′ is an A-consistent improvement of B if and only if B′ can be obtained from B by a sequence of perfect splits and perfect merges.

Note that this monotonicity is a stronger form of the first two constraints defined in (Amigó et al., 2009): Cluster Homogeneity is a weaker form of our monotonicity w.r.t. perfect splits, while Cluster Equivalence is equivalent to our monotonicity w.r.t. perfect merges. Monotonicity is a critical property that should be satisfied by any sensible index. Surprisingly, not all indices satisfy it: we have found counterexamples proving that SMI, FNMI, and Wallace do not have the monotonicity property. Furthermore, for NMI, whether monotonicity is satisfied depends on the normalization: the normalization by the average of the entropies yields monotonicity, while the normalization by the maximum of the entropies does not.

Property 5′: Strong Monotonicity

For pair-counting indices, we can define a stronger monotonicity property in terms of pair-counts.

Definition 6. A pair-counting index V satisfies strong monotonicity if it is increasing in N_11 and N_00 when N_10 + N_01 > 0, and decreasing in N_10 and N_01 when N_11 + N_00 > 0.

Note that the conditions N_10 + N_01 > 0 and N_11 + N_00 > 0 are needed to avoid contradicting maximal and minimal agreement, respectively. This property is stronger than monotonicity, as it additionally allows for comparing similarities across different settings: we could compare the similarity between A_1, B_1 on n_1 elements with the similarity between A_2, B_2 on n_2 elements, even when n_1 ≠ n_2. This ability to compare similarity scores across different numbers of elements is similar to the Few data points property of SMI (Romano et al., 2014), which allows its scale to have a similar interpretation across different settings. We found several examples of indices that have Property 5 while not satisfying Property 5′. The Jaccard and Dice indices are constant w.r.t. N_00, so they are not strongly monotone, as the following check illustrates.
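The sketch below (hypothetical pair-count values and our own helper functions, for illustration only) fixes N_11, N_10, N_01 and varies only N_00: Jaccard returns the same value throughout, so it cannot be strongly monotone, whereas the correlation coefficient strictly increases with N_00.

def jaccard(n11, n10, n01, n00):
    return n11 / (n11 + n10 + n01)

def correlation(n11, n10, n01, n00):
    N = n11 + n10 + n01 + n00
    m_a, m_b = n11 + n10, n11 + n01
    return (N * n11 - m_a * m_b) / (m_a * (N - m_a) * m_b * (N - m_b)) ** 0.5

# Fix N11 = 4, N10 = 3, N01 = 2 and vary only N00.
for n00 in (5, 10, 20, 40):
    print(n00, round(jaccard(4, 3, 2, n00), 3), round(correlation(4, 3, 2, n00), 3))
# Jaccard stays at 4/9 for every value of n00, violating strong
# monotonicity; the correlation coefficient strictly increases.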
A more interesting example is the Adjusted Rand index, which may become strictly larger if we only increase N_10.

Property 6: Constant Baseline

This property is arguably the most significant one: it is less intuitive than the others and may lead to unexpected consequences in practice. Informally, a good similarity index should not give a preference to a candidate clustering B over another clustering C just because B has many or few clusters. This intuition can be formalized using random partitions: assume that we have some reference clustering A and two random partitions B and C. While intuitively both random guesses are equally bad approximations of A, it has been known throughout the literature (Albatineh et al., 2006; Vinh et al., 2009; 2010; Romano et al., 2014) that some indices tend to give higher scores to random guesses with a larger number of clusters. Ideally, we want the similarity value of a random candidate w.r.t. the reference partition to have a fixed expected value c_base (independent of A or the sizes of B). However, this requires a careful formalization of random candidates.

Definition 7. We say that a distribution over clusterings B is element-symmetric if, for every two clusterings B and B′ that have the same cluster sizes, B returns B and B′ with equal probabilities.

This allows us to define the constant baseline property.

Definition 8. An index V satisfies the constant baseline property if there exists a constant c_base so that, for any clustering A with 1 < k_A < n and every element-symmetric distribution B, the expectation of V(A, B) with B ∼ B equals c_base.

In the definition, we have excluded the cases where A is a trivial clustering consisting of either 1 or n clusters. Including them would cause contradictions with maximal agreement whenever we choose B as the (element-symmetric) distribution that returns A with probability 1. In Appendix D.1, we prove that to verify whether an index satisfies Definition 8, it suffices to check whether it holds for distributions B that are uniform over clusterings with fixed cluster sizes. From this equivalence, it also follows that Definition 8 is indeed symmetric. Note that the formulation in terms of element-symmetric distributions allows for a wide range of clustering distributions. For example, the cluster sizes could be drawn from a power-law distribution, which is often observed in practice (Arenas et al., 2004; Clauset et al., 2004). Constant baseline is extremely important in many practical applications: if an index violates this property, then its optimization may lead to undesirably biased results. For instance, if a biased index is used to choose the best algorithm among several candidates, then it is likely that the decision will be biased towards those that produce too large or too small clusters. This problem is often attributed to NMI (Vinh et al., 2009; Romano et al., 2014), but we found that almost all indices suffer from it. The only indices that satisfy the constant baseline property are the Adjusted Rand index, Correlation Coefficient, SMI, and AMI with c_base = 0, and Sokal & Sneath with c_base = 1/2. Interestingly, out of these five indices, three were specifically designed to satisfy this property, which made them less intuitive and resulted in other important properties being violated. The only condition under which the constant baseline property can safely be ignored is knowing all cluster sizes in advance. In this case, a bias towards particular cluster sizes would not affect decisions. However, we are not aware of any practical application where such an assumption can be made.
Note that knowing only the number of clusters is insufficient. We illustrate this in Appendix D.4, where we also show that the bias of indices violating the constant baseline is easy to identify empirically.

Property 6′: Asymptotic Constant Baseline

For pair-counting indices, a deeper analysis of the constant baseline property is possible. Let m_A = N_11 + N_10 and m_B = N_11 + N_01 be the numbers of intra-cluster pairs of A and B, respectively. If the distribution B is uniform over clusterings with given sizes, then m_A and m_B are both constant. Furthermore, the pair-counts N_10, N_01, N_00 are functions of N, m_A, m_B, and N_11. Hence, to find the expected value of the index, we need to inspect it as a function of the single random variable N_11. For a random pair, the probability that it is an intra-cluster pair of both clusterings is m_A m_B / N^2, so the expected values of the pair-counts are

N̄_11 = m_A m_B / N,  N̄_10 = m_A − m_A m_B / N,  N̄_01 = m_B − m_A m_B / N,  N̄_00 = N − m_A − m_B + m_A m_B / N.   (1)

We can use these values to define a weaker variant of constant baseline.

Definition 9. A pair-counting index V has an asymptotic constant baseline if there exists a constant c_base so that V(N̄_11, N̄_10, N̄_01, N̄_00) = c_base for all m_A, m_B ∈ (0, N).

In contrast to Definition 8, asymptotic constant baseline is very easy to verify: one can substitute the values from (1) into the index and check whether the obtained value is constant. Another important observation is that, under mild assumptions, V(N_11, N_10, N_01, N_00) converges in probability to V(N̄_11, N̄_10, N̄_01, N̄_00) as n grows, which justifies the name asymptotic constant baseline; see Appendix D.2 for more details. Note that the non-linear transformation of the Correlation Coefficient to the Correlation Distance makes the latter violate the constant baseline property. CD does, however, still have an asymptotic constant baseline of 1/2, and we prove in Appendix E.2 that the expectation in Definition 8 is very close to this value. (There is also another transformation of CC to a distance, √(2(1 − CC)); however, it can be shown that this transformation approximates a constant baseline less well than CD.)

Biases of Cluster Similarity Indices

Given the fact that there are so many biased indices, one may be interested in what kind of candidates they favor. While it is unclear how to formalize this concept for general validation indices, we can do so for pair-counting ones by analyzing them in terms of a single variable: the number of inter-cluster pairs. This value characterizes the granularity of a clustering: it is high when the clustering consists of many small clusters, and low when it consists of a few large clusters. Informally, we say that an index suffers from PairDec bias if it may favor fewer inter-cluster pairs; similarly, PairInc bias means that an index may prefer more inter-cluster pairs. These biases can be formalized as follows.

Definition 10. Let V be a pair-counting index, and define V^(s)(m_A, m_B) = V(N̄_11, N̄_10, N̄_01, N̄_00) for the expected pair-counts as defined in (1). We say that V suffers from PairDec bias if ∂V^(s)/∂m_B > 0 for some m_A, m_B ∈ (0, N), and from PairInc bias if ∂V^(s)/∂m_B < 0 for some m_A, m_B ∈ (0, N).

Note that this definition requires V^(s) to be differentiable in m_A and m_B; however, this is the case for all pair-counting indices in this work. Applying this definition to Jaccard immediately shows that Jaccard suffers from PairDec bias, while Rand suffers from both biases; the direction of the monotonicity for the bias of Rand is determined by the condition 2m_A > N. Performing the same computation for Wallace and Dice shows that both suffer from PairDec bias. Note that an index satisfying the asymptotic constant baseline property has neither of these biases, as V^(s)(m_A, m_B) = c_base. The sketch below carries out these substitutions symbolically.
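The following sketch uses sympy to substitute the expected pair-counts (1) into a few indices (the variable names are our own). It confirms that CC has an asymptotic constant baseline of 0, that Jaccard's V^(s) is increasing in m_B (PairDec bias), and that the sign of Rand's derivative flips at 2·m_A = N.

import sympy as sp

m_A, m_B, N = sp.symbols('m_A m_B N', positive=True)

# Expected pair-counts from (1) for a random candidate.
n11 = m_A * m_B / N
n10 = m_A - n11
n01 = m_B - n11
n00 = N - m_A - m_B + n11

rand = (n11 + n00) / N
jaccard = n11 / (n11 + n10 + n01)
cc = (N * n11 - m_A * m_B) / sp.sqrt(m_A * (N - m_A) * m_B * (N - m_B))

print(sp.simplify(cc))                     # 0: asymptotic constant baseline
print(sp.simplify(sp.diff(jaccard, m_B)))  # positive everywhere: PairDec bias
print(sp.simplify(sp.diff(rand, m_B)))     # (2*m_A - N)/N**2: sign flips at 2*m_A = N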
While there have been previous attempts to characterize types of biases (Lei et al., 2017), they mostly rely on analyses based on the number of clusters. However, our analysis shows that the number of clusters is not the correct variable for such a characterization of pair-counting indices. While having many clusters often goes hand in hand with having many inter-cluster pairs, this is not always the case: if there are significant differences between the cluster sizes (e.g., one large cluster and many small clusters), then the clustering may consist of many clusters while having relatively few inter-cluster pairs. We discuss this in more detail in Appendix E.3. Additionally, the experiments shown in Figures 3 and 4 of the Appendix show that in such cases most indices have a similar bias as if there were few clusters, which is consistent with our characterization of these biases in terms of the number of inter-cluster pairs.

Discussion and Conclusion

At this point, we better understand the theoretical properties of cluster similarity indices, so it is time to answer the question: which index is the best? Unfortunately, there is no simple answer, but we can make an informed decision. In this section, we sum up what we have learned, argue that some indices are strictly better alternatives to widely used ones, and give practical advice on how to choose a suitable index for a given application.

Among all properties discussed in this paper, monotonicity is the most crucial one. Violating this property is a fatal problem: such indices can prefer candidates that are strictly worse than others. Hence, we advise against using the well-known NMI_max, FMeasure, FNMI, and SMI indices. The constant baseline property is much less trivial but equally important: it addresses the problem of preferring some partitions only because they have small or large clusters. This property is essential unless all cluster sizes are known in advance. Since we are not aware of practical applications where all cluster sizes are known, we assume below that this is not the case. This requirement is satisfied by just a few indices, so we are only left with AMI, Adjusted Rand (AR), Correlation Coefficient (CC), and Sokal & Sneath (S&S). Additionally, Correlation Distance (CD) satisfies constant baseline asymptotically, and its deviations from the exact constant baseline are extremely small (see Appendix E.2). Let us note that among the remaining indices, AR is strictly dominated by CC and S&S, since it has neither minimal agreement nor strong monotonicity. Also, similarly to AMI, AR was specifically created to have a constant baseline, which made this index more complex and less intuitive than other pair-counting indices. Hence, we are only left with four indices: AMI, S&S, CC, and CD. According to their theoretical properties, all these indices are good, and any of them can be chosen. Figure 2 illustrates how a final decision can be made. First, one can decide whether the distance property is needed. For example, suppose one wants to cluster the algorithms themselves by comparing the partitions they produce. To use a metric clustering algorithm for this, the index has to be a distance; in this case, CD would be the best choice. If the distance property is not needed, one could base the decision on computational complexity.
In many large-scale applications, using clustering algorithms with higher-than-linear running time is infeasible. Understandably, it is undesirable if the computation of a validation score takes longer than the actual clustering algorithm. Another example is multiple comparisons: choosing the best algorithm among many candidates (differing, e.g., by a parameter value). If fast computation is required, then AMI is not a proper choice, and one has to choose between CC and S&S. Otherwise, all three indices are suitable according to our formal constraints. Let us discuss an (informal) criterion that may help to choose between AMI and the pair-counting alternatives. Different indices may favor a different balance between errors in small and large clusters. In particular, all pair-counting indices give larger weights to errors in large clusters: misclassifying one element in a cluster of size k costs k − 1 incorrect pairs. It is known (empirically) that information-theoretic indices do not have this property and give a higher weight to small clusters (Amigó et al., 2009). (This is an interesting aspect that has not received much attention in our research: we believe that the desired balance between large and small clusters may differ per application, and we are not aware of a proper formalization of this "level of balance" in a general form.) Amigó et al. (2009) argue that for their particular application (text clustering), it is desirable not to give a higher weight to large clusters. In contrast, there are applications where the opposite may hold. For instance, consider a system that groups user photos based on identity and shows these clusters to a user as a ranked list. In this case, a user is likely to investigate the largest clusters, consisting of known people, and would rarely spot an error in a small cluster. The same applies to any system that ranks the clusters, e.g., to news aggregators. Based on what is desirable for a particular application, one can choose between AMI and the pair-counting CC and S&S. The final decision between CC and S&S is hard to make, since they are equally good in terms of their theoretical properties. Interestingly, although some works (Choi et al., 2010; Lei et al., 2017) list Pearson correlation as a cluster similarity index, it has not received the attention that our results suggest it deserves, similarly to S&S. First, both indices are interpretable: CC is a correlation between the two incidence vectors, which is a very natural concept, and S&S is the average of precision, recall (for binary classification of pairs), and their inverted counterparts, which can also be intuitively understood. Also, CC and S&S usually agree in practice: in Tables 1 and 7, we can see that they have the largest agreement. Hence, one can take either of these indices. Another option would be to check whether there are situations where these indices disagree and, if this happens, perform an experiment similar to what we did in Section 3 for news aggregation. While some properties listed in Tables 3 and 4 are not mentioned in the discussion above, they can be important for particular applications. For instance, maximal and minimal agreement are useful for interpretability, but they can also be essential if some operations are performed over the index values, e.g., averaging the scores of different algorithms. Symmetry can be necessary if there is no "gold standard" partition, but algorithms are compared only to each other.
Finally, let us remark that in an early version of this paper, we conjectured that the constant baseline and distance properties are mutually exclusive. This turns out to be true: in ongoing work, we prove an impossibility theorem: for pair-counting indices, monotonicity, distance, and constant baseline cannot be satisfied simultaneously.

A. Further Related Work

Several attempts at a comparative analysis of cluster similarity indices have been made in the literature, both in the machine learning and complex networks communities. In particular, the problem of indices favoring clusterings with smaller or larger clusters has been identified (Albatineh et al., 2006; Vinh et al., 2009; 2010; Lei et al., 2017). The most popular approach to resolving the bias of an index is to subtract its expected value and normalize the resulting quantity to obtain an index that satisfies the maximal agreement property. This approach has led to 'adjusted' indices such as AR (Hubert & Arabie, 1985) and AMI (Vinh et al., 2009). In Albatineh et al. (2006), the family of pair-counting indices L is introduced for which adjusted forms can be computed easily. This family corresponds to the set of all pair-counting indices that are linear functions of N_11 for fixed N_11 + N_10 and N_11 + N_01. In (Romano et al., 2016), a generalization of information-theoretic indices by the Tsallis q-entropy is given, and this is shown to correspond to pair-counting indices for q = 2. Formulas are provided for adjusting these generalized indices for chance. A disadvantage of this adjustment scheme is that an index can be normalized in many ways, while it is difficult to grasp the differences between these normalizations intuitively. Amelio & Pizzuti (2015) multiply NMI by a penalty factor that decreases with the difference in the number of clusters. In summary, all these works take a popular biased index and 'patch' it to get rid of this bias. This approach has two disadvantages: firstly, these patches often introduce new problems (e.g., FNMI and SMI fail monotonicity), and secondly, the resulting index is usually less interpretable than the original. We have taken a different approach in our work: instead of patching existing indices, we analyze previously introduced indices to see whether they satisfy more properties. Our analysis shows that AR is dominated by Pearson correlation, which was introduced more than 100 years before AR. Therefore, there was no need to construct AR from Rand in the first place. In Lei et al. (2017), the biases of pair-counting indices are characterized. They define these biases as a preference towards either few or many clusters. They prove that the direction of Rand's bias depends on the Havrda-Charvat entropy of the reference clustering. In the present work, we show that the number of clusters is not an adequate quantity for expressing these biases. We introduce methods to easily analyze the bias of any pair-counting index and simplify the condition for the direction of Rand's bias to m_A < N/2. A paper closely related to the current research (Amigó et al., 2009) formulates several constraints (axioms) for cluster similarity indices. Their cluster homogeneity is a weaker analog of our monotonicity w.r.t. perfect splits, while their cluster equivalence is equivalent to our monotonicity w.r.t. perfect merges. The third, rag bag, constraint is motivated by a subjective claim that "introducing disorder into a disordered cluster is less harmful than introducing disorder into a clean cluster".
While this is important for their particular application (text clustering), we found no other work that deemed this constraint necessary; hence, we disregard it in the current research. The last constraint by Amigó et al. (2009) concerns the balance between making errors in large and small clusters. Though this is an interesting aspect that has not received much attention in our research, this constraint poses one particular balance, while we believe that the desired balance may differ per application. Hence, this property seems to be non-binary, and we are not aware of a proper formalization of this "level of balance" in a general form; we therefore do not include it in our list of formal properties. The most principal difference of our work compared to Amigó et al. (2009) is the constant baseline property, which was not analyzed in their work. We find this property extremely important, while it is failed by most of the widely used indices, including their BCubed. To conclude, our research gives a more comprehensive list of constraints and focuses on those that are desirable in a wide range of applications. We also cover all similarity indices often used in the literature and give formal proofs for all index-property combinations. A property similar to our monotonicity is also given in Meilă (2007), where the similarity between clusterings A and B is upper-bounded by the similarity between A and A ⊗ B (as defined in Section C.4). One can show that this property is implied by our monotonicity but not vice versa, i.e., the variant proposed by Meilă (2007) is weaker. Our analysis of monotonicity generalizes and unifies previous approaches to this problem; see Theorem 2, which relates consistent improvements to perfect splits and merges. While we focus on external cluster similarity indices that compare a candidate partition with a reference one, there are also internal similarity measures that estimate the quality of partitions with respect to the internal structure of the data (e.g., Silhouette, Hubert-Gamma, Dunn, and many other indices). Kleinberg (2002) used an axiomatic approach for internal measures and proved an impossibility theorem: there are three simple and natural constraints such that no internal clustering measure can satisfy all of them. More work in this direction can be found in, e.g., Ben-David & Ackerman (2008). In network analysis, internal measures compare a candidate partition with the underlying graph structure. They quantify how well a community structure (given by a partition) fits the graph and are often referred to as goodness or quality measures. The most well-known example is modularity (Newman & Girvan, 2004). Axioms that these measures ought to satisfy are given in (Ben-David & Ackerman, 2008; Van Laarhoven & Marchiori, 2014). Note that all pair-counting indices discussed in this paper can also be used for graph-partition similarity, as we discuss in Section B.3.

B.1. General Indices

Here we give the definitions of the indices listed in Table 3. We define the contingency variables as n_ij = |A_i ∩ B_j|. We note that all indices discussed in this paper can be expressed as functions of these contingency variables. The F-Measure is defined as the harmonic mean of recall and precision. Recall is defined as r(A, B) = (1/n) Σ_i max_j n_ij, and precision is its symmetric counterpart r(B, A). In (Amigó et al., 2009), recall is redefined as r′(A, B) = (1/n) Σ_{i,j} n_ij^2 / |A_i|, and BCubed is defined as the harmonic mean of r′(A, B) and r′(B, A). A small self-contained sketch of these set-overlap definitions follows.
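The sketch below (toy labelings and helper names are our own) computes F-Measure and BCubed from the contingency variables, assuming the element-wise BCubed recall r′(A, B) = (1/n) Σ n_ij^2 / |A_i| given above.

from collections import Counter

def contingency(labels_a, labels_b):
    # n_ij = number of elements in cluster i of A and cluster j of B
    return Counter(zip(labels_a, labels_b))

def f_recall(labels_a, labels_b):
    # r(A, B) = (1/n) * sum_i max_j n_ij
    best = {}
    for (i, _), n_ij in contingency(labels_a, labels_b).items():
        best[i] = max(best.get(i, 0), n_ij)
    return sum(best.values()) / len(labels_a)

def bcubed_recall(labels_a, labels_b):
    # r'(A, B) = (1/n) * sum_{i,j} n_ij**2 / |A_i|
    sizes = Counter(labels_a)
    cont = contingency(labels_a, labels_b)
    return sum(n_ij ** 2 / sizes[i] for (i, _), n_ij in cont.items()) / len(labels_a)

def harmonic(r, p):
    return 2 * r * p / (r + p)

A = [0, 0, 0, 1, 1, 2]
B = [0, 0, 1, 1, 1, 2]
print(harmonic(f_recall(A, B), f_recall(B, A)))            # F-Measure
print(harmonic(bcubed_recall(A, B), bcubed_recall(B, A)))  # BCubed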
The remainder of the indices are information-theoretic and require some additional definitions. Let p_1, ..., p_l be a discrete distribution (i.e., all values are nonnegative and sum to 1). The Shannon entropy is then defined as H(p) = −Σ_i p_i ln p_i. The entropy of a clustering is defined as the entropy of the cluster-label distribution of a random item, i.e., H(A) = −Σ_i (|A_i|/n) ln(|A_i|/n). The mutual information MI(A, B) = Σ_{i,j} (n_ij/n) ln(n · n_ij / (|A_i| |B_j|)) between A and B is upper-bounded by H(A) and H(B), which gives multiple possibilities to normalize the mutual information. In this paper, we discuss two normalizations: normalization by the average of the entropies, (H(A) + H(B))/2, and normalization by the maximum of the entropies, max{H(A), H(B)}. We refer to the corresponding indices as NMI and NMI_max, respectively. Fair NMI is a variant of NMI that includes a factor penalizing large differences in the number of clusters (Amelio & Pizzuti, 2015); it is given by FNMI(A, B) = e^(−|k_A − k_B| / k_A) · NMI(A, B). In this definition, NMI may be normalized in various ways; we note that a different normalization would not result in more properties being satisfied. SMI is defined as SMI(A, B) = (MI(A, B) − E[MI(A, B)]) / σ(MI(A, B)), where σ denotes the standard deviation. Calculating the expected value and standard deviation of the mutual information is nontrivial and requires significantly more computational power than the other indices; for this, we refer to the original paper (Romano et al., 2014). Note that this index is symmetric, since it does not matter whether we keep A constant while randomly permuting B or keep B constant while randomly permuting A. A compact sketch of these information-theoretic definitions is given below.
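The following illustrative implementation (our own helper names; SMI is omitted since it requires the expected mutual information) computes the entropies, the mutual information, and the NMI variants directly from two labelings.

from math import exp, log
from collections import Counter

def entropy(labels):
    n = len(labels)
    return -sum((c / n) * log(c / n) for c in Counter(labels).values())

def mutual_information(labels_a, labels_b):
    # MI(A, B) = sum_{i,j} (n_ij/n) * ln(n * n_ij / (|A_i| * |B_j|))
    n = len(labels_a)
    p_a, p_b = Counter(labels_a), Counter(labels_b)
    return sum((n_ij / n) * log(n * n_ij / (p_a[i] * p_b[j]))
               for (i, j), n_ij in Counter(zip(labels_a, labels_b)).items())

def nmi_avg(a, b):
    return 2 * mutual_information(a, b) / (entropy(a) + entropy(b))

def nmi_max(a, b):
    return mutual_information(a, b) / max(entropy(a), entropy(b))

def fnmi(a, b):
    k_a, k_b = len(set(a)), len(set(b))
    return exp(-abs(k_a - k_b) / k_a) * nmi_avg(a, b)

A = [0, 0, 0, 1, 1, 2]
B = [0, 0, 1, 1, 1, 2]
print(nmi_avg(A, B), nmi_max(A, B), fnmi(A, B))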
B.2. Pair-counting Indices and Their Equivalences

Pair-counting similarity indices are defined in Table 5. Table 6 lists linearly equivalent indices (see Definition 2). Note that our linear equivalence differs from the less restrictive monotonous equivalence given in (Batagelj & Bren, 1995). In the current work, we have to restrict to linear equivalence, as the constant baseline property is not invariant to non-linear transformations.

B.3. Defining the Subclass of Pair-counting Indices

From Definition 1, it follows that a pair-counting index is a function of two binary vectors A, B of length N. Note that this binary-vector representation has some redundancy: whenever u, v and v, w form intra-cluster pairs, we know that u, w must also be an intra-cluster pair. Hence, not every binary vector of length N represents a clustering. The class of N-dimensional binary vectors is, however, isomorphic to the class of undirected graphs on n vertices. Therefore, pair-counting indices are also able to measure the similarity between graphs. For example, for an undirected graph G = (V, E), one can consider its incidence vector G = (1{{v, w} ∈ E})_{v,w ∈ V}. Hence, pair-counting indices can be used to measure the similarity between two graphs or between a graph and a clustering, so one may see a connection between graph and cluster similarity indices. For example, the Mirkin metric is a pair-counting index that coincides with the Hamming distance between the edge-sets of two graphs (Donnat & Holmes, 2018). Another example is the Jaccard graph distance, which turns out to be more appropriate for comparing sparse graphs (Donnat & Holmes, 2018). Thus, all pair-counting indices and their properties discussed in the current paper can also be applied to graph-graph and graph-partition similarities. In this section, we show that the subclass of pair-counting similarity indices can be uniquely defined by the property of being pair-symmetric. For two graphs G_1 and G_2, let M_{G_1 G_2} denote the N × 2 matrix that is obtained by concatenating their adjacency vectors. Let us write V^(G)_M(M_{G_1 G_2}) for the similarity between two graphs G_1, G_2 according to some graph similarity index V^(G). We will now characterize all pair-counting similarity indices as a subclass of the class of similarity indices between undirected graphs.

Definition 11. We define a graph similarity index V^(G) to be pair-symmetric if V^(G)_M is invariant under permutations of the rows of its argument.

Lemma 1. The class of pair-symmetric graph similarity indices coincides with the class of pair-counting cluster similarity indices.

Proof. A matrix is an ordered list of its rows, and an unordered list is a multiset. Hence, when we disregard the ordering of the rows of M_{G_1 G_2}, we obtain the multiset of rows, which is fully described by the pair-counts N_11, N_10, N_01, N_00. A pair-symmetric index is therefore precisely a function of these pair-counts.

C. Checking Properties for Indices

In this section, we check all non-trivial properties for all indices. The properties of symmetry, maximal/minimal agreement, and asymptotic constant baseline can trivially be tested by simply checking V(B, A) = V(A, B), V(A, A) = c_max, V(0, N_10, N_01, 0) = c_min, and V(N̄_11, N̄_10, N̄_01, N̄_00) = c_base, respectively. For pair-counting indices, we will frequently use the notation p_AB = N_11/N, p_A = (N_11 + N_10)/N, p_B = (N_11 + N_01)/N and write V^(p)(p_AB, p_A, p_B) instead of V(N_11, N_10, N_01, N_00).

C.1. Distance

C.1.1. POSITIVE CASES

NMI and VI. In (Vinh et al., 2010) it is proven that for max-normalization, 1 − NMI is a distance, while in (Meilă, 2007) it is proven that VI is a distance. Rand. The Mirkin metric 1 − R corresponds to a rescaled version of the size of the symmetric difference between the sets of intra-cluster pairs; the symmetric difference is known to be a distance metric. Jaccard. In (Kosub, 2019), it is proven that the Jaccard distance 1 − J is indeed a distance. Correlation Distance. In Theorem 1, it is proven that the Correlation Distance is indeed a distance.

C.1.2. NEGATIVE CASES

To prove that an index that satisfies symmetry and maximal agreement is not linearly transformable to a distance metric, we only need to disprove the triangle inequality for one instance of its equivalence class that is nonnegative and equals zero for maximal agreement. One can construct partitions A, B, C such that p_AB = p_BC = 1/6 and p_AC = 0, while p_A = p_C = 1/6 and p_B = 1/3. By substituting these variables, one can see that d(A, C) > d(A, B) + d(B, C) holds for each of these indices, contradicting the triangle inequality. The same A, B, and C also form a counter-example for AMI.

C.2. Linear Complexity

We will frequently make use of the following lemma:

Lemma 2. The nonzero values of n_ij can be computed in O(n).

Proof. We store these nonzero values in a hash table that maps the pairs (i, j) to their value n_ij. These values are obtained by iterating through all n elements and incrementing the corresponding value of n_ij. For hash tables, searches and insertions are known to have amortized complexity O(1), meaning that any sequence of n such actions has a worst-case running time of O(n), from which the result follows. The sketch below illustrates this idea.
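This illustrative Python sketch (our own helper names) follows the idea of Lemma 2: one O(n) pass builds the sparse contingency table in a hash table, after which all four pair-counts follow from standard identities on the cluster sizes.

from collections import Counter

def pair_counts_linear(labels_a, labels_b):
    # One O(n) pass builds the hash tables; the post-processing touches
    # only the (at most n) nonzero contingency cells.
    n = len(labels_a)
    N = n * (n - 1) // 2
    pairs = lambda c: c * (c - 1) // 2
    n11 = sum(pairs(c) for c in Counter(zip(labels_a, labels_b)).values())
    m_a = sum(pairs(c) for c in Counter(labels_a).values())
    m_b = sum(pairs(c) for c in Counter(labels_b).values())
    return n11, m_a - n11, m_b - n11, N - m_a - m_b + n11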
SMI and AMI. Both of these require the computation of the expected mutual information. It is known (Romano et al., 2016) that this has a worst-case running time of O(n · max{k_A, k_B}), while max{k_A, k_B} can be O(n).

C.3. Strong Monotonicity

C.3.1. POSITIVE CASES

Correlation Distance. The Correlation Distance satisfies strong monotonicity, as it is a monotone transformation of the correlation coefficient, which meets the property. Sokal & Sneath. All four fractions are nondecreasing in N_11, N_00 and nonincreasing in N_10, N_01, while for each of the variables there is one fraction that satisfies the monotonicity strictly, so that the index is strongly monotone. Rand Index. For the Rand index, it can easily be seen from the form of the index that it is increasing in N_11, N_00 and decreasing in N_10, N_01, so that it meets the property.

C.3.2. NEGATIVE CASES

Jaccard, Wallace, Dice. All three indices are constant w.r.t. N_00; therefore, they do not satisfy strong monotonicity.

C.4. Monotonicity

C.4.1. POSITIVE CASES

Jaccard and Dice. It can easily be seen that these indices are increasing in N_11 while decreasing in N_10, N_01. For N_00, we note that whenever N_00 gets increased, either N_10 or N_01 must decrease, resulting in an increase of the index. Therefore, these indices satisfy monotonicity. Adjusted Rand. Note that for b, b + d > 0, it holds that (a + c)/(b + d) > a/b whenever c > ad/b. We let a, b denote the numerator and denominator of Adjusted Rand, while c, d denote their change when incrementing N_11 or N_00 while decrementing N_10 or N_01. For Adjusted Rand, we have a = N_11 − (1/N)(N_11 + N_10)(N_11 + N_01) and b = a + (1/2)(N_10 + N_01). Because of this, when we increment either N_11 or N_00 while decrementing either N_10 or N_01, we get d = c − 1/2. Hence, we need to prove c > a(c − 1/2)/b or, equivalently, bc − a(c − 1/2) > 0. For simplicity, we rewrite this in terms of p_AB = N_11/N, p_A = (N_11 + N_10)/N, and p_B = (N_11 + N_01)/N. If we increment N_00 while decrementing either N_10 or N_01, then c ∈ {p_A, p_B}; the symmetry of AR allows us to w.l.o.g. assume that c = p_A. When p_A ≤ 1/2, the resulting expression is clearly positive. For the case p_A > 1/2, we bound p_AB ≤ p_A, which again yields a positive expression. This proves monotonicity for increasing N_00. When incrementing N_11 while decrementing either N_10 or N_01, we get c ∈ {1 − p_A, 1 − p_B}; again, we assume w.l.o.g. that c = 1 − p_A, and an analogous computation completes the proof. BCubed. Note that a perfect merge increases BCubed recall while leaving BCubed precision unchanged, and that a perfect split increases precision while leaving recall unchanged. Hence, the harmonic mean increases.

C.4.2. NEGATIVE CASES

FMeasure. A numerical counter-example shows that FMeasure violates monotonicity. Wallace. Let k_A = 1 and let k_B > 1. Then any merge of B is a perfect merge, but no increase occurs, since W_1(A, B) = 1.

C.5. Constant Baseline

C.5.1. POSITIVE CASES

AMI and SMI. Both of these indices satisfy the constant baseline by construction, since the expected mutual information is subtracted from the actual mutual information in the numerator. Adjusted Rand, Correlation Coefficient, and Sokal & Sneath. These indices all satisfy the asymptotic constant baseline while being linear in p_AB for fixed p_A, p_B. Thus, by linearity of expectation, the expected value equals the asymptotic constant.

C.5.2. NEGATIVE CASES

For all the following indices, we analyse the counter-example given by k_A = k_B = n − 1. For each index, we compute the expected value and show that it is not constant. All of these indices satisfy the maximal agreement property, and maximal agreement is achieved with probability 1/N (the probability that the single intra-cluster pair of A coincides with that of B). Furthermore, each case where the intra-cluster pairs do not coincide results in the same contingency variables and hence the same value of the index; we refer to this value as c_n(V). Therefore, the expected value only has to be taken over two values and is given by E = (1/N) c_max + (1 − 1/N) c_n(V). For each of these indices, we conclude that this is a non-constant function of n, so that the index does not satisfy the constant baseline property. Jaccard and Dice. For both these indices we have c_max = 1 and c_n(V) = 0 (as N_11 = 0 whenever the intra-cluster pairs do not coincide).
Hence, E = 1/N, which is not constant. Rand and Wallace. As both functions are linear in N_11 for fixed m_A = N_11 + N_10 and m_B = N_11 + N_01, we can compute the expected value by simply substituting N_11 = m_A m_B / N. This results in expected values 1 − 2/N + 2/N^2 and 1/N for Rand and Wallace, respectively, which are both non-constant. Correlation Distance. Here c_max = 0 and c_n(CD) = (1/π) arccos(−1/(N − 1)), so that the expected value is E = (1 − 1/N) · (1/π) arccos(−1/(N − 1)). This is non-constant (it evaluates to approximately 0.44 and 0.47 for n = 3 and 4, respectively). Note that this expected value converges to 1/2 for n → ∞, which is indeed the asymptotic baseline of the index. FNMI and NMI. Note that in this case k_A = k_B, so the penalty term of FNMI equals 1 and FNMI coincides with NMI. Again, c_max = 1. For the case where the intra-cluster pairs do not coincide, the joint entropy equals H(A, B) = ln(n), while each of the marginal entropies equals H(A) = H(B) = ln(n) − (2/n) ln 2. Note that as H(A) = H(B), all normalizations of MI are equal, so this counter-example proves that none of the variants of (F)NMI satisfy the constant baseline property. Variation of Information. In this case c_max = 0. Using the entropies from the NMI computations, we conclude that E = (1 − 1/N) · (4/n) ln 2, which is again non-constant. F-measure. Here c_max = 1. In the case where the intra-cluster pairs do not coincide, all contingency variables are either one or zero, so both recall and precision equal 1 − 1/n, giving c_n(FM) = 1 − 1/n. This results in the non-constant expected value E = 1/N + (1 − 1/N)(1 − 1/n). Note that because recall equals precision in both cases, this counter-example also works for averages other than the harmonic one. BCubed. Again, c_max = 1. In the other case, recall and precision are again equal. Because for BCubed the contribution of cluster i is given by (1/n) max_j{n_ij^2}/|A_i|, the contributions of the one- and two-clusters are 1/n and 1/(2n), respectively. Hence, c_n(BC) = (n − 2)/n + 1/(2n) = 1 − 3/(2n), and we get the non-constant E = 1/N + (1 − 1/N)(1 − 3/(2n)). We note that again, this counter-example can be extended to non-harmonic averages of the BCubed recall and precision.

D.2. Convergence to the Asymptotic Baseline

All indices in Table 4 are scale-invariant. For such indices, we write V^(p)(p_AB, p_A, p_B). Note that when B ∼ C(s) for some s, the values p_A, p_B are constants while p_AB is a random variable; we therefore write P_AB to stress that it is a random variable.

Theorem 3. Let V be a scale-invariant pair-counting index, and consider a sequence of clusterings A^(n) and cluster-size specifications s^(n) with B^(n) ∼ C(s^(n)). Then V^(p)(P_AB^(n), p_A, p_B) − V^(p)(p_A p_B, p_A, p_B) converges to 0 in probability.

Proof. We prove the equivalent statement that P_AB^(n) − p_A p_B converges to 0 in probability, so that the claim follows from the continuous mapping theorem. Chebyshev's inequality bounds the deviation probability by Var(N_11)/(ε^2 N^2), and the last step follows from the fact that Var(N_11) = o(n^4), as we prove in the remainder of this section. Even though in the definition A is fixed while B is randomly permuted, it is convenient to equivalently consider both clusterings as randomly permuted for this proof. To show that Var(N_11) = o(n^4), we inspect the second moment E[N_11^2], which expands into a sum over ordered pairs of element-pairs (e_1, e_2). We split this sum according to whether e_1 = e_2, whether e_1 and e_2 share exactly one element (i.e., involve three distinct elements v_1, v_2, v_3), or whether e_1 ∩ e_2 = ∅. The first two groups contain O(n^2) and O(n^3) terms, respectively, and are therefore o(n^4). For the remaining group of disjoint pairs, a direct computation shows that its contribution differs from E[N_11]^2 by o(n^4). Hence Var(N_11) = E[N_11^2] − E[N_11]^2 = o(n^4), as required.

D.3. Statistical Tests for Constant Baseline

In this section, we provide two statistical tests: one to check whether an index V satisfies the constant baseline property and another to check whether V has a selection bias towards certain cluster sizes. Checking constant baseline. Given a reference clustering A and a number of cluster-size specifications s_1, ..., s_k, we test the null hypothesis that the expectation of V(A, B_i) with B_i ∼ C(s_i) is constant in i = 1, ..., k. We do so using one-way Analysis Of Variance (ANOVA). For each cluster-size specification, we generate r clusterings. Although ANOVA assumes the data to be normally distributed, it is known to be robust for sufficiently large groups (i.e., large r). Checking selection bias. In (Romano et al., 2014), it is observed that some indices with a constant baseline do have a selection bias: when we have a pool of random clusterings of various sizes and select the one that has the highest score w.r.t. a reference clustering, there is a bias towards selecting certain cluster sizes. We test this bias in the following way: given a reference clustering A and cluster-size specifications s_1, ..., s_k, we repeatedly generate B_1 ∼ C(s_1), ..., B_k ∼ C(s_k). The null hypothesis is that each of these clusterings B_i has an equal chance of maximizing V(A, B_i). We test this hypothesis by generating r pools and using the Chi-squared test. We emphasize that these statistical tests cannot prove whether an index satisfies the property or has a bias. Both return a confidence level p with which the null hypothesis can be rejected. Furthermore, for an index not to have these biases, the null hypothesis should be true for all choices of A, s_1, ..., s_k, which is impossible to verify statistically. The statistical tests have been implemented in Python, and the code is available at https://github.com/MartijnGosgens/validation_indices. A simplified sketch of the two tests is given below.
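The sketch below shows the shape of the two tests using scipy; it is a simplified illustration, not the implementation from the repository above, and the function names and setup are our own assumptions. The index argument is any function taking two label arrays.

import numpy as np
from scipy import stats

rng = np.random.default_rng(0)

def random_clustering(sizes):
    # Uniformly random clustering with the given cluster sizes.
    labels = np.repeat(np.arange(len(sizes)), sizes)
    return rng.permutation(labels)

def anova_constant_baseline(index, labels_a, size_specs, r=500):
    groups = [[index(labels_a, random_clustering(s)) for _ in range(r)]
              for s in size_specs]
    return stats.f_oneway(*groups).pvalue

def chi2_selection_bias(index, labels_a, size_specs, r=500):
    wins = np.zeros(len(size_specs))
    for _ in range(r):
        scores = [index(labels_a, random_clustering(s)) for s in size_specs]
        wins[int(np.argmax(scores))] += 1
    return stats.chisquare(wins).pvalue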
D.3. Statistical Tests for Constant Baseline

In this section, we provide two statistical tests: one to check whether an index $V$ satisfies the constant baseline property, and another to check whether $V$ has a selection bias towards certain cluster sizes.

Checking constant baseline. Given a reference clustering $A$ and a number of cluster-size specifications $s_1, \dots, s_k$, we test the null hypothesis that $\mathbb{E}_{B \sim C(s_i)}[V(A, B)]$ is constant in $i = 1, \dots, k$. We do so by using one-way analysis of variance (ANOVA). For each cluster-size specification, we generate $r$ clusterings. Although ANOVA assumes the data to be normally distributed, it is known to be robust for sufficiently large groups (i.e., large $r$).

Checking selection bias. In (Romano et al., 2014) it is observed that some indices with a constant baseline nevertheless have a selection bias: when we have a pool of random clusterings of various sizes and select the one that has the highest score w.r.t. a reference clustering, there is a bias towards selecting certain cluster sizes. We test this bias in the following way: given a reference clustering $A$ and cluster-size specifications $s_1, \dots, s_k$, we repeatedly generate $B_1 \sim C(s_1), \dots, B_k \sim C(s_k)$. The null hypothesis is that each of these clusterings $B_i$ has an equal chance of maximizing $V(A, B_i)$. We test this hypothesis by generating $r$ pools and using the chi-squared test.

We emphasize that these statistical tests cannot prove whether an index satisfies the property or has a bias. Both return a confidence level $p$ with which the null hypothesis can be rejected. Furthermore, for an index to be free of these biases, the null hypothesis should be true for all choices of $A, s_1, \dots, s_k$, which is impossible to verify statistically.

The statistical tests have been implemented in Python and the code is available at https://github.com/MartijnGosgens/validation_indices. We applied the tests to the indices of Tables 3 and 4. We chose $n = 50, 100, 150, \dots, 1000$ and $r = 500$. For the cluster sizes, we define the balanced cluster sizes $BS(n, k)$ to be the cluster-size specification for $k$ clusters, of which $n - k\lfloor n/k \rfloor$ clusters have size $\lceil n/k \rceil$ while the remainder have size $\lfloor n/k \rfloor$. Then we choose $A^{(n)}$ to be a clustering with sizes $BS(n, n^{0.5})$ and consider candidates with sizes $s_i^{(n)}$. For each $n$, the statistical test returns a p-value. We use Fisher's method to combine these p-values into one single p-value and then reject the constant baseline if $p < 0.05$. The obtained results agree with Tables 3 and 4 except for Correlation Distance, which is so close to having a constant baseline that the tests are unable to detect it.
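A condensed sketch of the two tests follows; the full implementation lives in the repository linked above. The helpers sample_clustering and index are placeholders standing in for sampling from $C(s_i)$ and for a similarity index, while the SciPy routines (f_oneway, chisquare, combine_pvalues) are standard.

import numpy as np
from scipy import stats

def constant_baseline_pvalue(A, sizes_specs, index, sample_clustering, r=500):
    # One-way ANOVA: H0 says E[index(A, B)] is the same for every spec s_i.
    groups = [[index(A, sample_clustering(s)) for _ in range(r)]
              for s in sizes_specs]
    return stats.f_oneway(*groups).pvalue

def selection_bias_pvalue(A, sizes_specs, index, sample_clustering, r=500):
    # Chi-squared test: H0 says each B_i ~ C(s_i) is equally likely to win
    # (i.e., to maximize index(A, B_i)) within a pool of one candidate per spec.
    wins = np.zeros(len(sizes_specs))
    for _ in range(r):
        scores = [index(A, sample_clustering(s)) for s in sizes_specs]
        wins[int(np.argmax(scores))] += 1
    return stats.chisquare(wins).pvalue

def combined_pvalue(pvalues):
    # Fisher's method, used to merge the per-n p-values into a single one.
    return stats.combine_pvalues(pvalues, method="fisher")[1]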
D.4. Illustrating Significance of Constant Baseline

In this section, we conduct two experiments illustrating the biases of various indices; together they allow us to identify the direction of the bias in different situations. Our reference clustering corresponds to the expert-annotated clustering of the production experiment described in Section 3 and Appendix F.3, where $n = 924$ items are grouped into $k_A = 431$ clusters (305 of which consist of a single element).

In the first experiment, we randomly cluster the items into $k$ approximately equally sized clusters for various $k$. Figure 3 shows the averages and 90% confidence bands for each index. It can be seen that some indices (e.g., NMI and Rand) have a clearly increasing baseline while others (e.g., Jaccard and VI) have a decreasing baseline. In contrast, all unbiased indices have a constant baseline.

In Section 4.6 we argued that these biases cannot be described in terms of the number of clusters alone. Our second experiment illustrates that the bias also heavily depends on the sizes of the clusters. In this case, items are randomly clustered into 32 clusters, 31 of which are "small" clusters of size $s$, while one cluster has size $n - 31s$, where $s$ is varied between 1 and 28. In Figure 4, the biases are clearly visible. This shows that, even when fixing the number of clusters, biased indices may heavily distort an experiment's outcome. Finally, recall that we have proven that the baseline of CD is only asymptotically constant. Figures 3 and 4 show that for practical purposes its baseline can be considered constant.

E.1. Proof of Theorem 2

Let $B'$ be an $A$-consistent improvement of $B$. We define $B \otimes B'$ as the meet of $B$ and $B'$, i.e., the partition whose clusters are the non-empty intersections of clusters of $B$ with clusters of $B'$, and show that $B \otimes B'$ can be obtained from $B$ by a sequence of perfect splits, while $B'$ can be obtained from $B \otimes B'$ by a sequence of perfect merges. Indeed, the assumption that $B'$ does not introduce new disagreeing pairs guarantees that any $B_j \in B$ can be split into $B_j \cap B'_1, \dots, B_j \cap B'_{k_{B'}}$ without splitting over any intra-cluster pairs of $A$.

Let us prove that $B'$ can be obtained from $B \otimes B'$ by perfect merges. Suppose there are two clusters $B_1, B_2 \in B \otimes B'$ such that both are subsets of some $B'_j$. Assume that this merge is not perfect; then there must be $v \in B_1$, $w \in B_2$ such that $v, w$ are in different clusters of $A$. As $v, w$ are in the same cluster of $B'$, it follows from the definition of $B \otimes B'$ that $v, w$ must be in different clusters of $B$. Hence, $(v, w)$ is an inter-cluster pair in both $A$ and $B$, while it is an intra-cluster pair of $B'$, contradicting the assumption that $B'$ is an $A$-consistent improvement of $B$. This concludes the proof.

E.2. Deviation of CD from Constant Baseline

Theorem. Given a ground truth $A$ with a number of clusters $1 < k_A < n$, a cluster-size specification $s$ and a random partition $B \sim C(s)$, the expected difference between Correlation Distance and its baseline is given by
$$\mathbb{E}[CD(A, B)] - \frac{1}{2} = -\frac{1}{\pi} \sum_{k=1}^{\infty} \frac{\binom{2k}{k}}{4^k (2k+1)}\, \mathbb{E}\big[CC(A, B)^{2k+1}\big].$$

Proof. We take the Taylor expansion of the arccosine around $CC(A, B) = 0$ and get
$$CD(A, B) = \frac{1}{\pi}\arccos(CC(A, B)) = \frac{1}{2} - \frac{1}{\pi} \sum_{k=0}^{\infty} \frac{\binom{2k}{k}}{4^k (2k+1)}\, CC(A, B)^{2k+1}.$$
We take the expectation of both sides and note that the first moment of $CC$ equals zero, so that the sum starts at $k = 1$. For $B \sim C(s)$ and large $n$, the value $CC(A, B)$ is concentrated around 0. This explains why, in practice, the mean tends to be very close to the asymptotic baseline.
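The concentration of $CC$ around 0, and hence the nearness of the mean of $CD$ to $\frac{1}{2}$, can be checked with a few lines of NumPy. This is a sketch under the assumption of a balanced reference clustering; pair_fractions and correlation_distance are illustrative helper names.

import numpy as np

rng = np.random.default_rng(0)

def pair_fractions(a, b):
    # p_A, p_B and p_AB as fractions of intra-cluster pairs.
    n = len(a)
    iu = np.triu_indices(n, 1)
    same_a = (a[:, None] == a[None, :])[iu]
    same_b = (b[:, None] == b[None, :])[iu]
    return same_a.mean(), same_b.mean(), (same_a & same_b).mean()

def correlation_distance(a, b):
    p_a, p_b, p_ab = pair_fractions(a, b)
    cc = (p_ab - p_a * p_b) / np.sqrt(p_a * (1 - p_a) * p_b * (1 - p_b))
    return np.arccos(cc) / np.pi

n, k = 100, 10
a = np.arange(n) % k  # balanced reference clustering
cds = [correlation_distance(a, rng.permutation(a)) for _ in range(2000)]
print(np.mean(cds) - 0.5)  # small, and shrinking with n, per the theorem above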
In (Lei et al., 2017), it is observed that the direction of an index's bias may depend on the specific Ground Truth (GT), i.e., on the reference partition. In particular, the authors conclude from numerical experiments that Jaccard suffers from NCdec and analytically prove that Rand suffers from GTbias, where the direction of the bias depends on the quadratic entropy of the ground truth clustering. Here we argue that these biases are not well defined, suggest replacing them by well-defined analogues, and show how our analysis allows one to easily test indices for these biases.

We argue that the quantity of interest should not be the number of clusters, but the number of inter-cluster pairs of the candidate. Theorem 3 shows that the asymptotic value of the index depends on the number of intra-cluster pairs of both clusterings (or equivalently, the number of inter-cluster pairs). The key insight is that more clusters do not necessarily imply more inter-cluster pairs. For example, let $s$ denote a cluster-size specification for 3 clusters, each of size $\ell > 2$. Now let $s'$ be the cluster-size specification for one cluster of size $2\ell$ and $\ell$ clusters of size 1. Then, any $B \sim C(s)$ will have 3 clusters and $N - 3\binom{\ell}{2}$ inter-cluster pairs, while any $B' \sim C(s')$ will have $\ell + 1 > 3$ clusters and $N - \binom{2\ell}{2} < N - 3\binom{\ell}{2}$ inter-cluster pairs. For any ground truth $A$ with cluster sizes $s$, we have $\mathbb{E}[J(A, B')] > \mathbb{E}[J(A, B)]$ because of the smaller number of inter-cluster pairs. In contrast, Lei et al. (2017) classify Jaccard as an NCdec index, so that we would expect the inequality to be the other way around, contradicting the definition of NCdec. The PairInc and PairDec biases defined in Definition 10 are sound versions of these NCinc and NCdec biases because they depend on the expected number of agreeing pairs. This makes it possible to determine analytically which bias a given pair-counting index has.

F.1. Synthetic Experiment

In this experiment, we construct several simple examples to illustrate the inconsistency among the indices. Recall that two indices $V_1$ and $V_2$ are inconsistent for a triplet of partitions $(A, B_1, B_2)$ if one of them ranks $B_1$ above $B_2$ while the other ranks $B_2$ above $B_1$. We take all indices from Tables 3 and 4 and construct several triplets of partitions to distinguish them all. Let us note that the pairs Dice vs Jaccard and CC vs CD cannot be inconsistent, since they are monotonically transformable to each other. Also, we do not compare with SMI, since it is much more computationally complex than all the other indices. Thus, we end up with 13 indices and look for simple inconsistency examples. The theoretical minimum number of examples needed to exhibit inconsistency for all pairs of 13 indices is 4. We were able to find four such examples, see Figure 5. In this figure, we show four inconsistency triplets. For each triplet, the shapes (triangle, square, etc.) denote the reference partition $A$; the left and right figures show the candidate partitions $B_1$ and $B_2$. In the caption, we specify which similarity indices favor each candidate partition over the other one. It is easy to see that for each pair of indices there is a simple example where they disagree. For example, NMI and NMI_max are inconsistent on triplet 3. Also, we know that Jaccard in general favors larger clusters, while Rand and NMI often prefer smaller ones. Hence, they often disagree in this way (see triplets 2 and 4).

F.2. Experiments on Real Datasets

In this section, we test whether the inconsistency affects conclusions obtained in experiments on real data. We took 16 datasets and ran 8 well-known clustering algorithms (Scikit-learn, 2020) on them: KMeans, AffinityPropagation, MeanShift, AgglomerativeClustering, DBSCAN, OPTICS, Birch, GaussianMixture. For AgglomerativeClustering, we used 4 different linkage types ('ward', 'average', 'complete', 'single'). For GaussianMixture, we used 4 different covariance types ('spherical', 'diag', 'tied', 'full'). For methods requiring the number of clusters as a parameter (KMeans, Birch, AgglomerativeClustering, GaussianMixture), we took up to 4 different values (fewer than 4 if some of them coincide): 2, ref-clusters, max(2, ref-clusters/2), min(items, 2·ref-clusters), where ref-clusters is the number of clusters in the reference partition and items is the number of elements in the dataset. For MeanShift, we used the option cluster_all=True. All other settings were default or taken from examples in the sklearn manual; a sketch of this candidate-generation procedure is given after this paragraph. For all datasets, we computed the partitions for all methods described above. We removed all partitions having only one cluster or raising any calculation error. Then, we considered all possible triplets $(A, B_1, B_2)$, where $A$ is a reference partition and $B_1$ and $B_2$ are candidates obtained with two different algorithms.
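The sketch below mirrors the candidate-generation settings just described; dataset loading, reference partitions and bookkeeping are omitted, only standard scikit-learn estimators are used, and the integer rounding of ref-clusters/2 is an assumption.

from sklearn import cluster, mixture

def candidate_partitions(X, ref_clusters, n_items):
    ks = sorted({2, ref_clusters, max(2, ref_clusters // 2),
                 min(n_items, 2 * ref_clusters)})
    models = [cluster.AffinityPropagation(),
              cluster.MeanShift(cluster_all=True),
              cluster.DBSCAN(),
              cluster.OPTICS()]
    for k in ks:
        models.append(cluster.KMeans(n_clusters=k))
        models.append(cluster.Birch(n_clusters=k))
        for linkage in ("ward", "average", "complete", "single"):
            models.append(cluster.AgglomerativeClustering(n_clusters=k, linkage=linkage))
        for cov in ("spherical", "diag", "tied", "full"):
            models.append(mixture.GaussianMixture(n_components=k, covariance_type=cov))
    partitions = []
    for model in models:
        try:
            labels = model.fit_predict(X)
        except Exception:
            continue  # drop candidates that raise any calculation error
        if len(set(labels)) > 1:  # drop partitions with only one cluster
            partitions.append(labels)
    return partitions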
We have 8688 such triplets in total. For each triplet, we check whether the indices are consistent; the inconsistency frequencies are shown in Table 7 (a sketch of this tabulation is given at the end of this subsection). Note that Wallace is highly asymmetric and does not satisfy most of the properties, so it is not surprising that it is in general very inconsistent with the others. However, the inconsistency rates are significant even for widely used pairs of indices such as, e.g., Variation of Information vs NMI (40.3%, which is an extremely high disagreement). Interestingly, the best-agreeing indices are S&S and CC, which satisfy most of our properties. This means that conclusions made with these indices are likely to be similar. In fact, one can show that all indices are pairwise inconsistent using only one dataset. This holds for 11 out of 16 datasets: heart-statlog, iris, segment, thy, arrhythmia, vehicle, zoo, ecoli, balance-scale, letter, wine. We do not present statistics for individual datasets, since we found the aggregated Table 7 to be more useful.

Finally, to illustrate the biases of the indices, we compare two KMeans algorithms with k = 2 and k = 2·ref-clusters. The comparison is performed on 10 datasets (those where both algorithms completed successfully). The results are shown in Table 8. In this table, the biases and inconsistency are clearly seen. We see that NMI and NMI_max almost always prefer the larger number of clusters. In contrast, Variation of Information and Rand usually prefer k = 2 (Rand prefers k = 2 in all cases).
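As promised above, here is an illustrative way (with hypothetical helper names; triplets is an iterable of $(A, B_1, B_2)$ and indices maps index names to index functions) to tabulate the pairwise inconsistency frequencies of Table 7.

from collections import Counter
from itertools import combinations

def inconsistency_rates(triplets, indices):
    disagree, total = Counter(), Counter()
    for A, B1, B2 in triplets:
        # Positive diff means the index prefers B1, negative means B2.
        diffs = {name: v(A, B1) - v(A, B2) for name, v in indices.items()}
        for u, w in combinations(indices, 2):
            if diffs[u] != 0 and diffs[w] != 0:  # both express a preference
                total[(u, w)] += 1
                if (diffs[u] > 0) != (diffs[w] > 0):
                    disagree[(u, w)] += 1
    return {pair: disagree[pair] / total[pair] for pair in total}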
F.3. Production Experiment

To show that the choice of similarity index may affect the final quality of a production algorithm, we conducted an experiment within a major news aggregator system. The system aggregates all news articles into events and shows the list of the most important events to users. For grouping, a clustering algorithm is used, and the quality of this algorithm affects the user experience: merging different clusters may lead to not showing an important event, while too much splitting may cause the presence of duplicate events. There is an algorithm A_prod currently used in production and two alternative algorithms A_1 and A_2. To decide which alternative is better for the system, we need to compare them. For that, it is possible either to perform an online experiment or to make an offline comparison, which is much cheaper and allows us to compare more alternatives. For the offline comparison, we manually grouped 1K news articles about volleyball, collected during a period of three days, into events. Then, we compared the obtained reference partition with the partitions A_prod, A_1, and A_2 obtained by the algorithms A_prod, A_1, and A_2, respectively (see Table 9).

According to most of the indices, A_2 is closer to the reference partition than A_1, and A_1 is closer than A_prod. However, according to some indices, including the well-known NMI_max, NMI, and Rand, A_1 corresponds to the reference partition better than A_2. As a result, we see that in a practical application different similarity indices may rank the algorithms differently. To further see which algorithm better agrees with user preferences, we launched the following online experiment. During one week we compared A_prod and A_1, and during another week A_prod and A_2 (it is not technically possible to compare A_1 and A_2 simultaneously). In the first experiment, A_1 gave +0.75% clicks on events shown to users; in the second, A_2 gave +2.7%, which clearly confirms that these algorithms have different effects on user experience and that A_2 is a better alternative than A_1.

Most similarity indices having nice properties, including CC, CD, and S&S, are in agreement with the user preferences. In contrast, AMI ranks A_1 higher than A_2. This can be explained by the fact that AMI gives more weight to small clusters compared to pair-counting indices, which can be undesirable for this particular application, as we discuss in Section 5.
// Competitive-programming scratch file: assorted helpers plus a main() that
// reads n and m and prints ceil(m / n).
#include <bits/stdc++.h>
using namespace std;
#define f first
#define s second
#define mp make_pair
#define pb push_back
#define ll long long

// Comparator for sorting in descending order.
bool myfun(long a, long b) {
    return a > b;
}

// Index of the smallest unvisited element of a[0..n-1], or -1 if none.
long long findmin(int a[], int n, int vis[]) {
    long long mn = INT_MAX, ind = -1;
    for (long long i = 0; i < n; i++) {
        if (mn > a[i] && vis[i] == 0) {
            mn = a[i];
            ind = i;
        }
    }
    return ind;
}

// Sliding window: length of the longest subarray of temp[0..m-1] with
// sum <= k (assumes non-negative elements).
long long kadane(long long temp[], long long m, long long k) {
    long long mx = 0, i = 0, sum = 0;
    for (long long j = 0; j < m; j++) {
        sum += temp[j];
        while (sum > k && i <= j) {  // shrink the window until the sum fits
            sum -= temp[i];
            i++;
        }
        mx = max(mx, j - i + 1);
    }
    return mx;
}

// Euclidean GCD; accepts the arguments in either order.
int gcd(int a, int b) {
    if (b == 0)
        return a;
    return gcd(b, a % b);
}

int arr[1000001];

// Sieve of Eratosthenes: arr[i] == 1 iff i is prime (for i >= 2).
void sieve() {
    long long i, j;
    for (i = 0; i <= 1000000; i++)
        arr[i] = 1;
    for (i = 2; i <= sqrt(1000000); i++) {
        if (arr[i] == 1) {
            for (j = i + i; j <= 1000000; j += i)
                arr[j] = 0;
        }
    }
}

struct aa {
    int x, y;
};

// Bounds check for an n x m grid.
int valid(int i, int j, int n, int m) {
    return (i >= 0 && i < n && j >= 0 && j < m) ? 1 : 0;
}

// DFS that prints the traversal order and counts dead-end vertices in cnt.
void dfs(int u, int vis[], vector<int> a[], int n, int &cnt) {
    vis[u] = 1;
    int fl = 0;
    cout << u << ' ';
    for (int i = 0; i < (int)a[u].size(); i++) {
        if (vis[a[u][i]] == 0) {
            dfs(a[u][i], vis, a, n, cnt);
            fl = 1;
        }
    }
    if (fl == 0)
        cnt++;
}

/*
void djikstra(int start, int n, vector<pair<ll, ll> > adj[]) {
    ll parent[n + 1], visit[n + 1], dist[n + 1];
    for (int i = 0; i <= n; i++)
        visit[i] = 0;
    for (int i = 2; i <= n; i++)
        dist[i] = INT_MAX;
    set<pair<ll, ll> > q;
    q.insert(mp(0, start));
    while (!q.empty()) {
        pair<ll, ll> p = *q.begin();
        q.erase(q.begin());
        ll node = p.second;
        ll di = p.first;
        if (visit[node])
            continue;
        visit[node] = true;
        dist[node] = di;
        for (int i = 0; i < adj[node].size(); i++) {
            ll pt = adj[node][i].first;
            ll wt = adj[node][i].second;
            if (dist[pt] > dist[node] + wt) {
                parent[pt] = node;
                dist[pt] = dist[node] + wt;
                q.insert(mp(dist[pt], pt));
            }
        }
    }
}
*/

struct aa1 {
    vector<int> v;
    int a;
};

// For piece length `mid`, greedily cut pieces from the (length, id) pairs,
// taking at most k pieces in total; returns the piece ids and a success flag.
aa1 f1(int mid, vector<pair<int, int> > a, int k) {
    vector<int> arr1;
    aa1 b;
    int l = 0;
    for (int i = 0; i < (int)a.size(); i++) {
        int p = arr1.size();
        int u = min(a[i].first / mid, k - p);
        l += u;
        while (u-- > 0)
            arr1.push_back(a[i].s);
    }
    b.v = arr1;
    b.a = (l >= k) ? 1 : 0;
    return b;
}

// Rightmost index in (l, r) with A[index] <= key (A sorted ascending).
int GetRightPosition(vector<int> A, int l, int r, int key) {
    int m;
    while (r - l > 1) {
        m = l + (r - l) / 2;
        if (A[m] <= key)
            l = m;
        else
            r = m;
    }
    return l;
}

// Leftmost index in (l, r) with A[index] >= key (A sorted ascending).
int GetLeftPosition(vector<int> A, int l, int r, int key) {
    int m;
    while (r - l > 1) {
        m = l + (r - l) / 2;
        if (A[m] >= key)
            r = m;
        else
            l = m;
    }
    return r;
}

int main() {
    ios::sync_with_stdio(false);
    cin.tie(0);
    cout.tie(0);
    long long n, m, x;
    cin >> n >> m;
    x = m / n;
    if (m % n == 0)
        cout << x << '\n';
    else
        cout << x + 1 << '\n';
}
#ifdef vxWorks
#include <vxWorks.h>
#endif

#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <ctype.h>
#include <tickLib.h>

#include "dbDefs.h"
#include "sCalcPostfix.h"
#include "sCalcPostfixPvt.h" /* need USES_STRING for testing */
#include "postfix.h"

long test_sCalcPostfix(char *pinfix)
{
	short i, error;
	long stat;
	char *p_postfix = NULL;
	char cbuf[1000];
	ULONG start;
	double parg[12], result;
	char *ppsarg[12], space[1200], sresult[100];

	/* Busy-wait to the next clock tick, then count calls during one tick. */
	for (start = tickGet(); start == tickGet(); );
	for (i = 0, start = tickGet(); start == tickGet(); i++)
		stat = postfix(pinfix, cbuf, &error);
	printf("%d calls to postfix() in 1/60 s: %f us per call\n",
		i, 1.e6 / (i * sysClkRateGet()));

	for (start = tickGet(); start == tickGet(); );
	for (i = 0, start = tickGet(); start == tickGet(); i++)
		stat = calcPerform(parg, &result, cbuf);
	printf("%d calls to calcPerform() in 1/60 s: %f us per call\n",
		i, 1.e6 / (i * sysClkRateGet()));

	for (i = 0; i < 12; i++)
		ppsarg[i] = &space[i * 100];

	for (start = tickGet(); start == tickGet(); );
	for (i = 0, start = tickGet(); start == tickGet(); i++)
		stat = sCalcPostfix(pinfix, &p_postfix, &error);
	printf("%d calls to sCalcPostfix() in 1/60 s: %f us per call\n",
		i, 1.e6 / (i * sysClkRateGet()));

	for (start = tickGet(); start == tickGet(); );
	for (i = 0, start = tickGet(); start == tickGet(); i++)
		stat = sCalcPerform(parg, 12, ppsarg, 12, &result, sresult, 100, p_postfix);
	printf("%d calls to sCalcPerform() in 1/60 s: %f us per call\n",
		i, 1.e6 / (i * sysClkRateGet()));

	return (0);
}

long test_sCalcPerform(char *pinfix)
{
	int i;
	short error;
	long stat;
	char *p_postfix = NULL;
	char cbuf[1000];
	ULONG start;
	double parg[12], result;
	char *ppsarg[12], space[1200], sresult[100];

	for (i = 0; i < 12; i++) {
		parg[i] = (double)i;
		ppsarg[i] = &space[i * 100];
		sprintf(ppsarg[i], "%d", i);
	}

	stat = postfix(pinfix, cbuf, &error);
	for (start = tickGet(); start == tickGet(); );
	for (i = 0, start = tickGet(); start == tickGet(); i++)
		stat = calcPerform(parg, &result, cbuf);
	printf(":calcPerform() : %.2f us/call\n", 1.e6 / (i * sysClkRateGet()));

	stat = sCalcPostfix(pinfix, &p_postfix, &error);
	for (start = tickGet(); start == tickGet(); );
	for (i = 0, start = tickGet(); start == tickGet(); i++)
		stat = sCalcPerform(parg, 12, ppsarg, 12, &result, sresult, 100, p_postfix);
	printf(":sCalcPerform() : %.2f us/call\n", 1.e6 / (i * sysClkRateGet()));

	for (start = tickGet(); start == tickGet(); );
	for (i = 0, start = tickGet(); start == tickGet(); i++)
		stat = sCalcPerform(parg, 12, NULL, 0, &result, NULL, 0, p_postfix);
	printf(":sCalcPerform(no-string-buf) : %.2f us/call\n", 1.e6 / (i * sysClkRateGet()));

	if (p_postfix)
		free(p_postfix);
	return (0);
}
import {Component} from '@angular/core';
import {IonicPage, NavController, NavParams} from 'ionic-angular';
import {LockFeatureConfirmPage} from '../lock-feature-confirm/lock-feature-confirm';
import {InOrOutDisplayFunctionSelectionPage} from '../in-or-out-display-function-selection/in-or-out-display-function-selection';
import {AdditionalBeepSettingsPage} from '../additional-beep-settings/additional-beep-settings';
import {NetworkConnectionDetectionPage} from '../network-connection-detection/network-connection-detection';
import {MessagingMethodSettingsPage} from '../messaging-method-settings/messaging-method-settings';

/**
 * Generated class for the DevicePage page.
 *
 * See https://ionicframework.com/docs/components/#navigation for more info on
 * Ionic pages and navigation.
 */
@IonicPage()
@Component({
  selector: 'page-device',
  templateUrl: 'device.html',
})
export class DevicePage {

  deviceItems: any[];

  constructor(public navCtrl: NavController, public navParams: NavParams) {
    this.deviceItems = [
      {
        "deviceId": 1,
        "name": "现有锁具特征确认", // "Confirm existing lock features"
        "profilePic": "assets/img/device/ios7-locked.png",
      },
      {
        "deviceId": 2,
        "name": "出入门消息显示功能选择", // "Entry/exit message display selection"
        "profilePic": "assets/img/device/ios7-home.png",
      },
      {
        "deviceId": 3,
        "name": "附加提示音设置", // "Additional beep settings"
        "profilePic": "assets/img/device/android-volume.png",
      },
      {
        "deviceId": 4,
        "name": "网络连接检测", // "Network connection detection"
        "profilePic": "assets/img/device/wifi.png",
      },
      {
        "deviceId": 5,
        "name": "消息传递方式设置", // "Messaging method settings"
        "profilePic": "assets/img/device/android-alarm.png",
      }
    ];
  }

  ionViewDidLoad() {
    console.log('ionViewDidLoad DevicePage');
  }

  openItem(deviceId: number) {
    if (deviceId == 1) {
      this.navCtrl.push(LockFeatureConfirmPage);
    } else if (deviceId == 2) {
      this.navCtrl.push(InOrOutDisplayFunctionSelectionPage);
    } else if (deviceId == 3) {
      this.navCtrl.push(AdditionalBeepSettingsPage);
    } else if (deviceId == 4) {
      this.navCtrl.push(NetworkConnectionDetectionPage);
    } else if (deviceId == 5) {
      this.navCtrl.push(MessagingMethodSettingsPage);
    }
  }
}
/**
 * Test Suite for Role Inheritance
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({
        // Propagation Tests
        AccessPropagationNotInherited.class, //
        AccessPropagation.class, //
        DeletedAccessPropagation.class, //
        // Inheritance Tests
        HorizontalInheritance.class, //
        VerticalInheritance.class, //
        // Restrictions Test
        RoleInheritanceRestrictions.class, //
        // Recalculate Permissions Process Test
        RecalculatePermissions.class, //
})
public class RoleInheritanceTestSuite {
}
package deck

type PreconDeck struct {
	Id            string `json:"id"`
	Name          string `json:"name"`
	Description   string `json:"description"`
	Format        string `json:"format"`
	ResourceId    string `json:"resourceId"`
	DeckTileId    int    `json:"deckTileId"`
	MainDeck      []Card `json:"mainDeck"`
	Sideboard     []Card `json:"sideboard"`
	LastUpdated   string `json:"lastUpdated"`
	LockedForUse  bool   `json:"lockedForUse"`
	LockedForEdit bool   `json:"lockedForEdit"`
	CardBack      string `json:"cardBack"`
	IsValid       bool   `json:"isValid"`
}

type Card struct {
	Id       string `json:"id"`
	Quantity int    `json:"quantity"`
}
/**
 * Replaces placeholder tags in notification meta in the subscription JSON
 * string with valid notification meta.
 *
 * @param text JSON string containing replaceable tags
 * @return Processed content
 */
private String replaceTagsInNotificationMeta(String text) {
    text = text.replaceAll("\\$\\{rest\\.host\\}", "localhost");
    text = text.replaceAll("\\$\\{rest\\.port\\}", String.valueOf(restServer.getLocalPort()));
    text = text.replaceAll("\\$\\{rest\\.raw.body\\}", REST_ENDPOINT_RAW_BODY);
    text = text.replaceAll("\\$\\{rest\\.endpoint\\}", REST_ENDPOINT);
    text = text.replaceAll("\\$\\{rest\\.endpoint\\.auth\\}", REST_ENDPOINT_AUTH);
    text = text.replaceAll("\\$\\{rest\\.endpoint\\.params\\}", REST_ENDPOINT_PARAMS);
    text = text.replaceAll("\\$\\{rest\\.endpoint\\.auth\\.params\\}", REST_ENDPOINT_AUTH_PARAMS);
    text = text.replaceAll("\\$\\{rest\\.endpoint\\.bad\\}", REST_ENDPOINT_BAD);
    return text;
}
import { SFASRecordIdentification } from "../../sfas-integration/sfas-files/sfas-record-identification";

/**
 * Common method that services importing data from SFAS must
 * implement to process a SFAS record being imported.
 */
export interface SFASDataImporter {
  /**
   * Process the import of a SFAS record.
   * @param record record to be imported.
   * @param extractedDate date and time that the record
   * was extracted from SFAS.
   */
  importSFASRecord(
    record: SFASRecordIdentification,
    extractedDate: Date,
  ): Promise<void>;
}
import styled from "styled-components";

type IProps = {
  center?: boolean;
};

export const H1 = styled.h1<IProps>`
  font-style: normal;
  font-weight: 500;
  font-size: 28px;
  line-height: 34px;
  display: block;
  text-align: ${props => (props.center ? "center" : "left")};
  color: ${({ theme }) => theme.textColor};
  margin: 0;
`;

export const H2 = styled(H1).attrs({ as: "h2" })`
  font-size: 26px;
  line-height: 32px;
`;

export const H3 = styled(H1).attrs({ as: "h3" })`
  font-size: 20px;
  line-height: 24px;
`;

export const H4 = styled(H1).attrs({ as: "h4" })`
  font-size: 18px;
  line-height: 22px;
`;

export const H5 = styled(H1).attrs({ as: "h5" })`
  font-size: 15px;
  line-height: 18px;
`;
A former GCHQ whistleblower has condemned plans by government lawyers to increase prison sentences and expand the definition of espionage for the digital age.

Katharine Gun, a former translator for the monitoring agency who leaked details of an operation to bug United Nations offices before the 2003 invasion of Iraq, has spoken out following the publication of Law Commission plans suggesting that maximum jail terms for those leaking information should rise from two years to 14 years.

In the past, Gun has called for a public interest defence to be introduced into the Official Secrets Act (OSA) to protect whistleblowers and prevent governments from hiding politically embarrassing information.

She said: “The Official Secrets Act 1989 may need reforming for the digital era, but I would argue that at its heart there should be protection for whistleblowers. As it stands, the OSA is reputedly one of the most draconian secrecy laws in the world. It seems to me to have been very effective at dissuading and preventing the 99.9% of British citizens who have signed it from making unauthorised disclosures.

“On the rare occasions where it has been breached, it could be argued that it was done to expose lies or misrepresentations of the truth and other forms of skulduggery. Examples include Clive Ponting, Peter Wright, David Shayler, myself, David Keogh and Leo O’Connor. Peter Wright’s case followed the publication of his book, Spycatcher. Lord Goff [a judge] said at the time: ‘In a free society, there is a continuing public interest that the workings of government should be open to scrutiny and criticism.’

“It seems to me that we are living in an increasingly unfree society. The government and its intelligence and security apparatus have amassed ever broader and deeper powers through legislation like the Justice and Security Act 2013 and the Investigatory Powers Act 2016. These laws enable it to survey all private communications and online activity, carry out bulk collection and storage of data, hack private devices, detain and interrogate at whim and demand CMP [closed material procedures] in court, preventing evidence and information from being disclosed in the interest of national security.

“If the proposals to reform or rewrite the OSA extend the overall dragnet nature of the act, increase the penalty limit and disregard a public interest defence, it will exacerbate the concentration of power in the hands of the government and deter or even prevent whistleblowers from revealing government lies and abuse of power.”

Gun was charged under the OSA in 2003, but the prosecution was dropped when it came to court the following year amid speculation that there would have been a political outcry.

Referring to the request in 2003 from the US National Security Agency to bug the offices of countries on the UN security council before the vote on invading Iraq, Gun said: “I was enraged by the subterfuge and potential blackmail they wanted us to carry out. I went public and it was published in the Observer.”

Commenting on the Law Commission proposals, the Liberal Democrat peer and civil liberties campaigner Paul Strasburger said: “This attempt to suppress dissent and prevent the public knowing the truth is typical of Mrs May’s time at the Home Office. She has carried the same authoritarian approach into Brexit by doing everything she can to limit debate and keep voters in the dark.
“These proposals might be appropriate for a banana-republic dictatorship. They are completely out of the question in our democracy, which depends on brave whistleblowers and a free press to hold the government to account when they let us down through incompetence or corruption.

“Liberal Democrats will strongly resist these extremely dangerous proposals.”

Martha Spurrier, director of Liberty, said: “It’s disturbing that the Law Commission considers a single meeting adequate consultation to inform such drastic and dangerous proposals. We don’t – and we will be submitting a thorough response to the public consultation.

“These oppressive plans have no place in a democracy. They would skew the balance even further in favour of state secrecy, irrespective of potentially profound public interest. By increasing the prospect of prosecutions for revelations that are merely embarrassing or inconvenient, they would silence whistleblowers and gag our press.

“This is the latest hypocritical move from a government intent on operating in the shadows while monitoring every move the rest of us make. When the agencies’ illegal mass surveillance was exposed by Snowden’s courageous actions, this government responded with the eye-wateringly authoritarian Investigatory Powers Act. That law makes it illegal to disclose even the mere existence of warrants to intercept people’s communications, meaning those who are unlawfully spied on, including journalists, will never know about it – unless whistleblowers come forward.”

The National Union of Journalists said it would robustly defend the rights of its members. Michelle Stanistreet, NUJ general secretary, said: “The ramifications of these recommendations are huge for journalists and freedom of the press. Journalists face being criminalised for simply doing their job, and the public’s right to know will be severely curtailed by these proposals. The union will respond robustly to the Law Commission’s consultation on changes to the Official Secrets Act.”

She said: “It was the present prime minister, Theresa May, who was the ‘Big Sister’ and original architect of the snoopers’ charter. We need to protect the public from a government which seems intent on preventing journalists from finding information the government finds inconvenient to be exposed. Democracy is under attack when a government does all it can to do its business in secret.”
from data_resource.shared_utils.log_factory import LogFactory

logger = LogFactory.get_console_logger("generator:resource-delete")


class ResourceDelete:
    def delete_one(self, id, data_resource):
        """Delete a single object from the data model based on its primary key.

        Args:
            id (any): The primary key for the specific object.
            data_resource (object): SQLAlchemy ORM model.

        Return:
            dict, int: The response object and the HTTP status code.
        """
        pass
// TCPPort returns a TCP corev1.ServicePort.
func TCPPort(name string, port int32, targetPort intstr.IntOrString) corev1.ServicePort {
	return corev1.ServicePort{
		Name:       name,
		Port:       port,
		TargetPort: targetPort,
		Protocol:   corev1.ProtocolTCP,
	}
}
/*
 * Function: Positions the kwin dialog either to the saved location
 *   or the center of the screen if no saved location.
 *
 * Parameters:
 *   hwnd - the window to center on the screen.
 */
static void
position_dialog(HWND hwnd)
{
    int scrwidth, scrheight;
    HDC hdc;
    int x, y, cx, cy;

    if (hwnd == NULL)
        return;

    hdc = GetDC(NULL);
    scrwidth = GetDeviceCaps(hdc, HORZRES);
    scrheight = GetDeviceCaps(hdc, VERTRES);
    ReleaseDC(NULL, hdc);
    x = cns_res.x;
    y = cns_res.y;
    cx = cns_res.cx;
    cy = cns_res.cy;

    /* Fall back to centering when the saved rectangle is off screen. */
    if (x > scrwidth || y > scrheight || x + cx <= 0 || y + cy <= 0)
        center_dialog(hwnd);
    else
        MoveWindow(hwnd, x, y, cx, cy, TRUE);
}
// CopyInto copies from sv into target - it creates new slices and fills them
// with the pre-release and build IDs.
func (sv SV) CopyInto(target *SV) {
	target.Major = sv.Major
	target.Minor = sv.Minor
	target.Patch = sv.Patch

	target.PreRelIDs = make([]string, len(sv.PreRelIDs))
	copy(target.PreRelIDs, sv.PreRelIDs)

	target.BuildIDs = make([]string, len(sv.BuildIDs))
	copy(target.BuildIDs, sv.BuildIDs)
}
import numpy as np
from PIL import Image, ImageDraw
from shapely.geometry import mapping


def polygon_to_mask(poly, x, y, width, downsample):
    # Rasterize a shapely polygon into a binary mask for the tile whose
    # top-left corner is (x, y) and whose side is `width` pixels at full
    # resolution; the mask itself is width/downsample pixels wide.
    mask = Image.new('L', (int(width / downsample), int(width / downsample)), 0)
    mapp = mapping(poly)
    if mapp['type'] == 'Polygon' or mapp['type'] == 'MultiPolygon':
        coords = mapp['coordinates']
    elif mapp['type'] == 'GeometryCollection':
        # Keep only the polygonal members of the collection.
        coords = []
        for key, val in mapp.items():
            if key == 'geometries':
                for d in val:
                    if d['type'] == 'Polygon':
                        coords.append(d['coordinates'])
    else:
        # Unsupported geometry type: return the empty mask.
        return mask
    if len(coords) > 1:
        # Multi-part geometry: keep the exterior ring of each part.
        coords = [item for sublist in coords for item in sublist[0]]
    else:
        coords = coords[0]
    if len(coords) > 2 and isinstance(coords[0], tuple):
        # Shift into tile coordinates and scale down.
        coords = [((c[0] - x) / downsample, (c[1] - y) / downsample) for c in coords]
    ImageDraw.Draw(mask).polygon(coords, outline=1, fill=1)
    mask = np.array(mask)
    return mask
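A hypothetical usage example for the function above (the geometry, tile origin, width and downsample factor are made up for illustration):

from shapely.geometry import box

if __name__ == '__main__':
    # A 100x100 square, rasterized into a 64x64 mask tile with origin (50, 50).
    poly = box(50, 50, 150, 150)
    mask = polygon_to_mask(poly, x=50, y=50, width=256, downsample=4)
    print(mask.shape, mask.sum())  # (64, 64); the square region is filled with 1s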
// "Glass carving" style problem: a v x h sheet is cut by vertical and
// horizontal cuts; after each cut, print the area of the largest fragment.
#include <iostream>
#include <set>
#define ll long long
using namespace std;

set<ll> V, H;        // cut positions (plus the sheet borders)
multiset<ll> mV, mH; // current segment lengths along each axis

int main() {
    ll h, v, n, x;
    cin >> v >> h >> n;
    char s;
    V.insert(0);
    V.insert(v);
    H.insert(0);
    H.insert(h);
    mV.insert(v);
    mH.insert(h);
    for (int i = 0; i < n; ++i) {
        cin >> s >> x;
        if (s == 'H') {
            H.insert(x);
            ll before = *(--H.find(x));
            ll after = *(++H.find(x));
            mH.insert(after - x);
            mH.insert(x - before);
            mH.erase(mH.find(after - before));
        } else {
            V.insert(x);
            ll before = *(--V.find(x));
            ll after = *(++V.find(x));
            mV.insert(after - x);
            mV.insert(x - before);
            mV.erase(mV.find(after - before));
        }
        // Largest fragment = widest vertical segment * tallest horizontal segment.
        cout << *(--mV.end()) * *(--mH.end()) << endl;
    }
    return 0;
}
/*
 * Copyright 1999,2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.sf.webdav.methods;

import java.io.IOException;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Vector;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.parsers.DocumentBuilder;

import net.sf.webdav.IMimeTyper;
import net.sf.webdav.ITransaction;
import net.sf.webdav.IWebdavStore;
import net.sf.webdav.StoredObject;
import net.sf.webdav.WebdavStatus;
import net.sf.webdav.exceptions.AccessDeniedException;
import net.sf.webdav.exceptions.LockFailedException;
import net.sf.webdav.exceptions.WebdavException;
import net.sf.webdav.fromcatalina.URLEncoder;
import net.sf.webdav.fromcatalina.XMLHelper;
import net.sf.webdav.fromcatalina.XMLWriter;
import net.sf.webdav.locking.LockedObject;
import net.sf.webdav.locking.ResourceLocks;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;

public class DoPropfind extends AbstractMethod {

    private static org.slf4j.Logger LOG = org.slf4j.LoggerFactory
            .getLogger(DoPropfind.class);

    /**
     * Array containing the safe characters set.
     */
    protected static URLEncoder URL_ENCODER;

    /**
     * PROPFIND - Specify a property mask.
     */
    private static final int FIND_BY_PROPERTY = 0;

    /**
     * PROPFIND - Display all properties.
     */
    private static final int FIND_ALL_PROP = 1;

    /**
     * PROPFIND - Return property names.
*/ private static final int FIND_PROPERTY_NAMES = 2; private IWebdavStore _store; private ResourceLocks _resourceLocks; private IMimeTyper _mimeTyper; private int _depth; public DoPropfind(IWebdavStore store, ResourceLocks resLocks, IMimeTyper mimeTyper) { _store = store; _resourceLocks = resLocks; _mimeTyper = mimeTyper; } public void execute(ITransaction transaction, HttpServletRequest req, HttpServletResponse resp) throws IOException, LockFailedException { LOG.trace("-- " + this.getClass().getName()); // Retrieve the resources String path = getCleanPath(getRelativePath(req)); String tempLockOwner = "doPropfind" + System.currentTimeMillis() + req.toString(); _depth = getDepth(req); if (_resourceLocks.lock(transaction, path, tempLockOwner, false, _depth, TEMP_TIMEOUT, TEMPORARY)) { StoredObject so = null; try { so = _store.getStoredObject(transaction, path); if (so == null) { resp.setContentType("text/xml; charset=UTF-8"); resp.sendError(HttpServletResponse.SC_NOT_FOUND, req .getRequestURI()); return; } Vector<String> properties = null; path = getCleanPath(getRelativePath(req)); int propertyFindType = FIND_ALL_PROP; Node propNode = null; if (req.getContentLength() != 0) { DocumentBuilder documentBuilder = getDocumentBuilder(); try { Document document = documentBuilder .parse(new InputSource(req.getInputStream())); // Get the root element of the document Element rootElement = document.getDocumentElement(); propNode = XMLHelper .findSubElement(rootElement, "prop"); if (propNode != null) { propertyFindType = FIND_BY_PROPERTY; } else if (XMLHelper.findSubElement(rootElement, "propname") != null) { propertyFindType = FIND_PROPERTY_NAMES; } else if (XMLHelper.findSubElement(rootElement, "allprop") != null) { propertyFindType = FIND_ALL_PROP; } } catch (Exception e) { resp.sendError(WebdavStatus.SC_INTERNAL_SERVER_ERROR); return; } } else { // no content, which means it is a allprop request propertyFindType = FIND_ALL_PROP; } HashMap<String, String> namespaces = new HashMap<String, String>(); namespaces.put("DAV:", "D"); if (propertyFindType == FIND_BY_PROPERTY) { propertyFindType = 0; properties = XMLHelper.getPropertiesFromXML(propNode); } resp.setStatus(WebdavStatus.SC_MULTI_STATUS); resp.setContentType("text/xml; charset=UTF-8"); // Create multistatus object XMLWriter generatedXML = new XMLWriter(resp.getWriter(), namespaces); generatedXML.writeXMLHeader(); generatedXML .writeElement("DAV::multistatus", XMLWriter.OPENING); if (_depth == 0) { parseProperties(transaction, req, generatedXML, path, propertyFindType, properties, _mimeTyper .getMimeType(transaction, path)); } else { recursiveParseProperties(transaction, path, req, generatedXML, propertyFindType, properties, _depth, _mimeTyper.getMimeType(transaction, path)); } generatedXML .writeElement("DAV::multistatus", XMLWriter.CLOSING); generatedXML.sendData(); } catch (AccessDeniedException e) { resp.sendError(WebdavStatus.SC_FORBIDDEN); } catch (WebdavException e) { LOG.warn("Sending internal error!"); resp.sendError(WebdavStatus.SC_INTERNAL_SERVER_ERROR); } catch (ServletException e) { e.printStackTrace(); // To change body of catch statement use // File | Settings | File Templates. } finally { _resourceLocks.unlockTemporaryLockedObjects(transaction, path, tempLockOwner); } } else { Hashtable<String, Integer> errorList = new Hashtable<String, Integer>(); errorList.put(path, WebdavStatus.SC_LOCKED); sendReport(req, resp, errorList); } } /** * goes recursive through all folders. 
used by propfind * * @param currentPath * the current path * @param req * HttpServletRequest * @param generatedXML * @param propertyFindType * @param properties * @param depth * depth of the propfind * @throws IOException * if an error in the underlying store occurs */ private void recursiveParseProperties(ITransaction transaction, String currentPath, HttpServletRequest req, XMLWriter generatedXML, int propertyFindType, Vector<String> properties, int depth, String mimeType) throws WebdavException { parseProperties(transaction, req, generatedXML, currentPath, propertyFindType, properties, mimeType); if (depth > 0) { // no need to get name if depth is already zero String[] names = _store.getChildrenNames(transaction, currentPath); names = names == null ? new String[] {} : names; String newPath = null; for (String name : names) { newPath = currentPath; if (!(newPath.endsWith("/"))) { newPath += "/"; } newPath += name; recursiveParseProperties(transaction, newPath, req, generatedXML, propertyFindType, properties, depth - 1, mimeType); } } } /** * Propfind helper method. * * @param req * The servlet request * @param generatedXML * XML response to the Propfind request * @param path * Path of the current resource * @param type * Propfind type * @param propertiesVector * If the propfind type is find properties by name, then this Vector * contains those properties */ private void parseProperties(ITransaction transaction, HttpServletRequest req, XMLWriter generatedXML, String path, int type, Vector<String> propertiesVector, String mimeType) throws WebdavException { StoredObject so = _store.getStoredObject(transaction, path); boolean isFolder = so.isFolder(); final String creationdate = creationDateFormat(so.getCreationDate()); final String lastModified = lastModifiedDateFormat(so.getLastModified()); String resourceLength = String.valueOf(so.getResourceLength()); // ResourceInfo resourceInfo = new ResourceInfo(path, resources); generatedXML.writeElement("DAV::response", XMLWriter.OPENING); String status = new String("HTTP/1.1 " + WebdavStatus.SC_OK + " " + WebdavStatus.getStatusText(WebdavStatus.SC_OK)); // Generating href element generatedXML.writeElement("DAV::href", XMLWriter.OPENING); String href = req.getContextPath(); String servletPath = req.getServletPath(); if (servletPath != null) { if ((href.endsWith("/")) && (servletPath.startsWith("/"))) href += servletPath.substring(1); else href += servletPath; } if ((href.endsWith("/")) && (path.startsWith("/"))) href += path.substring(1); else href += path; if ((isFolder) && (!href.endsWith("/"))) href += "/"; generatedXML.writeText(rewriteUrl(href)); generatedXML.writeElement("DAV::href", XMLWriter.CLOSING); String resourceName = path; int lastSlash = path.lastIndexOf('/'); if (lastSlash != -1) resourceName = resourceName.substring(lastSlash + 1); switch (type) { case FIND_ALL_PROP: generatedXML.writeElement("DAV::propstat", XMLWriter.OPENING); generatedXML.writeElement("DAV::prop", XMLWriter.OPENING); generatedXML.writeProperty("DAV::creationdate", creationdate); generatedXML.writeElement("DAV::displayname", XMLWriter.OPENING); generatedXML.writeData(resourceName); generatedXML.writeElement("DAV::displayname", XMLWriter.CLOSING); if (!isFolder) { generatedXML .writeProperty("DAV::getlastmodified", lastModified); generatedXML.writeProperty("DAV::getcontentlength", resourceLength); String contentType = mimeType; if (contentType != null) { generatedXML.writeProperty("DAV::getcontenttype", contentType); } generatedXML.writeProperty("DAV::getetag", 
getETag(so)); generatedXML.writeElement("DAV::resourcetype", XMLWriter.NO_CONTENT); } else { generatedXML.writeElement("DAV::resourcetype", XMLWriter.OPENING); generatedXML.writeElement("DAV::collection", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::resourcetype", XMLWriter.CLOSING); } writeSupportedLockElements(transaction, generatedXML, path); writeLockDiscoveryElements(transaction, generatedXML, path); generatedXML.writeProperty("DAV::source", ""); generatedXML.writeElement("DAV::prop", XMLWriter.CLOSING); generatedXML.writeElement("DAV::status", XMLWriter.OPENING); generatedXML.writeText(status); generatedXML.writeElement("DAV::status", XMLWriter.CLOSING); generatedXML.writeElement("DAV::propstat", XMLWriter.CLOSING); break; case FIND_PROPERTY_NAMES: generatedXML.writeElement("DAV::propstat", XMLWriter.OPENING); generatedXML.writeElement("DAV::prop", XMLWriter.OPENING); generatedXML .writeElement("DAV::creationdate", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::displayname", XMLWriter.NO_CONTENT); if (!isFolder) { generatedXML.writeElement("DAV::getcontentlanguage", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::getcontentlength", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::getcontenttype", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::getetag", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::getlastmodified", XMLWriter.NO_CONTENT); } generatedXML .writeElement("DAV::resourcetype", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::supportedlock", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::source", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::prop", XMLWriter.CLOSING); generatedXML.writeElement("DAV::status", XMLWriter.OPENING); generatedXML.writeText(status); generatedXML.writeElement("DAV::status", XMLWriter.CLOSING); generatedXML.writeElement("DAV::propstat", XMLWriter.CLOSING); break; case FIND_BY_PROPERTY: Vector<String> propertiesNotFound = new Vector<String>(); // Parse the list of properties generatedXML.writeElement("DAV::propstat", XMLWriter.OPENING); generatedXML.writeElement("DAV::prop", XMLWriter.OPENING); Enumeration<String> properties = propertiesVector.elements(); while (properties.hasMoreElements()) { String property = (String) properties.nextElement(); if (property.equals("DAV::creationdate")) { generatedXML.writeProperty("DAV::creationdate", creationdate); } else if (property.equals("DAV::displayname")) { generatedXML.writeElement("DAV::displayname", XMLWriter.OPENING); generatedXML.writeData(resourceName); generatedXML.writeElement("DAV::displayname", XMLWriter.CLOSING); } else if (property.equals("DAV::getcontentlanguage")) { if (isFolder) { propertiesNotFound.addElement(property); } else { generatedXML.writeElement("DAV::getcontentlanguage", XMLWriter.NO_CONTENT); } } else if (property.equals("DAV::getcontentlength")) { if (isFolder) { propertiesNotFound.addElement(property); } else { generatedXML.writeProperty("DAV::getcontentlength", resourceLength); } } else if (property.equals("DAV::getcontenttype")) { if (isFolder) { propertiesNotFound.addElement(property); } else { generatedXML.writeProperty("DAV::getcontenttype", mimeType); } } else if (property.equals("DAV::getetag")) { if (isFolder || so.isNullResource()) { propertiesNotFound.addElement(property); } else { generatedXML.writeProperty("DAV::getetag", getETag(so)); } } else if (property.equals("DAV::getlastmodified")) { if (isFolder) { propertiesNotFound.addElement(property); } else { 
generatedXML.writeProperty("DAV::getlastmodified", lastModified); } } else if (property.equals("DAV::resourcetype")) { if (isFolder) { generatedXML.writeElement("DAV::resourcetype", XMLWriter.OPENING); generatedXML.writeElement("DAV::collection", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::resourcetype", XMLWriter.CLOSING); } else { generatedXML.writeElement("DAV::resourcetype", XMLWriter.NO_CONTENT); } } else if (property.equals("DAV::source")) { generatedXML.writeProperty("DAV::source", ""); } else if (property.equals("DAV::supportedlock")) { writeSupportedLockElements(transaction, generatedXML, path); } else if (property.equals("DAV::lockdiscovery")) { writeLockDiscoveryElements(transaction, generatedXML, path); } else { propertiesNotFound.addElement(property); } } generatedXML.writeElement("DAV::prop", XMLWriter.CLOSING); generatedXML.writeElement("DAV::status", XMLWriter.OPENING); generatedXML.writeText(status); generatedXML.writeElement("DAV::status", XMLWriter.CLOSING); generatedXML.writeElement("DAV::propstat", XMLWriter.CLOSING); Enumeration<String> propertiesNotFoundList = propertiesNotFound .elements(); if (propertiesNotFoundList.hasMoreElements()) { status = new String("HTTP/1.1 " + WebdavStatus.SC_NOT_FOUND + " " + WebdavStatus.getStatusText(WebdavStatus.SC_NOT_FOUND)); generatedXML.writeElement("DAV::propstat", XMLWriter.OPENING); generatedXML.writeElement("DAV::prop", XMLWriter.OPENING); while (propertiesNotFoundList.hasMoreElements()) { generatedXML.writeElement((String) propertiesNotFoundList .nextElement(), XMLWriter.NO_CONTENT); } generatedXML.writeElement("DAV::prop", XMLWriter.CLOSING); generatedXML.writeElement("DAV::status", XMLWriter.OPENING); generatedXML.writeText(status); generatedXML.writeElement("DAV::status", XMLWriter.CLOSING); generatedXML.writeElement("DAV::propstat", XMLWriter.CLOSING); } break; } generatedXML.writeElement("DAV::response", XMLWriter.CLOSING); so = null; } private void writeSupportedLockElements(ITransaction transaction, XMLWriter generatedXML, String path) { LockedObject lo = _resourceLocks.getLockedObjectByPath(transaction, path); generatedXML.writeElement("DAV::supportedlock", XMLWriter.OPENING); if (lo == null) { // both locks (shared/exclusive) can be granted generatedXML.writeElement("DAV::lockentry", XMLWriter.OPENING); generatedXML.writeElement("DAV::lockscope", XMLWriter.OPENING); generatedXML.writeElement("DAV::exclusive", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::lockscope", XMLWriter.CLOSING); generatedXML.writeElement("DAV::locktype", XMLWriter.OPENING); generatedXML.writeElement("DAV::write", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::locktype", XMLWriter.CLOSING); generatedXML.writeElement("DAV::lockentry", XMLWriter.CLOSING); generatedXML.writeElement("DAV::lockentry", XMLWriter.OPENING); generatedXML.writeElement("DAV::lockscope", XMLWriter.OPENING); generatedXML.writeElement("DAV::shared", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::lockscope", XMLWriter.CLOSING); generatedXML.writeElement("DAV::locktype", XMLWriter.OPENING); generatedXML.writeElement("DAV::write", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::locktype", XMLWriter.CLOSING); generatedXML.writeElement("DAV::lockentry", XMLWriter.CLOSING); } else { // LockObject exists, checking lock state // if an exclusive lock exists, no further lock is possible if (lo.isShared()) { generatedXML.writeElement("DAV::lockentry", XMLWriter.OPENING); generatedXML.writeElement("DAV::lockscope", XMLWriter.OPENING); 
generatedXML.writeElement("DAV::shared", XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::lockscope", XMLWriter.CLOSING); generatedXML.writeElement("DAV::locktype", XMLWriter.OPENING); generatedXML.writeElement("DAV::" + lo.getType(), XMLWriter.NO_CONTENT); generatedXML.writeElement("DAV::locktype", XMLWriter.CLOSING); generatedXML.writeElement("DAV::lockentry", XMLWriter.CLOSING); } } generatedXML.writeElement("DAV::supportedlock", XMLWriter.CLOSING); lo = null; } private void writeLockDiscoveryElements(ITransaction transaction, XMLWriter generatedXML, String path) { LockedObject lo = _resourceLocks.getLockedObjectByPath(transaction, path); if (lo != null && !lo.hasExpired()) { generatedXML.writeElement("DAV::lockdiscovery", XMLWriter.OPENING); generatedXML.writeElement("DAV::activelock", XMLWriter.OPENING); generatedXML.writeElement("DAV::locktype", XMLWriter.OPENING); generatedXML.writeProperty("DAV::" + lo.getType()); generatedXML.writeElement("DAV::locktype", XMLWriter.CLOSING); generatedXML.writeElement("DAV::lockscope", XMLWriter.OPENING); if (lo.isExclusive()) { generatedXML.writeProperty("DAV::exclusive"); } else { generatedXML.writeProperty("DAV::shared"); } generatedXML.writeElement("DAV::lockscope", XMLWriter.CLOSING); generatedXML.writeElement("DAV::depth", XMLWriter.OPENING); if (_depth == INFINITY) { generatedXML.writeText("Infinity"); } else { generatedXML.writeText(String.valueOf(_depth)); } generatedXML.writeElement("DAV::depth", XMLWriter.CLOSING); String[] owners = lo.getOwner(); if (owners != null) { for (int i = 0; i < owners.length; i++) { generatedXML.writeElement("DAV::owner", XMLWriter.OPENING); generatedXML.writeElement("DAV::href", XMLWriter.OPENING); generatedXML.writeText(owners[i]); generatedXML.writeElement("DAV::href", XMLWriter.CLOSING); generatedXML.writeElement("DAV::owner", XMLWriter.CLOSING); } } else { generatedXML.writeElement("DAV::owner", XMLWriter.NO_CONTENT); } int timeout = (int) (lo.getTimeoutMillis() / 1000); String timeoutStr = new Integer(timeout).toString(); generatedXML.writeElement("DAV::timeout", XMLWriter.OPENING); generatedXML.writeText("Second-" + timeoutStr); generatedXML.writeElement("DAV::timeout", XMLWriter.CLOSING); String lockToken = lo.getID(); generatedXML.writeElement("DAV::locktoken", XMLWriter.OPENING); generatedXML.writeElement("DAV::href", XMLWriter.OPENING); generatedXML.writeText("opaquelocktoken:" + lockToken); generatedXML.writeElement("DAV::href", XMLWriter.CLOSING); generatedXML.writeElement("DAV::locktoken", XMLWriter.CLOSING); generatedXML.writeElement("DAV::activelock", XMLWriter.CLOSING); generatedXML.writeElement("DAV::lockdiscovery", XMLWriter.CLOSING); } else { generatedXML.writeElement("DAV::lockdiscovery", XMLWriter.NO_CONTENT); } lo = null; } }
def is_ignored(release_info, section_name="filter_ignore"):
    # Relies on module-level names (re, pattern_season, app, log) defined
    # elsewhere in this module.
    release_name = release_info.release_name.lower()
    if any(pattern.search(release_name) for pattern in pattern_season):
        return True
    for key in app.config.options(section_name):
        value = app.config.get_default(section_name, key, None)
        if value is None:
            continue
        if key.startswith("string"):
            if value.lower() in release_name:
                log.debug("Matched string ignore pattern {0} {1}".format(key, release_name))
                return True
        elif key.startswith("rx"):
            if re.match(value, release_name, re.I):
                log.debug("Matched regex ignore pattern {0} {1}".format(key, release_name))
                return True
        else:
            log.warning("Invalid ignore configuration key found: {0}".format(key))
    return False
import pyper
import pandas as pd

# Read the CSV data in Python
wine = pd.read_csv("wine.csv")

# Create an R instance
r = pyper.R(use_pandas='True')

# Pass the Python object to R
r.assign("data", wine)

# Run the R source file
r("source(file='scatter.R')")

print("wine")

# Execute R code
r("res1 = cor.test(data$WRAIN, data$LPRICE2)")
r("data1 = subset(data, LPRICE2 < 0)")
r("res2 = cor.test(data1$WRAIN, data1$LPRICE2)")

# Read the R objects back in Python
res1 = pd.Series(r.get("res1"))
res2 = pd.Series(r.get("res2"))

print(res1)
"""
statistic 0.680681
parameter 25
p.value 0.50233
estimate 0.134892
null.value 0
alternative two.sided
method Pearson's product-moment correlation
data.name data$WRAIN and data$LPRICE2
conf.int [-0.258366126613384, 0.489798400688013]
dtype: object
"""

print(res2)
"""
statistic -0.129213
parameter 24
p.value 0.898266
estimate -0.0263663
null.value 0
alternative two.sided
method Pearson's product-moment correlation
data.name data1$WRAIN and data1$LPRICE2
conf.int [-0.409535600260672, 0.364710477639889]
dtype: object
"""
// NukeKeysForSecret blows away all keys for a given secret
// due to the fact that they all need to be updated when a secret is updated.
func NukeKeysForSecret(secret Secret) error {
	_, err := db.DeleteFrom("keys").
		Where("secret = $1", secret.ID).
		Exec()
	return err
}
Re: DSA hits 25K members - Largest Socialist group in the country since WWII
« Reply #11 on: August 06, 2017, 12:41:27 pm »

Quote from: Famous Mortimer on August 06, 2017, 01:24:04 am

    Quote from: Badger on August 06, 2017, 01:00:20 am

        Quote from: Famous Mortimer on August 05, 2017, 05:26:04 pm

            Big news from the convention so far: DSA has voted to leave the Socialist International. I have mixed feelings about this. On a practical level, the SI barely exists anymore, it costs money, and there's no benefit to being a member. On an ideological level, though, I don't like what this represents at all. It represents a rejection of social democracy and a rejection of serious, mainstream politics. So many of the new DSA members call themselves anarchists or anarcho-communists. I wonder why they even joined the DSA at all. The DSA has always existed to promote the creation of a European-style social democratic party. If you don't think that's desirable, why not join one of the other, more radical parties already in existence, like the Party for Socialism and Liberation or the Socialist Party USA (which has better branding anyway)?

        Not good for being taken seriously. On anything. When your leftist party essentially decides people like Jeremy Corbyn are too mainstream to emulate or at least work with, you're fu@×ed.

    In fairness to the DSA, most members are willing to work with people like Corbyn. They just consider him the most right-wing person they would be willing to work with.

The convention hall broke into "Oh Jeremy Corbyn" chants multiple times, what the f**K are you talking about?
The Anti Austerity Alliance (AAA) was refused a permit to collect money because of its involvement in protests which led to public disorder, a Garda chief superintendent has said.

Chief Supt Orla McPartlin said the alliance had been involved in protests which had led to arrests, and some of those arrested were members of the organisation.

Ms McPartlin first refused the alliance a permit to collect in the Tallaght/Jobstown area last month. She cited section 9(c) of the Street and House to House Collection Act 1962 as the basis for her decision.

The Act states that a "chief superintendent shall not grant a collection permit for any collection in respect of which (s)he is of opinion that . . . proceeds of the collection or any portion thereof would be used in such a manner as to encourage, either directly or indirectly, the commission of an unlawful act".

No further explanation

She gave no further explanation. Following a letter from AAA councillor Mike Murphy, Ms McPartlin elaborated on her earlier decision.

She explained: "Previous protests in my division, as participated in by the Anti Austerity Alliance, have resulted in persons being arrested for public order offences, and indeed some of those arrested persons were Anti Austerity Alliance personnel.

"The collection permit has been refused because I believe that the proceeds of the collection or a portion thereof would be used to facilitate protests sponsored by the Anti Austerity Alliance. I believe any further protests within my division would see further public order offences being committed."

The letter was posted on the Facebook page of Anti Austerity Alliance TD Paul Murphy, who claimed it was proof of "blatant political policing". He suggested the refusal by Ms McPartlin was related to the anti-water charges protests in Jobstown, in which Tánaiste Joan Burton was detained in her car for several hours. He said it had been leaked to the media that 23 people would be charged over the Jobstown protest, but "we're still waiting".

'Nobody charged yet'

"Bear in mind that nobody has been charged yet over Jobstown, never mind convicted. Innocent until proven guilty - unless you're trying to build an anti-austerity movement to end the rule of the 1 per cent?" he wrote.

"We will be appealing this blatant political policing all the way through the courts."

Separately, Garda Commissioner Nóirín O'Sullivan said Ms McPartlin's decision was a matter for the officer involved. "I wouldn't second guess a decision of a chief superintendent," she said.

The alliance has three Dáil deputies, including Paul Murphy. He is set to face criminal charges for his alleged role in a protest last November, when Tánaiste Joan Burton was trapped in her car for several hours by anti-water charge protesters.
package logplus

import (
	"fmt"
)

// LogMessage couples a formatted message with the color it should be
// rendered in.
type LogMessage struct {
	message string
	color   Color
}

// NewFormattedLogMessage builds a LogMessage from a printf-style format
// string and its arguments.
func NewFormattedLogMessage(color Color, format string, log ...interface{}) *LogMessage {
	return &LogMessage{
		message: fmt.Sprintf(format, log...),
		color:   color,
	}
}

// NewLogMessage builds a LogMessage by concatenating its arguments.
func NewLogMessage(color Color, log ...interface{}) *LogMessage {
	return &LogMessage{
		message: fmt.Sprint(log...),
		color:   color,
	}
}
More than 40 percent of homeowners seeking help from the Obama administration's flagship effort to rescue those at risk of foreclosure have dropped out of the program. The latest report on the program suggests foreclosures could rise in the second half of the year and weaken an ailing housing market.

About 530,000 borrowers had fallen out of the program as of last month, the Treasury Department said Tuesday. Nearly 1.3 million homeowners had enrolled since March 2009. Treasury officials say few of these borrowers will wind up in foreclosure, but many analysts are concerned that a new wave of foreclosures could greatly impact the struggling housing industry.

Another 390,000 homeowners, or 30 percent of those who started the program, have received permanent loan modifications and are making payments on time.

A major reason so many have fallen out of the program is that the Obama administration initially pressured banks to sign up borrowers without insisting first on proof of their income. When banks later moved to collect the information, many troubled homeowners were disqualified or dropped out.

Many borrowers complain of a bureaucratic nightmare. They say banks often lose their documents and then claim borrowers did not send back the necessary paperwork. The banking industry, for its part, said borrowers weren't sending back the necessary paperwork.

The Obama plan was designed to help people in financial trouble by lowering their monthly mortgage payments. Homeowners who qualify can receive an interest rate as low as 2 percent for five years and a longer repayment period. Monthly payments have been cut by about $500 on average.

The homeowners receive temporary modifications, which are supposed to become permanent after borrowers make three payments on time and complete the required paperwork. That includes proof of income and a letter explaining the reason for their troubles. In practice, though, the process has taken far longer.

The more than 100 participating mortgage companies get taxpayer incentives to reduce payments. But as of mid-May only $132 million had been spent out of a potential $75 billion, according to the Government Accountability Office.

Though the program has been widely criticized for making only a small dent in the foreclosure crisis, administration officials defend their efforts. They say the foreclosure prevention program has spurred changes in the mortgage industry, prodding lenders to make more significant cuts to borrowers' monthly payments than before the government effort started.

© Copyright 2019 The Associated Press. All rights reserved. This material may not be published, broadcast, rewritten or redistributed.
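To see how a rate cut of that size yields savings on the order quoted, consider an illustrative calculation using the standard amortization formula (the $200,000 balance and fixed 30-year term are assumed for illustration only and do not come from the article):

M = P\,\frac{r(1+r)^n}{(1+r)^n - 1}, \qquad r = \text{monthly rate},\ n = 360

\text{At } 6\%\ (r = 0.005):\ M \approx \$1{,}199; \qquad \text{at } 2\%\ (r \approx 0.00167):\ M \approx \$739

That is a reduction of roughly $460 a month, consistent with the article's reported $500 average.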
export interface IgnoreOptions {
  reset?: boolean;
}
def plot_recordings(self, data): plot_recording(data.T, self.fs)
async def content_as_bytes(self, max_connections=1):
    """Download the blob's content and return it as bytes."""
    stream = BytesIO()
    await self.download_to_stream(stream, max_connections=max_connections)
    return stream.getvalue()
package project import ( "bytes" "fmt" "os" "path/filepath" "runtime" "github.com/pkg/errors" "github.com/spf13/cobra" "github.com/mutagen-io/mutagen/cmd" "github.com/mutagen-io/mutagen/cmd/mutagen/daemon" "github.com/mutagen-io/mutagen/cmd/mutagen/forward" "github.com/mutagen-io/mutagen/cmd/mutagen/sync" "github.com/mutagen-io/mutagen/pkg/configuration/global" "github.com/mutagen-io/mutagen/pkg/filesystem/locking" "github.com/mutagen-io/mutagen/pkg/forwarding" "github.com/mutagen-io/mutagen/pkg/identifier" "github.com/mutagen-io/mutagen/pkg/project" "github.com/mutagen-io/mutagen/pkg/selection" forwardingsvc "github.com/mutagen-io/mutagen/pkg/service/forwarding" synchronizationsvc "github.com/mutagen-io/mutagen/pkg/service/synchronization" "github.com/mutagen-io/mutagen/pkg/synchronization" "github.com/mutagen-io/mutagen/pkg/url" ) // startMain is the entry point for the start command. func startMain(_ *cobra.Command, _ []string) error { // Compute the name of the configuration file and ensure that our working // directory is that in which the file resides. This is required for // relative paths (including relative synchronization paths and relative // Unix Domain Socket paths) to be resolved relative to the project // configuration file. configurationFileName := project.DefaultConfigurationFileName if startConfiguration.projectFile != "" { var directory string directory, configurationFileName = filepath.Split(startConfiguration.projectFile) if directory != "" { if err := os.Chdir(directory); err != nil { return errors.Wrap(err, "unable to switch to target directory") } } } // Compute the lock path. lockPath := configurationFileName + project.LockFileExtension // Track whether or not we should remove the lock file on return. var removeLockFileOnReturn bool // Create a locker and defer its closure and potential removal. On Windows // systems, we have to handle this removal after the file is closed. locker, err := locking.NewLocker(lockPath, 0600) if err != nil { return errors.Wrap(err, "unable to create project locker") } defer func() { locker.Close() if removeLockFileOnReturn && runtime.GOOS == "windows" { os.Remove(lockPath) } }() // Acquire the project lock and defer its release and potential removal. On // Windows systems, we can't remove the lock file if it's locked or even // just opened, so we handle removal for Windows systems after we close the // lock file (see above). In this case, we truncate the lock file before // releasing it to ensure that any other process that opens or acquires the // lock file before we manage to remove it will simply see an empty lock // file, which it will ignore or attempt to remove. if err := locker.Lock(true); err != nil { return errors.Wrap(err, "unable to acquire project lock") } defer func() { if removeLockFileOnReturn { if runtime.GOOS == "windows" { locker.Truncate(0) } else { os.Remove(lockPath) } } locker.Unlock() }() // Read the full contents of the lock file and ensure that it's empty. buffer := &bytes.Buffer{} if length, err := buffer.ReadFrom(locker); err != nil { return errors.Wrap(err, "unable to read project lock") } else if length != 0 { return errors.New("project already running") } // At this point we know that there was no previous project running, but we // haven't yet created any resources, so defer removal of the lock file that // we've created in case we run into any errors loading configuration // information. removeLockFileOnReturn = true // Create a unique project identifier. 
identifier, err := identifier.New(identifier.PrefixProject) if err != nil { return errors.Wrap(err, "unable to generate project identifier") } // Write the project identifier to the lock file. if _, err := locker.Write([]byte(identifier)); err != nil { return errors.Wrap(err, "unable to write project identifier") } // Load the configuration file. configuration, err := project.LoadConfiguration(configurationFileName) if err != nil { return errors.Wrap(err, "unable to load configuration file") } // Unless disabled, attempt to load configuration from the global // configuration file and use it as the base for our core session // configurations. globalConfigurationForwarding := &forwarding.Configuration{} globalConfigurationSynchronization := &synchronization.Configuration{} if !startConfiguration.noGlobalConfiguration { // Compute the path to the global configuration file. globalConfigurationPath, err := global.ConfigurationPath() if err != nil { return errors.Wrap(err, "unable to compute path to global configuration file") } // Attempt to load and validate the file. We allow it to not exist. globalConfiguration, err := global.LoadConfiguration(globalConfigurationPath) if err != nil { if !os.IsNotExist(err) { return errors.Wrap(err, "unable to load global configuration") } } else { globalConfigurationForwarding = globalConfiguration.Forwarding.Defaults.Configuration() if err := globalConfigurationForwarding.EnsureValid(false); err != nil { return errors.Wrap(err, "invalid global forwarding configuration") } globalConfigurationSynchronization = globalConfiguration.Synchronization.Defaults.Configuration() if err := globalConfigurationSynchronization.EnsureValid(false); err != nil { return errors.Wrap(err, "invalid global synchronization configuration") } } } // Extract and validate forwarding defaults. var defaultSource, defaultDestination string defaultConfigurationForwarding := &forwarding.Configuration{} defaultConfigurationSource := &forwarding.Configuration{} defaultConfigurationDestination := &forwarding.Configuration{} if defaults, ok := configuration.Forwarding["defaults"]; ok { defaultSource = defaults.Source defaultDestination = defaults.Destination defaultConfigurationForwarding = defaults.Configuration.Configuration() if err := defaultConfigurationForwarding.EnsureValid(false); err != nil { return errors.Wrap(err, "invalid default forwarding configuration") } defaultConfigurationSource = defaults.ConfigurationSource.Configuration() if err := defaultConfigurationSource.EnsureValid(true); err != nil { return errors.Wrap(err, "invalid default forwarding source configuration") } defaultConfigurationDestination = defaults.ConfigurationDestination.Configuration() if err := defaultConfigurationDestination.EnsureValid(true); err != nil { return errors.Wrap(err, "invalid default forwarding destination configuration") } } // Extract and validate synchronization defaults. 
var defaultAlpha, defaultBeta string var defaultFlushOnCreate project.FlushOnCreateBehavior defaultConfigurationSynchronization := &synchronization.Configuration{} defaultConfigurationAlpha := &synchronization.Configuration{} defaultConfigurationBeta := &synchronization.Configuration{} if defaults, ok := configuration.Synchronization["defaults"]; ok { defaultAlpha = defaults.Alpha defaultBeta = defaults.Beta defaultFlushOnCreate = defaults.FlushOnCreate defaultConfigurationSynchronization = defaults.Configuration.Configuration() if err := defaultConfigurationSynchronization.EnsureValid(false); err != nil { return errors.Wrap(err, "invalid default synchronization configuration") } defaultConfigurationAlpha = defaults.ConfigurationAlpha.Configuration() if err := defaultConfigurationAlpha.EnsureValid(true); err != nil { return errors.Wrap(err, "invalid default synchronization alpha configuration") } defaultConfigurationBeta = defaults.ConfigurationBeta.Configuration() if err := defaultConfigurationBeta.EnsureValid(true); err != nil { return errors.Wrap(err, "invalid default synchronization beta configuration") } } // Merge global and default configurations, with defaults taking priority. defaultConfigurationForwarding = forwarding.MergeConfigurations( globalConfigurationForwarding, defaultConfigurationForwarding, ) defaultConfigurationSynchronization = synchronization.MergeConfigurations( globalConfigurationSynchronization, defaultConfigurationSynchronization, ) // Generate forward session creation specifications. var forwardingSpecifications []*forwardingsvc.CreationSpecification for name, session := range configuration.Forwarding { // Ignore defaults. if name == "defaults" { continue } // Verify that the name is valid. if err := selection.EnsureNameValid(name); err != nil { return errors.Errorf("invalid forwarding session name (%s): %v", name, err) } // Compute URLs. source := session.Source if source == "" { source = defaultSource } destination := session.Destination if destination == "" { destination = defaultDestination } // Parse URLs. sourceURL, err := url.Parse(source, url.Kind_Forwarding, true) if err != nil { return errors.Errorf("unable to parse forwarding source URL (%s): %v", source, err) } destinationURL, err := url.Parse(destination, url.Kind_Forwarding, false) if err != nil { return errors.Errorf("unable to parse forwarding destination URL (%s): %v", destination, err) } // Compute configuration. configuration := session.Configuration.Configuration() if err := configuration.EnsureValid(false); err != nil { return errors.Errorf("invalid forwarding session configuration for %s: %v", name, err) } configuration = forwarding.MergeConfigurations(defaultConfigurationForwarding, configuration) // Compute source-specific configuration. sourceConfiguration := session.ConfigurationSource.Configuration() if err := sourceConfiguration.EnsureValid(true); err != nil { return errors.Errorf("invalid forwarding session source configuration for %s: %v", name, err) } sourceConfiguration = forwarding.MergeConfigurations(defaultConfigurationSource, sourceConfiguration) // Compute destination-specific configuration. destinationConfiguration := session.ConfigurationDestination.Configuration() if err := destinationConfiguration.EnsureValid(true); err != nil { return errors.Errorf("invalid forwarding session destination configuration for %s: %v", name, err) } destinationConfiguration = forwarding.MergeConfigurations(defaultConfigurationDestination, destinationConfiguration) // Record the specification. 
forwardingSpecifications = append(forwardingSpecifications, &forwardingsvc.CreationSpecification{ Source: sourceURL, Destination: destinationURL, Configuration: configuration, ConfigurationSource: sourceConfiguration, ConfigurationDestination: destinationConfiguration, Name: name, Labels: map[string]string{ project.LabelKey: identifier, }, Paused: startConfiguration.paused, }) } // Generate synchronization session creation specifications and keep track // of those that we should flush on creation. var synchronizationSpecifications []*synchronizationsvc.CreationSpecification var flushOnCreateByIndex []bool for name, session := range configuration.Synchronization { // Ignore defaults. if name == "defaults" { continue } // Verify that the name is valid. if err := selection.EnsureNameValid(name); err != nil { return errors.Errorf("invalid synchronization session name (%s): %v", name, err) } // Compute URLs. alpha := session.Alpha if alpha == "" { alpha = defaultAlpha } beta := session.Beta if beta == "" { beta = defaultBeta } // Parse URLs. alphaURL, err := url.Parse(alpha, url.Kind_Synchronization, true) if err != nil { return errors.Errorf("unable to parse synchronization alpha URL (%s): %v", alpha, err) } betaURL, err := url.Parse(beta, url.Kind_Synchronization, false) if err != nil { return errors.Errorf("unable to parse synchronization beta URL (%s): %v", beta, err) } // Compute configuration. configuration := session.Configuration.Configuration() if err := configuration.EnsureValid(false); err != nil { return errors.Errorf("invalid synchronization session configuration for %s: %v", name, err) } configuration = synchronization.MergeConfigurations(defaultConfigurationSynchronization, configuration) // Compute alpha-specific configuration. alphaConfiguration := session.ConfigurationAlpha.Configuration() if err := alphaConfiguration.EnsureValid(true); err != nil { return errors.Errorf("invalid synchronization session alpha configuration for %s: %v", name, err) } alphaConfiguration = synchronization.MergeConfigurations(defaultConfigurationAlpha, alphaConfiguration) // Compute beta-specific configuration. betaConfiguration := session.ConfigurationBeta.Configuration() if err := betaConfiguration.EnsureValid(true); err != nil { return errors.Errorf("invalid synchronization session beta configuration for %s: %v", name, err) } betaConfiguration = synchronization.MergeConfigurations(defaultConfigurationBeta, betaConfiguration) // Record the specification. synchronizationSpecifications = append(synchronizationSpecifications, &synchronizationsvc.CreationSpecification{ Alpha: alphaURL, Beta: betaURL, Configuration: configuration, ConfigurationAlpha: alphaConfiguration, ConfigurationBeta: betaConfiguration, Name: name, Labels: map[string]string{ project.LabelKey: identifier, }, Paused: startConfiguration.paused, }) // Compute and store flush-on-creation behavior. if session.FlushOnCreate.IsDefault() { flushOnCreateByIndex = append(flushOnCreateByIndex, defaultFlushOnCreate.FlushOnCreate()) } else { flushOnCreateByIndex = append(flushOnCreateByIndex, session.FlushOnCreate.FlushOnCreate()) } } // Connect to the daemon and defer closure of the connection. daemonConnection, err := daemon.Connect(true, true) if err != nil { return errors.Wrap(err, "unable to connect to daemon") } defer daemonConnection.Close() // At this point, we're going to try to create resources, so we need to // maintain the lock file in case even some of them are successful. 
removeLockFileOnReturn = false // Perform pre-creation commands. for _, command := range configuration.BeforeCreate { fmt.Println(">", command) if err := runInShell(command); err != nil { return errors.Wrap(err, "pre-create command failed") } } // Create forwarding sessions. for _, specification := range forwardingSpecifications { if _, err := forward.CreateWithSpecification(daemonConnection, specification); err != nil { return errors.Errorf("unable to create forwarding session (%s): %v", specification.Name, err) } } // Create synchronization sessions and track those that we should flush. var sessionsToFlush []string for s, specification := range synchronizationSpecifications { // Perform session creation. session, err := sync.CreateWithSpecification(daemonConnection, specification) if err != nil { return errors.Errorf("unable to create synchronization session (%s): %v", specification.Name, err) } // Determine whether or not to flush this session. if !startConfiguration.paused && flushOnCreateByIndex[s] { sessionsToFlush = append(sessionsToFlush, session) } } // Flush synchronization sessions for which flushing has been requested. if len(sessionsToFlush) > 0 { flushSelection := &selection.Selection{Specifications: sessionsToFlush} if err := sync.FlushWithSelection(daemonConnection, flushSelection, false); err != nil { return errors.Wrap(err, "unable to flush synchronization session(s)") } } // Perform post-creation commands. for _, command := range configuration.AfterCreate { fmt.Println(">", command) if err := runInShell(command); err != nil { return errors.Wrap(err, "post-create command failed") } } // Success. return nil } // startCommand is the start command. var startCommand = &cobra.Command{ Use: "start", Short: "Start project sessions", Args: cmd.DisallowArguments, RunE: startMain, SilenceUsage: true, } // startConfiguration stores configuration for the start command. var startConfiguration struct { // help indicates whether or not to show help information and exit. help bool // projectFile is the path to the project file, if non-default. projectFile string // paused indicates whether or not to create sessions in a pre-paused state. paused bool // noGlobalConfiguration specifies whether or not the global configuration // file should be ignored. noGlobalConfiguration bool } func init() { // Grab a handle for the command line flags. flags := startCommand.Flags() // Disable alphabetical sorting of flags in help output. flags.SortFlags = false // Manually add a help flag to override the default message. Cobra will // still implement its logic automatically. flags.BoolVarP(&startConfiguration.help, "help", "h", false, "Show help information") // Wire up project file flags. flags.StringVarP(&startConfiguration.projectFile, "project-file", "f", "", "Specify project file") // Wire up paused flags. flags.BoolVarP(&startConfiguration.paused, "paused", "p", false, "Create the session pre-paused") // Wire up general configuration flags. flags.BoolVar(&startConfiguration.noGlobalConfiguration, "no-global-configuration", false, "Ignore the global configuration file") }
export const version = "hash/5.4.0";
/**
 * Receives data sent from GeocodeIntentService and updates the UI in MainActivity.
 */
@Override
protected void onReceiveResult(int resultCode, Bundle resultData) {
    if (getActivity() == null) {
        return;
    }
    Log.v(LOG_TAG, "onReceiveResult, hashCode=" + this.hashCode() + ", "
            + "resultCode = [" + resultCode + "], resultData = [" + resultData + "]");
    if (GeocodeIntentService.SUCCESS_RESULT == resultCode) {
        if (resultData.containsKey(GeocodeIntentService.RESULT_ADDRESS_KEY)) {
            Address address = resultData.getParcelable(GeocodeIntentService.RESULT_ADDRESS_KEY);
            if (address == null) {
                Log.e(LOG_TAG, "onReceiveResult: address result = NULL!!!");
            } else {
                mLocationInput = Utils.getLocationInput(address.getLatitude(), address.getLongitude());
                mEditLocation.setText(mLocationInput);
                mLocationAddresses = new Address[]{address};
                mLocationListAdapter.clear();
                mLocationListAdapter.addAll(mLocationAddresses);
                mPosition = 0;
                simulateClick();
            }
        } else if (resultData.containsKey(GeocodeIntentService.RESULT_ADDRESSES_KEY)) {
            mLocationAddresses = Utils.castParcelableArray(Address.class,
                    resultData.getParcelableArray(GeocodeIntentService.RESULT_ADDRESSES_KEY));
            mLocationListAdapter.clear();
            mLocationListAdapter.addAll(mLocationAddresses);
            if (mLocationAddresses != null && mLocationAddresses.length == 1) {
                mPosition = 0;
                simulateClick();
            }
        } else {
            Log.e(LOG_TAG, "onReceiveResult - SUCCESS without address - should never happen...");
        }
    } else {
        if (mLastLocation != null) {
            if (Utils.isNetworkUnavailable(getActivity())) {
                Toast.makeText(getActivity(), R.string.msg_service_network_not_available, Toast.LENGTH_LONG).show();
                updateLocationText();
            } else if (GeocodeIntentService.FAILURE_SERVICE_NOT_AVAILABLE == resultCode) {
                Toast.makeText(getActivity(), R.string.msg_service_not_available, Toast.LENGTH_LONG).show();
                updateLocationText();
            }
        } else {
            int toastRes;
            switch (resultCode) {
                case GeocodeIntentService.FAILURE_INVALID_LAT_LONG_USED:
                    toastRes = R.string.msg_invalid_lat_long;
                    break;
                case GeocodeIntentService.FAILURE_NO_LOCATION_DATA_PROVIDED:
                    toastRes = R.string.msg_no_location_provided;
                    break;
                default:
                    return;
            }
            Toast.makeText(getActivity(), toastRes, Toast.LENGTH_LONG).show();
        }
    }
}
<reponame>mmaquevice/harbor import { async, ComponentFixture, TestBed } from '@angular/core/testing'; import { InlineAlertComponent } from "../../shared/inline-alert/inline-alert.component"; import { ImmutableTagComponent } from './immutable-tag.component'; import { ClarityModule } from '@clr/angular'; import { FormsModule } from '@angular/forms'; import { AddRuleComponent } from './add-rule/add-rule.component'; import { CUSTOM_ELEMENTS_SCHEMA } from '@angular/core'; import { TranslateModule } from '@ngx-translate/core'; import { ImmutableTagService } from './immutable-tag.service'; import { HttpClientTestingModule } from '@angular/common/http/testing'; import { NoopAnimationsModule } from "@angular/platform-browser/animations"; import { ActivatedRoute } from '@angular/router'; import { of, throwError } from 'rxjs'; import { ErrorHandler, DefaultErrorHandler, clone } from '@harbor/ui'; describe('ImmutableTagComponent', () => { let component: ImmutableTagComponent; let addRuleComponent: AddRuleComponent; let immutableTagService: ImmutableTagService; let errorHandler: ErrorHandler; let fixture: ComponentFixture<ImmutableTagComponent>; let fixtureAddrule: ComponentFixture<AddRuleComponent>; let mockMetadata = { "templates": [ { "rule_template": "latestPushedK", "display_text": "the most recently pushed # images", "action": "retain", "params": [ { "type": "int", "unit": "COUNT", "required": true } ] }, { "rule_template": "latestPulledN", "display_text": "the most recently pulled # images", "action": "retain", "params": [ { "type": "int", "unit": "COUNT", "required": true } ] }, { "rule_template": "nDaysSinceLastPush", "display_text": "pushed within the last # days", "action": "retain", "params": [ { "type": "int", "unit": "DAYS", "required": true } ] }, { "rule_template": "nDaysSinceLastPull", "display_text": "pulled within the last # days", "action": "retain", "params": [ { "type": "int", "unit": "DAYS", "required": true } ] }, { "rule_template": "always", "display_text": "always", "action": "retain", "params": [] } ], "scope_selectors": [ { "display_text": "Repositories", "kind": "doublestar", "decorations": [ "repoMatches", "repoExcludes" ] } ], "tag_selectors": [ { "display_text": "Tags", "kind": "doublestar", "decorations": [ "matches", "excludes" ] } ] }; let mockRules = [ { "id": 1, "project_id": 1, "disabled": false, "priority": 0, "action": "immutable", "template": "immutable_template", "tag_selectors": [ { "kind": "doublestar", "decoration": "matches", "pattern": "**" } ], "scope_selectors": { "repository": [ { "kind": "doublestar", "decoration": "repoMatches", "pattern": "**" } ] } }, { "id": 2, "project_id": 1, "disabled": false, "priority": 0, "action": "immutable", "template": "immutable_template", "tag_selectors": [ { "kind": "doublestar", "decoration": "matches", "pattern": "44" } ], "scope_selectors": { "repository": [ { "kind": "doublestar", "decoration": "repoMatches", "pattern": "**" } ] } }, { "id": 3, "project_id": 1, "disabled": false, "priority": 0, "action": "immutable", "template": "immutable_template", "tag_selectors": [ { "kind": "doublestar", "decoration": "matches", "pattern": "555" } ], "scope_selectors": { "repository": [ { "kind": "doublestar", "decoration": "repoMatches", "pattern": "**" } ] } }, { "id": 4, "project_id": 1, "disabled": false, "priority": 0, "action": "immutable", "template": "immutable_template", "tag_selectors": [ { "kind": "doublestar", "decoration": "matches", "pattern": "fff**" } ], "scope_selectors": { "repository": [ { "kind": 
"doublestar", "decoration": "repoMatches", "pattern": "**ggg" } ] } } ]; let cloneRule = clone(mockRules[0]); cloneRule.tag_selectors[0].pattern = 'rep'; let cloneRuleNoId = clone(mockRules[0]); cloneRuleNoId.id = null; let cloneDisableRule = clone(mockRules[0]); cloneDisableRule.disabled = true; beforeEach(async(() => { TestBed.configureTestingModule({ declarations: [ImmutableTagComponent, AddRuleComponent, InlineAlertComponent], schemas: [ CUSTOM_ELEMENTS_SCHEMA ], imports: [ NoopAnimationsModule, ClarityModule, FormsModule, HttpClientTestingModule, TranslateModule.forRoot() ], providers: [ ImmutableTagService, { provide: ActivatedRoute, useValue: { paramMap: of({ get: (key) => 'value' }), snapshot: { parent: { params: { id: 1 } }, data: 1 } } }, { provide: ErrorHandler, useClass: DefaultErrorHandler } ] }) .compileComponents(); })); beforeEach(() => { fixture = TestBed.createComponent(ImmutableTagComponent); fixtureAddrule = TestBed.createComponent(AddRuleComponent); component = fixture.componentInstance; addRuleComponent = fixtureAddrule.componentInstance; addRuleComponent.open = () => { return null; }; component.projectId = 1; component.addRuleComponent = TestBed.createComponent(AddRuleComponent).componentInstance; component.addRuleComponent = TestBed.createComponent(AddRuleComponent).componentInstance; component.addRuleComponent.open = () => { return null; }; component.addRuleComponent.inlineAlert = TestBed.createComponent(InlineAlertComponent).componentInstance; immutableTagService = fixture.debugElement.injector.get(ImmutableTagService); errorHandler = fixture.debugElement.injector.get(ErrorHandler); spyOn(immutableTagService, "getRetentionMetadata") .and.returnValue(of(mockMetadata, throwError('error'))); spyOn(immutableTagService, "getRules") .withArgs(component.projectId) .and.returnValue(of(mockRules)) .withArgs(0) .and.returnValue(throwError('error')); spyOn(immutableTagService, "updateRule") .withArgs(component.projectId, mockRules[0]) .and.returnValue(of(null)) .withArgs(component.projectId, cloneRule) .and.returnValue(of(null)) .withArgs(component.projectId, cloneDisableRule) .and.returnValue(of(null)); spyOn(immutableTagService, "deleteRule") .withArgs(component.projectId, mockRules[3].id) .and.returnValue(of(null)); spyOn(immutableTagService, "createRule") .withArgs(component.projectId, cloneRuleNoId) .and.returnValue(of(null)) .withArgs(0, cloneRuleNoId) .and.returnValue(throwError({error: { message: 'error'}})); spyOn(immutableTagService, "getProjectInfo") .withArgs(component.projectId) .and.returnValue(of(null)); spyOn(errorHandler, "error") .and.returnValue(null); fixture.detectChanges(); }); it('should create', () => { expect(component).toBeTruthy(); }); it("should show some rules in page", async(() => { fixture.detectChanges(); fixture.whenStable().then(() => { fixture.detectChanges(); let elRep: HTMLLIElement[] = fixture.nativeElement.querySelectorAll(".rule"); expect(elRep).toBeTruthy(); expect(elRep.length).toEqual(4); }); })); it("should show error in list rule", async(() => { fixture.detectChanges(); component.projectId = 0; component.getRules(); fixture.whenStable().then(() => { fixture.detectChanges(); component.projectId = 1; }); })); it("should toggle disable and enable", async(() => { fixture.detectChanges(); let elRep: HTMLButtonElement = fixture.nativeElement.querySelector("#action0"); elRep.dispatchEvent(new Event('click')); elRep.click(); let elRepDisable: HTMLButtonElement = fixture.nativeElement.querySelector("#disable-btn0"); 
expect(elRepDisable).toBeTruthy(); elRepDisable.dispatchEvent(new Event('click')); elRepDisable.click(); mockRules[0].disabled = true; fixture.whenStable().then(() => { fixture.detectChanges(); let elRepDisableIcon: HTMLButtonElement = fixture.nativeElement.querySelector("#disable-icon0"); expect(elRepDisableIcon).toBeTruthy(); }); })); it("should be deleted", async(() => { fixture.detectChanges(); let elRep: HTMLButtonElement = fixture.nativeElement.querySelector("#action0"); elRep.dispatchEvent(new Event('click')); elRep.click(); let elRepDisable: HTMLButtonElement = fixture.nativeElement.querySelector("#delete-btn3"); expect(elRepDisable).toBeTruthy(); elRepDisable.dispatchEvent(new Event('click')); elRepDisable.click(); let rule = mockRules.pop(); fixture.whenStable().then(() => { fixture.detectChanges(); let elRepRule: HTMLLIElement[] = fixture.nativeElement.querySelectorAll(".rule"); expect(elRepRule.length).toEqual(3); mockRules.push(rule); }); })); it("should be add rule", async(() => { fixture.detectChanges(); component.clickAdd(cloneRuleNoId); mockRules.push(cloneRuleNoId); fixture.whenStable().then(() => { fixture.detectChanges(); let elRepRule: HTMLLIElement[] = fixture.nativeElement.querySelectorAll(".rule"); expect(elRepRule.length).toEqual(5); mockRules.pop(); }); })); it("should be add rule error", async(() => { fixture.detectChanges(); component.projectId = 0; component.clickAdd(cloneRuleNoId); // mockRules.push(cloneRuleNoId); fixture.whenStable().then(() => { fixture.detectChanges(); component.projectId = 1; let elRepRule: HTMLLIElement[] = fixture.nativeElement.querySelectorAll(".rule"); expect(elRepRule.length).toEqual(4); // mockRules.pop(); }); })); it("should be edit rule ", async(() => { fixture.detectChanges(); component.clickAdd(cloneRule); mockRules[0].tag_selectors[0].pattern = 'rep'; fixture.whenStable().then(() => { fixture.detectChanges(); let elRepRule: HTMLLIElement = fixture.nativeElement.querySelector("#tag-selectors-patten0"); expect(elRepRule.textContent).toEqual('rep'); mockRules[0].tag_selectors[0].pattern = '**'; }); })); it("should be edit rule with no add", async(() => { fixture.detectChanges(); component.addRuleComponent.isAdd = false; component.clickAdd(cloneRule); mockRules[0].tag_selectors[0].pattern = 'rep'; fixture.whenStable().then(() => { fixture.detectChanges(); let elRepRule: HTMLLIElement = fixture.nativeElement.querySelector("#tag-selectors-patten0"); expect(elRepRule.textContent).toEqual('rep'); mockRules[0].tag_selectors[0].pattern = '**'; component.addRuleComponent.isAdd = true; }); })); });
def attach_application_by_name(self, name, msg=None): self._module.action(Application.Action.ATTACH_APPLICATION_BY_NAME, Application.create_value_container(name=name, msg=msg), msg)
import type { NextPage } from 'next'; import Head from 'next/head'; import { useRouter } from 'next/router'; import { useEffect, useState } from 'react'; import Wrapper from '../src/components/ui/Wrapper/wrapper'; import { useAuth } from '../src/hooks/useAuth'; import MapBoxContainer from '../src/components/containers/Map/mapBox.container'; import useUsersMarkers from '../src/hooks/useUsersMarkers'; import Banner from '../src/components/ui/Banner/banner'; import { useTranslation } from 'react-i18next'; import useUsersGeolocation from '../src/hooks/useUsersGeolocation'; import { UserGeolocation } from '../src/models/usersGeolocation'; import { useGeolocationProvider } from '../src/hooks/useGeolocationProvider'; import UsersListGroup from '../src/components/containers/UsersList/usersListGroup'; import PageTitle from '../src/components/ui/PageTitle/PageTitle'; import { MapHolder, MapHolderFooter, } from '../src/components/containers/Map/mapBox.styled'; import Widget from '../src/components/ui/Widget/widget'; import useChartData from '../src/hooks/useChartData'; import { Grid } from '@mui/material'; import Loader from '../src/components/ui/Loader/loader'; import { getSplashScreen } from '../src/components/ui/SplashScreen/SplashScreen.model'; import { SplashScreen } from '../src/components/ui/SplashScreen/SplashScreen'; import ScrollToTop from '../src/components/ui/ScrollToTop/scrollToTop'; const Home: NextPage = () => { const [selectedUser, setSelectedUser] = useState<UserGeolocation | null>( null ); const router = useRouter(); const { t } = useTranslation(); const { user, loading } = useAuth(); const { isLocationAllowed } = useGeolocationProvider(); const usersGeolocation = useUsersGeolocation(); const { statusesData, citiesData } = useChartData(usersGeolocation); const markers = useUsersMarkers(usersGeolocation); useEffect(() => { if (!loading && !user) { router.push('/sign-in'); } }, [user, loading, router]); if (!getSplashScreen()) { return <SplashScreen />; } if (!usersGeolocation) { return <Loader />; } const handleUserClick = (user: UserGeolocation) => setSelectedUser(user); return ( <Wrapper> <Head> <title>whereiam</title> <meta name="description" content="Generated by create next app" /> <link rel="icon" href="/favicon.ico" /> </Head> <main> {!isLocationAllowed ? ( <Banner title={t('dashboard.bannerTitle')} buttonText={t('dashboard.bannerButtonText')} buttonHref="/user/profile" /> ) : ( <> <PageTitle title={t('dashboard.users')} /> <MapHolder> <MapBoxContainer markers={markers} selectedUser={selectedUser} /> <MapHolderFooter> <UsersListGroup users={usersGeolocation} onUserClick={handleUserClick} /> </MapHolderFooter> </MapHolder> <Grid container spacing={2} sx={{ pt: 2 }}> <Grid item lg={6} md={6} sm={12} xs={12}> <Widget data={statusesData} title="Statuses" /> </Grid> <Grid item lg={6} md={6} sm={12} xs={12}> <Widget data={citiesData} title="Cities" /> </Grid> </Grid> </> )} </main> <ScrollToTop /> </Wrapper> ); }; export default Home;
#include<stdio.h> #define INF 1000000000 #define NMAX 200 int dist[NMAX][NMAX], visit[NMAX], r[NMAX]; void Floyd(int); int Dfs(int, int, int, int); int main(void) { int i, j, v, e, vr, s, t, d, res; scanf("%d %d %d", &v, &e, &vr); for (i = 0; i < v; i++) { for (j = 0; j < v; j++) { dist[i][j] = ((i == j) ? 0 : INF); } } for (i = 0; i < vr; i++) { scanf("%d", &r[i]); r[i]--; } for (i = 0; i < e; i++) { scanf("%d %d %d", &s, &t, &d); if (dist[s-1][t-1] > d) dist[s-1][t-1] = dist[t-1][s-1] = d; } Floyd(v); res = Dfs(0, -1, 0, vr); printf("%d\n", res); return 0; } void Floyd(int size) { int k, i, j; for (k = 0; k < size; k++) { for (i = 0; i < size; i++) { for (j = 0; j < size; j++) { if (dist[i][j] > dist[i][k] + dist[k][j]) { dist[i][j] = dist[i][k] + dist[k][j]; } } } } } int Dfs(int n, int prev, int path, int size) { int i, rtemp, res = INF; if (n == size) return path; for (i = 0; i < size; i++) { if (!visit[i]) { visit[i] = 1; if (prev == -1) { rtemp = Dfs(n + 1, i, 0, size); if (res > rtemp) res = rtemp; } else { rtemp = Dfs(n + 1, i, path + dist[r[prev]][r[i]], size); if (res > rtemp) res = rtemp; } visit[i] = 0; } } return res; }
#pragma once #include <string> #include <functional> #include <msw/std/memory.hpp> #include <rubetek/remote_link/channel.hpp> #include <rubetek/remote_link/channel_set.hpp> namespace rubetek { namespace client { struct remote_channels : remote_channel_set<unique_client_id::type> { typedef std::function<void(unique_channel_id_type, std::string const& ip, unique_client_id::const_reference, remote_link_interface<>*)> channel_open; remote_channels ( channel_open channel_open , channel_close channel_close , send_payload send_payload ) : remote_channel_set<unique_client_id::type> ( channel_close, send_payload ) , channel_open_ ( channel_open ) {} void on_open(unique_channel_id_type channel_id, unique_client_id::const_reference rem_id, std::string const& ip) { if (channel_by_remote_id_list_.find(rem_id) != channel_by_remote_id_list_.end()) { logger_.error("channel to ", rem_id, " already exists"); } else { logger_.info("insert new channel to ", rem_id, ", channel id: ", channel_id); channel_by_remote_id_list_.insert(std::make_pair(rem_id, channel_id)); auto res = channels_.insert ( std::make_pair ( channel_id , msw::make_unique<remote_channel<remote_id>> ( rem_id , [this, channel_id](msw::range<msw::byte const> payload) { send_payload_(channel_id, payload); } ) ) ); if (!res.second) throw std::runtime_error("adding channel fail"); if (!res.first->second) throw std::runtime_error("remote channel fail ptr"); channel_open_(channel_id, ip, rem_id, res.first->second.get()); } } private: channel_open channel_open_; }; }}
/** * Create encoder corresponding to given codec. * @param options Erasure codec options * @return erasure encoder */ public static ErasureEncoder createEncoder(Configuration conf, ErasureCodecOptions options) { Preconditions.checkNotNull(conf); Preconditions.checkNotNull(options); String codecKey = getCodecClassName(conf, options.getSchema().getCodecName()); ErasureCodec codec = createCodec(conf, codecKey, options); return codec.createEncoder(); }
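// A minimal usage sketch (not from the original source): it assumes the
// factory above lives in a CodecUtil-style class and that ECSchema and
// ErasureCodecOptions are available, as in Hadoop's erasure-coding API.
Configuration conf = new Configuration();
ECSchema schema = new ECSchema("rs", 6, 3);              // RS(6,3): 6 data + 3 parity units
ErasureCodecOptions options = new ErasureCodecOptions(schema);
ErasureEncoder encoder = createEncoder(conf, options);   // codec implementation chosen via conf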
const cheerio = require('cheerio') as CheerioAPI

interface WallHaven {
  url: string,
  downloadUrl: string,
  resolution: string
}

/**
 * Parse the HTML returned by WallHaven to extract the search-result URLs.
 * @param html
 */
export default function getWallHavenUrls (html: string): Array<WallHaven> {
  const result: Array<WallHaven> = []
  const $ = cheerio.load(html)
  $('figure[data-wallpaper-id]').each((i, element) => {
    const $node = $(element)
    // The small preview URL shown in the results
    const url = $node.find('img').data('src')
    const suffix = url.split('small/')[1]
    // The full-resolution URL used for downloading (assembled per WallHaven's URL scheme)
    const downloadUrl = 'https://w.wallhaven.cc/full/' + suffix.replace('/', '/wallhaven-')
    // Resolution information
    const resolution = $node.find('span.wall-res').text()
    result.push({ url, downloadUrl, resolution })
  })
  return result
}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
module Database
  ( Database (..)
  , ListTasks (..)
  , AddTask (..)
  , UpdateTask (..)
  , DeleteTasks (..)
  , tasks
  , openDatabase
  ) where

import ViewModels.Task
import Data.Acid
import Data.SafeCopy
import Control.Monad.Reader (asks, ask)
import Control.Monad.State (get, put)
import Data.Maybe (maybe)
import qualified Data.Map.Strict as M
import Control.Lens

newtype Database0 = Database0 { _tasks0 :: M.Map Integer Task }

$(deriveSafeCopy 0 'base ''Database0)

data Database = Database
  { _tasks  :: M.Map Integer Task
  , _lastId :: Integer
  }

$(makeLenses ''Database)
$(deriveSafeCopy 1 'extension ''Database)

instance Migrate Database where
  type MigrateFrom Database = Database0
  migrate (Database0 tasks0) = Database
    { _tasks  = tasks0
    , _lastId = maybe 1 ((+1) . fst) $ M.lookupMax tasks0
    }

listTasks :: Query Database [QueryTask]
listTasks = fmap taskToQueryTask . M.toAscList <$> view tasks
  where
    taskToQueryTask (tId, Task tName tFinished) = QueryTask tId tName tFinished

addTask :: NewTask -> Update Database ()
addTask (NewTask tName) = do
  tId <- (+1) <$> use lastId
  tasks %= M.insert tId (Task tName False)
  lastId .= tId

updateTask :: ToggleTask -> Update Database ()
updateTask (ToggleTask tId) = tasks . ix tId %= toggle
  where
    toggle (Task tName tFinished) = Task tName $ not tFinished

deleteTasks :: Update Database ()
deleteTasks = tasks %= M.filter (not . _taskFinished)

openDatabase :: IO (AcidState Database)
openDatabase = openLocalState $ Database M.empty 0

$(makeAcidic ''Database ['listTasks, 'addTask, 'updateTask, 'deleteTasks])
French Fiasco

How does one describe the Model F1 French Grand Prix? In two words: total chaos.

Felipe Massa and Pastor Maldonado qualified 7th and 11th on the grid for the race, respectively. The chaos began before the five lights even went out, when McLaren did not show up for the third time this season. Maldonado dueled with the Red Bull of Jenson Button early on and made contact entering the chicane, losing an end plate. However, he was able to rebound and remain in the top 10 for the remainder of the race, finishing 8th. Meanwhile, Massa also stayed within the top 10 for the majority of the race until disaster struck on lap 31, when his car lost power.

Afterwards, team boss macus16 stated [the race] "certainly wasn't the best weekend we've had – but by all means it wasn't the worst. Our #1 Maldo had a good run to pull himself up into the points. We've just hit a little rough patch with the reliability, we can still keep on truckin'."

Next week, the Model F1 World Championship crosses the English Channel into Silverstone for the British Grand Prix.

It's In The Game

We all know about Codemasters' work with the F1 games. Most of us may even remember when 989 Sports and EA Sports created them. With 989 now defunct and EA having withdrawn from motorsports game development, it looked as if Codemasters had a firm grip on the sport. Until now.

Earlier on Tuesday, Massa and Maldonado were summoned by macus16 for a great sponsorship opportunity that all teams are receiving: EA Sports is planning to develop the first ever Model F1 video game, and in exchange for each team getting a cover shoot, they will receive an EA Sports associate sponsorship for next week's British Grand Prix. As there are ten teams still active, there will be ten covers.

The duo spent much of Thursday morning working on the shoot, having to work in haste as EA had arranged for the Mercedes drivers to have their session in the afternoon, followed by Williams in the evening. The shoot lasted from Wednesday to Saturday (3 teams per day), and despite initial concerns that it would interfere with the teams' practice and qualifying schedule, there were no problems throughout the week.

The game will be released worldwide on 25 March, with the covers depending on the region: in Manor's case, their cover will be on the South American releases.
#include <stdio.h> #include <set> #include <vector> #include <algorithm> #include <utility> #define ii std::pair<int,int> #define iii std::pair<int,ii> int main(void){ int n,k; scanf("%d %d",&n,&k); std::vector<int> seq; std::vector<int> kk; std::vector<int> nc; std::vector<iii> lis; std::set<int> ks; int a; for(int i = 0;i<n;i++){ scanf("%d",&a); seq.push_back(a); }; for(int i = 0;i<k;i++){ scanf("%d",&a); ks.insert(a-1); kk.push_back(a-1); nc.push_back(seq[a-1]); }; int pos = 1; for(int i = 1;i<nc.size();i++){ int ab = nc[i] - nc[i-1]; if(ab < kk[i] - kk[i-1]){ pos = 0; break; }; }; if(pos){ for(int i = 0;i<n;i++){ int akt = seq[i] - i; if(ks.count(i)){ while(lis.size() && (akt < lis.back().first)){ lis.pop_back(); }; lis.push_back(iii(akt,ii(seq[i],i))); }else{ if(!lis.size()){ lis.push_back(iii(akt,ii(seq[i],i))); continue; }; auto it = std::lower_bound(lis.begin(),lis.end(),iii(akt,ii(seq[i],-1))); int idx = it - lis.begin(); if(it != lis.end()){ if(!ks.count(lis[idx].second.second)){ lis[idx] = iii(akt,ii(seq[i],i)); }; }else{ lis.push_back(iii(akt,ii(seq[i],i))); }; }; }; /* for(auto i: lis){ printf("%d\n",i.second.first); }; */ printf("%d\n",n-lis.size()); }else{ printf("-1\n"); }; return 0; };
#!/usr/bin/env python

descr = """Python module to compute the RESCAL tensor factorization"""

import os
import sys
from pkg_resources import require

DISTNAME = 'rescal'
DESCRIPTION = descr
LONG_DESCRIPTION = descr
MAINTAINER = '<NAME>'
MAINTAINER_EMAIL = '<EMAIL>'
URL = 'http://github.com/mnick/rescal.py'
LICENSE = 'GPLv3'
DOWNLOAD_URL = URL
PACKAGE_NAME = 'rescal'
EXTRA_INFO = dict(
    classifiers=[
        "Development Status :: 4 - Beta",
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Topic :: Scientific/Engineering',
        'Topic :: Software Development',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Operating System :: Unix',
        'Operating System :: MacOS',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
    ]
)

try:
    import setuptools  # If you want to enable 'python setup.py develop'
    EXTRA_INFO.update(dict(
        zip_safe=False,  # the package can run out of an .egg file
        include_package_data=True,
    ))
except:
    print('setuptools module not found.')
    print("Install setuptools if you want to enable 'python setup.py develop'.")

require('numpy', 'scipy', 'nose')


def configuration(parent_package='', top_path=None, package_name=DISTNAME):
    if os.path.exists('MANIFEST'):
        os.remove('MANIFEST')

    from numpy.distutils.misc_util import Configuration
    config = Configuration(None, parent_package, top_path)

    # Avoid non-useful msg: "Ignoring attempt to set 'name' (from ... "
    config.set_options(
        ignore_setup_xxx_py=True,
        assume_default_configuration=True,
        delegate_options_to_subpackages=True,
        quiet=True
    )
    config.add_subpackage(PACKAGE_NAME)
    return config


def get_version():
    """Obtain the version number"""
    import imp
    mod = imp.load_source('version', os.path.join(PACKAGE_NAME, 'version.py'))
    return mod.__version__


# Documentation building command
try:
    import subprocess
    from sphinx.setup_command import BuildDoc as SphinxBuildDoc

    class BuildDoc(SphinxBuildDoc):
        """Run in-place build before Sphinx doc build"""
        def run(self):
            ret = subprocess.call(
                [sys.executable, sys.argv[0], 'build_ext', '-i']
            )
            if ret != 0:
                raise RuntimeError("Building Scipy failed!")
            SphinxBuildDoc.run(self)

    cmdclass = {'build_sphinx': BuildDoc}
except ImportError:
    cmdclass = {}


def setup_package():
    # Call the setup function
    metadata = dict(
        name=DISTNAME,
        maintainer=MAINTAINER,
        maintainer_email=MAINTAINER_EMAIL,
        description=DESCRIPTION,
        license=LICENSE,
        url=URL,
        download_url=DOWNLOAD_URL,
        long_description=LONG_DESCRIPTION,
        version=get_version(),
        #test_suite="nose.collector",
        cmdclass=cmdclass,
        **EXTRA_INFO
    )

    if (len(sys.argv) >= 2
            and ('--help' in sys.argv[1:]
                 or sys.argv[1] in ('--help-commands', 'egg_info', '--version', 'clean'))):
        # For these actions, NumPy is not required.
        #
        # They are required to succeed without Numpy for example when
        # pip is used to install Scikit when Numpy is not yet present in
        # the system.
        try:
            from setuptools import setup
        except ImportError:
            from distutils.core import setup
        metadata['version'] = get_version()
    else:
        metadata['configuration'] = configuration
        from numpy.distutils.core import setup

    setup(**metadata)


if __name__ == "__main__":
    setup_package()
/** * Defines behavior for a Parent Node that only accepts one input * * @author Mariusz Derezinski-Choo */ public abstract class UnaryOperationNode extends ParentNode { /** * Construct a UnaryOperationNode * @param text the user-inputted text associated with this construction */ public UnaryOperationNode(String text){ super(1, text); } @Override protected void validateArguments(){ if(arguments.size() < myMinArguments){ throw new InsufficientArgumentException(toString()); } } @Override protected double runValidated(TurtleContext context){ double returning = 0; for(ParserNode node : arguments){ returning = evaluate(node, context); } return returning; } /** * Evaluate the node for the given node * @param node the node that is passed as an input * @param context the context from which data can be fetched * @return the double result of the computation/logic performing */ protected abstract double evaluate(ParserNode node, TurtleContext context); }
/** * Destroy an image buffer, freeing native resources that it uses * (such as native memory used to hold a pixmap). In Desktop java, * this does nothing, because this is left to the garbage collector. * In GEM-based systems, it should be equivalent to * DVBBufferedImage.dispose(). See * <a href="http://wiki.java.net/bin/view/Mobileandembedded/BDJImageMemoryManagement">http://wiki.java.net/bin/view/Mobileandembedded/BDJImageMemoryManagement</a> * for a discussion of images and memory management. * * @see #createCompatibleImageBuffer(java.awt.Component, int, int) **/ public static void destroyImageBuffer(Image buffer) { if (helper != null) { helper.destroyImageBufferHelper(buffer); } }
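// A minimal usage sketch (hypothetical caller, not from the original source):
// pair each buffer created via createCompatibleImageBuffer(...) with a call to
// destroyImageBuffer(...) so GEM-based players reclaim native pixmap memory
// promptly instead of waiting on the garbage collector. The variables
// component, width and height are assumed to be defined by the caller.
Image buffer = createCompatibleImageBuffer(component, width, height);
try {
    // ... render into the buffer ...
} finally {
    destroyImageBuffer(buffer);   // no-op on Desktop Java
}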
a, b, c, d = map(int, input().split())

alice = list(range(a, b + 1))
bob = set(range(c, d + 1))

# Count the days that appear in both ranges
count = 0
for day in alice:
    if day in bob:
        count += 1

if count > 0:
    print(count - 1)
else:
    print(count)
A Mass-Change Model for the Estimation of Debris-Flow Runout, a Second Discussion: Conditions for the Application of the Rocket Equation

A geology colleague, Dr. Alan Benimoff, brought to my attention the article on debris flow in this journal written by Cannon and Savage (1988) and the subsequent discussion by Hungr (1990) and reply by Cannon and Savage. The purpose of this note is to clarify and extend the discussion and also to point out a problem with Cannon and Savage's viscous force. A reader of the Journal who is unfamiliar with the problem of systems that gain or lose mass would perhaps have been confused by the discussion and reply. Both Hungr and Cannon and Savage (hereafter abbreviated as CS) are using the same rocket equation except, unfortunately, they use the symbol u to mean two different things. Hungr uses it to denote relative velocity with respect to the main mass (the rocket), whereas CS use it to denote absolute velocity with respect to the ground. If we denote Hungr's u by u_H and CS's u by u_CS, then u_H = u_CS - v. Making this replacement in Hungr's eq. (1) we find
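Although the excerpt breaks off at this point, the substitution it describes can be sketched explicitly. The following uses the standard variable-mass momentum equation, with M the mass of the main body, v its velocity, and F the net external force (these symbol definitions are assumptions, as they are not given in the excerpt):

M \frac{dv}{dt} = F + (u_{CS} - v)\,\frac{dM}{dt}    % CS: u_{CS} is absolute velocity with respect to the ground

M \frac{dv}{dt} = F + u_{H}\,\frac{dM}{dt}, \qquad u_{H} = u_{CS} - v    % Hungr: u_{H} is velocity relative to the main mass

so the two forms coincide once u_H = u_CS - v is substituted, which is the point of the clarification above.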
// Write implements fs.FileOperations.Write. func (s *SocketOperations) Write(ctx context.Context, _ *fs.File, src usermem.IOSequence, _ int64) (int64, error) { f := &ioSequencePayload{ctx: ctx, src: src} n, resCh, err := s.Endpoint.Write(f, tcpip.WriteOptions{}) if err == tcpip.ErrWouldBlock { return 0, syserror.ErrWouldBlock } if resCh != nil { t := ctx.(*kernel.Task) if err := t.Block(resCh); err != nil { return 0, syserr.FromError(err).ToError() } n, _, err = s.Endpoint.Write(f, tcpip.WriteOptions{}) } if err != nil { return 0, syserr.TranslateNetstackError(err).ToError() } if int64(n) < src.NumBytes() { return int64(n), syserror.ErrWouldBlock } return int64(n), nil }
use core::fmt; use std::fmt::{Display, Formatter}; use crate::lexer::token_kinds::kind::TokenKind; #[derive(Clone)] pub struct Token { pub kind: TokenKind, pub line_number: u64, pub column_number: u64, } impl Token { pub fn new(kind: TokenKind, line_number: u64, column_number: u64) -> Self { Self { kind, line_number, column_number, } } } impl Display for Token { fn fmt(&self, f: &mut Formatter) -> fmt::Result { write!(f, "Token:\n{}Line number: {} \nColumn number: {}\n", self.kind, self.line_number, self.column_number) } }