code
stringlengths
3
1.05M
repo_name
stringlengths
4
116
path
stringlengths
3
942
language
stringclasses
30 values
license
stringclasses
15 values
size
int32
3
1.05M
/** * IsmpSpEngine_PortType.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Apr 22, 2006 (06:55:48 PDT) WSDL2Java emitter. */ package com.bxtel.dx.gx.webservice1; public interface IsmpSpEngine_PortType extends java.rmi.Remote { public com.bxtel.dx.gx.webservice1.Response orderRelationUpdateNotify(com.bxtel.dx.gx.webservice1.OrderRelationUpdateNotifyReq orderRelationUpdateNotifyReq) throws java.rmi.RemoteException; public com.bxtel.dx.gx.webservice1.Response serviceConsumeNotify(com.bxtel.dx.gx.webservice1.ServiceConsumeNotifyReq serviceConsumeNotifyReqPara) throws java.rmi.RemoteException; public com.bxtel.dx.gx.webservice1.Response spWithdrawSubscription(com.bxtel.dx.gx.webservice1.SPWithdrawSubscriptionReq spWithdrawSubscriptionReqPara) throws java.rmi.RemoteException; public com.bxtel.dx.gx.webservice1.NotifyManagementInfoRsp notifyManagementInfo(com.bxtel.dx.gx.webservice1.NotifyManagementInfoReq notifyManagementInfoReq) throws java.rmi.RemoteException; }
zzsoszz/axiswebservice
wsdl/电信/广西和集团/com/bxtel/dx/gx/webservice1/IsmpSpEngine_PortType.java
Java
apache-2.0
1,022
import sys import requests from termcolor import colored ERROR_STATUS_FORMAT = 'Singularity responded with an invalid status code ({0})' def get_json_response(uri, args, params={}): singularity_response = requests.get(uri, params=params, headers=args.headers) if singularity_response.status_code < 199 or singularity_response.status_code > 299: sys.stderr.write('{0} params:{1}\n'.format(uri, str(params))) sys.stderr.write(colored(ERROR_STATUS_FORMAT.format(singularity_response.status_code), 'red') + '\n') return {} return singularity_response.json()
tejasmanohar/Singularity
scripts/logfetch/singularity_request.py
Python
apache-2.0
575
package com.jess.arms.mvp; import com.jess.arms.integration.IRepositoryManager; /** * Created by jess on 8/5/16 12:55 * contact with [email protected] */ public class BaseModel implements IModel { protected IRepositoryManager mRepositoryManager;//用于管理网络请求层,以及数据缓存层 public BaseModel(IRepositoryManager repositoryManager) { this.mRepositoryManager = repositoryManager; } @Override public void onDestroy() { mRepositoryManager = null; } }
coca-cola33/CloudFilm
arms/src/main/java/com/jess/arms/mvp/BaseModel.java
Java
apache-2.0
523
[![Travis CI](https://img.shields.io/travis/apache/bigtop.svg?branch=master)](https://travis-ci.org/apache/bigtop) Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. [Apache Bigtop](http://bigtop.apache.org/) ========================================== ...is a project for the development of packaging and tests of the [Apache Hadoop](http://hadoop.apache.org/) ecosystem. The primary goal of Apache Bigtop is to build a community around the packaging and interoperability testing of Apache Hadoop-related projects. This includes testing at various levels (packaging, platform, runtime, upgrade, etc...) developed by a community with a focus on the system as a whole, rather than individual projects. Immediately Get Started with Deployment and Smoke Testing of BigTop =================================================================== The simplest way to get a feel for how bigtop works, is to just cd into `provisioner` and try out the recipes under vagrant or docker. Each one rapidly spins up, and runs the bigtop smoke tests on, a local bigtop based big data distribution. Once you get the gist, you can hack around with the recipes to learn how the puppet/rpm/smoke-tests all work together, going deeper into the components you are interested in as described below. 
Quick overview of source code directories ========================================= * __bigtop-deploy__ : deployment scripts and puppet stuff for Apache Bigtop. * __bigtop-packages__ : RPM/DEB specifications for Apache Bigtop subcomponents. * __bigtop-test-framework__ : The source code for the iTest utilities (framework used by smoke tests). * __bigtop-tests__ : * __test-artifacts__ : source for tests. * __test-execution__ : maven pom drivers for running the integration tests found in test-artifacts. * __bigtop-toolchain__ : puppet scripts for setting up an instance which can build Apache Bigtop, sets up utils like jdk/maven/protobufs/... * __provisioner__ : Vagrant and Docker Provisioner that automatically spin up Hadoop environment with one click. * __docker__ : Dockerfiles and Docker Sandbox build scripts. Also, there is a new project underway, Apache Bigtop blueprints, which aims to create templates/examples that demonstrate/compare various Apache Hadoop ecosystem components with one another. Contributing ============ There are lots of ways to contribute. People with different expertise can help with various subprojects: * __puppet__ : Much of the Apache Bigtop deploy and packaging tools use puppet to bootstrap and set up a cluster. But recipes for other tools are also welcome (ie. Chef, Ansible, etc.) * __groovy__ : Primary language used to write the Apache Bigtop smokes and itest framework. * __maven__ : Used to build Apache Bigtop smokes and also to define the high level Apache Bigtop project. * __RPM/DEB__ : Used to package Apache Hadoop ecosystem related projects into GNU/Linux installable packages for most popular GNU/Linux distributions. So one could add a new project or improve existing packages. * __hadoop__ : Apache Hadoop users can also contribute by using the Apache Bigtop smokes, improving them, and evaluating their breadth. 
* __contributing your workloads__ : Contributing your workloads enable us to tests projects against real use cases and enable you to have people verifying the use cases you care about are always working. * __documentation__ : We are always in need of a better documentation! * __giving feedback__ : Tell us how you use Apache Bigtop, what was great and what was not so great. Also, what are you expecting from it and what would you like to see in the future? Also, opening [JIRA's](https://issues.apache.org/jira/browse/BIGTOP) and getting started by posting on the mailing list is helpful. What do people use Apache Bigtop for? ============================== You can go to the [Apache Bigtop website](http://bigtop.apache.org/) for notes on how to do "common" tasks like: * Apache Hadoop App developers: Download an Apache Bigtop built Apache Hadoop 2.0 VM from the website, so you can have a running psuedodistributed Apache Hadoop cluster to test your code on. * Cluster administers or deployment gurus: Run the Apache Bigtop smoke tests to ensure that your cluster is working. * Vendors: Build your own Apache Hadoop distribution, customized from Apache Bigtop bits. Getting Started =============== Below are some recipes for getting started with using Apache Bigtop. As Apache Bigtop has different subprojects, these recipes will continue to evolve. For specific questions it's always a good idea to ping the mailing list at [email protected] to get some immediate feedback, or [open a JIRA](https://issues.apache.org/jira/browse/BIGTOP). For Users: Running the smoke tests ----------------------------------- The simplest way to test bigtop is described in bigtop-tests/smoke-tests/README file For integration (API level) testing with maven, read on. For Users: Running the integration tests ----------------------------------------- WARNING: since testing packages requires installing them on a live system it is highly recommended to use VMs for that. 
Testing Apache Bigtop is done using iTest framework. The tests are organized in maven submodules, with one submodule per Apache Bigtop component. The bigtop-tests/test-execution/smokes/pom.xml defines all submodules to be tested, and each submodule is in its own directory under smokes/, for example: *smokes/hadoop/pom.xml* *smokes/hive/pom.xml* *... and so on.* * New way (with Gradle build in place) * Step 1: install smoke tests for one or more components * Example 1: gradle installTestArtifacts * Example 2: Installing just Hadoop-specific smoke tests gradle install-hadoop * Step 2: Run the the smoke tests on your cluster (see Step 3 and/or Step 4 below) We are on the route of migrating subprojects under top-level gradle build. Currently converted projects could be listed by running gradle projects To see the list of tasks in a subproject, ie itest-common, you can run gradle itest-common:tasks * Old Way * Step 1: Build the smokes with snapshots. This ensures that all transitive dependencies etc.. are in your repo mvn clean install -DskipTests -DskipITs -DperformRelease -f ./bigtop-test-framework/pom.xml mvn clean install -DskipTests -DskipITs -DperformRelease -f ./test-artifacts/pom.xml * Step 2: Now, rebuild in "offline" mode. This will make sure that your local changes to bigtop are embeded in the changes. mvn clean install -DskipTests -DskipITs -DperformRelease -o -nsu -f ./bigtop-test-framework/pom.xml mvn clean install -DskipTests -DskipITs -DperformRelease -o -nsu -f ./bigtop-tests/test-artifacts/pom.xml * Step 3: Now, you can run the smoke tests on your cluster. * Example 1: Running all the smoke tests with TRACE level logging (shows std out from each mr job). mvn clean verify -Dorg.apache.bigtop.itest.log4j.level=TRACE -f ./bigtop/bigtop-tests/test-execution/smokes/pom.xml * Just running hadoop examples, nothing else. 
mvn clean verify -D'org.apache.maven-failsafe-plugin.testInclude=**/*TestHadoopExamples*' -f bigtop-tests/test-execution/smokes/hadoop/pom.xml Note: A minor bug/issue: you need the "testInclude" regular expression above, even if you don't want to customize the tests, since existing test names don't follow the maven integration test naming convention of IT*, but instead, follow the surefire (unit test) convention of Test*. For Users: Creating Your Own Apache Hadoop Environment ----------------------------------------------- Another common use case for Apache Bigtop is creating / setting up your own Apache Hadoop distribution. For details on this, check out the bigtop-deploy/README.md file, which describes how to use the puppet repos to create and setup your VMs. You can also try out provisioner to quickly get the idea how it works. For Developers: Building the entire distribution from scratch ------------------------------------------------------------- Packages have been built for CentOS, Fedora, OpenSUSE, Ubuntu, and Debian. They can probably be built for other platforms as well. Some of the binary artifacts might be compatible with other closely related distributions. __On all systems, Building Apache Bigtop requires certain set of tools__ To bootstrap the development environment from scratch execute ./gradlew toolchain This build task expected Puppet to be installed; user has to have sudo permissions. The task will pull down and install all development dependencies, frameworks and SDKs, required to build the stack on your platform. Before executing the above command, user can use the following script to install Puppet: sudo bigtop_toolchain/bin/puppetize.sh Note for CentOS (and RHEL, which is not supported officially but on a best effort basis) 8 users: on these distros, puppetize.sh installs the puppet command into /opt/puppetlabs/bin, which is not included usually in secure_path defined in /etc/sudoers. 
So users may have to add that path to secure_path manually. Also, RHEL 8 users may have to enable their subscriptions themselves for using EPEL. cf. https://fedoraproject.org/wiki/EPEL#How_can_I_use_these_extra_packages.3F To immediately set environment after running toolchain, run . /etc/profile.d/bigtop.sh * __Building packages__ : `gradle [component-name]-pkg` If -Dbuildwithdeps=true is set, the Gradle will follow the order of the build specified in the "dependencies" section of bigtop.bom file. Otherwise just a single component will get build (original behavior). To use an alternative definition of a stack composition (aka BOM), specify its name with -Dbomfile=<filename> system property in the build time. You can visualize all tasks dependencies by running `gradle tasks --all` * __Building local YUM/APT repositories__ : `gradle [yum|apt]` * __Recommended build environments__ Bigtop provides "development in the can" environments, using Docker containers. These have the build tools set by the toolchain, as well as the user and build environment configured and cached. All currently supported OSes could be pulled from official Bigtop repository at https://hub.docker.com/r/bigtop/slaves/tags/ To build a component (bigtop-groovy) for a particular OS (ubuntu-16.04) you can run the following from a clone of Bigtop workspace (assuming your system has Docker engine setup and working) ```docker run --rm -u jenkins:jenkins -v `pwd`:/ws --workdir /ws bigtop/slaves:trunk-ubuntu-16.04 bash -l -c './gradlew allclean ; ./gradlew bigtop-groovy-pkg'``` For Developers: Building and modifying the web site --------------------------------------------------- The website can be built by running `mvn site:site` from the root directory of the project. The main page can be accessed from "project_root/target/site/index.html". The source for the website is located in "project_root/src/site/". 
For Developers: Building a component from Git repository -------------------------------------------------------- Prerequisites * You will need git installed. * You will need java 8 installed. * You will need to use [gradlew](https://docs.gradle.org/current/userguide/gradle_wrapper.html) which is included in the source code. (Right in the root of the project folder) * This project's gradlew has more documentation [here](https://cwiki.apache.org/confluence/display/BIGTOP/Quickstart+Guide%3A+Bigtop+Integration+Test+Framework+2.0) * Use git to download BigTop : `git clone https://github.com/apache/bigtop.git` * move into the root project folder: `cd bigtop` To fetch source from a Git repository, there're two ways to achieve this: a). modify `./bigtop.bom` and add JSON snippets to your component/package, or b). specify properties at command line * __bigtop.bom__ Add following JSON snippets to the desired component/package: ``` git { repo = ""; ref = ""; dir = ""; commit_hash = "" } ``` * `repo` - SSH, HTTP or local path to Git repo. * `ref` - branch, tag or commit hash to check out. * `dir` - [OPTIONAL] directory name to write source into. * `commit_hash` - [OPTIONAL] a commit hash to reset to. Some packages have different names for source directory and source tarball (`hbase-0.98.5-src.tar.gz` contains `hbase-0.98.5` directory). By default source will be fetched in a directory named by `tarball { source = TARBALL_SRC }` without `.t*` extension. To explicitly set directory name use the `dir` option. When `commit_hash` specified, the repo to build the package will be reset to the commit hash. 
Example for HBase: ``` name = 'hbase' version { base = '1.3.2'; pkg = base; release = 1 } git { repo = "https://github.com/apache/hbase.git" ref = "branch-1.3" dir = "${name}-${version.base}" commit_hash = "1bedb5bfbb5a99067e7bc54718c3124f632b6e17" } ``` * __command line__ ``` ./gradlew [component]-pkg-ind -Pgit_repo="" -Pgit_ref="" -Pgit_dir="" -Pgit_commit_hash="" -Pbase_version="" ``` Where `git_repo`, `git_ref`, `git_dir`, and `git_commit_hash` are exactly the same with what we set in JSON. And `base_version` is to overwrite: ``` version { base = ''} ``` Example for Kafka: ``` ./gradlew kafka-pkg-ind -Pgit_repo=https://github.com/apache/kafka.git -Pgit_ref=trunk -Pgit_commit_hash=dc0601a1c604bea3f426ed25b6c20176ff444079 -Pbase_version=2.2.0 ``` You can mix both ways to build from Git, but command line always overwrites `bigtop.bom`. Contact us ---------- You can get in touch with us on [the Apache Bigtop mailing lists](http://bigtop.apache.org/mail-lists.html).
sekikn/bigtop
README.md
Markdown
apache-2.0
14,520
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.analysis; import com.google.devtools.build.lib.analysis.MakeVariableExpander.ExpansionException; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.packages.Package; import com.google.devtools.build.lib.syntax.SkylarkDict; import java.util.LinkedHashMap; import java.util.Map; /** * Implements make variable expansion for make variables that depend on the * configuration and the target (not on behavior of the * {@link ConfiguredTarget} implementation) */ public class ConfigurationMakeVariableContext implements MakeVariableExpander.Context { private final Package pkg; private final Map<String, String> commandLineEnv; private final Map<String, String> globalEnv; private final String platform; public ConfigurationMakeVariableContext(Package pkg, BuildConfiguration configuration) { this.pkg = pkg; commandLineEnv = configuration.getCommandLineDefines(); globalEnv = configuration.getGlobalMakeEnvironment(); platform = configuration.getPlatformName(); } @Override public String lookupMakeVariable(String var) throws ExpansionException { String value = commandLineEnv.get(var); if (value == null) { value = pkg.lookupMakeVariable(var, platform); } if (value == null) { value = globalEnv.get(var); } if (value == null) { throw new MakeVariableExpander.ExpansionException("$(" + var + ") 
not defined"); } return value; } public SkylarkDict<String, String> collectMakeVariables() { Map<String, String> map = new LinkedHashMap<>(); // Collect variables in the reverse order as in lookupMakeVariable // because each update is overwriting. map.putAll(pkg.getAllMakeVariables(platform)); map.putAll(globalEnv); map.putAll(commandLineEnv); return SkylarkDict.<String, String>copyOf(null, map); } }
abergmeier-dsfishlabs/bazel
src/main/java/com/google/devtools/build/lib/analysis/ConfigurationMakeVariableContext.java
Java
apache-2.0
2,534
# Version 1.2 import logging, os, re, datetime from IPython.display import display from arcgis.gis import GIS ''' ********************** SCRIPT CONFIGURATION START ********************** ''' #What is the ID of the Feature Layer you want to download attachments from? FeatureLayerId = '092d075f4b3a40f78cf1329b20b0d5e7' #What are your ArcGIS Enterprise/ArcGIS Online credentials? This is case sensitive. PortalUserName = '' PortalPassword = '' PortalUrl = 'https://www.arcgis.com' #Where do you want your attachments stored? SaveAttachmentsTo = 'C:\ScriptDownloads' SaveLogsTo = 'Logging' #How do you want your attachments stored? Options are GroupedFolder and IndividualFolder #GroupedFolder - Attachments from every feature in each layer is stored in the same folder - attachments are renamed in the format OBJECTID-ATTACHMENTID-OriginalFileName #IndividualFolder - A new folder is created for each OBJECTID, and associated attachments are stored in that folder - attachments are renamed in the format ATTACHMENTID-OriginalFileName AttachmentStorage = 'GroupedFolder' #Set to False if ArcGIS Enterprise cert is not valid PortalCertVerification = True #Setup logging - levels are DEBUG,INFO,WARNING,ERROR,CRITICAL logging.basicConfig(level=logging.INFO) ''' ********************** SCRIPT CONFIGURATION END ********************** ''' #https://stackoverflow.com/questions/273192/how-can-i-create-a-directory-if-it-does-not-exist def createFolder(folderPath): if not os.path.exists(folderPath): try: os.makedirs(folderPath) except OSError as e: if e.errno != errno.EEXIST: raise def renameFile(currentAttachmentPath, newAttachmentPath): #Rename file - ensure new attachment path does not exist already if not os.path.exists(newAttachmentPath): os.rename(currentAttachmentPath, newAttachmentPath) logger.info('{} being renamed as {}'.format(currentAttachmentPath, newAttachmentPath)) else: logger.warning('Not able to rename {} as {} because file already exists. 
Removing {}'.format(currentAttachmentPath, newAttachmentPath, currentAttachmentPath)) os.remove(currentAttachmentPath) #Create specified folder if it does not exist already createFolder(SaveAttachmentsTo) createFolder(SaveLogsTo) #Logging level specified in script configuration logger = logging.getLogger(__name__) logFileName = datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S') fileHandler = logging.handlers.RotatingFileHandler('{}/{}.log'.format(SaveLogsTo, logFileName), maxBytes=100000, backupCount=5) formatter = logging.Formatter('%(asctime)s %(levelname)s %(relativeCreated)d \n%(filename)s %(module)s %(funcName)s %(lineno)d \n%(message)s\n') fileHandler.setFormatter(formatter) logger.addHandler(fileHandler) logger.info('Script Starting at {}'.format(str(datetime.datetime.now()))) #Connect to GIS, and get Feature Layer information if PortalUserName == '' and PortalPassword == '': gis = GIS() else: gis = GIS(PortalUrl, PortalUserName, PortalPassword, verify_cert=PortalCertVerification) downloadCounter = 0 nonDownloadCounter = 0 downloadSizeCounter = 0 itemObject = gis.content.get(FeatureLayerId) logger.info('Iterating through layers in Feature Layer "{}"'.format(itemObject.name)) display(itemObject) #Loop through layers in Feature Layer for i in range(len(itemObject.layers)): featureLayer = itemObject.layers[i] #Skip layer if attachments are not enabled if featureLayer.properties.hasAttachments == True: #Remove any characters from feature layer name that may cause problems and ensure it's unique... featureLayerName = '{}-{}'.format(str(i), re.sub(r'[^A-Za-z0-9]+', '', featureLayer.properties.name)) featureLayerFolder = SaveAttachmentsTo + r'\\' + featureLayerName createFolder(featureLayerFolder) #Query to get list of object ids in layer featureObjectIds = featureLayer.query(where='1=1', return_ids_only=True) #Provide some updates to user... 
logger.info('Time: {}'.format(str(datetime.datetime.now()))) logger.info('Currently looping through feature attachments in layer {} of {}: storing in folder named "{}"'.format(str(i + 1), str(len(itemObject.layers)), featureLayerName)) logger.info('There are {} features to iterate in this layer'.format(str(len(featureObjectIds['objectIds'])))) #Loop through features in layer emptyAttachments = 0 for j in range(len(featureObjectIds['objectIds'])): currentObjectId = featureObjectIds['objectIds'][j] currentObjectIdAttachments = featureLayer.attachments.get_list(oid=currentObjectId) if len(currentObjectIdAttachments) > 0: #Loop through feature attachments and download to appropriate folder for k in range(len(currentObjectIdAttachments)): attachmentId = currentObjectIdAttachments[k]['id'] attachmentName = currentObjectIdAttachments[k]['name'] attachmentSize = currentObjectIdAttachments[k]['size'] if AttachmentStorage == 'IndividualFolder': currentFolder = featureLayerFolder + r'\\' + str(currentObjectId) #Create a folder for attachments createFolder(currentFolder) fileName = '{}-{}'.format(attachmentId, attachmentName) newAttachmentPath = '{}\\{}'.format(currentFolder, fileName) if not os.path.isfile(newAttachmentPath): logger.info('The size of the current attachment being downloaded is {}MB'.format((attachmentSize/1000000))) currentAttachmentPath = featureLayer.attachments.download(oid=currentObjectId, attachment_id=attachmentId, save_path=currentFolder) #Rename to ensure file name is unique renameFile(currentAttachmentPath, newAttachmentPath) downloadCounter += 1 downloadSizeCounter += attachmentSize else: logger.info('File {} already exists. 
Not downloading again!'.format(newAttachmentPath)) nonDownloadCounter += 1 elif AttachmentStorage == 'GroupedFolder': fileName = '{}-{}-{}'.format(currentObjectId, attachmentId, attachmentName) newAttachmentPath = '{}\\{}'.format(featureLayerFolder, fileName) if not os.path.isfile(newAttachmentPath): logger.info('The size of the current attachment being downloaded is {}MB'.format((attachmentSize/1000000))) currentAttachmentPath = featureLayer.attachments.download(oid=currentObjectId, attachment_id=attachmentId, save_path=featureLayerFolder) #Rename to ensure file name is unique renameFile(currentAttachmentPath, newAttachmentPath) downloadCounter += 1 downloadSizeCounter += attachmentSize else: logger.info('File {} already exists. Not downloading again!'.format(newAttachmentPath)) nonDownloadCounter += 1 else: logger.error('AttachmentStorage option not valid: {}. Valid options are IndividualFolder and GroupedFolder'.format(AttachmentStorage)) else: emptyAttachments += 1 logger.info('{} of these features do not contain attachments'.format(str(emptyAttachments))) else: logger.info('Layer {} does not have attachments enabled'.format(featureLayer.properties.name)) logger.info('Summary: {} new files have been downloaded totalling {}MB in size'.format(downloadCounter, (downloadSizeCounter/1000000))) logger.info('Summary: {} attachments already existed so were not downloaded again'.format(nonDownloadCounter))
Esri/developer-support
python/general-python/download-attachments/DownloadAttachments.py
Python
apache-2.0
8,202
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.core.gui; public interface GUIPositionInterface { public Point getLocation(); public void setLocation(Point p); public void setLocation(int x, int y); public boolean isSelected(); public void setSelected(boolean selected); }
soluvas/pdi-ce
src-core/org/pentaho/di/core/gui/GUIPositionInterface.java
Java
apache-2.0
1,191
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * */ package org.unitime.localization.impl; import javax.servlet.jsp.tagext.TagData; import javax.servlet.jsp.tagext.TagExtraInfo; import javax.servlet.jsp.tagext.VariableInfo; import org.unitime.localization.impl.Localization; /** * @author Tomas Muller */ public class BundleTei extends TagExtraInfo { public VariableInfo [] getVariableInfo(TagData data) { String name = data.getAttributeString("name"); String id = data.getAttributeString("id"); return new VariableInfo[] { new VariableInfo(id == null ? BundleTag.DEFAULT_ID : id, Localization.ROOT + name, true, VariableInfo.NESTED) }; } }
zuzanamullerova/unitime
JavaSource/org/unitime/localization/impl/BundleTei.java
Java
apache-2.0
1,415
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>basic_seq_packet_socket::native_handle_type</title> <link rel="stylesheet" href="../../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../../../boost_asio.html" title="Boost.Asio"> <link rel="up" href="../basic_seq_packet_socket.html" title="basic_seq_packet_socket"> <link rel="prev" href="native_handle.html" title="basic_seq_packet_socket::native_handle"> <link rel="next" href="native_non_blocking.html" title="basic_seq_packet_socket::native_non_blocking"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../../boost.png"></td> <td align="center"><a href="../../../../../index.html">Home</a></td> <td align="center"><a href="../../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="native_handle.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../basic_seq_packet_socket.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="native_non_blocking.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="section"> <div class="titlepage"><div><div><h4 class="title"> <a name="boost_asio.reference.basic_seq_packet_socket.native_handle_type"></a><a class="link" href="native_handle_type.html" 
title="basic_seq_packet_socket::native_handle_type">basic_seq_packet_socket::native_handle_type</a> </h4></div></div></div> <p> <a class="indexterm" name="idp134644544"></a> The native representation of a socket. </p> <pre class="programlisting"><span class="keyword">typedef</span> <span class="identifier">SeqPacketSocketService</span><span class="special">::</span><span class="identifier">native_handle_type</span> <span class="identifier">native_handle_type</span><span class="special">;</span> </pre> <h6> <a name="boost_asio.reference.basic_seq_packet_socket.native_handle_type.h0"></a> <span class="phrase"><a name="boost_asio.reference.basic_seq_packet_socket.native_handle_type.requirements"></a></span><a class="link" href="native_handle_type.html#boost_asio.reference.basic_seq_packet_socket.native_handle_type.requirements">Requirements</a> </h6> <p> <span class="emphasis"><em>Header: </em></span><code class="literal">boost/asio/basic_seq_packet_socket.hpp</code> </p> <p> <span class="emphasis"><em>Convenience header: </em></span><code class="literal">boost/asio.hpp</code> </p> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2003-2014 Christopher M. Kohlhoff<p> Distributed under the Boost Software License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="native_handle.html"><img src="../../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../basic_seq_packet_socket.html"><img src="../../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../../boost_asio.html"><img src="../../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="native_non_blocking.html"><img src="../../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
biospi/seamass-windeps
src/boost_1_57_0/doc/html/boost_asio/reference/basic_seq_packet_socket/native_handle_type.html
HTML
apache-2.0
4,150
/* * Copyright 2019 OICR * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.dockstore.webservice;
ga4gh/dockstore
dockstore-webservice/src/test/java/io/dockstore/webservice/package-info.java
Java
apache-2.0
651
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.rel.metadata; /** Placeholder for null values. */ public enum NullSentinel { /** Placeholder for a null value. */ INSTANCE { @Override public String toString() { return "NULL"; } }, /** Placeholder that means that a request for metadata is already active, * therefore this request forms a cycle. */ ACTIVE; public static Comparable mask(Comparable value) { if (value == null) { return INSTANCE; } return value; } public static Object mask(Object value) { if (value == null) { return INSTANCE; } return value; } }
julianhyde/calcite
core/src/main/java/org/apache/calcite/rel/metadata/NullSentinel.java
Java
apache-2.0
1,413
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.imaging.formats.psd.dataparsers; import org.apache.commons.imaging.formats.psd.ImageContents; public class DataParserGrayscale extends DataParser { @Override protected int getRGB(final int[][][] data, final int x, final int y, final ImageContents imageContents) { final int sample = 0xff & data[0][y][x]; final int alpha = 0xff; return ((0xff & alpha) << 24) | ((0xff & sample) << 16) | ((0xff & sample) << 8) | ((0xff & sample) << 0); } @Override public int getBasicChannelsCount() { return 1; } }
yuuhayashi/commons-imaging
src/main/java/org/apache/commons/imaging/formats/psd/dataparsers/DataParserGrayscale.java
Java
apache-2.0
1,446
/* * Copyright 2011-2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ load('vertx.js') // We set the buffer sizes small so we don't run out of RAM - each connection // will have its own buffer // You will also probably need to increase your ulimit for file handles // default maximum is really small on most OSes. // Google it to find out how to do that, it depends on your OS // The other barrier you will hit on the client side is each connection gets its // own ephemeral port on the client side, and all ports must like 0-65535 // So to really test how many connections vert.x can handle, you need to test the // server with multiple clients on different machines. var client = vertx.createNetClient().setSendBufferSize(2048).setReceiveBufferSize(2048); var numConns = 50000; var received = 0; connect(0); function connect(num) { client.connect(1234, 'localhost', function(sock) { stdout.println("connected " + num); sock.dataHandler(function(buffer) { stdout.println("received " + received); received++; }); sock.write("X"); num++; if (num < numConns) { vertx.runOnLoop(function() { connect(num) }); } }); }
ericmckean/collide
deps/vert.x-1.1.0.final/examples/javascript/conn_perf/conn_client.js
JavaScript
apache-2.0
1,729
import logging import re import unittest from functools import wraps import pexpect from flexmock import flexmock_teardown from hamcrest import assert_that, equal_to from tests.util.global_reactor import TEST_SWITCHES def with_protocol(test): @wraps(test) def wrapper(self): try: logging.info(">>>> CONNECTING [%s]" % self.protocol.name) self.protocol.connect() logging.info(">>>> START") test(self, self.protocol) logging.info(">>>> SUCCESS") finally: self.protocol.disconnect() return wrapper class LoggingFileInterface(object): def __init__(self, prefix): self.prefix = prefix def write(self, data): for line in data.rstrip(b'\r\n').split(b'\r\n'): logging.info(self.prefix + repr(line)) def flush(self): pass class ProtocolTester(object): def __init__(self, name, host, port, username, password, conf=None): self.name = name self.host = host self.port = port self.username = username self.password = password self.conf = conf self.child = None def connect(self): self.child = pexpect.spawn(self.get_ssh_connect_command()) self.child.delaybeforesend = 0.0005 self.child.logfile = None self.child.logfile_read = LoggingFileInterface(prefix="[%s] " % self.name) self.child.timeout = 1 self.login() def disconnect(self): self.child.close() def get_ssh_connect_command(self): pass def login(self): pass def read(self, expected, regex=False): self.wait_for(expected, regex) assert_that(self.child.before, equal_to(b"")) def readln(self, expected, regex=False): self.read(expected + "\r\n", regex=regex) def read_lines_until(self, expected): self.wait_for(expected) lines = self.child.before.decode().split('\r\n') return lines def read_eof(self): self.child.expect(pexpect.EOF) def wait_for(self, expected, regex=False): pattern = re.escape(expected) if not regex else expected self.child.expect(pattern) def write(self, data): self.child.sendline(data.encode()) self.read(data + "\r\n") def write_invisible(self, data): self.child.sendline(data.encode()) self.read("\r\n") def write_stars(self, 
data): self.child.sendline(data.encode()) self.read(len(data) * "*" + "\r\n") def write_raw(self, data): self.child.send(data.encode()) class SshTester(ProtocolTester): CONF_KEY = "ssh" def get_ssh_connect_command(self): return 'ssh %s@%s -p %s -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null' \ % (self.username, self.host, self.port) def login(self): self.wait_for('[pP]assword: ', regex=True) self.write_invisible(self.password) self.wait_for('[>#]$', regex=True) class TelnetTester(ProtocolTester): CONF_KEY = "telnet" def get_ssh_connect_command(self): return 'telnet %s %s' \ % (self.host, self.port) def login(self): self.wait_for("Username: ") self.write(self.username) self.wait_for("[pP]assword: ", True) self.write_invisible(self.password) self.wait_for('[>#]$', regex=True) class ProtocolTest(unittest.TestCase): tester_class = SshTester test_switch = None def setUp(self): conf = TEST_SWITCHES[self.test_switch] self.protocol = self.tester_class(self.tester_class.CONF_KEY, "127.0.0.1", conf[self.tester_class.CONF_KEY], u'root', u'root', conf) def tearDown(self): flexmock_teardown()
internap/fake-switches
tests/util/protocol_util.py
Python
apache-2.0
3,985
#!/usr/bin/env ruby # -------------------------------------------------------------------------- */ # Copyright 2002-2015, OpenNebula Project (OpenNebula.org), C12G Labs # # Licensed under the Apache License, Version 2.0 (the "License"); you may */ # not use this file except in compliance with the License. You may obtain */ # a copy of the License at */ # */ # http://www.apache.org/licenses/LICENSE-2.0 */ # */ # Unless required by applicable law or agreed to in writing, software */ # distributed under the License is distributed on an "AS IS" BASIS, */ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ # See the License for the specific language governing permissions and */ # limitations under the License. */ # -------------------------------------------------------------------------- */ # ---------------------------------------------------------------------------- # Set up the environment for the driver # ---------------------------------------------------------------------------- ONE_LOCATION = ENV["ONE_LOCATION"] if !ONE_LOCATION RUBY_LIB_LOCATION = "/usr/lib/one/ruby" VAR_LOCATION = "/var/lib/one" else RUBY_LIB_LOCATION = ONE_LOCATION + "/lib/ruby" VAR_LOCATION = ONE_LOCATION + "/var" end $: << RUBY_LIB_LOCATION require "OpenNebulaDriver" require 'getoptlong' require 'base64' require 'rexml/document' # This class provides basic messaging and logging functionality # to implement Datastore Drivers. A datastore driver # is a program (or a set of) that specialize the OpenNebula behavior # by interfacing with specific infrastructure storage solutions. 
class DatastoreDriver < OpenNebulaDriver # Image Driver Protocol constants ACTION = { :cp => "CP", :rm => "RM", :mkfs => "MKFS", :log => "LOG", :stat => "STAT", :clone => "CLONE", :monitor => "MONITOR", :snap_delete => "SNAP_DELETE", :snap_revert => "SNAP_REVERT", :snap_flatten=> "SNAP_FLATTEN" } # Register default actions for the protocol def initialize(ds_type, options={}) @options={ :concurrency => 10, :threaded => true, :retries => 0, :local_actions => { ACTION[:stat] => nil, ACTION[:cp] => nil, ACTION[:rm] => nil, ACTION[:mkfs] => nil, ACTION[:clone] => nil, ACTION[:monitor] => nil, ACTION[:snap_delete] => nil, ACTION[:snap_revert] => nil, ACTION[:snap_flatten] => nil } }.merge!(options) super("datastore/", @options) if ds_type == nil @types = Dir["#{@local_scripts_path}/*/"].map do |d| d.split('/')[-1] end elsif ds_type.class == String @types = [ds_type] else @types = ds_type end register_action(ACTION[:cp].to_sym, method("cp")) register_action(ACTION[:rm].to_sym, method("rm")) register_action(ACTION[:mkfs].to_sym, method("mkfs")) register_action(ACTION[:stat].to_sym, method("stat")) register_action(ACTION[:clone].to_sym, method("clone")) register_action(ACTION[:monitor].to_sym, method("monitor")) register_action(ACTION[:snap_delete].to_sym, method("snap_delete")) register_action(ACTION[:snap_revert].to_sym, method("snap_revert")) register_action(ACTION[:snap_flatten].to_sym, method("snap_flatten")) end ############################################################################ # Image Manager Protocol Actions (generic implementation) ############################################################################ def cp(id, drv_message) ds = get_ds_type(drv_message) do_image_action(id, ds, :cp, "#{drv_message} #{id}") end def rm(id, drv_message) ds = get_ds_type(drv_message) do_image_action(id, ds, :rm, "#{drv_message} #{id}") end def mkfs(id, drv_message) ds = get_ds_type(drv_message) do_image_action(id, ds, :mkfs, "#{drv_message} #{id}") end def stat(id, 
drv_message) ds = get_ds_type(drv_message) do_image_action(id, ds, :stat, "#{drv_message} #{id}") end def clone(id, drv_message) ds = get_ds_type(drv_message) do_image_action(id, ds, :clone, "#{drv_message} #{id}") end def monitor(id, drv_message) ds = get_ds_type(drv_message) do_image_action(id, ds, :monitor, "#{drv_message} #{id}", true) end def snap_delete(id, drv_message) ds = get_ds_type(drv_message) do_image_action(id, ds, :snap_delete, "#{drv_message} #{id}") end def snap_revert(id, drv_message) ds = get_ds_type(drv_message) do_image_action(id, ds, :snap_revert, "#{drv_message} #{id}") end def snap_flatten(id, drv_message) ds = get_ds_type(drv_message) do_image_action(id, ds, :snap_flatten, "#{drv_message} #{id}") end private def is_available?(ds, id, action) if @types.include?(ds) return true else send_message(ACTION[action], RESULT[:failure], id, "Datastore driver '#{ds}' not available") return false end end def do_image_action(id, ds, action, arguments, encode64=false) return if not is_available?(ds,id,action) path = File.join(@local_scripts_path, ds) cmd = File.join(path, ACTION[action].downcase) cmd << " " << arguments rc = LocalCommand.run(cmd, log_method(id)) result, info = get_info_from_execution(rc) info = Base64::encode64(info).strip.delete("\n") if encode64 send_message(ACTION[action], result, id, info) end def get_ds_type(drv_message) message = Base64.decode64(drv_message) xml_doc = REXML::Document.new(message) dsxml = xml_doc.root.elements['/DS_DRIVER_ACTION_DATA/DATASTORE/DS_MAD'] dstxt = dsxml.text if dsxml return dstxt end end ################################################################################ ################################################################################ # DatastoreDriver Main program ################################################################################ ################################################################################ opts = GetoptLong.new( [ '--threads', '-t', 
GetoptLong::OPTIONAL_ARGUMENT ], [ '--ds-types', '-d', GetoptLong::OPTIONAL_ARGUMENT ] ) ds_type = nil threads = 15 begin opts.each do |opt, arg| case opt when '--threads' threads = arg.to_i when '--ds-types' ds_type = arg.split(',').map {|a| a.strip } end end rescue Exception => e exit(-1) end ds_driver = DatastoreDriver.new(ds_type, :concurrency => threads) ds_driver.start_driver
ggalancs/one
src/datastore_mad/one_datastore.rb
Ruby
apache-2.0
7,408
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.metron.rest.controller; import static org.apache.metron.rest.MetronRestConstants.TEST_PROFILE; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.notNullValue; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic; import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import com.google.common.collect.ImmutableMap; import java.util.ArrayList; import org.adrianwalker.multilinestring.Multiline; import org.apache.curator.framework.CuratorFramework; import 
org.apache.metron.common.utils.JSONUtils; import org.apache.metron.indexing.dao.InMemoryMetaAlertDao; import org.apache.metron.indexing.dao.SearchIntegrationTest; import org.apache.metron.indexing.dao.metaalert.MetaAlertAddRemoveRequest; import org.apache.metron.indexing.dao.metaalert.MetaAlertConstants; import org.apache.metron.indexing.dao.search.GetRequest; import org.apache.metron.rest.service.MetaAlertService; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.http.MediaType; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.junit4.SpringRunner; import org.springframework.test.web.servlet.MockMvc; import org.springframework.test.web.servlet.ResultActions; import org.springframework.test.web.servlet.setup.MockMvcBuilders; import org.springframework.web.context.WebApplicationContext; @RunWith(SpringRunner.class) @SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT) @ActiveProfiles(TEST_PROFILE) public class MetaAlertControllerIntegrationTest extends DaoControllerTest { @Autowired private MetaAlertService metaAlertService; @Autowired public CuratorFramework client; @Autowired private WebApplicationContext wac; private MockMvc mockMvc; private String metaalertUrl = "/api/v1/metaalert"; private String user = "user"; private String password = "password"; private String metaAlertIndex = "metaalert_index"; /** { "alerts" : [ { "guid": "bro_1", "sensorType": "bro", "index": "bro_index_2017.01.01.01" }, { "guid": "snort_2", "sensorType": "snort", "index": "snort_index_2017.01.01.01" } ], "groups" : ["group_one", "group_two"] } */ @Multiline public static String create; /** * [ *{"guid":"meta_1","metron_alert":[{"guid":"bro_1", 
"source.type":"bro"}],"average":"5.0","min":"5.0","median":"5.0","max":"5.0","count":"1.0","sum":"5.0", "status":"active"}, *{"guid":"meta_2","metron_alert":[{"guid":"bro_1", "source.type":"bro"},{"guid":"bro_2", "source.type":"bro"},{"guid":"snort_1", "source.type":"snort"}],"average":"5.0","min":"0.0","median":"5.0","max":"10.0","count":"3.0","sum":"15.0"} * ] */ @Multiline public static String metaAlertData; @Before public void setup() throws Exception { this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).apply(springSecurity()).build(); ImmutableMap<String, String> testData = ImmutableMap.of( "bro_index_2017.01.01.01", SearchIntegrationTest.broData, "snort_index_2017.01.01.01", SearchIntegrationTest.snortData, metaAlertIndex, metaAlertData ); loadTestData(testData); } @After public void cleanup() { InMemoryMetaAlertDao.clear(); } @Test public void test() throws Exception { // Testing searching by alert // Test no meta alert String guid = "missing_1"; ResultActions result = this.mockMvc.perform( post(metaalertUrl + "/searchByAlert") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("text/plain;charset=UTF-8")) .content(guid)); result.andExpect(status().isOk()) .andExpect( content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))) .andExpect(jsonPath("$.total").value(0)); // Test single meta alert guid = "snort_1"; result = this.mockMvc.perform( post(metaalertUrl + "/searchByAlert") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("text/plain;charset=UTF-8")) .content(guid)); result.andExpect(status().isOk()) .andExpect( content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))) .andExpect(jsonPath("$.total").value(1)) .andExpect(jsonPath("$.results[0].source.guid").value("meta_2")) .andExpect(jsonPath("$.results[0].source.count").value(3.0)); // Test multiple meta alerts guid = "bro_1"; result = this.mockMvc.perform( post(metaalertUrl + 
"/searchByAlert") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("text/plain;charset=UTF-8")) .content(guid)); result.andExpect(status().isOk()) .andExpect( content().contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))) .andExpect(jsonPath("$.total").value(2)) .andExpect(jsonPath("$.results[0].source.guid").value("meta_2")) .andExpect(jsonPath("$.results[0].source.count").value(3.0)) .andExpect(jsonPath("$.results[1].source.guid").value("meta_1")) .andExpect(jsonPath("$.results[1].source.count").value(1.0)); } @Test public void shouldCreateMetaAlert() throws Exception { ResultActions result = this.mockMvc.perform( post(metaalertUrl + "/create") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("application/json;charset=UTF-8")) .content(create)); result.andExpect(status().isOk()) .andExpect(jsonPath("$.guid", notNullValue())) .andExpect(jsonPath("$.timestamp", greaterThan(0L))) .andExpect(jsonPath("$.sensorType").value(MetaAlertConstants.METAALERT_TYPE)) .andExpect(jsonPath("$.document.timestamp", greaterThan(0L))) .andExpect(jsonPath("$.document['source.type']").value(MetaAlertConstants.METAALERT_TYPE)) .andExpect(jsonPath("$.document.status").value("active")) .andExpect(jsonPath("$.document.groups[0]").value("group_one")) .andExpect(jsonPath("$.document.groups[1]").value("group_two")) .andExpect(jsonPath("$.document.metron_alert[0].guid").value("bro_1")) .andExpect(jsonPath("$.document.metron_alert[1].guid").value("snort_2")); } @Test public void shouldAddRemoveAlerts() throws Exception { MetaAlertAddRemoveRequest addRequest = new MetaAlertAddRemoveRequest(); addRequest.setMetaAlertGuid("meta_1"); addRequest.setAlerts(new ArrayList<GetRequest>() {{ add(new GetRequest("bro_2", "bro", "bro_index_2017.01.01.01")); add(new GetRequest("bro_3", "bro", "bro_index_2017.01.01.01")); }}); ResultActions result = this.mockMvc.perform( post(metaalertUrl + "/add/alert") 
.with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("application/json;charset=UTF-8")) .content(JSONUtils.INSTANCE.toJSON(addRequest, false))); result.andExpect(status().isOk()) .andExpect(jsonPath("$.guid").value("meta_1")) .andExpect(jsonPath("$.sensorType").value(MetaAlertConstants.METAALERT_TYPE)) .andExpect(jsonPath("$.document.metron_alert[0].guid").value("bro_1")) .andExpect(jsonPath("$.document.metron_alert[1].guid").value("bro_2")) .andExpect(jsonPath("$.document.metron_alert[2].metaalerts").value("meta_1")) .andExpect(jsonPath("$.document.metron_alert[2].guid").value("bro_3")) .andExpect(jsonPath("$.document.metron_alert[2].metaalerts").value("meta_1")); MetaAlertAddRemoveRequest addDuplicateRequest = new MetaAlertAddRemoveRequest(); addDuplicateRequest.setMetaAlertGuid("meta_1"); addDuplicateRequest.setAlerts(new ArrayList<GetRequest>() {{ add(new GetRequest("bro_1", "bro")); }}); result = this.mockMvc.perform( post(metaalertUrl + "/add/alert") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("application/json;charset=UTF-8")) .content(JSONUtils.INSTANCE.toJSON(addDuplicateRequest, false))); result.andExpect(status().isOk()) .andExpect(jsonPath("$.guid").value("meta_1")) .andExpect(jsonPath("$.sensorType").value(MetaAlertConstants.METAALERT_TYPE)) .andExpect(jsonPath("$.document.metron_alert[0].guid").value("bro_1")) .andExpect(jsonPath("$.document.metron_alert[1].guid").value("bro_2")) .andExpect(jsonPath("$.document.metron_alert[2].metaalerts").value("meta_1")) .andExpect(jsonPath("$.document.metron_alert[2].guid").value("bro_3")) .andExpect(jsonPath("$.document.metron_alert[2].metaalerts").value("meta_1")); MetaAlertAddRemoveRequest removeRequest = new MetaAlertAddRemoveRequest(); removeRequest.setMetaAlertGuid("meta_1"); removeRequest.setAlerts(new ArrayList<GetRequest>() {{ add(new GetRequest("bro_2", "bro")); add(new GetRequest("bro_3", "bro")); }}); result = this.mockMvc.perform( 
post(metaalertUrl + "/remove/alert") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("application/json;charset=UTF-8")) .content(JSONUtils.INSTANCE.toJSON(removeRequest, false))); result.andExpect(status().isOk()) .andExpect(jsonPath("$.guid").value("meta_1")) .andExpect(jsonPath("$.sensorType").value(MetaAlertConstants.METAALERT_TYPE)) .andExpect(jsonPath("$.document.metron_alert.*", hasSize(equalTo(1)))) .andExpect(jsonPath("$.document.metron_alert[0].guid").value("bro_1")); MetaAlertAddRemoveRequest removeMissingRequest = new MetaAlertAddRemoveRequest(); removeMissingRequest.setMetaAlertGuid("meta_1"); removeMissingRequest.setAlerts(new ArrayList<GetRequest>() {{ add(new GetRequest("bro_2", "bro")); }}); result = this.mockMvc.perform( post(metaalertUrl + "/remove/alert") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("application/json;charset=UTF-8")) .content(JSONUtils.INSTANCE.toJSON(removeMissingRequest, false))); result.andExpect(status().isOk()) .andExpect(jsonPath("$.guid").value("meta_1")) .andExpect(jsonPath("$.sensorType").value(MetaAlertConstants.METAALERT_TYPE)) .andExpect(jsonPath("$.document.metron_alert.*", hasSize(equalTo(1)))) .andExpect(jsonPath("$.document.metron_alert[0].guid").value("bro_1")); MetaAlertAddRemoveRequest emptyMetaAlertRequest = new MetaAlertAddRemoveRequest(); emptyMetaAlertRequest.setMetaAlertGuid("meta_1"); emptyMetaAlertRequest.setAlerts(new ArrayList<GetRequest>() {{ add(new GetRequest("bro_1", "bro")); }}); result = this.mockMvc.perform( post(metaalertUrl + "/remove/alert") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("application/json;charset=UTF-8")) .content(JSONUtils.INSTANCE.toJSON(emptyMetaAlertRequest, false))); result.andExpect(status().isInternalServerError()) .andExpect(jsonPath("$.message").value("Removing these alerts will result in an empty meta alert. 
Empty meta alerts are not allowed.")) .andExpect(jsonPath("$.fullMessage").value("IllegalStateException: Removing these alerts will result in an empty meta alert. Empty meta alerts are not allowed.")); } @Test public void shouldUpdateStatus() throws Exception { ResultActions result = this.mockMvc.perform( post(metaalertUrl + "/update/status/meta_2/inactive") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))); result.andExpect(status().isOk()) .andExpect(jsonPath("$.guid").value("meta_2")) .andExpect(jsonPath("$.sensorType").value(MetaAlertConstants.METAALERT_TYPE)) .andExpect(jsonPath("$.document.status").value("inactive")); result = this.mockMvc.perform( post(metaalertUrl + "/update/status/meta_2/active") .with(httpBasic(user, password)).with(csrf()) .contentType(MediaType.parseMediaType("application/json;charset=UTF-8"))); result.andExpect(status().isOk()) .andExpect(jsonPath("$.guid").value("meta_2")) .andExpect(jsonPath("$.sensorType").value(MetaAlertConstants.METAALERT_TYPE)) .andExpect(jsonPath("$.document.status").value("active")); } }
justinleet/incubator-metron
metron-interface/metron-rest/src/test/java/org/apache/metron/rest/controller/MetaAlertControllerIntegrationTest.java
Java
apache-2.0
14,540
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.index.mapper; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery; import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.fielddata.BooleanScriptFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.script.BooleanFieldScript; import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.script.ScriptType; import 
org.elasticsearch.search.MultiValueMode; import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class BooleanScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTestCase { @Override public void testDocValues() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); List<Long> results = new ArrayList<>(); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); BooleanScriptFieldType ft = simpleMappedFieldType(); BooleanScriptFieldData ifd = ft.fielddataBuilder("test", mockContext()::lookup).build(null, null); searcher.search(new MatchAllDocsQuery(), new Collector() { @Override public ScoreMode scoreMode() { return ScoreMode.COMPLETE_NO_SCORES; } @Override public LeafCollector getLeafCollector(LeafReaderContext context) { SortedNumericDocValues dv = ifd.load(context).getLongValues(); return new LeafCollector() { @Override public void setScorer(Scorable scorer) {} @Override public void collect(int doc) throws IOException { if (dv.advanceExact(doc)) { for (int i = 0; i < dv.docValueCount(); i++) { results.add(dv.nextValue()); } } } }; } }); assertThat(results, equalTo(List.of(1L, 0L, 1L))); } } } @Override public void testSort() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new 
BytesRef("{\"foo\": [true]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [false]}")); assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [true]}")); } } } @Override public void testUsedInScript() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType()); assertThat(searcher.count(new ScriptScoreQuery(new MatchAllDocsQuery(), new Script("test"), new ScoreScript.LeafFactory() { @Override public boolean needs_score() { return false; } @Override public ScoreScript newInstance(LeafReaderContext ctx) { return new ScoreScript(Map.of(), searchContext.lookup(), ctx) { @Override public double execute(ExplanationHolder explanation) { ScriptDocValues.Booleans booleans = (ScriptDocValues.Booleans) getDoc().get("test"); return booleans.get(0) ? 
3 : 0; } }; } }, 2.5f, "test", 0, Version.CURRENT)), equalTo(1)); } } } @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true, false]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": []}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().existsQuery(mockContext())), equalTo(3)); } } } @Override public void testRangeQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1)); assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1)); assertThat(searcher.count(ft.rangeQuery(false, true, false, true, null, null, null, mockContext())), equalTo(1)); assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(0)); } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); 
assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1)); assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(1)); assertThat(searcher.count(ft.rangeQuery(false, true, true, false, null, null, null, mockContext())), equalTo(1)); assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(0)); } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); MappedFieldType ft = simpleMappedFieldType(); assertThat(searcher.count(ft.rangeQuery(false, false, true, true, null, null, null, mockContext())), equalTo(1)); assertThat(searcher.count(ft.rangeQuery(true, true, true, true, null, null, null, mockContext())), equalTo(1)); assertThat(searcher.count(ft.rangeQuery(false, true, true, true, null, null, null, mockContext())), equalTo(2)); assertThat(searcher.count(ft.rangeQuery(false, false, false, false, null, null, null, mockContext())), equalTo(0)); assertThat(searcher.count(ft.rangeQuery(true, true, false, false, null, null, null, mockContext())), equalTo(0)); } } } public void testRangeQueryDegeneratesIntoNotExpensive() throws IOException { assertThat( simpleMappedFieldType().rangeQuery(true, true, false, false, null, null, null, mockContext()), instanceOf(MatchNoDocsQuery.class) ); assertThat( simpleMappedFieldType().rangeQuery(false, false, false, false, null, null, null, mockContext()), instanceOf(MatchNoDocsQuery.class) ); // Even if the running the field would blow up because it loops the query *still* just returns none. 
assertThat( loopFieldType().rangeQuery(true, true, false, false, null, null, null, mockContext()), instanceOf(MatchNoDocsQuery.class) ); assertThat( loopFieldType().rangeQuery(false, false, false, false, null, null, null, mockContext()), instanceOf(MatchNoDocsQuery.class) ); } @Override protected Query randomRangeQuery(MappedFieldType ft, SearchExecutionContext ctx) { // Builds a random range query that doesn't degenerate into match none switch (randomInt(2)) { case 0: return ft.rangeQuery(true, true, true, true, null, null, null, ctx); case 1: return ft.rangeQuery(false, true, true, true, null, null, null, ctx); case 2: return ft.rangeQuery(false, true, false, true, null, null, null, ctx); default: throw new UnsupportedOperationException(); } } @Override public void testTermQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termQuery("true", mockContext())), equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(0)); assertThat(searcher.count(build("xor_param", Map.of("param", false)).termQuery(true, mockContext())), equalTo(1)); } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termQuery(false, mockContext())), equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termQuery("false", mockContext())), 
equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termQuery(null, mockContext())), equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termQuery(true, mockContext())), equalTo(0)); assertThat(searcher.count(build("xor_param", Map.of("param", false)).termQuery(false, mockContext())), equalTo(1)); } } } @Override protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) { return ft.termQuery(randomBoolean(), ctx); } @Override public void testTermsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, true), mockContext())), equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("true", "true"), mockContext())), equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(false, false), mockContext())), equalTo(0)); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, false), mockContext())), equalTo(1)); } } try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(false, false), mockContext())), equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of("false", "false"), mockContext())), equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termsQuery(singletonList(null), mockContext())), equalTo(1)); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, true), 
mockContext())), equalTo(0)); assertThat(searcher.count(simpleMappedFieldType().termsQuery(List.of(true, false), mockContext())), equalTo(1)); } } } public void testEmptyTermsQueryDegeneratesIntoMatchNone() throws IOException { assertThat(simpleMappedFieldType().termsQuery(List.of(), mockContext()), instanceOf(MatchNoDocsQuery.class)); } @Override protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { switch (randomInt(2)) { case 0: return ft.termsQuery(List.of(true), ctx); case 1: return ft.termsQuery(List.of(false), ctx); case 2: return ft.termsQuery(List.of(false, true), ctx); default: throw new UnsupportedOperationException(); } } public void testDualingQueries() throws IOException { BooleanFieldMapper ootb = new BooleanFieldMapper.Builder("foo", ScriptCompiler.NONE).build(new ContentPath()); try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { List<Boolean> values = randomList(0, 2, ESTestCase::randomBoolean); String source = "{\"foo\": " + values + "}"; ParseContext ctx = mock(ParseContext.class); when(ctx.parser()).thenReturn(createParser(JsonXContent.jsonXContent, source)); ParseContext.Document doc = new ParseContext.Document(); when(ctx.doc()).thenReturn(doc); when(ctx.sourceToParse()).thenReturn(new SourceToParse("test", "test", new BytesArray(source), XContentType.JSON)); doc.add(new StoredField("_source", new BytesRef(source))); ctx.parser().nextToken(); ctx.parser().nextToken(); ctx.parser().nextToken(); while (ctx.parser().nextToken() != Token.END_ARRAY) { ootb.parse(ctx); } iw.addDocument(doc); try (DirectoryReader reader = iw.getReader()) { IndexSearcher searcher = newSearcher(reader); assertSameCount( searcher, source, "*", simpleMappedFieldType().existsQuery(mockContext()), ootb.fieldType().existsQuery(mockContext()) ); boolean term = randomBoolean(); assertSameCount( searcher, source, term, simpleMappedFieldType().termQuery(term, mockContext()), 
ootb.fieldType().termQuery(term, mockContext()) ); List<Boolean> terms = randomList(0, 3, ESTestCase::randomBoolean); assertSameCount( searcher, source, terms, simpleMappedFieldType().termsQuery(terms, mockContext()), ootb.fieldType().termsQuery(terms, mockContext()) ); boolean low; boolean high; if (randomBoolean()) { low = high = randomBoolean(); } else { low = false; high = true; } boolean includeLow = randomBoolean(); boolean includeHigh = randomBoolean(); assertSameCount( searcher, source, (includeLow ? "[" : "(") + low + "," + high + (includeHigh ? "]" : ")"), simpleMappedFieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()), ootb.fieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()) ); } } } private void assertSameCount(IndexSearcher searcher, String source, Object queryDescription, Query scriptedQuery, Query ootbQuery) throws IOException { assertThat( "source=" + source + ",query=" + queryDescription + ",scripted=" + scriptedQuery + ",ootb=" + ootbQuery, searcher.count(scriptedQuery), equalTo(searcher.count(ootbQuery)) ); } @Override protected BooleanScriptFieldType simpleMappedFieldType() { return build("read_foo", Map.of()); } @Override protected MappedFieldType loopFieldType() { return build("loop", Map.of()); } @Override protected String typeName() { return "boolean"; } private static BooleanScriptFieldType build(String code, Map<String, Object> params) { return build(new Script(ScriptType.INLINE, "test", code, params)); } private static BooleanFieldScript.Factory factory(Script script) { switch (script.getIdOrCode()) { case "read_foo": return (fieldName, params, lookup) -> (ctx) -> new BooleanFieldScript(fieldName, params, lookup, ctx) { @Override public void execute() { for (Object foo : (List<?>) lookup.source().get("foo")) { emit((Boolean) foo); } } }; case "xor_param": return (fieldName, params, lookup) -> (ctx) -> new BooleanFieldScript(fieldName, params, lookup, ctx) { 
@Override public void execute() { for (Object foo : (List<?>) lookup.source().get("foo")) { emit((Boolean) foo ^ ((Boolean) getParams().get("param"))); } } }; case "loop": return (fieldName, params, lookup) -> { // Indicate that this script wants the field call "test", which *is* the name of this field lookup.forkAndTrackFieldReferences("test"); throw new IllegalStateException("should have thrown on the line above"); }; default: throw new IllegalArgumentException("unsupported script [" + script.getIdOrCode() + "]"); } } private static BooleanScriptFieldType build(Script script) { return new BooleanScriptFieldType("test", factory(script), script, emptyMap(), (builder, params) -> builder); } }
robin13/elasticsearch
server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java
Java
apache-2.0
23,082
/* Copyright (c) 2016 Microsoft Corporation. All rights reserved. Released under Apache 2.0 license as described in the file LICENSE. Author: Leonardo de Moura */ #pragma once #include "library/type_context.h" namespace lean { struct elim_match_result { expr m_fn; list<expr> m_lemmas; elim_match_result(expr const & fn, list<expr> const & lemmas):m_fn(fn), m_lemmas(lemmas) {} }; elim_match_result elim_match(environment & env, options const & opts, metavar_context & mctx, local_context const & lctx, expr const & eqns); expr mk_nonrec(environment & env, options const & opts, metavar_context & mctx, local_context const & lctx, expr const & eqns); void initialize_elim_match(); void finalize_elim_match(); }
fgdorais/lean
src/library/equations_compiler/elim_match.h
C
apache-2.0
745
.detal{padding-bottom:50px;background:#edf1f1;color:#505050} .detal .container{position: relative;} .detal .dt_left{width:865px;height:auto;min-height: 500px;overflow: hidden;padding:0;text-align: center;border-right:solid 1px #e3e2d1;background:#fff;float:left} .detal .dt_left img{height:auto;margin-top:-4px;max-width:100%} .detal .dt_right{height:auto;width:241px;padding:0 30px;border:solid 1px #f6f5e2;background:#fffeeb;color:#0D0D0D;float:left;position: absolute;right: 0;top: 0;} .dt_right .box{width:100%;display:none} .dt_right .box.active{display: block;} .dt_right .title{height:56px;line-height:56px;font-size:18px;border-bottom:solid 1px #facd89;overflow:hidden;text-overflow:ellipsis;white-space:nowrap} .dt_right .title h2{font-size:18px} .dt_right .buy p{line-height:40px} .dt_right .buy .p1{margin:14px 0 26px} .dt_right .buy p .recharge{display:inline-block;height:26px;width:50px;line-height:26px;text-align:center;border-radius:4px;background-color:#fc5043;color:#fff;float:right} .dt_right .buy p .recharge:hover{transition:background-color .2s;background:#CE3D32} .dt_right .buy p b{font-weight:600} .dt_right .buy p em{color:#f77d07;font-weight:600} .dt_right .buy .amount{position:relative;margin-bottom:3px} .dt_right .buy .amount span{display:inline-block;height:30px;width:40px;position:absolute;right:0;top:6px;text-align:center;line-height:30px;color:#d58610;z-index: 3;} .dt_right .buy .amount input{height:30px;width:195px;padding:0 40px 0 4px;border-radius:2px;border:solid 1px #facd89;position:relative;z-index:2} .dt_right .buy .amount label{display:block;position:absolute;width:233px;height:auto;text-align:center;left:0;top:0;line-height:20px;transition:all .3s;z-index:1;padding:4px;background:#facd89;border-radius:2px;opacity:0;z-index: 999;} .dt_right .buy .amount label.active{top:38px;opacity:1} .dt_right .buy .amount label:before{position:absolute;display:block;top:-6px;left:50%;margin-left:-6px;content:"";height:6px;width:12px;background:transparent 
url(../img/icon/arrow_up01.png) no-repeat left top} .dt_right .buy .check_box{float:left;display:inline-block;width:12px;height:12px;margin:6px 5px 0 0;border:1px solid #000;cursor:pointer;background:#FFF none repeat scroll 0 0} .dt_right .buy .check_box.active{background:#FFF url(../img/icon/risk_confirm.png) no-repeat scroll center bottom} .dt_right .buy .agreement{max-width: 215px;text-overflow:ellipsis;overflow: hidden;white-space: nowrap;display: inline-block;} .dt_right .buy .agreement a{color:#518BC6;cursor:pointer} .dt_right .buy p .btn{display:block;width:239px;height:34px;line-height:34px;color:#fc5043;text-align:center;border:solid 1px #fc5043;border-radius:4px;margin-top:20px} .dt_right .buy p .btn.ban:hover{background:#9E9E9E none repeat scroll 0 0;color: #ffffff!important;} .dt_right .buy p .btn.ban{background:#9E9E9E none repeat scroll 0 0;color:#fff;border:solid 1px #9E9E9E} .dt_right .buy p .btn.ban:after{content:none} .dt_right .buy .text p{padding:10px 0 20px;margin-top:15px;line-height:20px;font-size:13px;color:#8A8A8A;border-top:dashed 1px #A0A0A0} .pop_btn{display: none;} .ph_mask{display: none;} /*支付页风险提示弹窗*/ .risk_pop{position:fixed;top:0;left:0;display:none;width:100%;height:100%;background:transparent url(../V2/img/bg/top_bg.png) repeat scroll left top;z-index: 9999;transition: all .2s linear;opacity: 0;} .risk_pop.open_risk{opacity: 1;} .risk_pop .cont{position:fixed;top:50%;left:50%;margin:-115px 0 0 -245px;width:490px;height:230px;background:#fff;transition: all .2s linear;border-radius: 6px;} .risk_pop .cont h1{margin:10px 0 10px;width:100%;height:50px;color:#0d0d0d;text-align:center;font-size:18px;line-height:50px} .risk_pop .cont .item{line-height:35px;height:35px} .risk_pop .cont .item label{width:30%;text-align:right;display:block;float:left} .risk_pop .cont .item p{width:70%;float:left;color:#fd994f;text-indent:10px;overflow:hidden;text-overflow:ellipsis;white-space:nowrap} .risk_pop a{float:left;display:block;margin:25px 2% 0 
23%;width:25%;height:35px;border-radius: 3px;background:#fd994f;color:#fff;text-align:center;font-size:16px;line-height:35px;cursor:pointer} .risk_pop .btn_no{float:left;display:block;margin:25px 0 0 2%;width:25%;height: 33px;line-height: 33px;color:#333333;background: #ffffff;border: solid 1px #333333;text-align:center;font-size:16px;border-radius: 3px;cursor:pointer} .risk_pop a:hover{color: #ffffff;background: #BAB9B9;border-color: #BAB9B9;} /*支付页确认成功弹窗*/ .success_pop{position:fixed;top:0;left:0;display:none;width:100%;height:100%;background:transparent url(../V2/img/bg/top_bg.png) repeat scroll left top;z-index: 9999;transition: all .2s linear;opacity: 0;} .success_pop.open_success{opacity: 1;} .success_pop .cont{position:fixed;top:50%;left:50%;margin:-110px 0 0 -255px;width:510px;height:220px;background:#fff;color:#0d0d0d;transition: all .2s linear;} .success_pop .cont h1{margin-top:10px;width:100%;height:70px;text-align:center;font-size:24px;line-height:70px} .success_pop .cont p{width:100%;line-height:20px;color:#505050;font-size:14px;text-align:center;float:left} .success_pop .btn{float:left;display:block;margin:25px 15px 0 185px;width:140px;height:40px;border-radius:3px;background:#fd994f;color:#fff;text-align:center;font-size:18px;line-height:40px;cursor:pointer} .success_pop .btn:hover{background:#BAB9B9} @media screen and (min-width:1020px) and (max-width:1230px){ .detal .dt_left{width:685px;padding:0} .detal .dt_right{padding:0 20px} .detal .dt_left img{margin-top: -4px;} } @media screen and (min-width:750px) and (max-width:1019px){ .detal .dt_left{width:100%;padding:286px 0px 20px;border-right:none;box-sizing:border-box} .detal .dt_right{padding:0 20px;width:100%;box-sizing:border-box;top: 0!important;} .dt_right .buy .amount label{width:100%;box-sizing:border-box} .dt_right .buy .amount input{width:100%;height:32px;box-sizing:border-box} .dt_right .buy p .btn{width:100%;height:36px;box-sizing:border-box} .dt_right .buy .agreement{max-width: 600px;} 
.detal .dt_left img{margin-top: -4px;} /*弹窗*/ .risk_pop .cont{margin:-115px 0 0 -350px;width:700px} } @media screen and (max-width:749px){ .detal .dt_left{height:auto;width:100%;padding:0 0px 10px;border-right:none;box-sizing:border-box} .dt_right .buy .amount label{width:100%;box-sizing:border-box} .detal .dt_right{padding:0 20px 10px;width:100%;height:auto;box-sizing:border-box;position:fixed;bottom:-360px;top: initial;z-index:1001;transition:all .3s ease-in} .dt_right .buy .amount input{width:100%;height:32px;box-sizing:border-box} .dt_right .buy p .btn{width:100%;height:36px;box-sizing:border-box;margin-top: 5px;} .detal .dt_right.dt_pop{bottom:0;opacity:1} .pop_btn{display:block;position:fixed;left:0;bottom:0;z-index:999;opacity:1;height:46px;width:100%} .dt_pop .pop_btn{opacity:0} .detal .dt_left img{margin-top: -4px;} .pop_btn.pop_btn_hide{opacity:0;display:none} .pop_btn a{display:block;height:46px;width:100%;line-height:46px;color:#FFF;background:#EF4810 none repeat scroll 0 0;text-align:center;cursor:pointer} .dt_right .title{height:36px;line-height:36px} .dt_right .title h2{font-size:16px} .dt_right .buy .p1{margin:5px 0} .dt_right .buy div>p:nth-of-type(5){line-height: 20px;} .dt_right .buy .agreement{max-width: none;white-space: normal;text-overflow: initial;word-break: break-all;display: inline;} .pop_btn{display:block;position:fixed;left:0;bottom:0;z-index:999;opacity:1;height:46px;width:100%} .dt_pop .pop_btn{opacity:0} .pop_btn.pop_btn_hide{opacity:0;display:none} .pop_btn a{display:block;height:46px;width:100%;line-height:46px;color:#FFF;background:#EF4810 none repeat scroll 0 0;text-align:center;cursor:pointer} .pop_cont .pop_box img{display:block;margin:0 auto;height:auto;width:100%;top:50%;position:absolute;transform:translate(0,-50%)} .ph_mask{position:fixed;height:100%;width:100%;left:0;top:0;background:rgba(0,0,0,.6);z-index:1000;display:none} /*弹窗*/ .risk_pop .cont{bottom:-40px;border-radius: 
0;top:initial;left:0;width:100%;margin:0;height:210px;max-height:70%;overflow:auto;padding:0 2%;opacity: 0;} .risk_pop.open_risk .cont{bottom: 0;opacity: 1;} .risk_pop .cont h1{margin-top:5px;height:60px;font-size:20px;line-height:60px} .risk_pop a{width:40%;margin:10px 3% 0 7%;float:left} .risk_pop .btn_no{margin:10px 0 0 3%;float:left;width: 40%;text-align: center;height: 35px;line-height: 35px;} .success_pop .cont{position:fixed;bottom:-40px;top:initial;left:0;width:100%;margin:0;height:auto;padding:0 2%;opacity: 0;} .success_pop.open_success .cont{bottom: 0;opacity: 1;} .success_pop .cont h1{margin-top:5px;height:50px;font-size:20px;line-height:50px} .success_pop .cont p{float:none;text-align:center} .success_pop .cont p br{display:none} .success_pop .btn{margin:10px 0;width:100%} }
ZYongF/web
learnWeb/WebRoot/css/product_QS.css
CSS
apache-2.0
8,798
import logging import sys import unittest import numpy import numpy.testing as nptst import math from sppy import csarray class csarrayTest(unittest.TestCase): def setUp(self): logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) self.A = csarray((5, 5)) nrow = 5 ncol = 7 self.B = csarray((nrow, ncol)) self.B[0, 1] = 1 self.B[1, 3] = 5.2 self.B[3, 3] = -0.2 self.B[0, 6] = -1.23 self.B[4, 4] = 12.2 nrow = 100 ncol = 100 self.C = csarray((nrow, ncol)) self.C[0, 1] = 1 self.C[10, 3] = 5.2 self.C[30, 34] = -0.2 self.C[0, 62] = -1.23 self.C[4, 41] = 12.2 self.D = csarray((5, 5)) self.D[0, 0] = 23.1 self.D[2, 0] = -3.1 self.D[3, 0] = -10.0 self.D[2, 1] = -5 self.D[3, 1] = 5 self.E = csarray((0, 0)) self.F = csarray((6, 6), dtype=numpy.int) self.F[0, 0] = 23 self.F[2, 0] = -3 self.F[3, 0] = -10 self.F[2, 1] = -5 self.F[3, 1] = 5 self.G = csarray((6, 6), storagetype="row") self.G[0, 0] = 23 self.G[2, 0] = -3 self.G[3, 0] = -10 self.G[2, 1] = -5 self.G[3, 1] = 5 self.H = csarray((5, 7), storagetype="row") self.H[0, 1] = 1 self.H[1, 3] = 5.2 self.H[3, 3] = -0.2 self.H[0, 6] = -1.23 self.H[4, 4] = 12.2 self.I = csarray((5, 5), storagetype="row") self.a = csarray(10, dtype=numpy.float) self.a[0] = 23 self.a[3] = 1.2 self.a[4] = -8 self.b = csarray(10, dtype=numpy.int) self.b[0] = 23 self.b[5] = 1 self.b[8] = -8 self.c = csarray((3, ), dtype=numpy.float) self.d = csarray((0, ), dtype=numpy.float) self.storagetypes = ["row", "col"] def testInit(self): A = csarray((5, 7)) self.assertEquals(A.shape, (5, 7)) A = csarray((1, 1)) self.assertEquals(A.shape, (1, 1)) A = csarray((1, 0)) self.assertEquals(A.shape, (1, 0)) A = csarray((0, 0)) self.assertEquals(A.shape, (0, 0)) a = csarray((5)) self.assertEquals(a.shape, (5,)) a = csarray(0) self.assertEquals(a.shape, (0,)) #Test bad input params self.assertRaises(ValueError, csarray, (0,1,2)) self.assertRaises(ValueError, csarray, "a") self.assertRaises(ValueError, csarray, (5, 5), numpy.float, "abc") #TODO: Test other dtypes A = 
csarray((5, 5)) self.assertEquals(A.dtype, numpy.float) self.assertEquals(self.F[0, 0], 23) self.F[1, 1] = 51.2 self.assertEquals(self.F[1, 1], 51) #Test assignment with a numpy array A = numpy.array([[3.1, 0, 100], [1.11, 0, 4], [0, 0, 5.2]]) B = csarray(A) nptst.assert_array_equal(B.toarray(), A) A = numpy.array([3.1, 0, 100]) B = csarray(A) nptst.assert_array_equal(B.toarray(), A) B = csarray(A, dtype=numpy.int8) nptst.assert_array_equal(B.toarray(), numpy.array(A, numpy.int8)) B = csarray(A, dtype=numpy.int8, storagetype="row") nptst.assert_array_equal(B.toarray(), numpy.array(A, numpy.int8)) #Assignment to other csarray B = csarray(self.B, numpy.int) for i in range(B.shape[0]): for j in range(B.shape[1]): self.assertEquals(B[i, j], int(self.B[i, j])) B = csarray(self.B, numpy.int, storagetype="row") for i in range(B.shape[0]): for j in range(B.shape[1]): self.assertEquals(B[i, j], int(self.B[i, j])) F = csarray(self.F, numpy.float) F[0, 0] += 0.1 self.assertEquals(F[0, 0], 23.1) #This doesn't work as we can't instantiate using an array #b = csarray(self.b, numpy.int) #for i in range(b.shape[0]): # self.assertEquals(b[i], int(self.b[i])) def testNDim(self): A = csarray((5, 7)) self.assertEquals(A.ndim, 2) A = csarray((5, 7), storagetype="row") self.assertEquals(A.ndim, 2) A = csarray((0, 0)) self.assertEquals(A.ndim, 2) self.assertEquals(self.a.ndim, 1) self.assertEquals(self.b.ndim, 1) def testSize(self): self.assertEquals(self.A.size, 25) self.assertEquals(self.B.size, 35) self.assertEquals(self.C.size, 10000) self.assertEquals(self.F.size, 36) self.assertEquals(self.G.size, 36) self.assertEquals(self.a.size, 10) self.assertEquals(self.b.size, 10) self.assertEquals(self.c.size, 3) def testGetnnz(self): A = csarray((5, 7)) self.assertEquals(A.getnnz(), 0) A[0, 0] = 1.0 self.assertEquals(A.getnnz(), 1) A[2, 1] = 1.0 self.assertEquals(A.getnnz(), 2) A[2, 5] = 1.0 A[3, 5] = 1.0 self.assertEquals(A.getnnz(), 4) #If we insert a zero it is not registered as zero 
A[4, 4] = 0.0 self.assertEquals(A.getnnz(), 4) #But erasing an item keeps it (can call prune) A[3, 5] = 0.0 self.assertEquals(A.getnnz(), 4) B = csarray((5, 7)) B[(numpy.array([1, 2, 3]), numpy.array([4, 5, 6]))] = 1 self.assertEquals(B.getnnz(), 3) for i in range(5): for j in range(7): B[i, j] = 1 self.assertEquals(B.getnnz(), 35) self.assertEquals(self.A.getnnz(), 0) self.assertEquals(self.B.getnnz(), 5) self.assertEquals(self.C.getnnz(), 5) self.assertEquals(self.F.getnnz(), 5) self.assertEquals(self.G.getnnz(), 5) self.assertEquals(self.a.getnnz(), 3) self.assertEquals(self.b.getnnz(), 3) self.assertEquals(self.c.getnnz(), 0) def testSetItem(self): nrow = 5 ncol = 7 storagetypes = ["col", "row"] for storagetype in storagetypes: A = csarray((nrow, ncol), storagetype=storagetype) A[0, 1] = 1 A[1, 3] = 5.2 A[3, 3] = -0.2 self.assertEquals(A[0, 1], 1) self.assertAlmostEquals(A[1, 3], 5.2) self.assertAlmostEquals(A[3, 3], -0.2) a = csarray(nrow) a[0] = 1 a[1] = 5.2 a[3] = -0.2 self.assertEquals(a[0], 1) self.assertAlmostEquals(a[1], 5.2) self.assertAlmostEquals(a[3], -0.2) for i in range(nrow): for j in range(ncol): if (i, j) != (0, 1) and (i, j) != (1, 3) and (i, j) != (3, 3): self.assertEquals(A[i, j], 0) self.assertRaises(ValueError, A.__setitem__, (20, 1), 1) self.assertRaises(TypeError, A.__setitem__, (1, 1), "a") self.assertRaises(ValueError, A.__setitem__, (1, 100), 1) self.assertRaises(ValueError, A.__setitem__, (-1, 1), 1) self.assertRaises(ValueError, A.__setitem__, (0, -1), 1) self.assertRaises(ValueError, a.__setitem__, (0, 0), 1) result = A[(numpy.array([0, 1, 3]), numpy.array([1, 3, 3]))] self.assertEquals(result[0], 1) self.assertEquals(result[1], 5.2) self.assertEquals(result[2], -0.2) #Replace value of A A[0, 1] = 2 self.assertEquals(A[0, 1], 2) self.assertAlmostEquals(A[1, 3], 5.2) self.assertAlmostEquals(A[3, 3], -0.2) for i in range(nrow): for j in range(ncol): if (i, j) != (0, 1) and (i, j) != (1, 3) and (i, j) != (3, 3): self.assertEquals(A[i, 
j], 0) a[0] = 10 self.assertEquals(a[0], 10) #Try setting items with arrays A = csarray((nrow, ncol), storagetype=storagetype) A[numpy.array([0, 1]), numpy.array([2, 3])] = numpy.array([1.2, 2.4]) self.assertEquals(A.getnnz(), 2) self.assertEquals(A[0, 2], 1.2) self.assertEquals(A[1, 3], 2.4) A[numpy.array([2, 4]), numpy.array([2, 3])] = 5 self.assertEquals(A[2, 2], 5) self.assertEquals(A[4, 3], 5) a = csarray(nrow, storagetype=storagetype) a[numpy.array([0, 2])] = numpy.array([1.2, 2.4]) self.assertEquals(a.getnnz(), 2) self.assertEquals(a[0], 1.2) self.assertEquals(a[2], 2.4) def testStr(self): nrow = 5 ncol = 7 storagetypes = ["col", "row"] for storagetype in storagetypes: A = csarray((nrow, ncol), storagetype=storagetype) A[0, 1] = 1 A[1, 3] = 5.2 A[3, 3] = -0.2 outputStr = "csarray dtype:float64 shape:(5, 7) non-zeros:3 storage:" + A.storagetype + "\n" outputStr += "(0, 1) 1.0\n" outputStr += "(1, 3) 5.2\n" outputStr += "(3, 3) -0.2" self.assertEquals(str(A), outputStr) B = csarray((5, 5), storagetype=storagetype) outputStr = "csarray dtype:float64 shape:(5, 5) non-zeros:0 storage:" + B.storagetype + "\n" self.assertEquals(str(B), outputStr) outputStr = "csarray dtype:float64 shape:(10,) non-zeros:3\n" outputStr +="(0) 23.0\n" outputStr +="(3) 1.2\n" outputStr +="(4) -8.0" self.assertEquals(str(self.a), outputStr) outputStr = "csarray dtype:float64 shape:(3,) non-zeros:0\n" self.assertEquals(str(self.c), outputStr) def testSum(self): nrow = 5 ncol = 7 storagetypes = ["col", "row"] for storagetype in storagetypes: A = csarray((nrow, ncol), storagetype=storagetype) A[0, 1] = 1 A[1, 3] = 5.2 A[3, 3] = -0.2 self.assertEquals(A.sum(), 6.0) A[3, 4] = -1.2 self.assertEquals(A.sum(), 4.8) A[0, 0] = 1.34 self.assertEquals(A.sum(), 6.14) A[0, 0] = 0 self.assertEquals(A.sum(), 4.8) self.assertEquals(self.A.sum(), 0.0) self.assertEquals(self.B.sum(), 16.97) self.assertEquals(self.C.sum(), 16.97) self.assertAlmostEquals(self.D.sum(), 10) self.assertEquals(self.F.sum(), 10) 
self.assertEquals(self.G.sum(), 10) self.assertEquals(self.a.sum(), 16.2) self.assertEquals(self.b.sum(), 16) self.assertEquals(self.c.sum(), 0) #Test sum along axes nptst.assert_array_equal(self.A.sum(0), numpy.zeros(5)) nptst.assert_array_equal(self.B.sum(0), numpy.array([0, 1, 0, 5, 12.2, 0, -1.23])) nptst.assert_array_equal(self.D.sum(0), numpy.array([10, 0, 0, 0, 0])) nptst.assert_array_equal(self.F.sum(0), self.G.sum(0)) nptst.assert_array_equal(self.A.sum(1), numpy.zeros(5)) nptst.assert_array_almost_equal(self.B.sum(1), numpy.array([-0.23, 5.2, 0, -0.2, 12.2])) nptst.assert_array_equal(self.D.sum(1), numpy.array([23.1, 0, -8.1, -5, 0])) nptst.assert_array_equal(self.F.sum(1), self.G.sum(1)) def testGet(self): self.assertEquals(self.B[0, 1], 1) self.assertEquals(self.B[1, 3], 5.2) self.assertEquals(self.B[3, 3], -0.2) self.assertEquals(self.B.getnnz(), 5) self.assertEquals(self.G[0, 0], 23) self.assertEquals(self.G[2, 0], -3) self.assertEquals(self.a[0], 23) self.assertEquals(self.a[3], 1.2) self.assertEquals(self.a[4], -8) self.assertEquals(self.a.getnnz(), 3) #Test negative indices self.assertEquals(self.B[-5, -6], 1) self.assertEquals(self.B[-1, -3], 12.2) self.assertEquals(self.G[-3, -5], 5) self.assertEquals(self.b[-2], -8) self.assertRaises(ValueError, self.B.__getitem__, (20, 1)) self.assertRaises(ValueError, self.B.__getitem__, (1, 20)) self.assertRaises(ValueError, self.B.__getitem__, (-6, 1)) self.assertRaises(ValueError, self.B.__getitem__, (1, -8)) self.assertRaises(TypeError, self.B.__getitem__, (1)) self.assertRaises(ValueError, self.B.__getitem__, "a") self.assertRaises(Exception, self.B.__getitem__, ("a", "c")) #Test array indexing using arrays C = self.B[numpy.array([0, 1, 3]), numpy.array([1, 3, 3])] self.assertEquals(C.shape[0], 3) self.assertEquals(C[0], 1) self.assertEquals(C[1], 5.2) self.assertEquals(C[2], -0.2) C = self.A[numpy.array([0, 1, 3]), numpy.array([1, 3, 3])] self.assertEquals(C[0], 0) self.assertEquals(C[1], 0) 
self.assertEquals(C[2], 0) C = self.G[numpy.array([0, 2, 3]), numpy.array([0, 0, 0])] self.assertEquals(C[0], 23) self.assertEquals(C[1], -3) self.assertEquals(C[2], -10) d = self.a[numpy.array([0, 3, 4])] self.assertEquals(d[0], 23) self.assertEquals(d[1], 1.2) self.assertEquals(d[2], -8) A = csarray((2, 2)) self.assertRaises(ValueError, A.__getitem__, (numpy.array([0, 1]), numpy.array([1, 3]))) A = csarray((2, 2)) self.assertRaises(ValueError, A.__getitem__, (numpy.array([0, 2]), numpy.array([1, 1]))) A = csarray((0, 0)) self.assertRaises(ValueError, A.__getitem__, (numpy.array([0, 1, 3]), numpy.array([1, 3, 3]))) #Test submatrix indexing C = self.B[:, :] for i in range(C.shape[0]): for j in range(C.shape[1]): C[i, j] = self.B[i, j] C = self.B[0:5, 0:7] for i in range(C.shape[0]): for j in range(C.shape[1]): C[i, j] = self.B[i, j] C = self.G[0:3, 0:4] for i in range(C.shape[0]): for j in range(C.shape[1]): C[i, j] = self.G[i, j] C = self.B[numpy.array([0, 1, 3]), :] self.assertEquals(C.shape, (3, 7)) self.assertEquals(C.getnnz(), 4) self.assertEquals(C[0, 1], 1) self.assertEquals(C[1, 3], 5.2) self.assertEquals(C[2, 3], -0.2) self.assertEquals(C[0, 6], -1.23) C = self.H[numpy.array([0, 1, 3]), :] self.assertEquals(C.shape, (3, 7)) self.assertEquals(C.getnnz(), 4) self.assertEquals(C[0, 1], 1) self.assertEquals(C[1, 3], 5.2) self.assertEquals(C[2, 3], -0.2) self.assertEquals(C[0, 6], -1.23) C = self.B[numpy.array([0, 1, 3]), 0:7] self.assertEquals(C.shape, (3, 7)) self.assertEquals(C.getnnz(), 4) self.assertEquals(C[0, 1], 1) self.assertEquals(C[1, 3], 5.2) self.assertEquals(C[2, 3], -0.2) self.assertEquals(C[0, 6], -1.23) C = self.H[numpy.array([0, 1, 3]), :] self.assertEquals(C.shape, (3, 7)) self.assertEquals(C.getnnz(), 4) self.assertEquals(C[0, 1], 1) self.assertEquals(C[1, 3], 5.2) self.assertEquals(C[2, 3], -0.2) self.assertEquals(C[0, 6], -1.23) C = self.B[:, numpy.array([3])] self.assertEquals(C.shape, (5, 1)) self.assertEquals(C.getnnz(), 2) 
self.assertEquals(C[1, 0], 5.2) self.assertEquals(C[3, 0], -0.2) C = self.H[:, numpy.array([3])] self.assertEquals(C.shape, (5, 1)) self.assertEquals(C.getnnz(), 2) self.assertEquals(C[1, 0], 5.2) self.assertEquals(C[3, 0], -0.2) self.assertEquals(self.F[0, 0], 23) d = self.a[0:4] self.assertEquals(d.shape, (4, )) self.assertEquals(d[0], 23) self.assertEquals(d[3], 1.2) def testSubArray(self): rowInds = numpy.array([0, 1], numpy.int) colInds = numpy.array([1, 3, 6], numpy.int) A = self.B.subArray(rowInds, colInds) for i in range(A.shape[0]): for j in range(A.shape[1]): self.assertEquals(A[i, j], self.B[rowInds[i], colInds[j]]) A = self.H.subArray(rowInds, colInds) for i in range(A.shape[0]): for j in range(A.shape[1]): self.assertEquals(A[i, j], self.B[rowInds[i], colInds[j]]) #Try all rows/cols rowInds = numpy.arange(5) colInds = numpy.arange(7) A = self.B.subArray(rowInds, colInds) for i in range(A.shape[0]): for j in range(A.shape[1]): self.assertEquals(A[i, j], self.B[rowInds[i], colInds[j]]) A = self.H.subArray(rowInds, colInds) for i in range(A.shape[0]): for j in range(A.shape[1]): self.assertEquals(A[i, j], self.B[rowInds[i], colInds[j]]) #No rows/cols rowInds = numpy.array([], numpy.int) colInds = numpy.array([], numpy.int) A = self.B.subArray(rowInds, colInds) self.assertEquals(A.shape, (0, 0)) A = self.A.subArray(rowInds, colInds) self.assertEquals(A.shape, (0, 0)) #@unittest.skip("") def testNonZeroInds(self): (rowInds, colInds) = self.B.nonzero() for i in range(rowInds.shape[0]): self.assertNotEqual(self.B[rowInds[i], colInds[i]], 0) self.assertEquals(self.B.getnnz(), rowInds.shape[0]) self.assertEquals(self.B.sum(), self.B[rowInds, colInds].sum()) (rowInds, colInds) = self.C.nonzero() for i in range(rowInds.shape[0]): self.assertNotEqual(self.C[rowInds[i], colInds[i]], 0) self.assertEquals(self.C.getnnz(), rowInds.shape[0]) self.assertEquals(self.C.sum(), self.C[rowInds, colInds].sum()) (rowInds, colInds) = self.F.nonzero() for i in 
range(rowInds.shape[0]): self.assertNotEqual(self.F[rowInds[i], colInds[i]], 0) self.assertEquals(self.F.getnnz(), rowInds.shape[0]) self.assertEquals(self.F.sum(), self.F[rowInds, colInds].sum()) (rowInds, colInds) = self.G.nonzero() for i in range(rowInds.shape[0]): self.assertNotEqual(self.G[rowInds[i], colInds[i]], 0) self.assertEquals(self.G.getnnz(), rowInds.shape[0]) self.assertEquals(self.G.sum(), self.G[rowInds, colInds].sum()) (inds, ) = self.a.nonzero() for i in range(inds.shape[0]): self.assertNotEqual(self.a[inds[i]], 0) #Try an array with no non zeros nrow = 5 ncol = 7 storagetypes = ["col", "row"] for storagetype in storagetypes: A = csarray((nrow, ncol), storagetype=storagetype) (rowInds, colInds) = A.nonzero() self.assertEquals(A.getnnz(), rowInds.shape[0]) self.assertEquals(rowInds.shape[0], 0) self.assertEquals(colInds.shape[0], 0) (inds, ) = self.c.nonzero() self.assertEquals(inds.shape[0], 0) #Zero size array nrow = 0 ncol = 0 A = csarray((nrow, ncol)) (rowInds, colInds) = A.nonzero() self.assertEquals(A.getnnz(), rowInds.shape[0]) self.assertEquals(rowInds.shape[0], 0) self.assertEquals(colInds.shape[0], 0) (inds, ) = self.d.nonzero() self.assertEquals(inds.shape[0], 0) def testDiag(self): nptst.assert_array_equal(self.A.diag(), numpy.zeros(5)) nptst.assert_array_equal(self.B.diag(), numpy.array([ 0, 0, 0, -0.2, 12.2])) nptst.assert_array_equal(self.C.diag(), numpy.zeros(100)) nptst.assert_array_equal(self.H.diag(), numpy.array([ 0, 0, 0, -0.2, 12.2])) D = csarray((3, 3)) D[0, 0] = -1 D[1, 1] = 3.2 D[2, 2] = 34 nptst.assert_array_equal(D.diag(), numpy.array([-1, 3.2, 34])) E = csarray((0, 0)) nptst.assert_array_equal(E.diag(), numpy.array([])) nptst.assert_array_equal(self.F.diag(), numpy.array([23, 0, 0, 0, 0, 0]) ) #@unittest.skip("") def testMean(self): self.assertEquals(self.A.mean(), 0) self.assertAlmostEquals(self.B.mean(), 0.4848571428571428) self.assertAlmostEquals(self.C.mean(), 0.001697) self.assertAlmostEquals(self.H.mean(), 
0.4848571428571428) D = csarray((0, 0)) self.assertTrue(math.isnan(D.mean())) self.assertEquals(self.F.mean(), 10/float(36)) nptst.assert_array_equal(self.A.mean(0), self.A.sum(0)/self.A.shape[0]) nptst.assert_array_equal(self.B.mean(0), self.B.sum(0)/self.B.shape[0]) nptst.assert_array_equal(self.C.mean(0), self.C.sum(0)/self.C.shape[0]) nptst.assert_array_equal(self.D.mean(0), self.D.sum(0)/self.D.shape[0]) nptst.assert_array_equal(self.F.mean(0), self.F.sum(0)/float(self.F.shape[0])) nptst.assert_array_equal(self.G.mean(0), self.G.sum(0)/self.G.shape[0]) nptst.assert_array_equal(self.H.mean(0), self.H.sum(0)/self.H.shape[0]) nptst.assert_array_equal(self.A.mean(1), self.A.sum(1)/self.A.shape[1]) nptst.assert_array_equal(self.B.mean(1), self.B.sum(1)/self.B.shape[1]) nptst.assert_array_equal(self.C.mean(1), self.C.sum(1)/self.C.shape[1]) nptst.assert_array_equal(self.D.mean(1), self.D.sum(1)/self.D.shape[1]) nptst.assert_array_equal(self.F.mean(1), self.F.sum(1)/float(self.F.shape[1])) nptst.assert_array_equal(self.G.mean(1), self.G.sum(1)/self.G.shape[1]) nptst.assert_array_equal(self.H.mean(1), self.H.sum(1)/self.H.shape[1]) self.assertEquals(self.a.mean(), 1.6199999999999999) self.assertEquals(self.b.mean(), 1.6) self.assertEquals(self.c.mean(), 0.0) self.assertTrue(math.isnan(self.d.mean())) def testCopy(self): A = csarray((5, 5)) A[0, 0] = 1 A[1, 0] = 2 A[4, 2] = 3 self.assertEquals(A[0, 0], 1) self.assertEquals(A[1, 0], 2) self.assertEquals(A[4, 2], 3) B = A.copy() A[0, 0] = 2 A[1, 0] = 3 A[4, 2] = 4 A[4, 4] = 5 self.assertEquals(A[0, 0], 2) self.assertEquals(A[1, 0], 3) self.assertEquals(A[4, 2], 4) self.assertEquals(A[4, 4], 5) self.assertEquals(A.getnnz(), 4) self.assertEquals(B[0, 0], 1) self.assertEquals(B[1, 0], 2) self.assertEquals(B[4, 2], 3) self.assertEquals(B.getnnz(), 3) F = self.F.copy() F[0, 0] = -15 self.assertEquals(F[0, 0], -15) self.assertEquals(self.F[0, 0], 23) G = self.G.copy() G[0, 0] = -15 self.assertEquals(G[0, 0], -15) 
self.assertEquals(self.G[0, 0], 23) #Now try with 1d arrays a2 = self.a.copy() self.a[0] = 10 self.a[3] = 1 self.a[4] = 2 self.assertEquals(a2[0], 23) self.assertEquals(a2[3], 1.2) self.assertEquals(a2[4], -8) def testMultiply(self): val = 2.0 C = self.B * val self.assertEquals(self.B[0, 1], 1) self.assertEquals(self.B[1, 3], 5.2) self.assertEquals(self.B[3, 3], -0.2) self.assertEquals(self.B[0, 6], -1.23) self.assertEquals(self.B[4, 4], 12.2) self.assertEquals(C[0, 1], self.B[0, 1]*val) self.assertEquals(C[1, 3], self.B[1, 3]*val) self.assertEquals(C[3, 3], self.B[3, 3]*val) self.assertEquals(C[0, 6], self.B[0, 6]*val) self.assertEquals(C[4, 4], self.B[4, 4]*val) C = self.H * val self.assertEquals(self.H[0, 1], 1) self.assertEquals(self.H[1, 3], 5.2) self.assertEquals(self.H[3, 3], -0.2) self.assertEquals(self.H[0, 6], -1.23) self.assertEquals(self.H[4, 4], 12.2) self.assertEquals(C[0, 1], self.H[0, 1]*val) self.assertEquals(C[1, 3], self.H[1, 3]*val) self.assertEquals(C[3, 3], self.H[3, 3]*val) self.assertEquals(C[0, 6], self.H[0, 6]*val) self.assertEquals(C[4, 4], self.H[4, 4]*val) G = self.F*val self.assertEquals(self.F[0, 0], 23) self.assertEquals(G[0, 0], 46) #Now with vectors a2 = self.a*val self.assertEquals(a2[0], self.a[0]*val) self.assertEquals(a2[3], self.a[3]*val) self.assertEquals(a2[4], self.a[4]*val) def testTrace(self): self.assertEquals(self.A.trace(), 0) self.assertEquals(self.B.trace(), 12) self.assertEquals(self.C.trace(), 0) self.assertEquals(self.D.trace(), 23.1) self.assertEquals(self.F.trace(), 23) self.assertEquals(self.G.trace(), 23) self.assertEquals(self.H.trace(), 12) #@unittest.skip("") def testToarray(self): A = self.A.toarray() self.assertEquals(type(A), numpy.ndarray) self.assertEquals(A.shape, self.A.shape) self.assertEquals(A.sum(), 0) B = self.B.toarray() self.assertEquals(type(B), numpy.ndarray) self.assertEquals(B.shape, self.B.shape) self.assertEquals(B[0, 1], 1) self.assertEquals(B[1, 3], 5.2) self.assertEquals(B[3, 3], 
-0.2) self.assertEquals(B[0, 6], -1.23) self.assertEquals(B[4, 4], 12.2) self.assertEquals(B.sum(), self.B.sum()) D = self.D.toarray() self.assertEquals(type(D), numpy.ndarray) self.assertEquals(D.shape, self.D.shape) self.assertEquals(D[0, 0], 23.1) self.assertEquals(D[2, 0], -3.1) self.assertEquals(D[3, 0], -10.0) self.assertEquals(D[2, 1], -5) self.assertEquals(D[3, 1], 5) self.assertAlmostEquals(D.sum(), self.D.sum()) F = self.F.toarray() self.assertEquals(type(F), numpy.ndarray) self.assertEquals(F.shape, self.F.shape) self.assertEquals(F[0, 0], 23) self.assertEquals(F[2, 0], -3) self.assertEquals(F[3, 0], -10) self.assertEquals(F[2, 1], -5) self.assertEquals(F[3, 1], 5) self.assertAlmostEquals(F.sum(), self.F.sum()) G = self.G.toarray() self.assertEquals(type(G), numpy.ndarray) self.assertEquals(G.shape, self.G.shape) self.assertEquals(G[0, 0], 23) self.assertEquals(G[2, 0], -3) self.assertEquals(G[3, 0], -10) self.assertEquals(G[2, 1], -5) self.assertEquals(G[3, 1], 5) self.assertAlmostEquals(G.sum(), self.G.sum()) #Vectors a2 = self.a.toarray() self.assertEquals(type(a2), numpy.ndarray) self.assertEquals(a2.shape, self.a.shape) self.assertEquals(a2[0], 23) self.assertEquals(a2[3], 1.2) self.assertEquals(a2[4], -8) self.assertAlmostEquals(a2.sum(), self.a.sum()) b2 = self.b.toarray() self.assertEquals(type(b2), numpy.ndarray) self.assertEquals(b2.shape, self.b.shape) self.assertEquals(b2[0], 23) self.assertEquals(b2[5], 1) self.assertEquals(b2[8], -8) self.assertAlmostEquals(b2.sum(), self.b.sum()) def testMin(self): self.assertEquals(self.A.min(), 0) self.assertEquals(self.B.min(), -1.23) self.assertEquals(self.C.min(), -1.23) self.assertEquals(self.D.min(), -10) self.assertTrue(math.isnan(self.E.min())) self.assertEquals(self.F.min(), -10) self.assertEquals(self.G.min(), -10) self.assertEquals(self.H.min(), -1.23) self.assertEquals(self.a.min(), -8) self.assertEquals(self.b.min(), -8) self.assertEquals(self.c.min(), 0) 
self.assertTrue(math.isnan(self.d.min())) def testMax(self): self.assertEquals(self.A.max(), 0) self.assertEquals(self.B.max(), 12.2) self.assertEquals(self.C.max(), 12.2) self.assertEquals(self.D.max(), 23.1) self.assertTrue(math.isnan(self.E.max())) self.assertEquals(self.F.max(), 23) self.assertEquals(self.G.max(), 23) self.assertEquals(self.H.max(), 12.2) self.assertEquals(self.a.max(), 23) self.assertEquals(self.b.max(), 23) self.assertEquals(self.c.max(), 0) self.assertTrue(math.isnan(self.d.max())) #@unittest.skip("") def testVar(self): self.assertEquals(self.A.var(), self.A.toarray().var()) self.assertAlmostEquals(self.B.var(), self.B.toarray().var()) self.assertAlmostEquals(self.C.var(), self.C.toarray().var()) self.assertAlmostEquals(self.D.var(), self.D.toarray().var()) self.assertAlmostEquals(self.F.var(), self.F.toarray().var()) self.assertAlmostEquals(self.G.var(), self.G.toarray().var()) self.assertAlmostEquals(self.H.var(), self.H.toarray().var()) self.assertAlmostEquals(self.a.var(), self.a.toarray().var()) self.assertAlmostEquals(self.b.var(), self.b.toarray().var()) #@unittest.skip("") def testStd(self): self.assertEquals(self.A.std(), self.A.toarray().std()) self.assertAlmostEquals(self.B.std(), self.B.toarray().std()) self.assertAlmostEquals(self.C.std(), self.C.toarray().std()) self.assertAlmostEquals(self.D.std(), self.D.toarray().std()) self.assertAlmostEquals(self.F.std(), self.F.toarray().std()) self.assertAlmostEquals(self.G.std(), self.G.toarray().std()) self.assertAlmostEquals(self.H.std(), self.H.toarray().std()) self.assertAlmostEquals(self.a.std(), self.a.toarray().std()) self.assertAlmostEquals(self.b.std(), self.b.toarray().std()) def testAbs(self): nptst.assert_array_equal(abs(self.A).toarray(), abs(self.A.toarray())) nptst.assert_array_equal(abs(self.B).toarray(), abs(self.B.toarray())) nptst.assert_array_equal(abs(self.C).toarray(), abs(self.C.toarray())) nptst.assert_array_equal(abs(self.D).toarray(), abs(self.D.toarray())) 
nptst.assert_array_equal(abs(self.F).toarray(), abs(self.F.toarray())) nptst.assert_array_equal(abs(self.G).toarray(), abs(self.G.toarray())) nptst.assert_array_equal(abs(self.H).toarray(), abs(self.H.toarray())) nptst.assert_array_equal(abs(self.a).toarray(), abs(self.a.toarray())) nptst.assert_array_equal(abs(self.b).toarray(), abs(self.b.toarray())) def testNeg(self): nptst.assert_array_equal((-self.A).toarray(), -self.A.toarray()) nptst.assert_array_equal((-self.B).toarray(), -self.B.toarray()) nptst.assert_array_equal((-self.C).toarray(), -self.C.toarray()) nptst.assert_array_equal((-self.D).toarray(), -self.D.toarray()) nptst.assert_array_equal((-self.F).toarray(), -self.F.toarray()) nptst.assert_array_equal((-self.G).toarray(), -self.G.toarray()) nptst.assert_array_equal((-self.H).toarray(), -self.H.toarray()) nptst.assert_array_equal((-self.a).toarray(), -self.a.toarray()) nptst.assert_array_equal((-self.b).toarray(), -self.b.toarray()) def testAdd(self): #print(self.A.__add__(self.A._array)) nptst.assert_array_equal((self.A + self.A).toarray(), self.A.toarray()*2) nptst.assert_array_equal((self.B + self.B).toarray(), self.B.toarray()*2) nptst.assert_array_equal((self.C + self.C).toarray(), self.C.toarray()*2) nptst.assert_array_equal((self.D + self.D).toarray(), self.D.toarray()*2) nptst.assert_array_equal((self.F + self.F).toarray(), self.F.toarray()*2) nptst.assert_array_equal((self.G + self.G).toarray(), self.G.toarray()*2) nptst.assert_array_equal((self.H + self.H).toarray(), self.H.toarray()*2) A = csarray((5, 5)) A[0, 1] = 4 A[1, 3] = 2 A[3, 3] = 1 B = csarray((5, 5)) B[0, 2] = 9.2 B[2, 3] = -5 B[3, 4] = 12 nptst.assert_array_equal((A + B).toarray(), A.toarray()+B.toarray()) nptst.assert_array_equal((self.a + self.a).toarray(), self.a.toarray()*2) nptst.assert_array_equal((self.b + self.b).toarray(), self.b.toarray()*2) nptst.assert_array_equal((self.c + self.c).toarray(), self.c.toarray()*2) def testSub(self): nptst.assert_array_equal((self.A - 
self.A).toarray(), self.A.toarray()*0) nptst.assert_array_equal((self.B - self.B).toarray(), self.B.toarray()*0) nptst.assert_array_equal((self.C - self.C).toarray(), self.C.toarray()*0) nptst.assert_array_equal((self.D - self.D).toarray(), self.D.toarray()*0) nptst.assert_array_equal((self.F - self.F).toarray(), self.F.toarray()*0) nptst.assert_array_equal((self.G - self.G).toarray(), self.G.toarray()*0) nptst.assert_array_equal((self.H - self.H).toarray(), self.H.toarray()*0) nptst.assert_array_equal((self.B*2 - self.B).toarray(), self.B.toarray()) A = csarray((5, 5)) A[0, 1] = 4 A[1, 3] = 2 A[3, 3] = 1 B = csarray((5, 5)) B[0, 2] = 9.2 B[2, 3] = -5 B[3, 4] = 12 nptst.assert_array_equal((A - B).toarray(), A.toarray()-B.toarray()) nptst.assert_array_equal((self.a - self.a).toarray(), self.a.toarray()*0) nptst.assert_array_equal((self.b - self.b).toarray(), self.b.toarray()*0) nptst.assert_array_equal((self.c - self.c).toarray(), self.c.toarray()*0) def testHadamard(self): nptst.assert_array_equal((self.A.hadamard(self.A)).toarray(), (self.A.toarray())**2) nptst.assert_array_equal((self.B.hadamard(self.B)).toarray(), self.B.toarray()**2) nptst.assert_array_equal((self.C.hadamard(self.C)).toarray(), self.C.toarray()**2) nptst.assert_array_equal((self.D.hadamard(self.D)).toarray(), self.D.toarray()**2) nptst.assert_array_equal((self.F.hadamard(self.F)).toarray(), self.F.toarray()**2) nptst.assert_array_equal((self.G.hadamard(self.G)).toarray(), self.G.toarray()**2) nptst.assert_array_equal((self.H.hadamard(self.H)).toarray(), self.H.toarray()**2) for storagetype in self.storagetypes: A = csarray((5, 5), storagetype=storagetype) A[0, 1] = 4 A[2, 3] = -1.2 A[1, 3] = 2 A[3, 3] = 1 B = csarray((5, 5), storagetype=storagetype) B[0, 2] = 9.2 B[2, 3] = -5 B[3, 4] = 12 B[3, 3] = 12 C = csarray((5, 5), storagetype=storagetype) nptst.assert_array_equal((A.hadamard(B)).toarray(), A.toarray()*B.toarray()) nptst.assert_array_equal((A.hadamard(C)).toarray(), C.toarray()) 
nptst.assert_array_equal((self.a.hadamard(self.a)).toarray(), (self.a.toarray())**2) nptst.assert_array_equal((self.b.hadamard(self.b)).toarray(), (self.b.toarray())**2) nptst.assert_array_equal((self.c.hadamard(self.c)).toarray(), (self.c.toarray())**2) def testReserve(self): for storagetype in self.storagetypes: A = csarray((5, 5), storagetype=storagetype) A.reserve(5) A[0, 1] = 4 A[2, 3] = -1.2 A[1, 3] = 2 A[3, 3] = 1 def testCompress(self): for storagetype in self.storagetypes: A = csarray((5, 5), storagetype=storagetype) A[0, 1] = 4 A[2, 3] = -1.2 A[1, 3] = 2 A[3, 3] = 1 A.compress() def testDot(self): for storagetype in self.storagetypes: A = csarray((5, 5), storagetype=storagetype) A[0, 1] = 4 A[2, 3] = -1.2 A[1, 3] = 2 A[3, 3] = 1 B = A.dot(A) nptst.assert_array_equal(B.toarray(), A.toarray().dot(A.toarray())) B = self.D.dot(self.D) nptst.assert_array_equal(B.toarray(), self.D.toarray().dot(self.D.toarray())) C = csarray((5, 2)) for i in range(5): for j in range(2): C[i, j] = 1 self.assertRaises(ValueError, C.dot, C) B = A.dot(C) nptst.assert_array_equal(B.toarray(), A.toarray().dot(C.toarray())) self.assertEquals((self.a.dot(self.a)), (self.a.dot(self.a))) self.assertEquals((self.b.dot(self.b)), (self.b.dot(self.b))) self.assertEquals((self.c.dot(self.c)), (self.c.dot(self.c))) #Now test dot product with ndarray D = self.D.toarray() B = self.D.dot(D) nptst.assert_array_equal(B, D.dot(D)) A = numpy.random.rand(10, 5) B = numpy.random.rand(5, 6) C = A.dot(B) Ahat = csarray(A) Chat = Ahat.dot(B) nptst.assert_array_equal(C, Chat) #Try some random matrices numRuns = 10 for i in range(numRuns): m = numpy.random.randint(1, 50) n = numpy.random.randint(1, 50) p = numpy.random.randint(1, 50) A = numpy.random.rand(m, n) B = numpy.random.rand(n, p) v = numpy.random.rand(n, p) C = A.dot(B) Ahat = csarray(A) Chat = Ahat.dot(B) nptst.assert_array_almost_equal(C, Chat) u = A.dot(v) uHat = Ahat.dot(v) nptst.assert_array_almost_equal(u, uHat) def testPdot(self): """ D = 
# NOTE(review): this region is a whitespace-mangled dump of the tail of a
# unittest.TestCase exercising sppy's csarray; the original line breaks and
# indentation were lost in extraction, so only standalone comment lines are
# added here -- the code lines below are kept byte-identical.
# Tail of testPdot: everything up to the stray closing """ belongs to a
# commented-out docstring block; the live code after it checks csarray.pdot
# against numpy's dot on random dense matrices.  testTranspose compares
# .T/.transpose() with numpy transposes; testOnes checks ones() fills;
# testRowInds starts checking rowInds() against hard-coded fixtures.
numpy.ascontiguousarray(self.D.toarray()) B = self.D.pdot(D) nptst.assert_array_equal(B, D.dot(D)) A = numpy.random.rand(10, 5) B = numpy.random.rand(5, 6) C = A.dot(B) Ahat = csarray(A) Chat = Ahat.pdot(B) nptst.assert_array_almost_equal(C, Chat) """ #Try some random matrices numRuns = 10 for i in range(numRuns): m = numpy.random.randint(1, 50) n = numpy.random.randint(1, 50) p = numpy.random.randint(1, 50) A = numpy.random.rand(m, n) B = numpy.random.rand(n, p) v = numpy.random.rand(n, p) C = A.dot(B) Ahat = csarray(A, storagetype="row") Chat = Ahat.pdot(B) nptst.assert_array_almost_equal(C, Chat, 3) u = A.dot(v) uHat = Ahat.pdot(v) nptst.assert_array_almost_equal(u, uHat) def testTranspose(self): for storagetype in self.storagetypes: A = csarray((5, 5), storagetype=storagetype) A[0, 1] = 4 A[2, 3] = -1.2 A[1, 3] = 2 A[3, 3] = 1 self.assertEquals(type(A.T), csarray) nptst.assert_array_equal(A.transpose().toarray(), A.toarray().T) nptst.assert_array_equal(self.A.T.toarray(), self.A.toarray().T) nptst.assert_array_equal(self.B.T.toarray(), self.B.toarray().T) nptst.assert_array_equal(self.C.transpose().toarray(), self.C.toarray().T) nptst.assert_array_equal(self.D.transpose().toarray(), self.D.toarray().T) nptst.assert_array_equal(self.E.transpose().toarray(), self.E.toarray().T) nptst.assert_array_equal(self.F.transpose().toarray(), self.F.toarray().T) nptst.assert_array_equal(self.G.transpose().toarray(), self.G.toarray().T) nptst.assert_array_equal(self.H.transpose().toarray(), self.H.toarray().T) def testOnes(self): self.a.ones() nptst.assert_array_equal(self.a.toarray(), numpy.ones(self.a.shape[0])) self.A.ones() nptst.assert_array_equal(self.A.toarray(), numpy.ones(self.A.shape)) self.G.ones() nptst.assert_array_equal(self.G.toarray(), numpy.ones(self.G.shape)) def testRowInds(self): nptst.assert_array_equal(self.B.rowInds(0), numpy.array([1, 6])) nptst.assert_array_equal(self.B.rowInds(1), numpy.array([3])) nptst.assert_array_equal(self.H.rowInds(0), 
# testRowInds (cont.), testColInds, testValues: the nonzero-index and value
# accessors (rowInds/colInds/values) are checked against hand-written
# fixtures and against indexing with .nonzero(); testToScipyCsc converts the
# fixture matrices to scipy CSC and compares nnz plus every nonzero element
# (the whole body is wrapped in try/except so it is skipped if scipy is not
# installed).
numpy.array([1, 6])) nptst.assert_array_equal(self.H.rowInds(1), numpy.array([3])) nptst.assert_array_equal(self.C.rowInds(0), numpy.array([1, 62])) nptst.assert_array_equal(self.C.rowInds(1), numpy.array([])) def testColInds(self): nptst.assert_array_equal(self.B.colInds(3), numpy.array([1, 3])) nptst.assert_array_equal(self.B.colInds(1), numpy.array([0])) nptst.assert_array_equal(self.H.colInds(3), numpy.array([1, 3])) nptst.assert_array_equal(self.H.colInds(1), numpy.array([0])) nptst.assert_array_equal(self.D.colInds(0), numpy.array([0, 2, 3])) nptst.assert_array_equal(self.D.colInds(2), numpy.array([])) def testValues(self): nptst.assert_array_equal(self.A.values(), self.A[self.A.nonzero()]) nptst.assert_array_equal(self.B.values(), self.B[self.B.nonzero()]) nptst.assert_array_equal(self.C.values(), self.C[self.C.nonzero()]) nptst.assert_array_equal(self.D.values(), self.D[self.D.nonzero()]) nptst.assert_array_equal(self.E.values(), self.E[self.E.nonzero()]) nptst.assert_array_equal(self.F.values(), self.F[self.F.nonzero()]) nptst.assert_array_equal(self.G.values(), self.G[self.G.nonzero()]) nptst.assert_array_equal(self.H.values(), self.H[self.H.nonzero()]) def testToScipyCsc(self): try: import scipy.sparse A = self.A.toScipyCsc() B = self.B.toScipyCsc() C = self.C.toScipyCsc() D = self.D.toScipyCsc() F = self.F.toScipyCsc() self.assertEquals(A.getnnz(), self.A.getnnz()) self.assertEquals(B.getnnz(), self.B.getnnz()) self.assertEquals(C.getnnz(), self.C.getnnz()) self.assertEquals(D.getnnz(), self.D.getnnz()) self.assertEquals(F.getnnz(), self.F.getnnz()) #Now check elements are correct (rowInds, colInds) = self.B.nonzero() for i in range(rowInds.shape[0]): self.assertEquals(B[rowInds[i], colInds[i]], self.B[rowInds[i], colInds[i]]) (rowInds, colInds) = self.C.nonzero() for i in range(rowInds.shape[0]): self.assertEquals(C[rowInds[i], colInds[i]], self.C[rowInds[i], colInds[i]]) (rowInds, colInds) = self.D.nonzero() for i in range(rowInds.shape[0]): 
# testToScipyCsc (cont.) and testToScipyCsr: same scipy round-trip check for
# CSR-stored matrices; testPut inserts a batch of values via put() and checks
# each element and the nnz count; testPutInit does the same with the init=True
# fast path for both "col" and "row" storage layouts.
self.assertEquals(D[rowInds[i], colInds[i]], self.D[rowInds[i], colInds[i]]) (rowInds, colInds) = self.F.nonzero() for i in range(rowInds.shape[0]): self.assertEquals(F[rowInds[i], colInds[i]], self.F[rowInds[i], colInds[i]]) except ImportError as err: print(err) def testToScipyCsr(self): try: import scipy.sparse G = self.G.toScipyCsr() H = self.H.toScipyCsr() I = self.I.toScipyCsr() self.assertEquals(G.getnnz(), self.G.getnnz()) self.assertEquals(H.getnnz(), self.H.getnnz()) self.assertEquals(I.getnnz(), self.I.getnnz()) #Now check elements are correct (rowInds, colInds) = self.G.nonzero() for i in range(rowInds.shape[0]): self.assertEquals(G[rowInds[i], colInds[i]], self.G[rowInds[i], colInds[i]]) (rowInds, colInds) = self.H.nonzero() for i in range(rowInds.shape[0]): self.assertEquals(H[rowInds[i], colInds[i]], self.H[rowInds[i], colInds[i]]) except ImportError as err: print(err) def testPut(self): A = csarray((10, 10)) rowInds = numpy.array([1, 2, 5, 7], numpy.int32) colInds = numpy.array([4, 1, 9, 0], numpy.int32) vals = numpy.random.randn(rowInds.shape[0]) A.put(vals, rowInds, colInds) for i in range(rowInds.shape[0]): self.assertEquals(A[rowInds[i], colInds[i]], vals[i]) self.assertEquals(A.nnz, rowInds.shape[0]) def testPutInit(self): A = csarray((10, 10), storagetype="col") rowInds = numpy.array([1, 2, 5, 7, 8, 1], numpy.int32) colInds = numpy.array([0, 0, 0, 1, 1, 2], numpy.int32) vals = numpy.random.randn(rowInds.shape[0]) A.put(vals, rowInds, colInds, True) for i in range(rowInds.shape[0]): self.assertEquals(A[rowInds[i], colInds[i]], vals[i]) self.assertEquals(A.nnz, rowInds.shape[0]) #Test row format A = csarray((10, 10), storagetype="row") rowInds = numpy.array([1, 1, 2, 5, 7, 8], numpy.int32) colInds = numpy.array([0, 2, 0, 0, 1, 1], numpy.int32) A.put(vals, rowInds, colInds, True) for i in range(rowInds.shape[0]): self.assertEquals(A[rowInds[i], colInds[i]], vals[i]) self.assertEquals(A.nnz, rowInds.shape[0]) #Try a larger matrix 
# testPutInit (cont.) with a seeded 10^6 x 10^6 random sparse fill; testBiCGSTAB
# checks the iterative solver on random systems (only asserting accuracy when
# the solver reports convergence, output == 0) and that mismatched shapes raise
# ValueError; testPrune checks removal of sub-threshold entries; the final
# guard runs unittest.main() when executed as a script.
numpy.random.seed(21) m = 1000000 n = 1000000 numInds = 1000 inds = numpy.random.randint(0, m*n, numInds) inds = numpy.unique(inds) vals = numpy.random.randn(inds.shape[0]) rowInds, colInds = numpy.unravel_index(inds, (m, n), order="FORTRAN") rowInds = numpy.array(rowInds, numpy.int32) colInds = numpy.array(colInds, numpy.int32) A = csarray((m, n), storagetype="col") A.put(vals, rowInds, colInds) for i in range(vals.shape[0]): self.assertEquals(A[rowInds[i], colInds[i]], vals[i]) self.assertEquals(A.nnz, vals.shape[0]) def testBiCGSTAB(self): #This doesn't always converge numRuns = 10 for i in range(numRuns): n = numpy.random.randint(5, 20) A = numpy.random.rand(n, n) x = numpy.random.rand(n) b = A.dot(x) A = csarray(A) x2, output = A.biCGSTAB(b, tol=10**-6, maxIter=n) if output == 0: nptst.assert_array_almost_equal(x, x2, 3) #Try with bad input m = 3 n = 5 A = numpy.random.rand(n, m) A = csarray(A) x = numpy.random.rand(m) b = A.dot(x) self.assertRaises(ValueError, A.biCGSTAB, b) A = numpy.random.rand(n, n) A = csarray(A) b = numpy.array(n+1) self.assertRaises(ValueError, A.biCGSTAB, b) def testPrune(self): A = csarray((10, 10), storagetype="row") A[0, 1] = 10 A[2, 3] = -0.4 A[3, 3] = 0 A[4, 8] = 0 A.prune(10**-6) self.assertEquals(A.nnz, 2) A[0, 1] = 0 self.assertEquals(A.nnz, 2) A.prune() self.assertEquals(A.nnz, 1) if __name__ == "__main__": unittest.main()
ICML14MoMCompare/spectral-learn
code/spectral/build/sppy/sppy/test/csarrayTest.py
Python
apache-2.0
49,304
<?php

/*
 * This file is part of the Predis package.
 *
 * (c) Daniele Alessandri <[email protected]>
 *
 * For the full copyright and license information, please view the LICENSE
 * file that was distributed with this source code.
 */

namespace Predis\Command;

/**
 * Command object for the Redis SUBSCRIBE operation.
 *
 * @link http://redis.io/commands/subscribe
 * @author Daniele Alessandri <[email protected]>
 */
class PubSubSubscribe extends AbstractCommand implements PrefixableCommandInterface
{
    /**
     * Returns the identifier of the Redis command handled by this class.
     *
     * {@inheritdoc}
     */
    public function getId()
    {
        return 'SUBSCRIBE';
    }

    /**
     * Normalizes the raw channel arguments through the shared helper on the
     * base class (presumably so a single array argument works the same as a
     * variadic list -- confirm against AbstractCommand::normalizeArguments).
     *
     * {@inheritdoc}
     */
    protected function filterArguments(Array $arguments)
    {
        return self::normalizeArguments($arguments);
    }

    /**
     * Delegates key prefixing to PrefixHelpers, which is expected to apply
     * the prefix to every argument of this command.
     *
     * {@inheritdoc}
     */
    public function prefixKeys($prefix)
    {
        PrefixHelpers::all($this, $prefix);
    }
}
raphandowski/UDIHISProject
vendor/predis/predis/lib/Predis/Command/PubSubSubscribe.php
PHP
apache-2.0
903
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>Class template date_generator_parser</title> <link rel="stylesheet" href="../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset"> <link rel="up" href="../../date_time/doxy.html#header.boost.date_time.date_generator_parser_hpp" title="Header &lt;boost/date_time/date_generator_parser.hpp&gt;"> <link rel="prev" href="date_generator_formatter.html" title="Class template date_generator_formatter"> <link rel="next" href="year_based_generator.html" title="Class template year_based_generator"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../boost.png"></td> <td align="center"><a href="../../../../index.html">Home</a></td> <td align="center"><a href="../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="date_generator_formatter.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../date_time/doxy.html#header.boost.date_time.date_generator_parser_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="year_based_generator.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="refentry"> <a name="boost.date_time.date_generator_parser"></a><div 
class="titlepage"></div> <div class="refnamediv"> <h2><span class="refentrytitle">Class template date_generator_parser</span></h2> <p>boost::date_time::date_generator_parser &#8212; Class for date_generator parsing. </p> </div> <h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2> <div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: &lt;<a class="link" href="../../date_time/doxy.html#header.boost.date_time.date_generator_parser_hpp" title="Header &lt;boost/date_time/date_generator_parser.hpp&gt;">boost/date_time/date_generator_parser.hpp</a>&gt; </span><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> date_type<span class="special">,</span> <span class="keyword">typename</span> charT<span class="special">&gt;</span> <span class="keyword">class</span> <a class="link" href="date_generator_parser.html" title="Class template date_generator_parser">date_generator_parser</a> <span class="special">{</span> <span class="keyword">public</span><span class="special">:</span> <span class="comment">// types</span> <span class="keyword">typedef</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">basic_string</span><span class="special">&lt;</span> <span class="identifier">charT</span> <span class="special">&gt;</span> <a name="boost.date_time.date_generator_parser.string_type"></a><span class="identifier">string_type</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">istreambuf_iterator</span><span class="special">&lt;</span> <span class="identifier">charT</span> <span class="special">&gt;</span> <a name="boost.date_time.date_generator_parser.stream_itr_type"></a><span 
class="identifier">stream_itr_type</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">date_type</span><span class="special">::</span><span class="identifier">month_type</span> <a name="boost.date_time.date_generator_parser.month_type"></a><span class="identifier">month_type</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">date_type</span><span class="special">::</span><span class="identifier">day_of_week_type</span> <a name="boost.date_time.date_generator_parser.day_of_week_type"></a><span class="identifier">day_of_week_type</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">date_type</span><span class="special">::</span><span class="identifier">day_type</span> <a name="boost.date_time.date_generator_parser.day_type"></a><span class="identifier">day_type</span><span class="special">;</span> <span class="keyword">typedef</span> <a class="link" href="string_parse_tree.html" title="Struct template string_parse_tree">string_parse_tree</a><span class="special">&lt;</span> <span class="identifier">charT</span> <span class="special">&gt;</span> <a name="boost.date_time.date_generator_parser.parse_tree_type"></a><span class="identifier">parse_tree_type</span><span class="special">;</span> <span class="keyword">typedef</span> parse_tree_type::parse_match_result_type <a name="boost.date_time.date_generator_parser.match_results"></a><span class="identifier">match_results</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">vector</span><span class="special">&lt;</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">basic_string</span><span class="special">&lt;</span> <span class="identifier">charT</span> <span class="special">&gt;</span> <span class="special">&gt;</span> <a 
name="boost.date_time.date_generator_parser.collection_type"></a><span class="identifier">collection_type</span><span class="special">;</span> <span class="keyword">typedef</span> <a class="link" href="partial_date.html" title="Class template partial_date">partial_date</a><span class="special">&lt;</span> <span class="identifier">date_type</span> <span class="special">&gt;</span> <a name="boost.date_time.date_generator_parser.partial_date_type"></a><span class="identifier">partial_date_type</span><span class="special">;</span> <span class="keyword">typedef</span> <a class="link" href="nth_kday_of_month.html" title="Class template nth_kday_of_month">nth_kday_of_month</a><span class="special">&lt;</span> <span class="identifier">date_type</span> <span class="special">&gt;</span> <a name="boost.date_time.date_generator_parser.nth_kday_type"></a><span class="identifier">nth_kday_type</span><span class="special">;</span> <span class="keyword">typedef</span> <a class="link" href="first_kday_of_month.html" title="Class template first_kday_of_month">first_kday_of_month</a><span class="special">&lt;</span> <span class="identifier">date_type</span> <span class="special">&gt;</span> <a name="boost.date_time.date_generator_parser.first_kday_type"></a><span class="identifier">first_kday_type</span><span class="special">;</span> <span class="keyword">typedef</span> <a class="link" href="last_kday_of_month.html" title="Class template last_kday_of_month">last_kday_of_month</a><span class="special">&lt;</span> <span class="identifier">date_type</span> <span class="special">&gt;</span> <a name="boost.date_time.date_generator_parser.last_kday_type"></a><span class="identifier">last_kday_type</span><span class="special">;</span> <span class="keyword">typedef</span> <a class="link" href="first_kday_after.html" title="Class template first_kday_after">first_kday_after</a><span class="special">&lt;</span> <span class="identifier">date_type</span> <span class="special">&gt;</span> <a 
name="boost.date_time.date_generator_parser.kday_after_type"></a><span class="identifier">kday_after_type</span><span class="special">;</span> <span class="keyword">typedef</span> <a class="link" href="first_kday_before.html" title="Class template first_kday_before">first_kday_before</a><span class="special">&lt;</span> <span class="identifier">date_type</span> <span class="special">&gt;</span> <a name="boost.date_time.date_generator_parser.kday_before_type"></a><span class="identifier">kday_before_type</span><span class="special">;</span> <span class="keyword">typedef</span> <span class="identifier">charT</span> <a name="boost.date_time.date_generator_parser.char_type"></a><span class="identifier">char_type</span><span class="special">;</span> <span class="keyword">enum</span> <a name="boost.date_time.date_generator_parser.phrase_elements"></a>phrase_elements <span class="special">{</span> first = =0, second, third, fourth, fifth, last, before, after, of, number_of_phrase_elements <span class="special">}</span><span class="special">;</span> <span class="comment">// <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parserconstruct-copy-destruct">construct/copy/destruct</a></span> <a class="link" href="date_generator_parser.html#idp40203712-bb"><span class="identifier">date_generator_parser</span></a><span class="special">(</span><span class="special">)</span><span class="special">;</span> <a class="link" href="date_generator_parser.html#idp49347584-bb"><span class="identifier">date_generator_parser</span></a><span class="special">(</span><span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span 
class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">)</span><span class="special">;</span> <span class="comment">// <a class="link" href="date_generator_parser.html#idp110590032-bb">public member functions</a></span> <span class="keyword">void</span> <a class="link" href="date_generator_parser.html#idp110590592-bb"><span class="identifier">element_strings</span></a><span class="special">(</span><span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span 
class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span><span class="special">)</span><span class="special">;</span> <span class="keyword">void</span> <a class="link" href="date_generator_parser.html#idp108296528-bb"><span class="identifier">element_strings</span></a><span class="special">(</span><span class="keyword">const</span> <span class="identifier">collection_type</span> <span class="special">&amp;</span><span class="special">)</span><span class="special">;</span> <span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.partial_date_type">partial_date_type</a> <a class="link" href="date_generator_parser.html#idp69651328-bb"><span class="identifier">get_partial_date_type</span></a><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span><span class="special">)</span> <span class="keyword">const</span><span class="special">;</span> 
<span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.nth_kday_type">nth_kday_type</a> <a class="link" href="date_generator_parser.html#idp43087152-bb"><span class="identifier">get_nth_kday_type</span></a><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span><span class="special">)</span> <span class="keyword">const</span><span class="special">;</span> <span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.first_kday_type">first_kday_type</a> <a class="link" href="date_generator_parser.html#idp49877744-bb"><span class="identifier">get_first_kday_type</span></a><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span><span class="special">)</span> <span 
class="keyword">const</span><span class="special">;</span> <span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.last_kday_type">last_kday_type</a> <a class="link" href="date_generator_parser.html#idp111115728-bb"><span class="identifier">get_last_kday_type</span></a><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span><span class="special">)</span> <span class="keyword">const</span><span class="special">;</span> <span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.kday_before_type">kday_before_type</a> <a class="link" href="date_generator_parser.html#idp108386352-bb"><span class="identifier">get_kday_before_type</span></a><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> 
<span class="special">&amp;</span><span class="special">)</span> <span class="keyword">const</span><span class="special">;</span> <span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.kday_after_type">kday_after_type</a> <a class="link" href="date_generator_parser.html#idp105249184-bb"><span class="identifier">get_kday_after_type</span></a><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span><span class="special">)</span> <span class="keyword">const</span><span class="special">;</span> <span class="comment">// <a class="link" href="date_generator_parser.html#idp98555120-bb">private member functions</a></span> <span class="keyword">void</span> <a class="link" href="date_generator_parser.html#idp98555648-bb"><span class="identifier">extract_element</span></a><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span><span class="special">,</span> <span class="keyword">typename</span> <span class="identifier">date_generator_parser</span><span class="special">::</span><span class="identifier">phrase_elements</span><span class="special">)</span> <span class="keyword">const</span><span class="special">;</span> <span class="comment">// public data 
members</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="identifier">char_type</span> <span class="identifier">first_string</span><span class="special">;</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="identifier">char_type</span> <span class="identifier">second_string</span><span class="special">;</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="identifier">char_type</span> <span class="identifier">third_string</span><span class="special">;</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="identifier">char_type</span> <span class="identifier">fourth_string</span><span class="special">;</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="identifier">char_type</span> <span class="identifier">fifth_string</span><span class="special">;</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="identifier">char_type</span> <span class="identifier">last_string</span><span class="special">;</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="identifier">char_type</span> <span class="identifier">before_string</span><span class="special">;</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="identifier">char_type</span> <span class="identifier">after_string</span><span class="special">;</span> <span class="keyword">static</span> <span class="keyword">const</span> <span class="identifier">char_type</span> <span class="identifier">of_string</span><span class="special">;</span> <span class="special">}</span><span class="special">;</span></pre></div> <div class="refsect1"> <a name="idp186737520"></a><h2>Description</h2> <p>The elements of a date_generator "phrase" are parsed from the input stream in a particular order. 
All elements are required and the order in which they appear cannot change, however, the elements themselves can be changed. The default elements and their order are as follows:</p> <div class="itemizedlist"><ul class="itemizedlist" style="list-style-type: disc; "> <li class="listitem"><p><a class="link" href="partial_date.html" title="Class template partial_date">partial_date</a> =&gt; "dd Month"</p></li> <li class="listitem"><p>nth_day_of_the_week_in_month =&gt; "nth weekday of month"</p></li> <li class="listitem"><p>first_day_of_the_week_in_month =&gt; "first weekday of month"</p></li> <li class="listitem"><p>last_day_of_the_week_in_month =&gt; "last weekday of month"</p></li> <li class="listitem"><p>first_day_of_the_week_after =&gt; "weekday after"</p></li> <li class="listitem"><p>first_day_of_the_week_before =&gt; "weekday before"</p></li> </ul></div> <p> </p> <p>Weekday and Month names and formats are handled via the <a class="link" href="date_input_facet.html" title="Class template date_input_facet">date_input_facet</a>. </p> <div class="refsect2"> <a name="idp186743664"></a><h3> <a name="boost.date_time.date_generator_parserconstruct-copy-destruct"></a><code class="computeroutput">date_generator_parser</code> public construct/copy/destruct</h3> <div class="orderedlist"><ol class="orderedlist" type="1"> <li class="listitem"> <pre class="literallayout"><a name="idp40203712-bb"></a><span class="identifier">date_generator_parser</span><span class="special">(</span><span class="special">)</span><span class="special">;</span></pre>Creates a <code class="computeroutput"><a class="link" href="date_generator_parser.html" title="Class template date_generator_parser">date_generator_parser</a></code> with the default set of "element_strings". 
</li> <li class="listitem"> <pre class="literallayout"><a name="idp49347584-bb"></a><span class="identifier">date_generator_parser</span><span class="special">(</span><span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> first_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> second_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> third_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> fourth_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> fifth_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> last_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> before_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> after_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> of_str<span class="special">)</span><span class="special">;</span></pre>Creates a <code class="computeroutput"><a class="link" href="date_generator_parser.html" title="Class template date_generator_parser">date_generator_parser</a></code> using a user defined set of element strings. 
</li> </ol></div> </div> <div class="refsect2"> <a name="idp186780368"></a><h3> <a name="idp110590032-bb"></a><code class="computeroutput">date_generator_parser</code> public member functions</h3> <div class="orderedlist"><ol class="orderedlist" type="1"> <li class="listitem"> <pre class="literallayout"><span class="keyword">void</span> <a name="idp110590592-bb"></a><span class="identifier">element_strings</span><span class="special">(</span><span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> first_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> second_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> third_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> fourth_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> fifth_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> last_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> before_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> after_str<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">string_type</span> <span class="special">&amp;</span> of_str<span class="special">)</span><span class="special">;</span></pre>Replace strings that determine nth week for generator. 
</li> <li class="listitem"><pre class="literallayout"><span class="keyword">void</span> <a name="idp108296528-bb"></a><span class="identifier">element_strings</span><span class="special">(</span><span class="keyword">const</span> <span class="identifier">collection_type</span> <span class="special">&amp;</span> col<span class="special">)</span><span class="special">;</span></pre></li> <li class="listitem"> <pre class="literallayout"><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.partial_date_type">partial_date_type</a> <a name="idp69651328-bb"></a><span class="identifier">get_partial_date_type</span><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> sitr<span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> stream_end<span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span> a_ios<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span> facet<span class="special">)</span> <span class="keyword">const</span><span class="special">;</span></pre>returns <code class="computeroutput"><a class="link" href="partial_date.html" title="Class template partial_date">partial_date</a></code> parsed from stream </li> <li class="listitem"> <pre class="literallayout"><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.nth_kday_type">nth_kday_type</a> <a name="idp43087152-bb"></a><span 
class="identifier">get_nth_kday_type</span><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> sitr<span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> stream_end<span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span> a_ios<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span> facet<span class="special">)</span> <span class="keyword">const</span><span class="special">;</span></pre>returns nth_kday_of_week parsed from stream </li> <li class="listitem"> <pre class="literallayout"><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.first_kday_type">first_kday_type</a> <a name="idp49877744-bb"></a><span class="identifier">get_first_kday_type</span><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> sitr<span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> stream_end<span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span> a_ios<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span> facet<span class="special">)</span> <span class="keyword">const</span><span class="special">;</span></pre>returns first_kday_of_week parsed from stream </li> <li class="listitem"> <pre class="literallayout"><span class="keyword">template</span><span 
class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.last_kday_type">last_kday_type</a> <a name="idp111115728-bb"></a><span class="identifier">get_last_kday_type</span><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> sitr<span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> stream_end<span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span> a_ios<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span> facet<span class="special">)</span> <span class="keyword">const</span><span class="special">;</span></pre>returns last_kday_of_week parsed from stream </li> <li class="listitem"> <pre class="literallayout"><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.kday_before_type">kday_before_type</a> <a name="idp108386352-bb"></a><span class="identifier">get_kday_before_type</span><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> sitr<span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> stream_end<span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span> a_ios<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span 
class="special">&amp;</span> facet<span class="special">)</span> <span class="keyword">const</span><span class="special">;</span></pre>returns first_kday_of_week parsed from stream </li> <li class="listitem"> <pre class="literallayout"><span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> facet_type<span class="special">&gt;</span> <a class="link" href="date_generator_parser.html#boost.date_time.date_generator_parser.kday_after_type">kday_after_type</a> <a name="idp105249184-bb"></a><span class="identifier">get_kday_after_type</span><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> sitr<span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> stream_end<span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">ios_base</span> <span class="special">&amp;</span> a_ios<span class="special">,</span> <span class="keyword">const</span> <span class="identifier">facet_type</span> <span class="special">&amp;</span> facet<span class="special">)</span> <span class="keyword">const</span><span class="special">;</span></pre>returns first_kday_of_week parsed from stream </li> </ol></div> </div> <div class="refsect2"> <a name="idp186922160"></a><h3> <a name="idp98555120-bb"></a><code class="computeroutput">date_generator_parser</code> private member functions</h3> <div class="orderedlist"><ol class="orderedlist" type="1"><li class="listitem"> <pre class="literallayout"><span class="keyword">void</span> <a name="idp98555648-bb"></a><span class="identifier">extract_element</span><span class="special">(</span><span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> sitr<span class="special">,</span> <span class="identifier">stream_itr_type</span> <span class="special">&amp;</span> stream_end<span class="special">,</span> <span 
class="keyword">typename</span> <span class="identifier">date_generator_parser</span><span class="special">::</span><span class="identifier">phrase_elements</span> ele<span class="special">)</span> <span class="keyword">const</span><span class="special">;</span></pre>Extracts phrase element from input. Throws ios_base::failure on error. </li></ol></div> </div> </div> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2001-2005 CrystalClear Software, Inc<p>Subject to the Boost Software License, Version 1.0. (See accompanying file <code class="filename">LICENSE_1_0.txt</code> or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>)</p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="date_generator_formatter.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../date_time/doxy.html#header.boost.date_time.date_generator_parser_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="year_based_generator.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
biospi/seamass-windeps
src/boost_1_57_0/doc/html/boost/date_time/date_generator_parser.html
HTML
apache-2.0
38,032
/* * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.internal.ascii.rest; import com.hazelcast.internal.ascii.CommandParser; import com.hazelcast.internal.ascii.TextCommand; import com.hazelcast.internal.ascii.memcache.ErrorCommand; import com.hazelcast.nio.ascii.TextDecoder; import java.util.StringTokenizer; import static com.hazelcast.internal.ascii.TextCommandConstants.TextCommandType.ERROR_CLIENT; public class HttpPostCommandParser implements CommandParser { @Override public TextCommand parser(TextDecoder decoder, String cmd, int space) { StringTokenizer st = new StringTokenizer(cmd); st.nextToken(); String uri; if (st.hasMoreTokens()) { uri = st.nextToken(); } else { return new ErrorCommand(ERROR_CLIENT); } return new HttpPostCommand(decoder, uri); } }
tufangorel/hazelcast
hazelcast/src/main/java/com/hazelcast/internal/ascii/rest/HttpPostCommandParser.java
Java
apache-2.0
1,461
# `amazoncorretto:8-alpine3.14` ## Docker Metadata - Image ID: `sha256:67cad79a3a664e972af93336fae8c1051a89e4e54143d94047d8ac1a1971af98` - Created: `2022-01-19T22:02:42.781390919Z` - Virtual Size: ~ 201.71 Mb (total size of all layers on-disk) - Arch: `linux`/`amd64` - Command: `["/bin/sh"]` - Environment: - `PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/lib/jvm/default-jvm/bin` - `LANG=C.UTF-8` - `JAVA_HOME=/usr/lib/jvm/default-jvm` ## `apk` (`.apk`-based packages) ### `apk` package: `alpine-baselayout` ```console alpine-baselayout-3.2.0-r16 description: Alpine base dir structure and init scripts alpine-baselayout-3.2.0-r16 webpage: https://git.alpinelinux.org/cgit/aports/tree/main/alpine-baselayout alpine-baselayout-3.2.0-r16 installed size: 404 KiB alpine-baselayout-3.2.0-r16 license: GPL-2.0-only ``` ### `apk` package: `alpine-keys` ```console alpine-keys-2.4-r0 description: Public keys for Alpine Linux packages alpine-keys-2.4-r0 webpage: https://alpinelinux.org alpine-keys-2.4-r0 installed size: 156 KiB alpine-keys-2.4-r0 license: MIT ``` ### `apk` package: `amazon-corretto-8` ```console amazon-corretto-8-8.322.06.2-r0 description: Corretto-8 amazon-corretto-8-8.322.06.2-r0 webpage: https://github.com/corretto/corretto-8 amazon-corretto-8-8.322.06.2-r0 installed size: 87 MiB amazon-corretto-8-8.322.06.2-r0 license: https://github.com/corretto/corretto-8/blob/develop/LICENSE ``` ### `apk` package: `amazon-corretto-8-jre` ```console amazon-corretto-8-jre-8.322.06.2-r0 description: Amazon Corretto 8 Java Runtime amazon-corretto-8-jre-8.322.06.2-r0 webpage: https://github.com/corretto/corretto-8 amazon-corretto-8-jre-8.322.06.2-r0 installed size: 99 MiB amazon-corretto-8-jre-8.322.06.2-r0 license: https://github.com/corretto/corretto-8/blob/develop/LICENSE ``` ### `apk` package: `apk-tools` ```console apk-tools-2.12.7-r0 description: Alpine Package Keeper - package manager for alpine apk-tools-2.12.7-r0 webpage: 
https://gitlab.alpinelinux.org/alpine/apk-tools apk-tools-2.12.7-r0 installed size: 304 KiB apk-tools-2.12.7-r0 license: GPL-2.0-only ``` ### `apk` package: `busybox` ```console busybox-1.33.1-r6 description: Size optimized toolbox of many common UNIX utilities busybox-1.33.1-r6 webpage: https://busybox.net/ busybox-1.33.1-r6 installed size: 928 KiB busybox-1.33.1-r6 license: GPL-2.0-only ``` ### `apk` package: `ca-certificates-bundle` ```console ca-certificates-bundle-20191127-r5 description: Pre generated bundle of Mozilla certificates ca-certificates-bundle-20191127-r5 webpage: https://www.mozilla.org/en-US/about/governance/policies/security-group/certs/ ca-certificates-bundle-20191127-r5 installed size: 228 KiB ca-certificates-bundle-20191127-r5 license: MPL-2.0 AND MIT ``` ### `apk` package: `java-common` ```console java-common-0.4-r0 description: Java common (updates java links) java-common-0.4-r0 webpage: https://git.alpinelinux.org/aports/ java-common-0.4-r0 installed size: 12 KiB java-common-0.4-r0 license: GPL-2.0-or-later ``` ### `apk` package: `libc-utils` ```console libc-utils-0.7.2-r3 description: Meta package to pull in correct libc libc-utils-0.7.2-r3 webpage: https://alpinelinux.org libc-utils-0.7.2-r3 installed size: 4096 B libc-utils-0.7.2-r3 license: BSD-2-Clause AND BSD-3-Clause ``` ### `apk` package: `libcrypto1.1` ```console libcrypto1.1-1.1.1l-r0 description: Crypto library from openssl libcrypto1.1-1.1.1l-r0 webpage: https://www.openssl.org/ libcrypto1.1-1.1.1l-r0 installed size: 2704 KiB libcrypto1.1-1.1.1l-r0 license: OpenSSL ``` ### `apk` package: `libretls` ```console libretls-3.3.3p1-r2 description: port of libtls from libressl to openssl libretls-3.3.3p1-r2 webpage: https://git.causal.agency/libretls/ libretls-3.3.3p1-r2 installed size: 84 KiB libretls-3.3.3p1-r2 license: ISC AND (BSD-3-Clause OR MIT) ``` ### `apk` package: `libssl1.1` ```console libssl1.1-1.1.1l-r0 description: SSL shared libraries libssl1.1-1.1.1l-r0 webpage: 
https://www.openssl.org/ libssl1.1-1.1.1l-r0 installed size: 528 KiB libssl1.1-1.1.1l-r0 license: OpenSSL ``` ### `apk` package: `musl` ```console musl-1.2.2-r3 description: the musl c library (libc) implementation musl-1.2.2-r3 webpage: https://musl.libc.org/ musl-1.2.2-r3 installed size: 608 KiB musl-1.2.2-r3 license: MIT ``` ### `apk` package: `musl-utils` ```console musl-utils-1.2.2-r3 description: the musl c library (libc) implementation musl-utils-1.2.2-r3 webpage: https://musl.libc.org/ musl-utils-1.2.2-r3 installed size: 144 KiB musl-utils-1.2.2-r3 license: MIT BSD GPL2+ ``` ### `apk` package: `scanelf` ```console scanelf-1.3.2-r0 description: Scan ELF binaries for stuff scanelf-1.3.2-r0 webpage: https://wiki.gentoo.org/wiki/Hardened/PaX_Utilities scanelf-1.3.2-r0 installed size: 92 KiB scanelf-1.3.2-r0 license: GPL-2.0-only ``` ### `apk` package: `ssl_client` ```console ssl_client-1.33.1-r6 description: EXternal ssl_client for busybox wget ssl_client-1.33.1-r6 webpage: https://busybox.net/ ssl_client-1.33.1-r6 installed size: 28 KiB ssl_client-1.33.1-r6 license: GPL-2.0-only ``` ### `apk` package: `zlib` ```console zlib-1.2.11-r3 description: A compression/decompression Library zlib-1.2.11-r3 webpage: https://zlib.net/ zlib-1.2.11-r3 installed size: 108 KiB zlib-1.2.11-r3 license: Zlib ```
docker-library/repo-info
repos/amazoncorretto/local/8u322-alpine3.14.md
Markdown
apache-2.0
5,401
//*********************************************************//
//    Copyright (c) Microsoft. All rights reserved.
//
//    Apache 2.0 License
//
//    You may obtain a copy of the License at
//    http://www.apache.org/licenses/LICENSE-2.0
//
//    Unless required by applicable law or agreed to in writing, software
//    distributed under the License is distributed on an "AS IS" BASIS,
//    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
//    implied. See the License for the specific language governing
//    permissions and limitations under the License.
//
//*********************************************************//

using System.IO;

namespace Microsoft.NodejsTools.Npm.SPI {
    /// <summary>
    /// Collection of the packages found under a root package's node_modules
    /// directory, merged with the dependencies declared in its package.json.
    /// </summary>
    internal class NodeModules : AbstractNodeModules {
        public NodeModules(IRootPackage parent, bool showMissingDevOptionalSubPackages) {
            var nodeModulesPath = Path.Combine(parent.Path, "node_modules");

            // Pass 1: pick up every physically installed package directory.
            // Paths at or beyond MAX_FOLDER_PATH are skipped because the
            // underlying Win32 APIs cannot handle them.
            if (nodeModulesPath.Length < NativeMethods.MAX_FOLDER_PATH && Directory.Exists(nodeModulesPath)) {
                // The ".bin" folder holds npm's command shims, not a package.
                var binSuffix = string.Format("{0}.bin", Path.DirectorySeparatorChar);
                foreach (var candidateDir in Directory.EnumerateDirectories(nodeModulesPath)) {
                    if (candidateDir.Length >= NativeMethods.MAX_FOLDER_PATH || candidateDir.EndsWith(binSuffix)) {
                        continue;
                    }
                    AddModule(new Package(parent, candidateDir, showMissingDevOptionalSubPackages));
                }
            }

            // Pass 2: reconcile with the declared dependencies so that missing
            // packages are represented and version ranges are recorded.
            var manifest = parent.PackageJson;
            if (manifest != null) {
                foreach (var dependency in manifest.AllDependencies) {
                    Package resolved = null;
                    if (Contains(dependency.Name)) {
                        // Already discovered on disk in pass 1.
                        resolved = this[dependency.Name] as Package;
                    } else {
                        var dependencyPath = Path.Combine(nodeModulesPath, dependency.Name);
                        if (dependencyPath.Length < NativeMethods.MAX_FOLDER_PATH) {
                            resolved = new Package(
                                parent,
                                dependencyPath,
                                showMissingDevOptionalSubPackages);
                            // Root packages always list missing deps; sub-packages
                            // only do so when the caller asked for them.
                            if (!(parent is IPackage) || !resolved.IsMissing || showMissingDevOptionalSubPackages) {
                                AddModule(resolved);
                            }
                        }
                    }

                    if (resolved != null) {
                        resolved.RequestedVersionRange = dependency.VersionRangeText;
                    }
                }
            }

            _packagesSorted.Sort(new PackageComparer());
        }
    }
}
nareshjo/nodejstools
Nodejs/Product/Npm/SPI/NodeModules.cs
C#
apache-2.0
2,794
"""Entity for Firmata devices.""" from __future__ import annotations from homeassistant.config_entries import ConfigEntry from homeassistant.helpers.entity import DeviceInfo from .board import FirmataPinType from .const import DOMAIN, FIRMATA_MANUFACTURER from .pin import FirmataBoardPin class FirmataEntity: """Representation of a Firmata entity.""" def __init__(self, api): """Initialize the entity.""" self._api = api @property def device_info(self) -> DeviceInfo: """Return device info.""" return DeviceInfo( connections={}, identifiers={(DOMAIN, self._api.board.name)}, manufacturer=FIRMATA_MANUFACTURER, name=self._api.board.name, sw_version=self._api.board.firmware_version, ) class FirmataPinEntity(FirmataEntity): """Representation of a Firmata pin entity.""" def __init__( self, api: type[FirmataBoardPin], config_entry: ConfigEntry, name: str, pin: FirmataPinType, ) -> None: """Initialize the pin entity.""" super().__init__(api) self._name = name location = (config_entry.entry_id, "pin", pin) self._unique_id = "_".join(str(i) for i in location) @property def name(self) -> str: """Get the name of the pin.""" return self._name @property def should_poll(self) -> bool: """No polling needed.""" return False @property def unique_id(self) -> str: """Return a unique identifier for this device.""" return self._unique_id
jawilson/home-assistant
homeassistant/components/firmata/entity.py
Python
apache-2.0
1,626
/*
 * Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
 */

#ifndef SRC_BGP_BGP_CONDITION_LISTENER_H_
#define SRC_BGP_BGP_CONDITION_LISTENER_H_

#include <boost/intrusive_ptr.hpp>
#include <boost/scoped_ptr.hpp>
#include <tbb/mutex.h>

#include <map>
#include <set>
#include <string>

#include "base/util.h"

class BgpRoute;
class BgpServer;
class BgpTable;
class DBEntryBase;
class DBTableBase;
class DBTablePartBase;
class TaskTrigger;

//
// ConditionMatch
// Base class for a condition-match object registered with
// BgpConditionListener. Derived classes implement a module-specific match
// predicate plus the action taken on a match.
//
// Lifetime is managed via the intrusive refcount below (ConditionMatchPtr).
//
class ConditionMatch {
public:
    ConditionMatch() : deleted_(false), walk_done_(false), num_matchstate_(0) {
        refcount_ = 0;
    }
    virtual ~ConditionMatch() {
    }

    // Performs the match and action function.
    // Runs in the db::DBTable task context; actual action can be performed
    // in a different task context.
    // Concurrency: DB::DBTable task, either from DB notification context or
    // from DB walk context.
    virtual bool Match(BgpServer *server, BgpTable *table, BgpRoute *route,
                       bool deleted) = 0;

    // Human-readable description of the condition, for logging/introspect.
    virtual std::string ToString() const = 0;

    bool deleted() const { return deleted_; }

    // Tracks how many routes currently carry ConditionMatchState for this
    // condition.
    // NOTE(review): only the increment takes mutex_; the decrement relies on
    // the caller's task serialization — presumably decrements happen in a
    // single task context. Confirm before changing concurrency assumptions.
    void IncrementNumMatchstate() {
        tbb::mutex::scoped_lock lock(mutex_);
        num_matchstate_++;
    }
    void DecrementNumMatchstate() {
        assert(num_matchstate_);
        num_matchstate_--;
    }
    uint32_t num_matchstate() const { return num_matchstate_; }

private:
    friend class BgpConditionListener;
    friend void intrusive_ptr_add_ref(ConditionMatch *match);
    friend void intrusive_ptr_release(ConditionMatch *match);

    // Called only by BgpConditionListener when the condition is removed.
    void SetDeleted() { deleted_ = true; }

    // Walk bookkeeping used by the listener's table-walk machinery.
    bool walk_done() const { return walk_done_; }
    void set_walk_done() { walk_done_ = true; }
    void reset_walk_done() { walk_done_ = false; }

    bool deleted_;
    bool walk_done_;
    tbb::mutex mutex_;
    uint32_t num_matchstate_;
    tbb::atomic<int> refcount_;
};

// Intrusive-pointer hooks for ConditionMatch reference counting.
inline void intrusive_ptr_add_ref(ConditionMatch *match) {
    match->refcount_.fetch_and_increment();
}

inline void intrusive_ptr_release(ConditionMatch *match) {
    int prev = match->refcount_.fetch_and_decrement();
    if (prev == 1) {
        delete match;
    }
}

//
// Intrusive pointer for lifetime management of a ConditionMatch object.
//
typedef boost::intrusive_ptr<ConditionMatch> ConditionMatchPtr;

//
// ConditionMatchState
// Base class for per-route metadata a ConditionMatch stores against a
// BgpRoute. refcnt tracks uses of the state; deleted marks it for cleanup.
//
class ConditionMatchState {
public:
    ConditionMatchState() : refcount_(0), deleted_(false) {
    }
    virtual ~ConditionMatchState() {
    }
    uint32_t refcnt() const { return refcount_; }
    void IncrementRefCnt() { refcount_++; }
    void set_deleted() { deleted_ = true; }
    void reset_deleted() { deleted_ = false; }
    bool deleted() const { return deleted_; }
    void DecrementRefCnt() {
        assert(refcount_);
        refcount_--;
    }

private:
    uint32_t refcount_;
    bool deleted_;
};

//
// Helper classes
//

// Stores the state managed by BgpConditionListener for each table it is
// registered with (defined in the .cc file).
class ConditionMatchTableState;

//
// BgpConditionListener
// Provides a generic interface to match a condition and call an action
// function. Application modules register a ConditionMatch object to start
// applying the match condition on all BgpRoutes, and may attach per-route
// module-specific state.
//
// A mutex is used to serialize access from multiple bgp::ConfigHelper tasks.
//
// BgpConditionListener: matches registered conditions against routes in a
// BgpTable and invokes module callbacks; see the class comment above.
//
class BgpConditionListener {
public:
    typedef std::map<BgpTable *, ConditionMatchTableState *> TableMap;

    // Called upon completion of Add or Delete operations.
    typedef boost::function<void(BgpTable *, ConditionMatch *)> RequestDoneCb;

    explicit BgpConditionListener(BgpServer *server);

    // Add a new match condition.
    // All subsequent DB table notifications are matched against this
    // condition, and the table is walked to match existing entries.
    void AddMatchCondition(BgpTable *table, ConditionMatch *obj,
                           RequestDoneCb addDoneCb);

    // Delete a match condition.
    // The table is walked to match the deleting condition and revert the
    // action taken on a previous match. The DeleteDone callback signals
    // removal completion; the application should then call
    // UnregisterMatchCondition to drop the ConditionMatch from the table.
    void RemoveMatchCondition(BgpTable *table, ConditionMatch *obj,
                              RequestDoneCb deleteDonecb);

    // Return the metadata added by the module for this (table, route, obj).
    ConditionMatchState *GetMatchState(BgpTable *table, BgpRoute *route,
                                       ConditionMatch *obj);

    // True if match state exists for this (table, route, obj).
    bool CheckMatchState(BgpTable *table, BgpRoute *route,
                         ConditionMatch *obj);

    // Set the module-specific metadata after the match/action.
    void SetMatchState(BgpTable *table, BgpRoute *route,
                       ConditionMatch *obj,
                       ConditionMatchState *state = NULL);

    // Clear the module-specific DBState.
    void RemoveMatchState(BgpTable *table, BgpRoute *route,
                          ConditionMatch *obj);

    // Remove the condition object from the table.
    void UnregisterMatchCondition(BgpTable *table, ConditionMatch *obj);

    BgpServer *server() { return server_; }

private:
    template <typename U> friend class PathResolverTest;

    typedef std::set<ConditionMatchTableState *> PurgeTableStateList;

    // DB table listener entry point.
    bool BgpRouteNotify(BgpServer *server, DBTablePartBase *root,
                        DBEntryBase *entry);

    // Kick off a table walk for the given condition.
    void TableWalk(ConditionMatchTableState *ts, ConditionMatch *obj,
                   RequestDoneCb cb);

    // TaskTrigger handler that releases table state queued for purge.
    bool PurgeTableState();

    // Walk-complete callback.
    void WalkDone(ConditionMatchTableState *ts, DBTableBase *table);

    // For testing only.
    void DisableTableWalkProcessing();
    void EnableTableWalkProcessing();

    BgpServer *server_;
    tbb::mutex mutex_;
    TableMap map_;
    PurgeTableStateList purge_list_;
    boost::scoped_ptr<TaskTrigger> purge_trigger_;

    DISALLOW_COPY_AND_ASSIGN(BgpConditionListener);
};

#endif  // SRC_BGP_BGP_CONDITION_LISTENER_H_
nischalsheth/contrail-controller
src/bgp/bgp_condition_listener.h
C
apache-2.0
6,485
/* GENERATED SOURCE. DO NOT MODIFY. */ // © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html#License /** ******************************************************************************* * Copyright (C) 2001-2016 International Business Machines Corporation and * others. All Rights Reserved. ******************************************************************************* */ package android.icu.lang; import java.util.BitSet; import java.util.Locale; import android.icu.impl.UCharacterProperty; import android.icu.util.ULocale; /** * Constants for ISO 15924 script codes, and related functions. * * <p>The current set of script code constants supports at least all scripts * that are encoded in the version of Unicode which ICU currently supports. * The names of the constants are usually derived from the * Unicode script property value aliases. * See UAX #24 Unicode Script Property (http://www.unicode.org/reports/tr24/) * and http://www.unicode.org/Public/UCD/latest/ucd/PropertyValueAliases.txt . * * <p>In addition, constants for many ISO 15924 script codes * are included, for use with language tags, CLDR data, and similar. * Some of those codes are not used in the Unicode Character Database (UCD). * For example, there are no characters that have a UCD script property value of * Hans or Hant. All Han ideographs have the Hani script property value in Unicode. * * <p>Private-use codes Qaaa..Qabx are not included, except as used in the UCD or in CLDR. * * <p>Starting with ICU 55, script codes are only added when their scripts * have been or will certainly be encoded in Unicode, * and have been assigned Unicode script property value aliases, * to ensure that their script names are stable and match the names of the constants. * Script codes like Latf and Aran that are not subject to separate encoding * may be added at any time. 
*/ public final class UScript { /** * Invalid code */ public static final int INVALID_CODE = -1; /** * Common */ public static final int COMMON = 0; /* Zyyy */ /** * Inherited */ public static final int INHERITED = 1; /* Zinh */ /* "Code for inherited script", for non-spacing combining marks; also Qaai */ /** * Arabic */ public static final int ARABIC = 2; /* Arab */ /** * Armenian */ public static final int ARMENIAN = 3; /* Armn */ /** * Bengali */ public static final int BENGALI = 4; /* Beng */ /** * Bopomofo */ public static final int BOPOMOFO = 5; /* Bopo */ /** * Cherokee */ public static final int CHEROKEE = 6; /* Cher */ /** * Coptic */ public static final int COPTIC = 7; /* Qaac */ /** * Cyrillic */ public static final int CYRILLIC = 8; /* Cyrl (Cyrs) */ /** * Deseret */ public static final int DESERET = 9; /* Dsrt */ /** * Devanagari */ public static final int DEVANAGARI = 10; /* Deva */ /** * Ethiopic */ public static final int ETHIOPIC = 11; /* Ethi */ /** * Georgian */ public static final int GEORGIAN = 12; /* Geor (Geon; Geoa) */ /** * Gothic */ public static final int GOTHIC = 13; /* Goth */ /** * Greek */ public static final int GREEK = 14; /* Grek */ /** * Gujarati */ public static final int GUJARATI = 15; /* Gujr */ /** * Gurmukhi */ public static final int GURMUKHI = 16; /* Guru */ /** * Han */ public static final int HAN = 17; /* Hani */ /** * Hangul */ public static final int HANGUL = 18; /* Hang */ /** * Hebrew */ public static final int HEBREW = 19; /* Hebr */ /** * Hiragana */ public static final int HIRAGANA = 20; /* Hira */ /** * Kannada */ public static final int KANNADA = 21; /* Knda */ /** * Katakana */ public static final int KATAKANA = 22; /* Kana */ /** * Khmer */ public static final int KHMER = 23; /* Khmr */ /** * Lao */ public static final int LAO = 24; /* Laoo */ /** * Latin */ public static final int LATIN = 25; /* Latn (Latf; Latg) */ /** * Malayalam */ public static final int MALAYALAM = 26; /* Mlym */ /** * Mangolian */ public 
static final int MONGOLIAN = 27; /* Mong */ /** * Myammar */ public static final int MYANMAR = 28; /* Mymr */ /** * Ogham */ public static final int OGHAM = 29; /* Ogam */ /** * Old Itallic */ public static final int OLD_ITALIC = 30; /* Ital */ /** * Oriya */ public static final int ORIYA = 31; /* Orya */ /** * Runic */ public static final int RUNIC = 32; /* Runr */ /** * Sinhala */ public static final int SINHALA = 33; /* Sinh */ /** * Syriac */ public static final int SYRIAC = 34; /* Syrc (Syrj; Syrn; Syre) */ /** * Tamil */ public static final int TAMIL = 35; /* Taml */ /** * Telugu */ public static final int TELUGU = 36; /* Telu */ /** * Thana */ public static final int THAANA = 37; /* Thaa */ /** * Thai */ public static final int THAI = 38; /* Thai */ /** * Tibetan */ public static final int TIBETAN = 39; /* Tibt */ /** * Unified Canadian Aboriginal Symbols */ public static final int CANADIAN_ABORIGINAL = 40; /* Cans */ /** * Unified Canadian Aboriginal Symbols (alias) */ public static final int UCAS = CANADIAN_ABORIGINAL; /* Cans */ /** * Yi syllables */ public static final int YI = 41; /* Yiii */ /** * Tagalog */ public static final int TAGALOG = 42; /* Tglg */ /** * Hanunooo */ public static final int HANUNOO = 43; /* Hano */ /** * Buhid */ public static final int BUHID = 44; /* Buhd */ /** * Tagbanwa */ public static final int TAGBANWA = 45; /* Tagb */ /** * Braille * Script in Unicode 4 * */ public static final int BRAILLE = 46; /* Brai */ /** * Cypriot * Script in Unicode 4 * */ public static final int CYPRIOT = 47; /* Cprt */ /** * Limbu * Script in Unicode 4 * */ public static final int LIMBU = 48; /* Limb */ /** * Linear B * Script in Unicode 4 * */ public static final int LINEAR_B = 49; /* Linb */ /** * Osmanya * Script in Unicode 4 * */ public static final int OSMANYA = 50; /* Osma */ /** * Shavian * Script in Unicode 4 * */ public static final int SHAVIAN = 51; /* Shaw */ /** * Tai Le * Script in Unicode 4 * */ public static final int TAI_LE = 52; 
/* Tale */ /** * Ugaritic * Script in Unicode 4 * */ public static final int UGARITIC = 53; /* Ugar */ /** * Script in Unicode 4.0.1 */ public static final int KATAKANA_OR_HIRAGANA = 54; /*Hrkt */ /** * Script in Unicode 4.1 */ public static final int BUGINESE = 55; /* Bugi */ /** * Script in Unicode 4.1 */ public static final int GLAGOLITIC = 56; /* Glag */ /** * Script in Unicode 4.1 */ public static final int KHAROSHTHI = 57; /* Khar */ /** * Script in Unicode 4.1 */ public static final int SYLOTI_NAGRI = 58; /* Sylo */ /** * Script in Unicode 4.1 */ public static final int NEW_TAI_LUE = 59; /* Talu */ /** * Script in Unicode 4.1 */ public static final int TIFINAGH = 60; /* Tfng */ /** * Script in Unicode 4.1 */ public static final int OLD_PERSIAN = 61; /* Xpeo */ /** * ISO 15924 script code */ public static final int BALINESE = 62; /* Bali */ /** * ISO 15924 script code */ public static final int BATAK = 63; /* Batk */ /** * ISO 15924 script code */ public static final int BLISSYMBOLS = 64; /* Blis */ /** * ISO 15924 script code */ public static final int BRAHMI = 65; /* Brah */ /** * ISO 15924 script code */ public static final int CHAM = 66; /* Cham */ /** * ISO 15924 script code */ public static final int CIRTH = 67; /* Cirt */ /** * ISO 15924 script code */ public static final int OLD_CHURCH_SLAVONIC_CYRILLIC = 68; /* Cyrs */ /** * ISO 15924 script code */ public static final int DEMOTIC_EGYPTIAN = 69; /* Egyd */ /** * ISO 15924 script code */ public static final int HIERATIC_EGYPTIAN = 70; /* Egyh */ /** * ISO 15924 script code */ public static final int EGYPTIAN_HIEROGLYPHS = 71; /* Egyp */ /** * ISO 15924 script code */ public static final int KHUTSURI = 72; /* Geok */ /** * ISO 15924 script code */ public static final int SIMPLIFIED_HAN = 73; /* Hans */ /** * ISO 15924 script code */ public static final int TRADITIONAL_HAN = 74; /* Hant */ /** * ISO 15924 script code */ public static final int PAHAWH_HMONG = 75; /* Hmng */ /** * ISO 15924 script code */ 
public static final int OLD_HUNGARIAN = 76; /* Hung */ /** * ISO 15924 script code */ public static final int HARAPPAN_INDUS = 77; /* Inds */ /** * ISO 15924 script code */ public static final int JAVANESE = 78; /* Java */ /** * ISO 15924 script code */ public static final int KAYAH_LI = 79; /* Kali */ /** * ISO 15924 script code */ public static final int LATIN_FRAKTUR = 80; /* Latf */ /** * ISO 15924 script code */ public static final int LATIN_GAELIC = 81; /* Latg */ /** * ISO 15924 script code */ public static final int LEPCHA = 82; /* Lepc */ /** * ISO 15924 script code */ public static final int LINEAR_A = 83; /* Lina */ /** * ISO 15924 script code */ public static final int MANDAIC = 84; /* Mand */ /** * ISO 15924 script code */ public static final int MANDAEAN = MANDAIC; /** * ISO 15924 script code */ public static final int MAYAN_HIEROGLYPHS = 85; /* Maya */ /** * ISO 15924 script code */ public static final int MEROITIC_HIEROGLYPHS = 86; /* Mero */ /** * ISO 15924 script code */ public static final int MEROITIC = MEROITIC_HIEROGLYPHS; /** * ISO 15924 script code */ public static final int NKO = 87; /* Nkoo */ /** * ISO 15924 script code */ public static final int ORKHON = 88; /* Orkh */ /** * ISO 15924 script code */ public static final int OLD_PERMIC = 89; /* Perm */ /** * ISO 15924 script code */ public static final int PHAGS_PA = 90; /* Phag */ /** * ISO 15924 script code */ public static final int PHOENICIAN = 91; /* Phnx */ /** * ISO 15924 script code */ public static final int MIAO = 92; /* Plrd */ /** * ISO 15924 script code */ public static final int PHONETIC_POLLARD = MIAO; /** * ISO 15924 script code */ public static final int RONGORONGO = 93; /* Roro */ /** * ISO 15924 script code */ public static final int SARATI = 94; /* Sara */ /** * ISO 15924 script code */ public static final int ESTRANGELO_SYRIAC = 95; /* Syre */ /** * ISO 15924 script code */ public static final int WESTERN_SYRIAC = 96; /* Syrj */ /** * ISO 15924 script code */ public 
static final int EASTERN_SYRIAC = 97; /* Syrn */ /** * ISO 15924 script code */ public static final int TENGWAR = 98; /* Teng */ /** * ISO 15924 script code */ public static final int VAI = 99; /* Vaii */ /** * ISO 15924 script code */ public static final int VISIBLE_SPEECH = 100;/* Visp */ /** * ISO 15924 script code */ public static final int CUNEIFORM = 101;/* Xsux */ /** * ISO 15924 script code */ public static final int UNWRITTEN_LANGUAGES = 102;/* Zxxx */ /** * ISO 15924 script code */ public static final int UNKNOWN = 103;/* Zzzz */ /* Unknown="Code for uncoded script", for unassigned code points */ /** * ISO 15924 script code */ public static final int CARIAN = 104;/* Cari */ /** * ISO 15924 script code */ public static final int JAPANESE = 105;/* Jpan */ /** * ISO 15924 script code */ public static final int LANNA = 106;/* Lana */ /** * ISO 15924 script code */ public static final int LYCIAN = 107;/* Lyci */ /** * ISO 15924 script code */ public static final int LYDIAN = 108;/* Lydi */ /** * ISO 15924 script code */ public static final int OL_CHIKI = 109;/* Olck */ /** * ISO 15924 script code */ public static final int REJANG = 110;/* Rjng */ /** * ISO 15924 script code */ public static final int SAURASHTRA = 111;/* Saur */ /** * ISO 15924 script code for Sutton SignWriting */ public static final int SIGN_WRITING = 112;/* Sgnw */ /** * ISO 15924 script code */ public static final int SUNDANESE = 113;/* Sund */ /** * ISO 15924 script code */ public static final int MOON = 114;/* Moon */ /** * ISO 15924 script code */ public static final int MEITEI_MAYEK = 115;/* Mtei */ /** * ISO 15924 script code */ public static final int IMPERIAL_ARAMAIC = 116;/* Armi */ /** * ISO 15924 script code */ public static final int AVESTAN = 117;/* Avst */ /** * ISO 15924 script code */ public static final int CHAKMA = 118;/* Cakm */ /** * ISO 15924 script code */ public static final int KOREAN = 119;/* Kore */ /** * ISO 15924 script code */ public static final int KAITHI = 
120;/* Kthi */ /** * ISO 15924 script code */ public static final int MANICHAEAN = 121;/* Mani */ /** * ISO 15924 script code */ public static final int INSCRIPTIONAL_PAHLAVI = 122;/* Phli */ /** * ISO 15924 script code */ public static final int PSALTER_PAHLAVI = 123;/* Phlp */ /** * ISO 15924 script code */ public static final int BOOK_PAHLAVI = 124;/* Phlv */ /** * ISO 15924 script code */ public static final int INSCRIPTIONAL_PARTHIAN = 125;/* Prti */ /** * ISO 15924 script code */ public static final int SAMARITAN = 126;/* Samr */ /** * ISO 15924 script code */ public static final int TAI_VIET = 127;/* Tavt */ /** * ISO 15924 script code */ public static final int MATHEMATICAL_NOTATION = 128;/* Zmth */ /** * ISO 15924 script code */ public static final int SYMBOLS = 129;/* Zsym */ /** * ISO 15924 script code */ public static final int BAMUM = 130;/* Bamu */ /** * ISO 15924 script code */ public static final int LISU = 131;/* Lisu */ /** * ISO 15924 script code */ public static final int NAKHI_GEBA = 132;/* Nkgb */ /** * ISO 15924 script code */ public static final int OLD_SOUTH_ARABIAN = 133;/* Sarb */ /** * ISO 15924 script code */ public static final int BASSA_VAH = 134;/* Bass */ /** * ISO 15924 script code */ public static final int DUPLOYAN = 135;/* Dupl */ /** * Typo, use DUPLOYAN * @deprecated ICU 54 * @hide original deprecated declaration */ @Deprecated public static final int DUPLOYAN_SHORTAND = DUPLOYAN; /** * ISO 15924 script code */ public static final int ELBASAN = 136;/* Elba */ /** * ISO 15924 script code */ public static final int GRANTHA = 137;/* Gran */ /** * ISO 15924 script code */ public static final int KPELLE = 138;/* Kpel */ /** * ISO 15924 script code */ public static final int LOMA = 139;/* Loma */ /** * Mende Kikakui * ISO 15924 script code */ public static final int MENDE = 140;/* Mend */ /** * ISO 15924 script code */ public static final int MEROITIC_CURSIVE = 141;/* Merc */ /** * ISO 15924 script code */ public static final int 
OLD_NORTH_ARABIAN = 142;/* Narb */ /** * ISO 15924 script code */ public static final int NABATAEAN = 143;/* Nbat */ /** * ISO 15924 script code */ public static final int PALMYRENE = 144;/* Palm */ /** * ISO 15924 script code */ public static final int KHUDAWADI = 145;/* Sind */ /** * ISO 15924 script code */ public static final int SINDHI = KHUDAWADI; /** * ISO 15924 script code */ public static final int WARANG_CITI = 146;/* Wara */ /** * ISO 15924 script code */ public static final int AFAKA = 147;/* Afak */ /** * ISO 15924 script code */ public static final int JURCHEN = 148;/* Jurc */ /** * ISO 15924 script code */ public static final int MRO = 149;/* Mroo */ /** * ISO 15924 script code */ public static final int NUSHU = 150;/* Nshu */ /** * ISO 15924 script code */ public static final int SHARADA = 151;/* Shrd */ /** * ISO 15924 script code */ public static final int SORA_SOMPENG = 152;/* Sora */ /** * ISO 15924 script code */ public static final int TAKRI = 153;/* Takr */ /** * ISO 15924 script code */ public static final int TANGUT = 154;/* Tang */ /** * ISO 15924 script code */ public static final int WOLEAI = 155;/* Wole */ /** * ISO 15924 script code */ public static final int ANATOLIAN_HIEROGLYPHS = 156;/* Hluw */ /** * ISO 15924 script code */ public static final int KHOJKI = 157;/* Khoj */ /** * ISO 15924 script code */ public static final int TIRHUTA = 158;/* Tirh */ /** * ISO 15924 script code */ public static final int CAUCASIAN_ALBANIAN = 159; /* Aghb */ /** * ISO 15924 script code */ public static final int MAHAJANI = 160; /* Mahj */ /** * ISO 15924 script code */ public static final int AHOM = 161; /* Ahom */ /** * ISO 15924 script code */ public static final int HATRAN = 162; /* Hatr */ /** * ISO 15924 script code */ public static final int MODI = 163; /* Modi */ /** * ISO 15924 script code */ public static final int MULTANI = 164; /* Mult */ /** * ISO 15924 script code */ public static final int PAU_CIN_HAU = 165; /* Pauc */ /** * ISO 15924 
script code */
    public static final int SIDDHAM = 166; /* Sidd */
    /**
     * ISO 15924 script code
     */
    public static final int ADLAM = 167; /* Adlm */
    /**
     * ISO 15924 script code
     */
    public static final int BHAIKSUKI = 168; /* Bhks */
    /**
     * ISO 15924 script code
     */
    public static final int MARCHEN = 169; /* Marc */
    /**
     * ISO 15924 script code
     */
    public static final int NEWA = 170; /* Newa */
    /**
     * ISO 15924 script code
     */
    public static final int OSAGE = 171; /* Osge */
    /**
     * ISO 15924 script code
     */
    public static final int HAN_WITH_BOPOMOFO = 172; /* Hanb */
    /**
     * ISO 15924 script code
     */
    public static final int JAMO = 173; /* Jamo */
    /**
     * ISO 15924 script code
     */
    public static final int SYMBOLS_EMOJI = 174; /* Zsye */

    /**
     * One more than the highest normal UScript code.
     * The highest value is available via UCharacter.getIntPropertyMaxValue(UProperty.SCRIPT).
     *
     * @deprecated ICU 58 The numeric value may change over time, see ICU ticket #12420.
     * @hide unsupported on Android
     */
    @Deprecated
    public static final int CODE_LIMIT = 175;

    // Returns the default script codes for a few well-known multi-script
    // languages, or the locale's explicit script code if one is present.
    // Does NOT add likely subtags; returns null when nothing can be derived.
    private static int[] getCodesFromLocale(ULocale locale) {
        // Multi-script languages, equivalent to the LocaleScript data
        // that we used to load from locale resource bundles.
        String lang = locale.getLanguage();
        if(lang.equals("ja")) {
            return new int[] { UScript.KATAKANA, UScript.HIRAGANA, UScript.HAN };
        }
        if(lang.equals("ko")) {
            return new int[] { UScript.HANGUL, UScript.HAN };
        }
        String script = locale.getScript();
        if(lang.equals("zh") && script.equals("Hant")) {
            return new int[] { UScript.HAN, UScript.BOPOMOFO };
        }
        // Explicit script code.
        if(script.length() != 0) {
            int scriptCode = UScript.getCodeFromName(script);
            if(scriptCode != UScript.INVALID_CODE) {
                if(scriptCode == UScript.SIMPLIFIED_HAN || scriptCode == UScript.TRADITIONAL_HAN) {
                    // Han variants are folded into the unified Han script code.
                    scriptCode = UScript.HAN;
                }
                return new int[] { scriptCode };
            }
        }
        return null;
    }

    /**
     * Helper function to find the code from locale.
     * First tries the locale as given, then retries after adding likely subtags.
     * @param locale The locale.
     */
    private static int[] findCodeFromLocale(ULocale locale) {
        int[] result = getCodesFromLocale(locale);
        if(result != null) {
            return result;
        }
        ULocale likely = ULocale.addLikelySubtags(locale);
        return getCodesFromLocale(likely);
    }

    /**
     * Gets the script codes associated with the given locale or ISO 15924 abbreviation or name.
     * Returns MALAYALAM given "Malayalam" OR "Mlym".
     * Returns LATIN given "en" OR "en_US"
     * @param locale Locale
     * @return The script codes array. null if the code cannot be found.
     */
    public static final int[] getCode(Locale locale){
        return findCodeFromLocale(ULocale.forLocale(locale));
    }
    /**
     * Gets the script codes associated with the given locale or ISO 15924 abbreviation or name.
     * Returns MALAYALAM given "Malayalam" OR "Mlym".
     * Returns LATIN given "en" OR "en_US"
     * @param locale ULocale
     * @return The script codes array. null if the code cannot be found.
     */
    public static final int[] getCode(ULocale locale){
        return findCodeFromLocale(locale);
    }
    /**
     * Gets the script codes associated with the given locale or ISO 15924 abbreviation or name.
     * Returns MALAYALAM given "Malayalam" OR "Mlym".
     * Returns LATIN given "en" OR "en_US"
     *
     * <p>Note: To search by short or long script alias only, use
     * {@link #getCodeFromName(String)} instead.
     * That does a fast lookup with no access of the locale data.
     *
     * @param nameOrAbbrOrLocale name of the script or ISO 15924 code or locale
     * @return The script codes array. null if the code cannot be found.
     */
    public static final int[] getCode(String nameOrAbbrOrLocale) {
        // Fast path: no locale separators, so try a direct property-alias lookup
        // before falling back to the (slower) locale-data lookup.
        boolean triedCode = false;
        if (nameOrAbbrOrLocale.indexOf('_') < 0 && nameOrAbbrOrLocale.indexOf('-') < 0) {
            int propNum = UCharacter.getPropertyValueEnumNoThrow(UProperty.SCRIPT, nameOrAbbrOrLocale);
            if (propNum != UProperty.UNDEFINED) {
                return new int[] {propNum};
            }
            triedCode = true;
        }
        int[] scripts = findCodeFromLocale(new ULocale(nameOrAbbrOrLocale));
        if (scripts != null) {
            return scripts;
        }
        // Retry the alias lookup only if it was skipped above.
        if (!triedCode) {
            int propNum = UCharacter.getPropertyValueEnumNoThrow(UProperty.SCRIPT, nameOrAbbrOrLocale);
            if (propNum != UProperty.UNDEFINED) {
                return new int[] {propNum};
            }
        }
        return null;
    }

    /**
     * Returns the script code associated with the given Unicode script property alias
     * (name or abbreviation).
     * Short aliases are ISO 15924 script codes.
     * Returns MALAYALAM given "Malayalam" OR "Mlym".
     *
     * @param nameOrAbbr name of the script or ISO 15924 code
     * @return The script code value, or INVALID_CODE if the code cannot be found.
     */
    public static final int getCodeFromName(String nameOrAbbr) {
        int propNum = UCharacter.getPropertyValueEnumNoThrow(UProperty.SCRIPT, nameOrAbbr);
        return propNum == UProperty.UNDEFINED ? INVALID_CODE : propNum;
    }

    /**
     * Gets the script code associated with the given codepoint.
* Returns UScript.MALAYAM given 0x0D02 * @param codepoint UChar32 codepoint * @return The script code */ public static final int getScript(int codepoint){ if (codepoint >= UCharacter.MIN_VALUE & codepoint <= UCharacter.MAX_VALUE) { int scriptX=UCharacterProperty.INSTANCE.getAdditional(codepoint, 0)&UCharacterProperty.SCRIPT_X_MASK; if(scriptX<UCharacterProperty.SCRIPT_X_WITH_COMMON) { return scriptX; } else if(scriptX<UCharacterProperty.SCRIPT_X_WITH_INHERITED) { return UScript.COMMON; } else if(scriptX<UCharacterProperty.SCRIPT_X_WITH_OTHER) { return UScript.INHERITED; } else { return UCharacterProperty.INSTANCE.m_scriptExtensions_[scriptX&UCharacterProperty.SCRIPT_MASK_]; } }else{ throw new IllegalArgumentException(Integer.toString(codepoint)); } } /** * Do the Script_Extensions of code point c contain script sc? * If c does not have explicit Script_Extensions, then this tests whether * c has the Script property value sc. * * <p>Some characters are commonly used in multiple scripts. * For more information, see UAX #24: http://www.unicode.org/reports/tr24/. * * @param c code point * @param sc script code * @return true if sc is in Script_Extensions(c) */ public static final boolean hasScript(int c, int sc) { int scriptX=UCharacterProperty.INSTANCE.getAdditional(c, 0)&UCharacterProperty.SCRIPT_X_MASK; if(scriptX<UCharacterProperty.SCRIPT_X_WITH_COMMON) { return sc==scriptX; } char[] scriptExtensions=UCharacterProperty.INSTANCE.m_scriptExtensions_; int scx=scriptX&UCharacterProperty.SCRIPT_MASK_; // index into scriptExtensions if(scriptX>=UCharacterProperty.SCRIPT_X_WITH_OTHER) { scx=scriptExtensions[scx+1]; } if(sc>0x7fff) { // Guard against bogus input that would // make us go past the Script_Extensions terminator. return false; } while(sc>scriptExtensions[scx]) { ++scx; } return sc==(scriptExtensions[scx]&0x7fff); } /** * Sets code point c's Script_Extensions as script code integers into the output BitSet. 
* <ul> * <li>If c does have Script_Extensions, then the return value is * the negative number of Script_Extensions codes (= -set.cardinality()); * in this case, the Script property value * (normally Common or Inherited) is not included in the set. * <li>If c does not have Script_Extensions, then the one Script code is put into the set * and also returned. * <li>If c is not a valid code point, then the one {@link #UNKNOWN} code is put into the set * and also returned. * </ul> * In other words, if the return value is non-negative, it is c's single Script code * and the set contains exactly this Script code. * If the return value is -n, then the set contains c's n&gt;=2 Script_Extensions script codes. * * <p>Some characters are commonly used in multiple scripts. * For more information, see UAX #24: http://www.unicode.org/reports/tr24/. * * @param c code point * @param set set of script code integers; will be cleared, then bits are set * corresponding to c's Script_Extensions * @return negative number of script codes in c's Script_Extensions, * or the non-negative single Script value */ public static final int getScriptExtensions(int c, BitSet set) { set.clear(); int scriptX=UCharacterProperty.INSTANCE.getAdditional(c, 0)&UCharacterProperty.SCRIPT_X_MASK; if(scriptX<UCharacterProperty.SCRIPT_X_WITH_COMMON) { set.set(scriptX); return scriptX; } char[] scriptExtensions=UCharacterProperty.INSTANCE.m_scriptExtensions_; int scx=scriptX&UCharacterProperty.SCRIPT_MASK_; // index into scriptExtensions if(scriptX>=UCharacterProperty.SCRIPT_X_WITH_OTHER) { scx=scriptExtensions[scx+1]; } int length=0; int sx; do { sx=scriptExtensions[scx++]; set.set(sx&0x7fff); ++length; } while(sx<0x8000); // length==set.cardinality() return -length; } /** * Returns the long Unicode script name, if there is one. * Otherwise returns the 4-letter ISO 15924 script code. * Returns "Malayam" given MALAYALAM. 
     *
     * @param scriptCode int script code
     * @return long script name as given in PropertyValueAliases.txt, or the 4-letter code
     * @throws IllegalArgumentException if the script code is not valid
     */
    public static final String getName(int scriptCode){
        return UCharacter.getPropertyValueName(UProperty.SCRIPT,
                scriptCode,
                UProperty.NameChoice.LONG);
    }

    /**
     * Returns the 4-letter ISO 15924 script code,
     * which is the same as the short Unicode script name if Unicode has names for the script.
     * Returns "Mlym" given MALAYALAM.
     *
     * @param scriptCode int script code
     * @return short script name (4-letter code)
     * @throws IllegalArgumentException if the script code is not valid
     */
    public static final String getShortName(int scriptCode){
        return UCharacter.getPropertyValueName(UProperty.SCRIPT,
                scriptCode,
                UProperty.NameChoice.SHORT);
    }

    /**
     * Script metadata (script properties).
     * See http://unicode.org/cldr/trac/browser/trunk/common/properties/scriptMetadata.txt
     */
    private static final class ScriptMetadata {
        // 0 = NOT_ENCODED, no sample character, default false script properties.
        // Bits 20..0: sample character
        // Bits 23..21: usage
        private static final int UNKNOWN = 1 << 21;
        private static final int EXCLUSION = 2 << 21;
        private static final int LIMITED_USE = 3 << 21;
        private static final int ASPIRATIONAL = 4 << 21;
        private static final int RECOMMENDED = 5 << 21;
        // Bits 31..24: Single-bit flags
        private static final int RTL = 1 << 24;
        private static final int LB_LETTERS = 1 << 25;
        private static final int CASED = 1 << 26;

        // Entry i holds the packed properties for script code i (see getScriptProps);
        // a 0 entry means the script is not encoded / has no metadata.
        private static final int SCRIPT_PROPS[] = {
            // Begin copy-paste output from
            // tools/trunk/unicode/py/parsescriptmetadata.py
            // or from icu/trunk/source/common/uscript_props.cpp
            0x0040 | RECOMMENDED, // Zyyy
            0x0308 | RECOMMENDED, // Zinh
            0x0628 | RECOMMENDED | RTL, // Arab
            0x0531 | RECOMMENDED | CASED, // Armn
            0x0995 | RECOMMENDED, // Beng
            0x3105 | RECOMMENDED | LB_LETTERS, // Bopo
            0x13C4 | LIMITED_USE | CASED, // Cher
            0x03E2 | EXCLUSION | CASED, // Copt
            0x042F | RECOMMENDED | CASED, // Cyrl
            0x10414 | EXCLUSION | CASED, // Dsrt
            0x0905 | RECOMMENDED, // Deva
            0x12A0 | RECOMMENDED, // Ethi
            0x10D3 | RECOMMENDED, // Geor
            0x10330 | EXCLUSION, // Goth
            0x03A9 | RECOMMENDED | CASED, // Grek
            0x0A95 | RECOMMENDED, // Gujr
            0x0A15 | RECOMMENDED, // Guru
            0x5B57 | RECOMMENDED | LB_LETTERS, // Hani
            0xAC00 | RECOMMENDED, // Hang
            0x05D0 | RECOMMENDED | RTL, // Hebr
            0x304B | RECOMMENDED | LB_LETTERS, // Hira
            0x0C95 | RECOMMENDED, // Knda
            0x30AB | RECOMMENDED | LB_LETTERS, // Kana
            0x1780 | RECOMMENDED | LB_LETTERS, // Khmr
            0x0EA5 | RECOMMENDED | LB_LETTERS, // Laoo
            0x004C | RECOMMENDED | CASED, // Latn
            0x0D15 | RECOMMENDED, // Mlym
            0x1826 | ASPIRATIONAL, // Mong
            0x1000 | RECOMMENDED | LB_LETTERS, // Mymr
            0x168F | EXCLUSION, // Ogam
            0x10300 | EXCLUSION, // Ital
            0x0B15 | RECOMMENDED, // Orya
            0x16A0 | EXCLUSION, // Runr
            0x0D85 | RECOMMENDED, // Sinh
            0x0710 | LIMITED_USE | RTL, // Syrc
            0x0B95 | RECOMMENDED, // Taml
            0x0C15 | RECOMMENDED, // Telu
            0x078C | RECOMMENDED | RTL, // Thaa
            0x0E17 | RECOMMENDED | LB_LETTERS, // Thai
            0x0F40 | RECOMMENDED, // Tibt
            0x14C0 | ASPIRATIONAL, // Cans
            0xA288 | ASPIRATIONAL | LB_LETTERS, // Yiii
            0x1703 | EXCLUSION, // Tglg
            0x1723 | EXCLUSION, // Hano
            0x1743 | EXCLUSION, // Buhd
            0x1763 | EXCLUSION, // Tagb
            0x280E | UNKNOWN, // Brai
            0x10800 | EXCLUSION | RTL, // Cprt
            0x1900 | LIMITED_USE, // Limb
            0x10000 | EXCLUSION, // Linb
            0x10480 | EXCLUSION, // Osma
            0x10450 | EXCLUSION, // Shaw
            0x1950 | LIMITED_USE | LB_LETTERS, // Tale
            0x10380 | EXCLUSION, // Ugar
            0,
            0x1A00 | EXCLUSION, // Bugi
            0x2C00 | EXCLUSION | CASED, // Glag
            0x10A00 | EXCLUSION | RTL, // Khar
            0xA800 | LIMITED_USE, // Sylo
            0x1980 | LIMITED_USE | LB_LETTERS, // Talu
            0x2D30 | ASPIRATIONAL, // Tfng
            0x103A0 | EXCLUSION, // Xpeo
            0x1B05 | LIMITED_USE, // Bali
            0x1BC0 | LIMITED_USE, // Batk
            0,
            0x11005 | EXCLUSION, // Brah
            0xAA00 | LIMITED_USE, // Cham
            0, 0, 0, 0,
            0x13153 | EXCLUSION, // Egyp
            0,
            0x5B57 | RECOMMENDED | LB_LETTERS, // Hans
            0x5B57 | RECOMMENDED | LB_LETTERS, // Hant
            0x16B1C | EXCLUSION, // Hmng
            0x10CA1 | EXCLUSION | RTL | CASED, // Hung
            0,
            0xA984 | LIMITED_USE, // Java
            0xA90A | LIMITED_USE, // Kali
            0, 0,
            0x1C00 | LIMITED_USE, // Lepc
            0x10647 | EXCLUSION, // Lina
            0x0840 | LIMITED_USE | RTL, // Mand
            0,
            0x10980 | EXCLUSION | RTL, // Mero
            0x07CA | LIMITED_USE | RTL, // Nkoo
            0x10C00 | EXCLUSION | RTL, // Orkh
            0x1036B | EXCLUSION, // Perm
            0xA840 | EXCLUSION, // Phag
            0x10900 | EXCLUSION | RTL, // Phnx
            0x16F00 | ASPIRATIONAL, // Plrd
            0, 0, 0, 0, 0, 0,
            0xA549 | LIMITED_USE, // Vaii
            0,
            0x12000 | EXCLUSION, // Xsux
            0,
            0xFDD0 | UNKNOWN, // Zzzz
            0x102A0 | EXCLUSION, // Cari
            0x304B | RECOMMENDED | LB_LETTERS, // Jpan
            0x1A20 | LIMITED_USE | LB_LETTERS, // Lana
            0x10280 | EXCLUSION, // Lyci
            0x10920 | EXCLUSION | RTL, // Lydi
            0x1C5A | LIMITED_USE, // Olck
            0xA930 | EXCLUSION, // Rjng
            0xA882 | LIMITED_USE, // Saur
            0x1D850 | EXCLUSION, // Sgnw
            0x1B83 | LIMITED_USE, // Sund
            0,
            0xABC0 | LIMITED_USE, // Mtei
            0x10840 | EXCLUSION | RTL, // Armi
            0x10B00 | EXCLUSION | RTL, // Avst
            0x11103 | LIMITED_USE, // Cakm
            0xAC00 | RECOMMENDED, // Kore
            0x11083 | EXCLUSION, // Kthi
            0x10AD8 | EXCLUSION | RTL, // Mani
            0x10B60 | EXCLUSION | RTL, // Phli
            0x10B8F | EXCLUSION | RTL, // Phlp
            0,
            0x10B40 | EXCLUSION | RTL, // Prti
            0x0800 | EXCLUSION | RTL, // Samr
            0xAA80 | LIMITED_USE | LB_LETTERS, // Tavt
            0, 0,
            0xA6A0 | LIMITED_USE, // Bamu
            0xA4D0 | LIMITED_USE, // Lisu
            0,
            0x10A60 | EXCLUSION | RTL, // Sarb
            0x16AE6 | EXCLUSION, // Bass
            0x1BC20 | EXCLUSION, // Dupl
            0x10500 | EXCLUSION, // Elba
            0x11315 | EXCLUSION, // Gran
            0, 0,
            0x1E802 | EXCLUSION | RTL, // Mend
            0x109A0 | EXCLUSION | RTL, // Merc
            0x10A95 | EXCLUSION | RTL, // Narb
            0x10896 | EXCLUSION | RTL, // Nbat
            0x10873 | EXCLUSION | RTL, // Palm
            0x112BE | EXCLUSION, // Sind
            0x118B4 | EXCLUSION | CASED, // Wara
            0, 0,
            0x16A4F | EXCLUSION, // Mroo
            0,
            0x11183 | EXCLUSION, // Shrd
            0x110D0 | EXCLUSION, // Sora
            0x11680 | EXCLUSION, // Takr
            0x18229 | EXCLUSION | LB_LETTERS, // Tang
            0,
            0x14400 | EXCLUSION, // Hluw
            0x11208 | EXCLUSION, // Khoj
            0x11484 | EXCLUSION, // Tirh
            0x10537 | EXCLUSION, // Aghb
            0x11152 | EXCLUSION, // Mahj
            0x11717 | EXCLUSION | LB_LETTERS, // Ahom
            0x108F4 | EXCLUSION | RTL, // Hatr
            0x1160E | EXCLUSION, // Modi
            0x1128F | EXCLUSION, // Mult
            0x11AC0 | EXCLUSION, // Pauc
            0x1158E | EXCLUSION, // Sidd
            0x1E909 | LIMITED_USE | RTL | CASED, // Adlm
            0x11C0E | EXCLUSION, // Bhks
            0x11C72 | EXCLUSION, // Marc
            0x11412 | LIMITED_USE, // Newa
            0x104B5 | LIMITED_USE | CASED, // Osge
            0x5B57 | RECOMMENDED | LB_LETTERS, // Hanb
            0x1112 | RECOMMENDED, // Jamo
            0,
            // End copy-paste from parsescriptmetadata.py
        };

        // Returns the packed properties for the script, or 0 for
        // out-of-range / unencoded script codes.
        private static final int getScriptProps(int script) {
            if (0 <= script && script < SCRIPT_PROPS.length) {
                return SCRIPT_PROPS[script];
            } else {
                return 0;
            }
        }
    }

    /**
     * Script usage constants.
     * See UAX #31 Unicode Identifier and Pattern Syntax.
     * http://www.unicode.org/reports/tr31/#Table_Candidate_Characters_for_Exclusion_from_Identifiers
     */
    public enum ScriptUsage {
        /**
         * Not encoded in Unicode.
         */
        NOT_ENCODED,
        /**
         * Unknown script usage.
         */
        UNKNOWN,
        /**
         * Candidate for Exclusion from Identifiers.
         */
        EXCLUDED,
        /**
         * Limited Use script.
         */
        LIMITED_USE,
        /**
         * Aspirational Use script.
         */
        ASPIRATIONAL,
        /**
         * Recommended script.
         */
        RECOMMENDED
    }
    // Cached once: ScriptUsage.values() allocates a fresh array on every call.
    private static final ScriptUsage[] usageValues = ScriptUsage.values();

    /**
     * Returns the script sample character string.
     * This string normally consists of one code point but might be longer.
     * The string is empty if the script is not encoded.
     *
     * @param script script code
     * @return the sample character string
     */
    public static final String getSampleString(int script) {
        // Low 21 bits of the packed metadata hold the sample code point.
        int sampleChar = ScriptMetadata.getScriptProps(script) & 0x1fffff;
        if(sampleChar != 0) {
            return new StringBuilder().appendCodePoint(sampleChar).toString();
        }
        return "";
    }

    /**
     * Returns the script usage according to UAX #31 Unicode Identifier and Pattern Syntax.
     * Returns {@link ScriptUsage#NOT_ENCODED} if the script is not encoded in Unicode.
     *
     * @param script script code
     * @return script usage
     * @see ScriptUsage
     */
    public static final ScriptUsage getUsage(int script) {
        // Bits 23..21 of the packed metadata encode the usage ordinal.
        return usageValues[(ScriptMetadata.getScriptProps(script) >> 21) & 7];
    }

    /**
     * Returns true if the script is written right-to-left.
     * For example, Arab and Hebr.
     *
     * @param script script code
     * @return true if the script is right-to-left
     */
    public static final boolean isRightToLeft(int script) {
        return (ScriptMetadata.getScriptProps(script) & ScriptMetadata.RTL) != 0;
    }

    /**
     * Returns true if the script allows line breaks between letters (excluding hyphenation).
     * Such a script typically requires dictionary-based line breaking.
     * For example, Hani and Thai.
     *
     * @param script script code
     * @return true if the script allows line breaks between letters
     */
    public static final boolean breaksBetweenLetters(int script) {
        return (ScriptMetadata.getScriptProps(script) & ScriptMetadata.LB_LETTERS) != 0;
    }

    /**
     * Returns true if in modern (or most recent) usage of the script case distinctions are customary.
     * For example, Latn and Cyrl.
     *
     * @param script script code
     * @return true if the script is cased
     */
    public static final boolean isCased(int script) {
        return (ScriptMetadata.getScriptProps(script) & ScriptMetadata.CASED) != 0;
    }

    ///CLOVER:OFF
    /**
     * Private Constructor. Never default construct
     */
    private UScript(){}
    ///CLOVER:ON
}
google/j2objc
jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/lang/UScript.java
Java
apache-2.0
44,206
# !!!!!!! DO NOT EDIT THIS FILE !!!!!!!
# This file is machine-generated by lib/unicore/mktables from the Unicode
# database, Version 6.1.0.  Any changes made here will be lost!

# !!!!!!! INTERNAL PERL USE ONLY !!!!!!!
# This file is for internal use by core Perl only.  The format and even the
# name or existence of this file are subject to change without notice.  Don't
# use it directly.

# Returns the code point range(s) of this Unicode block, one start/end pair
# of hex code points per line.  FE70..FEFF is presumably the Arabic
# Presentation Forms-B block (per the Blk/ArabicP2 file name) — the data
# itself comes from the Unicode character database and must not be edited.
return <<'END';
FE70	FEFF
END
efortuna/AndroidSDKClone
ndk_experimental/prebuilt/linux-x86_64/lib/perl5/5.16.2/unicore/lib/Blk/ArabicP2.pl
Perl
apache-2.0
433
"""Remove JobStep/JobPhase repository_id Revision ID: 26f665189ca0 Revises: 524b3c27203b Create Date: 2014-01-05 22:01:02.648719 """ # revision identifiers, used by Alembic. revision = '26f665189ca0' down_revision = '524b3c27203b' from alembic import op def upgrade(): op.drop_column('jobstep', 'repository_id') op.drop_column('jobphase', 'repository_id') def downgrade(): raise NotImplementedError
alex/changes
migrations/versions/26f665189ca0_remove_jobstep_jobph.py
Python
apache-2.0
419
/*========================================================================= * * Copyright Insight Software Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *=========================================================================*/ #ifndef __itkFastMarchingExtensionImageFilterBase_h #define __itkFastMarchingExtensionImageFilterBase_h #include "itkFastMarchingImageFilterBase.h" #include "itkVectorContainer.h" namespace itk { /** \class FastMarchingExtensionImageFilterBase * \brief Extend auxiliary variables smoothly using Fast Marching. * * Fast marching can be used to extend auxiliary variables smoothly * from the zero level set. Starting from an initial position on the * front, this class simultaneously calculate the signed distance and * extend a set of auxiliary values. * * This class is templated over the level set image type, the auxiliary * variable type and the number of auxiliary variables to extended. The initial * front is specified by two containers: one containing the known points * and one containing the trial points. The auxiliary variables on the front * are represented by two auxiliary variable containers: one containing * the value of the variables at the know points and on containing the * value of the variables at the trail points. * * Implemenation of this class is based on Chapter 11 of * "Level Set Methods and Fast Marching Methods", J.A. Sethian, * Cambridge Press, Second edition, 1999. 
 *
 * \sa FastMarchingImageFilter
 * \sa LevelSetTypeDefault
 * \sa AuxVarTypeDefault
 *
 * \ingroup LevelSetSegmentation
 * \ingroup ITKFastMarching
*/
template< class TInput, class TOutput,
          typename TAuxValue,
          unsigned int VAuxDimension >
class ITK_EXPORT FastMarchingExtensionImageFilterBase:
  public FastMarchingImageFilterBase< TInput, TOutput >
{
public:
  /** Standard class typedefs. */
  typedef FastMarchingExtensionImageFilterBase            Self;
  typedef FastMarchingImageFilterBase< TInput, TOutput >  Superclass;
  typedef SmartPointer< Self >                            Pointer;
  typedef SmartPointer< const Self >                      ConstPointer;
  typedef typename Superclass::Traits                     Traits;

  /** Method for creation through the object factory. */
  itkNewMacro(Self);

  /** Run-time type information (and related methods). */
  itkTypeMacro(FastMarchingExtensionImageFilterBase,
               FastMarchingImageFilterBase);

  /** The dimension of the level set. */
  itkStaticConstMacro(ImageDimension, unsigned int,
                      Superclass::ImageDimension );

  /** Number of auxiliary variables to be extended. */
  itkStaticConstMacro(AuxDimension, unsigned int, VAuxDimension);

  /** AuxVarType typedef support.
   * Auxiliary values are carried per front point as a fixed-size Vector. */
  typedef TAuxValue                             AuxValueType;
  typedef Vector< AuxValueType, AuxDimension >  AuxValueVectorType;

  typedef VectorContainer< IdentifierType, AuxValueVectorType > AuxValueContainerType;

  typedef typename AuxValueContainerType::Pointer       AuxValueContainerPointer;
  typedef typename AuxValueContainerType::ConstIterator AuxValueContainerConstIterator;

  typedef Image< AuxValueType, ImageDimension > AuxImageType;
  typedef typename AuxImageType::Pointer        AuxImagePointer;

  /** Index typedef support. */
  typedef typename Superclass::NodeType     NodeType;
  typedef typename Superclass::NodePairType NodePairType;

//  typedef typename Superclass::NodeContainerType        NodeContainerType;
//  typedef typename Superclass::NodeContainerPointer     NodeContainerPointer;
//  typedef typename Superclass::NodeContainerConstIterator NodeContainerConstIterator;

  typedef typename Superclass::NodePairContainerType NodePairContainerType;
  typedef typename Superclass::NodePairContainerPointer
    NodePairContainerPointer;
  typedef typename Superclass::NodePairContainerConstIterator
    NodePairContainerConstIterator;

  typedef typename Superclass::OutputImageType OutputImageType;
  typedef typename Superclass::OutputPixelType OutputPixelType;
  typedef typename Superclass::InternalNodeStructure InternalNodeStructure;

  /** Get one of the extended auxiliary variable image.
   * idx selects which of the VAuxDimension auxiliary variables is returned. */
  AuxImageType * GetAuxiliaryImage( const unsigned int& idx );

  /** Set the container auxiliary values at the initial alive points. */
  itkSetObjectMacro(AuxiliaryAliveValues, AuxValueContainerType );
  itkGetObjectMacro(AuxiliaryAliveValues, AuxValueContainerType );

  /** Set the container of auxiliary values at the initial trial points. */
  itkSetObjectMacro(AuxiliaryTrialValues, AuxValueContainerType );
  itkGetObjectMacro(AuxiliaryTrialValues, AuxValueContainerType );

#ifdef ITK_USE_CONCEPT_CHECKING
  /** Begin concept checking */
  itkConceptMacro( AuxValueHasNumericTraitsCheck,
                   ( Concept::HasNumericTraits< TAuxValue > ) );
  /** End concept checking */
#endif

protected:
  FastMarchingExtensionImageFilterBase();
  ~FastMarchingExtensionImageFilterBase(){}

  void PrintSelf(std::ostream & os, Indent indent) const;

  virtual void InitializeOutput(OutputImageType *);

  virtual void UpdateValue( OutputImageType* oImage, const NodeType& iValue );

  /** Generate the output image meta information */
  virtual void GenerateOutputInformation();

  virtual void EnlargeOutputRequestedRegion(DataObject *output);

  /** Auxiliary values at the initial alive / trial front points. */
  AuxValueContainerPointer m_AuxiliaryAliveValues;
  AuxValueContainerPointer m_AuxiliaryTrialValues;

private:
  FastMarchingExtensionImageFilterBase(const Self &); //purposely not implemented
  void operator=(const Self &);                       //purposely not implemented
};
} // namespace itk

#include "itkFastMarchingExtensionImageFilterBase.hxx"

#endif
paulnovo/ITK
Modules/Filtering/FastMarching/include/itkFastMarchingExtensionImageFilterBase.h
C
apache-2.0
6,300
#ifndef _GEN_OUTPUT_
#define _GEN_OUTPUT_

#ifndef _QUERY_OUTPUT_
#include "interface/query_output.h"
#endif

#ifndef _TYPES_
#include "common/types.h"
#endif

#ifndef _CONSTANTS_
#include "common/constants.h"
#endif

#include <ostream>

using Interface::QueryOutput;
using std::ostream;

namespace Client {

	/**
	 * A generic QueryOutput implementation that serializes result tuples
	 * to a caller-supplied std::ostream.
	 *
	 * Usage protocol (inherited from QueryOutput): the schema is declared
	 * first via setNumAttrs() / setAttrInfo(), then start() is called once,
	 * followed by one putNext() per output tuple, and finally end().
	 */
	class GenOutput : public QueryOutput {
	private:
		/// Destination stream that receives the formatted tuples.
		ostream &out;

		/// Number of (data) attributes in the output schema
		int numAttrs;

		/// Types of data attributes
		Type attrTypes [MAX_ATTRS];

		/// Length of attributes
		int attrLen [MAX_ATTRS];

		/// Offsets @ which these are present
		int offsets [MAX_ATTRS];

		/// length of tuples we are expecting
		int tupleLen;

		/// Offset of the timestamp
		int tstampOffset;

		/// Offset of the sign
		int signOffset;

	public:
		/// Construct an output sink writing to the given stream.
		/// The stream must outlive this object (held by reference).
		GenOutput (ostream &out);
		~GenOutput ();

		/// Declare how many data attributes each tuple carries.
		/// Returns 0 on success, nonzero error code otherwise.
		int setNumAttrs (unsigned int numAttrs);

		/// Describe attribute @a attrPos (type and byte length).
		/// Returns 0 on success, nonzero error code otherwise.
		int setAttrInfo (unsigned int attrPos, Type attrType,
						 unsigned attrLen);

		/// Called once before the first tuple; finalizes offsets/lengths.
		int start ();

		/// Emit one tuple of @a len bytes laid out per the declared schema.
		int putNext (const char *tuple, unsigned int len);

		/// Called once after the last tuple.
		int end ();
	};
}

#endif
XingXie/Multilevel-Data-Stream-Management-System
stream-0.6.3/gen_client/gen_output.h
C
apache-2.0
1,087
# -*- coding: utf-8 -*-

#    Copyright 2013 Mirantis, Inc.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from nailgun.api.v1.handlers.base import BaseHandler
from nailgun.api.v1.handlers.base import content
from nailgun.fake_keystone import generate_token
from nailgun.fake_keystone import validate_password_credentials
from nailgun.fake_keystone import validate_token
from nailgun.settings import settings


class TokensHandler(BaseHandler):
    """Fake Keystone v2.0 token endpoint (POST /tokens).

    Mimics just enough of Keystone's token issuance for Nailgun
    development/testing: it accepts either password credentials or an
    existing token, and always answers with a canned "admin" identity.
    """

    @content
    def POST(self):
        """Issue a fake token.

        :returns: a static Keystone v2.0 "access" document containing a
            freshly generated token id and hard-coded tenant/user data.
        :http: * 400 (malformed body: no passwordCredentials/token auth)
               * 401 (credentials or token failed validation)
        """
        data = self.checked_data()
        try:
            # The auth payload must carry exactly one of the two supported
            # credential shapes; anything else is a client error.
            if 'passwordCredentials' in data['auth']:
                if not validate_password_credentials(
                        **data['auth']['passwordCredentials']):
                    raise self.http(401)
            elif 'token' in data['auth']:
                if not validate_token(data['auth']['token']['id']):
                    raise self.http(401)
            else:
                raise self.http(400)
        except (KeyError, TypeError):
            # Missing 'auth' key or a non-dict payload — treat as bad request.
            raise self.http(400)

        token = generate_token()

        # Static response modeled on a real Keystone v2.0 reply; only the
        # token id varies between calls.
        return {
            "access": {
                "token": {
                    "issued_at": "2012-07-10T13:37:58.708765",
                    "expires": "2012-07-10T14:37:58Z",
                    "id": token,
                    "tenant": {
                        "description": None,
                        "enabled": True,
                        "id": "12345",
                        "name": "admin"
                    }
                },
                "serviceCatalog": [],
                "user": {
                    "username": "admin",
                    "roles_links": [],
                    "id": "9876",
                    "roles": [{"name": "admin"}],
                    "name": "admin"
                },
                "metadata": {
                    "is_admin": 0,
                    "roles": ["4567"]
                }
            }
        }


class VersionHandler(BaseHandler):
    """Fake Keystone version discovery endpoint (GET /)."""

    @content
    def GET(self):
        """Describe the emulated Keystone v2.0 API.

        :returns: a Keystone-style version document whose "self" link is
            built from the configured listen address/port.
        """
        keystone_href = 'http://{ip_addr}:{port}/keystone/v2.0/'.format(
            ip_addr=settings.LISTEN_ADDRESS, port=settings.LISTEN_PORT)
        return {
            'version': {
                'id': 'v2.0',
                'status': 'stable',
                'updated': '2014-04-17T00:00:00Z',
                'links': [
                    {
                        'rel': 'self',
                        'href': keystone_href,
                    },
                    {
                        'rel': 'describedby',
                        'type': 'text/html',
                        'href': 'http://docs.openstack.org/',
                    },
                ],
                'media-types': [
                    {
                        'base': 'application/json',
                        'type': 'application/vnd.openstack.identity-v2.0+json',
                    },
                    {
                        'base': 'application/xml',
                        'type': 'application/vnd.openstack.identity-v2.0+xml',
                    },
                ],
            },
        }
nebril/fuel-web
nailgun/nailgun/fake_keystone/handlers.py
Python
apache-2.0
3,603
package org.batfish.common.topology.broadcast;

import com.google.common.annotations.VisibleForTesting;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;

/**
 * Sends all frames received out all interfaces. Used to reduce quadratic complexity to linear for
 * physical-physical Ethernet links, and to prevent loops.
 *
 * <p>Only connects to {@link PhysicalInterface}.
 */
public final class EthernetHub extends Node<EthernetTag> {
  public EthernetHub(String id) {
    _id = id;
    _attachedInterfaces = new HashMap<>();
  }

  /** Returns the unique identifier of this hub. */
  public @Nonnull String getId() {
    return _id;
  }

  /**
   * Attaches {@code iface} to this hub via {@code edge}. Each interface may be attached at most
   * once; re-attaching is a programming error (caught by the assertion).
   */
  public void addAttachedInterface(PhysicalInterface iface, Edge<EthernetTag, EthernetTag> edge) {
    Edge<EthernetTag, EthernetTag> oldEdge = _attachedInterfaces.putIfAbsent(iface, edge);
    assert oldEdge == null;
  }

  /**
   * Floods a frame with the given {@code tag} out every attached interface, accumulating reached
   * L3 interfaces into {@code domain}. The {@code visited} set records (node, tag) pairs already
   * processed so that broadcast loops terminate; an edge may also drop or rewrite the tag.
   */
  public void broadcast(EthernetTag tag, Set<L3Interface> domain, Set<NodeAndData<?, ?>> visited) {
    // Already flooded this tag through this hub — stop to prevent loops.
    if (!visited.add(new NodeAndData<>(this, tag))) {
      return;
    }
    _attachedInterfaces.forEach(
        (iface, edge) ->
            edge.traverse(tag).ifPresent(newTag -> iface.receive(newTag, domain, visited)));
  }

  // Internal implementation details.

  @VisibleForTesting
  @Nonnull
  Map<PhysicalInterface, Edge<EthernetTag, EthernetTag>> getAttachedInterfacesForTesting() {
    return _attachedInterfaces;
  }

  // Equality is by id only; attached interfaces are mutable wiring state.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    } else if (!(o instanceof EthernetHub)) {
      return false;
    }
    EthernetHub that = (EthernetHub) o;
    return _id.equals(that._id);
  }

  @Override
  public int hashCode() {
    // Salt with the class hash so hubs don't collide with other id-keyed node types.
    return 31 * EthernetHub.class.hashCode() + _id.hashCode();
  }

  private final @Nonnull String _id;
  private final @Nonnull Map<PhysicalInterface, Edge<EthernetTag, EthernetTag>> _attachedInterfaces;
}
arifogel/batfish
projects/batfish-common-protocol/src/main/java/org/batfish/common/topology/broadcast/EthernetHub.java
Java
apache-2.0
1,877
/* * Copyright 2015 Goldman Sachs. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gs.collections.impl.collection.mutable; import java.util.Collection; import com.gs.collections.api.block.function.Function; import com.gs.collections.api.block.function.Function0; import com.gs.collections.api.block.function.Function2; import com.gs.collections.api.block.function.Function3; import com.gs.collections.api.block.predicate.Predicate; import com.gs.collections.api.block.predicate.Predicate2; import com.gs.collections.api.block.procedure.Procedure2; import com.gs.collections.api.collection.MutableCollection; import com.gs.collections.api.list.MutableList; import com.gs.collections.api.map.MutableMap; import com.gs.collections.api.tuple.Twin; import com.gs.collections.impl.collection.AbstractSynchronizedRichIterable; import net.jcip.annotations.ThreadSafe; @ThreadSafe public abstract class AbstractSynchronizedMutableCollection<T> extends AbstractSynchronizedRichIterable<T> implements MutableCollection<T> { protected AbstractSynchronizedMutableCollection(MutableCollection<T> delegate) { this(delegate, null); } protected AbstractSynchronizedMutableCollection(MutableCollection<T> delegate, Object lock) { super(delegate, lock); } @Override protected MutableCollection<T> getDelegate() { return (MutableCollection<T>) super.getDelegate(); } public boolean add(T o) { synchronized (this.getLock()) { return this.getDelegate().add(o); } } public boolean remove(Object o) { synchronized 
(this.getLock()) { return this.getDelegate().remove(o); } } public boolean addAll(Collection<? extends T> coll) { synchronized (this.getLock()) { return this.getDelegate().addAll(coll); } } public boolean removeAll(Collection<?> coll) { synchronized (this.getLock()) { return this.getDelegate().removeAll(coll); } } public boolean retainAll(Collection<?> coll) { synchronized (this.getLock()) { return this.getDelegate().retainAll(coll); } } public void clear() { synchronized (this.getLock()) { this.getDelegate().clear(); } } public void removeIf(Predicate<? super T> predicate) { synchronized (this.lock) { this.getDelegate().removeIf(predicate); } } public <P> void removeIfWith(Predicate2<? super T, ? super P> predicate, P parameter) { synchronized (this.lock) { this.getDelegate().removeIfWith(predicate, parameter); } } public boolean addAllIterable(Iterable<? extends T> iterable) { synchronized (this.getLock()) { return this.getDelegate().addAllIterable(iterable); } } public boolean removeAllIterable(Iterable<?> iterable) { synchronized (this.getLock()) { return this.getDelegate().removeAllIterable(iterable); } } public boolean retainAllIterable(Iterable<?> iterable) { synchronized (this.getLock()) { return this.getDelegate().retainAllIterable(iterable); } } public <P> Twin<MutableList<T>> selectAndRejectWith( Predicate2<? super T, ? super P> predicate, P parameter) { synchronized (this.lock) { return this.getDelegate().selectAndRejectWith(predicate, parameter); } } public <IV, P> IV injectIntoWith( IV injectValue, Function3<? super IV, ? super T, ? super P, ? extends IV> function, P parameter) { synchronized (this.lock) { return this.getDelegate().injectIntoWith(injectValue, function, parameter); } } public <K, V> MutableMap<K, V> aggregateInPlaceBy( Function<? super T, ? extends K> groupBy, Function0<? extends V> zeroValueFactory, Procedure2<? super V, ? 
super T> mutatingAggregator) { synchronized (this.getLock()) { return this.getDelegate().aggregateInPlaceBy(groupBy, zeroValueFactory, mutatingAggregator); } } public <K, V> MutableMap<K, V> aggregateBy( Function<? super T, ? extends K> groupBy, Function0<? extends V> zeroValueFactory, Function2<? super V, ? super T, ? extends V> nonMutatingAggregator) { synchronized (this.getLock()) { return this.getDelegate().aggregateBy(groupBy, zeroValueFactory, nonMutatingAggregator); } } }
Pelumi/gs-collections
collections/src/main/java/com/gs/collections/impl/collection/mutable/AbstractSynchronizedMutableCollection.java
Java
apache-2.0
5,324
"""Test the National Weather Service (NWS) config flow.""" from unittest.mock import patch import aiohttp from homeassistant import config_entries, setup from homeassistant.components.nws.const import DOMAIN async def test_form(hass, mock_simple_nws_config): """Test we get the form.""" hass.config.latitude = 35 hass.config.longitude = -90 await setup.async_setup_component(hass, "persistent_notification", {}) result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == "form" assert result["errors"] == {} with patch( "homeassistant.components.nws.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"api_key": "test"} ) await hass.async_block_till_done() assert result2["type"] == "create_entry" assert result2["title"] == "ABC" assert result2["data"] == { "api_key": "test", "latitude": 35, "longitude": -90, "station": "ABC", } assert len(mock_setup_entry.mock_calls) == 1 async def test_form_cannot_connect(hass, mock_simple_nws_config): """Test we handle cannot connect error.""" mock_instance = mock_simple_nws_config.return_value mock_instance.set_station.side_effect = aiohttp.ClientError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"api_key": "test"}, ) assert result2["type"] == "form" assert result2["errors"] == {"base": "cannot_connect"} async def test_form_unknown_error(hass, mock_simple_nws_config): """Test we handle unknown error.""" mock_instance = mock_simple_nws_config.return_value mock_instance.set_station.side_effect = ValueError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"api_key": "test"}, ) assert result2["type"] == 
"form" assert result2["errors"] == {"base": "unknown"} async def test_form_already_configured(hass, mock_simple_nws_config): """Test we handle duplicate entries.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.nws.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"api_key": "test"}, ) await hass.async_block_till_done() assert result2["type"] == "create_entry" assert len(mock_setup_entry.mock_calls) == 1 result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.nws.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"api_key": "test"}, ) assert result2["type"] == "abort" assert result2["reason"] == "already_configured" await hass.async_block_till_done() assert len(mock_setup_entry.mock_calls) == 0
sander76/home-assistant
tests/components/nws/test_config_flow.py
Python
apache-2.0
3,564
/*
 * Copyright (c) 2017 Nordic Semiconductor ASA
 * Copyright (c) 2017 Linaro Limited
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#ifndef __FLASH_IMG_H__
#define __FLASH_IMG_H__

#ifdef __cplusplus
extern "C" {
#endif

/**
 * State for block-buffered image writes to flash.
 *
 * Incoming data is staged in @a buf until a full block is available,
 * then written to flash in CONFIG_IMG_BLOCK_BUF_SIZE-sized chunks.
 */
struct flash_img_context {
	u8_t buf[CONFIG_IMG_BLOCK_BUF_SIZE]; /* staging buffer for one block */
	struct device *dev;                  /* flash driver used for writes */
	size_t bytes_written;                /* total bytes flushed to flash */
	u16_t buf_bytes;                     /* bytes currently staged in buf */
};

/**
 * @brief Initialize context needed for writing the image to the flash.
 *
 * @param ctx context to be initialized
 * @param dev flash driver to used while writing the image
 */
void flash_img_init(struct flash_img_context *ctx, struct device *dev);

/**
 * @brief Read number of bytes of the image written to the flash.
 *
 * @param ctx context
 *
 * @return Number of bytes written to the image flash.
 */
size_t flash_img_bytes_written(struct flash_img_context *ctx);

/**
 * @brief Process input buffers to be written to the image slot 1. flash
 * memory in single blocks. Will store remainder between calls.
 *
 * A final call to this function with flush set to true
 * will write out the remaining block buffer to flash. Since flash is written to
 * in blocks, the contents of flash from the last byte written up to the next
 * multiple of CONFIG_IMG_BLOCK_BUF_SIZE is padded with 0xff.
 *
 * @param ctx context
 * @param data data to write
 * @param len Number of bytes to write
 * @param flush when true this forces any buffered
 * data to be written to flash
 *
 * @return  0 on success, negative errno code on fail
 */
int flash_img_buffered_write(struct flash_img_context *ctx, u8_t *data,
			     size_t len, bool flush);

#ifdef __cplusplus
}
#endif

#endif	/* __FLASH_IMG_H__ */
fbsder/zephyr
include/dfu/flash_img.h
C
apache-2.0
1,650
// // SysetmHttpDebugController.h // LXFrameWork_OC // // Created by 李晓 on 15/8/10. // Copyright (c) 2015年 LXFrameWork. All rights reserved. // #import "BasePullTableViewController.h" @interface SysetmHttpDebugController : BasePullTableViewController @end
lixiao7215981/RoyalTeapot
RoyalTeapot/Pods/LXFrameWork_OC/LXFrameWork_OC/LXFrameWork_OC/LXFrameWork/Class/Controller/SysetmHttpDebugController.h
C
apache-2.0
269
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from helpers import unittest

import luigi
import luigi.worker
import luigi.date_interval
import luigi.notifications

luigi.notifications.DEBUG = True


class InstanceTest(unittest.TestCase):
    """Tests for luigi's task-instance caching: tasks with equal
    parameters must compare (and cache) as the same instance."""

    def test_simple(self):
        """Tasks are equal iff their parameter values are equal."""
        class DummyTask(luigi.Task):
            x = luigi.Parameter()

        dummy_1 = DummyTask(1)
        dummy_2 = DummyTask(2)
        dummy_1b = DummyTask(1)

        self.assertNotEqual(dummy_1, dummy_2)
        self.assertEqual(dummy_1, dummy_1b)

    def test_dep(self):
        """Repeated requires() calls yield the same cached instance,
        so state set during run() is visible to dependents."""
        test = self

        class A(luigi.Task):
            task_namespace = 'instance'  # to prevent task name conflict between tests

            def __init__(self):
                self.has_run = False
                super(A, self).__init__()

            def run(self):
                self.has_run = True

        class B(luigi.Task):
            x = luigi.Parameter()

            def requires(self):
                return A()  # This will end up referring to the same object

            def run(self):
                test.assertTrue(self.requires().has_run)

        luigi.build([B(1), B(2)], local_scheduler=True)

    def test_external_instance_cache(self):
        """An ExternalTask reusing a task_family must not be conflated
        with the real task of the same name."""
        class A(luigi.Task):
            task_namespace = 'instance'  # to prevent task name conflict between tests
            pass

        class OtherA(luigi.ExternalTask):
            task_family = "A"

        oa = OtherA()
        a = A()
        self.assertNotEqual(oa, a)

    def test_date(self):
        ''' Adding unit test because we had a problem with this '''
        class DummyTask(luigi.Task):
            x = luigi.DateIntervalParameter()

        dummy_1 = DummyTask(luigi.date_interval.Year(2012))
        dummy_2 = DummyTask(luigi.date_interval.Year(2013))
        dummy_1b = DummyTask(luigi.date_interval.Year(2012))

        self.assertNotEqual(dummy_1, dummy_2)
        self.assertEqual(dummy_1, dummy_1b)

    def test_unhashable_type(self):
        # See #857 — constructing a task with an unhashable parameter
        # value (a dict) must not blow up in the instance cache.
        class DummyTask(luigi.Task):
            x = luigi.Parameter()

        dummy = DummyTask(x={})  # NOQA
riga/luigi
test/instance_test.py
Python
apache-2.0
2,677
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.tfrecord;

import static org.apache.beam.sdk.io.Compression.AUTO;
import static org.apache.beam.sdk.io.common.FileBasedIOITHelper.appendTimestampSuffix;
import static org.apache.beam.sdk.io.common.FileBasedIOITHelper.readFileBasedIOITPipelineOptions;

import com.google.cloud.Timestamp;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import java.util.function.Function;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.io.Compression;
import org.apache.beam.sdk.io.GenerateSequence;
import org.apache.beam.sdk.io.TFRecordIO;
import org.apache.beam.sdk.io.common.FileBasedIOITHelper;
import org.apache.beam.sdk.io.common.FileBasedIOITHelper.DeleteFileFn;
import org.apache.beam.sdk.io.common.FileBasedIOTestPipelineOptions;
import org.apache.beam.sdk.io.common.HashingFn;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.testutils.NamedTestResult;
import org.apache.beam.sdk.testutils.metrics.IOITMetrics;
import org.apache.beam.sdk.testutils.metrics.MetricsReader;
import org.apache.beam.sdk.testutils.metrics.TimeMonitor;
import org.apache.beam.sdk.testutils.publishing.InfluxDBSettings;
import org.apache.beam.sdk.transforms.Combine;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.transforms.Reshuffle;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.beam.sdk.transforms.View;
import org.apache.beam.sdk.values.PCollection;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Integration tests for {@link org.apache.beam.sdk.io.TFRecordIO}.
 *
 * <p>Run this test using the command below. Pass in connection information via PipelineOptions:
 *
 * <pre>
 *  ./gradlew integrationTest -p sdks/java/io/file-based-io-tests
 *  -DintegrationTestPipelineOptions='[
 *  "--numberOfRecords=100000",
 *  "--datasetSize=12345",
 *  "--expectedHash=99f23ab",
 *  "--filenamePrefix=output_file_path",
 *  "--compressionType=GZIP"
 *  ]'
 *  --tests org.apache.beam.sdk.io.tfrecord.TFRecordIOIT
 *  -DintegrationTestRunner=direct
 * </pre>
 *
 * <p>Please see 'build_rules.gradle' file for instructions regarding running this test using Beam
 * performance testing framework.
 */
@RunWith(JUnit4.class)
public class TFRecordIOIT {
  private static final String TFRECORD_NAMESPACE = TFRecordIOIT.class.getName();

  // Metric names
  private static final String WRITE_TIME = "write_time";
  private static final String READ_TIME = "read_time";
  private static final String DATASET_SIZE = "dataset_size";
  private static final String RUN_TIME = "run_time";

  // Configuration read once from pipeline options in setup().
  private static String filenamePrefix;
  private static String bigQueryDataset;
  private static String bigQueryTable;
  private static Integer numberOfTextLines;
  private static Integer datasetSize;
  private static String expectedHash;
  private static Compression compressionType;
  private static InfluxDBSettings settings;

  @Rule public TestPipeline writePipeline = TestPipeline.create();

  @Rule public TestPipeline readPipeline = TestPipeline.create();

  /** Reads test parameters (record count, expected hash, output path, etc.) from options. */
  @BeforeClass
  public static void setup() {
    FileBasedIOTestPipelineOptions options = readFileBasedIOITPipelineOptions();
    datasetSize = options.getDatasetSize();
    expectedHash = options.getExpectedHash();
    numberOfTextLines = options.getNumberOfRecords();
    compressionType = Compression.valueOf(options.getCompressionType());
    // Timestamp suffix keeps concurrent runs from clobbering each other's files.
    filenamePrefix = appendTimestampSuffix(options.getFilenamePrefix());
    bigQueryDataset = options.getBigQueryDataset();
    bigQueryTable = options.getBigQueryTable();
    settings =
        InfluxDBSettings.builder()
            .withHost(options.getInfluxHost())
            .withDatabase(options.getInfluxDatabase())
            .withMeasurement(options.getInfluxMeasurement())
            .get();
  }

  /** Glob matching every shard written under the configured prefix. */
  private static String createFilenamePattern() {
    return filenamePrefix + "*";
  }

  // TODO: There are two pipelines due to: https://issues.apache.org/jira/browse/BEAM-3267
  /**
   * Writes {@code numberOfTextLines} deterministic records to TFRecord files, reads them back,
   * verifies the content hash matches {@code expectedHash}, deletes the files, and publishes
   * read/write timing metrics.
   */
  @Test
  public void writeThenReadAll() {
    final TFRecordIO.Write writeTransform =
        TFRecordIO.write()
            .to(filenamePrefix)
            .withCompression(compressionType)
            .withSuffix(".tfrecord");

    writePipeline
        .apply("Generate sequence", GenerateSequence.from(0).to(numberOfTextLines))
        .apply(
            "Produce text lines",
            ParDo.of(new FileBasedIOITHelper.DeterministicallyConstructTestTextLineFn()))
        .apply("Transform strings to bytes", MapElements.via(new StringToByteArray()))
        .apply(
            "Record time before writing",
            ParDo.of(new TimeMonitor<>(TFRECORD_NAMESPACE, WRITE_TIME)))
        .apply("Write content to files", writeTransform);

    final PipelineResult writeResult = writePipeline.run();
    writeResult.waitUntilFinish();

    String filenamePattern = createFilenamePattern();
    PCollection<String> consolidatedHashcode =
        readPipeline
            .apply(TFRecordIO.read().from(filenamePattern).withCompression(AUTO))
            .apply(
                "Record time after reading",
                ParDo.of(new TimeMonitor<>(TFRECORD_NAMESPACE, READ_TIME)))
            .apply("Transform bytes to strings", MapElements.via(new ByteArrayToString()))
            .apply("Calculate hashcode", Combine.globally(new HashingFn()))
            .apply(Reshuffle.viaRandomKey());
    PAssert.thatSingleton(consolidatedHashcode).isEqualTo(expectedHash);

    // The hashcode side input forces cleanup to run only after verification.
    readPipeline
        .apply(Create.of(filenamePattern))
        .apply(
            "Delete test files",
            ParDo.of(new DeleteFileFn())
                .withSideInputs(consolidatedHashcode.apply(View.asSingleton())));
    final PipelineResult readResult = readPipeline.run();
    readResult.waitUntilFinish();
    collectAndPublishMetrics(writeResult, readResult);
  }

  /** Gathers timing metrics from both pipelines and publishes them to BigQuery and InfluxDB. */
  private void collectAndPublishMetrics(
      final PipelineResult writeResults, final PipelineResult readResults) {
    final String uuid = UUID.randomUUID().toString();
    final String timestamp = Timestamp.now().toString();

    final Set<NamedTestResult> results = new HashSet<>();
    results.add(
        NamedTestResult.create(uuid, timestamp, RUN_TIME, getRunTime(writeResults, readResults)));
    results.addAll(
        MetricsReader.ofResults(writeResults, TFRECORD_NAMESPACE)
            .readAll(getWriteMetricSuppliers(uuid, timestamp)));
    results.addAll(
        MetricsReader.ofResults(readResults, TFRECORD_NAMESPACE)
            .readAll(getReadMetricSuppliers(uuid, timestamp)));

    IOITMetrics.publish(bigQueryDataset, bigQueryTable, results);
    IOITMetrics.publishToInflux(uuid, timestamp, results, settings);
  }

  private static Set<Function<MetricsReader, NamedTestResult>> getWriteMetricSuppliers(
      final String uuid, final String timestamp) {
    final Set<Function<MetricsReader, NamedTestResult>> suppliers = new HashSet<>();
    suppliers.add(getTimeMetric(uuid, timestamp, WRITE_TIME));
    suppliers.add(ignored -> NamedTestResult.create(uuid, timestamp, DATASET_SIZE, datasetSize));
    return suppliers;
  }

  private static Set<Function<MetricsReader, NamedTestResult>> getReadMetricSuppliers(
      final String uuid, final String timestamp) {
    final Set<Function<MetricsReader, NamedTestResult>> suppliers = new HashSet<>();
    suppliers.add(getTimeMetric(uuid, timestamp, READ_TIME));
    return suppliers;
  }

  /** Builds a supplier computing elapsed seconds between a metric's start and end timestamps. */
  private static Function<MetricsReader, NamedTestResult> getTimeMetric(
      final String uuid, final String timestamp, final String metricName) {
    return reader -> {
      final long startTime = reader.getStartTimeMetric(metricName);
      final long endTime = reader.getEndTimeMetric(metricName);
      // Metric timestamps are in milliseconds; report seconds.
      return NamedTestResult.create(uuid, timestamp, metricName, (endTime - startTime) / 1e3);
    };
  }

  /** Total wall time in seconds from first write to last read. */
  private static double getRunTime(
      final PipelineResult writeResults, final PipelineResult readResult) {
    final long startTime =
        MetricsReader.ofResults(writeResults, TFRECORD_NAMESPACE).getStartTimeMetric(WRITE_TIME);
    final long endTime =
        MetricsReader.ofResults(readResult, TFRECORD_NAMESPACE).getEndTimeMetric(READ_TIME);
    return (endTime - startTime) / 1e3;
  }

  /** Encodes a line as UTF-8 bytes for TFRecordIO.write(). */
  static class StringToByteArray extends SimpleFunction<String, byte[]> {
    @Override
    public byte[] apply(String input) {
      return input.getBytes(StandardCharsets.UTF_8);
    }
  }

  /** Decodes TFRecord payload bytes back to a UTF-8 string. */
  static class ByteArrayToString extends SimpleFunction<byte[], String> {
    @Override
    public String apply(byte[] input) {
      return new String(input, StandardCharsets.UTF_8);
    }
  }
}
robertwb/incubator-beam
sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/tfrecord/TFRecordIOIT.java
Java
apache-2.0
9,717
/// <reference path='../node_modules/typescript/bin/typescript.d.ts' />
import ts = require('typescript');
import base = require('./base');
import ts2dart = require('./main');
import {FacadeConverter} from './facade_converter';

/**
 * Translates TypeScript module-level constructs into their Dart equivalents:
 * the optional `library` directive, `import ... show/as`, and
 * `export ... show`. Unknown node kinds are declined (visitNode returns
 * false) so other transpiler passes can handle them.
 */
export default class ModuleTranspiler extends base.TranspilerBase {
  constructor(tr: ts2dart.Transpiler, private fc: FacadeConverter, private generateLibraryName: boolean) {
    super(tr);
  }

  // Dispatch on the node kind; returns true when this visitor handled the
  // node, false to let the caller try other visitors.
  visitNode(node: ts.Node): boolean {
    switch (node.kind) {
      case ts.SyntaxKind.SourceFile:
        // Optionally emit `library <name>;` derived from the file path,
        // then any extra imports the facade converter requires.
        if (this.generateLibraryName) {
          this.emit('library');
          this.emit(this.getLibraryName());
          this.emit(';');
        }
        this.fc.emitExtraImports(<ts.SourceFile>node);
        ts.forEachChild(node, this.visit.bind(this));
        break;
      case ts.SyntaxKind.EndOfFileToken:
        ts.forEachChild(node, this.visit.bind(this));
        break;
      case ts.SyntaxKind.ImportDeclaration:
        var importDecl = <ts.ImportDeclaration>node;
        // Imports whose every specifier is ignored (see isIgnoredImport)
        // are dropped entirely.
        if (this.isEmptyImport(importDecl)) return true;
        this.emit('import');
        this.visitExternalModuleReferenceExpr(importDecl.moduleSpecifier);
        if (importDecl.importClause) {
          this.visit(importDecl.importClause);
        } else {
          this.reportError(importDecl, 'bare import is unsupported');
        }
        this.emit(';');
        break;
      case ts.SyntaxKind.ImportClause:
        var importClause = <ts.ImportClause>node;
        if (importClause.name) this.fc.visitTypeName(importClause.name);
        if (importClause.namedBindings) {
          this.visit(importClause.namedBindings);
        }
        break;
      case ts.SyntaxKind.NamespaceImport:
        // `import * as ns` becomes Dart's `import ... as ns`.
        var nsImport = <ts.NamespaceImport>node;
        this.emit('as');
        this.fc.visitTypeName(nsImport.name);
        break;
      case ts.SyntaxKind.NamedImports:
        // `{a, b}` becomes Dart's `show a, b`, minus ignored specifiers.
        this.emit('show');
        var used = this.filterImports((<ts.NamedImports>node).elements);
        if (used.length === 0) {
          // isEmptyImport should have dropped all-ignored imports already.
          this.reportError(node, 'internal error, used imports must not be empty');
        }
        this.visitList(used);
        break;
      case ts.SyntaxKind.NamedExports:
        var exportElements = (<ts.NamedExports>node).elements;
        this.emit('show');
        if (exportElements.length === 0) this.reportError(node, 'empty export list');
        this.visitList((<ts.NamedExports>node).elements);
        break;
      case ts.SyntaxKind.ImportSpecifier:
      case ts.SyntaxKind.ExportSpecifier:
        var spec = <ts.ImportOrExportSpecifier>node;
        // `import {x as y}` has no Dart equivalent.
        if (spec.propertyName) {
          this.reportError(spec.propertyName, 'import/export renames are unsupported in Dart');
        }
        this.fc.visitTypeName(spec.name);
        break;
      case ts.SyntaxKind.ExportDeclaration:
        var exportDecl = <ts.ExportDeclaration>node;
        this.emit('export');
        if (exportDecl.moduleSpecifier) {
          this.visitExternalModuleReferenceExpr(exportDecl.moduleSpecifier);
        } else {
          this.reportError(node, 're-exports must have a module URL (export x from "./y").');
        }
        if (exportDecl.exportClause) this.visit(exportDecl.exportClause);
        this.emit(';');
        break;
      case ts.SyntaxKind.ImportEqualsDeclaration:
        var importEqDecl = <ts.ImportEqualsDeclaration>node;
        this.emit('import');
        this.visit(importEqDecl.moduleReference);
        this.emit('as');
        this.fc.visitTypeName(importEqDecl.name);
        this.emit(';');
        break;
      case ts.SyntaxKind.ExternalModuleReference:
        this.visitExternalModuleReferenceExpr((<ts.ExternalModuleReference>node).expression);
        break;
      default:
        return false;
    }
    return true;
  }

  // Specifiers that exist only for the TS side (e.g. Angular's CONST_EXPR)
  // and must not appear in the generated Dart import list.
  private static isIgnoredImport(e: ts.ImportSpecifier) {
    // TODO: unify with facade_converter.ts
    var name = base.ident(e.name);
    switch (name) {
      case 'CONST':
      case 'CONST_EXPR':
      case 'normalizeBlank':
      case 'forwardRef':
      case 'ABSTRACT':
      case 'IMPLEMENTS':
        return true;
      default:
        return false;
    }
  }

  // Emits a module specifier as a Dart import URL with a `.dart` suffix:
  // './x' -> 'x.dart', '../x' stays relative, anything else -> 'package:x.dart'.
  private visitExternalModuleReferenceExpr(expr: ts.Expression) {
    // TODO: what if this isn't a string literal?
    var moduleName = <ts.StringLiteral>expr;
    var text = moduleName.text;
    if (text.match(/^\.\//)) {
      // Strip './' to be more Dart-idiomatic.
      text = text.substring(2);
    } else if (!text.match(/^\.\.\//)) {
      // Unprefixed imports are package imports.
      text = 'package:' + text;
    }
    this.emit(JSON.stringify(text + '.dart'));
  }

  // True when every named specifier of the import is ignored, i.e. the whole
  // import declaration should be suppressed in the output.
  private isEmptyImport(n: ts.ImportDeclaration): boolean {
    var bindings = n.importClause.namedBindings;
    if (bindings.kind != ts.SyntaxKind.NamedImports) return false;
    var elements = (<ts.NamedImports>bindings).elements;
    // An import list being empty *after* filtering is ok, but if it's empty in the code itself,
    // it's nonsensical code, so probably a programming error.
    if (elements.length === 0) this.reportError(n, 'empty import list');
    return elements.every(ModuleTranspiler.isIgnoredImport);
  }

  private filterImports(ns: ts.ImportOrExportSpecifier[]) {
    return ns.filter((e) => !ModuleTranspiler.isIgnoredImport(e));
  }

  // For the Dart keyword list see
  // https://www.dartlang.org/docs/dart-up-and-running/ch02.html#keywords
  private static DART_RESERVED_WORDS =
      ('assert break case catch class const continue default do else enum extends false final ' +
       'finally for if in is new null rethrow return super switch this throw true try var void ' +
       'while with')
          .split(/ /);

  // These are the built-in and limited keywords.
  private static DART_OTHER_KEYWORDS =
      ('abstract as async await deferred dynamic export external factory get implements import ' +
       'library operator part set static sync typedef yield')
          .split(/ /);

  // Derives a Dart library name from the (relative) file path: path segments
  // become dot-separated identifiers, non-word chars become '_', the .ts/.js
  // extension is dropped, and reserved words get a leading underscore.
  getLibraryName(nameForTest?: string) {
    var fileName = this.getRelativeFileName(nameForTest);
    var parts = fileName.split('/');
    return parts.filter((p) => p.length > 0)
        .map((p) => p.replace(/[^\w.]/g, '_'))
        .map((p) => p.replace(/\.[jt]s$/g, ''))
        .map((p) => ModuleTranspiler.DART_RESERVED_WORDS.indexOf(p) != -1 ? '_' + p : p)
        .join('.');
  }
}
jeffbcross/ts2dart
lib/module.ts
TypeScript
apache-2.0
6,375
/* * Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #pragma once #include <aws/lightsail/Lightsail_EXPORTS.h> #include <aws/core/utils/memory/stl/AWSString.h> namespace Aws { namespace Lightsail { namespace Model { enum class PortAccessType { NOT_SET, Public, Private }; namespace PortAccessTypeMapper { AWS_LIGHTSAIL_API PortAccessType GetPortAccessTypeForName(const Aws::String& name); AWS_LIGHTSAIL_API Aws::String GetNameForPortAccessType(PortAccessType value); } // namespace PortAccessTypeMapper } // namespace Model } // namespace Lightsail } // namespace Aws
chiaming0914/awe-cpp-sdk
aws-cpp-sdk-lightsail/include/aws/lightsail/model/PortAccessType.h
C
apache-2.0
1,112
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=US-ASCII"> <title>Function template operator==</title> <link rel="stylesheet" href="../../../../doc/src/boostbook.css" type="text/css"> <meta name="generator" content="DocBook XSL Stylesheets V1.78.1"> <link rel="home" href="../../index.html" title="The Boost C++ Libraries BoostBook Documentation Subset"> <link rel="up" href="../../interprocess/indexes_reference.html#header.boost.interprocess.allocators.private_node_allocator_hpp" title="Header &lt;boost/interprocess/allocators/private_node_allocator.hpp&gt;"> <link rel="prev" href="private_node_allocator/rebind.html" title="Struct template rebind"> <link rel="next" href="operator___idp64889440.html" title="Function template operator!="> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"> <table cellpadding="2" width="100%"><tr> <td valign="top"><img alt="Boost C++ Libraries" width="277" height="86" src="../../../../boost.png"></td> <td align="center"><a href="../../../../index.html">Home</a></td> <td align="center"><a href="../../../../libs/libraries.htm">Libraries</a></td> <td align="center"><a href="http://www.boost.org/users/people.html">People</a></td> <td align="center"><a href="http://www.boost.org/users/faq.html">FAQ</a></td> <td align="center"><a href="../../../../more/index.htm">More</a></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="private_node_allocator/rebind.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../interprocess/indexes_reference.html#header.boost.interprocess.allocators.private_node_allocator_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="operator___idp64889440.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> <div class="refentry"> <a 
name="boost.interprocess.operator==_idp64881152"></a><div class="titlepage"></div> <div class="refnamediv"> <h2><span class="refentrytitle">Function template operator==</span></h2> <p>boost::interprocess::operator==</p> </div> <h2 xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv-title">Synopsis</h2> <div xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" class="refsynopsisdiv"><pre class="synopsis"><span class="comment">// In header: &lt;<a class="link" href="../../interprocess/indexes_reference.html#header.boost.interprocess.allocators.private_node_allocator_hpp" title="Header &lt;boost/interprocess/allocators/private_node_allocator.hpp&gt;">boost/interprocess/allocators/private_node_allocator.hpp</a>&gt; </span> <span class="keyword">template</span><span class="special">&lt;</span><span class="keyword">typename</span> T<span class="special">,</span> <span class="keyword">typename</span> S<span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">size_t</span> NodesPerBlock<span class="special">,</span> <span class="identifier">std</span><span class="special">::</span><span class="identifier">size_t</span> F<span class="special">,</span> <span class="keyword">unsigned</span> <span class="keyword">char</span> OP<span class="special">&gt;</span> <span class="keyword">bool</span> <span class="keyword">operator</span><span class="special">==</span><span class="special">(</span><span class="keyword">const</span> <a class="link" href="private_node_allocator.html" title="Class template private_node_allocator">private_node_allocator</a><span class="special">&lt;</span> <span class="identifier">T</span><span class="special">,</span> <span class="identifier">S</span><span class="special">,</span> <span class="identifier">NodesPerBlock</span><span class="special">,</span> <span class="identifier">F</span><span class="special">,</span> <span 
class="identifier">OP</span> <span class="special">&gt;</span> <span class="special">&amp;</span> alloc1<span class="special">,</span> <span class="keyword">const</span> <a class="link" href="private_node_allocator.html" title="Class template private_node_allocator">private_node_allocator</a><span class="special">&lt;</span> <span class="identifier">T</span><span class="special">,</span> <span class="identifier">S</span><span class="special">,</span> <span class="identifier">NodesPerBlock</span><span class="special">,</span> <span class="identifier">F</span><span class="special">,</span> <span class="identifier">OP</span> <span class="special">&gt;</span> <span class="special">&amp;</span> alloc2<span class="special">)</span><span class="special">;</span></pre></div> <div class="refsect1"> <a name="idp238484672"></a><h2>Description</h2> <p>Equality test for same type of <code class="computeroutput"><a class="link" href="private_node_allocator.html" title="Class template private_node_allocator">private_node_allocator</a></code> </p> </div> </div> <table xmlns:rev="http://www.cs.rpi.edu/~gregod/boost/tools/doc/revision" width="100%"><tr> <td align="left"></td> <td align="right"><div class="copyright-footer">Copyright &#169; 2005-2012 Ion Gaztanaga<p> Distributed under the Boost Software License, Version 1.0. 
(See accompanying file LICENSE_1_0.txt or copy at <a href="http://www.boost.org/LICENSE_1_0.txt" target="_top">http://www.boost.org/LICENSE_1_0.txt</a>) </p> </div></td> </tr></table> <hr> <div class="spirit-nav"> <a accesskey="p" href="private_node_allocator/rebind.html"><img src="../../../../doc/src/images/prev.png" alt="Prev"></a><a accesskey="u" href="../../interprocess/indexes_reference.html#header.boost.interprocess.allocators.private_node_allocator_hpp"><img src="../../../../doc/src/images/up.png" alt="Up"></a><a accesskey="h" href="../../index.html"><img src="../../../../doc/src/images/home.png" alt="Home"></a><a accesskey="n" href="operator___idp64889440.html"><img src="../../../../doc/src/images/next.png" alt="Next"></a> </div> </body> </html>
biospi/seamass-windeps
src/boost_1_57_0/doc/html/boost/interprocess/operator___idp64881152.html
HTML
apache-2.0
6,119
<?php
/*
 *  $Id: Parser.php 1080 2007-02-10 18:17:08Z jwage $
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * This software consists of voluntary contributions made by many individuals
 * and is licensed under the LGPL. For more information, see
 * <http://www.doctrine-project.org>.
 */

/**
 * Doctrine_Parser
 *
 * Abstract base for data-format parsers (XML, YAML, JSON, ...). Concrete
 * drivers implement loadData()/dumpData(); the static load()/dump() helpers
 * instantiate the right driver by naming convention (Doctrine_Parser_<Type>).
 *
 * @package     Doctrine
 * @subpackage  Parser
 * @license     http://www.opensource.org/licenses/lgpl-license.php LGPL
 * @link        www.doctrine-project.org
 * @since       1.0
 * @version     $Revision: 1080 $
 * @author      Jonathan H. Wage <[email protected]>
 */
abstract class Doctrine_Parser
{
    /**
     * loadData
     *
     * Override in the parser driver.
     *
     * @param  string $array   despite the name: path to the data file, or the
     *                         raw data string itself
     * @param  string $charset charset of the data being loaded
     * @return array           parsed data
     * @author Jonathan H. Wage
     */
    abstract public function loadData($array, $charset = 'UTF-8');

    /**
     * dumpData
     *
     * Override in the parser driver.
     *
     * @param  array       $array   data to serialize
     * @param  string|null $path    file to write to, or null to return the dump
     * @param  string|null $charset charset of the data being dumped
     * @return mixed               driver-dependent: dump string, or bytes written
     * @author Jonathan H. Wage
     */
    abstract public function dumpData($array, $path = null, $charset = null);

    /**
     * getParser
     *
     * Get an instance of the specified parser driver. The class name is built
     * by convention: Doctrine_Parser_<Type> (e.g. 'xml' -> Doctrine_Parser_Xml).
     *
     * @param  string $type parser type
     * @return Doctrine_Parser
     * @author Jonathan H. Wage
     */
    static public function getParser($type)
    {
        $class = 'Doctrine_Parser_'.ucfirst($type);
        return new $class;
    }

    /**
     * load
     *
     * Interface for loading and parsing data from a file (or raw string).
     *
     * @param  string $path    path to the file, or the raw data itself
     * @param  string $type    parser type (e.g. 'xml', 'yml')
     * @param  string $charset charset of the data
     * @return array
     * @author Jonathan H. Wage
     */
    static public function load($path, $type = 'xml', $charset = 'UTF-8')
    {
        $parser = self::getParser($type);

        return (array) $parser->loadData($path, $charset);
    }

    /**
     * dump
     *
     * Interface for serializing data and optionally writing it to a file.
     *
     * @param  array       $array   data to serialize
     * @param  string      $type    parser type (e.g. 'xml', 'yml')
     * @param  string|null $path    file to write to, or null to return the dump
     * @param  string|null $charset charset of the data being dumped
     * @return mixed
     * @author Jonathan H. Wage
     */
    static public function dump($array, $type = 'xml', $path = null, $charset = null)
    {
        $parser = self::getParser($type);

        return $parser->dumpData($array, $path, $charset);
    }

    /**
     * doLoad
     *
     * Get contents whether it is the path to a file file or a string of txt.
     * Either should allow php code in it: the content is include()d, so any
     * embedded PHP is evaluated and its output captured via output buffering.
     * When given a raw string, it is first written to a temp file so that
     * include() can process it.
     *
     * NOTE(review): the temp file created here is never removed — confirm
     * whether cleanup is intentional before changing.
     *
     * @param  string $path path to a file, or raw content
     * @return string       captured content after PHP evaluation
     */
    public function doLoad($path)
    {
        ob_start();
        if ( ! file_exists($path)) {
            $contents = $path;
            $path = sys_get_temp_dir() . DIRECTORY_SEPARATOR . 'dparser_' . microtime();

            file_put_contents($path, $contents);
        }

        include($path);

        // Fix #1569. Need to check if it's still all valid
        $contents = ob_get_clean(); //iconv("UTF-8", "UTF-8", ob_get_clean());

        return $contents;
    }

    /**
     * doDump
     *
     * Write the dump to $path when given, otherwise return it.
     *
     * @param  string      $data serialized data
     * @param  string|null $path optional output file
     * @return mixed             bytes written (int) when $path given, else $data
     */
    public function doDump($data, $path = null)
    {
        if ($path !== null) {
            return file_put_contents($path, $data);
        } else {
            return $data;
        }
    }
}
openpne/OpenPNE3
lib/vendor/symfony/lib/plugins/sfDoctrinePlugin/lib/vendor/doctrine/lib/Doctrine/Parser.php
PHP
apache-2.0
4,130
<!doctype html><html lang=en><head><title>Redirecting&mldr;</title><link rel=canonical href=/v1.12/news/releases/1.0.x/announcing-1.0.6/><meta name=robots content="noindex"><meta charset=utf-8><meta http-equiv=refresh content="0; url=/v1.12/news/releases/1.0.x/announcing-1.0.6/"></head><body><h1>Redirecting&mldr;</h1><a href=/v1.12/news/releases/1.0.x/announcing-1.0.6/>Click here if you are not redirected.</a></body></html>
istio/istio.io
archive/v1.12/news/announcing-1.0.6/index.html
HTML
apache-2.0
427
package com.example;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

/**
 * Smoke test for the config service: boots the full Spring application
 * context and fails if any bean wiring or configuration error occurs.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class ConfigServiceApplicationTests {

	/**
	 * Intentionally empty: the test passes if the application context
	 * starts without throwing.
	 */
	@Test
	public void contextLoads() {
	}

}
kbastani/oreilly-building-microservices-training
day-1/part-3/config-service/src/test/java/com/example/ConfigServiceApplicationTests.java
Java
apache-2.0
335
package org.apereo.cas.configuration.model.support.passwordless.account;

import org.apereo.cas.configuration.model.SpringResourceProperties;
import org.apereo.cas.configuration.support.RequiresModule;

import com.fasterxml.jackson.annotation.JsonFilter;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;

/**
 * This is {@link PasswordlessAuthenticationJsonAccountsProperties}.
 *
 * Configuration holder for JSON-backed passwordless account stores; the
 * actual resource location comes from the {@link SpringResourceProperties}
 * superclass. Accessors are generated by Lombok ({@code @Getter}/
 * {@code @Setter}, chained via {@code @Accessors}).
 *
 * @author Misagh Moayyed
 * @since 6.2.0
 */
@RequiresModule(name = "cas-server-support-passwordless")
@Getter
@Setter
@Accessors(chain = true)
@JsonFilter("PasswordlessAuthenticationJsonAccountsProperties")
public class PasswordlessAuthenticationJsonAccountsProperties extends SpringResourceProperties {
    // Explicit UID so serialized config survives class evolution.
    private static final long serialVersionUID = 8079027843747126083L;
}
apereo/cas
api/cas-server-core-api-configuration-model/src/main/java/org/apereo/cas/configuration/model/support/passwordless/account/PasswordlessAuthenticationJsonAccountsProperties.java
Java
apache-2.0
793
using System;
using System.Globalization;

namespace Lucene.Net.Search.Similarities
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// Bayesian smoothing using Dirichlet priors. From Chengxiang Zhai and John
    /// Lafferty. 2001. A study of smoothing methods for language models applied to
    /// Ad Hoc information retrieval. In Proceedings of the 24th annual international
    /// ACM SIGIR conference on Research and development in information retrieval
    /// (SIGIR '01). ACM, New York, NY, USA, 334-342.
    /// <p>
    /// The formula as defined the paper assigns a negative score to documents that
    /// contain the term, but with fewer occurrences than predicted by the collection
    /// language model. The Lucene implementation returns {@code 0} for such
    /// documents.
    /// </p>
    ///
    /// @lucene.experimental
    /// </summary>
    public class LMDirichletSimilarity : LMSimilarity
    {
        /// <summary>
        /// The &mu; smoothing parameter. </summary>
        private readonly float mu;

        /// <summary>
        /// Instantiates the similarity with the provided &mu; parameter. </summary>
        public LMDirichletSimilarity(CollectionModel collectionModel, float mu)
            : base(collectionModel)
        {
            this.mu = mu;
        }

        /// <summary>
        /// Instantiates the similarity with the provided &mu; parameter. </summary>
        public LMDirichletSimilarity(float mu)
        {
            this.mu = mu;
        }

        /// <summary>
        /// Instantiates the similarity with the default &mu; value of 2000. </summary>
        public LMDirichletSimilarity(CollectionModel collectionModel)
            : this(collectionModel, 2000)
        {
        }

        /// <summary>
        /// Instantiates the similarity with the default &mu; value of 2000. </summary>
        public LMDirichletSimilarity()
            : this(2000)
        {
        }

        /// <summary>
        /// Computes the Dirichlet-smoothed score; negative raw scores are
        /// clamped to 0 (see class remarks). </summary>
        public override float Score(BasicStats stats, float freq, float docLen)
        {
            float score = stats.TotalBoost * (float)(Math.Log(1 + freq / (mu * ((LMStats)stats).CollectionProbability)) + Math.Log(mu / (docLen + mu)));

            return score > 0.0f ? score : 0.0f;
        }

        /// <summary>
        /// Adds the boost, &mu;, term-weight and document-norm sub-explanations. </summary>
        protected internal override void Explain(Explanation expl, BasicStats stats, int doc, float freq, float docLen)
        {
            if (stats.TotalBoost != 1.0f)
            {
                expl.AddDetail(new Explanation(stats.TotalBoost, "boost"));
            }

            expl.AddDetail(new Explanation(mu, "mu"));
            Explanation weightExpl = new Explanation();
            weightExpl.Value = (float)Math.Log(1 + freq / (mu * ((LMStats)stats).CollectionProbability));
            weightExpl.Description = "term weight";
            expl.AddDetail(weightExpl);
            expl.AddDetail(new Explanation((float)Math.Log(mu / (docLen + mu)), "document norm"));
            base.Explain(expl, stats, doc, freq, docLen);
        }

        /// <summary>
        /// Returns the &mu; parameter. </summary>
        public virtual float Mu
        {
            get
            {
                return mu;
            }
        }

        public override string Name
        {
            get
            {
                // BUG FIX: string.Format uses composite formatting ("{0}"),
                // not printf-style "%f". The previous format string emitted
                // the literal text "Dirichlet(%f)" and silently dropped Mu.
                // "{0:F6}" matches the Java version's "%f" (6 decimals).
                return string.Format(CultureInfo.InvariantCulture, "Dirichlet({0:F6})", Mu);
            }
        }
    }
}
jpsullivan/lucenenet
src/Lucene.Net.Core/Search/Similarities/LMDirichletSimilarity.cs
C#
apache-2.0
4,312
package com.github.pires.obd.reader.io;

/**
 * Callback for observing the progress of OBD command jobs.
 */
public interface ObdProgressListener {
    /**
     * Invoked with a job whose state has changed.
     * NOTE(review): threading of the callback is not visible here — confirm
     * which thread invokes it before touching UI from implementations.
     *
     * @param job the job that was updated
     */
    void stateUpdate(final ObdCommandJob job);
}
seanhold3n/android-obd-reader
src/main/java/com/github/pires/obd/reader/io/ObdProgressListener.java
Java
apache-2.0
130
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.launcher2;

import android.view.View;

/**
 * Helper that drives long-press detection for a single view: a delayed check
 * is posted and, when it fires, {@link View#performLongClick()} is invoked if
 * the view is still attached, its window focused, and no long press has been
 * delivered yet for the current gesture.
 */
public class CheckLongPressHelper {
    private View mView;
    private boolean mHasPerformedLongPress;
    private CheckForLongPress mPendingCheckForLongPress;

    class CheckForLongPress implements Runnable {
        public void run() {
            // Bail out unless the view is still in the hierarchy, its window
            // has focus, and this gesture hasn't already long-pressed.
            if (mView.getParent() == null || !mView.hasWindowFocus()
                    || mHasPerformedLongPress) {
                return;
            }
            if (mView.performLongClick()) {
                mView.setPressed(false);
                mHasPerformedLongPress = true;
            }
        }
    }

    public CheckLongPressHelper(View v) {
        mView = v;
    }

    /** Arms the long-press check: it fires after the configured timeout. */
    public void postCheckForLongPress() {
        mHasPerformedLongPress = false;
        if (mPendingCheckForLongPress == null) {
            // Lazily created and reused across gestures.
            mPendingCheckForLongPress = new CheckForLongPress();
        }
        mView.postDelayed(mPendingCheckForLongPress, LauncherApplication.getLongPressTimeout());
    }

    /** Disarms a pending check (if any) and resets the delivered flag. */
    public void cancelLongPress() {
        mHasPerformedLongPress = false;
        if (mPendingCheckForLongPress == null) {
            return;
        }
        mView.removeCallbacks(mPendingCheckForLongPress);
        mPendingCheckForLongPress = null;
    }

    /** @return whether a long press was delivered since the last post. */
    public boolean hasPerformedLongPress() {
        return mHasPerformedLongPress;
    }
}
craigacgomez/flaming_monkey_packages_apps_Launcher2
src/com/android/launcher2/CheckLongPressHelper.java
Java
apache-2.0
1,954
/*
 * Copyright 2010 JBoss Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.ide.common.assistant.info.drl;

import java.util.ArrayList;
import java.util.List;

/**
 * Content descriptor for one DRL rule: its name plus the lines forming the
 * left-hand side (LHS, conditions) and right-hand side (RHS, actions).
 */
public class RuleDRLContentInfo extends RuleBasicContentInfo {

    private String ruleName;
    private List<RuleLineContentInfo> lhs;
    private List<RuleLineContentInfo> rhs;

    public RuleDRLContentInfo(Integer offset, String content, DRLContentTypeEnum type,
                              String ruleName, List<RuleLineContentInfo> lhs,
                              List<RuleLineContentInfo> rhs) {
        super(offset, content, type);
        this.setRuleName(ruleName);
        this.lhs = lhs;
        this.rhs = rhs;
    }

    public void setRuleName(String ruleName) {
        this.ruleName = ruleName;
    }

    public String getRuleName() {
        return ruleName;
    }

    /** @return the length of the rule name, in characters */
    public Integer getRuleNameLength() {
        return ruleName.length();
    }

    /** Appends a line to the rule's condition (when) section. */
    public void addLHSRuleLine(RuleLineContentInfo ruleLine) {
        lhs.add(ruleLine);
    }

    public List<RuleLineContentInfo> getLHSRuleLines() {
        return lhs;
    }

    /** Appends a line to the rule's action (then) section. */
    public void addRHSRuleLine(RuleLineContentInfo ruleLine) {
        rhs.add(ruleLine);
    }

    public List<RuleLineContentInfo> getRHSRuleLines() {
        return rhs;
    }

    /** @return a fresh list containing the LHS lines followed by the RHS lines */
    public List<RuleLineContentInfo> getAllLines() {
        List<RuleLineContentInfo> combined = new ArrayList<RuleLineContentInfo>(lhs);
        combined.addAll(rhs);
        return combined;
    }
}
Rikkola/guvnor
droolsjbpm-ide-common/src/main/java/org/drools/ide/common/assistant/info/drl/RuleDRLContentInfo.java
Java
apache-2.0
1,971
/**
 * Copyright 2003-2006 the original author or authors.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jdon.strutsutil.file.filter;

import javax.servlet.http.*;

import com.jdon.strutsutil.file.UploadFile;

import java.util.*;

/**
 * Request-scoped store/filter abstraction for uploaded image files.
 */
public interface ImageFilter {

    /**
     * Returns the collection of IDs of all uploaded files.
     * @param request current HTTP request
     * @return collection of upload-file IDs
     */
    public Collection getUploadFileID(HttpServletRequest request);

    /**
     * Returns the uploaded file object for the given id.
     * @param request current HTTP request
     * @param id upload-file id
     * @return the matching upload file
     */
    public UploadFile getUploadFile(HttpServletRequest request, String id);

    /**
     * Deletes the given uploaded file.
     * @param request current HTTP request
     * @param uploadFile the upload to remove
     */
    public void deleteUploadFile(HttpServletRequest request, UploadFile uploadFile);

    /**
     * Adds a new uploaded file.
     **/
    public void addUploadFile(HttpServletRequest request, UploadFile uploadFile);

    /**
     * Retrieves the images uploaded in the current interaction.
     * @param request current HTTP request
     * @return collection of upload files
     */
    public Collection loadAllUploadFile(HttpServletRequest request);
}
linqingyicen/jdonframework
JdonAccessory/jdon-struts1x/src/main/java/com/jdon/strutsutil/file/filter/ImageFilter.java
Java
apache-2.0
1,623
/*
 * Copyright 2009-2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.testdomain;

import java.io.Serializable;
import java.util.List;

/**
 * Catalog category bean: identity, display data, optional parent category,
 * and the related item/product lists.
 */
public class Category implements Serializable {

  private String categoryId;
  private Category parentCategory;
  private String name;
  private String description;
  private List itemList;
  private List productList;

  public String getCategoryId() {
    return categoryId;
  }

  public void setCategoryId(String id) {
    this.categoryId = id;
  }

  public String getName() {
    return name;
  }

  public void setName(String newName) {
    this.name = newName;
  }

  public String getDescription() {
    return description;
  }

  public void setDescription(String newDescription) {
    this.description = newDescription;
  }

  public Category getParentCategory() {
    return parentCategory;
  }

  public void setParentCategory(Category parent) {
    this.parentCategory = parent;
  }

  public List getItemList() {
    return itemList;
  }

  public void setItemList(List items) {
    this.itemList = items;
  }

  public List getProductList() {
    return productList;
  }

  public void setProductList(List products) {
    this.productList = products;
  }
}
emacarron/mybatis-3-no-local-cache
src/test/java/com/testdomain/Category.java
Java
apache-2.0
1,832
/*

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.css.engine.value;

import org.w3c.dom.DOMException;
import org.w3c.dom.css.CSSPrimitiveValue;

/**
 * This class represents float values.
 *
 * @author <a href="mailto:[email protected]">Stephane Hillion</a>
 * @version $Id$
 */
public class FloatValue extends AbstractValue {

    /**
     * Returns the CSS text associated with the given type/value pair.
     *
     * @param unit  one of the CSSPrimitiveValue unit constants, from
     *              CSS_NUMBER up to CSS_NUMBER + UNITS.length - 1
     * @param value the float magnitude
     * @throws DOMException with code SYNTAX_ERR if the unit is out of range
     */
    public static String getCssText(short unit, float value) {
        // BUG FIX: UNITS is indexed relative to CSS_NUMBER, so the bounds
        // check must be applied to the shifted index. The previous check
        // (unit < 0 || unit >= UNITS.length) accepted CSS_UNKNOWN (0) — which
        // then indexed UNITS[-1] and threw ArrayIndexOutOfBoundsException —
        // and wrongly rejected the highest valid unit constant.
        int index = unit - CSSPrimitiveValue.CSS_NUMBER;
        if (index < 0 || index >= UNITS.length) {
            throw new DOMException(DOMException.SYNTAX_ERR, "");
        }
        // Drop a trailing ".0" so integral values print compactly ("12px",
        // not "12.0px").
        String s = String.valueOf(value);
        if (s.endsWith(".0")) {
            s = s.substring(0, s.length() - 2);
        }
        return s + UNITS[index];
    }

    /**
     * The unit types representations, indexed by
     * (unitType - CSSPrimitiveValue.CSS_NUMBER).
     */
    protected static final String[] UNITS = {
        "", "%", "em", "ex", "px", "cm", "mm", "in", "pt",
        "pc", "deg", "rad", "grad", "ms", "s", "Hz", "kHz", ""
    };

    /**
     * The float value
     */
    protected float floatValue;

    /**
     * The unit type
     */
    protected short unitType;

    /**
     * Creates a new value.
     *
     * @param unitType   one of the CSSPrimitiveValue unit constants
     * @param floatValue the magnitude
     */
    public FloatValue(short unitType, float floatValue) {
        this.unitType = unitType;
        this.floatValue = floatValue;
    }

    /**
     * The type of the value.
     */
    public short getPrimitiveType() {
        return unitType;
    }

    /**
     * Returns the float value.
     */
    public float getFloatValue() {
        return floatValue;
    }

    /**
     * A string representation of the current value.
     */
    public String getCssText() {
        return getCssText(unitType, floatValue);
    }

    /**
     * Returns a printable representation of this value.
     */
    public String toString() {
        return getCssText();
    }
}
Squeegee/batik
sources/org/apache/batik/css/engine/value/FloatValue.java
Java
apache-2.0
2,667
/*=========================================================================
 * Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
 * This product is protected by U.S. and international copyright
 * and intellectual property laws. Pivotal products are covered by
 * one or more patents listed at http://www.pivotal.io/patents.
 *=========================================================================
 */
package com.gemstone.gemfire.internal.cache;

import java.util.UUID;

/**
 * Heap variant of the stats+disk region entry. Instances are created only
 * through the factory returned by {@link #getEntryFactory()}, which picks a
 * key-specialized subclass to inline common key types and save per-entry
 * object headers.
 */
public abstract class VMStatsDiskRegionEntryHeap extends VMStatsDiskRegionEntry {
  public VMStatsDiskRegionEntryHeap(RegionEntryContext context, Object value) {
    super(context, value);
  }

  // Stateless singleton; shared by all regions using this entry variant.
  private static final VMStatsDiskRegionEntryHeapFactory factory = new VMStatsDiskRegionEntryHeapFactory();

  public static RegionEntryFactory getEntryFactory() {
    return factory;
  }

  private static class VMStatsDiskRegionEntryHeapFactory implements RegionEntryFactory {
    public final RegionEntry createEntry(RegionEntryContext context, Object key, Object value) {
      // When key inlining is enabled, dispatch on the concrete key class to
      // an entry subclass that stores the key as a primitive/compact field
      // instead of an Object reference.
      if (InlineKeyHelper.INLINE_REGION_KEYS) {
        Class<?> keyClass = key.getClass();
        if (keyClass == Integer.class) {
          return new VMStatsDiskRegionEntryHeapIntKey(context, (Integer)key, value);
        } else if (keyClass == Long.class) {
          return new VMStatsDiskRegionEntryHeapLongKey(context, (Long)key, value);
        } else if (keyClass == String.class) {
          final String skey = (String) key;
          // null result means the string cannot be inline-encoded at all;
          // the boolean says whether a 1-byte-per-char encoding suffices.
          final Boolean info = InlineKeyHelper.canStringBeInlineEncoded(skey);
          if (info != null) {
            final boolean byteEncoded = info;
            // Two string variants exist with different inline capacities;
            // pick the smallest one that fits.
            if (skey.length() <= InlineKeyHelper.getMaxInlineStringKey(1, byteEncoded)) {
              return new VMStatsDiskRegionEntryHeapStringKey1(context, skey, value, byteEncoded);
            } else {
              return new VMStatsDiskRegionEntryHeapStringKey2(context, skey, value, byteEncoded);
            }
          }
        } else if (keyClass == UUID.class) {
          return new VMStatsDiskRegionEntryHeapUUIDKey(context, (UUID)key, value);
        }
      }
      // Fallback: generic Object-key entry (also used when inlining is off).
      return new VMStatsDiskRegionEntryHeapObjectKey(context, key, value);
    }

    public final Class getEntryClass() {
      // The class returned from this method is used to estimate the memory size.
      // TODO OFFHEAP: This estimate will not take into account the memory saved by inlining the keys.
      return VMStatsDiskRegionEntryHeapObjectKey.class;
    }

    public RegionEntryFactory makeVersioned() {
      return VersionedStatsDiskRegionEntryHeap.getEntryFactory();
    }
  }
}
ameybarve15/incubator-geode
gemfire-core/src/main/java/com/gemstone/gemfire/internal/cache/VMStatsDiskRegionEntryHeap.java
Java
apache-2.0
2,629
/* * Copyright (c) 2008-2018, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.concurrent.atomicreference.operations; import com.hazelcast.concurrent.atomicreference.AtomicReferenceContainer; import com.hazelcast.spi.ReadonlyOperation; import static com.hazelcast.concurrent.atomicreference.AtomicReferenceDataSerializerHook.IS_NULL; public class IsNullOperation extends AbstractAtomicReferenceOperation implements ReadonlyOperation { private boolean returnValue; public IsNullOperation() { } public IsNullOperation(String name) { super(name); } @Override public void run() throws Exception { AtomicReferenceContainer container = getReferenceContainer(); returnValue = container.isNull(); } @Override public Object getResponse() { return returnValue; } @Override public int getId() { return IS_NULL; } }
Donnerbart/hazelcast
hazelcast/src/main/java/com/hazelcast/concurrent/atomicreference/operations/IsNullOperation.java
Java
apache-2.0
1,479
/* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.engine.delegate; import org.camunda.bpm.engine.runtime.VariableInstance; /** * @author Thorben Lindhauer * */ public interface DelegateVariableInstance<T extends BaseDelegateExecution> extends VariableInstance, ProcessEngineServicesAware { String getEventName(); T getSourceExecution(); }
camunda/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/engine/delegate/DelegateVariableInstance.java
Java
apache-2.0
1,132
/* * Copyright 2010-2012 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.sqs.model.transform; import org.w3c.dom.Node; import com.amazonaws.AmazonServiceException; import com.amazonaws.util.XpathUtils; import com.amazonaws.transform.StandardErrorUnmarshaller; import com.amazonaws.services.sqs.model.MessageNotInflightException; public class MessageNotInflightExceptionUnmarshaller extends StandardErrorUnmarshaller { public MessageNotInflightExceptionUnmarshaller() { super(MessageNotInflightException.class); } public AmazonServiceException unmarshall(Node node) throws Exception { // Bail out if this isn't the right error code that this // marshaller understands. String errorCode = parseErrorCode(node); if (errorCode == null || !errorCode.equals("AWS.SimpleQueueService.MessageNotInflight")) return null; MessageNotInflightException e = (MessageNotInflightException)super.unmarshall(node); return e; } }
XidongHuang/aws-sdk-for-java
src/main/java/com/amazonaws/services/sqs/model/transform/MessageNotInflightExceptionUnmarshaller.java
Java
apache-2.0
1,560
/* copied from js-xls (C) SheetJS Apache2 license */ function xlml_normalize(d) { if(has_buf &&/*::typeof Buffer !== "undefined" && d != null &&*/ Buffer.isBuffer(d)) return d.toString('utf8'); if(typeof d === 'string') return d; throw "badf"; } /* UOS uses CJK in tags, original regex /<(\/?)([a-z0-9]*:|)([\w-]+)[^>]*>/ */ var xlmlregex = /<(\/?)([^\s?>\/:]*:|)([^\s?>]*[^\s?>\/])[^>]*>/mg;
tsinghuariit/DMP
static/app/static/raw/bower_components/js-xlsx/odsbits/23_xlml.js
JavaScript
apache-2.0
397
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jdbi.v3.sqlobject.customizer; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * Annotation used to build customizing annotations. Use this to annotate an annotation. See examples * in the {@link org.jdbi.v3.sqlobject.customizer} package. */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.ANNOTATION_TYPE) public @interface SqlStatementCustomizingAnnotation { /** * Specify a sql statement customizer factory which will be used to create * sql statement customizers. * @return a factory used to create customizers for the customizing annotation */ Class<? extends SqlStatementCustomizerFactory> value(); }
john9x/jdbi
sqlobject/src/main/java/org/jdbi/v3/sqlobject/customizer/SqlStatementCustomizingAnnotation.java
Java
apache-2.0
1,334
name "ohai" maintainer "OpenStreetMap Administrators" maintainer_email "[email protected]" license "Apache-2.0" description "Configures ohai" version "1.0.0" supports "ubuntu"
zerebubuth/openstreetmap-chef
cookbooks/ohai/metadata.rb
Ruby
apache-2.0
240
/**
  *********************************************************************************
  *
  * @file    ald_dbgc.h
  * @brief   DEBUGCON module driver.
  *
  * @version V1.0
  * @date    04 Jun 2017
  * @author  AE Team
  * @note
  *
  * Copyright (C) Shanghai Eastsoft Microelectronics Co. Ltd. All rights reserved.
  *
  *********************************************************************************
  */

#ifndef __ALD_DBGC_H__
#define __ALD_DBGC_H__

#ifdef __cplusplus
extern "C" {
#endif

#include "utils.h"

/** @addtogroup ES32FXXX_ALD
  * @{
  */

/** @defgroup DBGC DBGC
  * @brief DBGC module driver
  * @{
  */

/** @defgroup DBGC_Public_Types DBGC Public Types
  * @{
  */

/**
  * @brief Debug mode select
  */
/* Each value is a distinct CR bit, so modes can be OR-combined. */
typedef enum {
	DEBC_MODE_SLEEP   = (1u << 0),	/**< Sleep mode */
	DEBC_MODE_STOP1   = (1u << 1),	/**< STOP1 mode */
	DEBC_MODE_STOP2   = (1u << 2),	/**< STOP2 mode */
	DEBC_MODE_STANDBY = (1u << 3),	/**< Standby mode */
} dbgc_mode_t;

/**
  * @brief Debug peripheral select
  */
/* Bit 16 is a selector flag: set => APB2FZ peripheral, clear => APB1FZ.
 * The low bits are the freeze-bit position within the chosen register. */
typedef enum {
	DEBC_PERH_TIMER0 = (1u << 0),	/**< AD16C4T0 */
	DEBC_PERH_TIMER1 = (1u << 1),	/**< BS16T0 */
	DEBC_PERH_TIMER2 = (1u << 2),	/**< GP16C2T0 */
	DEBC_PERH_TIMER3 = (1u << 3),	/**< GP16C2T1 */
	DEBC_PERH_TIMER4 = (1u << 4),	/**< BS16T1 */
	DEBC_PERH_TIMER5 = (1u << 5),	/**< BS16T2 */
	DEBC_PERH_TIMER6 = (1u << 6),	/**< GP16C4T0 */
	DEBC_PERH_TIMER7 = (1u << 7),	/**< BS16T3 */
	DEBC_PERH_I2C0   = (1u << 8),	/**< I2C0 SMBUS */
	DEBC_PERH_I2C1   = (1u << 9),	/**< I2C1 SMBUS */
	DEBC_PERH_CAN    = (1u << 12),	/**< CAN */
	DEBC_PERH_LPTIM0 = (1u << 0) | (1u << 16),	/**< LPTIM0 */
	DEBC_PERH_IWDT   = (1u << 8) | (1u << 16),	/**< IWDT */
	DEBC_PERH_WWDT   = (1u << 9) | (1u << 16),	/**< WWDT */
	DEBC_PERH_RTC    = (1u << 10) | (1u << 16),	/**< RTC */
} dbgc_perh_t;
/**
  * @}
  */

/** @defgroup DBGC_Public_Functions DBGC Public Functions
  * @{
  */
/**
  * @brief  Gets version.
  * @retval Version
  */
/* Revision id lives in IDCODE[31:16]. */
__INLINE uint32_t dbgc_get_rev_id(void)
{
	return (DBGC->IDCODE >> 16);
}

/**
  * @brief  Gets core id.
  * @retval Core id
  */
/* Core id lives in IDCODE[15:12]. */
__INLINE uint32_t dbgc_get_core_id(void)
{
	return (DBGC->IDCODE >> 12) & 0xF;
}

/**
  * @brief  Gets device id
  * @retval device id
  */
/* Device id lives in IDCODE[11:0]. */
__INLINE uint32_t dbgc_get_device_id(void)
{
	return DBGC->IDCODE & 0xFFF;
}

/**
  * @brief  Configures low power debug mode
  * @param  mode: The mode of low power.
  * @param  state: ENABLE/DISABLE
  * @retval None
  */
__INLINE void dbgc_mode_config(dbgc_mode_t mode, type_func_t state)
{
	if (state)
		SET_BIT(DBGC->CR, mode);
	else
		CLEAR_BIT(DBGC->CR, mode);
}

/**
  * @brief  Configures peripheral debug mode
  * @param  perh: The peripheral.
  * @param  state: ENABLE/DISABLE
  * @retval None
  */
__INLINE void dbgc_perh_config(dbgc_perh_t perh, type_func_t state)
{
	/* Bit 16 of perh selects APB2FZ over APB1FZ. */
	if ((perh >> 16) & 0x1) {
		/* NOTE(review): the selector bit (1u << 16) is not masked out of
		 * perh before it is written, so bit 16 of APB2FZ is also
		 * set/cleared here. Presumably that bit is reserved/ignored in
		 * hardware -- confirm against the reference manual. */
		if (state)
			SET_BIT(DBGC->APB2FZ, perh);
		else
			CLEAR_BIT(DBGC->APB2FZ, perh);
	}
	else {
		if (state)
			SET_BIT(DBGC->APB1FZ, perh);
		else
			CLEAR_BIT(DBGC->APB1FZ, perh);
	}
}
/**
  * @}
  */
/**
  * @}
  */
/**
  * @}
  */

#ifdef __cplusplus
}
#endif

#endif
zhaojuntao/rt-thread
bsp/es32f0654/libraries/ES32F065x_ALD_StdPeriph_Driver/Include/ald_dbgc.h
C
apache-2.0
3,381
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.syncope.common.lib.to;

import org.apache.syncope.common.lib.AbstractBaseBean;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.ws.rs.PathParam;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.commons.collections4.CollectionUtils;

/**
 * Transfer object for a single attribute: the referenced schema name, the
 * attribute's string values and a read-only flag.
 */
@XmlRootElement(name = "attribute")
@XmlType
public class AttrTO extends AbstractBaseBean {

    private static final long serialVersionUID = 4941691338796323623L;

    /**
     * Fluent builder for {@link AttrTO}.
     */
    public static class Builder {

        private final AttrTO attr = new AttrTO();

        public Builder schema(final String schema) {
            attr.setSchema(schema);
            return this;
        }

        public Builder readonly(final boolean readonly) {
            attr.setReadonly(readonly);
            return this;
        }

        public Builder value(final String value) {
            attr.getValues().add(value);
            return this;
        }

        public Builder values(final String... values) {
            CollectionUtils.addAll(attr.getValues(), values);
            return this;
        }

        public Builder values(final Collection<String> values) {
            attr.getValues().addAll(values);
            return this;
        }

        public AttrTO build() {
            return attr;
        }
    }

    // Name of the schema that this attribute is referring to.
    private String schema;

    // String values held by this attribute.
    private final List<String> values = new ArrayList<>();

    // Whether this attribute is read-only or not.
    private boolean readonly = false;

    /**
     * @return the name of the schema that this attribute is referring to
     */
    public String getSchema() {
        return schema;
    }

    /**
     * @param schema name to be set
     */
    @PathParam("schema")
    public void setSchema(final String schema) {
        this.schema = schema;
    }

    /**
     * @return attribute values as strings
     */
    @XmlElementWrapper(name = "values")
    @XmlElement(name = "value")
    @JsonProperty("values")
    public List<String> getValues() {
        return values;
    }

    public boolean isReadonly() {
        return readonly;
    }

    public void setReadonly(final boolean readonly) {
        this.readonly = readonly;
    }
}
giacomolm/syncope
common/lib/src/main/java/org/apache/syncope/common/lib/to/AttrTO.java
Java
apache-2.0
3,398
/** @file

  A brief file description

  @section license License

  Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements.  See the NOTICE file
  distributed with this work for additional information
  regarding copyright ownership.  The ASF licenses this file
  to you under the Apache License, Version 2.0 (the
  "License"); you may not use this file except in compliance
  with the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
 */

#include "ts/ink_platform.h"
#include "ts/ink_memory.h"
#include "ts/ink_defs.h"
#include "ts/ink_assert.h"
#include "ts/ink_error.h"
#include "ts/ink_file.h"
#include "ts/ink_string.h"
#include "ts/ink_time.h"
#include "MgmtUtils.h"
#include "MultiFile.h"
#include "ExpandingArray.h"
#include "ts/TextBuffer.h"
#include "WebMgmtUtils.h"

/****************************************************************************
 *
 *  MultiFile.cc - base class to handle reading and displaying config
 *                  files and directories
 *
 *
 ****************************************************************************/

MultiFile::MultiFile()
{
  managedDir  = nullptr;
  dirDescript = nullptr;
}

// void MultiFile::addTableEntries(ExpandingArray* fileList, TextBuffer* output)
//
//   Renders each entry of fileList (the result of WalkFiles) as an HTML
//   table row (<tr>) with two cells: the HTML-escaped file name and its
//   creation time, appended to output.
//
void
MultiFile::addTableEntries(ExpandingArray *fileList, TextBuffer *output)
{
  int numFiles = fileList->getNumEntries();
  fileEntry *current;
  char *safeName;
  char dateBuf[64];
  const char dataOpen[]   = "\t<td>";
  const char dataClose[]  = "</td>\n";
  const int dataOpenLen   = strlen(dataOpen);
  const int dataCloseLen  = strlen(dataClose);

  for (int i = 0; i < numFiles; i++) {
    current = (fileEntry *)((*fileList)[i]);

    output->copyFrom("<tr>\n", 5);

    // File name cell: escape HTML metacharacters before emitting.
    output->copyFrom(dataOpen, dataOpenLen);
    safeName = substituteForHTMLChars(current->name);
    output->copyFrom(safeName, strlen(safeName));
    delete[] safeName;
    output->copyFrom(dataClose, dataCloseLen);

    // Creation-time cell; fall back to a placeholder if formatting fails.
    output->copyFrom(dataOpen, dataOpenLen);
    if (ink_ctime_r(&current->c_time, dateBuf) == nullptr) {
      ink_strlcpy(dateBuf, "<em>No time-stamp</em>", sizeof(dateBuf));
    }
    output->copyFrom(dateBuf, strlen(dateBuf));
    output->copyFrom(dataClose, dataCloseLen);

    output->copyFrom("</tr>\n", 6);
  }
}

// Mfresult MultiFile::WalkFiles(ExpandingArray* fileList)
//
//   Iterates through the managed directory and adds every managed file
//   into the parameter snapList
//
//   Only entries that contain a stat-able "records.config" are considered;
//   dot files and names rejected by isManaged() are skipped. The resulting
//   list is sorted by creation time.
//
MFresult
MultiFile::WalkFiles(ExpandingArray *fileList)
{
  struct dirent *dirEntry;
  DIR *dir;
  char *fileName;
  char *filePath;
  char *records_config_filePath;
  struct stat fileInfo;
  struct stat records_config_fileInfo;
  fileEntry *fileListEntry;

  if ((dir = opendir(managedDir)) == nullptr) {
    mgmt_log("[MultiFile::WalkFiles] Unable to open %s directory: %s: %s\n", dirDescript, managedDir, strerror(errno));
    return MF_NO_DIR;
  }

  while ((dirEntry = readdir(dir))) {
    fileName                = dirEntry->d_name;
    filePath                = newPathString(managedDir, fileName);
    records_config_filePath = newPathString(filePath, "records.config");

    if (stat(filePath, &fileInfo) < 0) {
      mgmt_log("[MultiFile::WalkFiles] Stat of a %s failed %s: %s\n", dirDescript, fileName, strerror(errno));
    } else {
      // Skip entries without a stat-able records.config inside.
      if (stat(records_config_filePath, &records_config_fileInfo) < 0) {
        delete[] filePath;
        delete[] records_config_filePath;
        continue;
      }
      // Ignore ., .., and any dot files
      if (*fileName != '.' && isManaged(fileName)) {
        fileListEntry         = (fileEntry *)ats_malloc(sizeof(fileEntry));
        fileListEntry->c_time = fileInfo.st_ctime;
        ink_strlcpy(fileListEntry->name, fileName, sizeof(fileListEntry->name));
        fileList->addEntry(fileListEntry);
      }
    }
    delete[] filePath;
    delete[] records_config_filePath;
  }

  closedir(dir);

  fileList->sortWithFunction(fileEntryCmpFunc);
  return MF_OK;
}

// bool MultiFile::isManaged(const char* fileName)
//
//   Base-class policy: every non-null name is managed; subclasses narrow this.
//
bool
MultiFile::isManaged(const char *fileName)
{
  if (fileName == nullptr) {
    return false;
  } else {
    return true;
  }
}

// void MultiFile::addSelectOptions(TextBuffer* output, ExpandingArray* options)
//
//   Emits each option as an HTML <option> element (value and label are the
//   same HTML-escaped string) followed by the closing </select> tag.
//
void
MultiFile::addSelectOptions(TextBuffer *output, ExpandingArray *options)
{
  const char selectEnd[]  = "</select>\n";
  const char option[]     = "\t<option value='";
  const int optionLen     = strlen(option);
  const char option_end[] = "'>";
  char *safeCurrent;

  int numOptions = options->getNumEntries();

  for (int i = 0; i < numOptions; i++) {
    output->copyFrom(option, optionLen);
    safeCurrent = substituteForHTMLChars((char *)((*options)[i]));
    output->copyFrom(safeCurrent, strlen(safeCurrent));
    output->copyFrom(option_end, strlen(option_end));
    output->copyFrom(safeCurrent, strlen(safeCurrent));
    delete[] safeCurrent;
    output->copyFrom("\n", 1);
  }
  output->copyFrom(selectEnd, strlen(selectEnd));
}

// int fileEntryCmpFunc(void* e1, void* e2)
//
//   a cmp function for fileEntry structs that can
//     used with qsort
//
//   compares c_time
//
int
fileEntryCmpFunc(const void *e1, const void *e2)
{
  fileEntry *entry1 = (fileEntry *)*(void **)e1;
  fileEntry *entry2 = (fileEntry *)*(void **)e2;

  if (entry1->c_time > entry2->c_time) {
    return 1;
  } else if (entry1->c_time < entry2->c_time) {
    return -1;
  } else {
    return 0;
  }
}

// char* MultiFile::newPathString(const char* s1, const char* s2)
//
//   creates a new string that is composed of s1/s2
//     Callee is responsible for deleting storage
//     Method makes sure there is no double slash between s1 and s2
//     The code is borrowed from ink_filepath_make with dynamic allocation.
//
char *
MultiFile::newPathString(const char *s1, const char *s2)
{
  char *newStr;
  int srcLen; // is the length of the src rootpath
  int addLen; // length of the addpath (s2) including its terminating NUL

  // Treat null as an empty path.
  if (!s2) {
    s2 = "";
  }
  addLen = strlen(s2) + 1;

  if (*s2 == '/') {
    // If addpath is rooted, then rootpath is unused.
    newStr = new char[addLen];
    ink_strlcpy(newStr, s2, addLen);
    return newStr;
  }
  if (!s1 || !*s1) {
    // If there's no rootpath return the addpath
    newStr = new char[addLen];
    ink_strlcpy(newStr, s2, addLen);
    return newStr;
  }

  srcLen = strlen(s1);
  // Room for s1, an optional joining '/', s2, and the NUL terminator.
  const int totalLen = srcLen + addLen + 1;
  newStr             = new char[totalLen];
  ink_assert(newStr != nullptr);

  // BUG FIX: the copy bounds were previously 'addLen' (sized from s2), which
  // truncated s1 whenever strlen(s1) >= strlen(s2) + 1, and made the second
  // bound 'addLen - srcLen' potentially negative. Bound both copies by the
  // actual space available in the destination buffer instead.
  ink_strlcpy(newStr, s1, totalLen);
  if (newStr[srcLen - 1] != '/') {
    newStr[srcLen++] = '/';
  }
  ink_strlcpy(&newStr[srcLen], s2, totalLen - srcLen);

  return newStr;
}
chenglongwei/trafficserver
mgmt/MultiFile.cc
C++
apache-2.0
6,925
exports.name = 'colors'; exports.category = 'color'; exports.homepage = 'https://github.com/marak/colors.js';
ek1437/PeerTutor
www.peertutor.com/node_modules/nodeman/docs/colors.meta.js
JavaScript
apache-2.0
110
#!/usr/bin/env python # -*- mode: python; encoding: utf-8 -*- """Test the hunt_view interface.""" import traceback from grr.gui import runtests_test from grr.lib import access_control from grr.lib import aff4 from grr.lib import flags from grr.lib import flow from grr.lib import flow_runner from grr.lib import hunts from grr.lib import output_plugin from grr.lib import rdfvalue from grr.lib import test_lib from grr.lib.flows.general import file_finder from grr.lib.flows.general import transfer from grr.lib.rdfvalues import client as rdf_client from grr.lib.rdfvalues import foreman as rdf_foreman from grr.lib.rdfvalues import paths as rdf_paths class TestHuntView(test_lib.GRRSeleniumTest): """Test the Cron view GUI.""" reason = "Felt like it!" def CreateSampleHunt(self, flow_runner_args=None, flow_args=None, stopped=False, output_plugins=None, client_limit=0, client_count=10, token=None): token = token or self.token self.client_ids = self.SetupClients(client_count) with hunts.GRRHunt.StartHunt( hunt_name="GenericHunt", flow_runner_args=flow_runner.FlowRunnerArgs( flow_name="GetFile"), flow_args=transfer.GetFileArgs( pathspec=rdf_paths.PathSpec( path="/tmp/evil.txt", pathtype=rdf_paths.PathSpec.PathType.OS, ) ), regex_rules=[rdf_foreman.ForemanAttributeRegex( attribute_name="GRR client", attribute_regex="GRR")], output_plugins=output_plugins or [], client_rate=0, client_limit=client_limit, token=token) as hunt: if not stopped: hunt.Run() with aff4.FACTORY.Open("aff4:/foreman", mode="rw", token=token) as foreman: for client_id in self.client_ids: foreman.AssignTasksToClient(client_id) self.hunt_urn = hunt.urn return aff4.FACTORY.Open(hunt.urn, mode="rw", token=token, age=aff4.ALL_TIMES) def CreateGenericHuntWithCollection(self, values=None): self.client_ids = self.SetupClients(10) if values is None: values = [rdfvalue.RDFURN("aff4:/sample/1"), rdfvalue.RDFURN("aff4:/C.0000000000000001/fs/os/c/bin/bash"), rdfvalue.RDFURN("aff4:/sample/3")] with hunts.GRRHunt.StartHunt( 
hunt_name="GenericHunt", regex_rules=[rdf_foreman.ForemanAttributeRegex( attribute_name="GRR client", attribute_regex="GRR")], output_plugins=[], token=self.token) as hunt: runner = hunt.GetRunner() runner.Start() with aff4.FACTORY.Create( runner.context.results_collection_urn, aff4_type="RDFValueCollection", mode="w", token=self.token) as collection: for value in values: collection.Add(value) return hunt.urn def SetupTestHuntView(self, client_limit=0, client_count=10): # Create some clients and a hunt to view. with self.CreateSampleHunt(client_limit=client_limit, client_count=client_count) as hunt: hunt.Log("TestLogLine") # Log an error just with some random traceback. hunt.LogClientError(self.client_ids[1], "Client Error 1", traceback.format_exc()) # Run the hunt. client_mock = test_lib.SampleHuntMock() test_lib.TestHuntHelper(client_mock, self.client_ids, False, self.token) hunt = aff4.FACTORY.Open(hunt.urn, token=self.token) all_count, _, _ = hunt.GetClientsCounts() if client_limit == 0: # No limit, so we should have all the clients self.assertEqual(all_count, client_count) else: self.assertEqual(all_count, min(client_count, client_limit)) def CheckState(self, state): self.WaitUntil(self.IsElementPresent, "css=div[state=\"%s\"]" % state) def testHuntView(self): """Test that we can see all the hunt data.""" with self.ACLChecksDisabled(): self.SetupTestHuntView() # Open up and click on View Hunts. self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") # Select a Hunt. self.Click("css=td:contains('GenericHunt')") # Check we can now see the details. self.WaitUntil(self.IsElementPresent, "css=dl.dl-hunt") self.WaitUntil(self.IsTextPresent, "Clients Scheduled") self.WaitUntil(self.IsTextPresent, "Hunt URN") self.WaitUntil(self.IsTextPresent, "Regex Rules") self.WaitUntil(self.IsTextPresent, "Integer Rules") # Click the Log Tab. 
self.Click("css=li[heading=Log]") self.WaitUntil(self.IsTextPresent, "TestLogLine") # Click the Error Tab. self.Click("css=li[heading=Errors]") self.WaitUntil(self.IsTextPresent, "Client Error 1") def testToolbarStateForStoppedHunt(self): with self.ACLChecksDisabled(): self.CreateSampleHunt(stopped=True) self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") # Select a Hunt. self.Click("css=td:contains('GenericHunt')") # Check we can now see the details. self.WaitUntil(self.IsElementPresent, "css=dl.dl-hunt") self.WaitUntil(self.IsTextPresent, "Clients Scheduled") self.WaitUntil(self.IsTextPresent, "Hunt URN") self.WaitUntil(self.IsElementPresent, "css=button[name=RunHunt]:not([disabled])") self.WaitUntil(self.IsElementPresent, "css=button[name=StopHunt][disabled]") self.WaitUntil(self.IsElementPresent, "css=button[name=ModifyHunt]:not([disabled])") def testToolbarStateForRunningHunt(self): with self.ACLChecksDisabled(): self.CreateSampleHunt(stopped=False) self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") # Select a Hunt. self.Click("css=td:contains('GenericHunt')") # Check we can now see the details. 
self.WaitUntil(self.IsElementPresent, "css=dl.dl-hunt") self.WaitUntil(self.IsTextPresent, "Clients Scheduled") self.WaitUntil(self.IsTextPresent, "Hunt URN") self.WaitUntil(self.IsElementPresent, "css=button[name=RunHunt][disabled]") self.WaitUntil(self.IsElementPresent, "css=button[name=StopHunt]:not([disabled])") self.WaitUntil(self.IsElementPresent, "css=button[name=ModifyHunt][disabled]") def testRunHunt(self): with self.ACLChecksDisabled(): hunt = self.CreateSampleHunt(stopped=True) self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") # Select a Hunt. self.Click("css=td:contains('GenericHunt')") # Click on Run button and check that dialog appears. self.Click("css=button[name=RunHunt]") self.WaitUntil(self.IsTextPresent, "Are you sure you want to run this hunt?") # Click on "Proceed" and wait for authorization dialog to appear. self.Click("css=button[name=Proceed]") # This should be rejected now and a form request is made. self.WaitUntil(self.IsTextPresent, "Create a new approval") self.Click("css=#acl_dialog button[name=Close]") # Wait for dialog to disappear. self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop") with self.ACLChecksDisabled(): self.GrantHuntApproval(hunt.urn) # Click on Run and wait for dialog again. self.Click("css=button[name=RunHunt]") self.WaitUntil(self.IsTextPresent, "Are you sure you want to run this hunt?") # Click on "Proceed" and wait for success label to appear. # Also check that "Proceed" button gets disabled. self.Click("css=button[name=Proceed]") self.WaitUntil(self.IsTextPresent, "Hunt started successfully!") self.assertTrue(self.IsElementPresent("css=button[name=Proceed][disabled]")) # Click on "Cancel" and check that dialog disappears. self.Click("css=button[name=Cancel]") self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop") # View should be refreshed automatically. 
self.WaitUntil(self.IsTextPresent, "GenericHunt") # Check the hunt is in a running state. self.CheckState("STARTED") def testStopHunt(self): with self.ACLChecksDisabled(): hunt = self.CreateSampleHunt(stopped=False) self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") # Select a Hunt. self.Click("css=td:contains('GenericHunt')") # Click on Stop button and check that dialog appears. self.Click("css=button[name=StopHunt]") self.WaitUntil(self.IsTextPresent, "Are you sure you want to stop this hunt?") # Click on "Proceed" and wait for authorization dialog to appear. self.Click("css=button[name=Proceed]") # This should be rejected now and a form request is made. self.WaitUntil(self.IsTextPresent, "Create a new approval") self.Click("css=#acl_dialog button[name=Close]") # Wait for dialog to disappear. self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop") with self.ACLChecksDisabled(): self.GrantHuntApproval(hunt.session_id) # Click on Stop and wait for dialog again. self.Click("css=button[name=StopHunt]") self.WaitUntil(self.IsTextPresent, "Are you sure you want to stop this hunt?") # Click on "Proceed" and wait for success label to appear. # Also check that "Proceed" button gets disabled. self.Click("css=button[name=Proceed]") self.WaitUntil(self.IsTextPresent, "Hunt stopped successfully") self.assertTrue(self.IsElementPresent("css=button[name=Proceed][disabled]")) # Click on "Cancel" and check that dialog disappears. self.Click("css=button[name=Cancel]") self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop") # View should be refreshed automatically. self.WaitUntil(self.IsTextPresent, "GenericHunt") # Check the hunt is not in a running state. 
self.CheckState("STOPPED") def testModifyHunt(self): with self.ACLChecksDisabled(): hunt = self.CreateSampleHunt(stopped=True) self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") # Select a Hunt. self.Click("css=td:contains('GenericHunt')") # Click on Modify button and check that dialog appears. self.Click("css=button[name=ModifyHunt]") self.WaitUntil(self.IsTextPresent, "Modify a hunt") self.Type("css=input[id=v_-client_limit]", "4483") self.Type("css=input[id=v_-expiry_time]", str( rdfvalue.Duration("5m").Expiry())) # Click on Proceed. self.Click("css=button[name=Proceed]") # This should be rejected now and a form request is made. self.WaitUntil(self.IsTextPresent, "Create a new approval") self.Click("css=#acl_dialog button[name=Close]") # Wait for dialog to disappear. self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop") # Now create an approval. with self.ACLChecksDisabled(): self.GrantHuntApproval(hunt.session_id) # Click on Modify button and check that dialog appears. self.Click("css=button[name=ModifyHunt]") self.WaitUntil(self.IsTextPresent, "Modify a hunt") self.Type("css=input[id=v_-client_limit]", "4483") self.Type("css=input[id=v_-expiry_time]", str( rdfvalue.Duration("5m").Expiry())) # Click on "Proceed" and wait for success label to appear. # Also check that "Proceed" button gets disabled. self.Click("css=button[name=Proceed]") self.WaitUntil(self.IsTextPresent, "Hunt modified successfully!") self.assertTrue(self.IsElementPresent("css=button[name=Proceed][disabled]")) # Click on "Cancel" and check that dialog disappears. self.Click("css=button[name=Cancel]") self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop") # View should be refreshed automatically. 
self.WaitUntil(self.IsTextPresent, "GenericHunt") self.WaitUntil(self.IsTextPresent, "4483") def testDeleteHunt(self): with self.ACLChecksDisabled(): # This needs to be created by a different user so we can test the # approval dialog. hunt = self.CreateSampleHunt( stopped=True, token=access_control.ACLToken( username="random user", reason="test")) self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") # Select a Hunt. self.Click("css=td:contains('GenericHunt')") # Click on delete button. self.Click("css=button[name=DeleteHunt]") self.WaitUntil(self.IsTextPresent, "Delete a hunt") # Click on Proceed. self.Click("css=button[name=Proceed]") # This should be rejected now and a form request is made. self.WaitUntil(self.IsTextPresent, "Create a new approval") self.Click("css=#acl_dialog button[name=Close]") # Wait for dialog to disappear. self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop") # Now create an approval. with self.ACLChecksDisabled(): self.GrantHuntApproval(hunt.session_id) # Select a hunt again, as it's deselected after approval dialog # disappears. TODO(user): if this behavior is not convenient, fix it. self.Click("css=td:contains('GenericHunt')") # Click on Delete button and check that dialog appears. self.Click("css=button[name=DeleteHunt]") self.WaitUntil(self.IsTextPresent, "Delete a hunt") # Click on "Proceed" and wait for success label to appear. # Also check that "Proceed" button gets disabled. self.Click("css=button[name=Proceed]") self.WaitUntil(self.IsTextPresent, "Hunt Deleted!") self.assertTrue(self.IsElementPresent("css=button[name=Proceed][disabled]")) # Click on "Cancel" and check that dialog disappears. 
self.Click("css=button[name=Cancel]") self.WaitUntilNot(self.IsVisible, "css=.modal-backdrop") def SetupHuntDetailView(self, failrate=2): """Create some clients and a hunt to view.""" with self.CreateSampleHunt() as hunt: hunt.LogClientError(self.client_ids[1], "Client Error 1", traceback.format_exc()) # Run the hunt. client_mock = test_lib.SampleHuntMock(failrate=failrate) test_lib.TestHuntHelper(client_mock, self.client_ids, False, self.token) def testHuntDetailView(self): """Test the detailed client view works.""" with self.ACLChecksDisabled(): self.SetupHuntDetailView(failrate=-1) # Open up and click on View Hunts then the first Hunt. self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") self.Click("css=td:contains('GenericHunt')") # Click the Overview Tab then the Details Link. self.Click("css=li[heading=Overview]") self.WaitUntil(self.IsTextPresent, "Hunt URN") self.Click("css=button[name=ViewHuntDetails]") self.WaitUntil(self.IsTextPresent, "Viewing Hunt aff4:/hunts/") self.WaitUntil(self.IsTextPresent, "COMPLETED") # Select the first client which should have errors. 
self.Click("css=td:contains('%s')" % self.client_ids[1].Basename()) self.WaitUntil(self.IsTextPresent, "Last Checkin") self.Click("css=a[renderer=HuntLogRenderer]") self.WaitUntil(self.IsTextPresent, "GetFile Flow Completed") self.Click("css=a[renderer=HuntErrorRenderer]") self.WaitUntil(self.IsTextPresent, "Client Error 1") self.Click("css=a[renderer=HuntHostInformationRenderer]") self.WaitUntil(self.IsTextPresent, "CLIENT_INFO") self.WaitUntil(self.IsTextPresent, "VFSGRRClient") def testHuntResultsView(self): with self.ACLChecksDisabled(): self.CreateGenericHuntWithCollection() self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") self.Click("css=td:contains('GenericHunt')") # Click the Results tab. self.Click("css=li[heading=Results]") self.WaitUntil(self.IsTextPresent, "aff4:/sample/1") self.WaitUntil(self.IsTextPresent, "aff4:/C.0000000000000001/fs/os/c/bin/bash") self.WaitUntil(self.IsTextPresent, "aff4:/sample/3") with self.ACLChecksDisabled(): self.GrantClientApproval("C.0000000000000001") self.Click("link=aff4:/C.0000000000000001/fs/os/c/bin/bash") self.WaitUntil(self.IsElementPresent, "css=li.active a:contains('Browse Virtual Filesystem')") def testHuntStatsView(self): with self.ACLChecksDisabled(): self.SetupTestHuntView() self.Open("/") self.WaitUntil(self.IsElementPresent, "client_query") self.Click("css=a[grrtarget=ManageHunts]") self.WaitUntil(self.IsTextPresent, "GenericHunt") self.Click("css=td:contains('GenericHunt')") # Click the Stats tab. 
self.Click("css=li[heading=Stats]") self.WaitUntil(self.IsTextPresent, "Total number of clients") self.WaitUntil(self.IsTextPresent, "10") self.WaitUntil(self.IsTextPresent, "User CPU mean") self.WaitUntil(self.IsTextPresent, "5.5") self.WaitUntil(self.IsTextPresent, "User CPU stdev") self.WaitUntil(self.IsTextPresent, "2.9") self.WaitUntil(self.IsTextPresent, "System CPU mean") self.WaitUntil(self.IsTextPresent, "11") self.WaitUntil(self.IsTextPresent, "System CPU stdev") self.WaitUntil(self.IsTextPresent, "5.7") self.WaitUntil(self.IsTextPresent, "Network bytes sent mean") self.WaitUntil(self.IsTextPresent, "16.5") self.WaitUntil(self.IsTextPresent, "Network bytes sent stdev") self.WaitUntil(self.IsTextPresent, "8.6") def testDoesNotShowGenerateArchiveButtonForNonExportableRDFValues(self): values = [rdf_client.Process(pid=1), rdf_client.Process(pid=42423)] with self.ACLChecksDisabled(): self.CreateGenericHuntWithCollection(values=values) self.Open("/") self.Click("css=a[grrtarget=ManageHunts]") self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Results]") self.WaitUntil(self.IsTextPresent, "42423") self.WaitUntilNot(self.IsTextPresent, "Files referenced in this collection can be downloaded") def testDoesNotShowGenerateArchiveButtonWhenResultsCollectionIsEmpty(self): with self.ACLChecksDisabled(): self.CreateGenericHuntWithCollection([]) self.Open("/") self.Click("css=a[grrtarget=ManageHunts]") self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Results]") self.WaitUntil(self.IsTextPresent, "Value") self.WaitUntilNot(self.IsTextPresent, "Files referenced in this collection can be downloaded") def testShowsGenerateArchiveButtonForFileFinderHunt(self): stat_entry = rdf_client.StatEntry(aff4path="aff4:/foo/bar") values = [file_finder.FileFinderResult(stat_entry=stat_entry)] with self.ACLChecksDisabled(): self.CreateGenericHuntWithCollection(values=values) self.Open("/") self.Click("css=a[grrtarget=ManageHunts]") 
self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Results]") self.WaitUntil(self.IsTextPresent, "Files referenced in this collection can be downloaded") def testHuntAuthorizationIsRequiredToGenerateResultsArchive(self): stat_entry = rdf_client.StatEntry(aff4path="aff4:/foo/bar") values = [file_finder.FileFinderResult(stat_entry=stat_entry)] with self.ACLChecksDisabled(): self.CreateGenericHuntWithCollection(values=values) self.Open("/") self.Click("css=a[grrtarget=ManageHunts]") self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Results]") self.Click("css=button.DownloadButton") self.WaitUntil(self.IsElementPresent, "acl_dialog") def testGenerateZipButtonGetsDisabledAfterClick(self): stat_entry = rdf_client.StatEntry(aff4path="aff4:/foo/bar") values = [file_finder.FileFinderResult(stat_entry=stat_entry)] with self.ACLChecksDisabled(): hunt_urn = self.CreateGenericHuntWithCollection(values=values) self.GrantHuntApproval(hunt_urn) self.Open("/") self.Click("css=a[grrtarget=ManageHunts]") self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Results]") self.Click("css=button.DownloadButton") self.WaitUntil(self.IsElementPresent, "css=button.DownloadButton[disabled]") self.WaitUntil(self.IsTextPresent, "Generation has started") def testStartsZipGenerationWhenGenerateZipButtonIsClicked(self): stat_entry = rdf_client.StatEntry(aff4path="aff4:/foo/bar") values = [file_finder.FileFinderResult(stat_entry=stat_entry)] with self.ACLChecksDisabled(): hunt_urn = self.CreateGenericHuntWithCollection(values=values) self.GrantHuntApproval(hunt_urn) self.Open("/") self.Click("css=a[grrtarget=ManageHunts]") self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Results]") self.Click("css=button.DownloadButton") self.WaitUntil(self.IsTextPresent, "Generation has started") with self.ACLChecksDisabled(): flows_dir = aff4.FACTORY.Open("aff4:/flows") flows = list(flows_dir.OpenChildren()) export_flows = [ f for 
f in flows if f.__class__.__name__ == "ExportHuntResultFilesAsArchive"] self.assertEqual(len(export_flows), 1) self.assertEqual(export_flows[0].args.hunt_urn, hunt_urn) def testListOfCSVFilesIsNotShownWhenHuntProducedNoResults(self): with self.ACLChecksDisabled(): self.client_ids = self.SetupClients(10) # Create hunt without results. self.CreateSampleHunt(output_plugins=[ output_plugin.OutputPluginDescriptor(plugin_name="CSVOutputPlugin")]) self.Open("/#main=ManageHunts") self.Click("css=td:contains('GenericHunt')") # Click the Results tab. self.Click("css=li[heading=Results]") self.WaitUntil(self.IsTextPresent, "CSV Output") self.WaitUntil(self.IsTextPresent, "Nothing was written yet") def testShowsFilesAndAllowsDownloadWhenCSVExportIsUsed(self): with self.ACLChecksDisabled(): self.client_ids = self.SetupClients(10) # Create hunt. self.CreateSampleHunt(output_plugins=[ output_plugin.OutputPluginDescriptor(plugin_name="CSVOutputPlugin")]) # Actually run created hunt. client_mock = test_lib.SampleHuntMock() test_lib.TestHuntHelper(client_mock, self.client_ids, False, self.token) # Make sure results are processed. flow_urn = flow.GRRFlow.StartFlow(flow_name="ProcessHuntResultsCronFlow", token=self.token) for _ in test_lib.TestFlowHelper(flow_urn, token=self.token): pass self.Open("/#main=ManageHunts") self.Click("css=td:contains('GenericHunt')") # Click the Results tab. self.Click("css=li[heading=Results]") self.WaitUntil(self.IsTextPresent, "Following files were written") # Check that displayed file can be downloaded. 
self.Click("css=a:contains('ExportedFile.csv')") self.WaitUntil(self.FileWasDownloaded) def testLogsTabShowsLogsFromAllClients(self): with self.ACLChecksDisabled(): self.SetupHuntDetailView(failrate=-1) self.Open("/#main=ManageHunts") self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Log]") for client_id in self.client_ids: self.WaitUntil(self.IsTextPresent, str(client_id)) self.WaitUntil(self.IsTextPresent, "Finished reading " + str(client_id.Add("fs/os/tmp/evil.txt"))) def testLogsTabFiltersLogsByString(self): with self.ACLChecksDisabled(): self.SetupHuntDetailView(failrate=-1) self.Open("/#main=ManageHunts") self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Log]") self.Type("css=grr-hunt-log input.search-query", self.client_ids[-1].Basename()) self.Click("css=grr-hunt-log button:contains('Filter')") self.WaitUntil(self.IsTextPresent, str(self.client_ids[-1])) self.WaitUntil(self.IsTextPresent, "Finished reading " + str(self.client_ids[-1].Add("fs/os/tmp/evil.txt"))) for client_id in self.client_ids[:-1]: self.WaitUntilNot(self.IsTextPresent, str(client_id)) self.WaitUntilNot(self.IsTextPresent, "Finished reading " + str(client_id.Add("fs/os/tmp/evil.txt"))) def testErrorsTabShowsErrorsFromAllClients(self): with self.ACLChecksDisabled(): self.SetupHuntDetailView(failrate=1) self.Open("/#main=ManageHunts") self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Errors]") for client_id in self.client_ids: self.WaitUntil(self.IsTextPresent, str(client_id)) def testErrorsTabFiltersErrorsByString(self): with self.ACLChecksDisabled(): self.SetupHuntDetailView(failrate=1) self.Open("/#main=ManageHunts") self.Click("css=td:contains('GenericHunt')") self.Click("css=li[heading=Errors]") self.Type("css=grr-hunt-errors input.search-query", self.client_ids[-1].Basename()) self.Click("css=grr-hunt-errors button:contains('Filter')") self.WaitUntil(self.IsTextPresent, str(self.client_ids[-1])) for client_id in 
self.client_ids[:-1]: self.WaitUntilNot(self.IsTextPresent, str(client_id)) def main(argv): # Run the full test suite runtests_test.SeleniumTestProgram(argv=argv) if __name__ == "__main__": flags.StartMain(main)
darrenbilby/grr
gui/plugins/hunt_view_test.py
Python
apache-2.0
26,383
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.dmn.validation.dtanalysis; import java.io.IOException; import java.io.Reader; import java.math.BigDecimal; import java.util.Arrays; import java.util.List; import org.junit.Test; import org.kie.dmn.api.core.DMNMessage; import org.kie.dmn.feel.runtime.Range.RangeBoundary; import org.kie.dmn.validation.dtanalysis.model.Bound; import org.kie.dmn.validation.dtanalysis.model.DTAnalysis; import org.kie.dmn.validation.dtanalysis.model.Hyperrectangle; import org.kie.dmn.validation.dtanalysis.model.Interval; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertThat; import static org.kie.dmn.validation.DMNValidator.Validation.ANALYZE_DECISION_TABLE; import static org.kie.dmn.validation.DMNValidator.Validation.VALIDATE_COMPILATION; public class AgeKittenTest extends AbstractDTAnalysisTest { @Test public void test_AgeKitten_domainOnTable() { List<DMNMessage> validate = validator.validate(getReader("AgeKitten-domainOnTable.dmn"), VALIDATE_COMPILATION, ANALYZE_DECISION_TABLE); checkAnalysis(validate); } @Test public void test_AgeKitten() { List<DMNMessage> validate = validator.validate(getReader("AgeKitten.dmn"), VALIDATE_COMPILATION, ANALYZE_DECISION_TABLE); checkAnalysis(validate); } @Test public void test_AgeKittenImport() throws IOException { try (final Reader reader0 = 
getReader("AgeKittenItemDef.dmn"); final Reader reader1 = getReader("AgeKittenImporting.dmn");) { final List<DMNMessage> validate = validator.validateUsing(VALIDATE_COMPILATION, ANALYZE_DECISION_TABLE) .theseModels(reader0, reader1); checkAnalysis(validate); } } private void checkAnalysis(List<DMNMessage> validate) { DTAnalysis analysis = getAnalysis(validate, "_5e3e4546-69c2-43f2-b93a-7ea285878ca0"); assertThat(analysis.getGaps(), hasSize(2)); @SuppressWarnings({"unchecked", "rawtypes"}) List<Hyperrectangle> gaps = Arrays.asList(new Hyperrectangle(2, Arrays.asList(Interval.newFromBounds(new Bound(new BigDecimal("0"), RangeBoundary.CLOSED, null), new Bound(new BigDecimal("12"), RangeBoundary.OPEN, null)))), new Hyperrectangle(2, Arrays.asList(Interval.newFromBounds(new Bound(new BigDecimal("15"), RangeBoundary.CLOSED, null), new Bound(new BigDecimal("18"), RangeBoundary.OPEN, null)), Interval.newFromBounds(new Bound("Dog", RangeBoundary.CLOSED, null), new Bound("Dog", RangeBoundary.CLOSED, null))))); assertThat(gaps, hasSize(2)); // Assert GAPS assertThat(analysis.getGaps(), contains(gaps.toArray())); // assert OVERLAPs count. assertThat(analysis.getOverlaps(), hasSize(0)); } }
droolsjbpm/drools
kie-dmn/kie-dmn-validation/src/test/java/org/kie/dmn/validation/dtanalysis/AgeKittenTest.java
Java
apache-2.0
5,452
/* * Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.durableexecutor.impl; import com.hazelcast.core.HazelcastInstanceNotActiveException; import com.hazelcast.core.Member; import com.hazelcast.core.PartitionAware; import com.hazelcast.durableexecutor.DurableExecutorService; import com.hazelcast.durableexecutor.DurableExecutorServiceFuture; import com.hazelcast.durableexecutor.impl.operations.DisposeResultOperation; import com.hazelcast.durableexecutor.impl.operations.RetrieveAndDisposeResultOperation; import com.hazelcast.durableexecutor.impl.operations.RetrieveResultOperation; import com.hazelcast.durableexecutor.impl.operations.ShutdownOperation; import com.hazelcast.durableexecutor.impl.operations.TaskOperation; import com.hazelcast.executor.impl.RunnableAdapter; import com.hazelcast.nio.Bits; import com.hazelcast.nio.serialization.Data; import com.hazelcast.spi.AbstractDistributedObject; import com.hazelcast.spi.ExecutionService; import com.hazelcast.spi.InternalCompletableFuture; import com.hazelcast.spi.NodeEngine; import com.hazelcast.spi.Operation; import com.hazelcast.spi.OperationService; import com.hazelcast.spi.serialization.SerializationService; import com.hazelcast.util.FutureUtil; import com.hazelcast.util.executor.CompletedFuture; import com.hazelcast.util.executor.DelegatingFuture; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import 
java.util.List; import java.util.Random; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.logging.Level; import static com.hazelcast.durableexecutor.impl.DistributedDurableExecutorService.SERVICE_NAME; import static com.hazelcast.util.FutureUtil.logAllExceptions; import static com.hazelcast.util.FutureUtil.waitWithDeadline; import static com.hazelcast.util.Preconditions.checkNotNull; public class DurableExecutorServiceProxy extends AbstractDistributedObject<DistributedDurableExecutorService> implements DurableExecutorService { private static final FutureUtil.ExceptionHandler WHILE_SHUTDOWN_EXCEPTION_HANDLER = logAllExceptions("Exception while ExecutorService shutdown", Level.FINEST); private final Random random = new Random(); private final int partitionCount; private final String name; DurableExecutorServiceProxy(NodeEngine nodeEngine, DistributedDurableExecutorService service, String name) { super(nodeEngine, service); this.name = name; this.partitionCount = nodeEngine.getPartitionService().getPartitionCount(); } @Override public <T> Future<T> retrieveResult(long uniqueId) { int partitionId = Bits.extractInt(uniqueId, false); int sequence = Bits.extractInt(uniqueId, true); Operation op = new RetrieveResultOperation(name, sequence).setPartitionId(partitionId); return invokeOnPartition(op); } @Override public void disposeResult(long uniqueId) { int partitionId = Bits.extractInt(uniqueId, false); int sequence = Bits.extractInt(uniqueId, true); Operation op = new DisposeResultOperation(name, sequence).setPartitionId(partitionId); InternalCompletableFuture<?> future = invokeOnPartition(op); future.join(); } @Override public <T> Future<T> retrieveAndDisposeResult(long uniqueId) { int partitionId = 
Bits.extractInt(uniqueId, false); int sequence = Bits.extractInt(uniqueId, true); Operation op = new RetrieveAndDisposeResultOperation(name, sequence).setPartitionId(partitionId); return invokeOnPartition(op); } @Override public void execute(Runnable task) { RunnableAdapter runnableAdapter = createRunnableAdapter(task); int partitionId = getTaskPartitionId(runnableAdapter); submitToPartition(runnableAdapter, partitionId, null); } @Override public void executeOnKeyOwner(Runnable task, Object key) { RunnableAdapter runnableAdapter = createRunnableAdapter(task); int partitionId = getPartitionId(key); submitToPartition(runnableAdapter, partitionId, null); } @Override public <T> DurableExecutorServiceFuture<T> submit(Runnable task, T result) { RunnableAdapter<T> runnableAdapter = createRunnableAdapter(task); int partitionId = getTaskPartitionId(runnableAdapter); return submitToPartition(runnableAdapter, partitionId, result); } @Override public DurableExecutorServiceFuture<?> submit(Runnable task) { RunnableAdapter<?> runnableAdapter = createRunnableAdapter(task); int partitionId = getTaskPartitionId(runnableAdapter); return submitToPartition(runnableAdapter, partitionId, null); } public <T> DurableExecutorServiceFuture<T> submit(Callable<T> task) { int partitionId = getTaskPartitionId(task); return submitToPartition(task, partitionId, null); } public <T> DurableExecutorServiceFuture<T> submitToKeyOwner(Callable<T> task, Object key) { int partitionId = getPartitionId(key); return submitToPartition(task, partitionId, null); } @Override public DurableExecutorServiceFuture<?> submitToKeyOwner(Runnable task, Object key) { RunnableAdapter<?> runnableAdapter = createRunnableAdapter(task); int partitionId = getPartitionId(key); return submitToPartition(runnableAdapter, partitionId, null); } @Override public <T> List<Future<T>> invokeAll(Collection<? 
extends Callable<T>> tasks) throws InterruptedException { throw new UnsupportedOperationException(); } @Override public <T> List<Future<T>> invokeAll(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) throws InterruptedException { throw new UnsupportedOperationException(); } @Override public <T> T invokeAny(Collection<? extends Callable<T>> tasks) throws InterruptedException, ExecutionException { throw new UnsupportedOperationException(); } @Override public <T> T invokeAny(Collection<? extends Callable<T>> tasks, long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException { throw new UnsupportedOperationException(); } @Override public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException { return false; } @Override public void shutdown() { NodeEngine nodeEngine = getNodeEngine(); Collection<Member> members = nodeEngine.getClusterService().getMembers(); OperationService operationService = nodeEngine.getOperationService(); Collection<Future> calls = new LinkedList<Future>(); for (Member member : members) { if (member.localMember()) { getService().shutdownExecutor(name); } else { ShutdownOperation op = new ShutdownOperation(name); Future f = operationService.invokeOnTarget(SERVICE_NAME, op, member.getAddress()); calls.add(f); } } waitWithDeadline(calls, 1, TimeUnit.SECONDS, WHILE_SHUTDOWN_EXCEPTION_HANDLER); } @Override public List<Runnable> shutdownNow() { shutdown(); return Collections.emptyList(); } @Override public boolean isShutdown() { try { return getService().isShutdown(name); } catch (HazelcastInstanceNotActiveException e) { return true; } } @Override public boolean isTerminated() { return isShutdown(); } @Override public String getName() { return name; } @Override public String getServiceName() { return SERVICE_NAME; } @Override protected void throwNotActiveException() { throw new RejectedExecutionException(); } private <T> DurableExecutorServiceFuture<T> 
submitToPartition(Callable<T> task, int partitionId, T defaultValue) { checkNotNull(task, "task can't be null"); SerializationService serializationService = getNodeEngine().getSerializationService(); Data taskData = serializationService.toData(task); TaskOperation operation = new TaskOperation(name, taskData); operation.setPartitionId(partitionId); InternalCompletableFuture<Integer> future = invokeOnPartition(operation); int sequence; try { sequence = future.get(); } catch (Throwable t) { CompletedFuture<T> completedFuture = new CompletedFuture<T>(serializationService, t, getAsyncExecutor()); return new DurableExecutorServiceDelegateFuture<T>(completedFuture, serializationService, null, -1); } Operation op = new RetrieveResultOperation(name, sequence).setPartitionId(partitionId); InternalCompletableFuture<T> internalCompletableFuture = invokeOnPartition(op); long taskId = Bits.combineToLong(partitionId, sequence); return new DurableExecutorServiceDelegateFuture<T>(internalCompletableFuture, serializationService, defaultValue, taskId); } private ExecutorService getAsyncExecutor() { return getNodeEngine().getExecutionService().getExecutor(ExecutionService.ASYNC_EXECUTOR); } private <T> RunnableAdapter<T> createRunnableAdapter(Runnable command) { checkNotNull(command, "Command can't be null"); return new RunnableAdapter<T>(command); } private <T> int getTaskPartitionId(Callable<T> task) { if (task instanceof PartitionAware) { Object partitionKey = ((PartitionAware) task).getPartitionKey(); if (partitionKey != null) { return getPartitionId(partitionKey); } } return random.nextInt(partitionCount); } private int getPartitionId(Object key) { return getNodeEngine().getPartitionService().getPartitionId(key); } private static class DurableExecutorServiceDelegateFuture<T> extends DelegatingFuture<T> implements DurableExecutorServiceFuture<T> { final long taskId; public DurableExecutorServiceDelegateFuture(InternalCompletableFuture future, SerializationService 
serializationService, T defaultValue, long taskId) { super(future, serializationService, defaultValue); this.taskId = taskId; } @Override public long getTaskId() { return taskId; } } }
tombujok/hazelcast
hazelcast/src/main/java/com/hazelcast/durableexecutor/impl/DurableExecutorServiceProxy.java
Java
apache-2.0
11,468
/** * Copyright 2005-2016 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.fabric8.apmagent.metrics; import io.fabric8.apmagent.ApmAgent; import io.fabric8.apmagent.ApmConfiguration; import io.fabric8.apmagent.ClassInfo; import io.fabric8.apmagent.MethodDescription; import org.jolokia.jmx.JolokiaMBeanServerUtil; import org.jolokia.jvmagent.JolokiaServer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.management.InstanceAlreadyExistsException; import javax.management.MBeanRegistrationException; import javax.management.MBeanServer; import javax.management.NotCompliantMBeanException; import javax.management.ObjectInstance; import javax.management.ObjectName; import java.lang.management.ManagementFactory; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; public class ApmAgentContext { private static final Logger LOG = LoggerFactory.getLogger(ApmAgent.class); private final String DEFAULT_DOMAIN = "io.fabric8.apmagent"; private final long HOUSE_KEEPING_TIME = TimeUnit.SECONDS.toMillis(2); private final ConcurrentMap<String, ClassInfo> allMethods = new ConcurrentHashMap<>(); private AtomicBoolean initialized = new AtomicBoolean(); 
private AtomicBoolean started = new AtomicBoolean(); private ConcurrentMap<Thread, ThreadMetrics> threadMetricsMap = new ConcurrentHashMap<>(); private ConcurrentMap<String, MethodMetrics> methodMetricsMap = new ConcurrentHashMap<>(); private ConcurrentMap<Object, ObjectName> objectNameMap = new ConcurrentHashMap<>(); private MBeanServer mBeanServer; private JolokiaServer jolokiaServer; private final ApmAgent apmAgent; private ObjectName agentObjectName; private ObjectName configurationObjectName; private final ApmConfiguration configuration; private final MonitoredMethodMetrics monitoredMethodMetrics; private AtomicBoolean doHouseKeeping = new AtomicBoolean(); private Thread backgroundThread; private boolean monitorByDefault = true; public ApmAgentContext(ApmAgent agent) { this.apmAgent = agent; this.configuration = agent.getConfiguration(); this.monitoredMethodMetrics = new MonitoredMethodMetrics(this); this.monitoredMethodMetrics.setMonitorSize(configuration.getMethodMetricDepth()); } public void enterMethod(Thread currentThread, String fullMethodName, boolean alwaysActive) { if (isInitialized()) { ThreadMetrics threadMetrics = threadMetricsMap.get(currentThread); if (threadMetrics == null) { threadMetrics = new ThreadMetrics(this, currentThread); threadMetricsMap.put(currentThread, threadMetrics); } threadMetrics.enter(fullMethodName, alwaysActive); MethodMetrics methodMetrics = methodMetricsMap.get(fullMethodName); if (methodMetrics == null) { methodMetrics = new MethodMetrics(fullMethodName); methodMetrics.setActive(isMonitorByDefault()); methodMetricsMap.putIfAbsent(fullMethodName, methodMetrics); } } } public void exitMethod(Thread currentThread, String methodName, boolean alwaysActive) { if (isInitialized()) { ThreadMetrics threadMetrics = threadMetricsMap.get(currentThread); long elapsed = -1; if (threadMetrics != null) { elapsed = threadMetrics.exit(methodName, alwaysActive); } if (elapsed >= 0) { MethodMetrics methodMetrics = 
methodMetricsMap.get(methodName); if (methodMetrics != null) { methodMetrics.update(elapsed); } } doHouseKeeping(); } } public void initialize() { if (initialized.compareAndSet(false, true)) { try { agentObjectName = new ObjectName(DEFAULT_DOMAIN, "type", "apmAgent"); registerMBean(agentObjectName, apmAgent); configurationObjectName = new ObjectName(DEFAULT_DOMAIN, "type", "configuration"); registerMBean(configurationObjectName, configuration); } catch (Throwable e) { LOG.error("Failed to register apmAgent mbeans with mBeanServer ", e); } } } public void start() { if (initialized.get()) { if (started.compareAndSet(false, true)) { backgroundThread = new Thread(new Runnable() { @Override public void run() { while (started.get()) { try { Thread.sleep(HOUSE_KEEPING_TIME); doHouseKeeping.set(true); } catch (Throwable e) { } } } }); backgroundThread.setDaemon(true); backgroundThread.start(); } } } void doHouseKeeping() { //the time is going to be the elapsed time from the latest method call //its not going to be terribly accurate - but then it doesn't really need to be if (doHouseKeeping.compareAndSet(true, false)) { try { List<ThreadMetrics> threadMetricsList = getThreadMetrics(); for (ThreadMetrics tm : threadMetricsList) { if (tm.isDead()) { tm.destroy(); threadMetricsMap.remove(tm.getThread()); } } monitoredMethodMetrics.calculateMethodMetrics(getMethodMetrics()); for (ThreadMetrics threadMetrics : threadMetricsList) { threadMetrics.calculateMethodMetrics(); } } catch (Throwable e) { e.printStackTrace(); } } } public void stop() { if (initialized.get() && started.compareAndSet(true, false)) { for (ObjectName objectName : objectNameMap.values()) { unregisterMBean(objectName); } objectNameMap.clear(); methodMetricsMap.clear(); threadMetricsMap.clear(); } } public void shutDown() { if (initialized.compareAndSet(true, false)) { stop(); unregisterMBean(configurationObjectName); unregisterMBean(agentObjectName); if (jolokiaServer != null) { jolokiaServer.stop(); 
jolokiaServer = null; } mBeanServer = null; } } public ClassInfo getClassInfo(String className) { String key = className.replace('/', '.'); ClassInfo result = allMethods.get(key); if (result == null) { ClassInfo classInfo = new ClassInfo(); classInfo.setClassName(key); result = allMethods.putIfAbsent(key, classInfo); if (result == null) { result = classInfo; } } return result; } public List<String> getTransformedMethods() { List<String> result = new ArrayList<>(); for (ClassInfo classInfo : allMethods.values()) { for (String methodName : classInfo.getAllTransformedMethodNames()) { result.add(classInfo.getClassName() + "@" + methodName); } } return result; } public List<String> getAllMethods() { List<String> result = new ArrayList<>(); for (ClassInfo classInfo : allMethods.values()) { for (String methodName : classInfo.getAllMethodNames()) { result.add(classInfo.getClassName() + "@" + methodName); } } return result; } public List<ThreadMetrics> getThreadMetrics() { List<ThreadMetrics> result = new ArrayList<>(threadMetricsMap.values()); Collections.sort(result, new Comparator<ThreadMetrics>() { @Override public int compare(ThreadMetrics threadMetrics1, ThreadMetrics threadMetrics2) { return (int) (threadMetrics2.getCpuTime() - threadMetrics1.getCpuTime()); } }); return result; } public List<? 
extends MethodMetrics> getMethodMetrics() { return MethodMetrics.sortedMetrics(methodMetricsMap.values()); } public boolean isInitialized() { return initialized.get(); } public ApmConfiguration getConfiguration() { return configuration; } public boolean isMonitorByDefault() { return monitorByDefault; } public void setMonitorByDefault(boolean monitorByDefault) { this.monitorByDefault = monitorByDefault; } public void setActive(String fullMethodName, boolean flag) { if (isInitialized()) { for (ThreadMetrics threadMetrics : threadMetricsMap.values()) { threadMetrics.setActive(fullMethodName, flag); } MethodMetrics methodMetrics = methodMetricsMap.get(fullMethodName); if (methodMetrics != null) { methodMetrics.setActive(flag); } } } public List<ClassInfo> buildDeltaList() { List<ClassInfo> result = new ArrayList<>(); for (ClassInfo classInfo : allMethods.values()) { if (classInfo.isTransformed()) { //check to see its still should be audited if (configuration.isAudit(classInfo.getClassName())) { boolean retransform = false; //check to see if there's a change to methods that should be transformed Set<String> transformedMethodNames = classInfo.getAllTransformedMethodNames(); for (String methodName : transformedMethodNames) { if (!configuration.isAudit(classInfo.getClassName(), methodName)) { retransform = true; break; } } if (!retransform) { //check to see if there are methods that should now be audited but weren't Set<String> allMethodNames = classInfo.getAllMethodNames(); for (String methodName : allMethodNames) { if (!transformedMethodNames.contains(methodName) && configuration.isAudit(classInfo.getClassName(), methodName)) { retransform = true; break; } } } if (retransform) { result.add(classInfo); } } else { //we were once audited - but now need to be removed result.add(classInfo); } } else if (configuration.isAudit(classInfo.getClassName())) { if (classInfo.isCanTransform()) { result.add(classInfo); } } } return result; } public void resetMethods(ClassInfo 
classInfo) { Collection<MethodDescription> list = classInfo.getTransformedMethodDescriptions(); for (MethodDescription methodDescription : list) { if (!configuration.isAudit(classInfo.getClassName(), methodDescription.getMethodName())) { remove(methodDescription); } } } public void resetAll(ClassInfo classInfo) { Collection<MethodDescription> list = classInfo.getTransformedMethodDescriptions(); for (MethodDescription methodDescription : list) { remove(methodDescription); } classInfo.resetTransformed(); } public void methodMetricsDepthChanged() { monitoredMethodMetrics.setMonitorSize(configuration.getMethodMetricDepth()); } public void threadMetricsDepthChanged() { for (ThreadMetrics threadMetrics : threadMetricsMap.values()) { threadMetrics.setMonitorSize(configuration.getThreadMetricDepth()); } } private void remove(MethodDescription methodDescription) { MethodMetrics methodMetrics = this.methodMetricsMap.remove(methodDescription.getFullMethodName()); for (ThreadMetrics threadMetrics : threadMetricsMap.values()) { threadMetrics.remove(methodDescription.getFullMethodName()); } } protected ObjectInstance registerMBean(ObjectName objectName, Object object) throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException { MBeanServer server = getMBeanServer(); if (server != null && !server.isRegistered(objectName)) { return server.registerMBean(object, objectName); } return null; } protected void unregisterMBean(ObjectName objectName) { MBeanServer beanServer = getMBeanServer(); if (objectName != null && beanServer != null && beanServer.isRegistered(objectName)) { try { beanServer.unregisterMBean(objectName); } catch (Throwable e) { LOG.error("Failed to unregister " + objectName, e); } } } void registerMethodMetricsMBean(int rank, MethodMetricsProxy methodMetrics) { try { ObjectName objectName = new ObjectName(DEFAULT_DOMAIN + ":" + "type=MethodMetrics" + ",rank=" + ObjectName.quote("rank" + rank)); System.err.println("registered " + 
objectName); registerMBean(objectName, methodMetrics); objectNameMap.put(methodMetrics, objectName); } catch (Throwable e) { LOG.error("Failed to register mbean " + methodMetrics.toString(), e); } } void registerMethodMetricsMBean(String threadName, long threadId, int rank, MethodMetricsProxy threadMetrics) { try { String threadIdentity = threadName + "[" + threadId + "]"; ObjectName objectName = new ObjectName(DEFAULT_DOMAIN + ":" + "type=ThreadContextMetrics" + ",threadName=" + ObjectName.quote(threadIdentity) + ",rank=" + ObjectName.quote("rank" + rank)); registerMBean(objectName, threadMetrics); objectNameMap.put(threadMetrics, objectName); } catch (Throwable e) { LOG.error("Failed to register mbean " + threadMetrics.toString(), e); } } void unregisterMethodMetricsMBean(MethodMetricsProxy methodMetrics) { ObjectName objectName = objectNameMap.remove(methodMetrics); unregisterMBean(objectName); } private synchronized MBeanServer getMBeanServer() { if (mBeanServer == null) { // return platform mbean server if the option is specified. if (configuration.isUsePlatformMBeanServer()) { mBeanServer = ManagementFactory.getPlatformMBeanServer(); } else { mBeanServer = JolokiaMBeanServerUtil.getJolokiaMBeanServer(); } } return mBeanServer; } }
jludvice/fabric8
apm/src/main/java/io/fabric8/apmagent/metrics/ApmAgentContext.java
Java
apache-2.0
16,412
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib import constants from pylib import flag_changer from pylib.device import intent from pylib.instrumentation import test_options as instr_test_options from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, test_options, device, shard_index, test_pkg): """Create a new TestRunner. Args: test_options: A UIAutomatorOptions object. device: Attached android device. shard_index: Shard index. test_pkg: A TestPackage object. """ # Create an InstrumentationOptions object to pass to the super class instrumentation_options = instr_test_options.InstrumentationOptions( test_options.tool, test_options.cleanup_test_files, test_options.push_deps, test_options.annotations, test_options.exclude_annotations, test_options.test_filter, test_options.test_data, test_options.save_perf_json, test_options.screenshot_failures, wait_for_debugger=False, coverage_dir=None, test_apk=None, test_apk_path=None, test_apk_jar_path=None, test_support_apk_path=None) super(TestRunner, self).__init__(instrumentation_options, device, shard_index, test_pkg) cmdline_file = constants.PACKAGE_INFO[test_options.package].cmdline_file self.flags = None if cmdline_file: self.flags = flag_changer.FlagChanger(self.device, cmdline_file) self._package = constants.PACKAGE_INFO[test_options.package].package self._activity = constants.PACKAGE_INFO[test_options.package].activity #override def InstallTestPackage(self): self.test_pkg.Install(self.device) #override def PushDataDeps(self): pass #override def _RunTest(self, test, timeout): self.device.ClearApplicationState(self._package) if self.flags: if 'Feature:FirstRunExperience' in 
self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) self.device.StartActivity( intent.Intent(action='android.intent.action.MAIN', activity=self._activity, package=self._package), blocking=True, force_stop=True) return self.device.old_interface.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout)
appknox/xysec_adb
xysec_adb/pylib/uiautomator/test_runner.py
Python
apache-2.0
2,767
/* * Copyright 2019 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.handler.ssl; import io.netty.util.CharsetUtil; import org.bouncycastle.util.encoders.Hex; import org.junit.Test; import static org.junit.Assert.assertArrayEquals; /** * The test vectors here were provided via: * https://www.ietf.org/mail-archive/web/tls/current/msg03416.html */ public class PseudoRandomFunctionTest { @Test public void testPrfSha256() { byte[] secret = Hex.decode("9b be 43 6b a9 40 f0 17 b1 76 52 84 9a 71 db 35"); byte[] seed = Hex.decode("a0 ba 9f 93 6c da 31 18 27 a6 f7 96 ff d5 19 8c"); byte[] label = "test label".getBytes(CharsetUtil.US_ASCII); byte[] expected = Hex.decode( "e3 f2 29 ba 72 7b e1 7b" + "8d 12 26 20 55 7c d4 53" + "c2 aa b2 1d 07 c3 d4 95" + "32 9b 52 d4 e6 1e db 5a" + "6b 30 17 91 e9 0d 35 c9" + "c9 a4 6b 4e 14 ba f9 af" + "0f a0 22 f7 07 7d ef 17" + "ab fd 37 97 c0 56 4b ab" + "4f bc 91 66 6e 9d ef 9b" + "97 fc e3 4f 79 67 89 ba" + "a4 80 82 d1 22 ee 42 c5" + "a7 2e 5a 51 10 ff f7 01" + "87 34 7b 66"); byte[] actual = PseudoRandomFunction.hash(secret, label, seed, expected.length, "HmacSha256"); assertArrayEquals(expected, actual); } }
bryce-anderson/netty
handler/src/test/java/io/netty/handler/ssl/PseudoRandomFunctionTest.java
Java
apache-2.0
2,030
/* GENERATED SOURCE. DO NOT MODIFY. */ // © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html#License /* ******************************************************************************* * Copyright (C) 2010-2015, International Business Machines * Corporation and others. All Rights Reserved. ******************************************************************************* * CollationData.java, ported from collationdata.h/.cpp * * C++ version created on: 2010oct27 * created by: Markus W. Scherer */ package android.icu.impl.coll; import android.icu.impl.Normalizer2Impl; import android.icu.impl.Trie2_32; import android.icu.lang.UScript; import android.icu.text.Collator; import android.icu.text.UnicodeSet; import android.icu.util.ICUException; /** * Collation data container. * Immutable data created by a CollationDataBuilder, or loaded from a file, * or deserialized from API-provided binary data. * * Includes data for the collation base (root/default), aliased if this is not the base. * @hide Only a subset of ICU is exposed in Android */ public final class CollationData { // Note: The ucadata.icu loader could discover the reserved ranges by setting an array // parallel with the ranges, and resetting ranges that are indexed. // The reordering builder code could clone the resulting template array. static final int REORDER_RESERVED_BEFORE_LATIN = Collator.ReorderCodes.FIRST + 14; static final int REORDER_RESERVED_AFTER_LATIN = Collator.ReorderCodes.FIRST + 15; static final int MAX_NUM_SPECIAL_REORDER_CODES = 8; CollationData(Normalizer2Impl nfc) { nfcImpl = nfc; } public int getCE32(int c) { return trie.get(c); } int getCE32FromSupplementary(int c) { return trie.get(c); // TODO: port UTRIE2_GET32_FROM_SUPP(trie, c) to Java? } boolean isDigit(int c) { return c < 0x660 ? 
c <= 0x39 && 0x30 <= c : Collation.hasCE32Tag(getCE32(c), Collation.DIGIT_TAG); } public boolean isUnsafeBackward(int c, boolean numeric) { return unsafeBackwardSet.contains(c) || (numeric && isDigit(c)); } public boolean isCompressibleLeadByte(int b) { return compressibleBytes[b]; } public boolean isCompressiblePrimary(long p) { return isCompressibleLeadByte((int)p >>> 24); } /** * Returns the CE32 from two contexts words. * Access to the defaultCE32 for contraction and prefix matching. */ int getCE32FromContexts(int index) { return ((int)contexts.charAt(index) << 16) | contexts.charAt(index + 1); } /** * Returns the CE32 for an indirect special CE32 (e.g., with DIGIT_TAG). * Requires that ce32 is special. */ int getIndirectCE32(int ce32) { assert(Collation.isSpecialCE32(ce32)); int tag = Collation.tagFromCE32(ce32); if(tag == Collation.DIGIT_TAG) { // Fetch the non-numeric-collation CE32. ce32 = ce32s[Collation.indexFromCE32(ce32)]; } else if(tag == Collation.LEAD_SURROGATE_TAG) { ce32 = Collation.UNASSIGNED_CE32; } else if(tag == Collation.U0000_TAG) { // Fetch the normal ce32 for U+0000. ce32 = ce32s[0]; } return ce32; } /** * Returns the CE32 for an indirect special CE32 (e.g., with DIGIT_TAG), * if ce32 is special. */ int getFinalCE32(int ce32) { if(Collation.isSpecialCE32(ce32)) { ce32 = getIndirectCE32(ce32); } return ce32; } /** * Computes a CE from c's ce32 which has the OFFSET_TAG. */ long getCEFromOffsetCE32(int c, int ce32) { long dataCE = ces[Collation.indexFromCE32(ce32)]; return Collation.makeCE(Collation.getThreeBytePrimaryForOffsetData(c, dataCE)); } /** * Returns the single CE that c maps to. * Throws UnsupportedOperationException if c does not map to a single CE. 
*/ long getSingleCE(int c) { CollationData d; int ce32 = getCE32(c); if(ce32 == Collation.FALLBACK_CE32) { d = base; ce32 = base.getCE32(c); } else { d = this; } while(Collation.isSpecialCE32(ce32)) { switch(Collation.tagFromCE32(ce32)) { case Collation.LATIN_EXPANSION_TAG: case Collation.BUILDER_DATA_TAG: case Collation.PREFIX_TAG: case Collation.CONTRACTION_TAG: case Collation.HANGUL_TAG: case Collation.LEAD_SURROGATE_TAG: throw new UnsupportedOperationException(String.format( "there is not exactly one collation element for U+%04X (CE32 0x%08x)", c, ce32)); case Collation.FALLBACK_TAG: case Collation.RESERVED_TAG_3: throw new AssertionError(String.format( "unexpected CE32 tag for U+%04X (CE32 0x%08x)", c, ce32)); case Collation.LONG_PRIMARY_TAG: return Collation.ceFromLongPrimaryCE32(ce32); case Collation.LONG_SECONDARY_TAG: return Collation.ceFromLongSecondaryCE32(ce32); case Collation.EXPANSION32_TAG: if(Collation.lengthFromCE32(ce32) == 1) { ce32 = d.ce32s[Collation.indexFromCE32(ce32)]; break; } else { throw new UnsupportedOperationException(String.format( "there is not exactly one collation element for U+%04X (CE32 0x%08x)", c, ce32)); } case Collation.EXPANSION_TAG: { if(Collation.lengthFromCE32(ce32) == 1) { return d.ces[Collation.indexFromCE32(ce32)]; } else { throw new UnsupportedOperationException(String.format( "there is not exactly one collation element for U+%04X (CE32 0x%08x)", c, ce32)); } } case Collation.DIGIT_TAG: // Fetch the non-numeric-collation CE32 and continue. ce32 = d.ce32s[Collation.indexFromCE32(ce32)]; break; case Collation.U0000_TAG: assert(c == 0); // Fetch the normal ce32 for U+0000 and continue. ce32 = d.ce32s[0]; break; case Collation.OFFSET_TAG: return d.getCEFromOffsetCE32(c, ce32); case Collation.IMPLICIT_TAG: return Collation.unassignedCEFromCodePoint(c); } } return Collation.ceFromSimpleCE32(ce32); } /** * Returns the FCD16 value for code point c. c must be >= 0. 
*/ int getFCD16(int c) { return nfcImpl.getFCD16(c); } /** * Returns the first primary for the script's reordering group. * @return the primary with only the first primary lead byte of the group * (not necessarily an actual root collator primary weight), * or 0 if the script is unknown */ long getFirstPrimaryForGroup(int script) { int index = getScriptIndex(script); return index == 0 ? 0 : (long)scriptStarts[index] << 16; } /** * Returns the last primary for the script's reordering group. * @return the last primary of the group * (not an actual root collator primary weight), * or 0 if the script is unknown */ public long getLastPrimaryForGroup(int script) { int index = getScriptIndex(script); if(index == 0) { return 0; } long limit = scriptStarts[index + 1]; return (limit << 16) - 1; } /** * Finds the reordering group which contains the primary weight. * @return the first script of the group, or -1 if the weight is beyond the last group */ public int getGroupForPrimary(long p) { p >>= 16; if(p < scriptStarts[1] || scriptStarts[scriptStarts.length - 1] <= p) { return -1; } int index = 1; while(p >= scriptStarts[index + 1]) { ++index; } for(int i = 0; i < numScripts; ++i) { if(scriptsIndex[i] == index) { return i; } } for(int i = 0; i < MAX_NUM_SPECIAL_REORDER_CODES; ++i) { if(scriptsIndex[numScripts + i] == index) { return Collator.ReorderCodes.FIRST + i; } } return -1; } private int getScriptIndex(int script) { if(script < 0) { return 0; } else if(script < numScripts) { return scriptsIndex[script]; } else if(script < Collator.ReorderCodes.FIRST) { return 0; } else { script -= Collator.ReorderCodes.FIRST; if(script < MAX_NUM_SPECIAL_REORDER_CODES) { return scriptsIndex[numScripts + script]; } else { return 0; } } } public int[] getEquivalentScripts(int script) { int index = getScriptIndex(script); if(index == 0) { return EMPTY_INT_ARRAY; } if(script >= Collator.ReorderCodes.FIRST) { // Special groups have no aliases. 
return new int[] { script }; } int length = 0; for(int i = 0; i < numScripts; ++i) { if(scriptsIndex[i] == index) { ++length; } } int[] dest = new int[length]; if(length == 1) { dest[0] = script; return dest; } length = 0; for(int i = 0; i < numScripts; ++i) { if(scriptsIndex[i] == index) { dest[length++] = i; } } return dest; } /** * Writes the permutation of primary-weight ranges * for the given reordering of scripts and groups. * The caller checks for illegal arguments and * takes care of [DEFAULT] and memory allocation. * * <p>Each list element will be a (limit, offset) pair as described * for the CollationSettings.reorderRanges. * The list will be empty if no ranges are reordered. */ void makeReorderRanges(int[] reorder, UVector32 ranges) { makeReorderRanges(reorder, false, ranges); } private void makeReorderRanges(int[] reorder, boolean latinMustMove, UVector32 ranges) { ranges.removeAllElements(); int length = reorder.length; if(length == 0 || (length == 1 && reorder[0] == UScript.UNKNOWN)) { return; } // Maps each script-or-group range to a new lead byte. short[] table = new short[scriptStarts.length - 1]; // C++: uint8_t[] { // Set "don't care" values for reserved ranges. int index = scriptsIndex[ numScripts + REORDER_RESERVED_BEFORE_LATIN - Collator.ReorderCodes.FIRST]; if(index != 0) { table[index] = 0xff; } index = scriptsIndex[ numScripts + REORDER_RESERVED_AFTER_LATIN - Collator.ReorderCodes.FIRST]; if(index != 0) { table[index] = 0xff; } } // Never reorder special low and high primary lead bytes. assert(scriptStarts.length >= 2); assert(scriptStarts[0] == 0); int lowStart = scriptStarts[1]; assert(lowStart == ((Collation.MERGE_SEPARATOR_BYTE + 1) << 8)); int highLimit = scriptStarts[scriptStarts.length - 1]; assert(highLimit == (Collation.TRAIL_WEIGHT_BYTE << 8)); // Get the set of special reorder codes in the input list. // This supports a fixed number of special reorder codes; // it works for data with codes beyond Collator.ReorderCodes.LIMIT. 
int specials = 0; for(int i = 0; i < length; ++i) { int reorderCode = reorder[i] - Collator.ReorderCodes.FIRST; if(0 <= reorderCode && reorderCode < MAX_NUM_SPECIAL_REORDER_CODES) { specials |= 1 << reorderCode; } } // Start the reordering with the special low reorder codes that do not occur in the input. for(int i = 0; i < MAX_NUM_SPECIAL_REORDER_CODES; ++i) { int index = scriptsIndex[numScripts + i]; if(index != 0 && (specials & (1 << i)) == 0) { lowStart = addLowScriptRange(table, index, lowStart); } } // Skip the reserved range before Latin if Latin is the first script, // so that we do not move it unnecessarily. int skippedReserved = 0; if(specials == 0 && reorder[0] == UScript.LATIN && !latinMustMove) { int index = scriptsIndex[UScript.LATIN]; assert(index != 0); int start = scriptStarts[index]; assert(lowStart <= start); skippedReserved = start - lowStart; lowStart = start; } // Reorder according to the input scripts, continuing from the bottom of the primary range. boolean hasReorderToEnd = false; for(int i = 0; i < length;) { int script = reorder[i++]; if(script == UScript.UNKNOWN) { // Put the remaining scripts at the top. hasReorderToEnd = true; while(i < length) { script = reorder[--length]; if(script == UScript.UNKNOWN) { // Must occur at most once. throw new IllegalArgumentException( "setReorderCodes(): duplicate UScript.UNKNOWN"); } if(script == Collator.ReorderCodes.DEFAULT) { throw new IllegalArgumentException( "setReorderCodes(): UScript.DEFAULT together with other scripts"); } int index = getScriptIndex(script); if(index == 0) { continue; } if(table[index] != 0) { // Duplicate or equivalent script. throw new IllegalArgumentException( "setReorderCodes(): duplicate or equivalent script " + scriptCodeString(script)); } highLimit = addHighScriptRange(table, index, highLimit); } break; } if(script == Collator.ReorderCodes.DEFAULT) { // The default code must be the only one in the list, and that is handled by the caller. 
// Otherwise it must not be used. throw new IllegalArgumentException( "setReorderCodes(): UScript.DEFAULT together with other scripts"); } int index = getScriptIndex(script); if(index == 0) { continue; } if(table[index] != 0) { // Duplicate or equivalent script. throw new IllegalArgumentException( "setReorderCodes(): duplicate or equivalent script " + scriptCodeString(script)); } lowStart = addLowScriptRange(table, index, lowStart); } // Put all remaining scripts into the middle. for(int i = 1; i < scriptStarts.length - 1; ++i) { int leadByte = table[i]; if(leadByte != 0) { continue; } int start = scriptStarts[i]; if(!hasReorderToEnd && start > lowStart) { // No need to move this script. lowStart = start; } lowStart = addLowScriptRange(table, i, lowStart); } if(lowStart > highLimit) { if((lowStart - (skippedReserved & 0xff00)) <= highLimit) { // Try not skipping the before-Latin reserved range. makeReorderRanges(reorder, true, ranges); return; } // We need more primary lead bytes than available, despite the reserved ranges. throw new ICUException( "setReorderCodes(): reordering too many partial-primary-lead-byte scripts"); } // Turn lead bytes into a list of (limit, offset) pairs. // Encode each pair in one list element: // Upper 16 bits = limit, lower 16 = signed lead byte offset. int offset = 0; for(int i = 1;; ++i) { int nextOffset = offset; while(i < scriptStarts.length - 1) { int newLeadByte = table[i]; if(newLeadByte == 0xff) { // "Don't care" lead byte for reserved range, continue with current offset. 
} else { nextOffset = newLeadByte - (scriptStarts[i] >> 8); if(nextOffset != offset) { break; } } ++i; } if(offset != 0 || i < scriptStarts.length - 1) { ranges.addElement(((int)scriptStarts[i] << 16) | (offset & 0xffff)); } if(i == scriptStarts.length - 1) { break; } offset = nextOffset; } } private int addLowScriptRange(short[] table, int index, int lowStart) { int start = scriptStarts[index]; if((start & 0xff) < (lowStart & 0xff)) { lowStart += 0x100; } table[index] = (short)(lowStart >> 8); int limit = scriptStarts[index + 1]; lowStart = ((lowStart & 0xff00) + ((limit & 0xff00) - (start & 0xff00))) | (limit & 0xff); return lowStart; } private int addHighScriptRange(short[] table, int index, int highLimit) { int limit = scriptStarts[index + 1]; if((limit & 0xff) > (highLimit & 0xff)) { highLimit -= 0x100; } int start = scriptStarts[index]; highLimit = ((highLimit & 0xff00) - ((limit & 0xff00) - (start & 0xff00))) | (start & 0xff); table[index] = (short)(highLimit >> 8); return highLimit; } private static String scriptCodeString(int script) { // Do not use the script name here: We do not want to depend on that data. return (script < Collator.ReorderCodes.FIRST) ? Integer.toString(script) : "0x" + Integer.toHexString(script); } private static final int[] EMPTY_INT_ARRAY = new int[0]; /** @see jamoCE32s */ static final int JAMO_CE32S_LENGTH = 19 + 21 + 27; /** Main lookup trie. */ Trie2_32 trie; /** * Array of CE32 values. * At index 0 there must be CE32(U+0000) * to support U+0000's special-tag for NUL-termination handling. */ int[] ce32s; /** Array of CE values for expansions and OFFSET_TAG. */ long[] ces; /** Array of prefix and contraction-suffix matching data. */ String contexts; /** Base collation data, or null if this data itself is a base. */ public CollationData base; /** * Simple array of JAMO_CE32S_LENGTH=19+21+27 CE32s, one per canonical Jamo L/V/T. * They are normally simple CE32s, rarely expansions. * For fast handling of HANGUL_TAG. 
*/ int[] jamoCE32s = new int[JAMO_CE32S_LENGTH]; public Normalizer2Impl nfcImpl; /** The single-byte primary weight (xx000000) for numeric collation. */ long numericPrimary = 0x12000000; /** 256 flags for which primary-weight lead bytes are compressible. */ public boolean[] compressibleBytes; /** * Set of code points that are unsafe for starting string comparison after an identical prefix, * or in backwards CE iteration. */ UnicodeSet unsafeBackwardSet; /** * Fast Latin table for common-Latin-text string comparisons. * Data structure see class CollationFastLatin. */ public char[] fastLatinTable; /** * Header portion of the fastLatinTable. * In C++, these are one array, and the header is skipped for mapping characters. * In Java, two arrays work better. */ char[] fastLatinTableHeader; /** * Data for scripts and reordering groups. * Uses include building a reordering permutation table and * providing script boundaries to AlphabeticIndex. */ int numScripts; /** * The length of scriptsIndex is numScripts+16. * It maps from a UScriptCode or a special reorder code to an entry in scriptStarts. * 16 special reorder codes (not all used) are mapped starting at numScripts. * Up to MAX_NUM_SPECIAL_REORDER_CODES are codes for special groups like space/punct/digit. * There are special codes at the end for reorder-reserved primary ranges. * * <p>Multiple scripts may share a range and index, for example Hira & Kana. */ char[] scriptsIndex; /** * Start primary weight (top 16 bits only) for a group/script/reserved range * indexed by scriptsIndex. * The first range (separators & terminators) and the last range (trailing weights) * are not reorderable, and no scriptsIndex entry points to them. */ char[] scriptStarts; /** * Collation elements in the root collator. * Used by the CollationRootElements class. The data structure is described there. * null in a tailoring. */ public long[] rootElements; }
life-beam/j2objc
jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/impl/coll/CollationData.java
Java
apache-2.0
21,571
define( ({ _widgetLabel: "Měření" }) );
cob222/CPG
widgets/Measurement/nls/cs/strings.js
JavaScript
apache-2.0
54
package cmd import ( "os" "path/filepath" "launchpad.net/gnuflag" ) func getHome() string { envs := []string{"MEGAM_HOME"} var home string for i := 0; i < len(envs) && home == ""; i++ { home = os.Getenv(envs[i]) } return home } func JoinWithUserDir(p ...string) string { paths := []string{getHome()} paths = append(paths, p...) return filepath.Join(paths...) } func MergeFlagSet(fs1, fs2 *gnuflag.FlagSet) *gnuflag.FlagSet { fs2.VisitAll(func(flag *gnuflag.Flag) { fs1.Var(flag.Value, flag.Name, flag.Usage) }) return fs1 }
rajthilakmca/libgo
cmd/utils.go
GO
apache-2.0
547
// Route transition map: elements carrying the `translate` class slide
// to the right when the transition value becomes 'start', and slide back
// to the left on the reverse transition.
export default function() {
  const matchesTranslateClass = this.hasClass('translate');
  const valueIsStart = this.toValue(value => value === 'start');

  this.transition(
    matchesTranslateClass,
    valueIsStart,
    this.use('toRight'),
    this.reverse('toLeft')
  );
}
hmoco/ember-preprints
app/transitions.js
JavaScript
apache-2.0
205
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.joda;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.time.DateMathParser;
import org.elasticsearch.common.time.DateUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime;
import org.joda.time.format.DateTimeFormatter;

import java.time.ZoneId;
import java.util.Objects;
import java.util.function.LongSupplier;

/**
 * A parser for date/time formatted text with optional date math.
 *
 * The format of the datetime is configurable, and unix timestamps can also be used. Datemath
 * is appended to a datetime with the following syntax:
 * <code>||[+-/](\d+)?[yMwdhHms]</code>.
 */
public class JodaDateMathParser implements DateMathParser {

    // Joda-based formatter used for the literal (non-"now") date portion.
    private final FormatDateTimeFormatter dateTimeFormatter;

    public JodaDateMathParser(FormatDateTimeFormatter dateTimeFormatter) {
        Objects.requireNonNull(dateTimeFormatter);
        this.dateTimeFormatter = dateTimeFormatter;
    }

    // Note: we take a callable here for the timestamp in order to be able to figure out
    // if it has been used. For instance, the request cache does not cache requests that make
    // use of `now`.
    /**
     * Parses {@code text} into an epoch-millisecond timestamp.
     *
     * If the text starts with {@code "now"}, the supplied clock provides the anchor time and the
     * remainder of the string is treated as date math. Otherwise the portion before the
     * {@code "||"} separator (or the whole string, if no separator is present) is parsed with the
     * configured format, and anything after the separator is applied as date math.
     *
     * @param text    the date/date-math expression
     * @param now     supplier for the current timestamp, only invoked for "now"-anchored input
     * @param roundUp whether rounding operations resolve to the end of the unit rather than the start
     * @param tz      optional time zone used for parsing and for rounding boundaries; null means UTC
     */
    @Override
    public long parse(String text, LongSupplier now, boolean roundUp, ZoneId tz) {
        final DateTimeZone timeZone = tz == null ? null : DateUtils.zoneIdToDateTimeZone(tz);
        long time;
        String mathString;
        if (text.startsWith("now")) {
            try {
                time = now.getAsLong();
            } catch (Exception e) {
                throw new ElasticsearchParseException("could not read the current timestamp", e);
            }
            mathString = text.substring("now".length());
        } else {
            int index = text.indexOf("||");
            if (index == -1) {
                // No date math at all: the whole string is a literal date.
                return parseDateTime(text, timeZone, roundUp);
            }
            // The anchor date itself is never rounded up; only the math suffix honors roundUp.
            time = parseDateTime(text.substring(0, index), timeZone, false);
            mathString = text.substring(index + 2);
        }

        return parseMath(mathString, time, roundUp, timeZone);
    }

    /**
     * Applies a date-math suffix (e.g. {@code +1d-2h/m}) to the anchor {@code time}.
     *
     * Each operation is an operator ({@code +}, {@code -}, or {@code /} for rounding), an optional
     * count (only valid for +/-), and a single unit character. Rounding with roundUp resolves to
     * the last millisecond of the unit; otherwise to its start.
     *
     * @throws ElasticsearchParseException on truncated input, unknown operators/units, or a
     *         multi-unit rounding request
     */
    private long parseMath(String mathString, long time, boolean roundUp, DateTimeZone timeZone) throws ElasticsearchParseException {
        if (timeZone == null) {
            timeZone = DateTimeZone.UTC;
        }
        MutableDateTime dateTime = new MutableDateTime(time, timeZone);
        for (int i = 0; i < mathString.length(); ) {
            char c = mathString.charAt(i++);
            final boolean round;
            final int sign;
            if (c == '/') {
                round = true;
                sign = 1;
            } else {
                round = false;
                if (c == '+') {
                    sign = 1;
                } else if (c == '-') {
                    sign = -1;
                } else {
                    throw new ElasticsearchParseException("operator not supported for date math [{}]", mathString);
                }
            }

            if (i >= mathString.length()) {
                throw new ElasticsearchParseException("truncated date math [{}]", mathString);
            }

            final int num;
            if (!Character.isDigit(mathString.charAt(i))) {
                num = 1;  // no explicit count means one unit, e.g. "+d" == "+1d"
            } else {
                int numFrom = i;
                while (i < mathString.length() && Character.isDigit(mathString.charAt(i))) {
                    i++;
                }
                if (i >= mathString.length()) {
                    throw new ElasticsearchParseException("truncated date math [{}]", mathString);
                }
                num = Integer.parseInt(mathString.substring(numFrom, i));
            }
            if (round) {
                if (num != 1) {
                    throw new ElasticsearchParseException("rounding `/` can only be used on single unit types [{}]", mathString);
                }
            }
            char unit = mathString.charAt(i++);
            MutableDateTime.Property propertyToRound = null;
            switch (unit) {
                case 'y':
                    if (round) {
                        propertyToRound = dateTime.yearOfCentury();
                    } else {
                        dateTime.addYears(sign * num);
                    }
                    break;
                case 'M':
                    if (round) {
                        propertyToRound = dateTime.monthOfYear();
                    } else {
                        dateTime.addMonths(sign * num);
                    }
                    break;
                case 'w':
                    if (round) {
                        propertyToRound = dateTime.weekOfWeekyear();
                    } else {
                        dateTime.addWeeks(sign * num);
                    }
                    break;
                case 'd':
                    if (round) {
                        propertyToRound = dateTime.dayOfMonth();
                    } else {
                        dateTime.addDays(sign * num);
                    }
                    break;
                case 'h':
                case 'H':
                    if (round) {
                        propertyToRound = dateTime.hourOfDay();
                    } else {
                        dateTime.addHours(sign * num);
                    }
                    break;
                case 'm':
                    if (round) {
                        propertyToRound = dateTime.minuteOfHour();
                    } else {
                        dateTime.addMinutes(sign * num);
                    }
                    break;
                case 's':
                    if (round) {
                        propertyToRound = dateTime.secondOfMinute();
                    } else {
                        dateTime.addSeconds(sign * num);
                    }
                    break;
                default:
                    throw new ElasticsearchParseException("unit [{}] not supported for date math [{}]", unit, mathString);
            }
            if (propertyToRound != null) {
                if (roundUp) {
                    // we want to go up to the next whole value, even if we are already on a rounded value
                    propertyToRound.add(1);
                    propertyToRound.roundFloor();
                    dateTime.addMillis(-1); // subtract 1 millisecond to get the largest inclusive value
                } else {
                    propertyToRound.roundFloor();
                }
            }
        }
        return dateTime.getMillis();
    }

    /**
     * Parses a literal date string with the configured Joda formatter.
     *
     * The parse is performed into a mutable base date of 1970-01-01 so that time-only patterns
     * still yield a usable instant; when {@code roundUpIfNoTime} is set the base starts at the last
     * millisecond of the day so unspecified time fields resolve to their maximum values.
     *
     * @throws ElasticsearchParseException if the text cannot be fully parsed by the format
     */
    private long parseDateTime(String value, DateTimeZone timeZone, boolean roundUpIfNoTime) {
        DateTimeFormatter parser = dateTimeFormatter.parser();
        if (timeZone != null) {
            parser = parser.withZone(timeZone);
        }
        try {
            MutableDateTime date;
            // We use 01/01/1970 as a base date so that things keep working with date
            // fields that are filled with times without dates
            if (roundUpIfNoTime) {
                date = new MutableDateTime(1970, 1, 1, 23, 59, 59, 999, DateTimeZone.UTC);
            } else {
                date = new MutableDateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeZone.UTC);
            }
            final int end = parser.parseInto(date, value, 0);
            if (end < 0) {
                int position = ~end;  // parseInto encodes the failure offset as the bitwise complement
                throw new IllegalArgumentException("Parse failure at index [" + position + "] of [" + value + "]");
            } else if (end != value.length()) {
                throw new IllegalArgumentException("Unrecognized chars at the end of [" + value + "]: [" + value.substring(end) + "]");
            }
            return date.getMillis();
        } catch (IllegalArgumentException e) {
            throw new ElasticsearchParseException("failed to parse date field [{}] with format [{}]", e, value, dateTimeFormatter.format());
        }
    }

}
gfyoung/elasticsearch
server/src/main/java/org/elasticsearch/common/joda/JodaDateMathParser.java
Java
apache-2.0
8,771
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.rat.analysis.license;

import org.apache.rat.api.MetaData;

/**
 * Test double for {@link BaseLicense}: a "standard" license stub whose
 * family category, family name, and notes are all empty strings, so tests
 * can exercise license-handling code without any real license metadata.
 */
public class MockStandardLicense extends BaseLicense {
//    public List visitors = new ArrayList();

    public MockStandardLicense() {
        // Empty category/name datums plus empty notes — the mock deliberately carries no data.
        super(new MetaData.Datum(MetaData.RAT_URL_LICENSE_FAMILY_CATEGORY, ""),
              new MetaData.Datum(MetaData.RAT_URL_LICENSE_FAMILY_NAME, ""), "");
    }
}
whitesource/cat-core
src/test/java/org/apache/rat/analysis/license/MockStandardLicense.java
Java
apache-2.0
1,458
// BZip2.cs // // Copyright 2004 John Reilly // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // Linking this library statically or dynamically with other modules is // making a combined work based on this library. Thus, the terms and // conditions of the GNU General Public License cover the whole // combination. // // As a special exception, the copyright holders of this library give you // permission to link this library with independent modules to produce an // executable, regardless of the license terms of these independent // modules, and to copy and distribute the resulting executable under // terms of your choice, provided that you also meet, for each linked // independent module, the terms and conditions of the license of that // module. An independent module is a module which is not derived from // or based on this library. If you modify this library, you may extend // this exception to your version of the library, but you are not // obligated to do so. If you do not wish to do so, delete this // exception statement from your version. 
using System;
#if !NETCF_1_0 && !NETCF_2_0
using System.Runtime.Serialization;
#endif

using ICSharpCode.SharpZipLib;

namespace ICSharpCode.SharpZipLib.BZip2
{
	/// <summary>
	/// BZip2Exception represents exceptions specific to Bzip2 algorithm
	/// </summary>
#if !NETCF_1_0 && !NETCF_2_0
	[Serializable]
#endif
	public class BZip2Exception : SharpZipBaseException
	{
		/// <summary>
		/// Initialise a new instance of BZip2Exception.
		/// </summary>
		public BZip2Exception()
		{
		}

		/// <summary>
		/// Initialise a new instance of BZip2Exception with its message set to message.
		/// </summary>
		/// <param name="message">The message describing the error.</param>
		public BZip2Exception(string message)
			: base(message)
		{
		}

		/// <summary>
		/// Initialise an instance of BZip2Exception
		/// </summary>
		/// <param name="message">A message describing the error.</param>
		/// <param name="exception">The exception that is the cause of the current exception.</param>
		public BZip2Exception(string message, Exception exception)
			: base(message, exception)
		{
		}

#if !NETCF_1_0 && !NETCF_2_0
		/// <summary>
		/// Deserialization constructor
		/// </summary>
		/// <param name="info"><see cref="SerializationInfo"/> for this constructor</param>
		/// <param name="context"><see cref="StreamingContext"/> for this constructor</param>
		protected BZip2Exception(SerializationInfo info, StreamingContext context)
			: base(info, context)
		{
		}
#endif
	}
}
jeske/StepsDB-alpha
ThirdParty/SharpZipLib/src/BZip2/BZip2Exception.cs
C#
apache-2.0
3,220
/*
 * Copyright Terracotta, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ehcache.internal.store.disk;

import org.ehcache.config.EvictionVeto;
import org.ehcache.config.ResourcePoolsBuilder;
import org.ehcache.config.StoreConfigurationImpl;
import org.ehcache.config.units.MemoryUnit;
import org.ehcache.exceptions.CacheAccessException;
import org.ehcache.exceptions.CachePersistenceException;
import org.ehcache.expiry.Expiry;
import org.ehcache.internal.SystemTimeSource;
import org.ehcache.internal.TimeSource;
import org.ehcache.internal.persistence.TestLocalPersistenceService;
import org.ehcache.internal.store.offheap.AbstractOffHeapStore;
import org.ehcache.internal.store.offheap.AbstractOffHeapStoreTest;
import org.ehcache.spi.ServiceLocator;
import org.ehcache.spi.cache.Store;
import org.ehcache.spi.serialization.DefaultSerializationProvider;
import org.ehcache.spi.serialization.SerializationProvider;
import org.ehcache.spi.serialization.Serializer;
import org.ehcache.spi.serialization.UnsupportedTypeException;
import org.ehcache.spi.service.LocalPersistenceService.PersistenceSpaceIdentifier;
import org.ehcache.spi.service.FileBasedPersistenceContext;
import org.junit.Rule;
import org.junit.Test;

import java.io.IOException;

import static org.ehcache.expiry.Expirations.noExpiration;
import static org.ehcache.spi.TestServiceProvider.providerContaining;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests for {@link OffHeapDiskStore}, specializing the shared off-heap store
 * test suite ({@link AbstractOffHeapStoreTest}) with disk-backed store
 * construction, plus disk-specific tests (recovery across close/init,
 * mandatory persistence service).
 */
public class OffHeapDiskStoreTest extends AbstractOffHeapStoreTest {

  // Provides a throwaway on-disk persistence area for each test.
  @Rule
  public final TestLocalPersistenceService persistenceService = new TestLocalPersistenceService();

  /**
   * A value written before the store is closed must still be readable after the
   * store is re-initialized from the same persistence context.
   */
  @Test
  public void testRecovery() throws CacheAccessException, IOException {
    OffHeapDiskStore<String, String> offHeapDiskStore = createAndInitStore(SystemTimeSource.INSTANCE, noExpiration());
    try {
      offHeapDiskStore.put("key1", "value1");
      assertThat(offHeapDiskStore.get("key1"), notNullValue());

      OffHeapDiskStore.Provider.close(offHeapDiskStore);

      OffHeapDiskStore.Provider.init(offHeapDiskStore);
      assertThat(offHeapDiskStore.get("key1"), notNullValue());
    } finally {
      destroyStore(offHeapDiskStore);
    }
  }

  /**
   * Builds and initializes a String/String disk store (1 MB) for the shared
   * test suite, wiring serializers through {@link DefaultSerializationProvider}
   * backed by the test persistence service.
   */
  @Override
  protected OffHeapDiskStore<String, String> createAndInitStore(final TimeSource timeSource, final Expiry<? super String, ? super String> expiry) {
    try {
      SerializationProvider serializationProvider = new DefaultSerializationProvider(null);
      serializationProvider.start(providerContaining(persistenceService));
      ClassLoader classLoader = getClass().getClassLoader();
      Serializer<String> keySerializer = serializationProvider.createKeySerializer(String.class, classLoader);
      Serializer<String> valueSerializer = serializationProvider.createValueSerializer(String.class, classLoader);
      StoreConfigurationImpl<String, String> storeConfiguration = new StoreConfigurationImpl<String, String>(String.class, String.class, null, null, classLoader, expiry, null, keySerializer, valueSerializer);
      OffHeapDiskStore<String, String> offHeapStore = new OffHeapDiskStore<String, String>(getPersistenceContext(), storeConfiguration, timeSource, MemoryUnit.MB.toBytes(1));
      OffHeapDiskStore.Provider.init(offHeapStore);
      return offHeapStore;
    } catch (UnsupportedTypeException e) {
      throw new AssertionError(e);
    }
  }

  /**
   * Same as above but producing a String/byte[] store with an eviction veto,
   * as required by eviction-oriented tests in the shared suite.
   */
  @Override
  protected OffHeapDiskStore<String, byte[]> createAndInitStore(TimeSource timeSource, Expiry<? super String, ? super byte[]> expiry, EvictionVeto<? super String, ? super byte[]> evictionVeto) {
    try {
      SerializationProvider serializationProvider = new DefaultSerializationProvider(null);
      serializationProvider.start(providerContaining(persistenceService));
      ClassLoader classLoader = getClass().getClassLoader();
      Serializer<String> keySerializer = serializationProvider.createKeySerializer(String.class, classLoader);
      Serializer<byte[]> valueSerializer = serializationProvider.createValueSerializer(byte[].class, classLoader);
      StoreConfigurationImpl<String, byte[]> storeConfiguration = new StoreConfigurationImpl<String, byte[]>(String.class, byte[].class, evictionVeto, null, getClass().getClassLoader(), expiry, null, keySerializer, valueSerializer);
      OffHeapDiskStore<String, byte[]> offHeapStore = new OffHeapDiskStore<String, byte[]>(getPersistenceContext(), storeConfiguration, timeSource, MemoryUnit.MB.toBytes(1));
      OffHeapDiskStore.Provider.init(offHeapStore);
      return offHeapStore;
    } catch (UnsupportedTypeException e) {
      throw new AssertionError(e);
    }
  }

  // Disk stores must be closed through the Provider so file resources are released.
  @Override
  protected void destroyStore(AbstractOffHeapStore<?, ?> store) {
    try {
      OffHeapDiskStore.Provider.close((OffHeapDiskStore<?, ?>) store);
    } catch (IOException e) {
      throw new AssertionError(e);
    }
  }

  /**
   * Creating a disk store without a LocalPersistenceService registered must
   * fail fast with a descriptive IllegalStateException.
   */
  @Test
  public void testStoreInitFailsWithoutLocalPersistenceService() throws Exception {
    OffHeapDiskStore.Provider provider = new OffHeapDiskStore.Provider();
    ServiceLocator serviceLocator = new ServiceLocator();
    serviceLocator.addService(provider);
    serviceLocator.startAllServices();
    Store.Configuration<String, String> storeConfig = mock(Store.Configuration.class);
    when(storeConfig.getKeyType()).thenReturn(String.class);
    when(storeConfig.getValueType()).thenReturn(String.class);
    when(storeConfig.getResourcePools()).thenReturn(ResourcePoolsBuilder.newResourcePoolsBuilder()
        .disk(10, MemoryUnit.MB)
        .build());
    try {
      provider.createStore(storeConfig);
      fail("IllegalStateException expected");
    } catch (IllegalStateException e) {
      assertThat(e.getMessage(), containsString("No LocalPersistenceService could be found - did you configure it at the CacheManager level?"));
    }
  }

  // Allocates a fresh file-based persistence context ("cache"/"store") from the test service.
  private FileBasedPersistenceContext getPersistenceContext() {
    try {
      PersistenceSpaceIdentifier space = persistenceService.getOrCreatePersistenceSpace("cache");
      return persistenceService.createPersistenceContextWithin(space, "store");
    } catch (CachePersistenceException e) {
      throw new AssertionError(e);
    }
  }
}
wantstudy/ehcache3
impl/src/test/java/org/ehcache/internal/store/disk/OffHeapDiskStoreTest.java
Java
apache-2.0
6,889
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Reference command-line example for Google Analytics Core Reporting API v3. This application demonstrates how to use the python client library to access all the pieces of data returned by the Google Analytics Core Reporting API v3. The application manages autorization by saving an OAuth2.0 token in a local file and reusing the token for subsequent requests. Before You Begin: Update the client_secrets.json file You must update the clients_secrets.json file with a client id, client secret, and the redirect uri. You get these values by creating a new project in the Google APIs console and registering for OAuth2.0 for installed applications: https://code.google.com/apis/console Learn more about registering your analytics application here: http://developers.google.com/analytics/devguides/reporting/core/v3/gdataAuthorization Supply your TABLE_ID You will also need to identify from which profile to access data by specifying the TABLE_ID constant below. This value is of the form: ga:xxxx where xxxx is the profile ID. You can get the profile ID by either querying the Management API or by looking it up in the account settings of the Google Anlaytics web interface. Sample Usage: $ python core_reporting_v3_reference.py ga:xxxx Where the table ID is used to identify from which Google Anlaytics profile to retrieve data. 
This ID is in the format ga:xxxx where xxxx is the profile ID. Also you can also get help on all the command-line flags the program understands by running: $ python core_reporting_v3_reference.py --help """ from __future__ import print_function __author__ = '[email protected] (Nick Mihailovski)' import argparse import sys from googleapiclient.errors import HttpError from googleapiclient import sample_tools from oauth2client.client import AccessTokenRefreshError # Declare command-line flags. argparser = argparse.ArgumentParser(add_help=False) argparser.add_argument('table_id', type=str, help=('The table ID of the profile you wish to access. ' 'Format is ga:xxx where xxx is your profile ID.')) def main(argv): # Authenticate and construct service. service, flags = sample_tools.init( argv, 'analytics', 'v3', __doc__, __file__, parents=[argparser], scope='https://www.googleapis.com/auth/analytics.readonly') # Try to make a request to the API. Print the results or handle errors. try: results = get_api_query(service, flags.table_id).execute() print_results(results) except TypeError as error: # Handle errors in constructing a query. print(('There was an error in constructing your query : %s' % error)) except HttpError as error: # Handle API errors. print(('Arg, there was an API error : %s : %s' % (error.resp.status, error._get_reason()))) except AccessTokenRefreshError: # Handle Auth errors. print ('The credentials have been revoked or expired, please re-run ' 'the application to re-authorize') def get_api_query(service, table_id): """Returns a query object to retrieve data from the Core Reporting API. Args: service: The service object built by the Google API Python client library. table_id: str The table ID form which to retrieve data. 
""" return service.data().ga().get( ids=table_id, start_date='2012-01-01', end_date='2012-01-15', metrics='ga:visits', dimensions='ga:source,ga:keyword', sort='-ga:visits', filters='ga:medium==organic', start_index='1', max_results='25') def print_results(results): """Prints all the results in the Core Reporting API Response. Args: results: The response returned from the Core Reporting API. """ print_report_info(results) print_pagination_info(results) print_profile_info(results) print_query(results) print_column_headers(results) print_totals_for_all_results(results) print_rows(results) def print_report_info(results): """Prints general information about this report. Args: results: The response returned from the Core Reporting API. """ print('Report Infos:') print('Contains Sampled Data = %s' % results.get('containsSampledData')) print('Kind = %s' % results.get('kind')) print('ID = %s' % results.get('id')) print('Self Link = %s' % results.get('selfLink')) print() def print_pagination_info(results): """Prints common pagination details. Args: results: The response returned from the Core Reporting API. """ print('Pagination Infos:') print('Items per page = %s' % results.get('itemsPerPage')) print('Total Results = %s' % results.get('totalResults')) # These only have values if other result pages exist. if results.get('previousLink'): print('Previous Link = %s' % results.get('previousLink')) if results.get('nextLink'): print('Next Link = %s' % results.get('nextLink')) print() def print_profile_info(results): """Prints information about the profile. Args: results: The response returned from the Core Reporting API. 
""" print('Profile Infos:') info = results.get('profileInfo') print('Account Id = %s' % info.get('accountId')) print('Web Property Id = %s' % info.get('webPropertyId')) print('Profile Id = %s' % info.get('profileId')) print('Table Id = %s' % info.get('tableId')) print('Profile Name = %s' % info.get('profileName')) print() def print_query(results): """The query returns the original report query as a dict. Args: results: The response returned from the Core Reporting API. """ print('Query Parameters:') query = results.get('query') for key, value in query.iteritems(): print('%s = %s' % (key, value)) print() def print_column_headers(results): """Prints the information for each column. The main data from the API is returned as rows of data. The column headers describe the names and types of each column in rows. Args: results: The response returned from the Core Reporting API. """ print('Column Headers:') headers = results.get('columnHeaders') for header in headers: # Print Dimension or Metric name. print('\t%s name: = %s' % (header.get('columnType').title(), header.get('name'))) print('\tColumn Type = %s' % header.get('columnType')) print('\tData Type = %s' % header.get('dataType')) print() def print_totals_for_all_results(results): """Prints the total metric value for all pages the query matched. Args: results: The response returned from the Core Reporting API. """ print('Total Metrics For All Results:') print('This query returned %s rows.' % len(results.get('rows'))) print(('But the query matched %s total results.' % results.get('totalResults'))) print('Here are the metric totals for the matched total results.') totals = results.get('totalsForAllResults') for metric_name, metric_total in totals.iteritems(): print('Metric Name = %s' % metric_name) print('Metric Total = %s' % metric_total) print() def print_rows(results): """Prints all the rows of data returned by the API. Args: results: The response returned from the Core Reporting API. 
""" print('Rows:') if results.get('rows', []): for row in results.get('rows'): print('\t'.join(row)) else: print('No Rows Found') if __name__ == '__main__': main(sys.argv)
googleapis/google-api-python-client
samples/analytics/core_reporting_v3_reference.py
Python
apache-2.0
8,149
package org.apache.maven.artifact.repository; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; public class MavenArtifactRepositoryTest { private static class MavenArtifactRepositorySubclass extends MavenArtifactRepository { String id; public MavenArtifactRepositorySubclass(String id) { this.id = id; } @Override public String getId() { return id; } } @Test public void testHashCodeEquals() { MavenArtifactRepositorySubclass r1 = new MavenArtifactRepositorySubclass( "foo" ); MavenArtifactRepositorySubclass r2 = new MavenArtifactRepositorySubclass( "foo" ); MavenArtifactRepositorySubclass r3 = new MavenArtifactRepositorySubclass( "bar" ); assertTrue( r1.hashCode() == r2.hashCode() ); assertFalse( r1.hashCode() == r3.hashCode() ); assertTrue( r1.equals( r2 ) ); assertTrue( r2.equals( r1 ) ); assertFalse( r1.equals( r3 ) ); assertFalse( r3.equals( r1 ) ); } }
cstamas/maven
maven-compat/src/test/java/org/apache/maven/artifact/repository/MavenArtifactRepositoryTest.java
Java
apache-2.0
1,978
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.operator;

import io.trino.metadata.FunctionArgumentDefinition;
import io.trino.metadata.Signature;

import java.util.List;

/**
 * A single candidate implementation of a parametric function.
 * Implementations expose enough metadata (signature, nullability, argument
 * definitions) for the engine to select among them.
 */
public interface ParametricImplementation
{
    /** Returns the signature this implementation binds to. */
    Signature getSignature();

    /**
     * Whether this implementation has type parameters specialized to concrete
     * types (as opposed to being fully generic).
     */
    boolean hasSpecializedTypeParameters();

    /** Whether the function result can be null. */
    boolean isNullable();

    /** Returns the per-argument definitions for this implementation. */
    List<FunctionArgumentDefinition> getArgumentDefinitions();
}
electrum/presto
core/trino-main/src/main/java/io/trino/operator/ParametricImplementation.java
Java
apache-2.0
916
/**
 * Block parser for the LWO3 (LightWave Object v3) IFF container format.
 * All byte-level reading and shared parse state (reader, debugger, current
 * form/surface/node) live on the supplied IFFParser; this object only
 * dispatches on the block/chunk ID.
 *
 * @param {Object} IFFParser - shared IFF parser holding reader, debugger and state.
 */
function LWO3Parser( IFFParser ) {

	this.IFF = IFFParser;

}

LWO3Parser.prototype = {

	constructor: LWO3Parser,

	/**
	 * Reads one block header (4-char ID tag + uint32 byte length) from the
	 * current reader position and dispatches on the ID. Unhandled IDs are
	 * either skipped (consuming `length` bytes) or routed to
	 * parseUnknownCHUNK; FORM blocks recurse via parseForm.
	 * When the reader passes the end of the current form, the parser pops
	 * back to the parent form.
	 */
	parseBlock: function () {

		this.IFF.debugger.offset = this.IFF.reader.offset;
		this.IFF.debugger.closeForms();

		var blockID = this.IFF.reader.getIDTag();
		var length = this.IFF.reader.getUint32(); // size of data in bytes

		this.IFF.debugger.dataOffset = this.IFF.reader.offset;
		this.IFF.debugger.length = length;

		// Data types may be found in either LWO2 OR LWO3 spec
		switch ( blockID ) {

			case 'FORM': // form blocks may consist of sub -chunks or sub-forms
				this.IFF.parseForm( length );
				break;

			// SKIPPED CHUNKS
			// MISC skipped
			case 'ICON': // Thumbnail Icon Image
			case 'VMPA': // Vertex Map Parameter
			case 'BBOX': // bounding box
			// case 'VMMD':
			// case 'VTYP':

			// normal maps can be specified, normally on models imported from other applications. Currently ignored
			case 'NORM':

			// ENVL FORM skipped
			case 'PRE ': // Pre-loop behavior for the keyframe
			case 'POST': // Post-loop behavior for the keyframe
			case 'KEY ':
			case 'SPAN':

			// CLIP FORM skipped
			case 'TIME':
			case 'CLRS':
			case 'CLRA':
			case 'FILT':
			case 'DITH':
			case 'CONT':
			case 'BRIT':
			case 'SATR':
			case 'HUE ':
			case 'GAMM':
			case 'NEGA':
			case 'IFLT':
			case 'PFLT':

			// Image Map Layer skipped
			case 'PROJ':
			case 'AXIS':
			case 'AAST':
			case 'PIXB':
			case 'STCK':

			// Procedural Textures skipped
			case 'VALU':

			// Gradient Textures skipped
			case 'PNAM':
			case 'INAM':
			case 'GRST':
			case 'GREN':
			case 'GRPT':
			case 'FKEY':
			case 'IKEY':

			// Texture Mapping Form skipped
			case 'CSYS':

			// Surface CHUNKs skipped
			case 'OPAQ': // top level 'opacity' checkbox
			case 'CMAP': // clip map

			// Surface node CHUNKS skipped
			// These mainly specify the node editor setup in LW
			case 'NLOC':
			case 'NZOM':
			case 'NVER':
			case 'NSRV':
			case 'NCRD':
			case 'NMOD':
			case 'NSEL':
			case 'NPRW':
			case 'NPLA':
			case 'VERS':
			case 'ENUM':
			case 'TAG ':

			// Car Material CHUNKS
			case 'CGMD':
			case 'CGTY':
			case 'CGST':
			case 'CGEN':
			case 'CGTS':
			case 'CGTE':
			case 'OSMP':
			case 'OMDE':
			case 'OUTR':
			case 'FLAG':

			case 'TRNL':
			case 'SHRP':
			case 'RFOP':
			case 'RSAN':
			case 'TROP':
			case 'RBLR':
			case 'TBLR':
			case 'CLRH':
			case 'CLRF':
			case 'ADTR':
			case 'GLOW':
			case 'LINE':
			case 'ALPH':
			case 'VCOL':
			case 'ENAB':
				// All of the above fall through to here: mark as skipped and
				// consume the payload so the reader stays block-aligned.
				this.IFF.debugger.skipped = true;
				this.IFF.reader.skip( length );
				break;

			// Texture node chunks (not in spec)
			case 'IPIX': // usePixelBlending
			case 'IMIP': // useMipMaps
			case 'IMOD': // imageBlendingMode
			case 'AMOD': // unknown
			case 'IINV': // imageInvertAlpha
			case 'INCR': // imageInvertColor
			case 'IAXS': // imageAxis ( for non-UV maps)
			case 'IFOT': // imageFallofType
			case 'ITIM': // timing for animated textures
			case 'IWRL':
			case 'IUTI':
			case 'IINX':
			case 'IINY':
			case 'IINZ':
			case 'IREF': // possibly a VX for reused texture nodes
				// Only a 4-byte payload is interpreted (a single Int32);
				// anything else is skipped to keep the reader aligned.
				if ( length === 4 ) this.IFF.currentNode[ blockID ] = this.IFF.reader.getInt32();
				else this.IFF.reader.skip( length );
				break;

			case 'OTAG':
				this.IFF.parseObjectTag();
				break;

			case 'LAYR':
				this.IFF.parseLayer( length );
				break;

			case 'PNTS':
				this.IFF.parsePoints( length );
				break;

			case 'VMAP':
				this.IFF.parseVertexMapping( length );
				break;

			case 'POLS':
				this.IFF.parsePolygonList( length );
				break;

			case 'TAGS':
				this.IFF.parseTagStrings( length );
				break;

			case 'PTAG':
				this.IFF.parsePolygonTagMapping( length );
				break;

			case 'VMAD':
				// Same as VMAP but per-polygon-vertex (discontinuous) mapping.
				this.IFF.parseVertexMapping( length, true );
				break;

			// Misc CHUNKS
			case 'DESC': // Description Line
				this.IFF.currentForm.description = this.IFF.reader.getString();
				break;

			case 'TEXT':
			case 'CMNT':
			case 'NCOM':
				this.IFF.currentForm.comment = this.IFF.reader.getString();
				break;

			// Envelope Form
			case 'NAME':
				this.IFF.currentForm.channelName = this.IFF.reader.getString();
				break;

			// Image Map Layer
			case 'WRAP':
				this.IFF.currentForm.wrap = { w: this.IFF.reader.getUint16(), h: this.IFF.reader.getUint16() };
				break;

			case 'IMAG':
				var index = this.IFF.reader.getVariableLengthIndex();
				this.IFF.currentForm.imageIndex = index;
				break;

			// Texture Mapping Form
			case 'OREF':
				this.IFF.currentForm.referenceObject = this.IFF.reader.getString();
				break;

			case 'ROID':
				this.IFF.currentForm.referenceObjectID = this.IFF.reader.getUint32();
				break;

			// Surface Blocks
			case 'SSHN':
				this.IFF.currentSurface.surfaceShaderName = this.IFF.reader.getString();
				break;

			case 'AOVN':
				this.IFF.currentSurface.surfaceCustomAOVName = this.IFF.reader.getString();
				break;

			// Nodal Blocks
			case 'NSTA':
				this.IFF.currentForm.disabled = this.IFF.reader.getUint16();
				break;

			case 'NRNM':
				this.IFF.currentForm.realName = this.IFF.reader.getString();
				break;

			case 'NNME':
				// Register the node on the current surface under its ref name.
				this.IFF.currentForm.refName = this.IFF.reader.getString();
				this.IFF.currentSurface.nodes[ this.IFF.currentForm.refName ] = this.IFF.currentForm;
				break;

			// Nodal Blocks : connections
			case 'INME':
				if ( ! this.IFF.currentForm.nodeName ) this.IFF.currentForm.nodeName = [];
				this.IFF.currentForm.nodeName.push( this.IFF.reader.getString() );
				break;

			case 'IINN':
				if ( ! this.IFF.currentForm.inputNodeName ) this.IFF.currentForm.inputNodeName = [];
				this.IFF.currentForm.inputNodeName.push( this.IFF.reader.getString() );
				break;

			case 'IINM':
				if ( ! this.IFF.currentForm.inputName ) this.IFF.currentForm.inputName = [];
				this.IFF.currentForm.inputName.push( this.IFF.reader.getString() );
				break;

			case 'IONM':
				if ( ! this.IFF.currentForm.inputOutputName ) this.IFF.currentForm.inputOutputName = [];
				this.IFF.currentForm.inputOutputName.push( this.IFF.reader.getString() );
				break;

			case 'FNAM':
				this.IFF.currentForm.fileName = this.IFF.reader.getString();
				break;

			case 'CHAN': // NOTE: ENVL Forms may also have CHAN chunk, however ENVL is currently ignored
				if ( length === 4 ) this.IFF.currentForm.textureChannel = this.IFF.reader.getIDTag();
				else this.IFF.reader.skip( length );
				break;

			// LWO2 Spec chunks: these are needed since the SURF FORMs are often in LWO2 format
			case 'SMAN':
				// Negative max smoothing angle means smoothing is disabled.
				var maxSmoothingAngle = this.IFF.reader.getFloat32();
				this.IFF.currentSurface.attributes.smooth = ( maxSmoothingAngle < 0 ) ? false : true;
				break;

			// LWO2: Basic Surface Parameters
			case 'COLR':
				this.IFF.currentSurface.attributes.Color = { value: this.IFF.reader.getFloat32Array( 3 ) };
				this.IFF.reader.skip( 2 ); // VX: envelope
				break;

			case 'LUMI':
				this.IFF.currentSurface.attributes.Luminosity = { value: this.IFF.reader.getFloat32() };
				this.IFF.reader.skip( 2 ); // skip trailing envelope index (VX)
				break;

			case 'SPEC':
				this.IFF.currentSurface.attributes.Specular = { value: this.IFF.reader.getFloat32() };
				this.IFF.reader.skip( 2 );
				break;

			case 'DIFF':
				this.IFF.currentSurface.attributes.Diffuse = { value: this.IFF.reader.getFloat32() };
				this.IFF.reader.skip( 2 );
				break;

			case 'REFL':
				this.IFF.currentSurface.attributes.Reflection = { value: this.IFF.reader.getFloat32() };
				this.IFF.reader.skip( 2 );
				break;

			case 'GLOS':
				this.IFF.currentSurface.attributes.Glossiness = { value: this.IFF.reader.getFloat32() };
				this.IFF.reader.skip( 2 );
				break;

			case 'TRAN':
				this.IFF.currentSurface.attributes.opacity = this.IFF.reader.getFloat32();
				this.IFF.reader.skip( 2 );
				break;

			case 'BUMP':
				this.IFF.currentSurface.attributes.bumpStrength = this.IFF.reader.getFloat32();
				this.IFF.reader.skip( 2 );
				break;

			case 'SIDE':
				this.IFF.currentSurface.attributes.side = this.IFF.reader.getUint16();
				break;

			case 'RIMG':
				this.IFF.currentSurface.attributes.reflectionMap = this.IFF.reader.getVariableLengthIndex();
				break;

			case 'RIND':
				this.IFF.currentSurface.attributes.refractiveIndex = this.IFF.reader.getFloat32();
				this.IFF.reader.skip( 2 );
				break;

			case 'TIMG':
				this.IFF.currentSurface.attributes.refractionMap = this.IFF.reader.getVariableLengthIndex();
				break;

			case 'IMAP':
				this.IFF.currentSurface.attributes.imageMapIndex = this.IFF.reader.getUint32();
				break;

			case 'IUVI': // uv channel name
				this.IFF.currentNode.UVChannel = this.IFF.reader.getString( length );
				break;

			case 'IUTL': // widthWrappingMode: 0 = Reset, 1 = Repeat, 2 = Mirror, 3 = Edge
				this.IFF.currentNode.widthWrappingMode = this.IFF.reader.getUint32();
				break;

			case 'IVTL': // heightWrappingMode
				this.IFF.currentNode.heightWrappingMode = this.IFF.reader.getUint32();
				break;

			default:
				this.IFF.parseUnknownCHUNK( blockID, length );

		}

		// Log every non-FORM block to the debugger (FORMs log via parseForm).
		if ( blockID != 'FORM' ) {

			this.IFF.debugger.node = 1;
			this.IFF.debugger.nodeID = blockID;
			this.IFF.debugger.log();

		}

		// Reached (or passed) the end of the current form: pop to its parent.
		if ( this.IFF.reader.offset >= this.IFF.currentFormEnd ) {

			this.IFF.currentForm = this.IFF.parentForm;

		}

	}

};

export { LWO3Parser };
nhibchung/nhibchung.github.io
project/arWebglTest/examples/jsm/loaders/lwo/LWO3Parser.js
JavaScript
apache-2.0
9,311
<html> <head> <link href="../generic.css" rel="stylesheet"/> </head> <body> <p>before</p> <p> [] <span class="endnote">endnote</span> after </p> </body> </html>
corinthia/corinthia-editorlib
tests/cursor/enterPressed-endnote04-expected.html
HTML
apache-2.0
203
/*
 * #%L
 * BroadleafCommerce Framework Web
 * %%
 * Copyright (C) 2009 - 2013 Broadleaf Commerce
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.broadleafcommerce.core.web.api.wrapper;

import org.broadleafcommerce.common.money.Money;
import org.broadleafcommerce.core.offer.domain.Adjustment;
import org.broadleafcommerce.core.offer.domain.Offer;

import java.math.BigDecimal;

import javax.servlet.http.HttpServletRequest;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * This is a JAXB wrapper around {@link Adjustment}, exposing the adjustment
 * and its originating {@link Offer} details for API responses.
 * <p/>
 * Author: ppatel, bpolster
 */
@XmlRootElement(name = "adjustment")
@XmlAccessorType(value = XmlAccessType.FIELD)
public class AdjustmentWrapper extends BaseWrapper implements APIWrapper<Adjustment> {

    @XmlElement
    protected Long id;

    // Id of the Offer that produced this adjustment (null when no offer is attached)
    @XmlElement
    protected Long offerid;

    @XmlElement
    protected String reason;

    @XmlElement
    protected String marketingMessage;

    @XmlElement
    protected Money adjustmentValue;

    @XmlElement
    protected String discountType;

    @XmlElement
    protected BigDecimal discountAmount;

    /**
     * Copies the adjustment's id, reason and value onto this wrapper. When the
     * adjustment carries an {@link Offer}, the offer's id, marketing message,
     * discount type and value are copied as well, and a null reason defaults
     * to "OFFER". A null model is ignored (no-op).
     */
    public void wrapDetails(Adjustment model, HttpServletRequest request) {
        if (model == null) {
            return;
        }
        this.id = model.getId();
        this.reason = model.getReason();

        Offer offer = model.getOffer();
        if (offer != null) {
            if (model.getReason() == null) {
                this.reason = "OFFER";
            }
            this.offerid = offer.getId();
            this.marketingMessage = offer.getMarketingMessage();
            this.discountType = offer.getDiscountType().getType();
            this.discountAmount = offer.getValue();
        }

        this.adjustmentValue = model.getValue();
    }

    /**
     * Summary and detail views are identical for adjustments; delegates to
     * {@link #wrapDetails(Adjustment, HttpServletRequest)}.
     */
    @Override
    public void wrapSummary(Adjustment model, HttpServletRequest request) {
        wrapDetails(model, request);
    }
}
passion1014/metaworks_framework
core/broadleaf-framework-web/src/main/java/org/broadleafcommerce/core/web/api/wrapper/AdjustmentWrapper.java
Java
apache-2.0
2,601
# The Climate Corporation licenses this file to you under under the Apache # License, Version 2.0 (the "License"); you may not use this file except in # compliance with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # See the NOTICE file distributed with this work for additional information # regarding copyright ownership. Unless required by applicable law or agreed # to in writing, software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the License for the specific language governing permissions # and limitations under the License. # Be sure to restart your server when you modify this file. # You can add backtrace silencers for libraries that you're using but don't wish to see in your backtraces. # Rails.backtrace_cleaner.add_silencer { |line| line =~ /my_noisy_library/ } # You can also remove all the silencers if you're trying to debug a problem that might stem from framework code. # Rails.backtrace_cleaner.remove_silencers!
estsauver/document-services
sojourner/config/initializers/backtrace_silencers.rb
Ruby
apache-2.0
1,103
/**
  * Copyright 2015 Thomson Reuters
  *
  * Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
  *   http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
  * an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  *
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
package cmwell.rts

import akka.actor.{Actor, ActorRef, ActorSelection}
import cmwell.domain.Infoton
import k.grid.Grid
import com.typesafe.scalalogging.LazyLogging

/**
  * Created by markz on 7/9/14.
  */
// Register a remote subscriber together with the rule that filters what it receives.
case class AddSubscriber(subscriber: String, rule: Rule)
// Deregister a subscriber and drop its rule.
case class RemoveSubscriber(subscriber: String)

// Publish a batch of infotons to all subscribers whose rules match.
case class Publish(i: Vector[Infoton])
// Sent to a subscriber: carries only the uuid of a matching infoton.
case class PublishOne(uuid: String)
//case class Publish(s : String)

/**
  * Actor that fans published infotons out to remote subscribers.
  * Keeps two parallel maps keyed by subscriber name: the filtering rule,
  * and an ActorSelection resolved from the sender's remote address at
  * subscription time.
  */
class PublishAgent extends Actor with LazyLogging {

  // need a mapping rule (q) -> actor (label)
  var rules: Map[String, Rule] = Map.empty[String, Rule]
  var subMap: Map[String, ActorSelection] = Map.empty[String, ActorSelection]

  // Send a single matching infoton (by uuid only) to the given subscriber.
  // NOTE(review): assumes the subscriber is present in subMap — an unknown
  // subscriber would throw NoSuchElementException; verify callers guarantee this.
  def publish_data(subscriber: String, i: Infoton) {
    // send
    val a = subMap(subscriber)
    logger.debug(s"Send data to $subscriber [$a]")
    a ! PublishOne(i.uuid)
  }

  def receive = {
    // add a rule to the internal map
    case AddSubscriber(subscriber: String, rule: Rule) =>
      // Build the remote actor path from the sender's address so we can reach
      // the subscriber actor on its originating node.
      val addr = sender().path.address
      val path = s"akka.tcp://${addr.system}@${addr.host.getOrElse("")}:${addr.port.getOrElse(0)}/user/$subscriber"
      rules += (subscriber -> rule)
      subMap += (subscriber -> context.actorSelection(path))
      logger.debug(s"AddRule rules [${rules}] sub map [${subMap}]")
    // remove the rule from the internal map
    case RemoveSubscriber(subscriber: String) =>
      rules -= (subscriber)
      subMap -= (subscriber)
      logger.debug(s"RemoveRule ${subscriber} rules [${rules}] sub map [${subMap}]")

    // this publish the infoton according the rule
    case Publish(infotonVec: Vector[Infoton]) => {
      logger.debug(s"in actor $infotonVec")
      // first lets calc
      // For every infoton, evaluate each subscriber's rule and publish on match:
      //   NoFilter     - always publish
      //   PathFilter   - publish when the infoton path matches
      //   MatchFilter  - publish when the infoton's fields match
      //   PMFilter     - publish when both path and fields match
      infotonVec.foreach { i =>
        rules.foreach {
          case (subscriber, rule) =>
            rule match {
              case NoFilter => publish_data(subscriber, i)
              case PathFilter(path) =>
                if (path.check(i.path))
                  publish_data(subscriber, i)
              case MatchFilter(f) =>
                if (i.fields.isDefined && f.check(i.fields.get)) publish_data(subscriber, i)
              case PMFilter(p, m) =>
                if (p.check(i.path) && i.fields.isDefined && m.check(i.fields.get)) publish_data(subscriber, i)
            }
        }
      }
    }
    case _ => logger.debug("Error")
  }
}

/**
  * Grid-wide entry point: creates the single "publisher" PublishAgent actor
  * and exposes a fire-and-forget publish API.
  */
object Publisher {

  val publishAgentActor: ActorRef = Grid.create(classOf[PublishAgent], "publisher")
  val p: Publisher = new Publisher(publishAgentActor)
  // Forces object initialization (and thus actor creation) when called.
  def init: Unit = {}
  def publish(i: Vector[Infoton]): Unit = p.publish(i)
}

/** Thin wrapper that forwards publish requests to the agent actor (non-blocking). */
class Publisher(val publishAgentActor: ActorRef) {

  def publish(i: Vector[Infoton]): Unit = {
    // no block call here
    publishAgentActor ! Publish(i)
  }

}
TRnonodename/CM-Well
server/cmwell-rts/src/main/scala/cmwell/rts/Publisher.scala
Scala
apache-2.0
3,415
<html><body> <style> body, h1, h2, h3, div, span, p, pre, a { margin: 0; padding: 0; border: 0; font-weight: inherit; font-style: inherit; font-size: 100%; font-family: inherit; vertical-align: baseline; } body { font-size: 13px; padding: 1em; } h1 { font-size: 26px; margin-bottom: 1em; } h2 { font-size: 24px; margin-bottom: 1em; } h3 { font-size: 20px; margin-bottom: 1em; margin-top: 1em; } pre, code { line-height: 1.5; font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace; } pre { margin-top: 0.5em; } h1, h2, h3, p { font-family: Arial, sans serif; } h1, h2, h3 { border-bottom: solid #CCC 1px; } .toc_element { margin-top: 0.5em; } .firstline { margin-left: 2 em; } .method { margin-top: 1em; border: solid 1px #CCC; padding: 1em; background: #EEE; } .details { font-weight: bold; font-size: 14px; } </style> <h1><a href="dialogflow_v2.html">Dialogflow API</a> . <a href="dialogflow_v2.projects.html">projects</a> . <a href="dialogflow_v2.projects.locations.html">locations</a> . <a href="dialogflow_v2.projects.locations.agent.html">agent</a> . <a href="dialogflow_v2.projects.locations.agent.environments.html">environments</a> . <a href="dialogflow_v2.projects.locations.agent.environments.users.html">users</a> . <a href="dialogflow_v2.projects.locations.agent.environments.users.sessions.html">sessions</a> . <a href="dialogflow_v2.projects.locations.agent.environments.users.sessions.entityTypes.html">entityTypes</a></h1> <h2>Instance Methods</h2> <p class="toc_element"> <code><a href="#close">close()</a></code></p> <p class="firstline">Close httplib2 connections.</p> <p class="toc_element"> <code><a href="#create">create(parent, body=None, x__xgafv=None)</a></code></p> <p class="firstline">Creates a session entity type. If the specified session entity type already exists, overrides the session entity type. This method doesn't work with Google Assistant integration. 
Contact Dialogflow support if you need to use session entities with Google Assistant integration.</p> <p class="toc_element"> <code><a href="#delete">delete(name, x__xgafv=None)</a></code></p> <p class="firstline">Deletes the specified session entity type. This method doesn't work with Google Assistant integration. Contact Dialogflow support if you need to use session entities with Google Assistant integration.</p> <p class="toc_element"> <code><a href="#get">get(name, x__xgafv=None)</a></code></p> <p class="firstline">Retrieves the specified session entity type. This method doesn't work with Google Assistant integration. Contact Dialogflow support if you need to use session entities with Google Assistant integration.</p> <p class="toc_element"> <code><a href="#list">list(parent, pageSize=None, pageToken=None, x__xgafv=None)</a></code></p> <p class="firstline">Returns the list of all session entity types in the specified session. This method doesn't work with Google Assistant integration. Contact Dialogflow support if you need to use session entities with Google Assistant integration.</p> <p class="toc_element"> <code><a href="#list_next">list_next(previous_request, previous_response)</a></code></p> <p class="firstline">Retrieves the next page of results.</p> <p class="toc_element"> <code><a href="#patch">patch(name, body=None, updateMask=None, x__xgafv=None)</a></code></p> <p class="firstline">Updates the specified session entity type. This method doesn't work with Google Assistant integration. Contact Dialogflow support if you need to use session entities with Google Assistant integration.</p> <h3>Method Details</h3> <div class="method"> <code class="details" id="close">close()</code> <pre>Close httplib2 connections.</pre> </div> <div class="method"> <code class="details" id="create">create(parent, body=None, x__xgafv=None)</code> <pre>Creates a session entity type. If the specified session entity type already exists, overrides the session entity type. 
This method doesn&#x27;t work with Google Assistant integration. Contact Dialogflow support if you need to use session entities with Google Assistant integration. Args: parent: string, Required. The session to create a session entity type for. Format: `projects//agent/sessions/` or `projects//agent/environments//users// sessions/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. (required) body: object, The request body. The object takes the form of: { # A session represents a conversation between a Dialogflow agent and an end-user. You can create special entities, called session entities, during a session. Session entities can extend or replace custom entity types and only exist during the session that they were created for. All session data, including session entities, is stored by Dialogflow for 20 minutes. For more information, see the [session entity guide](https://cloud.google.com/dialogflow/docs/entities-session). &quot;entities&quot;: [ # Required. The collection of entities associated with this session entity type. { # An **entity entry** for an associated entity type. &quot;synonyms&quot;: [ # Required. A collection of value synonyms. For example, if the entity type is *vegetable*, and `value` is *scallions*, a synonym could be *green onions*. For `KIND_LIST` entity types: * This collection must contain exactly one synonym equal to `value`. &quot;A String&quot;, ], &quot;value&quot;: &quot;A String&quot;, # Required. The primary value associated with this entity entry. For example, if the entity type is *vegetable*, the value could be *scallions*. For `KIND_MAP` entity types: * A reference value to be used in place of synonyms. For `KIND_LIST` entity types: * A string that can contain references to other entity types (with or without aliases). }, ], &quot;entityOverrideMode&quot;: &quot;A String&quot;, # Required. 
Indicates whether the additional data should override or supplement the custom entity type definition. &quot;name&quot;: &quot;A String&quot;, # Required. The unique identifier of this session entity type. Format: `projects//agent/sessions//entityTypes/`, or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. `` must be the display name of an existing entity type in the same agent that will be overridden or supplemented. } x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # A session represents a conversation between a Dialogflow agent and an end-user. You can create special entities, called session entities, during a session. Session entities can extend or replace custom entity types and only exist during the session that they were created for. All session data, including session entities, is stored by Dialogflow for 20 minutes. For more information, see the [session entity guide](https://cloud.google.com/dialogflow/docs/entities-session). &quot;entities&quot;: [ # Required. The collection of entities associated with this session entity type. { # An **entity entry** for an associated entity type. &quot;synonyms&quot;: [ # Required. A collection of value synonyms. For example, if the entity type is *vegetable*, and `value` is *scallions*, a synonym could be *green onions*. For `KIND_LIST` entity types: * This collection must contain exactly one synonym equal to `value`. &quot;A String&quot;, ], &quot;value&quot;: &quot;A String&quot;, # Required. The primary value associated with this entity entry. For example, if the entity type is *vegetable*, the value could be *scallions*. For `KIND_MAP` entity types: * A reference value to be used in place of synonyms. 
For `KIND_LIST` entity types: * A string that can contain references to other entity types (with or without aliases). }, ], &quot;entityOverrideMode&quot;: &quot;A String&quot;, # Required. Indicates whether the additional data should override or supplement the custom entity type definition. &quot;name&quot;: &quot;A String&quot;, # Required. The unique identifier of this session entity type. Format: `projects//agent/sessions//entityTypes/`, or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. `` must be the display name of an existing entity type in the same agent that will be overridden or supplemented. }</pre> </div> <div class="method"> <code class="details" id="delete">delete(name, x__xgafv=None)</code> <pre>Deletes the specified session entity type. This method doesn&#x27;t work with Google Assistant integration. Contact Dialogflow support if you need to use session entities with Google Assistant integration. Args: name: string, Required. The name of the entity type to delete. Format: `projects//agent/sessions//entityTypes/` or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. (required) x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # A generic empty message that you can re-use to avoid defining duplicated empty messages in your APIs. A typical example is to use it as the request or the response type of an API method. For instance: service Foo { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON representation for `Empty` is empty JSON object `{}`. 
}</pre> </div> <div class="method"> <code class="details" id="get">get(name, x__xgafv=None)</code> <pre>Retrieves the specified session entity type. This method doesn&#x27;t work with Google Assistant integration. Contact Dialogflow support if you need to use session entities with Google Assistant integration. Args: name: string, Required. The name of the session entity type. Format: `projects//agent/sessions//entityTypes/` or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. (required) x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # A session represents a conversation between a Dialogflow agent and an end-user. You can create special entities, called session entities, during a session. Session entities can extend or replace custom entity types and only exist during the session that they were created for. All session data, including session entities, is stored by Dialogflow for 20 minutes. For more information, see the [session entity guide](https://cloud.google.com/dialogflow/docs/entities-session). &quot;entities&quot;: [ # Required. The collection of entities associated with this session entity type. { # An **entity entry** for an associated entity type. &quot;synonyms&quot;: [ # Required. A collection of value synonyms. For example, if the entity type is *vegetable*, and `value` is *scallions*, a synonym could be *green onions*. For `KIND_LIST` entity types: * This collection must contain exactly one synonym equal to `value`. &quot;A String&quot;, ], &quot;value&quot;: &quot;A String&quot;, # Required. The primary value associated with this entity entry. For example, if the entity type is *vegetable*, the value could be *scallions*. For `KIND_MAP` entity types: * A reference value to be used in place of synonyms. 
For `KIND_LIST` entity types: * A string that can contain references to other entity types (with or without aliases). }, ], &quot;entityOverrideMode&quot;: &quot;A String&quot;, # Required. Indicates whether the additional data should override or supplement the custom entity type definition. &quot;name&quot;: &quot;A String&quot;, # Required. The unique identifier of this session entity type. Format: `projects//agent/sessions//entityTypes/`, or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. `` must be the display name of an existing entity type in the same agent that will be overridden or supplemented. }</pre> </div> <div class="method"> <code class="details" id="list">list(parent, pageSize=None, pageToken=None, x__xgafv=None)</code> <pre>Returns the list of all session entity types in the specified session. This method doesn&#x27;t work with Google Assistant integration. Contact Dialogflow support if you need to use session entities with Google Assistant integration. Args: parent: string, Required. The session to list all session entity types from. Format: `projects//agent/sessions/` or `projects//agent/environments//users// sessions/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. (required) pageSize: integer, Optional. The maximum number of items to return in a single page. By default 100 and at most 1000. pageToken: string, Optional. The next_page_token value returned from a previous list request. x__xgafv: string, V1 error format. Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # The response message for SessionEntityTypes.ListSessionEntityTypes. 
&quot;nextPageToken&quot;: &quot;A String&quot;, # Token to retrieve the next page of results, or empty if there are no more results in the list. &quot;sessionEntityTypes&quot;: [ # The list of session entity types. There will be a maximum number of items returned based on the page_size field in the request. { # A session represents a conversation between a Dialogflow agent and an end-user. You can create special entities, called session entities, during a session. Session entities can extend or replace custom entity types and only exist during the session that they were created for. All session data, including session entities, is stored by Dialogflow for 20 minutes. For more information, see the [session entity guide](https://cloud.google.com/dialogflow/docs/entities-session). &quot;entities&quot;: [ # Required. The collection of entities associated with this session entity type. { # An **entity entry** for an associated entity type. &quot;synonyms&quot;: [ # Required. A collection of value synonyms. For example, if the entity type is *vegetable*, and `value` is *scallions*, a synonym could be *green onions*. For `KIND_LIST` entity types: * This collection must contain exactly one synonym equal to `value`. &quot;A String&quot;, ], &quot;value&quot;: &quot;A String&quot;, # Required. The primary value associated with this entity entry. For example, if the entity type is *vegetable*, the value could be *scallions*. For `KIND_MAP` entity types: * A reference value to be used in place of synonyms. For `KIND_LIST` entity types: * A string that can contain references to other entity types (with or without aliases). }, ], &quot;entityOverrideMode&quot;: &quot;A String&quot;, # Required. Indicates whether the additional data should override or supplement the custom entity type definition. &quot;name&quot;: &quot;A String&quot;, # Required. The unique identifier of this session entity type. 
Format: `projects//agent/sessions//entityTypes/`, or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. `` must be the display name of an existing entity type in the same agent that will be overridden or supplemented. }, ], }</pre> </div> <div class="method"> <code class="details" id="list_next">list_next(previous_request, previous_response)</code> <pre>Retrieves the next page of results. Args: previous_request: The request for the previous page. (required) previous_response: The response from the request for the previous page. (required) Returns: A request object that you can call &#x27;execute()&#x27; on to request the next page. Returns None if there are no more items in the collection. </pre> </div> <div class="method"> <code class="details" id="patch">patch(name, body=None, updateMask=None, x__xgafv=None)</code> <pre>Updates the specified session entity type. This method doesn&#x27;t work with Google Assistant integration. Contact Dialogflow support if you need to use session entities with Google Assistant integration. Args: name: string, Required. The unique identifier of this session entity type. Format: `projects//agent/sessions//entityTypes/`, or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. `` must be the display name of an existing entity type in the same agent that will be overridden or supplemented. (required) body: object, The request body. The object takes the form of: { # A session represents a conversation between a Dialogflow agent and an end-user. You can create special entities, called session entities, during a session. 
Session entities can extend or replace custom entity types and only exist during the session that they were created for. All session data, including session entities, is stored by Dialogflow for 20 minutes. For more information, see the [session entity guide](https://cloud.google.com/dialogflow/docs/entities-session). &quot;entities&quot;: [ # Required. The collection of entities associated with this session entity type. { # An **entity entry** for an associated entity type. &quot;synonyms&quot;: [ # Required. A collection of value synonyms. For example, if the entity type is *vegetable*, and `value` is *scallions*, a synonym could be *green onions*. For `KIND_LIST` entity types: * This collection must contain exactly one synonym equal to `value`. &quot;A String&quot;, ], &quot;value&quot;: &quot;A String&quot;, # Required. The primary value associated with this entity entry. For example, if the entity type is *vegetable*, the value could be *scallions*. For `KIND_MAP` entity types: * A reference value to be used in place of synonyms. For `KIND_LIST` entity types: * A string that can contain references to other entity types (with or without aliases). }, ], &quot;entityOverrideMode&quot;: &quot;A String&quot;, # Required. Indicates whether the additional data should override or supplement the custom entity type definition. &quot;name&quot;: &quot;A String&quot;, # Required. The unique identifier of this session entity type. Format: `projects//agent/sessions//entityTypes/`, or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. `` must be the display name of an existing entity type in the same agent that will be overridden or supplemented. } updateMask: string, Optional. The mask to control which fields get updated. x__xgafv: string, V1 error format. 
Allowed values 1 - v1 error format 2 - v2 error format Returns: An object of the form: { # A session represents a conversation between a Dialogflow agent and an end-user. You can create special entities, called session entities, during a session. Session entities can extend or replace custom entity types and only exist during the session that they were created for. All session data, including session entities, is stored by Dialogflow for 20 minutes. For more information, see the [session entity guide](https://cloud.google.com/dialogflow/docs/entities-session). &quot;entities&quot;: [ # Required. The collection of entities associated with this session entity type. { # An **entity entry** for an associated entity type. &quot;synonyms&quot;: [ # Required. A collection of value synonyms. For example, if the entity type is *vegetable*, and `value` is *scallions*, a synonym could be *green onions*. For `KIND_LIST` entity types: * This collection must contain exactly one synonym equal to `value`. &quot;A String&quot;, ], &quot;value&quot;: &quot;A String&quot;, # Required. The primary value associated with this entity entry. For example, if the entity type is *vegetable*, the value could be *scallions*. For `KIND_MAP` entity types: * A reference value to be used in place of synonyms. For `KIND_LIST` entity types: * A string that can contain references to other entity types (with or without aliases). }, ], &quot;entityOverrideMode&quot;: &quot;A String&quot;, # Required. Indicates whether the additional data should override or supplement the custom entity type definition. &quot;name&quot;: &quot;A String&quot;, # Required. The unique identifier of this session entity type. Format: `projects//agent/sessions//entityTypes/`, or `projects//agent/environments//users//sessions//entityTypes/`. If `Environment ID` is not specified, we assume default &#x27;draft&#x27; environment. If `User ID` is not specified, we assume default &#x27;-&#x27; user. 
`` must be the display name of an existing entity type in the same agent that will be overridden or supplemented. }</pre> </div> </body></html>
googleapis/google-api-python-client
docs/dyn/dialogflow_v2.projects.locations.agent.environments.users.sessions.entityTypes.html
HTML
apache-2.0
22,134
import json from time import sleep from logger import logger from perfrunner.helpers.cbmonitor import with_stats from perfrunner.tests import PerfTest class ViewTest(PerfTest): """ The test measures time it takes to build views. This is just a base class, actual measurements happen in initial and incremental indexing tests. """ def __init__(self, *args): super(ViewTest, self).__init__(*args) if self.view_settings.disabled_updates: options = {'updateMinChanges': 0, 'replicaUpdateMinChanges': 0} else: options = None self.ddocs = self._parse_ddocs(self.view_settings, options) def define_ddocs(self): for master in self.cluster_spec.yield_masters(): for bucket in self.test_config.buckets: for ddoc_name, ddoc in self.ddocs.iteritems(): self.rest.create_ddoc(master, bucket, ddoc_name, ddoc) def build_index(self): """Query the views in order to build up the index""" for master in self.cluster_spec.yield_masters(): for bucket in self.test_config.buckets: for ddoc_name, ddoc in self.ddocs.iteritems(): for view_name in ddoc[self.view_key]: params = {'limit': 10} if self.view_key == 'views': self.rest.query_view(master, bucket, ddoc_name, view_name, params) elif self.view_key == 'spatial': self.rest.query_spatial(master, bucket, ddoc_name, view_name, params) sleep(self.MONITORING_DELAY) for master in self.cluster_spec.yield_masters(): self.monitor.monitor_task(master, 'indexer') def compact_index(self): for master in self.cluster_spec.yield_masters(): for bucket in self.test_config.buckets: for ddoc_name in self.ddocs: self.rest.trigger_index_compaction(master, bucket, ddoc_name) for master in self.cluster_spec.yield_masters(): self.monitor.monitor_task(master, 'view_compaction') @property def view_settings(self): """The configuration of the views. 
The settings are in different sections depending on whether it's a mapreduce or spatial view """ raise NotImplementedError( "Any subclass of `ViewTest` must have a view_settings property") @property def view_key(self): """The property defining the views. This is the property in the design document that contains the view definitions. For mapreduce views it's "views" for spatial views it's "spatial". """ raise NotImplementedError( "Any subclass of `ViewTest` must have a view_key property") @staticmethod def _parse_ddocs(view_settings, options): ddocs = {} if view_settings.indexes is None: logger.interrupt('Missing indexes param') for index in view_settings.indexes: ddoc_name, ddoc = index.split('::', 1) ddocs[ddoc_name] = json.loads(ddoc) if options: ddocs[ddoc_name]['options'] = options return ddocs class ViewIndexTest(ViewTest): """ Initial indexing test with access phase for data/index mutation. It is critical to disable automatic index updates so that we can control index building. """ @with_stats def build_init_index(self): return super(ViewIndexTest, self).build_index() @with_stats def build_incr_index(self): super(ViewIndexTest, self).build_index() def run(self): self.load() self.wait_for_persistence() self.compact_bucket() self.reporter.start() self.define_ddocs() from_ts, to_ts = self.build_init_index() time_elapsed = (to_ts - from_ts) / 1000.0 time_elapsed = self.reporter.finish('Initial index', time_elapsed) self.reporter.post_to_sf( *self.metric_helper.get_indexing_meta(value=time_elapsed, index_type='Initial') ) self.access() self.wait_for_persistence() self.compact_bucket() from_ts, to_ts = self.build_incr_index() time_elapsed = (to_ts - from_ts) / 1000.0 time_elapsed = self.reporter.finish('Incremental index', time_elapsed) self.reporter.post_to_sf( *self.metric_helper.get_indexing_meta(value=time_elapsed, index_type='Incremental') ) class ViewQueryTest(ViewTest): """ The base test which defines workflow for different view query tests. 
Access phase represents mixed KV workload and queries on spatial views. """ @with_stats def access(self): super(ViewTest, self).timer() def run(self): self.load() self.wait_for_persistence() self.compact_bucket() self.hot_load() self.define_ddocs() self.build_index() self.workload = self.test_config.access_settings self.access_bg() self.access()
mikewied/perfrunner
perfrunner/tests/view.py
Python
apache-2.0
5,397
/* ************************************************************************ * Copyright 2013 Advanced Micro Devices, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ************************************************************************/ #ifndef ROT_H_ #define ROT_H_ #include <gtest/gtest.h> #include <clBLAS.h> #include <common.h> #include <BlasBase.h> #include <blas-math.h> using namespace clMath; using ::testing::TestWithParam; class ROT : public TestWithParam< ::std::tr1::tuple< int, // N int, // offx int, // incx int, // offy int, // incy ComplexLong, // C ComplexLong, // S int // numCommandQueues > > { public: void getParams(TestParams *params) { params->N = N; params->offa= offa; //offx params->offb = offb; // offy params->incx = incx; params->incy = incy; params->alpha = alpha; // C params->beta = beta; //S params->numCommandQueues = numCommandQueues; } protected: virtual void SetUp() { N = ::std::tr1::get<0>(GetParam()); offa = ::std::tr1::get<1>(GetParam()); incx = ::std::tr1::get<2>(GetParam()); offb = ::std::tr1::get<3>(GetParam()); incy = ::std::tr1::get<4>(GetParam()); alpha = ::std::tr1::get<5>(GetParam()); beta = ::std::tr1::get<6>(GetParam()); numCommandQueues = ::std::tr1::get<7>(GetParam()); base = ::clMath::BlasBase::getInstance(); useNumCommandQueues = base->useNumCommandQueues(); if (useNumCommandQueues) { numCommandQueues = base->numCommandQueues(); } } size_t N, offa, offb; int incx, incy; ComplexLong alpha; ComplexLong beta; ::clMath::BlasBase 
*base; bool useNumCommandQueues; cl_uint numCommandQueues; }; #endif
arrayfire/clBLAS
src/tests/include/rot.h
C
apache-2.0
2,402
/* * Hibernate Validator, declare and validate application constraints * * License: Apache License, Version 2.0 * See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>. */ package org.hibernate.validator.ap.internal.checks; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.util.Collections; import java.util.Set; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.TypeElement; import org.hibernate.validator.ap.internal.util.AnnotationApiHelper; import org.hibernate.validator.ap.internal.util.CollectionHelper; /** * Checks, that {@link RetentionPolicy#RUNTIME} is declared for constraint annotation types. * * @author Gunnar Morling */ public class RetentionPolicyCheck extends AbstractConstraintCheck { private final AnnotationApiHelper annotationApiHelper; public RetentionPolicyCheck(AnnotationApiHelper annotationApiHelper) { this.annotationApiHelper = annotationApiHelper; } @Override public Set<ConstraintCheckIssue> checkAnnotationType(TypeElement element, AnnotationMirror annotation) { Retention retention = element.getAnnotation( Retention.class ); if ( retention == null ) { return CollectionHelper.asSet( ConstraintCheckIssue.error( element, null, "CONSTRAINT_TYPE_WITH_MISSING_OR_WRONG_RETENTION" ) ); } if ( !retention.value().equals( RetentionPolicy.RUNTIME ) ) { return CollectionHelper.asSet( ConstraintCheckIssue.error( element, annotationApiHelper.getMirror( element.getAnnotationMirrors(), Retention.class ), "CONSTRAINT_TYPE_WITH_MISSING_OR_WRONG_RETENTION" ) ); } return Collections.emptySet(); } }
shahramgdz/hibernate-validator
annotation-processor/src/main/java/org/hibernate/validator/ap/internal/checks/RetentionPolicyCheck.java
Java
apache-2.0
1,731
<html> <head> <link href="../generic.css" rel="stylesheet"/> </head> <body> <table id="item1" style="width: 100%"> <tbody> <tr> <td style="background-color: silver">[One</td> <td style="background-color: silver">Two</td> <td style="background-color: silver">Three</td> <td style="background-color: silver">Four</td> <td style="background-color: silver">Five</td> </tr> <tr> <td style="background-color: silver">Six</td> <td style="background-color: silver">Seven</td> <td style="background-color: silver">Eight</td> <td style="background-color: silver">Nine</td> <td style="background-color: silver">Ten</td> </tr> <tr> <td style="background-color: silver">Eleven</td> <td style="background-color: silver">Twelve</td> <td style="background-color: silver">Thirteen</td> <td style="background-color: silver">Fourteen</td> <td style="background-color: silver">Fifteen</td> </tr> <tr> <td style="background-color: silver">Sixteen</td> <td style="background-color: silver">Seventeen</td> <td style="background-color: silver">Eighteen</td> <td style="background-color: silver">Nineteen</td> <td style="background-color: silver">Twenty</td> </tr> <tr> <td style="background-color: silver">Twenty one</td> <td style="background-color: silver">Twenty two</td> <td style="background-color: silver">Twenty three</td> <td style="background-color: silver">Twenty four</td> <td style="background-color: silver">Twenty five]</td> </tr> </tbody> </table> </body> </html>
corinthia/corinthia-editorlib
tests/tables/regionFromRange10-expected.html
HTML
apache-2.0
1,817
/* ** ################################################################### ** Processors: LPC54114J256BD64_cm4 ** LPC54114J256UK49_cm4 ** ** Compilers: Keil ARM C/C++ Compiler ** GNU C Compiler ** IAR ANSI C/C++ Compiler for ARM ** MCUXpresso Compiler ** ** Reference manual: LPC5411x User manual Rev. 1.1 25 May 2016 ** Version: rev. 1.0, 2016-04-29 ** Build: b180802 ** ** Abstract: ** Provides a system configuration function and a global variable that ** contains the system frequency. It configures the device and initializes ** the oscillator (PLL) that is part of the microcontroller device. ** ** Copyright 2016 Freescale Semiconductor, Inc. ** Copyright 2016-2018 NXP ** ** SPDX-License-Identifier: BSD-3-Clause ** ** http: www.nxp.com ** mail: [email protected] ** ** Revisions: ** - rev. 1.0 (2016-04-29) ** Initial version. ** ** ################################################################### */ /*! * @file LPC54114_cm4 * @version 1.0 * @date 2016-04-29 * @brief Device specific configuration file for LPC54114_cm4 (header file) * * Provides a system configuration function and a global variable that contains * the system frequency. It configures the device and initializes the oscillator * (PLL) that is part of the microcontroller device. */ #ifndef _SYSTEM_LPC54114_cm4_H_ #define _SYSTEM_LPC54114_cm4_H_ /**< Symbol preventing repeated inclusion */ #ifdef __cplusplus extern "C" { #endif #include <stdint.h> #define DEFAULT_SYSTEM_CLOCK 12000000u /* Default System clock value */ #define CLK_RTC_32K_CLK 32768u /* RTC oscillator 32 kHz output (32k_clk */ #define CLK_FRO_12MHZ 12000000u /* FRO 12 MHz (fro_12m) */ #define CLK_FRO_48MHZ 48000000u /* FRO 48 MHz (fro_48m) */ #define CLK_FRO_96MHZ 96000000u /* FRO 96 MHz (fro_96m) */ #define CLK_CLK_IN 0u /* Default CLK_IN pin clock */ /** * @brief System clock frequency (core clock) * * The system clock frequency supplied to the SysTick timer and the processor * core clock. 
This variable can be used by the user application to setup the * SysTick timer or configure other parameters. It may also be used by debugger to * query the frequency of the debug timer or configure the trace clock speed * SystemCoreClock is initialized with a correct predefined value. */ extern uint32_t SystemCoreClock; /** * @brief Setup the microcontroller system. * * Typically this function configures the oscillator (PLL) that is part of the * microcontroller device. For systems with variable clock speed it also updates * the variable SystemCoreClock. SystemInit is called from startup_device file. */ void SystemInit (void); /** * @brief Updates the SystemCoreClock variable. * * It must be called whenever the core clock is changed during program * execution. SystemCoreClockUpdate() evaluates the clock register settings and calculates * the current core clock. */ void SystemCoreClockUpdate (void); /** * @brief SystemInit function hook. * * This weak function allows to call specific initialization code during the * SystemInit() execution.This can be used when an application specific code needs * to be called as close to the reset entry as possible (for example the Multicore * Manager MCMGR_EarlyInit() function call). * NOTE: No global r/w variables can be used in this hook function because the * initialization of these variables happens after this function. */ void SystemInitHook (void); #ifdef __cplusplus } #endif #endif /* _SYSTEM_LPC54114_cm4_H_ */
ldts/zephyr
ext/hal/nxp/mcux/devices/LPC54114/system_LPC54114_cm4.h
C
apache-2.0
3,923
// Copyright 2017 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package ctmapper maps from a verifiable log to verifiable map. package ctmapper import "crypto/sha256" // HashDomain converts a domain into a map index. func HashDomain(key string) []byte { h := sha256.New() h.Write([]byte(key)) return h.Sum(nil) }
Martin2112/trillian
examples/ct/ctmapper/hasher.go
GO
apache-2.0
867
object Test { def main(args: Array[String]): Unit = { val a = E.A println(a) } }
som-snytt/dotty
tests/run/i7410/Test_2.scala
Scala
apache-2.0
92
/* * Copyright 2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Classes that allow defining a native binary platform. */ @org.gradle.api.Incubating package org.gradle.nativeplatform.platform;
gstevey/gradle
subprojects/platform-native/src/main/java/org/gradle/nativeplatform/platform/package-info.java
Java
apache-2.0
753
/*jshint globalstrict:false, strict:false, maxlen: 500 */ /*global assertEqual, AQL_EXECUTE, AQL_EXPLAIN */ //////////////////////////////////////////////////////////////////////////////// /// @brief tests for Ahuacatl, skiplist index queries /// /// @file /// /// DISCLAIMER /// /// Copyright 2010-2012 triagens GmbH, Cologne, Germany /// /// Licensed under the Apache License, Version 2.0 (the "License"); /// you may not use this file except in compliance with the License. /// You may obtain a copy of the License at /// /// http://www.apache.org/licenses/LICENSE-2.0 /// /// Unless required by applicable law or agreed to in writing, software /// distributed under the License is distributed on an "AS IS" BASIS, /// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. /// See the License for the specific language governing permissions and /// limitations under the License. /// /// Copyright holder is triAGENS GmbH, Cologne, Germany /// /// @author Jan Steemann /// @author Copyright 2012, triAGENS GmbH, Cologne, Germany //////////////////////////////////////////////////////////////////////////////// var internal = require("internal"); var jsunity = require("jsunity"); var helper = require("@arangodb/aql-helper"); var getQueryResults = helper.getQueryResults; //////////////////////////////////////////////////////////////////////////////// /// @brief test suite //////////////////////////////////////////////////////////////////////////////// function ahuacatlSkiplistTestSuite () { var skiplist; var explain = function (query, params) { return helper.getCompactPlan(AQL_EXPLAIN(query, params, { optimizer: { rules: [ "-all", "+use-indexes" ] } })).map(function(node) { return node.type; }); }; return { //////////////////////////////////////////////////////////////////////////////// /// @brief set up //////////////////////////////////////////////////////////////////////////////// setUp : function () { internal.db._drop("UnitTestsAhuacatlSkiplist"); skiplist = 
internal.db._create("UnitTestsAhuacatlSkiplist"); let docs = []; for (var i = 1; i <= 5; ++i) { for (var j = 1; j <= 5; ++j) { docs.push({ "a" : i, "b": j }); } } skiplist.insert(docs); skiplist.ensureSkiplist("a", "b"); }, //////////////////////////////////////////////////////////////////////////////// /// @brief tear down //////////////////////////////////////////////////////////////////////////////// tearDown : function () { internal.db._drop("UnitTestsAhuacatlSkiplist"); skiplist = null; }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field without results //////////////////////////////////////////////////////////////////////////////// testEqSingleVoid1 : function () { var query = "FOR v IN " + skiplist.name() + " FILTER v.a == 99 RETURN v"; var expected = [ ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field without results //////////////////////////////////////////////////////////////////////////////// testEqSingleVoid2 : function () { var query = "FOR v IN " + skiplist.name() + " FILTER 99 == v.a RETURN v"; var expected = [ ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with equality //////////////////////////////////////////////////////////////////////////////// testEqSingle1 : function () { var query = "FOR v IN " + skiplist.name() + " FILTER v.a == 1 
SORT v.b RETURN [ v.a, v.b ]"; var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with equality //////////////////////////////////////////////////////////////////////////////// testEqSingle2 : function () { var query = "FOR v IN " + skiplist.name() + " FILTER 1 == v.a SORT v.b RETURN [ v.a, v.b ]"; var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "SortNode","CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with equality //////////////////////////////////////////////////////////////////////////////// testEqSingle3 : function () { var query = "FOR v IN " + skiplist.name() + " FILTER v.a == 5 SORT v.b RETURN [ v.a, v.b ]"; var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with equality 
//////////////////////////////////////////////////////////////////////////////// testEqSingle4 : function () { var query = "FOR v IN " + skiplist.name() + " FILTER 5 == v.a SORT v.b RETURN [ v.a, v.b ]"; var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index - optimizer should remove sort. //////////////////////////////////////////////////////////////////////////////// testEqSingle5 : function () { var query = "FOR v IN " + skiplist.name() + " FILTER v.a == 1 SORT v.a, v.b RETURN [ v.a, v.b ]"; var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index - optimizer should remove sort. 
//////////////////////////////////////////////////////////////////////////////// testEqSingle6 : function () { var query = "FOR v IN " + skiplist.name() + " FILTER v.a >= 4 SORT v.a RETURN v.a"; var expected = [ 4, 4, 4, 4, 4, 5, 5, 5, 5, 5 ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index - optimizer should remove sort. //////////////////////////////////////////////////////////////////////////////// testEqSingle7: function () { var query = "FOR v IN " + skiplist.name() + " FILTER v.a >= 4 && v.a < 5 SORT v.a RETURN v.a"; var expected = [ 4, 4, 4, 4, 4 ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with greater than //////////////////////////////////////////////////////////////////////////////// testGtSingle1 : function () { var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a > 4 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with greater than //////////////////////////////////////////////////////////////////////////////// testGtSingle2 : function () { var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ], [ 
5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 4 < v.a SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with greater equal //////////////////////////////////////////////////////////////////////////////// testGeSingle1 : function () { var query = "FOR v IN " + skiplist.name() + " FILTER v.a >= 5 SORT v.b RETURN [ v.a, v.b ]"; var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with greater equal //////////////////////////////////////////////////////////////////////////////// testLtSingle1 : function () { var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 5 <= v.a SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with less than //////////////////////////////////////////////////////////////////////////////// testLtSingle2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a < 2 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with less than 
//////////////////////////////////////////////////////////////////////////////// testGtSingle3 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 2 > v.a SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with greater equal //////////////////////////////////////////////////////////////////////////////// testLtSingle4 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a <= 1 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with greater equal //////////////////////////////////////////////////////////////////////////////// testGeSingle2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 >= v.a SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with a range access //////////////////////////////////////////////////////////////////////////////// testRangeSingle1 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ], [ 2, 1 ], [ 2, 2 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a >= 1 && v.a <= 2 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with a range access 
//////////////////////////////////////////////////////////////////////////////// testRangeSingle2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ], [ 2, 1 ], [ 2, 2 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 <= v.a && 2 >= v.a SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with a range access //////////////////////////////////////////////////////////////////////////////// testRangeSingle3 : function () { var expected = [ ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a > 1 && v.a < 2 RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with a range access //////////////////////////////////////////////////////////////////////////////// testRangeSingle4 : function () { var expected = [ ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 < v.a && 2 > v.a RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with a range access //////////////////////////////////////////////////////////////////////////////// testRangeSingle5 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 <= v.a && 2 > v.a SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with a range access //////////////////////////////////////////////////////////////////////////////// testRangeSingle6 : function () { 
var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 <= v.a && 2 > v.a SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with a range access //////////////////////////////////////////////////////////////////////////////// testRangeSingle7 : function () { var expected = [ [ 2, 1 ], [ 2, 2 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a > 1 && v.a <= 2 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test the first skiplist index field with a range access //////////////////////////////////////////////////////////////////////////////// testRangeSingle8 : function () { var expected = [ [ 2, 1 ], [ 2, 2 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 < v.a && 2 >= v.a SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqMultiVoid1 : function () { var expected = [ ]; var query = "FOR v IN " + skiplist.name() + " FILTER v.a == 99 && v.b == 1 RETURN v"; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators 
//////////////////////////////////////////////////////////////////////////////// testEqMultiVoid2 : function () { var expected = [ ]; var query = "FOR v IN " + skiplist.name() + " FILTER 99 == v.a && 1 == v.b RETURN v"; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqMultiVoid3 : function () { var expected = [ ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a == 1 && v.b == 99 RETURN v"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqMultiVoid4 : function () { var expected = [ ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 == v.a && 99 == v.b RETURN v"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqMultiAll1 : function () { for (var i = 1; i <= 5; ++i) { for (var j = 1; j <=5; ++j) { var expected = [ [ i, j ] ]; var query = "FOR v IN " + skiplist.name() + " FILTER v.a == @a && v.b == @b RETURN [ v.a, v.b ]"; var actual = getQueryResults(query, { "a": i, "b": j }); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "ReturnNode" ], 
explain(query, { a: i, b: j })); } } }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqMultiAll2 : function () { for (var i = 1; i <= 5; ++i) { for (var j = 1; j <=5; ++j) { var expected = [ [ i, j ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER @a == v.a && @b == v.b RETURN [ v.a, v.b ]", { "a" : i, "b" : j }); assertEqual(expected, actual); } } }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqGt1 : function () { var expected = [ [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a == 1 && v.b > 2 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqGt2 : function () { var expected = [ [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 == v.a && 2 < v.b SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqGt3 : function () { var expected = [ [ 4, 4 ], [ 4, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a == 4 && v.b > 3 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, 
//////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqGt4 : function () { var expected = [ [ 4, 4 ], [ 4, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 4 == v.a && 3 < v.b SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqLt1 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a == 1 && v.b < 4 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqLt2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 == v.a && 4 > v.b SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqLt3 : function () { var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a == 5 && v.b < 5 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators 
//////////////////////////////////////////////////////////////////////////////// testEqLt4 : function () { var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 5 == v.a && 5 > v.b SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqLe1 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a == 1 && v.b <= 3 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqLe2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 1 == v.a && 3 >= v.b SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqLe3 : function () { var expected = [ [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a == 5 && v.b <= 4 SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testEqLe4 : function () { var expected = [ [ 5, 1 ], [ 5, 2 ], 
[ 5, 3 ], [ 5, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 5 == v.a && 4 >= v.b SORT v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtlt1 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 2, 1 ], [ 2, 2 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a < 3 && v.b < 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtlt2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 2, 1 ], [ 2, 2 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 > v.a && 3 > v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeLe1 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 2, 1 ], [ 2, 2 ], [ 2, 3 ], [ 3, 1 ], [ 3, 2 ], [ 3, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a <= 3 && v.b <= 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeLe2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 2, 1 ], [ 2, 2 ], [ 2, 3 ], [ 3, 1 ], [ 3, 2 ], [ 
3, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 >= v.a && 3 >= v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtLe1 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 2, 1 ], [ 2, 2 ], [ 2, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a < 3 && v.b <= 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtLe2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 2, 1 ], [ 2, 2 ], [ 2, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 > v.a && 3 >= v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeLt1 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 2, 1 ], [ 2, 2 ], [ 3, 1 ], [ 3, 2 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a <= 3 && v.b < 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeLt2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 2, 1 ], [ 2, 2 ], [ 3, 1 ], [ 3, 2 ] ]; var actual = 
getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 >= v.a && 3 > v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtEq1 : function () { var expected = [ [ 1, 4 ], [ 2, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a < 3 && v.b == 4 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtEq2 : function () { var expected = [ [ 1, 4 ], [ 2, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 > v.a && 4 == v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeEq1 : function () { var expected = [ [ 1, 4 ], [ 2, 4 ], [ 3, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a <= 3 && v.b == 4 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeEq2 : function () { var expected = [ [ 1, 4 ], [ 2, 4 ], [ 3, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 >= v.a && 4 == v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, 
//////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtGt1 : function () { var expected = [ [ 1, 4 ], [ 1, 5 ], [ 2, 4 ], [ 2, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a < 3 && v.b > 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtGt2 : function () { var expected = [ [ 1, 4 ], [ 1, 5 ], [ 2, 4 ], [ 2, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 > v.a && 3 < v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeGe1 : function () { var expected = [ [ 1, 3 ], [ 1, 4 ], [ 1, 5 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ], [ 3, 3 ], [ 3, 4 ], [ 3, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a <= 3 && v.b >= 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeGe2 : function () { var expected = [ [ 1, 3 ], [ 1, 4 ], [ 1, 5 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ], [ 3, 3 ], [ 3, 4 ], [ 3, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 >= v.a && 3 <= v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, 
//////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtGe1 : function () { var expected = [ [ 1, 3 ], [ 1, 4 ], [ 1, 5 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a < 3 && v.b >= 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLtGe2 : function () { var expected = [ [ 1, 3 ], [ 1, 4 ], [ 1, 5 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 > v.a && 3 <= v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeGt1 : function () { var expected = [ [ 1, 4 ], [ 1, 5 ], [ 2, 4 ], [ 2, 5 ], [ 3, 4 ], [ 3, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a <= 3 && v.b > 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testLeGt2 : function () { var expected = [ [ 1, 4 ], [ 1, 5 ], [ 2, 4 ], [ 2, 5 ], [ 3, 4 ], [ 3, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 >= v.a && 3 < v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, 
//////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGtlt1 : function () { var expected = [ [ 4, 1 ], [ 4, 2 ], [ 5, 1 ], [ 5, 2 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a > 3 && v.b < 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGtlt2 : function () { var expected = [ [ 4, 1 ], [ 4, 2 ], [ 5, 1 ], [ 5, 2 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 < v.a && 3 > v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeLe1 : function () { var expected = [ [ 3, 1 ], [ 3, 2 ], [ 3, 3 ], [ 4, 1 ], [ 4, 2 ], [ 4, 3 ], [ 5, 1 ], [ 5, 2 ], [ 5, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a >= 3 && v.b <= 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeLe2 : function () { var expected = [ [ 3, 1 ], [ 3, 2 ], [ 3, 3 ], [ 4, 1 ], [ 4, 2 ], [ 4, 3 ], [ 5, 1 ], [ 5, 2 ], [ 5, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 <= v.a && 3 >= v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, 
//////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGtLe1 : function () { var expected = [ [ 4, 1 ], [ 4, 2 ], [ 4, 3 ], [ 5, 1 ], [ 5, 2 ], [ 5, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a > 3 && v.b <= 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGtLe2 : function () { var expected = [ [ 4, 1 ], [ 4, 2 ], [ 4, 3 ], [ 5, 1 ], [ 5, 2 ], [ 5, 3 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 < v.a && 3 >= v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeLt1 : function () { var expected = [ [ 3, 1 ], [ 3, 2 ], [ 4, 1 ], [ 4, 2 ], [ 5, 1 ], [ 5, 2 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a >= 3 && v.b < 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeLt2 : function () { var expected = [ [ 3, 1 ], [ 3, 2 ], [ 4, 1 ], [ 4, 2 ], [ 5, 1 ], [ 5, 2 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 <= v.a && 3 > v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, 
//////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGtEq1 : function () { var expected = [ [ 4, 4 ], [ 5, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a > 3 && v.b == 4 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGtEq2 : function () { var expected = [ [ 4, 4 ], [ 5, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 < v.a && 4 == v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeEq1 : function () { var expected = [ [ 3, 4 ], [ 4, 4 ], [ 5, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a >= 3 && v.b == 4 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeEq2 : function () { var expected = [ [ 3, 4 ], [ 4, 4 ], [ 5, 4 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 <= v.a && 4 == v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators 
//////////////////////////////////////////////////////////////////////////////// testGtGt1 : function () { var expected = [ [ 4, 4 ], [ 4, 5 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a > 3 && v.b > 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGtGt2 : function () { var expected = [ [ 4, 4 ], [ 4, 5 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 < v.a && 3 < v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeGe1 : function () { var expected = [ [ 3, 3 ], [ 3, 4 ], [ 3, 5 ], [ 4, 3 ], [ 4, 4 ], [ 4, 5 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a >= 3 && v.b >= 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeGe2 : function () { var expected = [ [ 3, 3 ], [ 3, 4 ], [ 3, 5 ], [ 4, 3 ], [ 4, 4 ], [ 4, 5 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 <= v.a && 3 <= v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators 
//////////////////////////////////////////////////////////////////////////////// testGtGe1 : function () { var expected = [ [ 4, 3 ], [ 4, 4 ], [ 4, 5 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a > 3 && v.b >= 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGtGe2 : function () { var expected = [ [ 4, 3 ], [ 4, 4 ], [ 4, 5 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 < v.a && 3 <= v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeGt1 : function () { var expected = [ [ 3, 4 ], [ 3, 5 ], [ 4, 4 ], [ 4, 5 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER v.a >= 3 && v.b > 3 SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test multiple skiplist fields with multiple operators //////////////////////////////////////////////////////////////////////////////// testGeGt2 : function () { var expected = [ [ 3, 4 ], [ 3, 5 ], [ 4, 4 ], [ 4, 5 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults("FOR v IN " + skiplist.name() + " FILTER 3 <= v.a && 3 < v.b SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test ref access with a constant 
//////////////////////////////////////////////////////////////////////////////// testRefConst1 : function () { var expected = [ [ 3, 1 ], [ 3, 2 ], [ 3, 3 ], [ 3, 4 ], [ 3, 5 ] ]; var actual = getQueryResults("LET x = 3 FOR v IN " + skiplist.name() + " FILTER v.a == x SORT v.a, v.b RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test ref access with a constant //////////////////////////////////////////////////////////////////////////////// testRefConst2 : function () { var expected = [ [ 3, 5 ] ]; var actual = getQueryResults("LET x = 3 LET y = 5 FOR v IN " + skiplist.name() + " FILTER v.a == x && v.b == y RETURN [ v.a, v.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test ref access //////////////////////////////////////////////////////////////////////////////// testRefSingle1 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v1 IN " + skiplist.name() + " FOR v2 IN " + skiplist.name() + " FILTER v1.a == 1 && v2.a == v1.a && v1.b == 1 SORT v1.a, v2.b RETURN [ v1.a, v2.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test ref access //////////////////////////////////////////////////////////////////////////////// testRefSingle2 : function () { var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ] ]; var actual = getQueryResults("FOR v1 IN " + skiplist.name() + " FOR v2 IN " + skiplist.name() + " FILTER 1 == v1.a && v1.a == v2.a && 1 == v1.b SORT v1.a, v2.b RETURN [ v1.a, v2.b ]"); assertEqual(expected, actual); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test ref access with filters on the same attribute 
//////////////////////////////////////////////////////////////////////////////// testRefFilterSame : function () { skiplist.ensureSkiplist("c"); skiplist.ensureSkiplist("d"); skiplist.truncate({ compact: false }); let docs = []; for (var i = 1; i <= 5; ++i) { for (var j = 1; j <= 5; ++j) { docs.push({ "c" : i, "d": j }); } } skiplist.save(docs); var query = "FOR a IN " + skiplist.name() + " FILTER a.c == a.d SORT a.c RETURN [ a.c, a.d ]"; var expected = [ [ 1, 1 ], [ 2, 2 ], [ 3, 3 ], [ 4, 4 ], [ 5, 5 ] ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test ref access with filters on the same attribute //////////////////////////////////////////////////////////////////////////////// testRefFilterNonExisting : function () { var query = "FOR a IN " + skiplist.name() + " FILTER a.e == a.f SORT a.a, a.b RETURN [ a.a, a.b ]"; var expected = [ [ 1, 1 ], [ 1, 2 ], [ 1, 3 ], [ 1, 4 ], [ 1, 5 ], [ 2, 1 ], [ 2, 2 ], [ 2, 3 ], [ 2, 4 ], [ 2, 5 ], [ 3, 1 ], [ 3, 2 ], [ 3, 3 ], [ 3, 4 ], [ 3, 5 ], [ 4, 1 ], [ 4, 2 ], [ 4, 3 ], [ 4, 4 ], [ 4, 5 ], [ 5, 1 ], [ 5, 2 ], [ 5, 3 ], [ 5, 4 ], [ 5, 5 ] ]; var actual = getQueryResults(query); assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, //////////////////////////////////////////////////////////////////////////////// /// @brief test partial coverage //////////////////////////////////////////////////////////////////////////////// testPartialCoverage : function () { skiplist.save({ "a": 20, 
"c": 1 }); skiplist.save({ "a": 20, "c": 2 }); skiplist.save({ "a": 21, "c": 1 }); skiplist.save({ "a": 21, "c": 2 }); // c is not indexed, but we still need to find the correct results var query = "FOR a IN " + skiplist.name() + " FILTER a.a == 20 && a.c == 1 RETURN [ a.a, a.c ]"; var actual = getQueryResults(query); var expected = [ [ 20, 1 ] ]; assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "ReturnNode" ], explain(query)); query = "FOR a IN " + skiplist.name() + " FILTER a.a == 20 SORT a.a, a.c RETURN [ a.a, a.c ]"; actual = getQueryResults(query); expected = [ [ 20, 1 ], [ 20, 2 ] ]; assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); query = "FOR a IN " + skiplist.name() + " FILTER a.a >= 20 SORT a.a, a.c RETURN [ a.a, a.c ]"; actual = getQueryResults(query); expected = [ [ 20, 1 ], [ 20, 2 ], [ 21, 1 ], [ 21, 2 ] ]; assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); query = "FOR a IN " + skiplist.name() + " FILTER a.a >= 21 && a.a <= 21 SORT a.a, a.c RETURN [ a.a, a.c ]"; actual = getQueryResults(query); expected = [ [ 21, 1 ], [ 21, 2 ] ]; assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); query = "FOR a IN " + skiplist.name() + " FILTER a.a >= 20 && a.a <= 21 && a.c <= 2 SORT a.a, a.c 
RETURN [ a.a, a.c ]"; actual = getQueryResults(query); expected = [ [ 20, 1 ], [ 20, 2 ], [ 21, 1 ], [ 21, 2 ] ]; assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); query = "FOR a IN " + skiplist.name() + " FILTER a.a == 20 && a.c >= 1 SORT a.a, a.c RETURN [ a.a, a.c ]"; actual = getQueryResults(query); expected = [ [ 20, 1 ], [ 20, 2 ] ]; assertEqual(expected, actual); assertEqual([ "SingletonNode", "ScatterNode", "RemoteNode", "IndexNode", "RemoteNode", "GatherNode", "CalculationNode", "FilterNode", "CalculationNode", "CalculationNode", "SortNode", "CalculationNode", "ReturnNode" ], explain(query)); }, testInvalidValuesinList : function () { var query = "FOR x IN @list FOR i IN " + skiplist.name() + " FILTER i.a == x SORT i.a RETURN i.a"; var bindParams = { list: [ null, 1, // Find this "blub/bla", "noKey", 2, // And this 123456, { "the": "foxx", "is": "wrapped", "in":"objects"}, [15, "man", "on", "the", "dead", "mans", "chest"], 3 // And this ] }; assertEqual([ 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3], AQL_EXECUTE(query, bindParams).json); } }; } //////////////////////////////////////////////////////////////////////////////// /// @brief executes the test suite //////////////////////////////////////////////////////////////////////////////// jsunity.run(ahuacatlSkiplistTestSuite); return jsunity.done();
wiltonlazary/arangodb
tests/js/server/aql/aql-skiplist-cluster.js
JavaScript
apache-2.0
56,096
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.flowable.idm.engine.test.api.identity;

import java.util.List;

import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.api.FlowableIllegalArgumentException;
import org.flowable.idm.api.User;
import org.flowable.idm.api.UserQuery;
import org.flowable.idm.engine.impl.persistence.entity.UserEntity;
import org.flowable.idm.engine.test.PluggableFlowableIdmTestCase;

/**
 * Tests for {@link UserQuery}.
 *
 * Fixture (created in {@link #setUp()}, removed in {@link #tearDown()}):
 * three users ("kermit" Kermit Thefrog, "fozzie" Fozzie Bear, "gonzo" Gonzo The great)
 * and two groups: "muppets" containing all three users, and "frogs" containing
 * only kermit. All expected counts in the assertions below derive from this fixture.
 *
 * @author Joram Barrez
 */
public class UserQueryTest extends PluggableFlowableIdmTestCase {

    @Override
    protected void setUp() throws Exception {
        super.setUp();

        createUser("kermit", "Kermit", "Thefrog", "[email protected]");
        createUser("fozzie", "Fozzie", "Bear", "[email protected]");
        createUser("gonzo", "Gonzo", "The great", "[email protected]");

        idmIdentityService.saveGroup(idmIdentityService.newGroup("muppets"));
        idmIdentityService.saveGroup(idmIdentityService.newGroup("frogs"));

        idmIdentityService.createMembership("kermit", "muppets");
        idmIdentityService.createMembership("kermit", "frogs");
        idmIdentityService.createMembership("fozzie", "muppets");
        idmIdentityService.createMembership("gonzo", "muppets");
    }

    /** Creates and persists a user with the given attributes; returns the saved user. */
    private User createUser(String id, String firstName, String lastName, String email) {
        User user = idmIdentityService.newUser(id);
        user.setFirstName(firstName);
        user.setLastName(lastName);
        user.setEmail(email);
        idmIdentityService.saveUser(user);
        return user;
    }

    @Override
    protected void tearDown() throws Exception {
        // Clean up the fixture so test methods stay independent of each other.
        idmIdentityService.deleteUser("kermit");
        idmIdentityService.deleteUser("fozzie");
        idmIdentityService.deleteUser("gonzo");

        idmIdentityService.deleteGroup("muppets");
        idmIdentityService.deleteGroup("frogs");

        super.tearDown();
    }

    public void testQueryByNoCriteria() {
        // Without criteria, the query should return all three fixture users.
        UserQuery query = idmIdentityService.createUserQuery();
        verifyQueryResults(query, 3);
    }

    public void testQueryById() {
        UserQuery query = idmIdentityService.createUserQuery().userId("kermit");
        verifyQueryResults(query, 1);
    }

    public void testQueryByInvalidId() {
        UserQuery query = idmIdentityService.createUserQuery().userId("invalid");
        verifyQueryResults(query, 0);

        // A null id is rejected eagerly rather than returning an empty result.
        try {
            idmIdentityService.createUserQuery().userId(null).singleResult();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }
    }

    public void testQueryByIdIgnoreCase() {
        UserQuery query = idmIdentityService.createUserQuery().userIdIgnoreCase("KErmit");
        verifyQueryResults(query, 1);
    }

    public void testQueryByFirstName() {
        UserQuery query = idmIdentityService.createUserQuery().userFirstName("Gonzo");
        verifyQueryResults(query, 1);

        User result = query.singleResult();
        assertEquals("gonzo", result.getId());
    }

    public void testQueryByInvalidFirstName() {
        UserQuery query = idmIdentityService.createUserQuery().userFirstName("invalid");
        verifyQueryResults(query, 0);

        try {
            idmIdentityService.createUserQuery().userFirstName(null).singleResult();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }
    }

    public void testQueryByFirstNameLike() {
        // "%o%" matches Fozzie and Gonzo.
        UserQuery query = idmIdentityService.createUserQuery().userFirstNameLike("%o%");
        verifyQueryResults(query, 2);

        query = idmIdentityService.createUserQuery().userFirstNameLike("Ker%");
        verifyQueryResults(query, 1);
    }

    public void testQueryByInvalidFirstNameLike() {
        UserQuery query = idmIdentityService.createUserQuery().userFirstNameLike("%mispiggy%");
        verifyQueryResults(query, 0);

        try {
            idmIdentityService.createUserQuery().userFirstNameLike(null).singleResult();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }
    }

    public void testQueryByFirstNameLikeIgnoreCase() {
        UserQuery query = idmIdentityService.createUserQuery().userFirstNameLikeIgnoreCase("%O%");
        verifyQueryResults(query, 2);

        query = idmIdentityService.createUserQuery().userFirstNameLikeIgnoreCase("KEr%");
        verifyQueryResults(query, 1);
    }

    public void testQueryByLastName() {
        UserQuery query = idmIdentityService.createUserQuery().userLastName("Bear");
        verifyQueryResults(query, 1);

        User result = query.singleResult();
        assertEquals("fozzie", result.getId());
    }

    public void testQueryByInvalidLastName() {
        UserQuery query = idmIdentityService.createUserQuery().userLastName("invalid");
        verifyQueryResults(query, 0);

        try {
            idmIdentityService.createUserQuery().userLastName(null).singleResult();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }
    }

    public void testQueryByLastNameLike() {
        // "%rog%" matches Thefrog; "%ea%" matches Bear and The great.
        UserQuery query = idmIdentityService.createUserQuery().userLastNameLike("%rog%");
        verifyQueryResults(query, 1);

        query = idmIdentityService.createUserQuery().userLastNameLike("%ea%");
        verifyQueryResults(query, 2);
    }

    public void testQueryByLastNameLikeIgnoreCase() {
        UserQuery query = idmIdentityService.createUserQuery().userLastNameLikeIgnoreCase("%ROg%");
        verifyQueryResults(query, 1);

        query = idmIdentityService.createUserQuery().userLastNameLikeIgnoreCase("%Ea%");
        verifyQueryResults(query, 2);
    }

    public void testQueryByFullNameLike() {
        // Full-name matching searches first and last name combined.
        UserQuery query = idmIdentityService.createUserQuery().userFullNameLike("%erm%");
        verifyQueryResults(query, 1);

        query = idmIdentityService.createUserQuery().userFullNameLike("%ea%");
        verifyQueryResults(query, 2);

        query = idmIdentityService.createUserQuery().userFullNameLike("%e%");
        verifyQueryResults(query, 3);
    }

    public void testQueryByFullNameLikeIgnoreCase() {
        UserQuery query = idmIdentityService.createUserQuery().userFullNameLikeIgnoreCase("%ERm%");
        verifyQueryResults(query, 1);

        query = idmIdentityService.createUserQuery().userFullNameLikeIgnoreCase("%Ea%");
        verifyQueryResults(query, 2);

        query = idmIdentityService.createUserQuery().userFullNameLikeIgnoreCase("%E%");
        verifyQueryResults(query, 3);
    }

    public void testQueryByFirstAndLastNameCombinedLike() {
        // The pattern spans the boundary between first name and last name.
        UserQuery query = idmIdentityService.createUserQuery().userFullNameLike("%ermit The%");
        verifyQueryResults(query, 1);
    }

    public void testQueryByInvalidLastNameLike() {
        UserQuery query = idmIdentityService.createUserQuery().userLastNameLike("%invalid%");
        verifyQueryResults(query, 0);

        try {
            idmIdentityService.createUserQuery().userLastNameLike(null).singleResult();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }
    }

    public void testQueryByEmail() {
        UserQuery query = idmIdentityService.createUserQuery().userEmail("[email protected]");
        verifyQueryResults(query, 1);
    }

    public void testQueryByInvalidEmail() {
        UserQuery query = idmIdentityService.createUserQuery().userEmail("invalid");
        verifyQueryResults(query, 0);

        try {
            idmIdentityService.createUserQuery().userEmail(null).singleResult();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }
    }

    public void testQueryByEmailLike() {
        UserQuery query = idmIdentityService.createUserQuery().userEmailLike("%muppetshow.com");
        verifyQueryResults(query, 3);

        query = idmIdentityService.createUserQuery().userEmailLike("%kermit%");
        verifyQueryResults(query, 1);
    }

    public void testQueryByInvalidEmailLike() {
        UserQuery query = idmIdentityService.createUserQuery().userEmailLike("%invalid%");
        verifyQueryResults(query, 0);

        try {
            idmIdentityService.createUserQuery().userEmailLike(null).singleResult();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }
    }

    public void testQuerySorting() {
        // asc
        assertEquals(3, idmIdentityService.createUserQuery().orderByUserId().asc().count());
        assertEquals(3, idmIdentityService.createUserQuery().orderByUserEmail().asc().count());
        assertEquals(3, idmIdentityService.createUserQuery().orderByUserFirstName().asc().count());
        assertEquals(3, idmIdentityService.createUserQuery().orderByUserLastName().asc().count());

        // desc
        assertEquals(3, idmIdentityService.createUserQuery().orderByUserId().desc().count());
        assertEquals(3, idmIdentityService.createUserQuery().orderByUserEmail().desc().count());
        assertEquals(3, idmIdentityService.createUserQuery().orderByUserFirstName().desc().count());
        assertEquals(3, idmIdentityService.createUserQuery().orderByUserLastName().desc().count());

        // Combined with criteria
        UserQuery query = idmIdentityService.createUserQuery().userLastNameLike("%ea%").orderByUserFirstName().asc();
        List<User> users = query.list();
        assertEquals(2, users.size());
        assertEquals("Fozzie", users.get(0).getFirstName());
        assertEquals("Gonzo", users.get(1).getFirstName());
    }

    public void testQueryInvalidSortingUsage() {
        // Calling list() after orderBy without asc()/desc() must fail.
        try {
            idmIdentityService.createUserQuery().orderByUserId().list();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }

        // Same for chaining a second orderBy before resolving the first.
        try {
            idmIdentityService.createUserQuery().orderByUserId().orderByUserEmail().list();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }
    }

    public void testQueryByMemberOf() {
        UserQuery query = idmIdentityService.createUserQuery().memberOfGroup("muppets");
        verifyQueryResults(query, 3);

        query = idmIdentityService.createUserQuery().memberOfGroup("frogs");
        verifyQueryResults(query, 1);

        User result = query.singleResult();
        assertEquals("kermit", result.getId());
    }

    public void testQueryByInvalidMemberOf() {
        UserQuery query = idmIdentityService.createUserQuery().memberOfGroup("invalid");
        verifyQueryResults(query, 0);

        try {
            idmIdentityService.createUserQuery().memberOfGroup(null).list();
            fail();
        } catch (FlowableIllegalArgumentException e) {
            // expected
        }
    }

    /**
     * Asserts that both list() and count() agree with the expected count, and
     * that singleResult() behaves consistently: non-null for exactly one hit,
     * null for zero hits, and an exception for more than one hit.
     */
    private void verifyQueryResults(UserQuery query, int countExpected) {
        assertEquals(countExpected, query.list().size());
        assertEquals(countExpected, query.count());

        if (countExpected == 1) {
            assertNotNull(query.singleResult());
        } else if (countExpected > 1) {
            verifySingleResultFails(query);
        } else if (countExpected == 0) {
            assertNull(query.singleResult());
        }
    }

    /** Asserts that singleResult() throws because the query matches more than one user. */
    private void verifySingleResultFails(UserQuery query) {
        try {
            query.singleResult();
            fail();
        } catch (FlowableException e) {
            // expected
        }
    }

    public void testNativeQuery() {
        assertEquals("ACT_ID_USER", idmManagementService.getTableName(User.class));
        assertEquals("ACT_ID_USER", idmManagementService.getTableName(UserEntity.class));
        String tableName = idmManagementService.getTableName(User.class);
        String baseQuerySql = "SELECT * FROM " + tableName;

        assertEquals(3, idmIdentityService.createNativeUserQuery().sql(baseQuerySql).list().size());

        assertEquals(1, idmIdentityService.createNativeUserQuery().sql(baseQuerySql + " where ID_ = #{id}").parameter("id", "kermit").list().size());

        // paging (listPage takes firstResult and maxResults)
        assertEquals(2, idmIdentityService.createNativeUserQuery().sql(baseQuerySql).listPage(0, 2).size());
        assertEquals(2, idmIdentityService.createNativeUserQuery().sql(baseQuerySql).listPage(1, 3).size());
        assertEquals(1, idmIdentityService.createNativeUserQuery().sql(baseQuerySql + " where ID_ = #{id}").parameter("id", "kermit").listPage(0, 1).size());
    }

}
zwets/flowable-engine
modules/flowable-idm-engine/src/test/java/org/flowable/idm/engine/test/api/identity/UserQueryTest.java
Java
apache-2.0
12,866
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.jps.incremental.artifacts.instructions;

import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Conditions;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.PathUtilRt;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.cmdline.ProjectDescriptor;
import org.jetbrains.jps.incremental.artifacts.JarPathUtil;
import org.jetbrains.jps.indices.IgnoredFileIndex;
import org.jetbrains.jps.indices.ModuleExcludeIndex;

import java.io.File;
import java.io.IOException;
import java.util.List;

/**
 * Base implementation of {@link ArtifactCompilerInstructionCreator}: builds copy/extract
 * instructions and registers them in an {@link ArtifactInstructionsBuilderImpl}. Subclasses
 * supply the destination information (directory vs. archive) via the abstract factory methods.
 *
 * @author nik
 */
public abstract class ArtifactCompilerInstructionCreatorBase implements ArtifactCompilerInstructionCreator {
  protected final ArtifactInstructionsBuilderImpl myInstructionsBuilder;

  public ArtifactCompilerInstructionCreatorBase(ArtifactInstructionsBuilderImpl instructionsBuilder) {
    myInstructionsBuilder = instructionsBuilder;
  }

  @Override
  public void addDirectoryCopyInstructions(@NotNull File directoryUrl) {
    // Note: the parameter is a File despite its name; delegates with no extra filter.
    addDirectoryCopyInstructions(directoryUrl, null);
  }

  @Override
  public void addDirectoryCopyInstructions(@NotNull File directory, @Nullable SourceFileFilter filter) {
    addDirectoryCopyInstructions(directory, filter, FileCopyingHandler.DEFAULT);
  }

  /**
   * Registers an instruction to copy the contents of {@code directory} into the current
   * destination. Files excluded from the project are copied only when the root directory
   * itself is excluded (i.e. it was added to the layout explicitly). Does nothing if the
   * current destination cannot be created (see {@link #createDirectoryDestination()}).
   */
  @Override
  public void addDirectoryCopyInstructions(@NotNull File directory,
                                           @Nullable SourceFileFilter filter,
                                           @NotNull FileCopyingHandler copyingHandler) {
    final boolean copyExcluded = myInstructionsBuilder.getRootsIndex().isExcluded(directory);
    SourceFileFilter fileFilter = new SourceFileFilterImpl(filter, myInstructionsBuilder.getRootsIndex(), myInstructionsBuilder.getIgnoredFileIndex(), copyExcluded);
    DestinationInfo destination = createDirectoryDestination();
    if (destination != null) {
      ArtifactRootDescriptor descriptor = myInstructionsBuilder.createFileBasedRoot(directory, fileFilter, destination, copyingHandler);
      if (myInstructionsBuilder.addDestination(descriptor)) {
        onAdded(descriptor);
      }
    }
  }

  @Override
  public void addExtractDirectoryInstruction(@NotNull File jarFile, @NotNull String pathInJar) {
    // No entry filtering: extract everything under pathInJar.
    addExtractDirectoryInstruction(jarFile, pathInJar, Conditions.alwaysTrue());
  }

  /**
   * Registers an instruction to extract the entries of {@code jarFile} located under
   * {@code pathInJar} into the current destination; only entries accepted by
   * {@code pathInJarFilter} are extracted.
   */
  @Override
  public void addExtractDirectoryInstruction(@NotNull File jarFile, @NotNull String pathInJar, @NotNull Condition<String> pathInJarFilter) {
    //an entry of a jar file is excluded if and only if the jar file itself is excluded. In that case we should unpack entries to the artifact
    //because the jar itself is explicitly added to the artifact layout.
    boolean includeExcluded = true;
    final SourceFileFilterImpl filter = new SourceFileFilterImpl(null, myInstructionsBuilder.getRootsIndex(), myInstructionsBuilder.getIgnoredFileIndex(), includeExcluded);
    DestinationInfo destination = createDirectoryDestination();
    if (destination != null) {
      ArtifactRootDescriptor descriptor = myInstructionsBuilder.createJarBasedRoot(jarFile, pathInJar, filter, destination, pathInJarFilter);
      if (myInstructionsBuilder.addDestination(descriptor)) {
        onAdded(descriptor);
      }
    }
  }

  @Override
  public abstract ArtifactCompilerInstructionCreatorBase subFolder(@NotNull String directoryName);

  /** Descends into a '/'-separated relative path by chaining {@link #subFolder(String)} calls. */
  @Override
  public ArtifactCompilerInstructionCreator subFolderByRelativePath(@NotNull String relativeDirectoryPath) {
    final List<String> folders = StringUtil.split(relativeDirectoryPath, "/");
    ArtifactCompilerInstructionCreator current = this;
    for (String folder : folders) {
      current = current.subFolder(folder);
    }
    return current;
  }

  @Override
  public void addFileCopyInstruction(@NotNull File file, @NotNull String outputFileName) {
    addFileCopyInstruction(file, outputFileName, FileCopyingHandler.DEFAULT);
  }

  /**
   * Registers an instruction to copy a single {@code file} as {@code outputFileName} into
   * the current destination. No filtering is applied ({@link SourceFileFilter#ALL}); does
   * nothing if {@link #createFileDestination(String)} returns null.
   */
  @Override
  public void addFileCopyInstruction(@NotNull File file, @NotNull String outputFileName, @NotNull FileCopyingHandler copyingHandler) {
    DestinationInfo destination = createFileDestination(outputFileName);
    if (destination != null) {
      FileBasedArtifactRootDescriptor root = myInstructionsBuilder.createFileBasedRoot(file, SourceFileFilter.ALL, destination, copyingHandler);
      if (myInstructionsBuilder.addDestination(root)) {
        onAdded(root);
      }
    }
  }

  @Override
  public ArtifactInstructionsBuilder getInstructionsBuilder() {
    return myInstructionsBuilder;
  }

  /** Destination for directory contents at the current position, or null if none can be created. */
  @Nullable
  protected abstract DestinationInfo createDirectoryDestination();

  // Implementations may return null to indicate that the file cannot be placed
  // at the current position (callers above check for null).
  protected abstract DestinationInfo createFileDestination(@NotNull String outputFileName);

  /** Hook invoked after a descriptor was successfully registered in the builder. */
  protected abstract void onAdded(ArtifactRootDescriptor descriptor);

  /**
   * Combines an optional user-supplied filter with the project's ignored-file patterns and
   * (unless {@code includeExcluded}) the module exclusion index.
   */
  private static class SourceFileFilterImpl extends SourceFileFilter {
    private final SourceFileFilter myBaseFilter;        // optional delegate filter, may be null
    private final ModuleExcludeIndex myRootsIndex;      // project exclusion roots
    private final IgnoredFileIndex myIgnoredFileIndex;  // ignored file name patterns
    private final boolean myIncludeExcluded;            // if true, excluded files pass the filter

    private SourceFileFilterImpl(@Nullable SourceFileFilter baseFilter,
                                 @NotNull ModuleExcludeIndex rootsIndex,
                                 IgnoredFileIndex patterns,
                                 boolean includeExcluded) {
      myBaseFilter = baseFilter;
      myRootsIndex = rootsIndex;
      myIgnoredFileIndex = patterns;
      myIncludeExcluded = includeExcluded;
    }

    @Override
    public boolean accept(@NotNull String fullFilePath) {
      if (myBaseFilter != null && !myBaseFilter.accept(fullFilePath)) return false;

      if (myIgnoredFileIndex.isIgnored(PathUtilRt.getFileName(fullFilePath))) {
        return false;
      }

      if (!myIncludeExcluded) {
        // JarPathUtil resolves paths that may point inside a jar to the local jar file.
        final File file = JarPathUtil.getLocalFile(fullFilePath);
        if (myRootsIndex.isExcluded(file)) {
          return false;
        }
      }
      return true;
    }

    @Override
    public boolean shouldBeCopied(@NotNull String fullFilePath, ProjectDescriptor projectDescriptor) throws IOException {
      return myBaseFilter == null || myBaseFilter.shouldBeCopied(fullFilePath, projectDescriptor);
    }
  }
}
msebire/intellij-community
jps/jps-builders/src/org/jetbrains/jps/incremental/artifacts/instructions/ArtifactCompilerInstructionCreatorBase.java
Java
apache-2.0
6,437
// Copyright 2006-2008 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <errno.h> #include <signal.h> #include <stdio.h> #include "src/v8.h" #include "include/libplatform/libplatform.h" #include "src/assembler.h" #include "src/base/platform/platform.h" #include "src/bootstrapper.h" #include "src/flags.h" #include "src/list.h" #include "src/snapshot/natives.h" #include "src/snapshot/serialize.h" #ifdef V8_OS_RUNTIMEJS #include <kernel/v8platform.h> #endif using namespace v8; class SnapshotWriter { public: explicit SnapshotWriter(const char* snapshot_file) : fp_(GetFileDescriptorOrDie(snapshot_file)), startup_blob_file_(NULL) {} ~SnapshotWriter() { fclose(fp_); if (startup_blob_file_) fclose(startup_blob_file_); } void SetStartupBlobFile(const char* startup_blob_file) { if (startup_blob_file != NULL) startup_blob_file_ = GetFileDescriptorOrDie(startup_blob_file); } void WriteSnapshot(v8::StartupData blob) const { i::Vector<const i::byte> blob_vector( reinterpret_cast<const i::byte*>(blob.data), blob.raw_size); WriteSnapshotFile(blob_vector); MaybeWriteStartupBlob(blob_vector); } private: void MaybeWriteStartupBlob(const i::Vector<const i::byte>& blob) const { if (!startup_blob_file_) return; size_t written = fwrite(blob.begin(), 1, blob.length(), startup_blob_file_); if (written != static_cast<size_t>(blob.length())) { i::PrintF("Writing snapshot file failed.. Aborting.\n"); exit(1); } } void WriteSnapshotFile(const i::Vector<const i::byte>& blob) const { WriteFilePrefix(); WriteData(blob); WriteFileSuffix(); } void WriteFilePrefix() const { fprintf(fp_, "// Autogenerated snapshot file. 
Do not edit.\n\n"); fprintf(fp_, "#include \"src/v8.h\"\n"); fprintf(fp_, "#include \"src/base/platform/platform.h\"\n\n"); fprintf(fp_, "#include \"src/snapshot/snapshot.h\"\n\n"); fprintf(fp_, "namespace v8 {\n"); fprintf(fp_, "namespace internal {\n\n"); } void WriteFileSuffix() const { fprintf(fp_, "const v8::StartupData* Snapshot::DefaultSnapshotBlob() {\n"); fprintf(fp_, " return &blob;\n"); fprintf(fp_, "}\n\n"); fprintf(fp_, "} // namespace internal\n"); fprintf(fp_, "} // namespace v8\n"); } void WriteData(const i::Vector<const i::byte>& blob) const { fprintf(fp_, "static const byte blob_data[] = {\n"); WriteSnapshotData(blob); fprintf(fp_, "};\n"); fprintf(fp_, "static const int blob_size = %d;\n", blob.length()); fprintf(fp_, "static const v8::StartupData blob =\n"); fprintf(fp_, "{ (const char*) blob_data, blob_size };\n"); } void WriteSnapshotData(const i::Vector<const i::byte>& blob) const { for (int i = 0; i < blob.length(); i++) { if ((i & 0x1f) == 0x1f) fprintf(fp_, "\n"); if (i > 0) fprintf(fp_, ","); fprintf(fp_, "%u", static_cast<unsigned char>(blob.at(i))); } fprintf(fp_, "\n"); } FILE* GetFileDescriptorOrDie(const char* filename) { FILE* fp = base::OS::FOpen(filename, "wb"); if (fp == NULL) { i::PrintF("Unable to open file \"%s\" for writing.\n", filename); exit(1); } return fp; } FILE* fp_; FILE* startup_blob_file_; }; char* GetExtraCode(char* filename) { if (filename == NULL || strlen(filename) == 0) return NULL; ::printf("Embedding extra script: %s\n", filename); FILE* file = base::OS::FOpen(filename, "rb"); if (file == NULL) { fprintf(stderr, "Failed to open '%s': errno %d\n", filename, errno); exit(1); } fseek(file, 0, SEEK_END); size_t size = ftell(file); rewind(file); char* chars = new char[size + 1]; chars[size] = '\0'; for (size_t i = 0; i < size;) { size_t read = fread(&chars[i], 1, size - i, file); if (ferror(file)) { fprintf(stderr, "Failed to read '%s': errno %d\n", filename, errno); exit(1); } i += read; } fclose(file); return 
chars; } #ifdef V8_OS_RUNTIMEJS int mksnapshot_main(int argc, char** argv) { #else int main(int argc, char** argv) { #endif // By default, log code create information in the snapshot. i::FLAG_log_code = true; i::FLAG_logfile_per_isolate = false; // Print the usage if an error occurs when parsing the command line // flags or if the help flag is set. int result = i::FlagList::SetFlagsFromCommandLine(&argc, argv, true); if (result > 0 || (argc != 2 && argc != 3) || i::FLAG_help) { ::printf("Usage: %s [flag] ... outfile\n", argv[0]); i::FlagList::PrintHelp(); return !i::FLAG_help; } i::CpuFeatures::Probe(true); V8::InitializeICU(); #ifdef V8_OS_RUNTIMEJS v8::Platform* platform = new rt::V8Platform(); #else v8::Platform* platform = v8::platform::CreateDefaultPlatform(); #endif v8::V8::InitializePlatform(platform); v8::V8::Initialize(); { SnapshotWriter writer(argv[1]); if (i::FLAG_startup_blob) writer.SetStartupBlobFile(i::FLAG_startup_blob); char* extra_code = GetExtraCode(argc == 3 ? argv[2] : NULL); StartupData blob = v8::V8::CreateSnapshotDataBlob(extra_code); CHECK(blob.data); writer.WriteSnapshot(blob); delete[] extra_code; delete[] blob.data; } V8::Dispose(); V8::ShutdownPlatform(); delete platform; return 0; }
dawangjiaowolaixunshan/runtime
deps/v8/src/snapshot/mksnapshot.cc
C++
apache-2.0
5,393