repo_name
stringlengths 4
116
| path
stringlengths 3
942
| size
stringlengths 1
7
| content
stringlengths 3
1.05M
| license
stringclasses 15
values |
---|---|---|---|---|
yrcourage/netty | codec-http2/src/main/java/io/netty/handler/codec/http2/Http2FrameListener.java | 11025 | /*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.util.internal.UnstableApi;
/**
 * A listener of HTTP/2 frames.
 */
@UnstableApi
public interface Http2FrameListener {
    /**
     * Handles an inbound {@code DATA} frame.
     *
     * @param ctx the context from the handler where the frame was read.
     * @param streamId the subject stream for the frame.
     * @param data payload buffer for the frame. This buffer will be released by the codec.
     * @param padding the number of padding bytes found at the end of the frame.
     * @param endOfStream Indicates whether this is the last frame to be sent from the remote endpoint for this stream.
     * @return the number of bytes that have been processed by the application. The returned bytes are used by the
     * inbound flow controller to determine the appropriate time to expand the inbound flow control window (i.e. send
     * {@code WINDOW_UPDATE}). Returning a value equal to the length of {@code data} + {@code padding} will effectively
     * opt-out of application-level flow control for this frame. Returning a value less than the length of {@code data}
     * + {@code padding} will defer the returning of the processed bytes, which the application must later return via
     * {@link Http2LocalFlowController#consumeBytes(Http2Stream, int)}. The returned value must
     * be >= {@code 0} and <= {@code data.readableBytes()} + {@code padding}.
     */
    int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding,
                   boolean endOfStream) throws Http2Exception;
    /**
     * Handles an inbound {@code HEADERS} frame.
     * <p>
     * Only one of the following methods will be called for each {@code HEADERS} frame sequence.
     * One will be called when the {@code END_HEADERS} flag has been received.
     * <ul>
     * <li>{@link #onHeadersRead(ChannelHandlerContext, int, Http2Headers, int, boolean)}</li>
     * <li>{@link #onHeadersRead(ChannelHandlerContext, int, Http2Headers, int, short, boolean, int, boolean)}</li>
     * <li>{@link #onPushPromiseRead(ChannelHandlerContext, int, int, Http2Headers, int)}</li>
     * </ul>
     * <p>
     * To say it another way; the {@link Http2Headers} will contain all of the headers
     * for the current message exchange step (additional queuing is not necessary).
     *
     * @param ctx the context from the handler where the frame was read.
     * @param streamId the subject stream for the frame.
     * @param headers the received headers.
     * @param padding the number of padding bytes found at the end of the frame.
     * @param endOfStream Indicates whether this is the last frame to be sent from the remote endpoint
     * for this stream.
     */
    void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers, int padding,
                       boolean endOfStream) throws Http2Exception;
    /**
     * Handles an inbound {@code HEADERS} frame with priority information specified.
     * Only called if {@code END_HEADERS} encountered.
     * <p>
     * Only one of the following methods will be called for each {@code HEADERS} frame sequence.
     * One will be called when the {@code END_HEADERS} flag has been received.
     * <ul>
     * <li>{@link #onHeadersRead(ChannelHandlerContext, int, Http2Headers, int, boolean)}</li>
     * <li>{@link #onHeadersRead(ChannelHandlerContext, int, Http2Headers, int, short, boolean, int, boolean)}</li>
     * <li>{@link #onPushPromiseRead(ChannelHandlerContext, int, int, Http2Headers, int)}</li>
     * </ul>
     * <p>
     * To say it another way; the {@link Http2Headers} will contain all of the headers
     * for the current message exchange step (additional queuing is not necessary).
     *
     * @param ctx the context from the handler where the frame was read.
     * @param streamId the subject stream for the frame.
     * @param headers the received headers.
     * @param streamDependency the stream on which this stream depends, or 0 if dependent on the
     * connection.
     * @param weight the new weight for the stream.
     * @param exclusive whether or not the stream should be the exclusive dependent of its parent.
     * @param padding the number of padding bytes found at the end of the frame.
     * @param endOfStream Indicates whether this is the last frame to be sent from the remote endpoint
     * for this stream.
     */
    void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers,
                       int streamDependency, short weight, boolean exclusive, int padding, boolean endOfStream)
            throws Http2Exception;
    /**
     * Handles an inbound {@code PRIORITY} frame.
     * <p>
     * Note that it is possible to have this method called and no stream object exist for either
     * {@code streamId}, {@code streamDependency}, or both. This is because the {@code PRIORITY} frame can be
     * sent/received when streams are in the {@code CLOSED} state.
     *
     * @param ctx the context from the handler where the frame was read.
     * @param streamId the subject stream for the frame.
     * @param streamDependency the stream on which this stream depends, or 0 if dependent on the
     * connection.
     * @param weight the new weight for the stream.
     * @param exclusive whether or not the stream should be the exclusive dependent of its parent.
     */
    void onPriorityRead(ChannelHandlerContext ctx, int streamId, int streamDependency,
                        short weight, boolean exclusive) throws Http2Exception;
    /**
     * Handles an inbound {@code RST_STREAM} frame.
     *
     * @param ctx the context from the handler where the frame was read.
     * @param streamId the stream that is terminating.
     * @param errorCode the error code identifying the type of failure.
     */
    void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode) throws Http2Exception;
    /**
     * Handles an inbound {@code SETTINGS} acknowledgment frame.
     * @param ctx the context from the handler where the frame was read.
     */
    void onSettingsAckRead(ChannelHandlerContext ctx) throws Http2Exception;
    /**
     * Handles an inbound {@code SETTINGS} frame.
     *
     * @param ctx the context from the handler where the frame was read.
     * @param settings the settings received from the remote endpoint.
     */
    void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings) throws Http2Exception;
    /**
     * Handles an inbound {@code PING} frame.
     *
     * @param ctx the context from the handler where the frame was read.
     * @param data the payload of the frame. If this buffer needs to be retained by the listener
     * they must make a copy.
     */
    void onPingRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception;
    /**
     * Handles an inbound {@code PING} acknowledgment.
     *
     * @param ctx the context from the handler where the frame was read.
     * @param data the payload of the frame. If this buffer needs to be retained by the listener
     * they must make a copy.
     */
    void onPingAckRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception;
    /**
     * Handles an inbound {@code PUSH_PROMISE} frame. Only called if {@code END_HEADERS} encountered.
     * <p>
     * Promised requests MUST be authoritative, cacheable, and safe.
     * See <a href="https://tools.ietf.org/html/draft-ietf-httpbis-http2-17#section-8.2">[RFC http2], Section 8.2</a>.
     * <p>
     * Only one of the following methods will be called for each {@code HEADERS} frame sequence.
     * One will be called when the {@code END_HEADERS} flag has been received.
     * <ul>
     * <li>{@link #onHeadersRead(ChannelHandlerContext, int, Http2Headers, int, boolean)}</li>
     * <li>{@link #onHeadersRead(ChannelHandlerContext, int, Http2Headers, int, short, boolean, int, boolean)}</li>
     * <li>{@link #onPushPromiseRead(ChannelHandlerContext, int, int, Http2Headers, int)}</li>
     * </ul>
     * <p>
     * To say it another way; the {@link Http2Headers} will contain all of the headers
     * for the current message exchange step (additional queuing is not necessary).
     *
     * @param ctx the context from the handler where the frame was read.
     * @param streamId the stream the frame was sent on.
     * @param promisedStreamId the ID of the promised stream.
     * @param headers the received headers.
     * @param padding the number of padding bytes found at the end of the frame.
     */
    void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId,
                           Http2Headers headers, int padding) throws Http2Exception;
    /**
     * Handles an inbound {@code GO_AWAY} frame.
     *
     * @param ctx the context from the handler where the frame was read.
     * @param lastStreamId the last known stream of the remote endpoint.
     * @param errorCode the error code, if abnormal closure.
     * @param debugData application-defined debug data. If this buffer needs to be retained by the
     * listener they must make a copy.
     */
    void onGoAwayRead(ChannelHandlerContext ctx, int lastStreamId, long errorCode, ByteBuf debugData)
            throws Http2Exception;
    /**
     * Handles an inbound {@code WINDOW_UPDATE} frame.
     *
     * @param ctx the context from the handler where the frame was read.
     * @param streamId the stream the frame was sent on.
     * @param windowSizeIncrement the increased number of bytes of the remote endpoint's flow
     * control window.
     */
    void onWindowUpdateRead(ChannelHandlerContext ctx, int streamId, int windowSizeIncrement)
            throws Http2Exception;
    /**
     * Handler for a frame not defined by the HTTP/2 spec.
     *
     * @param ctx the context from the handler where the frame was read.
     * @param frameType the frame type from the HTTP/2 header.
     * @param streamId the stream the frame was sent on.
     * @param flags the flags in the frame header.
     * @param payload the payload of the frame.
     */
    void onUnknownFrame(ChannelHandlerContext ctx, byte frameType, int streamId, Http2Flags flags, ByteBuf payload)
            throws Http2Exception;
}
| apache-2.0 |
apixandru/intellij-community | python/src/com/jetbrains/python/psi/impl/PyPrefixExpressionImpl.java | 5338 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.psi.impl;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiPolyVariantReference;
import com.intellij.psi.PsiReference;
import com.intellij.psi.util.QualifiedName;
import com.jetbrains.python.PyElementTypes;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.PyTokenTypes;
import com.jetbrains.python.PythonDialectsTokenSetProvider;
import com.jetbrains.python.codeInsight.typing.PyTypingTypeProvider;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.references.PyOperatorReference;
import com.jetbrains.python.psi.resolve.PyResolveContext;
import com.jetbrains.python.psi.types.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.List;
/**
* @author yole
*/
/**
 * PSI implementation of a Python prefix (unary) expression, e.g. {@code -x},
 * {@code not x} or {@code await x}. Provides operator access, reference
 * resolution to the operator's special method, and result type inference.
 *
 * @author yole
 */
public class PyPrefixExpressionImpl extends PyElementImpl implements PyPrefixExpression {
  public PyPrefixExpressionImpl(ASTNode astNode) {
    super(astNode);
  }

  @Override
  public PyExpression getOperand() {
    // The operand is the first child matching any dialect-aware expression token.
    return (PyExpression)childToPsi(PythonDialectsTokenSetProvider.INSTANCE.getExpressionTokens(), 0);
  }

  /** @return the PSI element of the unary operator token, or null if absent. */
  @Nullable
  public PsiElement getPsiOperator() {
    final ASTNode opNode = getNode().findChildByType(PyElementTypes.UNARY_OPS);
    if (opNode == null) {
      return null;
    }
    return opNode.getPsi();
  }

  @NotNull
  @Override
  public PyElementType getOperator() {
    final PsiElement operator = getPsiOperator();
    assert operator != null;
    return (PyElementType)operator.getNode().getElementType();
  }

  @Override
  protected void acceptPyVisitor(PyElementVisitor pyVisitor) {
    pyVisitor.visitPyPrefixExpression(this);
  }

  @Override
  public PsiReference getReference() {
    return getReference(PyResolveContext.noImplicits());
  }

  @NotNull
  @Override
  public PsiPolyVariantReference getReference(@NotNull PyResolveContext context) {
    return new PyOperatorReference(this, context);
  }

  @Override
  public PyType getType(@NotNull TypeEvalContext context, @NotNull TypeEvalContext.Key key) {
    final PyElementType operator = getOperator();
    // `not` always produces bool, regardless of the operand's type.
    if (operator == PyTokenTypes.NOT_KEYWORD) {
      return PyBuiltinCache.getInstance(this).getBoolType();
    }
    final boolean isAwait = operator == PyTokenTypes.AWAIT_KEYWORD;
    if (isAwait) {
      // For `await x`, first try to unwrap the element type of the awaited
      // coroutine/generator directly from the operand's type.
      final PyExpression operand = getOperand();
      if (operand != null) {
        final PyType unwrapped = getGeneratorReturnType(context.getType(operand), context);
        if (unwrapped != null) {
          return unwrapped;
        }
      }
    }
    // Fall back to resolving the operator to its special method and using that
    // method's declared return type.
    final PsiReference ref = getReference(PyResolveContext.noImplicits().withTypeEvalContext(context));
    final PsiElement resolved = ref.resolve();
    if (!(resolved instanceof PyCallable)) {
      return null;
    }
    // TODO: Make PyPrefixExpression a PyCallSiteExpression, use getCallType() here and analyze it in PyTypeChecker.analyzeCallSite()
    final PyType returnType = ((PyCallable)resolved).getReturnType(context, key);
    return isAwait ? getGeneratorReturnType(returnType, context) : returnType;
  }

  @Override
  public PyExpression getQualifier() {
    // For operator-reference purposes the operand acts as the qualifier.
    return getOperand();
  }

  @Nullable
  @Override
  public QualifiedName asQualifiedName() {
    return PyPsiUtils.asQualifiedName(this);
  }

  @Override
  public boolean isQualified() {
    return getQualifier() != null;
  }

  @Override
  public String getReferencedName() {
    final PyElementType operator = getOperator();
    // Unary +/- map to __pos__/__neg__; all other operators carry their own
    // special method name.
    if (operator == PyTokenTypes.PLUS) {
      return PyNames.POS;
    }
    if (operator == PyTokenTypes.MINUS) {
      return PyNames.NEG;
    }
    return operator.getSpecialMethodName();
  }

  @Override
  public ASTNode getNameElement() {
    final PsiElement operator = getPsiOperator();
    if (operator == null) {
      return null;
    }
    return operator.getNode();
  }

  /**
   * Unwraps the element type produced by awaiting the given type: the iterated
   * item type for Awaitable classes, the coroutine/generator element type
   * otherwise, distributing over unions. Returns null when not applicable.
   */
  @Nullable
  private static PyType getGeneratorReturnType(@Nullable PyType type, @NotNull TypeEvalContext context) {
    if (type instanceof PyClassLikeType && type instanceof PyCollectionType) {
      if (type instanceof PyClassType && PyNames.AWAITABLE.equals(((PyClassType)type).getPyClass().getName())) {
        return ((PyCollectionType)type).getIteratedItemType();
      }
      return Ref.deref(PyTypingTypeProvider.coroutineOrGeneratorElementType(type, context));
    }
    if (type instanceof PyUnionType) {
      // Unwrap each union member independently and re-union the results.
      final List<PyType> unwrappedMembers = new ArrayList<>();
      for (PyType member : ((PyUnionType)type).getMembers()) {
        unwrappedMembers.add(getGeneratorReturnType(member, context));
      }
      return PyUnionType.union(unwrappedMembers);
    }
    return null;
  }
}
| apache-2.0 |
Microsoft/TypeScript | tests/baselines/reference/typeOfThisInStaticMembers13(target=es5).js | 1288 | //// [typeOfThisInStaticMembers13.ts]
// Baseline input: inside static member initializers `this` refers to the
// enclosing class constructor, so both computed property names below evaluate
// C.c (the literal type/value "foo").
// NOTE(review): generated compiler baseline — keep in sync with the emitted JS.
class C {
    static readonly c: "foo" = "foo"
    static bar = class Inner {
        static [this.c] = 123;
        [this.c] = 123;
    }
}
//// [typeOfThisInStaticMembers13.js]
// Generated ES5 emit for the baseline above (target=es5).
// The compiler captures the outer constructor in _a, the inner class in _b,
// and evaluates both computed keys (_c, _d) from _a.c before defining the
// corresponding properties. NOTE(review): generated baseline output.
var C = /** @class */ (function () {
    function C() {
    }
    var _a, _b, _c, _d;
    _a = C;
    Object.defineProperty(C, "c", {
        enumerable: true,
        configurable: true,
        writable: true,
        value: "foo"
    });
    Object.defineProperty(C, "bar", {
        enumerable: true,
        configurable: true,
        writable: true,
        value: (_b = /** @class */ (function () {
                function Inner() {
                    Object.defineProperty(this, _d, {
                        enumerable: true,
                        configurable: true,
                        writable: true,
                        value: 123
                    });
                }
                return Inner;
            }()),
            _c = _a.c,
            _d = _a.c,
            Object.defineProperty(_b, _c, {
                enumerable: true,
                configurable: true,
                writable: true,
                value: 123
            }),
            _b)
    });
    return C;
}());
| apache-2.0 |
programming086/omim | android/src/com/mapswithme/maps/widget/ToolbarController.java | 1109 | package com.mapswithme.maps.widget;
import android.app.Activity;
import android.support.annotation.StringRes;
import android.support.v7.widget.Toolbar;
import android.view.View;
import com.mapswithme.maps.R;
import com.mapswithme.util.UiUtils;
import com.mapswithme.util.Utils;
/**
 * Binds the common {@code R.id.toolbar} view of a screen: shows the home/up
 * arrow and routes navigation clicks to {@link #onUpClick()}, which by default
 * navigates to the parent activity.
 */
public class ToolbarController
{
  /** Hosting activity, used for up-navigation. */
  protected final Activity mActivity;
  /** The toolbar located inside the supplied root view. */
  protected final Toolbar mToolbar;

  public ToolbarController(View root, Activity activity)
  {
    mActivity = activity;
    mToolbar = (Toolbar) root.findViewById(R.id.toolbar);

    mToolbar.setNavigationOnClickListener(new View.OnClickListener()
    {
      @Override
      public void onClick(View view)
      {
        onUpClick();
      }
    });
    UiUtils.showHomeUpButton(mToolbar);
  }

  /** Called when the toolbar's navigation (up) button is pressed. */
  public void onUpClick()
  {
    Utils.navigateToParent(mActivity);
  }

  /**
   * Sets the toolbar title.
   * @return this controller, for chaining.
   */
  public ToolbarController setTitle(CharSequence title)
  {
    mToolbar.setTitle(title);
    return this;
  }

  /**
   * Sets the toolbar title from a string resource.
   * @return this controller, for chaining.
   */
  public ToolbarController setTitle(@StringRes int title)
  {
    mToolbar.setTitle(title);
    return this;
  }

  /** @return the wrapped toolbar widget. */
  public Toolbar getToolbar()
  {
    return mToolbar;
  }
}
| apache-2.0 |
eribeiro/kafka | core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala | 6795 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import joptsimple.OptionParser
import kafka.utils._
import org.I0Itec.zkclient.ZkClient
import org.I0Itec.zkclient.exception.ZkNodeExistsException
import kafka.common.{TopicAndPartition, AdminCommandFailedException}
import collection._
import org.apache.kafka.common.utils.Utils
import org.apache.kafka.common.security.JaasUtils
/**
 * Command-line tool that triggers preferred replica leader election for a set
 * of topic partitions by writing the request to the well-known ZooKeeper path,
 * where the controller picks it up.
 */
object PreferredReplicaLeaderElectionCommand extends Logging {
  def main(args: Array[String]): Unit = {
    val parser = new OptionParser
    val jsonFileOpt = parser.accepts("path-to-json-file", "The JSON file with the list of partitions " +
      "for which preferred replica leader election should be done, in the following format - \n" +
      "{\"partitions\":\n\t[{\"topic\": \"foo\", \"partition\": 1},\n\t {\"topic\": \"foobar\", \"partition\": 2}]\n}\n" +
      "Defaults to all existing partitions")
      .withRequiredArg
      .describedAs("list of partitions for which preferred replica leader election needs to be triggered")
      .ofType(classOf[String])
    val zkConnectOpt = parser.accepts("zookeeper", "REQUIRED: The connection string for the zookeeper connection in the " +
      "form host:port. Multiple URLS can be given to allow fail-over.")
      .withRequiredArg
      .describedAs("urls")
      .ofType(classOf[String])
    if(args.length == 0)
      CommandLineUtils.printUsageAndDie(parser, "This tool causes leadership for each partition to be transferred back to the 'preferred replica'," +
        " it can be used to balance leadership among the servers.")
    val options = parser.parse(args : _*)
    CommandLineUtils.checkRequiredArgs(parser, options, zkConnectOpt)
    val zkConnect = options.valueOf(zkConnectOpt)
    var zkClient: ZkClient = null
    var zkUtils: ZkUtils = null
    try {
      // NOTE(review): this client appears to serve only as an eager
      // connectivity check; ZkUtils below opens its own, separate connection.
      zkClient = ZkUtils.createZkClient(zkConnect, 30000, 30000)
      zkUtils = ZkUtils(zkConnect,
                        30000,
                        30000,
                        JaasUtils.isZkSecurityEnabled())
      // Either elect for every known partition, or only those listed in the
      // user-supplied JSON file.
      val partitionsForPreferredReplicaElection =
        if (!options.has(jsonFileOpt))
          zkUtils.getAllPartitions()
        else
          parsePreferredReplicaElectionData(Utils.readFileAsString(options.valueOf(jsonFileOpt)))
      val preferredReplicaElectionCommand = new PreferredReplicaLeaderElectionCommand(zkUtils, partitionsForPreferredReplicaElection)
      preferredReplicaElectionCommand.moveLeaderToPreferredReplica()
    } catch {
      case e: Throwable =>
        println("Failed to start preferred replica election")
        println(Utils.stackTrace(e))
    } finally {
      // Close BOTH connections: zkUtils wraps its own ZkClient, distinct from
      // the one created above. Previously only zkClient was closed, leaking
      // the ZkUtils connection until process exit.
      if (zkUtils != null)
        zkUtils.close()
      if (zkClient != null)
        zkClient.close()
    }
  }

  /**
   * Parses the user-supplied JSON ({"partitions": [{"topic": ..., "partition": ...}, ...]})
   * into a set of TopicAndPartition.
   *
   * Throws AdminOperationException on empty input or duplicate partitions.
   */
  def parsePreferredReplicaElectionData(jsonString: String): immutable.Set[TopicAndPartition] = {
    Json.parseFull(jsonString) match {
      case Some(m) =>
        m.asInstanceOf[Map[String, Any]].get("partitions") match {
          case Some(partitionsList) =>
            val partitionsRaw = partitionsList.asInstanceOf[List[Map[String, Any]]]
            val partitions = partitionsRaw.map { p =>
              val topic = p.get("topic").get.asInstanceOf[String]
              val partition = p.get("partition").get.asInstanceOf[Int]
              TopicAndPartition(topic, partition)
            }
            val duplicatePartitions = CoreUtils.duplicates(partitions)
            val partitionsSet = partitions.toSet
            if (duplicatePartitions.nonEmpty)
              throw new AdminOperationException("Preferred replica election data contains duplicate partitions: %s".format(duplicatePartitions.mkString(",")))
            partitionsSet
          case None => throw new AdminOperationException("Preferred replica election data is empty")
        }
      case None => throw new AdminOperationException("Preferred replica election data is empty")
    }
  }

  /**
   * Serializes the partition set as JSON and writes it to the preferred
   * replica election ZK path. Fails with AdminOperationException if an
   * election is already in progress (the path already exists).
   */
  def writePreferredReplicaElectionData(zkUtils: ZkUtils,
                                        partitionsUndergoingPreferredReplicaElection: scala.collection.Set[TopicAndPartition]) {
    val zkPath = ZkUtils.PreferredReplicaLeaderElectionPath
    val partitionsList = partitionsUndergoingPreferredReplicaElection.map(e => Map("topic" -> e.topic, "partition" -> e.partition))
    val jsonData = Json.encode(Map("version" -> 1, "partitions" -> partitionsList))
    try {
      zkUtils.createPersistentPath(zkPath, jsonData)
      println("Created preferred replica election path with %s".format(jsonData))
    } catch {
      case _: ZkNodeExistsException =>
        // The path already holds a pending election request; report which
        // partitions it covers.
        val partitionsUndergoingPreferredReplicaElection =
          PreferredReplicaLeaderElectionCommand.parsePreferredReplicaElectionData(zkUtils.readData(zkPath)._1)
        throw new AdminOperationException("Preferred replica leader election currently in progress for " +
          "%s. Aborting operation".format(partitionsUndergoingPreferredReplicaElection))
      case e2: Throwable => throw new AdminOperationException(e2.toString)
    }
  }
}
/**
 * Executes a preferred replica leader election for the requested partitions:
 * partitions that exist in ZooKeeper are submitted to the controller via the
 * election path, the rest are reported and skipped.
 */
class PreferredReplicaLeaderElectionCommand(zkUtils: ZkUtils, partitionsFromUser: scala.collection.Set[TopicAndPartition]) {
  def moveLeaderToPreferredReplica() = {
    try {
      // Resolve which of the requested partitions actually exist in ZK.
      val requestedTopics = partitionsFromUser.map(_.topic).toSet
      val knownPartitions = zkUtils.getPartitionsForTopics(requestedTopics.toSeq).flatMap { case (topic, partitionIds) =>
        partitionIds.map(id => TopicAndPartition(topic, id))
      }.toSet
      val (electablePartitions, unknownPartitions) = partitionsFromUser.partition(knownPartitions.contains)
      // Submit only the existing partitions; report the rest individually.
      PreferredReplicaLeaderElectionCommand.writePreferredReplicaElectionData(zkUtils, electablePartitions)
      println("Successfully started preferred replica election for partitions %s".format(electablePartitions))
      unknownPartitions.foreach(p => println("Skipping preferred replica leader election for partition %s since it doesn't exist.".format(p)))
    } catch {
      case e: Throwable => throw new AdminCommandFailedException("Admin command failed", e)
    }
  }
}
| apache-2.0 |
chirino/fabric8 | insight/insight-elasticsearch-discovery/src/main/java/io/fabric8/insight/elasticsearch/discovery/FabricDiscoveryPlugin.java | 1300 | /**
* Copyright 2005-2015 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.insight.elasticsearch.discovery;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.AbstractPlugin;
import java.util.Collection;
/**
 * Elasticsearch plugin entry point that identifies the Fabric8-based
 * discovery module by name and description.
 */
public class FabricDiscoveryPlugin extends AbstractPlugin {
    // Node settings supplied at construction time.
    // NOTE(review): stored but not read anywhere in this file — presumably
    // kept for module wiring; confirm before removing.
    private final Settings settings;
    public FabricDiscoveryPlugin(Settings settings) {
        this.settings = settings;
    }
    @Override
    public String name() {
        return "fabric8-discovery";
    }
    @Override
    public String description() {
        return "Discovery module using Fabric8";
    }
}
| apache-2.0 |
awslabs/aws-sdk-cpp | aws-cpp-sdk-ec2/source/model/InstanceHealthStatus.cpp | 2041 | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/ec2/model/InstanceHealthStatus.h>
#include <aws/core/utils/HashingUtils.h>
#include <aws/core/Globals.h>
#include <aws/core/utils/EnumParseOverflowContainer.h>
using namespace Aws::Utils;
namespace Aws
{
namespace EC2
{
namespace Model
{
namespace InstanceHealthStatusMapper
{
static const int healthy_HASH = HashingUtils::HashString("healthy");
static const int unhealthy_HASH = HashingUtils::HashString("unhealthy");
/*
 * Maps a status string ("healthy"/"unhealthy") to its enum value by comparing
 * precomputed hashes. An unrecognized name is stashed in the global overflow
 * container (when available) so its raw value can be round-tripped later;
 * without a container, NOT_SET is returned.
 */
InstanceHealthStatus GetInstanceHealthStatusForName(const Aws::String& name)
{
  const int hashCode = HashingUtils::HashString(name.c_str());

  if (hashCode == healthy_HASH)
  {
    return InstanceHealthStatus::healthy;
  }
  if (hashCode == unhealthy_HASH)
  {
    return InstanceHealthStatus::unhealthy;
  }

  // Unknown value: remember the original string so callers can recover it.
  if (EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer())
  {
    overflowContainer->StoreOverflow(hashCode, name);
    return static_cast<InstanceHealthStatus>(hashCode);
  }
  return InstanceHealthStatus::NOT_SET;
}
/*
 * Maps an enum value back to its wire string. Values that are not known
 * enumerators may be overflow values captured during parsing; those are
 * looked up in the global overflow container, otherwise an empty string
 * is returned.
 */
Aws::String GetNameForInstanceHealthStatus(InstanceHealthStatus enumValue)
{
  switch(enumValue)
  {
  case InstanceHealthStatus::healthy:
    return "healthy";
  case InstanceHealthStatus::unhealthy:
    return "unhealthy";
  default:
    break;
  }

  // Not a known enumerator: try to recover the original string.
  EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
  if (overflowContainer)
  {
    return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue));
  }
  return {};
}
} // namespace InstanceHealthStatusMapper
} // namespace Model
} // namespace EC2
} // namespace Aws
| apache-2.0 |
Mikaela/znc | include/znc/Utils.h | 11492 | /*
* Copyright (C) 2004-2016 ZNC, see the NOTICE file for details.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ZNC_UTILS_H
#define ZNC_UTILS_H
#include <znc/zncconfig.h>
#include <znc/ZNCString.h>
#include <assert.h>
#include <cstdio>
#include <fcntl.h>
#include <map>
#include <sys/file.h>
#include <sys/time.h>
#include <unistd.h>
#include <vector>
// Marks the descriptor close-on-exec so that processes spawned via execve()
// do not inherit it. Failures (e.g. an invalid fd) are deliberately ignored:
// this is a best-effort hardening helper.
static inline void SetFdCloseOnExec(int fd) {
    const int oldFlags = fcntl(fd, F_GETFD, 0);
    if (oldFlags >= 0) {
        // Preserve any existing descriptor flags while adding FD_CLOEXEC.
        fcntl(fd, F_SETFD, oldFlags | FD_CLOEXEC);
    }
}
static const char g_HexDigits[] = "0123456789abcdef";
/** Grab-bag of static utility helpers (networking, hashing, console I/O,
 * time formatting). Declarations only; implementations live in Utils.cpp. */
class CUtils {
  public:
    CUtils();
    ~CUtils();

    // Dotted-quad <-> numeric IPv4 address conversion helpers.
    static CString GetIP(unsigned long addr);
    static unsigned long GetLongIP(const CString& sIP);

    // Console output helpers for interactive sessions.
    static void PrintError(const CString& sMessage);
    static void PrintMessage(const CString& sMessage, bool bStrong = false);
    static void PrintPrompt(const CString& sMessage);
    static void PrintAction(const CString& sMessage);
    static void PrintStatus(bool bSuccess, const CString& sMessage = "");

#ifndef SWIGPERL
    // TODO refactor this
    static const CString sDefaultHash;
#endif

    // Salted password hashing helpers (MD5 / SHA-256 variants).
    static CString GetSaltedHashPass(CString& sSalt);
    static CString GetSalt();
    static CString SaltedMD5Hash(const CString& sPass, const CString& sSalt);
    static CString SaltedSHA256Hash(const CString& sPass, const CString& sSalt);

    // Interactive console input helpers.
    static CString GetPass(const CString& sPrompt);
    static bool GetInput(const CString& sPrompt, CString& sRet,
                         const CString& sDefault = "",
                         const CString& sHint = "");
    static bool GetBoolInput(const CString& sPrompt, bool bDefault);
    static bool GetBoolInput(const CString& sPrompt, bool* pbDefault = nullptr);
    static bool GetNumInput(const CString& sPrompt, unsigned int& uRet,
                            unsigned int uMin = 0, unsigned int uMax = ~0,
                            unsigned int uDefault = ~0);

    // Current wall-clock time in milliseconds since the epoch
    // (seconds * 1000 + microseconds / 1000, via gettimeofday()).
    static unsigned long long GetMillTime() {
        struct timeval tv;
        unsigned long long iTime = 0;
        gettimeofday(&tv, nullptr);
        iTime = (unsigned long long)tv.tv_sec * 1000;
        iTime += ((unsigned long long)tv.tv_usec / 1000);
        return iTime;
    }
#ifdef HAVE_LIBSSL
    static void GenerateCert(FILE* pOut, const CString& sHost = "");
#endif /* HAVE_LIBSSL */

    // Timezone-aware time formatting helpers.
    static CString CTime(time_t t, const CString& sTZ);
    static CString FormatTime(time_t t, const CString& sFormat,
                              const CString& sTZ);
    // Format/parse timestamps exchanged with servers.
    static CString FormatServerTime(const timeval& tv);
    static timeval ParseServerTime(const CString& sTime);

    static SCString GetTimezones();
    static SCString GetEncodings();

    /// @deprecated Use CMessage instead
    static MCString GetMessageTags(const CString& sLine);
    /// @deprecated Use CMessage instead
    static void SetMessageTags(CString& sLine, const MCString& mssTags);

  private:
  protected:
};
/** Control-flow exception used to request an application-level action
 * (shutdown or restart); carries only the requested action type. */
class CException {
  public:
    // Action requested by the thrower.
    typedef enum { EX_Shutdown, EX_Restart } EType;
    CException(EType e) : m_eType(e) {}
    virtual ~CException() {}

    // The requested action (shutdown vs. restart).
    EType GetType() const { return m_eType; }
  private:
  protected:
    EType m_eType;
};
/** Previously this generated a grid-like output from a given input.
 *
 * @code
 * CTable table;
 * table.AddColumn("a");
 * table.AddColumn("b");
 * table.AddRow();
 * table.SetCell("a", "hello");
 * table.SetCell("b", "world");
 *
 * unsigned int idx = 0;
 * CString tmp;
 * while (table.GetLine(idx++, tmp)) {
 *     // Output tmp somehow
 * }
 * @endcode
 *
 * But tables look awful in IRC. So now it puts every cell on separate line.
 */
class CTable : protected std::vector<std::vector<CString>> {
  public:
    /** Constructor. Takes no arguments; define the layout afterwards via
     * AddColumn(). (An earlier revision accepted a preferred width used for
     * cell wrapping, which no longer applies.) */
    CTable() : m_vsHeaders(), m_vsOutput() {}
    virtual ~CTable() {}

    /** Adds a new column to the table.
     * Please note that you should add all columns before starting to fill
     * the table!
     * @param sName The name of the column.
     * @return false if a column by that name already existed.
     */
    bool AddColumn(const CString& sName);

    /** Adds a new row to the table.
     * After calling this you can fill the row with content.
     * @return The index of this row
     */
    size_type AddRow();

    /** Sets a given cell in the table to a value.
     * @param sColumn The name of the column you want to fill.
     * @param sValue The value to write into that column.
     * @param uRowIdx The index of the row to use as returned by AddRow().
     * If this is not given, the last row will be used.
     * @return True if setting the cell was successful.
     */
    bool SetCell(const CString& sColumn, const CString& sValue,
                 size_type uRowIdx = ~0);

    /** Get a line of the table's output
     * @param uIdx The index of the line you want.
     * @param sLine This string will receive the output.
     * @return True unless uIdx is past the end of the table.
     */
    bool GetLine(unsigned int uIdx, CString& sLine) const;

    /// Completely clear the table.
    void Clear();

    /// @return The number of rows in this table, not counting the header.
    using std::vector<std::vector<CString>>::size;

    /// @return True if this table doesn't contain any rows.
    using std::vector<std::vector<CString>>::empty;

  private:
    // Resolves a header name to its column position (implemented in Utils.cpp;
    // NOTE(review): behavior for unknown names is defined there — confirm).
    unsigned int GetColumnIndex(const CString& sName) const;
    // Produces the per-line textual representation cached in m_vsOutput.
    VCString Render() const;

  protected:
    // TODO: cleanup these fields before 1.7.0 (I don't want to break ABI)
    VCString m_vsHeaders;       // column names, in AddColumn() order
    mutable VCString m_vsOutput; // Rendered table
};
#ifdef HAVE_LIBSSL
#include <openssl/aes.h>
#include <openssl/blowfish.h>
#include <openssl/md5.h>

//! Does Blowfish w/64 bit feedback, no padding.
class CBlowfish {
  public:
    /**
     * @param sPassword key to encrypt with
     * @param iEncrypt encrypt method (BF_DECRYPT or BF_ENCRYPT)
     * @param sIvec what to set the ivector to start with, default sets it all 0's
     */
    CBlowfish(const CString& sPassword, int iEncrypt,
              const CString& sIvec = "");
    ~CBlowfish();

    // NOTE(review): m_ivec is a raw pointer; the defaulted copy
    // constructor/assignment make both objects share the same buffer.
    // If the destructor frees m_ivec, copying a CBlowfish leads to a
    // double free -- confirm, and consider deleting these or deep-copying.
    CBlowfish(const CBlowfish&) = default;
    CBlowfish& operator=(const CBlowfish&) = default;

    //! output must be freed
    static unsigned char* MD5(const unsigned char* input, u_int ilen);
    //! returns an md5 of the CString (not hex encoded)
    static CString MD5(const CString& sInput, bool bHexEncode = false);
    //! output must be the same size as input
    void Crypt(unsigned char* input, unsigned char* output, u_int ibytes);
    //! must free result
    unsigned char* Crypt(unsigned char* input, u_int ibytes);
    CString Crypt(const CString& sData);

  private:
    unsigned char* m_ivec;   // initialization-vector buffer
    BF_KEY m_bkey;           // expanded Blowfish key schedule
    int m_iEncrypt, m_num;   // cipher direction, and feedback state counter
};
#endif /* HAVE_LIBSSL */
/**
 * @class TCacheMap
 * @author prozac <[email protected]>
 * @brief Stores objects together with a time-to-live, so callers can later
 *        check whether an entry is still alive.
 */
template <typename K, typename V = bool>
class TCacheMap {
  public:
    TCacheMap(unsigned int uTTL = 5000) : m_mItems(), m_uTTL(uTTL) {}
    virtual ~TCacheMap() {}

    /// Caches @a Item using the default time-to-live.
    void AddItem(const K& Item) { AddItem(Item, m_uTTL); }

    /// Caches @a Item with an explicit time-to-live of @a uTTL milliseconds.
    void AddItem(const K& Item, unsigned int uTTL) { AddItem(Item, V(), uTTL); }

    /// Caches @a Item mapped to @a Val using the default time-to-live.
    void AddItem(const K& Item, const V& Val) { AddItem(Item, Val, m_uTTL); }

    /// Caches @a Item mapped to @a Val, expiring after @a uTTL milliseconds.
    void AddItem(const K& Item, const V& Val, unsigned int uTTL) {
        if (!uTTL) {
            // A zero TTL means "expire immediately": make sure no stale
            // copy of the key survives, then don't bother inserting.
            RemItem(Item);
            return;
        }
        m_mItems[Item] = value(CUtils::GetMillTime() + uTTL, Val);
    }

    /// Purges expired entries, then reports whether @a Item is still cached.
    bool HasItem(const K& Item) {
        Cleanup();
        return m_mItems.count(Item) != 0;
    }

    /// Purges expired entries, then returns a pointer to the live value for
    /// @a Item, or nullptr if there is none.
    V* GetItem(const K& Item) {
        Cleanup();
        iterator found = m_mItems.find(Item);
        return (found == m_mItems.end()) ? nullptr : &found->second.second;
    }

    /// Drops @a Item from the cache.
    /// @return True if it existed and was removed, false otherwise.
    bool RemItem(const K& Item) { return m_mItems.erase(Item) != 0; }

    /// Walks the whole cache and throws away every stale entry.
    void Cleanup() {
        for (iterator it = m_mItems.begin(); it != m_mItems.end();) {
            if (CUtils::GetMillTime() > it->second.first) {
                it = m_mItems.erase(it);
            } else {
                ++it;
            }
        }
    }

    /// Forgets everything, expired or not.
    void Clear() { m_mItems.clear(); }

    /// Purges expired entries, then returns a snapshot of all remaining
    /// key/value pairs (without the expiry timestamps).
    std::map<K, V> GetItems() {
        Cleanup();
        std::map<K, V> mSnapshot;
        for (const auto& entry : m_mItems) {
            mSnapshot[entry.first] = entry.second.second;
        }
        return mSnapshot;
    }

    // Setters
    void SetTTL(unsigned int u) { m_uTTL = u; }
    // !Setters

    // Getters
    unsigned int GetTTL() const { return m_uTTL; }
    // !Getters

  protected:
    typedef std::pair<unsigned long long, V> value;
    typedef typename std::map<K, value>::iterator iterator;
    std::map<K, value>
        m_mItems;  //!< Map of cached items. The first of the pair is the expire time
    unsigned int m_uTTL;  //!< Default time-to-live duration
};
#endif // !ZNC_UTILS_H
| apache-2.0 |
nmldiegues/stibt | infinispan/cachestore/cassandra/src/main/java/org/infinispan/loaders/cassandra/CassandraCacheStoreConfig.java | 7387 | /*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.loaders.cassandra;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import net.dataforte.cassandra.pool.PoolProperties;
import org.apache.cassandra.thrift.ConsistencyLevel;
import org.infinispan.config.ConfigurationException;
import org.infinispan.loaders.AbstractCacheStoreConfig;
import org.infinispan.loaders.keymappers.DefaultTwoWayKey2StringMapper;
import org.infinispan.util.FileLookupFactory;
import org.infinispan.util.Util;
/**
 * Configures {@link CassandraCacheStore}.
 * <p>
 * Keyspace/column-family layout is held in fields on this class, while all
 * connection-related settings (host, port, credentials, JNDI location) are
 * delegated to the underlying {@link PoolProperties} instance.
 */
public class CassandraCacheStoreConfig extends AbstractCacheStoreConfig {

   /**
    * @configRef desc="The Cassandra keyspace"
    */
   String keySpace = "Infinispan";

   /**
    * @configRef desc="The Cassandra column family for entries"
    */
   String entryColumnFamily = "InfinispanEntries";

   /**
    * @configRef desc="The Cassandra column family for expirations"
    */
   String expirationColumnFamily = "InfinispanExpiration";

   /**
    * @configRef desc="Whether the keySpace is shared between multiple caches"
    */
   boolean sharedKeyspace = false;

   /**
    * @configRef desc="Which Cassandra consistency level to use when reading"
    */
   String readConsistencyLevel = "ONE";

   /**
    * @configRef desc="Which Cassandra consistency level to use when writing"
    */
   String writeConsistencyLevel = "ONE";

   /**
    * @configRef desc=
    *            "An optional properties file for configuring the underlying cassandra connection pool"
    */
   String configurationPropertiesFile;

   /**
    * @configRef desc=
    *            "The keymapper for converting keys to strings (uses the DefaultTwoWayKey2Stringmapper by default)"
    */
   String keyMapper = DefaultTwoWayKey2StringMapper.class.getName();

   /**
    * @configRef desc=
    *            "Whether to automatically create the keyspace with the appropriate column families (true by default)"
    */
   boolean autoCreateKeyspace = true;

   // Connection-pool configuration; several setters below delegate here.
   protected PoolProperties poolProperties;

   /**
    * Creates a config wired to {@link CassandraCacheStore} with a fresh,
    * default-initialized connection pool configuration.
    */
   public CassandraCacheStoreConfig() {
      setCacheLoaderClassName(CassandraCacheStore.class.getName());
      poolProperties = new PoolProperties();
   }

   public String getKeySpace() {
      return keySpace;
   }

   public void setKeySpace(String keySpace) {
      this.keySpace = keySpace;
   }

   public String getEntryColumnFamily() {
      return entryColumnFamily;
   }

   public void setEntryColumnFamily(String entryColumnFamily) {
      this.entryColumnFamily = entryColumnFamily;
   }

   public String getExpirationColumnFamily() {
      return expirationColumnFamily;
   }

   public void setExpirationColumnFamily(String expirationColumnFamily) {
      this.expirationColumnFamily = expirationColumnFamily;
   }

   public boolean isSharedKeyspace() {
      return sharedKeyspace;
   }

   public void setSharedKeyspace(boolean sharedKeyspace) {
      this.sharedKeyspace = sharedKeyspace;
   }

   public String getReadConsistencyLevel() {
      return readConsistencyLevel;
   }

   public void setReadConsistencyLevel(String readConsistencyLevel) {
      this.readConsistencyLevel = readConsistencyLevel;
   }

   public String getWriteConsistencyLevel() {
      return writeConsistencyLevel;
   }

   public void setWriteConsistencyLevel(String writeConsistencyLevel) {
      this.writeConsistencyLevel = writeConsistencyLevel;
   }

   // ---------------------------------------------------------------------
   // The accessors below simply delegate to the pooled-connection settings.
   // ---------------------------------------------------------------------

   public PoolProperties getPoolProperties() {
      return poolProperties;
   }

   public void setHost(String host) {
      poolProperties.setHost(host);
   }

   public String getHost() {
      return poolProperties.getHost();
   }

   public void setPort(int port) {
      poolProperties.setPort(port);
   }

   public int getPort() {
      return poolProperties.getPort();
   }

   public boolean isFramed() {
      return poolProperties.isFramed();
   }

   public String getPassword() {
      return poolProperties.getPassword();
   }

   public String getUsername() {
      return poolProperties.getUsername();
   }

   public void setFramed(boolean framed) {
      poolProperties.setFramed(framed);
   }

   public void setPassword(String password) {
      poolProperties.setPassword(password);
   }

   public void setUsername(String username) {
      poolProperties.setUsername(username);
   }

   public void setDatasourceJndiLocation(String location) {
      poolProperties.setDataSourceJNDI(location);
   }

   public String getDatasourceJndiLocation() {
      return poolProperties.getDataSourceJNDI();
   }

   public String getConfigurationPropertiesFile() {
      return configurationPropertiesFile;
   }

   /**
    * Sets the optional pool-properties file and immediately loads it,
    * applying every property it contains to {@link #poolProperties}.
    */
   public void setConfigurationPropertiesFile(String configurationPropertiesFile) {
      this.configurationPropertiesFile = configurationPropertiesFile;
      readConfigurationProperties();
   }

   /**
    * Loads {@link #configurationPropertiesFile} (if set) and copies each
    * property into the pool configuration.
    *
    * @throws ConfigurationException if the file exists but cannot be read
    */
   private void readConfigurationProperties() {
      // No file configured (or blank name): nothing to do.
      if (configurationPropertiesFile == null || configurationPropertiesFile.trim().length() == 0)
         return;
      InputStream i = FileLookupFactory.newInstance().lookupFile(configurationPropertiesFile, getClassLoader());
      // NOTE(review): a file that cannot be found is silently ignored
      // (i == null) -- presumably deliberate best-effort; confirm.
      if (i != null) {
         Properties p = new Properties();
         try {
            p.load(i);
         } catch (IOException ioe) {
            throw new ConfigurationException("Unable to read environment properties file " + configurationPropertiesFile,
                     ioe);
         } finally {
            Util.close(i);
         }
         // Apply all properties to the PoolProperties object
         for (String propertyName : p.stringPropertyNames()) {
            poolProperties.set(propertyName, p.getProperty(propertyName));
         }
      }
   }

   public String getKeyMapper() {
      return keyMapper;
   }

   public void setKeyMapper(String keyMapper) {
      this.keyMapper = keyMapper;
   }

   public boolean isAutoCreateKeyspace() {
      return autoCreateKeyspace;
   }

   public void setAutoCreateKeyspace(boolean autoCreateKeyspace) {
      this.autoCreateKeyspace = autoCreateKeyspace;
   }

   /** Enum overload; stores the level's string form, same as the String setter. */
   public void setReadConsistencyLevel(ConsistencyLevel readConsistencyLevel) {
      this.readConsistencyLevel = readConsistencyLevel.toString();
   }

   /** Enum overload; stores the level's string form, same as the String setter. */
   public void setWriteConsistencyLevel(ConsistencyLevel writeConsistencyLevel) {
      this.writeConsistencyLevel = writeConsistencyLevel.toString();
   }
}
| apache-2.0 |
Miciah/origin | vendor/golang.org/x/tools/internal/lsp/diagnostics.go | 3070 | // Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package lsp
import (
"context"
"strings"
"golang.org/x/tools/internal/lsp/protocol"
"golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/span"
"golang.org/x/tools/internal/telemetry/log"
)
// Diagnostics computes diagnostics for the Go file at uri and publishes them
// to the client. Reports that fail to deliver are remembered once and retried
// on the next invocation; a second failure drops them.
func (s *Server) Diagnostics(ctx context.Context, view source.View, uri span.URI) {
	ctx = telemetry.File.With(ctx, uri)
	f, err := view.GetFile(ctx, uri)
	if err != nil {
		log.Error(ctx, "no file", err, telemetry.File)
		return
	}
	// For non-Go files, don't return any diagnostics.
	gof, ok := f.(source.GoFile)
	if !ok {
		return
	}
	reports, err := source.Diagnostics(ctx, view, gof, s.disabledAnalyses)
	if err != nil {
		log.Error(ctx, "failed to compute diagnostics", err, telemetry.File)
		return
	}

	// s.undelivered is shared state; everything below runs under its lock.
	s.undeliveredMu.Lock()
	defer s.undeliveredMu.Unlock()

	for uri, diagnostics := range reports {
		if err := s.publishDiagnostics(ctx, uri, diagnostics); err != nil {
			// First failure for this URI: stash the reports for a retry.
			if s.undelivered == nil {
				s.undelivered = make(map[span.URI][]source.Diagnostic)
			}
			log.Error(ctx, "failed to deliver diagnostic (will retry)", err, telemetry.File)
			s.undelivered[uri] = diagnostics
			continue
		}
		// In case we had old, undelivered diagnostics.
		delete(s.undelivered, uri)
	}
	// Anytime we compute diagnostics, make sure to also send along any
	// undelivered ones (only for remaining URIs).
	for uri, diagnostics := range s.undelivered {
		if err := s.publishDiagnostics(ctx, uri, diagnostics); err != nil {
			log.Error(ctx, "failed to deliver diagnostic for (will not retry)", err, telemetry.File)
		}
		// If we fail to deliver the same diagnostics twice, just give up.
		delete(s.undelivered, uri)
	}
}
// publishDiagnostics converts the given source diagnostics to their protocol
// form and pushes them to the client for uri. It returns an error only if the
// conversion fails.
func (s *Server) publishDiagnostics(ctx context.Context, uri span.URI, diagnostics []source.Diagnostic) error {
	diags, err := toProtocolDiagnostics(ctx, diagnostics)
	if err != nil {
		return err
	}
	params := &protocol.PublishDiagnosticsParams{
		URI:         protocol.NewURI(uri),
		Diagnostics: diags,
	}
	s.client.PublishDiagnostics(ctx, params)
	return nil
}
// toProtocolDiagnostics converts source-level diagnostics to their LSP
// protocol equivalents. It always returns a non-nil slice, even for empty
// input, preserving the previous behavior.
func toProtocolDiagnostics(ctx context.Context, diagnostics []source.Diagnostic) ([]protocol.Diagnostic, error) {
	// Preallocate capacity: exactly one protocol diagnostic per source
	// diagnostic, avoiding repeated reallocation on append.
	reports := make([]protocol.Diagnostic, 0, len(diagnostics))
	for _, diag := range diagnostics {
		diagnostic, err := toProtocolDiagnostic(ctx, diag)
		if err != nil {
			return nil, err
		}
		reports = append(reports, diagnostic)
	}
	return reports, nil
}
// toProtocolDiagnostic converts a single source.Diagnostic to the LSP wire
// representation. Severities other than error/warning map to the protocol
// zero value, exactly as before.
func toProtocolDiagnostic(ctx context.Context, diag source.Diagnostic) (protocol.Diagnostic, error) {
	var severity protocol.DiagnosticSeverity
	if diag.Severity == source.SeverityError {
		severity = protocol.SeverityError
	} else if diag.Severity == source.SeverityWarning {
		severity = protocol.SeverityWarning
	}
	result := protocol.Diagnostic{
		// go list returns errors prefixed by newline.
		Message:  strings.TrimSpace(diag.Message),
		Range:    diag.Range,
		Severity: severity,
		Source:   diag.Source,
	}
	return result, nil
}
| apache-2.0 |
arnaudsj/titanium_mobile | demos/SmokeTest/Resources/examples/yql_flickr.js | 1970 | // create table view
var tableview = Titanium.UI.createTableView();
Ti.App.fireEvent("show_indicator");
// create table view event listener
tableview.addEventListener('click', function(e)
{
// event data
var index = e.index;
var section = e.section;
var row = e.row;
var rowdata = e.rowData;
Titanium.UI.createAlertDialog({title:'Table View',message:'row ' + row + ' index ' + index + ' section ' + section + ' row data ' + rowdata}).show();
});
var navActInd = Titanium.UI.createActivityIndicator();
navActInd.show();
if (Titanium.Platform.name == 'iPhone OS') {
Titanium.UI.currentWindow.setRightNavButton(navActInd);
}
// add table view to the window
Titanium.UI.currentWindow.add(tableview);
// Query Flickr's public search table via YQL for ten "Cat" photos and render
// each result as a table-view row (thumbnail + title).
Titanium.Yahoo.yql('select * from flickr.photos.search where text="Cat" limit 10',function(e)
{
	var images = [];
	var data = e.data;
	for (var c=0;c<data.photo.length;c++)
	{
		var photo = data.photo[c];
		// form the flickr url
		var url = 'http://farm' + photo.farm + '.static.flickr.com/' + photo.server + '/' + photo.id + '_' + photo.secret + '_m.jpg';
		Ti.API.info("flickr url = "+url);
		var row = Ti.UI.createTableViewRow({height:60});
		var title = Ti.UI.createLabel({
			left:70,
			right:10,
			textAlign:'left',
			height:50,
			text:photo.title ? photo.title : "Untitled",
			font:{fontWeight:'bold',fontSize:18}
		});
		// The two platforms differ only in the property carrying the remote
		// location: Android's ImageView uses 'url' while iPhone's unified API
		// uses 'image'. Build the shared options once and set the right key,
		// instead of duplicating the whole createImageView call.
		var imageProps = {
			height:50,
			width:50,
			left:10,
			defaultImage:'../modules/ui/images/photoDefault.png'
		};
		if (Titanium.Platform.name == 'android')
		{
			// iphone moved to a single image property - android needs to do the same
			imageProps.url = url;
		}
		else
		{
			imageProps.image = url;
		}
		var image = Ti.UI.createImageView(imageProps);
		row.add(image);
		row.add(title);
		images[c] = row;
	}
	tableview.setData(images);
	navActInd.hide();
	Ti.App.fireEvent("hide_indicator");
});
| apache-2.0 |
gilberto-torrezan/gwt-material | gwt-material/src/main/java/gwt/material/design/client/ui/MaterialIcon.java | 4174 | package gwt.material.design.client.ui;
/*
* #%L
* GwtMaterial
* %%
* Copyright (C) 2015 GwtMaterialDesign
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style;
import gwt.material.design.client.base.mixin.ColorsMixin;
import gwt.material.design.client.base.mixin.CssNameMixin;
import gwt.material.design.client.base.mixin.ToggleStyleMixin;
import gwt.material.design.client.constants.IconPosition;
import gwt.material.design.client.constants.IconSize;
import gwt.material.design.client.base.AbstractButton;
import gwt.material.design.client.base.HasIcon;
import gwt.material.design.client.base.HasSeparator;
import gwt.material.design.client.constants.IconType;
//@formatter:off
/**
* We have included 740 Material Design Icons courtesy of Google.
* You can download them directly from the Material Design specs.
*
* <h3>UiBinder Usage:</h3>
* <pre>
*{@code <m:MaterialIcon waves="LIGHT" iconType="POLYMER"/>
* <m:MaterialIcon waves="LIGHT" iconType="POLYMER" textColor="blue" type="CIRCLE"/>
* <m:MaterialIcon waves="LIGHT" iconType="POLYMER" backgroundColor="blue" textColor="white" type="CIRCLE" tooltip="Tooltip" tooltipLocation="BOTTOM"/>}
* </pre>
*
* @author kevzlou7979
* @author Ben Dol
* @see <a href="http://www.google.com/design/icons/">Search Google Icons</a>
* @see <a href="http://gwt-material-demo.herokuapp.com/#icons">Material Icons Documentation</a>
*/
//@formatter:on
public class MaterialIcon extends AbstractButton implements HasSeparator, HasIcon {
private final CssNameMixin<MaterialIcon, IconPosition> posMixin = new CssNameMixin<>(this);
private final CssNameMixin<MaterialIcon, IconSize> sizeMixin = new CssNameMixin<>(this);
private final ToggleStyleMixin<MaterialIcon> prefixMixin = new ToggleStyleMixin<>(this, "prefix");
private final ColorsMixin<MaterialIcon> colorsMixin = new ColorsMixin<>(this);
/**
* Creates an empty icon.
*/
public MaterialIcon() {
super();
addStyleName("material-icons");
}
/**
* Sets a simple icon with a given type.
*/
public MaterialIcon(IconType iconType) {
this();
setIconType(iconType);
}
/**
* Sets an icon with textColor and backgroundColor.
*/
public MaterialIcon(IconType iconType, String textColor, String bgColor) {
this();
setIconType(iconType);
setTextColor(textColor);
setBackgroundColor(bgColor);
}
public void setInnerText(String innerText){
getElement().setInnerText(innerText);
}
@Override
protected Element createElement() {
return Document.get().createElement("i");
}
@Override
public MaterialIcon getIcon() {
return this;
}
@Override
public void setIconType(IconType icon) {
getElement().setInnerText(icon.getCssName());
}
@Override
public void setIconPosition(IconPosition position) {
posMixin.setCssName(position);
}
@Override
public void setIconSize(IconSize size) {
sizeMixin.setCssName(size);
}
@Override
public void setIconColor(String iconColor) {
colorsMixin.setTextColor(iconColor);
}
@Override
public void setIconFontSize(double size, Style.Unit unit) {
getElement().getStyle().setFontSize(size, unit);
}
@Override
public void setIconPrefix(boolean prefix) {
prefixMixin.setOn(prefix);
}
@Override
public boolean isIconPrefix() {
return prefixMixin.isOn();
}
}
| apache-2.0 |
stackforge/blazar | blazar/monitor/base.py | 3283 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_log import log as logging
from oslo_service import threadgroup
from blazar.db import api as db_api
LOG = logging.getLogger(__name__)
class BaseMonitor(object):
    """Base class for monitoring classes.

    Wires a set of monitor plugins into an oslo.service ThreadGroup and
    periodically invokes each plugin's heal() method, writing whatever
    reservation flags the plugins report back to the database.
    """

    def __init__(self, monitor_plugins):
        self.monitor_plugins = monitor_plugins
        self.tg = threadgroup.ThreadGroup()
        # Timer handles created by start_periodic_healing(), kept so
        # stop_periodic_healing() can cancel them later.
        self.healing_timers = []

    def start_monitoring(self):
        """Start monitoring."""
        self.start_periodic_healing()

    def stop_monitoring(self):
        """Stop monitoring."""
        self.stop_periodic_healing()

    def start_periodic_healing(self):
        """Start periodic healing process."""
        for plugin in self.monitor_plugins:
            healing_interval_mins = plugin.get_healing_interval()
            # A non-positive interval disables healing for this plugin.
            if healing_interval_mins > 0:
                self.healing_timers.append(
                    self.tg.add_timer(healing_interval_mins * 60,
                                      self.call_monitor_plugin,
                                      None,
                                      plugin.heal))

    def stop_periodic_healing(self):
        """Stop periodic healing process."""
        for timer in self.healing_timers:
            self.tg.timer_done(timer)
        # Bug fix: drop the now-stopped timer handles. Previously the list
        # was never cleared, so a stop/start/stop cycle would call
        # timer_done() again on timers already removed from the group.
        self.healing_timers = []

    def call_monitor_plugin(self, callback, *args, **kwargs):
        """Call a callback and update lease/reservation flags."""
        # This method has to handle any exception internally. It shouldn't
        # raise an exception because the timer threads in the BaseMonitor
        # class terminate their execution once the thread has received any
        # exception.
        try:
            # The callback() has to return a dictionary of
            # {reservation id: flags to update}.
            # e.g. {'dummyid': {'missing_resources': True}}
            reservation_flags = callback(*args, **kwargs)

            if reservation_flags:
                self._update_flags(reservation_flags)
        except Exception as e:
            LOG.exception('Caught an exception while executing a callback. '
                          '%s', str(e))

    def _update_flags(self, reservation_flags):
        """Update lease/reservation flags.

        :param reservation_flags: dict mapping reservation id to the flag
            dict to apply; every touched lease is also marked degraded.
        """
        lease_ids = set([])

        for reservation_id, flags in reservation_flags.items():
            db_api.reservation_update(reservation_id, flags)
            LOG.debug('Reservation %s was updated: %s',
                      reservation_id, flags)
            reservation = db_api.reservation_get(reservation_id)
            lease_ids.add(reservation['lease_id'])

        for lease_id in lease_ids:
            LOG.debug('Lease %s was updated: {"degraded": True}', lease_id)
            db_api.lease_update(lease_id, {'degraded': True})
| apache-2.0 |
skunkiferous/SMEdit | jo_plugin/src/main/java/jo/sm/plugins/all/props/PropsParameters.java | 1578 | /**
* Copyright 2014
* SMEdit https://github.com/StarMade/SMEdit
* SMTools https://github.com/StarMade/SMTools
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
**/
package jo.sm.plugins.all.props;
import jo.sm.ui.act.plugin.Description;
/**
 * Parameter bean for the application-wide "Properties" plugin dialog.
 *
 * @author Jo Jaquinta for SMEdit Classic - version 1.0
 **/
@Description(displayName = "Properties", shortDescription = "Properties affecting the whole application.")
public class PropsParameters {

    // NOTE(review): the @Description annotations presumably drive a
    // reflection-based property editor -- confirm before renaming fields.
    @Description(displayName = "Invert X", shortDescription = "Invert X Axis on mouse")
    private boolean mInvertXAxis;
    @Description(displayName = "Invert Y", shortDescription = "Invert Y Axis on mouse")
    private boolean mInvertYAxis;

    /** Creates a bean with both axes un-inverted. */
    public PropsParameters() {
    }

    /** @return whether mouse movement on the X axis is inverted. */
    public boolean isInvertXAxis() {
        return mInvertXAxis;
    }

    public void setInvertXAxis(boolean invertXAxis) {
        mInvertXAxis = invertXAxis;
    }

    /** @return whether mouse movement on the Y axis is inverted. */
    public boolean isInvertYAxis() {
        return mInvertYAxis;
    }

    public void setInvertYAxis(boolean invertYAxis) {
        mInvertYAxis = invertYAxis;
    }
}
| apache-2.0 |
submergerock/avatar-hadoop | docs/api/org/apache/hadoop/metrics/util/class-use/MetricsIntValue.html | 8893 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_20) on Wed Mar 30 21:34:43 CST 2011 -->
<TITLE>
Uses of Class org.apache.hadoop.metrics.util.MetricsIntValue (Facebook's realtime distributed database, powered by Apache Hadoop based on 0.20-append branch 0.20.1-dev API)
</TITLE>
<META NAME="date" CONTENT="2011-03-30">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.metrics.util.MetricsIntValue (Facebook's realtime distributed database, powered by Apache Hadoop based on 0.20-append branch 0.20.1-dev API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/hadoop/metrics/util/MetricsIntValue.html" title="class in org.apache.hadoop.metrics.util"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../../../../../../index.html?org/apache/hadoop/metrics/util/class-use/MetricsIntValue.html" target="_top"><B>FRAMES</B></A> &nbsp;
<A HREF="MetricsIntValue.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.hadoop.metrics.util.MetricsIntValue</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../../org/apache/hadoop/metrics/util/MetricsIntValue.html" title="class in org.apache.hadoop.metrics.util">MetricsIntValue</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.ipc.metrics"><B>org.apache.hadoop.ipc.metrics</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.ipc.metrics"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../../org/apache/hadoop/metrics/util/MetricsIntValue.html" title="class in org.apache.hadoop.metrics.util">MetricsIntValue</A> in <A HREF="../../../../../../org/apache/hadoop/ipc/metrics/package-summary.html">org.apache.hadoop.ipc.metrics</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Fields in <A HREF="../../../../../../org/apache/hadoop/ipc/metrics/package-summary.html">org.apache.hadoop.ipc.metrics</A> declared as <A HREF="../../../../../../org/apache/hadoop/metrics/util/MetricsIntValue.html" title="class in org.apache.hadoop.metrics.util">MetricsIntValue</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../../org/apache/hadoop/metrics/util/MetricsIntValue.html" title="class in org.apache.hadoop.metrics.util">MetricsIntValue</A></CODE></FONT></TD>
<TD><CODE><B>RpcMetrics.</B><B><A HREF="../../../../../../org/apache/hadoop/ipc/metrics/RpcMetrics.html#callQueueLen">callQueueLen</A></B></CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../../org/apache/hadoop/metrics/util/MetricsIntValue.html" title="class in org.apache.hadoop.metrics.util">MetricsIntValue</A></CODE></FONT></TD>
<TD><CODE><B>RpcMetrics.</B><B><A HREF="../../../../../../org/apache/hadoop/ipc/metrics/RpcMetrics.html#numOpenConnections">numOpenConnections</A></B></CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/hadoop/metrics/util/MetricsIntValue.html" title="class in org.apache.hadoop.metrics.util"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../../../../../../index.html?org/apache/hadoop/metrics/util/class-use/MetricsIntValue.html" target="_top"><B>FRAMES</B></A> &nbsp;
<A HREF="MetricsIntValue.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
This release is based on Facebook's version of Hadoop.<br>Copyright &copy; 2009 The Apache Software Foundation.
</BODY>
</HTML>
| apache-2.0 |
brettharrisonzya/couchbase-lite-net | src/ListenerComponent/Couchbase.Lite.Listener.Shared/PeerToPeer/AuthenticationMethods.cs | 4263 | //
// AuthenticationMethods.cs
//
// Author:
// Jim Borden <[email protected]>
//
// Copyright (c) 2015 Couchbase, Inc All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Collections.Generic;
using Couchbase.Lite.Auth;
namespace Couchbase.Lite.Listener
{
/// <summary>
/// Methods dealing with authentication for the client
/// </summary>
internal static class AuthenticationMethods
{
#region Public Methods
/// <summary>
/// Verifies and registers a facebook token for use in replication authentication
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
public static ICouchbaseResponseState RegisterFacebookToken(ICouchbaseListenerContext context)
{
var response = context.CreateResponse();
var body = context.BodyAs<Dictionary<string, object>>();
string email = body.GetCast<string>("email");
string remoteUrl = body.GetCast<string>("remote_url");
string accessToken = body.GetCast<string>("access_token");
if (email != null && remoteUrl != null && accessToken != null) {
Uri siteUrl;
if (!Uri.TryCreate(remoteUrl, UriKind.Absolute, out siteUrl)) {
response.InternalStatus = StatusCode.BadParam;
response.JsonBody = new Body(new Dictionary<string, object> {
{ "error", "invalid remote_url" }
});
} else if (!FacebookAuthorizer.RegisterAccessToken(accessToken, email, siteUrl)) {
response.InternalStatus = StatusCode.BadParam;
response.JsonBody = new Body(new Dictionary<string, object> {
{ "error", "invalid access_token" }
});
} else {
response.JsonBody = new Body(new Dictionary<string, object> {
{ "ok", "registered" },
{ "email", email }
});
}
} else {
response.InternalStatus = StatusCode.BadParam;
response.JsonBody = new Body(new Dictionary<string, object> {
{ "error", "required fields: access_token, email, remote_url" }
});
}
return response.AsDefaultState();
}
/// <summary>
/// Verifies and registers a persona token for use in replication authentication
/// </summary>
/// <returns>The response state for further HTTP processing</returns>
/// <param name="context">The context of the Couchbase Lite HTTP request</param>
public static ICouchbaseResponseState RegisterPersonaToken(ICouchbaseListenerContext context)
{
var response = context.CreateResponse();
var body = context.BodyAs<Dictionary<string, object>>();
string email = PersonaAuthorizer.RegisterAssertion(body.GetCast<string>("assertion"));
if (email != null) {
response.JsonBody = new Body(new Dictionary<string, object> {
{ "ok", "registered" },
{ "email", email }
});
} else {
response.InternalStatus = StatusCode.BadParam;
response.JsonBody = new Body(new Dictionary<string, object> {
{ "error", "invalid assertion" }
});
}
return response.AsDefaultState();
}
#endregion
}
}
| apache-2.0 |
lepdou/apollo | apollo-client/src/main/java/com/ctrip/framework/apollo/spring/config/ConfigPropertySourceFactory.java | 1284 | /*
* Copyright 2021 Apollo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.ctrip.framework.apollo.spring.config;
import java.util.List;
import com.ctrip.framework.apollo.Config;
import com.google.common.collect.Lists;
/**
 * Creates {@link ConfigPropertySource} instances and remembers every one it has
 * produced so callers can later enumerate all sources created by this factory.
 */
public class ConfigPropertySourceFactory {

  // Registry of every property source handed out by this factory, in creation order.
  private final List<ConfigPropertySource> configPropertySources = Lists.newLinkedList();

  /**
   * Wraps the given {@link Config} in a named {@link ConfigPropertySource} and
   * records it in this factory's registry.
   *
   * @param name   name for the new property source
   * @param source backing Apollo config
   * @return the newly created property source
   */
  public ConfigPropertySource getConfigPropertySource(String name, Config source) {
    final ConfigPropertySource created = new ConfigPropertySource(name, source);
    configPropertySources.add(created);
    return created;
  }

  /**
   * @return a defensive copy of all property sources created so far
   */
  public List<ConfigPropertySource> getAllConfigPropertySources() {
    return Lists.newLinkedList(configPropertySources);
  }
}
| apache-2.0 |
adeelmahmood/lens | lens-cube/src/main/java/org/apache/lens/cube/parse/AggregateResolver.java | 12371 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.lens.cube.parse;
import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_TABLE_OR_COL;
import java.util.Iterator;
import org.apache.lens.cube.error.LensCubeErrorCode;
import org.apache.lens.cube.metadata.CubeMeasure;
import org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
import org.apache.lens.server.api.error.LensException;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.antlr.runtime.CommonToken;
import lombok.extern.slf4j.Slf4j;
/**
 * <p> Replace select and having columns with default aggregate functions on them, if default aggregate is defined and
 * if there isn't already an aggregate function specified on the columns. </p> <p/> <p> Expressions which already
 * contain aggregate sub-expressions will not be changed. </p> <p/> <p> At this point it's assumed that aliases have
 * been added to all columns. </p>
 */
@Slf4j
class AggregateResolver implements ContextRewriter {
  // conf is unused; the constructor signature follows the ContextRewriter construction convention.
  public AggregateResolver(Configuration conf) {
  }
  /**
   * Entry point: prunes aggregated candidate facts when the query needs raw measures,
   * then wraps bare measures in select/having with their default aggregates, and
   * optionally turns SELECT into SELECT DISTINCT for attribute-only queries.
   */
  @Override
  public void rewriteContext(CubeQueryContext cubeql) throws LensException {
    if (cubeql.getCube() == null) {
      // Nothing to resolve for non-cube queries.
      return;
    }
    boolean nonDefaultAggregates = false;
    boolean aggregateResolverDisabled = cubeql.getConf().getBoolean(CubeQueryConfUtil.DISABLE_AGGREGATE_RESOLVER,
      CubeQueryConfUtil.DEFAULT_DISABLE_AGGREGATE_RESOLVER);
    // Check if the query contains measures
    // 1. not inside default aggregate expressions
    // 2. With no default aggregate defined
    // 3. there are distinct selection of measures
    // If yes, only the raw (non aggregated) fact can answer this query.
    // In that case remove aggregate facts from the candidate fact list
    if (hasMeasuresInDistinctClause(cubeql, cubeql.getSelectAST(), false)
      || hasMeasuresInDistinctClause(cubeql, cubeql.getHavingAST(), false)
      || hasMeasuresNotInDefaultAggregates(cubeql, cubeql.getSelectAST(), null, aggregateResolverDisabled)
      || hasMeasuresNotInDefaultAggregates(cubeql, cubeql.getHavingAST(), null, aggregateResolverDisabled)
      || hasMeasures(cubeql, cubeql.getWhereAST()) || hasMeasures(cubeql, cubeql.getGroupByAST())
      || hasMeasures(cubeql, cubeql.getOrderByAST())) {
      Iterator<CandidateFact> factItr = cubeql.getCandidateFacts().iterator();
      while (factItr.hasNext()) {
        CandidateFact candidate = factItr.next();
        if (candidate.fact.isAggregated()) {
          // Aggregated facts cannot answer raw-measure queries; prune with a reason.
          cubeql.addFactPruningMsgs(candidate.fact,
            CandidateTablePruneCause.missingDefaultAggregate());
          factItr.remove();
        }
      }
      nonDefaultAggregates = true;
      log.info("Query has non default aggregates, no aggregate resolution will be done");
    }
    cubeql.pruneCandidateFactSet(CandidateTablePruneCode.MISSING_DEFAULT_AGGREGATE);
    if (nonDefaultAggregates || aggregateResolverDisabled) {
      // Either resolution is disabled or the query must run on raw facts as-is.
      return;
    }
    // Wrap un-aggregated measures in select and having with their default aggregates.
    resolveClause(cubeql, cubeql.getSelectAST());
    resolveClause(cubeql, cubeql.getHavingAST());
    Configuration distConf = cubeql.getConf();
    boolean isDimOnlyDistinctEnabled = distConf.getBoolean(CubeQueryConfUtil.ENABLE_ATTRFIELDS_ADD_DISTINCT,
      CubeQueryConfUtil.DEFAULT_ATTR_FIELDS_ADD_DISTINCT);
    if (isDimOnlyDistinctEnabled) {
      // Check if any measure/aggregate columns and distinct clause used in
      // select tree. If not, update selectAST token "SELECT" to "SELECT DISTINCT"
      if (!hasMeasures(cubeql, cubeql.getSelectAST()) && !isDistinctClauseUsed(cubeql.getSelectAST())
        && !HQLParser.hasAggregate(cubeql.getSelectAST())) {
        cubeql.getSelectAST().getToken().setType(HiveParser.TOK_SELECTDI);
      }
    }
  }
  // We need to traverse the clause looking for eligible measures which can be
  // wrapped inside aggregates
  // We have to skip any columns that are already inside an aggregate UDAF
  private String resolveClause(CubeQueryContext cubeql, ASTNode clause) throws LensException {
    if (clause == null) {
      return null;
    }
    // Transform each child in place, then return the rewritten clause as HQL text.
    for (int i = 0; i < clause.getChildCount(); i++) {
      transform(cubeql, clause, (ASTNode) clause.getChild(i), i);
    }
    return HQLParser.getString(clause);
  }
  /**
   * Recursively rewrites {@code node}: leaf column references that are measures get
   * replaced (via the parent's child slot at {@code nodePos}) with their default
   * aggregate call; subtrees that are already aggregate calls are left untouched.
   */
  private void transform(CubeQueryContext cubeql, ASTNode parent, ASTNode node, int nodePos) throws LensException {
    if (node == null) {
      return;
    }
    int nodeType = node.getToken().getType();
    if (!(HQLParser.isAggregateAST(node))) {
      if (nodeType == HiveParser.TOK_TABLE_OR_COL || nodeType == HiveParser.DOT) {
        // Leaf node
        ASTNode wrapped = wrapAggregate(cubeql, node);
        if (wrapped != node) {
          if (parent != null) {
            parent.setChild(nodePos, wrapped);
            // Check if this node has an alias
            ASTNode sibling = HQLParser.findNodeByPath(parent, Identifier);
            String expr;
            if (sibling != null) {
              expr = HQLParser.getString(parent);
            } else {
              expr = HQLParser.getString(wrapped);
            }
            // Record the aggregate expression text for later query-building phases.
            cubeql.addAggregateExpr(expr.trim());
          }
        }
      } else {
        // Dig deeper in non-leaf nodes
        for (int i = 0; i < node.getChildCount(); i++) {
          transform(cubeql, node, (ASTNode) node.getChild(i), i);
        }
      }
    }
  }
  // Wrap an aggregate function around the node if its a measure, leave it
  // unchanged otherwise
  private ASTNode wrapAggregate(CubeQueryContext cubeql, ASTNode node) throws LensException {
    String tabname = null;
    String colname;
    if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
      colname = ((ASTNode) node.getChild(0)).getText();
    } else {
      // node in 'alias.column' format
      ASTNode tabident = HQLParser.findNodeByPath(node, TOK_TABLE_OR_COL, Identifier);
      ASTNode colIdent = (ASTNode) node.getChild(1);
      colname = colIdent.getText();
      tabname = tabident.getText();
    }
    String msrname = StringUtils.isBlank(tabname) ? colname : tabname + "." + colname;
    if (cubeql.isCubeMeasure(msrname)) {
      if (cubeql.getQueriedExprs().contains(colname)) {
        // Queried expression: recurse into each of its AST forms instead of wrapping here.
        String alias = cubeql.getAliasForTableName(cubeql.getCube().getName());
        for (ASTNode exprNode : cubeql.getExprCtx().getExpressionContext(colname, alias).getAllASTNodes()) {
          transform(cubeql, null, exprNode, 0);
        }
        return node;
      } else {
        CubeMeasure measure = cubeql.getCube().getMeasureByName(colname);
        String aggregateFn = measure.getAggregate();
        if (StringUtils.isBlank(aggregateFn)) {
          // Bare measure with no default aggregate cannot be resolved.
          throw new LensException(LensCubeErrorCode.NO_DEFAULT_AGGREGATE.getLensErrorInfo(), colname);
        }
        // Build TOK_FUNCTION(aggregateFn, node), i.e. aggregateFn(column).
        ASTNode fnroot = new ASTNode(new CommonToken(HiveParser.TOK_FUNCTION));
        fnroot.setParent(node.getParent());
        ASTNode fnIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier, aggregateFn));
        fnIdentNode.setParent(fnroot);
        fnroot.addChild(fnIdentNode);
        node.setParent(fnroot);
        fnroot.addChild(node);
        return fnroot;
      }
    } else {
      return node;
    }
  }
  /**
   * Returns true if the subtree references a measure that is not wrapped in that
   * measure's default aggregate. {@code function} carries the name of the nearest
   * enclosing aggregate UDAF while recursing (null when outside any aggregate).
   */
  private boolean hasMeasuresNotInDefaultAggregates(CubeQueryContext cubeql, ASTNode node, String function,
    boolean aggregateResolverDisabled) {
    if (node == null) {
      return false;
    }
    if (HQLParser.isAggregateAST(node)) {
      if (node.getChild(0).getType() == HiveParser.Identifier) {
        function = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0).getText());
      }
    } else if (cubeql.isCubeMeasure(node)) {
      // Exit for the recursion
      String colname;
      if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
        colname = ((ASTNode) node.getChild(0)).getText();
      } else {
        // node in 'alias.column' format
        ASTNode colIdent = (ASTNode) node.getChild(1);
        colname = colIdent.getText();
      }
      colname = colname.toLowerCase();
      if (cubeql.getQueriedExprs().contains(colname)) {
        // Expression measure: check every AST form of the expression instead.
        String cubeAlias = cubeql.getAliasForTableName(cubeql.getCube().getName());
        for (ASTNode exprNode : cubeql.getExprCtx().getExpressionContext(colname, cubeAlias).getAllASTNodes()) {
          if (hasMeasuresNotInDefaultAggregates(cubeql, exprNode, function, aggregateResolverDisabled)) {
            return true;
          }
        }
        return false;
      } else {
        CubeMeasure measure = cubeql.getCube().getMeasureByName(colname);
        if (function != null && !function.isEmpty()) {
          // Get the cube measure object and check if the passed function is the
          // default one set for this measure
          return !function.equalsIgnoreCase(measure.getAggregate());
        } else if (!aggregateResolverDisabled && measure.getAggregate() != null) {
          // not inside any aggregate, but default aggregate exists
          return false;
        }
        return true;
      }
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (hasMeasuresNotInDefaultAggregates(cubeql, (ASTNode) node.getChild(i), function, aggregateResolverDisabled)) {
        // Return on the first measure not inside its default aggregate
        return true;
      }
    }
    return false;
  }
  /*
   * Check if distinct keyword used in node
   */
  private boolean isDistinctClauseUsed(ASTNode node) {
    if (node == null) {
      return false;
    }
    if (node.getToken() != null) {
      // Covers both DISTINCT inside a function call and SELECT DISTINCT.
      if (node.getToken().getType() == HiveParser.TOK_FUNCTIONDI
        || node.getToken().getType() == HiveParser.TOK_SELECTDI) {
        return true;
      }
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (isDistinctClauseUsed((ASTNode) node.getChild(i))) {
        return true;
      }
    }
    return false;
  }
  /**
   * Returns true if a measure appears anywhere under a DISTINCT (function-level or
   * select-level). {@code hasDistinct} tracks whether an ancestor was DISTINCT.
   */
  private boolean hasMeasuresInDistinctClause(CubeQueryContext cubeql, ASTNode node, boolean hasDistinct) {
    if (node == null) {
      return false;
    }
    int exprTokenType = node.getToken().getType();
    boolean isDistinct = hasDistinct;
    if (exprTokenType == HiveParser.TOK_FUNCTIONDI || exprTokenType == HiveParser.TOK_SELECTDI) {
      isDistinct = true;
    } else if (cubeql.isCubeMeasure(node) && isDistinct) {
      // Exit for the recursion
      return true;
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (hasMeasuresInDistinctClause(cubeql, (ASTNode) node.getChild(i), isDistinct)) {
        // Return on the first measure in distinct clause
        return true;
      }
    }
    return false;
  }
  /** Returns true if any node in the subtree is a cube measure reference. */
  private boolean hasMeasures(CubeQueryContext cubeql, ASTNode node) {
    if (node == null) {
      return false;
    }
    if (cubeql.isCubeMeasure(node)) {
      return true;
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (hasMeasures(cubeql, (ASTNode) node.getChild(i))) {
        return true;
      }
    }
    return false;
  }
  /** Records every aggregate expression found under {@code root} in the query context. */
  static void updateAggregates(ASTNode root, CubeQueryContext cubeql) {
    if (root == null) {
      return;
    }
    if (HQLParser.isAggregateAST(root)) {
      cubeql.addAggregateExpr(HQLParser.getString(root).trim());
    } else {
      for (int i = 0; i < root.getChildCount(); i++) {
        ASTNode child = (ASTNode) root.getChild(i);
        updateAggregates(child, cubeql);
      }
    }
  }
}
| apache-2.0 |
coufon/neon-distributed | neon/data/ticker.py | 13214 | # ----------------------------------------------------------------------------
# Copyright 2014 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
"""
For machine generated datasets.
"""
import numpy as np
from neon import NervanaObject
class Task(NervanaObject):
    """
    Common machinery shared by the concrete ticker tasks.
    """

    def fetch_io(self, time_steps):
        """
        Allocate a zeroed (inputs, outputs) numpy pair sized for a minibatch
        spanning ``time_steps`` steps.
        """
        n_cols = self.be.bsz * time_steps
        return np.zeros((self.nin, n_cols)), np.zeros((self.nout, n_cols))

    def fill_buffers(self, time_steps, inputs, outputs, in_tensor, out_tensor, mask):
        """
        Pad the minibatch arrays out to the maximum buffer width, push them to
        the device tensors, and mask off the unused tail columns.
        """
        n_cols = self.be.bsz * time_steps

        # Embed the (possibly shorter) minibatch into max-width host arrays.
        padded_in = np.zeros((self.nin, self.max_columns))
        padded_out = np.zeros((self.nout, self.max_columns))
        padded_in[:, :n_cols] = inputs
        padded_out[:, :n_cols] = outputs

        # Ship the padded arrays to the device buffers.
        in_tensor.set(padded_in)
        out_tensor.set(padded_out)

        # Valid columns are unmasked; padding columns are zeroed out of the cost.
        mask[:, :n_cols] = 1
        mask[:, n_cols:] = 0
class CopyTask(Task):
    """
    The copy task from the Neural Turing Machines paper:
        http://arxiv.org/abs/1410.5401

    This version of the task is batched.
    All sequences in the same mini-batch are the same length,
    but every new minibatch has a randomly chosen minibatch length.

    When a given minibatch has length < seq_len_max, we mask the outputs
    for time steps after time_steps_max.

    The generated data is laid out in the same way as other RNN data in neon.
    """

    def __init__(self, seq_len_max, vec_size):
        """
        Set up the attributes that Ticker needs to see.

        Args:
            seq_len_max (int): longest allowable sequence length
            vec_size (int): width of the bit-vector to be copied (was 8 in paper)
        """
        self.seq_len_max = seq_len_max
        self.vec_size = vec_size
        self.nout = self.vec_size  # output has the same dimension as the underlying bit vector
        self.nin = self.vec_size + 2  # input has more dims (for the start and stop channels)

        # A length-l sequence is seen once as input and once as output,
        # plus one start-bit step and one stop-bit step: 2*l + 2 total.
        self.time_steps_func = lambda l: 2 * l + 2
        # Fixed: time_steps_max was previously assigned twice (once via the
        # literal 2 * seq_len_max + 2 and once via time_steps_func); keep the
        # single authoritative computation through time_steps_func.
        self.time_steps_max = self.time_steps_func(self.seq_len_max)
        self.max_columns = self.time_steps_max * self.be.bsz

    def synthesize(self, in_tensor, out_tensor, mask):
        """
        Create a new minibatch of ticker copy task data.

        Args:
            in_tensor: device buffer holding inputs
            out_tensor: device buffer holding outputs
            mask: device buffer for the output mask
        """
        # All sequences in a minibatch are the same length for convenience
        seq_len = np.random.randint(1, self.seq_len_max + 1)
        time_steps = self.time_steps_func(seq_len)

        # Generate intermediate buffers of the right size
        inputs, outputs = super(CopyTask, self).fetch_io(time_steps)

        # Set the start bit
        inputs[-2, :self.be.bsz] = 1

        # Generate the sequence to be copied
        seq = np.random.randint(2,
                                size=(self.vec_size,
                                      seq_len * self.be.bsz))

        # Set the stop bit
        stop_loc = self.be.bsz * (seq_len + 1)
        inputs[-1, stop_loc:stop_loc + self.be.bsz] = 1

        # Place the actual sequence to copy in inputs
        inputs[:self.vec_size, self.be.bsz:stop_loc] = seq

        # Now place that same sequence in a different place in outputs
        outputs[:, self.be.bsz * (seq_len + 2):] = seq

        # Fill the device minibatch buffers
        super(CopyTask, self).fill_buffers(time_steps, inputs, outputs,
                                           in_tensor, out_tensor, mask)
class RepeatCopyTask(Task):
    """
    The repeat copy task from the Neural Turing Machines paper:
        http://arxiv.org/abs/1410.5401

    See comments on CopyTask class for more details.
    """

    def __init__(self, seq_len_max, repeat_count_max, vec_size):
        """
        Set up the attributes that Ticker needs to see.

        Args:
            seq_len_max (int): longest allowable sequence length
            repeat_count_max (int): max number of repeats
            vec_size (int): width of the bit-vector to be copied (was 8 in paper)
        """
        self.seq_len_max = seq_len_max
        # Fixed: this previously assigned seq_len_max, silently ignoring the
        # repeat_count_max argument. Use the parameter it documents.
        self.repeat_count_max = repeat_count_max
        self.vec_size = vec_size
        self.nout = self.vec_size + 1  # we output the sequence and a stop bit in a stop channel
        self.nin = self.vec_size + 2  # input has more dims (for the start and stop channels)

        # seq is seen once as input, repeat_count times as output, with a
        # start bit, stop bit, and output stop bit
        self.time_steps_func = lambda l, r: l * (r + 1) + 3
        self.time_steps_max = self.time_steps_func(self.seq_len_max, self.repeat_count_max)
        self.max_columns = self.time_steps_max * self.be.bsz

    def synthesize(self, in_tensor, out_tensor, mask):
        """
        Create a new minibatch of ticker repeat copy task data.

        Args:
            in_tensor: device buffer holding inputs
            out_tensor: device buffer holding outputs
            mask: device buffer for the output mask
        """
        # All sequences in a minibatch are the same length for convenience
        seq_len = np.random.randint(1, self.seq_len_max + 1)
        repeat_count = np.random.randint(1, self.repeat_count_max + 1)
        time_steps = self.time_steps_func(seq_len, repeat_count)

        # Get the minibatch specific numpy buffers
        inputs, outputs = super(RepeatCopyTask, self).fetch_io(time_steps)

        # Set the start bit
        inputs[-2, :self.be.bsz] = 1

        # Generate the sequence to be copied
        seq = np.random.randint(2,
                                size=(self.vec_size,
                                      seq_len * self.be.bsz))

        # Set the repeat count
        # TODO: should we normalize repeat count?
        stop_loc = self.be.bsz * (seq_len + 1)
        inputs[-1, stop_loc:stop_loc + self.be.bsz] = repeat_count

        # Place the actual sequence to copy in inputs
        inputs[:self.vec_size, self.be.bsz:stop_loc] = seq

        # Now place that same sequence repeat_copy times in outputs
        for i in range(repeat_count):
            start = self.be.bsz * ((i + 1) * seq_len + 2)
            stop = start + seq_len * self.be.bsz
            outputs[:-1, start:stop] = seq

        # Place the output finish bit
        outputs[-1, -self.be.bsz:] = 1

        # Fill the device minibatch buffers
        super(RepeatCopyTask, self).fill_buffers(time_steps, inputs, outputs,
                                                 in_tensor, out_tensor, mask)
class PrioritySortTask(Task):
    """
    The priority sort task from the Neural Turing Machines paper:
        http://arxiv.org/abs/1410.5401

    The network sees a sequence of bit vectors, each tagged with a scalar
    priority, and must emit the same vectors sorted by ascending priority.
    See comments on CopyTask class for more details on batching and masking.
    """

    def __init__(self, seq_len_max, vec_size):
        """
        Set up the attributes that Ticker needs to see.

        Args:
            seq_len_max (int): longest allowable sequence length
            vec_size (int): width of the bit-vector to be copied (was 8 in paper)
        """
        self.seq_len_max = seq_len_max
        self.vec_size = vec_size
        self.nout = self.vec_size  # we output the sorted sequence, with no stop bit
        self.nin = self.vec_size + 3  # extra channels for start, stop, and priority
        # seq is seen once as input with start and stop bits
        # then we output seq in sorted order
        self.time_steps_func = lambda l: 2 * l + 2
        self.time_steps_max = self.time_steps_func(self.seq_len_max)
        self.max_columns = self.time_steps_max * self.be.bsz

    def synthesize(self, in_tensor, out_tensor, mask):
        """
        Create a new minibatch of ticker priority sort task data.

        Args:
            in_tensor: device buffer holding inputs
            out_tensor: device buffer holding outputs
            mask: device buffer for the output mask
        """
        # All sequences in a minibatch are the same length for convenience
        seq_len = np.random.randint(1, self.seq_len_max + 1)
        time_steps = self.time_steps_func(seq_len)
        # Get the minibatch specific numpy buffers
        inputs, outputs = super(PrioritySortTask, self).fetch_io(time_steps)
        # Set the start bit (channel -3 is the start channel)
        inputs[-3, :self.be.bsz] = 1
        # Generate the sequence to be copied; note it is nin rows tall so the
        # priority row can ride along with the data rows
        seq = np.random.randint(2,
                                size=(self.nin,
                                      seq_len * self.be.bsz)).astype(float)
        # Zero out the start, stop, and priority channels
        seq[-3:, :] = 0
        # Generate the scalar priorities and put them in seq (last row)
        priorities = np.random.uniform(-1, 1, size=(seq_len * self.be.bsz,))
        seq[-1, :] = priorities
        # Set the stop bit (channel -2 is the stop channel)
        stop_loc = self.be.bsz * (seq_len + 1)
        inputs[-2, stop_loc:stop_loc + self.be.bsz] = 1
        # Place the actual sequence to copy in inputs
        inputs[:, self.be.bsz:stop_loc] = seq
        # sort the sequences; columns of one sequence are strided by bsz
        # because minibatch data is interleaved across time steps
        for i in range(self.be.bsz):
            # for every sequence in the batch
            # x <- every column in the sequence
            x = seq[:, i::self.be.bsz]
            # sort that set of columns by elt in the last row (the priority)
            x = x[:, x[-1, :].argsort()]
            # put those columns back into minibatch in the right places
            seq[:, i::self.be.bsz] = x

        # Target is the sorted data rows only (priority/flag channels dropped)
        outputs[:, self.be.bsz * (seq_len + 2):] = seq[:self.nout, :]

        # Fill the device minibatch buffers
        super(PrioritySortTask, self).fill_buffers(time_steps, inputs, outputs,
                                                   in_tensor, out_tensor, mask)
class Ticker(NervanaObject):
    """
    Iterable dataset wrapper around a ticker task. Each iteration step asks the
    task object to synthesize one fresh minibatch directly into device buffers.
    """

    def reset(self):
        """No-op: ticker data is generated on the fly, so there is no state to reset."""
        pass

    def __init__(self, task):
        """
        Construct a ticker dataset object.

        Args:
            task: object describing the problem to train on. It exposes input
                and output sizes, maximum sequence length, etc., and implements
                synthesize(), which generates the next minibatch of data.
        """
        self.task = task

        # Iterator bookkeeping; other code expects datasets to expose these
        # even though they carry little meaning for generated data.
        self.nbatches = 100
        self.batch_index = 0
        self.ndata = self.nbatches * self.be.bsz

        # Mirror the task's dimensions because downstream code reads nin/nout
        # directly off the dataset.
        self.nin = task.nin
        self.nout = task.nout

        # Configuration elsewhere relies on the existence of this attribute.
        self.shape = (self.nin, self.task.time_steps_max)

        # Device buffers for the inputs, the targets, and the output mask.
        n_steps = self.task.time_steps_max
        self.dev_X = self.be.iobuf((self.nin, n_steps))
        self.dev_y = self.be.iobuf((self.nout, n_steps))
        self.mask = self.be.iobuf((self.nout, n_steps))

    def __iter__(self):
        """
        Generator that can be used to iterate over this dataset.

        Yields:
            tuple: the next minibatch, as (inputs, (targets, mask)). The mask
                zeroes out columns beyond this minibatch's sequence length,
                since the buffers are always sized for the maximum length.
        """
        self.batch_index = 0
        while self.batch_index < self.nbatches:
            # The task object writes minibatch data into the buffers we pass it.
            self.task.synthesize(self.dev_X, self.dev_y, self.mask)
            self.batch_index += 1
            yield self.dev_X, (self.dev_y, self.mask)
| apache-2.0 |
boundlessgeo/sdk | examples/realtime/index.html | 223 | ---
layout: example.html
title: Realtime Data Example
shortdesc: Demonstrates showing realtime data on a map.
docs: >
<li>Updating a GeoJSON source every 2 seconds</li>
<li>Using icon-image in a layer's layout</li>
---
| apache-2.0 |
fginter/docs-fginterfork | _includes/stats/ga/dep/neg.md | 2936 |
--------------------------------------------------------------------------------
## Treebank Statistics (UD_Irish)
This relation is universal.
85 nodes (0%) are attached to their parents as `neg`.
85 instances of `neg` (100%) are right-to-left (child precedes parent).
Average distance between parent and child is 1.
The following 3 pairs of parts of speech are connected with `neg`: [ga-pos/VERB]()-[ga-pos/PART]() (82; 96% instances), [ga-pos/X]()-[ga-pos/PART]() (2; 2% instances), [ga-pos/NOUN]()-[ga-pos/PART]() (1; 1% instances).
~~~ conllu
# visual-style 1 bgColor:blue
# visual-style 1 fgColor:white
# visual-style 2 bgColor:blue
# visual-style 2 fgColor:white
# visual-style 2 1 neg color:blue
1 Níor níor PART Vb Negative=Neg|PartType=Vb|Tense=Past 2 neg _ _
2 tháinig tar VERB VI Form=Len|Mood=Ind|Negative=Neg|Tense=Past 0 root _ _
3 sé sé PRON Pers Gender=Masc|Number=Sing|Person=3 2 nsubj _ _
4 go go PART Ad PartType=Ad 5 mark:prt _ _
5 fóill fóill ADJ Adj Degree=Pos 2 advmod _ _
6 . . PUNCT . _ 2 punct _ _
~~~
~~~ conllu
# visual-style 1 bgColor:blue
# visual-style 1 fgColor:white
# visual-style 2 bgColor:blue
# visual-style 2 fgColor:white
# visual-style 2 1 neg color:blue
1 Ná ná PART Vb Mood=Imp|PartType=Vb 2 neg _ _
2 dein dein X CM Dialect=Munster|Mood=Imp|Negative=Neg|Number=Sing|Person=2 0 root _ _
3 so seo X CM Dialect=Munster|PronType=Dem 2 dobj _ _
4 agus agus CONJ Coord _ 2 cc _ _
5 ná ná PART Vb Mood=Imp|PartType=Vb 6 neg _ _
6 dein dein X CM Dialect=Munster|Mood=Imp|Negative=Neg|Number=Sing|Person=2 2 conj _ _
7 súd siúd PRON Dem PronType=Dem 6 dobj _ _
8 . . PUNCT . _ 2 punct _ _
~~~
~~~ conllu
# visual-style 2 bgColor:blue
# visual-style 2 fgColor:white
# visual-style 3 bgColor:blue
# visual-style 3 fgColor:white
# visual-style 3 2 neg color:blue
1 ' ' PUNCT Punct _ 3 punct _ _
2 Ná ná PART Vb Mood=Imp|PartType=Vb 3 neg _ _
3 habair abar NOUN Noun Case=Com|Gender=Masc|Number=Plur 0 root _ _
4 liom le ADP Prep Number=Sing|Person=1 3 nmod:prep _ _
5 gurb is VERB Cop Form=VF|Tense=Pres|VerbForm=Cop 8 cop _ _
6 é é PRON Pers Gender=Masc|Number=Sing|Person=3 8 nmod _ _
7 do do DET Det Number=Sing|Person=2|Poss=Yes 8 nmod:poss _ _
8 thuairimse tuairim NOUN Noun Case=Com|Form=Len|Gender=Fem|Number=Sing 3 ccomp _ _
9 leis le ADP Simp _ 10 case _ _
10 é é PRON Pers Gender=Masc|Number=Sing|Person=3 8 nmod _ _
11 gur is VERB Cop Tense=Pres|VerbForm=Cop 12 cop _ _
12 bréaga bréag NOUN Noun Case=Com|Gender=Fem|Number=Plur 8 csubj:cop _ _
13 a a DET Det Gender=Masc|Number=Sing|Person=3|Poss=Yes 14 det _ _
14 leath leath NOUN Noun Case=Com|Gender=Fem|Number=Sing 12 compound _ _
15 dá dá SCONJ Subord _ 16 nsubj _ _
16 bhfuil bí VERB PresInd Form=Ecl|Mood=Ind|Tense=Pres 12 acl:relcl _ _
17 foghlamtha foghlamtha ADJ Adj VerbForm=Part 16 xcomp:pred _ _
18 againn ag ADP Prep Number=Plur|Person=1 17 nmod:prep _ _
19 . . PUNCT . _ 3 punct _ _
~~~
| apache-2.0 |
apache/incubator-datafu | datafu-pig/src/main/java/datafu/pig/random/RandomUUID.java | 1445 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package datafu.pig.random;
import java.io.IOException;
import java.util.UUID;
import org.apache.pig.EvalFunc;
import org.apache.pig.builtin.Nondeterministic;
import org.apache.pig.data.*;
import org.apache.pig.impl.logicalLayer.schema.Schema;
/**
 * Pig UDF that emits a freshly generated random UUID (via {@link java.util.UUID})
 * for every invocation. Marked {@code @Nondeterministic} so Pig never caches or
 * re-orders calls assuming repeatable output.
 */
@Nondeterministic
public class RandomUUID extends EvalFunc<String>
{
  /**
   * Ignores the input tuple and returns a new random UUID string.
   */
  @Override
  public String exec(Tuple input) throws IOException
  {
    final UUID generated = UUID.randomUUID();
    return generated.toString();
  }

  /**
   * Declares the output as a single chararray field named "uuid".
   */
  @Override
  public Schema outputSchema(Schema input)
  {
    final Schema.FieldSchema field = new Schema.FieldSchema("uuid", DataType.CHARARRAY);
    return new Schema(field);
  }
}
| apache-2.0 |
libopenstorage/stork | vendor/github.com/portworx/kvdb/mem/kv_mem.go | 20068 | package mem
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/portworx/kvdb"
"github.com/portworx/kvdb/common"
"github.com/sirupsen/logrus"
)
const (
// Name is the name of this kvdb implementation.
Name = "kv-mem"
// KvSnap is an option passed to designate this kvdb as a snap.
KvSnap = "KvSnap"
// KvUseInterface is an option passed that configures the mem to store
// the values as interfaces instead of bytes. It will not create a
// copy of the interface that is passed in. USE WITH CAUTION
KvUseInterface = "KvUseInterface"
bootstrapKey = "bootstrap"
)
var (
// ErrSnap is returned if an operation is not supported on a snap.
ErrSnap = errors.New("operation not supported on snap")
// ErrSnapWithInterfaceNotSupported is returned when a snap kv-mem is
// created with KvUseInterface flag on
ErrSnapWithInterfaceNotSupported = errors.New("snap kvdb not supported with interfaces")
// ErrIllegalSelect is returned when an incorrect select function
// implementation is detected.
ErrIllegalSelect = errors.New("Illegal Select implementation")
)
// init registers the in-memory driver with the kvdb registry under Name
// so callers can instantiate it through the generic kvdb factory.
func init() {
	if err := kvdb.Register(Name, New, Version); err != nil {
		panic(err.Error())
	}
}
type memKV struct {
common.BaseKvdb
// m is the key value database
m map[string]*memKVPair
// updates is the list of latest few updates
dist WatchDistributor
// mutex protects m, w, wt
mutex sync.Mutex
// index current kvdb index
index uint64
domain string
// locks is the map of currently held locks
locks map[string]chan int
// noByte will store all the values as interface
noByte bool
kvdb.Controller
}
type memKVPair struct {
kvdb.KVPair
// ivalue is the value for this kv pair stored as an interface
ivalue interface{}
}
func (mkvp *memKVPair) copy() *kvdb.KVPair {
copyKvp := mkvp.KVPair
if mkvp.Value == nil && mkvp.ivalue != nil {
copyKvp.Value, _ = common.ToBytes(mkvp.ivalue)
}
return ©Kvp
}
type snapMem struct {
*memKV
}
// watchUpdate refers to an update to this kvdb
type watchUpdate struct {
	// key is the key that was updated
	key string
	// kvp is the key-value that was updated
	kvp memKVPair
	// err is any error on update
	err error
}
// WatchUpdateQueue is a producer consumer queue.
type WatchUpdateQueue interface {
	// Enqueue will enqueue an update. It is non-blocking.
	Enqueue(update *watchUpdate)
	// Dequeue will either return an element from front of the queue or
	// will block until element becomes available
	Dequeue() *watchUpdate
}
// WatchDistributor distributes updates to the watchers
type WatchDistributor interface {
	// Add creates a new watch queue to send updates
	Add() WatchUpdateQueue
	// Remove removes an existing watch queue
	Remove(WatchUpdateQueue)
	// NewUpdate is invoked to distribute a new update
	NewUpdate(w *watchUpdate)
}
// distributor implements WatchDistributor interface.
// The embedded mutex guards both slices below.
type distributor struct {
	sync.Mutex
	// updates is the list of latest few updates
	updates []*watchUpdate
	// watchers watch for updates
	watchers []WatchUpdateQueue
}
// NewWatchDistributor returns a new instance of
// the WatchDistrubtor interface
func NewWatchDistributor() WatchDistributor {
	return &distributor{}
}
// Add registers a new watch queue with the distributor. The retained
// history of recent updates is replayed into the new queue so a late
// subscriber does not miss them, then the queue starts receiving live
// updates via NewUpdate.
func (d *distributor) Add() WatchUpdateQueue {
	d.Lock()
	defer d.Unlock()
	q := NewWatchUpdateQueue()
	for _, u := range d.updates {
		q.Enqueue(u)
	}
	d.watchers = append(d.watchers, q)
	return q
}
// Remove unregisters a watch queue previously returned by Add. Queues are
// registered at most once (Add creates a fresh queue each call), so the
// scan stops at the first match.
func (d *distributor) Remove(r WatchUpdateQueue) {
	d.Lock()
	defer d.Unlock()
	for i, q := range d.watchers {
		if q == r {
			// Shift the tail left, drop the now-duplicated last slot and
			// nil it out so the queue can be garbage collected.
			copy(d.watchers[i:], d.watchers[i+1:])
			d.watchers[len(d.watchers)-1] = nil
			d.watchers = d.watchers[:len(d.watchers)-1]
			// Stop here: the original kept ranging over the stale slice
			// header after mutating it, needlessly re-inspecting shifted
			// elements.
			break
		}
	}
}
// NewUpdate records u in the bounded replay history and fans it out to
// every registered watch queue.
func (d *distributor) NewUpdate(u *watchUpdate) {
	d.Lock()
	defer d.Unlock()
	// collect update
	d.updates = append(d.updates, u)
	// Once the backlog exceeds 100 entries, drop the oldest 100 so the
	// replay history stays small.
	if len(d.updates) > 100 {
		d.updates = d.updates[100:]
	}
	// send update to watchers
	for _, q := range d.watchers {
		q.Enqueue(u)
	}
}
// watchQueue implements WatchUpdateQueue interface for watchUpdates
type watchQueue struct {
	// updates is the list of updates
	updates []*watchUpdate
	// m is the mutex to protect updates
	m *sync.Mutex
	// cv is used to coordinate the producer-consumer threads
	cv *sync.Cond
}
// NewWatchUpdateQueue returns an instance of WatchUpdateQueue
func NewWatchUpdateQueue() WatchUpdateQueue {
	mtx := &sync.Mutex{}
	return &watchQueue{
		m: mtx,
		cv: sync.NewCond(mtx),
		updates: make([]*watchUpdate, 0)}
}
// Dequeue pops the oldest update, blocking on the condition variable
// until one is available. cv.Wait atomically releases and reacquires m,
// so the emptiness re-check on each wakeup is required (spurious/los­t
// wakeups are possible with a shared cond).
func (w *watchQueue) Dequeue() *watchUpdate {
	w.m.Lock()
	for {
		if len(w.updates) > 0 {
			update := w.updates[0]
			w.updates = w.updates[1:]
			w.m.Unlock()
			return update
		}
		w.cv.Wait()
	}
}
// Enqueue appends an update to the tail of the queue and wakes one
// waiting consumer. It only blocks for the short critical section and
// never waits on the consumer side.
func (w *watchQueue) Enqueue(update *watchUpdate) {
	w.m.Lock()
	defer w.m.Unlock()
	w.updates = append(w.updates, update)
	w.cv.Signal()
}
// watchData bundles a watcher callback with its registration arguments.
type watchData struct {
	cb kvdb.WatchCB
	opaque interface{}
	waitIndex uint64
}
// New constructs a new kvdb.Kvdb.
// The domain is normalized to end with "/" and is prepended to every key.
// Recognized options: KvSnap (returns a snapshot-flavored instance) and
// KvUseInterface (store values as raw interfaces instead of bytes); the
// two options are mutually exclusive.
func New(
	domain string,
	machines []string,
	options map[string]string,
	fatalErrorCb kvdb.FatalErrorCB,
) (kvdb.Kvdb, error) {
	if domain != "" && !strings.HasSuffix(domain, "/") {
		domain = domain + "/"
	}
	mem := &memKV{
		BaseKvdb: common.BaseKvdb{FatalCb: fatalErrorCb},
		m: make(map[string]*memKVPair),
		dist: NewWatchDistributor(),
		domain: domain,
		Controller: kvdb.ControllerNotSupported,
		locks: make(map[string]chan int),
	}
	var noByte bool
	if _, noByte = options[KvUseInterface]; noByte {
		mem.noByte = true
	}
	if _, ok := options[KvSnap]; ok && !noByte {
		return &snapMem{memKV: mem}, nil
	} else if ok && noByte {
		return nil, ErrSnapWithInterfaceNotSupported
	}
	return mem, nil
}
// Version returns the supported version of the mem implementation
func Version(url string, kvdbOptions map[string]string) (string, error) {
	return kvdb.MemVersion1, nil
}
// String returns the driver name.
func (kv *memKV) String() string {
	return Name
}
// Capabilities reports that updates are delivered to watchers in order.
func (kv *memKV) Capabilities() int {
	return kvdb.KVCapabilityOrderedUpdates
}
// get looks up the domain-prefixed key. Caller must hold kv.mutex.
func (kv *memKV) get(key string) (*memKVPair, error) {
	key = kv.domain + key
	v, ok := kv.m[key]
	if !ok {
		return nil, kvdb.ErrNotFound
	}
	return v, nil
}
// exists is an alias for get. Caller must hold kv.mutex.
func (kv *memKV) exists(key string) (*memKVPair, error) {
	return kv.get(key)
}
// Get returns a copy of the pair stored at key, or kvdb.ErrNotFound.
func (kv *memKV) Get(key string) (*kvdb.KVPair, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	v, err := kv.get(key)
	if err != nil {
		return nil, err
	}
	return v.copy(), nil
}
// Snapshot returns a read-only copy of all keys matching one of the given
// prefixes, together with the kvdb index at which the snapshot was taken.
// The consistent flag is ignored: the whole copy happens under kv.mutex.
func (kv *memKV) Snapshot(prefixes []string, consistent bool) (kvdb.Kvdb, uint64, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	// Write a throwaway bootstrap key so the snapshot is tagged with a
	// fresh, unique kvdb index; it is deleted again below.
	_, err := kv.put(bootstrapKey, time.Now().UnixNano(), 0)
	if err != nil {
		return nil, 0, fmt.Errorf("Failed to create snap bootstrap key: %v", err)
	}
	data := make(map[string]*memKVPair)
	for key, value := range kv.m {
		// Keys containing "/_" are internal (e.g. locks) and are skipped.
		if strings.Contains(key, "/_") {
			continue
		}
		found := false
		for _, prefix := range prefixes {
			prefix = kv.domain + prefix
			if strings.HasPrefix(key, prefix) {
				found = true
				break
			}
		}
		if !found {
			continue
		}
		// Deep-copy the value bytes so later writes to the live store do
		// not leak into the snapshot.
		snap := &memKVPair{}
		snap.KVPair = value.KVPair
		cpy := value.copy()
		snap.Value = make([]byte, len(cpy.Value))
		copy(snap.Value, cpy.Value)
		data[key] = snap
	}
	highestKvPair, _ := kv.delete(bootstrapKey)
	// Snapshot only data, watches are not copied.
	return &snapMem{
		&memKV{
			m: data,
			domain: kv.domain,
		},
	}, highestKvPair.ModifiedIndex, nil
}
// put creates or overwrites a key and notifies watchers. Caller must hold
// kv.mutex. A non-zero ttl (interpreted here as seconds) schedules a
// best-effort deletion of the key after it expires.
func (kv *memKV) put(
	key string,
	value interface{},
	ttl uint64,
) (*kvdb.KVPair, error) {
	var (
		kvp *memKVPair
		b []byte
		err error
		ival interface{}
	)
	suffix := key
	key = kv.domain + suffix
	// Every mutation bumps the global kvdb index.
	index := atomic.AddUint64(&kv.index, 1)
	// Either set bytes or interface value
	if !kv.noByte {
		b, err = common.ToBytes(value)
		if err != nil {
			return nil, err
		}
	} else {
		ival = value
	}
	// Existing keys are updated in place (KVSet, CreatedIndex preserved);
	// new keys get a fresh pair marked KVCreate.
	if old, ok := kv.m[key]; ok {
		old.Value = b
		old.ivalue = ival
		old.Action = kvdb.KVSet
		old.ModifiedIndex = index
		old.KVDBIndex = index
		kvp = old
	} else {
		kvp = &memKVPair{
			KVPair: kvdb.KVPair{
				Key: key,
				Value: b,
				TTL: int64(ttl),
				KVDBIndex: index,
				ModifiedIndex: index,
				CreatedIndex: index,
				Action: kvdb.KVCreate,
			},
			ivalue: ival,
		}
		kv.m[key] = kvp
	}
	kv.normalize(&kvp.KVPair)
	kv.dist.NewUpdate(&watchUpdate{key, *kvp, nil})
	if ttl != 0 {
		// Expire the key after ttl seconds. The deletion re-takes the
		// mutex, so it must run outside the current critical section.
		time.AfterFunc(time.Second*time.Duration(ttl), func() {
			// TODO: handle error
			kv.mutex.Lock()
			defer kv.mutex.Unlock()
			_, _ = kv.delete(suffix)
		})
	}
	return kvp.copy(), nil
}
// Put creates or overwrites key with value (locking wrapper around put).
func (kv *memKV) Put(
	key string,
	value interface{},
	ttl uint64,
) (*kvdb.KVPair, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	return kv.put(key, value, ttl)
}
// GetVal fetches key and JSON-decodes its value into v, which must be a
// pointer. Returns the copied pair alongside any decode error.
func (kv *memKV) GetVal(key string, v interface{}) (*kvdb.KVPair, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	kvp, err := kv.get(key)
	if err != nil {
		return nil, err
	}
	cpy := kvp.copy()
	err = json.Unmarshal(cpy.Value, v)
	return cpy, err
}
// Create writes key only if it does not already exist; otherwise it
// returns the existing pair together with kvdb.ErrExist.
func (kv *memKV) Create(
	key string,
	value interface{},
	ttl uint64,
) (*kvdb.KVPair, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	result, err := kv.exists(key)
	if err != nil {
		return kv.put(key, value, ttl)
	}
	return &result.KVPair, kvdb.ErrExist
}
// Update overwrites key only if it already exists; otherwise it returns
// kvdb.ErrNotFound.
func (kv *memKV) Update(
	key string,
	value interface{},
	ttl uint64,
) (*kvdb.KVPair, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	if _, err := kv.exists(key); err != nil {
		return nil, kvdb.ErrNotFound
	}
	return kv.put(key, value, ttl)
}
// Enumerate returns copies of all pairs under prefix (locking wrapper).
func (kv *memKV) Enumerate(prefix string) (kvdb.KVPairs, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	return kv.enumerate(prefix)
}
// enumerate returns a list of values and creates a copy if specified.
// Internal keys containing "/_" are excluded. Caller must hold kv.mutex.
func (kv *memKV) enumerate(prefix string) (kvdb.KVPairs, error) {
	var kvp = make(kvdb.KVPairs, 0, 100)
	prefix = kv.domain + prefix
	for k, v := range kv.m {
		if strings.HasPrefix(k, prefix) && !strings.Contains(k, "/_") {
			kvpLocal := v.copy()
			kvpLocal.Key = k
			kv.normalize(kvpLocal)
			kvp = append(kvp, kvpLocal)
		}
	}
	return kvp, nil
}
// delete removes key, bumps the kvdb index, and notifies watchers with a
// KVDelete action. Caller must hold kv.mutex.
func (kv *memKV) delete(key string) (*kvdb.KVPair, error) {
	kvp, err := kv.get(key)
	if err != nil {
		return nil, err
	}
	kvp.KVDBIndex = atomic.AddUint64(&kv.index, 1)
	kvp.ModifiedIndex = kvp.KVDBIndex
	kvp.Action = kvdb.KVDelete
	delete(kv.m, kv.domain+key)
	kv.dist.NewUpdate(&watchUpdate{kv.domain + key, *kvp, nil})
	return &kvp.KVPair, nil
}
// Delete removes key (locking wrapper around delete).
func (kv *memKV) Delete(key string) (*kvdb.KVPair, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	return kv.delete(key)
}
// DeleteTree removes every key under prefix. The prefix is normalized to
// end with the default separator so sibling keys that merely share a
// textual prefix are not deleted. Returns the last deletion error, if any.
func (kv *memKV) DeleteTree(prefix string) error {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	if len(prefix) > 0 && !strings.HasSuffix(prefix, kvdb.DefaultSeparator) {
		prefix += kvdb.DefaultSeparator
	}
	kvp, err := kv.enumerate(prefix)
	if err != nil {
		return err
	}
	for _, v := range kvp {
		// TODO: multiple errors
		if _, iErr := kv.delete(v.Key); iErr != nil {
			err = iErr
		}
	}
	return err
}
// Keys lists the distinct child segments directly under prefix, using sep
// (default "/") as the path separator. Internal keys containing "/_" are
// excluded.
func (kv *memKV) Keys(prefix, sep string) ([]string, error) {
	if sep == "" {
		sep = "/"
	}
	prefix = kv.domain + prefix
	lenPrefix := len(prefix)
	lenSep := len(sep)
	// The original sliced prefix[lenPrefix-lenSep:], which panics when the
	// combined prefix is shorter than sep (e.g. empty domain and empty
	// prefix). HasSuffix performs the same check safely.
	if !strings.HasSuffix(prefix, sep) {
		prefix += sep
		lenPrefix += lenSep
	}
	seen := make(map[string]bool)
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	for k := range kv.m {
		if strings.HasPrefix(k, prefix) && !strings.Contains(k, "/_") {
			// Keep only the first path segment after the prefix, and
			// de-duplicate via the seen set.
			key := k[lenPrefix:]
			if idx := strings.Index(key, sep); idx > 0 {
				key = key[:idx]
			}
			seen[key] = true
		}
	}
	retList := make([]string, len(seen))
	i := 0
	for k := range seen {
		retList[i] = k
		i++
	}
	return retList, nil
}
// CompareAndSet overwrites kvp.Key only if the stored value matches
// prevValue (when given) and, with the KVModifiedIndex flag, only if the
// stored ModifiedIndex matches kvp.ModifiedIndex.
func (kv *memKV) CompareAndSet(
	kvp *kvdb.KVPair,
	flags kvdb.KVFlags,
	prevValue []byte,
) (*kvdb.KVPair, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	result, err := kv.exists(kvp.Key)
	if err != nil {
		return nil, err
	}
	if prevValue != nil {
		cpy := result.copy()
		if !bytes.Equal(cpy.Value, prevValue) {
			return nil, kvdb.ErrValueMismatch
		}
	}
	// NOTE(review): this uses exact equality (flags == KVModifiedIndex)
	// while CompareAndDelete below tests the bit with a mask — confirm
	// whether combined flags should also trigger the index check here.
	if flags == kvdb.KVModifiedIndex {
		if kvp.ModifiedIndex != result.ModifiedIndex {
			return nil, kvdb.ErrValueMismatch
		}
	}
	return kv.put(kvp.Key, kvp.Value, 0)
}
// CompareAndDelete deletes kvp.Key only if the guard passes: with the
// KVModifiedIndex flag a mismatched index fails with ErrModified;
// otherwise the stored value must equal kvp.Value.
func (kv *memKV) CompareAndDelete(
	kvp *kvdb.KVPair,
	flags kvdb.KVFlags,
) (*kvdb.KVPair, error) {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	result, err := kv.exists(kvp.Key)
	if err != nil {
		return nil, err
	}
	// NOTE(review): when the index flag is set AND the index matches, the
	// else branch still compares values — confirm the value check is
	// intended to apply in that case as well.
	if flags&kvdb.KVModifiedIndex > 0 && result.ModifiedIndex != kvp.ModifiedIndex {
		return nil, kvdb.ErrModified
	} else {
		cpy := result.copy()
		if !bytes.Equal(cpy.Value, kvp.Value) {
			return nil, kvdb.ErrNotFound
		}
	}
	return kv.delete(kvp.Key)
}
// WatchKey registers cb to be invoked for every update to exactly key,
// starting after waitIndex (0 means all). Delivery happens on a separate
// goroutine fed by the watch distributor.
func (kv *memKV) WatchKey(
	key string,
	waitIndex uint64,
	opaque interface{},
	cb kvdb.WatchCB,
) error {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	key = kv.domain + key
	go kv.watchCb(kv.dist.Add(), key,
		&watchData{cb: cb, waitIndex: waitIndex, opaque: opaque},
		false)
	return nil
}
// WatchTree is like WatchKey but matches every key under prefix.
func (kv *memKV) WatchTree(
	prefix string,
	waitIndex uint64,
	opaque interface{},
	cb kvdb.WatchCB,
) error {
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	prefix = kv.domain + prefix
	go kv.watchCb(kv.dist.Add(), prefix,
		&watchData{cb: cb, waitIndex: waitIndex, opaque: opaque},
		true)
	return nil
}
// Lock acquires key with the default locker id.
func (kv *memKV) Lock(key string) (*kvdb.KVPair, error) {
	return kv.LockWithID(key, "locked")
}
// LockWithID acquires key with the default try duration and the
// instance-configured hold timeout.
func (kv *memKV) LockWithID(
	key string,
	lockerID string,
) (*kvdb.KVPair, error) {
	return kv.LockWithTimeout(key, lockerID, kvdb.DefaultLockTryDuration, kv.GetLockTimeout())
}
// LockWithTimeout acquires key by spinning on Create (the key acting as
// the lock) for up to lockTryDuration. Once held, a watchdog fires
// kv.LockTimedout if the lock is kept longer than lockHoldDuration and is
// cancelled by Unlock via the per-key channel in kv.locks.
func (kv *memKV) LockWithTimeout(
	key string,
	lockerID string,
	lockTryDuration time.Duration,
	lockHoldDuration time.Duration,
) (*kvdb.KVPair, error) {
	key = kv.domain + key
	duration := time.Second
	// NOTE(review): uint64(duration*3) is 3e9 nanoseconds but put()
	// treats ttl as seconds, so the TTL is effectively "never" — confirm
	// the intended expiry.
	result, err := kv.Create(key, lockerID, uint64(duration*3))
	startTime := time.Now()
	for count := 0; err != nil; count++ {
		time.Sleep(duration)
		result, err = kv.Create(key, lockerID, uint64(duration*3))
		if err != nil && count > 0 && count%15 == 0 {
			var currLockerID string
			// Fixed: GetVal json-decodes into its argument, which must be
			// a pointer. The original passed currLockerID by value, so the
			// decode always failed and this diagnostic never logged.
			if _, errGet := kv.GetVal(key, &currLockerID); errGet == nil {
				logrus.Infof("Lock %v locked for %v seconds, tag: %v",
					key, count, currLockerID)
			}
		}
		if err != nil && time.Since(startTime) > lockTryDuration {
			return nil, err
		}
	}
	if err != nil {
		return nil, err
	}
	// Remember the lock so Unlock can cancel the hold-timeout watchdog.
	lockChan := make(chan int)
	kv.mutex.Lock()
	kv.locks[key] = lockChan
	kv.mutex.Unlock()
	if lockHoldDuration > 0 {
		go func() {
			timeout := time.After(lockHoldDuration)
			for {
				select {
				case <-timeout:
					kv.LockTimedout(key)
				case <-lockChan:
					return
				}
			}
		}()
	}
	return result, err
}
// Unlock releases a lock acquired via Lock*: it cancels the hold-timeout
// watchdog (by closing the per-key channel) and deletes the lock key with
// a value guard so only the matching lock pair releases it.
func (kv *memKV) Unlock(kvp *kvdb.KVPair) error {
	kv.mutex.Lock()
	lockChan, ok := kv.locks[kvp.Key]
	if ok {
		delete(kv.locks, kvp.Key)
	}
	kv.mutex.Unlock()
	if lockChan != nil {
		close(lockChan)
	}
	_, err := kv.CompareAndDelete(kvp, kvdb.KVFlags(0))
	return err
}
// EnumerateWithSelect walks all interface-valued entries under prefix,
// keeping those accepted by enumerateSelect and returning the result of
// copySelect for each. Only meaningful with the KvUseInterface option,
// since it operates on ivalue.
func (kv *memKV) EnumerateWithSelect(
	prefix string,
	enumerateSelect kvdb.EnumerateSelect,
	copySelect kvdb.CopySelect,
) ([]interface{}, error) {
	if enumerateSelect == nil || copySelect == nil {
		return nil, ErrIllegalSelect
	}
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	var kvi []interface{}
	prefix = kv.domain + prefix
	for k, v := range kv.m {
		if strings.HasPrefix(k, prefix) && !strings.Contains(k, "/_") {
			if enumerateSelect(v.ivalue) {
				cpy := copySelect(v.ivalue)
				// A nil copy signals a broken CopySelect implementation.
				if cpy == nil {
					return nil, ErrIllegalSelect
				}
				kvi = append(kvi, cpy)
			}
		}
	}
	return kvi, nil
}
// EnumerateKVPWithSelect is like EnumerateWithSelect but the select
// callbacks also receive the KVPair metadata.
func (kv *memKV) EnumerateKVPWithSelect(
	prefix string,
	enumerateSelect kvdb.EnumerateKVPSelect,
	copySelect kvdb.CopyKVPSelect,
) (kvdb.KVPairs, error) {
	if enumerateSelect == nil || copySelect == nil {
		return nil, ErrIllegalSelect
	}
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	var kvi kvdb.KVPairs
	prefix = kv.domain + prefix
	for k, v := range kv.m {
		if strings.HasPrefix(k, prefix) && !strings.Contains(k, "/_") {
			if enumerateSelect(&v.KVPair, v.ivalue) {
				cpy := copySelect(&v.KVPair, v.ivalue)
				if cpy == nil {
					return nil, ErrIllegalSelect
				}
				kvi = append(kvi, cpy)
			}
		}
	}
	return kvi, nil
}
// GetWithCopy fetches key and returns copySelect applied to its stored
// interface value.
func (kv *memKV) GetWithCopy(
	key string,
	copySelect kvdb.CopySelect,
) (interface{}, error) {
	if copySelect == nil {
		return nil, ErrIllegalSelect
	}
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	kvp, err := kv.get(key)
	if err != nil {
		return nil, err
	}
	return copySelect(kvp.ivalue), nil
}
// TxNew is not supported by the in-memory driver.
func (kv *memKV) TxNew() (kvdb.Tx, error) {
	return nil, kvdb.ErrNotSupported
}
// normalize strips the domain prefix so callers see domain-relative keys.
func (kv *memKV) normalize(kvp *kvdb.KVPair) {
	kvp.Key = strings.TrimPrefix(kvp.Key, kv.domain)
}
// copyWatchKeys returns the keys of the given watch map as a new slice.
// Iteration order over a map is unspecified, so the order of the result
// is arbitrary.
func copyWatchKeys(w map[string]*watchData) []string {
	keys := make([]string, 0, len(w))
	for key := range w {
		keys = append(keys, key)
	}
	return keys
}
// watchCb drains updates from q and relays those matching prefix (exact
// key match, or prefix match when treeWatch is set) to the watcher's
// callback. It runs until the callback returns an error, then reports
// ErrWatchStopped once and unsubscribes the queue from the distributor.
func (kv *memKV) watchCb(
	q WatchUpdateQueue,
	prefix string,
	v *watchData,
	treeWatch bool,
) {
	for {
		update := q.Dequeue()
		// Deliver only matching updates newer than the watcher's start
		// index (waitIndex == 0 means deliver everything).
		if ((treeWatch && strings.HasPrefix(update.key, prefix)) ||
			(!treeWatch && update.key == prefix)) &&
			(v.waitIndex == 0 || v.waitIndex < update.kvp.ModifiedIndex) {
			kvpCopy := update.kvp.copy()
			err := v.cb(update.key, v.opaque, kvpCopy, update.err)
			if err != nil {
				_ = v.cb("", v.opaque, nil, kvdb.ErrWatchStopped)
				kv.dist.Remove(q)
				return
			}
		}
	}
}
// SnapPut is not supported on the live in-memory kvdb.
func (kv *memKV) SnapPut(snapKvp *kvdb.KVPair) (*kvdb.KVPair, error) {
	return nil, kvdb.ErrNotSupported
}
// SnapPut inserts or replaces a pair in the snapshot, preserving the
// index metadata carried by snapKvp rather than minting new indices.
func (kv *snapMem) SnapPut(snapKvp *kvdb.KVPair) (*kvdb.KVPair, error) {
	var kvp *memKVPair
	key := kv.domain + snapKvp.Key
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	if old, ok := kv.m[key]; ok {
		old.Value = snapKvp.Value
		old.Action = kvdb.KVSet
		old.ModifiedIndex = snapKvp.ModifiedIndex
		old.KVDBIndex = snapKvp.KVDBIndex
		kvp = old
	} else {
		kvp = &memKVPair{
			KVPair: kvdb.KVPair{
				Key: key,
				Value: snapKvp.Value,
				TTL: 0,
				KVDBIndex: snapKvp.KVDBIndex,
				ModifiedIndex: snapKvp.ModifiedIndex,
				CreatedIndex: snapKvp.CreatedIndex,
				Action: kvdb.KVCreate,
			},
		}
		kv.m[key] = kvp
	}
	kv.normalize(&kvp.KVPair)
	return &kvp.KVPair, nil
}
// Put is rejected: snapshots are not writable through the kvdb API.
func (kv *snapMem) Put(
	key string,
	value interface{},
	ttl uint64,
) (*kvdb.KVPair, error) {
	return nil, ErrSnap
}
// Create is rejected on a snapshot.
func (kv *snapMem) Create(
	key string,
	value interface{},
	ttl uint64,
) (*kvdb.KVPair, error) {
	return nil, ErrSnap
}
// Update is rejected on a snapshot.
func (kv *snapMem) Update(
	key string,
	value interface{},
	ttl uint64,
) (*kvdb.KVPair, error) {
	return nil, ErrSnap
}
// Delete removes a key from the snapshot directly, without bumping
// indices or notifying watchers (snapshots have none).
func (kv *snapMem) Delete(snapKey string) (*kvdb.KVPair, error) {
	key := kv.domain + snapKey
	kv.mutex.Lock()
	defer kv.mutex.Unlock()
	kvp, ok := kv.m[key]
	if !ok {
		return nil, kvdb.ErrNotFound
	}
	kvPair := kvp.KVPair
	delete(kv.m, key)
	return &kvPair, nil
}
// DeleteTree is rejected on a snapshot.
func (kv *snapMem) DeleteTree(prefix string) error {
	return ErrSnap
}
// CompareAndSet is rejected on a snapshot.
func (kv *snapMem) CompareAndSet(
	kvp *kvdb.KVPair,
	flags kvdb.KVFlags,
	prevValue []byte,
) (*kvdb.KVPair, error) {
	return nil, ErrSnap
}
// CompareAndDelete is rejected on a snapshot.
func (kv *snapMem) CompareAndDelete(
	kvp *kvdb.KVPair,
	flags kvdb.KVFlags,
) (*kvdb.KVPair, error) {
	return nil, ErrSnap
}
// WatchKey is rejected: snapshots never change, so there is nothing to watch.
func (kv *snapMem) WatchKey(
	key string,
	waitIndex uint64,
	opaque interface{},
	watchCB kvdb.WatchCB,
) error {
	return ErrSnap
}
// WatchTree is rejected on a snapshot.
func (kv *snapMem) WatchTree(
	prefix string,
	waitIndex uint64,
	opaque interface{},
	watchCB kvdb.WatchCB,
) error {
	return ErrSnap
}
// AddUser is not supported: the in-memory driver has no authentication.
func (kv *memKV) AddUser(username string, password string) error {
	return kvdb.ErrNotSupported
}
// RemoveUser is not supported.
func (kv *memKV) RemoveUser(username string) error {
	return kvdb.ErrNotSupported
}
// GrantUserAccess is not supported.
func (kv *memKV) GrantUserAccess(
	username string,
	permType kvdb.PermissionType,
	subtree string,
) error {
	return kvdb.ErrNotSupported
}
// RevokeUsersAccess is not supported.
func (kv *memKV) RevokeUsersAccess(
	username string,
	permType kvdb.PermissionType,
	subtree string,
) error {
	return kvdb.ErrNotSupported
}
// Serialize dumps the entire store (all keys under the domain) using the
// common serialization helper.
func (kv *memKV) Serialize() ([]byte, error) {
	kvps, err := kv.Enumerate("")
	if err != nil {
		return nil, err
	}
	return kv.SerializeAll(kvps)
}
// Deserialize decodes pairs previously produced by Serialize.
func (kv *memKV) Deserialize(b []byte) (kvdb.KVPairs, error) {
	return kv.DeserializeAll(b)
}
| apache-2.0 |
plashchynski/crono | spec/web_spec.rb | 1684 | require 'spec_helper'
require 'rack/test'
include Rack::Test::Methods
# Rack::Test specs for the Crono::Web dashboard: the index page listing
# all jobs with their health badges, and the per-job detail page.
describe Crono::Web do
  let(:app) { Crono::Web }
  before do
    # Start from a clean table and seed a single known job record.
    Crono::CronoJob.destroy_all
    @test_job_id = 'Perform TestJob every 5 seconds'
    @test_job_log = 'All runs ok'
    @test_job = Crono::CronoJob.create!(
      job_id: @test_job_id,
      log: @test_job_log
    )
  end
  after { @test_job.destroy }
  describe '/' do
    it 'should show all jobs' do
      get '/'
      expect(last_response).to be_ok
      expect(last_response.body).to include @test_job_id
    end
    it 'should show a error mark when a job is unhealthy' do
      @test_job.update(healthy: false, last_performed_at: 10.minutes.ago)
      get '/'
      expect(last_response.body).to include 'Error'
    end
    it 'should show a success mark when a job is healthy' do
      @test_job.update(healthy: true, last_performed_at: 10.minutes.ago)
      get '/'
      expect(last_response.body).to include 'Success'
    end
    it 'should show a pending mark when a job is pending' do
      # healthy: nil means the job has not been performed yet.
      @test_job.update(healthy: nil)
      get '/'
      expect(last_response.body).to include 'Pending'
    end
  end
  describe '/job/:id' do
    it 'should show job log' do
      get "/job/#{@test_job.id}"
      expect(last_response).to be_ok
      expect(last_response.body).to include @test_job_id
      expect(last_response.body).to include @test_job_log
    end
    it 'should show a message about the unhealthy job' do
      message = 'An error occurs during the last execution of this job'
      @test_job.update(healthy: false)
      get "/job/#{@test_job.id}"
      expect(last_response.body).to include message
    end
  end
end
| apache-2.0 |
goodwinnk/intellij-community | platform/util/src/com/intellij/util/ref/GCUtil.java | 3604 | /*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util.ref;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.TestOnly;
import java.beans.Introspector;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.SoftReference;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
public class GCUtil {
  /**
   * Try to force VM to collect all the garbage along with soft- and weak-references.
   * Method doesn't guarantee to succeed, and should not be used in the production code.
   */
  @TestOnly
  public static void tryForceGC() {
    tryGcSoftlyReachableObjects();
    // Loop until a freshly created weak reference is observed cleared,
    // which indicates a full collection actually happened.
    WeakReference<Object> weakReference = new WeakReference<Object>(new Object());
    do {
      System.gc();
    }
    while (weakReference.get() != null);
  }
  /**
   * Try to force VM to collect soft references if possible.
   * Method doesn't guarantee to succeed, and should not be used in the production code.
   * Commits / hours optimized method code: 5 / 3
   */
  @TestOnly
  public static void tryGcSoftlyReachableObjects() {
    //long started = System.nanoTime();
    ReferenceQueue<Object> q = new ReferenceQueue<Object>();
    SoftReference<Object> ref = new SoftReference<Object>(new Object(), q);
    // Plain ArrayList with the same initial capacity as the original
    // ContainerUtil.newArrayListWithCapacity call (stdlib over project helper).
    ArrayList<SoftReference<?>> list = new ArrayList<SoftReference<?>>(100 + useReference(ref));
    System.gc();
    final long freeMemory = Runtime.getRuntime().freeMemory();
    int i = 0;
    while (q.poll() == null) {
      // full gc is caused by allocation of large enough array below, SoftReference will be cleared after two full gc
      int bytes = Math.min((int)(freeMemory * 0.05), Integer.MAX_VALUE / 2);
      list.add(new SoftReference<Object>(new byte[bytes]));
      i++;
      if (i > 1000) {
        //noinspection UseOfSystemOutOrSystemErr
        System.out.println("GCUtil.tryGcSoftlyReachableObjects: giving up");
        break;
      }
    }
    // use ref is important as to loop to finish with several iterations: long runs of the method (~80 run of PsiModificationTrackerTest)
    // discovered 'ref' being collected and loop iterated 100 times taking a lot of time
    list.ensureCapacity(list.size() + useReference(ref));
    // do not leave a chance for our created SoftReference's content to lie around until next full GC's
    // (parameterized SoftReference<?> instead of the original raw type)
    for (SoftReference<?> createdReference : list) createdReference.clear();
    //System.out.println("Done gc'ing refs:" + ((System.nanoTime() - started) / 1000000));
  }
  /**
   * Keeps {@code ref} strongly reachable at this point in the code and derives a
   * small, stable value from it so callers can fold it into capacity math.
   */
  private static int useReference(SoftReference<Object> ref) {
    Object o = ref.get();
    return o == null ? 0 : Math.abs(o.hashCode()) % 10;
  }
  /**
   * Using java beans (e.g. Groovy does it) results in all referenced class infos being cached in ThreadGroupContext. A valid fix
   * would be to hold BeanInfo objects on soft references, but that should be done in JDK. So let's clear this cache manually for now,
   * in clients that are known to create bean infos.
   */
  public static void clearBeanInfoCache() {
    Introspector.flushCaches();
  }
}
| apache-2.0 |
igniterealtime/Smack | smack-extensions/src/main/java/org/jivesoftware/smackx/bookmarks/Bookmarks.java | 10216 | /**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.bookmarks;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.jivesoftware.smack.util.ParserUtils;
import org.jivesoftware.smack.util.XmlStringBuilder;
import org.jivesoftware.smack.xml.XmlPullParser;
import org.jivesoftware.smack.xml.XmlPullParserException;
import org.jivesoftware.smackx.iqprivate.packet.PrivateData;
import org.jivesoftware.smackx.iqprivate.provider.PrivateDataProvider;
import org.jxmpp.jid.EntityBareJid;
import org.jxmpp.jid.parts.Resourcepart;
/**
* Bookmarks is used for storing and retrieving URLS and Conference rooms.
* Bookmark Storage (XEP-0048) defined a protocol for the storage of bookmarks to conference rooms and other entities
* in a Jabber user's account.
* See the following code sample for saving Bookmarks:
* <pre>
* XMPPConnection con = new XMPPTCPConnection("jabber.org");
* con.login("john", "doe");
* Bookmarks bookmarks = new Bookmarks();
* // Bookmark a URL
* BookmarkedURL url = new BookmarkedURL();
* url.setName("Google");
* url.setURL("http://www.jivesoftware.com");
* bookmarks.addURL(url);
* // Bookmark a Conference room.
* BookmarkedConference conference = new BookmarkedConference();
* conference.setName("My Favorite Room");
* conference.setAutoJoin("true");
* conference.setJID("[email protected]");
* bookmarks.addConference(conference);
* // Save Bookmarks using PrivateDataManager.
* PrivateDataManager manager = new PrivateDataManager(con);
* manager.setPrivateData(bookmarks);
* LastActivity activity = LastActivity.getLastActivity(con, "[email protected]");
* </pre>
*
* @author Derek DeMoro
*/
public class Bookmarks implements PrivateData {
    public static final String NAMESPACE = "storage:bookmarks";
    public static final String ELEMENT = "storage";
    // Backing lists for the two bookmark categories; never null.
    private final List<BookmarkedURL> bookmarkedURLS;
    private final List<BookmarkedConference> bookmarkedConferences;
    /**
     * Required Empty Constructor to use Bookmarks.
     */
    public Bookmarks() {
        bookmarkedURLS = new ArrayList<>();
        bookmarkedConferences = new ArrayList<>();
    }
    /**
     * Adds a BookmarkedURL.
     *
     * @param bookmarkedURL the bookmarked bookmarkedURL.
     */
    public void addBookmarkedURL(BookmarkedURL bookmarkedURL) {
        bookmarkedURLS.add(bookmarkedURL);
    }
    /**
     * Removes a bookmarked bookmarkedURL.
     *
     * @param bookmarkedURL the bookmarked bookmarkedURL to remove.
     */
    public void removeBookmarkedURL(BookmarkedURL bookmarkedURL) {
        bookmarkedURLS.remove(bookmarkedURL);
    }
    /**
     * Removes all BookmarkedURLs from user's bookmarks.
     */
    public void clearBookmarkedURLS() {
        bookmarkedURLS.clear();
    }
    /**
     * Add a BookmarkedConference to bookmarks.
     *
     * @param bookmarkedConference the conference to remove.
     */
    public void addBookmarkedConference(BookmarkedConference bookmarkedConference) {
        bookmarkedConferences.add(bookmarkedConference);
    }
    /**
     * Removes a BookmarkedConference.
     *
     * @param bookmarkedConference the BookmarkedConference to remove.
     */
    public void removeBookmarkedConference(BookmarkedConference bookmarkedConference) {
        bookmarkedConferences.remove(bookmarkedConference);
    }
    /**
     * Removes all BookmarkedConferences from Bookmarks.
     */
    public void clearBookmarkedConferences() {
        bookmarkedConferences.clear();
    }
    /**
     * Returns a Collection of all Bookmarked URLs for this user.
     * Note: this is the live internal list, not a defensive copy.
     *
     * @return a collection of all Bookmarked URLs.
     */
    public List<BookmarkedURL> getBookmarkedURLS() {
        return bookmarkedURLS;
    }
    /**
     * Returns a Collection of all Bookmarked Conference for this user.
     * Note: this is the live internal list, not a defensive copy.
     *
     * @return a collection of all Bookmarked Conferences.
     */
    public List<BookmarkedConference> getBookmarkedConferences() {
        return bookmarkedConferences;
    }
    /**
     * Returns the root element name.
     *
     * @return the element name.
     */
    @Override
    public String getElementName() {
        return ELEMENT;
    }
    /**
     * Returns the root element XML namespace.
     *
     * @return the namespace.
     */
    @Override
    public String getNamespace() {
        return NAMESPACE;
    }
    /**
     * Returns the XML representation of the PrivateData.
     * Entries flagged as shared are skipped: only privately owned
     * bookmarks are written to private storage.
     *
     * @return the private data as XML.
     */
    @Override
    public XmlStringBuilder toXML() {
        XmlStringBuilder buf = new XmlStringBuilder();
        buf.halfOpenElement(ELEMENT).xmlnsAttribute(NAMESPACE).rightAngleBracket();
        // <url name=".." url=".." [rss="true"]/> for each private URL bookmark.
        for (BookmarkedURL urlStorage : getBookmarkedURLS()) {
            if (urlStorage.isShared()) {
                continue;
            }
            buf.halfOpenElement("url").attribute("name", urlStorage.getName()).attribute("url", urlStorage.getURL());
            buf.condAttribute(urlStorage.isRss(), "rss", "true");
            buf.closeEmptyElement();
        }
        // Add Conference additions
        for (BookmarkedConference conference : getBookmarkedConferences()) {
            if (conference.isShared()) {
                continue;
            }
            buf.halfOpenElement("conference");
            buf.attribute("name", conference.getName());
            buf.attribute("autojoin", Boolean.toString(conference.isAutoJoin()));
            buf.attribute("jid", conference.getJid());
            buf.rightAngleBracket();
            buf.optElement("nick", conference.getNickname());
            buf.optElement("password", conference.getPassword());
            buf.closeElement("conference");
        }
        buf.closeElement(ELEMENT);
        return buf;
    }
    /**
     * The IQ Provider for BookmarkStorage.
     *
     * @author Derek DeMoro
     */
    public static class Provider implements PrivateDataProvider {
        /**
         * Empty Constructor for PrivateDataProvider.
         */
        public Provider() {
            super();
        }
        /**
         * Parses a &lt;storage/&gt; element, collecting each &lt;url/&gt; and
         * &lt;conference/&gt; child into a new Bookmarks instance. Parsing
         * stops at the closing storage element.
         */
        @Override
        public PrivateData parsePrivateData(XmlPullParser parser) throws XmlPullParserException, IOException {
            Bookmarks storage = new Bookmarks();
            boolean done = false;
            while (!done) {
                XmlPullParser.Event eventType = parser.next();
                if (eventType == XmlPullParser.Event.START_ELEMENT && "url".equals(parser.getName())) {
                    final BookmarkedURL urlStorage = getURLStorage(parser);
                    if (urlStorage != null) {
                        storage.addBookmarkedURL(urlStorage);
                    }
                }
                else if (eventType == XmlPullParser.Event.START_ELEMENT &&
                        "conference".equals(parser.getName())) {
                    final BookmarkedConference conference = getConferenceStorage(parser);
                    storage.addBookmarkedConference(conference);
                }
                else if (eventType == XmlPullParser.Event.END_ELEMENT && "storage".equals(parser.getName())) {
                    done = true;
                }
            }
            return storage;
        }
    }
    /**
     * Parses a single &lt;url/&gt; bookmark element, including an optional
     * nested &lt;shared_bookmark/&gt; marker, consuming input up to and
     * including the closing url element.
     *
     * @param parser the parser positioned at the opening url element.
     * @return the parsed URL bookmark.
     * @throws IOException if an error occurs while reading the stream.
     * @throws XmlPullParserException if the XML is malformed.
     */
    private static BookmarkedURL getURLStorage(XmlPullParser parser) throws IOException, XmlPullParserException {
        String name = parser.getAttributeValue("", "name");
        String url = parser.getAttributeValue("", "url");
        String rssString = parser.getAttributeValue("", "rss");
        boolean rss = rssString != null && "true".equals(rssString);
        BookmarkedURL urlStore = new BookmarkedURL(url, name, rss);
        boolean done = false;
        while (!done) {
            XmlPullParser.Event eventType = parser.next();
            if (eventType == XmlPullParser.Event.START_ELEMENT
                    && "shared_bookmark".equals(parser.getName())) {
                urlStore.setShared(true);
            }
            else if (eventType == XmlPullParser.Event.END_ELEMENT && "url".equals(parser.getName())) {
                done = true;
            }
        }
        return urlStore;
    }
    /**
     * Parses a single &lt;conference/&gt; bookmark element and its optional
     * &lt;nick/&gt;, &lt;password/&gt; and &lt;shared_bookmark/&gt; children,
     * consuming input up to and including the closing conference element.
     *
     * @param parser the parser positioned at the opening conference element.
     * @return the parsed conference bookmark.
     * @throws XmlPullParserException if the XML is malformed.
     * @throws IOException if an error occurs while reading the stream.
     */
    private static BookmarkedConference getConferenceStorage(XmlPullParser parser) throws XmlPullParserException, IOException {
        String name = parser.getAttributeValue("", "name");
        boolean autojoin = ParserUtils.getBooleanAttribute(parser, "autojoin", false);
        EntityBareJid jid = ParserUtils.getBareJidAttribute(parser);
        BookmarkedConference conf = new BookmarkedConference(jid);
        conf.setName(name);
        conf.setAutoJoin(autojoin);
        // Check for nickname
        boolean done = false;
        while (!done) {
            XmlPullParser.Event eventType = parser.next();
            if (eventType == XmlPullParser.Event.START_ELEMENT && "nick".equals(parser.getName())) {
                String nickString = parser.nextText();
                conf.setNickname(Resourcepart.from(nickString));
            }
            else if (eventType == XmlPullParser.Event.START_ELEMENT && "password".equals(parser.getName())) {
                conf.setPassword(parser.nextText());
            }
            else if (eventType == XmlPullParser.Event.START_ELEMENT
                    && "shared_bookmark".equals(parser.getName())) {
                conf.setShared(true);
            }
            else if (eventType == XmlPullParser.Event.END_ELEMENT && "conference".equals(parser.getName())) {
                done = true;
            }
        }
        return conf;
    }
}
| apache-2.0 |
hong1012/FreePay | node_modules/rc-editor-mention/README.md | 3226 | # rc-editor-mention
---
React Mention Component
[![NPM version][npm-image]][npm-url]
[![build status][travis-image]][travis-url]
[![Test coverage][coveralls-image]][coveralls-url]
[![gemnasium deps][gemnasium-image]][gemnasium-url]
[![node version][node-image]][node-url]
[![npm download][download-image]][download-url]
[](https://saucelabs.com/u/rc-editor-mention)
[](https://saucelabs.com/u/rc-editor-mention)
[npm-image]: http://img.shields.io/npm/v/rc-editor-mention.svg?style=flat-square
[npm-url]: http://npmjs.org/package/rc-editor-mention
[travis-image]: https://img.shields.io/travis/react-component/mention.svg?style=flat-square
[travis-url]: https://travis-ci.org/react-component/mention
[coveralls-image]: https://img.shields.io/coveralls/react-component/mention.svg?style=flat-square
[coveralls-url]: https://coveralls.io/r/react-component/mention?branch=master
[gemnasium-image]: http://img.shields.io/gemnasium/react-component/mention.svg?style=flat-square
[gemnasium-url]: https://gemnasium.com/react-component/mention
[node-image]: https://img.shields.io/badge/node.js-%3E=_0.10-green.svg?style=flat-square
[node-url]: http://nodejs.org/download/
[download-image]: https://img.shields.io/npm/dm/rc-editor-mention.svg?style=flat-square
[download-url]: https://npmjs.org/package/rc-editor-mention
## Browser Support
| |  |  |  | |
| --- | --- | --- | --- | --- |
| IE 8+ ✔ | Chrome 31.0+ ✔ | Firefox 31.0+ ✔ | Opera 30.0+ ✔ | Safari 7.0+ ✔ |
## Screenshots
<img src="" width="288" alt="rc-editor-mention screenshot"/> <!-- TODO: add a screenshot URL to src -->
## Development
```
npm install
npm start
```
## Example
http://localhost:8001/examples/
online example: http://react-component.github.io/mention/
## Feature
* supports IE 8+, Chrome, Firefox and Safari
## install
[](https://npmjs.org/package/rc-editor-mention)
## Usage
```js
var Mention = require('rc-editor-mention');
var React = require('react');
React.render(<Mention />, container);
```
## API
### props
<table class="table table-bordered table-striped">
<thead>
<tr>
<th style="width: 100px;">name</th>
<th style="width: 50px;">type</th>
<th style="width: 50px;">default</th>
<th>description</th>
</tr>
</thead>
<tbody>
<tr>
<td>className</td>
<td>String</td>
<td></td>
<td>additional css class of root dom node</td>
</tr>
</tbody>
</table>
## Test Case
```
npm test
npm run chrome-test
```
## Coverage
```
npm run coverage
```
open coverage/ dir
## License
rc-editor-mention is released under the MIT license.
| apache-2.0 |
janstey/fuse | bai/bai-core/src/main/java/org/fusesource/bai/config/package-info.java | 1208 | /*
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@XmlSchema(namespace = "http://fuse.fusesource.org/schema/bai",
xmlns = {
@XmlNs(namespaceURI = Namespaces.DEFAULT_NAMESPACE, prefix = "c"),
@XmlNs(namespaceURI = AuditConstants.AUDIT_NAMESPACE, prefix = AuditConstants.EXPRESSION_NAMESPACE_PREFIX)
},
elementFormDefault = javax.xml.bind.annotation.XmlNsForm.QUALIFIED) package org.fusesource.bai.config;
import org.apache.camel.builder.xml.Namespaces;
import org.fusesource.bai.AuditConstants;
import javax.xml.bind.annotation.XmlNs;
import javax.xml.bind.annotation.XmlSchema; | apache-2.0 |
krkhan/azure-powershell | src/ResourceManager/SiteRecovery/Commands.SiteRecovery/help/Get-AzureRmSiteRecoveryPolicy.md | 1771 | ---
external help file: Microsoft.Azure.Commands.SiteRecovery.dll-Help.xml
ms.assetid: 07F9EE13-9874-42FC-A17E-7615419F1381
online version:
schema: 2.0.0
---
# Get-AzureRmSiteRecoveryPolicy
## SYNOPSIS
Gets Site Recovery protection policies.
## SYNTAX
### Default (Default)
```
Get-AzureRmSiteRecoveryPolicy [<CommonParameters>]
```
### ByName
```
Get-AzureRmSiteRecoveryPolicy -Name <String> [<CommonParameters>]
```
### ByFriendlyName
```
Get-AzureRmSiteRecoveryPolicy -FriendlyName <String> [<CommonParameters>]
```
## DESCRIPTION
The **Get-AzureRmSiteRecoveryPolicy** cmdlet gets the list of configured Azure Site Recovery protection policies or a specific protection policy by name.
## EXAMPLES
## PARAMETERS
### -FriendlyName
Specifies the friendly name of the Site Recovery replication policy.
```yaml
Type: String
Parameter Sets: ByFriendlyName
Aliases:
Required: True
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -Name
Specifies the name of the Site Recovery replication policy.
```yaml
Type: String
Parameter Sets: ByName
Aliases:
Required: True
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### CommonParameters
This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see about_CommonParameters (http://go.microsoft.com/fwlink/?LinkID=113216).
## INPUTS
## OUTPUTS
## NOTES
## RELATED LINKS
[New-AzureRmSiteRecoveryPolicy](./New-AzureRmSiteRecoveryPolicy.md)
[Remove-AzureRmSiteRecoveryPolicy](./Remove-AzureRmSiteRecoveryPolicy.md)
| apache-2.0 |
ChrisA89/assertj-core | src/test/java/org/assertj/core/error/future/ShouldHaveCompletedExceptionally_create_Test.java | 1500 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.error.future;
import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.error.future.ShouldBeCompletedExceptionally.shouldHaveCompletedExceptionally;
import java.util.concurrent.CompletableFuture;
import org.assertj.core.internal.TestDescription;
import org.junit.Test;
/**
 * Verifies the error message produced by the
 * {@code shouldHaveCompletedExceptionally} factory (declared in
 * {@code ShouldBeCompletedExceptionally}) for an incomplete
 * {@link CompletableFuture}.
 */
public class ShouldHaveCompletedExceptionally_create_Test {
  @Test
  public void should_create_error_message() throws Exception {
    // create(...) renders the message with the "[TEST]" description prefix.
    String error = shouldHaveCompletedExceptionally(new CompletableFuture<Object>()).create(new TestDescription("TEST"));
    assertThat(error).isEqualTo(format("[TEST] %n" +
        "Expecting%n" +
        " <CompletableFuture[Incomplete]>%n" +
        "to be completed exceptionally"));
  }
}
| apache-2.0 |
opener-project/nerc-fr | opennlp/docs/apidocs/opennlp-tools/opennlp/tools/tokenize/lang/en/package-use.html | 5743 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.5.0_22) on Wed Apr 03 10:03:18 GMT-03:00 2013 -->
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<TITLE>
Uses of Package opennlp.tools.tokenize.lang.en (Apache OpenNLP Tools 1.5.3 API)
</TITLE>
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
parent.document.title="Uses of Package opennlp.tools.tokenize.lang.en (Apache OpenNLP Tools 1.5.3 API)";
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?opennlp/tools/tokenize/lang/en/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Package<br>opennlp.tools.tokenize.lang.en</B></H2>
</CENTER>
No usage of opennlp.tools.tokenize.lang.en
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?opennlp/tools/tokenize/lang/en/package-use.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-use.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2013 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All Rights Reserved.
</BODY>
</HTML>
| apache-2.0 |
alexgarciac/biotea | src/elsevier/jaxb/math/mathml/Mglyph.java | 2890 | //
// This file was pubmed.openAccess.jaxb.generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2011.06.04 at 07:58:30 PM BST
//
package elsevier.jaxb.math.mathml;
import java.math.BigInteger;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for mglyph.type complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="mglyph.type">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attGroup ref="{http://www.w3.org/1998/Math/MathML}mglyph.attlist"/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated class (see the header of this file); edit the schema
// and regenerate rather than hand-modifying the bindings.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "mglyph.type")
@XmlRootElement(name = "mglyph")
public class Mglyph {

    // Maps to the "alt" XML attribute of <mglyph>.
    @XmlAttribute
    protected String alt;
    // Maps to the "fontfamily" XML attribute of <mglyph>.
    @XmlAttribute
    protected String fontfamily;
    // Maps to the "index" XML attribute; constrained to xs:positiveInteger.
    @XmlAttribute
    @XmlSchemaType(name = "positiveInteger")
    protected BigInteger index;

    /**
     * Gets the value of the alt property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getAlt() {
        return alt;
    }

    /**
     * Sets the value of the alt property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setAlt(String value) {
        this.alt = value;
    }

    /**
     * Gets the value of the fontfamily property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getFontfamily() {
        return fontfamily;
    }

    /**
     * Sets the value of the fontfamily property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setFontfamily(String value) {
        this.fontfamily = value;
    }

    /**
     * Gets the value of the index property.
     *
     * @return
     *     possible object is
     *     {@link BigInteger }
     *
     */
    public BigInteger getIndex() {
        return index;
    }

    /**
     * Sets the value of the index property.
     *
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *
     */
    public void setIndex(BigInteger value) {
        this.index = value;
    }
}
| apache-2.0 |
ingokegel/intellij-community | platform/lang-impl/src/com/intellij/util/indexing/UnindexedFilesFinder.java | 10024 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.util.indexing;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.ex.FileTypeManagerEx;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileWithId;
import com.intellij.openapi.vfs.newvfs.impl.VirtualFileSystemEntry;
import com.intellij.psi.search.FileTypeIndex;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.projectFilter.ProjectIndexableFilesFilterHolder;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
/**
 * Computes, for a single {@link VirtualFile}, whether it still needs (content) indexing.
 * Along the way it updates content-less indexes in place, lets index-infrastructure
 * extensions supply prebuilt index data, and records fine-grained timings that are
 * reported through {@link UnindexedFileStatus}.
 */
final class UnindexedFilesFinder {
  private static final Logger LOG = Logger.getInstance(UnindexedFilesFinder.class);

  private final Project myProject;
  // Captured once; trace logging is consulted per candidate index below.
  private final boolean myDoTraceForFilesToBeIndexed = FileBasedIndexImpl.LOG.isTraceEnabled();
  private final FileBasedIndexImpl myFileBasedIndex;
  private final UpdatableIndex<FileType, Void, FileContent> myFileTypeIndex;
  // Per-project status processors contributed by FileBasedIndexInfrastructureExtension.
  private final Collection<FileBasedIndexInfrastructureExtension.FileIndexingStatusProcessor> myStateProcessors;
  private final @NotNull ProjectIndexableFilesFilterHolder myIndexableFilesFilterHolder;
  // True if at least one processor wants to see files that are already up to date.
  private final boolean myShouldProcessUpToDateFiles;

  UnindexedFilesFinder(@NotNull Project project,
                       @NotNull FileBasedIndexImpl fileBasedIndex) {
    myProject = project;
    myFileBasedIndex = fileBasedIndex;
    myFileTypeIndex = fileBasedIndex.getIndex(FileTypeIndex.NAME);

    // Collect non-null status processors from all registered extensions.
    myStateProcessors = FileBasedIndexInfrastructureExtension
      .EP_NAME
      .extensions()
      .map(ex -> ex.createFileIndexingStatusProcessor(project))
      .filter(Objects::nonNull)
      .collect(Collectors.toList());
    myShouldProcessUpToDateFiles = ContainerUtil.find(myStateProcessors, p -> p.shouldProcessUpToDateFiles()) != null;
    myIndexableFilesFilterHolder = fileBasedIndex.getIndexableFilesFilterHolder();
  }

  /**
   * Determines the indexing status of {@code file} under a read action.
   * Returns {@code null} for files that are not subject to indexing at all
   * (disposed project, invalid file, or a file without an id).
   */
  @Nullable("null if the file is not subject for indexing (a directory, invalid, etc.)")
  public UnindexedFileStatus getFileStatus(@NotNull VirtualFile file) {
    return ReadAction.compute(() -> {
      if (myProject.isDisposed() || !file.isValid() || !(file instanceof VirtualFileWithId)) {
        return null;
      }

      // Atomics because they are mutated inside the lambdas/runnables below.
      AtomicBoolean indexesWereProvidedByInfrastructureExtension = new AtomicBoolean();
      AtomicLong timeProcessingUpToDateFiles = new AtomicLong();
      AtomicLong timeUpdatingContentLessIndexes = new AtomicLong();
      AtomicLong timeIndexingWithoutContent = new AtomicLong();

      IndexedFileImpl indexedFile = new IndexedFileImpl(file, myProject);
      int inputId = FileBasedIndex.getFileId(file);
      boolean fileWereJustAdded = myIndexableFilesFilterHolder.addFileId(inputId, myProject);

      // Fast path: the file is already flagged as indexed. Still give the
      // extension processors a chance to look at it (only the first time it is
      // added to the project filter); if any of them invalidates its state,
      // fall through to the full check below.
      if (file instanceof VirtualFileSystemEntry && ((VirtualFileSystemEntry)file).isFileIndexed()) {
        boolean wasInvalidated = false;
        if (fileWereJustAdded) {
          List<ID<?, ?>> ids = IndexingStamp.getNontrivialFileIndexedStates(inputId);
          for (FileBasedIndexInfrastructureExtension.FileIndexingStatusProcessor processor : myStateProcessors) {
            for (ID<?, ?> id : ids) {
              if (myFileBasedIndex.needsFileContentLoading(id)) {
                long nowTime = System.nanoTime();
                try {
                  if (!processor.processUpToDateFile(indexedFile, inputId, id)) {
                    wasInvalidated = true;
                  }
                } finally {
                  timeProcessingUpToDateFiles.addAndGet(System.nanoTime() - nowTime);
                }
              }
            }
          }
        }
        if (!wasInvalidated) {
          IndexingStamp.flushCache(inputId);
          return new UnindexedFileStatus(false,
                                         false,
                                         timeProcessingUpToDateFiles.get(),
                                         timeUpdatingContentLessIndexes.get(),
                                         timeIndexingWithoutContent.get());
        }
      }

      AtomicBoolean shouldIndex = new AtomicBoolean();
      // Freeze the file type while inspecting, so all index checks below see a
      // consistent file type for this file.
      FileTypeManagerEx.getInstanceEx().freezeFileTypeTemporarilyIn(file, () -> {
        boolean isDirectory = file.isDirectory();
        FileIndexingState fileTypeIndexState = null;
        if (!isDirectory && !myFileBasedIndex.isTooLarge(file)) {
          // An out-of-date file type index invalidates everything for this file.
          if ((fileTypeIndexState = myFileTypeIndex.getIndexingStateForFile(inputId, indexedFile)) == FileIndexingState.OUT_DATED) {
            myFileBasedIndex.dropNontrivialIndexedStates(inputId);
            shouldIndex.set(true);
          }
          else {
            final List<ID<?, ?>> affectedIndexCandidates = myFileBasedIndex.getAffectedIndexCandidates(indexedFile);
            //noinspection ForLoopReplaceableByForEach
            for (int i = 0, size = affectedIndexCandidates.size(); i < size; ++i) {
              final ID<?, ?> indexId = affectedIndexCandidates.get(i);
              try {
                if (myFileBasedIndex.needsFileContentLoading(indexId)) {
                  FileIndexingState fileIndexingState = myFileBasedIndex.shouldIndexFile(indexedFile, indexId);
                  boolean indexInfrastructureExtensionInvalidated = false;
                  if (fileIndexingState == FileIndexingState.UP_TO_DATE) {
                    if (myShouldProcessUpToDateFiles) {
                      for (FileBasedIndexInfrastructureExtension.FileIndexingStatusProcessor p : myStateProcessors) {
                        long nowTime = System.nanoTime();
                        try {
                          if (!p.processUpToDateFile(indexedFile, inputId, indexId)) {
                            indexInfrastructureExtensionInvalidated = true;
                          }
                        } finally {
                          timeProcessingUpToDateFiles.addAndGet(System.nanoTime() - nowTime);
                        }
                      }
                    }
                  }
                  if (indexInfrastructureExtensionInvalidated) {
                    // Re-query: a processor may have cleared the stamp above.
                    fileIndexingState = myFileBasedIndex.shouldIndexFile(indexedFile, indexId);
                  }
                  if (fileIndexingState.updateRequired()) {
                    if (myDoTraceForFilesToBeIndexed) {
                      LOG.trace("Scheduling indexing of " + file + " by request of index " + indexId);
                    }
                    long nowTime = System.nanoTime();
                    boolean wasIndexedByInfrastructure;
                    try {
                      wasIndexedByInfrastructure = tryIndexWithoutContentViaInfrastructureExtension(indexedFile, inputId, indexId);
                    } finally {
                      timeIndexingWithoutContent.addAndGet(System.nanoTime() - nowTime);
                    }
                    if (wasIndexedByInfrastructure) {
                      indexesWereProvidedByInfrastructureExtension.set(true);
                    }
                    else {
                      shouldIndex.set(true);
                      // NOTE! Do not break the loop here. We must process ALL IDs and pass them to the FileIndexingStatusProcessor
                      // so that it can invalidate all "indexing states" (by means of clearing IndexingStamp)
                      // for all indexes that became invalid. See IDEA-252846 for more details.
                    }
                  }
                }
              }
              catch (RuntimeException e) {
                final Throwable cause = e.getCause();
                if (cause instanceof IOException || cause instanceof StorageException) {
                  // Storage-level failures trigger a rebuild of the affected index.
                  LOG.info(e);
                  myFileBasedIndex.requestRebuild(indexId);
                }
                else {
                  throw e;
                }
              }
            }
          }
        }

        // Content-less indexes are cheap and are updated right here, in place.
        long nowTime = System.nanoTime();
        try {
          for (ID<?, ?> indexId : myFileBasedIndex.getContentLessIndexes(isDirectory)) {
            // Skip the file type index if we already determined it needs no update.
            if (FileTypeIndex.NAME.equals(indexId) && fileTypeIndexState != null && !fileTypeIndexState.updateRequired()) {
              continue;
            }
            if (myFileBasedIndex.shouldIndexFile(indexedFile, indexId).updateRequired()) {
              myFileBasedIndex.updateSingleIndex(indexId, file, inputId, new IndexedFileWrapper(indexedFile));
            }
          }
        } finally {
          timeUpdatingContentLessIndexes.addAndGet(System.nanoTime() - nowTime);
        }

        IndexingStamp.flushCache(inputId);
        if (!shouldIndex.get()) {
          // Nothing left to do for this file: remember that so the fast path
          // above can skip it next time.
          IndexingFlag.setFileIndexed(file);
        }
      });

      return new UnindexedFileStatus(shouldIndex.get(),
                                     indexesWereProvidedByInfrastructureExtension.get(),
                                     timeProcessingUpToDateFiles.get(),
                                     timeUpdatingContentLessIndexes.get(),
                                     timeIndexingWithoutContent.get());
    });
  }

  /**
   * Asks each extension processor to index the file without loading its content.
   * On the first success, marks the index state for this file as indexed.
   *
   * @return {@code true} if some extension handled the file for {@code indexId}.
   */
  private boolean tryIndexWithoutContentViaInfrastructureExtension(IndexedFile fileContent, int inputId, ID<?, ?> indexId) {
    for (FileBasedIndexInfrastructureExtension.FileIndexingStatusProcessor processor : myStateProcessors) {
      if (processor.tryIndexFileWithoutContent(fileContent, inputId, indexId)) {
        FileBasedIndexImpl.setIndexedState(myFileBasedIndex.getIndex(indexId), fileContent, inputId, true);
        return true;
      }
    }
    return false;
  }
}
zanthrash/deck-1 | app/scripts/modules/google/serverGroup/configure/wizard/ServerGroupInstanceArchetype.controller.js | 911 | 'use strict';
let angular = require('angular');
// Controller for the "instance archetype" step of the GCE server group wizard.
// It toggles the visibility/state of the wizard's instance-type page based on
// the selected instance profile.
// NOTE(review): instanceTypeService is injected but not referenced here —
// confirm whether the injection can be removed.
module.exports = angular.module('spinnaker.serverGroup.configure.gce.instanceArchetypeCtrl', [])
  .controller('gceInstanceArchetypeCtrl', function($scope, instanceTypeService, modalWizardService) {
    var wizard = modalWizardService.getWizard();

    // No profile (or a "custom" profile) means the instance-type page is not
    // applicable; any named profile re-enables it and marks this step done.
    $scope.$watch('command.viewState.instanceProfile', function() {
      if (!$scope.command.viewState.instanceProfile || $scope.command.viewState.instanceProfile === 'custom') {
        wizard.excludePage('instance-type');
      } else {
        wizard.includePage('instance-type');
        wizard.markClean('instance-profile');
        wizard.markComplete('instance-profile');
      }
    });

    // Picking a concrete instance type also completes the profile step.
    $scope.$watch('command.viewState.instanceType', function(newVal) {
      if (newVal) {
        wizard.markClean('instance-profile');
        wizard.markComplete('instance-profile');
      }
    });
  }).name;
| apache-2.0 |
murat8505/dredit_text_editor_Google-Drive | php/utils.php | 2194 | <?php
/**
* Utility functions for the DrEdit PHP application.
*
* @author Burcu Dogan <[email protected]>
*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Reads the current user back out of the session.
 *
 * @return object|NULL the decoded user payload, or NULL when no user is stored.
 */
function get_user() {
  return isset($_SESSION["user"]) ? json_decode($_SESSION["user"]) : NULL;
}
/**
 * Stores the given OAuth tokens as the session's current user.
 *
 * @param mixed $tokens token data to persist for this session.
 */
function set_user($tokens) {
  $payload = array('tokens' => $tokens);
  $_SESSION["user"] = json_encode($payload);
}
/**
 * Clears the stored user; subsequent get_user() calls will return NULL,
 * since isset() is false for a NULL value.
 */
function delete_user() {
  $_SESSION['user'] = NULL;
}
/**
 * Checks whether or not there is an authenticated user in the session.
 * If not, responds with a 401 error payload and halts the request.
 *
 * @param object $app the Slim application instance.
 */
function checkUserAuthentication($app) {
  $user = get_user();
  if (!$user) {
    // Bug fix: renderErrJson() is a plain function defined in this file
    // (and is called that way by checkRequiredQueryParams below), not a
    // method on the Slim $app object.
    renderErrJson($app, 401, 'User is not authenticated.');
  }
}
/**
 * Checks whether or not all given params are present in the request's
 * query parameters. If any is missing, responds with a 400 error payload.
 *
 * @param object $app    the Slim application instance.
 * @param array  $params names of required query parameters.
 */
function checkRequiredQueryParams($app, $params = array()) {
  // Iterate by value: the original by-reference loop served no purpose and
  // left a dangling reference to the last element after the loop.
  foreach ($params as $param) {
    if (!$app->request()->get($param)) {
      renderErrJson($app, 400, 'Required parameter missing.');
    }
  }
}
/**
 * Renders the given value as JSON on the response body.
 *
 * @param object $app unused; kept for signature consistency with the
 *                    other render helpers in this file.
 * @param mixed  $obj value to serialize.
 */
function renderJson($app, $obj) {
  print json_encode($obj);
}
/**
 * Renders the given message as a JSON error payload and halts the request
 * with the given HTTP status code.
 *
 * @param object $app        the Slim application instance.
 * @param int    $statusCode HTTP status code to respond with.
 * @param string $message    human-readable error description.
 */
function renderErrJson($app, $statusCode, $message) {
  // Bug fix: the array key must be the quoted string 'message'; the bare
  // constant `message` raises an Error on PHP 8 (and a notice before that).
  echo json_encode(array('message' => $message));
  $app->halt($statusCode);
}
/**
 * Serializes the given value as JSON on the response body.
 *
 * NOTE(review): json_encode() only emits public properties, so a plain
 * Exception typically encodes as "{}" — verify callers pass something richer.
 *
 * @param object $app unused; kept for signature consistency.
 * @param mixed  $ex  value (nominally an exception) to serialize.
 */
function renderEx($app, $ex) {
  print json_encode($ex);
}
| apache-2.0 |
hnafar/IronJS | Src/Tests/ietestcenter/chapter15/15.4/15.4.4/15.4.4.14/15.4.4.14-9-b-i-6.js | 2446 | /// Copyright (c) 2009 Microsoft Corporation
///
/// Redistribution and use in source and binary forms, with or without modification, are permitted provided
/// that the following conditions are met:
/// * Redistributions of source code must retain the above copyright notice, this list of conditions and
/// the following disclaimer.
/// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
/// the following disclaimer in the documentation and/or other materials provided with the distribution.
/// * Neither the name of Microsoft nor the names of its contributors may be used to
/// endorse or promote products derived from this software without specific prior written permission.
///
/// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
/// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
/// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
/// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
/// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
/// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
/// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
/// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// ES5 conformance test: an own data property on an array-like object must
// shadow an inherited accessor on the same index when indexOf reads elements.
ES5Harness.registerTest({
  id: "15.4.4.14-9-b-i-6",
  path: "TestCases/chapter15/15.4/15.4.4/15.4.4.14/15.4.4.14-9-b-i-6.js",
  description: "Array.prototype.indexOf - element to be retrieved is own data property that overrides an inherited accessor property on an Array-like object",
  test: function testcase() {
    try {
      // Install an inherited accessor on index "0" that yields false; the own
      // data property (0: true) on the array-like object must win the lookup,
      // so indexOf(true) should find it at index 0.
      Object.defineProperty(Object.prototype, "0", {
        get: function () {
          return false;
        },
        configurable: true
      });
      return 0 === Array.prototype.indexOf.call({ 0: true, 1: 1, length: 2 }, true);
    } finally {
      // Always undo the Object.prototype modification so other tests are unaffected.
      delete Object.prototype[0];
    }
  },
  precondition: function prereq() {
    return fnExists(Array.prototype.indexOf) && fnExists(Object.defineProperty) && fnSupportsArrayIndexGettersOnObjects();
  }
});
ghchinoy/tensorflow | tensorflow/contrib/distribute/python/keras_multi_worker_test_base.py | 4173 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test base for tf.keras Models in multi-worker mode."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
# pylint: disable=g-direct-tensorflow-import
from tensorflow.contrib.distribute.python import collective_all_reduce_strategy as collective_strategy
from tensorflow.contrib.distribute.python import parameter_server_strategy
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import distribute_coordinator as dc
from tensorflow.python.distribute import multi_worker_test_base
from tensorflow.python.eager import context
from tensorflow.python.platform import test
# Keep a handle to the real implementation so tests can wrap it with a mock
# (see KerasIndependentWorkerTestBase._make_mock_run_std_server below).
_original_run_std_server = dc._run_std_server  # pylint: disable=protected-access

# Used as a decorator on test methods.
run_sync_strategies = combinations.generate(
    combinations.combine(
        mode=['graph'],
        strategy_cls=[
            collective_strategy.CollectiveAllReduceStrategy,
        ],
        required_gpus=[0, 1]))

# Used as a decorator on test methods.
run_async_strategies = combinations.generate(
    combinations.combine(
        mode=['graph'],
        strategy_cls=[parameter_server_strategy.ParameterServerStrategy],
        required_gpus=[0, 1]))
def get_strategy_object(strategy_cls):
  """Instantiates `strategy_cls` using however many GPUs are locally visible."""
  return strategy_cls(num_gpus_per_worker=context.num_gpus())
# TODO(omalleyt): Merge with keras_multiworker_callback_test
class KerasIndependentWorkerTestBase(
    multi_worker_test_base.IndependentWorkerTestBase):
  """Test base for simulating Keras Multi-Worker in threads."""

  def _make_mock_run_std_server(self):
    # One flag per thread: each simulated worker thread must hit the barrier
    # exactly once, the first time it starts its std server.
    thread_local = threading.local()

    def _mock_run_std_server(*args, **kwargs):
      ret = _original_run_std_server(*args, **kwargs)
      # Wait for all std servers to be brought up in order to reduce the chance
      # of remote sessions taking local ports that have been assigned to std
      # servers. Only call this barrier the first time this function is run for
      # each thread.
      if not getattr(thread_local, 'server_started', False):
        self._barrier.wait()
        thread_local.server_started = True
      return ret
    return _mock_run_std_server

  def run_independent_workers(self,
                              worker_fn,
                              strategy_cls,
                              num_workers,
                              num_ps=None,
                              **kwargs):
    # Runs `worker_fn` in one thread per worker/ps task of a simulated cluster.
    cluster_spec = multi_worker_test_base.create_cluster_spec(
        num_workers=num_workers, num_ps=num_ps)
    # Sized to all tasks so every std server is up before any session starts.
    self._barrier = dc._Barrier(num_workers + (num_ps or 0))  # pylint: disable=protected-access

    def _worker_fn(**kwargs):
      """Runs the worker function in a thread."""
      with test.mock.patch.object(dc, '_run_std_server',
                                  self._make_mock_run_std_server()):
        strategy = get_strategy_object(strategy_cls)
        with strategy.scope():
          return worker_fn(**kwargs)

    threads = self.run_multiple_tasks_in_threads(_worker_fn, cluster_spec,
                                                 **kwargs)
    strategy = get_strategy_object(strategy_cls)
    # Between-graph replication: join every chief/worker thread. Otherwise only
    # the single thread that drives the computation (chief if present).
    if strategy.extended.experimental_between_graph:
      threads_to_join = threads.get('chief', []) + threads.get('worker', [])
    else:
      threads_to_join = [
          threads['chief'][0] if 'chief' in threads else threads['worker'][0]
      ]
    self.join_independent_workers(threads_to_join)
| apache-2.0 |
hyundukkim/WATT | libs/brackets-server/embedded-ext/importfile/node/ImportDomain.js | 5812 | (function () {
"use strict";
var fs = require("fs"),
fse = require("fs-extra"),
path = require("path"),
readline = require("readline");
var _domainManager;
function init(domainManager) {
_domainManager = domainManager;
if (!_domainManager.hasDomain("importNode")) {
_domainManager.registerDomain("importNode", {major: 0, minor: 1});
}
// Get shared project from share
function getSharedProject(callback) {
var sharedPath = path.join(process.cwd(), "share");
fs.readdir(sharedPath, function(error, files) {
callback(null, files);
});
}
function getSharedFile(projectName, callback) {
var fileName;
var sharedPath = path.join(process.cwd(), "share", projectName);
// Get target name from makefile
var makePath = path.join(sharedPath, "makefile");
if (fs.existsSync(makePath)) {
var lineReader = readline.createInterface({
input: fs.createReadStream(makePath)
});
lineReader.on("line", function(line) {
if (line.startsWith("TARGET")) {
var file = line.split("=")[1].trim();
fileName = file.split(".")[0];
}
});
lineReader.on("close", function() {
// FIXME: We just checked wasm and js whether it was exsited
// or not. We need a way to find correct result file.
var wasmPath = path.join(sharedPath, fileName + ".wasm");
var loaderPath = path.join(sharedPath, fileName + ".js");
var fileList = [];
if (fs.existsSync(wasmPath) && fs.existsSync(loaderPath)) {
fileList.push(fileName + ".wasm");
fileList.push(fileName + ".js");
callback(null, fileList);
} else {
callback("Not found wasm");
}
});
} else {
callback("Not found makefile");
}
}
function copySharedFile(projectName, fileList, targetId, callback) {
var sharedPath = path.join(process.cwd(), "share", projectName);
var destPath = path.join(process.cwd(), "projects", targetId);
// Copy files to the target project
fileList.forEach(function(file) {
var sourcePath = path.join(sharedPath, file);
if (fs.existsSync(sourcePath)) {
var destFilePath = path.join(destPath, file);
try {
fse.copySync(sourcePath, destFilePath);
} catch (error) {
return callback("Fail to copy files");
}
}
});
callback(null);
}
function copyFile(projectId, src, name, dest, callback) {
const sourcePath = path.join(process.cwd(), 'projects', projectId, src, name);
const destPath = path.join(process.cwd(), 'projects', projectId, dest, name);
fse.copy(sourcePath, destPath, (err) => {
if (err) {
return callback(err);
}
callback();
});
}
function moveFile(projectId, src, name, dest, callback) {
const sourcePath = path.join(process.cwd(), 'projects', projectId, src, name);
const destPath = path.join(process.cwd(), 'projects', projectId, dest, name);
fse.move(sourcePath, destPath, (err) => {
if (err) {
return callback(err);
}
callback();
});
}
_domainManager.registerCommand(
"importNode",
"getSharedProject",
getSharedProject,
true,
"Get Shared Project",
null,
[
{name: "data", type: "array"}
]
);
_domainManager.registerCommand(
"importNode",
"getSharedFile",
getSharedFile,
true,
"Get Shared File",
[
{name: "projectName", type: "string"}
],
[
{name: "result", type: "array"}
]
);
_domainManager.registerCommand(
"importNode",
"copySharedFile",
copySharedFile,
true,
"Copy Shared File",
[
{name: "projectName", type: "string"},
{name: "fileList", type: "array"},
{name: "targetId", type: "string"}
],
[]
);
_domainManager.registerCommand(
"importNode",
"COPY",
copyFile,
true,
"Copy File",
[
{name: "projectId", type: "string"},
{name: "src", type: "string"},
{name: "name", type: "string"},
{name: "dest", type: "string"}
],
[]
);
_domainManager.registerCommand(
"importNode",
"CUT",
moveFile,
true,
"Move File",
[
{name: "projectId", type: "string"},
{name: "src", type: "string"},
{name: "name", type: "string"},
{name: "dest", type: "string"}
],
[]
);
}
exports.init = init;
}());
| apache-2.0 |
yafengguo/Apache-beam | sdks/java/core/src/main/java/org/apache/beam/sdk/util/PubsubGrpcClient.java | 15532 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.util;
import static com.google.common.base.Preconditions.checkState;
import com.google.auth.Credentials;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.protobuf.ByteString;
import com.google.protobuf.Timestamp;
import com.google.pubsub.v1.AcknowledgeRequest;
import com.google.pubsub.v1.DeleteSubscriptionRequest;
import com.google.pubsub.v1.DeleteTopicRequest;
import com.google.pubsub.v1.GetSubscriptionRequest;
import com.google.pubsub.v1.ListSubscriptionsRequest;
import com.google.pubsub.v1.ListSubscriptionsResponse;
import com.google.pubsub.v1.ListTopicsRequest;
import com.google.pubsub.v1.ListTopicsResponse;
import com.google.pubsub.v1.ModifyAckDeadlineRequest;
import com.google.pubsub.v1.PublishRequest;
import com.google.pubsub.v1.PublishResponse;
import com.google.pubsub.v1.PublisherGrpc;
import com.google.pubsub.v1.PublisherGrpc.PublisherBlockingStub;
import com.google.pubsub.v1.PubsubMessage;
import com.google.pubsub.v1.PullRequest;
import com.google.pubsub.v1.PullResponse;
import com.google.pubsub.v1.ReceivedMessage;
import com.google.pubsub.v1.SubscriberGrpc;
import com.google.pubsub.v1.SubscriberGrpc.SubscriberBlockingStub;
import com.google.pubsub.v1.Subscription;
import com.google.pubsub.v1.Topic;
import io.grpc.Channel;
import io.grpc.ClientInterceptors;
import io.grpc.ManagedChannel;
import io.grpc.auth.ClientAuthInterceptor;
import io.grpc.netty.GrpcSslContexts;
import io.grpc.netty.NegotiationType;
import io.grpc.netty.NettyChannelBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
import org.apache.beam.sdk.options.GcpOptions;
import org.apache.beam.sdk.options.PubsubOptions;
/**
 * A helper class for talking to Pubsub via grpc.
 *
 * <p>CAUTION: Currently uses the application default credentials and does not respect any
 * credentials-related arguments in {@link GcpOptions}.
 */
public class PubsubGrpcClient extends PubsubClient {
  private static final String PUBSUB_ADDRESS = "pubsub.googleapis.com";
  private static final int PUBSUB_PORT = 443;
  // Page size used when listing topics and subscriptions.
  private static final int LIST_BATCH_SIZE = 1000;
  // Default deadline (seconds) applied to every blocking RPC.
  private static final int DEFAULT_TIMEOUT_S = 15;
  /** Factory that builds a TLS netty channel and wraps it in a {@link PubsubGrpcClient}. */
  private static class PubsubGrpcClientFactory implements PubsubClientFactory {
    @Override
    public PubsubClient newClient(
        @Nullable String timestampLabel, @Nullable String idLabel, PubsubOptions options)
        throws IOException {
      ManagedChannel channel = NettyChannelBuilder
          .forAddress(PUBSUB_ADDRESS, PUBSUB_PORT)
          .negotiationType(NegotiationType.TLS)
          .sslContext(GrpcSslContexts.forClient().ciphers(null).build())
          .build();
      return new PubsubGrpcClient(timestampLabel,
                                  idLabel,
                                  DEFAULT_TIMEOUT_S,
                                  channel,
                                  options.getGcpCredential());
    }
    @Override
    public String getKind() {
      return "Grpc";
    }
  }
  /**
   * Factory for creating Pubsub clients using gRCP transport.
   */
  public static final PubsubClientFactory FACTORY = new PubsubGrpcClientFactory();
  /**
   * Timeout for grpc calls (in s).
   */
  private final int timeoutSec;
  /**
   * Underlying netty channel, or {@literal null} if closed.
   */
  @Nullable
  private ManagedChannel publisherChannel;
  /**
   * Credentials determined from options and environment.
   */
  private final Credentials credentials;
  /**
   * Label to use for custom timestamps, or {@literal null} if should use Pubsub publish time
   * instead.
   */
  @Nullable
  private final String timestampLabel;
  /**
   * Label to use for custom ids, or {@literal null} if should use Pubsub provided ids.
   */
  @Nullable
  private final String idLabel;
  /**
   * Cached stubs, or null if not cached.
   */
  @Nullable
  private PublisherGrpc.PublisherBlockingStub cachedPublisherStub;
  // NOTE(review): also null until first use, like cachedPublisherStub; missing @Nullable.
  private SubscriberGrpc.SubscriberBlockingStub cachedSubscriberStub;
  @VisibleForTesting
  PubsubGrpcClient(
      @Nullable String timestampLabel,
      @Nullable String idLabel,
      int timeoutSec,
      ManagedChannel publisherChannel,
      Credentials credentials) {
    this.timestampLabel = timestampLabel;
    this.idLabel = idLabel;
    this.timeoutSec = timeoutSec;
    this.publisherChannel = publisherChannel;
    this.credentials = credentials;
  }
  /**
   * Gracefully close the underlying netty channel.
   */
  @Override
  public void close() {
    if (publisherChannel == null) {
      // Already closed.
      return;
    }
    // Can gc the underlying stubs.
    cachedPublisherStub = null;
    cachedSubscriberStub = null;
    // Mark the client as having been closed before going further
    // in case we have an exception from the channel.
    ManagedChannel publisherChannel = this.publisherChannel;
    this.publisherChannel = null;
    // Gracefully shutdown the channel.
    publisherChannel.shutdown();
    try {
      publisherChannel.awaitTermination(timeoutSec, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
      // Ignore.
      Thread.currentThread().interrupt();
    }
  }
  /**
   * Return channel with interceptor for returning credentials.
   */
  private Channel newChannel() throws IOException {
    checkState(publisherChannel != null, "PubsubGrpcClient has been closed");
    ClientAuthInterceptor interceptor =
        new ClientAuthInterceptor(credentials, Executors.newSingleThreadExecutor());
    return ClientInterceptors.intercept(publisherChannel, interceptor);
  }
  /**
   * Return a stub for making a publish request with a timeout.
   */
  private PublisherBlockingStub publisherStub() throws IOException {
    if (cachedPublisherStub == null) {
      cachedPublisherStub = PublisherGrpc.newBlockingStub(newChannel());
    }
    return cachedPublisherStub.withDeadlineAfter(timeoutSec, TimeUnit.SECONDS);
  }
  /**
   * Return a stub for making a subscribe request with a timeout.
   */
  private SubscriberBlockingStub subscriberStub() throws IOException {
    if (cachedSubscriberStub == null) {
      cachedSubscriberStub = SubscriberGrpc.newBlockingStub(newChannel());
    }
    return cachedSubscriberStub.withDeadlineAfter(timeoutSec, TimeUnit.SECONDS);
  }
  /**
   * Publishes {@code outgoingMessages} to {@code topic}, attaching the custom timestamp/id
   * attributes when the corresponding labels are configured. Returns the number of message
   * ids Pubsub acknowledged.
   */
  @Override
  public int publish(TopicPath topic, List<OutgoingMessage> outgoingMessages)
      throws IOException {
    PublishRequest.Builder request = PublishRequest.newBuilder()
                                                   .setTopic(topic.getPath());
    for (OutgoingMessage outgoingMessage : outgoingMessages) {
      PubsubMessage.Builder message =
          PubsubMessage.newBuilder()
                       .setData(ByteString.copyFrom(outgoingMessage.elementBytes));
      if (timestampLabel != null) {
        message.getMutableAttributes()
               .put(timestampLabel, String.valueOf(outgoingMessage.timestampMsSinceEpoch));
      }
      if (idLabel != null && !Strings.isNullOrEmpty(outgoingMessage.recordId)) {
        message.getMutableAttributes().put(idLabel, outgoingMessage.recordId);
      }
      request.addMessages(message);
    }
    PublishResponse response = publisherStub().publish(request.build());
    return response.getMessageIdsCount();
  }
  /**
   * Pulls up to {@code batchSize} messages from {@code subscription} and converts each into
   * an {@link IncomingMessage}, resolving timestamps and record ids from the configured
   * attribute labels when present.
   */
  @Override
  public List<IncomingMessage> pull(
      long requestTimeMsSinceEpoch,
      SubscriptionPath subscription,
      int batchSize,
      boolean returnImmediately) throws IOException {
    PullRequest request = PullRequest.newBuilder()
                                     .setSubscription(subscription.getPath())
                                     .setReturnImmediately(returnImmediately)
                                     .setMaxMessages(batchSize)
                                     .build();
    PullResponse response = subscriberStub().pull(request);
    if (response.getReceivedMessagesCount() == 0) {
      return ImmutableList.of();
    }
    List<IncomingMessage> incomingMessages = new ArrayList<>(response.getReceivedMessagesCount());
    for (ReceivedMessage message : response.getReceivedMessagesList()) {
      PubsubMessage pubsubMessage = message.getMessage();
      @Nullable Map<String, String> attributes = pubsubMessage.getAttributes();
      // Payload.
      byte[] elementBytes = pubsubMessage.getData().toByteArray();
      // Timestamp.
      String pubsubTimestampString = null;
      Timestamp timestampProto = pubsubMessage.getPublishTime();
      if (timestampProto != null) {
        // NOTE(review): getSeconds() + getNanos()/1000 mixes units (seconds plus
        // microseconds) before stringifying. Confirm what extractTimestamp()
        // expects for this string — this arithmetic looks suspicious.
        pubsubTimestampString = String.valueOf(timestampProto.getSeconds()
                                               + timestampProto.getNanos() / 1000L);
      }
      long timestampMsSinceEpoch =
          extractTimestamp(timestampLabel, pubsubTimestampString, attributes);
      // Ack id.
      String ackId = message.getAckId();
      checkState(!Strings.isNullOrEmpty(ackId));
      // Record id, if any.
      @Nullable String recordId = null;
      if (idLabel != null && attributes != null) {
        recordId = attributes.get(idLabel);
      }
      if (Strings.isNullOrEmpty(recordId)) {
        // Fall back to the Pubsub provided message id.
        recordId = pubsubMessage.getMessageId();
      }
      incomingMessages.add(new IncomingMessage(elementBytes, timestampMsSinceEpoch,
                                               requestTimeMsSinceEpoch, ackId, recordId));
    }
    return incomingMessages;
  }
  /** Acknowledges the given ack ids on {@code subscription}. */
  @Override
  public void acknowledge(SubscriptionPath subscription, List<String> ackIds)
      throws IOException {
    AcknowledgeRequest request = AcknowledgeRequest.newBuilder()
                                                   .setSubscription(subscription.getPath())
                                                   .addAllAckIds(ackIds)
                                                   .build();
    subscriberStub().acknowledge(request); // ignore Empty result.
  }
  /** Extends (or shortens) the ack deadline of the given ack ids. */
  @Override
  public void modifyAckDeadline(
      SubscriptionPath subscription, List<String> ackIds, int deadlineSeconds)
      throws IOException {
    ModifyAckDeadlineRequest request =
        ModifyAckDeadlineRequest.newBuilder()
                                .setSubscription(subscription.getPath())
                                .addAllAckIds(ackIds)
                                .setAckDeadlineSeconds(deadlineSeconds)
                                .build();
    subscriberStub().modifyAckDeadline(request); // ignore Empty result.
  }
  /** Creates {@code topic}; fails if it already exists. */
  @Override
  public void createTopic(TopicPath topic) throws IOException {
    Topic request = Topic.newBuilder()
                         .setName(topic.getPath())
                         .build();
    publisherStub().createTopic(request); // ignore Topic result.
  }
  /** Deletes {@code topic}. */
  @Override
  public void deleteTopic(TopicPath topic) throws IOException {
    DeleteTopicRequest request = DeleteTopicRequest.newBuilder()
                                                   .setTopic(topic.getPath())
                                                   .build();
    publisherStub().deleteTopic(request); // ignore Empty result.
  }
  /** Lists all topics of {@code project}, following pagination tokens. */
  @Override
  public List<TopicPath> listTopics(ProjectPath project) throws IOException {
    ListTopicsRequest.Builder request =
        ListTopicsRequest.newBuilder()
                         .setProject(project.getPath())
                         .setPageSize(LIST_BATCH_SIZE);
    ListTopicsResponse response = publisherStub().listTopics(request.build());
    if (response.getTopicsCount() == 0) {
      return ImmutableList.of();
    }
    List<TopicPath> topics = new ArrayList<>(response.getTopicsCount());
    while (true) {
      for (Topic topic : response.getTopicsList()) {
        topics.add(topicPathFromPath(topic.getName()));
      }
      if (response.getNextPageToken().isEmpty()) {
        break;
      }
      request.setPageToken(response.getNextPageToken());
      response = publisherStub().listTopics(request.build());
    }
    return topics;
  }
  /** Creates {@code subscription} on {@code topic} with the given ack deadline. */
  @Override
  public void createSubscription(
      TopicPath topic, SubscriptionPath subscription,
      int ackDeadlineSeconds) throws IOException {
    Subscription request = Subscription.newBuilder()
                                       .setTopic(topic.getPath())
                                       .setName(subscription.getPath())
                                       .setAckDeadlineSeconds(ackDeadlineSeconds)
                                       .build();
    subscriberStub().createSubscription(request); // ignore Subscription result.
  }
  /** Deletes {@code subscription}. */
  @Override
  public void deleteSubscription(SubscriptionPath subscription) throws IOException {
    DeleteSubscriptionRequest request =
        DeleteSubscriptionRequest.newBuilder()
                                 .setSubscription(subscription.getPath())
                                 .build();
    subscriberStub().deleteSubscription(request); // ignore Empty result.
  }
  /**
   * Lists the subscriptions of {@code project} that are attached to {@code topic},
   * following pagination tokens. Subscriptions on other topics are filtered out client-side.
   */
  @Override
  public List<SubscriptionPath> listSubscriptions(ProjectPath project, TopicPath topic)
      throws IOException {
    ListSubscriptionsRequest.Builder request =
        ListSubscriptionsRequest.newBuilder()
                                .setProject(project.getPath())
                                .setPageSize(LIST_BATCH_SIZE);
    ListSubscriptionsResponse response = subscriberStub().listSubscriptions(request.build());
    if (response.getSubscriptionsCount() == 0) {
      return ImmutableList.of();
    }
    List<SubscriptionPath> subscriptions = new ArrayList<>(response.getSubscriptionsCount());
    while (true) {
      for (Subscription subscription : response.getSubscriptionsList()) {
        if (subscription.getTopic().equals(topic.getPath())) {
          subscriptions.add(subscriptionPathFromPath(subscription.getName()));
        }
      }
      if (response.getNextPageToken().isEmpty()) {
        break;
      }
      request.setPageToken(response.getNextPageToken());
      response = subscriberStub().listSubscriptions(request.build());
    }
    return subscriptions;
  }
  /** Returns the ack deadline (seconds) currently configured on {@code subscription}. */
  @Override
  public int ackDeadlineSeconds(SubscriptionPath subscription) throws IOException {
    GetSubscriptionRequest request =
        GetSubscriptionRequest.newBuilder()
                              .setSubscription(subscription.getPath())
                              .build();
    Subscription response = subscriberStub().getSubscription(request);
    return response.getAckDeadlineSeconds();
  }
  /** gRPC pulls never signal end-of-stream, so this client is never at EOF. */
  @Override
  public boolean isEOF() {
    return false;
  }
}
| apache-2.0 |
Esri/arcgis-templates-silverlight | src/TabbedRibbonVB/TabbedRibbonVB/Actions/ToggleCollapseAction.vb | 902 | 'Copyright 2013 Esri
'Licensed under the Apache License, Version 2.0 (the "License");
'You may not use this file except in compliance with the License.
'You may obtain a copy of the License at
'http://www.apache.org/licenses/LICENSE-2.0
'Unless required by applicable law or agreed to in writing, software
'distributed under the License is distributed on an "AS IS" BASIS,
'WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
'See the License for the specific language governing permissions and
'limitations under the License.
Imports System.Windows
Imports System.Windows.Interactivity
Namespace ESRI.ArcGIS.SilverlightMapApp.Actions
    ''' <summary>
    ''' Blend trigger action that flips the expanded/collapsed state of the
    ''' targeted <see cref="CollapsiblePanel"/> every time the trigger fires.
    ''' </summary>
    Public Class ToggleCollapseAction
        Inherits TargetedTriggerAction(Of CollapsiblePanel)
        ''' <summary>Toggles the panel; the trigger parameter is unused.</summary>
        Protected Overrides Sub Invoke(ByVal parameter As Object)
            Dim panel As CollapsiblePanel = Me.Target
            panel.IsExpanded = Not panel.IsExpanded
        End Sub
    End Class
End Namespace
| apache-2.0 |
Tycheo/coffeemud | com/planet_ink/coffee_mud/MOBS/Squirrel.java | 2521 | package com.planet_ink.coffee_mud.MOBS;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2013-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * A small, harmless woodland squirrel mob.
 */
public class Squirrel extends StdMOB
{
	@Override public String ID(){return "Squirrel";}

	public Squirrel()
	{
		super();
		// Seeded from the clock; used only to jitter the starting weight.
		final Random rand = new Random(System.currentTimeMillis());
		final int weightJitter = Math.abs(rand.nextInt() % 5);
		username="a squirrel";
		setDescription("It\\`s small, cute, and quick with a big expressive tail.");
		setDisplayText("A squirrel darts around.");
		CMLib.factions().setAlignment(this,Faction.Align.NEUTRAL);
		setMoney(0);
		basePhyStats.setWeight(4450 + weightJitter);
		setWimpHitPoint(2);
		basePhyStats().setDamage(2);
		baseCharStats().setStat(CharStats.STAT_INTELLIGENCE,1);
		baseCharStats().setMyRace(CMClass.getRace("Squirrel"));
		baseCharStats().getMyRace().startRacing(this,false);
		basePhyStats().setAbility(0);
		basePhyStats().setLevel(1);
		basePhyStats().setArmor(90);
		// Hit points rolled from level: level d11 + level.
		baseState.setHitPoints(CMLib.dice().roll(basePhyStats().level(),11,basePhyStats().level()));
		// Recompute and fill derived stats now that the base values are set.
		recoverMaxState();
		resetToMaxState();
		recoverPhyStats();
		recoverCharStats();
	}
}
| apache-2.0 |
jamesagnew/hapi-fhir | hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/introduction/changelog_2016.md | 95 | # Changelog: 2016
<th:block th:insert="fragment_changelog.md :: changelog('2016', '2016')"/>
| apache-2.0 |
debovis/webanno | webanno-brat/src/main/java/de/tudarmstadt/ukp/clarin/webanno/brat/controller/ArcCrossedMultipleSentenceException.java | 1274 | /*******************************************************************************
* Copyright 2013
* Ubiquitous Knowledge Processing (UKP) Lab and FG Language Technology
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.tudarmstadt.ukp.clarin.webanno.brat.controller;
/**
 * Exception thrown when an arc annotation is attempted between endpoints that
 * are not within the same sentence.
 *
 * @author Seid Muhie Yimam
 */
public class ArcCrossedMultipleSentenceException
    extends BratAnnotationException
{
    private static final long serialVersionUID = 1280015349963924638L;
    /**
     * @param message human-readable description of the rejected annotation.
     */
    public ArcCrossedMultipleSentenceException(String message)
    {
        super(message);
    }
}
michaelgallacher/intellij-community | java/idea-ui/src/com/intellij/openapi/roots/ui/configuration/libraryEditor/JavadocQuarantineStatusCleaner.java | 4651 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.roots.ui.configuration.libraryEditor;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.Task;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.sun.jna.platform.mac.XAttrUtil;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;
/**
* Files downloaded from Internet are marked as 'quarantined' by OS X.
* For such files opening urls of type file://path#fragment via
* <a href="https://developer.apple.com/library/mac/documentation/Carbon/Conceptual/LaunchServicesConcepts/LSCIntro/LSCIntro.html">
* Launch Services API
* </a>
* (used internally by {@link java.awt.Desktop#browse(URI)}) won't work as expected (fragment will be ignored on file opening).
* This class allows to clear quarantine status from folder containing Javadoc, if confirmed by user.
*/
public class JavadocQuarantineStatusCleaner {
  private static final Logger LOG = Logger.getInstance(JavadocQuarantineStatusCleaner.class);
  // Extended attribute OS X sets on files downloaded from the Internet.
  private static final String QUARANTINE_ATTRIBUTE = "com.apple.quarantine";

  /**
   * If {@code javadocFolder} is a quarantined local directory on OS X, asks the user
   * whether to clear the quarantine flag and, on confirmation, does so in a background task.
   * Must NOT be called on the EDT (it may perform filesystem access).
   *
   * @param javadocFolder folder containing the downloaded Javadoc.
   */
  public static void cleanIfNeeded(@NotNull VirtualFile javadocFolder) {
    Application application = ApplicationManager.getApplication();
    assert !application.isDispatchThread();
    // Quarantine only exists on OS X and only matters for local directories.
    if (!SystemInfo.isMac || !javadocFolder.isInLocalFileSystem() || !javadocFolder.isDirectory()) return;
    String folderPath = VfsUtilCore.virtualToIoFile(javadocFolder).getAbsolutePath();
    // UserDefinedFileAttributeView isn't supported by JDK for HFS+ extended attributes on OS X,
    // so we resort to JNA.
    if (XAttrUtil.getXAttr(folderPath, QUARANTINE_ATTRIBUTE) == null) return;
    // Ask for confirmation on the EDT; only then start the background cleanup.
    application.invokeLater(() -> {
      int result = Messages.showYesNoDialog(ApplicationBundle.message("quarantine.dialog.message"),
                                            ApplicationBundle.message("quarantine.dialog.title"),
                                            null);
      if (result == Messages.YES) {
        cleanQuarantineStatusInBackground(folderPath);
      }
    }, ModalityState.any());
  }

  /**
   * Walks {@code folderPath} recursively and removes the quarantine attribute from every
   * entry, reporting progress and surfacing failures as a warning notification.
   */
  private static void cleanQuarantineStatusInBackground(@NotNull String folderPath) {
    ProgressIndicatorBase progressIndicator = new ProgressIndicatorBase();
    String message = ApplicationBundle.message("quarantine.clean.progress", folderPath);
    ProgressManager.getInstance().runProcessWithProgressAsynchronously(new Task.Backgroundable(null, message) {
      @Override
      public void run(@NotNull ProgressIndicator indicator) {
        // Files.walk must be closed; try-with-resources handles that.
        try(Stream<Path> s = Files.walk(Paths.get(folderPath))) {
          s.forEach(p -> {
            ProgressManager.checkCanceled();
            XAttrUtil.removeXAttr(p.toFile().getAbsolutePath(), QUARANTINE_ATTRIBUTE);
          });
        }
        catch (IOException e) {
          // UncheckedIOException (instead of a bare RuntimeException) preserves the I/O
          // nature of the failure and its cause; still a RuntimeException for onError().
          throw new UncheckedIOException(e);
        }
      }
      @Override
      public void onError(@NotNull Exception error) {
        LOG.warn(error);
        new Notification(ApplicationBundle.message("quarantine.error.group"),
                         ApplicationBundle.message("quarantine.error.title"),
                         ApplicationBundle.message("quarantine.error.message"),
                         NotificationType.WARNING).notify(null);
      }
    }, progressIndicator);
  }
}
| apache-2.0 |
mmatz-ccri/geomesa | geomesa-core/src/main/scala/org/locationtech/geomesa/core/iterators/RecordTableIterator.scala | 1899 | /*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.core.iterators
import com.typesafe.scalalogging.slf4j.Logging
import org.apache.accumulo.core.data._
import org.apache.accumulo.core.iterators.{IteratorEnvironment, SortedKeyValueIterator}
/**
 * Iterator for the record table. Applies transforms and ECQL filters.
 *
 * The concrete setTop behavior is selected once, at init time, based on which
 * of filter/transform are configured, avoiding per-entry branching.
 */
class RecordTableIterator
    extends GeomesaFilteringIterator
    with HasFeatureType
    with SetTopInclude
    with SetTopFilter
    with SetTopTransform
    with SetTopFilterTransform {
  // Chosen execution path; assigned in init(), invoked once per top key.
  var setTopOptimized: (Key) => Unit = null
  override def init(source: SortedKeyValueIterator[Key, Value],
                    options: java.util.Map[String, String],
                    env: IteratorEnvironment) = {
    super.init(source, options, env)
    initFeatureType(options)
    init(featureType, options)
    // pick the execution path once based on the filters and transforms we need to apply
    // see org.locationtech.geomesa.core.iterators.IteratorFunctions
    setTopOptimized = (filter, transform) match {
      case (null, null) => setTopInclude
      case (_, null) => setTopFilter
      case (null, _) => setTopTransform
      case (_, _) => setTopFilterTransform
    }
  }
  // Delegate to whichever path was selected during init.
  override def setTopConditionally(): Unit = setTopOptimized(source.getTopKey)
}
| apache-2.0 |
GavinGu/kyli_v2 | webapp/s/xthena/htpicker/htpicker.css | 56 |
/* Compact, clickable add-on button inside the htPicker widget. */
.htPicker .add-on {
  padding: 2px;
  cursor: pointer;
}
| apache-2.0 |
hasinitg/airavata | airavata-api/airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/workspace_model_constants.cpp | 1201 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
#include "workspace_model_constants.h"
namespace apache { namespace airavata { namespace model { namespace workspace {
// Single shared instance of the Thrift-generated constants bundle.
const workspace_modelConstants g_workspace_model_constants;
// The workspace model declares no constants, so nothing to initialize here.
workspace_modelConstants::workspace_modelConstants() {
}
}}}} // namespace
| apache-2.0 |
cwooldridge/MassTransit.Persistence.MongoDb | MassTransit.Persistence.MongoDb.Tests/Sagas/AuctionSaga.cs | 2517 | using System;
using Magnum.StateMachine;
using MassTransit.Saga;
using Serilog;
namespace MassTransit.Persistence.MongoDb.Tests.Sagas
{
    /// <summary>
    /// MassTransit saga modelling a single auction: created with an opening bid,
    /// then accepts bids while in the Open state, tracking the current high bidder.
    /// </summary>
    public class AuctionSaga : SagaStateMachine<AuctionSaga>, ISaga
    {
        public static readonly ILogger Logger = Log.Logger.ForContext<AuctionSaga>();
        // State-machine definition: correlate PlaceBid by auction id; a CreateAuction
        // message initializes the saga and moves it to Open; bids are handled in Open.
        static AuctionSaga()
        {
            Define(
                () =>
                {
                    Correlate(Bid).By((saga, message) => saga.CorrelationId == message.AuctionId);
                    Initially(
                        When(Create).Then(
                            (saga, message) =>
                            {
                                saga.OpeningBid = message.OpeningBid;
                                saga.OwnerEmail = message.OwnerEmail;
                                saga.Title = message.Title;
                            }).TransitionTo(Open));
                    During(Open, When(Bid).Call((saga, message) => saga.Handle(message)));
                });
        }
        public AuctionSaga(Guid correlationId)
        {
            this.CorrelationId = correlationId;
        }
        // Highest bid so far; null until the first bid is placed.
        public decimal? CurrentBid { get; set; }
        // Email and bid id of the current high bidder.
        public string HighBidder { get; set; }
        public Guid HighBidId { get; set; }
        public decimal OpeningBid { get; set; }
        public string OwnerEmail { get; set; }
        public string Title { get; set; }
        // Saga states managed by the state-machine runtime.
        public static State Initial { get; set; }
        public static State Completed { get; set; }
        public static State Open { get; set; }
        public static State Closed { get; set; }
        // Events wired up by the Define() block above.
        public static Event<CreateAuction> Create { get; set; }
        public static Event<PlaceBid> Bid { get; set; }
        public Guid CorrelationId { get; set; }
        public IServiceBus Bus { get; set; }
        /// <summary>
        /// Applies an incoming bid: a higher (or first) bid outbids the previous
        /// high bidder; otherwise the incoming bid itself is immediately outbid.
        /// </summary>
        private void Handle(PlaceBid bid)
        {
            if (!this.CurrentBid.HasValue || bid.MaximumBid > this.CurrentBid)
            {
                if (this.HighBidder != null)
                {
                    this.Bus.Publish(new Outbid(this.HighBidId));
                }
                this.CurrentBid = bid.MaximumBid;
                this.HighBidder = bid.BidderEmail;
                this.HighBidId = bid.BidId;
            }
            else
            {
                // already outbid
                this.Bus.Publish(new Outbid(bid.BidId));
            }
        }
    }
} | apache-2.0 |
docker-library/repo-info | repos/php/local/7.4.28-zts-buster.md | 119050 | # `php:7.4.28-zts-buster`
## Docker Metadata
- Image ID: `sha256:26c749d3faa43f4ef1d91d75a708f59580a89c200bf7cb16fb4beffde1111169`
- Created: `2022-03-03T10:18:06.549558732Z`
- Virtual Size: ~ 385.67 Mb
(total size of all layers on-disk)
- Arch: `linux`/`amd64`
- Entrypoint: `["docker-php-entrypoint"]`
- Command: `["php","-a"]`
- Environment:
- `PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin`
- `PHPIZE_DEPS=autoconf dpkg-dev file g++ gcc libc-dev make pkg-config re2c`
- `PHP_INI_DIR=/usr/local/etc/php`
- `PHP_CFLAGS=-fstack-protector-strong -fpic -fpie -O2 -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64`
- `PHP_CPPFLAGS=-fstack-protector-strong -fpic -fpie -O2 -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64`
- `PHP_LDFLAGS=-Wl,-O1 -pie`
- `GPG_KEYS=42670A7FE4D0441C8E4632349E4FDC074A4EF02D 5A52880781F755608BF815FC910DEB46F53EA312`
- `PHP_VERSION=7.4.28`
- `PHP_URL=https://www.php.net/distributions/php-7.4.28.tar.xz`
- `PHP_ASC_URL=https://www.php.net/distributions/php-7.4.28.tar.xz.asc`
- `PHP_SHA256=9cc3b6f6217b60582f78566b3814532c4b71d517876c25013ae51811e65d8fce`
## `dpkg` (`.deb`-based packages)
### `dpkg` source package: `acl=2.2.53-4`
Binary Packages:
- `libacl1:amd64=2.2.53-4`
Licenses: (parsed from: `/usr/share/doc/libacl1/copyright`)
- `GPL-2`
- `GPL-2+`
- `LGPL-2+`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris acl=2.2.53-4
'http://deb.debian.org/debian/pool/main/a/acl/acl_2.2.53-4.dsc' acl_2.2.53-4.dsc 2330 SHA256:532eb4029659db74e6625adc2bd277144f33c92cb0603272d61693b069896a85
'http://deb.debian.org/debian/pool/main/a/acl/acl_2.2.53.orig.tar.gz' acl_2.2.53.orig.tar.gz 524300 SHA256:06be9865c6f418d851ff4494e12406568353b891ffe1f596b34693c387af26c7
'http://deb.debian.org/debian/pool/main/a/acl/acl_2.2.53.orig.tar.gz.asc' acl_2.2.53.orig.tar.gz.asc 833 SHA256:06849bece0b56a6a7269173abe101cff223bb9346d74027a3cd5ff80914abf4b
'http://deb.debian.org/debian/pool/main/a/acl/acl_2.2.53-4.debian.tar.xz' acl_2.2.53-4.debian.tar.xz 18572 SHA256:3e6571adea4886a9549bdc2323d5c55ee8f7dafb6a204513111d5943d2776dd8
```
Other potentially useful URLs:
- https://sources.debian.net/src/acl/2.2.53-4/ (for browsing the source)
- https://sources.debian.net/src/acl/2.2.53-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/acl/2.2.53-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `adduser=3.118`
Binary Packages:
- `adduser=3.118`
Licenses: (parsed from: `/usr/share/doc/adduser/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris adduser=3.118
'http://deb.debian.org/debian/pool/main/a/adduser/adduser_3.118.dsc' adduser_3.118.dsc 1670 SHA256:fc79bc37fcf5e5700546c78a80670bb7b34836d012595b343fe2304cac82917d
'http://deb.debian.org/debian/pool/main/a/adduser/adduser_3.118.tar.xz' adduser_3.118.tar.xz 212280 SHA256:3e9eea661c9aac6b2c791bfcc1de3a9c6a422d45c8f3d38ed417737ed3166ffc
```
Other potentially useful URLs:
- https://sources.debian.net/src/adduser/3.118/ (for browsing the source)
- https://sources.debian.net/src/adduser/3.118/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/adduser/3.118/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `apt=1.8.2.3`
Binary Packages:
- `apt=1.8.2.3`
- `libapt-pkg5.0:amd64=1.8.2.3`
Licenses: (parsed from: `/usr/share/doc/apt/copyright`, `/usr/share/doc/libapt-pkg5.0/copyright`)
- `GPL-2`
- `GPLv2+`
Source:
```console
$ apt-get source -qq --print-uris apt=1.8.2.3
'http://deb.debian.org/debian/pool/main/a/apt/apt_1.8.2.3.dsc' apt_1.8.2.3.dsc 2774 SHA256:03ed672edefe4badbb2c7b32332293403bb03feb2ea0777c0846939a2fcb8bba
'http://deb.debian.org/debian/pool/main/a/apt/apt_1.8.2.3.tar.xz' apt_1.8.2.3.tar.xz 2191868 SHA256:c21c9b18c4a26bc183432cb49b919af073862954f1ae8a204096b0a68c946d3b
```
Other potentially useful URLs:
- https://sources.debian.net/src/apt/1.8.2.3/ (for browsing the source)
- https://sources.debian.net/src/apt/1.8.2.3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/apt/1.8.2.3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `argon2=0~20171227-0.2`
Binary Packages:
- `libargon2-1:amd64=0~20171227-0.2`
Licenses: (parsed from: `/usr/share/doc/libargon2-1/copyright`)
- `Apache-2.0`
- `CC0`
Source:
```console
$ apt-get source -qq --print-uris argon2=0~20171227-0.2
'http://deb.debian.org/debian/pool/main/a/argon2/argon2_0%7e20171227-0.2.dsc' argon2_0~20171227-0.2.dsc 2108 SHA256:357d1e93318d7dd3bee401ee9cd92bd0f3ecaab3990013580a12306efda4ebf7
'http://deb.debian.org/debian/pool/main/a/argon2/argon2_0%7e20171227.orig.tar.gz' argon2_0~20171227.orig.tar.gz 1503745 SHA256:eaea0172c1f4ee4550d1b6c9ce01aab8d1ab66b4207776aa67991eb5872fdcd8
'http://deb.debian.org/debian/pool/main/a/argon2/argon2_0%7e20171227-0.2.debian.tar.xz' argon2_0~20171227-0.2.debian.tar.xz 6932 SHA256:49e630c0027ebbe0b53e3e692ce99da750e9bdfeddcebf303e595b4af5a2142f
```
Other potentially useful URLs:
- https://sources.debian.net/src/argon2/0~20171227-0.2/ (for browsing the source)
- https://sources.debian.net/src/argon2/0~20171227-0.2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/argon2/0~20171227-0.2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `attr=1:2.4.48-4`
Binary Packages:
- `libattr1:amd64=1:2.4.48-4`
Licenses: (parsed from: `/usr/share/doc/libattr1/copyright`)
- `GPL-2`
- `GPL-2+`
- `LGPL-2+`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris attr=1:2.4.48-4
'http://deb.debian.org/debian/pool/main/a/attr/attr_2.4.48-4.dsc' attr_2.4.48-4.dsc 2427 SHA256:e53c076f39f1be4186704c94bd32276fa4661a587c360d8da25a5c3abe40cb29
'http://deb.debian.org/debian/pool/main/a/attr/attr_2.4.48.orig.tar.gz' attr_2.4.48.orig.tar.gz 467840 SHA256:5ead72b358ec709ed00bbf7a9eaef1654baad937c001c044fe8b74c57f5324e7
'http://deb.debian.org/debian/pool/main/a/attr/attr_2.4.48.orig.tar.gz.asc' attr_2.4.48.orig.tar.gz.asc 833 SHA256:5d23c2c83cc13d170f1c209f48d0efa1fc46d16487b790e9996c5206dcfe0395
'http://deb.debian.org/debian/pool/main/a/attr/attr_2.4.48-4.debian.tar.xz' attr_2.4.48-4.debian.tar.xz 22388 SHA256:a491d226fb3b47aa65997406009893a4cc0628e2ffffe0d411179652dfeb6935
```
Other potentially useful URLs:
- https://sources.debian.net/src/attr/1:2.4.48-4/ (for browsing the source)
- https://sources.debian.net/src/attr/1:2.4.48-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/attr/1:2.4.48-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `audit=1:2.8.4-3`
Binary Packages:
- `libaudit-common=1:2.8.4-3`
- `libaudit1:amd64=1:2.8.4-3`
Licenses: (parsed from: `/usr/share/doc/libaudit-common/copyright`, `/usr/share/doc/libaudit1/copyright`)
- `GPL-1`
- `GPL-2`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris audit=1:2.8.4-3
'http://deb.debian.org/debian/pool/main/a/audit/audit_2.8.4-3.dsc' audit_2.8.4-3.dsc 2483 SHA256:101fd82f4c7af2f8753060b494ac46204b0eee1ffe5d1e113a493b99571af186
'http://deb.debian.org/debian/pool/main/a/audit/audit_2.8.4.orig.tar.gz' audit_2.8.4.orig.tar.gz 1123889 SHA256:a410694d09fc5708d980a61a5abcb9633a591364f1ecc7e97ad5daef9c898c38
'http://deb.debian.org/debian/pool/main/a/audit/audit_2.8.4-3.debian.tar.xz' audit_2.8.4-3.debian.tar.xz 16712 SHA256:2b4b16cf58c3a6180d380bd4ad1d30a38fa22826ca3c1233c5298138427e29d0
```
Other potentially useful URLs:
- https://sources.debian.net/src/audit/1:2.8.4-3/ (for browsing the source)
- https://sources.debian.net/src/audit/1:2.8.4-3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/audit/1:2.8.4-3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `autoconf=2.69-11`
Binary Packages:
- `autoconf=2.69-11`
Licenses: (parsed from: `/usr/share/doc/autoconf/copyright`)
- `GFDL-1.3`
- `GFDL-1.3+`
- `GPL-2`
- `GPL-2+`
- `GPL-2+ with Autoconf exception`
- `GPL-3`
- `GPL-3+`
- `GPL-3+ with Autoconf exception`
- `GPL-3+ with Texinfo exception`
- `MIT-X-Consortium`
- `no-modification`
- `other`
- `permissive`
- `permissive-long-disclaimer`
- `permissive-short-disclaimer`
- `permissive-without-disclaimer`
- `permissive-without-notices-or-disclaimer`
Source:
```console
$ apt-get source -qq --print-uris autoconf=2.69-11
'http://deb.debian.org/debian/pool/main/a/autoconf/autoconf_2.69-11.dsc' autoconf_2.69-11.dsc 1948 SHA256:249d25370d4f4d1d0cf7b37bfd178bb6fa87011566dd82230991f64814a39dde
'http://deb.debian.org/debian/pool/main/a/autoconf/autoconf_2.69.orig.tar.xz' autoconf_2.69.orig.tar.xz 1214744 SHA256:64ebcec9f8ac5b2487125a86a7760d2591ac9e1d3dbd59489633f9de62a57684
'http://deb.debian.org/debian/pool/main/a/autoconf/autoconf_2.69-11.debian.tar.xz' autoconf_2.69-11.debian.tar.xz 23540 SHA256:885b3947fdead5b737f6437b80a90a41c5d611791573c5d0cfef50a59c943c1b
```
Other potentially useful URLs:
- https://sources.debian.net/src/autoconf/2.69-11/ (for browsing the source)
- https://sources.debian.net/src/autoconf/2.69-11/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/autoconf/2.69-11/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `base-files=10.3+deb10u11`
Binary Packages:
- `base-files=10.3+deb10u11`
Licenses: (parsed from: `/usr/share/doc/base-files/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris base-files=10.3+deb10u11
'http://deb.debian.org/debian/pool/main/b/base-files/base-files_10.3%2bdeb10u11.dsc' base-files_10.3+deb10u11.dsc 1107 SHA256:c330a188675075328a715fe1d3ace68ed366293dbd002ead99e17215debab4ee
'http://deb.debian.org/debian/pool/main/b/base-files/base-files_10.3%2bdeb10u11.tar.xz' base-files_10.3+deb10u11.tar.xz 65496 SHA256:8b3071976d9d896c0fa3b2481d81c69c7d627ab9f13eee848bddc8ed4f446794
```
Other potentially useful URLs:
- https://sources.debian.net/src/base-files/10.3+deb10u11/ (for browsing the source)
- https://sources.debian.net/src/base-files/10.3+deb10u11/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/base-files/10.3+deb10u11/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `base-passwd=3.5.46`
Binary Packages:
- `base-passwd=3.5.46`
Licenses: (parsed from: `/usr/share/doc/base-passwd/copyright`)
- `GPL-2`
- `PD`
Source:
```console
$ apt-get source -qq --print-uris base-passwd=3.5.46
'http://deb.debian.org/debian/pool/main/b/base-passwd/base-passwd_3.5.46.dsc' base-passwd_3.5.46.dsc 1651 SHA256:98b5d79c9f06e05e9f41013f8fee48b08d0ffe398653b6f8bbd93c1ae1f24bd4
'http://deb.debian.org/debian/pool/main/b/base-passwd/base-passwd_3.5.46.tar.xz' base-passwd_3.5.46.tar.xz 52780 SHA256:da15e380557b5a00cdc14018e3da6cbeaaadc786f2c3cb5b8f1fb4acc150b3da
```
Other potentially useful URLs:
- https://sources.debian.net/src/base-passwd/3.5.46/ (for browsing the source)
- https://sources.debian.net/src/base-passwd/3.5.46/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/base-passwd/3.5.46/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `bash=5.0-4`
Binary Packages:
- `bash=5.0-4`
Licenses: (parsed from: `/usr/share/doc/bash/copyright`)
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris bash=5.0-4
'http://deb.debian.org/debian/pool/main/b/bash/bash_5.0-4.dsc' bash_5.0-4.dsc 2305 SHA256:fe746c72de6e61866a0ed4e21a5b9d154966a8684ec3bdf5bacc70d5351f6282
'http://deb.debian.org/debian/pool/main/b/bash/bash_5.0.orig.tar.xz' bash_5.0.orig.tar.xz 5554808 SHA256:893858ba233d65bda38039e99dd96a4102b2f6a2d5e6c1c546e0794a60beed97
'http://deb.debian.org/debian/pool/main/b/bash/bash_5.0-4.debian.tar.xz' bash_5.0-4.debian.tar.xz 91884 SHA256:1e33dff5dd8604fa4205a1746828063cd96a1e635355f3626b54fef155b8c4e5
```
Other potentially useful URLs:
- https://sources.debian.net/src/bash/5.0-4/ (for browsing the source)
- https://sources.debian.net/src/bash/5.0-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/bash/5.0-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `binutils=2.31.1-16`
Binary Packages:
- `binutils=2.31.1-16`
- `binutils-common:amd64=2.31.1-16`
- `binutils-x86-64-linux-gnu=2.31.1-16`
- `libbinutils:amd64=2.31.1-16`
Licenses: (parsed from: `/usr/share/doc/binutils/copyright`, `/usr/share/doc/binutils-common/copyright`, `/usr/share/doc/binutils-x86-64-linux-gnu/copyright`, `/usr/share/doc/libbinutils/copyright`)
- `GFDL`
- `GPL`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris binutils=2.31.1-16
'http://deb.debian.org/debian/pool/main/b/binutils/binutils_2.31.1-16.dsc' binutils_2.31.1-16.dsc 11421 SHA256:ec76c13684d922a3619d7ec982db191714927bde6de6a3ff89e95d1ce7a61f33
'http://deb.debian.org/debian/pool/main/b/binutils/binutils_2.31.1.orig.tar.xz' binutils_2.31.1.orig.tar.xz 21649228 SHA256:e398a2d579faa0f2b5a988add5f7481af8e21a21f63b6ea5702e6f517960c5eb
'http://deb.debian.org/debian/pool/main/b/binutils/binutils_2.31.1-16.debian.tar.xz' binutils_2.31.1-16.debian.tar.xz 127464 SHA256:15fc82a7c682da6bcbf56caf57da8f059655369cbfeb58b8312040e53e4fa11d
```
Other potentially useful URLs:
- https://sources.debian.net/src/binutils/2.31.1-16/ (for browsing the source)
- https://sources.debian.net/src/binutils/2.31.1-16/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/binutils/2.31.1-16/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `bzip2=1.0.6-9.2~deb10u1`
Binary Packages:
- `bzip2=1.0.6-9.2~deb10u1`
- `libbz2-1.0:amd64=1.0.6-9.2~deb10u1`
Licenses: (parsed from: `/usr/share/doc/bzip2/copyright`, `/usr/share/doc/libbz2-1.0/copyright`)
- `BSD-variant`
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris bzip2=1.0.6-9.2~deb10u1
'http://deb.debian.org/debian/pool/main/b/bzip2/bzip2_1.0.6-9.2%7edeb10u1.dsc' bzip2_1.0.6-9.2~deb10u1.dsc 2380 SHA256:f518d7c599e1028002a739bd9123fa23767d74e1c5cf1d05f36eb7de9fc25b5c
'http://deb.debian.org/debian/pool/main/b/bzip2/bzip2_1.0.6.orig.tar.bz2' bzip2_1.0.6.orig.tar.bz2 708737 SHA256:d70a9ccd8bdf47e302d96c69fecd54925f45d9c7b966bb4ef5f56b770960afa7
'http://deb.debian.org/debian/pool/main/b/bzip2/bzip2_1.0.6-9.2%7edeb10u1.debian.tar.bz2' bzip2_1.0.6-9.2~deb10u1.debian.tar.bz2 27542 SHA256:44900f7371503fe35ea7d3aa5b8ab8c677300be9b0d5277838d0c874be9c8541
```
Other potentially useful URLs:
- https://sources.debian.net/src/bzip2/1.0.6-9.2~deb10u1/ (for browsing the source)
- https://sources.debian.net/src/bzip2/1.0.6-9.2~deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/bzip2/1.0.6-9.2~deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `ca-certificates=20200601~deb10u2`
Binary Packages:
- `ca-certificates=20200601~deb10u2`
Licenses: (parsed from: `/usr/share/doc/ca-certificates/copyright`)
- `GPL-2`
- `GPL-2+`
- `MPL-2.0`
Source:
```console
$ apt-get source -qq --print-uris ca-certificates=20200601~deb10u2
'http://deb.debian.org/debian/pool/main/c/ca-certificates/ca-certificates_20200601%7edeb10u2.dsc' ca-certificates_20200601~deb10u2.dsc 1907 SHA256:b89cbd8c235e131ee10cb85a9bd4b3e429874c1e0577c5ed35121f8590d4d029
'http://deb.debian.org/debian/pool/main/c/ca-certificates/ca-certificates_20200601%7edeb10u2.tar.xz' ca-certificates_20200601~deb10u2.tar.xz 245804 SHA256:13ffd04d36230309ff383ad4ccbefb1852b1483f0ac3da75b4979906933ba5e8
```
Other potentially useful URLs:
- https://sources.debian.net/src/ca-certificates/20200601~deb10u2/ (for browsing the source)
- https://sources.debian.net/src/ca-certificates/20200601~deb10u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/ca-certificates/20200601~deb10u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `cdebconf=0.249`
Binary Packages:
- `libdebconfclient0:amd64=0.249`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris cdebconf=0.249
'http://deb.debian.org/debian/pool/main/c/cdebconf/cdebconf_0.249.dsc' cdebconf_0.249.dsc 2783 SHA256:6a0061589add058e5130e9be20ea45056701fd71ac0d26defd9a8c53758486f1
'http://deb.debian.org/debian/pool/main/c/cdebconf/cdebconf_0.249.tar.xz' cdebconf_0.249.tar.xz 275256 SHA256:f7211ab20bfde7a0726cd566fd004b08e7ee358d238e35ea215f4fe0b3883b3e
```
Other potentially useful URLs:
- https://sources.debian.net/src/cdebconf/0.249/ (for browsing the source)
- https://sources.debian.net/src/cdebconf/0.249/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/cdebconf/0.249/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `coreutils=8.30-3`
Binary Packages:
- `coreutils=8.30-3`
Licenses: (parsed from: `/usr/share/doc/coreutils/copyright`)
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris coreutils=8.30-3
'http://deb.debian.org/debian/pool/main/c/coreutils/coreutils_8.30-3.dsc' coreutils_8.30-3.dsc 1861 SHA256:106031a57a2ab2ba46b61083035e2ccb438c85a2b3506a8198b67868dde1546d
'http://deb.debian.org/debian/pool/main/c/coreutils/coreutils_8.30.orig.tar.xz' coreutils_8.30.orig.tar.xz 5359532 SHA256:e831b3a86091496cdba720411f9748de81507798f6130adeaef872d206e1b057
'http://deb.debian.org/debian/pool/main/c/coreutils/coreutils_8.30-3.debian.tar.xz' coreutils_8.30-3.debian.tar.xz 32808 SHA256:9179d45fb51d07a8743c4d58464459330eb6d4b489d59641d70c3bd9f579b694
```
Other potentially useful URLs:
- https://sources.debian.net/src/coreutils/8.30-3/ (for browsing the source)
- https://sources.debian.net/src/coreutils/8.30-3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/coreutils/8.30-3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `curl=7.64.0-4+deb10u2`
Binary Packages:
- `curl=7.64.0-4+deb10u2`
- `libcurl4:amd64=7.64.0-4+deb10u2`
Licenses: (parsed from: `/usr/share/doc/curl/copyright`, `/usr/share/doc/libcurl4/copyright`)
- `BSD-3-Clause`
- `BSD-4-Clause`
- `ISC`
- `curl`
- `other`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris curl=7.64.0-4+deb10u2
'http://deb.debian.org/debian/pool/main/c/curl/curl_7.64.0-4%2bdeb10u2.dsc' curl_7.64.0-4+deb10u2.dsc 2719 SHA256:3cc585f4c22c0c001527f5f26f5a6154a86d1df9752aa118bf7d8e892ec4fac2
'http://deb.debian.org/debian/pool/main/c/curl/curl_7.64.0.orig.tar.gz' curl_7.64.0.orig.tar.gz 4032645 SHA256:cb90d2eb74d4e358c1ed1489f8e3af96b50ea4374ad71f143fa4595e998d81b5
'http://deb.debian.org/debian/pool/main/c/curl/curl_7.64.0-4%2bdeb10u2.debian.tar.xz' curl_7.64.0-4+deb10u2.debian.tar.xz 43572 SHA256:6b7f793e0cd41e2ccb49301ad4e58c83282f46be7c9415c9280164834efd0e52
```
Other potentially useful URLs:
- https://sources.debian.net/src/curl/7.64.0-4+deb10u2/ (for browsing the source)
- https://sources.debian.net/src/curl/7.64.0-4+deb10u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/curl/7.64.0-4+deb10u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `cyrus-sasl2=2.1.27+dfsg-1+deb10u2`
Binary Packages:
- `libsasl2-2:amd64=2.1.27+dfsg-1+deb10u2`
- `libsasl2-modules-db:amd64=2.1.27+dfsg-1+deb10u2`
Licenses: (parsed from: `/usr/share/doc/libsasl2-2/copyright`, `/usr/share/doc/libsasl2-modules-db/copyright`)
- `BSD-4-clause`
- `GPL-3`
- `GPL-3+`
Source:
```console
$ apt-get source -qq --print-uris cyrus-sasl2=2.1.27+dfsg-1+deb10u2
'http://security.debian.org/debian-security/pool/updates/main/c/cyrus-sasl2/cyrus-sasl2_2.1.27%2bdfsg-1%2bdeb10u2.dsc' cyrus-sasl2_2.1.27+dfsg-1+deb10u2.dsc 3583 SHA256:64f060b2e272069c2a6f89025d586c08855179dc03425b55848c2e6bb3e5f85e
'http://security.debian.org/debian-security/pool/updates/main/c/cyrus-sasl2/cyrus-sasl2_2.1.27%2bdfsg.orig.tar.xz' cyrus-sasl2_2.1.27+dfsg.orig.tar.xz 2058596 SHA256:108b0c691c423837264f05abb559ea76c3dfdd91246555e8abe87c129a6e37cd
'http://security.debian.org/debian-security/pool/updates/main/c/cyrus-sasl2/cyrus-sasl2_2.1.27%2bdfsg-1%2bdeb10u2.debian.tar.xz' cyrus-sasl2_2.1.27+dfsg-1+deb10u2.debian.tar.xz 101064 SHA256:43296455617072928e3c409b3a40b08e98ad335b98fd40fc2a0d921bab72cf97
```
Other potentially useful URLs:
- https://sources.debian.net/src/cyrus-sasl2/2.1.27+dfsg-1+deb10u2/ (for browsing the source)
- https://sources.debian.net/src/cyrus-sasl2/2.1.27+dfsg-1+deb10u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/cyrus-sasl2/2.1.27+dfsg-1+deb10u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `dash=0.5.10.2-5`
Binary Packages:
- `dash=0.5.10.2-5`
Licenses: (parsed from: `/usr/share/doc/dash/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris dash=0.5.10.2-5
'http://deb.debian.org/debian/pool/main/d/dash/dash_0.5.10.2-5.dsc' dash_0.5.10.2-5.dsc 1756 SHA256:6255cf35f61df5122637856ad0912986de1c20875177932de1c971b7bbbbd848
'http://deb.debian.org/debian/pool/main/d/dash/dash_0.5.10.2.orig.tar.gz' dash_0.5.10.2.orig.tar.gz 225196 SHA256:3c663919dc5c66ec991da14c7cf7e0be8ad00f3db73986a987c118862b5f6071
'http://deb.debian.org/debian/pool/main/d/dash/dash_0.5.10.2-5.debian.tar.xz' dash_0.5.10.2-5.debian.tar.xz 41804 SHA256:fabf27bd78778b151143ed598a6b65019cfce5dd087d9693b848346459951d24
```
Other potentially useful URLs:
- https://sources.debian.net/src/dash/0.5.10.2-5/ (for browsing the source)
- https://sources.debian.net/src/dash/0.5.10.2-5/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/dash/0.5.10.2-5/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `db5.3=5.3.28+dfsg1-0.5`
Binary Packages:
- `libdb5.3:amd64=5.3.28+dfsg1-0.5`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris db5.3=5.3.28+dfsg1-0.5
'http://deb.debian.org/debian/pool/main/d/db5.3/db5.3_5.3.28%2bdfsg1-0.5.dsc' db5.3_5.3.28+dfsg1-0.5.dsc 2804 SHA256:600ef735e47273c7e8de0a9bbbf2d6f31cb1d2851117f94776d7952588c0ecc4
'http://deb.debian.org/debian/pool/main/d/db5.3/db5.3_5.3.28%2bdfsg1.orig.tar.xz' db5.3_5.3.28+dfsg1.orig.tar.xz 19723860 SHA256:b19bf3dd8ce74b95a7b215be9a7c8489e8e8f18da60d64d6340a06e75f497749
'http://deb.debian.org/debian/pool/main/d/db5.3/db5.3_5.3.28%2bdfsg1-0.5.debian.tar.xz' db5.3_5.3.28+dfsg1-0.5.debian.tar.xz 29128 SHA256:682c1736c1b5f3afbd90cf24e085a0437821ae595dc54aeef8c09ddd1c3d05fe
```
Other potentially useful URLs:
- https://sources.debian.net/src/db5.3/5.3.28+dfsg1-0.5/ (for browsing the source)
- https://sources.debian.net/src/db5.3/5.3.28+dfsg1-0.5/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/db5.3/5.3.28+dfsg1-0.5/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `debconf=1.5.71+deb10u1`
Binary Packages:
- `debconf=1.5.71+deb10u1`
Licenses: (parsed from: `/usr/share/doc/debconf/copyright`)
- `BSD-2-clause`
Source:
```console
$ apt-get source -qq --print-uris debconf=1.5.71+deb10u1
'http://deb.debian.org/debian/pool/main/d/debconf/debconf_1.5.71%2bdeb10u1.dsc' debconf_1.5.71+deb10u1.dsc 2199 SHA256:9bad49c9ec67cfe5b516813a8643e8c22d89434fee6947c16c27ab5cbb99c7d5
'http://deb.debian.org/debian/pool/main/d/debconf/debconf_1.5.71%2bdeb10u1.tar.xz' debconf_1.5.71+deb10u1.tar.xz 570832 SHA256:cfa00cb4ac50919b8a1e3a9190a06a248e83e3f0362fa70e49237a3a2686dc61
```
Other potentially useful URLs:
- https://sources.debian.net/src/debconf/1.5.71+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/debconf/1.5.71+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/debconf/1.5.71+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `debian-archive-keyring=2019.1+deb10u1`
Binary Packages:
- `debian-archive-keyring=2019.1+deb10u1`
Licenses: (parsed from: `/usr/share/doc/debian-archive-keyring/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris debian-archive-keyring=2019.1+deb10u1
'http://deb.debian.org/debian/pool/main/d/debian-archive-keyring/debian-archive-keyring_2019.1%2bdeb10u1.dsc' debian-archive-keyring_2019.1+deb10u1.dsc 1878 SHA256:5e331767f3b49a6da251e0723604ac953496536965dfddc1d527d23d028f0de0
'http://deb.debian.org/debian/pool/main/d/debian-archive-keyring/debian-archive-keyring_2019.1%2bdeb10u1.tar.xz' debian-archive-keyring_2019.1+deb10u1.tar.xz 149144 SHA256:ffb63bc54d67f3d9125ebbb7298fdf3be1d18a6df1816d5ae96f1cf0f141eb39
```
Other potentially useful URLs:
- https://sources.debian.net/src/debian-archive-keyring/2019.1+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/debian-archive-keyring/2019.1+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/debian-archive-keyring/2019.1+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `debianutils=4.8.6.1`
Binary Packages:
- `debianutils=4.8.6.1`
Licenses: (parsed from: `/usr/share/doc/debianutils/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris debianutils=4.8.6.1
'http://deb.debian.org/debian/pool/main/d/debianutils/debianutils_4.8.6.1.dsc' debianutils_4.8.6.1.dsc 1625 SHA256:fa869200410510cdefc85c89755d21ac054836a18b6916aedeba472e4b0567bb
'http://deb.debian.org/debian/pool/main/d/debianutils/debianutils_4.8.6.1.tar.xz' debianutils_4.8.6.1.tar.xz 156604 SHA256:099f1e8a7278b26145a2ba2dda84c4118403bfab38c8d7070a6235a7ffcb55ed
```
Other potentially useful URLs:
- https://sources.debian.net/src/debianutils/4.8.6.1/ (for browsing the source)
- https://sources.debian.net/src/debianutils/4.8.6.1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/debianutils/4.8.6.1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `diffutils=1:3.7-3`
Binary Packages:
- `diffutils=1:3.7-3`
Licenses: (parsed from: `/usr/share/doc/diffutils/copyright`)
- `GFDL`
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris diffutils=1:3.7-3
'http://deb.debian.org/debian/pool/main/d/diffutils/diffutils_3.7-3.dsc' diffutils_3.7-3.dsc 1453 SHA256:99dee94cec05454a65a9cb542bea1720dbd4c511d13f9784c9e3741e76a9b9ba
'http://deb.debian.org/debian/pool/main/d/diffutils/diffutils_3.7.orig.tar.xz' diffutils_3.7.orig.tar.xz 1448828 SHA256:b3a7a6221c3dc916085f0d205abf6b8e1ba443d4dd965118da364a1dc1cb3a26
'http://deb.debian.org/debian/pool/main/d/diffutils/diffutils_3.7-3.debian.tar.xz' diffutils_3.7-3.debian.tar.xz 11116 SHA256:a455228f12283b5f3c0165db4ab9b12071adc37fb9dd50dcb5e1b8851c524f1f
```
Other potentially useful URLs:
- https://sources.debian.net/src/diffutils/1:3.7-3/ (for browsing the source)
- https://sources.debian.net/src/diffutils/1:3.7-3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/diffutils/1:3.7-3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `dpkg=1.19.7`
Binary Packages:
- `dpkg=1.19.7`
- `dpkg-dev=1.19.7`
- `libdpkg-perl=1.19.7`
Licenses: (parsed from: `/usr/share/doc/dpkg/copyright`, `/usr/share/doc/dpkg-dev/copyright`, `/usr/share/doc/libdpkg-perl/copyright`)
- `BSD-2-clause`
- `GPL-2`
- `GPL-2+`
- `public-domain-md5`
- `public-domain-s-s-d`
Source:
```console
$ apt-get source -qq --print-uris dpkg=1.19.7
'http://deb.debian.org/debian/pool/main/d/dpkg/dpkg_1.19.7.dsc' dpkg_1.19.7.dsc 2103 SHA256:098b285d5fc7add8972e5b2b3678027bba3f3fe01962e5176db2fbff33bbd8e3
'http://deb.debian.org/debian/pool/main/d/dpkg/dpkg_1.19.7.tar.xz' dpkg_1.19.7.tar.xz 4716724 SHA256:4c27fededf620c0aa522fff1a48577ba08144445341257502e7730f2b1a296e8
```
Other potentially useful URLs:
- https://sources.debian.net/src/dpkg/1.19.7/ (for browsing the source)
- https://sources.debian.net/src/dpkg/1.19.7/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/dpkg/1.19.7/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `e2fsprogs=1.44.5-1+deb10u3`
Binary Packages:
- `e2fsprogs=1.44.5-1+deb10u3`
- `libcom-err2:amd64=1.44.5-1+deb10u3`
- `libext2fs2:amd64=1.44.5-1+deb10u3`
- `libss2:amd64=1.44.5-1+deb10u3`
Licenses: (parsed from: `/usr/share/doc/e2fsprogs/copyright`, `/usr/share/doc/libcom-err2/copyright`, `/usr/share/doc/libext2fs2/copyright`, `/usr/share/doc/libss2/copyright`)
- `GPL-2`
- `LGPL-2`
Source:
```console
$ apt-get source -qq --print-uris e2fsprogs=1.44.5-1+deb10u3
'http://deb.debian.org/debian/pool/main/e/e2fsprogs/e2fsprogs_1.44.5-1%2bdeb10u3.dsc' e2fsprogs_1.44.5-1+deb10u3.dsc 2903 SHA256:acdc31d6fd491f9db97aabc96340559d8492b98e3549df32d8369690e03058dc
'http://deb.debian.org/debian/pool/main/e/e2fsprogs/e2fsprogs_1.44.5.orig.tar.gz' e2fsprogs_1.44.5.orig.tar.gz 7619237 SHA256:2e211fae27ef74d5af4a4e40b10b8df7f87c655933bd171aab4889bfc4e6d1cc
'http://deb.debian.org/debian/pool/main/e/e2fsprogs/e2fsprogs_1.44.5.orig.tar.gz.asc' e2fsprogs_1.44.5.orig.tar.gz.asc 488 SHA256:c0e3e4e51f46c005890963b005015b784b2f19e291a16a15681b9906528f557e
'http://deb.debian.org/debian/pool/main/e/e2fsprogs/e2fsprogs_1.44.5-1%2bdeb10u3.debian.tar.xz' e2fsprogs_1.44.5-1+deb10u3.debian.tar.xz 82412 SHA256:0114857448922a218613f369f665f03f1b1435004c9d79ce5ee1a8a8a6cec53f
```
Other potentially useful URLs:
- https://sources.debian.net/src/e2fsprogs/1.44.5-1+deb10u3/ (for browsing the source)
- https://sources.debian.net/src/e2fsprogs/1.44.5-1+deb10u3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/e2fsprogs/1.44.5-1+deb10u3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `file=1:5.35-4+deb10u2`
Binary Packages:
- `file=1:5.35-4+deb10u2`
- `libmagic-mgc=1:5.35-4+deb10u2`
- `libmagic1:amd64=1:5.35-4+deb10u2`
Licenses: (parsed from: `/usr/share/doc/file/copyright`, `/usr/share/doc/libmagic-mgc/copyright`, `/usr/share/doc/libmagic1/copyright`)
- `BSD-2-Clause-alike`
- `BSD-2-Clause-netbsd`
- `BSD-2-Clause-regents`
- `MIT-Old-Style-with-legal-disclaimer-2`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris file=1:5.35-4+deb10u2
'http://deb.debian.org/debian/pool/main/f/file/file_5.35-4%2bdeb10u2.dsc' file_5.35-4+deb10u2.dsc 1984 SHA256:765a08b9476e3b39c0aeedc1907813939f4f102b4b17a7a693956b08909fcd32
'http://deb.debian.org/debian/pool/main/f/file/file_5.35.orig.tar.xz' file_5.35.orig.tar.xz 643268 SHA256:60b5b8bc762d35452c7995f3db7e8a5e2004d736b8763f086585a5b1af57a632
'http://deb.debian.org/debian/pool/main/f/file/file_5.35-4%2bdeb10u2.debian.tar.xz' file_5.35-4+deb10u2.debian.tar.xz 56428 SHA256:d1e8b940bb6e2c4f9e4ca32a8c9b23acf7e06c7d91f5ae2cfc7128edf41c8c5b
```
Other potentially useful URLs:
- https://sources.debian.net/src/file/1:5.35-4+deb10u2/ (for browsing the source)
- https://sources.debian.net/src/file/1:5.35-4+deb10u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/file/1:5.35-4+deb10u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `findutils=4.6.0+git+20190209-2`
Binary Packages:
- `findutils=4.6.0+git+20190209-2`
Licenses: (parsed from: `/usr/share/doc/findutils/copyright`)
- `GFDL-1.3`
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris findutils=4.6.0+git+20190209-2
'http://deb.debian.org/debian/pool/main/f/findutils/findutils_4.6.0%2bgit%2b20190209-2.dsc' findutils_4.6.0+git+20190209-2.dsc 2137 SHA256:e09430f44f976ee0e51e3226543247668b4ef88c05d14a84ed2d5a6f1bd07421
'http://deb.debian.org/debian/pool/main/f/findutils/findutils_4.6.0%2bgit%2b20190209.orig.tar.xz' findutils_4.6.0+git+20190209.orig.tar.xz 1893084 SHA256:6832b3f6ddc0e2718795e6732ea40cc5309b948505f55fb9935919d6aaac7e9d
'http://deb.debian.org/debian/pool/main/f/findutils/findutils_4.6.0%2bgit%2b20190209-2.debian.tar.xz' findutils_4.6.0+git+20190209-2.debian.tar.xz 26628 SHA256:d6f4c6fedc27cf5d616c9fbf41a46b8fb8b078f1f21045b484419b145037e849
```
Other potentially useful URLs:
- https://sources.debian.net/src/findutils/4.6.0+git+20190209-2/ (for browsing the source)
- https://sources.debian.net/src/findutils/4.6.0+git+20190209-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/findutils/4.6.0+git+20190209-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gcc-8=8.3.0-6`
Binary Packages:
- `cpp-8=8.3.0-6`
- `g++-8=8.3.0-6`
- `gcc-8=8.3.0-6`
- `gcc-8-base:amd64=8.3.0-6`
- `libasan5:amd64=8.3.0-6`
- `libatomic1:amd64=8.3.0-6`
- `libcc1-0:amd64=8.3.0-6`
- `libgcc-8-dev:amd64=8.3.0-6`
- `libgcc1:amd64=1:8.3.0-6`
- `libgomp1:amd64=8.3.0-6`
- `libitm1:amd64=8.3.0-6`
- `liblsan0:amd64=8.3.0-6`
- `libmpx2:amd64=8.3.0-6`
- `libquadmath0:amd64=8.3.0-6`
- `libstdc++-8-dev:amd64=8.3.0-6`
- `libstdc++6:amd64=8.3.0-6`
- `libtsan0:amd64=8.3.0-6`
- `libubsan1:amd64=8.3.0-6`
Licenses: (parsed from: `/usr/share/doc/cpp-8/copyright`, `/usr/share/doc/g++-8/copyright`, `/usr/share/doc/gcc-8/copyright`, `/usr/share/doc/gcc-8-base/copyright`, `/usr/share/doc/libasan5/copyright`, `/usr/share/doc/libatomic1/copyright`, `/usr/share/doc/libcc1-0/copyright`, `/usr/share/doc/libgcc-8-dev/copyright`, `/usr/share/doc/libgcc1/copyright`, `/usr/share/doc/libgomp1/copyright`, `/usr/share/doc/libitm1/copyright`, `/usr/share/doc/liblsan0/copyright`, `/usr/share/doc/libmpx2/copyright`, `/usr/share/doc/libquadmath0/copyright`, `/usr/share/doc/libstdc++-8-dev/copyright`, `/usr/share/doc/libstdc++6/copyright`, `/usr/share/doc/libtsan0/copyright`, `/usr/share/doc/libubsan1/copyright`)
- `Artistic`
- `GFDL-1.2`
- `GPL`
- `GPL-2`
- `GPL-3`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris gcc-8=8.3.0-6
'http://deb.debian.org/debian/pool/main/g/gcc-8/gcc-8_8.3.0-6.dsc' gcc-8_8.3.0-6.dsc 32433 SHA256:3b380579af74f1a325a07cc5798f8bff5206f0820fcac5bf64ff2bbd0466867d
'http://deb.debian.org/debian/pool/main/g/gcc-8/gcc-8_8.3.0.orig.tar.gz' gcc-8_8.3.0.orig.tar.gz 87764363 SHA256:ee3fd608f66e5737f20cf71b176cfbf58f7c1d190ad6def33d57610cdae8eac2
'http://deb.debian.org/debian/pool/main/g/gcc-8/gcc-8_8.3.0-6.diff.gz' gcc-8_8.3.0-6.diff.gz 704334 SHA256:211e5e1022e115abbcb9eeb39cf4bf84958c4e8469c0cbe430569947a04c5415
```
Other potentially useful URLs:
- https://sources.debian.net/src/gcc-8/8.3.0-6/ (for browsing the source)
- https://sources.debian.net/src/gcc-8/8.3.0-6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gcc-8/8.3.0-6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gcc-defaults=1.181`
Binary Packages:
- `cpp=4:8.3.0-1`
- `g++=4:8.3.0-1`
- `gcc=4:8.3.0-1`
Licenses: (parsed from: `/usr/share/doc/cpp/copyright`, `/usr/share/doc/g++/copyright`, `/usr/share/doc/gcc/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris gcc-defaults=1.181
'http://deb.debian.org/debian/pool/main/g/gcc-defaults/gcc-defaults_1.181.dsc' gcc-defaults_1.181.dsc 15508 SHA256:d89d80502009816bac8e77c423c3f7d4e6fb4b684f036fae785dacf4454ddc75
'http://deb.debian.org/debian/pool/main/g/gcc-defaults/gcc-defaults_1.181.tar.gz' gcc-defaults_1.181.tar.gz 72227 SHA256:39c34b070fc29223ba42ae6d53653a8f02fdbc0e9d6ca3245de9b19d2c6e9d07
```
Other potentially useful URLs:
- https://sources.debian.net/src/gcc-defaults/1.181/ (for browsing the source)
- https://sources.debian.net/src/gcc-defaults/1.181/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gcc-defaults/1.181/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gdbm=1.18.1-4`
Binary Packages:
- `libgdbm-compat4:amd64=1.18.1-4`
- `libgdbm6:amd64=1.18.1-4`
Licenses: (parsed from: `/usr/share/doc/libgdbm-compat4/copyright`, `/usr/share/doc/libgdbm6/copyright`)
- `GFDL-NIV-1.3+`
- `GPL-2`
- `GPL-2+`
- `GPL-3`
- `GPL-3+`
Source:
```console
$ apt-get source -qq --print-uris gdbm=1.18.1-4
'http://deb.debian.org/debian/pool/main/g/gdbm/gdbm_1.18.1-4.dsc' gdbm_1.18.1-4.dsc 2635 SHA256:14f2a1741041f3ee8ebe1db9985ec12855c856a4c545ace6140b1222030ae64a
'http://deb.debian.org/debian/pool/main/g/gdbm/gdbm_1.18.1.orig.tar.gz' gdbm_1.18.1.orig.tar.gz 941863 SHA256:86e613527e5dba544e73208f42b78b7c022d4fa5a6d5498bf18c8d6f745b91dc
'http://deb.debian.org/debian/pool/main/g/gdbm/gdbm_1.18.1.orig.tar.gz.asc' gdbm_1.18.1.orig.tar.gz.asc 412 SHA256:3254738e7689e44ac65e78a766806828b8282e6bb1c0e5bb6156a99e567889a5
'http://deb.debian.org/debian/pool/main/g/gdbm/gdbm_1.18.1-4.debian.tar.xz' gdbm_1.18.1-4.debian.tar.xz 16460 SHA256:1a7771cf18cacf86b8415cbdeafa4e54dd2dadee59f0c29833aba476726594c5
```
Other potentially useful URLs:
- https://sources.debian.net/src/gdbm/1.18.1-4/ (for browsing the source)
- https://sources.debian.net/src/gdbm/1.18.1-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gdbm/1.18.1-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `glib2.0=2.58.3-2+deb10u3`
Binary Packages:
- `libglib2.0-0:amd64=2.58.3-2+deb10u3`
Licenses: (parsed from: `/usr/share/doc/libglib2.0-0/copyright`)
- `Apache-2.0`
- `Expat`
- `GPL-2+`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris glib2.0=2.58.3-2+deb10u3
'http://deb.debian.org/debian/pool/main/g/glib2.0/glib2.0_2.58.3-2%2bdeb10u3.dsc' glib2.0_2.58.3-2+deb10u3.dsc 3444 SHA256:1e016740f39e61ef728f4e2536dc3e3645d37c6dc8369816f8507792563643d8
'http://deb.debian.org/debian/pool/main/g/glib2.0/glib2.0_2.58.3.orig.tar.xz' glib2.0_2.58.3.orig.tar.xz 4863648 SHA256:8f43c31767e88a25da72b52a40f3301fefc49a665b56dc10ee7cc9565cbe7481
'http://deb.debian.org/debian/pool/main/g/glib2.0/glib2.0_2.58.3-2%2bdeb10u3.debian.tar.xz' glib2.0_2.58.3-2+deb10u3.debian.tar.xz 107124 SHA256:2749397b93fca317a7f47489390393dedda6ef3c9359488bbd475a698529cf7a
```
Other potentially useful URLs:
- https://sources.debian.net/src/glib2.0/2.58.3-2+deb10u3/ (for browsing the source)
- https://sources.debian.net/src/glib2.0/2.58.3-2+deb10u3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/glib2.0/2.58.3-2+deb10u3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `glibc=2.28-10`
Binary Packages:
- `libc-bin=2.28-10`
- `libc-dev-bin=2.28-10`
- `libc6:amd64=2.28-10`
- `libc6-dev:amd64=2.28-10`
Licenses: (parsed from: `/usr/share/doc/libc-bin/copyright`, `/usr/share/doc/libc-dev-bin/copyright`, `/usr/share/doc/libc6/copyright`, `/usr/share/doc/libc6-dev/copyright`)
- `GPL-2`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris glibc=2.28-10
'http://deb.debian.org/debian/pool/main/g/glibc/glibc_2.28-10.dsc' glibc_2.28-10.dsc 8889 SHA256:9f21ef7002d51a32b46aafb9ca604427cf28c49495ecbf97e44740f53619ce69
'http://deb.debian.org/debian/pool/main/g/glibc/glibc_2.28.orig.tar.xz' glibc_2.28.orig.tar.xz 17061292 SHA256:53d3c1c7bff0fb25d4c7874bf13435dc44a71fd7dd5ffc9bfdcb513cdfc36854
'http://deb.debian.org/debian/pool/main/g/glibc/glibc_2.28-10.debian.tar.xz' glibc_2.28-10.debian.tar.xz 885796 SHA256:08ca414d8428a252ea357661631885ff72e47afa0663e3811167cc0897dbb042
```
Other potentially useful URLs:
- https://sources.debian.net/src/glibc/2.28-10/ (for browsing the source)
- https://sources.debian.net/src/glibc/2.28-10/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/glibc/2.28-10/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gmp=2:6.1.2+dfsg-4`
Binary Packages:
- `libgmp10:amd64=2:6.1.2+dfsg-4`
Licenses: (parsed from: `/usr/share/doc/libgmp10/copyright`)
- `GPL`
- `GPL-2`
- `GPL-3`
- `LGPL-3`
Source:
```console
$ apt-get source -qq --print-uris gmp=2:6.1.2+dfsg-4
'http://deb.debian.org/debian/pool/main/g/gmp/gmp_6.1.2%2bdfsg-4.dsc' gmp_6.1.2+dfsg-4.dsc 2123 SHA256:5e9c98e1636344bf0c84710ee564ee6032d6a9db26aa5d29857d65b2a979877c
'http://deb.debian.org/debian/pool/main/g/gmp/gmp_6.1.2%2bdfsg.orig.tar.xz' gmp_6.1.2+dfsg.orig.tar.xz 1804424 SHA256:18016f718f621e7641ddd4e57f8e140391c5183252e5998263ffff59198a65b7
'http://deb.debian.org/debian/pool/main/g/gmp/gmp_6.1.2%2bdfsg-4.debian.tar.xz' gmp_6.1.2+dfsg-4.debian.tar.xz 21416 SHA256:cb25b080d915d9e5a641920f0471b4deb5368af739c7675d887cf290c2cffbe2
```
Other potentially useful URLs:
- https://sources.debian.net/src/gmp/2:6.1.2+dfsg-4/ (for browsing the source)
- https://sources.debian.net/src/gmp/2:6.1.2+dfsg-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gmp/2:6.1.2+dfsg-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gnupg2=2.2.12-1+deb10u1`
Binary Packages:
- `gpgv=2.2.12-1+deb10u1`
Licenses: (parsed from: `/usr/share/doc/gpgv/copyright`)
- `BSD-3-clause`
- `CC0-1.0`
- `Expat`
- `GPL-3`
- `GPL-3+`
- `LGPL-2.1`
- `LGPL-2.1+`
- `LGPL-3`
- `LGPL-3+`
- `RFC-Reference`
- `TinySCHEME`
- `permissive`
Source:
```console
$ apt-get source -qq --print-uris gnupg2=2.2.12-1+deb10u1
'http://deb.debian.org/debian/pool/main/g/gnupg2/gnupg2_2.2.12-1%2bdeb10u1.dsc' gnupg2_2.2.12-1+deb10u1.dsc 3261 SHA256:2e1ca8d194593c151228f6b54da51ccd0b17036a532c7724bfcab17594c886ed
'http://deb.debian.org/debian/pool/main/g/gnupg2/gnupg2_2.2.12.orig.tar.bz2' gnupg2_2.2.12.orig.tar.bz2 6682303 SHA256:db030f8b4c98640e91300d36d516f1f4f8fe09514a94ea9fc7411ee1a34082cb
'http://deb.debian.org/debian/pool/main/g/gnupg2/gnupg2_2.2.12.orig.tar.bz2.asc' gnupg2_2.2.12.orig.tar.bz2.asc 3204 SHA256:97c8dc25c4c2fe9a39b2ffd81b65b6f3dc4ad359c9a81ca4bb9b4bdeb6167c60
'http://deb.debian.org/debian/pool/main/g/gnupg2/gnupg2_2.2.12-1%2bdeb10u1.debian.tar.xz' gnupg2_2.2.12-1+deb10u1.debian.tar.xz 123224 SHA256:f8cd4f8a2b63208fd05ae433dc9cb11d2483a72ef057cfe5fcfe2385b7c63f38
```
Other potentially useful URLs:
- https://sources.debian.net/src/gnupg2/2.2.12-1+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/gnupg2/2.2.12-1+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gnupg2/2.2.12-1+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gnutls28=3.6.7-4+deb10u7`
Binary Packages:
- `libgnutls30:amd64=3.6.7-4+deb10u7`
Licenses: (parsed from: `/usr/share/doc/libgnutls30/copyright`)
- `Apache-2.0`
- `CC0 license`
- `GFDL-1.3`
- `GPL`
- `GPL-3`
- `GPLv3+`
- `LGPL`
- `LGPL-3`
- `LGPLv3+_or_GPLv2+`
- `The MIT License (MIT)`
- `The main library is licensed under GNU Lesser`
Source:
```console
$ apt-get source -qq --print-uris gnutls28=3.6.7-4+deb10u7
'http://deb.debian.org/debian/pool/main/g/gnutls28/gnutls28_3.6.7-4%2bdeb10u7.dsc' gnutls28_3.6.7-4+deb10u7.dsc 3354 SHA256:dac5aff80109fa5e05f4ab1cb5d402ee9caeefebaa12daf034bcdd7e614af6b2
'http://deb.debian.org/debian/pool/main/g/gnutls28/gnutls28_3.6.7.orig.tar.xz' gnutls28_3.6.7.orig.tar.xz 8153728 SHA256:5b3409ad5aaf239808730d1ee12fdcd148c0be00262c7edf157af655a8a188e2
'http://deb.debian.org/debian/pool/main/g/gnutls28/gnutls28_3.6.7.orig.tar.xz.asc' gnutls28_3.6.7.orig.tar.xz.asc 534 SHA256:a14d0a7b9295b65ae797a70f8e765024a2e363dca03d008bfce0aec2b3f292b0
'http://deb.debian.org/debian/pool/main/g/gnutls28/gnutls28_3.6.7-4%2bdeb10u7.debian.tar.xz' gnutls28_3.6.7-4+deb10u7.debian.tar.xz 94000 SHA256:4f399badd85387e1dd42c811e16d10c4c22196e57142a7325ec44c52b3c6a168
```
Other potentially useful URLs:
- https://sources.debian.net/src/gnutls28/3.6.7-4+deb10u7/ (for browsing the source)
- https://sources.debian.net/src/gnutls28/3.6.7-4+deb10u7/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gnutls28/3.6.7-4+deb10u7/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `grep=3.3-1`
Binary Packages:
- `grep=3.3-1`
Licenses: (parsed from: `/usr/share/doc/grep/copyright`)
- `GPL-3`
- `GPL-3+`
Source:
```console
$ apt-get source -qq --print-uris grep=3.3-1
'http://deb.debian.org/debian/pool/main/g/grep/grep_3.3-1.dsc' grep_3.3-1.dsc 2038 SHA256:4a019e5634f0a3a15715140fe8639af4cff0f2f7af8cee9b95b0607740ba9b25
'http://deb.debian.org/debian/pool/main/g/grep/grep_3.3.orig.tar.xz' grep_3.3.orig.tar.xz 1473056 SHA256:b960541c499619efd6afe1fa795402e4733c8e11ebf9fafccc0bb4bccdc5b514
'http://deb.debian.org/debian/pool/main/g/grep/grep_3.3-1.debian.tar.xz' grep_3.3-1.debian.tar.xz 104280 SHA256:2cea85fdfe3c70855019c3d9ed9346363137bf3f9931103d9b38514828c8989f
```
Other potentially useful URLs:
- https://sources.debian.net/src/grep/3.3-1/ (for browsing the source)
- https://sources.debian.net/src/grep/3.3-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/grep/3.3-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `gzip=1.9-3`
Binary Packages:
- `gzip=1.9-3`
Licenses: (parsed from: `/usr/share/doc/gzip/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris gzip=1.9-3
'http://deb.debian.org/debian/pool/main/g/gzip/gzip_1.9-3.dsc' gzip_1.9-3.dsc 1960 SHA256:fb4702653d4d5475db22dc5cb054b7321b9dc2ca2067540e31d9460bc11246c2
'http://deb.debian.org/debian/pool/main/g/gzip/gzip_1.9.orig.tar.gz' gzip_1.9.orig.tar.gz 1181937 SHA256:5d2d3a3432ef32f24cdb060d278834507b481a75adeca18850c73592f778f6ad
'http://deb.debian.org/debian/pool/main/g/gzip/gzip_1.9-3.debian.tar.xz' gzip_1.9-3.debian.tar.xz 14420 SHA256:45996a08643cad9339a30606c9f523984b2f421c6d58e5949471efab75c1ac52
```
Other potentially useful URLs:
- https://sources.debian.net/src/gzip/1.9-3/ (for browsing the source)
- https://sources.debian.net/src/gzip/1.9-3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/gzip/1.9-3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `hostname=3.21`
Binary Packages:
- `hostname=3.21`
Licenses: (parsed from: `/usr/share/doc/hostname/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris hostname=3.21
'http://deb.debian.org/debian/pool/main/h/hostname/hostname_3.21.dsc' hostname_3.21.dsc 1398 SHA256:8e61f35d7b3e57833d6110ee22a95af6b12e159bf41a5b659e63b21d01e83121
'http://deb.debian.org/debian/pool/main/h/hostname/hostname_3.21.tar.gz' hostname_3.21.tar.gz 13467 SHA256:566193a99f97a58f80b1537efe207c798bb88436c31c7dfc6dd4471d888a4a4f
```
Other potentially useful URLs:
- https://sources.debian.net/src/hostname/3.21/ (for browsing the source)
- https://sources.debian.net/src/hostname/3.21/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/hostname/3.21/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `icu=63.1-6+deb10u2`
Binary Packages:
- `libicu63:amd64=63.1-6+deb10u2`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris icu=63.1-6+deb10u2
'http://security.debian.org/debian-security/pool/updates/main/i/icu/icu_63.1-6%2bdeb10u2.dsc' icu_63.1-6+deb10u2.dsc 2152 SHA256:4d175ca81d9605af871f634b831d75394f0497d66b523af8eaaf15846550333c
'http://security.debian.org/debian-security/pool/updates/main/i/icu/icu_63.1.orig.tar.xz' icu_63.1.orig.tar.xz 13638120 SHA256:347d0e6c39c3538b812c10c6c83815d4a089d578380387ae7d94c5b820948e82
'http://security.debian.org/debian-security/pool/updates/main/i/icu/icu_63.1-6%2bdeb10u2.debian.tar.xz' icu_63.1-6+deb10u2.debian.tar.xz 25732 SHA256:1626f226cf3dad093bfc7874c5ebe9cc7a24e5fe8a1225a1de9fe576a4b9b463
```
Other potentially useful URLs:
- https://sources.debian.net/src/icu/63.1-6+deb10u2/ (for browsing the source)
- https://sources.debian.net/src/icu/63.1-6+deb10u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/icu/63.1-6+deb10u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `init-system-helpers=1.56+nmu1`
Binary Packages:
- `init-system-helpers=1.56+nmu1`
Licenses: (parsed from: `/usr/share/doc/init-system-helpers/copyright`)
- `BSD-3-clause`
- `GPL-2`
- `GPL-2+`
Source:
```console
$ apt-get source -qq --print-uris init-system-helpers=1.56+nmu1
'http://deb.debian.org/debian/pool/main/i/init-system-helpers/init-system-helpers_1.56%2bnmu1.dsc' init-system-helpers_1.56+nmu1.dsc 1945 SHA256:96f7d1c696faf801eb5990223b2782dedaf4092efb9b0dcc13d038b91dbb1a51
'http://deb.debian.org/debian/pool/main/i/init-system-helpers/init-system-helpers_1.56%2bnmu1.tar.xz' init-system-helpers_1.56+nmu1.tar.xz 40488 SHA256:ecb5b9a0dbf0b7e83ef41bfc15bf9d41868642d4d5f817a0962aa1b980a56368
```
Other potentially useful URLs:
- https://sources.debian.net/src/init-system-helpers/1.56+nmu1/ (for browsing the source)
- https://sources.debian.net/src/init-system-helpers/1.56+nmu1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/init-system-helpers/1.56+nmu1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `isl=0.20-2`
Binary Packages:
- `libisl19:amd64=0.20-2`
Licenses: (parsed from: `/usr/share/doc/libisl19/copyright`)
- `BSD-2-clause`
- `LGPL-2`
- `LGPL-2.1+`
- `MIT`
Source:
```console
$ apt-get source -qq --print-uris isl=0.20-2
'http://deb.debian.org/debian/pool/main/i/isl/isl_0.20-2.dsc' isl_0.20-2.dsc 1842 SHA256:466b881ac0207f9430ae21069e644f17a6e4428544f9802284727381e5d26089
'http://deb.debian.org/debian/pool/main/i/isl/isl_0.20.orig.tar.xz' isl_0.20.orig.tar.xz 1539064 SHA256:a5596a9fb8a5b365cb612e4b9628735d6e67e9178fae134a816ae195017e77aa
'http://deb.debian.org/debian/pool/main/i/isl/isl_0.20-2.debian.tar.xz' isl_0.20-2.debian.tar.xz 23512 SHA256:ea2b467fea2395ca08f236f520fcc37e50a1c91cad471a9ee89443bfae8f50af
```
Other potentially useful URLs:
- https://sources.debian.net/src/isl/0.20-2/ (for browsing the source)
- https://sources.debian.net/src/isl/0.20-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/isl/0.20-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `keyutils=1.6-6`
Binary Packages:
- `libkeyutils1:amd64=1.6-6`
Licenses: (parsed from: `/usr/share/doc/libkeyutils1/copyright`)
- `GPL-2`
- `GPL-2+`
- `LGPL-2`
- `LGPL-2+`
Source:
```console
$ apt-get source -qq --print-uris keyutils=1.6-6
'http://deb.debian.org/debian/pool/main/k/keyutils/keyutils_1.6-6.dsc' keyutils_1.6-6.dsc 2062 SHA256:1da6a0f50759b4eefe210e351558a854e28d312213d5528792af6938f106f183
'http://deb.debian.org/debian/pool/main/k/keyutils/keyutils_1.6.orig.tar.bz2' keyutils_1.6.orig.tar.bz2 93973 SHA256:d3aef20cec0005c0fa6b4be40079885567473185b1a57b629b030e67942c7115
'http://deb.debian.org/debian/pool/main/k/keyutils/keyutils_1.6-6.debian.tar.xz' keyutils_1.6-6.debian.tar.xz 12828 SHA256:063876d3733337aad5e632b013bb8fd85bef85b2285ba7d6c8ab5ac7492ca245
```
Other potentially useful URLs:
- https://sources.debian.net/src/keyutils/1.6-6/ (for browsing the source)
- https://sources.debian.net/src/keyutils/1.6-6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/keyutils/1.6-6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `krb5=1.17-3+deb10u3`
Binary Packages:
- `libgssapi-krb5-2:amd64=1.17-3+deb10u3`
- `libk5crypto3:amd64=1.17-3+deb10u3`
- `libkrb5-3:amd64=1.17-3+deb10u3`
- `libkrb5support0:amd64=1.17-3+deb10u3`
Licenses: (parsed from: `/usr/share/doc/libgssapi-krb5-2/copyright`, `/usr/share/doc/libk5crypto3/copyright`, `/usr/share/doc/libkrb5-3/copyright`, `/usr/share/doc/libkrb5support0/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris krb5=1.17-3+deb10u3
'http://deb.debian.org/debian/pool/main/k/krb5/krb5_1.17-3%2bdeb10u3.dsc' krb5_1.17-3+deb10u3.dsc 2968 SHA256:002bf39a1c8d410c5acbf20294d31a1ed620e207d589c75e13a568fddd6295ce
'http://deb.debian.org/debian/pool/main/k/krb5/krb5_1.17.orig.tar.gz' krb5_1.17.orig.tar.gz 8761763 SHA256:5a6e2284a53de5702d3dc2be3b9339c963f9b5397d3fbbc53beb249380a781f5
'http://deb.debian.org/debian/pool/main/k/krb5/krb5_1.17-3%2bdeb10u3.debian.tar.xz' krb5_1.17-3+deb10u3.debian.tar.xz 103168 SHA256:12d8d480d5fa1dcec99c8ce0d5e3f249da94ab959829d83ca5ff09376349b1e3
```
Other potentially useful URLs:
- https://sources.debian.net/src/krb5/1.17-3+deb10u3/ (for browsing the source)
- https://sources.debian.net/src/krb5/1.17-3+deb10u3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/krb5/1.17-3+deb10u3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libcap-ng=0.7.9-2`
Binary Packages:
- `libcap-ng0:amd64=0.7.9-2`
Licenses: (parsed from: `/usr/share/doc/libcap-ng0/copyright`)
- `GPL-2`
- `GPL-3`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris libcap-ng=0.7.9-2
'http://deb.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng_0.7.9-2.dsc' libcap-ng_0.7.9-2.dsc 1912 SHA256:e787ebb86a7c9fdcfe429c20f2b17528d084917a34b5efc0022619e1e11572a4
'http://deb.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng_0.7.9.orig.tar.gz' libcap-ng_0.7.9.orig.tar.gz 449038 SHA256:4a1532bcf3731aade40936f6d6a586ed5a66ca4c7455e1338d1f6c3e09221328
'http://deb.debian.org/debian/pool/main/libc/libcap-ng/libcap-ng_0.7.9-2.debian.tar.xz' libcap-ng_0.7.9-2.debian.tar.xz 6220 SHA256:1ce4d5f7ee041b01f254e9d12ae86fef563566871bc457579c70b058b071ae22
```
Other potentially useful URLs:
- https://sources.debian.net/src/libcap-ng/0.7.9-2/ (for browsing the source)
- https://sources.debian.net/src/libcap-ng/0.7.9-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libcap-ng/0.7.9-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libffi=3.2.1-9`
Binary Packages:
- `libffi6:amd64=3.2.1-9`
Licenses: (parsed from: `/usr/share/doc/libffi6/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris libffi=3.2.1-9
'http://deb.debian.org/debian/pool/main/libf/libffi/libffi_3.2.1-9.dsc' libffi_3.2.1-9.dsc 2000 SHA256:28beaed76f2ce4c6a3ce1527eb07534c8ef4bf624a42c803fea045c416f8faa5
'http://deb.debian.org/debian/pool/main/libf/libffi/libffi_3.2.1.orig.tar.gz' libffi_3.2.1.orig.tar.gz 940837 SHA256:d06ebb8e1d9a22d19e38d63fdb83954253f39bedc5d46232a05645685722ca37
'http://deb.debian.org/debian/pool/main/libf/libffi/libffi_3.2.1-9.debian.tar.xz' libffi_3.2.1-9.debian.tar.xz 17148 SHA256:26e3cfd358733832da251778bc615a42b908d7779cf8b8d7fc2bdee4660bbbce
```
Other potentially useful URLs:
- https://sources.debian.net/src/libffi/3.2.1-9/ (for browsing the source)
- https://sources.debian.net/src/libffi/3.2.1-9/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libffi/3.2.1-9/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libgcrypt20=1.8.4-5+deb10u1`
Binary Packages:
- `libgcrypt20:amd64=1.8.4-5+deb10u1`
Licenses: (parsed from: `/usr/share/doc/libgcrypt20/copyright`)
- `GPL-2`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris libgcrypt20=1.8.4-5+deb10u1
'http://deb.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.4-5%2bdeb10u1.dsc' libgcrypt20_1.8.4-5+deb10u1.dsc 2838 SHA256:b81da28242d5516fdd74449a48c67ab11f247070acfb11156d04e19a06cb0e63
'http://deb.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.4.orig.tar.bz2' libgcrypt20_1.8.4.orig.tar.bz2 2990108 SHA256:f638143a0672628fde0cad745e9b14deb85dffb175709cacc1f4fe24b93f2227
'http://deb.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.4.orig.tar.bz2.asc' libgcrypt20_1.8.4.orig.tar.bz2.asc 534 SHA256:97df94317ad273cffce4e78ad34ad0664819b44496f6528818a4298a691209a3
'http://deb.debian.org/debian/pool/main/libg/libgcrypt20/libgcrypt20_1.8.4-5%2bdeb10u1.debian.tar.xz' libgcrypt20_1.8.4-5+deb10u1.debian.tar.xz 30712 SHA256:5ab932db1f9c2c9e5d3e86266d020a492bbae845367982a0c7d5ec78abf55027
```
Other potentially useful URLs:
- https://sources.debian.net/src/libgcrypt20/1.8.4-5+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/libgcrypt20/1.8.4-5+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libgcrypt20/1.8.4-5+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libgpg-error=1.35-1`
Binary Packages:
- `libgpg-error0:amd64=1.35-1`
Licenses: (parsed from: `/usr/share/doc/libgpg-error0/copyright`)
- `BSD-3-clause`
- `GPL-3`
- `GPL-3+`
- `LGPL-2.1`
- `LGPL-2.1+`
- `g10-permissive`
Source:
```console
$ apt-get source -qq --print-uris libgpg-error=1.35-1
'http://deb.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error_1.35-1.dsc' libgpg-error_1.35-1.dsc 2155 SHA256:1d5e455ea385f522a0cf39510291945d42b95fafc8a1f05537cef3863c1d6c16
'http://deb.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error_1.35.orig.tar.bz2' libgpg-error_1.35.orig.tar.bz2 918408 SHA256:cbd5ee62a8a8c88d48c158fff4fc9ead4132aacd1b4a56eb791f9f997d07e067
'http://deb.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error_1.35.orig.tar.bz2.asc' libgpg-error_1.35.orig.tar.bz2.asc 534 SHA256:f6bfdc64a84245437c443f83faea85407d051d0487550515a4a279573589944d
'http://deb.debian.org/debian/pool/main/libg/libgpg-error/libgpg-error_1.35-1.debian.tar.xz' libgpg-error_1.35-1.debian.tar.xz 16056 SHA256:e600a34c09e6a3e8ec63d6145f4a11b16d92dc0ddeff1ba94cba08a8fecf0b66
```
Other potentially useful URLs:
- https://sources.debian.net/src/libgpg-error/1.35-1/ (for browsing the source)
- https://sources.debian.net/src/libgpg-error/1.35-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libgpg-error/1.35-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libidn2=2.0.5-1+deb10u1`
Binary Packages:
- `libidn2-0:amd64=2.0.5-1+deb10u1`
Licenses: (parsed from: `/usr/share/doc/libidn2-0/copyright`)
- `GPL-2`
- `GPL-2+`
- `GPL-3`
- `GPL-3+`
- `LGPL-3`
- `LGPL-3+`
- `Unicode`
Source:
```console
$ apt-get source -qq --print-uris libidn2=2.0.5-1+deb10u1
'http://deb.debian.org/debian/pool/main/libi/libidn2/libidn2_2.0.5-1%2bdeb10u1.dsc' libidn2_2.0.5-1+deb10u1.dsc 2501 SHA256:6c4eac5dc85983e4cf37ee8deea5e23cfb9e1620f7a94a858726676c8858b498
'http://deb.debian.org/debian/pool/main/libi/libidn2/libidn2_2.0.5.orig.tar.gz' libidn2_2.0.5.orig.tar.gz 2091929 SHA256:53f69170886f1fa6fa5b332439c7a77a7d22626a82ef17e2c1224858bb4ca2b8
'http://deb.debian.org/debian/pool/main/libi/libidn2/libidn2_2.0.5-1%2bdeb10u1.debian.tar.xz' libidn2_2.0.5-1+deb10u1.debian.tar.xz 10286540 SHA256:37cfdc06e4e2f03e932af5bb309cbe94f8466f8b347aa34fa7c1e03a425556b2
```
Other potentially useful URLs:
- https://sources.debian.net/src/libidn2/2.0.5-1+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/libidn2/2.0.5-1+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libidn2/2.0.5-1+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libonig=6.9.1-1`
Binary Packages:
- `libonig5:amd64=6.9.1-1`
Licenses: (parsed from: `/usr/share/doc/libonig5/copyright`)
- `BSD-2-clause`
- `GPL-2`
- `GPL-2+`
Source:
```console
$ apt-get source -qq --print-uris libonig=6.9.1-1
'http://deb.debian.org/debian/pool/main/libo/libonig/libonig_6.9.1-1.dsc' libonig_6.9.1-1.dsc 1854 SHA256:597b84337f078a46e9212eeedf39ba22723bf7447cf3d117a15c5635a390f955
'http://deb.debian.org/debian/pool/main/libo/libonig/libonig_6.9.1.orig.tar.gz' libonig_6.9.1.orig.tar.gz 535443 SHA256:733a91e257c20cb17b81596db48fd0fbd2e0074fe922ae4c2bc4fefcec110aae
'http://deb.debian.org/debian/pool/main/libo/libonig/libonig_6.9.1-1.debian.tar.xz' libonig_6.9.1-1.debian.tar.xz 7800 SHA256:29e99f42ee8dcf7498b85de13c4e2b07548e52fed9f40bd85f90431db7f03ab6
```
Other potentially useful URLs:
- https://sources.debian.net/src/libonig/6.9.1-1/ (for browsing the source)
- https://sources.debian.net/src/libonig/6.9.1-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libonig/6.9.1-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libpsl=0.20.2-2`
Binary Packages:
- `libpsl5:amd64=0.20.2-2`
Licenses: (parsed from: `/usr/share/doc/libpsl5/copyright`)
- `Chromium`
- `MIT`
Source:
```console
$ apt-get source -qq --print-uris libpsl=0.20.2-2
'http://deb.debian.org/debian/pool/main/libp/libpsl/libpsl_0.20.2-2.dsc' libpsl_0.20.2-2.dsc 1637 SHA256:ae401852522d748f1222b91734bc5bd7c6db0de843dd675adc180f2a1884c94d
'http://deb.debian.org/debian/pool/main/libp/libpsl/libpsl_0.20.2.orig.tar.gz' libpsl_0.20.2.orig.tar.gz 8590430 SHA256:94d2b5e00e9aa761ae7efbaa67edc00d5298487ed9706eb4789e349012993c31
'http://deb.debian.org/debian/pool/main/libp/libpsl/libpsl_0.20.2-2.debian.tar.xz' libpsl_0.20.2-2.debian.tar.xz 9920 SHA256:1f008454fdb973964202020fb700d5028e001b7eaa4e77eeab8ebc99b749ea51
```
Other potentially useful URLs:
- https://sources.debian.net/src/libpsl/0.20.2-2/ (for browsing the source)
- https://sources.debian.net/src/libpsl/0.20.2-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libpsl/0.20.2-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libseccomp=2.3.3-4`
Binary Packages:
- `libseccomp2:amd64=2.3.3-4`
Licenses: (parsed from: `/usr/share/doc/libseccomp2/copyright`)
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris libseccomp=2.3.3-4
'http://deb.debian.org/debian/pool/main/libs/libseccomp/libseccomp_2.3.3-4.dsc' libseccomp_2.3.3-4.dsc 2500 SHA256:1443086c253ffacdad635aeb27a37b21958119833782290ae868b897eb9f6ab0
'http://deb.debian.org/debian/pool/main/libs/libseccomp/libseccomp_2.3.3.orig.tar.gz' libseccomp_2.3.3.orig.tar.gz 564546 SHA256:7fc28f4294cc72e61c529bedf97e705c3acf9c479a8f1a3028d4cd2ca9f3b155
'http://deb.debian.org/debian/pool/main/libs/libseccomp/libseccomp_2.3.3-4.debian.tar.xz' libseccomp_2.3.3-4.debian.tar.xz 12104 SHA256:deab2e069e145bf31d0a5569ad3adb2b94217623e02a25d4c9fa0d298073769e
```
Other potentially useful URLs:
- https://sources.debian.net/src/libseccomp/2.3.3-4/ (for browsing the source)
- https://sources.debian.net/src/libseccomp/2.3.3-4/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libseccomp/2.3.3-4/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libselinux=2.8-1`
Binary Packages:
- `libselinux1:amd64=2.8-1+b1`
Licenses: (parsed from: `/usr/share/doc/libselinux1/copyright`)
- `GPL-2`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris libselinux=2.8-1
'http://deb.debian.org/debian/pool/main/libs/libselinux/libselinux_2.8-1.dsc' libselinux_2.8-1.dsc 2347 SHA256:0f08d64f4488312a8e8b7ffb12771cd385560752473a2e585449edc27223c129
'http://deb.debian.org/debian/pool/main/libs/libselinux/libselinux_2.8.orig.tar.gz' libselinux_2.8.orig.tar.gz 187759 SHA256:31db96ec7643ce10912b3c3f98506a08a9116dcfe151855fd349c3fda96187e1
'http://deb.debian.org/debian/pool/main/libs/libselinux/libselinux_2.8-1.debian.tar.xz' libselinux_2.8-1.debian.tar.xz 23052 SHA256:a0b150e870a3da7e1d7b0fec7c1a5ae6988a0985e545c69cfe8fe05363c5bf64
```
Other potentially useful URLs:
- https://sources.debian.net/src/libselinux/2.8-1/ (for browsing the source)
- https://sources.debian.net/src/libselinux/2.8-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libselinux/2.8-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libsemanage=2.8-2`
Binary Packages:
- `libsemanage-common=2.8-2`
- `libsemanage1:amd64=2.8-2`
Licenses: (parsed from: `/usr/share/doc/libsemanage-common/copyright`, `/usr/share/doc/libsemanage1/copyright`)
- `GPL`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris libsemanage=2.8-2
'http://deb.debian.org/debian/pool/main/libs/libsemanage/libsemanage_2.8-2.dsc' libsemanage_2.8-2.dsc 2434 SHA256:f7cbe0594c098808a449804a357159bec4db54389df0319c2b5306b10ec2e707
'http://deb.debian.org/debian/pool/main/libs/libsemanage/libsemanage_2.8.orig.tar.gz' libsemanage_2.8.orig.tar.gz 154200 SHA256:1c0de8d2c51e5460926c21e371105c84a39087dfd8f8e9f0cc1d017e4cbea8e2
'http://deb.debian.org/debian/pool/main/libs/libsemanage/libsemanage_2.8-2.debian.tar.xz' libsemanage_2.8-2.debian.tar.xz 17756 SHA256:02315ffeb2b0a24b7c3bc8fa0c0e1e217e4a7b284bb88f64b0bf613e76d125e2
```
Other potentially useful URLs:
- https://sources.debian.net/src/libsemanage/2.8-2/ (for browsing the source)
- https://sources.debian.net/src/libsemanage/2.8-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libsemanage/2.8-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libsepol=2.8-1`
Binary Packages:
- `libsepol1:amd64=2.8-1`
Licenses: (parsed from: `/usr/share/doc/libsepol1/copyright`)
- `GPL`
- `LGPL`
Source:
```console
$ apt-get source -qq --print-uris libsepol=2.8-1
'http://deb.debian.org/debian/pool/main/libs/libsepol/libsepol_2.8-1.dsc' libsepol_2.8-1.dsc 1792 SHA256:37b0b79ab0f7533c194272809ccb3f3c5ff788536f66254c0d405e2e8b2b270e
'http://deb.debian.org/debian/pool/main/libs/libsepol/libsepol_2.8.orig.tar.gz' libsepol_2.8.orig.tar.gz 473384 SHA256:3ad6916a8352bef0bad49acc8037a5f5b48c56f94e4cb4e1959ca475fa9d24d6
'http://deb.debian.org/debian/pool/main/libs/libsepol/libsepol_2.8-1.debian.tar.xz' libsepol_2.8-1.debian.tar.xz 14076 SHA256:7b8d0b47396c96830754db2e5b679d294486aeffd93cfd21ac68202031374a00
```
Other potentially useful URLs:
- https://sources.debian.net/src/libsepol/2.8-1/ (for browsing the source)
- https://sources.debian.net/src/libsepol/2.8-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libsepol/2.8-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libsigsegv=2.12-2`
Binary Packages:
- `libsigsegv2:amd64=2.12-2`
Licenses: (parsed from: `/usr/share/doc/libsigsegv2/copyright`)
- `GPL-2`
- `GPL-2+`
- `GPL-2+ with Autoconf exception`
- `permissive-fsf`
- `permissive-other`
Source:
```console
$ apt-get source -qq --print-uris libsigsegv=2.12-2
'http://deb.debian.org/debian/pool/main/libs/libsigsegv/libsigsegv_2.12-2.dsc' libsigsegv_2.12-2.dsc 2363 SHA256:b081b244de2f427345838f379405d8438c29db1fa746a4e270167ae7cb10c079
'http://deb.debian.org/debian/pool/main/libs/libsigsegv/libsigsegv_2.12.orig.tar.gz' libsigsegv_2.12.orig.tar.gz 451408 SHA256:3ae1af359eebaa4ffc5896a1aee3568c052c99879316a1ab57f8fe1789c390b6
'http://deb.debian.org/debian/pool/main/libs/libsigsegv/libsigsegv_2.12.orig.tar.gz.asc' libsigsegv_2.12.orig.tar.gz.asc 2442 SHA256:1861a9a182bbb7a24a18f7e43fe0fa3eb6f6fd53780b30e01990677112694dfc
'http://deb.debian.org/debian/pool/main/libs/libsigsegv/libsigsegv_2.12-2.debian.tar.xz' libsigsegv_2.12-2.debian.tar.xz 8340 SHA256:73940fb346f7afd90c93a341164cd175349e0507de8b1c05b0834b598c372260
```
Other potentially useful URLs:
- https://sources.debian.net/src/libsigsegv/2.12-2/ (for browsing the source)
- https://sources.debian.net/src/libsigsegv/2.12-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libsigsegv/2.12-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libsodium=1.0.17-1`
Binary Packages:
- `libsodium23:amd64=1.0.17-1`
Licenses: (parsed from: `/usr/share/doc/libsodium23/copyright`)
- `BSD-2-clause`
- `CC0`
- `GPL-2`
- `GPL-2+`
- `ISC`
- `MIT`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris libsodium=1.0.17-1
'http://deb.debian.org/debian/pool/main/libs/libsodium/libsodium_1.0.17-1.dsc' libsodium_1.0.17-1.dsc 1913 SHA256:e2fb1951476b7b7177e7b2848b6d896a55ddffb11b0e5f82563d24944fc910ac
'http://deb.debian.org/debian/pool/main/libs/libsodium/libsodium_1.0.17.orig.tar.gz' libsodium_1.0.17.orig.tar.gz 1604410 SHA256:602e07029c780e154347fb95495b13ce48709ae705c6cff927ecb0c485b95672
'http://deb.debian.org/debian/pool/main/libs/libsodium/libsodium_1.0.17-1.debian.tar.xz' libsodium_1.0.17-1.debian.tar.xz 7256 SHA256:fdaf9fcb6b5a0801f1344d2350da2882d49273ed9c641e1dd747a66e5b318b6c
```
Other potentially useful URLs:
- https://sources.debian.net/src/libsodium/1.0.17-1/ (for browsing the source)
- https://sources.debian.net/src/libsodium/1.0.17-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libsodium/1.0.17-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libssh2=1.8.0-2.1`
Binary Packages:
- `libssh2-1:amd64=1.8.0-2.1`
Licenses: (parsed from: `/usr/share/doc/libssh2-1/copyright`)
- `BSD3`
Source:
```console
$ apt-get source -qq --print-uris libssh2=1.8.0-2.1
'http://deb.debian.org/debian/pool/main/libs/libssh2/libssh2_1.8.0-2.1.dsc' libssh2_1.8.0-2.1.dsc 1958 SHA256:33f070a4a32db5d3952457986d8f80c9cf874dd144d81f5bce062171564b35d9
'http://deb.debian.org/debian/pool/main/libs/libssh2/libssh2_1.8.0.orig.tar.gz' libssh2_1.8.0.orig.tar.gz 846989 SHA256:4382d33de790b28f862e53ed59ffbd65f3def7a06e8b6e9ca1b6f70453b4d5e0
'http://deb.debian.org/debian/pool/main/libs/libssh2/libssh2_1.8.0-2.1.debian.tar.xz' libssh2_1.8.0-2.1.debian.tar.xz 13988 SHA256:e3c34166cddaba7f2162132ef4f4bdc1490c499ee6610bde81f773adef43489e
```
Other potentially useful URLs:
- https://sources.debian.net/src/libssh2/1.8.0-2.1/ (for browsing the source)
- https://sources.debian.net/src/libssh2/1.8.0-2.1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libssh2/1.8.0-2.1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libtasn1-6=4.13-3`
Binary Packages:
- `libtasn1-6:amd64=4.13-3`
Licenses: (parsed from: `/usr/share/doc/libtasn1-6/copyright`)
- `GFDL-1.3`
- `GPL-3`
- `LGPL`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris libtasn1-6=4.13-3
'http://deb.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-3.dsc' libtasn1-6_4.13-3.dsc 2574 SHA256:15a984daba0bc64819a1203cd28a1e869a30e0edde227237e4cdcfbc86131227
'http://deb.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13.orig.tar.gz' libtasn1-6_4.13.orig.tar.gz 1891703 SHA256:7e528e8c317ddd156230c4e31d082cd13e7ddeb7a54824be82632209550c8cca
'http://deb.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13.orig.tar.gz.asc' libtasn1-6_4.13.orig.tar.gz.asc 774 SHA256:90261376528edf44831d1369847088cc2fb48669860d343961daca42e674b226
'http://deb.debian.org/debian/pool/main/libt/libtasn1-6/libtasn1-6_4.13-3.debian.tar.xz' libtasn1-6_4.13-3.debian.tar.xz 63384 SHA256:1428c31d3d900d8fa1946fc29d9d2839c73c7a4c0ebff7a2571c134aef53c310
```
Other potentially useful URLs:
- https://sources.debian.net/src/libtasn1-6/4.13-3/ (for browsing the source)
- https://sources.debian.net/src/libtasn1-6/4.13-3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libtasn1-6/4.13-3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libunistring=0.9.10-1`
Binary Packages:
- `libunistring2:amd64=0.9.10-1`
Licenses: (parsed from: `/usr/share/doc/libunistring2/copyright`)
- `FreeSoftware`
- `GFDL-1.2`
- `GFDL-1.2+`
- `GPL-2`
- `GPL-2+`
- `GPL-2+ with distribution exception`
- `GPL-3`
- `GPL-3+`
- `LGPL-3`
- `LGPL-3+`
- `MIT`
Source:
```console
$ apt-get source -qq --print-uris libunistring=0.9.10-1
'http://deb.debian.org/debian/pool/main/libu/libunistring/libunistring_0.9.10-1.dsc' libunistring_0.9.10-1.dsc 2206 SHA256:2118b96b1125399556bd95b8917cd559c4e9afe8d85861b01435f9635cefcdf2
'http://deb.debian.org/debian/pool/main/libu/libunistring/libunistring_0.9.10.orig.tar.xz' libunistring_0.9.10.orig.tar.xz 2051320 SHA256:eb8fb2c3e4b6e2d336608377050892b54c3c983b646c561836550863003c05d7
'http://deb.debian.org/debian/pool/main/libu/libunistring/libunistring_0.9.10.orig.tar.xz.asc' libunistring_0.9.10.orig.tar.xz.asc 1310 SHA256:e1606f691034fa21b00e08269622743547c16d21cca6c8a64156b4774a49e78e
'http://deb.debian.org/debian/pool/main/libu/libunistring/libunistring_0.9.10-1.debian.tar.xz' libunistring_0.9.10-1.debian.tar.xz 40328 SHA256:dd4d07437e6332003e702aa2f56911a21091ac6f10d0cdc17aaaaa8e29ad63b7
```
Other potentially useful URLs:
- https://sources.debian.net/src/libunistring/0.9.10-1/ (for browsing the source)
- https://sources.debian.net/src/libunistring/0.9.10-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libunistring/0.9.10-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libxml2=2.9.4+dfsg1-7+deb10u2`
Binary Packages:
- `libxml2:amd64=2.9.4+dfsg1-7+deb10u2`
Licenses: (parsed from: `/usr/share/doc/libxml2/copyright`)
- `ISC`
- `MIT-1`
Source:
```console
$ apt-get source -qq --print-uris libxml2=2.9.4+dfsg1-7+deb10u2
'http://deb.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4%2bdfsg1-7%2bdeb10u2.dsc' libxml2_2.9.4+dfsg1-7+deb10u2.dsc 3163 SHA256:1a5189b5c4238d4d833ad7b18e56e23e50252b62dddc90b3aff67018bce1f3aa
'http://deb.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4%2bdfsg1.orig.tar.xz' libxml2_2.9.4+dfsg1.orig.tar.xz 2446412 SHA256:a74ad55e346aa0b2b41903e66d21f8f3d2a736b3f41e32496376861ab484184e
'http://deb.debian.org/debian/pool/main/libx/libxml2/libxml2_2.9.4%2bdfsg1-7%2bdeb10u2.debian.tar.xz' libxml2_2.9.4+dfsg1-7+deb10u2.debian.tar.xz 40924 SHA256:07d0f31c11472f5a3407db92d363c30ad26100a11b5a181a6cf664af531f43fd
```
Other potentially useful URLs:
- https://sources.debian.net/src/libxml2/2.9.4+dfsg1-7+deb10u2/ (for browsing the source)
- https://sources.debian.net/src/libxml2/2.9.4+dfsg1-7+deb10u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libxml2/2.9.4+dfsg1-7+deb10u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `libzstd=1.3.8+dfsg-3+deb10u2`
Binary Packages:
- `libzstd1:amd64=1.3.8+dfsg-3+deb10u2`
Licenses: (parsed from: `/usr/share/doc/libzstd1/copyright`)
- `BSD-3-clause`
- `Expat`
- `GPL-2`
- `GPL-2+`
- `zlib`
Source:
```console
$ apt-get source -qq --print-uris libzstd=1.3.8+dfsg-3+deb10u2
'http://deb.debian.org/debian/pool/main/libz/libzstd/libzstd_1.3.8%2bdfsg-3%2bdeb10u2.dsc' libzstd_1.3.8+dfsg-3+deb10u2.dsc 1947 SHA256:572fae1c7dc9bace3b9f7fcdeabf30dd1d00d0462e319ccec7b58b0adbf7dc85
'http://deb.debian.org/debian/pool/main/libz/libzstd/libzstd_1.3.8%2bdfsg.orig.tar.xz' libzstd_1.3.8+dfsg.orig.tar.xz 1299276 SHA256:03851f2c26ffbf1d43633df3f98966f3c62e698e91ef4dc90523915bc934e5f7
'http://deb.debian.org/debian/pool/main/libz/libzstd/libzstd_1.3.8%2bdfsg-3%2bdeb10u2.debian.tar.xz' libzstd_1.3.8+dfsg-3+deb10u2.debian.tar.xz 11648 SHA256:1f107f6cdc3bf46fb2aebf9c5c997ed2a125ac2fb1d28e939da857d5b061079e
```
Other potentially useful URLs:
- https://sources.debian.net/src/libzstd/1.3.8+dfsg-3+deb10u2/ (for browsing the source)
- https://sources.debian.net/src/libzstd/1.3.8+dfsg-3+deb10u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/libzstd/1.3.8+dfsg-3+deb10u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `linux=4.19.208-1`
Binary Packages:
- `linux-libc-dev:amd64=4.19.208-1`
Licenses: (parsed from: `/usr/share/doc/linux-libc-dev/copyright`)
- `CRYPTOGAMS`
- `GPL-2`
- `GPL-2+`
- `LGPL-2.1`
- `Unicode-data`
- `X11`
- `Xen-interface`
Source:
```console
$ apt-get source -qq --print-uris linux=4.19.208-1
'http://deb.debian.org/debian/pool/main/l/linux/linux_4.19.208-1.dsc' linux_4.19.208-1.dsc 191615 SHA256:abc4bc72ccc5bf6d2c5ee4d60547c58ce8e00246effa0d4ca8d4a4ab36131dde
'http://deb.debian.org/debian/pool/main/l/linux/linux_4.19.208.orig.tar.xz' linux_4.19.208.orig.tar.xz 107598860 SHA256:cabff7d88404362e0ac398f5fed783e00acfb0fcce8669ced3e0de44fc2b03bc
'http://deb.debian.org/debian/pool/main/l/linux/linux_4.19.208-1.debian.tar.xz' linux_4.19.208-1.debian.tar.xz 1504196 SHA256:f4582cbfc68afe8650596ec591c4ebe4339d938f20b5dd034ae3110e011357cd
```
Other potentially useful URLs:
- https://sources.debian.net/src/linux/4.19.208-1/ (for browsing the source)
- https://sources.debian.net/src/linux/4.19.208-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/linux/4.19.208-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `lz4=1.8.3-1+deb10u1`
Binary Packages:
- `liblz4-1:amd64=1.8.3-1+deb10u1`
Licenses: (parsed from: `/usr/share/doc/liblz4-1/copyright`)
- `BSD-2-clause`
- `GPL-2`
- `GPL-2+`
Source:
```console
$ apt-get source -qq --print-uris lz4=1.8.3-1+deb10u1
'http://deb.debian.org/debian/pool/main/l/lz4/lz4_1.8.3-1%2bdeb10u1.dsc' lz4_1.8.3-1+deb10u1.dsc 2119 SHA256:22a32d93f1e1525efd33e80b8c8ffb9ad74b5baf441aa3534e875523d9b5eb93
'http://deb.debian.org/debian/pool/main/l/lz4/lz4_1.8.3.orig.tar.gz' lz4_1.8.3.orig.tar.gz 327897 SHA256:33af5936ac06536805f9745e0b6d61da606a1f8b4cc5c04dd3cbaca3b9b4fc43
'http://deb.debian.org/debian/pool/main/l/lz4/lz4_1.8.3-1%2bdeb10u1.debian.tar.xz' lz4_1.8.3-1+deb10u1.debian.tar.xz 12068 SHA256:6d9896c90c93f312af743b2c6ea1b62ba8defe8696acf7a009bed03b012db60a
```
Other potentially useful URLs:
- https://sources.debian.net/src/lz4/1.8.3-1+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/lz4/1.8.3-1+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/lz4/1.8.3-1+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `m4=1.4.18-2`
Binary Packages:
- `m4=1.4.18-2`
Licenses: (parsed from: `/usr/share/doc/m4/copyright`)
- `GFDL`
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris m4=1.4.18-2
'http://deb.debian.org/debian/pool/main/m/m4/m4_1.4.18-2.dsc' m4_1.4.18-2.dsc 1426 SHA256:93dda06744f90619c4666515c9b5bc51aa584519c16cafd1e74aaa3733628c1b
'http://deb.debian.org/debian/pool/main/m/m4/m4_1.4.18.orig.tar.xz' m4_1.4.18.orig.tar.xz 1207688 SHA256:f2c1e86ca0a404ff281631bdc8377638992744b175afb806e25871a24a934e07
'http://deb.debian.org/debian/pool/main/m/m4/m4_1.4.18-2.debian.tar.xz' m4_1.4.18-2.debian.tar.xz 17032 SHA256:73718bae96a2f63f0ed38c614ea081074914698207e73450da571461af1c58ec
```
Other potentially useful URLs:
- https://sources.debian.net/src/m4/1.4.18-2/ (for browsing the source)
- https://sources.debian.net/src/m4/1.4.18-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/m4/1.4.18-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `make-dfsg=4.2.1-1.2`
Binary Packages:
- `make=4.2.1-1.2`
Licenses: (parsed from: `/usr/share/doc/make/copyright`)
- `GPL-3`
- `GPL-3+`
Source:
```console
$ apt-get source -qq --print-uris make-dfsg=4.2.1-1.2
'http://deb.debian.org/debian/pool/main/m/make-dfsg/make-dfsg_4.2.1-1.2.dsc' make-dfsg_4.2.1-1.2.dsc 2019 SHA256:0c8a2da5d51e03bf43e2929322d5a8406f08e5ee2d81a71ed6e5a8734f1b05cb
'http://deb.debian.org/debian/pool/main/m/make-dfsg/make-dfsg_4.2.1.orig.tar.gz' make-dfsg_4.2.1.orig.tar.gz 1485018 SHA256:480405e8995796ea47cc54b281b7855280f0d815d296a1af1993eeeb72074e39
'http://deb.debian.org/debian/pool/main/m/make-dfsg/make-dfsg_4.2.1-1.2.diff.gz' make-dfsg_4.2.1-1.2.diff.gz 53108 SHA256:80e0b96cee381391a5d3322317075e23d8474c92c5fa4fecd334bc2e0920887b
```
Other potentially useful URLs:
- https://sources.debian.net/src/make-dfsg/4.2.1-1.2/ (for browsing the source)
- https://sources.debian.net/src/make-dfsg/4.2.1-1.2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/make-dfsg/4.2.1-1.2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `mawk=1.3.3-17`
Binary Packages:
- `mawk=1.3.3-17+b3`
Licenses: (parsed from: `/usr/share/doc/mawk/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris mawk=1.3.3-17
'http://deb.debian.org/debian/pool/main/m/mawk/mawk_1.3.3-17.dsc' mawk_1.3.3-17.dsc 1801 SHA256:f98ce6e153e8ac1faf8165bbf77447a4279313f1c18f6bfeec0c5ce35e4b9c03
'http://deb.debian.org/debian/pool/main/m/mawk/mawk_1.3.3.orig.tar.gz' mawk_1.3.3.orig.tar.gz 209942 SHA256:32649c46063d4ef0777a12ae6e9a26bcc920833d54e1abca7edb8d37481e7485
'http://deb.debian.org/debian/pool/main/m/mawk/mawk_1.3.3-17.diff.gz' mawk_1.3.3-17.diff.gz 63506 SHA256:13cb66b6eb5ee654d5626621d5ef476ede6b0bebac18ce765516de810e58490c
```
Other potentially useful URLs:
- https://sources.debian.net/src/mawk/1.3.3-17/ (for browsing the source)
- https://sources.debian.net/src/mawk/1.3.3-17/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/mawk/1.3.3-17/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `mpclib3=1.1.0-1`
Binary Packages:
- `libmpc3:amd64=1.1.0-1`
Licenses: (parsed from: `/usr/share/doc/libmpc3/copyright`)
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris mpclib3=1.1.0-1
'http://deb.debian.org/debian/pool/main/m/mpclib3/mpclib3_1.1.0-1.dsc' mpclib3_1.1.0-1.dsc 1990 SHA256:bb57824015b735bf72399a53f8c6a241e6a8bd402753b0fdcdaa5b99d0aef790
'http://deb.debian.org/debian/pool/main/m/mpclib3/mpclib3_1.1.0.orig.tar.gz' mpclib3_1.1.0.orig.tar.gz 701263 SHA256:6985c538143c1208dcb1ac42cedad6ff52e267b47e5f970183a3e75125b43c2e
'http://deb.debian.org/debian/pool/main/m/mpclib3/mpclib3_1.1.0-1.diff.gz' mpclib3_1.1.0-1.diff.gz 3794 SHA256:84b10a4ae958b3015e136b75be5fee22961255d19be655f7d0adae8d4f3bc977
```
Other potentially useful URLs:
- https://sources.debian.net/src/mpclib3/1.1.0-1/ (for browsing the source)
- https://sources.debian.net/src/mpclib3/1.1.0-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/mpclib3/1.1.0-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `mpfr4=4.0.2-1`
Binary Packages:
- `libmpfr6:amd64=4.0.2-1`
Licenses: (parsed from: `/usr/share/doc/libmpfr6/copyright`)
- `GFDL-1.2`
- `LGPL-3`
Source:
```console
$ apt-get source -qq --print-uris mpfr4=4.0.2-1
'http://deb.debian.org/debian/pool/main/m/mpfr4/mpfr4_4.0.2-1.dsc' mpfr4_4.0.2-1.dsc 1972 SHA256:9021ec2462ed0e73ea1379266740473abf5f826be819226497729f6c6b02e672
'http://deb.debian.org/debian/pool/main/m/mpfr4/mpfr4_4.0.2.orig.tar.xz' mpfr4_4.0.2.orig.tar.xz 1441996 SHA256:1d3be708604eae0e42d578ba93b390c2a145f17743a744d8f3f8c2ad5855a38a
'http://deb.debian.org/debian/pool/main/m/mpfr4/mpfr4_4.0.2-1.debian.tar.xz' mpfr4_4.0.2-1.debian.tar.xz 10544 SHA256:99c4d35654f33340f0efdec67142a34753157b20334cadad9018f5eab29738da
```
Other potentially useful URLs:
- https://sources.debian.net/src/mpfr4/4.0.2-1/ (for browsing the source)
- https://sources.debian.net/src/mpfr4/4.0.2-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/mpfr4/4.0.2-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `ncurses=6.1+20181013-2+deb10u2`
Binary Packages:
- `libncurses6:amd64=6.1+20181013-2+deb10u2`
- `libncursesw6:amd64=6.1+20181013-2+deb10u2`
- `libtinfo6:amd64=6.1+20181013-2+deb10u2`
- `ncurses-base=6.1+20181013-2+deb10u2`
- `ncurses-bin=6.1+20181013-2+deb10u2`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris ncurses=6.1+20181013-2+deb10u2
'http://deb.debian.org/debian/pool/main/n/ncurses/ncurses_6.1%2b20181013-2%2bdeb10u2.dsc' ncurses_6.1+20181013-2+deb10u2.dsc 4179 SHA256:8318631ff3298951a93d6dd6c20bd47c9e5fdaaf30578d541bd6404bdd5317ea
'http://deb.debian.org/debian/pool/main/n/ncurses/ncurses_6.1%2b20181013.orig.tar.gz' ncurses_6.1+20181013.orig.tar.gz 3411288 SHA256:aeb1d098ee90b39a763b57b00da19ff5bbb573dea077f98fbd85d59444bb3b59
'http://deb.debian.org/debian/pool/main/n/ncurses/ncurses_6.1%2b20181013.orig.tar.gz.asc' ncurses_6.1+20181013.orig.tar.gz.asc 251 SHA256:865931406e519909a4d0ab87b14d0c6d3ebccb7b3e0dac5c6095f0dfce5e14cf
'http://deb.debian.org/debian/pool/main/n/ncurses/ncurses_6.1%2b20181013-2%2bdeb10u2.debian.tar.xz' ncurses_6.1+20181013-2+deb10u2.debian.tar.xz 61664 SHA256:4574ec11ce2577e76f30f8d40cc2a9ebf94d8208f47247021da88b7b09e77df9
```
Other potentially useful URLs:
- https://sources.debian.net/src/ncurses/6.1+20181013-2+deb10u2/ (for browsing the source)
- https://sources.debian.net/src/ncurses/6.1+20181013-2+deb10u2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/ncurses/6.1+20181013-2+deb10u2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `nettle=3.4.1-1+deb10u1`
Binary Packages:
- `libhogweed4:amd64=3.4.1-1+deb10u1`
- `libnettle6:amd64=3.4.1-1+deb10u1`
Licenses: (parsed from: `/usr/share/doc/libhogweed4/copyright`, `/usr/share/doc/libnettle6/copyright`)
- `GAP`
- `GPL`
- `GPL-2`
- `GPL-2+`
- `GPL-2+ with Autoconf exception`
- `LGPL`
- `LGPL-2`
- `LGPL-2+`
- `LGPL-2.1+`
- `other`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris nettle=3.4.1-1+deb10u1
'http://deb.debian.org/debian/pool/main/n/nettle/nettle_3.4.1-1%2bdeb10u1.dsc' nettle_3.4.1-1+deb10u1.dsc 2290 SHA256:b38c9a78ae0732a94d06dbc811479f6ee8357bd47604dfa92f0d0801b148eebc
'http://deb.debian.org/debian/pool/main/n/nettle/nettle_3.4.1.orig.tar.gz' nettle_3.4.1.orig.tar.gz 1947053 SHA256:f941cf1535cd5d1819be5ccae5babef01f6db611f9b5a777bae9c7604b8a92ad
'http://deb.debian.org/debian/pool/main/n/nettle/nettle_3.4.1.orig.tar.gz.asc' nettle_3.4.1.orig.tar.gz.asc 2476 SHA256:07b265366b46bc67950da3f34687235eaa85c45b326e42bb7c9b58830b651d28
'http://deb.debian.org/debian/pool/main/n/nettle/nettle_3.4.1-1%2bdeb10u1.debian.tar.xz' nettle_3.4.1-1+deb10u1.debian.tar.xz 26508 SHA256:b847de5ccd50b9bc0aa56dd7fe750c224683174676dde69c86f62bece52ff4ba
```
Other potentially useful URLs:
- https://sources.debian.net/src/nettle/3.4.1-1+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/nettle/3.4.1-1+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/nettle/3.4.1-1+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `nghttp2=1.36.0-2+deb10u1`
Binary Packages:
- `libnghttp2-14:amd64=1.36.0-2+deb10u1`
Licenses: (parsed from: `/usr/share/doc/libnghttp2-14/copyright`)
- `BSD-2-clause`
- `Expat`
- `GPL-3`
- `GPL-3+ with autoconf exception`
- `MIT`
- `SIL-OFL-1.1`
- `all-permissive`
Source:
```console
$ apt-get source -qq --print-uris nghttp2=1.36.0-2+deb10u1
'http://deb.debian.org/debian/pool/main/n/nghttp2/nghttp2_1.36.0-2%2bdeb10u1.dsc' nghttp2_1.36.0-2+deb10u1.dsc 2601 SHA256:3712e7cbb20d1b43f8f7a9c5408b79bd80e4c3c0cb2d4ad68062d367b1715fd6
'http://deb.debian.org/debian/pool/main/n/nghttp2/nghttp2_1.36.0.orig.tar.bz2' nghttp2_1.36.0.orig.tar.bz2 1919021 SHA256:16a734d7414062911e23989e243ca76e7722cb3c60273723e3e3ae4c21e71ceb
'http://deb.debian.org/debian/pool/main/n/nghttp2/nghttp2_1.36.0-2%2bdeb10u1.debian.tar.xz' nghttp2_1.36.0-2+deb10u1.debian.tar.xz 13132 SHA256:f4fb4dd2385d158efba2ec3d3ce1b13c24ecb05c75f353f370f7cb0f080c7537
```
Other potentially useful URLs:
- https://sources.debian.net/src/nghttp2/1.36.0-2+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/nghttp2/1.36.0-2+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/nghttp2/1.36.0-2+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `openldap=2.4.47+dfsg-3+deb10u6`
Binary Packages:
- `libldap-2.4-2:amd64=2.4.47+dfsg-3+deb10u6`
- `libldap-common=2.4.47+dfsg-3+deb10u6`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris openldap=2.4.47+dfsg-3+deb10u6
'http://deb.debian.org/debian/pool/main/o/openldap/openldap_2.4.47%2bdfsg-3%2bdeb10u6.dsc' openldap_2.4.47+dfsg-3+deb10u6.dsc 3022 SHA256:14f48c8d0ffddaaf51edf064d9abe90d6f7466c710d6e78c520db48998eaebb0
'http://deb.debian.org/debian/pool/main/o/openldap/openldap_2.4.47%2bdfsg.orig.tar.gz' openldap_2.4.47+dfsg.orig.tar.gz 4872293 SHA256:8f1ac7a4be7dd8ef158361efbfe16509756d3d9b396f5f378c3cf5c727807651
'http://deb.debian.org/debian/pool/main/o/openldap/openldap_2.4.47%2bdfsg-3%2bdeb10u6.debian.tar.xz' openldap_2.4.47+dfsg-3+deb10u6.debian.tar.xz 173600 SHA256:d21ccc7d2fc3b38dd68e8f4dd73bcff51d377e4ad47e6372ea4f806729856b79
```
Other potentially useful URLs:
- https://sources.debian.net/src/openldap/2.4.47+dfsg-3+deb10u6/ (for browsing the source)
- https://sources.debian.net/src/openldap/2.4.47+dfsg-3+deb10u6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/openldap/2.4.47+dfsg-3+deb10u6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `openssl=1.1.1d-0+deb10u7`
Binary Packages:
- `libssl1.1:amd64=1.1.1d-0+deb10u7`
- `openssl=1.1.1d-0+deb10u7`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris openssl=1.1.1d-0+deb10u7
'http://deb.debian.org/debian/pool/main/o/openssl/openssl_1.1.1d-0%2bdeb10u7.dsc' openssl_1.1.1d-0+deb10u7.dsc 2649 SHA256:1cb63c67aedf26f139124e4a9ec5d51fa036948df3964805070da7b816b14581
'http://deb.debian.org/debian/pool/main/o/openssl/openssl_1.1.1d.orig.tar.gz' openssl_1.1.1d.orig.tar.gz 8845861 SHA256:1e3a91bc1f9dfce01af26026f856e064eab4c8ee0a8f457b5ae30b40b8b711f2
'http://deb.debian.org/debian/pool/main/o/openssl/openssl_1.1.1d.orig.tar.gz.asc' openssl_1.1.1d.orig.tar.gz.asc 488 SHA256:f3fd3299a79421fffd51d35f62636b8e987dab1d3033d93a19d7685868e15395
'http://deb.debian.org/debian/pool/main/o/openssl/openssl_1.1.1d-0%2bdeb10u7.debian.tar.xz' openssl_1.1.1d-0+deb10u7.debian.tar.xz 109236 SHA256:8a84cb76bf14a6725ee6b56d76335adadc639513d7c15b24b041fbb5f80cf053
```
Other potentially useful URLs:
- https://sources.debian.net/src/openssl/1.1.1d-0+deb10u7/ (for browsing the source)
- https://sources.debian.net/src/openssl/1.1.1d-0+deb10u7/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/openssl/1.1.1d-0+deb10u7/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `p11-kit=0.23.15-2+deb10u1`
Binary Packages:
- `libp11-kit0:amd64=0.23.15-2+deb10u1`
Licenses: (parsed from: `/usr/share/doc/libp11-kit0/copyright`)
- `BSD-3-Clause`
- `ISC`
- `ISC+IBM`
- `permissive-like-automake-output`
- `same-as-rest-of-p11kit`
Source:
```console
$ apt-get source -qq --print-uris p11-kit=0.23.15-2+deb10u1
'http://deb.debian.org/debian/pool/main/p/p11-kit/p11-kit_0.23.15-2%2bdeb10u1.dsc' p11-kit_0.23.15-2+deb10u1.dsc 2607 SHA256:d0c7215905c92f521b7b5a4dd98e2ad85abd40a05430dcf2a17dcc3e21f96389
'http://deb.debian.org/debian/pool/main/p/p11-kit/p11-kit_0.23.15.orig.tar.gz' p11-kit_0.23.15.orig.tar.gz 1276733 SHA256:f7c139a0c77a1f0012619003e542060ba8f94799a0ef463026db390680e4d798
'http://deb.debian.org/debian/pool/main/p/p11-kit/p11-kit_0.23.15.orig.tar.gz.asc' p11-kit_0.23.15.orig.tar.gz.asc 879 SHA256:e28bd948178e2f91e18fbb4387d7b6532aa44eb92ac4c67a6485bc9cd9c79db8
'http://deb.debian.org/debian/pool/main/p/p11-kit/p11-kit_0.23.15-2%2bdeb10u1.debian.tar.xz' p11-kit_0.23.15-2+deb10u1.debian.tar.xz 25496 SHA256:9f90af175ef68fdb3aca71f22074fa1860f88a820e29e1290ee51d606c53b572
```
Other potentially useful URLs:
- https://sources.debian.net/src/p11-kit/0.23.15-2+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/p11-kit/0.23.15-2+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/p11-kit/0.23.15-2+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `pam=1.3.1-5`
Binary Packages:
- `libpam-modules:amd64=1.3.1-5`
- `libpam-modules-bin=1.3.1-5`
- `libpam-runtime=1.3.1-5`
- `libpam0g:amd64=1.3.1-5`
Licenses: (parsed from: `/usr/share/doc/libpam-modules/copyright`, `/usr/share/doc/libpam-modules-bin/copyright`, `/usr/share/doc/libpam-runtime/copyright`, `/usr/share/doc/libpam0g/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris pam=1.3.1-5
'http://deb.debian.org/debian/pool/main/p/pam/pam_1.3.1-5.dsc' pam_1.3.1-5.dsc 2648 SHA256:6be33a9db415ff3e474a10d1a0c41fca3dbe90ae8c9ddd9a4a997892b11d67ab
'http://deb.debian.org/debian/pool/main/p/pam/pam_1.3.1.orig.tar.xz' pam_1.3.1.orig.tar.xz 912332 SHA256:eff47a4ecd833fbf18de9686632a70ee8d0794b79aecb217ebd0ce11db4cd0db
'http://deb.debian.org/debian/pool/main/p/pam/pam_1.3.1-5.debian.tar.xz' pam_1.3.1-5.debian.tar.xz 114384 SHA256:be2c2b27efd6bea02f9d102d7d8c58374557beb7245b2a9d75ecc829e9449f62
```
Other potentially useful URLs:
- https://sources.debian.net/src/pam/1.3.1-5/ (for browsing the source)
- https://sources.debian.net/src/pam/1.3.1-5/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/pam/1.3.1-5/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `patch=2.7.6-3+deb10u1`
Binary Packages:
- `patch=2.7.6-3+deb10u1`
Licenses: (parsed from: `/usr/share/doc/patch/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris patch=2.7.6-3+deb10u1
'http://deb.debian.org/debian/pool/main/p/patch/patch_2.7.6-3%2bdeb10u1.dsc' patch_2.7.6-3+deb10u1.dsc 1731 SHA256:dae4e0d25106b2d14d981309395371397091892359b44a919eb08dd841bee13f
'http://deb.debian.org/debian/pool/main/p/patch/patch_2.7.6.orig.tar.xz' patch_2.7.6.orig.tar.xz 783756 SHA256:ac610bda97abe0d9f6b7c963255a11dcb196c25e337c61f94e4778d632f1d8fd
'http://deb.debian.org/debian/pool/main/p/patch/patch_2.7.6-3%2bdeb10u1.debian.tar.xz' patch_2.7.6-3+deb10u1.debian.tar.xz 13164 SHA256:58d4e84bd4ce850152e1d1911546ac0aad9764992570c360cff8f9cf03eb37bc
```
Other potentially useful URLs:
- https://sources.debian.net/src/patch/2.7.6-3+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/patch/2.7.6-3+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/patch/2.7.6-3+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `pcre3=2:8.39-12`
Binary Packages:
- `libpcre3:amd64=2:8.39-12`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris pcre3=2:8.39-12
'http://deb.debian.org/debian/pool/main/p/pcre3/pcre3_8.39-12.dsc' pcre3_8.39-12.dsc 2226 SHA256:7660921533f286d211bc129318327041ceb80d3d21e91c1ae7c10f284342c5e0
'http://deb.debian.org/debian/pool/main/p/pcre3/pcre3_8.39.orig.tar.bz2' pcre3_8.39.orig.tar.bz2 1560758 SHA256:b858099f82483031ee02092711689e7245586ada49e534a06e678b8ea9549e8b
'http://deb.debian.org/debian/pool/main/p/pcre3/pcre3_8.39-12.debian.tar.gz' pcre3_8.39-12.debian.tar.gz 26509 SHA256:ee193ddee446f0bdb966fca5987ef871da7a528a473304285619988102371c4c
```
Other potentially useful URLs:
- https://sources.debian.net/src/pcre3/2:8.39-12/ (for browsing the source)
- https://sources.debian.net/src/pcre3/2:8.39-12/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/pcre3/2:8.39-12/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `perl=5.28.1-6+deb10u1`
Binary Packages:
- `libperl5.28:amd64=5.28.1-6+deb10u1`
- `perl=5.28.1-6+deb10u1`
- `perl-base=5.28.1-6+deb10u1`
- `perl-modules-5.28=5.28.1-6+deb10u1`
Licenses: (parsed from: `/usr/share/doc/libperl5.28/copyright`, `/usr/share/doc/perl/copyright`, `/usr/share/doc/perl-base/copyright`, `/usr/share/doc/perl-modules-5.28/copyright`)
- `Artistic`
- `Artistic,`
- `Artistic-2`
- `Artistic-dist`
- `BSD-3-clause`
- `BSD-3-clause-GENERIC`
- `BSD-3-clause-with-weird-numbering`
- `BSD-4-clause-POWERDOG`
- `BZIP`
- `CC0-1.0`
- `DONT-CHANGE-THE-GPL`
- `Expat`
- `GPL-1`
- `GPL-1+`
- `GPL-2`
- `GPL-2+`
- `GPL-3+-WITH-BISON-EXCEPTION`
- `HSIEH-BSD`
- `HSIEH-DERIVATIVE`
- `LGPL-2.1`
- `REGCOMP`
- `REGCOMP,`
- `RRA-KEEP-THIS-NOTICE`
- `S2P`
- `SDBM-PUBLIC-DOMAIN`
- `TEXT-TABS`
- `Unicode`
- `ZLIB`
Source:
```console
$ apt-get source -qq --print-uris perl=5.28.1-6+deb10u1
'http://deb.debian.org/debian/pool/main/p/perl/perl_5.28.1-6%2bdeb10u1.dsc' perl_5.28.1-6+deb10u1.dsc 2863 SHA256:a680d97001398640c249fc6bae6124fe59eb465b044f03fb4148b22152895785
'http://deb.debian.org/debian/pool/main/p/perl/perl_5.28.1.orig-regen-configure.tar.xz' perl_5.28.1.orig-regen-configure.tar.xz 411944 SHA256:5873b81af4514d3910ab1a8267b15ff8c0e2100dbae4edfd10b65ef72cd31ef8
'http://deb.debian.org/debian/pool/main/p/perl/perl_5.28.1.orig.tar.xz' perl_5.28.1.orig.tar.xz 12372080 SHA256:fea7162d4cca940a387f0587b93f6737d884bf74d8a9d7cfd978bc12cd0b202d
'http://deb.debian.org/debian/pool/main/p/perl/perl_5.28.1-6%2bdeb10u1.debian.tar.xz' perl_5.28.1-6+deb10u1.debian.tar.xz 185004 SHA256:e531c2d8c85b28b34c2122175a8e8f6cfe56b8a0708972fc4beae9876549d815
```
Other potentially useful URLs:
- https://sources.debian.net/src/perl/5.28.1-6+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/perl/5.28.1-6+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/perl/5.28.1-6+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `pkg-config=0.29-6`
Binary Packages:
- `pkg-config=0.29-6`
Licenses: (parsed from: `/usr/share/doc/pkg-config/copyright`)
- `GPL`
Source:
```console
$ apt-get source -qq --print-uris pkg-config=0.29-6
'http://deb.debian.org/debian/pool/main/p/pkg-config/pkg-config_0.29-6.dsc' pkg-config_0.29-6.dsc 1757 SHA256:a5f1a8f976f3d8ad579341ba73514eb3af9dbc6bad8d2b5828699ac24196624f
'http://deb.debian.org/debian/pool/main/p/pkg-config/pkg-config_0.29.orig.tar.gz' pkg-config_0.29.orig.tar.gz 1973875 SHA256:c8507705d2a10c67f385d66ca2aae31e81770cc0734b4191eb8c489e864a006b
'http://deb.debian.org/debian/pool/main/p/pkg-config/pkg-config_0.29-6.diff.gz' pkg-config_0.29-6.diff.gz 8145 SHA256:c06146d878fb7faa4ac3edb5e45188b184cc650a752384d5c1053f41edf590bc
```
Other potentially useful URLs:
- https://sources.debian.net/src/pkg-config/0.29-6/ (for browsing the source)
- https://sources.debian.net/src/pkg-config/0.29-6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/pkg-config/0.29-6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `re2c=1.1.1-1`
Binary Packages:
- `re2c=1.1.1-1`
Licenses: (parsed from: `/usr/share/doc/re2c/copyright`)
- `PHP-3.01`
- `Zend-2.00`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris re2c=1.1.1-1
'http://deb.debian.org/debian/pool/main/r/re2c/re2c_1.1.1-1.dsc' re2c_1.1.1-1.dsc 1833 SHA256:dce993d2ca99b5ab360e9833a068ad615df6930a3424b4337bb888d426e85eae
'http://deb.debian.org/debian/pool/main/r/re2c/re2c_1.1.1.orig.tar.gz' re2c_1.1.1.orig.tar.gz 5907416 SHA256:856597337ea00b24ce91f549f79e6eece1b92ba5f8b63292cad66c14ac7451cf
'http://deb.debian.org/debian/pool/main/r/re2c/re2c_1.1.1-1.debian.tar.xz' re2c_1.1.1-1.debian.tar.xz 9032 SHA256:2f9e3637df4d4fc517ac274fbb3404aa891c3e61d111ffb40bcb9e103e5e9aec
```
Other potentially useful URLs:
- https://sources.debian.net/src/re2c/1.1.1-1/ (for browsing the source)
- https://sources.debian.net/src/re2c/1.1.1-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/re2c/1.1.1-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `readline=7.0-5`
Binary Packages:
- `libreadline7:amd64=7.0-5`
- `readline-common=7.0-5`
Licenses: (parsed from: `/usr/share/doc/libreadline7/copyright`, `/usr/share/doc/readline-common/copyright`)
- `GFDL`
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris readline=7.0-5
'http://deb.debian.org/debian/pool/main/r/readline/readline_7.0-5.dsc' readline_7.0-5.dsc 2419 SHA256:4a804235e91ced3b957b0772101ca3992f5ad051e6540b8c41a1f98a06e84033
'http://deb.debian.org/debian/pool/main/r/readline/readline_7.0.orig.tar.gz' readline_7.0.orig.tar.gz 2910016 SHA256:750d437185286f40a369e1e4f4764eda932b9459b5ec9a731628393dd3d32334
'http://deb.debian.org/debian/pool/main/r/readline/readline_7.0-5.debian.tar.xz' readline_7.0-5.debian.tar.xz 29992 SHA256:5c1cc7396a670ce7e6e4c0bc36e8d3067b7642bea5b30fc3ff22bf8e65d2ee80
```
Other potentially useful URLs:
- https://sources.debian.net/src/readline/7.0-5/ (for browsing the source)
- https://sources.debian.net/src/readline/7.0-5/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/readline/7.0-5/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `rtmpdump=2.4+20151223.gitfa8646d.1-2`
Binary Packages:
- `librtmp1:amd64=2.4+20151223.gitfa8646d.1-2`
Licenses: (parsed from: `/usr/share/doc/librtmp1/copyright`)
- `GPL-2`
- `LGPL-2.1`
Source:
```console
$ apt-get source -qq --print-uris rtmpdump=2.4+20151223.gitfa8646d.1-2
'http://deb.debian.org/debian/pool/main/r/rtmpdump/rtmpdump_2.4%2b20151223.gitfa8646d.1-2.dsc' rtmpdump_2.4+20151223.gitfa8646d.1-2.dsc 2299 SHA256:a296819cd2ab5880b67ad963ef0867cb10e462f4403e52565aa863eb05bb1370
'http://deb.debian.org/debian/pool/main/r/rtmpdump/rtmpdump_2.4%2b20151223.gitfa8646d.1.orig.tar.gz' rtmpdump_2.4+20151223.gitfa8646d.1.orig.tar.gz 142213 SHA256:5c032f5c8cc2937eb55a81a94effdfed3b0a0304b6376147b86f951e225e3ab5
'http://deb.debian.org/debian/pool/main/r/rtmpdump/rtmpdump_2.4%2b20151223.gitfa8646d.1-2.debian.tar.xz' rtmpdump_2.4+20151223.gitfa8646d.1-2.debian.tar.xz 8096 SHA256:26d47de07d16285e4ca55b0828cbbf1ba35e671f9b3500a87e301fe755d26882
```
Other potentially useful URLs:
- https://sources.debian.net/src/rtmpdump/2.4+20151223.gitfa8646d.1-2/ (for browsing the source)
- https://sources.debian.net/src/rtmpdump/2.4+20151223.gitfa8646d.1-2/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/rtmpdump/2.4+20151223.gitfa8646d.1-2/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `sed=4.7-1`
Binary Packages:
- `sed=4.7-1`
Licenses: (parsed from: `/usr/share/doc/sed/copyright`)
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris sed=4.7-1
'http://deb.debian.org/debian/pool/main/s/sed/sed_4.7-1.dsc' sed_4.7-1.dsc 1880 SHA256:dd0e8daed987929920f7729771f9c7a5b48d094923aaf686efd2ab19db776108
'http://deb.debian.org/debian/pool/main/s/sed/sed_4.7.orig.tar.xz' sed_4.7.orig.tar.xz 1298316 SHA256:2885768cd0a29ff8d58a6280a270ff161f6a3deb5690b2be6c49f46d4c67bd6a
'http://deb.debian.org/debian/pool/main/s/sed/sed_4.7-1.debian.tar.xz' sed_4.7-1.debian.tar.xz 59824 SHA256:a2ab8d50807fd2242f86d6c6257399e790445ab6f8932f7f487d34361b4fc483
```
Other potentially useful URLs:
- https://sources.debian.net/src/sed/4.7-1/ (for browsing the source)
- https://sources.debian.net/src/sed/4.7-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/sed/4.7-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `shadow=1:4.5-1.1`
Binary Packages:
- `login=1:4.5-1.1`
- `passwd=1:4.5-1.1`
Licenses: (parsed from: `/usr/share/doc/login/copyright`, `/usr/share/doc/passwd/copyright`)
- `GPL-2`
Source:
```console
$ apt-get source -qq --print-uris shadow=1:4.5-1.1
'http://deb.debian.org/debian/pool/main/s/shadow/shadow_4.5-1.1.dsc' shadow_4.5-1.1.dsc 2319 SHA256:75993dc19ccc4d5c404831d2dab021a03eaa39216b518d596b639d8f2ea4e98b
'http://deb.debian.org/debian/pool/main/s/shadow/shadow_4.5.orig.tar.xz' shadow_4.5.orig.tar.xz 1344524 SHA256:22b0952dc944b163e2370bb911b11ca275fc80ad024267cf21e496b28c23d500
'http://deb.debian.org/debian/pool/main/s/shadow/shadow_4.5-1.1.debian.tar.xz' shadow_4.5-1.1.debian.tar.xz 462960 SHA256:3bb16bbf5d9a255d7333932ae99815d65c1c8e86127e5016809d4ba55c499538
```
Other potentially useful URLs:
- https://sources.debian.net/src/shadow/1:4.5-1.1/ (for browsing the source)
- https://sources.debian.net/src/shadow/1:4.5-1.1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/shadow/1:4.5-1.1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `sqlite3=3.27.2-3+deb10u1`
Binary Packages:
- `libsqlite3-0:amd64=3.27.2-3+deb10u1`
Licenses: (parsed from: `/usr/share/doc/libsqlite3-0/copyright`)
- `GPL-2`
- `GPL-2+`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris sqlite3=3.27.2-3+deb10u1
'http://deb.debian.org/debian/pool/main/s/sqlite3/sqlite3_3.27.2-3%2bdeb10u1.dsc' sqlite3_3.27.2-3+deb10u1.dsc 2430 SHA256:6faf359c70d0618558056d6dee9f05ea810ff57aebb141ac28129c17e685cf1b
'http://deb.debian.org/debian/pool/main/s/sqlite3/sqlite3_3.27.2.orig-www.tar.xz' sqlite3_3.27.2.orig-www.tar.xz 5602752 SHA256:b50bea0e1974b33bcb2cec4c29fcdeecd8f960020ce0310b15fb123938844bee
'http://deb.debian.org/debian/pool/main/s/sqlite3/sqlite3_3.27.2.orig.tar.xz' sqlite3_3.27.2.orig.tar.xz 6844832 SHA256:6cb1606bbc38270739d256b5ab1cf94dccf5b2a3b4cbceb0545aac76f6ef40f2
'http://deb.debian.org/debian/pool/main/s/sqlite3/sqlite3_3.27.2-3%2bdeb10u1.debian.tar.xz' sqlite3_3.27.2-3+deb10u1.debian.tar.xz 38844 SHA256:ccabd6cc3d05887f2dd239065f3c3063f766e9e187804a89f04cbcbdb49a6b7e
```
Other potentially useful URLs:
- https://sources.debian.net/src/sqlite3/3.27.2-3+deb10u1/ (for browsing the source)
- https://sources.debian.net/src/sqlite3/3.27.2-3+deb10u1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/sqlite3/3.27.2-3+deb10u1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `systemd=241-7~deb10u8`
Binary Packages:
- `libsystemd0:amd64=241-7~deb10u8`
- `libudev1:amd64=241-7~deb10u8`
Licenses: (parsed from: `/usr/share/doc/libsystemd0/copyright`, `/usr/share/doc/libudev1/copyright`)
- `CC0-1.0`
- `Expat`
- `GPL-2`
- `GPL-2+`
- `LGPL-2.1`
- `LGPL-2.1+`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris systemd=241-7~deb10u8
'http://deb.debian.org/debian/pool/main/s/systemd/systemd_241-7%7edeb10u8.dsc' systemd_241-7~deb10u8.dsc 5101 SHA256:925ac30011dee81ab0e418f27515db2ddead0fa609b34eb7815cee7861c5a561
'http://deb.debian.org/debian/pool/main/s/systemd/systemd_241.orig.tar.gz' systemd_241.orig.tar.gz 7640538 SHA256:b2561a8e1d10a2c248253f0dda31a85dd6d69f2b54177de55e02cd1d2778316e
'http://deb.debian.org/debian/pool/main/s/systemd/systemd_241-7%7edeb10u8.debian.tar.xz' systemd_241-7~deb10u8.debian.tar.xz 182616 SHA256:6f5419d06f917a0565a55b9c9e7b9b55c094623a32b722f24431c20f7b48491b
```
Other potentially useful URLs:
- https://sources.debian.net/src/systemd/241-7~deb10u8/ (for browsing the source)
- https://sources.debian.net/src/systemd/241-7~deb10u8/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/systemd/241-7~deb10u8/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `sysvinit=2.93-8`
Binary Packages:
- `sysvinit-utils=2.93-8`
Licenses: (parsed from: `/usr/share/doc/sysvinit-utils/copyright`)
- `GPL-2`
- `GPL-2+`
Source:
```console
$ apt-get source -qq --print-uris sysvinit=2.93-8
'http://deb.debian.org/debian/pool/main/s/sysvinit/sysvinit_2.93-8.dsc' sysvinit_2.93-8.dsc 2657 SHA256:84aa66bfa1c7963c179da26c015468d489b39bde19c85096b4d3e261e5fc043d
'http://deb.debian.org/debian/pool/main/s/sysvinit/sysvinit_2.93.orig.tar.xz' sysvinit_2.93.orig.tar.xz 117580 SHA256:472d460e233d981488509a167125a82925c8c9aba6b5608cb22598fdf326a8ff
'http://deb.debian.org/debian/pool/main/s/sysvinit/sysvinit_2.93.orig.tar.xz.asc' sysvinit_2.93.orig.tar.xz.asc 1076 SHA256:cf2b374a96276a16e3ef07ad2be596420f0d8d77227aad3144d7ab4ea165a4af
'http://deb.debian.org/debian/pool/main/s/sysvinit/sysvinit_2.93-8.debian.tar.xz' sysvinit_2.93-8.debian.tar.xz 127136 SHA256:2db2ae46048acf743445545151cbc0bc5530eca1f2eec51df3175d8ab26edfa6
```
Other potentially useful URLs:
- https://sources.debian.net/src/sysvinit/2.93-8/ (for browsing the source)
- https://sources.debian.net/src/sysvinit/2.93-8/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/sysvinit/2.93-8/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `tar=1.30+dfsg-6`
Binary Packages:
- `tar=1.30+dfsg-6`
Licenses: (parsed from: `/usr/share/doc/tar/copyright`)
- `GPL-2`
- `GPL-3`
Source:
```console
$ apt-get source -qq --print-uris tar=1.30+dfsg-6
'http://deb.debian.org/debian/pool/main/t/tar/tar_1.30%2bdfsg-6.dsc' tar_1.30+dfsg-6.dsc 1995 SHA256:1515951c8a2fc9a43e822efd82d9043cdec4bec47ddca9e7f1311c73e6b00d0c
'http://deb.debian.org/debian/pool/main/t/tar/tar_1.30%2bdfsg.orig.tar.xz' tar_1.30+dfsg.orig.tar.xz 1883220 SHA256:c02f3747ffe02017878303dde8b78e79cd220364c5e8048cf92320232e38912d
'http://deb.debian.org/debian/pool/main/t/tar/tar_1.30%2bdfsg-6.debian.tar.xz' tar_1.30+dfsg-6.debian.tar.xz 22124 SHA256:b7caae6287992536353413e7a9b21301b29c32066bb6f36b7190074af9dd5c50
```
Other potentially useful URLs:
- https://sources.debian.net/src/tar/1.30+dfsg-6/ (for browsing the source)
- https://sources.debian.net/src/tar/1.30+dfsg-6/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/tar/1.30+dfsg-6/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `tzdata=2021a-0+deb10u3`
Binary Packages:
- `tzdata=2021a-0+deb10u3`
**WARNING:** unable to detect licenses! (package likely not compliant with DEP-5)
If source is available (seen below), check the contents of `debian/copyright` within it.
Source:
```console
$ apt-get source -qq --print-uris tzdata=2021a-0+deb10u3
'http://deb.debian.org/debian/pool/main/t/tzdata/tzdata_2021a-0%2bdeb10u3.dsc' tzdata_2021a-0+deb10u3.dsc 2264 SHA256:1730f7a8a2ac3ac3c9db9dc7ddd3074343a7846988c515d55f046be527a00834
'http://deb.debian.org/debian/pool/main/t/tzdata/tzdata_2021a.orig.tar.gz' tzdata_2021a.orig.tar.gz 411892 SHA256:39e7d2ba08c68cbaefc8de3227aab0dec2521be8042cf56855f7dc3a9fb14e08
'http://deb.debian.org/debian/pool/main/t/tzdata/tzdata_2021a.orig.tar.gz.asc' tzdata_2021a.orig.tar.gz.asc 833 SHA256:9dc5f54674166f4ffbc2d4485e656227430ab5f39c9006e6ed9986281117f058
'http://deb.debian.org/debian/pool/main/t/tzdata/tzdata_2021a-0%2bdeb10u3.debian.tar.xz' tzdata_2021a-0+deb10u3.debian.tar.xz 109800 SHA256:aa09186f2ceb3d4b7ec9e7575ba51a3c1b4894e0669e1125c4cae1eafec73808
```
Other potentially useful URLs:
- https://sources.debian.net/src/tzdata/2021a-0+deb10u3/ (for browsing the source)
- https://sources.debian.net/src/tzdata/2021a-0+deb10u3/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/tzdata/2021a-0+deb10u3/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `util-linux=2.33.1-0.1`
Binary Packages:
- `bsdutils=1:2.33.1-0.1`
- `fdisk=2.33.1-0.1`
- `libblkid1:amd64=2.33.1-0.1`
- `libfdisk1:amd64=2.33.1-0.1`
- `libmount1:amd64=2.33.1-0.1`
- `libsmartcols1:amd64=2.33.1-0.1`
- `libuuid1:amd64=2.33.1-0.1`
- `mount=2.33.1-0.1`
- `util-linux=2.33.1-0.1`
Licenses: (parsed from: `/usr/share/doc/bsdutils/copyright`, `/usr/share/doc/fdisk/copyright`, `/usr/share/doc/libblkid1/copyright`, `/usr/share/doc/libfdisk1/copyright`, `/usr/share/doc/libmount1/copyright`, `/usr/share/doc/libsmartcols1/copyright`, `/usr/share/doc/libuuid1/copyright`, `/usr/share/doc/mount/copyright`, `/usr/share/doc/util-linux/copyright`)
- `BSD-2-clause`
- `BSD-3-clause`
- `BSD-4-clause`
- `GPL-2`
- `GPL-2+`
- `GPL-3`
- `GPL-3+`
- `LGPL`
- `LGPL-2`
- `LGPL-2+`
- `LGPL-2.1`
- `LGPL-2.1+`
- `LGPL-3`
- `LGPL-3+`
- `MIT`
- `public-domain`
Source:
```console
$ apt-get source -qq --print-uris util-linux=2.33.1-0.1
'http://deb.debian.org/debian/pool/main/u/util-linux/util-linux_2.33.1-0.1.dsc' util-linux_2.33.1-0.1.dsc 3988 SHA256:b5ee1ff0a8de37c3e4d7c0c29b7571b30ba4bea1d37e55e3d1dac3a3cbc50827
'http://deb.debian.org/debian/pool/main/u/util-linux/util-linux_2.33.1.orig.tar.xz' util-linux_2.33.1.orig.tar.xz 4650936 SHA256:c14bd9f3b6e1792b90db87696e87ec643f9d63efa0a424f092a5a6b2f2dbef21
'http://deb.debian.org/debian/pool/main/u/util-linux/util-linux_2.33.1-0.1.debian.tar.xz' util-linux_2.33.1-0.1.debian.tar.xz 81780 SHA256:07bfeb8298fab559dec2091463cab343785853bcae6c92c0806b7639e105913a
```
Other potentially useful URLs:
- https://sources.debian.net/src/util-linux/2.33.1-0.1/ (for browsing the source)
- https://sources.debian.net/src/util-linux/2.33.1-0.1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/util-linux/2.33.1-0.1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `xz-utils=5.2.4-1`
Binary Packages:
- `liblzma5:amd64=5.2.4-1`
- `xz-utils=5.2.4-1`
Licenses: (parsed from: `/usr/share/doc/liblzma5/copyright`, `/usr/share/doc/xz-utils/copyright`)
- `Autoconf`
- `GPL-2`
- `GPL-2+`
- `GPL-3`
- `LGPL-2`
- `LGPL-2.1`
- `LGPL-2.1+`
- `PD`
- `PD-debian`
- `config-h`
- `noderivs`
- `none`
- `permissive-fsf`
- `permissive-nowarranty`
- `probably-PD`
Source:
```console
$ apt-get source -qq --print-uris xz-utils=5.2.4-1
'http://deb.debian.org/debian/pool/main/x/xz-utils/xz-utils_5.2.4-1.dsc' xz-utils_5.2.4-1.dsc 2518 SHA256:b1572c4efb3c8ebf6f0e044b70e1e0451c919a99d3f80be03b624a54dd7ea593
'http://deb.debian.org/debian/pool/main/x/xz-utils/xz-utils_5.2.4.orig.tar.xz' xz-utils_5.2.4.orig.tar.xz 1053868 SHA256:9717ae363760dedf573dad241420c5fea86256b65bc21d2cf71b2b12f0544f4b
'http://deb.debian.org/debian/pool/main/x/xz-utils/xz-utils_5.2.4.orig.tar.xz.asc' xz-utils_5.2.4.orig.tar.xz.asc 879 SHA256:88290c1deeaf674ae2a4821f4373fe0e4cc2a94199eae6dcc26df1e70cc15303
'http://deb.debian.org/debian/pool/main/x/xz-utils/xz-utils_5.2.4-1.debian.tar.xz' xz-utils_5.2.4-1.debian.tar.xz 135296 SHA256:d37b558444b76e88a69601df008cf1c0343c58cb7765b7bbb2099b0a19619361
```
Other potentially useful URLs:
- https://sources.debian.net/src/xz-utils/5.2.4-1/ (for browsing the source)
- https://sources.debian.net/src/xz-utils/5.2.4-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/xz-utils/5.2.4-1/ (for access to the source package after it no longer exists in the archive)
### `dpkg` source package: `zlib=1:1.2.11.dfsg-1`
Binary Packages:
- `zlib1g:amd64=1:1.2.11.dfsg-1`
Licenses: (parsed from: `/usr/share/doc/zlib1g/copyright`)
- `Zlib`
Source:
```console
$ apt-get source -qq --print-uris zlib=1:1.2.11.dfsg-1
'http://deb.debian.org/debian/pool/main/z/zlib/zlib_1.2.11.dfsg-1.dsc' zlib_1.2.11.dfsg-1.dsc 2266 SHA256:bf21ab4d60cb836725162f5072884596e781a2f4974182af1868f546306eb8c8
'http://deb.debian.org/debian/pool/main/z/zlib/zlib_1.2.11.dfsg.orig.tar.gz' zlib_1.2.11.dfsg.orig.tar.gz 370248 SHA256:80c481411a4fe8463aeb8270149a0e80bb9eaf7da44132b6e16f2b5af01bc899
'http://deb.debian.org/debian/pool/main/z/zlib/zlib_1.2.11.dfsg-1.debian.tar.xz' zlib_1.2.11.dfsg-1.debian.tar.xz 18956 SHA256:00b95b629fbe9a5181f8ba1ceddedf627aba1ab42e47f5916be8a41deb54098a
```
Other potentially useful URLs:
- https://sources.debian.net/src/zlib/1:1.2.11.dfsg-1/ (for browsing the source)
- https://sources.debian.net/src/zlib/1:1.2.11.dfsg-1/debian/copyright/ (for direct copyright/license information)
- http://snapshot.debian.org/package/zlib/1:1.2.11.dfsg-1/ (for access to the source package after it no longer exists in the archive)
| apache-2.0 |
coreos/ignition | config/v3_4_experimental/types/ignition.go | 1335 | // Copyright 2020 Red Hat, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types
import (
"github.com/coreos/go-semver/semver"
"github.com/coreos/ignition/v2/config/shared/errors"
"github.com/coreos/vcontext/path"
"github.com/coreos/vcontext/report"
)
// Semver parses the config's Version string into a semantic version.
// It returns an error when the string is not valid semver.
func (v Ignition) Semver() (*semver.Version, error) {
	parsed, err := semver.NewVersion(v.Version)
	if err != nil {
		return nil, err
	}
	return parsed, nil
}
// Validate checks every merge resource in the config, recording an error
// at the corresponding context path for each entry that fails its
// required-source validation.
func (ic IgnitionConfig) Validate(c path.ContextPath) (r report.Report) {
	for idx, resource := range ic.Merge {
		entryPath := c.Append("merge", idx)
		r.AddOnError(entryPath, resource.validateRequiredSource())
	}
	return r
}
// Validate verifies that the config's version string parses as semver and
// matches exactly the version this package supports (MaxVersion).
func (v Ignition) Validate(c path.ContextPath) (r report.Report) {
	versionPath := c.Append("version")
	parsed, err := v.Semver()
	if err != nil {
		// An unparseable string is "invalid"; a parseable-but-wrong
		// version is reported as "unknown" below.
		r.AddOnError(versionPath, errors.ErrInvalidVersion)
		return r
	}
	if *parsed != MaxVersion {
		r.AddOnError(versionPath, errors.ErrUnknownVersion)
	}
	return r
}
| apache-2.0 |
leppa/home-assistant | homeassistant/helpers/state.py | 3813 | """Helpers that help with state related things."""
import asyncio
from collections import defaultdict
import datetime as dt
import logging
from types import ModuleType, TracebackType
from typing import Dict, Iterable, List, Optional, Type, Union
from homeassistant.components.sun import STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON
from homeassistant.const import (
STATE_CLOSED,
STATE_HOME,
STATE_LOCKED,
STATE_NOT_HOME,
STATE_OFF,
STATE_ON,
STATE_OPEN,
STATE_UNKNOWN,
STATE_UNLOCKED,
)
from homeassistant.core import Context, State
from homeassistant.loader import IntegrationNotFound, async_get_integration, bind_hass
import homeassistant.util.dt as dt_util
from .typing import HomeAssistantType
_LOGGER = logging.getLogger(__name__)
class AsyncTrackStates:
    """Context manager that records which states change inside its block.

    On entry it captures the current UTC time; on exit it appends every
    state updated since that moment to the list returned by ``__enter__``.

    Must be run within the event loop.
    """

    def __init__(self, hass: HomeAssistantType) -> None:
        """Store the hass instance and prepare an empty result list."""
        self.hass = hass
        self.states: List[State] = []

    # pylint: disable=attribute-defined-outside-init
    def __enter__(self) -> List[State]:
        """Mark the start time and hand back the (still empty) result list."""
        self.now = dt_util.utcnow()
        return self.states

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        """Collect every state changed since ``__enter__`` into the list."""
        changed = get_changed_since(self.hass.states.async_all(), self.now)
        self.states.extend(changed)
def get_changed_since(
    states: Iterable[State], utc_point_in_time: dt.datetime
) -> List[State]:
    """Return the states whose last update is at or after the given time."""
    return list(
        filter(lambda state: state.last_updated >= utc_point_in_time, states)
    )
@bind_hass
async def async_reproduce_state(
    hass: HomeAssistantType,
    states: Union[State, Iterable[State]],
    blocking: bool = False,
    context: Optional[Context] = None,
) -> None:
    """Reproduce a list of states on multiple domains."""
    # Normalize a single state into a one-element list.
    if isinstance(states, State):
        states = [states]

    # Group the requested states by their integration domain.
    domain_states: Dict[str, List[State]] = defaultdict(list)
    for state in states:
        domain_states[state.domain].append(state)

    async def worker(domain: str, states_by_domain: List[State]) -> None:
        """Reproduce the states belonging to a single integration domain."""
        try:
            integration = await async_get_integration(hass, domain)
        except IntegrationNotFound:
            _LOGGER.warning(
                "Trying to reproduce state for unknown integration: %s", domain
            )
            return
        try:
            platform: Optional[ModuleType] = integration.get_platform("reproduce_state")
        except ImportError:
            _LOGGER.warning("Integration %s does not support reproduce state", domain)
            return
        await platform.async_reproduce_states(  # type: ignore
            hass, states_by_domain, context=context
        )

    if domain_states:
        # run all domains in parallel
        await asyncio.gather(
            *(worker(domain, grouped) for domain, grouped in domain_states.items())
        )
def state_as_number(state: State) -> float:
    """
    Try to coerce our state to a number.

    Raises ValueError if this is not possible.
    """
    # Well-known "truthy" states map to 1, "falsy" states to 0.
    truthy = (
        STATE_ON,
        STATE_LOCKED,
        STATE_ABOVE_HORIZON,
        STATE_OPEN,
        STATE_HOME,
    )
    falsy = (
        STATE_OFF,
        STATE_UNLOCKED,
        STATE_UNKNOWN,
        STATE_BELOW_HORIZON,
        STATE_CLOSED,
        STATE_NOT_HOME,
    )
    if state.state in truthy:
        return 1
    if state.state in falsy:
        return 0
    # Anything else must parse as a float, otherwise float() raises ValueError.
    return float(state.state)
| apache-2.0 |
abdasgupta/kubernetes | plugin/pkg/scheduler/algorithm/predicates/predicates.go | 53588 | /*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package predicates
import (
"fmt"
"math/rand"
"strconv"
"sync"
"time"
"github.com/golang/glog"
"k8s.io/api/core/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
utilfeature "k8s.io/apiserver/pkg/util/feature"
"k8s.io/client-go/util/workqueue"
v1helper "k8s.io/kubernetes/pkg/api/v1/helper"
v1qos "k8s.io/kubernetes/pkg/api/v1/helper/qos"
corelisters "k8s.io/kubernetes/pkg/client/listers/core/v1"
"k8s.io/kubernetes/pkg/features"
kubeletapis "k8s.io/kubernetes/pkg/kubelet/apis"
volumeutil "k8s.io/kubernetes/pkg/volume/util"
"k8s.io/kubernetes/plugin/pkg/scheduler/algorithm"
priorityutil "k8s.io/kubernetes/plugin/pkg/scheduler/algorithm/priorities/util"
"k8s.io/kubernetes/plugin/pkg/scheduler/schedulercache"
schedutil "k8s.io/kubernetes/plugin/pkg/scheduler/util"
"k8s.io/metrics/pkg/client/clientset_generated/clientset"
)
// predicatePrecomputations: Helper types/variables...
// PredicateMetadataModifier mutates a predicateMetadata in place; registered
// modifiers run as precomputation steps before predicates are evaluated.
type PredicateMetadataModifier func(pm *predicateMetadata)
// predicatePrecomputeRegisterLock guards predicatePrecomputations during registration.
var predicatePrecomputeRegisterLock sync.Mutex
// predicatePrecomputations maps a predicate name to its precomputation modifier.
var predicatePrecomputations map[string]PredicateMetadataModifier = make(map[string]PredicateMetadataModifier)
// RegisterPredicatePrecomputation records a precomputation modifier for the
// named predicate. It is safe for concurrent use; a later registration under
// the same name overwrites the earlier one.
func RegisterPredicatePrecomputation(predicateName string, precomp PredicateMetadataModifier) {
	predicatePrecomputeRegisterLock.Lock()
	defer predicatePrecomputeRegisterLock.Unlock()
	predicatePrecomputations[predicateName] = precomp
}
// Other types for predicate functions...
// NodeInfo provides lookup of a node object by its ID.
type NodeInfo interface {
	GetNodeInfo(nodeID string) (*v1.Node, error)
}
// PersistentVolumeInfo provides lookup of a persistent volume by its ID.
type PersistentVolumeInfo interface {
	GetPersistentVolumeInfo(pvID string) (*v1.PersistentVolume, error)
}
// CachedPersistentVolumeInfo implements PersistentVolumeInfo
type CachedPersistentVolumeInfo struct {
	corelisters.PersistentVolumeLister
}
// GetPersistentVolumeInfo returns the persistent volume named pvID, served
// from the embedded lister's informer cache.
func (c *CachedPersistentVolumeInfo) GetPersistentVolumeInfo(pvID string) (*v1.PersistentVolume, error) {
	return c.Get(pvID)
}
// PersistentVolumeClaimInfo provides lookup of a persistent volume claim by
// namespace and name.
type PersistentVolumeClaimInfo interface {
	GetPersistentVolumeClaimInfo(namespace string, name string) (*v1.PersistentVolumeClaim, error)
}
// CachedPersistentVolumeClaimInfo implements PersistentVolumeClaimInfo
type CachedPersistentVolumeClaimInfo struct {
	corelisters.PersistentVolumeClaimLister
}
// GetPersistentVolumeClaimInfo fetches the claim in specified namespace with specified name
func (c *CachedPersistentVolumeClaimInfo) GetPersistentVolumeClaimInfo(namespace string, name string) (*v1.PersistentVolumeClaim, error) {
	return c.PersistentVolumeClaims(namespace).Get(name)
}
// CachedNodeInfo implements NodeInfo on top of a shared-informer node lister.
type CachedNodeInfo struct {
	corelisters.NodeLister
}

// GetNodeInfo returns cached data for the node 'id'.
func (c *CachedNodeInfo) GetNodeInfo(id string) (*v1.Node, error) {
	node, err := c.Get(id)
	switch {
	case apierrors.IsNotFound(err):
		// Distinguish "does not exist" from other cache-retrieval failures.
		return nil, fmt.Errorf("node '%v' not found", id)
	case err != nil:
		return nil, fmt.Errorf("error retrieving node '%v' from cache: %v", id, err)
	default:
		return node, nil
	}
}
// Note that predicateMetadata and matchingPodAntiAffinityTerm need to be declared in the same file
// due to the way declarations are processed in predicate declaration unit tests.
// matchingPodAntiAffinityTerm pairs an anti-affinity term with the node of the
// pod that declared it.
type matchingPodAntiAffinityTerm struct {
	term *v1.PodAffinityTerm
	node *v1.Node
}
// predicateMetadata caches per-pod data shared across predicate evaluations.
type predicateMetadata struct {
	pod                                *v1.Pod
	podBestEffort                      bool
	podRequest                         *schedulercache.Resource
	podPorts                           map[int]bool
	matchingAntiAffinityTerms          []matchingPodAntiAffinityTerm
	serviceAffinityMatchingPodList     []*v1.Pod
	serviceAffinityMatchingPodServices []*v1.Service
}
// isVolumeConflict reports whether mounting volume alongside the volumes of
// pod would conflict. Only GCE PD, AWS EBS, ISCSI, and Ceph RBD volume
// sources are considered; each conflicts when the same underlying device
// would be mounted read-write by more than one pod.
func isVolumeConflict(volume v1.Volume, pod *v1.Pod) bool {
	// Fast path: nothing to check for volume types we don't track.
	if volume.GCEPersistentDisk == nil && volume.AWSElasticBlockStore == nil && volume.RBD == nil && volume.ISCSI == nil {
		return false
	}
	for _, existing := range pod.Spec.Volumes {
		// Same GCE disk mounted by multiple pods conflicts unless all pods mount it read-only.
		if volume.GCEPersistentDisk != nil && existing.GCEPersistentDisk != nil {
			disk, existingDisk := volume.GCEPersistentDisk, existing.GCEPersistentDisk
			bothReadOnly := disk.ReadOnly && existingDisk.ReadOnly
			if disk.PDName == existingDisk.PDName && !bothReadOnly {
				return true
			}
		}
		// The same AWS EBS volume ID always conflicts, regardless of mount mode.
		if volume.AWSElasticBlockStore != nil && existing.AWSElasticBlockStore != nil {
			if volume.AWSElasticBlockStore.VolumeID == existing.AWSElasticBlockStore.VolumeID {
				return true
			}
		}
		if volume.ISCSI != nil && existing.ISCSI != nil {
			// Two ISCSI volumes are the same if they share an IQN. As iscsi
			// volumes are of type RWO or ROX, only one RW mount is permitted,
			// so a match conflicts unless both mounts are read-only.
			sameTarget := volume.ISCSI.IQN == existing.ISCSI.IQN
			bothReadOnly := volume.ISCSI.ReadOnly && existing.ISCSI.ReadOnly
			if sameTarget && !bothReadOnly {
				return true
			}
		}
		if volume.RBD != nil && existing.RBD != nil {
			// Two RBD images are the same if they share a Ceph monitor, are in
			// the same RADOS pool, and have the same image name; only one
			// read-write mount of an image is permitted, so a match conflicts
			// unless both mounts are read-only.
			mon, pool, image := volume.RBD.CephMonitors, volume.RBD.RBDPool, volume.RBD.RBDImage
			emon, epool, eimage := existing.RBD.CephMonitors, existing.RBD.RBDPool, existing.RBD.RBDImage
			bothReadOnly := volume.RBD.ReadOnly && existing.RBD.ReadOnly
			if haveSame(mon, emon) && pool == epool && image == eimage && !bothReadOnly {
				return true
			}
		}
	}
	return false
}
// NoDiskConflict evaluates whether the pod's requested volumes clash with
// volumes already mounted by pods on the node. A pod that would mount a
// conflicting volume cannot be scheduled there.
// This is GCE, Amazon EBS, and Ceph RBD specific for now:
// - GCE PD allows multiple mounts as long as they're all read-only
// - AWS EBS forbids any two pods mounting the same volume ID
// - Ceph RBD forbids if any two pods share at least same monitor, and match pool and image.
// - ISCSI forbids if any two pods share at least same IQN, LUN and Target
// TODO: migrate this into some per-volume specific code?
func NoDiskConflict(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	for _, requestedVolume := range pod.Spec.Volumes {
		for _, existingPod := range nodeInfo.Pods() {
			if isVolumeConflict(requestedVolume, existingPod) {
				return false, []algorithm.PredicateFailureReason{ErrDiskConflict}, nil
			}
		}
	}
	return true, nil, nil
}
// MaxPDVolumeCountChecker limits the number of filter-matching volumes
// (counting both direct volumes and PVC-backed ones) attachable to a node.
type MaxPDVolumeCountChecker struct {
	// filter selects which volumes count towards the limit.
	filter VolumeFilter
	// maxVolumes is the maximum number of matching volumes allowed per node.
	maxVolumes int
	pvInfo PersistentVolumeInfo
	pvcInfo PersistentVolumeClaimInfo
}

// VolumeFilter contains information on how to filter PD Volumes when checking PD Volume caps
type VolumeFilter struct {
	// Filter normal volumes
	FilterVolume func(vol *v1.Volume) (id string, relevant bool)
	// FilterPersistentVolume filters PVs that PVC-backed volumes are bound to.
	FilterPersistentVolume func(pv *v1.PersistentVolume) (id string, relevant bool)
}
// NewMaxPDVolumeCountPredicate builds a predicate that rejects a pod when the
// number of unique filter-matching volumes on the node — counting the pod's
// own volumes plus those of already-scheduled pods — would exceed maxVolumes.
//
// Both directly declared volumes and PVC volumes backed by relevant volume
// types are counted; the maximum is configurable per system.
func NewMaxPDVolumeCountPredicate(filter VolumeFilter, maxVolumes int, pvInfo PersistentVolumeInfo, pvcInfo PersistentVolumeClaimInfo) algorithm.FitPredicate {
	checker := &MaxPDVolumeCountChecker{
		filter:     filter,
		maxVolumes: maxVolumes,
		pvInfo:     pvInfo,
		pvcInfo:    pvcInfo,
	}
	return checker.predicate
}
// filterVolumes records, in filteredVolumes, an ID for every volume in the
// given slice that matches the checker's filter — either directly, or via the
// PersistentVolume bound to a referenced PersistentVolumeClaim.
//
// When a referenced PVC or its bound PV cannot be looked up, the volume is
// conservatively counted towards the limit under a randomly generated
// (de-dup-safe) ID and processing continues with the remaining volumes.
func (c *MaxPDVolumeCountChecker) filterVolumes(volumes []v1.Volume, namespace string, filteredVolumes map[string]bool) error {
	for i := range volumes {
		vol := &volumes[i]
		if id, ok := c.filter.FilterVolume(vol); ok {
			filteredVolumes[id] = true
		} else if vol.PersistentVolumeClaim != nil {
			pvcName := vol.PersistentVolumeClaim.ClaimName
			if pvcName == "" {
				return fmt.Errorf("PersistentVolumeClaim had no name")
			}
			pvc, err := c.pvcInfo.GetPersistentVolumeClaimInfo(namespace, pvcName)
			if err != nil {
				// if the PVC is not found, log the error and count the PV towards the PV limit
				// generate a random volume ID since its required for de-dup
				utilruntime.HandleError(fmt.Errorf("Unable to look up PVC info for %s/%s, assuming PVC matches predicate when counting limits: %v", namespace, pvcName, err))
				source := rand.NewSource(time.Now().UnixNano())
				generatedID := "missingPVC" + strconv.Itoa(rand.New(source).Intn(1000000))
				filteredVolumes[generatedID] = true
				// BUG FIX: previously `return nil`, which silently skipped all
				// remaining volumes in the slice and under-counted the limit.
				continue
			}
			if pvc == nil {
				return fmt.Errorf("PersistentVolumeClaim not found: %q", pvcName)
			}
			pvName := pvc.Spec.VolumeName
			if pvName == "" {
				return fmt.Errorf("PersistentVolumeClaim is not bound: %q", pvcName)
			}
			pv, err := c.pvInfo.GetPersistentVolumeInfo(pvName)
			if err != nil {
				// if the PV is not found, log the error
				// and count the PV towards the PV limit
				// generate a random volume ID since it is required for de-dup
				utilruntime.HandleError(fmt.Errorf("Unable to look up PV info for %s/%s/%s, assuming PV matches predicate when counting limits: %v", namespace, pvcName, pvName, err))
				source := rand.NewSource(time.Now().UnixNano())
				generatedID := "missingPV" + strconv.Itoa(rand.New(source).Intn(1000000))
				filteredVolumes[generatedID] = true
				// BUG FIX: continue with the remaining volumes instead of
				// returning early (see the PVC case above).
				continue
			}
			if pv == nil {
				return fmt.Errorf("PersistentVolume not found: %q", pvName)
			}
			if id, ok := c.filter.FilterPersistentVolume(pv); ok {
				filteredVolumes[id] = true
			}
		}
	}
	return nil
}
// predicate counts the unique filter-matching volumes that would be attached
// to the node if the pod were scheduled, and fails with
// ErrMaxVolumeCountExceeded when the total would exceed c.maxVolumes.
func (c *MaxPDVolumeCountChecker) predicate(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	// If a pod doesn't have any volume attached to it, the predicate will always be true.
	// Thus we make a fast path for it, to avoid unnecessary computations in this case.
	if len(pod.Spec.Volumes) == 0 {
		return true, nil, nil
	}
	newVolumes := make(map[string]bool)
	if err := c.filterVolumes(pod.Spec.Volumes, pod.Namespace, newVolumes); err != nil {
		return false, nil, err
	}
	// quick return
	if len(newVolumes) == 0 {
		return true, nil, nil
	}
	// count unique volumes
	existingVolumes := make(map[string]bool)
	for _, existingPod := range nodeInfo.Pods() {
		if err := c.filterVolumes(existingPod.Spec.Volumes, existingPod.Namespace, existingVolumes); err != nil {
			return false, nil, err
		}
	}
	numExistingVolumes := len(existingVolumes)
	// filter out already-mounted volumes
	for k := range existingVolumes {
		if _, ok := newVolumes[k]; ok {
			delete(newVolumes, k)
		}
	}
	numNewVolumes := len(newVolumes)
	if numExistingVolumes+numNewVolumes > c.maxVolumes {
		// violates MaxEBSVolumeCount or MaxGCEPDVolumeCount
		return false, []algorithm.PredicateFailureReason{ErrMaxVolumeCountExceeded}, nil
	}
	return true, nil, nil
}
// EBSVolumeFilter is a VolumeFilter for filtering AWS ElasticBlockStore Volumes
var EBSVolumeFilter VolumeFilter = VolumeFilter{
	// Direct EBS volumes are identified by their VolumeID.
	FilterVolume: func(vol *v1.Volume) (string, bool) {
		if vol.AWSElasticBlockStore != nil {
			return vol.AWSElasticBlockStore.VolumeID, true
		}
		return "", false
	},
	// EBS-backed PersistentVolumes are identified the same way.
	FilterPersistentVolume: func(pv *v1.PersistentVolume) (string, bool) {
		if pv.Spec.AWSElasticBlockStore != nil {
			return pv.Spec.AWSElasticBlockStore.VolumeID, true
		}
		return "", false
	},
}
// GCEPDVolumeFilter is a VolumeFilter for filtering GCE PersistentDisk Volumes
var GCEPDVolumeFilter VolumeFilter = VolumeFilter{
	// Direct GCE PD volumes are identified by their PDName.
	FilterVolume: func(vol *v1.Volume) (string, bool) {
		if vol.GCEPersistentDisk != nil {
			return vol.GCEPersistentDisk.PDName, true
		}
		return "", false
	},
	// GCE-PD-backed PersistentVolumes are identified the same way.
	FilterPersistentVolume: func(pv *v1.PersistentVolume) (string, bool) {
		if pv.Spec.GCEPersistentDisk != nil {
			return pv.Spec.GCEPersistentDisk.PDName, true
		}
		return "", false
	},
}
// AzureDiskVolumeFilter is a VolumeFilter for filtering Azure Disk Volumes
var AzureDiskVolumeFilter VolumeFilter = VolumeFilter{
	// Direct Azure Disk volumes are identified by their DiskName.
	FilterVolume: func(vol *v1.Volume) (string, bool) {
		if vol.AzureDisk != nil {
			return vol.AzureDisk.DiskName, true
		}
		return "", false
	},
	// Azure-Disk-backed PersistentVolumes are identified the same way.
	FilterPersistentVolume: func(pv *v1.PersistentVolume) (string, bool) {
		if pv.Spec.AzureDisk != nil {
			return pv.Spec.AzureDisk.DiskName, true
		}
		return "", false
	},
}
// VolumeZoneChecker verifies that the zone/region labels of a pod's bound
// PersistentVolumes are compatible with the candidate node's labels.
type VolumeZoneChecker struct {
	pvInfo PersistentVolumeInfo
	pvcInfo PersistentVolumeClaimInfo
}
// NewVolumeZonePredicate builds a predicate that evaluates whether a pod fits
// given the zone scheduling constraints of the volumes it requests. Any
// zone-label on a bound volume must match the equivalent zone-label on the
// node; the node may carry additional zone-label constraints (for example, a
// hypothetical replicated volume might allow region-wide access).
//
// Currently this is only supported with PersistentVolumeClaims, and looks to the labels
// only on the bound PersistentVolume.
//
// Working with volumes declared inline in the pod specification (i.e. not
// using a PersistentVolume) is likely to be harder, as it would require
// determining the zone of a volume during scheduling, and that is likely to
// require calling out to the cloud provider. It seems that we are moving away
// from inline volume declarations anyway.
func NewVolumeZonePredicate(pvInfo PersistentVolumeInfo, pvcInfo PersistentVolumeClaimInfo) algorithm.FitPredicate {
	checker := &VolumeZoneChecker{
		pvInfo:  pvInfo,
		pvcInfo: pvcInfo,
	}
	return checker.predicate
}
// predicate checks that, for every PVC-backed volume the pod requests, the
// zone/region labels on the bound PersistentVolume match the corresponding
// labels on the candidate node. Returns ErrVolumeZoneConflict on mismatch.
func (c *VolumeZoneChecker) predicate(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	// If a pod doesn't have any volume attached to it, the predicate will always be true.
	// Thus we make a fast path for it, to avoid unnecessary computations in this case.
	if len(pod.Spec.Volumes) == 0 {
		return true, nil, nil
	}
	node := nodeInfo.Node()
	if node == nil {
		return false, nil, fmt.Errorf("node not found")
	}
	// Collect only the node's zone/region labels; all other labels are irrelevant here.
	nodeConstraints := make(map[string]string)
	for k, v := range node.ObjectMeta.Labels {
		if k != kubeletapis.LabelZoneFailureDomain && k != kubeletapis.LabelZoneRegion {
			continue
		}
		nodeConstraints[k] = v
	}
	if len(nodeConstraints) == 0 {
		// The node has no zone constraints, so we're OK to schedule.
		// In practice, when using zones, all nodes must be labeled with zone labels.
		// We want to fast-path this case though.
		return true, nil, nil
	}
	namespace := pod.Namespace
	manifest := &(pod.Spec)
	for i := range manifest.Volumes {
		volume := &manifest.Volumes[i]
		if volume.PersistentVolumeClaim != nil {
			pvcName := volume.PersistentVolumeClaim.ClaimName
			if pvcName == "" {
				return false, nil, fmt.Errorf("PersistentVolumeClaim had no name")
			}
			pvc, err := c.pvcInfo.GetPersistentVolumeClaimInfo(namespace, pvcName)
			if err != nil {
				return false, nil, err
			}
			if pvc == nil {
				return false, nil, fmt.Errorf("PersistentVolumeClaim was not found: %q", pvcName)
			}
			pvName := pvc.Spec.VolumeName
			if pvName == "" {
				return false, nil, fmt.Errorf("PersistentVolumeClaim is not bound: %q", pvcName)
			}
			pv, err := c.pvInfo.GetPersistentVolumeInfo(pvName)
			if err != nil {
				return false, nil, err
			}
			if pv == nil {
				return false, nil, fmt.Errorf("PersistentVolume not found: %q", pvName)
			}
			for k, v := range pv.ObjectMeta.Labels {
				if k != kubeletapis.LabelZoneFailureDomain && k != kubeletapis.LabelZoneRegion {
					continue
				}
				// A zone label missing on the node yields "", which never matches
				// a non-empty PV label. (Fixed: the previous `nodeV, _ :=` form used
				// a redundant blank identifier on a map read — staticcheck S1005.)
				nodeV := nodeConstraints[k]
				if v != nodeV {
					glog.V(10).Infof("Won't schedule pod %q onto node %q due to volume %q (mismatch on %q)", pod.Name, node.Name, pvName, k)
					return false, []algorithm.PredicateFailureReason{ErrVolumeZoneConflict}, nil
				}
			}
		}
	}
	return true, nil, nil
}
// GetResourceRequest returns a *schedulercache.Resource that covers the largest
// width in each resource dimension. Because init-containers run sequentially,
// we collect the max in each dimension iteratively. In contrast, we sum the
// resource vectors for regular containers since they run simultaneously.
//
// Example:
//
// Pod:
//   InitContainers
//     IC1:
//       CPU: 2
//       Memory: 1G
//     IC2:
//       CPU: 2
//       Memory: 3G
//   Containers
//     C1:
//       CPU: 2
//       Memory: 1G
//     C2:
//       CPU: 1
//       Memory: 1G
//
// Result: CPU: 3, Memory: 3G
func GetResourceRequest(pod *v1.Pod) *schedulercache.Resource {
	result := schedulercache.Resource{}
	// Regular containers run concurrently: sum their requests per dimension.
	for _, container := range pod.Spec.Containers {
		for rName, rQuantity := range container.Resources.Requests {
			switch rName {
			case v1.ResourceMemory:
				result.Memory += rQuantity.Value()
			case v1.ResourceCPU:
				result.MilliCPU += rQuantity.MilliValue()
			case v1.ResourceNvidiaGPU:
				result.NvidiaGPU += rQuantity.Value()
			case v1.ResourceStorageOverlay:
				result.StorageOverlay += rQuantity.Value()
			default:
				if v1helper.IsOpaqueIntResourceName(rName) {
					result.AddOpaque(rName, rQuantity.Value())
				}
			}
		}
	}
	// Account for storage requested by emptydir volumes
	// If the storage medium is memory, should exclude the size
	for _, vol := range pod.Spec.Volumes {
		if vol.EmptyDir != nil && vol.EmptyDir.Medium != v1.StorageMediumMemory {
			result.StorageScratch += vol.EmptyDir.SizeLimit.Value()
		}
	}
	// take max_resource(sum_pod, any_init_container)
	// Init containers run one at a time, so the pod only ever needs the
	// largest init-container request in each dimension.
	for _, container := range pod.Spec.InitContainers {
		for rName, rQuantity := range container.Resources.Requests {
			switch rName {
			case v1.ResourceMemory:
				if mem := rQuantity.Value(); mem > result.Memory {
					result.Memory = mem
				}
			case v1.ResourceCPU:
				if cpu := rQuantity.MilliValue(); cpu > result.MilliCPU {
					result.MilliCPU = cpu
				}
			case v1.ResourceNvidiaGPU:
				if gpu := rQuantity.Value(); gpu > result.NvidiaGPU {
					result.NvidiaGPU = gpu
				}
			case v1.ResourceStorageOverlay:
				if overlay := rQuantity.Value(); overlay > result.StorageOverlay {
					result.StorageOverlay = overlay
				}
			default:
				if v1helper.IsOpaqueIntResourceName(rName) {
					value := rQuantity.Value()
					if value > result.OpaqueIntResources[rName] {
						result.SetOpaque(rName, value)
					}
				}
			}
		}
	}
	return &result
}
// podName formats a pod's identity as "namespace/name" for log messages.
func podName(pod *v1.Pod) string {
	return fmt.Sprintf("%s/%s", pod.Namespace, pod.Name)
}
// PodFitsResources checks if a node has sufficient resources, such as cpu, memory, gpu, opaque int resources etc to run a pod.
// First return value indicates whether a node has sufficient resources to run a pod while the second return value indicates the
// predicate failure reasons if the node has insufficient resources to run the pod.
func PodFitsResources(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	node := nodeInfo.Node()
	if node == nil {
		return false, nil, fmt.Errorf("node not found")
	}
	var predicateFails []algorithm.PredicateFailureReason
	// Pod-count check happens even for best-effort pods.
	allowedPodNumber := nodeInfo.AllowedPodNumber()
	if len(nodeInfo.Pods())+1 > allowedPodNumber {
		predicateFails = append(predicateFails, NewInsufficientResourceError(v1.ResourcePods, 1, int64(len(nodeInfo.Pods())), int64(allowedPodNumber)))
	}
	var podRequest *schedulercache.Resource
	if predicateMeta, ok := meta.(*predicateMetadata); ok {
		podRequest = predicateMeta.podRequest
	} else {
		// We couldn't parse metadata - fallback to computing it.
		podRequest = GetResourceRequest(pod)
	}
	// Best-effort pod (requests nothing): only the pod-count check above applies.
	if podRequest.MilliCPU == 0 && podRequest.Memory == 0 && podRequest.NvidiaGPU == 0 && podRequest.StorageOverlay == 0 && podRequest.StorageScratch == 0 && len(podRequest.OpaqueIntResources) == 0 {
		return len(predicateFails) == 0, predicateFails, nil
	}
	allocatable := nodeInfo.AllocatableResource()
	if allocatable.MilliCPU < podRequest.MilliCPU+nodeInfo.RequestedResource().MilliCPU {
		predicateFails = append(predicateFails, NewInsufficientResourceError(v1.ResourceCPU, podRequest.MilliCPU, nodeInfo.RequestedResource().MilliCPU, allocatable.MilliCPU))
	}
	if allocatable.Memory < podRequest.Memory+nodeInfo.RequestedResource().Memory {
		predicateFails = append(predicateFails, NewInsufficientResourceError(v1.ResourceMemory, podRequest.Memory, nodeInfo.RequestedResource().Memory, allocatable.Memory))
	}
	if allocatable.NvidiaGPU < podRequest.NvidiaGPU+nodeInfo.RequestedResource().NvidiaGPU {
		predicateFails = append(predicateFails, NewInsufficientResourceError(v1.ResourceNvidiaGPU, podRequest.NvidiaGPU, nodeInfo.RequestedResource().NvidiaGPU, allocatable.NvidiaGPU))
	}
	// When the node reports no separate overlay capacity, overlay requests are
	// folded into the scratch-space budget before checking it.
	scratchSpaceRequest := podRequest.StorageScratch
	if allocatable.StorageOverlay == 0 {
		scratchSpaceRequest += podRequest.StorageOverlay
		//scratchSpaceRequest += nodeInfo.RequestedResource().StorageOverlay
		nodeScratchRequest := nodeInfo.RequestedResource().StorageOverlay + nodeInfo.RequestedResource().StorageScratch
		if allocatable.StorageScratch < scratchSpaceRequest+nodeScratchRequest {
			predicateFails = append(predicateFails, NewInsufficientResourceError(v1.ResourceStorageScratch, scratchSpaceRequest, nodeScratchRequest, allocatable.StorageScratch))
		}
	} else if allocatable.StorageScratch < scratchSpaceRequest+nodeInfo.RequestedResource().StorageScratch {
		predicateFails = append(predicateFails, NewInsufficientResourceError(v1.ResourceStorageScratch, scratchSpaceRequest, nodeInfo.RequestedResource().StorageScratch, allocatable.StorageScratch))
	}
	if allocatable.StorageOverlay > 0 && allocatable.StorageOverlay < podRequest.StorageOverlay+nodeInfo.RequestedResource().StorageOverlay {
		predicateFails = append(predicateFails, NewInsufficientResourceError(v1.ResourceStorageOverlay, podRequest.StorageOverlay, nodeInfo.RequestedResource().StorageOverlay, allocatable.StorageOverlay))
	}
	for rName, rQuant := range podRequest.OpaqueIntResources {
		if allocatable.OpaqueIntResources[rName] < rQuant+nodeInfo.RequestedResource().OpaqueIntResources[rName] {
			predicateFails = append(predicateFails, NewInsufficientResourceError(rName, podRequest.OpaqueIntResources[rName], nodeInfo.RequestedResource().OpaqueIntResources[rName], allocatable.OpaqueIntResources[rName]))
		}
	}
	if glog.V(10) {
		if len(predicateFails) == 0 {
			// We explicitly don't do glog.V(10).Infof() to avoid computing all the parameters if this is
			// not logged. There is visible performance gain from it.
			glog.Infof("Schedule Pod %+v on Node %+v is allowed, Node is running only %v out of %v Pods.",
				podName(pod), node.Name, len(nodeInfo.Pods()), allowedPodNumber)
		}
	}
	return len(predicateFails) == 0, predicateFails, nil
}
// nodeMatchesNodeSelectorTerms checks whether a node's labels satisfy a list
// of node selector terms. Terms are ORed; an empty list matches nothing, and
// a term that fails to parse is treated as a non-match.
func nodeMatchesNodeSelectorTerms(node *v1.Node, nodeSelectorTerms []v1.NodeSelectorTerm) bool {
	nodeLabels := labels.Set(node.Labels)
	for _, term := range nodeSelectorTerms {
		selector, err := v1helper.NodeSelectorRequirementsAsSelector(term.MatchExpressions)
		if err != nil {
			glog.V(10).Infof("Failed to parse MatchExpressions: %+v, regarding as not match.", term.MatchExpressions)
			return false
		}
		if selector.Matches(nodeLabels) {
			return true
		}
	}
	return false
}
// podMatchesNodeLabels reports whether the node satisfies BOTH the pod's
// NodeSelector and its required NodeAffinity; the pod can only schedule onto
// nodes that satisfy requirements in both.
func podMatchesNodeLabels(pod *v1.Pod, node *v1.Node) bool {
	// Check if node.Labels match pod.Spec.NodeSelector.
	if len(pod.Spec.NodeSelector) > 0 {
		selector := labels.SelectorFromSet(pod.Spec.NodeSelector)
		if !selector.Matches(labels.Set(node.Labels)) {
			return false
		}
	}
	// 1. nil NodeSelector matches all nodes (i.e. does not filter out any nodes)
	// 2. nil []NodeSelectorTerm (equivalent to non-nil empty NodeSelector) matches no nodes
	// 3. zero-length non-nil []NodeSelectorTerm matches no nodes also, just for simplicity
	// 4. nil []NodeSelectorRequirement (equivalent to non-nil empty NodeSelectorTerm) matches no nodes
	// 5. zero-length non-nil []NodeSelectorRequirement matches no nodes also, just for simplicity
	// 6. non-nil empty NodeSelectorRequirement is not allowed
	nodeAffinityMatches := true
	affinity := pod.Spec.Affinity
	if affinity != nil && affinity.NodeAffinity != nil {
		nodeAffinity := affinity.NodeAffinity
		// if no required NodeAffinity requirements, will do no-op, means select all nodes.
		// TODO: Replace next line with subsequent commented-out line when implement RequiredDuringSchedulingRequiredDuringExecution.
		if nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution == nil {
			// if nodeAffinity.RequiredDuringSchedulingRequiredDuringExecution == nil && nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution == nil {
			return true
		}
		// Match node selector for requiredDuringSchedulingRequiredDuringExecution.
		// TODO: Uncomment this block when implement RequiredDuringSchedulingRequiredDuringExecution.
		// if nodeAffinity.RequiredDuringSchedulingRequiredDuringExecution != nil {
		// 	nodeSelectorTerms := nodeAffinity.RequiredDuringSchedulingRequiredDuringExecution.NodeSelectorTerms
		// 	glog.V(10).Infof("Match for RequiredDuringSchedulingRequiredDuringExecution node selector terms %+v", nodeSelectorTerms)
		// 	nodeAffinityMatches = nodeMatchesNodeSelectorTerms(node, nodeSelectorTerms)
		// }
		// Match node selector for requiredDuringSchedulingIgnoredDuringExecution.
		if nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution != nil {
			nodeSelectorTerms := nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms
			glog.V(10).Infof("Match for RequiredDuringSchedulingIgnoredDuringExecution node selector terms %+v", nodeSelectorTerms)
			nodeAffinityMatches = nodeAffinityMatches && nodeMatchesNodeSelectorTerms(node, nodeSelectorTerms)
		}
	}
	return nodeAffinityMatches
}
// PodMatchNodeSelector checks whether the node's labels satisfy the pod's
// node selector and required node affinity.
func PodMatchNodeSelector(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	node := nodeInfo.Node()
	if node == nil {
		return false, nil, fmt.Errorf("node not found")
	}
	if !podMatchesNodeLabels(pod, node) {
		return false, []algorithm.PredicateFailureReason{ErrNodeSelectorNotMatch}, nil
	}
	return true, nil, nil
}
// PodFitsHost checks whether the pod's requested node name (if any) matches
// the candidate node.
func PodFitsHost(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	requestedNode := pod.Spec.NodeName
	// A pod with no explicit node name fits anywhere.
	if requestedNode == "" {
		return true, nil, nil
	}
	node := nodeInfo.Node()
	if node == nil {
		return false, nil, fmt.Errorf("node not found")
	}
	if requestedNode != node.Name {
		return false, []algorithm.PredicateFailureReason{ErrPodNotMatchHostName}, nil
	}
	return true, nil, nil
}
// NodeLabelChecker verifies the presence (or absence) of a fixed set of labels
// on candidate nodes, regardless of their values.
type NodeLabelChecker struct {
	// labels are the label keys to check on each node.
	labels []string
	// presence selects whether the labels must all be present (true) or all absent (false).
	presence bool
}

// NewNodeLabelPredicate returns a FitPredicate backed by a NodeLabelChecker.
func NewNodeLabelPredicate(labels []string, presence bool) algorithm.FitPredicate {
	labelChecker := &NodeLabelChecker{
		labels: labels,
		presence: presence,
	}
	return labelChecker.CheckNodeLabelPresence
}
// CheckNodeLabelPresence checks whether all of the specified labels exist on a node or not, regardless of their value.
// If "presence" is false, the predicate fails when any requested label is found on the node;
// if "presence" is true, it fails when any requested label is missing from the node.
//
// Consider the cases where the nodes are placed in regions/zones/racks and these are identified by labels.
// In some cases, it is required that only nodes that are part of ANY of the defined regions/zones/racks be selected.
//
// Alternately, eliminating nodes that have a certain label, regardless of value, is also useful.
// A node may have a label with "retiring" as key and the date as the value,
// and it may be desirable to avoid scheduling new pods on this node.
func (n *NodeLabelChecker) CheckNodeLabelPresence(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	node := nodeInfo.Node()
	if node == nil {
		return false, nil, fmt.Errorf("node not found")
	}
	nodeLabels := labels.Set(node.Labels)
	for _, label := range n.labels {
		// The predicate is violated whenever a label's presence on the node
		// disagrees with the configured expectation.
		if nodeLabels.Has(label) != n.presence {
			return false, []algorithm.PredicateFailureReason{ErrNodeLabelPresenceViolated}, nil
		}
	}
	return true, nil, nil
}
// ServiceAffinity implements the service affinity predicate: pods of the same
// service are steered onto nodes whose values for the configured label keys
// all match.
type ServiceAffinity struct {
	podLister algorithm.PodLister
	serviceLister algorithm.ServiceLister
	nodeInfo NodeInfo
	// labels are the node label keys that must be homogeneous across the service.
	labels []string
}
// serviceAffinityPrecomputation should be run once by the scheduler before looping through the Predicate. It is a helper function that
// only should be referenced by NewServiceAffinityPredicate.
func (s *ServiceAffinity) serviceAffinityPrecomputation(pm *predicateMetadata) {
	if pm.pod == nil {
		glog.Errorf("Cannot precompute service affinity, a pod is required to calculate service affinity.")
		return
	}
	var errSvc, errList error
	// Store services which match the pod.
	pm.serviceAffinityMatchingPodServices, errSvc = s.serviceLister.GetPodServices(pm.pod)
	selector := CreateSelectorFromLabels(pm.pod.Labels)
	// consider only the pods that belong to the same namespace
	allMatches, errList := s.podLister.List(selector)
	// Errors are logged but not returned: the predicate proceeds with whatever
	// data was obtained.
	// In the future maybe we will return them as part of the function.
	if errSvc != nil || errList != nil {
		glog.Errorf("Some Error were found while precomputing svc affinity: \nservices:%v , \npods:%v", errSvc, errList)
	}
	pm.serviceAffinityMatchingPodList = FilterPodsByNamespace(allMatches, pm.pod.Namespace)
}
// NewServiceAffinityPredicate returns the service affinity predicate together
// with the metadata precomputation it relies on; the two must be registered as
// a pair.
func NewServiceAffinityPredicate(podLister algorithm.PodLister, serviceLister algorithm.ServiceLister, nodeInfo NodeInfo, labels []string) (algorithm.FitPredicate, PredicateMetadataModifier) {
	checker := &ServiceAffinity{
		podLister:     podLister,
		serviceLister: serviceLister,
		nodeInfo:      nodeInfo,
		labels:        labels,
	}
	return checker.checkServiceAffinity, checker.serviceAffinityPrecomputation
}
// checkServiceAffinity is a predicate which matches nodes in such a way to force that
// ServiceAffinity.labels are homogenous for pods that are scheduled to a node.
// (i.e. it returns true IFF this pod can be added to this node such that all other pods in
// the same service are running on nodes with
// the exact same ServiceAffinity.label values).
//
// For example:
// If the first pod of a service was scheduled to a node with label "region=foo",
// all the other subsequent pods belong to the same service will be schedule on
// nodes with the same "region=foo" label.
//
// Details:
//
// If (the svc affinity labels are not a subset of pod's label selectors )
// The pod has all information necessary to check affinity, the pod's label selector is sufficient to calculate
// the match.
// Otherwise:
// Create an "implicit selector" which guarantees pods will land on nodes with similar values
// for the affinity labels.
//
// To do this, we "reverse engineer" a selector by introspecting existing pods running under the same service+namespace.
// These backfilled labels in the selector "L" are defined like so:
// - L is a label that the ServiceAffinity object needs as a matching constraints.
// - L is not defined in the pod itself already.
// - and SOME pod, from a service, in the same namespace, ALREADY scheduled onto a node, has a matching value.
//
// WARNING: This Predicate is NOT guaranteed to work if some of the predicateMetadata data isn't precomputed...
// For that reason it is not exported, i.e. it is highly coupled to the implementation of the FitPredicate construction.
func (s *ServiceAffinity) checkServiceAffinity(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	var services []*v1.Service
	var pods []*v1.Pod
	if pm, ok := meta.(*predicateMetadata); ok && (pm.serviceAffinityMatchingPodList != nil || pm.serviceAffinityMatchingPodServices != nil) {
		services = pm.serviceAffinityMatchingPodServices
		pods = pm.serviceAffinityMatchingPodList
	} else {
		// Make the predicate resilient in case metadata is missing.
		pm = &predicateMetadata{pod: pod}
		s.serviceAffinityPrecomputation(pm)
		pods, services = pm.serviceAffinityMatchingPodList, pm.serviceAffinityMatchingPodServices
	}
	node := nodeInfo.Node()
	if node == nil {
		return false, nil, fmt.Errorf("node not found")
	}
	// check if the pod being scheduled has the affinity labels specified in its NodeSelector
	affinityLabels := FindLabelsInSet(s.labels, labels.Set(pod.Spec.NodeSelector))
	// Step 1: If we don't have all constraints, introspect nodes to find the missing constraints.
	if len(s.labels) > len(affinityLabels) {
		if len(services) > 0 {
			if len(pods) > 0 {
				// Backfill missing affinity labels from the node an existing
				// service pod already landed on.
				nodeWithAffinityLabels, err := s.nodeInfo.GetNodeInfo(pods[0].Spec.NodeName)
				if err != nil {
					return false, nil, err
				}
				AddUnsetLabelsToMap(affinityLabels, s.labels, labels.Set(nodeWithAffinityLabels.Labels))
			}
		}
	}
	// Step 2: Finally complete the affinity predicate based on whatever set of predicates we were able to find.
	if CreateSelectorFromLabels(affinityLabels).Matches(labels.Set(node.Labels)) {
		return true, nil, nil
	}
	return false, []algorithm.PredicateFailureReason{ErrServiceAffinityViolated}, nil
}
// PodFitsHostPorts checks that every non-zero host port the pod requests is
// still free on the node.
func PodFitsHostPorts(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	var wantPorts map[int]bool
	if predicateMeta, ok := meta.(*predicateMetadata); ok {
		wantPorts = predicateMeta.podPorts
	} else {
		// We couldn't parse metadata - fallback to computing it.
		wantPorts = schedutil.GetUsedPorts(pod)
	}
	if len(wantPorts) == 0 {
		return true, nil, nil
	}
	usedPorts := nodeInfo.UsedPorts()
	for port := range wantPorts {
		// Port 0 means "any port" and never conflicts.
		if port == 0 {
			continue
		}
		if usedPorts[port] {
			return false, []algorithm.PredicateFailureReason{ErrPodNotFitsHostPorts}, nil
		}
	}
	return true, nil, nil
}
// haveSame reports whether the two string slices share at least one common
// element.
func haveSame(a1, a2 []string) bool {
	seen := make(map[string]struct{}, len(a1))
	for _, s := range a1 {
		seen[s] = struct{}{}
	}
	for _, s := range a2 {
		if _, ok := seen[s]; ok {
			return true
		}
	}
	return false
}
// GeneralPredicates checks whether noncriticalPredicates and EssentialPredicates pass. noncriticalPredicates are the predicates
// that only non-critical pods need and EssentialPredicates are the predicates that all pods, including critical pods, need.
func GeneralPredicates(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	var failureReasons []algorithm.PredicateFailureReason
	for _, group := range []algorithm.FitPredicate{noncriticalPredicates, EssentialPredicates} {
		fit, reasons, err := group(pod, meta, nodeInfo)
		if err != nil {
			return false, failureReasons, err
		}
		if !fit {
			failureReasons = append(failureReasons, reasons...)
		}
	}
	return len(failureReasons) == 0, failureReasons, nil
}
// noncriticalPredicates are the predicates that only non-critical pods need.
func noncriticalPredicates(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	var failureReasons []algorithm.PredicateFailureReason
	fit, reasons, err := PodFitsResources(pod, meta, nodeInfo)
	if err != nil {
		return false, failureReasons, err
	}
	if !fit {
		failureReasons = append(failureReasons, reasons...)
	}
	return len(failureReasons) == 0, failureReasons, nil
}
// EssentialPredicates are the predicates that all pods, including critical
// pods, need. All three checks run and their failure reasons accumulate; an
// error from any check aborts immediately.
func EssentialPredicates(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	var failureReasons []algorithm.PredicateFailureReason
	// TODO: PodFitsHostPorts is essential for now, but kubelet should ideally
	// preempt pods to free up host ports too
	essential := []algorithm.FitPredicate{PodFitsHost, PodFitsHostPorts, PodMatchNodeSelector}
	for _, predicate := range essential {
		fit, reasons, err := predicate(pod, meta, nodeInfo)
		if err != nil {
			return false, failureReasons, err
		}
		if !fit {
			failureReasons = append(failureReasons, reasons...)
		}
	}
	return len(failureReasons) == 0, failureReasons, nil
}
// PodAffinityChecker evaluates the inter-pod affinity/anti-affinity predicate.
type PodAffinityChecker struct {
	info      NodeInfo            // resolves node objects for pods already placed on nodes
	podLister algorithm.PodLister // lists all pods when precomputed predicate metadata is unavailable
}
// NewPodAffinityPredicate returns a FitPredicate that checks inter-pod
// affinity/anti-affinity constraints using the given node info source and
// pod lister.
func NewPodAffinityPredicate(info NodeInfo, podLister algorithm.PodLister) algorithm.FitPredicate {
	checker := &PodAffinityChecker{
		info:      info,
		podLister: podLister,
	}
	return checker.InterPodAffinityMatches
}
// InterPodAffinityMatches checks if a pod can be scheduled on the specified node with pod affinity/anti-affinity configuration.
// First return value indicates whether a pod can be scheduled on the specified node while the second return value indicates the
// predicate failure reasons if the pod cannot be scheduled on the specified node.
func (c *PodAffinityChecker) InterPodAffinityMatches(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	node := nodeInfo.Node()
	if node == nil {
		return false, nil, fmt.Errorf("node not found")
	}
	// First make sure placing the pod here would not violate the
	// anti-affinity rules of pods already running in the cluster.
	if !c.satisfiesExistingPodsAntiAffinity(pod, meta, node) {
		return false, []algorithm.PredicateFailureReason{ErrPodAffinityNotMatch}, nil
	}

	// Now check if <pod> requirements will be satisfied on this node.
	affinity := pod.Spec.Affinity
	if affinity == nil || (affinity.PodAffinity == nil && affinity.PodAntiAffinity == nil) {
		// The pod declares no (anti)affinity of its own, so nothing to check.
		return true, nil, nil
	}
	if !c.satisfiesPodsAffinityAntiAffinity(pod, node, affinity) {
		return false, []algorithm.PredicateFailureReason{ErrPodAffinityNotMatch}, nil
	}

	if glog.V(10) {
		// We explicitly don't do glog.V(10).Infof() to avoid computing all the parameters if this is
		// not logged. There is visible performance gain from it.
		glog.Infof("Schedule Pod %+v on Node %+v is allowed, pod (anti)affinity constraints satisfied",
			podName(pod), node.Name)
	}
	return true, nil, nil
}
// anyPodMatchesPodAffinityTerm checks if any of given pods can match the specific podAffinityTerm.
// First return value indicates whether a matching pod exists on a node that matches the topology key,
// while the second return value indicates whether a matching pod exists anywhere.
// TODO: Do we really need any pod matching, or all pods matching? I think the latter.
func (c *PodAffinityChecker) anyPodMatchesPodAffinityTerm(pod *v1.Pod, allPods []*v1.Pod, node *v1.Node, term *v1.PodAffinityTerm) (bool, bool, error) {
	if len(term.TopologyKey) == 0 {
		return false, false, fmt.Errorf("empty topologyKey is not allowed except for PreferredDuringScheduling pod anti-affinity")
	}
	matchingPodExists := false
	namespaces := priorityutil.GetNamespacesFromPodAffinityTerm(pod, term)
	selector, err := metav1.LabelSelectorAsSelector(term.LabelSelector)
	if err != nil {
		return false, false, err
	}
	for _, existingPod := range allPods {
		match := priorityutil.PodMatchesTermsNamespaceAndSelector(existingPod, namespaces, selector)
		if match {
			matchingPodExists = true
			// The term is only satisfied for scheduling purposes when the
			// matching pod's node shares the term's topology key with the
			// candidate node.
			existingPodNode, err := c.info.GetNodeInfo(existingPod.Spec.NodeName)
			if err != nil {
				return false, matchingPodExists, err
			}
			if priorityutil.NodesHaveSameTopologyKey(node, existingPodNode, term.TopologyKey) {
				return true, matchingPodExists, nil
			}
		}
	}
	return false, matchingPodExists, nil
}
// getPodAffinityTerms returns the hard (RequiredDuringSchedulingIgnoredDuringExecution)
// affinity terms of the given PodAffinity, or nil when none are set.
func getPodAffinityTerms(podAffinity *v1.PodAffinity) (terms []v1.PodAffinityTerm) {
	if podAffinity == nil {
		return terms
	}
	if required := podAffinity.RequiredDuringSchedulingIgnoredDuringExecution; len(required) != 0 {
		terms = required
	}
	// TODO: Uncomment this block when implement RequiredDuringSchedulingRequiredDuringExecution.
	//if len(podAffinity.RequiredDuringSchedulingRequiredDuringExecution) != 0 {
	//	terms = append(terms, podAffinity.RequiredDuringSchedulingRequiredDuringExecution...)
	//}
	return terms
}
// getPodAntiAffinityTerms returns the hard (RequiredDuringSchedulingIgnoredDuringExecution)
// anti-affinity terms of the given PodAntiAffinity, or nil when none are set.
func getPodAntiAffinityTerms(podAntiAffinity *v1.PodAntiAffinity) (terms []v1.PodAffinityTerm) {
	if podAntiAffinity == nil {
		return terms
	}
	if required := podAntiAffinity.RequiredDuringSchedulingIgnoredDuringExecution; len(required) != 0 {
		terms = required
	}
	// TODO: Uncomment this block when implement RequiredDuringSchedulingRequiredDuringExecution.
	//if len(podAntiAffinity.RequiredDuringSchedulingRequiredDuringExecution) != 0 {
	//	terms = append(terms, podAntiAffinity.RequiredDuringSchedulingRequiredDuringExecution...)
	//}
	return terms
}
// getMatchingAntiAffinityTerms walks every node's pods-with-affinity and
// collects the anti-affinity terms of existing pods whose selectors match the
// candidate pod. Node processing is parallelized (16 workers); the first
// error encountered is returned alongside whatever results were gathered.
func getMatchingAntiAffinityTerms(pod *v1.Pod, nodeInfoMap map[string]*schedulercache.NodeInfo) ([]matchingPodAntiAffinityTerm, error) {
	allNodeNames := make([]string, 0, len(nodeInfoMap))
	for name := range nodeInfoMap {
		allNodeNames = append(allNodeNames, name)
	}

	var lock sync.Mutex
	var result []matchingPodAntiAffinityTerm
	var firstError error
	// appendResult and catchError serialize worker access to the shared
	// result slice and error slot.
	appendResult := func(toAppend []matchingPodAntiAffinityTerm) {
		lock.Lock()
		defer lock.Unlock()
		result = append(result, toAppend...)
	}
	catchError := func(err error) {
		lock.Lock()
		defer lock.Unlock()
		if firstError == nil {
			firstError = err
		}
	}

	processNode := func(i int) {
		nodeInfo := nodeInfoMap[allNodeNames[i]]
		node := nodeInfo.Node()
		if node == nil {
			catchError(fmt.Errorf("node not found"))
			return
		}
		var nodeResult []matchingPodAntiAffinityTerm
		for _, existingPod := range nodeInfo.PodsWithAffinity() {
			affinity := existingPod.Spec.Affinity
			if affinity == nil {
				continue
			}
			for _, term := range getPodAntiAffinityTerms(affinity.PodAntiAffinity) {
				namespaces := priorityutil.GetNamespacesFromPodAffinityTerm(existingPod, &term)
				selector, err := metav1.LabelSelectorAsSelector(term.LabelSelector)
				if err != nil {
					catchError(err)
					return
				}
				if priorityutil.PodMatchesTermsNamespaceAndSelector(pod, namespaces, selector) {
					// BUGFIX: take the address of a per-iteration copy.
					// Storing &term directly would capture the single range
					// loop variable, so every stored entry would alias the
					// last term iterated.
					term := term
					nodeResult = append(nodeResult, matchingPodAntiAffinityTerm{term: &term, node: node})
				}
			}
		}
		if len(nodeResult) > 0 {
			appendResult(nodeResult)
		}
	}
	workqueue.Parallelize(16, len(allNodeNames), processNode)
	return result, firstError
}
// getMatchingAntiAffinityTerms scans all existing pods and returns the
// anti-affinity terms (paired with the node each pod runs on) whose selectors
// match the candidate pod. Sequential fallback used when precomputed
// predicate metadata is unavailable.
func (c *PodAffinityChecker) getMatchingAntiAffinityTerms(pod *v1.Pod, allPods []*v1.Pod) ([]matchingPodAntiAffinityTerm, error) {
	var result []matchingPodAntiAffinityTerm
	for _, existingPod := range allPods {
		affinity := existingPod.Spec.Affinity
		if affinity != nil && affinity.PodAntiAffinity != nil {
			existingPodNode, err := c.info.GetNodeInfo(existingPod.Spec.NodeName)
			if err != nil {
				return nil, err
			}
			for _, term := range getPodAntiAffinityTerms(affinity.PodAntiAffinity) {
				namespaces := priorityutil.GetNamespacesFromPodAffinityTerm(existingPod, &term)
				selector, err := metav1.LabelSelectorAsSelector(term.LabelSelector)
				if err != nil {
					return nil, err
				}
				if priorityutil.PodMatchesTermsNamespaceAndSelector(pod, namespaces, selector) {
					// BUGFIX: take the address of a per-iteration copy.
					// Storing &term directly would capture the single range
					// loop variable, so every stored entry would alias the
					// last term iterated.
					term := term
					result = append(result, matchingPodAntiAffinityTerm{term: &term, node: existingPodNode})
				}
			}
		}
	}
	return result, nil
}
// Checks if scheduling the pod onto this node would break any anti-affinity
// rules indicated by the existing pods.
func (c *PodAffinityChecker) satisfiesExistingPodsAntiAffinity(pod *v1.Pod, meta interface{}, node *v1.Node) bool {
	var matchingTerms []matchingPodAntiAffinityTerm
	if predicateMeta, ok := meta.(*predicateMetadata); ok {
		// Fast path: the matching terms were precomputed by the metadata producer.
		matchingTerms = predicateMeta.matchingAntiAffinityTerms
	} else {
		// Slow path: list every pod and compute the matching terms now.
		allPods, err := c.podLister.List(labels.Everything())
		if err != nil {
			glog.Errorf("Failed to get all pods, %+v", err)
			return false
		}
		if matchingTerms, err = c.getMatchingAntiAffinityTerms(pod, allPods); err != nil {
			glog.Errorf("Failed to get all terms that pod %+v matches, err: %+v", podName(pod), err)
			return false
		}
	}
	// The pod is rejected if any matching term's topology domain includes
	// the candidate node.
	for _, term := range matchingTerms {
		if len(term.term.TopologyKey) == 0 {
			glog.Error("Empty topologyKey is not allowed except for PreferredDuringScheduling pod anti-affinity")
			return false
		}
		if priorityutil.NodesHaveSameTopologyKey(node, term.node, term.term.TopologyKey) {
			glog.V(10).Infof("Cannot schedule pod %+v onto node %v,because of PodAntiAffinityTerm %v",
				podName(pod), node.Name, term.term)
			return false
		}
	}
	if glog.V(10) {
		// We explicitly don't do glog.V(10).Infof() to avoid computing all the parameters if this is
		// not logged. There is visible performance gain from it.
		glog.Infof("Schedule Pod %+v on Node %+v is allowed, existing pods anti-affinity rules satisfied.",
			podName(pod), node.Name)
	}
	return true
}
// Checks if scheduling the pod onto this node would break any rules of this pod.
func (c *PodAffinityChecker) satisfiesPodsAffinityAntiAffinity(pod *v1.Pod, node *v1.Node, affinity *v1.Affinity) bool {
	allPods, err := c.podLister.List(labels.Everything())
	if err != nil {
		// NOTE(review): this failure is silently treated as "predicate fails";
		// callers only see ErrPodAffinityNotMatch. Consider logging here.
		return false
	}
	// Check all affinity terms.
	for _, term := range getPodAffinityTerms(affinity.PodAffinity) {
		termMatches, matchingPodExists, err := c.anyPodMatchesPodAffinityTerm(pod, allPods, node, &term)
		if err != nil {
			glog.Errorf("Cannot schedule pod %+v onto node %v,because of PodAffinityTerm %v, err: %v",
				podName(pod), node.Name, term, err)
			return false
		}
		if !termMatches {
			// If the requirement matches a pod's own labels and namespace, and there are
			// no other such pods, then disregard the requirement. This is necessary to
			// not block forever because the first pod of the collection can't be scheduled.
			if matchingPodExists {
				glog.V(10).Infof("Cannot schedule pod %+v onto node %v,because of PodAffinityTerm %v, err: %v",
					podName(pod), node.Name, term, err)
				return false
			}
			namespaces := priorityutil.GetNamespacesFromPodAffinityTerm(pod, &term)
			selector, err := metav1.LabelSelectorAsSelector(term.LabelSelector)
			if err != nil {
				glog.Errorf("Cannot parse selector on term %v for pod %v. Details %v",
					term, podName(pod), err)
				return false
			}
			match := priorityutil.PodMatchesTermsNamespaceAndSelector(pod, namespaces, selector)
			if !match {
				glog.V(10).Infof("Cannot schedule pod %+v onto node %v,because of PodAffinityTerm %v, err: %v",
					podName(pod), node.Name, term, err)
				return false
			}
		}
	}
	// Check all anti-affinity terms.
	for _, term := range getPodAntiAffinityTerms(affinity.PodAntiAffinity) {
		termMatches, _, err := c.anyPodMatchesPodAffinityTerm(pod, allPods, node, &term)
		if err != nil || termMatches {
			glog.V(10).Infof("Cannot schedule pod %+v onto node %v,because of PodAntiAffinityTerm %v, err: %v",
				podName(pod), node.Name, term, err)
			return false
		}
	}
	if glog.V(10) {
		// We explicitly don't do glog.V(10).Infof() to avoid computing all the parameters if this is
		// not logged. There is visible performance gain from it.
		glog.Infof("Schedule Pod %+v on Node %+v is allowed, pod affinity/anti-affinity constraints satisfied.",
			podName(pod), node.Name)
	}
	return true
}
// PodToleratesNodeTaints checks if a pod's tolerations can tolerate the node taints.
func PodToleratesNodeTaints(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	taints, err := nodeInfo.Taints()
	if err != nil {
		return false, nil, err
	}

	if v1helper.TolerationsTolerateTaintsWithFilter(pod.Spec.Tolerations, taints, func(t *v1.Taint) bool {
		// PodToleratesNodeTaints is only interested in NoSchedule and NoExecute taints.
		return t.Effect == v1.TaintEffectNoSchedule || t.Effect == v1.TaintEffectNoExecute
	}) {
		return true, nil, nil
	}
	return false, []algorithm.PredicateFailureReason{ErrTaintsTolerationsNotMatch}, nil
}
// isPodBestEffort checks if pod is scheduled with best-effort QoS.
func isPodBestEffort(pod *v1.Pod) bool {
	return v1qos.GetPodQOS(pod) == v1.PodQOSBestEffort
}
// CheckNodeMemoryPressurePredicate checks if a pod can be scheduled on a node
// reporting memory pressure condition. Only best-effort pods are rejected;
// pods with resource requests/limits are allowed through.
func CheckNodeMemoryPressurePredicate(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	var podBestEffort bool

	if predicateMeta, ok := meta.(*predicateMetadata); ok {
		podBestEffort = predicateMeta.podBestEffort
	} else {
		// We couldn't parse metadata - fallback to computing it.
		podBestEffort = isPodBestEffort(pod)
	}

	// pod is not BestEffort pod
	if !podBestEffort {
		return true, nil, nil
	}

	// check if node is under memory pressure
	if nodeInfo.MemoryPressureCondition() == v1.ConditionTrue {
		return false, []algorithm.PredicateFailureReason{ErrNodeUnderMemoryPressure}, nil
	}

	return true, nil, nil
}
// CheckNodeDiskPressurePredicate checks if a pod can be scheduled on a node
// reporting disk pressure condition. Applies to all pods regardless of QoS.
func CheckNodeDiskPressurePredicate(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	// check if node is under disk pressure
	if nodeInfo.DiskPressureCondition() == v1.ConditionTrue {
		return false, []algorithm.PredicateFailureReason{ErrNodeUnderDiskPressure}, nil
	}

	return true, nil, nil
}
// VolumeNodeChecker holds the lookups needed to evaluate the volume/node
// topology predicate.
type VolumeNodeChecker struct {
	pvInfo  PersistentVolumeInfo      // resolves PersistentVolume objects by name
	pvcInfo PersistentVolumeClaimInfo // resolves PersistentVolumeClaim objects by namespace/name
	client  clientset.Interface
}
// NewVolumeNodePredicate evaluates if a pod can fit due to the volumes it requests, given
// that some volumes have node topology constraints, particularly when using Local PVs.
// The requirement is that any pod that uses a PVC that is bound to a PV with topology constraints
// must be scheduled to a node that satisfies the PV's topology labels.
func NewVolumeNodePredicate(pvInfo PersistentVolumeInfo, pvcInfo PersistentVolumeClaimInfo, client clientset.Interface) algorithm.FitPredicate {
	c := &VolumeNodeChecker{
		pvInfo:  pvInfo,
		pvcInfo: pvcInfo,
		client:  client,
	}
	return c.predicate
}
// predicate checks that every PV bound to one of the pod's PVCs has node
// affinity labels satisfied by the candidate node. Returns an error when a
// claim cannot be resolved (missing PVC, unbound claim, missing PV).
func (c *VolumeNodeChecker) predicate(pod *v1.Pod, meta interface{}, nodeInfo *schedulercache.NodeInfo) (bool, []algorithm.PredicateFailureReason, error) {
	if !utilfeature.DefaultFeatureGate.Enabled(features.PersistentLocalVolumes) {
		// Feature disabled: the predicate is a no-op.
		return true, nil, nil
	}

	// If a pod doesn't have any volume attached to it, the predicate will always be true.
	// Thus we make a fast path for it, to avoid unnecessary computations in this case.
	if len(pod.Spec.Volumes) == 0 {
		return true, nil, nil
	}

	node := nodeInfo.Node()
	if node == nil {
		return false, nil, fmt.Errorf("node not found")
	}

	glog.V(2).Infof("Checking for prebound volumes with node affinity")
	namespace := pod.Namespace
	manifest := &(pod.Spec)
	for i := range manifest.Volumes {
		volume := &manifest.Volumes[i]
		if volume.PersistentVolumeClaim == nil {
			// Only PVC-backed volumes can carry node topology constraints here.
			continue
		}
		pvcName := volume.PersistentVolumeClaim.ClaimName
		if pvcName == "" {
			return false, nil, fmt.Errorf("PersistentVolumeClaim had no name")
		}
		// Resolve volume -> claim -> bound PV.
		pvc, err := c.pvcInfo.GetPersistentVolumeClaimInfo(namespace, pvcName)
		if err != nil {
			return false, nil, err
		}

		if pvc == nil {
			return false, nil, fmt.Errorf("PersistentVolumeClaim was not found: %q", pvcName)
		}
		pvName := pvc.Spec.VolumeName
		if pvName == "" {
			return false, nil, fmt.Errorf("PersistentVolumeClaim is not bound: %q", pvcName)
		}

		pv, err := c.pvInfo.GetPersistentVolumeInfo(pvName)
		if err != nil {
			return false, nil, err
		}

		if pv == nil {
			return false, nil, fmt.Errorf("PersistentVolume not found: %q", pvName)
		}

		// A node-affinity mismatch is a predicate failure, not an error.
		err = volumeutil.CheckNodeAffinity(pv, node.Labels)
		if err != nil {
			glog.V(2).Infof("Won't schedule pod %q onto node %q due to volume %q node mismatch: %v", pod.Name, node.Name, pvName, err.Error())
			return false, []algorithm.PredicateFailureReason{ErrVolumeNodeConflict}, nil
		}
		glog.V(4).Infof("VolumeNode predicate allows node %q for pod %q due to volume %q", node.Name, pod.Name, pvName)
	}
	return true, nil, nil
}
| apache-2.0 |
chewbaccateam/hackfest | vendor/microsoft/windowsazure/tests/unit/WindowsAzure/ServiceBus/models/SqlRuleActionTest.php | 2265 | <?php
/**
* LICENSE: Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* PHP version 5
*
* @category Microsoft
*
* @author Azure PHP SDK <[email protected]>
* @copyright 2012 Microsoft Corporation
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache License 2.0
*
* @link https://github.com/WindowsAzure/azure-sdk-for-php
*/
namespace Tests\unit\WindowsAzure\ServiceBus\models;
use WindowsAzure\ServiceBus\Models\SqlRuleAction;
/**
* Unit tests for class WrapAccessTokenResult.
*
* @category Microsoft
*
* @author Azure PHP SDK <[email protected]>
* @copyright 2012 Microsoft Corporation
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache License 2.0
*
* @version Release: 0.5.0_2016-11
*
* @link https://github.com/WindowsAzure/azure-sdk-for-php
*/
class SqlRuleActionTest extends \PHPUnit_Framework_TestCase
{
    /**
     * @covers \WindowsAzure\ServiceBus\Models\SqlRuleAction::__construct
     */
    public function testSqlRuleActionConstructor()
    {
        // Test: a freshly constructed rule action must be a valid object.
        $action = new SqlRuleAction();

        // Assert
        $this->assertNotNull($action);
    }

    /**
     * @covers \WindowsAzure\ServiceBus\Models\SqlRuleAction::getSqlExpression
     * @covers \WindowsAzure\ServiceBus\Models\SqlRuleAction::setSqlExpression
     */
    public function testGetSetSqlExpression()
    {
        // Setup
        $expression = 'testSqlExpression';
        $action = new SqlRuleAction();

        // Test: the getter must round-trip exactly what the setter stored.
        $action->setSqlExpression($expression);

        // Assert
        $this->assertEquals(
            $expression,
            $action->getSqlExpression()
        );
    }
}
| apache-2.0 |
Becca42/geowave | extensions/cli/geoserver/src/main/java/mil/nga/giat/geowave/cli/geoserver/GeoServerRemoveStyleCommand.java | 2009 | package mil.nga.giat.geowave.cli.geoserver;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import mil.nga.giat.geowave.core.cli.annotations.GeowaveOperation;
import mil.nga.giat.geowave.core.cli.api.Command;
import mil.nga.giat.geowave.core.cli.api.OperationParams;
import mil.nga.giat.geowave.core.cli.operations.config.options.ConfigOptions;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.ParameterException;
import com.beust.jcommander.Parameters;
@GeowaveOperation(name = "rmstyle", parentOperation = GeoServerSection.class)
@Parameters(commandDescription = "Remove GeoServer Style")
public class GeoServerRemoveStyleCommand implements
		Command
{
	private GeoServerRestClient geoserverClient = null;

	@Parameter(description = "<style name>")
	private List<String> parameters = new ArrayList<String>();

	private String styleName = null;

	/**
	 * Lazily builds the GeoServer REST client from the locally configured
	 * properties file. Always reports success.
	 */
	@Override
	public boolean prepare(
			OperationParams params ) {
		if (geoserverClient != null) {
			// Client already created; nothing to prepare.
			return true;
		}
		// Get the local config for GeoServer
		final File propFile = (File) params.getContext().get(
				ConfigOptions.PROPERTIES_FILE_CONTEXT);
		final GeoServerConfig config = new GeoServerConfig(
				propFile);

		// Create the rest client used by execute()
		geoserverClient = new GeoServerRestClient(
				config);

		// Successfully prepared
		return true;
	}

	/**
	 * Deletes the named style via the GeoServer REST API, reporting the
	 * outcome on stdout (success) or stderr (failure).
	 */
	@Override
	public void execute(
			OperationParams params )
			throws Exception {
		if (parameters.size() != 1) {
			throw new ParameterException(
					"Requires argument: <style name>");
		}

		styleName = parameters.get(0);

		final Response deleteStyleResponse = geoserverClient.deleteStyle(styleName);
		if (deleteStyleResponse.getStatus() != Status.OK.getStatusCode()) {
			System.err.println("Error deleting style '" + styleName + "' on GeoServer; code = "
					+ deleteStyleResponse.getStatus());
			return;
		}
		System.out.println("Delete style '" + styleName + "' on GeoServer: OK");
	}
}
| apache-2.0 |
AndroidX/androidx | appcompat/appcompat/src/main/java/androidx/appcompat/widget/ContentFrameLayout.java | 7578 | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.appcompat.widget;
import static android.view.View.MeasureSpec.AT_MOST;
import static android.view.View.MeasureSpec.EXACTLY;
import static android.view.View.MeasureSpec.getMode;
import static androidx.annotation.RestrictTo.Scope.LIBRARY;
import android.content.Context;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.TypedValue;
import android.widget.FrameLayout;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RestrictTo;
import androidx.core.view.ViewCompat;
/**
 * @hide
 */
@RestrictTo(LIBRARY)
public class ContentFrameLayout extends FrameLayout {

    public interface OnAttachListener {
        void onDetachedFromWindow();
        void onAttachedFromWindow();
    }

    // Window size attributes (min/fixed width and height, split into
    // major/minor axes). Filled in by callers through the get*() accessors;
    // a null or TYPE_NULL value means the attribute is unset.
    private TypedValue mMinWidthMajor;
    private TypedValue mMinWidthMinor;
    private TypedValue mFixedWidthMajor;
    private TypedValue mFixedWidthMinor;
    private TypedValue mFixedHeightMajor;
    private TypedValue mFixedHeightMinor;

    // Padding applied by the window decor; subtracted from fixed/min sizes.
    private final Rect mDecorPadding;

    private OnAttachListener mAttachListener;

    public ContentFrameLayout(@NonNull Context context) {
        this(context, null);
    }

    public ContentFrameLayout(@NonNull Context context, @Nullable AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public ContentFrameLayout(
            @NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        mDecorPadding = new Rect();
    }

    /**
     * @hide
     */
    @RestrictTo(LIBRARY)
    public void dispatchFitSystemWindows(Rect insets) {
        fitSystemWindows(insets);
    }

    public void setAttachListener(OnAttachListener attachListener) {
        mAttachListener = attachListener;
    }

    /**
     * Notify this view of the window decor view's padding. We use these values when working out
     * our size for the window size attributes.
     *
     * @hide
     */
    @RestrictTo(LIBRARY)
    public void setDecorPadding(int left, int top, int right, int bottom) {
        mDecorPadding.set(left, top, right, bottom);
        if (ViewCompat.isLaidOut(this)) {
            requestLayout();
        }
    }

    /**
     * Resolves a window size attribute into pixels.
     *
     * @param value    the attribute value; may be {@code null} or unset
     * @param baseSize the size (in px) that a fraction value is relative to
     * @param metrics  display metrics used to resolve dimension values
     * @return the resolved size in pixels, or 0 when the value is unset or of
     *         an unexpected type
     */
    private static int resolveSizeValue(TypedValue value, int baseSize, DisplayMetrics metrics) {
        if (value == null || value.type == TypedValue.TYPE_NULL) {
            return 0;
        }
        if (value.type == TypedValue.TYPE_DIMENSION) {
            return (int) value.getDimension(metrics);
        }
        if (value.type == TypedValue.TYPE_FRACTION) {
            return (int) value.getFraction(baseSize, baseSize);
        }
        return 0;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        final DisplayMetrics metrics = getContext().getResources().getDisplayMetrics();
        final boolean isPortrait = metrics.widthPixels < metrics.heightPixels;

        final int widthMode = getMode(widthMeasureSpec);
        final int heightMode = getMode(heightMeasureSpec);

        // Apply the fixed width attribute, if set, clamped to the incoming
        // AT_MOST constraint.
        boolean fixedWidth = false;
        if (widthMode == AT_MOST) {
            final TypedValue tvw = isPortrait ? mFixedWidthMinor : mFixedWidthMajor;
            int w = resolveSizeValue(tvw, metrics.widthPixels, metrics);
            if (w > 0) {
                w -= (mDecorPadding.left + mDecorPadding.right);
                final int widthSize = MeasureSpec.getSize(widthMeasureSpec);
                widthMeasureSpec = MeasureSpec.makeMeasureSpec(
                        Math.min(w, widthSize), EXACTLY);
                fixedWidth = true;
            }
        }

        // Apply the fixed height attribute, if set.
        if (heightMode == AT_MOST) {
            final TypedValue tvh = isPortrait ? mFixedHeightMajor : mFixedHeightMinor;
            int h = resolveSizeValue(tvh, metrics.heightPixels, metrics);
            if (h > 0) {
                h -= (mDecorPadding.top + mDecorPadding.bottom);
                final int heightSize = MeasureSpec.getSize(heightMeasureSpec);
                heightMeasureSpec = MeasureSpec.makeMeasureSpec(
                        Math.min(h, heightSize), EXACTLY);
            }
        }

        super.onMeasure(widthMeasureSpec, heightMeasureSpec);

        // If no fixed width was applied, grow up to the min width attribute
        // and re-measure when the first pass came out too narrow.
        int width = getMeasuredWidth();
        boolean measure = false;

        widthMeasureSpec = MeasureSpec.makeMeasureSpec(width, EXACTLY);

        if (!fixedWidth && widthMode == AT_MOST) {
            final TypedValue tv = isPortrait ? mMinWidthMinor : mMinWidthMajor;
            int min = resolveSizeValue(tv, metrics.widthPixels, metrics);
            if (min > 0) {
                min -= (mDecorPadding.left + mDecorPadding.right);
            }
            if (width < min) {
                widthMeasureSpec = MeasureSpec.makeMeasureSpec(min, EXACTLY);
                measure = true;
            }
        }

        if (measure) {
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        }
    }

    public TypedValue getMinWidthMajor() {
        if (mMinWidthMajor == null) mMinWidthMajor = new TypedValue();
        return mMinWidthMajor;
    }

    public TypedValue getMinWidthMinor() {
        if (mMinWidthMinor == null) mMinWidthMinor = new TypedValue();
        return mMinWidthMinor;
    }

    public TypedValue getFixedWidthMajor() {
        if (mFixedWidthMajor == null) mFixedWidthMajor = new TypedValue();
        return mFixedWidthMajor;
    }

    public TypedValue getFixedWidthMinor() {
        if (mFixedWidthMinor == null) mFixedWidthMinor = new TypedValue();
        return mFixedWidthMinor;
    }

    public TypedValue getFixedHeightMajor() {
        if (mFixedHeightMajor == null) mFixedHeightMajor = new TypedValue();
        return mFixedHeightMajor;
    }

    public TypedValue getFixedHeightMinor() {
        if (mFixedHeightMinor == null) mFixedHeightMinor = new TypedValue();
        return mFixedHeightMinor;
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        if (mAttachListener != null) {
            mAttachListener.onAttachedFromWindow();
        }
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        if (mAttachListener != null) {
            mAttachListener.onDetachedFromWindow();
        }
    }
}
| apache-2.0 |
michael-jia-sage/libgoogle | src/gapps/userfeed.cs | 1602 | /* Copyright (c) 2007 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Text;
using System.Xml;
using Google.GData.Client;
using Google.GData.Extensions;
using Google.GData.Extensions.Apps;
namespace Google.GData.Apps
{
    /// <summary>
    /// Feed API customization class for defining user account feed.
    /// </summary>
    public class UserFeed : AbstractFeed
    {
        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="uriBase">The uri for the user account feed.</param>
        /// <param name="iService">The user account service.</param>
        public UserFeed(Uri uriBase, IService iService)
            : base(uriBase, iService)
        {
            // Register the Google Apps provisioning extensions so entries in
            // this feed can parse provisioning-specific XML elements.
            GAppsExtensions.AddProvisioningExtensions(this);
        }

        /// <summary>
        /// Overridden. Returns a new <code>UserEntry</code>.
        /// </summary>
        /// <returns>the new <code>UserEntry</code></returns>
        public override AtomEntry CreateFeedEntry()
        {
            return new UserEntry();
        }
    }
| apache-2.0 |
tempbottle/flume-ng-kafka-sink | impl/src/test/java/com.thilinamb.flume.sink/util/ZooKeeperLocal.java | 2197 | /**
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
limitations under the License.
*/
package com.thilinamb.flume.sink.util;
import org.apache.zookeeper.server.ServerConfig;
import org.apache.zookeeper.server.ZooKeeperServerMain;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Properties;
/**
 * A local Zookeeper server for running unit tests.
 * Reference: https://gist.github.com/fjavieralba/7930018/
 */
public class ZooKeeperLocal {

    private static final Logger logger = LoggerFactory.getLogger(ZooKeeperLocal.class);

    // The embedded standalone ZooKeeper server instance.
    private ZooKeeperServerMain zooKeeperServer;

    /**
     * Parses the given ZooKeeper properties and starts a standalone server on
     * a background thread; the constructor returns immediately while the
     * server keeps running.
     *
     * @param zkProperties standard ZooKeeper server properties (clientPort,
     *                     dataDir, ...)
     * @throws IOException declared for callers; note that configuration parse
     *                     failures are rethrown as RuntimeException instead
     */
    public ZooKeeperLocal(Properties zkProperties) throws IOException{
        QuorumPeerConfig quorumConfiguration = new QuorumPeerConfig();
        try {
            quorumConfiguration.parseProperties(zkProperties);
        } catch(Exception e) {
            // A bad test configuration is a programming error: fail fast.
            throw new RuntimeException(e);
        }

        zooKeeperServer = new ZooKeeperServerMain();
        final ServerConfig configuration = new ServerConfig();
        configuration.readFrom(quorumConfiguration);

        // runFromConfig blocks for the lifetime of the server, so run it on
        // its own thread. Startup failures are only logged, not propagated.
        new Thread() {
            public void run() {
                try {
                    zooKeeperServer.runFromConfig(configuration);
                } catch (IOException e) {
                    logger.error("Zookeeper startup failed.", e);
                }
            }
        }.start();
    }
}
| apache-2.0 |
googleapis/google-cloud-php-billing-budgets | src/V1/LastPeriodAmount.php | 1331 | <?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/billing/budgets/v1/budget_model.proto
namespace Google\Cloud\Billing\Budgets\V1;
use Google\Protobuf\Internal\GPBType;
use Google\Protobuf\Internal\RepeatedField;
use Google\Protobuf\Internal\GPBUtil;
/**
 * Describes a budget amount targeted to the last
 * [Filter.calendar_period][google.cloud.billing.budgets.v1.Filter.calendar_period]
 * spend. At this time, the amount is automatically 100% of the last calendar
 * period's spend; that is, there are no other options yet.
 * Future configuration options will be described here (for example, configuring
 * a percentage of last period's spend).
 * LastPeriodAmount cannot be set for a budget configured with
 * a
 * [Filter.custom_period][google.cloud.billing.budgets.v1.Filter.custom_period].
 *
 * Generated from protobuf message <code>google.cloud.billing.budgets.v1.LastPeriodAmount</code>
 */
class LastPeriodAmount extends \Google\Protobuf\Internal\Message
{
    // NOTE: this class is generated from budget_model.proto and carries no
    // fields of its own; change the .proto, not this file.

    /**
     * Constructor.
     *
     * @param array $data {
     *     Optional. Data for populating the Message object.
     *
     * }
     */
    public function __construct($data = NULL) {
        // Ensure the containing proto file's metadata is registered exactly once.
        \GPBMetadata\Google\Cloud\Billing\Budgets\V1\BudgetModel::initOnce();
        parent::__construct($data);
    }
}
| apache-2.0 |
papicella/snappy-store | gemfire-core/src/main/java/com/gemstone/gemfire/admin/internal/CacheServerImpl.java | 6469 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.admin.internal;
import com.gemstone.gemfire.admin.*;
import com.gemstone.gemfire.distributed.internal.DM;
import com.gemstone.gemfire.distributed.internal.DistributionManager;
import com.gemstone.gemfire.internal.admin.GemFireVM;
import com.gemstone.gemfire.internal.admin.remote.RemoteApplicationVM;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
/**
* Implements the administrative interface to a cache server.
*
* @author David Whitlock
* @since 3.5
*/
/**
 * Implements the administrative interface to a cache server.
 *
 * @author David Whitlock
 * @since 3.5
 */
public class CacheServerImpl extends ManagedSystemMemberImpl
  implements CacheVm, CacheServer {

  /** Launcher executable name used when running against GemFireXD/SQLFire. */
  private static final String GFXD_LAUNCHER_NAME = "gfxd";

  /** Launcher executable name used when running against plain GemFire. */
  private static final String GFE_LAUNCHER_NAME = "cacheserver";

  /**
   * Selects the GemFireXD launcher (and its {@code server <action>} command
   * syntax) when the {@code isSqlFire} system property is set to true.
   */
  private static final boolean isSqlFire = Boolean.getBoolean("isSqlFire");

  /** How many new <code>CacheServer</code>s have been created? */
  private static int newCacheServers = 0;

  /////////////////////// Instance Fields ///////////////////////

  /** The configuration object for this cache server */
  private final CacheServerConfigImpl config;

  ///////////////////////// Constructors ////////////////////////

  /**
   * Creates a new <code>CacheServerImpl</code> that represents a
   * non-existing (unstarted) cache server in a given distributed
   * system.
   */
  public CacheServerImpl(AdminDistributedSystemImpl system,
                         CacheVmConfig config)
    throws AdminException {

    super(system, config);
    this.config = (CacheServerConfigImpl) config;
    this.config.setManagedEntity(this);
  }

  /**
   * Creates a new <code>CacheServerImpl</code> that represents an
   * existing dedicated cache server in a given distributed system.
   */
  public CacheServerImpl(AdminDistributedSystemImpl system,
                         GemFireVM vm)
    throws AdminException {

    super(system, vm);
    this.config = new CacheServerConfigImpl(vm);
  }

  ////////////////////// Instance Methods //////////////////////

  @Override
  public SystemMemberType getType() {
    return SystemMemberType.CACHE_VM;
  }

  /**
   * Returns a cluster-unique id for a newly created cache VM.
   * Synchronized on the class because the counter is static.
   */
  public String getNewId() {
    synchronized (CacheServerImpl.class) {
      return "CacheVm" + (++newCacheServers);
    }
  }

  /**
   * Validates the configuration and launches this cache server via the
   * managed-entity controller. A no-op if the server is already running.
   */
  public void start() throws AdminException {
    if (!needToStart()) {
      return;
    }

    this.config.validate();
    this.controller.start(this);
    this.config.setManagedEntity(this);
  }

  /**
   * Stops this cache server via the managed-entity controller.
   * A no-op if the server is not running.
   */
  public void stop() {
    if (!needToStop()) {
      return;
    }

    this.controller.stop(this);
    // NOTE: DistributedSystem nodeLeft will then set this.manager to null
    this.config.setManagedEntity(null);
  }

  /**
   * Returns whether this cache server is currently running. Prefers the
   * distribution manager's membership view; falls back to asking the
   * managed-entity controller when no DM is available.
   */
  public boolean isRunning() {
    DM dm = ((AdminDistributedSystemImpl)getDistributedSystem()).getDistributionManager();
    if (dm == null) {
      try {
        return this.controller.isRunning(this);
      }
      catch (IllegalStateException e) {
        // Controller cannot determine state (e.g. system disconnected).
        return false;
      }
    }
    return ((DistributionManager)dm).getDistributionManagerIdsIncludingAdmin().contains(getDistributedMember());
  }

  public CacheServerConfig getConfig() {
    return this.config;
  }

  public CacheVmConfig getVmConfig() {
    return this.config;
  }

  //////////////////////// Command execution ////////////////////////

  public ManagedEntityConfig getEntityConfig() {
    return this.getConfig();
  }

  public String getEntityType() {
    // Fix bug 32564
    return "Cache Vm";
  }

  /**
   * Builds the command prefix shared by the start/stop/status commands:
   * the product launcher executable, the launcher action, and the
   * {@code -dir=<working directory>} argument.
   *
   * <p>A StringBuffer (rather than StringBuilder) is returned because the
   * inherited {@code appendConfiguration} helper operates on a StringBuffer.
   *
   * @param gfeAction  action passed to the GemFire launcher
   * @param gfxdAction action passed to the GemFireXD launcher
   */
  private StringBuffer launcherCommand(String gfeAction, String gfxdAction) {
    StringBuffer sb = new StringBuffer();
    if (!isSqlFire) {
      sb.append(this.controller.getProductExecutable(this, GFE_LAUNCHER_NAME));
      sb.append(" ").append(gfeAction).append(" -dir=");
    } else {
      sb.append(this.controller.getProductExecutable(this, GFXD_LAUNCHER_NAME));
      sb.append(" ").append(gfxdAction).append(" -dir=");
    }
    sb.append(this.getConfig().getWorkingDirectory());
    return sb;
  }

  /**
   * Returns the full OS command used to launch this cache server,
   * including the optional cache XML file and classpath arguments plus
   * the inherited configuration properties.
   */
  public String getStartCommand() {
    StringBuffer sb = launcherCommand("start", "server start");

    String file = this.getConfig().getCacheXMLFile();
    if (file != null && file.length() > 0) {
      sb.append(" ");
      sb.append(com.gemstone.gemfire.distributed.internal.DistributionConfig.CACHE_XML_FILE_NAME);
      sb.append("=");
      sb.append(file);
    }

    String classpath = this.getConfig().getClassPath();
    if (classpath != null && classpath.length() > 0) {
      sb.append(" -classpath=");
      sb.append(classpath);
    }

    appendConfiguration(sb);

    return sb.toString().trim();
  }

  /** Returns the full OS command used to stop this cache server. */
  public String getStopCommand() {
    return launcherCommand("stop", "server stop").toString().trim();
  }

  /** Returns the full OS command used to query this cache server's status. */
  public String getIsRunningCommand() {
    return launcherCommand("status", "server status").toString().trim();
  }

  /**
   * Find whether this server is primary for given client (durableClientId)
   *
   * @param durableClientId -
   *          durable-id of the client
   * @return true if the server is primary for given client
   *
   * @since 5.6
   */
  public boolean isPrimaryForDurableClient(String durableClientId)
  {
    RemoteApplicationVM vm = (RemoteApplicationVM)this.getGemFireVM();
    boolean isPrimary = false;
    if (vm != null) {
      isPrimary = vm.isPrimaryForDurableClient(durableClientId);
    }
    return isPrimary;
  }
}
| apache-2.0 |
peridotperiod/isis | tck/tck-viewer-restfulobjects/src/test/java/org/apache/isis/viewer/restfulobjects/tck/domainobject/oid/collection/Get_whenDoesntExistOid_then_404.java | 2570 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.viewer.restfulobjects.tck.domainobject.oid.collection;
import org.jboss.resteasy.client.core.executors.URLConnectionClientExecutor;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.apache.isis.viewer.restfulobjects.applib.RestfulHttpMethod;
import org.apache.isis.viewer.restfulobjects.applib.client.RestfulClient;
import org.apache.isis.viewer.restfulobjects.applib.client.RestfulRequest;
import org.apache.isis.viewer.restfulobjects.applib.client.RestfulResponse;
import org.apache.isis.viewer.restfulobjects.applib.version.VersionRepresentation;
import org.apache.isis.viewer.restfulobjects.tck.IsisWebServerRule;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
public class Get_whenDoesntExistOid_then_404 {

    @Rule
    public IsisWebServerRule webServerRule = new IsisWebServerRule();

    private RestfulClient client;

    @Before
    public void setUp() throws Exception {
        client = webServerRule.getClient(new URLConnectionClientExecutor());
    }

    @Test
    public void returns404() throws Exception {
        // a known oid responds OK; an unknown oid must respond 404
        fetchCollectionThenAssertStatus("73", RestfulResponse.HttpStatusCode.OK);
        fetchCollectionThenAssertStatus("nonExistentOid", RestfulResponse.HttpStatusCode.NOT_FOUND);
    }

    /**
     * Fetches the 'visibleAndEditableCollection' collection of the BSRL object
     * identified by the given oid and asserts the resulting HTTP status.
     */
    private void fetchCollectionThenAssertStatus(final String oid, final RestfulResponse.HttpStatusCode expectedStatus) {
        // given
        final String path = "objects/BSRL/" + oid + "/collections/visibleAndEditableCollection";
        final RestfulRequest request = client.createRequest(RestfulHttpMethod.GET, path);

        // when
        final RestfulResponse<VersionRepresentation> restfulResponse = request.executeT();

        // then
        assertThat(restfulResponse.getStatus(), is(expectedStatus));
    }
}
| apache-2.0 |
dain/presto | testing/trino-product-tests/src/main/java/io/trino/tests/product/cassandra/TestSelect.java | 19572 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.cassandra;
import com.datastax.driver.core.utils.Bytes;
import io.airlift.units.Duration;
import io.trino.jdbc.Row;
import io.trino.tempto.ProductTest;
import io.trino.tempto.Requirement;
import io.trino.tempto.RequirementsProvider;
import io.trino.tempto.configuration.Configuration;
import io.trino.tempto.internal.query.CassandraQueryExecutor;
import io.trino.tempto.query.QueryResult;
import org.assertj.core.api.Assertions;
import org.testng.annotations.Test;
import java.sql.Date;
import java.sql.Timestamp;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.function.Consumer;
import static io.trino.tempto.Requirements.compose;
import static io.trino.tempto.assertions.QueryAssert.Row.row;
import static io.trino.tempto.assertions.QueryAssert.assertThat;
import static io.trino.tempto.fulfillment.table.TableRequirements.immutableTable;
import static io.trino.tempto.query.QueryExecutor.query;
import static io.trino.tests.product.TestGroups.CASSANDRA;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.TpchTableResults.PRESTO_NATION_RESULT;
import static io.trino.tests.product.cassandra.CassandraTpchTableDefinitions.CASSANDRA_NATION;
import static io.trino.tests.product.cassandra.CassandraTpchTableDefinitions.CASSANDRA_SUPPLIER;
import static io.trino.tests.product.cassandra.DataTypesTableDefinition.CASSANDRA_ALL_TYPES;
import static io.trino.tests.product.cassandra.TestConstants.CONNECTOR_NAME;
import static io.trino.tests.product.cassandra.TestConstants.KEY_SPACE;
import static io.trino.tests.product.utils.QueryAssertions.assertContainsEventually;
import static io.trino.tests.product.utils.QueryExecutors.onTrino;
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.sql.JDBCType.BIGINT;
import static java.sql.JDBCType.BOOLEAN;
import static java.sql.JDBCType.DATE;
import static java.sql.JDBCType.DOUBLE;
import static java.sql.JDBCType.INTEGER;
import static java.sql.JDBCType.REAL;
import static java.sql.JDBCType.SMALLINT;
import static java.sql.JDBCType.TIMESTAMP_WITH_TIMEZONE;
import static java.sql.JDBCType.TINYINT;
import static java.sql.JDBCType.VARBINARY;
import static java.sql.JDBCType.VARCHAR;
import static java.util.concurrent.TimeUnit.MINUTES;
/**
 * Product tests for SELECT queries issued against Cassandra through the Trino
 * Cassandra connector: basic routing of partition/clustering-key predicates,
 * the full Cassandra-to-Trino type mapping, joins, materialized views, and
 * tuple types. Tables are provisioned via Tempto {@code Requirement}s.
 */
public class TestSelect
        extends ProductTest
        implements RequirementsProvider
{
    // Tempto configuration captured in getRequirements() so that ad-hoc CQL
    // statements can later be issued directly via onCassandra().
    private Configuration configuration;

    @Override
    public Requirement getRequirements(Configuration configuration)
    {
        this.configuration = configuration;
        return compose(
                immutableTable(CASSANDRA_NATION),
                immutableTable(CASSANDRA_SUPPLIER),
                immutableTable(CASSANDRA_ALL_TYPES));
    }

    /** Full scan of the nation table must match the canonical TPC-H result. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectNation()
    {
        String sql = format(
                "SELECT n_nationkey, n_name, n_regionkey, n_comment FROM %s.%s.%s",
                CONNECTOR_NAME,
                KEY_SPACE,
                CASSANDRA_NATION.getName());
        QueryResult queryResult = onTrino()
                .executeQuery(sql);

        assertThat(queryResult).matches(PRESTO_NATION_RESULT);
    }

    /** Equality predicate on the partition key (pushed down to Cassandra). */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectWithEqualityFilterOnPartitioningKey()
    {
        String sql = format(
                "SELECT n_nationkey FROM %s.%s.%s WHERE n_nationkey = 0",
                CONNECTOR_NAME,
                KEY_SPACE,
                CASSANDRA_NATION.getName());
        QueryResult queryResult = onTrino()
                .executeQuery(sql);

        assertThat(queryResult).containsOnly(row(0));
    }

    /** Range predicate on the partition key. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectWithFilterOnPartitioningKey()
    {
        String sql = format(
                "SELECT n_nationkey FROM %s.%s.%s WHERE n_nationkey > 23",
                CONNECTOR_NAME,
                KEY_SPACE,
                CASSANDRA_NATION.getName());
        QueryResult queryResult = onTrino()
                .executeQuery(sql);

        assertThat(queryResult).containsOnly(row(24));
    }

    /** Equality predicate on a non-key column (filtered on the Trino side). */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectWithEqualityFilterOnNonPartitioningKey()
    {
        String sql = format(
                "SELECT n_name FROM %s.%s.%s WHERE n_name = 'UNITED STATES'",
                CONNECTOR_NAME,
                KEY_SPACE,
                CASSANDRA_NATION.getName());
        QueryResult queryResult = onTrino()
                .executeQuery(sql);

        assertThat(queryResult).containsOnly(row("UNITED STATES"));
    }

    /** Range predicate on a non-key column. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectWithNonEqualityFilterOnNonPartitioningKey()
    {
        String sql = format(
                "SELECT n_name FROM %s.%s.%s WHERE n_name < 'B'",
                CONNECTOR_NAME,
                KEY_SPACE,
                CASSANDRA_NATION.getName());
        QueryResult queryResult = onTrino()
                .executeQuery(sql);

        assertThat(queryResult).containsOnly(row("ALGERIA"), row("ARGENTINA"));
    }

    /** Partition-key lookup on a table with more partitions than the fetch limit. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectWithMorePartitioningKeysThanLimit()
    {
        String sql = format(
                "SELECT s_suppkey FROM %s.%s.%s WHERE s_suppkey = 10",
                CONNECTOR_NAME,
                KEY_SPACE,
                CASSANDRA_SUPPLIER.getName());
        QueryResult queryResult = onTrino()
                .executeQuery(sql);

        assertThat(queryResult).containsOnly(row(10));
    }

    /** Same as above but filtering on a non-partition-key column. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectWithMorePartitioningKeysThanLimitNonPK()
    {
        String sql = format(
                "SELECT s_suppkey FROM %s.%s.%s WHERE s_name = 'Supplier#000000010'",
                CONNECTOR_NAME,
                KEY_SPACE,
                CASSANDRA_SUPPLIER.getName());
        QueryResult queryResult = onTrino()
                .executeQuery(sql);

        assertThat(queryResult).containsOnly(row(10));
    }

    /**
     * Exercises the complete Cassandra-to-Trino type mapping: the three rows
     * hold the minimum values, the maximum values, and all-NULLs respectively.
     */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testAllDataTypes()
    {
        // NOTE: DECIMAL is treated like DOUBLE
        QueryResult query = query(format(
                "SELECT a, b, bl, bo, d, do, dt, f, fr, i, integer, l, m, s, si, t, ti, ts, tu, u, v, vari FROM %s.%s.%s",
                CONNECTOR_NAME, KEY_SPACE, CASSANDRA_ALL_TYPES.getName()));

        assertThat(query)
                .hasColumns(VARCHAR, BIGINT, VARBINARY, BOOLEAN, DOUBLE, DOUBLE, DATE, REAL, VARCHAR, VARCHAR,
                        INTEGER, VARCHAR, VARCHAR, VARCHAR, SMALLINT, VARCHAR, TINYINT, TIMESTAMP_WITH_TIMEZONE, VARCHAR, VARCHAR,
                        VARCHAR, VARCHAR)
                .containsOnly(
                        row("\0",
                                Long.MIN_VALUE,
                                Bytes.fromHexString("0x00").array(),
                                false,
                                0f,
                                Double.MIN_VALUE,
                                Date.valueOf("1970-01-02"),
                                Float.MIN_VALUE,
                                "[0]",
                                "0.0.0.0",
                                Integer.MIN_VALUE,
                                "[0]",
                                "{\"\\u0000\":-2147483648,\"a\":0}",
                                "[0]",
                                Short.MIN_VALUE,
                                "\0",
                                Byte.MIN_VALUE,
                                Timestamp.from(OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant()),
                                "d2177dd0-eaa2-11de-a572-001b779c76e3",
                                "01234567-0123-0123-0123-0123456789ab",
                                "\0",
                                String.valueOf(Long.MIN_VALUE)),
                        row("the quick brown fox jumped over the lazy dog",
                                9223372036854775807L,
                                "01234".getBytes(UTF_8),
                                true,
                                Double.valueOf("99999999999999999999999999999999999999"),
                                Double.MAX_VALUE,
                                Date.valueOf("9999-12-31"),
                                Float.MAX_VALUE,
                                "[4,5,6,7]",
                                "255.255.255.255",
                                Integer.MAX_VALUE,
                                "[4,5,6]",
                                "{\"a\":1,\"b\":2}",
                                "[4,5,6]",
                                Short.MAX_VALUE,
                                "this is a text value",
                                Byte.MAX_VALUE,
                                Timestamp.from(OffsetDateTime.of(9999, 12, 31, 23, 59, 59, 0, ZoneOffset.UTC).toInstant()),
                                "d2177dd0-eaa2-11de-a572-001b779c76e3",
                                "01234567-0123-0123-0123-0123456789ab",
                                "abc",
                                String.valueOf(Long.MAX_VALUE)),
                        row("def", null, null, null, null, null, null, null, null, null, null,
                                null, null, null, null, null, null, null, null, null, null, null));
    }

    /** Self-join of the nation table on nationkey = regionkey. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testNationJoinNation()
    {
        String tableName = format("%s.%s.%s", CONNECTOR_NAME, KEY_SPACE, CASSANDRA_NATION.getName());
        String sql = format(
                "SELECT n1.n_name, n2.n_regionkey FROM %s n1 JOIN " +
                        "%s n2 ON n1.n_nationkey = n2.n_regionkey " +
                        "WHERE n1.n_nationkey=3",
                tableName,
                tableName);
        QueryResult queryResult = onTrino()
                .executeQuery(sql);

        assertThat(queryResult).containsOnly(
                row("CANADA", 3),
                row("CANADA", 3),
                row("CANADA", 3),
                row("CANADA", 3),
                row("CANADA", 3));
    }

    /** Cross-connector join: Cassandra nation against the TPC-H region table. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testNationJoinRegion()
    {
        String sql = format(
                "SELECT c.n_name, t.name FROM %s.%s.%s c JOIN " +
                        "tpch.tiny.region t ON c.n_regionkey = t.regionkey " +
                        "WHERE c.n_nationkey=3",
                CONNECTOR_NAME,
                KEY_SPACE,
                CASSANDRA_NATION.getName());
        QueryResult queryResult = onTrino()
                .executeQuery(sql);

        assertThat(queryResult).containsOnly(row("CANADA", "AMERICA"));
    }

    /**
     * Queries a materialized view over the all-types table. Materialized views
     * are populated asynchronously, so the test waits until the view appears in
     * SHOW TABLES and until Cassandra reports the build as fully replicated.
     */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectAllTypePartitioningMaterializedView()
    {
        String materializedViewName = format("%s_partitioned_mv", CASSANDRA_ALL_TYPES.getName());
        onCassandra(format("DROP MATERIALIZED VIEW IF EXISTS %s.%s", KEY_SPACE, materializedViewName));
        onCassandra(format("CREATE MATERIALIZED VIEW %s.%s AS SELECT * FROM %s.%s WHERE b IS NOT NULL PRIMARY KEY (a, b)",
                KEY_SPACE,
                materializedViewName,
                KEY_SPACE,
                CASSANDRA_ALL_TYPES.getName()));

        assertContainsEventually(() -> query(format("SHOW TABLES FROM %s.%s", CONNECTOR_NAME, KEY_SPACE)),
                query(format("SELECT '%s'", materializedViewName)),
                new Duration(1, MINUTES));

        // Materialized view may not return all results during the creation
        assertContainsEventually(() -> query(format("SELECT status_replicated FROM %s.system.built_views WHERE view_name = '%s'", CONNECTOR_NAME, materializedViewName)),
                query("SELECT true"),
                new Duration(1, MINUTES));

        QueryResult query = query(format(
                "SELECT a, b, bl, bo, d, do, dt, f, fr, i, integer, l, m, s, si, t, ti, ts, tu, u, v, vari FROM %s.%s.%s WHERE a = '\0'",
                CONNECTOR_NAME, KEY_SPACE, materializedViewName));

        assertThat(query)
                .hasColumns(VARCHAR, BIGINT, VARBINARY, BOOLEAN, DOUBLE, DOUBLE, DATE, REAL, VARCHAR, VARCHAR,
                        INTEGER, VARCHAR, VARCHAR, VARCHAR, SMALLINT, VARCHAR, TINYINT, TIMESTAMP_WITH_TIMEZONE, VARCHAR, VARCHAR,
                        VARCHAR, VARCHAR)
                .containsOnly(
                        row("\0",
                                Long.MIN_VALUE,
                                Bytes.fromHexString("0x00").array(),
                                false,
                                0f,
                                Double.MIN_VALUE,
                                Date.valueOf("1970-01-02"),
                                Float.MIN_VALUE,
                                "[0]",
                                "0.0.0.0",
                                Integer.MIN_VALUE,
                                "[0]",
                                "{\"\\u0000\":-2147483648,\"a\":0}",
                                "[0]",
                                Short.MIN_VALUE,
                                "\0",
                                Byte.MIN_VALUE,
                                Timestamp.from(OffsetDateTime.of(1970, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC).toInstant()),
                                "d2177dd0-eaa2-11de-a572-001b779c76e3",
                                "01234567-0123-0123-0123-0123456789ab",
                                "\0",
                                String.valueOf(Long.MIN_VALUE)));

        onCassandra(format("DROP MATERIALIZED VIEW IF EXISTS %s.%s", KEY_SPACE, materializedViewName));
    }

    /**
     * Queries a materialized view with a clustering order, checking both an
     * aggregate over a key range and a single-partition ordered lookup.
     */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectClusteringMaterializedView()
    {
        String mvName = "clustering_mv";
        onCassandra(format("DROP MATERIALIZED VIEW IF EXISTS %s.%s", KEY_SPACE, mvName));
        onCassandra(format("CREATE MATERIALIZED VIEW %s.%s AS " +
                        "SELECT * FROM %s.%s " +
                        "WHERE s_nationkey IS NOT NULL " +
                        "PRIMARY KEY (s_nationkey, s_suppkey) " +
                        "WITH CLUSTERING ORDER BY (s_nationkey DESC)",
                KEY_SPACE,
                mvName,
                KEY_SPACE,
                CASSANDRA_SUPPLIER.getName()));

        assertContainsEventually(() -> query(format("SHOW TABLES FROM %s.%s", CONNECTOR_NAME, KEY_SPACE)),
                query(format("SELECT '%s'", mvName)),
                new Duration(1, MINUTES));

        // Materialized view may not return all results during the creation
        assertContainsEventually(() -> query(format("SELECT status_replicated FROM %s.system.built_views WHERE view_name = '%s'", CONNECTOR_NAME, mvName)),
                query("SELECT true"),
                new Duration(1, MINUTES));

        QueryResult aggregateQueryResult = onTrino()
                .executeQuery(format(
                        "SELECT MAX(s_nationkey), SUM(s_suppkey), AVG(s_acctbal) " +
                                "FROM %s.%s.%s WHERE s_suppkey BETWEEN 1 AND 10 ", CONNECTOR_NAME, KEY_SPACE, mvName));
        assertThat(aggregateQueryResult).containsOnly(
                row(24, 55, 4334.653));

        QueryResult orderedResult = onTrino()
                .executeQuery(format(
                        "SELECT s_nationkey, s_suppkey, s_acctbal " +
                                "FROM %s.%s.%s WHERE s_nationkey = 1 LIMIT 1", CONNECTOR_NAME, KEY_SPACE, mvName));
        assertThat(orderedResult).containsOnly(
                row(1, 3, 4192.4));

        onCassandra(format("DROP MATERIALIZED VIEW IF EXISTS %s.%s", KEY_SPACE, mvName));
    }

    /** The connector is expected to negotiate CQL native protocol version 4. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testProtocolVersion()
    {
        QueryResult queryResult = onTrino()
                .executeQuery(format("SELECT native_protocol_version FROM %s.system.local", CONNECTOR_NAME));

        assertThat(queryResult).containsOnly(row("4"));
    }

    /** A frozen tuple column must surface as an anonymous Trino row. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectTupleType()
    {
        String tableName = "select_tuple_table";
        onCassandra(format("DROP TABLE IF EXISTS %s.%s", KEY_SPACE, tableName));
        onCassandra(format("CREATE TABLE %s.%s (key int, value frozen<tuple<int, text, float>>, PRIMARY KEY (key))",
                KEY_SPACE, tableName));

        onCassandra(format("INSERT INTO %s.%s (key, value) VALUES(1, (1, 'text-1', 1.11))", KEY_SPACE, tableName));

        QueryResult queryResult = onTrino().executeQuery(
                format("SELECT * FROM %s.%s.%s", CONNECTOR_NAME, KEY_SPACE, tableName));
        assertThat(queryResult).hasRowsCount(1);
        Assertions.assertThat(queryResult.row(0).get(0)).isEqualTo(1);
        Assertions.assertThat(queryResult.row(0).get(1)).isEqualTo(Row.builder()
                .addUnnamedField(1)
                .addUnnamedField("text-1")
                .addUnnamedField(1.11f)
                .build());

        onCassandra(format("DROP TABLE IF EXISTS %s.%s", KEY_SPACE, tableName));
    }

    /** Tuple columns inside the primary key, both scanned and used as a predicate. */
    @Test(groups = {CASSANDRA, PROFILE_SPECIFIC_TESTS})
    public void testSelectTupleTypeInPrimaryKey()
    {
        String tableName = "select_tuple_in_primary_key_table";
        onCassandra(format("DROP TABLE IF EXISTS %s.%s", KEY_SPACE, tableName));
        onCassandra(format("CREATE TABLE %s.%s (intkey int, tuplekey frozen<tuple<int, text, float>>, PRIMARY KEY (intkey, tuplekey))",
                KEY_SPACE, tableName));

        onCassandra(format("INSERT INTO %s.%s (intkey, tuplekey) VALUES(1, (1, 'text-1', 1.11))", KEY_SPACE, tableName));

        // Same expected row for both the full scan and the keyed lookup below.
        Consumer<QueryResult> assertion = queryResult -> {
            assertThat(queryResult).hasRowsCount(1);
            Assertions.assertThat(queryResult.row(0).get(0)).isEqualTo(1);
            Assertions.assertThat(queryResult.row(0).get(1)).isEqualTo(Row.builder()
                    .addUnnamedField(1)
                    .addUnnamedField("text-1")
                    .addUnnamedField(1.11f)
                    .build());
        };
        assertion.accept(onTrino().executeQuery(format("SELECT * FROM %s.%s.%s", CONNECTOR_NAME, KEY_SPACE, tableName)));
        assertion.accept(onTrino().executeQuery(format("SELECT * FROM %s.%s.%s WHERE intkey = 1 and tuplekey = row(1, 'text-1', 1.11)", CONNECTOR_NAME, KEY_SPACE, tableName)));

        onCassandra(format("DROP TABLE IF EXISTS %s.%s", KEY_SPACE, tableName));
    }

    /** Runs a raw CQL statement against the Cassandra instance under test. */
    private void onCassandra(String query)
    {
        try (CassandraQueryExecutor queryExecutor = new CassandraQueryExecutor(configuration)) {
            queryExecutor.executeQuery(query);
        }
    }
}
| apache-2.0 |
ryancoleman/autodock-vina | boost_1_54_0/libs/math/test/test_ibeta_inv.hpp | 6612 | // Copyright John Maddock 2006.
// Copyright Paul A. Bristow 2007, 2009
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#include <boost/math/concepts/real_concept.hpp>
#define BOOST_TEST_MAIN
#include <boost/test/unit_test.hpp>
#include <boost/test/floating_point_comparison.hpp>
#include <boost/math/special_functions/math_fwd.hpp>
#include <boost/math/tools/stats.hpp>
#include <boost/math/tools/test.hpp>
#include <boost/math/constants/constants.hpp>
#include <boost/type_traits/is_floating_point.hpp>
#include <boost/array.hpp>
#include "functor.hpp"
#include "test_beta_hooks.hpp"
#include "handle_test_result.hpp"
#include "table_type.hpp"
#ifndef SC_
#define SC_(x) static_cast<typename table_type<T>::type>(BOOST_JOIN(x, L))
#endif
// Round-trips each tabulated (a, b, x, ..., ibeta, ibetac) row through the
// inverse functions: ibeta_inv(a, b, ibeta(a, b, x)) should recover x, and
// likewise for ibetac_inv. Rows whose function value is too close to 0 or 1
// are special-cased, since no precision is recoverable there.
template <class Real, class T>
void test_inverses(const T& data)
{
   using namespace std;
   typedef typename T::value_type row_type;
   typedef Real                   value_type;

   // Tolerance (in percent) of roughly half the working precision; truncated
   // input data caps the achievable accuracy at low digit counts.
   value_type precision = static_cast<value_type>(ldexp(1.0, 1-boost::math::policies::digits<value_type, boost::math::policies::policy<> >()/2)) * 100;
   if(boost::math::policies::digits<value_type, boost::math::policies::policy<> >() < 50)
      precision = 1;   // 1% or two decimal digits, all we can hope for when the input is truncated

   for(unsigned i = 0; i < data.size(); ++i)
   {
      //
      // These inverse tests are thrown off if the output of the
      // incomplete beta is too close to 1: basically there is insufficient
      // information left in the value we're using as input to the inverse
      // to be able to get back to the original value.
      //
      if(Real(data[i][5]) == 0)
         BOOST_CHECK_EQUAL(boost::math::ibeta_inv(Real(data[i][0]), Real(data[i][1]), Real(data[i][5])), value_type(0));
      else if((1 - Real(data[i][5]) > 0.001)
         && (fabs(Real(data[i][5])) > 2 * boost::math::tools::min_value<value_type>())
         && (fabs(Real(data[i][5])) > 2 * boost::math::tools::min_value<double>()))
      {
         // Generic case: the inverse should reproduce the original x.
         value_type inv = boost::math::ibeta_inv(Real(data[i][0]), Real(data[i][1]), Real(data[i][5]));
         BOOST_CHECK_CLOSE(Real(data[i][2]), inv, precision);
      }
      else if(1 == Real(data[i][5]))
         BOOST_CHECK_EQUAL(boost::math::ibeta_inv(Real(data[i][0]), Real(data[i][1]), Real(data[i][5])), value_type(1));

      // Mirror-image checks for the complement: ibetac_inv of column 6.
      if(Real(data[i][6]) == 0)
         BOOST_CHECK_EQUAL(boost::math::ibetac_inv(Real(data[i][0]), Real(data[i][1]), Real(data[i][6])), value_type(1));
      else if((1 - Real(data[i][6]) > 0.001)
         && (fabs(Real(data[i][6])) > 2 * boost::math::tools::min_value<value_type>())
         && (fabs(Real(data[i][6])) > 2 * boost::math::tools::min_value<double>()))
      {
         value_type inv = boost::math::ibetac_inv(Real(data[i][0]), Real(data[i][1]), Real(data[i][6]));
         BOOST_CHECK_CLOSE(Real(data[i][2]), inv, precision);
      }
      else if(Real(data[i][6]) == 1)
         BOOST_CHECK_EQUAL(boost::math::ibetac_inv(Real(data[i][0]), Real(data[i][1]), Real(data[i][6])), value_type(0));
   }
}
// Tests ibeta_inv/ibetac_inv directly against precomputed reference values:
// columns 0-2 are the inputs (a, b, p) and columns 3/4 hold the expected
// results of ibeta_inv and ibetac_inv respectively. Error statistics are
// aggregated and reported via handle_test_result.
template <class Real, class T>
void test_inverses2(const T& data, const char* type_name, const char* test_name)
{
   typedef typename T::value_type row_type;
   typedef Real                   value_type;

   typedef value_type (*pg)(value_type, value_type, value_type);
#if defined(BOOST_MATH_NO_DEDUCED_FUNCTION_POINTERS)
   // Some compilers cannot deduce the template arguments of an overload set
   // when taking its address, so instantiate explicitly.
   pg funcp = boost::math::ibeta_inv<value_type, value_type, value_type>;
#else
   pg funcp = boost::math::ibeta_inv;
#endif

   boost::math::tools::test_result<value_type> result;

   std::cout << "Testing " << test_name << " with type " << type_name
      << "\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n";
   //
   // test ibeta_inv(T, T, T) against data:
   //
   result = boost::math::tools::test_hetero<Real>(
      data,
      bind_func<Real>(funcp, 0, 1, 2),
      extract_result<Real>(3));
   handle_test_result(result, data[result.worst()], result.worst(), type_name, "boost::math::ibeta_inv", test_name);
   //
   // test ibetac_inv(T, T, T) against data:
   //
#if defined(BOOST_MATH_NO_DEDUCED_FUNCTION_POINTERS)
   funcp = boost::math::ibetac_inv<value_type, value_type, value_type>;
#else
   funcp = boost::math::ibetac_inv;
#endif
   result = boost::math::tools::test_hetero<Real>(
      data,
      bind_func<Real>(funcp, 0, 1, 2),
      extract_result<Real>(4));
   handle_test_result(result, data[result.worst()], result.worst(), type_name, "boost::math::ibetac_inv", test_name);
}
// Driver: runs both flavours of inverse test over every data set compiled in.
// Define TEST_DATA to 1..4 to restrict the run to a single data set (useful
// for keeping per-TU compile times down in the test harness).
template <class T>
void test_beta(T, const char* name)
{
   (void)name;  // name is only used by the TEST_DATA == 4 branch below
   //
   // The actual test data is rather verbose, so it's in a separate file
   //
   // The contents are as follows, each row of data contains
   // five items, input value a, input value b, integration limits x, beta(a, b, x) and ibeta(a, b, x):
   //
#if !defined(TEST_DATA) || (TEST_DATA == 1)
#  include "ibeta_small_data.ipp"

   test_inverses<T>(ibeta_small_data);
#endif

#if !defined(TEST_DATA) || (TEST_DATA == 2)
#  include "ibeta_data.ipp"

   test_inverses<T>(ibeta_data);
#endif

#if !defined(TEST_DATA) || (TEST_DATA == 3)
#  include "ibeta_large_data.ipp"

   test_inverses<T>(ibeta_large_data);
#endif

#if !defined(TEST_DATA) || (TEST_DATA == 4)
#  include "ibeta_inv_data.ipp"

   test_inverses2<T>(ibeta_inv_data, name, "Inverse incomplete beta");
#endif
}
// Spot checks of ibeta_inv against a handful of independently computed
// reference values (50-digit literals, truncated to the working precision).
template <class T>
void test_spots(T)
{
   //
   // basic sanity checks, tolerance is 100 epsilon expressed as a percentage:
   //
   T tolerance = boost::math::tools::epsilon<T>() * 10000;
   BOOST_CHECK_CLOSE(
      ::boost::math::ibeta_inv(
         static_cast<T>(1),
         static_cast<T>(2),
         static_cast<T>(0.5)),
      static_cast<T>(0.29289321881345247559915563789515096071516406231153L), tolerance);
   BOOST_CHECK_CLOSE(
      ::boost::math::ibeta_inv(
         static_cast<T>(3),
         static_cast<T>(0.5),
         static_cast<T>(0.5)),
      static_cast<T>(0.92096723292382700385142816696980724853063433975470L), tolerance);
   BOOST_CHECK_CLOSE(
      ::boost::math::ibeta_inv(
         static_cast<T>(20.125),
         static_cast<T>(0.5),
         static_cast<T>(0.5)),
      static_cast<T>(0.98862133312917003480022776106012775747685870929920L), tolerance);
   BOOST_CHECK_CLOSE(
      ::boost::math::ibeta_inv(
         static_cast<T>(40),
         static_cast<T>(80),
         static_cast<T>(0.5)),
      static_cast<T>(0.33240456430025026300937492802591128972548660643778L), tolerance);
}
| apache-2.0 |
surdy/dcos | packages/adminrouter/extra/src/test-harness/tests/test_master.py | 13807 | # Copyright (C) Mesosphere, Inc. See LICENSE file for details.
import copy
import logging
import os
import time
import pytest
import requests
from generic_test_code.common import (
generic_correct_upstream_dest_test,
generic_correct_upstream_request_test,
generic_upstream_headers_verify_test,
generic_verify_response_test,
overridden_file_content,
verify_header,
)
from util import GuardedSubprocess, LineBufferFilter, SearchCriteria
log = logging.getLogger(__name__)
class TestServiceEndpoint:
    # Majority of /service endpoint tests are done with generic tests framework
    def test_if_accept_encoding_header_is_removed_from_upstream_request(
            self, master_ar_process_perclass, mocker, valid_user_header):
        # Advertise gzip support on top of the valid auth headers; Admin
        # Router must strip Accept-Encoding before proxying upstream.
        request_headers = dict(valid_user_header)
        request_headers['Accept-Encoding'] = 'gzip'

        generic_upstream_headers_verify_test(
            master_ar_process_perclass,
            request_headers,
            '/service/scheduler-alwaysthere/foo/bar/',
            assert_headers_absent=["Accept-Encoding"],
        )
class TestAgentEndpoint:
    # Tests for /agent endpoint routing are done in test_cache.py
    def test_if_accept_encoding_header_is_removed_from_upstream_request(
            self, master_ar_process_perclass, mocker, valid_user_header):
        # Merge gzip advertisement into a copy of the auth headers; the
        # upstream request must arrive without any Accept-Encoding header.
        request_headers = {**valid_user_header, 'Accept-Encoding': 'gzip'}

        generic_upstream_headers_verify_test(
            master_ar_process_perclass,
            request_headers,
            '/agent/de1baf83-c36c-4d23-9cb0-f89f596cd6ab-S1/',
            assert_headers_absent=["Accept-Encoding"],
        )
class TestSystemAgentEndpoint:
    # Tests for /agent endpoint routing are done in test_cache.py
    def test_if_accept_encoding_header_is_removed_from_upstream_request(
            self, master_ar_process_perclass, mocker, valid_user_header):
        # A copy of the valid headers, extended with gzip support; AR must
        # drop Accept-Encoding when forwarding to the system-logs upstream.
        request_headers = {**valid_user_header, 'Accept-Encoding': 'gzip'}

        generic_upstream_headers_verify_test(
            master_ar_process_perclass,
            request_headers,
            '/system/v1/agent/de1baf83-c36c-4d23-9cb0-f89f596cd6ab-S0/logs',
            assert_headers_absent=["Accept-Encoding"],
        )
class TestHistoryServiceRouting:
    """Routing of /dcos-history-service requests, which depends on where the
    Mesos leader is: local (proxy to the local history service), non-local
    (forward to the leader's Admin Router), unknown/invalid (503)."""

    def test_if_invalid_cache_case_is_handled(
            self, nginx_class, valid_user_header, dns_server_mock):
        # When `leader.mesos` stops resolving, the AR cache cannot be
        # refreshed and dependent endpoints must answer 503.
        ar = nginx_class()
        url = ar.make_url_from_path('/dcos-history-service/foo/bar')

        with GuardedSubprocess(ar):
            # Unfortunately there are upstreams that use `leader.mesos` and
            # removing this entry too early will result in Nginx failing to start.
            # So we need to do it right after nginx starts, but before first
            # cache update.
            time.sleep(1)
            dns_server_mock.remove_dns_entry('leader.mesos.')

            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)

            assert resp.status_code == 503
            assert 'cache is invalid' in resp.text

    def test_if_leader_is_unknown_state_is_handled(
            self, nginx_class, valid_user_header):
        # host_ip=None means AR cannot determine its own IP, so it cannot
        # decide whether the Mesos leader is local -> 503.
        ar = nginx_class(host_ip=None)
        url = ar.make_url_from_path('/dcos-history-service/foo/bar')

        with GuardedSubprocess(ar):
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)

            assert resp.status_code == 503
            assert 'mesos leader is unknown' in resp.text

    def test_if_leader_is_local_state_is_handled(
            self, nginx_class, valid_user_header):
        # Leader is local: the request is proxied to the local history
        # service with the `/dcos-history-service` prefix stripped.
        ar = nginx_class()
        path_sent = '/dcos-history-service/foo/bar?a1=GET+param&a2=foobarism'
        path_expected = '/foo/bar?a1=GET+param&a2=foobarism'

        with GuardedSubprocess(ar):
            generic_correct_upstream_dest_test(
                ar,
                valid_user_header,
                path_sent,
                "http://127.0.0.1:15055")
            generic_correct_upstream_request_test(
                ar,
                valid_user_header,
                path_sent,
                path_expected)
            generic_upstream_headers_verify_test(
                ar,
                valid_user_header,
                path_sent)

    def test_if_leader_is_nonlocal_state_is_handled(
            self, nginx_class, valid_user_header, dns_server_mock):
        # Leader is another master: the request is forwarded verbatim to the
        # leader's AR on port 80, with DCOS-Forwarded marking the hop so the
        # receiving AR can detect proxy loops.
        ar = nginx_class()
        path_sent = '/dcos-history-service/foo/bar?a1=GET+param&a2=foobarism'
        path_expected = '/dcos-history-service/foo/bar?a1=GET+param&a2=foobarism'
        dns_server_mock.set_dns_entry('leader.mesos.', ip='127.0.0.3')

        with GuardedSubprocess(ar):
            generic_correct_upstream_dest_test(
                ar,
                valid_user_header,
                path_sent,
                "http://127.0.0.3:80")
            generic_correct_upstream_request_test(
                ar,
                valid_user_header,
                path_sent,
                path_expected)
            generic_upstream_headers_verify_test(
                ar,
                valid_user_header,
                path_sent,
                assert_headers={"DCOS-Forwarded": "true"})

    def test_if_proxy_loop_is_handled(
            self, nginx_class, valid_user_header, dns_server_mock):
        # A request already carrying DCOS-Forwarded must not be forwarded
        # again, even though the leader appears to be non-local.
        ar = nginx_class()
        url = ar.make_url_from_path('/dcos-history-service/foo/bar')
        dns_server_mock.set_dns_entry('leader.mesos.', ip='127.0.0.3')

        h = valid_user_header
        h.update({"DCOS-Forwarded": "true"})

        with GuardedSubprocess(ar):
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=h)

            assert resp.status_code == 503
            assert 'mesos leader is unknown' in resp.text
class TestMetadata:
    # Tests for the `/metadata` endpoint of the master Admin Router:
    # public-IP detection, cluster-id reporting, and failure handling.

    @pytest.mark.parametrize("public_ip", ['1.2.3.4', "10.20.20.30"])
    def test_if_public_ip_detection_works(
            self, master_ar_process_perclass, valid_user_header, public_ip):
        # The ip-detect script output (mocked via the overridden file) must
        # surface as the PUBLIC_IPV4 field of the JSON response.
        url = master_ar_process_perclass.make_url_from_path('/metadata')
        with overridden_file_content(
                '/usr/local/detect_ip_public_data.txt',
                "return ip {}".format(public_ip)):
            resp = requests.get(
                url,
                allow_redirects=False,
                headers=valid_user_header)

            assert resp.status_code == 200
            resp_data = resp.json()
            assert resp_data['PUBLIC_IPV4'] == public_ip

    def test_if_clusterid_is_returned(
            self, master_ar_process_perclass, valid_user_header):
        # CLUSTER_ID is read from /var/lib/dcos/cluster-id; first check the
        # default test value, then confirm an override is picked up.
        url = master_ar_process_perclass.make_url_from_path('/metadata')
        resp = requests.get(
            url,
            allow_redirects=False,
            headers=valid_user_header)

        assert resp.status_code == 200
        resp_data = resp.json()
        assert resp_data['CLUSTER_ID'] == 'fdb1d7c0-06cf-4d65-bb9b-a8920bb854ef'

        with overridden_file_content(
                '/var/lib/dcos/cluster-id',
                "fd21689b-4fe2-4779-8c30-9125149eef11"):
            resp = requests.get(
                url,
                allow_redirects=False,
                headers=valid_user_header)

            assert resp.status_code == 200
            resp_data = resp.json()
            assert resp_data['CLUSTER_ID'] == "fd21689b-4fe2-4779-8c30-9125149eef11"

    def test_if_missing_clusterid_file_is_handled(
            self, master_ar_process_perclass, valid_user_header):
        # A missing cluster-id file must not break the endpoint; the field
        # is simply omitted from the response.
        url = master_ar_process_perclass.make_url_from_path('/metadata')
        with overridden_file_content('/var/lib/dcos/cluster-id'):
            # overridden_file_content restores the file afterwards, so the
            # unlink here only removes it for the duration of this block.
            os.unlink('/var/lib/dcos/cluster-id')
            resp = requests.get(
                url,
                allow_redirects=False,
                headers=valid_user_header)

            assert resp.status_code == 200
            resp_data = resp.json()
            assert 'CLUSTER_ID' not in resp_data

    def test_if_public_ip_detect_script_failue_is_handled(
            self, master_ar_process_perclass, valid_user_header):
        # NOTE: "failue" typo is part of the test name; kept for stability.
        # A crashing ip-detect script must be logged (Traceback +
        # FileNotFoundError expected exactly once each in AR's stderr) and
        # the endpoint must fall back to 127.0.0.1.
        url = master_ar_process_perclass.make_url_from_path('/metadata')
        filter_regexp = {
            'Traceback \(most recent call last\):': SearchCriteria(1, True),
            ("FileNotFoundError: \[Errno 2\] No such file or directory:"
             " '/usr/local/detect_ip_public_data.txt'"): SearchCriteria(1, True),
        }
        lbf = LineBufferFilter(filter_regexp,
                               line_buffer=master_ar_process_perclass.stderr_line_buffer)

        # The log filter must be active while the request runs.
        with lbf, overridden_file_content('/usr/local/detect_ip_public_data.txt'):
            os.unlink('/usr/local/detect_ip_public_data.txt')
            resp = requests.get(
                url,
                allow_redirects=False,
                headers=valid_user_header)

        assert resp.status_code == 200
        assert lbf.extra_matches == {}
        resp_data = resp.json()
        assert resp_data['PUBLIC_IPV4'] == "127.0.0.1"

    @pytest.mark.xfail(reason="Needs some refactoring, tracked in DCOS_OSS-1007")
    def test_if_public_ip_detect_script_execution_is_timed_out(
            self, master_ar_process_perclass, valid_user_header):
        # A hanging ip-detect script must be cut off well before the 10s
        # the mocked script would sleep.
        url = master_ar_process_perclass.make_url_from_path('/metadata')

        ts_start = time.time()
        with overridden_file_content('/usr/local/detect_ip_public_data.txt',
                                     "timeout 10"):
            requests.get(
                url,
                allow_redirects=False,
                headers=valid_user_header)
            ts_total = time.time() - ts_start

            assert ts_total < 10
            # TODO (prozlach): tune it a bit
            # assert resp.status_code == 200
            # resp_data = resp.json()
            # assert resp_data['PUBLIC_IPV4'] == "127.0.0.1"

    @pytest.mark.xfail(reason="Needs some refactoring, tracked in DCOS_OSS-1007")
    def test_if_public_ip_detect_script_nonzero_exit_status_is_handled(
            self, master_ar_process_perclass, valid_user_header):
        # A failing (non-zero exit) ip-detect script must trigger the same
        # 127.0.0.1 fallback.
        url = master_ar_process_perclass.make_url_from_path('/metadata')
        with overridden_file_content(
                '/usr/local/detect_ip_public_data.txt',
                "break with 1"):
            resp = requests.get(
                url,
                allow_redirects=False,
                headers=valid_user_header)

            assert resp.status_code == 200
            resp_data = resp.json()
            assert resp_data['PUBLIC_IPV4'] == "127.0.0.1"
class TestUiRoot:
    """Tests for static DC/OS UI assets served by the master Admin Router."""

    @pytest.mark.parametrize("uniq_content", ["(。◕‿‿◕。)", "plain text 1234"])
    @pytest.mark.parametrize("path", ["plain-ui-testfile.html",
                                      "nest1/nested-ui-testfile.html"])
    def test_if_ui_files_are_handled(
            self,
            master_ar_process_perclass,
            valid_user_header,
            uniq_content,
            path):
        # Plant a file with known (possibly non-ASCII) content into the UI
        # docroot and verify it is served back verbatim, with the
        # clickjacking-protection header set.
        request_url = master_ar_process_perclass.make_url_from_path(
            '/{}'.format(path))
        docroot_file = '/opt/mesosphere/active/dcos-ui/usr/{}'.format(path)
        with overridden_file_content(docroot_file, uniq_content):
            resp = requests.get(
                request_url,
                allow_redirects=False,
                headers=valid_user_header)

            assert resp.status_code == 200
            resp.encoding = 'utf-8'
            assert resp.text == uniq_content
            verify_header(resp.headers.items(), 'X-Frame-Options', 'DENY')
class TestMisc:
    """Assorted master Admin Router endpoint tests."""

    @pytest.mark.parametrize("content", ["{'data': '1234'}", "{'data': 'abcd'}"])
    def test_if_buildinfo_is_served(
            self, master_ar_process_perclass, valid_user_header, content):
        # pkgpanda build info must be passed through to the client unchanged.
        url = master_ar_process_perclass.make_url_from_path(
            '/pkgpanda/active.buildinfo.full.json')
        with overridden_file_content(
                '/opt/mesosphere/active.buildinfo.full.json', content):
            resp = requests.get(
                url, allow_redirects=False, headers=valid_user_header)

            assert resp.status_code == 200
            assert resp.text == content

    @pytest.mark.parametrize("content", ["{'data': '1234'}", "{'data': 'abcd'}"])
    def test_if_dcos_metadata_is_served(
            self, master_ar_process_perclass, valid_user_header, content):
        # dcos-version.json must likewise be served verbatim.
        url = master_ar_process_perclass.make_url_from_path(
            '/dcos-metadata/dcos-version.json')
        with overridden_file_content(
                '/opt/mesosphere/active/dcos-metadata/etc/dcos-version.json',
                content):
            resp = requests.get(
                url, allow_redirects=False, headers=valid_user_header)

            assert resp.status_code == 200
            assert resp.text == content

    def test_if_xaccel_header_is_passed_to_client_by_ar(
            self,
            master_ar_process_perclass,
            valid_user_header,
            mocker):
        # An X-Accel-Buffering header set by the upstream must survive the
        # trip through Admin Router and reach the client.
        accel_buff_header = {"X-Accel-Buffering": "TEST"}

        mocker.send_command(
            endpoint_id='http:///run/dcos/dcos-log.sock',
            func_name='set_response_headers',
            aux_data=accel_buff_header,
        )

        generic_verify_response_test(
            master_ar_process_perclass,
            valid_user_header,
            '/system/v1/logs/foo/bar',
            assert_headers=accel_buff_header)
| apache-2.0 |
sbagdadi-gpsw/custom-prometheus-client | spec/spec_helper.rb | 203 | # encoding: UTF-8
require 'simplecov'
require 'coveralls'

# On CI, push coverage data to Coveralls; locally, emit the HTML report.
SimpleCov.formatter =
  ENV['CI'] ? Coveralls::SimpleCov::Formatter : SimpleCov::Formatter::HTMLFormatter

SimpleCov.start
| apache-2.0 |
youzan/zan | php-test/apitest/swoole_http_server/simple_https_server.php | 704 | <?php
// Minimal HTTPS test server built on the shared HttpServer helper.
require_once __DIR__ . "/http_server.php";

/*
API sketch of the swoole HTTP classes this server exercises:

class swoole_http_server extends swoole_server
{
    public function on($name, $cb) {} // accepts different event names than the tcp server's on()
}

class swoole_http_response
{
    public function cookie() {}
    public function rawcookie() {}
    public function status() {}
    public function gzip() {}
    public function header() {}
    public function write() {}
    public function end() {}
    public function sendfile() {}
}

class swoole_http_request
{
    public function rawcontent() {}
}
*/

// Host/port come from argv with fallbacks to the shared test constants.
$host = isset($argv[1]) ? $argv[1] : HTTP_SERVER_HOST;
$port = isset($argv[2]) ? $argv[2] : HTTP_SERVER_PORT;

// Third argument presumably toggles SSL (file is "simple_https_server")
// -- TODO confirm against the HttpServer constructor in http_server.php.
(new HttpServer($host, $port, true))->start();
idea4bsd/idea4bsd | RegExpSupport/gen/org/intellij/lang/regexp/_RegExLexer.java | 43798 | /* The following code was generated by JFlex 1.7.0-SNAPSHOT tweaked for IntelliJ platform */
/* It's an automatically generated code. Do not modify it. */
package org.intellij.lang.regexp;
import com.intellij.lexer.FlexLexer;
import com.intellij.psi.StringEscapesTokenTypes;
import com.intellij.psi.tree.IElementType;
import java.util.ArrayList;
import java.util.EnumSet;
@SuppressWarnings("ALL")
/**
* This class is a scanner generated by
* <a href="http://www.jflex.de/">JFlex</a> 1.7.0-SNAPSHOT
* from the specification file <tt>regexp-lexer.flex</tt>
*/
class _RegExLexer implements FlexLexer {
/** This character denotes the end of file */
public static final int YYEOF = -1;
/** initial size of the lookahead buffer */
private static final int ZZ_BUFFERSIZE = 16384;
/** lexical states */
public static final int YYINITIAL = 0;
public static final int QUOTED = 2;
public static final int EMBRACED = 4;
public static final int CLASS1 = 6;
public static final int NEGATE_CLASS1 = 8;
public static final int CLASS2 = 10;
public static final int PROP = 12;
public static final int NAMED = 14;
public static final int OPTIONS = 16;
public static final int COMMENT = 18;
public static final int NAMED_GROUP = 20;
public static final int QUOTED_NAMED_GROUP = 22;
public static final int PY_NAMED_GROUP_REF = 24;
public static final int PY_COND_REF = 26;
public static final int BRACKET_EXPRESSION = 28;
/**
* ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l
* ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l
* at the beginning of a line
* l is of the form l = 2*k, k a non negative integer
*/
private static final int ZZ_LEXSTATE[] = {
0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7,
8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14
};
/**
 * Translates characters to character classes.
 *
 * The map is stored as a three-level trie (ZZ_CMAP_Z -> ZZ_CMAP_Y ->
 * ZZ_CMAP_A) indexed by bit slices [11, 6, 4] of the code point.
 * Total runtime size is 13792 bytes.
 *
 * Generated by JFlex -- do not edit by hand.
 */
public static int ZZ_CMAP(int ch) {
  return ZZ_CMAP_A[(ZZ_CMAP_Y[(ZZ_CMAP_Z[ch>>10]<<6)|((ch>>4)&0x3f)]<<4)|(ch&0xf)];
}
/* The ZZ_CMAP_Z table has 1088 entries */
static final char ZZ_CMAP_Z[] = zzUnpackCMap(
"\1\0\1\1\1\2\1\3\1\4\1\5\1\6\1\7\1\10\2\11\1\12\1\13\6\14\1\15\23\14\1\16"+
"\1\14\1\17\1\20\12\14\1\21\10\11\1\22\1\23\1\24\1\25\1\26\1\27\1\30\1\31\1"+
"\32\1\11\1\33\1\34\2\11\1\14\1\35\3\11\1\36\10\11\1\37\1\40\20\11\1\41\2\11"+
"\1\42\5\11\1\43\4\11\1\44\1\45\4\11\51\14\1\46\3\14\1\47\1\50\4\14\1\51\12"+
"\11\1\52\u0381\11");
/* The ZZ_CMAP_Y table has 2752 entries */
static final char ZZ_CMAP_Y[] = zzUnpackCMap(
"\1\0\1\1\1\2\1\3\1\4\1\5\1\6\1\7\1\10\1\1\1\11\1\12\1\13\1\14\1\13\1\14\34"+
"\13\1\15\1\16\1\17\10\1\1\20\1\21\1\13\1\22\4\13\1\23\10\13\1\24\12\13\1\25"+
"\1\13\1\26\1\25\1\13\1\27\4\1\1\13\1\30\1\31\2\1\2\13\1\30\1\1\1\32\1\25\5"+
"\13\1\33\1\34\1\35\1\1\1\36\1\13\1\1\1\37\5\13\1\40\1\41\1\42\1\13\1\30\1"+
"\43\1\13\1\44\1\45\1\1\1\13\1\46\4\1\1\13\1\47\4\1\1\50\2\13\1\51\1\1\1\52"+
"\1\53\1\25\1\54\1\55\1\56\1\57\1\60\1\61\1\53\1\16\1\62\1\55\1\56\1\63\1\1"+
"\1\64\1\65\1\66\1\67\1\22\1\56\1\70\1\1\1\71\1\53\1\72\1\73\1\55\1\56\1\70"+
"\1\1\1\61\1\53\1\41\1\74\1\75\1\76\1\77\1\1\1\71\1\65\1\1\1\100\1\36\1\56"+
"\1\51\1\1\1\101\1\53\1\1\1\100\1\36\1\56\1\102\1\1\1\60\1\53\1\103\1\100\1"+
"\36\1\13\1\104\1\60\1\105\1\53\1\106\1\107\1\110\1\13\1\111\1\112\1\1\1\65"+
"\1\1\1\25\2\13\1\113\1\112\1\114\2\1\1\115\1\116\1\117\1\120\1\121\1\122\2"+
"\1\1\71\1\1\1\114\1\1\1\123\1\13\1\124\1\1\1\125\7\1\2\13\1\30\1\105\1\114"+
"\1\126\1\127\1\130\1\131\1\114\2\13\1\132\2\13\1\133\24\13\1\134\1\135\2\13"+
"\1\134\2\13\1\136\1\137\1\14\3\13\1\137\3\13\1\30\2\1\1\13\1\1\5\13\1\140"+
"\1\25\45\13\1\141\1\13\1\25\1\30\4\13\1\30\1\142\1\143\1\16\1\13\1\16\1\13"+
"\1\16\1\143\1\71\3\13\1\144\1\1\1\145\1\114\2\1\1\114\5\13\1\27\2\13\1\146"+
"\4\13\1\40\1\13\1\147\2\1\1\65\1\13\1\150\1\47\2\13\1\151\1\13\1\77\1\114"+
"\2\1\1\13\1\112\3\13\1\47\2\1\2\114\1\152\5\1\1\107\2\13\1\144\1\153\1\114"+
"\2\1\1\154\1\13\1\155\1\42\2\13\1\40\1\1\2\13\1\144\1\1\1\156\1\42\1\13\1"+
"\150\6\1\1\157\1\160\14\13\4\1\21\13\1\140\2\13\1\140\1\161\1\13\1\150\3\13"+
"\1\162\1\163\1\164\1\124\1\163\2\1\1\165\4\1\1\166\1\1\1\124\6\1\1\167\1\170"+
"\1\171\1\172\1\173\3\1\1\174\147\1\2\13\1\147\2\13\1\147\10\13\1\175\1\176"+
"\2\13\1\132\3\13\1\177\1\1\1\13\1\112\4\200\4\1\1\105\35\1\1\201\2\1\1\202"+
"\1\25\4\13\1\203\1\25\4\13\1\133\1\107\1\13\1\150\1\25\4\13\1\147\1\1\1\13"+
"\1\30\3\1\1\13\40\1\133\13\1\40\4\1\135\13\1\40\2\1\10\13\1\124\4\1\2\13\1"+
"\150\20\13\1\124\1\13\1\204\1\1\2\13\1\147\1\105\1\13\1\150\4\13\1\40\2\1"+
"\1\205\1\206\5\13\1\207\1\13\1\150\1\27\3\1\1\205\1\210\1\13\1\31\1\1\3\13"+
"\1\144\1\206\2\13\1\144\1\1\1\114\1\1\1\211\1\42\1\13\1\40\1\13\1\112\1\1"+
"\1\13\1\124\1\50\2\13\1\31\1\105\1\114\1\212\1\213\2\13\1\46\1\1\1\214\1\114"+
"\1\13\1\215\3\13\1\216\1\217\1\220\1\30\1\66\1\221\1\222\1\200\2\13\1\133"+
"\1\40\7\13\1\31\1\114\72\13\1\144\1\13\1\223\2\13\1\151\20\1\26\13\1\150\6"+
"\13\1\77\2\1\1\112\1\224\1\56\1\225\1\226\6\13\1\16\1\1\1\154\25\13\1\150"+
"\1\1\4\13\1\206\2\13\1\27\2\1\1\151\7\1\1\212\7\13\1\124\1\1\1\114\1\25\1"+
"\30\1\25\1\30\1\227\4\13\1\147\1\230\1\231\2\1\1\232\1\13\1\14\1\233\2\150"+
"\2\1\7\13\1\30\30\1\1\13\1\124\3\13\1\71\2\1\2\13\1\1\1\13\1\234\2\13\1\40"+
"\1\13\1\150\2\13\1\235\3\1\11\13\1\150\1\114\5\1\2\13\1\27\3\13\1\144\11\1"+
"\23\13\1\112\1\13\1\40\1\27\11\1\1\236\2\13\1\237\1\13\1\40\1\13\1\112\1\13"+
"\1\147\4\1\1\13\1\240\1\13\1\40\1\13\1\77\4\1\3\13\1\241\4\1\1\71\1\242\1"+
"\13\1\144\2\1\1\13\1\124\1\13\1\124\2\1\1\123\1\13\1\47\1\1\3\13\1\40\1\13"+
"\1\40\1\13\1\31\1\13\1\16\6\1\4\13\1\46\3\1\3\13\1\31\3\13\1\31\60\1\1\154"+
"\2\13\1\27\2\1\1\65\1\1\1\154\2\13\2\1\1\13\1\46\1\114\1\154\1\13\1\112\1"+
"\65\1\1\2\13\1\243\1\154\2\13\1\31\1\244\1\245\2\1\1\13\1\22\1\151\5\1\1\246"+
"\1\247\1\46\2\13\1\147\1\1\1\114\1\73\1\55\1\56\1\70\1\1\1\250\1\16\21\1\3"+
"\13\1\1\1\251\1\114\12\1\2\13\1\147\2\1\1\252\2\1\3\13\1\1\1\253\1\114\2\1"+
"\2\13\1\30\1\1\1\114\3\1\1\13\1\77\1\1\1\114\26\1\4\13\1\114\1\105\34\1\3"+
"\13\1\46\20\1\71\13\1\77\16\1\14\13\1\144\53\1\2\13\1\147\75\1\44\13\1\112"+
"\33\1\43\13\1\46\1\13\1\147\1\114\6\1\1\13\1\150\1\1\3\13\1\1\1\144\1\114"+
"\1\154\1\254\1\13\67\1\4\13\1\47\1\71\3\1\1\154\6\1\1\16\77\1\6\13\1\30\1"+
"\124\1\46\1\77\66\1\5\13\1\212\3\13\1\143\1\255\1\256\1\257\3\13\1\260\1\261"+
"\1\13\1\262\1\263\1\36\24\13\1\264\1\13\1\36\1\133\1\13\1\133\1\13\1\212\1"+
"\13\1\212\1\147\1\13\1\147\1\13\1\56\1\13\1\56\1\13\1\265\3\266\14\13\1\47"+
"\123\1\1\257\1\13\1\267\1\270\1\271\1\272\1\273\1\274\1\275\1\151\1\276\1"+
"\151\24\1\55\13\1\112\2\1\103\13\1\47\15\13\1\150\150\13\1\16\25\1\41\13\1"+
"\150\36\1");
/* The ZZ_CMAP_A table has 3056 entries */
static final char ZZ_CMAP_A[] = zzUnpackCMap(
"\10\0\2\60\1\63\1\64\1\65\1\63\22\0\1\16\1\72\1\0\1\73\1\21\1\0\1\67\1\57"+
"\1\4\1\5\1\23\1\24\1\62\1\15\1\3\1\0\1\2\7\52\2\1\1\66\1\0\1\56\1\71\1\70"+
"\1\22\1\0\2\32\1\41\1\37\1\47\1\45\1\33\1\53\1\42\2\13\2\61\1\54\1\13\1\44"+
"\1\46\1\35\1\36\2\13\3\35\1\13\1\34\1\10\1\12\1\11\1\20\1\14\1\0\1\27\1\30"+
"\1\40\1\37\2\27\1\31\1\53\1\42\1\13\1\55\2\13\1\26\1\13\1\43\1\13\1\26\1\35"+
"\1\26\1\51\2\35\1\50\1\13\1\33\1\6\1\25\1\7\7\0\1\64\24\0\1\13\12\0\1\13\4"+
"\0\1\13\5\0\27\13\1\0\12\13\4\0\14\13\16\0\5\13\7\0\1\13\1\0\1\13\1\0\5\13"+
"\1\0\2\13\2\0\4\13\1\0\1\13\6\0\1\13\1\0\3\13\1\0\1\13\1\0\4\13\1\0\23\13"+
"\1\0\13\13\10\0\6\13\1\0\26\13\2\0\1\13\6\0\10\13\10\0\13\13\5\0\3\13\15\0"+
"\12\17\4\0\6\13\1\0\1\13\17\0\2\13\7\0\2\13\12\17\3\13\2\0\2\13\1\0\16\13"+
"\15\0\11\13\13\0\1\13\16\0\12\17\6\13\4\0\2\13\4\0\1\13\5\0\6\13\4\0\1\13"+
"\11\0\1\13\3\0\1\13\7\0\11\13\7\0\5\13\17\0\26\13\3\0\1\13\2\0\1\13\7\0\12"+
"\13\4\0\12\17\1\13\4\0\10\13\2\0\2\13\2\0\26\13\1\0\7\13\1\0\1\13\3\0\4\13"+
"\3\0\1\13\20\0\1\13\15\0\2\13\1\0\1\13\5\0\6\13\4\0\2\13\1\0\2\13\1\0\2\13"+
"\1\0\2\13\17\0\4\13\1\0\1\13\7\0\12\17\2\0\3\13\20\0\11\13\1\0\2\13\1\0\2"+
"\13\1\0\5\13\3\0\1\13\2\0\1\13\30\0\1\13\13\0\10\13\2\0\1\13\3\0\1\13\1\0"+
"\6\13\3\0\3\13\1\0\4\13\3\0\2\13\1\0\1\13\1\0\2\13\3\0\2\13\3\0\3\13\3\0\14"+
"\13\13\0\10\13\1\0\2\13\10\0\3\13\5\0\4\13\1\0\5\13\3\0\1\13\3\0\2\13\15\0"+
"\13\13\2\0\1\13\21\0\1\13\12\0\6\13\5\0\22\13\3\0\10\13\1\0\11\13\1\0\1\13"+
"\2\0\7\13\11\0\1\13\1\0\2\13\14\0\12\17\7\0\2\13\1\0\1\13\2\0\2\13\1\0\1\13"+
"\2\0\1\13\6\0\4\13\1\0\7\13\1\0\3\13\1\0\1\13\1\0\1\13\2\0\2\13\1\0\4\13\1"+
"\0\2\13\11\0\1\13\2\0\5\13\1\0\1\13\11\0\12\17\2\0\14\13\1\0\24\13\13\0\5"+
"\13\3\0\6\13\4\0\4\13\3\0\1\13\3\0\2\13\7\0\3\13\4\0\15\13\14\0\1\13\1\0\6"+
"\13\1\0\1\13\5\0\1\13\2\0\13\13\1\0\15\13\1\0\4\13\2\0\7\13\1\0\1\13\1\0\4"+
"\13\2\0\1\13\1\0\4\13\2\0\7\13\1\0\1\13\1\0\4\13\2\0\16\13\2\0\6\13\2\0\15"+
"\13\2\0\1\13\1\0\10\13\7\0\15\13\1\0\6\13\23\0\1\13\4\0\1\13\3\0\11\13\1\0"+
"\1\13\5\0\17\13\1\0\16\13\2\0\14\13\13\0\1\13\15\0\7\13\7\0\16\13\15\0\2\13"+
"\12\17\3\0\3\13\11\0\4\13\1\0\4\13\3\0\2\13\11\0\10\13\1\0\1\13\1\0\1\13\1"+
"\0\1\13\1\0\6\13\1\0\7\13\1\0\1\13\3\0\3\13\1\0\7\13\3\0\4\13\2\0\6\13\14"+
"\0\2\64\7\0\1\13\15\0\1\13\2\0\1\13\4\0\1\13\2\0\12\13\1\0\1\13\3\0\5\13\6"+
"\0\1\13\1\0\1\13\1\0\1\13\1\0\4\13\1\0\13\13\2\0\4\13\5\0\5\13\4\0\1\13\4"+
"\0\2\13\13\0\5\13\6\0\4\13\3\0\2\13\14\0\10\13\7\0\10\13\1\0\7\13\6\0\2\13"+
"\12\0\5\13\5\0\2\13\3\0\7\13\6\0\3\13\12\17\2\13\13\0\11\13\2\0\27\13\2\0"+
"\7\13\1\0\3\13\1\0\4\13\1\0\4\13\2\0\6\13\3\0\1\13\1\0\1\13\2\0\5\13\1\0\12"+
"\13\12\17\5\13\1\0\3\13\1\0\10\13\4\0\7\13\3\0\1\13\3\0\2\13\1\0\1\13\3\0"+
"\2\13\2\0\5\13\2\0\1\13\1\0\1\13\30\0\3\13\3\0\6\13\2\0\6\13\2\0\6\13\11\0"+
"\7\13\4\0\5\13\3\0\5\13\5\0\1\13\1\0\10\13\1\0\5\13\1\0\1\13\1\0\2\13\1\0"+
"\2\13\1\0\12\13\6\0\12\13\2\0\6\13\2\0\6\13\2\0\6\13\2\0\3\13\3\0\14\13\1"+
"\0\16\13\1\0\2\13\1\0\2\13\1\0\10\13\6\0\4\13\4\0\16\13\2\0\1\13\1\0\14\13"+
"\1\0\2\13\3\0\1\13\2\0\4\13\1\0\2\13\12\0\10\13\6\0\6\13\1\0\3\13\1\0\12\13"+
"\3\0\1\13\12\0\4\13\13\0\12\17\1\13\1\0\1\13\3\0\7\13\1\0\1\13\1\0\4\13\1"+
"\0\17\13\1\0\2\13\14\0\3\13\4\0\2\13\1\0\1\13\20\0\4\13\10\0\1\13\13\0\10"+
"\13\5\0\3\13\2\0\1\13\2\0\2\13\2\0\4\13\1\0\14\13\1\0\1\13\1\0\7\13\1\0\21"+
"\13\1\0\4\13\2\0\10\13\1\0\7\13\1\0\14\13\1\0\4\13\1\0\5\13\1\0\1\13\3\0\14"+
"\13\2\0\13\13\1\0\10\13\2\0\22\17\1\0\2\13\1\0\1\13\2\0\1\13\1\0\12\13\1\0"+
"\4\13\1\0\1\13\1\0\1\13\6\0\1\13\4\0\1\13\1\0\1\13\1\0\1\13\1\0\3\13\1\0\2"+
"\13\1\0\1\13\2\0\1\13\1\0\1\13\1\0\1\13\1\0\1\13\1\0\1\13\1\0\2\13\1\0\1\13"+
"\2\0\4\13\1\0\7\13\1\0\4\13\1\0\4\13\1\0\1\13\1\0\12\13\1\0\5\13\1\0\3\13"+
"\1\0\5\13\1\0\5\13");
/**
* Translates DFA states to action switch labels.
*/
private static final int [] ZZ_ACTION = zzUnpackAction();
private static final String ZZ_ACTION_PACKED_0 =
"\11\0\1\1\5\0\1\2\1\3\1\4\1\5\1\6"+
"\1\7\1\10\1\11\1\12\1\13\1\14\1\15\1\16"+
"\1\17\1\20\1\21\1\22\1\2\1\23\1\24\1\25"+
"\1\26\1\27\1\30\1\31\1\30\1\32\1\33\1\34"+
"\1\10\1\35\1\2\1\36\1\23\1\37\1\40\1\41"+
"\1\42\1\43\1\44\1\1\1\45\1\46\2\47\1\50"+
"\1\51\2\0\1\52\1\53\1\54\1\55\1\56\1\10"+
"\1\57\1\60\1\61\1\62\1\10\1\62\1\63\2\64"+
"\1\65\1\66\1\67\1\70\1\71\1\72\1\10\1\73"+
"\1\74\1\75\1\0\1\76\1\77\1\100\1\101\1\0"+
"\1\102\1\103\1\104\1\105\1\106\1\0\1\107\1\0"+
"\1\110\1\0\1\111\1\112\1\113\2\67\1\70\1\114"+
"\1\115\1\116\1\117\1\120\1\121\1\122\1\0\1\110"+
"\1\0\1\123\2\67\1\70\1\124\1\110\1\67\1\70"+
"\1\67\1\125\4\67";
/**
 * Unpacks the compressed per-state action table (135 DFA states).
 * Generated by JFlex -- do not edit by hand.
 */
private static int [] zzUnpackAction() {
  int [] result = new int[135];
  int offset = 0;
  offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
  return result;
}

/**
 * Decodes one run-length-encoded packed string into {@code result}.
 *
 * @param packed RLE data: alternating (count, value) chars
 * @param offset first index in {@code result} to write
 * @param result target array
 * @return index just past the last element written
 */
private static int zzUnpackAction(String packed, int offset, int [] result) {
  int i = 0;       /* index in packed string  */
  int j = offset;  /* index in unpacked array */
  int l = packed.length();
  while (i < l) {
    int count = packed.charAt(i++);
    int value = packed.charAt(i++);
    do result[j++] = value; while (--count > 0);
  }
  return j;
}
/**
* Translates a state to a row index in the transition table
*/
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();
private static final String ZZ_ROWMAP_PACKED_0 =
"\0\0\0\74\0\170\0\264\0\360\0\u012c\0\u0168\0\u01a4"+
"\0\u01e0\0\u021c\0\u0258\0\u0294\0\u02d0\0\u030c\0\u0348\0\u0384"+
"\0\u0384\0\u03c0\0\u0384\0\u0384\0\u03fc\0\u0438\0\u0384\0\u0384"+
"\0\u0384\0\u0384\0\u0384\0\u0384\0\u0384\0\u0384\0\u0384\0\u0384"+
"\0\u0474\0\u0384\0\u04b0\0\u0384\0\u04ec\0\u0384\0\u0384\0\u0384"+
"\0\u0528\0\u0384\0\u0564\0\u0384\0\u05a0\0\u0384\0\u05dc\0\u0384"+
"\0\u03fc\0\u0384\0\u0384\0\u0384\0\u0618\0\u0654\0\u0384\0\u0384"+
"\0\u0384\0\u0384\0\u0384\0\u0690\0\u06cc\0\u0384\0\u0708\0\u0744"+
"\0\u0384\0\u0780\0\u07bc\0\u0384\0\u0384\0\u0384\0\u0384\0\u0384"+
"\0\u0384\0\u07f8\0\u0834\0\u0384\0\u0384\0\u0870\0\u0384\0\u0384"+
"\0\u0384\0\u08ac\0\u08e8\0\u0384\0\u0384\0\u0924\0\u0384\0\u0384"+
"\0\u0384\0\u0960\0\u0384\0\u0384\0\u0384\0\u0384\0\u099c\0\u09d8"+
"\0\u0384\0\u0384\0\u0384\0\u0384\0\u0a14\0\u0384\0\u0a50\0\u0a8c"+
"\0\u0ac8\0\u0384\0\u0384\0\u0384\0\u0b04\0\u0b40\0\u0b7c\0\u0384"+
"\0\u0384\0\u0384\0\u0384\0\u0384\0\u0384\0\u0384\0\u0bb8\0\u0bf4"+
"\0\u0c30\0\u0384\0\u0c6c\0\u0384\0\u0ca8\0\u0384\0\u0384\0\u0ce4"+
"\0\u0d20\0\u0d5c\0\u0384\0\u0d98\0\u0dd4\0\u0e10\0\u0e4c";
/**
 * Unpacks the state-to-transition-table-row map (135 DFA states).
 * Generated by JFlex -- do not edit by hand.
 */
private static int [] zzUnpackRowMap() {
  int [] result = new int[135];
  int offset = 0;
  offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
  return result;
}

/**
 * Decodes the row map: each entry is stored as two chars forming one
 * int (high 16 bits, then low 16 bits) -- not run-length encoded.
 *
 * @return index just past the last element written
 */
private static int zzUnpackRowMap(String packed, int offset, int [] result) {
  int i = 0;       /* index in packed string  */
  int j = offset;  /* index in unpacked array */
  int l = packed.length();
  while (i < l) {
    int high = packed.charAt(i++) << 16;
    result[j++] = high | packed.charAt(i++);
  }
  return j;
}
/**
* The transition table of the DFA
*/
private static final int [] ZZ_TRANS = zzUnpackTrans();
private static final String ZZ_TRANS_PACKED_0 =
"\3\20\1\21\1\22\1\23\1\24\1\20\1\25\1\20"+
"\1\26\2\20\1\27\1\30\1\20\1\31\1\32\1\33"+
"\1\34\1\35\1\36\32\20\1\37\2\20\1\37\1\20"+
"\1\37\5\20\1\40\12\20\1\41\61\20\1\42\2\43"+
"\4\42\1\44\3\42\1\45\3\42\1\43\1\31\5\42"+
"\24\45\1\43\3\45\3\42\1\45\1\46\11\42\11\47"+
"\1\50\1\51\50\47\3\0\26\47\1\52\42\47\3\0"+
"\6\47\6\20\1\24\1\20\1\53\1\54\1\55\2\20"+
"\1\27\2\20\1\31\37\20\1\56\2\20\1\56\1\20"+
"\1\56\1\20\1\57\4\20\6\42\1\60\1\42\1\61"+
"\1\42\1\26\21\42\1\62\1\42\1\62\2\42\1\62"+
"\2\42\1\62\7\42\1\62\4\42\1\62\20\42\1\60"+
"\1\42\1\61\1\42\1\26\61\42\5\63\1\64\5\63"+
"\1\65\1\63\1\66\10\63\24\65\1\63\3\65\3\63"+
"\1\65\4\63\1\67\5\63\63\12\1\70\10\12\13\63"+
"\1\45\12\63\24\45\1\63\3\45\3\63\1\45\6\63"+
"\1\71\16\63\1\45\12\63\24\45\1\63\3\45\1\63"+
"\1\72\1\63\1\45\17\63\1\64\5\63\1\45\12\63"+
"\24\45\1\63\3\45\3\63\1\45\13\63\2\43\2\63"+
"\1\64\5\63\1\45\3\63\1\43\6\63\24\45\1\43"+
"\3\45\3\63\1\45\12\63\13\73\1\45\4\73\1\31"+
"\5\73\24\45\1\73\3\45\3\73\1\45\4\73\1\74"+
"\5\73\116\0\1\75\62\0\1\76\1\77\5\0\1\100"+
"\53\0\1\101\1\102\1\103\4\104\1\101\1\105\1\101"+
"\1\105\1\106\1\101\1\107\1\110\1\101\1\105\5\104"+
"\2\111\1\112\1\113\3\114\3\115\1\116\2\117\2\120"+
"\1\106\1\121\1\106\1\122\1\123\1\102\1\124\1\125"+
"\1\126\2\101\1\110\1\106\1\101\1\110\1\101\1\110"+
"\6\101\47\0\1\127\25\0\2\43\14\0\1\43\32\0"+
"\1\43\22\0\2\45\1\0\2\45\5\0\5\45\6\0"+
"\30\45\3\0\1\45\23\0\1\130\73\0\1\131\1\132"+
"\5\0\1\100\45\0\1\133\5\0\1\101\1\102\1\103"+
"\4\104\1\101\3\105\1\106\1\101\1\107\1\110\1\101"+
"\1\105\5\104\2\111\1\112\1\113\3\114\3\115\1\116"+
"\2\117\2\120\1\106\1\121\1\106\1\122\1\123\1\102"+
"\1\124\1\125\1\126\2\101\1\110\1\106\1\101\1\110"+
"\1\101\1\110\6\101\67\0\1\134\17\0\1\65\12\0"+
"\24\65\1\0\3\65\3\0\1\65\25\0\1\66\12\0"+
"\24\66\1\0\3\66\3\0\1\66\23\0\1\135\66\0"+
"\1\136\37\0\1\137\11\0\1\140\1\141\6\0\1\142"+
"\1\0\1\142\1\143\1\144\1\145\11\0\1\76\73\0"+
"\1\146\1\147\62\0\2\102\47\0\1\102\23\0\1\150"+
"\47\0\1\150\27\0\1\151\143\0\1\152\1\153\14\0"+
"\74\154\1\0\2\155\3\0\1\156\20\0\2\155\1\0"+
"\1\155\4\0\3\155\3\0\1\155\1\0\1\155\2\0"+
"\1\155\22\0\2\157\24\0\2\157\1\0\1\157\4\0"+
"\3\157\3\0\1\157\1\0\1\157\2\0\1\157\77\0"+
"\1\160\1\161\25\0\1\162\140\0\1\163\12\0\1\164"+
"\73\0\1\165\1\166\1\0\5\167\1\0\66\167\11\0"+
"\1\146\64\0\1\170\47\0\1\170\52\0\1\171\43\0"+
"\2\172\24\0\2\172\1\0\1\172\4\0\3\172\3\0"+
"\1\172\1\0\1\172\2\0\1\172\22\0\2\173\4\0"+
"\1\174\17\0\2\173\1\0\1\173\4\0\3\173\3\0"+
"\1\173\1\0\1\173\2\0\1\173\22\0\2\175\24\0"+
"\2\175\1\0\1\175\4\0\3\175\3\0\1\175\1\0"+
"\1\175\2\0\1\175\21\0\5\167\1\176\66\167\2\0"+
"\1\177\47\0\1\177\30\0\1\114\65\0\2\200\4\0"+
"\1\172\17\0\2\200\1\0\1\200\4\0\3\200\3\0"+
"\1\200\1\0\1\200\2\0\1\200\22\0\2\201\24\0"+
"\2\201\1\0\1\201\4\0\3\201\3\0\1\201\1\0"+
"\1\201\2\0\1\201\22\0\2\202\4\0\1\172\17\0"+
"\2\202\1\0\1\202\4\0\3\202\3\0\1\202\1\0"+
"\1\202\2\0\1\202\22\0\2\203\24\0\2\203\1\0"+
"\1\203\4\0\3\203\3\0\1\203\1\0\1\203\2\0"+
"\1\203\22\0\2\204\4\0\1\172\17\0\2\204\1\0"+
"\1\204\4\0\3\204\3\0\1\204\1\0\1\204\2\0"+
"\1\204\22\0\2\205\4\0\1\172\17\0\2\205\1\0"+
"\1\205\4\0\3\205\3\0\1\205\1\0\1\205\2\0"+
"\1\205\22\0\2\206\4\0\1\172\17\0\2\206\1\0"+
"\1\206\4\0\3\206\3\0\1\206\1\0\1\206\2\0"+
"\1\206\22\0\2\207\4\0\1\172\17\0\2\207\1\0"+
"\1\207\4\0\3\207\3\0\1\207\1\0\1\207\2\0"+
"\1\207\22\0\2\207\4\0\1\174\17\0\2\207\1\0"+
"\1\207\4\0\3\207\3\0\1\207\1\0\1\207\2\0"+
"\1\207\21\0";
/**
 * Unpacks the DFA transition table (3720 entries).
 * Generated by JFlex -- do not edit by hand.
 */
private static int [] zzUnpackTrans() {
  int [] result = new int[3720];
  int offset = 0;
  offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
  return result;
}

/**
 * Decodes the RLE-packed transition data. Values are stored offset by
 * one in the packed string, so the decrement below restores them and
 * lets -1 (decoded from 0) mean "no transition" -- see the
 * {@code zzNext == -1} check in {@code advance()}.
 */
private static int zzUnpackTrans(String packed, int offset, int [] result) {
  int i = 0;       /* index in packed string  */
  int j = offset;  /* index in unpacked array */
  int l = packed.length();
  while (i < l) {
    int count = packed.charAt(i++);
    int value = packed.charAt(i++);
    value--;
    do result[j++] = value; while (--count > 0);
  }
  return j;
}
/* error codes */
private static final int ZZ_UNKNOWN_ERROR = 0;
private static final int ZZ_NO_MATCH = 1;
private static final int ZZ_PUSHBACK_2BIG = 2;
/* error messages for the codes above */
private static final String[] ZZ_ERROR_MSG = {
"Unknown internal scanner error",
"Error: could not match input",
"Error: pushback value was too large"
};
/**
* ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
*/
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();
private static final String ZZ_ATTRIBUTE_PACKED_0 =
"\11\0\1\1\5\0\2\11\1\1\2\11\2\1\12\11"+
"\1\1\1\11\1\1\1\11\1\1\3\11\1\1\1\11"+
"\1\1\1\11\1\1\1\11\1\1\1\11\1\1\3\11"+
"\2\1\5\11\2\1\1\11\2\0\1\11\2\1\6\11"+
"\2\1\2\11\1\1\3\11\2\1\2\11\1\1\3\11"+
"\1\0\4\11\1\0\1\1\4\11\1\0\1\11\1\0"+
"\1\1\1\0\3\11\3\1\7\11\1\0\1\1\1\0"+
"\1\11\1\1\1\11\1\1\2\11\3\1\1\11\4\1";
/**
 * Unpacks the per-state attribute table (accepting/look-ahead flags,
 * 135 DFA states). Generated by JFlex -- do not edit by hand.
 */
private static int [] zzUnpackAttribute() {
  int [] result = new int[135];
  int offset = 0;
  offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
  return result;
}

/**
 * Decodes one run-length-encoded packed string into {@code result}.
 *
 * @return index just past the last element written
 */
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
  int i = 0;       /* index in packed string  */
  int j = offset;  /* index in unpacked array */
  int l = packed.length();
  while (i < l) {
    int count = packed.charAt(i++);
    int value = packed.charAt(i++);
    do result[j++] = value; while (--count > 0);
  }
  return j;
}
/** the input device */
private java.io.Reader zzReader;
/** the current state of the DFA */
private int zzState;
/** the current lexical state */
private int zzLexicalState = YYINITIAL;
/** this buffer contains the current text to be matched and is
the source of the yytext() string */
private CharSequence zzBuffer = "";
/** the textposition at the last accepting state */
private int zzMarkedPos;
/** the current text position in the buffer */
private int zzCurrentPos;
/** startRead marks the beginning of the yytext() string in the buffer */
private int zzStartRead;
/** endRead marks the last character in the buffer, that has been read
from input */
private int zzEndRead;
/**
* zzAtBOL == true <=> the scanner is currently at the beginning of a line
*/
private boolean zzAtBOL = true;
/** zzAtEOF == true <=> the scanner is at the EOF */
private boolean zzAtEOF;
/** denotes if the user-EOF-code has already been executed */
private boolean zzEOFDone;
/* user code: */
// This adds support for nested states. I'm no JFlex pro, so maybe this is overkill, but it works quite well.
final ArrayList<Integer> states = new ArrayList<>();
// This was an idea to use the regex implementation for XML schema regexes (which use a slightly different syntax)
// as well, but is currently unfinished as it requires to tweak more places than just the lexer.
private boolean xmlSchemaMode;
int capturingGroupCount = 0;
private boolean allowDanglingMetacharacters;
private boolean allowNestedCharacterClasses;
private boolean allowOctalNoLeadingZero;
private boolean allowHexDigitClass;
private boolean allowEmptyCharacterClass;
private boolean allowHorizontalWhitespaceClass;
private boolean allowPosixBracketExpressions;
/**
 * Creates a lexer configured with the given regex-dialect capabilities.
 * Each flag is copied into a boolean field once, so the lexing loop
 * avoids repeated EnumSet lookups.
 */
_RegExLexer(EnumSet<RegExpCapability> capabilities) {
  this((java.io.Reader)null);
  this.xmlSchemaMode = capabilities.contains(RegExpCapability.XML_SCHEMA_MODE);
  this.allowDanglingMetacharacters = capabilities.contains(RegExpCapability.DANGLING_METACHARACTERS);
  this.allowNestedCharacterClasses = capabilities.contains(RegExpCapability.NESTED_CHARACTER_CLASSES);
  this.allowOctalNoLeadingZero = capabilities.contains(RegExpCapability.OCTAL_NO_LEADING_ZERO);
  this.commentMode = capabilities.contains(RegExpCapability.COMMENT_MODE);
  this.allowHexDigitClass = capabilities.contains(RegExpCapability.ALLOW_HEX_DIGIT_CLASS);
  this.allowHorizontalWhitespaceClass = capabilities.contains(RegExpCapability.ALLOW_HORIZONTAL_WHITESPACE_CLASS);
  this.allowEmptyCharacterClass = capabilities.contains(RegExpCapability.ALLOW_EMPTY_CHARACTER_CLASS);
  this.allowPosixBracketExpressions = capabilities.contains(RegExpCapability.POSIX_BRACKET_EXPRESSIONS);
}

/** Remembers the current lexical state and switches to {@code state}. */
private void yypushstate(int state) {
  states.add(yystate());
  yybegin(state);
}

/** Returns to the lexical state active before the last yypushstate(). */
private void yypopstate() {
  final int state = states.remove(states.size() - 1);
  yybegin(state);
}

/**
 * Handles an inline options group: if the matched text contains 'x',
 * comment mode is turned on, unless the text starts with '-' (i.e. the
 * flag is being cleared), in which case it is turned off.
 */
private void handleOptions() {
  final String o = yytext().toString();
  if (o.contains("x")) {
    commentMode = !o.startsWith("-");
  }
}
// tracks whether the lexer is in comment mode, i.e. whether whitespace is not significant and whether to ignore
// text after '#' till EOL
boolean commentMode = false;
/**
 * Creates a new scanner.
 *
 * Note: in the IntelliJ flavor the reader is unused at lexing time --
 * input is supplied via {@link #reset} -- so {@code null} is accepted.
 *
 * @param in the java.io.Reader to read input from.
 */
_RegExLexer(java.io.Reader in) {
  this.zzReader = in;
}
/**
* Unpacks the compressed character translation table.
*
* @param packed the packed character translation table
* @return the unpacked character translation table
*/
/**
 * Unpacks a compressed character translation table.
 *
 * First pass sums the run lengths to size the array; second pass
 * expands the (count, value) pairs.
 *
 * @param packed the packed character translation table
 * @return the unpacked character translation table
 */
private static char [] zzUnpackCMap(String packed) {
  int size = 0;
  for (int i = 0, length = packed.length(); i < length; i += 2) {
    size += packed.charAt(i);
  }
  char[] map = new char[size];
  int i = 0;  /* index in packed string  */
  int j = 0;  /* index in unpacked array */
  while (i < packed.length()) {
    int count = packed.charAt(i++);
    char value = packed.charAt(i++);
    do map[j++] = value; while (--count > 0);
  }
  return map;
}
/** @return start offset of the current token within the buffer */
public final int getTokenStart() {
  return zzStartRead;
}

/** @return end offset (exclusive) of the current token */
public final int getTokenEnd() {
  return getTokenStart() + yylength();
}

/**
 * Re-targets the lexer at {@code buffer[start, end)} and enters
 * {@code initialState}. This is the IntelliJ FlexLexer entry point;
 * no Reader-based refilling takes place.
 */
public void reset(CharSequence buffer, int start, int end, int initialState) {
  zzBuffer = buffer;
  zzCurrentPos = zzMarkedPos = zzStartRead = start;
  zzAtEOF = false;
  zzAtBOL = true;
  zzEndRead = end;
  yybegin(initialState);
}
/**
* Refills the input buffer.
*
* @return <code>false</code>, iff there was new input.
*
* @exception java.io.IOException if any I/O-Error occurs
*/
/**
 * Refills the input buffer.
 *
 * In this IntelliJ-tweaked scanner the whole input is already in
 * {@code zzBuffer} (see {@link #reset}), so this always reports
 * end-of-input: {@code advance()} treats a {@code true} return as EOF.
 *
 * @return <code>false</code>, iff there was new input.
 *
 * @exception java.io.IOException  if any I/O-Error occurs
 */
private boolean zzRefill() throws java.io.IOException {
  return true;
}

/**
 * Returns the current lexical state.
 */
public final int yystate() {
  return zzLexicalState;
}

/**
 * Enters a new lexical state.
 *
 * @param newState the new lexical state
 */
public final void yybegin(int newState) {
  zzLexicalState = newState;
}

/**
 * Returns the text matched by the current regular expression
 * (a view over {@code zzBuffer}, not a copy).
 */
public final CharSequence yytext() {
  return zzBuffer.subSequence(zzStartRead, zzMarkedPos);
}

/**
 * Returns the character at position <tt>pos</tt> from the
 * matched text.
 *
 * It is equivalent to yytext().charAt(pos), but faster.
 *
 * @param pos the position of the character to fetch.
 *            A value from 0 to yylength()-1.
 *
 * @return the character at position pos
 */
public final char yycharat(int pos) {
  return zzBuffer.charAt(zzStartRead+pos);
}

/**
 * Returns the length of the matched text region.
 */
public final int yylength() {
  return zzMarkedPos-zzStartRead;
}
/**
 * Reports an error that occurred while scanning.
 *
 * In a well-formed scanner (no or only correct usage of
 * yypushback(int) and a match-all fallback rule) this method
 * will only be called with things that "Can't Possibly Happen".
 * If this method is called, something is seriously wrong
 * (e.g. a JFlex bug producing a faulty scanner etc.).
 *
 * Usual syntax/scanner level error handling should be done
 * in error fallback rules.
 *
 * @param errorCode the code of the error message to display
 */
private void zzScanError(int errorCode) {
  String message;
  try {
    message = ZZ_ERROR_MSG[errorCode];
  }
  catch (ArrayIndexOutOfBoundsException e) {
    // Unknown code: fall back to the generic message rather than fail.
    message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
  }

  throw new Error(message);
}


/**
 * Pushes the specified amount of characters back into the input stream.
 *
 * They will be read again by then next call of the scanning method.
 *
 * @param number the number of characters to be read again.
 *               This number must not be greater than yylength()!
 */
public void yypushback(int number)  {
  if ( number > yylength() )
    zzScanError(ZZ_PUSHBACK_2BIG);

  zzMarkedPos -= number;
}
/**
 * Resumes scanning until the next regular expression is matched,
 * the end of input is encountered or an I/O-Error occurs.
 *
 * @return the next token, or {@code null} at end of input
 * @exception java.io.IOException if any I/O-Error occurs
 */
public IElementType advance() throws java.io.IOException {
  int zzInput;
  int zzAction;

  // cached fields:
  int zzCurrentPosL;
  int zzMarkedPosL;
  int zzEndReadL = zzEndRead;
  CharSequence zzBufferL = zzBuffer;

  int [] zzTransL = ZZ_TRANS;
  int [] zzRowMapL = ZZ_ROWMAP;
  int [] zzAttrL = ZZ_ATTRIBUTE;

  // Outer loop: one iteration per token attempt. It normally exits via a
  // return from an action below; actions that return nothing (e.g. case 19)
  // fall through and rescan from the adjusted position.
  while (true) {
    zzMarkedPosL = zzMarkedPos;

    zzAction = -1;

    // The new match starts where the previous one ended.
    zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;

    zzState = ZZ_LEXSTATE[zzLexicalState];

    // set up zzAction for empty match case:
    int zzAttributes = zzAttrL[zzState];
    if ( (zzAttributes & 1) == 1 ) {
      zzAction = zzState;
    }

    // DFA simulation: consume code points and follow transitions until no
    // transition exists (-1), EOF is hit, or a "final, no lookahead" state
    // (attribute bit 8) is reached.
    zzForAction: {
      while (true) {

        if (zzCurrentPosL < zzEndReadL) {
          zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL/*, zzEndReadL*/);
          zzCurrentPosL += Character.charCount(zzInput);
        }
        else if (zzAtEOF) {
          zzInput = YYEOF;
          break zzForAction;
        }
        else {
          // store back cached positions
          zzCurrentPos = zzCurrentPosL;
          zzMarkedPos = zzMarkedPosL;
          boolean eof = zzRefill();
          // get translated positions and possibly new buffer
          zzCurrentPosL = zzCurrentPos;
          zzMarkedPosL = zzMarkedPos;
          zzBufferL = zzBuffer;
          zzEndReadL = zzEndRead;
          if (eof) {
            zzInput = YYEOF;
            break zzForAction;
          }
          else {
            zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL/*, zzEndReadL*/);
            zzCurrentPosL += Character.charCount(zzInput);
          }
        }

        int zzNext = zzTransL[ zzRowMapL[zzState] + ZZ_CMAP(zzInput) ];
        if (zzNext == -1) break zzForAction;
        zzState = zzNext;

        zzAttributes = zzAttrL[zzState];
        if ( (zzAttributes & 1) == 1 ) {
          // Accepting state: remember it and the end of the match so far.
          zzAction = zzState;
          zzMarkedPosL = zzCurrentPosL;
          if ( (zzAttributes & 8) == 8 ) break zzForAction;
        }

      }
    }

    // store back cached position
    zzMarkedPos = zzMarkedPosL;

    if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
      // Nothing consumed and at EOF: signal end of input to the caller.
      zzAtEOF = true;
      return null;
    }
    else {
      // Dispatch on the action associated with the last accepting state.
      // The even "case NNN: break;" entries are unreachable padding emitted
      // by JFlex to keep the switch well-formed.
      switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
        case 1:
          { yypopstate(); return RegExpTT.COMMENT;
          }
        case 86: break;
        case 2:
          { return RegExpTT.CHARACTER;
          }
        case 87: break;
        case 3:
          { return RegExpTT.DOT;
          }
        case 88: break;
        case 4:
          { capturingGroupCount++; return RegExpTT.GROUP_BEGIN;
          }
        case 89: break;
        case 5:
          { return RegExpTT.GROUP_END;
          }
        case 90: break;
        case 6:
          { if (yystate() != CLASS2) yypushstate(EMBRACED); return RegExpTT.LBRACE;
          }
        case 91: break;
        case 7:
          { yypushstate(CLASS2); return RegExpTT.CLASS_BEGIN;
          }
        case 92: break;
        case 8:
          { return StringEscapesTokenTypes.INVALID_CHARACTER_ESCAPE_TOKEN;
          }
        case 93: break;
        case 9:
          { return RegExpTT.MINUS;
          }
        case 94: break;
        case 10:
          { return commentMode ? com.intellij.psi.TokenType.WHITE_SPACE : RegExpTT.CHARACTER;
          }
        case 95: break;
        case 11:
          { return RegExpTT.CARET;
          }
        case 96: break;
        case 12:
          { return RegExpTT.DOLLAR;
          }
        case 97: break;
        case 13:
          { return RegExpTT.QUEST;
          }
        case 98: break;
        case 14:
          { return RegExpTT.STAR;
          }
        case 99: break;
        case 15:
          { return RegExpTT.PLUS;
          }
        case 100: break;
        case 16:
          { return RegExpTT.UNION;
          }
        case 101: break;
        case 17:
          { return commentMode ? com.intellij.psi.TokenType.WHITE_SPACE : RegExpTT.CTRL_CHARACTER;
          }
        case 102: break;
        case 18:
          { if (commentMode) { yypushstate(COMMENT); return RegExpTT.COMMENT; } else return RegExpTT.CHARACTER;
          }
        case 103: break;
        case 19:
          // No token returned: pop the state, push the char back and rescan.
          { yypopstate(); yypushback(1);
          }
        case 104: break;
        case 20:
          { return RegExpTT.NUMBER;
          }
        case 105: break;
        case 21:
          { yypopstate(); return RegExpTT.RBRACE;
          }
        case 106: break;
        case 22:
          { return RegExpTT.NAME;
          }
        case 107: break;
        case 23:
          { return RegExpTT.COMMA;
          }
        case 108: break;
        case 24:
          { assert false : yytext();
          }
        case 109: break;
        case 25:
          { yybegin(CLASS2); return RegExpTT.CHARACTER;
          }
        case 110: break;
        case 26:
          { yybegin(CLASS1); return RegExpTT.CARET;
          }
        case 111: break;
        case 27:
          { if (allowNestedCharacterClasses) {
              yypushstate(CLASS2);
              return RegExpTT.CLASS_BEGIN;
            }
            return RegExpTT.CHARACTER;
          }
        case 112: break;
        case 28:
          { yypopstate(); return RegExpTT.CLASS_END;
          }
        case 113: break;
        case 29:
          { return commentMode ? com.intellij.psi.TokenType.WHITE_SPACE : RegExpTT.ESC_CHARACTER;
          }
        case 114: break;
        case 30:
          { yypopstate(); yypushstate(EMBRACED); return RegExpTT.LBRACE;
          }
        case 115: break;
        case 31:
          { yypopstate(); return RegExpTT.CATEGORY_SHORT_HAND;
          }
        case 116: break;
        case 32:
          { yybegin(YYINITIAL); return RegExpTT.BAD_CHARACTER;
          }
        case 117: break;
        case 33:
          { yybegin(YYINITIAL); return RegExpTT.GROUP_END;
          }
        case 118: break;
        case 34:
          { handleOptions(); return RegExpTT.OPTIONS_ON;
          }
        case 119: break;
        case 35:
          { handleOptions(); return RegExpTT.OPTIONS_OFF;
          }
        case 120: break;
        case 36:
          { yybegin(YYINITIAL); return RegExpTT.COLON;
          }
        case 121: break;
        case 37:
          { yybegin(YYINITIAL); return RegExpTT.GT;
          }
        case 122: break;
        case 38:
          { yybegin(YYINITIAL); return RegExpTT.QUOTE;
          }
        case 123: break;
        case 39:
          { return RegExpTT.BAD_CHARACTER;
          }
        case 124: break;
        case 40:
          { yybegin(OPTIONS); return RegExpTT.SET_OPTIONS;
          }
        case 125: break;
        case 41:
          // lookahead expression with fixed base length
          // (truncate the match to one code point before running the action)
          zzMarkedPos = Character.offsetByCodePoints
            (zzBufferL/*, zzStartRead, zzEndRead - zzStartRead*/, zzStartRead, 1);
          { if (allowEmptyCharacterClass) yypushstate(CLASS2); else yypushstate(CLASS1); return RegExpTT.CLASS_BEGIN;
          }
        case 126: break;
        case 42:
          { return RegExpTT.REDUNDANT_ESCAPE;
          }
        case 127: break;
        case 43:
          // Backslash followed by digits: octal escape, redundant escape in a
          // class, or a backreference trimmed to an existing group number.
          { if (allowOctalNoLeadingZero) {
              CharSequence s = yytext();
              int i = 1;
              for (; i < s.length(); i++) {
                if (s.charAt(i) > '7') break;
              }
              if (i > 1) {
                yypushback(yylength() - i);
                return RegExpTT.OCT_CHAR;
              }
            }
            if (yystate() == CLASS2) {
              yypushback(yylength() - 2);
              return RegExpTT.REDUNDANT_ESCAPE;
            }
            while (yylength() > 2 && Integer.parseInt(yytext().toString().substring(1)) > capturingGroupCount) {
              yypushback(1);
            }
            return RegExpTT.BACKREF;
          }
        case 128: break;
        case 44:
          { return (allowOctalNoLeadingZero ? RegExpTT.OCT_CHAR : RegExpTT.BAD_OCT_VALUE);
          }
        case 129: break;
        case 45:
          { return (yystate() == CLASS2) ? RegExpTT.REDUNDANT_ESCAPE : RegExpTT.ESC_CHARACTER;
          }
        case 130: break;
        case 46:
          { return RegExpTT.ESC_CHARACTER;
          }
        case 131: break;
        case 47:
          { return (yystate() == CLASS2) ? RegExpTT.ESC_CHARACTER : RegExpTT.REDUNDANT_ESCAPE;
          }
        case 132: break;
        case 48:
          { return commentMode ? RegExpTT.CHARACTER : RegExpTT.REDUNDANT_ESCAPE;
          }
        case 133: break;
        case 49:
          { return RegExpTT.ESC_CTRL_CHARACTER;
          }
        case 134: break;
        case 50:
          { return yystate() != CLASS2 ? RegExpTT.BOUNDARY : RegExpTT.ESC_CHARACTER;
          }
        case 135: break;
        case 51:
          { return RegExpTT.CHAR_CLASS;
          }
        case 136: break;
        case 52:
          { if (xmlSchemaMode) return RegExpTT.CHAR_CLASS; else return StringEscapesTokenTypes.INVALID_CHARACTER_ESCAPE_TOKEN;
          }
        case 137: break;
        case 53:
          { yypushstate(PROP); return RegExpTT.PROPERTY;
          }
        case 138: break;
        case 54:
          { yypushstate(QUOTED); return RegExpTT.QUOTE_BEGIN;
          }
        case 139: break;
        case 55:
          { return RegExpTT.BAD_HEX_VALUE;
          }
        case 140: break;
        case 56:
          { return StringEscapesTokenTypes.INVALID_UNICODE_ESCAPE_TOKEN;
          }
        case 141: break;
        case 57:
          { return (allowHexDigitClass || allowHorizontalWhitespaceClass ? RegExpTT.CHAR_CLASS : StringEscapesTokenTypes.INVALID_CHARACTER_ESCAPE_TOKEN);
          }
        case 142: break;
        case 58:
          { yypushstate(NAMED); return RegExpTT.NAMED_CHARACTER;
          }
        case 143: break;
        case 59:
          { yypopstate(); return RegExpTT.QUOTE_END;
          }
        case 144: break;
        case 60:
          { yybegin(CLASS2); return RegExpTT.REDUNDANT_ESCAPE;
          }
        case 145: break;
        case 61:
          // lookahead expression with fixed base length
          zzMarkedPos = Character.offsetByCodePoints
            (zzBufferL/*, zzStartRead, zzEndRead - zzStartRead*/, zzStartRead, 1);
          { if (allowNestedCharacterClasses) {
              yypushstate(CLASS1);
              return RegExpTT.CLASS_BEGIN;
            }
            return RegExpTT.CHARACTER;
          }
        case 146: break;
        case 62:
          { if (allowPosixBracketExpressions) {
              yybegin(BRACKET_EXPRESSION);
              return RegExpTT.BRACKET_EXPRESSION_BEGIN;
            } else {
              yypushback(1);
              return RegExpTT.CHARACTER;
            }
          }
        case 147: break;
        case 63:
          { if (allowNestedCharacterClasses) return RegExpTT.ANDAND; else yypushback(1); return RegExpTT.CHARACTER;
          }
        case 148: break;
        case 64:
          { yybegin(CLASS2); return RegExpTT.BRACKET_EXPRESSION_END;
          }
        case 149: break;
        case 65:
          { yybegin(PY_COND_REF); return RegExpTT.PYTHON_COND_REF;
          }
        case 150: break;
        case 66:
          { yybegin(NAMED_GROUP); capturingGroupCount++; return RegExpTT.RUBY_NAMED_GROUP;
          }
        case 151: break;
        case 67:
          { yybegin(QUOTED_NAMED_GROUP); capturingGroupCount++; return RegExpTT.RUBY_QUOTED_NAMED_GROUP;
          }
        case 152: break;
        case 68:
          { return RegExpTT.NON_CAPT_GROUP;
          }
        case 153: break;
        case 69:
          { return RegExpTT.POS_LOOKAHEAD;
          }
        case 154: break;
        case 70:
          { return RegExpTT.NEG_LOOKAHEAD;
          }
        case 155: break;
        case 71:
          // lookahead expression with fixed base length
          zzMarkedPos = Character.offsetByCodePoints
            (zzBufferL/*, zzStartRead, zzEndRead - zzStartRead*/, zzStartRead, 1);
          { if (allowEmptyCharacterClass) yypushstate(CLASS2); else yypushstate(NEGATE_CLASS1); return RegExpTT.CLASS_BEGIN;
          }
        case 156: break;
        case 72:
          { return RegExpTT.OCT_CHAR;
          }
        case 157: break;
        case 73:
          { yybegin(NAMED_GROUP); return RegExpTT.RUBY_NAMED_GROUP_CALL;
          }
        case 158: break;
        case 74:
          { yybegin(QUOTED_NAMED_GROUP); return RegExpTT.RUBY_QUOTED_NAMED_GROUP_CALL;
          }
        case 159: break;
        case 75:
          { if (xmlSchemaMode) { yypushback(1); return RegExpTT.CHAR_CLASS; } else return RegExpTT.CTRL;
          }
        case 160: break;
        case 76:
          { yybegin(NAMED_GROUP); return RegExpTT.RUBY_NAMED_GROUP_REF;
          }
        case 161: break;
        case 77:
          { yybegin(QUOTED_NAMED_GROUP); return RegExpTT.RUBY_QUOTED_NAMED_GROUP_REF;
          }
        case 162: break;
        case 78:
          // lookahead expression with fixed base length
          zzMarkedPos = Character.offsetByCodePoints
            (zzBufferL/*, zzStartRead, zzEndRead - zzStartRead*/, zzStartRead, 1);
          { if (allowNestedCharacterClasses) {
              yypushstate(CLASS1);
              return RegExpTT.CLASS_BEGIN;
            }
            return RegExpTT.CHARACTER;
          }
        case 163: break;
        case 79:
          { yybegin(NAMED_GROUP); capturingGroupCount++; return RegExpTT.PYTHON_NAMED_GROUP;
          }
        case 164: break;
        case 80:
          { yybegin(PY_NAMED_GROUP_REF); return RegExpTT.PYTHON_NAMED_GROUP_REF;
          }
        case 165: break;
        case 81:
          { return RegExpTT.POS_LOOKBEHIND;
          }
        case 166: break;
        case 82:
          { return RegExpTT.NEG_LOOKBEHIND;
          }
        case 167: break;
        case 83:
          { return RegExpTT.HEX_CHAR;
          }
        case 168: break;
        case 84:
          { return RegExpTT.COMMENT;
          }
        case 169: break;
        case 85:
          { return RegExpTT.UNICODE_CHAR;
          }
        case 170: break;
        default:
          // No action for the computed value: the generated tables are broken.
          zzScanError(ZZ_NO_MATCH);
      }
    }
  }
}
}
| apache-2.0 |
jayantgolhar/Hadoop-0.21.0 | common/docs/api/org/apache/hadoop/conf/class-use/Configured.html | 23837 | <!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_14) on Tue Aug 17 01:02:51 EDT 2010 -->
<TITLE>
Uses of Class org.apache.hadoop.conf.Configured (Hadoop-common 0.21.0 API)
</TITLE>
<META NAME="date" CONTENT="2010-08-17">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.conf.Configured (Hadoop-common 0.21.0 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../../../../../index.html?org/apache/hadoop/conf/class-use/Configured.html" target="_top"><B>FRAMES</B></A> &nbsp;
<A HREF="Configured.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.hadoop.conf.Configured</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs"><B>org.apache.hadoop.fs</B></A></TD>
<TD>An abstract file system API. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs.ftp"><B>org.apache.hadoop.fs.ftp</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs.kfs"><B>org.apache.hadoop.fs.kfs</B></A></TD>
<TD>A client for the Kosmos filesystem (KFS) </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs.s3"><B>org.apache.hadoop.fs.s3</B></A></TD>
<TD>A distributed, block-based implementation of <A HREF="../../../../../org/apache/hadoop/fs/FileSystem.html" title="class in org.apache.hadoop.fs"><CODE>FileSystem</CODE></A> that uses <a href="http://aws.amazon.com/s3">Amazon S3</a>
as a backing store. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs.s3native"><B>org.apache.hadoop.fs.s3native</B></A></TD>
<TD>
A distributed implementation of <A HREF="../../../../../org/apache/hadoop/fs/FileSystem.html" title="class in org.apache.hadoop.fs"><CODE>FileSystem</CODE></A> for reading and writing files on
<a href="http://aws.amazon.com/s3">Amazon S3</a>. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.fs.shell"><B>org.apache.hadoop.fs.shell</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.io.serializer"><B>org.apache.hadoop.io.serializer</B></A></TD>
<TD>
This package provides a mechanism for using different serialization frameworks
in Hadoop. </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.io.serializer.avro"><B>org.apache.hadoop.io.serializer.avro</B></A></TD>
<TD>
This package provides Avro serialization in Hadoop. </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/package-summary.html">org.apache.hadoop.fs</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Subclasses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/package-summary.html">org.apache.hadoop.fs</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/ChecksumFileSystem.html" title="class in org.apache.hadoop.fs">ChecksumFileSystem</A></B></CODE>
<BR>
Abstract Checksumed FileSystem.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/FileSystem.html" title="class in org.apache.hadoop.fs">FileSystem</A></B></CODE>
<BR>
An abstract base class for a fairly generic filesystem.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/FilterFileSystem.html" title="class in org.apache.hadoop.fs">FilterFileSystem</A></B></CODE>
<BR>
A <code>FilterFileSystem</code> contains
some other file system, which it uses as
its basic file system, possibly transforming
the data along the way or providing additional
functionality.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/LocalFileSystem.html" title="class in org.apache.hadoop.fs">LocalFileSystem</A></B></CODE>
<BR>
Implement the FileSystem API for the checksumed local filesystem.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/RawLocalFileSystem.html" title="class in org.apache.hadoop.fs">RawLocalFileSystem</A></B></CODE>
<BR>
Implement the FileSystem API for the raw local filesystem.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/Trash.html" title="class in org.apache.hadoop.fs">Trash</A></B></CODE>
<BR>
Provides a <i>trash</i> feature.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs.ftp"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/ftp/package-summary.html">org.apache.hadoop.fs.ftp</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Subclasses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/ftp/package-summary.html">org.apache.hadoop.fs.ftp</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/ftp/FTPFileSystem.html" title="class in org.apache.hadoop.fs.ftp">FTPFileSystem</A></B></CODE>
<BR>
A <A HREF="../../../../../org/apache/hadoop/fs/FileSystem.html" title="class in org.apache.hadoop.fs"><CODE>FileSystem</CODE></A> backed by an FTP client provided by <a
href="http://commons.apache.org/net/">Apache Commons Net</a>.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs.kfs"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/kfs/package-summary.html">org.apache.hadoop.fs.kfs</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Subclasses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/kfs/package-summary.html">org.apache.hadoop.fs.kfs</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/kfs/KosmosFileSystem.html" title="class in org.apache.hadoop.fs.kfs">KosmosFileSystem</A></B></CODE>
<BR>
A FileSystem backed by KFS.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs.s3"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/s3/package-summary.html">org.apache.hadoop.fs.s3</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Subclasses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/s3/package-summary.html">org.apache.hadoop.fs.s3</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/s3/MigrationTool.html" title="class in org.apache.hadoop.fs.s3">MigrationTool</A></B></CODE>
<BR>
This class is a tool for migrating data from an older to a newer version
of an S3 filesystem.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/s3/S3FileSystem.html" title="class in org.apache.hadoop.fs.s3">S3FileSystem</A></B></CODE>
<BR>
A block-based <A HREF="../../../../../org/apache/hadoop/fs/FileSystem.html" title="class in org.apache.hadoop.fs"><CODE>FileSystem</CODE></A> backed by
<a href="http://aws.amazon.com/s3">Amazon S3</a>.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs.s3native"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/s3native/package-summary.html">org.apache.hadoop.fs.s3native</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Subclasses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/fs/s3native/package-summary.html">org.apache.hadoop.fs.s3native</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/s3native/NativeS3FileSystem.html" title="class in org.apache.hadoop.fs.s3native">NativeS3FileSystem</A></B></CODE>
<BR>
A <A HREF="../../../../../org/apache/hadoop/fs/FileSystem.html" title="class in org.apache.hadoop.fs"><CODE>FileSystem</CODE></A> for reading and writing files stored on
<a href="http://aws.amazon.com/s3">Amazon S3</a>.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.fs.shell"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in org.apache.hadoop.fs.shell</FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Subclasses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in org.apache.hadoop.fs.shell</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/shell/Command.html" title="class in org.apache.hadoop.fs.shell">Command</A></B></CODE>
<BR>
An abstract class for the execution of a file system command</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/fs/shell/Count.html" title="class in org.apache.hadoop.fs.shell">Count</A></B></CODE>
<BR>
Count the number of directories, files, bytes, quota, and remaining quota.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.io.serializer"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/io/serializer/package-summary.html">org.apache.hadoop.io.serializer</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Subclasses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/io/serializer/package-summary.html">org.apache.hadoop.io.serializer</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/io/serializer/WritableSerialization.html" title="class in org.apache.hadoop.io.serializer">WritableSerialization</A></B></CODE>
<BR>
A <CODE>Serialization</CODE> for <A HREF="../../../../../org/apache/hadoop/io/Writable.html" title="interface in org.apache.hadoop.io"><CODE>Writable</CODE></A>s that delegates to
<A HREF="../../../../../org/apache/hadoop/io/Writable.html#write(java.io.DataOutput)"><CODE>Writable.write(java.io.DataOutput)</CODE></A> and
<A HREF="../../../../../org/apache/hadoop/io/Writable.html#readFields(java.io.DataInput)"><CODE>Writable.readFields(java.io.DataInput)</CODE></A>.</TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.io.serializer.avro"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/io/serializer/avro/package-summary.html">org.apache.hadoop.io.serializer.avro</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Subclasses of <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf">Configured</A> in <A HREF="../../../../../org/apache/hadoop/io/serializer/avro/package-summary.html">org.apache.hadoop.io.serializer.avro</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/io/serializer/avro/AvroReflectSerialization.html" title="class in org.apache.hadoop.io.serializer.avro">AvroReflectSerialization</A></B></CODE>
<BR>
Serialization for Avro Reflect classes.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/io/serializer/avro/AvroSerialization.html" title="class in org.apache.hadoop.io.serializer.avro">AvroSerialization<T></A></B></CODE>
<BR>
Base class for providing serialization to Avro types.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> class</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/io/serializer/avro/AvroSpecificSerialization.html" title="class in org.apache.hadoop.io.serializer.avro">AvroSpecificSerialization</A></B></CODE>
<BR>
Serialization for Avro Specific classes.</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/conf/Configured.html" title="class in org.apache.hadoop.conf"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../../../../../index.html?org/apache/hadoop/conf/class-use/Configured.html" target="_top"><B>FRAMES</B></A> &nbsp;
<A HREF="Configured.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2009 The Apache Software Foundation
</BODY>
</HTML>
| apache-2.0 |
jt70471/aws-sdk-cpp | aws-cpp-sdk-rds/source/model/CreateDBParameterGroupRequest.cpp | 1483 | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/rds/model/CreateDBParameterGroupRequest.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
using namespace Aws::RDS::Model;
using namespace Aws::Utils;
// Default-constructs the request with every "HasBeenSet" flag cleared, so
// SerializePayload() emits only the parameters the caller explicitly assigns.
CreateDBParameterGroupRequest::CreateDBParameterGroupRequest() :
m_dBParameterGroupNameHasBeenSet(false),
m_dBParameterGroupFamilyHasBeenSet(false),
m_descriptionHasBeenSet(false),
m_tagsHasBeenSet(false)
{
}
// Serializes this request into the x-www-form-urlencoded Query-API body for
// the RDS CreateDBParameterGroup action. Only members whose "HasBeenSet"
// flag is true are emitted; the field order (Action first, Version last)
// must match the generated wire format exactly.
Aws::String CreateDBParameterGroupRequest::SerializePayload() const
{
  Aws::StringStream query;
  query << "Action=CreateDBParameterGroup&";
  if(m_dBParameterGroupNameHasBeenSet)
  {
    query << "DBParameterGroupName=" << StringUtils::URLEncode(m_dBParameterGroupName.c_str()) << "&";
  }
  if(m_dBParameterGroupFamilyHasBeenSet)
  {
    query << "DBParameterGroupFamily=" << StringUtils::URLEncode(m_dBParameterGroupFamily.c_str()) << "&";
  }
  if(m_descriptionHasBeenSet)
  {
    query << "Description=" << StringUtils::URLEncode(m_description.c_str()) << "&";
  }
  if(m_tagsHasBeenSet)
  {
    // Tags are flattened to Tags.member.1, Tags.member.2, ... (1-based index).
    unsigned memberIndex = 1;
    for(auto& tag : m_tags)
    {
      tag.OutputToStream(query, "Tags.member.", memberIndex, "");
      ++memberIndex;
    }
  }
  query << "Version=2014-10-31";
  return query.str();
}
// Hook used by the common request runtime: copies the serialized form body
// onto the outgoing URI's query string (Query-protocol requests carry their
// payload in the query string).
void CreateDBParameterGroupRequest::DumpBodyToUrl(Aws::Http::URI& uri ) const
{
uri.SetQueryString(SerializePayload());
}
| apache-2.0 |
kris-nova/klone | vendor/github.com/docker/cli/cli/command/node/promote_test.go | 2594 | package node
import (
"bytes"
"io/ioutil"
"testing"
"github.com/docker/cli/cli/internal/test"
"github.com/docker/docker/api/types/swarm"
"github.com/pkg/errors"
// Import builders to get the builder function as package function
. "github.com/docker/cli/cli/internal/test/builders"
"github.com/docker/docker/pkg/testutil"
"github.com/stretchr/testify/assert"
)
// TestNodePromoteErrors runs `docker node promote` against a fake client and
// verifies that argument-validation failures, inspect failures and update
// failures are all propagated with the expected error message.
func TestNodePromoteErrors(t *testing.T) {
testCases := []struct {
args []string
nodeInspectFunc func() (swarm.Node, []byte, error)
nodeUpdateFunc func(nodeID string, version swarm.Version, node swarm.NodeSpec) error
expectedError string
}{
// No node ID supplied: the command's arg validation must reject the call.
{
expectedError: "requires at least 1 argument",
},
// Node inspection fails before any update is attempted.
{
args: []string{"nodeID"},
nodeInspectFunc: func() (swarm.Node, []byte, error) {
return swarm.Node{}, []byte{}, errors.Errorf("error inspecting the node")
},
expectedError: "error inspecting the node",
},
// Inspection succeeds but the role update itself fails.
{
args: []string{"nodeID"},
nodeUpdateFunc: func(nodeID string, version swarm.Version, node swarm.NodeSpec) error {
return errors.Errorf("error updating the node")
},
expectedError: "error updating the node",
},
}
for _, tc := range testCases {
buf := new(bytes.Buffer)
cmd := newPromoteCommand(
test.NewFakeCli(&fakeClient{
nodeInspectFunc: tc.nodeInspectFunc,
nodeUpdateFunc: tc.nodeUpdateFunc,
}, buf))
cmd.SetArgs(tc.args)
// Discard cobra's usage output; only the returned error is asserted.
cmd.SetOutput(ioutil.Discard)
testutil.ErrorContains(t, cmd.Execute(), tc.expectedError)
}
}
// TestNodePromoteNoChange verifies that promoting a node that is already a
// manager succeeds, and that any update issued still carries the manager role.
func TestNodePromoteNoChange(t *testing.T) {
out := new(bytes.Buffer)
cli := test.NewFakeCli(&fakeClient{
nodeInspectFunc: func() (swarm.Node, []byte, error) {
return *Node(Manager()), []byte{}, nil
},
nodeUpdateFunc: func(nodeID string, version swarm.Version, node swarm.NodeSpec) error {
if node.Role != swarm.NodeRoleManager {
return errors.Errorf("expected role manager, got %s", node.Role)
}
return nil
},
}, out)
cmd := newPromoteCommand(cli)
cmd.SetArgs([]string{"nodeID"})
assert.NoError(t, cmd.Execute())
}
// TestNodePromoteMultipleNode verifies that several node IDs can be promoted
// in one invocation; every update must set the manager role.
func TestNodePromoteMultipleNode(t *testing.T) {
out := new(bytes.Buffer)
cli := test.NewFakeCli(&fakeClient{
nodeInspectFunc: func() (swarm.Node, []byte, error) {
return *Node(), []byte{}, nil
},
nodeUpdateFunc: func(nodeID string, version swarm.Version, node swarm.NodeSpec) error {
if node.Role != swarm.NodeRoleManager {
return errors.Errorf("expected role manager, got %s", node.Role)
}
return nil
},
}, out)
cmd := newPromoteCommand(cli)
cmd.SetArgs([]string{"nodeID1", "nodeID2"})
assert.NoError(t, cmd.Execute())
}
| apache-2.0 |
hippich/typescript | tests/baselines/reference/recursiveBaseCheck6.js | 400 | var __extends = this.__extends || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
__.prototype = b.prototype;
d.prototype = new __();
};
var S18 = (function (_super) {
__extends(S18, _super);
function S18() {
_super.apply(this, arguments);
}
return S18;
})(S18);
(new S18()).blah;
| apache-2.0 |
ebidel/WebFundamentals | src/content/en/tools/workbox/reference-docs/v3.0.0/workbox.backgroundSync.Plugin.html | 2654 | <!DOCTYPE html>
<html devsite>
<head>
<meta name="project_path" value="/web/tools/workbox/_project.yaml" />
<meta name="book_path" value="/web/tools/workbox/_book.yaml" />
<meta name="gtm_var" data-key="docType" data-value="reference">
<title>Class: Plugin</title>
<link href="jsdoc.css" rel="stylesheet">
</head>
<body>
<div id="jsdoc-body-container">
<div id="jsdoc-content">
<div id="jsdoc-content-container">
<div id="jsdoc-main" role="main">
<header class="page-header">
<h1><small><a href="workbox.html">workbox</a>.<wbr><a href="workbox.backgroundSync.html">backgroundSync</a>.<wbr></small><span class="symbol-name">Plugin</span></h1>
<div class="symbol-detail-labels"><span class="label label-kind"><small>class</small></span> <span class="label label-static"><small>static</small></span></div>
<div class="symbol-detail-labels">
<span class="label label-kind"><small>Version</small></span>
<span class="label label-version"><small>v3.0.0</small></span>
</div>
<p>A class implementing the <code>fetchDidFail</code> lifecycle callback. This makes it easier to add failed requests to a background sync Queue.</p>
</header>
<section>
<h2>Constructor</h2>
<section>
<h3 id="Plugin" class="symbol-name">Plugin</h3>
<p class="type-signature"> new Plugin(...queueArgs)
</p>
<section>
<table class="function param responsive">
<tr>
<th colspan="2">
<h4>Parameter</h4>
</th>
</tr>
<tbody>
<tr>
<td class="details-table-name">
<p>queueArgs</p>
</td>
<td>
<p class="details-table-types">any type</p>
<p>Args to forward to the composed Queue instance. See the <a href="workbox.backgroundSync.Queue.html">Queue</a> documentation for parameter details.</p>
<p>Value may be repeated.</p>
</td>
</tr>
</tbody>
</table>
</section>
<dl class="dl-compact">
</dl>
</section>
</section>
<section>
</section>
</div>
</div>
<nav id="jsdoc-toc-nav" role="navigation"></nav>
</div>
</div>
</body>
</html> | apache-2.0 |
krivachy/compgs03_mutation_testing | src/main/java/org/apache/commons/collections4/iterators/FilterIterator.java | 6056 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.collections4.iterators;
import java.util.Iterator;
import java.util.NoSuchElementException;
import org.apache.commons.collections4.Predicate;
/**
* Decorates another {@link Iterator} using a predicate to filter elements.
* <p>
* This iterator decorates the underlying iterator, only allowing through
* those elements that match the specified {@link Predicate Predicate}.
*
* @since 1.0
* @version $Id: FilterIterator.java 1477802 2013-04-30 20:01:28Z tn $
*/
public class FilterIterator<E> implements Iterator<E> {

    /** The decorated iterator supplying candidate elements. */
    private Iterator<? extends E> iterator;
    /** The predicate each candidate element must satisfy. */
    private Predicate<? super E> predicate;
    /** Element queued for the next {@link #next()} call; only meaningful while {@code cached} is true. */
    private E cachedElement;
    /** Whether {@code cachedElement} currently holds a matching, not-yet-returned element. */
    private boolean cached = false;

    //-----------------------------------------------------------------------
    /**
     * Creates an unconfigured <code>FilterIterator</code>. The instance cannot be
     * used until {@link #setIterator(Iterator) setIterator} is invoked.
     */
    public FilterIterator() {
        super();
    }

    /**
     * Creates a <code>FilterIterator</code> over the given iterator. The instance
     * cannot be used until {@link #setPredicate(Predicate) setPredicate} is invoked.
     *
     * @param iterator  the iterator to decorate
     */
    public FilterIterator(final Iterator<? extends E> iterator) {
        super();
        this.iterator = iterator;
    }

    /**
     * Creates a fully configured <code>FilterIterator</code> using the given
     * iterator and predicate.
     *
     * @param iterator  the iterator to decorate
     * @param predicate  the predicate elements must match
     */
    public FilterIterator(final Iterator<? extends E> iterator, final Predicate<? super E> predicate) {
        super();
        this.iterator = iterator;
        this.predicate = predicate;
    }

    //-----------------------------------------------------------------------
    /**
     * Reports whether the underlying iterator holds a further element that
     * satisfies the predicate.
     *
     * @return true if another matching element is available
     * @throws NullPointerException if either the iterator or predicate is null
     */
    public boolean hasNext() {
        return cached || findNextMatch();
    }

    /**
     * Returns the next element accepted by the predicate.
     *
     * @return the next matching element
     * @throws NullPointerException if either the iterator or predicate is null
     * @throws NoSuchElementException if no remaining element matches the predicate
     */
    public E next() {
        if (!cached && !findNextMatch()) {
            throw new NoSuchElementException();
        }
        // Hand the cached element out exactly once.
        cached = false;
        return cachedElement;
    }

    /**
     * Removes from the underlying collection the last element returned by this
     * iterator. Only permitted directly after <code>next()</code>: a call to
     * <code>hasNext()</code> advances the decorated iterator, after which the
     * previously returned element can no longer be removed.
     *
     * @throws IllegalStateException if <code>hasNext()</code> has already
     *   been called.
     */
    public void remove() {
        if (cached) {
            throw new IllegalStateException("remove() cannot be called");
        }
        iterator.remove();
    }

    //-----------------------------------------------------------------------
    /**
     * Returns the decorated iterator.
     *
     * @return the iterator
     */
    public Iterator<? extends E> getIterator() {
        return iterator;
    }

    /**
     * Replaces the decorated iterator. Any iteration in progress is effectively
     * reset and a cached-but-unreturned element is discarded.
     *
     * @param iterator  the iterator to use
     */
    public void setIterator(final Iterator<? extends E> iterator) {
        this.iterator = iterator;
        cachedElement = null;
        cached = false;
    }

    //-----------------------------------------------------------------------
    /**
     * Returns the predicate in use.
     *
     * @return the predicate
     */
    public Predicate<? super E> getPredicate() {
        return predicate;
    }

    /**
     * Replaces the predicate, discarding any cached-but-unreturned element.
     *
     * @param predicate  the predicate to use
     */
    public void setPredicate(final Predicate<? super E> predicate) {
        this.predicate = predicate;
        cachedElement = null;
        cached = false;
    }

    //-----------------------------------------------------------------------
    /**
     * Advances the decorated iterator until an element satisfies the predicate,
     * caching it for the next {@link #next()} call.
     *
     * @return true if a matching element was found and cached, false when the
     *   underlying iterator is exhausted
     */
    private boolean findNextMatch() {
        while (iterator.hasNext()) {
            final E candidate = iterator.next();
            if (predicate.evaluate(candidate)) {
                cachedElement = candidate;
                cached = true;
                return true;
            }
        }
        return false;
    }
}
| apache-2.0 |
romankagan/DDBWorkbench | platform/external-system-impl/src/com/intellij/openapi/externalSystem/service/settings/AbstractImportFromExternalSystemControl.java | 9041 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.externalSystem.service.settings;
import com.intellij.openapi.externalSystem.ExternalSystemManager;
import com.intellij.openapi.externalSystem.model.ProjectSystemId;
import com.intellij.openapi.externalSystem.settings.AbstractExternalSystemSettings;
import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings;
import com.intellij.openapi.externalSystem.settings.ExternalSystemSettingsListener;
import com.intellij.openapi.externalSystem.util.*;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.TextComponentAccessor;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.text.StringUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import java.awt.*;
/**
* A control which knows how to manage settings of external project being imported.
*
* @author Denis Zhdanov
* @since 4/30/13 2:33 PM
*/
public abstract class AbstractImportFromExternalSystemControl<
ProjectSettings extends ExternalProjectSettings,
L extends ExternalSystemSettingsListener<ProjectSettings>,
SystemSettings extends AbstractExternalSystemSettings<SystemSettings, ProjectSettings, L>>
{
// System- and project-level settings models plus the controls that edit them.
// myComponent hosts the whole form; myLinkedProjectPathField is the path of
// the external project being linked/imported.
@NotNull private final SystemSettings mySystemSettings;
@NotNull private final ProjectSettings myProjectSettings;
@NotNull private final PaintAwarePanel myComponent = new PaintAwarePanel(new GridBagLayout());
@NotNull private final TextFieldWithBrowseButton myLinkedProjectPathField = new TextFieldWithBrowseButton();
@NotNull private final ExternalSystemSettingsControl<ProjectSettings> myProjectSettingsControl;
@NotNull private final ProjectSystemId myExternalSystemId;
// Null when the external system exposes no system-level settings control.
@Nullable private final ExternalSystemSettingsControl<SystemSettings> mySystemSettingsControl;
@Nullable Project myCurrentProject;
@SuppressWarnings("AbstractMethodCallInConstructor")
protected AbstractImportFromExternalSystemControl(@NotNull ProjectSystemId externalSystemId,
@NotNull SystemSettings systemSettings,
@NotNull ProjectSettings projectSettings)
{
myExternalSystemId = externalSystemId;
mySystemSettings = systemSettings;
myProjectSettings = projectSettings;
// NOTE: these abstract factory methods are deliberately called from the
// constructor (see the @SuppressWarnings above); subclass implementations
// must not rely on subclass state initialized after super().
myProjectSettingsControl = createProjectSettingsControl(projectSettings);
mySystemSettingsControl = createSystemSettingsControl(systemSettings);
JLabel linkedProjectPathLabel =
new JLabel(ExternalSystemBundle.message("settings.label.select.project", externalSystemId.getReadableName()));
ExternalSystemManager<?, ?, ?, ?, ?> manager = ExternalSystemApiUtil.getManager(externalSystemId);
assert manager != null;
FileChooserDescriptor fileChooserDescriptor = manager.getExternalProjectDescriptor();
myLinkedProjectPathField.addBrowseFolderListener("",
ExternalSystemBundle
.message("settings.label.select.project", externalSystemId.getReadableName()),
null,
fileChooserDescriptor,
TextComponentAccessor.TEXT_FIELD_WHOLE_TEXT,
false);
// Notify the subclass on every edit of the linked project path (insert,
// remove and attribute-change events all funnel into the same callback).
myLinkedProjectPathField.getTextField().getDocument().addDocumentListener(new DocumentListener() {
@Override
public void insertUpdate(DocumentEvent e) {
onLinkedProjectPathChange(myLinkedProjectPathField.getText());
}
@Override
public void removeUpdate(DocumentEvent e) {
onLinkedProjectPathChange(myLinkedProjectPathField.getText());
}
@Override
public void changedUpdate(DocumentEvent e) {
onLinkedProjectPathChange(myLinkedProjectPathField.getText());
}
});
// Layout: path chooser row first, then project settings, then (optionally)
// system settings, then a filler that pushes everything to the top.
myComponent.add(linkedProjectPathLabel, ExternalSystemUiUtil.getLabelConstraints(0));
myComponent.add(myLinkedProjectPathField, ExternalSystemUiUtil.getFillLineConstraints(0));
myProjectSettingsControl.fillUi(myComponent, 0);
if (mySystemSettingsControl != null) {
mySystemSettingsControl.fillUi(myComponent, 0);
}
ExternalSystemUiUtil.fillBottom(myComponent);
}
/**
* This control is assumed to be used at least at two circumstances:
* <pre>
* <ul>
* <li>new ide project is being created on the external project basis;</li>
* <li>new ide module(s) is being added to the existing ide project on the external project basis;</li>
* </ul>
* </pre>
* We need to differentiate these situations, for example, we don't want to allow linking an external project to existing ide
* project if it's already linked.
* <p/>
* This property helps us to achieve that - when an ide project is defined, that means that new modules are being imported
* to that ide project from external project; when this property is <code>null</code> that means that new ide project is being
* created on the target external project basis.
*
* @param currentProject current ide project (if any)
*/
public void setCurrentProject(@Nullable Project currentProject) {
myCurrentProject = currentProject;
}
// Invoked on every edit of the linked external project path (see the
// document listener installed in the constructor).
protected abstract void onLinkedProjectPathChange(@NotNull String path);
/**
* Creates a control for managing given project settings.
*
* @param settings target external project settings
* @return control for managing given project settings
*/
@NotNull
protected abstract ExternalSystemSettingsControl<ProjectSettings> createProjectSettingsControl(@NotNull ProjectSettings settings);
/**
* Creates a control for managing given system-level settings (if any).
*
* @param settings target system settings
* @return a control for managing given system-level settings;
* <code>null</code> if current external system doesn't have system-level settings (only project-level settings)
*/
@Nullable
protected abstract ExternalSystemSettingsControl<SystemSettings> createSystemSettingsControl(@NotNull SystemSettings settings);
@NotNull
public JComponent getComponent() {
return myComponent;
}
@NotNull
public ExternalSystemSettingsControl<ProjectSettings> getProjectSettingsControl() {
return myProjectSettingsControl;
}
// Keeps the settings model and the visible text field in sync.
public void setLinkedProjectPath(@NotNull String path) {
myProjectSettings.setExternalProjectPath(path);
myLinkedProjectPathField.setText(path);
}
@NotNull
public SystemSettings getSystemSettings() {
return mySystemSettings;
}
@NotNull
public ProjectSettings getProjectSettings() {
return myProjectSettings;
}
// Clears the path field and resets both settings controls to their models.
public void reset() {
myLinkedProjectPathField.setText("");
myProjectSettingsControl.reset();
if (mySystemSettingsControl != null) {
mySystemSettingsControl.reset();
}
}
// Validates the entered path, then validates and commits both settings
// controls into their models. Throws ConfigurationException on any failure.
public void apply() throws ConfigurationException {
String linkedProjectPath = myLinkedProjectPathField.getText();
if (StringUtil.isEmpty(linkedProjectPath)) {
throw new ConfigurationException(ExternalSystemBundle.message("error.project.undefined"));
}
else if (myCurrentProject != null) {
// Importing into an existing IDE project: forbid linking an external
// project path that is already registered with that project.
ExternalSystemManager<?, ?, ?, ?, ?> manager = ExternalSystemApiUtil.getManager(myExternalSystemId);
assert manager != null;
AbstractExternalSystemSettings<?, ?,?> settings = manager.getSettingsProvider().fun(myCurrentProject);
if (settings.getLinkedProjectSettings(linkedProjectPath) != null) {
throw new ConfigurationException(ExternalSystemBundle.message("error.project.already.registered"));
}
}
//noinspection ConstantConditions
myProjectSettings.setExternalProjectPath(ExternalSystemApiUtil.normalizePath(linkedProjectPath));
myProjectSettingsControl.validate(myProjectSettings);
myProjectSettingsControl.apply(myProjectSettings);
if (mySystemSettingsControl != null) {
mySystemSettingsControl.validate(mySystemSettings);
mySystemSettingsControl.apply(mySystemSettings);
}
}
}
| apache-2.0 |
yajiedesign/mxnet | src/operator/numpy/np_einsum_op.cc | 14145 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Copyright (c) 2005-2019, NumPy Developers.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* * Neither the name of the NumPy Developers nor the names of any
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*!
* \file np_einsum_op.cc
* \brief CPU Implementation of numpy-compatible einsum
*/
#include "./np_einsum_op-inl.h"
#include <cstdlib>
#include <cstring>
namespace mxnet {
namespace op {
// Normalizes a raw einsum subscripts string against the operand shapes:
// strips whitespace, validates subscript characters, expands "..." ellipses
// into concrete unused labels (broadcast-aware), and synthesizes the implicit
// output term when no "->" is given. Returns {input_subscripts, output_subscript}.
inline std::vector<std::string> _parse_einsum_input(std::string subscripts,
const mxnet::ShapeVector& shapes) {
const std::string einsum_symbols =
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
// Bitset of allowed subscript letters for O(1) membership tests.
std::bitset<MAXAXIS> einsum_symbols_set;
for (const char& c : einsum_symbols) {
einsum_symbols_set.set(c);
}
CHECK_NE(shapes.size(), 0U)
<< "No input operands";
// Drop all spaces in-place (erase-remove idiom).
auto end_pos = std::remove(subscripts.begin(), subscripts.end(), ' ');
subscripts.erase(end_pos, subscripts.end());
// Ensure all characters are valid
for (const char& c : subscripts) {
if (c == '.' || c == ',' || c == '-' || c == '>') {
continue;
}
CHECK(einsum_symbols_set.test(c))
<< "Character " << c
<< " is not a valid symbol.";
}
// Check for proper "->"
if (subscripts.find('-') != std::string::npos ||
subscripts.find('>') != std::string::npos) {
bool invalid = (std::count(subscripts.begin(), subscripts.end(), '-') > 1 ||
std::count(subscripts.begin(), subscripts.end(), '>') > 1);
CHECK(!invalid && _count_substring(subscripts, "->") == 1)
<< "Subscripts can only contain one '->'.";
}
// Parse ellipses
if (subscripts.find('.') != std::string::npos) {
// Collect the labels already in use so ellipses can be expanded with
// letters that do not clash with them.
std::string used = subscripts;
used.erase(std::remove_if(used.begin(),
used.end(),
[](const char& c){return c == '.' ||
c == ',' ||
c == '-' ||
c == '>';}),
used.end());
std::bitset<MAXAXIS> used_set = str2set(used);
std::string ellipse_inds = "";
for (const char& c : einsum_symbols) {
if (!used_set.test(static_cast<int>(c))) {
ellipse_inds.append(1, c);
}
}
// Longest ellipsis expansion seen across all operands (for broadcasting).
int longest = 0;
std::string input_tmp, output_sub;
std::vector<std::string> split_subscripts;
bool out_sub;
if (subscripts.find("->") != std::string::npos) {
std::vector<std::string> tmp = split(subscripts, "->");
input_tmp = tmp[0];
output_sub = tmp[1];
split_subscripts = split(input_tmp, ",");
out_sub = true;
} else {
split_subscripts = split(subscripts, ",");
out_sub = false;
}
// Rebuild the input subscripts with each "..." replaced per operand.
size_t size_split_subscripts = split_subscripts.size();
subscripts = "";
for (size_t i = 0; i < size_split_subscripts; ++i) {
const std::string& sub = split_subscripts[i];
if (sub.find('.') != std::string::npos) {
CHECK_EQ(std::count(sub.begin(), sub.end(), '.'), 3)
<< "Invalid Ellipses";
CHECK_EQ(_count_substring(sub, "..."), 1)
<< "Invalid Ellipses";
// Take into account numerical values
int ellipse_count = 0;
if (shapes[i].ndim() == 0) {
ellipse_count = 0;
} else {
ellipse_count = std::max(shapes[i].ndim(), 1);
ellipse_count -= sub.length() - 3;
}
if (ellipse_count > longest) {
longest = ellipse_count;
}
CHECK_GE(ellipse_count, 0)
<< "Ellipses lengths do not match.";
if (ellipse_count == 0) {
split_subscripts[i].erase(sub.find("..."), 3);
} else {
// Replace "..." with the tail of the unused-label pool so that all
// operands' ellipses align on the same trailing labels.
std::string rep_inds = ellipse_inds.substr(ellipse_inds.length() - ellipse_count);
split_subscripts[i].replace(sub.find("..."), 3, rep_inds);
}
}
subscripts += split_subscripts[i];
if (i + 1 < size_split_subscripts) {
subscripts += ",";
}
}
std::string out_ellipse;
if (longest == 0) {
out_ellipse = "";
} else {
out_ellipse = ellipse_inds.substr(ellipse_inds.length() - longest);
}
if (out_sub) {
output_sub.replace(output_sub.find("..."), 3, out_ellipse);
subscripts += "->" + output_sub;
} else {
// Special care for outputless ellipses
// Implicit output: labels occurring exactly once (and not part of the
// ellipsis expansion) appear in the result, in sorted order.
std::bitset<MAXAXIS> out_ellipse_set = str2set(out_ellipse);
std::string tmp_subscripts = subscripts, output_subscript = "";
size_t len_tmp_subscripts = tmp_subscripts.length();
std::sort(tmp_subscripts.begin(), tmp_subscripts.end());
for (size_t i = 0; i < len_tmp_subscripts; ++i) {
const char& c = tmp_subscripts[i];
if (c == ',') {
continue;
}
CHECK(einsum_symbols_set.test(c))
<< "Character " << c
<< " is not a valid symbol.";
if ((i == 0 || tmp_subscripts[i - 1] != c) &&
(i == len_tmp_subscripts - 1 || tmp_subscripts[i + 1] != c) &&
!out_ellipse_set.test(c)) {
output_subscript.append(1, c);
}
}
subscripts += "->" + out_ellipse + output_subscript;
}
}
// Build output string if does not exist
std::vector<std::string> ret(2);
if (subscripts.find("->") != std::string::npos) {
ret = split(subscripts, "->");
} else {
ret[0] = subscripts;
ret[1] = "";
// Build output subscripts
// Same implicit-output rule as above: keep labels that occur exactly once.
std::string tmp_subscripts = subscripts;
size_t len_tmp_subscripts = tmp_subscripts.length();
std::sort(tmp_subscripts.begin(), tmp_subscripts.end());
for (size_t i = 0; i < len_tmp_subscripts; ++i) {
const char& c = tmp_subscripts[i];
if (c == ',') {
continue;
}
CHECK(einsum_symbols_set.test(c))
<< "Character " << c
<< " is not a valid symbol.";
if ((i == 0 || tmp_subscripts[i - 1] != c) &&
(i == len_tmp_subscripts - 1 || tmp_subscripts[i + 1] != c)) {
ret[1].append(1, c);
}
}
}
// Make sure output subscripts are in the input
std::bitset<MAXAXIS> input_subscripts_set = str2set(ret[0]);
for (const char& c : ret[1]) {
CHECK(input_subscripts_set.test(c))
<< "Output character " << c
<< " did not appear in the input";
}
// Make sure number operands is equivalent to the number of terms
CHECK_EQ(std::count(ret[0].begin(), ret[0].end(), ',') + 1, shapes.size())
<< "Number of einsum subscripts must be equal to the "
<< "number of operands.";
return ret;
}
// Shape inference for _npi_einsum: validates each operand's subscripts against
// its shape, records per-label dimension sizes (broadcast-aware: a size-1 dim
// may coexist with a larger one), and derives the output shape from the
// output subscript. Returns false until all input shapes are known.
bool NumpyEinsumShape(const nnvm::NodeAttrs& attrs,
mxnet::ShapeVector *in_attrs,
mxnet::ShapeVector *out_attrs) {
const NumpyEinsumParam &param = nnvm::get<NumpyEinsumParam>(attrs.parsed);
const std::string& subscripts = param.subscripts;
int num_args = param.num_args;
CHECK_EQ(in_attrs->size(), num_args);
CHECK_EQ(out_attrs->size(), 1U);
// Defer inference until every operand's shape is known.
for (int i = 0; i < num_args; i++) {
if (!shape_is_known(in_attrs->at(i))) {
return false;
}
}
// Parsing
std::vector<std::string> parsed_subscripts = _parse_einsum_input(subscripts, *in_attrs);
// Build a few useful list and sets
std::vector<std::string> input_list = split(parsed_subscripts[0], ",");
size_t isize = input_list.size();
// Get length of each unique dimension and ensure all dimensions are correct
// dimension_dict maps a subscript character to its dim size; -1 means unseen.
dim_t dimension_dict[MAXAXIS];
memset(dimension_dict, -1, sizeof(dimension_dict));
for (size_t i = 0; i < isize; ++i) {
const std::string& term = input_list[i];
const TShape& sh = in_attrs->at(i);
CHECK_EQ(sh.ndim(), term.length())
<< "Einstein sum subscript " << input_list[i]
<< " does not contain the "
<< "correct number of indices for operand " << i << ".";
size_t len_term = term.length();
for (size_t j = 0; j < len_term; ++j) {
dim_t dim = sh[j];
const char& c = term[j];
if (dimension_dict[static_cast<int>(c)] != -1) {
// For broadcasting cases we always want the largest dim size
if (dimension_dict[static_cast<int>(c)] == 1) {
dimension_dict[static_cast<int>(c)] = dim;
}
CHECK(dim == 1 || dim == dimension_dict[static_cast<int>(c)])
<< "Size of label '" << c
<< "' for operand " << i
<< " (" << dimension_dict[static_cast<int>(c)]
<< ") does not match previous terms ("
<< dim << ").";
} else {
dimension_dict[static_cast<int>(c)] = dim;
}
}
}
// Get oshape
const std::string& output_str = parsed_subscripts[1];
size_t odim = output_str.size();
TShape oshape(odim, -1);
for (size_t i = 0; i < odim; ++i) {
oshape[i] = dimension_dict[static_cast<int>(output_str[i])];
}
SHAPE_ASSIGN_CHECK(*out_attrs, 0, oshape);
// Guard against index_t overflow for very large operands/outputs.
size_t lim = static_cast<size_t>(std::numeric_limits<index_t>::max());
for (int i = 0; i < num_args; ++i) {
CHECK_LE(in_attrs->at(i).Size(), lim)
<< "Size of operand " << i
<< " exceeds the maximum index."
<< " Try setting `USE_INT64_TENSOR_SIZE`.";
}
CHECK_LE(oshape.Size(), lim)
<< "Size of output"
<< " exceeds the maximum index."
<< " Try setting `USE_INT64_TENSOR_SIZE`.";
return shape_is_known(oshape);
}
// Creates the per-node operator state (EinsumOp) carrying the einsum
// configuration: operand count, the optimize flag, and the subscripts string.
// ctx/in_shapes/in_types are unused here.
OpStatePtr CreateEinsumState(const NodeAttrs& attrs,
Context ctx,
const mxnet::ShapeVector& in_shapes,
const std::vector<int>& in_types) {
const NumpyEinsumParam& param = dmlc::get<NumpyEinsumParam>(attrs.parsed);
return OpStatePtr::Create<EinsumOp>(param.num_args, param.optimize, param.subscripts);
}
// Registers the einsum parameter struct and the forward/backward operators.
DMLC_REGISTER_PARAMETER(NumpyEinsumParam);
// Forward op: variadic inputs (one per operand, named arg0..argN-1),
// single output, stateful compute via EinsumOp (see CreateEinsumState).
NNVM_REGISTER_OP(_npi_einsum)
.describe(R"doc()doc" ADD_FILELINE)
.set_attr_parser(ParamParser<NumpyEinsumParam>)
.set_num_inputs([](const nnvm::NodeAttrs& attrs) {
const NumpyEinsumParam& param = dmlc::get<NumpyEinsumParam>(attrs.parsed);
return static_cast<uint32_t>(param.num_args);
})
.set_num_outputs(1)
.set_attr<std::string>("key_var_num_args", "num_args")
.set_attr<nnvm::FListInputNames>("FListInputNames",
[](const nnvm::NodeAttrs& attrs) {
int num_args = dmlc::get<NumpyEinsumParam>(attrs.parsed).num_args;
std::vector<std::string> ret;
ret.reserve(num_args);
for (int i = 0; i < num_args; i++) {
ret.push_back(std::string("arg") + std::to_string(i));
}
return ret;
})
.set_attr<mxnet::FInferShape>("FInferShape", NumpyEinsumShape)
.set_attr<nnvm::FInferType>("FInferType", ElemwiseType<-1, 1>)
.set_attr<FCreateOpState>("FCreateOpState", CreateEinsumState)
.set_attr<FResourceRequest>("FResourceRequest",
[](const NodeAttrs& attrs) {
return std::vector<ResourceRequest>(1, ResourceRequest::kTempSpace);
})
.set_attr<FStatefulCompute>("FStatefulCompute<cpu>", NumpyEinsumForward<cpu>)
.set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseIn{"_backward_npi_einsum"})
// NOTE(review): "eimsum" below is a typo in a user-visible description
// string; fixing it changes emitted docs, so it is left untouched here.
.add_argument("data", "NDArray-or-Symbol[]", "List of eimsum operands")
.add_arguments(NumpyEinsumParam::__FIELDS__());
// Backward op: inputs are the output gradient plus the original operands
// (num_args + 1); produces one gradient per operand.
NNVM_REGISTER_OP(_backward_npi_einsum)
.set_attr_parser(ParamParser<NumpyEinsumParam>)
.set_num_inputs([](const nnvm::NodeAttrs& attrs) {
const NumpyEinsumParam& param = dmlc::get<NumpyEinsumParam>(attrs.parsed);
return static_cast<uint32_t>(param.num_args + 1);
})
.set_num_outputs([](const nnvm::NodeAttrs& attrs) {
const NumpyEinsumParam& param = dmlc::get<NumpyEinsumParam>(attrs.parsed);
return static_cast<uint32_t>(param.num_args);
})
.set_attr<bool>("TIsLayerOpBackward", true)
.set_attr<nnvm::TIsBackward>("TIsBackward", true)
.set_attr<FResourceRequest>("FResourceRequest",
[](const NodeAttrs& attrs) {
return std::vector<ResourceRequest>(1, ResourceRequest::kTempSpace);
})
.set_attr<FStatefulCompute>("FStatefulCompute<cpu>", NumpyEinsumBackward<cpu>);
} // namespace op
} // namespace mxnet
| apache-2.0 |
mprobst/closure-compiler | src/com/google/javascript/jscomp/TypeTransformation.java | 30898 | /*
* Copyright 2014 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.MoreObjects.firstNonNull;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.javascript.jscomp.parsing.TypeTransformationParser;
import com.google.javascript.jscomp.parsing.TypeTransformationParser.Keywords;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;
import com.google.javascript.rhino.jstype.JSTypeRegistry;
import com.google.javascript.rhino.jstype.ObjectType;
import com.google.javascript.rhino.jstype.StaticTypedScope;
import com.google.javascript.rhino.jstype.StaticTypedSlot;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
/**
* A class for processing type transformation expressions
*
* @author [email protected] (Luis Fernando Pino Duque)
*/
class TypeTransformation {
private static final String VIRTUAL_FILE = "<TypeTransformation.java>";
static final DiagnosticType UNKNOWN_TYPEVAR =
DiagnosticType.warning("TYPEVAR_UNDEFINED",
"Reference to an unknown type variable {0}");
static final DiagnosticType UNKNOWN_STRVAR =
DiagnosticType.warning("UNKNOWN_STRVAR",
"Reference to an unknown string variable {0}");
static final DiagnosticType UNKNOWN_TYPENAME =
DiagnosticType.warning("TYPENAME_UNDEFINED",
"Reference to an unknown type name {0}");
static final DiagnosticType BASETYPE_INVALID =
DiagnosticType.warning("BASETYPE_INVALID",
"The type {0} cannot be templatized");
static final DiagnosticType TEMPTYPE_INVALID =
DiagnosticType.warning("TEMPTYPE_INVALID",
"Expected templatized type in {0} found {1}");
static final DiagnosticType INDEX_OUTOFBOUNDS =
DiagnosticType.warning("INDEX_OUTOFBOUNDS",
"Index out of bounds in templateTypeOf: expected a number less than {0}, found {1}");
static final DiagnosticType DUPLICATE_VARIABLE =
DiagnosticType.warning("DUPLICATE_VARIABLE",
"The variable {0} is already defined");
// This warning is never exercised.
static final DiagnosticType UNKNOWN_NAMEVAR =
DiagnosticType.warning("UNKNOWN_NAMEVAR",
"Reference to an unknown name variable {0}");
static final DiagnosticType RECTYPE_INVALID =
DiagnosticType.warning("RECTYPE_INVALID",
"The first parameter of a maprecord must be a record type, "
+ "found {0}");
static final DiagnosticType MAPRECORD_BODY_INVALID =
DiagnosticType.warning("MAPRECORD_BODY_INVALID",
"The body of a maprecord function must evaluate to a record type or "
+ "a no type, found {0}");
static final DiagnosticType VAR_UNDEFINED =
DiagnosticType.warning("VAR_UNDEFINED",
"Variable {0} is undefined in the scope");
static final DiagnosticType INVALID_CTOR =
DiagnosticType.warning("INVALID_CTOR",
"Expected a constructor type, found {0}");
static final DiagnosticType RECPARAM_INVALID =
DiagnosticType.warning("RECPARAM_INVALID",
"Expected a record type, found {0}");
static final DiagnosticType PROPTYPE_INVALID =
DiagnosticType.warning("PROPTYPE_INVALID",
"Expected object type, found {0}");
private final AbstractCompiler compiler;
private final JSTypeRegistry registry;
private final StaticTypedScope typeEnv;
/**
* A helper class for holding the information about the type variables
* and the name variables in maprecord expressions
*/
private static class NameResolver {
ImmutableMap<String, JSType> typeVars;
ImmutableMap<String, String> nameVars;
NameResolver(ImmutableMap<String, JSType> typeVars, ImmutableMap<String, String> nameVars) {
this.typeVars = typeVars;
this.nameVars = nameVars;
}
}
@SuppressWarnings("unchecked")
TypeTransformation(AbstractCompiler compiler, StaticTypedScope typeEnv) {
this.compiler = compiler;
this.registry = compiler.getTypeRegistry();
this.typeEnv = typeEnv;
}
private boolean isTypeVar(Node n) {
return n.isName();
}
private boolean isTypeName(Node n) {
return n.isString();
}
private boolean isBooleanOperation(Node n) {
return n.isAnd() || n.isOr() || n.isNot();
}
private Keywords nameToKeyword(String s) {
return TypeTransformationParser.Keywords.valueOf(s.toUpperCase());
}
  /**
   * Resolves a type name against the type registry and, failing that, the
   * typed scope. Returns {@code null} when the name cannot be resolved at all.
   *
   * <p>Resolution order: (1) a registered type of that name; (2) the type of a
   * scope slot with that name, unwrapping constructors/interfaces to their raw
   * instance type and enum elements to their enumerated type; (3) a typedef
   * declared in the slot's JSDoc.
   */
  private JSType getType(String typeName) {
    JSType type = registry.getType(typeEnv, typeName);
    if (type != null) {
      return type;
    }
    StaticTypedSlot slot = typeEnv.getSlot(typeName);
    type = slot != null ? slot.getType() : null;
    if (type != null) {
      if (type.isConstructor() || type.isInterface()) {
        // A ctor/interface name used as a type denotes its (raw) instance type.
        return type.toMaybeFunctionType().getInstanceType().getRawType();
      }
      if (type.isEnumElementType()) {
        return type.getEnumeratedTypeOfEnumElement();
      }
      return type;
    }
    // Fall back to a @typedef attached to the slot, if any.
    JSDocInfo jsdoc = slot == null ? null : slot.getJSDocInfo();
    if (jsdoc != null && jsdoc.hasTypedefType()) {
      return this.registry.evaluateTypeExpression(jsdoc.getTypedefType(), typeEnv);
    }
    return null;
  }
private JSType getUnknownType() {
return registry.getNativeObjectType(JSTypeNative.UNKNOWN_TYPE);
}
private JSType getNoType() {
return registry.getNativeObjectType(JSTypeNative.NO_TYPE);
}
private JSType getAllType() {
return registry.getNativeType(JSTypeNative.ALL_TYPE);
}
private JSType getObjectType() {
return registry.getNativeType(JSTypeNative.OBJECT_TYPE);
}
private JSType createUnionType(JSType[] variants) {
return registry.createUnionType(Arrays.asList(variants));
}
private JSType createTemplatizedType(ObjectType baseType, JSType[] params) {
return registry.instantiateGenericType(baseType, ImmutableList.copyOf(params));
}
private JSType createRecordType(ImmutableMap<String, JSType> props) {
return this.registry.createRecordType(props);
}
private void reportWarning(Node n, DiagnosticType msg, String... param) {
compiler.report(JSError.make(n, msg, param));
}
private <T> ImmutableMap<String, T> addNewEntry(
ImmutableMap<String, T> map, String name, T type) {
return new ImmutableMap.Builder<String, T>()
.putAll(map)
.put(name, type)
.build();
}
private String getFunctionParameter(Node n, int i) {
Preconditions.checkArgument(n.isFunction(), "Expected a function node, found %s", n);
return n.getSecondChild().getChildAtIndex(i).getString();
}
private String getCallName(Node n) {
Preconditions.checkArgument(n.isCall(), "Expected a call node, found %s", n);
return n.getFirstChild().getString();
}
private Node getCallArgument(Node n, int i) {
Preconditions.checkArgument(n.isCall(), "Expected a call node, found %s", n);
return n.getChildAtIndex(i + 1);
}
private int getCallParamCount(Node n) {
Preconditions.checkArgument(n.isCall(), "Expected a call node, found %s", n);
return n.getChildCount() - 1;
}
// TODO(dimvar): rewrite the uses of this method to use siblings() and delete it.
// Copying is unnecessarily inefficient.
private ImmutableList<Node> getCallParams(Node n) {
Preconditions.checkArgument(n.isCall(), "Expected a call node, found %s", n);
ImmutableList.Builder<Node> builder = new ImmutableList.Builder<>();
for (int i = 0; i < getCallParamCount(n); i++) {
builder.add(getCallArgument(n, i));
}
return builder.build();
}
private Node getComputedPropValue(Node n) {
Preconditions.checkArgument(
n.isComputedProp(), "Expected a computed property node, found %s", n);
return n.getSecondChild();
}
private String getComputedPropName(Node n) {
Preconditions.checkArgument(
n.isComputedProp(), "Expected a computed property node, found %s", n);
return n.getFirstChild().getString();
}
/** Evaluates the type transformation expression and returns the resulting type.
*
* @param ttlAst The node representing the type transformation expression
* @param typeVars The environment containing the information about the type variables
* @return JSType The resulting type after the transformation
*/
JSType eval(Node ttlAst, ImmutableMap<String, JSType> typeVars) {
return eval(ttlAst, typeVars, ImmutableMap.of());
}
/** Evaluates the type transformation expression and returns the resulting type.
*
* @param ttlAst The node representing the type transformation expression
* @param typeVars The environment containing the information about the type variables
* @param nameVars The environment containing the information about the name variables
* @return JSType The resulting type after the transformation
*/
@SuppressWarnings("unchecked")
@VisibleForTesting
JSType eval(Node ttlAst, ImmutableMap<String, JSType> typeVars,
ImmutableMap<String, String> nameVars) {
JSType result = evalInternal(ttlAst, new NameResolver(typeVars, nameVars));
return result.isEmptyType() ? getUnknownType() : result;
}
private JSType evalInternal(Node ttlAst, NameResolver nameResolver) {
if (isTypeName(ttlAst)) {
return evalTypeName(ttlAst);
}
if (isTypeVar(ttlAst)) {
return evalTypeVar(ttlAst, nameResolver);
}
String name = getCallName(ttlAst);
Keywords keyword = nameToKeyword(name);
switch (keyword.kind) {
case TYPE_CONSTRUCTOR:
return evalTypeExpression(ttlAst, nameResolver);
case OPERATION:
return evalOperationExpression(ttlAst, nameResolver);
default:
throw new IllegalStateException(
"Could not evaluate the type transformation expression");
}
}
private JSType evalOperationExpression(Node ttlAst, NameResolver nameResolver) {
String name = getCallName(ttlAst);
Keywords keyword = nameToKeyword(name);
switch (keyword) {
case COND:
return evalConditional(ttlAst, nameResolver);
case MAPUNION:
return evalMapunion(ttlAst, nameResolver);
case MAPRECORD:
return evalMaprecord(ttlAst, nameResolver);
case TYPEOFVAR:
return evalTypeOfVar(ttlAst);
case INSTANCEOF:
return evalInstanceOf(ttlAst, nameResolver);
case PRINTTYPE:
return evalPrintType(ttlAst, nameResolver);
case PROPTYPE:
return evalPropType(ttlAst, nameResolver);
default:
throw new IllegalStateException("Invalid type transformation operation");
}
}
private JSType evalTypeExpression(Node ttlAst, NameResolver nameResolver) {
String name = getCallName(ttlAst);
Keywords keyword = nameToKeyword(name);
switch (keyword) {
case TYPE:
return evalTemplatizedType(ttlAst, nameResolver);
case UNION:
return evalUnionType(ttlAst, nameResolver);
case NONE:
return getNoType();
case ALL:
return getAllType();
case UNKNOWN:
return getUnknownType();
case RAWTYPEOF:
return evalRawTypeOf(ttlAst, nameResolver);
case TEMPLATETYPEOF:
return evalTemplateTypeOf(ttlAst, nameResolver);
case RECORD:
return evalRecordType(ttlAst, nameResolver);
case TYPEEXPR:
return evalNativeTypeExpr(ttlAst);
default:
throw new IllegalStateException("Invalid type expression");
}
}
private JSType evalTypeName(Node ttlAst) {
String typeName = ttlAst.getString();
JSType resultingType = getType(typeName);
// If the type name is not defined then return UNKNOWN and report a warning
if (resultingType == null) {
reportWarning(ttlAst, UNKNOWN_TYPENAME, typeName);
return getUnknownType();
}
return resultingType;
}
private JSType evalTemplatizedType(Node ttlAst, NameResolver nameResolver) {
ImmutableList<Node> params = getCallParams(ttlAst);
JSType firstParam = evalInternal(params.get(0), nameResolver);
if (firstParam.isFullyInstantiated()) {
reportWarning(ttlAst, BASETYPE_INVALID, firstParam.toString());
return getUnknownType();
}
// TODO(lpino): Check that the number of parameters correspond with the
// number of template types that the base type can take when creating
// a templatized type. For instance, if the base type is Array then there
// must be just one parameter.
JSType[] templatizedTypes = new JSType[params.size() - 1];
for (int i = 0; i < templatizedTypes.length; i++) {
templatizedTypes[i] = evalInternal(params.get(i + 1), nameResolver);
}
ObjectType baseType = firstParam.toMaybeObjectType();
return createTemplatizedType(baseType, templatizedTypes);
}
private JSType evalTypeVar(Node ttlAst, NameResolver nameResolver) {
String typeVar = ttlAst.getString();
JSType resultingType = nameResolver.typeVars.get(typeVar);
// If the type variable is not defined then return UNKNOWN and report a warning
if (resultingType == null) {
reportWarning(ttlAst, UNKNOWN_TYPEVAR, typeVar);
return getUnknownType();
}
return resultingType;
}
private JSType evalUnionType(Node ttlAst, NameResolver nameResolver) {
// Get the parameters of the union
ImmutableList<Node> params = getCallParams(ttlAst);
int paramCount = params.size();
// Create an array of types after evaluating each parameter
JSType[] basicTypes = new JSType[paramCount];
for (int i = 0; i < paramCount; i++) {
basicTypes[i] = evalInternal(params.get(i), nameResolver);
}
return createUnionType(basicTypes);
}
private JSType[] evalTypeParams(Node ttlAst, NameResolver nameResolver) {
ImmutableList<Node> params = getCallParams(ttlAst);
int paramCount = params.size();
JSType[] result = new JSType[paramCount];
for (int i = 0; i < paramCount; i++) {
result[i] = evalInternal(params.get(i), nameResolver);
}
return result;
}
private String evalString(Node ttlAst, NameResolver nameResolver) {
if (ttlAst.isName()) {
// Return the empty string if the name variable cannot be resolved
if (!nameResolver.nameVars.containsKey(ttlAst.getString())) {
reportWarning(ttlAst, UNKNOWN_STRVAR, ttlAst.getString());
return "";
}
return nameResolver.nameVars.get(ttlAst.getString());
}
return ttlAst.getString();
}
private String[] evalStringParams(Node ttlAst, NameResolver nameResolver) {
ImmutableList<Node> params = getCallParams(ttlAst);
int paramCount = params.size();
String[] result = new String[paramCount];
for (int i = 0; i < paramCount; i++) {
result[i] = evalString(params.get(i), nameResolver);
}
return result;
}
private boolean evalTypePredicate(Node ttlAst, NameResolver nameResolver) {
JSType[] params = evalTypeParams(ttlAst, nameResolver);
String name = getCallName(ttlAst);
Keywords keyword = nameToKeyword(name);
JSType type = params[0];
switch (keyword) {
case EQ:
return type.isEquivalentTo(params[1]);
case SUB:
return type.isSubtypeOf(params[1]);
case ISCTOR:
return type.isConstructor();
case ISTEMPLATIZED:
return type.isObjectType() && type.toMaybeObjectType().isGenericObjectType()
&& type.isPartiallyInstantiated();
case ISRECORD:
return type.isRecordType();
case ISUNKNOWN:
return type.isSomeUnknownType();
default:
throw new IllegalStateException(
"Invalid type predicate in the type transformation");
}
}
private boolean evalStringPredicate(Node ttlAst,
NameResolver nameResolver) {
String[] params = evalStringParams(ttlAst, nameResolver);
// If any of the parameters evaluates to the empty string then they were
// not resolved by the name resolver. In this case we always return false.
for (int i = 0; i < params.length; i++) {
if (params[i].isEmpty()) {
return false;
}
}
String name = getCallName(ttlAst);
Keywords keyword = nameToKeyword(name);
switch (keyword) {
case STREQ:
return params[0].equals(params[1]);
default:
throw new IllegalStateException(
"Invalid string predicate in the type transformation");
}
}
private boolean evalTypevarPredicate(Node ttlAst, NameResolver nameResolver) {
String name = getCallName(ttlAst);
Keywords keyword = nameToKeyword(name);
switch (keyword) {
case ISDEFINED:
return nameResolver.typeVars.containsKey(getCallArgument(ttlAst, 0).getString());
default:
throw new IllegalStateException(
"Invalid typevar predicate in the type transformation");
}
}
private boolean evalBooleanOperation(Node ttlAst, NameResolver nameResolver) {
boolean param0 = evalBoolean(ttlAst.getFirstChild(), nameResolver);
if (ttlAst.isNot()) {
return !param0;
}
if (ttlAst.isAnd()) {
return param0 && evalBoolean(ttlAst.getLastChild(), nameResolver);
}
if (ttlAst.isOr()) {
return param0 || evalBoolean(ttlAst.getLastChild(), nameResolver);
}
throw new IllegalStateException(
"Invalid boolean predicate in the type transformation");
}
private boolean evalBoolean(Node ttlAst, NameResolver nameResolver) {
if (isBooleanOperation(ttlAst)) {
return evalBooleanOperation(ttlAst, nameResolver);
}
String name = getCallName(ttlAst);
Keywords keyword = nameToKeyword(name);
switch (keyword.kind) {
case STRING_PREDICATE:
return evalStringPredicate(ttlAst, nameResolver);
case TYPE_PREDICATE:
return evalTypePredicate(ttlAst, nameResolver);
case TYPEVAR_PREDICATE:
return evalTypevarPredicate(ttlAst, nameResolver);
default:
throw new IllegalStateException(
"Invalid boolean predicate in the type transformation");
}
}
private JSType evalConditional(Node ttlAst, NameResolver nameResolver) {
ImmutableList<Node> params = getCallParams(ttlAst);
if (evalBoolean(params.get(0), nameResolver)) {
return evalInternal(params.get(1), nameResolver);
} else {
return evalInternal(params.get(2), nameResolver);
}
}
private JSType evalMapunion(Node ttlAst, NameResolver nameResolver) {
ImmutableList<Node> params = getCallParams(ttlAst);
Node unionParam = params.get(0);
Node mapFunction = params.get(1);
String paramName = getFunctionParameter(mapFunction, 0);
// The mapunion variable must not be defined in the environment
if (nameResolver.typeVars.containsKey(paramName)) {
reportWarning(ttlAst, DUPLICATE_VARIABLE, paramName);
return getUnknownType();
}
Node mapFunctionBody = NodeUtil.getFunctionBody(mapFunction);
JSType unionType = evalInternal(unionParam, nameResolver);
// If the first parameter does not correspond to a union type then
// consider it as a union with a single type and evaluate
if (!unionType.isUnionType()) {
NameResolver newNameResolver = new NameResolver(
addNewEntry(nameResolver.typeVars, paramName, unionType),
nameResolver.nameVars);
return evalInternal(mapFunctionBody, newNameResolver);
}
// Otherwise obtain the elements in the union type. Note that the block
// above guarantees the casting to be safe
Collection<JSType> unionElms = ImmutableList.copyOf(unionType.getUnionMembers());
// Evaluate the map function body using each element in the union type
int unionSize = unionElms.size();
JSType[] newUnionElms = new JSType[unionSize];
int i = 0;
for (JSType elm : unionElms) {
NameResolver newNameResolver = new NameResolver(
addNewEntry(nameResolver.typeVars, paramName, elm),
nameResolver.nameVars);
newUnionElms[i] = evalInternal(mapFunctionBody, newNameResolver);
i++;
}
return createUnionType(newUnionElms);
}
private JSType evalRawTypeOf(Node ttlAst, NameResolver nameResolver) {
ImmutableList<Node> params = getCallParams(ttlAst);
JSType type = evalInternal(params.get(0), nameResolver);
if (!type.isGenericObjectType()) {
reportWarning(ttlAst, TEMPTYPE_INVALID, "rawTypeOf", type.toString());
return getUnknownType();
}
return type.toMaybeObjectType().getRawType();
}
private JSType evalTemplateTypeOf(Node ttlAst, NameResolver nameResolver) {
ImmutableList<Node> params = getCallParams(ttlAst);
JSType type = evalInternal(params.get(0), nameResolver);
if (!type.isGenericObjectType()) {
reportWarning(ttlAst, TEMPTYPE_INVALID, "templateTypeOf", type.toString());
return getUnknownType();
}
int index = (int) params.get(1).getDouble();
ImmutableList<? extends JSType> templateTypes = type.toMaybeObjectType().getTemplateTypes();
if (index >= templateTypes.size()) {
reportWarning(ttlAst, INDEX_OUTOFBOUNDS,
Integer.toString(templateTypes.size()), Integer.toString(index));
return getUnknownType();
}
return templateTypes.get(index);
}
private JSType evalRecord(Node record, NameResolver nameResolver) {
Map<String, JSType> props = new LinkedHashMap<>();
for (Node propNode : record.children()) {
// If it is a computed property then find the property name using the resolver
if (propNode.isComputedProp()) {
String compPropName = getComputedPropName(propNode);
// If the name does not exist then report a warning
if (!nameResolver.nameVars.containsKey(compPropName)) {
reportWarning(record, UNKNOWN_NAMEVAR, compPropName);
return getUnknownType();
}
// Otherwise add the property
Node propValue = getComputedPropValue(propNode);
String resolvedName = nameResolver.nameVars.get(compPropName);
JSType resultingType = evalInternal(propValue, nameResolver);
props.put(resolvedName, resultingType);
} else {
String propName = propNode.getString();
JSType resultingType = evalInternal(propNode.getFirstChild(),
nameResolver);
props.put(propName, resultingType);
}
}
return this.registry.createRecordType(props);
}
private JSType evalRecordParam(Node ttlAst, NameResolver nameResolver) {
if (ttlAst.isObjectLit()) {
return evalRecord(ttlAst, nameResolver);
}
// The parameter of record can be a type transformation expression
return evalInternal(ttlAst, nameResolver);
}
private JSType evalRecordType(Node ttlAst, NameResolver nameResolver) {
int paramCount = getCallParamCount(ttlAst);
ImmutableList.Builder<ObjectType> recTypesBuilder = new ImmutableList.Builder<>();
for (int i = 0; i < paramCount; i++) {
JSType type = evalRecordParam(getCallArgument(ttlAst, i), nameResolver);
// Check that each parameter evaluates to an object
ObjectType objType = type.toMaybeObjectType();
if (objType == null || objType.isUnknownType()) {
reportWarning(ttlAst, RECPARAM_INVALID, type.toString());
return getUnknownType();
}
JSType recType = this.registry.buildRecordTypeFromObject(objType);
if (!recType.isEquivalentTo(getObjectType())) {
recTypesBuilder.add(recType.toMaybeObjectType());
}
}
return joinRecordTypes(recTypesBuilder.build());
}
  /**
   * Inserts {@code newPropName} into {@code props}, merging with an existing
   * entry only when both the old and new values are record types (in which
   * case their properties are joined); otherwise the new value wins.
   *
   * <p>Mutates {@code props} in place.
   */
  private void putNewPropInPropertyMap(Map<String, JSType> props,
      String newPropName, JSType newPropValue) {
    // TODO(lpino): Decide if the best strategy is to collapse the properties
    // to a union type or not. So far, new values replace the old ones except
    // if they are two record types in which case the properties are joined
    // together
    // Three cases:
    // (i) If the key does not exist then add it to the map with the new value
    // (ii) If the key to be added already exists in the map and the new value
    // is not a record type then the current value is replaced with the new one
    // (iii) If the new value is a record type and the current is not then
    // the current value is replaced with the new one
    if (!props.containsKey(newPropName)
        || !newPropValue.isRecordType()
        || !props.get(newPropName).isRecordType()) {
      props.put(newPropName, newPropValue);
      return;
    }
    // Otherwise join the current value with the new one since both are records
    props.put(newPropName,
        joinRecordTypes(ImmutableList.of(
            (ObjectType) props.get(newPropName),
            (ObjectType) newPropValue)));
  }
/**
* Merges a list of record types.
* Example
* {r:{s:string, n:number}} and {a:boolean}
* is transformed into {r:{s:string, n:number}, a:boolean}
*/
private JSType joinRecordTypes(ImmutableList<ObjectType> recTypes) {
Map<String, JSType> props = new LinkedHashMap<>();
for (ObjectType recType : recTypes) {
for (String newPropName : recType.getOwnPropertyNames()) {
JSType newPropValue = recType.getPropertyType(newPropName);
// Put the new property depending if it already exists in the map
putNewPropInPropertyMap(props, newPropName, newPropValue);
}
}
return createRecordType(ImmutableMap.copyOf(props));
}
private JSType evalMaprecord(Node ttlAst, NameResolver nameResolver) {
Node recordNode = ttlAst.getSecondChild();
Node mapFunction = ttlAst.getChildAtIndex(2);
JSType type = evalInternal(recordNode, nameResolver);
// If it is an empty record type (Object) then return
if (type.isEquivalentTo(getObjectType())) {
return getObjectType();
}
// The parameter must be a valid record type
if (!type.isRecordType()) {
// TODO(lpino): Decide how to handle non-record types
reportWarning(recordNode, RECTYPE_INVALID, type.toString());
return getUnknownType();
}
ObjectType objtype = type.toMaybeObjectType();
// Fetch the information of the map function
String paramKey = getFunctionParameter(mapFunction, 0);
String paramValue = getFunctionParameter(mapFunction, 1);
// The maprecord variables must not be defined in the environment
if (nameResolver.nameVars.containsKey(paramKey)) {
reportWarning(ttlAst, DUPLICATE_VARIABLE, paramKey);
return getUnknownType();
}
if (nameResolver.typeVars.containsKey(paramValue)) {
reportWarning(ttlAst, DUPLICATE_VARIABLE, paramValue);
return getUnknownType();
}
// Compute the new properties using the map function
Node mapFnBody = NodeUtil.getFunctionBody(mapFunction);
Map<String, JSType> newProps = new LinkedHashMap<>();
for (String propName : objtype.getOwnPropertyNames()) {
// The value of the current property
JSType propValue = objtype.getPropertyType(propName);
// Evaluate the map function body with paramValue and paramKey replaced
// by the values of the current property
NameResolver newNameResolver = new NameResolver(
addNewEntry(nameResolver.typeVars, paramValue, propValue),
addNewEntry(nameResolver.nameVars, paramKey, propName));
JSType body = evalInternal(mapFnBody, newNameResolver);
// If the body returns unknown then the whole expression returns unknown
if (body.isUnknownType()) {
return getUnknownType();
}
// Skip the property when the body evaluates to NO_TYPE
// or the empty record (Object)
if (body.isEmptyType() || body.isEquivalentTo(getObjectType())) {
continue;
}
// Otherwise the body must evaluate to a record type
if (!body.isRecordType()) {
reportWarning(ttlAst, MAPRECORD_BODY_INVALID, body.toString());
return getUnknownType();
}
// Add the properties of the resulting record type to the original one
ObjectType bodyAsObj = body.toMaybeObjectType();
for (String newPropName : bodyAsObj.getOwnPropertyNames()) {
JSType newPropValue = bodyAsObj.getPropertyType(newPropName);
// If the key already exists then we have to mix it with the current property value
putNewPropInPropertyMap(newProps, newPropName, newPropValue);
}
}
return createRecordType(ImmutableMap.copyOf(newProps));
}
private JSType evalTypeOfVar(Node ttlAst) {
String name = getCallArgument(ttlAst, 0).getString();
StaticTypedSlot slot = typeEnv.getSlot(name);
JSType type = slot != null ? slot.getType() : null;
if (type == null) {
reportWarning(ttlAst, VAR_UNDEFINED, name);
return getUnknownType();
}
return type;
}
private JSType evalInstanceOf(Node ttlAst, NameResolver nameResolver) {
JSType type = evalInternal(getCallArgument(ttlAst, 0), nameResolver);
if (type.isUnknownType() || !type.isConstructor()) {
reportWarning(ttlAst, INVALID_CTOR, type.getDisplayName());
return getUnknownType();
}
return type.toMaybeFunctionType().getInstanceType();
}
private JSType evalNativeTypeExpr(Node ttlAst) {
JSTypeExpression expr = new JSTypeExpression(getCallArgument(ttlAst, 0), VIRTUAL_FILE);
return this.registry.evaluateTypeExpression(expr, this.typeEnv);
}
  /**
   * Evaluates printType(msg, expr): prints the message followed by the
   * evaluated type and returns the type unchanged, so the expression can be
   * used transparently inside a larger TTL expression for debugging.
   */
  private JSType evalPrintType(Node ttlAst, NameResolver nameResolver) {
    JSType type = evalInternal(getCallArgument(ttlAst, 1), nameResolver);
    String msg = getCallArgument(ttlAst, 0).getString() + type;
    // NOTE(review): writes straight to stdout rather than via the compiler's
    // error manager; this looks intentional for the printType debug keyword.
    System.out.println(msg);
    return type;
  }
private JSType evalPropType(Node ttlAst, NameResolver nameResolver) {
JSType type = evalInternal(getCallArgument(ttlAst, 1), nameResolver);
ObjectType objType = type.toMaybeObjectType();
if (objType == null) {
reportWarning(ttlAst, PROPTYPE_INVALID, type.toString());
return getUnknownType();
}
JSType propType = objType.getPropertyType(getCallArgument(ttlAst, 0).getString());
return firstNonNull(propType, getUnknownType());
}
}
| apache-2.0 |
google/syzygy | syzygy/refinery/types/test_typenames.h | 918 | // Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef SYZYGY_REFINERY_TYPES_TEST_TYPENAMES_H_
#define SYZYGY_REFINERY_TYPES_TEST_TYPENAMES_H_
namespace testing {
// Function implemented to ensure all the test types make it into the
// associated PDB file.
void AliasTypes();
} // namespace testing
#endif // SYZYGY_REFINERY_TYPES_TEST_TYPENAMES_H_
| apache-2.0 |
xhorn/xchorizon | horizon/conf/panel_template/panel.py | 280 | from django.utils.translation import ugettext_lazy as _
import horizon
from {{ dash_path }} import dashboard
# Panel scaffold: the ``{{ panel_name }}`` placeholders are substituted when
# the template is rendered, producing a concrete horizon Panel subclass.
class {{ panel_name|title }}(horizon.Panel):
    # Display name shown in the dashboard navigation (marked translatable).
    name = _("{{ panel_name|title }}")
    # URL-friendly identifier used in routing for this panel.
    slug = "{{ panel_name|slugify }}"
# Attach the panel to its parent dashboard so horizon can discover it.
dashboard.register({{ panel_name|title }})
| apache-2.0 |
raffaeu/Syncho | src/node_modules/kontainer-di/examples/express/services/todo_service.js | 2050 | 'use strict';
/**
 * Factory for the todo service. `database` is a knex-style connection:
 * calling `database('Todo')` returns a chainable query builder whose terminal
 * methods return promises.
 *
 * Fix: the original used the non-standard, long-removed `Promise.defer()`
 * deferred API. All methods now build on the standard promise chain returned
 * by the query itself; every rejection reason string is kept byte-identical.
 */
var TodoServiceFactory = function(database){
  return {
    // Return all todos in the database, newest first.
    getTodos: function(){
      return database('Todo').select().orderBy('createdAt', 'desc');
    },
    // Return a single todo by Id; rejects when no row matches.
    getTodo: function(id){
      return database('Todo').where('id', id).select()
        .then(function(rows){
          if(rows.length === 0){
            // Throwing inside .then rejects the returned promise.
            throw 'TodoService: not found';
          }
          return rows[0];
        });
    },
    // Update a todo in the database; resolves with the todo on success.
    updateTodo: function(todo){
      //TODO: real-world validation
      return database('Todo').update({
        text: todo.text,
        completed: todo.completed
      })
      .where('id', todo.id)
      .then(function(affectedRows){
        if(affectedRows !== 1){
          throw 'Not found';
        }
        return todo;
      });
    },
    // Create a new todo; resolves with the todo, now carrying its new id.
    createTodo: function(todo){
      //TODO: real-world validation
      return database('Todo').insert(todo)
        // Catch is attached to the insert only, matching the original scope.
        .catch(function(err){
          throw 'TodoService: create failed. Error:' + err.toString();
        })
        .then(function(idArray){
          // Return the newly created todo with its generated id.
          todo.id = idArray[0];
          return todo;
        });
    },
    // Delete a todo specified by Id; resolves true on success.
    deleteTodo: function(todoId){
      return database('Todo').where('id', todoId).del()
        // Catch is attached to the delete only, matching the original scope.
        .catch(function(err){
          throw 'TodoService: delete failed. Error' + err.toString();
        })
        .then(function(affectedRows){
          if(affectedRows !== 1){
            throw 'TodoService: not found';
          }
          return true;
        });
    }
  }
}
module.exports = TodoServiceFactory; | apache-2.0 |
gardenia/jitsi-hammer | src/net/java/sip/communicator/impl/protocol/jabber/extensions/jingle/RemoteCandidatePacketExtension.java | 1149 | /*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.impl.protocol.jabber.extensions.jingle;
/**
 * A representation of the <tt>remote-candidate</tt> ICE transport element.
 *
 * @author Emil Ivov
 */
public class RemoteCandidatePacketExtension extends CandidatePacketExtension
{
    /**
     * The name of the "remote-candidate" element.
     */
    public static final String ELEMENT_NAME = "remote-candidate";

    /**
     * Creates a new {@link RemoteCandidatePacketExtension}, registering the
     * <tt>remote-candidate</tt> element name with the base candidate class.
     */
    public RemoteCandidatePacketExtension()
    {
        super(ELEMENT_NAME);
    }
}
| apache-2.0 |
boundlessgeo/sdk | src/components/map/zoom-control.js | 2469 | /*
* Copyright 2015-present Boundless Spatial Inc., http://boundlessgeo.com
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import {connect} from 'react-redux';
import PropTypes from 'prop-types';
import * as mapActions from '../../actions/map';
/** @module components/map/zoom-control
* @example
* import SdkZoomControl from '@boundlessgeo/sdk/components/map/zoom-control';
* import { Provider } from 'react-redux';
* import SdkMap from '@boundlessgeo/sdk/components/map';
* import ReactDOM from 'react-dom';
*
* ReactDOM.render(<Provider store={store}>
* <SdkMap>
* <SdkZoomControl />
* </SdkMap>
* </Provider>, document.getElementById('map'));
*
* @desc Provides 2 buttons to zoom the map (zoom in and out).
*/
class ZoomControl extends React.Component {
render() {
let className = 'sdk-zoom-control';
if (this.props.className) {
className += ' ' + this.props.className;
}
return (
<div className={className} style={this.props.style}>
<button className='sdk-zoom-in' onClick={this.props.zoomIn} title={this.props.zoomInTitle}>+</button>
<button className='sdk-zoom-out' onClick={this.props.zoomOut} title={this.props.zoomOutTitle}>{'\u2212'}</button>
</div>
);
}
}
// Runtime prop validation for ZoomControl.
ZoomControl.propTypes = {
  /**
   * Css className for the root div.
   */
  className: PropTypes.string,
  /**
   * Style config object for root div.
   */
  style: PropTypes.object,
  /**
   * Title for the zoom in button.
   */
  zoomInTitle: PropTypes.string,
  /**
   * Title for the zoom out button.
   */
  zoomOutTitle: PropTypes.string,
};

// Tooltip defaults used when the embedding application supplies none.
ZoomControl.defaultProps = {
  zoomInTitle: 'Zoom in',
  zoomOutTitle: 'Zoom out',
};
/**
 * Binds the zoom redux actions to props consumed by ZoomControl.
 */
function mapDispatchToProps(dispatch) {
  return {
    zoomIn() {
      dispatch(mapActions.zoomIn());
    },
    zoomOut() {
      dispatch(mapActions.zoomOut());
    },
  };
}
export default connect(null, mapDispatchToProps)(ZoomControl);
| apache-2.0 |
coursera/shift | shift/src/main/java/org/coursera/android/shift/ShiftActionsFragment.java | 2029 | /*
Copyright 2015 Coursera Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.coursera.android.shift;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
/**
 * Tab fragment that lists every action registered with the ShiftManager in a
 * vertically-spaced RecyclerView.
 */
public class ShiftActionsFragment extends ViewPagerFragment {

    private static final String TAB_TITLE_ACTIONS = "Actions";

    public ShiftActionsFragment() {
        super(TAB_TITLE_ACTIONS);
    }

    public static ShiftActionsFragment getNewInstance() {
        return new ShiftActionsFragment();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        final View rootView = inflater.inflate(R.layout.actions_fragment, container, false);

        final RecyclerView actionList = (RecyclerView) rootView.findViewById(R.id.recycler_view);
        actionList.setHasFixedSize(true);
        actionList.setLayoutManager(new LinearLayoutManager(getActivity()));
        // Space the cards apart vertically.
        actionList.addItemDecoration(new VerticalMarginItemDecoration(
                (int) getResources().getDimension(R.dimen.card_margin)));
        actionList.setAdapter(new ShiftActionRecyclerViewAdapter(getActivity(),
                ShiftManager.getInstance().getActionManager().getActionList()));

        return rootView;
    }
}
| apache-2.0 |
mongodb-csharp/mongodb-csharp | source/MongoDB/Protocol/MsgMessage.cs | 1486 | using MongoDB.Bson;
namespace MongoDB.Protocol
{
/// <summary>
/// Deprecated. OP_MSG sends a diagnostic message to the database.
/// The database sends back a fixed response.
/// </summary>
/// <remarks>
/// Wire layout:
/// struct {
///     MsgHeader header;    // standard message header
///     cstring message;     // message for the database
/// }
/// </remarks>
internal class MsgMessage : RequestMessageBase
{
    /// <summary>
    /// Creates a message whose header opcode is already set to OP_MSG.
    /// </summary>
    public MsgMessage()
        : base(new BsonWriterSettings())
    {
        Header = new MessageHeader(OpCode.Msg);
    }

    /// <summary>
    /// The diagnostic text sent to the database.
    /// </summary>
    public string Message { get; set; }

    /// <summary>
    /// Serializes the body: the message as a cstring (no BSON length prefix).
    /// </summary>
    protected override void WriteBody(BsonWriter writer)
    {
        writer.Write(Message, false);
    }

    /// <summary>
    /// Computes the body's wire size for the header's total-length field.
    /// </summary>
    protected override int CalculateBodySize(BsonWriter writer)
    {
        return writer.CalculateSize(Message, false);
    }
}
} | apache-2.0 |
kandu-community/android | kandu-android/trunk/src/com/inomma/kandu/server/RequestMethod.java | 80 | package com.inomma.kandu.server;
/**
 * HTTP verbs used when issuing requests to the Kandu server.
 */
public enum RequestMethod {
    POST,GET,PUT;
}
| apache-2.0 |
dslomov/bazel | src/main/java/com/google/devtools/build/lib/rules/apple/AppleToolchain.java | 7703 | // Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.apple;
import static com.google.devtools.build.lib.packages.Attribute.attr;
import static com.google.devtools.build.lib.packages.BuildType.LABEL;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.analysis.RuleDefinition;
import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.Attribute.LabelLateBoundDefault;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.RuleClass.Builder.RuleClassType;
import com.google.devtools.build.lib.skylarkbuildapi.apple.AppleToolchainApi;
import java.io.Serializable;
/**
 * Utility class for resolving items for the Apple toolchain (such as common tool flags, and paths).
 */
@Immutable
public class AppleToolchain implements AppleToolchainApi<AppleConfiguration> {
  // These next two strings are shared secrets with the xcrunwrapper.sh to allow
  // expansion of DeveloperDir and SDKRoot and runtime, since they aren't known
  // until compile time on any given build machine.
  private static final String DEVELOPER_DIR = "__BAZEL_XCODE_DEVELOPER_DIR__";
  private static final String SDKROOT_DIR = "__BAZEL_XCODE_SDKROOT__";

  // These two paths are framework paths relative to SDKROOT.
  @VisibleForTesting
  public static final String DEVELOPER_FRAMEWORK_PATH = "/Developer/Library/Frameworks";
  @VisibleForTesting
  public static final String SYSTEM_FRAMEWORK_PATH = "/System/Library/Frameworks";

  // There is a handy reference to many clang warning flags at
  // http://nshipster.com/clang-diagnostics/
  // There is also a useful narrative for many Xcode settings at
  // http://www.xs-labs.com/en/blog/2011/02/04/xcode-build-settings/
  public static final ImmutableMap<String, String> DEFAULT_WARNINGS =
      new ImmutableMap.Builder<String, String>()
          .put("GCC_WARN_64_TO_32_BIT_CONVERSION", "-Wshorten-64-to-32")
          .put("CLANG_WARN_BOOL_CONVERSION", "-Wbool-conversion")
          .put("CLANG_WARN_CONSTANT_CONVERSION", "-Wconstant-conversion")
          // Double-underscores are intentional - thanks Xcode.
          .put("CLANG_WARN__DUPLICATE_METHOD_MATCH", "-Wduplicate-method-match")
          .put("CLANG_WARN_EMPTY_BODY", "-Wempty-body")
          .put("CLANG_WARN_ENUM_CONVERSION", "-Wenum-conversion")
          .put("CLANG_WARN_INT_CONVERSION", "-Wint-conversion")
          .put("CLANG_WARN_UNREACHABLE_CODE", "-Wunreachable-code")
          .put("GCC_WARN_ABOUT_RETURN_TYPE", "-Wmismatched-return-types")
          .put("GCC_WARN_UNDECLARED_SELECTOR", "-Wundeclared-selector")
          .put("GCC_WARN_UNINITIALIZED_AUTOS", "-Wuninitialized")
          .put("GCC_WARN_UNUSED_FUNCTION", "-Wunused-function")
          .put("GCC_WARN_UNUSED_VARIABLE", "-Wunused-variable")
          .build();

  /** Returns the platform directory inside of Xcode for a platform name. */
  public static String platformDir(String platformName) {
    return developerDir() + "/Platforms/" + platformName + ".platform";
  }

  /**
   * Returns the SDK root placeholder, expanded by xcrunwrapper.sh at execution time.
   */
  public static String sdkDir() {
    return SDKROOT_DIR;
  }

  /**
   * Returns the Developer directory placeholder, expanded by xcrunwrapper.sh at execution time.
   */
  public static String developerDir() {
    return DEVELOPER_DIR;
  }

  /**
   * Returns the platform frameworks directory inside of Xcode for a given {@link ApplePlatform}.
   */
  public static String platformDeveloperFrameworkDir(ApplePlatform platform) {
    String platformDir = platformDir(platform.getNameInPlist());
    // Use the shared constant rather than repeating its literal so the two
    // spellings of this path cannot silently drift apart.
    return platformDir + DEVELOPER_FRAMEWORK_PATH;
  }

  /** Returns the SDK frameworks directory inside of Xcode for a given configuration. */
  public static String sdkFrameworkDir(ApplePlatform targetPlatform, XcodeConfigInfo xcodeConfig) {
    String relativePath;
    switch (targetPlatform) {
      case IOS_DEVICE:
      case IOS_SIMULATOR:
        // From the iOS 9.0 SDK onward the frameworks live under /System.
        if (xcodeConfig
                .getSdkVersionForPlatform(targetPlatform)
                .compareTo(DottedVersion.fromStringUnchecked("9.0"))
            >= 0) {
          relativePath = SYSTEM_FRAMEWORK_PATH;
        } else {
          relativePath = DEVELOPER_FRAMEWORK_PATH;
        }
        break;
      case MACOS:
      case WATCHOS_DEVICE:
      case WATCHOS_SIMULATOR:
      case TVOS_DEVICE:
      case TVOS_SIMULATOR:
        relativePath = SYSTEM_FRAMEWORK_PATH;
        break;
      default:
        throw new IllegalArgumentException("Unhandled platform " + targetPlatform);
    }
    return sdkDir() + relativePath;
  }

  /** The default label of the build-wide {@code xcode_config} configuration rule. */
  public static LabelLateBoundDefault<AppleConfiguration> getXcodeConfigLabel(
      String toolsRepository) {
    return LabelLateBoundDefault.fromTargetConfiguration(
        AppleConfiguration.class,
        Label.parseAbsoluteUnchecked(
            toolsRepository + AppleCommandLineOptions.DEFAULT_XCODE_VERSION_CONFIG_LABEL),
        (Attribute.LateBoundDefault.Resolver<AppleConfiguration, Label> & Serializable)
            (rule, attributes, appleConfig) -> appleConfig.getXcodeConfigLabel());
  }

  /**
   * Returns the platform directory inside of Xcode for a given configuration.
   */
  @Override
  public String sdkDirConstant() {
    return sdkDir();
  }

  /**
   * Returns the Developer directory inside of Xcode for a given configuration.
   */
  @Override
  public String developerDirConstant() {
    return developerDir();
  }

  /**
   * Returns the platform frameworks directory inside of Xcode for a given configuration.
   */
  @Override
  public String platformFrameworkDirFromConfig(AppleConfiguration configuration) {
    return platformDeveloperFrameworkDir(configuration.getSingleArchPlatform());
  }

  /**
   * Base rule definition to be ancestor for rules which may require an xcode toolchain.
   */
  public static class RequiresXcodeConfigRule implements RuleDefinition {
    private final String toolsRepository;

    public RequiresXcodeConfigRule(String toolsRepository) {
      this.toolsRepository = toolsRepository;
    }

    @Override
    public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) {
      return builder
          .add(
              attr(XcodeConfigRule.XCODE_CONFIG_ATTR_NAME, LABEL)
                  .allowedRuleClasses("xcode_config")
                  .checkConstraints()
                  .direct_compile_time_input()
                  .value(getXcodeConfigLabel(toolsRepository)))
          .build();
    }

    @Override
    public Metadata getMetadata() {
      return RuleDefinition.Metadata.builder()
          .name("$requires_xcode_config")
          .type(RuleClassType.ABSTRACT)
          .build();
    }
  }
}
| apache-2.0 |
evanw/flatbuffers | tests/namespace_test/NamespaceA/NamespaceB/TableInNestedNS.py | 693 | # automatically generated, do not modify
# namespace: NamespaceB
import flatbuffers
class TableInNestedNS(object):
    """Generated read-only accessor for the NamespaceA.NamespaceB.TableInNestedNS table."""
    __slots__ = ['_tab']

    # TableInNestedNS
    def Init(self, buf, pos):
        # Position this accessor over the table located at `pos` inside `buf`.
        self._tab = flatbuffers.table.Table(buf, pos)

    # TableInNestedNS
    def Foo(self):
        # `foo` is the table's first field (vtable offset 4). A zero offset
        # means the field was not written; return the schema default 0 then.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0
# Generated builder helpers for serializing a TableInNestedNS with a flatbuffers.Builder.
def TableInNestedNSStart(builder): builder.StartObject(1)  # the table declares 1 field
def TableInNestedNSAddFoo(builder, foo): builder.PrependInt32Slot(0, foo, 0)  # slot 0, default 0
def TableInNestedNSEnd(builder): return builder.EndObject()
| apache-2.0 |
rnirmal/openstack-dashboard | django-openstack/django_openstack/templates/django_openstack/dash/networks/rename.html | 1019 | {% extends 'django_openstack/dash/base.html' %}
{% block sidebar %}
{% with current_sidebar="networks" %}
{{block.super}}
{% endwith %}
{% endblock %}
{% block page_header %}
{# to make searchable false, just remove it from the include statement #}
{% include "django_openstack/common/_page_header.html" with title="Rename Network" %}
{% endblock page_header %}
{% block headerjs %}
<script type="text/javascript">
$(document).ready(function() {
$('input#id_new_name').val('{{network.network.name}}');
});
</script>
{% endblock headerjs %}
{% block dash_main %}
<div class="dash_block">
<div class="left">
{% include 'django_openstack/dash/networks/_rename_form.html' with form=rename_form %}
<h3><a href="{% url dash_networks request.user.tenant_id %}"><< Return to networks list</a></h3>
</div>
<div class="right">
<h3>Rename:</h3>
<p>Enter a new name for your network.</p>
</div>
<div class="clear"> </div>
</div>
{% endblock %}
| apache-2.0 |
kumy/cgeo | main/src/cgeo/geocaching/files/FileSelectionListAdapter.java | 2548 | package cgeo.geocaching.files;
import cgeo.geocaching.R;
import cgeo.geocaching.ui.recyclerview.AbstractRecyclerViewAdapter;
import cgeo.geocaching.ui.recyclerview.AbstractRecyclerViewHolder;
import android.graphics.Typeface;
import android.support.annotation.NonNull;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.TextView;
import java.io.File;
import java.util.List;
import butterknife.BindView;
/**
 * RecyclerView adapter listing candidate files; tapping a row reports the
 * selection back to the owning view and closes the chooser.
 */
public class FileSelectionListAdapter extends AbstractRecyclerViewAdapter<FileSelectionListAdapter.ViewHolder> {

    // Owner view: receives the chosen file path and is closed after selection.
    private final IFileSelectionView parentView;
    // Backing data; adapter position i displays files.get(i).
    @NonNull private final List<File> files;

    public FileSelectionListAdapter(@NonNull final IFileSelectionView parentIn, @NonNull final List<File> listIn) {
        files = listIn;
        parentView = parentIn;
    }

    @Override
    public int getItemCount() {
        return files.size();
    }

    @Override
    public ViewHolder onCreateViewHolder(final ViewGroup parent, final int position) {
        final View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.mapfile_item, parent, false);
        final ViewHolder viewHolder = new ViewHolder(view);
        // Installed once per holder: it must look up the holder's *current*
        // position at click time, because holders are recycled across rows.
        viewHolder.itemView.setOnClickListener(new OnClickListener() {

            @Override
            public void onClick(final View view) {
                final File file = files.get(viewHolder.getItemPosition());
                parentView.setCurrentFile(file.toString());
                parentView.close();
            }
        });
        return viewHolder;
    }

    @Override
    public void onBindViewHolder(final ViewHolder holder, final int position) {
        super.onBindViewHolder(holder, position);
        final File file = files.get(position);
        final String currentFile = parentView.getCurrentFile();
        // Bold the row matching the currently selected file; reset otherwise
        // (required, since recycled holders may carry a bold typeface over).
        if (currentFile != null && file.equals(new File(currentFile))) {
            holder.filename.setTypeface(holder.filename.getTypeface(), Typeface.BOLD);
        } else {
            holder.filename.setTypeface(holder.filename.getTypeface(), Typeface.NORMAL);
        }
        holder.filepath.setText(file.getParent());
        holder.filename.setText(file.getName());
    }

    // Row holder; @BindView fields are bound by ButterKnife in the base class.
    protected static final class ViewHolder extends AbstractRecyclerViewHolder {
        @BindView(R.id.mapfilepath) TextView filepath;
        @BindView(R.id.mapfilename) TextView filename;

        ViewHolder(final View view) {
            super(view);
        }
    }
}
| apache-2.0 |
tianjianinline/azkaban-psbc | azkaban-solo-server/src/main/bash/azkaban-solo-shutdown.sh | 302 | #!/usr/bin/env bash
# Shutdown script for azkaban solo server.
# Reads the pid recorded at startup, kills the process (with retries via
# util.sh), and removes the pid file on success.

set -o nounset

source "$(dirname $0)/util.sh"

installdir="$(dirname $0)/.."
maxattempt=3
pidfile="${installdir}/currentpid"
pname="solo server"

# Fail fast with a clear message if the server was never started; otherwise
# `cat` errors out and kill_process_with_retry receives an empty pid.
if [[ ! -f ${pidfile} ]]; then
  echo "Cannot find ${pidfile}; is the ${pname} running?" >&2
  exit 1
fi

pid=$(cat "${pidfile}")

kill_process_with_retry "${pid}" "${pname}" "${maxattempt}" && rm -f "${pidfile}"
google/nearby-connections | internal/platform/implementation/windows/generated/winrt/Windows.UI.Xaml.Resources.h | 9435 | // WARNING: Please don't edit this file. It was generated by C++/WinRT v2.0.210930.14
#pragma once
#ifndef WINRT_Windows_UI_Xaml_Resources_H
#define WINRT_Windows_UI_Xaml_Resources_H
#include "winrt/base.h"
static_assert(winrt::check_version(CPPWINRT_VERSION, "2.0.210930.14"), "Mismatched C++/WinRT headers.");
#define CPPWINRT_VERSION "2.0.210930.14"
#include "winrt/Windows.UI.Xaml.h"
#include "winrt/impl/Windows.UI.Xaml.Resources.2.h"
// Consume side: forwards each projected call through the matching ABI
// interface, translating failure HRESULTs into C++ exceptions.
namespace winrt::impl
{
    template <typename D> WINRT_IMPL_AUTO(winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader) consume_Windows_UI_Xaml_Resources_ICustomXamlResourceLoaderFactory<D>::CreateInstance(winrt::Windows::Foundation::IInspectable const& baseInterface, winrt::Windows::Foundation::IInspectable& innerInterface) const
    {
        void* value{};
        check_hresult(WINRT_IMPL_SHIM(winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderFactory)->CreateInstance(*(void**)(&baseInterface), impl::bind_out(innerInterface), &value));
        return winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader{ value, take_ownership_from_abi };
    }
    template <typename D> WINRT_IMPL_AUTO(winrt::Windows::Foundation::IInspectable) consume_Windows_UI_Xaml_Resources_ICustomXamlResourceLoaderOverrides<D>::GetResource(param::hstring const& resourceId, param::hstring const& objectType, param::hstring const& propertyName, param::hstring const& propertyType) const
    {
        void* result{};
        check_hresult(WINRT_IMPL_SHIM(winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderOverrides)->GetResource(*(void**)(&resourceId), *(void**)(&objectType), *(void**)(&propertyName), *(void**)(&propertyType), &result));
        return winrt::Windows::Foundation::IInspectable{ result, take_ownership_from_abi };
    }
    template <typename D> WINRT_IMPL_AUTO(winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader) consume_Windows_UI_Xaml_Resources_ICustomXamlResourceLoaderStatics<D>::Current() const
    {
        void* value{};
        check_hresult(WINRT_IMPL_SHIM(winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderStatics)->get_Current(&value));
        return winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader{ value, take_ownership_from_abi };
    }
    template <typename D> WINRT_IMPL_AUTO(void) consume_Windows_UI_Xaml_Resources_ICustomXamlResourceLoaderStatics<D>::Current(winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader const& value) const
    {
        check_hresult(WINRT_IMPL_SHIM(winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderStatics)->put_Current(*(void**)(&value)));
    }
    // Produce side: implements the raw ABI vtables on top of a C++ implementation
    // type D, translating C++ exceptions back into HRESULTs.
#ifndef WINRT_LEAN_AND_MEAN
    template <typename D>
    struct produce<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoader> : produce_base<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoader>
    {
    };
#endif
#ifndef WINRT_LEAN_AND_MEAN
    template <typename D>
    struct produce<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderFactory> : produce_base<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderFactory>
    {
        int32_t __stdcall CreateInstance(void* baseInterface, void** innerInterface, void** value) noexcept final try
        {
            if (innerInterface) *innerInterface = nullptr;
            winrt::Windows::Foundation::IInspectable winrt_impl_innerInterface;
            clear_abi(value);
            typename D::abi_guard guard(this->shim());
            *value = detach_from<winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader>(this->shim().CreateInstance(*reinterpret_cast<winrt::Windows::Foundation::IInspectable const*>(&baseInterface), winrt_impl_innerInterface));
            if (innerInterface) *innerInterface = detach_abi(winrt_impl_innerInterface);
            return 0;
        }
        catch (...) { return to_hresult(); }
    };
#endif
    template <typename D>
    struct produce<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderOverrides> : produce_base<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderOverrides>
    {
        int32_t __stdcall GetResource(void* resourceId, void* objectType, void* propertyName, void* propertyType, void** result) noexcept final try
        {
            clear_abi(result);
            typename D::abi_guard guard(this->shim());
            *result = detach_from<winrt::Windows::Foundation::IInspectable>(this->shim().GetResource(*reinterpret_cast<hstring const*>(&resourceId), *reinterpret_cast<hstring const*>(&objectType), *reinterpret_cast<hstring const*>(&propertyName), *reinterpret_cast<hstring const*>(&propertyType)));
            return 0;
        }
        catch (...) { return to_hresult(); }
    };
#ifndef WINRT_LEAN_AND_MEAN
    template <typename D>
    struct produce<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderStatics> : produce_base<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderStatics>
    {
        int32_t __stdcall get_Current(void** value) noexcept final try
        {
            clear_abi(value);
            typename D::abi_guard guard(this->shim());
            *value = detach_from<winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader>(this->shim().Current());
            return 0;
        }
        catch (...) { return to_hresult(); }
        int32_t __stdcall put_Current(void* value) noexcept final try
        {
            typename D::abi_guard guard(this->shim());
            this->shim().Current(*reinterpret_cast<winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader const*>(&value));
            return 0;
        }
        catch (...) { return to_hresult(); }
    };
#endif
    // Dispatches overridable calls to a derived override when one is composed,
    // otherwise falls through to the base implementation.
    template <typename T, typename D>
    struct __declspec(empty_bases) produce_dispatch_to_overridable<T, D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderOverrides>
        : produce_dispatch_to_overridable_base<T, D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderOverrides>
    {
        auto GetResource(hstring const& resourceId, hstring const& objectType, hstring const& propertyName, hstring const& propertyType)
        {
            if (auto overridable = this->shim_overridable())
            {
                return overridable.GetResource(resourceId, objectType, propertyName, propertyType);
            }
            return this->shim().GetResource(resourceId, objectType, propertyName, propertyType);
        }
    };
}
// Public projection: the types application code actually uses.
WINRT_EXPORT namespace winrt::Windows::UI::Xaml::Resources
{
    inline CustomXamlResourceLoader::CustomXamlResourceLoader()
    {
        winrt::Windows::Foundation::IInspectable baseInterface, innerInterface;
        *this = impl::call_factory<CustomXamlResourceLoader, ICustomXamlResourceLoaderFactory>([&](ICustomXamlResourceLoaderFactory const& f) { return f.CreateInstance(baseInterface, innerInterface); });
    }
    inline auto CustomXamlResourceLoader::Current()
    {
        return impl::call_factory_cast<winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader(*)(ICustomXamlResourceLoaderStatics const&), CustomXamlResourceLoader, ICustomXamlResourceLoaderStatics>([](ICustomXamlResourceLoaderStatics const& f) { return f.Current(); });
    }
    inline auto CustomXamlResourceLoader::Current(winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader const& value)
    {
        impl::call_factory<CustomXamlResourceLoader, ICustomXamlResourceLoaderStatics>([&](ICustomXamlResourceLoaderStatics const& f) { return f.Current(value); });
    }
    template <typename D> WINRT_IMPL_AUTO(winrt::Windows::Foundation::IInspectable) ICustomXamlResourceLoaderOverridesT<D>::GetResource(param::hstring const& resourceId, param::hstring const& objectType, param::hstring const& propertyName, param::hstring const& propertyType) const
    {
        return shim().template try_as<ICustomXamlResourceLoaderOverrides>().GetResource(resourceId, objectType, propertyName, propertyType);
    }
    // CRTP base for user classes deriving from CustomXamlResourceLoader.
    template <typename D, typename... Interfaces>
    struct CustomXamlResourceLoaderT :
        implements<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderOverrides, composing, Interfaces...>,
        impl::require<D, winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoader>,
        impl::base<D, CustomXamlResourceLoader>,
        winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderOverridesT<D>
    {
        using composable = CustomXamlResourceLoader;
    protected:
        CustomXamlResourceLoaderT()
        {
            impl::call_factory<CustomXamlResourceLoader, ICustomXamlResourceLoaderFactory>([&](ICustomXamlResourceLoaderFactory const& f) { [[maybe_unused]] auto winrt_impl_discarded = f.CreateInstance(*this, this->m_inner); });
        }
    };
}
// std::hash specializations so projected types work as unordered-container keys.
namespace std
{
#ifndef WINRT_LEAN_AND_MEAN
    template<> struct hash<winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoader> : winrt::impl::hash_base {};
    template<> struct hash<winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderFactory> : winrt::impl::hash_base {};
    template<> struct hash<winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderOverrides> : winrt::impl::hash_base {};
    template<> struct hash<winrt::Windows::UI::Xaml::Resources::ICustomXamlResourceLoaderStatics> : winrt::impl::hash_base {};
    template<> struct hash<winrt::Windows::UI::Xaml::Resources::CustomXamlResourceLoader> : winrt::impl::hash_base {};
#endif
#ifdef __cpp_lib_format
#endif
}
#endif
| apache-2.0 |
pkman/droolsjbpm-tools | drools-eclipse/org.drools.eclipse/src/main/java/org/drools/eclipse/flow/ruleflow/view/property/constraint/StateConstraintListDialog.java | 5496 | /*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.eclipse.flow.ruleflow.view.property.constraint;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.drools.eclipse.flow.common.view.property.EditBeanDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.jbpm.workflow.core.Constraint;
import org.jbpm.workflow.core.WorkflowProcess;
import org.jbpm.workflow.core.impl.ConnectionRef;
import org.jbpm.workflow.core.impl.NodeImpl;
import org.jbpm.workflow.core.node.StateNode;
import org.kie.api.definition.process.Connection;
/**
 * Dialog for editing constraints.
 *
 * Presents one row per outgoing connection of a state node; each row's "Edit"
 * button opens a {@link RuleFlowConstraintDialog} for that connection's
 * constraint. Changes are accumulated in a working copy and only committed
 * when the dialog is accepted (via {@link #updateValue}).
 */
public class StateConstraintListDialog extends EditBeanDialog<Map<ConnectionRef, Constraint>> {

    // Process the node belongs to; needed by the nested constraint dialog.
    private WorkflowProcess process;
    // Node whose outgoing-connection constraints are being edited.
    private StateNode stateNode;
    // Working copy of the constraint map; edits land here, not in the original.
    private Map<ConnectionRef, Constraint> newMap;
    // Label showing the constraint name for each outgoing connection.
    private Map<Connection, Label> labels = new HashMap<Connection, Label>();

    protected StateConstraintListDialog(Shell parentShell, WorkflowProcess process,
            StateNode stateNode) {
        super(parentShell, "Edit Constraints");
        this.process = process;
        this.stateNode = stateNode;
    }

    /**
     * Builds a three-column grid: target-node label, constraint-name label,
     * and an "Edit" button, one row per outgoing connection.
     */
    protected Control createDialogArea(Composite parent) {
        Composite composite = (Composite) super.createDialogArea(parent);
        GridLayout gridLayout = new GridLayout();
        gridLayout.numColumns = 3;
        composite.setLayout(gridLayout);

        List<Connection> outgoingConnections = stateNode.getOutgoingConnections(NodeImpl.CONNECTION_DEFAULT_TYPE);
        labels.clear();
        for (Connection outgoingConnection: outgoingConnections) {
            Label label1 = new Label(composite, SWT.NONE);
            label1.setText("To node "
                + outgoingConnection.getTo().getName() + ": ");
            Label label2 = new Label(composite, SWT.NONE);
            labels.put(outgoingConnection, label2);
            GridData gridData = new GridData();
            gridData.grabExcessHorizontalSpace = true;
            gridData.horizontalAlignment = GridData.FILL;
            label2.setLayoutData(gridData);
            // Connections are keyed by (target node id, target type) in the map.
            Constraint constraint = newMap.get(
                new ConnectionRef(outgoingConnection.getTo().getId(), outgoingConnection.getToType()));
            if (constraint != null) {
                label2.setText(constraint.getName());
            }
            Button editButton = new Button(composite, SWT.NONE);
            editButton.setText("Edit");
            editButton.addSelectionListener(new EditButtonListener(
                outgoingConnection));
        }
        return composite;
    }

    public void setValue(Map<ConnectionRef, Constraint> value) {
        super.setValue(value);
        // Copy so that cancelling the dialog leaves the original map untouched.
        this.newMap = new HashMap<ConnectionRef, Constraint>((Map<ConnectionRef, Constraint>) value);
    }

    // Commits the working copy as the dialog's result.
    protected Map<ConnectionRef, Constraint> updateValue(Map<ConnectionRef, Constraint> value) {
        return newMap;
    }

    /**
     * Opens the constraint editor for one connection and, unless cancelled,
     * stores the result in the working copy and refreshes the row's label.
     */
    private void editItem(final Connection connection) {
        // NOTE(review): the Runnable is invoked synchronously on the current
        // (UI) thread; the indirection appears to be vestigial.
        final Runnable r = new Runnable() {
            public void run() {
                RuleFlowConstraintDialog dialog = new RuleFlowConstraintDialog(
                    getShell(), process);
                dialog.create();
                ConnectionRef connectionRef = new ConnectionRef(connection.getTo().getId(), connection.getToType());
                Constraint constraint = newMap.get(connectionRef);
                dialog.setConstraint(constraint);
                dialog.fixType(0);
                dialog.fixDialect(0);
                int code = dialog.open();
                if (code != CANCEL) {
                    constraint = dialog.getConstraint();
                    newMap.put(
                        connectionRef,
                        constraint);
                    setConnectionText(
                        (Label) labels.get(connection), constraint.getName());
                }
            }
        };
        r.run();
    }

    // Updates a row label on the SWT display thread.
    private void setConnectionText(final Label connection, final String name) {
        Display.getDefault().asyncExec(new Runnable() {
            public void run() {
                connection.setText(name);
            }
        });
    }

    // Opens the per-connection editor when its "Edit" button is pressed.
    private class EditButtonListener extends SelectionAdapter {
        private Connection connection;

        public EditButtonListener(Connection connection) {
            this.connection = connection;
        }

        public void widgetSelected(SelectionEvent e) {
            editItem(connection);
        }
    }
}
| apache-2.0 |
xsm110/Apache-Beam | runners/google-cloud-dataflow-java/src/test/java/org/apache/beam/runners/dataflow/DataflowMetricsTest.java | 10530 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.dataflow;
import static org.apache.beam.sdk.metrics.MetricMatchers.attemptedMetricsResult;
import static org.apache.beam.sdk.metrics.MetricMatchers.committedMetricsResult;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.api.services.dataflow.Dataflow;
import com.google.api.services.dataflow.model.Job;
import com.google.api.services.dataflow.model.JobMetrics;
import com.google.api.services.dataflow.model.MetricStructuredName;
import com.google.api.services.dataflow.model.MetricUpdate;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.io.IOException;
import java.math.BigDecimal;
import org.apache.beam.runners.dataflow.testing.TestDataflowPipelineOptions;
import org.apache.beam.sdk.PipelineResult.State;
import org.apache.beam.sdk.metrics.MetricQueryResults;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.util.NoopPathValidator;
import org.apache.beam.sdk.util.TestCredential;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
/**
 * Tests for {@link DataflowMetrics}.
 */
@RunWith(JUnit4.class)
public class DataflowMetricsTest {
  private static final String PROJECT_ID = "some-project";
  private static final String JOB_ID = "1234";
  private static final String REGION_ID = "some-region";
  private static final String REPLACEMENT_JOB_ID = "4321";

  @Mock
  private Dataflow mockWorkflowClient;
  @Mock
  private Dataflow.Projects mockProjects;
  @Mock
  private Dataflow.Projects.Locations mockLocations;
  @Mock
  private Dataflow.Projects.Locations.Jobs mockJobs;

  private TestDataflowPipelineOptions options;

  /** Wires the mocked Dataflow client hierarchy together and builds test pipeline options. */
  @Before
  public void setup() {
    MockitoAnnotations.initMocks(this);

    when(mockWorkflowClient.projects()).thenReturn(mockProjects);
    when(mockProjects.locations()).thenReturn(mockLocations);
    when(mockLocations.jobs()).thenReturn(mockJobs);

    options = PipelineOptionsFactory.as(TestDataflowPipelineOptions.class);
    options.setDataflowClient(mockWorkflowClient);
    options.setProject(PROJECT_ID);
    options.setRunner(DataflowRunner.class);
    options.setTempLocation("gs://fakebucket/temp");
    options.setPathValidatorClass(NoopPathValidator.class);
    options.setGcpCredential(new TestCredential());
  }

  /** An empty metric-update list yields empty counter and distribution results. */
  @Test
  public void testEmptyMetricUpdates() throws IOException {
    DataflowPipelineJob job = mock(DataflowPipelineJob.class);
    when(job.getState()).thenReturn(State.RUNNING);
    job.jobId = JOB_ID;

    JobMetrics jobMetrics = new JobMetrics();
    jobMetrics.setMetrics(ImmutableList.<MetricUpdate>of());
    DataflowClient dataflowClient = mock(DataflowClient.class);
    when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

    DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
    MetricQueryResults result = dataflowMetrics.queryMetrics();
    assertThat(ImmutableList.copyOf(result.counters()), is(empty()));
    assertThat(ImmutableList.copyOf(result.distributions()), is(empty()));
  }

  /**
   * Once the job is in a terminal state (DONE), the first query fetches job metrics
   * from the service and subsequent queries are served from the cache.
   */
  @Test
  public void testCachingMetricUpdates() throws IOException {
    DataflowPipelineJob job = mock(DataflowPipelineJob.class);
    when(job.getState()).thenReturn(State.DONE);
    job.jobId = JOB_ID;

    JobMetrics jobMetrics = new JobMetrics();
    jobMetrics.setMetrics(ImmutableList.<MetricUpdate>of());
    DataflowClient dataflowClient = mock(DataflowClient.class);
    when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

    DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
    verify(dataflowClient, times(0)).getJobMetrics(JOB_ID);
    dataflowMetrics.queryMetrics(null);
    verify(dataflowClient, times(1)).getJobMetrics(JOB_ID);
    dataflowMetrics.queryMetrics(null);
    // Still exactly one service call: the second query hit the cache.
    verify(dataflowClient, times(1)).getJobMetrics(JOB_ID);
  }

  /**
   * Builds a user counter {@link MetricUpdate} with a structured name.
   *
   * @param name metric name
   * @param namespace namespace recorded in the structured-name context
   * @param step step name recorded in the structured-name context
   * @param scalar counter value
   * @param tentative whether to mark the update as tentative (i.e. the attempted value)
   * @return the populated update
   */
  private MetricUpdate makeCounterMetricUpdate(String name, String namespace, String step,
      long scalar, boolean tentative) {
    MetricUpdate update = new MetricUpdate();
    update.setScalar(new BigDecimal(scalar));

    MetricStructuredName structuredName = new MetricStructuredName();
    structuredName.setName(name);
    structuredName.setOrigin("user");
    // Properly parameterized builder; the previous raw ImmutableMap.Builder
    // only generated unchecked-conversion warnings.
    ImmutableMap.Builder<String, String> contextBuilder = ImmutableMap.builder();
    contextBuilder.put("step", step)
        .put("namespace", namespace);
    if (tentative) {
      contextBuilder.put("tentative", "true");
    }
    structuredName.setContext(contextBuilder.build());
    update.setName(structuredName);
    return update;
  }

  /** A tentative/committed pair maps to attempted/committed results for one counter. */
  @Test
  public void testSingleCounterUpdates() throws IOException {
    JobMetrics jobMetrics = new JobMetrics();
    DataflowPipelineJob job = mock(DataflowPipelineJob.class);
    when(job.getState()).thenReturn(State.RUNNING);
    job.jobId = JOB_ID;

    // The parser relies on the fact that one tentative and one committed metric update exist in
    // the job metrics results.
    MetricUpdate mu1 = makeCounterMetricUpdate("counterName", "counterNamespace",
        "s2", 1234L, false);
    MetricUpdate mu1Tentative = makeCounterMetricUpdate("counterName",
        "counterNamespace", "s2", 1233L, true);
    jobMetrics.setMetrics(ImmutableList.of(mu1, mu1Tentative));
    DataflowClient dataflowClient = mock(DataflowClient.class);
    when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);

    DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
    MetricQueryResults result = dataflowMetrics.queryMetrics(null);
    assertThat(result.counters(), containsInAnyOrder(
        attemptedMetricsResult("counterNamespace", "counterName", "s2", 1233L)));
    assertThat(result.counters(), containsInAnyOrder(
        committedMetricsResult("counterNamespace", "counterName", "s2", 1234L)));
  }

  /** Distribution-style updates (bracketed names such as "[MIN]") are ignored. */
  @Test
  public void testIgnoreDistributionButGetCounterUpdates() throws IOException {
    JobMetrics jobMetrics = new JobMetrics();
    DataflowClient dataflowClient = mock(DataflowClient.class);
    when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);
    DataflowPipelineJob job = mock(DataflowPipelineJob.class);
    when(job.getState()).thenReturn(State.RUNNING);
    job.jobId = JOB_ID;

    // The parser relies on the fact that one tentative and one committed metric update exist in
    // the job metrics results.
    jobMetrics.setMetrics(ImmutableList.of(
        makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1233L, false),
        makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1234L, true),
        makeCounterMetricUpdate("otherCounter[MIN]", "otherNamespace", "s3", 0L, false),
        makeCounterMetricUpdate("otherCounter[MIN]", "otherNamespace", "s3", 0L, true)));

    DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
    MetricQueryResults result = dataflowMetrics.queryMetrics(null);
    assertThat(result.counters(), containsInAnyOrder(
        attemptedMetricsResult("counterNamespace", "counterName", "s2", 1234L)));
    assertThat(result.counters(), containsInAnyOrder(
        committedMetricsResult("counterNamespace", "counterName", "s2", 1233L)));
  }

  /** Multiple counters across steps/namespaces are all reported. */
  @Test
  public void testMultipleCounterUpdates() throws IOException {
    JobMetrics jobMetrics = new JobMetrics();
    DataflowClient dataflowClient = mock(DataflowClient.class);
    when(dataflowClient.getJobMetrics(JOB_ID)).thenReturn(jobMetrics);
    DataflowPipelineJob job = mock(DataflowPipelineJob.class);
    when(job.getState()).thenReturn(State.RUNNING);
    job.jobId = JOB_ID;

    // The parser relies on the fact that one tentative and one committed metric update exist in
    // the job metrics results.
    jobMetrics.setMetrics(ImmutableList.of(
        makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1233L, false),
        makeCounterMetricUpdate("counterName", "counterNamespace", "s2", 1234L, true),
        makeCounterMetricUpdate("otherCounter", "otherNamespace", "s3", 12L, false),
        makeCounterMetricUpdate("otherCounter", "otherNamespace", "s3", 12L, true),
        makeCounterMetricUpdate("counterName", "otherNamespace", "s4", 1200L, false),
        makeCounterMetricUpdate("counterName", "otherNamespace", "s4", 1233L, true)));

    DataflowMetrics dataflowMetrics = new DataflowMetrics(job, dataflowClient);
    MetricQueryResults result = dataflowMetrics.queryMetrics(null);
    assertThat(result.counters(), containsInAnyOrder(
        attemptedMetricsResult("counterNamespace", "counterName", "s2", 1234L),
        attemptedMetricsResult("otherNamespace", "otherCounter", "s3", 12L),
        attemptedMetricsResult("otherNamespace", "counterName", "s4", 1233L)));
    assertThat(result.counters(), containsInAnyOrder(
        committedMetricsResult("counterNamespace", "counterName", "s2", 1233L),
        committedMetricsResult("otherNamespace", "otherCounter", "s3", 12L),
        committedMetricsResult("otherNamespace", "counterName", "s4", 1200L)));
  }
}
| apache-2.0 |
hnafar/IronJS | Src/Tests/ietestcenter/chapter15/15.4/15.4.4/15.4.4.14/15.4.4.14-3-21.js | 2528 | /// Copyright (c) 2009 Microsoft Corporation
///
/// Redistribution and use in source and binary forms, with or without modification, are permitted provided
/// that the following conditions are met:
/// * Redistributions of source code must retain the above copyright notice, this list of conditions and
/// the following disclaimer.
/// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
/// the following disclaimer in the documentation and/or other materials provided with the distribution.
/// * Neither the name of Microsoft nor the names of its contributors may be used to
/// endorse or promote products derived from this software without specific prior written permission.
///
/// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
/// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
/// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
/// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
/// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
/// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
/// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
/// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// ES5 conformance test: Array.prototype.indexOf must coerce a non-primitive
// 'length' via ToNumber (valueOf first, falling back to toString).
ES5Harness.registerTest({
  id: "15.4.4.14-3-21",
  path: "TestCases/chapter15/15.4/15.4.4/15.4.4.14/15.4.4.14-3-21.js",

  description: "Array.prototype.indexOf - 'length' is an object that has an own valueOf method that returns an object and toString method that returns a string",

  test: function testcase() {
    // Flags recording which conversion hooks the implementation invoked
    // while coercing obj.length to a number.
    var toStringAccessed = false;
    var valueOfAccessed = false;

    var obj = {
      1: true,
      length: {
        toString: function () {
          toStringAccessed = true;
          return '2';
        },

        valueOf: function () {
          valueOfAccessed = true;
          // Non-primitive result forces the fallback to toString.
          return {};
        }
      }
    };

    // valueOf returns an object, so ToNumber must fall back to toString,
    // which yields '2' -> length 2; the element at index 1 is then found.
    // Both hooks must have been observed for the test to pass.
    return Array.prototype.indexOf.call(obj, true) === 1 && toStringAccessed && valueOfAccessed;
  },

  precondition: function prereq() {
    // Only meaningful on implementations that provide Array.prototype.indexOf.
    return fnExists(Array.prototype.indexOf);
  }
});
| apache-2.0 |
hsiun/yoyo | ulive/ULive-IOS-SDK/ucloud-ios-live-sdk-v1.2.7/ucloud-ios-live-sdk-v1.2.7/UCloudMediaRecorderDemo/UCloudMediaRecorderDemo/lib/Recorder/Filter/UCloudGPUImageNobleCornerDetectionFilter.h | 374 | #import "UCloudGPUImageHarrisCornerDetectionFilter.h"
/** Noble corner detector

 This is the Noble variant on the Harris detector, from
 Alison Noble, "Descriptions of Image Surfaces", PhD thesis, Department of Engineering Science, Oxford University 1989, p45.
 */
// NOTE(review): this header declares no new properties or methods; the
// behavioral difference from the Harris base class is presumably confined to
// the corner-response computation in the implementation file -- confirm
// against the corresponding .m.
@interface UCloudGPUImageNobleCornerDetectionFilter : UCloudGPUImageHarrisCornerDetectionFilter
@end
| apache-2.0 |
christophd/camel | dsl/camel-componentdsl/src/generated/java/org/apache/camel/builder/component/dsl/AtomixSetComponentBuilderFactory.java | 16066 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.component.dsl;
import javax.annotation.Generated;
import org.apache.camel.Component;
import org.apache.camel.builder.component.AbstractComponentBuilder;
import org.apache.camel.builder.component.ComponentBuilder;
import org.apache.camel.component.atomix.client.set.AtomixSetComponent;
/**
 * Access Atomix's distributed set.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.ComponentDslMojo")
public interface AtomixSetComponentBuilderFactory {

    /**
     * Atomix Set (camel-atomix)
     * Access Atomix's distributed set.
     *
     * Category: clustering
     * Since: 2.20
     * Maven coordinates: org.apache.camel:camel-atomix
     *
     * @return the dsl builder
     * @deprecated this factory is marked deprecated by the generator (see the
     *             {@code @Deprecated} annotation); retained for backward
     *             compatibility only
     */
    @Deprecated
    static AtomixSetComponentBuilder atomixSet() {
        return new AtomixSetComponentBuilderImpl();
    }

    /**
     * Builder for the Atomix Set component.
     */
    interface AtomixSetComponentBuilder
            extends
                ComponentBuilder<AtomixSetComponent> {
        /**
         * The Atomix instance to use.
         *
         * The option is a: <code>io.atomix.Atomix</code> type.
         *
         * Group: common
         *
         * @param atomix the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder atomix(io.atomix.Atomix atomix) {
            doSetProperty("atomix", atomix);
            return this;
        }
        /**
         * The shared component configuration.
         *
         * The option is a:
         * <code>org.apache.camel.component.atomix.client.set.AtomixSetConfiguration</code> type.
         *
         * Group: common
         *
         * @param configuration the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder configuration(
                org.apache.camel.component.atomix.client.set.AtomixSetConfiguration configuration) {
            doSetProperty("configuration", configuration);
            return this;
        }
        /**
         * The path to the AtomixClient configuration.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: common
         *
         * @param configurationUri the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder configurationUri(
                java.lang.String configurationUri) {
            doSetProperty("configurationUri", configurationUri);
            return this;
        }
        /**
         * The default action.
         *
         * The option is a:
         * <code>org.apache.camel.component.atomix.client.set.AtomixSet.Action</code> type.
         *
         * Default: ADD
         * Group: common
         *
         * @param defaultAction the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder defaultAction(
                org.apache.camel.component.atomix.client.set.AtomixSet.Action defaultAction) {
            doSetProperty("defaultAction", defaultAction);
            return this;
        }
        /**
         * The nodes the AtomixClient should connect to.
         *
         * The option is a:
         * <code>java.util.List&lt;io.atomix.catalyst.transport.Address&gt;</code> type.
         *
         * Group: common
         *
         * @param nodes the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder nodes(
                java.util.List<io.atomix.catalyst.transport.Address> nodes) {
            doSetProperty("nodes", nodes);
            return this;
        }
        /**
         * The header that wil carry the result.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Group: common
         *
         * @param resultHeader the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder resultHeader(
                java.lang.String resultHeader) {
            doSetProperty("resultHeader", resultHeader);
            return this;
        }
        /**
         * The class name (fqn) of the Atomix transport.
         *
         * The option is a: <code>java.lang.String</code> type.
         *
         * Default: io.atomix.catalyst.transport.netty.NettyTransport
         * Group: common
         *
         * @param transportClassName the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder transportClassName(
                java.lang.String transportClassName) {
            doSetProperty("transportClassName", transportClassName);
            return this;
        }
        /**
         * The resource ttl.
         *
         * The option is a: <code>long</code> type.
         *
         * Group: common
         *
         * @param ttl the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder ttl(long ttl) {
            doSetProperty("ttl", ttl);
            return this;
        }
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * pickup incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. By default the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions, that will be logged at WARN or ERROR level and
         * ignored.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: consumer
         *
         * @param bridgeErrorHandler the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder bridgeErrorHandler(
                boolean bridgeErrorHandler) {
            doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
            return this;
        }
        /**
         * Whether the producer should be started lazy (on the first message).
         * By starting lazy you can use this to allow CamelContext and routes to
         * startup in situations where a producer may otherwise fail during
         * starting and cause the route to fail being started. By deferring this
         * startup to be lazy then the startup failure can be handled during
         * routing messages via Camel's routing error handlers. Beware that when
         * the first message is processed then creating and starting the
         * producer may take a little time and prolong the total processing time
         * of the processing.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: producer
         *
         * @param lazyStartProducer the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder lazyStartProducer(
                boolean lazyStartProducer) {
            doSetProperty("lazyStartProducer", lazyStartProducer);
            return this;
        }
        /**
         * Whether autowiring is enabled. This is used for automatic autowiring
         * options (the option must be marked as autowired) by looking up in the
         * registry to find if there is a single instance of matching type,
         * which then gets configured on the component. This can be used for
         * automatic configuring JDBC data sources, JMS connection factories,
         * AWS Clients, etc.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: true
         * Group: advanced
         *
         * @param autowiredEnabled the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder autowiredEnabled(
                boolean autowiredEnabled) {
            doSetProperty("autowiredEnabled", autowiredEnabled);
            return this;
        }
        /**
         * The cluster wide default resource configuration.
         *
         * The option is a: <code>java.util.Properties</code> type.
         *
         * Group: advanced
         *
         * @param defaultResourceConfig the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder defaultResourceConfig(
                java.util.Properties defaultResourceConfig) {
            doSetProperty("defaultResourceConfig", defaultResourceConfig);
            return this;
        }
        /**
         * The local default resource options.
         *
         * The option is a: <code>java.util.Properties</code> type.
         *
         * Group: advanced
         *
         * @param defaultResourceOptions the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder defaultResourceOptions(
                java.util.Properties defaultResourceOptions) {
            doSetProperty("defaultResourceOptions", defaultResourceOptions);
            return this;
        }
        /**
         * Sets if the local member should join groups as PersistentMember or
         * not. If set to ephemeral the local member will receive an auto
         * generated ID thus the local one is ignored.
         *
         * The option is a: <code>boolean</code> type.
         *
         * Default: false
         * Group: advanced
         *
         * @param ephemeral the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder ephemeral(boolean ephemeral) {
            doSetProperty("ephemeral", ephemeral);
            return this;
        }
        /**
         * The read consistency level.
         *
         * The option is a:
         * <code>io.atomix.resource.ReadConsistency</code> type.
         *
         * Group: advanced
         *
         * @param readConsistency the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder readConsistency(
                io.atomix.resource.ReadConsistency readConsistency) {
            doSetProperty("readConsistency", readConsistency);
            return this;
        }
        /**
         * Cluster wide resources configuration.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.util.Properties&gt;</code> type.
         *
         * Group: advanced
         *
         * @param resourceConfigs the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder resourceConfigs(
                java.util.Map<java.lang.String, java.util.Properties> resourceConfigs) {
            doSetProperty("resourceConfigs", resourceConfigs);
            return this;
        }
        /**
         * Local resources configurations.
         *
         * The option is a: <code>java.util.Map&lt;java.lang.String,
         * java.util.Properties&gt;</code> type.
         *
         * Group: advanced
         *
         * @param resourceOptions the value to set
         * @return the dsl builder
         */
        default AtomixSetComponentBuilder resourceOptions(
                java.util.Map<java.lang.String, java.util.Properties> resourceOptions) {
            doSetProperty("resourceOptions", resourceOptions);
            return this;
        }
    }

    // Generated implementation: creates the component and dispatches the
    // DSL property names above to the matching typed setters.
    class AtomixSetComponentBuilderImpl
            extends
                AbstractComponentBuilder<AtomixSetComponent>
            implements
                AtomixSetComponentBuilder {
        @Override
        protected AtomixSetComponent buildConcreteComponent() {
            return new AtomixSetComponent();
        }
        // Lazily creates the shared AtomixSetConfiguration the first time a
        // configuration-scoped property is set on the component.
        private org.apache.camel.component.atomix.client.set.AtomixSetConfiguration getOrCreateConfiguration(
                org.apache.camel.component.atomix.client.set.AtomixSetComponent component) {
            if (component.getConfiguration() == null) {
                component.setConfiguration(new org.apache.camel.component.atomix.client.set.AtomixSetConfiguration());
            }
            return component.getConfiguration();
        }
        // Property names are case-sensitive camelCase; returns false for any
        // unknown name so the caller can report it.
        @Override
        protected boolean setPropertyOnComponent(
                Component component,
                String name,
                Object value) {
            switch (name) {
            case "atomix": getOrCreateConfiguration((AtomixSetComponent) component).setAtomix((io.atomix.Atomix) value); return true;
            case "configuration": ((AtomixSetComponent) component).setConfiguration((org.apache.camel.component.atomix.client.set.AtomixSetConfiguration) value); return true;
            case "configurationUri": ((AtomixSetComponent) component).setConfigurationUri((java.lang.String) value); return true;
            case "defaultAction": getOrCreateConfiguration((AtomixSetComponent) component).setDefaultAction((org.apache.camel.component.atomix.client.set.AtomixSet.Action) value); return true;
            case "nodes": ((AtomixSetComponent) component).setNodes((java.util.List) value); return true;
            case "resultHeader": getOrCreateConfiguration((AtomixSetComponent) component).setResultHeader((java.lang.String) value); return true;
            case "transportClassName": getOrCreateConfiguration((AtomixSetComponent) component).setTransportClassName((java.lang.String) value); return true;
            case "ttl": getOrCreateConfiguration((AtomixSetComponent) component).setTtl((long) value); return true;
            case "bridgeErrorHandler": ((AtomixSetComponent) component).setBridgeErrorHandler((boolean) value); return true;
            case "lazyStartProducer": ((AtomixSetComponent) component).setLazyStartProducer((boolean) value); return true;
            case "autowiredEnabled": ((AtomixSetComponent) component).setAutowiredEnabled((boolean) value); return true;
            case "defaultResourceConfig": getOrCreateConfiguration((AtomixSetComponent) component).setDefaultResourceConfig((java.util.Properties) value); return true;
            case "defaultResourceOptions": getOrCreateConfiguration((AtomixSetComponent) component).setDefaultResourceOptions((java.util.Properties) value); return true;
            case "ephemeral": getOrCreateConfiguration((AtomixSetComponent) component).setEphemeral((boolean) value); return true;
            case "readConsistency": getOrCreateConfiguration((AtomixSetComponent) component).setReadConsistency((io.atomix.resource.ReadConsistency) value); return true;
            case "resourceConfigs": getOrCreateConfiguration((AtomixSetComponent) component).setResourceConfigs((java.util.Map) value); return true;
            case "resourceOptions": getOrCreateConfiguration((AtomixSetComponent) component).setResourceOptions((java.util.Map) value); return true;
            default: return false;
            }
        }
    }
}
hkernbach/arangodb | arangosh/Benchmark/BenchFeature.h | 3371 | ////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2016 ArangoDB GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Jan Steemann
////////////////////////////////////////////////////////////////////////////////
#ifndef ARANGODB_BENCHMARK_BENCH_FEATURE_H
#define ARANGODB_BENCHMARK_BENCH_FEATURE_H 1
#include "ApplicationFeatures/ApplicationFeature.h"
namespace arangodb {
class ClientFeature;
// Aggregated outcome of a single benchmark run.
struct BenchRunResult {
  double time;         // total elapsed time of the run (units presumably seconds -- confirm at call site)
  size_t failures;     // count of failed operations
  size_t incomplete;   // count of operations that did not complete
  double requestTime;  // accumulated time spent in requests

  // Overwrite all fields in one call.
  void update(double _time, size_t _failures, size_t _incomplete, double _requestTime) {
    time = _time;
    failures = _failures;
    incomplete = _incomplete;
    requestTime = _requestTime;
  }
};
// Application feature implementing the "arangobench" client: collects the
// benchmark options, runs the configured test case, and reports results.
class BenchFeature final : public application_features::ApplicationFeature {
 public:
  // `result` receives the process exit code computed by the benchmark run.
  BenchFeature(application_features::ApplicationServer* server, int* result);

 public:
  void collectOptions(std::shared_ptr<options::ProgramOptions>) override;
  void start() override final;
  void unprepare() override final;

 public:
  // Read-only accessors for the configured benchmark parameters.
  bool async() const { return _async; }
  uint64_t concurrency() const { return _concurreny; }
  uint64_t operations() const { return _operations; }
  uint64_t batchSize() const { return _batchSize; }
  bool keepAlive() const { return _keepAlive; }
  std::string const& collection() const { return _collection; }
  std::string const& testCase() const { return _testCase; }
  uint64_t complexity() const { return _complexity; }
  bool delay() const { return _delay; }
  bool progress() const { return _progress; }
  bool verbose() const { return _verbose; }
  // NOTE(review): named quit() but returns the "quiet" flag -- presumably
  // meant to be called quiet(); renaming would break callers, so only flagged.
  bool quit() const { return _quiet; }
  uint64_t runs() const { return _runs; }
  std::string const& junitReportFile() const { return _junitReportFile; }
  uint64_t replicationFactor() const { return _replicationFactor; }
  uint64_t numberOfShards() const { return _numberOfShards; }
  bool waitForSync() const { return _waitForSync; }

 private:
  void status(std::string const& value);
  bool report(ClientFeature*, std::vector<BenchRunResult>);
  void printResult(BenchRunResult const& result);
  bool writeJunitReport(BenchRunResult const& result);

 private:
  bool _async;
  // NOTE(review): member name is misspelled ("concurreny"); the public
  // accessor concurrency() above is spelled correctly, so callers are unaffected.
  uint64_t _concurreny;
  uint64_t _operations;
  uint64_t _batchSize;
  bool _keepAlive;
  std::string _collection;
  std::string _testCase;
  uint64_t _complexity;
  bool _delay;
  bool _progress;
  bool _verbose;
  bool _quiet;
  uint64_t _runs;
  std::string _junitReportFile;
  uint64_t _replicationFactor;
  uint64_t _numberOfShards;
  bool _waitForSync;

 private:
  // Out-parameter for the final exit code (see constructor).
  int* _result;

 private:
  static void updateStartCounter();
  static int getStartCounter();

 private:
  static std::atomic<int> _started;
};
#endif
| apache-2.0 |
18826252059/im | web/bundles/topxiaadmin/js/controller/analysis/classroom-income.js | 1998 | define(function(require, exports, module) {
var Morris=require("morris");
require("jquery.bootstrap-datetimepicker");
var Validator = require('bootstrap.validator');
var autoSubmitCondition=require("./autoSubmitCondition.js");
require('common/validator-rules').inject(Validator);
var now = new Date();
exports.run = function() {
if($('#data').length > 0){
var data = eval ("(" + $('#data').attr("value") + ")");
Morris.Line({
element: 'line-data',
data: data,
xkey: 'date',
ykeys: ['count'],
labels: [Translator.trans('班级营收额')],
xLabels:"day"
});
}
$("[name=endTime]").datetimepicker({
autoclose: true,
format: 'yyyy-mm-dd',
minView: 'month'
});
$('[name=endTime]').datetimepicker('setEndDate', now);
$('[name=endTime]').datetimepicker('setStartDate', $('#classroomIncomeStartDate').attr("value"));
$("[name=startTime]").datetimepicker({
autoclose: true,
format: 'yyyy-mm-dd',
minView: 'month'
});
$('[name=startTime]').datetimepicker('setEndDate', now);
$('[name=startTime]').datetimepicker('setStartDate', $('#classroomIncomeStartDate').attr("value"));
var validator = new Validator({
element: '#operation-form'});
validator.addItem({
element: '[name=startTime]',
required: true,
rule:'date_check'
});
validator.addItem({
element: '[name=endTime]',
required: true,
rule:'date_check'
});
validator.addItem({
element: '[name=analysisDateType]',
required: true
});
autoSubmitCondition.autoSubmitCondition();
};
}); | apache-2.0 |
aws/aws-sdk-cpp | aws-cpp-sdk-license-manager/source/model/Tag.cpp | 1136 | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/license-manager/model/Tag.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace LicenseManager
{
namespace Model
{
// Default constructor: both fields start out unset.
Tag::Tag() :
    m_keyHasBeenSet(false),
    m_valueHasBeenSet(false)
{
}
// JSON constructor: delegates to operator= to populate the fields
// from the supplied document.
Tag::Tag(JsonView jsonValue) :
    m_keyHasBeenSet(false),
    m_valueHasBeenSet(false)
{
  *this = jsonValue;
}
// Populate this Tag from a JSON document. Keys absent from the document
// leave the corresponding member (and its HasBeenSet flag) untouched.
Tag& Tag::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("Key"))
  {
    m_key = jsonValue.GetString("Key");
    m_keyHasBeenSet = true;
  }

  if(jsonValue.ValueExists("Value"))
  {
    m_value = jsonValue.GetString("Value");
    m_valueHasBeenSet = true;
  }

  return *this;
}
// Serialize this Tag to JSON, emitting only the fields that were set.
JsonValue Tag::Jsonize() const
{
  JsonValue payload;

  if(m_keyHasBeenSet)
  {
   payload.WithString("Key", m_key);
  }

  if(m_valueHasBeenSet)
  {
   payload.WithString("Value", m_value);
  }

  return payload;
}
} // namespace Model
} // namespace LicenseManager
} // namespace Aws
| apache-2.0 |
ceros/apache2-onehealth | recipes/mod_auth_cas.rb | 2026 | #
# Cookbook Name:: apache2
# Recipe:: mod_auth_cas
#
# Copyright 2013, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Install and enable mod_auth_cas, either by building from source (git
# checkout + ./configure && make) or from the platform package, then make
# sure the module's cache directory exists.
include_recipe 'apache2::default'

if node['apache']['mod_auth_cas']['from_source']
  # A source build needs the Apache development headers.
  package 'httpd-devel' do
    package_name value_for_platform_family(
      %w[rhel fedora suse] => 'httpd-devel',
      'debian' => 'apache2-dev'
    )
  end

  git '/tmp/mod_auth_cas' do
    repository 'git://github.com/Jasig/mod_auth_cas.git'
    revision node['apache']['mod_auth_cas']['source_revision']
    notifies :run, 'execute[compile mod_auth_cas]', :immediately
  end

  execute 'compile mod_auth_cas' do
    command './configure && make && make install'
    cwd '/tmp/mod_auth_cas'
    # Idempotence guard: skip the build when the shared object is already installed.
    not_if "test -f #{node['apache']['libexecdir']}/mod_auth_cas.so"
  end

  # Source builds do not install a LoadModule snippet, so provide one.
  template "#{node['apache']['dir']}/mods-available/auth_cas.load" do
    source 'mods/auth_cas.load.erb'
    owner 'root'
    group node['apache']['root_group']
    mode '0644'
  end
else
  case node['platform_family']
  when 'debian'
    package 'libapache2-mod-auth-cas'
  when 'rhel', 'fedora'
    yum_package 'mod_auth_cas' do
      notifies :run, 'execute[generate-module-list]', :immediately
    end

    # Drop the conf file so the apache_module-managed conf below is authoritative.
    file "#{node['apache']['dir']}/conf-available/auth_cas.conf" do
      action :delete
      backup false
    end
  end
end

apache_module 'auth_cas' do
  conf true
end

# Cache directory used by mod_auth_cas (presumably CASCookiePath -- confirm
# against the conf template); writable only by the Apache user.
directory "#{node['apache']['cache_dir']}/mod_auth_cas" do
  owner node['apache']['user']
  group node['apache']['group']
  mode '0700'
end
| apache-2.0 |
mduerig/jackrabbit-oak | oak-security-spi/src/main/java/org/apache/jackrabbit/oak/plugins/tree/package-info.java | 916 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Declares the exported OSGi semantic version of this package; bump the
// version on incompatible API changes per OSGi semantic-versioning policy.
@Version("3.0.0")
package org.apache.jackrabbit.oak.plugins.tree;

import org.osgi.annotation.versioning.Version;
| apache-2.0 |
izenecloud/sf1r-lite | source/core/search-manager/ORDocumentIterator.h | 2728 | /**
* @file sf1r/search-manager/ORDocumentIterator.h
* @author Yingfeng Zhang
* @date Created <2009-09-22>
* @date Updated <2010-03-24 14:42:03>
* @brief DocumentIterator which implements the OR semantics
* NOT semantics is also included
*/
#ifndef OR_DOCUMENT_ITERATOR_H
#define OR_DOCUMENT_ITERATOR_H
#include "DocumentIterator.h"
#include <ir/index_manager/utility/PriorityQueue.h>
#include <vector>
namespace sf1r
{
class NOTDocumentIterator;
/**
 * @brief DocumentIterator implementing OR (union) semantics over a set of
 *        child iterators. NOT semantics is also handled here via an embedded
 *        NOTDocumentIterator (see hasNot_ / pNOTDocIterator_ below).
 */
class ORDocumentIterator:public DocumentIterator
{
public:
    /**
     * Priority queue (min-heap) of child iterators ordered by their current
     * document id, so the smallest pending doc id is always at the top.
     */
    class DocumentIteratorQueue : public izenelib::ir::indexmanager::PriorityQueue<DocumentIterator*>
    {
    public:
        DocumentIteratorQueue(size_t size)
        {
            initialize(size,false);
        }
    protected:
        // Heap ordering: ascending by current document id.
        bool lessThan(DocumentIterator* o1, DocumentIterator* o2)
        {
            return o1->doc() < o2->doc();
        }
    };
public:
    ORDocumentIterator();
    virtual ~ORDocumentIterator();
public:
    /// Add a child iterator to the union.
    void add(DocumentIterator* pDocIterator);
    /// Overload for virtual-property term iterators.
    void add(VirtualPropertyTermDocumentIterator* pDocIterator);
    /// Advance to the next matching document; returns false when exhausted.
    bool next();
    /// Current document id (valid after a successful next()/skipTo()).
    docid_t doc()
    {
        return currDoc_;
    }
    /// Collect ranking features of the current document into rankDocumentProperty.
    void doc_item(RankDocumentProperty& rankDocumentProperty, unsigned propIndex = 0);
    /// Accumulate document-frequency / collection-term-frequency / max-tf stats
    /// from all child iterators.
    void df_cmtf(
        DocumentFrequencyInProperties& dfmap,
        CollectionTermFrequencyInProperties& ctfmap,
        MaxTermFrequencyInProperties& maxtfmap);
    /// Term frequency for the current document.
    count_t tf();
    /// True when no child iterator has been added.
    bool empty()
    {
        return docIteratorList_.empty();
    }
    /// Apply query boosting to the given score/weight pair.
    void queryBoosting(double& score, double& weight);
    /// Debug dump of the iterator tree; level controls indentation depth.
    void print(int level=0);
    DocumentIteratorQueue* getDocumentIteratorQueue()
    {
        return pDocIteratorQueue_;
    }
    // NOTE(review): returns the child list by value (a copy on every call);
    // presumably intentional for safety, but worth confirming with callers.
    std::vector<DocumentIterator*> getdocIteratorList_()
    {
        return docIteratorList_;
    }
    /// Set score upper bounds used for early-termination optimizations.
    void setUB(bool useOriginalQuery, UpperBoundInProperties& ubmap);
    /// Score upper bound of this iterator.
    float getUB();
    /// Property name of the first child, or NULL when there are no children.
    const char* getProperty()
    {
        if (docIteratorList_.begin() == docIteratorList_.end())
            return NULL;
        return (*docIteratorList_.begin())->getProperty();
    }
#if SKIP_ENABLED
    /// Skip forward to the first document id >= target.
    docid_t skipTo(docid_t target);
protected:
    docid_t do_skipTo(docid_t target);
#endif
protected:
    /// Lazily build pDocIteratorQueue_ from docIteratorList_.
    virtual void initDocIteratorQueue();
    bool do_next();
private:
    // Advance in lockstep with the NOT iterator so excluded docs are skipped.
    inline bool move_together_with_not();
protected:
    std::vector<DocumentIterator*> docIteratorList_;  // child iterators of the union
    DocumentIteratorQueue* pDocIteratorQueue_;        // heap over the children, keyed by doc id
    docid_t currDoc_;                                 // current document id
private:
    bool hasNot_;                    // whether a NOT sub-iterator is attached
    docid_t currDocOfNOTIter_;       // current doc id of the NOT iterator
    bool initNOTIterator_;           // lazy-init flag for pNOTDocIterator_
    NOTDocumentIterator* pNOTDocIterator_;
};
}
#endif
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.