repo_name
stringlengths 4
116
| path
stringlengths 3
942
| size
stringlengths 1
7
| content
stringlengths 3
1.05M
| license
stringclasses 15
values |
---|---|---|---|---|
VahidN/WpfFramework
|
MyWpfFramework.Common/Config/ConfigSetGet.cs
|
1554
|
using System;
using System.Collections.Generic;
using System.Configuration;
using MyWpfFramework.Common.MVVM;
namespace MyWpfFramework.Common.Config
{
/// <summary>
/// خواندن تنظیمات از فایل کانفیگ
/// </summary>
public class ConfigSetGet : IConfigSetGet
{
/// <summary>
/// read settings from app.config file
/// </summary>
/// <param name="key">کلید</param>
/// <returns>مقدار کلید</returns>
public string GetConfigData(string key)
{
//don't load on design time
if (Designer.IsInDesignModeStatic)
return "0";
var configuration = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.None);
var appSettings = configuration.AppSettings;
string res = appSettings.Settings[key].Value;
if (res == null) throw new Exception("Undefined: " + key);
return res;
}
/// <summary>
/// ذخیره سازی تنظیمات در فایل کانفیگ برنامه
/// </summary>
/// <param name="key">کلید</param>
/// <param name="data">مقدار</param>
public void SetConfigData(string key, string data)
{
var config = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.None);
config.AppSettings.Settings[key].Value = data;
config.Save(ConfigurationSaveMode.Modified);
ConfigurationManager.RefreshSection("appSettings");
}
}
}
|
apache-2.0
|
pangkailang/TOMS
|
target-analysis/src/main/resources/static/tocc-toms/roadNetworkHighSpeed/res/css/style.css
|
1407
|
html,
body {
width: 100%;
height: 100%;
font-family: "microsoft yahei";
font-size: 14px;
}
/*map--legend*/
.mapLegend {
position: absolute;
right: 20px;
bottom: 350px;
}
.mapLegend ul li {
border-radius: 5px;
width: 55px;
height: 55px;
text-align: center;
margin-bottom: 10px;
cursor: pointer;
}
.mapLegend ul li img {
margin-top: 6px;
}
.mapLegend ul li div {
margin-top: -3px;
}
.mapLegend ul li.pink {
border: 1px solid #da2c71;
color: #da2c71;
}
.mapLegend ul li.green {
border: 1px solid #1dd95b;
color: #1dd95b;
}
.mapLegend ul li.lengActive {
background-color: #da2c71;
color: #fff;
}
/*mapFoot-*/
.mapFoot .table table {
margin: 0 auto;
width: 100%;
}
.mapFoot .table table tr td {
height: 40px !important;
line-height: 40px !important;
text-align: center;
}
.mapFoot .table table tr .blue {
color: #32c1ce;
font-size: 15px;
}
.mapFoot .table table tr:first-child td {
background-color: #0a1b39;
color: #8692a4;
}
.mapFoot .table table tr td:first-child {
color: #8692a4;
}
.footRight > div {
float: left;
height: 224px;
padding-top: 20px;
border-right: 2px solid #112e6b;
}
.footRight > div#swiperBox1 {
width: 29.6%;
}
.footRight > div#swiperBox2 {
width: 40%;
}
.footRight > div#swiperBox3 {
width: 30%;
}
.layerBox .layerInfo li ul {
overflow: hidden;
}
.layerBox .layerInfo li ul li {
width: 33%;
float: left;
}
|
apache-2.0
|
cedral/aws-sdk-cpp
|
aws-cpp-sdk-personalize/include/aws/personalize/model/CampaignSummary.h
|
11940
|
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <aws/personalize/Personalize_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/core/utils/DateTime.h>
#include <utility>
namespace Aws
{
namespace Utils
{
namespace Json
{
class JsonValue;
class JsonView;
} // namespace Json
} // namespace Utils
namespace Personalize
{
namespace Model
{
/**
* <p>Provides a summary of the properties of a campaign. For a complete listing,
* call the <a>DescribeCampaign</a> API.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/personalize-2018-05-22/CampaignSummary">AWS
* API Reference</a></p>
*/
class AWS_PERSONALIZE_API CampaignSummary
{
public:
CampaignSummary();
CampaignSummary(Aws::Utils::Json::JsonView jsonValue);
CampaignSummary& operator=(Aws::Utils::Json::JsonView jsonValue);
Aws::Utils::Json::JsonValue Jsonize() const;
/**
* <p>The name of the campaign.</p>
*/
inline const Aws::String& GetName() const{ return m_name; }
/**
* <p>The name of the campaign.</p>
*/
inline bool NameHasBeenSet() const { return m_nameHasBeenSet; }
/**
* <p>The name of the campaign.</p>
*/
inline void SetName(const Aws::String& value) { m_nameHasBeenSet = true; m_name = value; }
/**
* <p>The name of the campaign.</p>
*/
inline void SetName(Aws::String&& value) { m_nameHasBeenSet = true; m_name = std::move(value); }
/**
* <p>The name of the campaign.</p>
*/
inline void SetName(const char* value) { m_nameHasBeenSet = true; m_name.assign(value); }
/**
* <p>The name of the campaign.</p>
*/
inline CampaignSummary& WithName(const Aws::String& value) { SetName(value); return *this;}
/**
* <p>The name of the campaign.</p>
*/
inline CampaignSummary& WithName(Aws::String&& value) { SetName(std::move(value)); return *this;}
/**
* <p>The name of the campaign.</p>
*/
inline CampaignSummary& WithName(const char* value) { SetName(value); return *this;}
/**
* <p>The Amazon Resource Name (ARN) of the campaign.</p>
*/
inline const Aws::String& GetCampaignArn() const{ return m_campaignArn; }
/**
* <p>The Amazon Resource Name (ARN) of the campaign.</p>
*/
inline bool CampaignArnHasBeenSet() const { return m_campaignArnHasBeenSet; }
/**
* <p>The Amazon Resource Name (ARN) of the campaign.</p>
*/
inline void SetCampaignArn(const Aws::String& value) { m_campaignArnHasBeenSet = true; m_campaignArn = value; }
/**
* <p>The Amazon Resource Name (ARN) of the campaign.</p>
*/
inline void SetCampaignArn(Aws::String&& value) { m_campaignArnHasBeenSet = true; m_campaignArn = std::move(value); }
/**
* <p>The Amazon Resource Name (ARN) of the campaign.</p>
*/
inline void SetCampaignArn(const char* value) { m_campaignArnHasBeenSet = true; m_campaignArn.assign(value); }
/**
* <p>The Amazon Resource Name (ARN) of the campaign.</p>
*/
inline CampaignSummary& WithCampaignArn(const Aws::String& value) { SetCampaignArn(value); return *this;}
/**
* <p>The Amazon Resource Name (ARN) of the campaign.</p>
*/
inline CampaignSummary& WithCampaignArn(Aws::String&& value) { SetCampaignArn(std::move(value)); return *this;}
/**
* <p>The Amazon Resource Name (ARN) of the campaign.</p>
*/
inline CampaignSummary& WithCampaignArn(const char* value) { SetCampaignArn(value); return *this;}
/**
* <p>The status of the campaign.</p> <p>A campaign can be in one of the following
* states:</p> <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or-
* CREATE FAILED</p> </li> <li> <p>DELETE PENDING > DELETE IN_PROGRESS</p> </li>
* </ul>
*/
inline const Aws::String& GetStatus() const{ return m_status; }
/**
* <p>The status of the campaign.</p> <p>A campaign can be in one of the following
* states:</p> <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or-
* CREATE FAILED</p> </li> <li> <p>DELETE PENDING > DELETE IN_PROGRESS</p> </li>
* </ul>
*/
inline bool StatusHasBeenSet() const { return m_statusHasBeenSet; }
/**
* <p>The status of the campaign.</p> <p>A campaign can be in one of the following
* states:</p> <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or-
* CREATE FAILED</p> </li> <li> <p>DELETE PENDING > DELETE IN_PROGRESS</p> </li>
* </ul>
*/
inline void SetStatus(const Aws::String& value) { m_statusHasBeenSet = true; m_status = value; }
/**
* <p>The status of the campaign.</p> <p>A campaign can be in one of the following
* states:</p> <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or-
* CREATE FAILED</p> </li> <li> <p>DELETE PENDING > DELETE IN_PROGRESS</p> </li>
* </ul>
*/
inline void SetStatus(Aws::String&& value) { m_statusHasBeenSet = true; m_status = std::move(value); }
/**
* <p>The status of the campaign.</p> <p>A campaign can be in one of the following
* states:</p> <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or-
* CREATE FAILED</p> </li> <li> <p>DELETE PENDING > DELETE IN_PROGRESS</p> </li>
* </ul>
*/
inline void SetStatus(const char* value) { m_statusHasBeenSet = true; m_status.assign(value); }
/**
* <p>The status of the campaign.</p> <p>A campaign can be in one of the following
* states:</p> <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or-
* CREATE FAILED</p> </li> <li> <p>DELETE PENDING > DELETE IN_PROGRESS</p> </li>
* </ul>
*/
inline CampaignSummary& WithStatus(const Aws::String& value) { SetStatus(value); return *this;}
/**
* <p>The status of the campaign.</p> <p>A campaign can be in one of the following
* states:</p> <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or-
* CREATE FAILED</p> </li> <li> <p>DELETE PENDING > DELETE IN_PROGRESS</p> </li>
* </ul>
*/
inline CampaignSummary& WithStatus(Aws::String&& value) { SetStatus(std::move(value)); return *this;}
/**
* <p>The status of the campaign.</p> <p>A campaign can be in one of the following
* states:</p> <ul> <li> <p>CREATE PENDING > CREATE IN_PROGRESS > ACTIVE -or-
* CREATE FAILED</p> </li> <li> <p>DELETE PENDING > DELETE IN_PROGRESS</p> </li>
* </ul>
*/
inline CampaignSummary& WithStatus(const char* value) { SetStatus(value); return *this;}
/**
* <p>The date and time (in Unix time) that the campaign was created.</p>
*/
inline const Aws::Utils::DateTime& GetCreationDateTime() const{ return m_creationDateTime; }
/**
* <p>The date and time (in Unix time) that the campaign was created.</p>
*/
inline bool CreationDateTimeHasBeenSet() const { return m_creationDateTimeHasBeenSet; }
/**
* <p>The date and time (in Unix time) that the campaign was created.</p>
*/
inline void SetCreationDateTime(const Aws::Utils::DateTime& value) { m_creationDateTimeHasBeenSet = true; m_creationDateTime = value; }
/**
* <p>The date and time (in Unix time) that the campaign was created.</p>
*/
inline void SetCreationDateTime(Aws::Utils::DateTime&& value) { m_creationDateTimeHasBeenSet = true; m_creationDateTime = std::move(value); }
/**
* <p>The date and time (in Unix time) that the campaign was created.</p>
*/
inline CampaignSummary& WithCreationDateTime(const Aws::Utils::DateTime& value) { SetCreationDateTime(value); return *this;}
/**
* <p>The date and time (in Unix time) that the campaign was created.</p>
*/
inline CampaignSummary& WithCreationDateTime(Aws::Utils::DateTime&& value) { SetCreationDateTime(std::move(value)); return *this;}
/**
* <p>The date and time (in Unix time) that the campaign was last updated.</p>
*/
inline const Aws::Utils::DateTime& GetLastUpdatedDateTime() const{ return m_lastUpdatedDateTime; }
/**
* <p>The date and time (in Unix time) that the campaign was last updated.</p>
*/
inline bool LastUpdatedDateTimeHasBeenSet() const { return m_lastUpdatedDateTimeHasBeenSet; }
/**
* <p>The date and time (in Unix time) that the campaign was last updated.</p>
*/
inline void SetLastUpdatedDateTime(const Aws::Utils::DateTime& value) { m_lastUpdatedDateTimeHasBeenSet = true; m_lastUpdatedDateTime = value; }
/**
* <p>The date and time (in Unix time) that the campaign was last updated.</p>
*/
inline void SetLastUpdatedDateTime(Aws::Utils::DateTime&& value) { m_lastUpdatedDateTimeHasBeenSet = true; m_lastUpdatedDateTime = std::move(value); }
/**
* <p>The date and time (in Unix time) that the campaign was last updated.</p>
*/
inline CampaignSummary& WithLastUpdatedDateTime(const Aws::Utils::DateTime& value) { SetLastUpdatedDateTime(value); return *this;}
/**
* <p>The date and time (in Unix time) that the campaign was last updated.</p>
*/
inline CampaignSummary& WithLastUpdatedDateTime(Aws::Utils::DateTime&& value) { SetLastUpdatedDateTime(std::move(value)); return *this;}
/**
* <p>If a campaign fails, the reason behind the failure.</p>
*/
inline const Aws::String& GetFailureReason() const{ return m_failureReason; }
/**
* <p>If a campaign fails, the reason behind the failure.</p>
*/
inline bool FailureReasonHasBeenSet() const { return m_failureReasonHasBeenSet; }
/**
* <p>If a campaign fails, the reason behind the failure.</p>
*/
inline void SetFailureReason(const Aws::String& value) { m_failureReasonHasBeenSet = true; m_failureReason = value; }
/**
* <p>If a campaign fails, the reason behind the failure.</p>
*/
inline void SetFailureReason(Aws::String&& value) { m_failureReasonHasBeenSet = true; m_failureReason = std::move(value); }
/**
* <p>If a campaign fails, the reason behind the failure.</p>
*/
inline void SetFailureReason(const char* value) { m_failureReasonHasBeenSet = true; m_failureReason.assign(value); }
/**
* <p>If a campaign fails, the reason behind the failure.</p>
*/
inline CampaignSummary& WithFailureReason(const Aws::String& value) { SetFailureReason(value); return *this;}
/**
* <p>If a campaign fails, the reason behind the failure.</p>
*/
inline CampaignSummary& WithFailureReason(Aws::String&& value) { SetFailureReason(std::move(value)); return *this;}
/**
* <p>If a campaign fails, the reason behind the failure.</p>
*/
inline CampaignSummary& WithFailureReason(const char* value) { SetFailureReason(value); return *this;}
private:
Aws::String m_name;
bool m_nameHasBeenSet;
Aws::String m_campaignArn;
bool m_campaignArnHasBeenSet;
Aws::String m_status;
bool m_statusHasBeenSet;
Aws::Utils::DateTime m_creationDateTime;
bool m_creationDateTimeHasBeenSet;
Aws::Utils::DateTime m_lastUpdatedDateTime;
bool m_lastUpdatedDateTimeHasBeenSet;
Aws::String m_failureReason;
bool m_failureReasonHasBeenSet;
};
} // namespace Model
} // namespace Personalize
} // namespace Aws
|
apache-2.0
|
FTSRG/seviz
|
Source/Graph#/Algorithms/Layout/Simple/FDP/FRCoolingFunction.cs
|
139
|
namespace GraphSharp.Algorithms.Layout.Simple.FDP
{
public enum FRCoolingFunction
{
Linear,
Exponential
}
}
|
apache-2.0
|
containous/yaegi
|
_test/a9.go
|
187
|
package main
import "fmt"
//var (
// samples = []int{}
// b = 1
//)
func main() {
var samples = []int{}
samples = append(samples, 1)
fmt.Println(samples)
}
// Output:
// [1]
|
apache-2.0
|
andreiHi/hincuA
|
chapter_008/src/main/java/ru/job4j/jdbc/package-info.java
|
125
|
/**
* @author Hincu Andrei ([email protected]) on 01.12.2017.
* @version $Id$.
* @since 0.1.
*/
package ru.job4j.jdbc;
|
apache-2.0
|
DataDrake/csv-analyze
|
tests/types/group.go
|
1759
|
//
// Copyright 2017 Bryan T. Meyers <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package types
import (
"fmt"
"github.com/DataDrake/csv-analyze/tests"
"io"
)
const groupFormat = "\t\033[96m\033[4m%s\033[0m\n"
// Group is a set of Type tests to run for a set of related values
type Group struct {
tests map[string][]tests.Test
names []string
}
// NewGroup creates a new test group for the type tests
func NewGroup() tests.Group {
return &Group{
map[string][]tests.Test{
"Numerical": []tests.Test{
NewUnsignedTest(),
NewSignedTest(),
NewFloatTest(),
},
"Logical": []tests.Test{
NewBooleanTest(),
},
"DateTime": []tests.Test{
NewTimeTest(),
},
"String": []tests.Test{
NewStringTest(),
},
},
[]string{"Numerical", "Logical", "DateTime", "String"},
}
}
// Run hands the same string to all of the tests
func (g *Group) Run(cell string) {
for _, ts := range g.tests {
for _, t := range ts {
t.Run(cell)
}
}
}
// PrintResult writes out the results of the type tests
func (g *Group) PrintResult(dst io.Writer) {
for _, name := range g.names {
fmt.Fprintf(dst, groupFormat, name)
for _, t := range g.tests[name] {
t.PrintResult(dst)
}
}
}
|
apache-2.0
|
poise/poise
|
test/spec/spec_helper.rb
|
843
|
#
# Copyright 2013-2016, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'poise_boiler/spec_helper'
# If we aren't on Windows, inject our fake win32/process module.
unless RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/
$LOAD_PATH.insert(0, File.expand_path('../utils/win32_helper', __FILE__))
end
require 'poise'
|
apache-2.0
|
KulykRoman/drill
|
exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
|
42918
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec;
import org.apache.drill.exec.physical.impl.common.HashTable;
import org.apache.drill.exec.rpc.user.InboundImpersonationManager;
import org.apache.drill.exec.server.options.OptionValidator;
import org.apache.drill.exec.server.options.TypeValidators.BooleanValidator;
import org.apache.drill.exec.server.options.TypeValidators.DoubleValidator;
import org.apache.drill.exec.server.options.TypeValidators.EnumeratedStringValidator;
import org.apache.drill.exec.server.options.TypeValidators.LongValidator;
import org.apache.drill.exec.server.options.TypeValidators.MaxWidthValidator;
import org.apache.drill.exec.server.options.TypeValidators.PositiveLongValidator;
import org.apache.drill.exec.server.options.TypeValidators.PowerOfTwoLongValidator;
import org.apache.drill.exec.server.options.TypeValidators.RangeDoubleValidator;
import org.apache.drill.exec.server.options.TypeValidators.RangeLongValidator;
import org.apache.drill.exec.server.options.TypeValidators.StringValidator;
import org.apache.drill.exec.server.options.TypeValidators.AdminUsersValidator;
import org.apache.drill.exec.server.options.TypeValidators.AdminUserGroupsValidator;
import org.apache.drill.exec.testing.ExecutionControls;
public final class ExecConstants {
private ExecConstants() {
// Don't allow instantiation
}
public static final String ZK_RETRY_TIMES = "drill.exec.zk.retry.count";
public static final String ZK_RETRY_DELAY = "drill.exec.zk.retry.delay";
public static final String ZK_CONNECTION = "drill.exec.zk.connect";
public static final String ZK_TIMEOUT = "drill.exec.zk.timeout";
public static final String ZK_ROOT = "drill.exec.zk.root";
public static final String ZK_REFRESH = "drill.exec.zk.refresh";
public static final String BIT_RETRY_TIMES = "drill.exec.rpc.bit.server.retry.count";
public static final String BIT_RETRY_DELAY = "drill.exec.rpc.bit.server.retry.delay";
public static final String BIT_TIMEOUT = "drill.exec.bit.timeout" ;
public static final String SERVICE_NAME = "drill.exec.cluster-id";
public static final String INITIAL_BIT_PORT = "drill.exec.rpc.bit.server.port";
public static final String INITIAL_DATA_PORT = "drill.exec.rpc.bit.server.dataport";
public static final String BIT_RPC_TIMEOUT = "drill.exec.rpc.bit.timeout";
public static final String INITIAL_USER_PORT = "drill.exec.rpc.user.server.port";
public static final String USER_RPC_TIMEOUT = "drill.exec.rpc.user.timeout";
public static final String METRICS_CONTEXT_NAME = "drill.exec.metrics.context";
public static final String USE_IP_ADDRESS = "drill.exec.rpc.use.ip";
public static final String CLIENT_RPC_THREADS = "drill.exec.rpc.user.client.threads";
public static final String BIT_SERVER_RPC_THREADS = "drill.exec.rpc.bit.server.threads";
public static final String USER_SERVER_RPC_THREADS = "drill.exec.rpc.user.server.threads";
public static final String FRAG_RUNNER_RPC_TIMEOUT = "drill.exec.rpc.fragrunner.timeout";
public static final PositiveLongValidator FRAG_RUNNER_RPC_TIMEOUT_VALIDATOR = new PositiveLongValidator(FRAG_RUNNER_RPC_TIMEOUT, Long.MAX_VALUE);
public static final String TRACE_DUMP_DIRECTORY = "drill.exec.trace.directory";
public static final String TRACE_DUMP_FILESYSTEM = "drill.exec.trace.filesystem";
public static final String TEMP_DIRECTORIES = "drill.exec.tmp.directories";
public static final String TEMP_FILESYSTEM = "drill.exec.tmp.filesystem";
public static final String INCOMING_BUFFER_IMPL = "drill.exec.buffer.impl";
/** incoming buffer size (number of batches) */
public static final String INCOMING_BUFFER_SIZE = "drill.exec.buffer.size";
public static final String SPOOLING_BUFFER_DELETE = "drill.exec.buffer.spooling.delete";
public static final String SPOOLING_BUFFER_MEMORY = "drill.exec.buffer.spooling.size";
public static final String BATCH_PURGE_THRESHOLD = "drill.exec.sort.purge.threshold";
// Spill boot-time Options common to all spilling operators
// (Each individual operator may override the common options)
public static final String SPILL_FILESYSTEM = "drill.exec.spill.fs";
public static final String SPILL_DIRS = "drill.exec.spill.directories";
public static final String OUTPUT_BATCH_SIZE = "drill.exec.memory.operator.output_batch_size";
// Output Batch Size in Bytes. We have a small lower bound so we can test with unit tests without the
// need to produce very large batches that take up lot of memory.
public static final LongValidator OUTPUT_BATCH_SIZE_VALIDATOR = new RangeLongValidator(OUTPUT_BATCH_SIZE, 128, 512 * 1024 * 1024);
// External Sort Boot configuration
public static final String EXTERNAL_SORT_TARGET_SPILL_BATCH_SIZE = "drill.exec.sort.external.spill.batch.size";
public static final String EXTERNAL_SORT_SPILL_GROUP_SIZE = "drill.exec.sort.external.spill.group.size";
public static final String EXTERNAL_SORT_SPILL_THRESHOLD = "drill.exec.sort.external.spill.threshold";
public static final String EXTERNAL_SORT_SPILL_DIRS = "drill.exec.sort.external.spill.directories";
public static final String EXTERNAL_SORT_SPILL_FILESYSTEM = "drill.exec.sort.external.spill.fs";
public static final String EXTERNAL_SORT_SPILL_FILE_SIZE = "drill.exec.sort.external.spill.file_size";
public static final String EXTERNAL_SORT_MSORT_MAX_BATCHSIZE = "drill.exec.sort.external.msort.batch.maxsize";
public static final String EXTERNAL_SORT_DISABLE_MANAGED = "drill.exec.sort.external.disable_managed";
public static final String EXTERNAL_SORT_MERGE_LIMIT = "drill.exec.sort.external.merge_limit";
public static final String EXTERNAL_SORT_SPILL_BATCH_SIZE = "drill.exec.sort.external.spill.spill_batch_size";
public static final String EXTERNAL_SORT_MERGE_BATCH_SIZE = "drill.exec.sort.external.spill.merge_batch_size";
public static final String EXTERNAL_SORT_MAX_MEMORY = "drill.exec.sort.external.mem_limit";
public static final String EXTERNAL_SORT_BATCH_LIMIT = "drill.exec.sort.external.batch_limit";
// External Sort Runtime options
public static final BooleanValidator EXTERNAL_SORT_DISABLE_MANAGED_OPTION = new BooleanValidator("exec.sort.disable_managed");
// Hash Aggregate Options
public static final String HASHAGG_NUM_PARTITIONS_KEY = "exec.hashagg.num_partitions";
public static final LongValidator HASHAGG_NUM_PARTITIONS_VALIDATOR = new RangeLongValidator(HASHAGG_NUM_PARTITIONS_KEY, 1, 128); // 1 means - no spilling
public static final String HASHAGG_MAX_MEMORY_KEY = "exec.hashagg.mem_limit";
public static final LongValidator HASHAGG_MAX_MEMORY_VALIDATOR = new RangeLongValidator(HASHAGG_MAX_MEMORY_KEY, 0, Integer.MAX_VALUE);
// min batches is used for tuning (each partition needs so many batches when planning the number of partitions,
// or reserve this number when calculating whether the remaining available memory is too small and requires a spill.)
// Low value may OOM (e.g., when incoming rows become wider), higher values use fewer partitions but are safer
public static final String HASHAGG_MIN_BATCHES_PER_PARTITION_KEY = "exec.hashagg.min_batches_per_partition";
public static final LongValidator HASHAGG_MIN_BATCHES_PER_PARTITION_VALIDATOR = new RangeLongValidator(HASHAGG_MIN_BATCHES_PER_PARTITION_KEY, 1, 5);
// Can be turned off mainly for testing. Memory prediction is used to decide on when to spill to disk; with this option off,
// spill would be triggered only by another mechanism -- "catch OOMs and then spill".
public static final String HASHAGG_USE_MEMORY_PREDICTION_KEY = "exec.hashagg.use_memory_prediction";
public static final BooleanValidator HASHAGG_USE_MEMORY_PREDICTION_VALIDATOR = new BooleanValidator(HASHAGG_USE_MEMORY_PREDICTION_KEY);
public static final String HASHAGG_SPILL_DIRS = "drill.exec.hashagg.spill.directories";
public static final String HASHAGG_SPILL_FILESYSTEM = "drill.exec.hashagg.spill.fs";
public static final String HASHAGG_FALLBACK_ENABLED_KEY = "drill.exec.hashagg.fallback.enabled";
public static final BooleanValidator HASHAGG_FALLBACK_ENABLED_VALIDATOR = new BooleanValidator(HASHAGG_FALLBACK_ENABLED_KEY);
public static final String SSL_PROVIDER = "drill.exec.ssl.provider"; // valid values are "JDK", "OPENSSL" // default JDK
public static final String SSL_PROTOCOL = "drill.exec.ssl.protocol"; // valid values are SSL, SSLV2, SSLV3, TLS, TLSV1, TLSv1.1, TLSv1.2(default)
public static final String SSL_KEYSTORE_TYPE = "drill.exec.ssl.keyStoreType";
public static final String SSL_KEYSTORE_PATH = "drill.exec.ssl.keyStorePath"; // path to keystore. default : $JRE_HOME/lib/security/keystore.jks
public static final String SSL_KEYSTORE_PASSWORD = "drill.exec.ssl.keyStorePassword"; // default: changeit
public static final String SSL_KEY_PASSWORD = "drill.exec.ssl.keyPassword"; //
public static final String SSL_TRUSTSTORE_TYPE = "drill.exec.ssl.trustStoreType"; // valid values are jks(default), jceks, pkcs12
public static final String SSL_TRUSTSTORE_PATH = "drill.exec.ssl.trustStorePath"; // path to keystore. default : $JRE_HOME/lib/security/cacerts.jks
public static final String SSL_TRUSTSTORE_PASSWORD = "drill.exec.ssl.trustStorePassword"; // default: changeit
public static final String SSL_USE_HADOOP_CONF = "drill.exec.ssl.useHadoopConfig"; // Initialize ssl params from hadoop if not provided by drill. default: true
public static final String SSL_HANDSHAKE_TIMEOUT = "drill.exec.security.user.encryption.ssl.handshakeTimeout"; // Default 10 seconds
public static final String TEXT_LINE_READER_BATCH_SIZE = "drill.exec.storage.file.text.batch.size";
public static final String TEXT_LINE_READER_BUFFER_SIZE = "drill.exec.storage.file.text.buffer.size";
public static final String HAZELCAST_SUBNETS = "drill.exec.cache.hazel.subnets";
public static final String HTTP_ENABLE = "drill.exec.http.enabled";
public static final String HTTP_MAX_PROFILES = "drill.exec.http.max_profiles";
public static final String HTTP_PORT = "drill.exec.http.port";
public static final String HTTP_PORT_HUNT = "drill.exec.http.porthunt";
public static final String HTTP_ENABLE_SSL = "drill.exec.http.ssl_enabled";
public static final String HTTP_CORS_ENABLED = "drill.exec.http.cors.enabled";
public static final String HTTP_CORS_ALLOWED_ORIGINS = "drill.exec.http.cors.allowedOrigins";
public static final String HTTP_CORS_ALLOWED_METHODS = "drill.exec.http.cors.allowedMethods";
public static final String HTTP_CORS_ALLOWED_HEADERS = "drill.exec.http.cors.allowedHeaders";
public static final String HTTP_CORS_CREDENTIALS = "drill.exec.http.cors.credentials";
public static final String HTTP_SESSION_MEMORY_RESERVATION = "drill.exec.http.session.memory.reservation";
public static final String HTTP_SESSION_MEMORY_MAXIMUM = "drill.exec.http.session.memory.maximum";
public static final String HTTP_SESSION_MAX_IDLE_SECS = "drill.exec.http.session_max_idle_secs";
public static final String HTTP_KEYSTORE_PATH = SSL_KEYSTORE_PATH;
public static final String HTTP_KEYSTORE_PASSWORD = SSL_KEYSTORE_PASSWORD;
public static final String HTTP_TRUSTSTORE_PATH = SSL_TRUSTSTORE_PATH;
public static final String HTTP_TRUSTSTORE_PASSWORD = SSL_TRUSTSTORE_PASSWORD;
public static final String HTTP_AUTHENTICATION_MECHANISMS = "drill.exec.http.auth.mechanisms";
public static final String HTTP_SPNEGO_PRINCIPAL = "drill.exec.http.auth.spnego.principal";
public static final String HTTP_SPNEGO_KEYTAB = "drill.exec.http.auth.spnego.keytab";
public static final String SYS_STORE_PROVIDER_CLASS = "drill.exec.sys.store.provider.class";
public static final String SYS_STORE_PROVIDER_LOCAL_PATH = "drill.exec.sys.store.provider.local.path";
public static final String SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE = "drill.exec.sys.store.provider.local.write";
public static final String PROFILES_STORE_INMEMORY = "drill.exec.profiles.store.inmemory";
public static final String PROFILES_STORE_CAPACITY = "drill.exec.profiles.store.capacity";
public static final String IMPERSONATION_ENABLED = "drill.exec.impersonation.enabled";
public static final String IMPERSONATION_MAX_CHAINED_USER_HOPS = "drill.exec.impersonation.max_chained_user_hops";
public static final String AUTHENTICATION_MECHANISMS = "drill.exec.security.auth.mechanisms";
public static final String USER_AUTHENTICATION_ENABLED = "drill.exec.security.user.auth.enabled";
public static final String USER_AUTHENTICATOR_IMPL = "drill.exec.security.user.auth.impl";
public static final String PAM_AUTHENTICATOR_PROFILES = "drill.exec.security.user.auth.pam_profiles";
public static final String BIT_AUTHENTICATION_ENABLED = "drill.exec.security.bit.auth.enabled";
public static final String BIT_AUTHENTICATION_MECHANISM = "drill.exec.security.bit.auth.mechanism";
public static final String USE_LOGIN_PRINCIPAL = "drill.exec.security.bit.auth.use_login_principal";
public static final String USER_ENCRYPTION_SASL_ENABLED = "drill.exec.security.user.encryption.sasl.enabled";
public static final String USER_ENCRYPTION_SASL_MAX_WRAPPED_SIZE = "drill.exec.security.user.encryption.sasl.max_wrapped_size";
public static final String WEB_SERVER_THREAD_POOL_MAX = "drill.exec.web_server.thread_pool_max";
public static final String USER_SSL_ENABLED = "drill.exec.security.user.encryption.ssl.enabled";
public static final String BIT_ENCRYPTION_SASL_ENABLED = "drill.exec.security.bit.encryption.sasl.enabled";
public static final String BIT_ENCRYPTION_SASL_MAX_WRAPPED_SIZE = "drill.exec.security.bit.encryption.sasl.max_wrapped_size";
/** Size of JDBC batch queue (in batches) above which throttling begins. */
public static final String JDBC_BATCH_QUEUE_THROTTLING_THRESHOLD =
"drill.jdbc.batch_queue_throttling_threshold";
// Thread pool size for scan threads. Used by the Parquet scan.
public static final String SCAN_THREADPOOL_SIZE = "drill.exec.scan.threadpool_size";
// The size of the thread pool used by a scan to decode the data. Used by Parquet
public static final String SCAN_DECODE_THREADPOOL_SIZE = "drill.exec.scan.decode_threadpool_size";
/**
* Currently if a query is cancelled, but one of the fragments reports the status as FAILED instead of CANCELLED or
* FINISHED we report the query result as CANCELLED by swallowing the failures occurred in fragments. This BOOT
* setting allows the user to see the query status as failure. Useful for developers/testers.
*/
public static final String RETURN_ERROR_FOR_FAILURE_IN_CANCELLED_FRAGMENTS = "drill.exec.debug.return_error_for_failure_in_cancelled_fragments";
public static final String CLIENT_SUPPORT_COMPLEX_TYPES = "drill.client.supports-complex-types";
/**
* Configuration properties connected with dynamic UDFs support
*/
public static final String UDF_RETRY_ATTEMPTS = "drill.exec.udf.retry-attempts";
public static final String UDF_DIRECTORY_LOCAL = "drill.exec.udf.directory.local";
public static final String UDF_DIRECTORY_FS = "drill.exec.udf.directory.fs";
public static final String UDF_DIRECTORY_ROOT = "drill.exec.udf.directory.root";
public static final String UDF_DIRECTORY_STAGING = "drill.exec.udf.directory.staging";
public static final String UDF_DIRECTORY_REGISTRY = "drill.exec.udf.directory.registry";
public static final String UDF_DIRECTORY_TMP = "drill.exec.udf.directory.tmp";
public static final String UDF_DISABLE_DYNAMIC = "drill.exec.udf.disable_dynamic";
/**
* Local temporary directory is used as base for temporary storage of Dynamic UDF jars.
*/
public static final String DRILL_TMP_DIR = "drill.tmp-dir";
/**
* Temporary tables can be created ONLY in default temporary workspace.
*/
public static final String DEFAULT_TEMPORARY_WORKSPACE = "drill.exec.default_temporary_workspace";
public static final String OUTPUT_FORMAT_OPTION = "store.format";
public static final OptionValidator OUTPUT_FORMAT_VALIDATOR = new StringValidator(OUTPUT_FORMAT_OPTION);
public static final String PARQUET_BLOCK_SIZE = "store.parquet.block-size";
public static final String PARQUET_WRITER_USE_SINGLE_FS_BLOCK = "store.parquet.writer.use_single_fs_block";
public static final OptionValidator PARQUET_WRITER_USE_SINGLE_FS_BLOCK_VALIDATOR = new BooleanValidator(
PARQUET_WRITER_USE_SINGLE_FS_BLOCK);
public static final OptionValidator PARQUET_BLOCK_SIZE_VALIDATOR = new PositiveLongValidator(PARQUET_BLOCK_SIZE, Integer.MAX_VALUE);
public static final String PARQUET_PAGE_SIZE = "store.parquet.page-size";
public static final OptionValidator PARQUET_PAGE_SIZE_VALIDATOR = new PositiveLongValidator(PARQUET_PAGE_SIZE, Integer.MAX_VALUE);
public static final String PARQUET_DICT_PAGE_SIZE = "store.parquet.dictionary.page-size";
public static final OptionValidator PARQUET_DICT_PAGE_SIZE_VALIDATOR = new PositiveLongValidator(PARQUET_DICT_PAGE_SIZE, Integer.MAX_VALUE);
public static final String PARQUET_WRITER_COMPRESSION_TYPE = "store.parquet.compression";
public static final OptionValidator PARQUET_WRITER_COMPRESSION_TYPE_VALIDATOR = new EnumeratedStringValidator(
PARQUET_WRITER_COMPRESSION_TYPE, "snappy", "gzip", "none");
public static final String PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING = "store.parquet.enable_dictionary_encoding";
public static final OptionValidator PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING_VALIDATOR = new BooleanValidator(
PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING);
public static final String PARQUET_VECTOR_FILL_THRESHOLD = "store.parquet.vector_fill_threshold";
public static final OptionValidator PARQUET_VECTOR_FILL_THRESHOLD_VALIDATOR = new PositiveLongValidator(PARQUET_VECTOR_FILL_THRESHOLD, 99l);
public static final String PARQUET_VECTOR_FILL_CHECK_THRESHOLD = "store.parquet.vector_fill_check_threshold";
public static final OptionValidator PARQUET_VECTOR_FILL_CHECK_THRESHOLD_VALIDATOR = new PositiveLongValidator(PARQUET_VECTOR_FILL_CHECK_THRESHOLD, 100l);
public static final String PARQUET_NEW_RECORD_READER = "store.parquet.use_new_reader";
public static final OptionValidator PARQUET_RECORD_READER_IMPLEMENTATION_VALIDATOR = new BooleanValidator(PARQUET_NEW_RECORD_READER);
public static final String PARQUET_READER_INT96_AS_TIMESTAMP = "store.parquet.reader.int96_as_timestamp";
public static final OptionValidator PARQUET_READER_INT96_AS_TIMESTAMP_VALIDATOR = new BooleanValidator(PARQUET_READER_INT96_AS_TIMESTAMP);
public static final String PARQUET_PAGEREADER_ASYNC = "store.parquet.reader.pagereader.async";
public static final OptionValidator PARQUET_PAGEREADER_ASYNC_VALIDATOR = new BooleanValidator(PARQUET_PAGEREADER_ASYNC);
// Number of pages the Async Parquet page reader will read before blocking
public static final String PARQUET_PAGEREADER_QUEUE_SIZE = "store.parquet.reader.pagereader.queuesize";
public static final OptionValidator PARQUET_PAGEREADER_QUEUE_SIZE_VALIDATOR = new PositiveLongValidator(PARQUET_PAGEREADER_QUEUE_SIZE, Integer.MAX_VALUE);
public static final String PARQUET_PAGEREADER_ENFORCETOTALSIZE = "store.parquet.reader.pagereader.enforceTotalSize";
public static final OptionValidator PARQUET_PAGEREADER_ENFORCETOTALSIZE_VALIDATOR = new BooleanValidator(PARQUET_PAGEREADER_ENFORCETOTALSIZE);
public static final String PARQUET_COLUMNREADER_ASYNC = "store.parquet.reader.columnreader.async";
public static final OptionValidator PARQUET_COLUMNREADER_ASYNC_VALIDATOR = new BooleanValidator(PARQUET_COLUMNREADER_ASYNC);
// Use a buffering reader for Parquet page reader
public static final String PARQUET_PAGEREADER_USE_BUFFERED_READ = "store.parquet.reader.pagereader.bufferedread";
public static final OptionValidator PARQUET_PAGEREADER_USE_BUFFERED_READ_VALIDATOR = new BooleanValidator(PARQUET_PAGEREADER_USE_BUFFERED_READ);
// Size in MiB of the buffer the Parquet page reader will use to read from disk. Default is 1 MiB
public static final String PARQUET_PAGEREADER_BUFFER_SIZE = "store.parquet.reader.pagereader.buffersize";
public static final OptionValidator PARQUET_PAGEREADER_BUFFER_SIZE_VALIDATOR = new LongValidator(PARQUET_PAGEREADER_BUFFER_SIZE);
// try to use fadvise if available
public static final String PARQUET_PAGEREADER_USE_FADVISE = "store.parquet.reader.pagereader.usefadvise";
public static final OptionValidator PARQUET_PAGEREADER_USE_FADVISE_VALIDATOR = new BooleanValidator(PARQUET_PAGEREADER_USE_FADVISE);
public static final OptionValidator COMPILE_SCALAR_REPLACEMENT = new BooleanValidator("exec.compile.scalar_replacement");
public static final String JSON_ALL_TEXT_MODE = "store.json.all_text_mode";
public static final BooleanValidator JSON_READER_ALL_TEXT_MODE_VALIDATOR = new BooleanValidator(JSON_ALL_TEXT_MODE);
public static final BooleanValidator JSON_EXTENDED_TYPES = new BooleanValidator("store.json.extended_types");
public static final BooleanValidator JSON_WRITER_UGLIFY = new BooleanValidator("store.json.writer.uglify");
public static final BooleanValidator JSON_WRITER_SKIPNULLFIELDS = new BooleanValidator("store.json.writer.skip_null_fields");
public static final String JSON_READER_SKIP_INVALID_RECORDS_FLAG = "store.json.reader.skip_invalid_records";
public static final BooleanValidator JSON_SKIP_MALFORMED_RECORDS_VALIDATOR = new BooleanValidator(JSON_READER_SKIP_INVALID_RECORDS_FLAG);
public static final String JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG = "store.json.reader.print_skipped_invalid_record_number";
public static final BooleanValidator JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG_VALIDATOR = new BooleanValidator(JSON_READER_PRINT_INVALID_RECORDS_LINE_NOS_FLAG);
public static final DoubleValidator TEXT_ESTIMATED_ROW_SIZE = new RangeDoubleValidator("store.text.estimated_row_size_bytes", 1, Long.MAX_VALUE);
/**
* Json writer option for writing `NaN` and `Infinity` tokens as numbers (not enclosed with double quotes)
*/
public static final String JSON_WRITER_NAN_INF_NUMBERS = "store.json.writer.allow_nan_inf";
public static final BooleanValidator JSON_WRITER_NAN_INF_NUMBERS_VALIDATOR = new BooleanValidator(JSON_WRITER_NAN_INF_NUMBERS);
/**
* Json reader option that enables parser to read `NaN` and `Infinity` tokens as numbers
*/
public static final String JSON_READER_NAN_INF_NUMBERS = "store.json.reader.allow_nan_inf";
public static final BooleanValidator JSON_READER_NAN_INF_NUMBERS_VALIDATOR = new BooleanValidator(JSON_READER_NAN_INF_NUMBERS);
/**
* The column label (for directory levels) in results when querying files in a directory
* E.g. labels: dir0 dir1<pre>
* structure: foo
* |- bar - a.parquet
* |- baz - b.parquet</pre>
*/
public static final String FILESYSTEM_PARTITION_COLUMN_LABEL = "drill.exec.storage.file.partition.column.label";
public static final StringValidator FILESYSTEM_PARTITION_COLUMN_LABEL_VALIDATOR = new StringValidator(FILESYSTEM_PARTITION_COLUMN_LABEL);
/**
* Implicit file columns
*/
public static final String IMPLICIT_FILENAME_COLUMN_LABEL = "drill.exec.storage.implicit.filename.column.label";
public static final OptionValidator IMPLICIT_FILENAME_COLUMN_LABEL_VALIDATOR = new StringValidator(IMPLICIT_FILENAME_COLUMN_LABEL);
public static final String IMPLICIT_SUFFIX_COLUMN_LABEL = "drill.exec.storage.implicit.suffix.column.label";
public static final OptionValidator IMPLICIT_SUFFIX_COLUMN_LABEL_VALIDATOR = new StringValidator(IMPLICIT_SUFFIX_COLUMN_LABEL);
public static final String IMPLICIT_FQN_COLUMN_LABEL = "drill.exec.storage.implicit.fqn.column.label";
public static final OptionValidator IMPLICIT_FQN_COLUMN_LABEL_VALIDATOR = new StringValidator(IMPLICIT_FQN_COLUMN_LABEL);
public static final String IMPLICIT_FILEPATH_COLUMN_LABEL = "drill.exec.storage.implicit.filepath.column.label";
public static final OptionValidator IMPLICIT_FILEPATH_COLUMN_LABEL_VALIDATOR = new StringValidator(IMPLICIT_FILEPATH_COLUMN_LABEL);
public static final String JSON_READ_NUMBERS_AS_DOUBLE = "store.json.read_numbers_as_double";
public static final BooleanValidator JSON_READ_NUMBERS_AS_DOUBLE_VALIDATOR = new BooleanValidator(JSON_READ_NUMBERS_AS_DOUBLE);
public static final String MONGO_ALL_TEXT_MODE = "store.mongo.all_text_mode";
public static final OptionValidator MONGO_READER_ALL_TEXT_MODE_VALIDATOR = new BooleanValidator(MONGO_ALL_TEXT_MODE);
public static final String MONGO_READER_READ_NUMBERS_AS_DOUBLE = "store.mongo.read_numbers_as_double";
public static final OptionValidator MONGO_READER_READ_NUMBERS_AS_DOUBLE_VALIDATOR = new BooleanValidator(MONGO_READER_READ_NUMBERS_AS_DOUBLE);
public static final String MONGO_BSON_RECORD_READER = "store.mongo.bson.record.reader";
public static final OptionValidator MONGO_BSON_RECORD_READER_VALIDATOR = new BooleanValidator(MONGO_BSON_RECORD_READER);
public static final String ENABLE_UNION_TYPE_KEY = "exec.enable_union_type";
public static final BooleanValidator ENABLE_UNION_TYPE = new BooleanValidator(ENABLE_UNION_TYPE_KEY);
// Kafka plugin related options.
public static final String KAFKA_ALL_TEXT_MODE = "store.kafka.all_text_mode";
public static final OptionValidator KAFKA_READER_ALL_TEXT_MODE_VALIDATOR = new BooleanValidator(KAFKA_ALL_TEXT_MODE);
public static final String KAFKA_READER_READ_NUMBERS_AS_DOUBLE = "store.kafka.read_numbers_as_double";
public static final OptionValidator KAFKA_READER_READ_NUMBERS_AS_DOUBLE_VALIDATOR = new BooleanValidator(
KAFKA_READER_READ_NUMBERS_AS_DOUBLE);
public static final String KAFKA_RECORD_READER = "store.kafka.record.reader";
public static final OptionValidator KAFKA_RECORD_READER_VALIDATOR = new StringValidator(KAFKA_RECORD_READER);
public static final String KAFKA_POLL_TIMEOUT = "store.kafka.poll.timeout";
public static final PositiveLongValidator KAFKA_POLL_TIMEOUT_VALIDATOR = new PositiveLongValidator(KAFKA_POLL_TIMEOUT,
Long.MAX_VALUE);
// TODO: We need to add a feature that enables storage plugins to add their own options. Currently we have to declare
// in core which is not right. Move this option and above two mongo plugin related options once we have the feature.
public static final String HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS = "store.hive.optimize_scan_with_native_readers";
public static final OptionValidator HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS_VALIDATOR =
new BooleanValidator(HIVE_OPTIMIZE_SCAN_WITH_NATIVE_READERS);
public static final String SLICE_TARGET = "planner.slice_target";
public static final long SLICE_TARGET_DEFAULT = 100000l;
public static final PositiveLongValidator SLICE_TARGET_OPTION = new PositiveLongValidator(SLICE_TARGET, Long.MAX_VALUE);
public static final String CAST_TO_NULLABLE_NUMERIC = "drill.exec.functions.cast_empty_string_to_null";
public static final BooleanValidator CAST_TO_NULLABLE_NUMERIC_OPTION = new BooleanValidator(CAST_TO_NULLABLE_NUMERIC);
/**
* HashTable runtime settings
*/
public static final String MIN_HASH_TABLE_SIZE_KEY = "exec.min_hash_table_size";
public static final PositiveLongValidator MIN_HASH_TABLE_SIZE = new PositiveLongValidator(MIN_HASH_TABLE_SIZE_KEY, HashTable.MAXIMUM_CAPACITY);
public static final String MAX_HASH_TABLE_SIZE_KEY = "exec.max_hash_table_size";
public static final PositiveLongValidator MAX_HASH_TABLE_SIZE = new PositiveLongValidator(MAX_HASH_TABLE_SIZE_KEY, HashTable.MAXIMUM_CAPACITY);
/**
* Limits the maximum level of parallelization to this factor time the number of Drillbits
*/
public static final String CPU_LOAD_AVERAGE_KEY = "planner.cpu_load_average";
public static final DoubleValidator CPU_LOAD_AVERAGE = new DoubleValidator(CPU_LOAD_AVERAGE_KEY);
public static final String MAX_WIDTH_PER_NODE_KEY = "planner.width.max_per_node";
public static final MaxWidthValidator MAX_WIDTH_PER_NODE = new MaxWidthValidator(MAX_WIDTH_PER_NODE_KEY);
/**
* The maximum level or parallelization any stage of the query can do. Note that while this
* might be the number of active Drillbits, realistically, this could be well beyond that
* number of we want to do things like speed results return.
*/
public static final String MAX_WIDTH_GLOBAL_KEY = "planner.width.max_per_query";
public static final OptionValidator MAX_WIDTH_GLOBAL = new PositiveLongValidator(MAX_WIDTH_GLOBAL_KEY, Integer.MAX_VALUE);
/**
* Factor by which a node with endpoint affinity will be favored while creating assignment
*/
public static final String AFFINITY_FACTOR_KEY = "planner.affinity_factor";
public static final OptionValidator AFFINITY_FACTOR = new DoubleValidator(AFFINITY_FACTOR_KEY);
public static final String EARLY_LIMIT0_OPT_KEY = "planner.enable_limit0_optimization";
public static final BooleanValidator EARLY_LIMIT0_OPT = new BooleanValidator(EARLY_LIMIT0_OPT_KEY);
public static final String ENABLE_MEMORY_ESTIMATION_KEY = "planner.memory.enable_memory_estimation";
public static final OptionValidator ENABLE_MEMORY_ESTIMATION = new BooleanValidator(ENABLE_MEMORY_ESTIMATION_KEY);
/**
* Maximum query memory per node (in MB). Re-plan with cheaper operators if
* memory estimation exceeds this limit.
* <p/>
* DEFAULT: 2048 MB
*/
public static final String MAX_QUERY_MEMORY_PER_NODE_KEY = "planner.memory.max_query_memory_per_node";
public static final LongValidator MAX_QUERY_MEMORY_PER_NODE = new RangeLongValidator(MAX_QUERY_MEMORY_PER_NODE_KEY, 1024 * 1024, Long.MAX_VALUE);
/**
* Alternative way to compute per-query-per-node memory as a percent
* of the total available system memory.
* <p>
* Suggestion for computation.
* <ul>
* <li>Assume an allowance for non-managed operators. Default assumption:
* 50%</li>
* <li>Assume a desired number of concurrent queries. Default assumption:
* 10.</li>
* <li>The value of this parameter is<br>
* (1 - non-managed allowance) / concurrency</li>
* </ul>
* Doing the math produces the default 5% number. The actual number
* given is no less than the <tt>max_query_memory_per_node</tt>
* amount.
* <p>
* This number is used only when throttling is disabled. Setting the
* number to 0 effectively disables this technique as it will always
* produce values lower than <tt>max_query_memory_per_node</tt>.
* <p>
* DEFAULT: 5%
*/
public static String PERCENT_MEMORY_PER_QUERY_KEY = "planner.memory.percent_per_query";
public static DoubleValidator PERCENT_MEMORY_PER_QUERY = new RangeDoubleValidator(
PERCENT_MEMORY_PER_QUERY_KEY, 0, 1.0);
/**
* Minimum memory allocated to each buffered operator instance.
* <p/>
* DEFAULT: 40 MB
*/
public static final String MIN_MEMORY_PER_BUFFERED_OP_KEY = "planner.memory.min_memory_per_buffered_op";
public static final LongValidator MIN_MEMORY_PER_BUFFERED_OP = new RangeLongValidator(MIN_MEMORY_PER_BUFFERED_OP_KEY, 1024 * 1024, Long.MAX_VALUE);
/**
* Extra query memory per node for non-blocking operators.
* NOTE: This option is currently used only for memory estimation.
* <p/>
* DEFAULT: 64 MB
* MAXIMUM: 2048 MB
*/
public static final String NON_BLOCKING_OPERATORS_MEMORY_KEY = "planner.memory.non_blocking_operators_memory";
public static final OptionValidator NON_BLOCKING_OPERATORS_MEMORY = new PowerOfTwoLongValidator(
NON_BLOCKING_OPERATORS_MEMORY_KEY, 1 << 11);
public static final String HASH_JOIN_TABLE_FACTOR_KEY = "planner.memory.hash_join_table_factor";
public static final OptionValidator HASH_JOIN_TABLE_FACTOR = new DoubleValidator(HASH_JOIN_TABLE_FACTOR_KEY);
public static final String HASH_AGG_TABLE_FACTOR_KEY = "planner.memory.hash_agg_table_factor";
public static final OptionValidator HASH_AGG_TABLE_FACTOR = new DoubleValidator(HASH_AGG_TABLE_FACTOR_KEY);
public static final String AVERAGE_FIELD_WIDTH_KEY = "planner.memory.average_field_width";
public static final OptionValidator AVERAGE_FIELD_WIDTH = new PositiveLongValidator(AVERAGE_FIELD_WIDTH_KEY, Long.MAX_VALUE);
// Mux Exchange options.
public static final String ORDERED_MUX_EXCHANGE = "planner.enable_ordered_mux_exchange";
// Resource management boot-time options.
public static final String MAX_MEMORY_PER_NODE = "drill.exec.rm.memory_per_node";
public static final String MAX_CPUS_PER_NODE = "drill.exec.rm.cpus_per_node";
// Resource management system run-time options.
// Enables queues. When running embedded, enables an in-process queue. When
// running distributed, enables the Zookeeper-based distributed queue.
public static final BooleanValidator ENABLE_QUEUE = new BooleanValidator("exec.queue.enable");
public static final LongValidator LARGE_QUEUE_SIZE = new PositiveLongValidator("exec.queue.large", 10_000);
public static final LongValidator SMALL_QUEUE_SIZE = new PositiveLongValidator("exec.queue.small", 100_000);
public static final LongValidator QUEUE_THRESHOLD_SIZE = new PositiveLongValidator("exec.queue.threshold", Long.MAX_VALUE);
public static final LongValidator QUEUE_TIMEOUT = new PositiveLongValidator("exec.queue.timeout_millis", Long.MAX_VALUE);
// Ratio of memory for small queries vs. large queries.
// Each small query gets 1 unit, each large query gets QUEUE_MEMORY_RATIO units.
// A lower limit of 1 enforces the intuition that a large query should never get
// *less* memory than a small one.
public static final DoubleValidator QUEUE_MEMORY_RATIO = new RangeDoubleValidator("exec.queue.memory_ratio", 1.0, 1000);
public static final DoubleValidator QUEUE_MEMORY_RESERVE = new RangeDoubleValidator("exec.queue.memory_reserve_ratio", 0, 1.0);
public static final String ENABLE_VERBOSE_ERRORS_KEY = "exec.errors.verbose";
public static final OptionValidator ENABLE_VERBOSE_ERRORS = new BooleanValidator(ENABLE_VERBOSE_ERRORS_KEY);
public static final String ENABLE_NEW_TEXT_READER_KEY = "exec.storage.enable_new_text_reader";
public static final OptionValidator ENABLE_NEW_TEXT_READER = new BooleanValidator(ENABLE_NEW_TEXT_READER_KEY);
public static final String BOOTSTRAP_STORAGE_PLUGINS_FILE = "bootstrap-storage-plugins.json";
public static final String DRILL_SYS_FILE_SUFFIX = ".sys.drill";
public static final String ENABLE_WINDOW_FUNCTIONS = "window.enable";
public static final OptionValidator ENABLE_WINDOW_FUNCTIONS_VALIDATOR = new BooleanValidator(ENABLE_WINDOW_FUNCTIONS);
public static final String DRILLBIT_CONTROL_INJECTIONS = "drill.exec.testing.controls";
public static final OptionValidator DRILLBIT_CONTROLS_VALIDATOR = new ExecutionControls.ControlsOptionValidator(DRILLBIT_CONTROL_INJECTIONS, 1);
public static final String NEW_VIEW_DEFAULT_PERMS_KEY = "new_view_default_permissions";
public static final OptionValidator NEW_VIEW_DEFAULT_PERMS_VALIDATOR = new StringValidator(NEW_VIEW_DEFAULT_PERMS_KEY);
public static final String CTAS_PARTITIONING_HASH_DISTRIBUTE = "store.partition.hash_distribute";
public static final BooleanValidator CTAS_PARTITIONING_HASH_DISTRIBUTE_VALIDATOR = new BooleanValidator(CTAS_PARTITIONING_HASH_DISTRIBUTE);
public static final String ENABLE_BULK_LOAD_TABLE_LIST_KEY = "exec.enable_bulk_load_table_list";
public static final BooleanValidator ENABLE_BULK_LOAD_TABLE_LIST = new BooleanValidator(ENABLE_BULK_LOAD_TABLE_LIST_KEY);
/**
* When getting Hive Table information with exec.enable_bulk_load_table_list set to true,
* use the exec.bulk_load_table_list.bulk_size to determine how many tables to fetch from HiveMetaStore
* at a time. (The number of tables can get to be quite large.)
*/
public static final String BULK_LOAD_TABLE_LIST_BULK_SIZE_KEY = "exec.bulk_load_table_list.bulk_size";
public static final PositiveLongValidator BULK_LOAD_TABLE_LIST_BULK_SIZE = new PositiveLongValidator(BULK_LOAD_TABLE_LIST_BULK_SIZE_KEY, Integer.MAX_VALUE);
/**
* Option whose value is a comma separated list of admin usernames. Admin users are users who have special privileges
* such as changing system options.
*/
public static final String ADMIN_USERS_KEY = "security.admin.users";
public static final AdminUsersValidator ADMIN_USERS_VALIDATOR = new AdminUsersValidator(ADMIN_USERS_KEY);
/**
* Option whose value is a comma separated list of admin usergroups.
*/
public static final String ADMIN_USER_GROUPS_KEY = "security.admin.user_groups";
public static final AdminUserGroupsValidator ADMIN_USER_GROUPS_VALIDATOR =
new AdminUserGroupsValidator(ADMIN_USER_GROUPS_KEY);
/**
* Option whose value is a string representing list of inbound impersonation policies.
*
* Impersonation policy format:
* [
* {
* proxy_principals : { users : [“...”], groups : [“...”] },
* target_principals : { users : [“...”], groups : [“...”] }
* },
* ...
* ]
*/
public static final String IMPERSONATION_POLICIES_KEY = "exec.impersonation.inbound_policies";
public static final StringValidator IMPERSONATION_POLICY_VALIDATOR =
new InboundImpersonationManager.InboundImpersonationPolicyValidator(IMPERSONATION_POLICIES_KEY);
/**
* Web settings
*/
public static final String WEB_LOGS_MAX_LINES = "web.logs.max_lines";
public static final OptionValidator WEB_LOGS_MAX_LINES_VALIDATOR = new PositiveLongValidator(WEB_LOGS_MAX_LINES, Integer.MAX_VALUE);
public static final String CODE_GEN_EXP_IN_METHOD_SIZE = "exec.java.compiler.exp_in_method_size";
public static final LongValidator CODE_GEN_EXP_IN_METHOD_SIZE_VALIDATOR = new LongValidator(CODE_GEN_EXP_IN_METHOD_SIZE);
/**
* Timeout for create prepare statement request. If the request exceeds this timeout, then request is timed out.
* Default value is 10mins.
*/
public static final String CREATE_PREPARE_STATEMENT_TIMEOUT_MILLIS = "prepare.statement.create_timeout_ms";
public static final OptionValidator CREATE_PREPARE_STATEMENT_TIMEOUT_MILLIS_VALIDATOR =
new PositiveLongValidator(CREATE_PREPARE_STATEMENT_TIMEOUT_MILLIS, Integer.MAX_VALUE);
public static final String DYNAMIC_UDF_SUPPORT_ENABLED = "exec.udf.enable_dynamic_support";
public static final BooleanValidator DYNAMIC_UDF_SUPPORT_ENABLED_VALIDATOR = new BooleanValidator(DYNAMIC_UDF_SUPPORT_ENABLED);
/**
* Option to save query profiles. If false, no query profile will be saved
* for any query.
*/
public static final String ENABLE_QUERY_PROFILE_OPTION = "exec.query_profile.save";
public static final BooleanValidator ENABLE_QUERY_PROFILE_VALIDATOR = new BooleanValidator(ENABLE_QUERY_PROFILE_OPTION);
/**
* Profiles are normally written after the last client message to reduce latency.
* When running tests, however, we want the profile written <i>before</i> the
* return so that the client can immediately read the profile for test
* verification.
*/
public static final String QUERY_PROFILE_DEBUG_OPTION = "exec.query_profile.debug_mode";
public static final BooleanValidator QUERY_PROFILE_DEBUG_VALIDATOR = new BooleanValidator(QUERY_PROFILE_DEBUG_OPTION);
public static final String USE_DYNAMIC_UDFS_KEY = "exec.udf.use_dynamic";
public static final BooleanValidator USE_DYNAMIC_UDFS = new BooleanValidator(USE_DYNAMIC_UDFS_KEY);
public static final String QUERY_TRANSIENT_STATE_UPDATE_KEY = "exec.query.progress.update";
public static final BooleanValidator QUERY_TRANSIENT_STATE_UPDATE = new BooleanValidator(QUERY_TRANSIENT_STATE_UPDATE_KEY);
public static final String PERSISTENT_TABLE_UMASK = "exec.persistent_table.umask";
public static final StringValidator PERSISTENT_TABLE_UMASK_VALIDATOR = new StringValidator(PERSISTENT_TABLE_UMASK);
/**
* Enables batch iterator (operator) validation. Validation is normally enabled
* only when assertions are enabled. This option enables iterator validation even
* if assertions are not enabled. That is, it allows iterator validation even on
* a "production" Drill instance.
*/
public static final String ENABLE_ITERATOR_VALIDATION_OPTION = "debug.validate_iterators";
public static final BooleanValidator ENABLE_ITERATOR_VALIDATOR = new BooleanValidator(ENABLE_ITERATOR_VALIDATION_OPTION);
/**
* Boot-time config option to enable validation. Primarily used for tests.
* If true, overrrides the above. (That is validation is done if assertions are on,
* if the above session option is set to true, or if this config option is set to true.
*/
public static final String ENABLE_ITERATOR_VALIDATION = "drill.exec.debug.validate_iterators";
/**
* When iterator validation is enabled, additionally validates the vectors in
* each batch passed to each iterator.
*/
public static final String ENABLE_VECTOR_VALIDATION_OPTION = "debug.validate_vectors";
public static final BooleanValidator ENABLE_VECTOR_VALIDATOR = new BooleanValidator(ENABLE_VECTOR_VALIDATION_OPTION);
/**
* Boot-time config option to enable vector validation. Primarily used for
* tests. Add the following to the command line to enable:<br>
* <tt>-ea -Ddrill.exec.debug.validate_vectors=true</tt>
*/
public static final String ENABLE_VECTOR_VALIDATION = "drill.exec.debug.validate_vectors";
public static final String OPTION_DEFAULTS_ROOT = "drill.exec.options.";

/**
 * Builds the boot-config property name under which the default value for the
 * given system/session option is stored, e.g.
 * {@code bootDefaultFor("store.format")} yields
 * {@code "drill.exec.options.store.format"}.
 *
 * @param name the option name to qualify
 * @return the option name prefixed with {@link #OPTION_DEFAULTS_ROOT}
 */
public static String bootDefaultFor(String name) {
  final StringBuilder qualified = new StringBuilder(OPTION_DEFAULTS_ROOT);
  qualified.append(name);
  return qualified.toString();
}
/**
* Boot-time config option provided to modify duration of the grace period.
* Grace period is the amount of time where the drillbit accepts work after
* the shutdown request is triggered. The primary use of grace period is to
* avoid the race conditions caused by zookeeper delay in updating the state
* information of the drillbit that is shutting down. So, it is advisable
* to have a grace period that is atleast twice the amount of zookeeper
* refresh time.
*/
public static final String GRACE_PERIOD = "drill.exec.grace_period_ms";
public static final String DRILL_PORT_HUNT = "drill.exec.port_hunt";
}
|
apache-2.0
|
Horusiath/akka.net
|
test/Akka.Tests/Util/SwitchTests.cs
|
3717
|
using System;
using Akka.Util;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Akka.Tests.Util
{
[TestClass]
public class SwitchTests : AkkaSpec
{
    /// <summary>
    /// Walks a switch created in the off state through every legal and
    /// illegal transition, checking state flags after each step.
    /// </summary>
    [TestMethod]
    public void OnAndOff()
    {
        var sut = new Switch(false);

        // Freshly constructed: off, not on.
        Assert.IsTrue(sut.IsOff, "Initially should be off");
        Assert.IsFalse(sut.IsOn, "Initially should not be on");

        // off -> on succeeds exactly once.
        Assert.IsTrue(sut.SwitchOn(), "Switch on from off should succeed");
        Assert.IsTrue(sut.IsOn, "Switched on should be on");
        Assert.IsFalse(sut.IsOff, "Switched on should not be off");
        Assert.IsFalse(sut.SwitchOn(), "Switch on when already on should not succeed");
        Assert.IsTrue(sut.IsOn, "Already switched on should be on");
        Assert.IsFalse(sut.IsOff, "Already switched on should not be off");

        // on -> off succeeds exactly once.
        Assert.IsTrue(sut.SwitchOff(), "Switch off from on should succeed");
        Assert.IsTrue(sut.IsOff, "Switched off should be off");
        Assert.IsFalse(sut.IsOn, "Switched off should not be on");
        Assert.IsFalse(sut.SwitchOff(), "Switch off when already off should not succeed");
        Assert.IsTrue(sut.IsOff, "Already switched off should be off");
        Assert.IsFalse(sut.IsOn, "Already switched off should not be on");
    }

    /// <summary>A switch constructed with <c>true</c> starts in the on state.</summary>
    [TestMethod]
    public void InitiallyOnShouldBeOn()
    {
        var sut = new Switch(true);
        Assert.IsTrue(sut.IsOn, "Switched on should be on");
        Assert.IsFalse(sut.IsOff, "Switched on should not be off");
    }

    /// <summary>
    /// If the action passed to SwitchOn throws, the switch must roll back to off.
    /// </summary>
    [TestMethod]
    public void Given_OffSwitch_When_SwitchOn_throws_exception_Then_Should_revert()
    {
        var sut = new Switch(false);
        intercept<InvalidOperationException>(() => sut.SwitchOn(() => { throw new InvalidOperationException(); }));
        Assert.IsTrue(sut.IsOff);
        Assert.IsFalse(sut.IsOn);
    }

    /// <summary>
    /// If the action passed to SwitchOff throws, the switch must roll back to on.
    /// </summary>
    [TestMethod]
    public void Given_OnSwitch_When_SwitchOff_throws_exception_Then_Should_revert()
    {
        var sut = new Switch(true);
        intercept<InvalidOperationException>(() => sut.SwitchOff(() => { throw new InvalidOperationException(); }));
        Assert.IsTrue(sut.IsOn);
        Assert.IsFalse(sut.IsOff);
    }

    /// <summary>
    /// IfOn/IfOff run the action only when the state matches, without holding the lock.
    /// </summary>
    [TestMethod]
    public void RunActionWithoutLocking()
    {
        var sut = new Switch(false);

        // Off: only IfOff fires.
        var wasRun = false;
        Assert.IsTrue(sut.IfOff(() => { wasRun = true; }));
        Assert.IsTrue(wasRun);
        wasRun = false;
        Assert.IsFalse(sut.IfOn(() => { wasRun = true; }));
        Assert.IsFalse(wasRun);

        // On: only IfOn fires.
        sut.SwitchOn();
        wasRun = false;
        Assert.IsTrue(sut.IfOn(() => { wasRun = true; }));
        Assert.IsTrue(wasRun);
        wasRun = false;
        Assert.IsFalse(sut.IfOff(() => { wasRun = true; }));
        Assert.IsFalse(wasRun);
    }

    /// <summary>
    /// WhileOn/WhileOff run the action only when the state matches, under the lock.
    /// </summary>
    [TestMethod]
    public void RunActionWithLocking()
    {
        var sut = new Switch(false);

        // Off: only WhileOff fires.
        var wasRun = false;
        Assert.IsTrue(sut.WhileOff(() => { wasRun = true; }));
        Assert.IsTrue(wasRun);
        wasRun = false;
        Assert.IsFalse(sut.WhileOn(() => { wasRun = true; }));
        Assert.IsFalse(wasRun);

        // On: only WhileOn fires.
        sut.SwitchOn();
        wasRun = false;
        Assert.IsTrue(sut.WhileOn(() => { wasRun = true; }));
        Assert.IsTrue(wasRun);
        wasRun = false;
        Assert.IsFalse(sut.WhileOff(() => { wasRun = true; }));
        Assert.IsFalse(wasRun);
    }
}
}
|
apache-2.0
|
GoogleCloudPlatform/buildpack-samples
|
sample-python/main.py
|
252
|
import os

from flask import Flask

app = Flask(__name__)


@app.get("/")
def index():
    """Root endpoint: returns a static greeting (also serves as a liveness check)."""
    return "hello, world"


if __name__ == "__main__":
    # Dev only: run "python main.py" and open http://localhost:8080
    # Honor the PORT env var (buildpack/Cloud Run convention — TODO confirm for
    # this deployment target); falls back to the previous hard-coded 8080.
    app.run(host="localhost", port=int(os.environ.get("PORT", "8080")), debug=True)
|
apache-2.0
|
assemblade/CAT
|
cat-directory/src/main/java/com/assemblade/opendj/acis/AciFactory.java
|
2298
|
/*
* Copyright 2012 Mike Adamson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.assemblade.opendj.acis;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Parses raw ACI (Access Control Instruction) strings into structured
 * {@link AccessControlItem} objects. The regex sources ({@code header},
 * {@code target}, {@code body}) are inherited from {@link AciPatterns}.
 */
public class AciFactory implements AciPatterns {
    // Compiled once per class load; Pattern instances are thread-safe.
    private static final Pattern headerPattern = Pattern.compile(header);
    private static final Pattern targetPattern = Pattern.compile(target);
    private static final Pattern bodyPattern = Pattern.compile(body);

    /**
     * Parses an ACI string of the form: targets, header (carrying the ACI name),
     * then permission rules.
     *
     * @param aci the raw ACI text to parse
     * @return the parsed access control item (name, targets, permissions)
     * @throws IllegalArgumentException if the string contains no recognizable
     *         ACI header
     */
    public static AccessControlItem parse(String aci) {
        Matcher headerMatcher = headerPattern.matcher(aci);
        if (!headerMatcher.find()) {
            // Previously this fell through with null targets/rules and later
            // failed with an opaque NullPointerException inside matcher();
            // fail fast with a descriptive message instead.
            throw new IllegalArgumentException("Not a parseable ACI: " + aci);
        }
        // Everything before the header is the target section; everything after
        // it is the rule (permission) section.
        String targets = aci.substring(0, headerMatcher.start());
        String name = headerMatcher.group(1);
        String rules = aci.substring(headerMatcher.end());

        List<Target> targetList = new ArrayList<Target>();
        Matcher targetMatcher = targetPattern.matcher(targets);
        while (targetMatcher.find()) {
            // group(1)=keyword, group(2)=operator, group(3)=expression
            targetList.add(new Target(targetMatcher.group(1), targetMatcher.group(2), targetMatcher.group(3)));
        }

        List<Permission> ruleList = new ArrayList<Permission>();
        Matcher bodyMatcher = bodyPattern.matcher(rules);
        while (bodyMatcher.find()) {
            // group(1)=permission (allow/deny), group(2)=rights, group(3)=bind rule
            ruleList.add(new Permission(bodyMatcher.group(1), bodyMatcher.group(2), Subject.parse(bodyMatcher.group(3))));
        }
        return new AccessControlItem(name, targetList, ruleList);
    }
}
|
apache-2.0
|
krumman/bubbles-for-android
|
README.md
|
4318
|
Bubbles for Android
=====================
Bubbles for Android is an Android library that provides chat-head capabilities for your apps, with a fast and simple way to integrate it into your project.

## Latest Version
[ ](https://bintray.com/txusballesteros/maven/bubbles-for-android/_latestVersion)
## How to use
### Configuring your project dependencies
Add the library dependency in your build.gradle file.
```groovy
dependencies {
...
compile 'com.txusballesteros:bubbles:1.0'
}
```
### Configuring your AndroidManifest
Add the next lines to you AndroidManifest.xml file.
```xml
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.txusballesteros.bubbles.app" >
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
<application ...>
...
<service android:name="com.txusballesteros.bubbles.BubblesService"
android:enabled="true"
android:exported="false" />
</application>
</manifest>
```
### Adding your first Bubble
Compose your Bubble layout, for example using an XML layout file. Remember that the root view of your Bubble layout has to be a BubbleLayout view.
```xml
<com.txusballesteros.bubbles.BubbleLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
<ImageView
android:id="@+id/avatar"
android:layout_width="70dp"
android:layout_height="70dp"
android:layout_gravity="center"
android:background="@drawable/profile_decorator"
android:src="@drawable/profile"
android:scaleType="centerCrop"/>
</com.txusballesteros.bubbles.BubbleLayout>
```
Create your BubblesManager instance.
```java
private BubblesManager bubblesManager;
@Override
protected void onCreate(Bundle savedInstanceState) {
bubblesManager = new BubblesManager.Builder(this)
.build();
bubblesManager.initialize();
...
}
@Override
protected void onDestroy() {
bubblesManager.recycle();
...
}
```
Attach your Bubble to the window.
```java
BubbleLayout bubbleView = (BubbleLayout)LayoutInflater
.from(MainActivity.this).inflate(R.layout.bubble_layout, null);
bubblesManager.addBubble(bubbleView, 60, 20);
```
### Configuring your Bubbles Trash
If you want a trash target for removing on-screen bubbles, you can configure
its layout.
Define your trash layout Xml.
```xml
<ImageView
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginBottom="20dp"
android:src="@mipmap/bubble_trash_background"
android:layout_gravity="bottom|center_horizontal" />
```
Configure the trash layout with your BubblesManager builder.
```java
private BubblesManager bubblesManager;
@Override
protected void onCreate(Bundle savedInstanceState) {
bubblesManager = new BubblesManager.Builder(this)
.setTrashLayout(R.layout.bubble_trash_layout)
.build();
bubblesManager.initialize();
...
}
```
## License
Copyright Txus Ballesteros 2015 (@txusballesteros)
This file is part of some open source application.
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
Contact: Txus Ballesteros <[email protected]>
|
apache-2.0
|
ScalABM/contracts-sandbox
|
README.md
|
624
|
[](https://travis-ci.org/ScalABM/contracts-sandbox)
[](https://coveralls.io/github/ScalABM/contracts-sandbox?branch=master)
[](https://gitter.im/ScalABM/contracts-sandbox?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
# contracts-sandbox
A sandbox for building composable implementations of economic contracts
|
apache-2.0
|
bytescout/ByteScout-SDK-SourceCode
|
PDF.co Web API/PDF Classifier/JavaScript/Classify PDF From URL (jQuery)/program.js
|
2113
|
// PDF.co Web API sample: classify a PDF document fetched from a URL.
//
// Download Free Evaluation Version From: https://bytescout.com/download/web-installer
// Also available as Web API! Get Your Free API Key: https://app.pdf.co/signup
//
// Copyright © 2017-2020 ByteScout, Inc. All rights reserved.
// https://www.bytescout.com
// https://pdf.co

// Request headers: JSON payload plus the (empty here) API key.
var requestHeaders = new Headers();
requestHeaders.append("Content-Type", "application/json");
requestHeaders.append("x-api-key", "");

// You can also upload your own file into PDF.co and use it as url. Check "Upload File" samples for code snippets: https://github.com/bytescout/pdf-co-api-samples/tree/master/File%20Upload/
var payload = JSON.stringify({
    "url": "https://bytescout-com.s3-us-west-2.amazonaws.com/files/demo-files/cloud-api/document-parser/sample-invoice.pdf",
    "rulescsv": "Amazon,Amazon Web Services Invoice|Amazon CloudFront\nDigital Ocean,DigitalOcean|DOInvoice\nAcme,ACME Inc.|1540 Long Street, Jacksonville, 32099",
    "caseSensitive": "true",
    "async": false,
    "encrypt": "false",
    "inline": "true",
    "password": "",
    "profiles": ""
});

// POST the payload to the classifier endpoint and log the raw text response.
fetch("https://api.pdf.co/v1/pdf/classifier", {
    method: 'POST',
    headers: requestHeaders,
    body: payload,
    redirect: 'follow'
})
    .then((response) => response.text())
    .then((result) => console.log(result))
    .catch((error) => console.log('error', error));
|
apache-2.0
|
inputx/code-ref-doc
|
bonfire/_variables/_ci_view.html
|
5700
|
<!doctype html public "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd">
<html>
<head>
<title>PHPXRef 0.7.1 : Unnamed Project : Variable Reference: $_ci_view</title>
<link rel="stylesheet" href="../sample.css" type="text/css">
<link rel="stylesheet" href="../sample-print.css" type="text/css" media="print">
<style id="hilight" type="text/css"></style>
<meta http-equiv="content-type" content="text/html;charset=iso-8859-1">
</head>
<body bgcolor="#ffffff" text="#000000" link="#801800" vlink="#300540" alink="#ffffff">
<table class="pagetitle" width="100%">
<tr>
<td valign="top" class="pagetitle">
[ <a href="../index.html">Index</a> ]
</td>
<td align="right" class="pagetitle">
<h2 style="margin-bottom: 0px">PHP Cross Reference of Unnamed Project</h2>
</td>
</tr>
</table>
<!-- Generated by PHPXref 0.7.1 at Thu Oct 23 18:57:41 2014 -->
<!-- PHPXref (c) 2000-2010 Gareth Watts - [email protected] -->
<!-- http://phpxref.sourceforge.net/ -->
<script src="../phpxref.js" type="text/javascript"></script>
<script language="JavaScript" type="text/javascript">
<!--
ext='.html';
relbase='../';
subdir='_variables';
filename='index.html';
cookiekey='phpxref';
handleNavFrame(relbase, subdir, filename);
logVariable('_ci_view');
// -->
</script>
<script language="JavaScript" type="text/javascript">
if (gwGetCookie('xrefnav')=='off')
document.write('<p class="navlinks">[ <a href="javascript:navOn()">Show Explorer<\/a> ]<\/p>');
else
document.write('<p class="navlinks">[ <a href="javascript:navOff()">Hide Explorer<\/a> ]<\/p>');
</script>
<noscript>
<p class="navlinks">
[ <a href="../nav.html" target="_top">Show Explorer</a> ]
[ <a href="index.html" target="_top">Hide Navbar</a> ]
</p>
</noscript>
[<a href="../index.html">Top level directory</a>]<br>
<script language="JavaScript" type="text/javascript">
<!--
document.writeln('<table align="right" class="searchbox-link"><tr><td><a class="searchbox-link" href="javascript:void(0)" onMouseOver="showSearchBox()">Search</a><br>');
document.writeln('<table border="0" cellspacing="0" cellpadding="0" class="searchbox" id="searchbox">');
document.writeln('<tr><td class="searchbox-title">');
document.writeln('<a class="searchbox-title" href="javascript:showSearchPopup()">Search History +</a>');
document.writeln('<\/td><\/tr>');
document.writeln('<tr><td class="searchbox-body" id="searchbox-body">');
document.writeln('<form name="search" style="margin:0px; padding:0px" onSubmit=\'return jump()\'>');
document.writeln('<a class="searchbox-body" href="../_classes/index.html">Class<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="classname"><br>');
document.writeln('<a id="funcsearchlink" class="searchbox-body" href="../_functions/index.html">Function<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="funcname"><br>');
document.writeln('<a class="searchbox-body" href="../_variables/index.html">Variable<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="varname"><br>');
document.writeln('<a class="searchbox-body" href="../_constants/index.html">Constant<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="constname"><br>');
document.writeln('<a class="searchbox-body" href="../_tables/index.html">Table<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="tablename"><br>');
document.writeln('<input type="submit" class="searchbox-button" value="Search">');
document.writeln('<\/form>');
document.writeln('<\/td><\/tr><\/table>');
document.writeln('<\/td><\/tr><\/table>');
// -->
</script>
<div id="search-popup" class="searchpopup"><p id="searchpopup-title" class="searchpopup-title">title</p><div id="searchpopup-body" class="searchpopup-body">Body</div><p class="searchpopup-close"><a href="javascript:gwCloseActive()">[close]</a></p></div>
<h3>Variable Cross Reference</h3>
<h2><a href="index.html#_ci_view">$_ci_view</a></h2>
<br><b>Referenced 7 times:</b><ul>
<li><a href="../application/third_party/MX/Loader.php.html">/application/third_party/MX/Loader.php</a> -> <a href="../application/third_party/MX/Loader.php.source.html#l286"> line 286</a></li>
<li><a href="../application/third_party/MX/Loader.php.html">/application/third_party/MX/Loader.php</a> -> <a href="../application/third_party/MX/Loader.php.source.html#l291"> line 291</a></li>
<li><a href="../application/third_party/MX/Loader.php.html">/application/third_party/MX/Loader.php</a> -> <a href="../application/third_party/MX/Loader.php.source.html#l291"> line 291</a></li>
<li><a href="../application/third_party/MX/Loader.php.html">/application/third_party/MX/Loader.php</a> -> <a href="../application/third_party/MX/Loader.php.source.html#l291"> line 291</a></li>
<li><a href="../bonfire/codeigniter/core/Loader.php.html">/bonfire/codeigniter/core/Loader.php</a> -> <a href="../bonfire/codeigniter/core/Loader.php.source.html#l759"> line 759</a></li>
<li><a href="../bonfire/codeigniter/core/Loader.php.html">/bonfire/codeigniter/core/Loader.php</a> -> <a href="../bonfire/codeigniter/core/Loader.php.source.html#l760"> line 760</a></li>
<li><a href="../bonfire/codeigniter/core/Loader.php.html">/bonfire/codeigniter/core/Loader.php</a> -> <a href="../bonfire/codeigniter/core/Loader.php.source.html#l760"> line 760</a></li>
</ul>
<!-- A link to the phpxref site in your customized footer file is appreciated ;-) -->
<br><hr>
<table width="100%">
<tr><td>Generated: Thu Oct 23 18:57:41 2014</td>
<td align="right"><i>Cross-referenced by <a href="http://phpxref.sourceforge.net/">PHPXref 0.7.1</a></i></td>
</tr>
</table>
</body></html>
|
apache-2.0
|
ogycode/JesusPassword
|
src/Windows/Unsupported projects/Jesus password/assets/core/Site.cs
|
1134
|
/*
Copyright 2015 Verloka Vadim, http://ogy.pp.ua
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
namespace JesusPassword.assets.core
{
    /// <summary>
    /// Serializable value holding the stored credentials for a single site.
    /// NOTE(review): mutable struct with public setters — callers mutating a
    /// copy will not affect the original; confirm this is intended.
    /// </summary>
    [Serializable]
    public struct Site
    {
        /// <summary>Site name.</summary>
        public string Name { get; set; }
        /// <summary>Site address (URL).</summary>
        public string Address { get; set; }
        /// <summary>Login used on the site.</summary>
        public string Login { get; set; }
        /// <summary>Password for the account.</summary>
        public string Password { get; set; }
        /// <summary>E-mail associated with the account.</summary>
        public string Mail { get; set; }
        /// <summary>Additional user-defined key/value fields.</summary>
        public Dictionary<string, string> CustomFields { get; set; }
        /// <summary>When the entry was added.</summary>
        public DateTime DateAdd { get; set; }
    }
}
|
apache-2.0
|
britter/bootify-testpyramid
|
src/test/java/com/github/britter/bootifytestpyramid/domain/WeightTest.java
|
2642
|
/*
 * Copyright 2017 Benedikt Ritter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.britter.bootifytestpyramid.domain;

import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;

import java.math.BigDecimal;

import static com.github.britter.bootifytestpyramid.domain.WeightTemplates.ONE;
import static com.github.britter.bootifytestpyramid.domain.WeightTemplates.TWO;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertAll;
import static org.junit.jupiter.api.Assertions.assertThrows;

/**
 * Unit tests for {@code Weight}: constructor invariants, arithmetic, and ordering.
 */
class WeightTest {
    @Nested
    class Invariants {
        @Test
        void should_throw_exception_when_passing_null_value() {
            assertThrows(NullPointerException.class, () -> new Weight(null));
        }

        @Test
        void should_throw_exception_when_passing_negative_value() {
            // Both the BigDecimal and the primitive constructor must reject negatives.
            assertAll(
                    () -> assertThrows(IllegalArgumentException.class, () -> new Weight(BigDecimal.valueOf(-1))),
                    () -> assertThrows(IllegalArgumentException.class, () -> new Weight(-1))
            );
        }
    }

    @Nested
    class Calculations {
        @Nested
        class Add {
            @Test
            void should_add_weights() {
                assertThat(ONE.add(ONE)).isEqualTo(TWO);
            }
        }

        @Nested
        class Multiply {
            @Test
            void should_multiply_weights() {
                assertThat(ONE.multiply(2)).isEqualTo(TWO);
            }

            // Fixed typo in the method name: "negtaive" -> "negative".
            @Test
            void should_throw_exception_when_multiply_with_negative_factor() {
                assertThrows(IllegalArgumentException.class, () -> ONE.multiply(-2));
            }
        }
    }

    @Nested
    class Comparing {
        @Test
        void should_compare_to_other_weights() {
            assertAll(
                    () -> assertThat(ONE.compareTo(ONE)).isEqualTo(0),
                    () -> assertThat(ONE.compareTo(TWO)).isLessThan(0),
                    () -> assertThat(TWO.compareTo(ONE)).isGreaterThan(0)
            );
        }
    }
}
|
apache-2.0
|
richygreat/service
|
src/test/java/com/occar/test/rest/DBRestClient.java
|
385
|
package com.occar.test.rest;
import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
 * JAX-RS client interface for the {@code /db} test endpoint.
 */
@Path("/db")
public interface DBRestClient {
    /**
     * POSTs a query to {@code /db} as form parameters.
     * Presumably the server executes the query on behalf of the given user —
     * confirm against the server-side implementation.
     *
     * @param query sent as form parameter {@code q}
     * @param uid   sent as form parameter {@code uid}
     * @return the HTTP response (JSON body)
     */
    @POST
    @Produces(MediaType.APPLICATION_JSON)
    public Response query(@FormParam("q") String query, @FormParam("uid") String uid);
}
|
apache-2.0
|
MyersResearchGroup/iBioSim
|
verification/src/main/java/edu/utah/ece/async/lema/verification/platu/platuLpn/io/PlatuGrammarLexer.java
|
77940
|
/*******************************************************************************
*
* This file is part of iBioSim. Please visit <http://www.async.ece.utah.edu/ibiosim>
* for the latest version of iBioSim.
*
* Copyright (C) 2017 University of Utah
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the Apache License. A copy of the license agreement is provided
* in the file named "LICENSE.txt" included with this software distribution
* and also available online at <http://www.async.ece.utah.edu/ibiosim/License>.
*
*******************************************************************************/
// $ANTLR 3.4 /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g 2013-06-26 17:00:36
package edu.utah.ece.async.lema.verification.platu.platuLpn.io;
import org.antlr.runtime.*;
import java.util.Stack;
import java.util.List;
import java.util.ArrayList;
@SuppressWarnings({"all", "warnings", "unchecked"})
public class PlatuGrammarLexer extends Lexer {
public static final int EOF=-1;
public static final int T__57=57;
public static final int T__58=58;
public static final int T__59=59;
public static final int T__60=60;
public static final int T__61=61;
public static final int T__62=62;
public static final int T__63=63;
public static final int AND=4;
public static final int BITWISE_AND=5;
public static final int BITWISE_LSHIFT=6;
public static final int BITWISE_NEGATION=7;
public static final int BITWISE_OR=8;
public static final int BITWISE_RSHIFT=9;
public static final int BITWISE_XOR=10;
public static final int COLON=11;
public static final int COMMA=12;
public static final int COMMENT=13;
public static final int DIGIT=14;
public static final int DIV=15;
public static final int EQUALS=16;
public static final int EQUIV=17;
public static final int FALSE=18;
public static final int GREATER=19;
public static final int GREATER_EQUAL=20;
public static final int ID=21;
public static final int IGNORE=22;
public static final int IMPLICATION=23;
public static final int INPUT=24;
public static final int INT=25;
public static final int INTERNAL=26;
public static final int LABEL=27;
public static final int LESS=28;
public static final int LESS_EQUAL=29;
public static final int LETTER=30;
public static final int LPAREN=31;
public static final int MARKING=32;
public static final int MINUS=33;
public static final int MOD=34;
public static final int MODULE=35;
public static final int MULTILINECOMMENT=36;
public static final int NAME=37;
public static final int NEGATION=38;
public static final int NOT_EQUIV=39;
public static final int OR=40;
public static final int OUTPUT=41;
public static final int PERIOD=42;
public static final int PLUS=43;
public static final int POSTSET=44;
public static final int PRESET=45;
public static final int QMARK=46;
public static final int QUOTE=47;
public static final int RPAREN=48;
public static final int SEMICOLON=49;
public static final int STATE_VECTOR=50;
public static final int TIMES=51;
public static final int TRANSITION=52;
public static final int TRUE=53;
public static final int UNDERSCORE=54;
public static final int WS=55;
public static final int XMLCOMMENT=56;
// delegates
// delegators
public Lexer[] getDelegates() {
return new Lexer[] {};
}
public PlatuGrammarLexer() {}
public PlatuGrammarLexer(CharStream input) {
this(input, new RecognizerSharedState());
}
public PlatuGrammarLexer(CharStream input, RecognizerSharedState state) {
super(input,state);
}
public String getGrammarFileName() { return "/Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g"; }
// $ANTLR start "T__57"
public final void mT__57() throws RecognitionException {
try {
int _type = T__57;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:11:7: ( '[' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:11:9: '['
{
match('[');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__57"
// $ANTLR start "T__58"
public final void mT__58() throws RecognitionException {
try {
int _type = T__58;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:12:7: ( ']' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:12:9: ']'
{
match(']');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__58"
// $ANTLR start "T__59"
public final void mT__59() throws RecognitionException {
try {
int _type = T__59;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:13:7: ( 'assert' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:13:9: 'assert'
{
match("assert");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__59"
// $ANTLR start "T__60"
public final void mT__60() throws RecognitionException {
try {
int _type = T__60;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:14:7: ( 'const' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:14:9: 'const'
{
match("const");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__60"
// $ANTLR start "T__61"
public final void mT__61() throws RecognitionException {
try {
int _type = T__61;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:15:7: ( 'inf' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:15:9: 'inf'
{
match("inf");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__61"
// $ANTLR start "T__62"
public final void mT__62() throws RecognitionException {
try {
int _type = T__62;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:16:7: ( 'inst' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:16:9: 'inst'
{
match("inst");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__62"
// $ANTLR start "T__63"
public final void mT__63() throws RecognitionException {
try {
int _type = T__63;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:17:7: ( 'main' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:17:9: 'main'
{
match("main");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "T__63"
// $ANTLR start "LPAREN"
public final void mLPAREN() throws RecognitionException {
try {
int _type = LPAREN;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1343:7: ( '(' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1343:9: '('
{
match('(');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "LPAREN"
// $ANTLR start "RPAREN"
public final void mRPAREN() throws RecognitionException {
try {
int _type = RPAREN;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1344:7: ( ')' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1344:9: ')'
{
match(')');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "RPAREN"
// $ANTLR start "QMARK"
public final void mQMARK() throws RecognitionException {
try {
int _type = QMARK;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1345:6: ( '?' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1345:8: '?'
{
match('?');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "QMARK"
// $ANTLR start "COLON"
public final void mCOLON() throws RecognitionException {
try {
int _type = COLON;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1346:6: ( ':' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1346:8: ':'
{
match(':');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "COLON"
// $ANTLR start "SEMICOLON"
public final void mSEMICOLON() throws RecognitionException {
try {
int _type = SEMICOLON;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1347:10: ( ';' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1347:12: ';'
{
match(';');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "SEMICOLON"
// $ANTLR start "PERIOD"
public final void mPERIOD() throws RecognitionException {
try {
int _type = PERIOD;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1348:7: ( '.' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1348:9: '.'
{
match('.');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "PERIOD"
// $ANTLR start "UNDERSCORE"
public final void mUNDERSCORE() throws RecognitionException {
try {
int _type = UNDERSCORE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1349:11: ( '_' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1349:13: '_'
{
match('_');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "UNDERSCORE"
// $ANTLR start "COMMA"
public final void mCOMMA() throws RecognitionException {
try {
int _type = COMMA;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1350:6: ( ',' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1350:8: ','
{
match(',');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "COMMA"
// $ANTLR start "QUOTE"
public final void mQUOTE() throws RecognitionException {
try {
int _type = QUOTE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1351:6: ( '\"' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1351:8: '\"'
{
match('\"');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "QUOTE"
// $ANTLR start "MODULE"
public final void mMODULE() throws RecognitionException {
try {
int _type = MODULE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1354:7: ( 'mod' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1354:9: 'mod'
{
match("mod");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "MODULE"
// $ANTLR start "NAME"
public final void mNAME() throws RecognitionException {
try {
int _type = NAME;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1355:5: ( 'name' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1355:7: 'name'
{
match("name");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "NAME"
// $ANTLR start "INPUT"
public final void mINPUT() throws RecognitionException {
try {
int _type = INPUT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1356:6: ( 'input' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1356:8: 'input'
{
match("input");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "INPUT"
// $ANTLR start "OUTPUT"
public final void mOUTPUT() throws RecognitionException {
try {
int _type = OUTPUT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1357:7: ( 'output' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1357:9: 'output'
{
match("output");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "OUTPUT"
// $ANTLR start "INTERNAL"
public final void mINTERNAL() throws RecognitionException {
try {
int _type = INTERNAL;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1358:9: ( 'var' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1358:11: 'var'
{
match("var");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "INTERNAL"
// ---------------------------------------------------------------------------
// Keyword token rules (ANTLR-generated; do not hand-edit — regenerate from
// PlatuGrammar.g). Each mXXX() consumes one fixed keyword from the input
// stream via match(...) and records the token type/channel in the shared
// lexer state. On mismatch, match(...) throws a RecognitionException.
// ---------------------------------------------------------------------------
// Lexes the 'marking' keyword as a MARKING token.
// $ANTLR start "MARKING"
public final void mMARKING() throws RecognitionException {
try {
int _type = MARKING;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1359:8: ( 'marking' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1359:10: 'marking'
{
match("marking");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "MARKING"
// Lexes the 'statevector' keyword as a STATE_VECTOR token.
// $ANTLR start "STATE_VECTOR"
public final void mSTATE_VECTOR() throws RecognitionException {
try {
int _type = STATE_VECTOR;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1360:13: ( 'statevector' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1360:15: 'statevector'
{
match("statevector");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "STATE_VECTOR"
// Lexes the 'transition' keyword as a TRANSITION token.
// $ANTLR start "TRANSITION"
public final void mTRANSITION() throws RecognitionException {
try {
int _type = TRANSITION;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1361:11: ( 'transition' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1361:13: 'transition'
{
match("transition");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "TRANSITION"
// Lexes the 'label' keyword as a LABEL token.
// $ANTLR start "LABEL"
public final void mLABEL() throws RecognitionException {
try {
int _type = LABEL;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1362:6: ( 'label' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1362:8: 'label'
{
match("label");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "LABEL"
// Lexes the 'preset' keyword as a PRESET token.
// $ANTLR start "PRESET"
public final void mPRESET() throws RecognitionException {
try {
int _type = PRESET;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1363:7: ( 'preset' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1363:9: 'preset'
{
match("preset");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "PRESET"
// Lexes the 'postset' keyword as a POSTSET token.
// $ANTLR start "POSTSET"
public final void mPOSTSET() throws RecognitionException {
try {
int _type = POSTSET;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1364:8: ( 'postset' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1364:10: 'postset'
{
match("postset");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "POSTSET"
// Lexes the boolean literal 'true' as a TRUE token.
// $ANTLR start "TRUE"
public final void mTRUE() throws RecognitionException {
try {
int _type = TRUE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1365:5: ( 'true' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1365:7: 'true'
{
match("true");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "TRUE"
// Lexes the boolean literal 'false' as a FALSE token.
// $ANTLR start "FALSE"
public final void mFALSE() throws RecognitionException {
try {
int _type = FALSE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1366:6: ( 'false' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1366:8: 'false'
{
match("false");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "FALSE"
// ---------------------------------------------------------------------------
// Operator token rules (ANTLR-generated; do not hand-edit — regenerate from
// PlatuGrammar.g). Single-character operators use match(char); two-character
// operators use match(String). Disambiguation between overlapping operators
// (e.g. '>' vs '>=' vs '>>') is handled by the DFA in mTokens(), not here.
// ---------------------------------------------------------------------------
// Lexes '+' as a PLUS token.
// $ANTLR start "PLUS"
public final void mPLUS() throws RecognitionException {
try {
int _type = PLUS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1369:5: ( '+' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1369:7: '+'
{
match('+');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "PLUS"
// Lexes '-' as a MINUS token.
// $ANTLR start "MINUS"
public final void mMINUS() throws RecognitionException {
try {
int _type = MINUS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1370:6: ( '-' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1370:8: '-'
{
match('-');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "MINUS"
// Lexes '*' as a TIMES token.
// $ANTLR start "TIMES"
public final void mTIMES() throws RecognitionException {
try {
int _type = TIMES;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1371:6: ( '*' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1371:8: '*'
{
match('*');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "TIMES"
// Lexes '/' as a DIV token.
// $ANTLR start "DIV"
public final void mDIV() throws RecognitionException {
try {
int _type = DIV;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1372:4: ( '/' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1372:6: '/'
{
match('/');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "DIV"
// Lexes '%' as a MOD token.
// $ANTLR start "MOD"
public final void mMOD() throws RecognitionException {
try {
int _type = MOD;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1373:4: ( '%' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1373:6: '%'
{
match('%');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "MOD"
// Lexes '=' (assignment) as an EQUALS token.
// $ANTLR start "EQUALS"
public final void mEQUALS() throws RecognitionException {
try {
int _type = EQUALS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1374:7: ( '=' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1374:9: '='
{
match('=');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "EQUALS"
// Lexes '>' as a GREATER token.
// $ANTLR start "GREATER"
public final void mGREATER() throws RecognitionException {
try {
int _type = GREATER;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1377:8: ( '>' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1377:10: '>'
{
match('>');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "GREATER"
// Lexes '<' as a LESS token.
// $ANTLR start "LESS"
public final void mLESS() throws RecognitionException {
try {
int _type = LESS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1378:5: ( '<' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1378:7: '<'
{
match('<');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "LESS"
// Lexes '>=' as a GREATER_EQUAL token.
// $ANTLR start "GREATER_EQUAL"
public final void mGREATER_EQUAL() throws RecognitionException {
try {
int _type = GREATER_EQUAL;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1379:14: ( '>=' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1379:16: '>='
{
match(">=");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "GREATER_EQUAL"
// Lexes '<=' as a LESS_EQUAL token.
// $ANTLR start "LESS_EQUAL"
public final void mLESS_EQUAL() throws RecognitionException {
try {
int _type = LESS_EQUAL;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1380:11: ( '<=' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1380:13: '<='
{
match("<=");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "LESS_EQUAL"
// Lexes '==' (equality comparison) as an EQUIV token.
// $ANTLR start "EQUIV"
public final void mEQUIV() throws RecognitionException {
try {
int _type = EQUIV;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1381:6: ( '==' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1381:8: '=='
{
match("==");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "EQUIV"
// Lexes '!=' as a NOT_EQUIV token.
// $ANTLR start "NOT_EQUIV"
public final void mNOT_EQUIV() throws RecognitionException {
try {
int _type = NOT_EQUIV;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1382:10: ( '!=' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1382:12: '!='
{
match("!=");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "NOT_EQUIV"
// Lexes '!' (logical not) as a NEGATION token.
// $ANTLR start "NEGATION"
public final void mNEGATION() throws RecognitionException {
try {
int _type = NEGATION;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1385:9: ( '!' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1385:11: '!'
{
match('!');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "NEGATION"
// Lexes '&&' (logical and) as an AND token.
// $ANTLR start "AND"
public final void mAND() throws RecognitionException {
try {
int _type = AND;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1386:4: ( '&&' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1386:6: '&&'
{
match("&&");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "AND"
// Lexes '||' (logical or) as an OR token.
// $ANTLR start "OR"
public final void mOR() throws RecognitionException {
try {
int _type = OR;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1387:3: ( '||' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1387:5: '||'
{
match("||");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "OR"
// Lexes '->' (logical implication) as an IMPLICATION token.
// $ANTLR start "IMPLICATION"
public final void mIMPLICATION() throws RecognitionException {
try {
int _type = IMPLICATION;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1388:12: ( '->' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1388:14: '->'
{
match("->");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "IMPLICATION"
// Lexes '~' as a BITWISE_NEGATION token.
// $ANTLR start "BITWISE_NEGATION"
public final void mBITWISE_NEGATION() throws RecognitionException {
try {
int _type = BITWISE_NEGATION;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1391:17: ( '~' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1391:19: '~'
{
match('~');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "BITWISE_NEGATION"
// Lexes '&' as a BITWISE_AND token.
// $ANTLR start "BITWISE_AND"
public final void mBITWISE_AND() throws RecognitionException {
try {
int _type = BITWISE_AND;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1392:12: ( '&' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1392:14: '&'
{
match('&');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "BITWISE_AND"
// Lexes '|' as a BITWISE_OR token.
// $ANTLR start "BITWISE_OR"
public final void mBITWISE_OR() throws RecognitionException {
try {
int _type = BITWISE_OR;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1393:11: ( '|' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1393:13: '|'
{
match('|');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "BITWISE_OR"
// Lexes '^' as a BITWISE_XOR token.
// $ANTLR start "BITWISE_XOR"
public final void mBITWISE_XOR() throws RecognitionException {
try {
int _type = BITWISE_XOR;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1394:12: ( '^' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1394:14: '^'
{
match('^');
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "BITWISE_XOR"
// Lexes '<<' as a BITWISE_LSHIFT token.
// $ANTLR start "BITWISE_LSHIFT"
public final void mBITWISE_LSHIFT() throws RecognitionException {
try {
int _type = BITWISE_LSHIFT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1395:15: ( '<<' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1395:17: '<<'
{
match("<<");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "BITWISE_LSHIFT"
// Lexes '>>' as a BITWISE_RSHIFT token.
// $ANTLR start "BITWISE_RSHIFT"
public final void mBITWISE_RSHIFT() throws RecognitionException {
try {
int _type = BITWISE_RSHIFT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1396:15: ( '>>' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1396:17: '>>'
{
match(">>");
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "BITWISE_RSHIFT"
// Fragment rule: consumes exactly one ASCII letter [A-Za-z]. Note it does not
// set state.type/state.channel — fragments only consume input for the rules
// that call them (ID) and never produce a token themselves.
// $ANTLR start "LETTER"
public final void mLETTER() throws RecognitionException {
try {
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1398:16: ( ( 'a' .. 'z' | 'A' .. 'Z' ) )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:
{
// Set match: consume if the lookahead is in [A-Z] or [a-z], else recover+rethrow.
if ( (input.LA(1) >= 'A' && input.LA(1) <= 'Z')||(input.LA(1) >= 'a' && input.LA(1) <= 'z') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "LETTER"
// Fragment rule: consumes exactly one decimal digit [0-9]. Like LETTER, it
// never emits a token itself; INT and ID use it as a building block.
// $ANTLR start "DIGIT"
public final void mDIGIT() throws RecognitionException {
try {
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1399:15: ( '0' .. '9' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:
{
if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "DIGIT"
// Lexes an integer literal: an optional leading '-' followed by one or more
// digits ( '-'? DIGIT+ ). The digit loop is inlined rather than calling
// mDIGIT(), but consumes the same character set.
// NOTE(review): INT's optional '-' overlaps lexically with the MINUS token;
// which one wins for input like '-5' is decided by the DFA in mTokens() —
// confirm against the grammar if arithmetic like 'a-5' must lex as MINUS.
// $ANTLR start "INT"
public final void mINT() throws RecognitionException {
try {
int _type = INT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1400:4: ( ( '-' )? ( DIGIT )+ )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1400:6: ( '-' )? ( DIGIT )+
{
// Optional sign: consume '-' only if it is the next character.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1400:6: ( '-' )?
int alt1=2;
int LA1_0 = input.LA(1);
if ( (LA1_0=='-') ) {
alt1=1;
}
switch (alt1) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1400:6: '-'
{
match('-');
}
break;
}
// One-or-more digits; cnt2 enforces the "+" (at least one) requirement.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1400:11: ( DIGIT )+
int cnt2=0;
loop2:
do {
int alt2=2;
int LA2_0 = input.LA(1);
if ( ((LA2_0 >= '0' && LA2_0 <= '9')) ) {
alt2=1;
}
switch (alt2) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:
{
if ( (input.LA(1) >= '0' && input.LA(1) <= '9') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
// Zero digits matched: the '+' closure failed — report early exit.
if ( cnt2 >= 1 ) break loop2;
EarlyExitException eee =
new EarlyExitException(2, input);
throw eee;
}
cnt2++;
} while (true);
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "INT"
// Lexes an identifier: LETTER ( ( UNDERSCORE | PERIOD )? ( LETTER | DIGIT ) )*.
// i.e. it starts with a letter, and each subsequent letter/digit may be
// preceded by at most one '_' or '.' separator — so an ID cannot end in
// '_'/'.' and cannot contain two separators in a row.
// $ANTLR start "ID"
public final void mID() throws RecognitionException {
try {
int _type = ID;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1401:3: ( LETTER ( ( UNDERSCORE | PERIOD )? ( LETTER | DIGIT ) )* )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1401:5: LETTER ( ( UNDERSCORE | PERIOD )? ( LETTER | DIGIT ) )*
{
// Mandatory first character: one letter.
mLETTER();
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1401:12: ( ( UNDERSCORE | PERIOD )? ( LETTER | DIGIT ) )*
loop4:
do {
int alt4=2;
int LA4_0 = input.LA(1);
// Continue while the next char can start another (sep? alnum) group.
if ( (LA4_0=='.'||(LA4_0 >= '0' && LA4_0 <= '9')||(LA4_0 >= 'A' && LA4_0 <= 'Z')||LA4_0=='_'||(LA4_0 >= 'a' && LA4_0 <= 'z')) ) {
alt4=1;
}
switch (alt4) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1401:13: ( UNDERSCORE | PERIOD )? ( LETTER | DIGIT )
{
// Optional single separator: '_' or '.'.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1401:13: ( UNDERSCORE | PERIOD )?
int alt3=2;
int LA3_0 = input.LA(1);
if ( (LA3_0=='.'||LA3_0=='_') ) {
alt3=1;
}
switch (alt3) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:
{
if ( input.LA(1)=='.'||input.LA(1)=='_' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
}
// Mandatory letter-or-digit after the (optional) separator.
if ( (input.LA(1) >= '0' && input.LA(1) <= '9')||(input.LA(1) >= 'A' && input.LA(1) <= 'Z')||(input.LA(1) >= 'a' && input.LA(1) <= 'z') ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
break loop4;
}
} while (true);
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "ID"
// Lexes a run of one or more whitespace characters (space, tab, LF, CR, FF)
// as a single WS token, routed to the HIDDEN channel so the parser never
// sees it.
// $ANTLR start "WS"
public final void mWS() throws RecognitionException {
try {
int _type = WS;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1402:3: ( ( ' ' | '\\t' | '\\n' | '\\r' | '\\f' )+ )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1402:5: ( ' ' | '\\t' | '\\n' | '\\r' | '\\f' )+
{
// One-or-more whitespace chars; cnt5 enforces the "+" minimum of one.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1402:5: ( ' ' | '\\t' | '\\n' | '\\r' | '\\f' )+
int cnt5=0;
loop5:
do {
int alt5=2;
int LA5_0 = input.LA(1);
// '\t'..'\n', '\f'..'\r', and ' ' are the whitespace set.
if ( ((LA5_0 >= '\t' && LA5_0 <= '\n')||(LA5_0 >= '\f' && LA5_0 <= '\r')||LA5_0==' ') ) {
alt5=1;
}
switch (alt5) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:
{
if ( (input.LA(1) >= '\t' && input.LA(1) <= '\n')||(input.LA(1) >= '\f' && input.LA(1) <= '\r')||input.LA(1)==' ' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
}
break;
default :
if ( cnt5 >= 1 ) break loop5;
EarlyExitException eee =
new EarlyExitException(5, input);
throw eee;
}
cnt5++;
} while (true);
// Whitespace is skipped by the parser: send it to the hidden channel.
_channel = HIDDEN;
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "WS"
// Lexes a '//' line comment: '//' followed by any characters up to (and
// including) the first '\n' or '\r'. The DFA below makes the ( . )* loop
// effectively non-greedy by exiting as soon as the lookahead is a newline.
// The token is sent to the HIDDEN channel.
// $ANTLR start "COMMENT"
public final void mCOMMENT() throws RecognitionException {
try {
int _type = COMMENT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1403:8: ( '//' ( . )* ( '\\n' | '\\r' ) )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1403:10: '//' ( . )* ( '\\n' | '\\r' )
{
match("//");
// Consume comment text until a line terminator is seen.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1403:15: ( . )*
loop6:
do {
int alt6=2;
int LA6_0 = input.LA(1);
if ( (LA6_0=='\n'||LA6_0=='\r') ) {
alt6=2;
}
else if ( ((LA6_0 >= '\u0000' && LA6_0 <= '\t')||(LA6_0 >= '\u000B' && LA6_0 <= '\f')||(LA6_0 >= '\u000E' && LA6_0 <= '\uFFFF')) ) {
alt6=1;
}
switch (alt6) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1403:15: .
{
matchAny();
}
break;
default :
break loop6;
}
} while (true);
// Consume the terminating newline itself (required — EOF without a
// newline after '//' raises a MismatchedSetException here).
if ( input.LA(1)=='\n'||input.LA(1)=='\r' ) {
input.consume();
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
recover(mse);
throw mse;
}
_channel = HIDDEN;
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "COMMENT"
// Lexes a '/* ... */' block comment. The two-character lookahead in the loop
// stops the ( . )* closure exactly at the first '*/' (non-greedy), which is
// then matched explicitly. Sent to the HIDDEN channel.
// $ANTLR start "MULTILINECOMMENT"
public final void mMULTILINECOMMENT() throws RecognitionException {
try {
int _type = MULTILINECOMMENT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1404:17: ( '/*' ( . )* '*/' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1404:19: '/*' ( . )* '*/'
{
match("/*");
// Consume comment body; LA(1)=='*' && LA(2)=='/' terminates the loop.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1404:24: ( . )*
loop7:
do {
int alt7=2;
int LA7_0 = input.LA(1);
if ( (LA7_0=='*') ) {
int LA7_1 = input.LA(2);
if ( (LA7_1=='/') ) {
alt7=2;
}
else if ( ((LA7_1 >= '\u0000' && LA7_1 <= '.')||(LA7_1 >= '0' && LA7_1 <= '\uFFFF')) ) {
alt7=1;
}
}
else if ( ((LA7_0 >= '\u0000' && LA7_0 <= ')')||(LA7_0 >= '+' && LA7_0 <= '\uFFFF')) ) {
alt7=1;
}
switch (alt7) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1404:24: .
{
matchAny();
}
break;
default :
break loop7;
}
} while (true);
// Consume the closing delimiter.
match("*/");
_channel = HIDDEN;
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "MULTILINECOMMENT"
// Lexes an XML-style comment '<!-- ... -->'. A three-character lookahead
// ('-','-','>') terminates the inner ( . )* loop non-greedily before the
// closing delimiter is matched. Sent to the HIDDEN channel.
// $ANTLR start "XMLCOMMENT"
public final void mXMLCOMMENT() throws RecognitionException {
try {
int _type = XMLCOMMENT;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1405:11: ( ( '<' '!' '-' '-' ) ( . )* ( '-' '-' '>' ) )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1405:13: ( '<' '!' '-' '-' ) ( . )* ( '-' '-' '>' )
{
// Opening delimiter '<!--', matched one character at a time.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1405:13: ( '<' '!' '-' '-' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1405:14: '<' '!' '-' '-'
{
match('<');
match('!');
match('-');
match('-');
}
// Consume comment body; the loop exits when LA(1..3) == '-','-','>'.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1405:31: ( . )*
loop8:
do {
int alt8=2;
int LA8_0 = input.LA(1);
if ( (LA8_0=='-') ) {
int LA8_1 = input.LA(2);
if ( (LA8_1=='-') ) {
int LA8_3 = input.LA(3);
if ( (LA8_3=='>') ) {
alt8=2;
}
else if ( ((LA8_3 >= '\u0000' && LA8_3 <= '=')||(LA8_3 >= '?' && LA8_3 <= '\uFFFF')) ) {
alt8=1;
}
}
else if ( ((LA8_1 >= '\u0000' && LA8_1 <= ',')||(LA8_1 >= '.' && LA8_1 <= '\uFFFF')) ) {
alt8=1;
}
}
else if ( ((LA8_0 >= '\u0000' && LA8_0 <= ',')||(LA8_0 >= '.' && LA8_0 <= '\uFFFF')) ) {
alt8=1;
}
switch (alt8) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1405:31: .
{
matchAny();
}
break;
default :
break loop8;
}
} while (true);
// Closing delimiter '-->'.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1405:34: ( '-' '-' '>' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1405:35: '-' '-' '>'
{
match('-');
match('-');
match('>');
}
_channel = HIDDEN;
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "XMLCOMMENT"
// Lexes an XML processing-instruction-style span '<? ... ?>' as an IGNORE
// token on the HIDDEN channel. The two-character lookahead ('?','>') makes
// the inner ( . )* loop non-greedy.
// $ANTLR start "IGNORE"
public final void mIGNORE() throws RecognitionException {
try {
int _type = IGNORE;
int _channel = DEFAULT_TOKEN_CHANNEL;
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1406:7: ( '<' '?' ( . )* '?' '>' )
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1406:9: '<' '?' ( . )* '?' '>'
{
match('<');
match('?');
// Consume body; the loop exits when LA(1)=='?' and LA(2)=='>'.
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1406:17: ( . )*
loop9:
do {
int alt9=2;
int LA9_0 = input.LA(1);
if ( (LA9_0=='?') ) {
int LA9_1 = input.LA(2);
if ( (LA9_1=='>') ) {
alt9=2;
}
else if ( ((LA9_1 >= '\u0000' && LA9_1 <= '=')||(LA9_1 >= '?' && LA9_1 <= '\uFFFF')) ) {
alt9=1;
}
}
else if ( ((LA9_0 >= '\u0000' && LA9_0 <= '>')||(LA9_0 >= '@' && LA9_0 <= '\uFFFF')) ) {
alt9=1;
}
switch (alt9) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1406:17: .
{
matchAny();
}
break;
default :
break loop9;
}
} while (true);
// Closing delimiter '?>'.
match('?');
match('>');
_channel = HIDDEN;
}
state.type = _type;
state.channel = _channel;
}
finally {
// do for sure before leaving
}
}
// $ANTLR end "IGNORE"
// Top-level tokenizer dispatch: the generated DFA (dfa10) inspects the input
// lookahead, predicts which of the 58 lexer rules applies next, and this
// switch delegates to the corresponding mXXX() rule method. This is where
// ambiguities between overlapping rules (keywords vs ID, '>' vs '>=' vs '>>',
// '-' vs '->' vs INT, '/' vs COMMENT vs MULTILINECOMMENT) are resolved.
public void mTokens() throws RecognitionException {
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:8: ( T__57 | T__58 | T__59 | T__60 | T__61 | T__62 | T__63 | LPAREN | RPAREN | QMARK | COLON | SEMICOLON | PERIOD | UNDERSCORE | COMMA | QUOTE | MODULE | NAME | INPUT | OUTPUT | INTERNAL | MARKING | STATE_VECTOR | TRANSITION | LABEL | PRESET | POSTSET | TRUE | FALSE | PLUS | MINUS | TIMES | DIV | MOD | EQUALS | GREATER | LESS | GREATER_EQUAL | LESS_EQUAL | EQUIV | NOT_EQUIV | NEGATION | AND | OR | IMPLICATION | BITWISE_NEGATION | BITWISE_AND | BITWISE_OR | BITWISE_XOR | BITWISE_LSHIFT | BITWISE_RSHIFT | INT | ID | WS | COMMENT | MULTILINECOMMENT | XMLCOMMENT | IGNORE )
int alt10=58;
alt10 = dfa10.predict(input);
switch (alt10) {
case 1 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:10: T__57
{
mT__57();
}
break;
case 2 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:16: T__58
{
mT__58();
}
break;
case 3 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:22: T__59
{
mT__59();
}
break;
case 4 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:28: T__60
{
mT__60();
}
break;
case 5 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:34: T__61
{
mT__61();
}
break;
case 6 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:40: T__62
{
mT__62();
}
break;
case 7 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:46: T__63
{
mT__63();
}
break;
case 8 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:52: LPAREN
{
mLPAREN();
}
break;
case 9 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:59: RPAREN
{
mRPAREN();
}
break;
case 10 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:66: QMARK
{
mQMARK();
}
break;
case 11 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:72: COLON
{
mCOLON();
}
break;
case 12 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:78: SEMICOLON
{
mSEMICOLON();
}
break;
case 13 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:88: PERIOD
{
mPERIOD();
}
break;
case 14 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:95: UNDERSCORE
{
mUNDERSCORE();
}
break;
case 15 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:106: COMMA
{
mCOMMA();
}
break;
case 16 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:112: QUOTE
{
mQUOTE();
}
break;
case 17 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:118: MODULE
{
mMODULE();
}
break;
case 18 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:125: NAME
{
mNAME();
}
break;
case 19 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:130: INPUT
{
mINPUT();
}
break;
case 20 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:136: OUTPUT
{
mOUTPUT();
}
break;
case 21 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:143: INTERNAL
{
mINTERNAL();
}
break;
case 22 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:152: MARKING
{
mMARKING();
}
break;
case 23 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:160: STATE_VECTOR
{
mSTATE_VECTOR();
}
break;
case 24 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:173: TRANSITION
{
mTRANSITION();
}
break;
case 25 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:184: LABEL
{
mLABEL();
}
break;
case 26 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:190: PRESET
{
mPRESET();
}
break;
case 27 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:197: POSTSET
{
mPOSTSET();
}
break;
case 28 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:205: TRUE
{
mTRUE();
}
break;
case 29 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:210: FALSE
{
mFALSE();
}
break;
case 30 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:216: PLUS
{
mPLUS();
}
break;
case 31 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:221: MINUS
{
mMINUS();
}
break;
case 32 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:227: TIMES
{
mTIMES();
}
break;
case 33 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:233: DIV
{
mDIV();
}
break;
case 34 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:237: MOD
{
mMOD();
}
break;
case 35 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:241: EQUALS
{
mEQUALS();
}
break;
case 36 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:248: GREATER
{
mGREATER();
}
break;
case 37 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:256: LESS
{
mLESS();
}
break;
case 38 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:261: GREATER_EQUAL
{
mGREATER_EQUAL();
}
break;
case 39 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:275: LESS_EQUAL
{
mLESS_EQUAL();
}
break;
case 40 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:286: EQUIV
{
mEQUIV();
}
break;
case 41 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:292: NOT_EQUIV
{
mNOT_EQUIV();
}
break;
case 42 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:302: NEGATION
{
mNEGATION();
}
break;
case 43 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:311: AND
{
mAND();
}
break;
case 44 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:315: OR
{
mOR();
}
break;
case 45 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:318: IMPLICATION
{
mIMPLICATION();
}
break;
case 46 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:330: BITWISE_NEGATION
{
mBITWISE_NEGATION();
}
break;
case 47 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:347: BITWISE_AND
{
mBITWISE_AND();
}
break;
case 48 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:359: BITWISE_OR
{
mBITWISE_OR();
}
break;
case 49 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:370: BITWISE_XOR
{
mBITWISE_XOR();
}
break;
case 50 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:382: BITWISE_LSHIFT
{
mBITWISE_LSHIFT();
}
break;
case 51 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:397: BITWISE_RSHIFT
{
mBITWISE_RSHIFT();
}
break;
case 52 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:412: INT
{
mINT();
}
break;
case 53 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:416: ID
{
mID();
}
break;
case 54 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:419: WS
{
mWS();
}
break;
case 55 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:422: COMMENT
{
mCOMMENT();
}
break;
case 56 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:430: MULTILINECOMMENT
{
mMULTILINECOMMENT();
}
break;
case 57 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:447: XMLCOMMENT
{
mXMLCOMMENT();
}
break;
case 58 :
// /Users/zhangz/myBioSim/BioSim/gui/src/verification/platu/lpn/io/PlatuGrammar.g:1:458: IGNORE
{
mIGNORE();
}
break;
}
}
protected DFA10 dfa10 = new DFA10(this);
static final String DFA10_eotS =
"\3\uffff\4\46\11\uffff\10\46\1\uffff\1\67\1\uffff\1\72\1\uffff\1"+
"\74\1\77\1\104\1\106\1\110\1\112\5\uffff\16\46\25\uffff\2\46\1\137"+
"\4\46\1\144\2\46\1\147\11\46\1\uffff\1\161\1\46\1\163\1\46\1\uffff"+
"\1\165\1\46\1\uffff\2\46\1\171\5\46\1\177\1\uffff\1\u0080\1\uffff"+
"\1\46\1\uffff\3\46\1\uffff\1\u0085\2\46\1\u0088\1\u0089\2\uffff"+
"\1\46\1\u008b\2\46\1\uffff\1\u008e\1\46\2\uffff\1\u0090\1\uffff"+
"\2\46\1\uffff\1\u0093\1\uffff\2\46\1\uffff\3\46\1\u0099\1\u009a"+
"\2\uffff";
static final String DFA10_eofS =
"\u009b\uffff";
static final String DFA10_minS =
"\1\11\2\uffff\1\163\1\157\1\156\1\141\11\uffff\1\141\1\165\1\141"+
"\1\164\1\162\1\141\1\157\1\141\1\uffff\1\60\1\uffff\1\52\1\uffff"+
"\2\75\1\41\1\75\1\46\1\174\5\uffff\1\163\1\156\1\146\1\151\1\144"+
"\1\155\1\164\1\162\2\141\1\142\1\145\1\163\1\154\25\uffff\1\145"+
"\1\163\1\56\1\164\1\165\1\156\1\153\1\56\1\145\1\160\1\56\1\164"+
"\1\156\2\145\1\163\1\164\1\163\1\162\1\164\1\uffff\1\56\1\164\1"+
"\56\1\151\1\uffff\1\56\1\165\1\uffff\1\145\1\163\1\56\1\154\1\145"+
"\1\163\1\145\1\164\1\56\1\uffff\1\56\1\uffff\1\156\1\uffff\1\164"+
"\1\166\1\151\1\uffff\1\56\1\164\1\145\2\56\2\uffff\1\147\1\56\1"+
"\145\1\164\1\uffff\1\56\1\164\2\uffff\1\56\1\uffff\1\143\1\151\1"+
"\uffff\1\56\1\uffff\1\164\1\157\1\uffff\1\157\1\156\1\162\2\56\2"+
"\uffff";
static final String DFA10_maxS =
"\1\176\2\uffff\1\163\1\157\1\156\1\157\11\uffff\1\141\1\165\1\141"+
"\1\164\1\162\1\141\1\162\1\141\1\uffff\1\76\1\uffff\1\57\1\uffff"+
"\1\75\1\76\1\77\1\75\1\46\1\174\5\uffff\1\163\1\156\1\163\1\162"+
"\1\144\1\155\1\164\1\162\1\141\1\165\1\142\1\145\1\163\1\154\25"+
"\uffff\1\145\1\163\1\172\1\164\1\165\1\156\1\153\1\172\1\145\1\160"+
"\1\172\1\164\1\156\2\145\1\163\1\164\1\163\1\162\1\164\1\uffff\1"+
"\172\1\164\1\172\1\151\1\uffff\1\172\1\165\1\uffff\1\145\1\163\1"+
"\172\1\154\1\145\1\163\1\145\1\164\1\172\1\uffff\1\172\1\uffff\1"+
"\156\1\uffff\1\164\1\166\1\151\1\uffff\1\172\1\164\1\145\2\172\2"+
"\uffff\1\147\1\172\1\145\1\164\1\uffff\1\172\1\164\2\uffff\1\172"+
"\1\uffff\1\143\1\151\1\uffff\1\172\1\uffff\1\164\1\157\1\uffff\1"+
"\157\1\156\1\162\2\172\2\uffff";
static final String DFA10_acceptS =
"\1\uffff\1\1\1\2\4\uffff\1\10\1\11\1\12\1\13\1\14\1\15\1\16\1\17"+
"\1\20\10\uffff\1\36\1\uffff\1\40\1\uffff\1\42\6\uffff\1\56\1\61"+
"\1\64\1\65\1\66\16\uffff\1\55\1\37\1\67\1\70\1\41\1\50\1\43\1\46"+
"\1\63\1\44\1\47\1\62\1\71\1\72\1\45\1\51\1\52\1\53\1\57\1\54\1\60"+
"\24\uffff\1\5\4\uffff\1\21\2\uffff\1\25\11\uffff\1\6\1\uffff\1\7"+
"\1\uffff\1\22\3\uffff\1\34\5\uffff\1\4\1\23\4\uffff\1\31\2\uffff"+
"\1\35\1\3\1\uffff\1\24\2\uffff\1\32\1\uffff\1\26\2\uffff\1\33\5"+
"\uffff\1\30\1\27";
static final String DFA10_specialS =
"\u009b\uffff}>";
static final String[] DFA10_transitionS = {
"\2\47\1\uffff\2\47\22\uffff\1\47\1\40\1\17\2\uffff\1\34\1\41"+
"\1\uffff\1\7\1\10\1\32\1\30\1\16\1\31\1\14\1\33\12\45\1\12\1"+
"\13\1\37\1\35\1\36\1\11\1\uffff\32\46\1\1\1\uffff\1\2\1\44\1"+
"\15\1\uffff\1\3\1\46\1\4\2\46\1\27\2\46\1\5\2\46\1\25\1\6\1"+
"\20\1\21\1\26\2\46\1\23\1\24\1\46\1\22\4\46\1\uffff\1\42\1\uffff"+
"\1\43",
"",
"",
"\1\50",
"\1\51",
"\1\52",
"\1\53\15\uffff\1\54",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"\1\55",
"\1\56",
"\1\57",
"\1\60",
"\1\61",
"\1\62",
"\1\64\2\uffff\1\63",
"\1\65",
"",
"\12\45\4\uffff\1\66",
"",
"\1\71\4\uffff\1\70",
"",
"\1\73",
"\1\75\1\76",
"\1\102\32\uffff\1\101\1\100\1\uffff\1\103",
"\1\105",
"\1\107",
"\1\111",
"",
"",
"",
"",
"",
"\1\113",
"\1\114",
"\1\115\11\uffff\1\117\2\uffff\1\116",
"\1\120\10\uffff\1\121",
"\1\122",
"\1\123",
"\1\124",
"\1\125",
"\1\126",
"\1\127\23\uffff\1\130",
"\1\131",
"\1\132",
"\1\133",
"\1\134",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"",
"\1\135",
"\1\136",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\140",
"\1\141",
"\1\142",
"\1\143",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\145",
"\1\146",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\150",
"\1\151",
"\1\152",
"\1\153",
"\1\154",
"\1\155",
"\1\156",
"\1\157",
"\1\160",
"",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\162",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\164",
"",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\166",
"",
"\1\167",
"\1\170",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\172",
"\1\173",
"\1\174",
"\1\175",
"\1\176",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"",
"\1\u0081",
"",
"\1\u0082",
"\1\u0083",
"\1\u0084",
"",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\u0086",
"\1\u0087",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"",
"",
"\1\u008a",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\u008c",
"\1\u008d",
"",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\u008f",
"",
"",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"",
"\1\u0091",
"\1\u0092",
"",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"",
"\1\u0094",
"\1\u0095",
"",
"\1\u0096",
"\1\u0097",
"\1\u0098",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"\1\46\1\uffff\12\46\7\uffff\32\46\4\uffff\1\46\1\uffff\32\46",
"",
""
};
    // Decoded DFA #10 tables, unpacked once from the encoded string constants above.
    // Generated by ANTLR from PlatuGrammar.g — do not hand-edit; regenerate instead.
    static final short[] DFA10_eot = DFA.unpackEncodedString(DFA10_eotS);
    static final short[] DFA10_eof = DFA.unpackEncodedString(DFA10_eofS);
    static final char[] DFA10_min = DFA.unpackEncodedStringToUnsignedChars(DFA10_minS);
    static final char[] DFA10_max = DFA.unpackEncodedStringToUnsignedChars(DFA10_maxS);
    static final short[] DFA10_accept = DFA.unpackEncodedString(DFA10_acceptS);
    static final short[] DFA10_special = DFA.unpackEncodedString(DFA10_specialS);
    static final short[][] DFA10_transition;
    static {
        // Unpack each per-state transition row at class-load time so token
        // prediction never pays the decode cost on the hot path.
        int numStates = DFA10_transitionS.length;
        DFA10_transition = new short[numStates][];
        for (int i=0; i<numStates; i++) {
            DFA10_transition[i] = DFA.unpackEncodedString(DFA10_transitionS[i]);
        }
    }
    /**
     * ANTLR-generated DFA driving token prediction for this lexer
     * (decision 10: the top-level Tokens rule).
     */
    class DFA10 extends DFA {
        public DFA10(BaseRecognizer recognizer) {
            this.recognizer = recognizer;
            this.decisionNumber = 10;
            this.eot = DFA10_eot;
            this.eof = DFA10_eof;
            this.min = DFA10_min;
            this.max = DFA10_max;
            this.accept = DFA10_accept;
            this.special = DFA10_special;
            this.transition = DFA10_transition;
        }
        public String getDescription() {
            return "1:1: Tokens : ( T__57 | T__58 | T__59 | T__60 | T__61 | T__62 | T__63 | LPAREN | RPAREN | QMARK | COLON | SEMICOLON | PERIOD | UNDERSCORE | COMMA | QUOTE | MODULE | NAME | INPUT | OUTPUT | INTERNAL | MARKING | STATE_VECTOR | TRANSITION | LABEL | PRESET | POSTSET | TRUE | FALSE | PLUS | MINUS | TIMES | DIV | MOD | EQUALS | GREATER | LESS | GREATER_EQUAL | LESS_EQUAL | EQUIV | NOT_EQUIV | NEGATION | AND | OR | IMPLICATION | BITWISE_NEGATION | BITWISE_AND | BITWISE_OR | BITWISE_XOR | BITWISE_LSHIFT | BITWISE_RSHIFT | INT | ID | WS | COMMENT | MULTILINECOMMENT | XMLCOMMENT | IGNORE );";
        }
    }
}
|
apache-2.0
|
ericbottard/spring-cloud-stream
|
spring-cloud-stream-samples/transform/src/main/java/demo/TransformApplication.java
|
1079
|
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.ComponentScan;
import transform.LoggingTransformer;
/**
 * Spring Boot entry point for the transform sample.
 * {@code @ComponentScan(basePackageClasses = LoggingTransformer.class)} pulls in
 * {@link LoggingTransformer} from the {@code transform} package, which lies
 * outside this class's package and would otherwise not be picked up by the
 * default scan rooted at {@code demo}.
 */
@SpringBootApplication
@ComponentScan(basePackageClasses=LoggingTransformer.class)
public class TransformApplication {
    /** Boots the Spring application context; delegates entirely to Spring Boot. */
    public static void main(String[] args) {
        SpringApplication.run(TransformApplication.class, args);
    }
}
|
apache-2.0
|
4finance/spring-cloud-zookeeper
|
spring-cloud-zookeeper-discovery/src/main/java/org/springframework/cloud/zookeeper/discovery/dependency/DependenciesNotPassedCondition.java
|
1832
|
/*
* Copyright 2013-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.zookeeper.discovery.dependency;
import org.springframework.boot.autoconfigure.condition.ConditionOutcome;
import org.springframework.context.annotation.ConditionContext;
import org.springframework.core.type.AnnotatedTypeMetadata;
/**
* Inverse of the {@link ConditionalOnDependenciesPassed} condition. Also checks if switch for zookeeper dependencies
* was turned on
*
* @author Marcin Grzejszczak
* @since 1.0.0
*/
/**
 * Inverse of {@link DependenciesPassedCondition}: matches only when zookeeper
 * dependencies are NOT defined in configuration AND the
 * {@code spring.cloud.zookeeper.dependency.enabled} switch is off.
 */
public class DependenciesNotPassedCondition extends DependenciesPassedCondition {

    @Override
    public ConditionOutcome getMatchOutcome(ConditionContext context, AnnotatedTypeMetadata metadata) {
        ConditionOutcome propertiesSet = super.getMatchOutcome(context, metadata);
        if (propertiesSet.isMatch()) {
            // Dependencies are present -> this inverse condition must not match.
            return ConditionOutcome.inverse(propertiesSet);
        }
        // Primitive boolean: the supplied default guarantees a non-null result,
        // so the previous Boolean declaration only added a pointless unboxing.
        boolean dependenciesEnabled = context.getEnvironment()
                .getProperty("spring.cloud.zookeeper.dependency.enabled", Boolean.class, false);
        if (dependenciesEnabled) {
            // FIX: the old message claimed "Dependencies are defined", but this
            // branch is only reachable when they are NOT defined.
            return ConditionOutcome.noMatch("Dependencies are not defined in configuration but the switch is turned on");
        }
        return ConditionOutcome.match("Dependencies are not defined in configuration and switch is turned off");
    }
}
|
apache-2.0
|
resin-io-library/base-images
|
balena-base-images/python/beagleboard-xm/debian/sid/3.10.0/build/Dockerfile
|
4852
|
# AUTOGENERATED FILE
FROM balenalib/beagleboard-xm-debian:sid-build
# remove several traces of debian python
RUN apt-get purge -y python.*
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
# key 63C7CC90: public key "Simon McVittie <[email protected]>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <[email protected]>" imported
RUN gpg --batch --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
&& gpg --batch --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
&& gpg --batch --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059
ENV PYTHON_VERSION 3.10.0
# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.2.4
ENV SETUPTOOLS_VERSION 58.0.0
RUN set -x \
&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-armv7hf-libffi3.3.tar.gz" \
&& echo "84875fbd8f39240a2d2299c6957ac3bf9eec6c0a34ebdc45e3ad73a5e97f8e6b Python-$PYTHON_VERSION.linux-armv7hf-libffi3.3.tar.gz" | sha256sum -c - \
&& tar -xzf "Python-$PYTHON_VERSION.linux-armv7hf-libffi3.3.tar.gz" --strip-components=1 \
&& rm -rf "Python-$PYTHON_VERSION.linux-armv7hf-libffi3.3.tar.gz" \
&& ldconfig \
&& if [ ! -e /usr/local/bin/pip3 ]; then : \
&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c get-pip.py" | sha256sum -c - \
&& python3 get-pip.py \
&& rm get-pip.py \
; fi \
&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
&& find /usr/local \
\( -type d -a -name test -o -name tests \) \
-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
-exec rm -rf '{}' + \
&& cd / \
&& rm -rf /usr/src/python ~/.cache
# install "virtualenv", since the vast majority of users of this image will want it
RUN pip3 install --no-cache-dir virtualenv
ENV PYTHON_DBUS_VERSION 1.2.18
# install dbus-python dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
libdbus-1-dev \
libdbus-glib-1-dev \
&& rm -rf /var/lib/apt/lists/* \
&& apt-get -y autoremove
# install dbus-python
RUN set -x \
&& mkdir -p /usr/src/dbus-python \
&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz" -o dbus-python.tar.gz \
&& curl -SL "http://dbus.freedesktop.org/releases/dbus-python/dbus-python-$PYTHON_DBUS_VERSION.tar.gz.asc" -o dbus-python.tar.gz.asc \
&& gpg --verify dbus-python.tar.gz.asc \
&& tar -xzC /usr/src/dbus-python --strip-components=1 -f dbus-python.tar.gz \
&& rm dbus-python.tar.gz* \
&& cd /usr/src/dbus-python \
&& PYTHON_VERSION=$(expr match "$PYTHON_VERSION" '\([0-9]*\.[0-9]*\)') ./configure \
&& make -j$(nproc) \
&& make install -j$(nproc) \
&& cd / \
&& rm -rf /usr/src/dbus-python
# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
&& ln -sf pip3 pip \
&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
&& ln -sf idle3 idle \
&& ln -sf pydoc3 pydoc \
&& ln -sf python3 python \
&& ln -sf python3-config python-config
# set PYTHONPATH to point to dist-packages
ENV PYTHONPATH /usr/lib/python3/dist-packages:$PYTHONPATH
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
&& echo "Running test-stack@python" \
&& chmod +x [email protected] \
&& bash [email protected] \
&& rm -rf [email protected]
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Sid \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.10.0, Pip v21.2.4, Setuptools v58.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh
|
apache-2.0
|
resin-io-library/base-images
|
balena-base-images/python/am571x-evm/fedora/36/3.7.12/build/Dockerfile
|
2438
|
# AUTOGENERATED FILE
FROM balenalib/am571x-evm-fedora:36-build
# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8
RUN dnf install -y \
python3-pip \
python3-dbus \
&& dnf clean all
# install "virtualenv", since the vast majority of users of this image will want it
RUN pip3 install -U --no-cache-dir --ignore-installed pip setuptools \
&& pip3 install --no-cache-dir virtualenv
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'As of January 1st, 2020, Python 2 was end-of-life, we will change the latest tag for Balenalib Python base image to Python 3.x and drop support for Python 2 soon. So after 1st July, 2020, all the balenalib Python latest tag will point to the latest Python 3 version and no changes, or fixes will be made to balenalib Python 2 base image. If you are using Python 2 for your application, please upgrade to Python 3 before 1st July.' > /.balena/messages/python-deprecation-warning
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
&& echo "Running test-stack@python" \
&& chmod +x [email protected] \
&& bash [email protected] \
&& rm -rf [email protected]
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Fedora 36 \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.7.12, Pip v21.3.1, Setuptools v60.5.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh
|
apache-2.0
|
PanDAWMS/panda-bigmon-core
|
core/grafana/views.py
|
19477
|
import json
import random
from datetime import datetime, timedelta
import hashlib
from django.http import HttpResponse, JsonResponse
from django.shortcuts import render_to_response
from django.template import loader
from django.utils import encoding
from core.grafana.GrafanaES import Grafana
from core.grafana.QueryES import Query
from core.grafana.data_tranformation import stacked_hist, pledges_merging
from core.libs.cache import setCacheEntry, getCacheEntry
from core.oauth.utils import login_customrequired
from core.views import initRequest, DateTimeEncoder, DateEncoder
colours_codes = {
"0": "#AE3C51",
"1": "#6298FF",
"2": "#D97529",
"3": "#009246",
"AOD": "#006019",
"Analysis": "#FF00FF",
"CA": "#FF1F1F",
"CAF processing": "#CAD141",
"CERN": "#AE3C51",
"Custodial": "#FF0000",
"DE": "#000000",
"DESD": "#4189FF",
"DPD": "#FEF100",
"Data Processing": "#FFFF00",
"Data Processing (XP)": "#008800",
"Default": "#808080",
"ES": "#EDBF00",
"ESD": "#001640",
"Extra Production": "#FF0000",
"FR": "#0055A5",
"Group Analysis": "#808080",
"Group Production": "#008800",
"HITS": "#FF6666",
"IT": "#009246",
"MC Event Generation": "#356C20",
"MC Production": "#0000FF",
"MC Reconstruction": "#00006B",
"MC Reconstruction (XP)": "#D97529",
"MC Simulation": "#0000FF",
"MC Simulation (XP)": "#AE3C51",
"MC Simulation Fast": "#0099CC",
"MC Simulation Fast (XP)": "#0099CC",
"MC Simulation Full": "#00CCCC",
"MC Simulation Full (XP)": "#00CCCC",
"ND": "#6298FF",
"NL": "#D97529",
"Other": "#66008D",
"Others": "#00FFFF",
"Others (XP)": "#009246",
"Primary": "#FFA500",
"RAW": "#FF0000",
"RU": "#66008D",
"Rest": "#625D5D",
"Secondary": "#00FFFF",
"T0 processing": "#DB9900",
"TW": "#89000F",
"Testing": "#00FF00",
"ToBeDeleted": "#FFFF00",
"UK": "#356C20",
"UNKNOWN": "#FFA500",
"US": "#00006B",
"User Analysis": "#FF00FF",
"Validation": "#000000",
"analysis": "#FF0000",
"bstream": "#0055A5",
"cancelled": "#FF9933",
"closed": "#808080",
"evgen": "#D97529",
"evgentx": "#AE3C51",
"failed": "#bf1b00",
"filter": "#DB9900",
"finished": "#248F24",
"ganga": "#1433CC",
"gangarobot": "#006666",
"gangarobot-64": "#009999",
"gangarobot-filestager": "#00CCCC",
"gangarobot-new": "#00FFFF",
"gangarobot-nightly": "#99FF00",
"gangarobot-pft": "#99CC33",
"gangarobot-pft-trial": "#999966",
"gangarobot-rctest": "#996699",
"gangarobot-root": "#CC0000",
"gangarobot-squid": "#CC0066",
"gangarobotnew": "#CC3399",
"hammercloud": "#A5D3CA",
"merge": "#FFA600",
"merging": "#47D147",
"non-panda_analysis": "#CCCCCC",
"pandamover": "#FFE920",
"pile": "#FF00FF",
"prod_test": "#B4D1B6",
"production": "#CAD141",
"ptest": "#89C7FF",
"rc_test": "#A5FF8A",
"reco": "#00006B",
"reprocessing": "#008800",
"running": "#47D147",
"simul": "#0000FF",
"software": "#FFCFA4s",
"t0_caf": "#CAD141",
"t0_processing": "#FFA600",
"test": "#00FF00",
"transfering": "#47D147",
"txtgen": "#29AFD6",
"validation": "#000000"
}
@login_customrequired
def index(request):
    """Render the Grafana plots landing page.

    The page only carries the drop-down options (group-by, split-series and
    plot type); the plot data itself is fetched asynchronously by the page
    from the grafana_api view.
    """
    valid, response = initRequest(request)
    context = {
        'group_by': {'dst_federation': 'Federation'},
        'split_series': {'adcactivity': 'ADC Activity', 'jobstatus': 'Job status'},
        'plots': {'cpuconsumption': 'CPU Consumption', 'wallclockhepspec06': 'WallClock HEPSPEC06'},
    }
    return render_to_response('grafana-api-plots.html', context, content_type='text/html')
def chartjs(request):
    """Render the Chart.js flavour of the plots landing page.

    Same contract as index(): only the drop-down options are passed to the
    template; data is loaded asynchronously from the grafana_api view.
    """
    valid, response = initRequest(request)
    context = {
        'group_by': {'dst_federation': 'Federation'},
        'split_series': {'adcactivity': 'ADC Activity', 'jobstatus': 'Job status'},
        'plots': {'cpuconsumption': 'CPU Consumption', 'wallclockhepspec06': 'WallClock HEPSPEC06'},
    }
    return render_to_response('grafana-chartjs-plots.html', context, content_type='text/html')
def grafana_api(request):
    """JSON endpoint behind the index()/chartjs() pages.

    Parses an optional ``groupby=<group>[,<split>]`` request parameter, runs
    the corresponding query against Grafana and post-processes the payload
    for the requested frontend (``type=d3js`` or ``type=chartjs``) or for a
    CSV export (``export=csv``). Without those parameters the raw Grafana
    payload is returned as-is.
    """
    # NOTE(review): `valid` is never checked; initRequest() appears to be
    # called for its session side effects only — confirm intentional.
    valid, response = initRequest(request)
    group_by = None
    split_series = None
    if 'groupby' in request.session['requestParams']:
        # A literal 'time' entry means "no regrouping": both stay None.
        groupby_params = request.session['requestParams']['groupby'].split(',')
        if 'time' in groupby_params:
            pass
        else:
            group_by = groupby_params[0]
            if len(groupby_params) > 1:
                split_series = groupby_params[1]
    result = []
    q = Query()
    q = q.request_to_query(request)
    # Pledge data lives in a separate InfluxDB database (monit_production_rebus).
    last_pledges = Query(agg_func='last', table='pledges_last', field='value', grouping='real_federation')
    # / api / datasources / proxy / 9267 / query?db = monit_production_rebus
    # sum_pledges = Query(agg_func='sum', table='pledges', field='atlas', grouping='time(1m),real_federation')
    try:
        if q.table == 'pledges_last' or q.table == 'pledges_sum' or q.table == 'pledges_hs06sec':
            result = Grafana(database='monit_production_rebus').get_data(q)
        else:
            result = Grafana().get_data(q)
        # last_pledges = Grafana().get_data(last_pledges)
        if 'type' in request.session['requestParams'] and request.session['requestParams']['type'] == 'd3js':
            data = stacked_hist(result['results'][0]['series'], group_by, split_series)
            return JsonResponse(data)
        if 'type' in request.session['requestParams'] and request.session['requestParams']['type'] == 'chartjs':
            last_pledges = Grafana(database='monit_production_rebus').get_data(last_pledges)
            data = {}
            data = stacked_hist(result['results'][0]['series'], group_by, split_series)
            last_pledges = stacked_hist(last_pledges['results'][0]['series'], 'real_federation')
            # NOTE(review): 'lables' is a long-standing local typo, kept as-is.
            lables = list(data.keys())
            pledges_keys = list(last_pledges.keys())
            datasets = []
            elements = {}
            # Pivot into per-series value lists aligned with `lables`, plus a
            # parallel 'pledges' series (0 for groups without a pledge).
            # NOTE(review): `object` shadows the builtin — harmless, but rename
            # on the next behavioural change.
            for object in data:
                for element in data[object]:
                    elements.setdefault(element, []).append(data[object][element])
                if object in pledges_keys:
                    # assumes last_pledges[...]['all'] is an HS06 rate being
                    # scaled to one week of seconds — TODO confirm schema
                    elements.setdefault('pledges', []).append(last_pledges[object]['all'] * 7 * 24 * 60 * 60)
                else:
                    elements.setdefault('pledges', []).append(0)
            background = ''
            for key in elements:
                if key in colours_codes:
                    background = colours_codes[key]
                else:
                    # Unknown series label: pick a random colour.
                    r = lambda: random.randint(0, 255)
                    background = '#%02X%02X%02X' % (r(), r(), r())
                if key != 'pledges':
                    datasets.append(
                        {'label': key, 'stack': 'Stack 0', 'data': elements[key], 'backgroundColor': background})
                else:
                    # Pledges are always drawn as their own red stack.
                    datasets.append(
                        {'label': key, 'stack': 'Stack 1', 'data': elements[key], 'backgroundColor': '#FF0000'})
            data = {'labels': lables, 'datasets': datasets}
            return HttpResponse(json.dumps(data, cls=DateTimeEncoder), content_type='application/json')
        if 'export' in request.session['requestParams']:
            if request.session['requestParams']['export'] == 'csv':
                data = stacked_hist(result['results'][0]['series'], group_by, split_series)
                import csv
                import copy
                response = HttpResponse(content_type='text/csv')
                # NOTE(review): groupby_params is only bound when 'groupby' was
                # supplied; export=csv without groupby raises NameError here.
                column_titles = copy.deepcopy(groupby_params)
                column_titles.append('value')
                response['Content-Disposition'] = 'attachment; filename={0}.csv'.format('_'.join(groupby_params))
                writer = csv.writer(response, delimiter=";")
                writer.writerow(column_titles)
                csvList = []
                if len(groupby_params) > 1:
                    csvList = grab_children(data)
                else:
                    for key, value in data.items():
                        csvList.append([key, value['all']])
                writer.writerows(csvList)
                return response
    except Exception as ex:
        # NOTE(review): the exception object itself is appended; it is not JSON
        # serialisable, and JsonResponse on a list requires safe=False — this
        # error path very likely raises again. Confirm and fix deliberately.
        result.append(ex)
    return JsonResponse(result)
def grab_children(data, parent=None, child=None):
    """Flatten a two-level {group: {key: value}} mapping into rows.

    Returns a list of [parent, key, value] triples; non-dict values found at
    the top level are emitted with parent=None. The accumulator list may be
    supplied by the caller and is returned for convenience.
    """
    rows = [] if child is None else child
    for key, value in data.items():
        if isinstance(value, dict):
            grab_children(value, key, rows)
        else:
            rows.append([parent, key, value])
    return rows
#@login_customrequired
def pledges(request):
    """Pledges-vs-delivered comparison view.

    Three modes, selected by requestParams:
      * type=federation -> JSON rows per WLCG federation
      * type=country    -> JSON rows per country
      * (no type)       -> render the grafana-pledges.html page shell

    The date window comes from date_from/date_to (``%d.%m.%Y %H:%M:%S``);
    when absent, the 7 full days ending at today's UTC midnight are used.
    Windows longer than 30 days are split into <=20-day chunks so each
    backend query stays bounded. Results are cached per window+mode.
    """
    valid, response = initRequest(request)
    if 'date_from' in request.session['requestParams'] and 'date_to' in request.session['requestParams']:
        starttime = request.session['requestParams']['date_from']
        endtime = request.session['requestParams']['date_to']
        date_to = datetime.strptime(endtime, "%d.%m.%Y %H:%M:%S")
        date_from = datetime.strptime(starttime, "%d.%m.%Y %H:%M:%S")
        total_seconds = (date_to - date_from).total_seconds()
        total_days = (date_to - date_from).days
        date_list = []
        if (date_to - date_from).days > 30:
            # Chop the window into 20-day chunks, each ending one minute early
            # so consecutive chunks do not double-count the boundary minute.
            n = 20
            while True:
                start_date = date_from
                end_date = (start_date + timedelta(days=n))
                end_date = end_date - timedelta(minutes=1)
                if end_date >= date_to:
                    end_date = date_to - timedelta(minutes=1)
                    date_list.append([start_date.strftime("%d.%m.%Y %H:%M:%S"), end_date.strftime("%d.%m.%Y %H:%M:%S")])
                    break
                else:
                    date_list.append([start_date.strftime("%d.%m.%Y %H:%M:%S"), end_date.strftime("%d.%m.%Y %H:%M:%S")])
                    date_from = end_date + timedelta(minutes=1)
        else:
            newendtime = (date_to - timedelta(minutes=1)).strftime("%d.%m.%Y %H:%M:%S")
            date_list.append([starttime, newendtime])
    else:
        # Default window: the 7 full days ending at today's UTC midnight.
        timebefore = timedelta(days=7)
        endtime = (datetime.utcnow()).replace(minute=00, hour=00, second=00, microsecond=000)
        starttime = (endtime - timebefore).replace(minute=00, hour=00, second=00, microsecond=000)
        # FIX: was (starttime - endtime).total_seconds(), which is negative and
        # corrupted the pledge normalisation downstream.
        total_seconds = (endtime - starttime).total_seconds()
        total_days = (endtime - starttime).days
        endtime = endtime - timedelta(minutes=1)
        endtime = endtime.strftime("%d.%m.%Y %H:%M:%S")
        starttime = starttime.strftime("%d.%m.%Y %H:%M:%S")
        # FIX: date_list was never initialised on this branch, so default-dated
        # type=federation/country requests crashed with UnboundLocalError.
        date_list = [[starttime, endtime]]
    if 'type' in request.session['requestParams'] and request.session['requestParams']['type'] == 'federation':
        key = hashlib.md5(encoding.force_bytes("{0}_{1}_federation".format(starttime, endtime)))
        key = key.hexdigest()
        federations = getCacheEntry(request, key, isData=True)
        if federations is not None:
            federations = json.loads(federations)
            return HttpResponse(json.dumps(federations), content_type='text/json')
        pledges_dict = {}
        pledges_list = []
        federations_info = {}
        if len(date_list) > 1:
            # Chunked window: accumulate each chunk into pledges_dict.
            for date in date_list:
                hs06sec = Query(agg_func='sum', table='completed', field=['sum_hs06sec', 'sum_count',
                                                                          'sum_cpuconsumptiontime', 'sum_walltime'],
                                grouping='time,dst_federation,dst_tier,dst_experiment_site,computingsite',
                                starttime=date[0], endtime=date[1])
                hs06sec = Grafana().get_data(hs06sec)
                pledges_sum = Query(agg_func='mean', table='pledges_hs06sec', field='value',
                                    grouping='time,real_federation,tier', starttime=date[0], endtime=date[1])
                pledges_sum = Grafana(database='monit_production_rebus').get_data(pledges_sum)
                pledges_dict, federations_info = pledges_merging(hs06sec, pledges_sum, total_seconds,
                                                                 pledges_dict, federations_info)
        else:
            hs06sec = Query(agg_func='sum', table='completed', field=['sum_hs06sec', 'sum_count',
                                                                      'sum_cpuconsumptiontime', 'sum_walltime'],
                            grouping='time,dst_federation,dst_tier,dst_experiment_site,computingsite',
                            starttime=date_list[0][0], endtime=date_list[0][1])
            hs06sec = Grafana().get_data(hs06sec)
            pledges_sum = Query(agg_func='mean', table='pledges_hs06sec', field='value',
                                grouping='time,real_federation,tier', starttime=date_list[0][0],
                                endtime=date_list[0][1])
            pledges_sum = Grafana(database='monit_production_rebus').get_data(pledges_sum)
            pledges_dict, federations_info = pledges_merging(hs06sec, pledges_sum, total_seconds,
                                                             pledges_dict, federations_info)
        # /86400 converts accumulated seconds to days for display.
        for fed in pledges_dict:
            if fed == 'NULL':
                continue
            pledges_list.append({"dst_federation": fed,
                                 "hs06sec": int(round(float(pledges_dict[fed]['hs06sec']) / 86400, 2)),
                                 'pledges': int(round(float(pledges_dict[fed]['pledges']) / 86400, 2)),
                                 'tier': pledges_dict[fed]['tier'],
                                 'federation_info': federations_info[fed] if fed in federations_info else None})
        setCacheEntry(request, key, json.dumps(pledges_list), 60 * 60 * 24 * 30, isData=True)
        return HttpResponse(json.dumps(pledges_list), content_type='text/json')
    elif 'type' in request.session['requestParams'] and request.session['requestParams']['type'] == 'country':
        key = hashlib.md5(encoding.force_bytes("{0}_{1}_country".format(starttime, endtime)))
        key = key.hexdigest()
        countries = getCacheEntry(request, key, isData=True)
        if countries is not None:
            countries = json.loads(countries)
            return HttpResponse(json.dumps(countries), content_type='text/json')
        federations_info = {}
        pledges_dict = {}
        pledges_list = []
        if len(date_list) > 1:
            for date in date_list:
                hs06sec = Query(agg_func='sum', table='completed', field='sum_hs06sec',
                                grouping='time,dst_federation,dst_country',
                                starttime=date[0], endtime=date[1])
                hs06sec = Grafana().get_data(hs06sec)
                pledges_sum = Query(agg_func='mean', table='pledges_hs06sec', field='value',
                                    grouping='time,real_federation,country', starttime=date[0], endtime=date[1])
                pledges_sum = Grafana(database='monit_production_rebus').get_data(pledges_sum)
                pledges_dict = pledges_merging(hs06sec, pledges_sum, total_seconds, pledges_dict, federations_info,
                                               type='dst_country')
        else:
            hs06sec = Query(agg_func='sum', table='completed', field='sum_hs06sec',
                            grouping='time,dst_federation,dst_country', starttime=date_list[0][0],
                            endtime=date_list[0][1])
            hs06sec = Grafana().get_data(hs06sec)
            pledges_sum = Query(agg_func='mean', table='pledges_hs06sec', field='value',
                                grouping='time,real_federation,country', starttime=date_list[0][0],
                                endtime=date_list[0][1])
            pledges_sum = Grafana(database='monit_production_rebus').get_data(pledges_sum)
            # FIX: arguments were passed as (..., federations_info, pledges_dict, ...),
            # i.e. swapped relative to every other pledges_merging() call site.
            pledges_dict = pledges_merging(hs06sec, pledges_sum, total_seconds, pledges_dict, federations_info,
                                           type='dst_country')
        for country in pledges_dict:
            if country == 'NULL':
                continue
            pledges_list.append(
                {"dst_country": country,
                 "hs06sec": int(round(float(pledges_dict[country]['hs06sec']) / 86400, 2)),
                 'pledges': int(round(float(pledges_dict[country]['pledges']) / 86400, 2))})
        setCacheEntry(request, key, json.dumps(pledges_list),
                      60 * 60 * 24 * 30, isData=True)
        return HttpResponse(json.dumps(pledges_list), content_type='text/json')
    else:
        # No type parameter: serve the (cached) HTML shell.
        data = getCacheEntry(request, "pledges")
        # data = None
        if data is not None:
            data = json.loads(data)
            t = loader.get_template('grafana-pledges.html')
            return HttpResponse(t.render(data, request), content_type='text/html')
        else:
            # Pre-seed short-lived placeholder cache entries for both JSON modes.
            key_fed = hashlib.md5(encoding.force_bytes("{0}_{1}_federation".format(starttime, endtime)))
            key_country = hashlib.md5(encoding.force_bytes("{0}_{1}_country".format(starttime, endtime)))
            key_fed = key_fed.hexdigest()
            key_country = key_country.hexdigest()
            setCacheEntry(request, key_fed, None, 60, isData=True)
            setCacheEntry(request, key_country, None, 60, isData=True)
            t = loader.get_template('grafana-pledges.html')
            data = {
                'request': request,
                'date_from': starttime,
                'date_to': endtime,
                'days': total_days,
                'info': "This page was cached: {0}".format(str(datetime.utcnow()))
            }
            # NOTE(review): `data` includes the request object; json.dumps with
            # DateEncoder presumably tolerates it — confirm, else drop 'request'.
            setCacheEntry(request, "pledges", json.dumps(data, cls=DateEncoder), 60 * 60 * 24 * 30)
            return HttpResponse(t.render({"date_from": starttime, "date_to": endtime, "days": total_days}, request),
                                content_type='text/html')
def grafana_api_es(request):
    """
    Django view: build an Elasticsearch/Grafana query from the HTTP request
    parameters and return the raw query result as JSON.

    :param request: Django HttpRequest; parameters are read from
        ``request.session['requestParams']`` (populated by ``initRequest``).
    :return: JsonResponse with the Grafana query result, or the error
        response produced by ``initRequest`` when the request is invalid.
    """
    valid, response = initRequest(request)
    # Bail out on malformed requests instead of silently ignoring `valid`
    # (consistent with the other views in this module).
    if not valid:
        return response
    group_by = None
    split_series = None
    if 'groupby' in request.session['requestParams']:
        groupby_params = request.session['requestParams']['groupby'].split(',')
        if 'time' in groupby_params:
            pass
        else:
            group_by = groupby_params[0]
        if len(groupby_params) > 1:
            split_series = groupby_params[1]
        else:
            split_series = group_by
    # NOTE(review): group_by / split_series are computed but never used below —
    # presumably request_to_query() re-parses the 'groupby' parameter itself;
    # confirm before removing.
    q = Query()
    q = q.request_to_query(request)
    result = Grafana().get_data(q)
    return JsonResponse(result)
|
apache-2.0
|
shocksm/stella
|
src/main/java/org/stellasql/stella/session/ResultTabHandler.java
|
187
|
package org.stellasql.stella.session;
/**
 * Callback interface implemented by session UI components to let other
 * parts of the application drive result-tab navigation (closing the
 * current tab or cycling forwards/backwards through the open tabs).
 */
public interface ResultTabHandler
{
  /** Closes the currently selected result tab. */
  public void closeSelectedTab();
  /** Moves the selection to the tab after the current one. */
  public void selectNextTab();
  /** Moves the selection to the tab before the current one. */
  public void selectPreviousTab();
}
|
apache-2.0
|
muffato/ensembl
|
modules/Bio/EnsEMBL/Utils/Tree/Interval/Immutable.pm
|
10649
|
=head1 LICENSE
Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
Copyright [2016-2019] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=cut
=head1 CONTACT
Please email comments or questions to the public Ensembl
developers list at <http://lists.ensembl.org/mailman/listinfo/dev>.
Questions may also be sent to the Ensembl help desk at
<http://www.ensembl.org/Help/Contact>.
=cut
=head1 NAME
Bio::EnsEMBL::Utils::Tree::Interval::Immutable
=head1 SYNOPSIS
# define a set of intervals to be added to the tree
my $intervals = [ Bio::EnsEMBL::Utils::Interval->new(121626874, 122092717),
Bio::EnsEMBL::Utils::Interval->new(121637917, 121658918),
Bio::EnsEMBL::Utils::Interval->new(122096077, 124088369) ];
# initialise the tree with the above intervals
my $tree = Bio::EnsEMBL::Utils::Tree::Interval::Immutable->new($intervals);
# point query
my $results = $tree->query(121779004);
if (scalar @$results) {
print "Intervals contain 121779004\n";
}
# same query, but use interval query
my $results = $tree->query(121779004, 121779004);
if (scalar @$results) {
print "Found containing interval: [", $result->[0]->start, ', ', $result->[0]->end, "\n";
}
=head1 DESCRIPTION
An implementation of an immutable interval tree. Immutable means the tree is
initialised with a fixed set of intervals at creation time. Intervals cannot
be added to or removed from the tree during its life cycle.
Implementation heavily inspired by https://github.com/tylerkahn/intervaltree-python
This implementation does not support Intervals having a start > end - i.e.
intervals spanning the origin of a circular chromosome.
=head1 METHODS
=cut
package Bio::EnsEMBL::Utils::Tree::Interval::Immutable;
use strict;
use Tie::RefHash;
use Bio::EnsEMBL::Utils::Scalar qw(assert_ref);
use Bio::EnsEMBL::Utils::Exception qw(throw);
use Bio::EnsEMBL::Utils::Tree::Interval::Immutable::Node;
use Bio::EnsEMBL::Utils::Interval;
=head2 new
Arg [1] : Arrayref of Bio::EnsEMBL::Utils::Interval instances
Example : my $tree = Bio::EnsEMBL::Utils::Tree::Immutable([ $i1, $i2, $i3 ]);
Description : Constructor. Creates a new immutable tree instance
Returntype : Bio::EnsEMBL::Utils::Tree::Interval::Immutable
Exceptions : none
Caller : general
=cut
sub new {
  my $caller = shift;
  my $class = ref($caller) || $caller;   # support both class and instance invocation
  my $intervals = shift;
  if (defined $intervals ) {
    assert_ref($intervals, 'ARRAY');
  }
  my $self = bless({}, $class);
  # Recursively partition the intervals into the tree; top_node is the root
  # (undef when the interval set is empty).
  $self->{top_node} = $self->_divide_intervals($intervals);
  return $self;
}
=head2 root
Arg [] : none
Example : my $root = $tree->root();
Description : Return the tree top node
Returntype : Bio::EnsEMBL::Utils::Tree::Interval::Immutable::Node
Exceptions : none
Caller : general
=cut
# Accessor for the tree's top node (undef for an empty tree).
sub root {
  return shift->{top_node};
}
=head2 query
Arg [1] : scalar, $start
Where the query interval begins
Arg [2] : (optional) scalar, $end
Where the query interval ends
Example : my $results = $tree->query(121626874, 122092717);
Description : Query the tree if its intervals overlap the interval whose start
and end points are specified by the argument list.
If end is not specified, it is assumed to be the same as start
so effectively making a point query.
Returntype : An arrayref of Bio::EnsEMBL::Utils::Interval instances
Exceptions : none
Caller : general
=cut
sub query {
  my ($self, $start, $end) = @_;
  my $interval;
  if (defined $start) {
    # A missing end makes this a point query: an interval with start == end.
    $end = $start unless defined $end;
    $interval = Bio::EnsEMBL::Utils::Interval->new($start, $end);
  }
  return [] unless $interval;
  # Point queries are delegated to the recursive point-search helper.
  return $self->_query_point($self->root, $interval->start, []) if $interval->is_point;
  my $result = [];
  # NOTE(review): "unless A or B" bails out only when there is no root AND
  # the interval is non-empty; the intent was presumably to bail out when
  # there is no root OR the interval is empty — confirm. With a defined
  # root the loop below runs regardless of is_empty.
  return $result unless $self->root or $interval->is_empty;
  my $node = $self->root;
  while ($node) {
    if ($interval->contains($node->x_center)) {
      # Query spans this node's centre point: every centre interval matches,
      # and both subtrees may contribute further matches.
      push @{$result}, @{$node->s_center_beg};
      $self->_range_query_left($node->left, $interval, $result);
      $self->_range_query_right($node->right, $interval, $result);
      last;
    }
    if ($interval->is_left_of($node->x_center)) {
      # Query lies left of the centre: scan centre intervals sorted by start
      # until the first one that no longer intersects the query.
      foreach my $s_beg (@{$node->s_center_beg}) {
        last unless $interval->intersects($s_beg);
        push @{$result}, $s_beg;
      }
      $node = $node->left;
    } else {
      # Query lies right of the centre: use the list sorted by end instead.
      foreach my $s_end (@{$node->s_center_end}) {
        last unless $interval->intersects($s_end);
        push @{$result}, $s_end;
      }
      $node = $node->right;
    }
  }
  # De-duplicate and return the matches ordered by interval start.
  return sort_by_begin(uniq($result));
}
# Recursive helper for point queries: accumulate into $result every interval
# stored in the subtree rooted at $node that contains $point.
# Returns an arrayref of the accumulated intervals, sorted by start and
# de-duplicated (consistent with the range-query path in query()).
sub _query_point {
  my ($self, $node, $point, $result) = @_;
  # Fell off the tree: return what has been accumulated so far. The
  # top-level invocation below sorts and de-duplicates.
  return $result unless $node;
  if ($point <= $node->x_center) {
    # All intervals in S_center end at or after x_center >= point, so we only
    # need those that begin at or before the point. The list sorted by
    # beginnings lets us stop at the first interval starting after the point.
    foreach my $s_beg (@{$node->s_center_beg}) {
      last if $s_beg->is_right_of($point);
      push @{$result}, $s_beg;
    }
    # since point <= x_center, further matches can only be in the left subtree
    $self->_query_point($node->left, $point, $result);
  } else {
    # All intervals in S_center begin before the point, so we only need those
    # ending at or after it; use the list sorted by interval endings.
    foreach my $s_end (@{$node->s_center_end}) {
      last if $s_end->is_left_of($point);
      push @{$result}, $s_end;
    }
    # since point > x_center, further matches can only be in the right subtree
    $self->_query_point($node->right, $point, $result);
  }
  # Previously this statement was unreachable because both branches returned
  # the recursive call directly, so point queries skipped the sort/uniq that
  # range queries perform. Accumulate via side effect above and normalise here.
  return sort_by_begin(uniq($result));
}
# This corresponds to the left branch of the range search, once we find a node, whose
# midpoint is contained in the query interval. All intervals in the left subtree of that node
# are guaranteed to intersect with the query, if they have an endpoint greater or equal than
# the start of the query interval. Basically, this means that every time we branch to the left
# in the binary search, we need to add the whole right subtree to the result set.
sub _range_query_left {
  my ($self, $node, $interval, $result) = @_;
  while ($node) {
    if ($interval->contains($node->x_center)) {
      # Centre is inside the query: all centre intervals match, and the whole
      # right subtree lies inside the query as well.
      push @{$result}, @{$node->s_center_beg};
      if ($node->right) {
        # in-order traversal of the right subtree to add all its intervals
        $self->_in_order_traversal($node->right, $result);
      }
      $node = $node->left;
    } else {
      # Centre is left of the query: scan centre intervals sorted by end and
      # stop at the first one ending before the query starts.
      foreach my $seg_end (@{$node->s_center_end}) {
        last if $seg_end->is_left_of($interval);
        push @{$result}, $seg_end;
      }
      $node = $node->right;
    }
  }
}
# This corresponds to the right branch of the range search, once we find a node, whose
# midpoint is contained in the query interval. All intervals in the right subtree of that node
# are guaranteed to intersect with the query, if they have an endpoint smaller or equal than
# the end of the query interval. Basically, this means that every time we branch to the right
# in the binary search, we need to add the whole left subtree to the result set.
sub _range_query_right {
  my ($self, $node, $interval, $result) = @_;
  while ($node) {
    if ($interval->contains($node->x_center)) {
      # Centre is inside the query: all centre intervals match, and the whole
      # left subtree lies inside the query as well.
      push @{$result}, @{$node->s_center_beg};
      if ($node->left) {
        # in-order traversal of the left subtree to add all its intervals
        $self->_in_order_traversal($node->left, $result);
      }
      $node = $node->right;
    } else {
      # Centre is right of the query: scan centre intervals sorted by start
      # and stop at the first one starting after the query ends.
      foreach my $seg_beg (@{$node->s_center_beg}) {
        last if $seg_beg->is_right_of($interval);
        push @{$result}, $seg_beg;
      }
      $node = $node->left;
    }
  }
}
# Public helper: return an arrayref of all intervals in the tree collected
# by an in-order traversal from the root.
sub in_order_traversal {
  my ($self) = @_;
  my $result = [];
  $self->_in_order_traversal($self->root, $result);
  return $result;
}
# Recursive in-order traversal: append every interval in the subtree rooted
# at $node to $result (left subtree, then this node's centre set, then right).
sub _in_order_traversal {
  my ($self, $node, $result) = @_;
  return unless $node;
  $result ||= [];
  $self->_in_order_traversal($node->left, $result);
  push @{$result}, @{$node->s_center_beg};
  $self->_in_order_traversal($node->right, $result);
}
# Recursively build the subtree for the given set of intervals, partitioning
# them around a centre point into left / centre / right sets.
# Returns the new node, or undef when there are no intervals.
# Note: "return undef" (not a bare "return") is deliberate — this is called
# in list context inside Node->new's argument list, where a bare return
# would collapse to an empty list and shift the remaining arguments.
sub _divide_intervals {
  my ($self, $intervals) = @_;
  # Guard against both an undefined list (new() explicitly permits being
  # called without intervals) and an empty one; previously an undef list
  # died on the array dereference below.
  return undef unless $intervals && scalar @{$intervals};
  # Use the median interval start as this node's centre point.
  my $x_center = $self->_center($intervals);
  my ($s_center, $s_left, $s_right) = ([], [], []);
  foreach my $interval (@{$intervals}) {
    if ($interval->spans_origin) {
      throw "Cannot build a tree containing an interval that spans the origin";
    }
    if ($interval->end < $x_center) {
      push @{$s_left}, $interval;
    } elsif ($interval->start > $x_center) {
      push @{$s_right}, $interval;
    } else {
      push @{$s_center}, $interval;
    }
  }
  # Explicit return; the original relied on the implicit value of the last
  # statement via an assignment to an unused lexical.
  return Bio::EnsEMBL::Utils::Tree::Interval::Immutable::Node->new($x_center,
             $s_center,
             $self->_divide_intervals($s_left),
             $self->_divide_intervals($s_right));
}
# Choose the centre point for a node: the start of the median interval
# (by start coordinate) of the given set.
sub _center {
  my ($self, $intervals) = @_;
  my $sorted_intervals = sort_by_begin($intervals);
  my $len = scalar @{$sorted_intervals};
  return $sorted_intervals->[int($len/2)]->start;
}
# Return a new arrayref with the intervals sorted by start coordinate,
# using a Schwartzian transform so ->start is computed once per interval.
sub sort_by_begin {
  my $intervals = shift;
  return [ map { $_->[1] } sort { $a->[0] <=> $b->[0] } map { [ $_->start, $_ ] } @{$intervals} ];
}
# Return a new arrayref with duplicate interval references removed,
# preserving first-seen order. Tie::RefHash keys the seen-hash by object
# reference identity rather than by stringified value.
sub uniq {
  my $intervals = shift;
  tie my %seen, 'Tie::RefHash';
  return [ grep { ! $seen{ $_ }++ } @{$intervals} ];
}
1;
|
apache-2.0
|
google/pigweed
|
pw_cpu_exception_cortex_m/pw_cpu_exception_cortex_m_private/cortex_m_constants.h
|
4708
|
// Copyright 2021 The Pigweed Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy of
// the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
#pragma once
#include <cstdint>
#include "pw_preprocessor/arch.h"
namespace pw::cpu_exception::cortex_m {
// CMSIS/Cortex-M/ARMv7 related constants.
// These values are from the ARMv7-M Architecture Reference Manual DDI 0403E.b.
// https://static.docs.arm.com/ddi0403/e/DDI0403E_B_armv7m_arm.pdf
// Exception (ISR) numbers for the fault handlers.
constexpr uint32_t kThreadModeIsrNum = 0x0;
constexpr uint32_t kNmiIsrNum = 0x2;
constexpr uint32_t kHardFaultIsrNum = 0x3;
constexpr uint32_t kMemFaultIsrNum = 0x4;
constexpr uint32_t kBusFaultIsrNum = 0x5;
constexpr uint32_t kUsageFaultIsrNum = 0x6;
// Masks for Interrupt Control and State Register ICSR (ARMv7-M Section B3.2.4)
// VECTACTIVE occupies the low 9 bits of ICSR.
constexpr uint32_t kIcsrVectactiveMask = (1 << 9) - 1;
// Masks for individual bits of HFSR. (ARMv7-M Section B3.2.16)
constexpr uint32_t kHfsrForcedMask = (0x1 << 30);
// Masks for different sections of CFSR. (ARMv7-M Section B3.2.15)
// CFSR packs three sub-registers: MMFSR (byte 0), BFSR (byte 1),
// UFSR (upper half-word).
constexpr uint32_t kCfsrMemFaultMask = 0x000000ff;
constexpr uint32_t kCfsrBusFaultMask = 0x0000ff00;
constexpr uint32_t kCfsrUsageFaultMask = 0xffff0000;
// Masks for individual bits of CFSR. (ARMv7-M Section B3.2.15)
// Memory faults (MemManage Status Register)
constexpr uint32_t kCfsrMemFaultStart = (0x1);
constexpr uint32_t kCfsrIaccviolMask = (kCfsrMemFaultStart << 0);
constexpr uint32_t kCfsrDaccviolMask = (kCfsrMemFaultStart << 1);
constexpr uint32_t kCfsrMunstkerrMask = (kCfsrMemFaultStart << 3);
constexpr uint32_t kCfsrMstkerrMask = (kCfsrMemFaultStart << 4);
constexpr uint32_t kCfsrMlsperrMask = (kCfsrMemFaultStart << 5);
constexpr uint32_t kCfsrMmarvalidMask = (kCfsrMemFaultStart << 7);
// Bus faults (BusFault Status Register)
constexpr uint32_t kCfsrBusFaultStart = (0x1 << 8);
constexpr uint32_t kCfsrIbuserrMask = (kCfsrBusFaultStart << 0);
constexpr uint32_t kCfsrPreciserrMask = (kCfsrBusFaultStart << 1);
constexpr uint32_t kCfsrImpreciserrMask = (kCfsrBusFaultStart << 2);
constexpr uint32_t kCfsrUnstkerrMask = (kCfsrBusFaultStart << 3);
constexpr uint32_t kCfsrStkerrMask = (kCfsrBusFaultStart << 4);
constexpr uint32_t kCfsrLsperrMask = (kCfsrBusFaultStart << 5);
constexpr uint32_t kCfsrBfarvalidMask = (kCfsrBusFaultStart << 7);
// Usage faults (UsageFault Status Register)
constexpr uint32_t kCfsrUsageFaultStart = (0x1 << 16);
constexpr uint32_t kCfsrUndefinstrMask = (kCfsrUsageFaultStart << 0);
constexpr uint32_t kCfsrInvstateMask = (kCfsrUsageFaultStart << 1);
constexpr uint32_t kCfsrInvpcMask = (kCfsrUsageFaultStart << 2);
constexpr uint32_t kCfsrNocpMask = (kCfsrUsageFaultStart << 3);
#if _PW_ARCH_ARM_V8M_MAINLINE
// STKOF (stack overflow) exists only on ARMv8-M Mainline.
constexpr uint32_t kCfsrStkofMask = (kCfsrUsageFaultStart << 4);
#endif  // _PW_ARCH_ARM_V8M_MAINLINE
constexpr uint32_t kCfsrUnalignedMask = (kCfsrUsageFaultStart << 8);
constexpr uint32_t kCfsrDivbyzeroMask = (kCfsrUsageFaultStart << 9);
// Bit masks for an exception return value. (ARMv7-M Section B1.5.8)
constexpr uint32_t kExcReturnStackMask = 0x1u << 2;  // 0=MSP, 1=PSP
constexpr uint32_t kExcReturnModeMask = 0x1u << 3;   // 0=Handler, 1=Thread
constexpr uint32_t kExcReturnBasicFrameMask = 0x1u << 4;
// Mask for the IPSR, bits 8:0, of the xPSR register.
constexpr uint32_t kXpsrIpsrMask = 0b1'1111'1111;
// Bit masks for the control register. (ARMv7-M Section B1.4.4)
// The SPSEL bit is only valid while in Thread Mode:
constexpr uint32_t kControlThreadModeStackMask = 0x1u << 1;  // 0=MSP, 1=PSP
// Memory mapped registers. (ARMv7-M Section B3.2.2, Table B3-4)
// TODO(pwbug/316): Only some of these are supported on ARMv6-M.
inline volatile uint32_t& cortex_m_cfsr =
    *reinterpret_cast<volatile uint32_t*>(0xE000ED28u);
inline volatile uint32_t& cortex_m_mmfar =
    *reinterpret_cast<volatile uint32_t*>(0xE000ED34u);
inline volatile uint32_t& cortex_m_bfar =
    *reinterpret_cast<volatile uint32_t*>(0xE000ED38u);
inline volatile uint32_t& cortex_m_icsr =
    *reinterpret_cast<volatile uint32_t*>(0xE000ED04u);
inline volatile uint32_t& cortex_m_hfsr =
    *reinterpret_cast<volatile uint32_t*>(0xE000ED2Cu);
inline volatile uint32_t& cortex_m_shcsr =
    *reinterpret_cast<volatile uint32_t*>(0xE000ED24u);
}  // namespace pw::cpu_exception::cortex_m
|
apache-2.0
|
muffato/public-plugins
|
ensembl/htdocs/ssi/species/Tarsius_syrichta_annotation.html
|
971
|
<p>Owing to the fragmentary nature of this preliminary assembly,
it was necessary to arrange some scaffolds into "gene-scaffold"
super-structures, in order to present complete genes. There are
8606 such gene-scaffolds, with identifiers of the form "GeneScaffold_1".
</p>
<ul><li><a href="/info/genome/genebuild/2x_genomes.html">Details of the gene-scaffold construction and subsequent gene-build</a></li></ul>
<h3>Mammalian Genome Project</h3>
<p><i>Tarsius syrichta</i> is one of 24 mammals that will be sequenced as part of the
Mammalian Genome Project, funded by the National Institutes of Health
(NIH). A group of species were chosen to maximise the branch length of
the evolutionary tree while representing the diversity of mammalian
species. Low-coverage 2X assemblies will be produced for these mammals
and used in alignments for cross-species comparison. The aim is to
increase our understanding of functional elements, especially in the
human genome.
</p>
|
apache-2.0
|
hortonworks/cloudbreak
|
core/src/main/java/com/sequenceiq/cloudbreak/service/user/UserProfileDecorator.java
|
720
|
package com.sequenceiq.cloudbreak.service.user;
import javax.inject.Inject;
import org.springframework.stereotype.Service;
import com.sequenceiq.cloudbreak.api.endpoint.v4.userprofile.responses.UserProfileV4Response;
import com.sequenceiq.cloudbreak.auth.crn.Crn;
import com.sequenceiq.cloudbreak.auth.altus.EntitlementService;
@Service
public class UserProfileDecorator {

    @Inject
    private EntitlementService entitlementService;

    /**
     * Populates the given profile response with the entitlements of the
     * account identified by the supplied user CRN and returns it.
     *
     * @param userProfileV4Response the response object to enrich (mutated in place)
     * @param userCrn               CRN string identifying the user; its account id
     *                              selects the entitlements
     * @return the same response instance, with entitlements set
     */
    public UserProfileV4Response decorate(UserProfileV4Response userProfileV4Response, String userCrn) {
        String accountId = Crn.safeFromString(userCrn).getAccountId();
        userProfileV4Response.setEntitlements(entitlementService.getEntitlements(accountId));
        return userProfileV4Response;
    }
}
|
apache-2.0
|
tensorflow/agents
|
tf_agents/trajectories/trajectory_test.py
|
14383
|
# coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for trajectory."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
from tf_agents.drivers import dynamic_episode_driver
from tf_agents.drivers import test_utils as drivers_test_utils
from tf_agents.environments import tf_py_environment
from tf_agents.trajectories import time_step as ts
from tf_agents.trajectories import trajectory
from tf_agents.utils import test_utils
class TrajectoryTest(test_utils.TestCase):
def testFirstTensors(self):
observation = ()
action = ()
policy_info = ()
reward = tf.constant([1.0, 1.0, 2.0])
discount = tf.constant([1.0, 1.0, 1.0])
traj = trajectory.first(observation, action, policy_info, reward, discount)
self.assertTrue(tf.is_tensor(traj.step_type))
traj_val = self.evaluate(traj)
self.assertAllEqual(traj_val.step_type, [ts.StepType.FIRST] * 3)
self.assertAllEqual(traj_val.next_step_type, [ts.StepType.MID] * 3)
def testFirstArrays(self):
observation = ()
action = ()
policy_info = ()
reward = np.array([1.0, 1.0, 2.0])
discount = np.array([1.0, 1.0, 1.0])
traj = trajectory.first(observation, action, policy_info, reward, discount)
self.assertFalse(tf.is_tensor(traj.step_type))
self.assertAllEqual(traj.step_type, [ts.StepType.FIRST] * 3)
self.assertAllEqual(traj.next_step_type, [ts.StepType.MID] * 3)
def testMidTensors(self):
observation = ()
action = ()
policy_info = ()
reward = tf.constant([1.0, 1.0, 2.0])
discount = tf.constant([1.0, 1.0, 1.0])
traj = trajectory.mid(observation, action, policy_info, reward, discount)
self.assertTrue(tf.is_tensor(traj.step_type))
traj_val = self.evaluate(traj)
self.assertAllEqual(traj_val.step_type, [ts.StepType.MID] * 3)
self.assertAllEqual(traj_val.next_step_type, [ts.StepType.MID] * 3)
def testMidArrays(self):
observation = ()
action = ()
policy_info = ()
reward = np.array([1.0, 1.0, 2.0])
discount = np.array([1.0, 1.0, 1.0])
traj = trajectory.mid(observation, action, policy_info, reward, discount)
self.assertFalse(tf.is_tensor(traj.step_type))
self.assertAllEqual(traj.step_type, [ts.StepType.MID] * 3)
self.assertAllEqual(traj.next_step_type, [ts.StepType.MID] * 3)
def testLastTensors(self):
observation = ()
action = ()
policy_info = ()
reward = tf.constant([1.0, 1.0, 2.0])
discount = tf.constant([1.0, 1.0, 1.0])
traj = trajectory.last(observation, action, policy_info, reward, discount)
self.assertTrue(tf.is_tensor(traj.step_type))
traj_val = self.evaluate(traj)
self.assertAllEqual(traj_val.step_type, [ts.StepType.MID] * 3)
self.assertAllEqual(traj_val.next_step_type, [ts.StepType.LAST] * 3)
def testLastArrays(self):
observation = ()
action = ()
policy_info = ()
reward = np.array([1.0, 1.0, 2.0])
discount = np.array([1.0, 1.0, 1.0])
traj = trajectory.last(observation, action, policy_info, reward, discount)
self.assertFalse(tf.is_tensor(traj.step_type))
self.assertAllEqual(traj.step_type, [ts.StepType.MID] * 3)
self.assertAllEqual(traj.next_step_type, [ts.StepType.LAST] * 3)
def testSingleStepTensors(self):
observation = ()
action = ()
policy_info = ()
reward = tf.constant([1.0, 1.0, 2.0])
discount = tf.constant([1.0, 1.0, 1.0])
traj = trajectory.single_step(observation, action, policy_info, reward,
discount)
self.assertTrue(tf.is_tensor(traj.step_type))
traj_val = self.evaluate(traj)
self.assertAllEqual(traj_val.step_type, [ts.StepType.FIRST] * 3)
self.assertAllEqual(traj_val.next_step_type, [ts.StepType.LAST] * 3)
def testSingleStepArrays(self):
observation = ()
action = ()
policy_info = ()
reward = np.array([1.0, 1.0, 2.0])
discount = np.array([1.0, 1.0, 1.0])
traj = trajectory.single_step(observation, action, policy_info, reward,
discount)
self.assertFalse(tf.is_tensor(traj.step_type))
self.assertAllEqual(traj.step_type, [ts.StepType.FIRST] * 3)
self.assertAllEqual(traj.next_step_type, [ts.StepType.LAST] * 3)
def testFromEpisodeTensor(self):
observation = tf.random.uniform((4, 5))
action = ()
policy_info = ()
reward = tf.random.uniform((4,))
traj = trajectory.from_episode(
observation, action, policy_info, reward, discount=None)
self.assertTrue(tf.is_tensor(traj.step_type))
traj_val, obs_val, reward_val = self.evaluate((traj, observation, reward))
first = ts.StepType.FIRST
mid = ts.StepType.MID
last = ts.StepType.LAST
self.assertAllEqual(
traj_val.step_type, [first, mid, mid, mid])
self.assertAllEqual(
traj_val.next_step_type, [mid, mid, mid, last])
self.assertAllClose(traj_val.observation, obs_val)
self.assertAllEqual(traj_val.reward, reward_val)
self.assertAllEqual(traj_val.discount, [1.0, 1.0, 1.0, 1.0])
def testFromEpisodeWithCompositeTensorOfTensors(self):
observation = tf.SparseTensor(
indices=tf.random.uniform((7, 2), maxval=9, dtype=tf.int64),
values=tf.random.uniform((7,)),
dense_shape=[4, 10]) # The 4 is important, it must match reward length.
action = ()
policy_info = ()
reward = tf.random.uniform((4,))
traj = trajectory.from_episode(
observation, action, policy_info, reward, discount=None)
self.assertTrue(tf.is_tensor(traj.step_type))
traj_val, obs_val, reward_val = self.evaluate((traj, observation, reward))
first = ts.StepType.FIRST
mid = ts.StepType.MID
last = ts.StepType.LAST
self.assertAllEqual(
traj_val.step_type, [first, mid, mid, mid])
self.assertAllEqual(
traj_val.next_step_type, [mid, mid, mid, last])
self.assertAllClose(traj_val.observation, obs_val)
self.assertAllEqual(traj_val.reward, reward_val)
self.assertAllEqual(traj_val.discount, [1.0, 1.0, 1.0, 1.0])
def testFromEpisodeArray(self):
observation = np.random.rand(4, 5)
action = ()
policy_info = ()
reward = np.random.rand(4)
traj = trajectory.from_episode(
observation, action, policy_info, reward, discount=None)
self.assertFalse(tf.is_tensor(traj.step_type))
first = ts.StepType.FIRST
mid = ts.StepType.MID
last = ts.StepType.LAST
self.assertAllEqual(
traj.step_type, [first, mid, mid, mid])
self.assertAllEqual(
traj.next_step_type, [mid, mid, mid, last])
self.assertAllEqual(traj.observation, observation)
self.assertAllEqual(traj.reward, reward)
self.assertAllEqual(traj.discount, [1.0, 1.0, 1.0, 1.0])
def testToTransition(self):
first = ts.StepType.FIRST
mid = ts.StepType.MID
last = ts.StepType.LAST
# Define a batch size 1, 3-step trajectory.
traj = trajectory.Trajectory(
step_type=np.array([[first, mid, last]]),
next_step_type=np.array([[mid, last, first]]),
observation=np.array([[10.0, 20.0, 30.0]]),
action=np.array([[11.0, 22.0, 33.0]]),
# reward at step 2 is an invalid dummy reward.
reward=np.array([[0.0, 1.0, 2.0]]),
discount=np.array([[1.0, 1.0, 0.0]]),
policy_info=np.array([[1.0, 2.0, 3.0]]))
transition = trajectory.to_transition(traj)
self.assertIsInstance(transition, trajectory.Transition)
time_steps, policy_steps, next_time_steps = transition
self.assertAllEqual(time_steps.step_type, np.array([[first, mid]]))
self.assertAllEqual(time_steps.observation, np.array([[10.0, 20.0]]))
# reward and discount are filled with zero (dummy) values
self.assertAllEqual(time_steps.reward, np.array([[0.0, 0.0]]))
self.assertAllEqual(time_steps.discount, np.array([[0.0, 0.0]]))
self.assertAllEqual(next_time_steps.step_type, np.array([[mid, last]]))
self.assertAllEqual(next_time_steps.observation, np.array([[20.0, 30.0]]))
self.assertAllEqual(next_time_steps.reward, np.array([[0.0, 1.0]]))
self.assertAllEqual(next_time_steps.discount, np.array([[1.0, 1.0]]))
self.assertAllEqual(policy_steps.action, np.array([[11.0, 22.0]]))
self.assertAllEqual(policy_steps.info, np.array([[1.0, 2.0]]))
def testToNStepTransitionForNEquals1(self):
first = ts.StepType.FIRST
last = ts.StepType.LAST
# Define a batch size 1, 2-step trajectory.
traj = trajectory.Trajectory(
step_type=np.array([[first, last]]),
next_step_type=np.array([[last, first]]),
observation=np.array([[10.0, 20.0]]),
action=np.array([[11.0, 22.0]]),
# reward & discount values at step 1 is an invalid dummy reward.
reward=np.array([[-1.0, 0.0]]),
discount=np.array([[0.9, 0.0]]),
policy_info=np.array([[10.0, 20.0]]))
transition = trajectory.to_n_step_transition(traj, gamma=0.5)
self.assertIsInstance(transition, trajectory.Transition)
time_steps, policy_steps, next_time_steps = transition
self.assertAllEqual(time_steps.step_type, np.array([first]))
self.assertAllEqual(time_steps.observation, np.array([10.0]))
self.assertAllEqual(time_steps.reward, np.array([np.nan]))
self.assertAllEqual(time_steps.discount, np.array([np.nan]))
self.assertAllEqual(next_time_steps.step_type, np.array([last]))
self.assertAllEqual(next_time_steps.observation, np.array([20.0]))
# r0
self.assertAllEqual(next_time_steps.reward, np.array([-1.0]))
# d0
self.assertAllEqual(next_time_steps.discount, np.array([0.9]))
self.assertAllEqual(policy_steps.action, np.array([11.0]))
self.assertAllEqual(policy_steps.info, np.array([10.0]))
def testToNStepTransition(self):
first = ts.StepType.FIRST
mid = ts.StepType.MID
last = ts.StepType.LAST
gamma = 0.5
# Define a batch size 1, 4-step trajectory.
traj = trajectory.Trajectory(
step_type=np.array([[first, mid, mid, last]]),
next_step_type=np.array([[mid, mid, last, first]]),
observation=np.array([[10.0, 20.0, 30.0, 40.0]]),
action=np.array([[11.0, 22.0, 33.0, 44.0]]),
# reward & discount values at step 3 is an invalid dummy reward.
reward=np.array([[-1.0, 1.0, 2.0, 0.0]]),
discount=np.array([[0.9, 0.95, 1.0, 0.0]]),
policy_info=np.array([[10.0, 20.0, 30.0, 40.0]]))
transition = trajectory.to_n_step_transition(traj, gamma=gamma)
self.assertIsInstance(transition, trajectory.Transition)
time_steps, policy_steps, next_time_steps = transition
self.assertAllEqual(time_steps.step_type, np.array([first]))
self.assertAllEqual(time_steps.observation, np.array([10.0]))
self.assertAllEqual(time_steps.reward, np.array([np.nan]))
self.assertAllEqual(time_steps.discount, np.array([np.nan]))
self.assertAllEqual(next_time_steps.step_type, np.array([last]))
self.assertAllEqual(next_time_steps.observation, np.array([40.0]))
# r0 + r1 * g * d0 + r2 * g * d0 * d1
# == -1.0 + 1.0*0.5*(0.9) + 2.0*(0.5**2)*(0.9*0.95)
self.assertAllEqual(
next_time_steps.reward,
np.array([-1.0 + 1.0 * gamma * 0.9 + 2.0 * gamma**2 * 0.9 * 0.95]))
# gamma**2 * (d0 * d1 * d2)
self.assertAllEqual(
next_time_steps.discount, np.array([gamma**2 * (0.9 * 0.95 * 1.0)]))
self.assertAllEqual(policy_steps.action, np.array([11.0]))
self.assertAllEqual(policy_steps.info, np.array([10.0]))
def testToTransitionHandlesTrajectoryFromDriverCorrectly(self):
env = tf_py_environment.TFPyEnvironment(
drivers_test_utils.PyEnvironmentMock())
policy = drivers_test_utils.TFPolicyMock(
env.time_step_spec(), env.action_spec())
replay_buffer = drivers_test_utils.make_replay_buffer(policy)
driver = dynamic_episode_driver.DynamicEpisodeDriver(
env, policy, num_episodes=3, observers=[replay_buffer.add_batch])
run_driver = driver.run()
rb_gather_all = replay_buffer.gather_all()
self.evaluate(tf.compat.v1.global_variables_initializer())
self.evaluate(run_driver)
trajectories = self.evaluate(rb_gather_all)
transitions = trajectory.to_transition(trajectories)
self.assertIsInstance(transitions, trajectory.Transition)
time_steps, policy_step, next_time_steps = transitions
self.assertAllEqual(time_steps.observation,
trajectories.observation[:, :-1])
self.assertAllEqual(time_steps.step_type, trajectories.step_type[:, :-1])
self.assertAllEqual(next_time_steps.observation,
trajectories.observation[:, 1:])
self.assertAllEqual(next_time_steps.step_type,
trajectories.step_type[:, 1:])
self.assertAllEqual(next_time_steps.reward, trajectories.reward[:, :-1])
self.assertAllEqual(next_time_steps.discount, trajectories.discount[:, :-1])
self.assertAllEqual(policy_step.action, trajectories.action[:, :-1])
self.assertAllEqual(policy_step.info, trajectories.policy_info[:, :-1])
def testToTransitionSpec(self):
env = tf_py_environment.TFPyEnvironment(
drivers_test_utils.PyEnvironmentMock())
policy = drivers_test_utils.TFPolicyMock(
env.time_step_spec(), env.action_spec())
trajectory_spec = policy.trajectory_spec
transition_spec = trajectory.to_transition_spec(trajectory_spec)
self.assertIsInstance(transition_spec, trajectory.Transition)
ts_spec, ps_spec, nts_spec = transition_spec
self.assertAllEqual(ts_spec, env.time_step_spec())
self.assertAllEqual(ps_spec.action, env.action_spec())
self.assertAllEqual(nts_spec, env.time_step_spec())
if __name__ == '__main__':
tf.test.main()
|
apache-2.0
|
wildfly-swarm/wildfly-swarm-javadocs
|
2.3.0.Final/apidocs/org/wildfly/swarm/ejb/package-frame.html
|
922
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_151) on Wed Jan 16 11:48:21 MST 2019 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>org.wildfly.swarm.ejb (BOM: * : All 2.3.0.Final API)</title>
<meta name="date" content="2019-01-16">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
</head>
<body>
<h1 class="bar"><a href="../../../../org/wildfly/swarm/ejb/package-summary.html" target="classFrame">org.wildfly.swarm.ejb</a></h1>
<div class="indexContainer">
<h2 title="Classes">Classes</h2>
<ul title="Classes">
<li><a href="EJBFraction.html" title="class in org.wildfly.swarm.ejb" target="classFrame">EJBFraction</a></li>
</ul>
</div>
</body>
</html>
|
apache-2.0
|
Ouyangan/hunt-admin
|
hunt-web/src/main/java/com/hunt/system/exception/ForbiddenIpException.java
|
597
|
package com.hunt.system.exception;
/**
 * Thrown when a request originates from an IP address that the
 * application has blocked.
 *
 * @Author ouyangan
 * @Date 2016/10/29/17:32
 * @Description exception raised for requests from forbidden IP addresses
 */
public class ForbiddenIpException extends Exception {
    /**
     * Constructs a new exception with the specified detail message. The
     * cause is not initialized, and may subsequently be initialized by
     * a call to {@link #initCause}.
     *
     * @param message the detail message, e.g. the offending IP address;
     *                retrievable later via {@link #getMessage()}
     */
    public ForbiddenIpException(String message) {
        super(message);
    }
}
|
apache-2.0
|
kjirou/developers-defense
|
flow-typed/npm/rpgparameter_vx.x.x.js
|
2281
|
// flow-typed signature: cf33449b9d38407cc88b416ce0c87eec
// flow-typed version: <<STUB>>/rpgparameter_v2.1.0/flow_v0.41.0
/**
* This is an autogenerated libdef stub for:
*
* 'rpgparameter'
*
* Fill this stub out by replacing all the `any` types.
*
* Once filled out, we encourage you to share your work with the
* community by sending a pull request to:
* https://github.com/flowtype/flow-typed
*/
declare module 'rpgparameter' {
declare module.exports: any;
}
/**
* We include stubs for each file inside this npm package in case you need to
* require those files directly. Feel free to delete any files that aren't
* needed.
*/
declare module 'rpgparameter/examples/mixin-parameters' {
declare module.exports: any;
}
declare module 'rpgparameter/lib/aggregators' {
declare module.exports: any;
}
declare module 'rpgparameter/lib/utils' {
declare module.exports: any;
}
declare module 'rpgparameter/public/tests' {
declare module.exports: any;
}
declare module 'rpgparameter/test/index' {
declare module.exports: any;
}
declare module 'rpgparameter/test/lib/aggregators' {
declare module.exports: any;
}
declare module 'rpgparameter/test/lib/utils' {
declare module.exports: any;
}
// Filename aliases
declare module 'rpgparameter/examples/mixin-parameters.js' {
declare module.exports: $Exports<'rpgparameter/examples/mixin-parameters'>;
}
declare module 'rpgparameter/index' {
declare module.exports: $Exports<'rpgparameter'>;
}
declare module 'rpgparameter/index.js' {
declare module.exports: $Exports<'rpgparameter'>;
}
declare module 'rpgparameter/lib/aggregators.js' {
declare module.exports: $Exports<'rpgparameter/lib/aggregators'>;
}
declare module 'rpgparameter/lib/utils.js' {
declare module.exports: $Exports<'rpgparameter/lib/utils'>;
}
declare module 'rpgparameter/public/tests.js' {
declare module.exports: $Exports<'rpgparameter/public/tests'>;
}
declare module 'rpgparameter/test/index.js' {
declare module.exports: $Exports<'rpgparameter/test/index'>;
}
declare module 'rpgparameter/test/lib/aggregators.js' {
declare module.exports: $Exports<'rpgparameter/test/lib/aggregators'>;
}
declare module 'rpgparameter/test/lib/utils.js' {
declare module.exports: $Exports<'rpgparameter/test/lib/utils'>;
}
|
apache-2.0
|
Norhaven/FluentBoilerplate
|
DotNet/FluentBoilerplate/PublicContract/Traits/IRolesBasedTrait.cs
|
1638
|
/*
Copyright 2015 Chris Hannon
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
namespace FluentBoilerplate.Traits
{
    /// <summary>
    /// Represents a trait that allows contract requirements for roles
    /// </summary>
    /// <typeparam name="TContext">The context type</typeparam>
    public interface IRolesBasedTrait<TContext>
    {
        /// <summary>
        /// Indicates that the current identity must have a set of roles prior to performing a context action
        /// </summary>
        /// <param name="roles">The required roles</param>
        /// <returns>An instance of <typeparamref name="TContext"/> that contains the new requirements</returns>
        TContext RequireRoles(params IRole[] roles);
        /// <summary>
        /// Indicates that the current identity must not have a set of roles prior to performing a context action
        /// </summary>
        /// <param name="roles">The restricted roles</param>
        /// <returns>An instance of <typeparamref name="TContext"/> that contains the new requirements</returns>
        TContext MustNotHaveRoles(params IRole[] roles);
    }
}
|
apache-2.0
|
huang303513/AndroidBasicCommonDemos
|
11_FilePersistenceTest/app/src/test/java/www/huangchengdu/com/a11_filepersistencetest/ExampleUnitTest.java
|
405
|
package www.huangchengdu.com.a11_filepersistencetest;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {
    // Trivial sanity check; verifies the local JUnit harness is wired up.
    @Test
    public void addition_isCorrect() {
        assertEquals(4, 2 + 2);
    }
}
|
apache-2.0
|
dpquigl/YAM
|
src/pyyam/yam/handlers/yamMessageHandlerBase.py
|
1148
|
class yamMessageHandlerBase(object):
    """
    Base class for message handlers attached to a :class:`ZMQProcess`.

    Subclasses assign the protobuf Message class to ``self.cls`` and
    populate ``self.funcMap`` with a mapping from message type to the
    handler function for that type.
    """

    def __init__(self, rep_stream, stop):
        # Stream used for replies and the callable that stops the process.
        self._rep_stream = rep_stream
        self._stop = stop
        # Subclasses override these before the handler is first invoked.
        self.cls = None                  # protobuf Message class for parsing
        self.funcMap = {}                # message type -> handler function
        self.subMessageHandler = False   # True when fed already-parsed messages

    def __call__(self, msg):
        """
        Invoked when the registered stream receives *msg* — either a
        multipart frame list (as returned by
        :meth:`zmq.core.socket.Socket.recv_multipart`) that must be parsed,
        or an already-parsed message when ``subMessageHandler`` is set.
        """
        if self.subMessageHandler:
            message = msg
        else:
            message = self.cls()
            message.ParseFromString("".join(msg))
        dispatch = self.funcMap[message.type]
        return dispatch(message)
|
apache-2.0
|
wsv-accidis/sjoslaget
|
SjoslagetFrontend/web/stats/stats.css
|
280
|
@import url(https://fonts.googleapis.com/css?family=Roboto);
@import url(https://fonts.googleapis.com/css?family=Material+Icons);
* {
font-family: Roboto, Helvetica, Arial, sans-serif;
}
body {
background: white;
margin: 0;
}
div.loading {
padding: 5vw;
margin: 0 auto;
}
|
apache-2.0
|
mdoering/backbone
|
life/Fungi/Ascomycota/Leotiomycetes/Helotiales/Hyaloscyphaceae/Lachnum/Lachnum pulverulentum/ Syn. Dasyscyphus pulverulentus/README.md
|
233
|
# Dasyscyphus pulverulentus (Lib.) Sacc., 1889 SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Syll. fung. (Abellini) 8: 462 (1889)
#### Original name
null
### Remarks
null
|
apache-2.0
|
katallaxie/generator-angular2-ts
|
templates/app/grunt/clean.js
|
394
|
'use strict';
/**
* Grunt - clean
*
* Url: https://github.com/gruntjs/grunt-contrib-clean
*/
module.exports = ( grunt, config ) => {
return {
// clean destination of intermediares
all : {
options : {
force : true, // caution, this is to allow deletion outside of cwd
},
files : {
src : [ `${ config.path.www.base }/**/*` ]
}
}
};
};
|
apache-2.0
|
Anz/npl
|
src/lib/stdout.c
|
718
|
#include "lib/stdout.h"
#include "lib/string.h"
#include <stdio.h>
#include <stdarg.h>
#include <stdlib.h>
/*
 * Concatenate `num` lib_string varargs and write the result to stdout
 * (prefixed with "print: ").
 *
 * Each lib_string's backing memory (via array_memory) stores the element
 * count in slot 0 and the characters starting at slot 2.
 *
 * Fixes vs. original: malloc/realloc results are checked (the original
 * dereferenced NULL on OOM), the inner loop index no longer shadows the
 * outer one, and the copy index is size_t to avoid a signed/unsigned
 * comparison against `length`.
 */
void print(int num, ...) {
    va_list arguments;
    va_start(arguments, num);
    size_t size = 1;                    /* room for the trailing NUL */
    char* buffer = malloc(size);
    if (buffer == NULL) {
        va_end(arguments);
        return;                         /* out of memory: nothing to print */
    }
    for (int arg = 0; arg < num; arg++) {
        lib_string string = va_arg(arguments, lib_string);
        int32_t* memory = array_memory(string);
        size_t length = (size_t)memory[0];   /* element count lives in slot 0 */
        size += length;
        char* grown = realloc(buffer, size);
        if (grown == NULL) {
            free(buffer);
            va_end(arguments);
            return;
        }
        buffer = grown;
        /* characters start at slot 2; append to the tail of the buffer */
        for (size_t pos = 0; pos < length; pos++) {
            buffer[size - length - 1 + pos] = (char)memory[pos + 2];
        }
    }
    buffer[size - 1] = '\0';
    va_end(arguments);
    printf("print: %s\n", buffer);
    free(buffer);
}
|
apache-2.0
|
vschs007/buck
|
src/com/facebook/buck/versions/VersionedTargetGraphBuilder.java
|
19928
|
/*
* Copyright 2016-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.versions;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.InternalFlavor;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetGraphAndBuildTargets;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.util.MoreCollectors;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Throwables;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveAction;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.StreamSupport;
/**
* Takes a regular {@link TargetGraph}, resolves any versioned nodes, and returns a new graph with
* the versioned nodes removed.
*/
public class VersionedTargetGraphBuilder {
private static final Logger LOG = Logger.get(VersionedTargetGraphBuilder.class);
private final ForkJoinPool pool;
private final VersionSelector versionSelector;
private final TargetGraphAndBuildTargets unversionedTargetGraphAndBuildTargets;
/**
* The resolved version graph being built.
*/
private final VersionedTargetGraph.Builder targetGraphBuilder = VersionedTargetGraph.builder();
/**
* Map of the build targets to nodes in the resolved graph.
*/
private final ConcurrentHashMap<BuildTarget, TargetNode<?, ?>> index;
/**
* Fork-join actions for each root node.
*/
private final ConcurrentHashMap<BuildTarget, RootAction> rootActions;
/**
* Intermediate version info for each node.
*/
private final ConcurrentHashMap<BuildTarget, VersionInfo> versionInfo;
/**
* Count of root nodes.
*/
private final AtomicInteger roots = new AtomicInteger();
VersionedTargetGraphBuilder(
ForkJoinPool pool,
VersionSelector versionSelector,
TargetGraphAndBuildTargets unversionedTargetGraphAndBuildTargets) {
this.pool = pool;
this.versionSelector = versionSelector;
this.unversionedTargetGraphAndBuildTargets = unversionedTargetGraphAndBuildTargets;
this.index =
new ConcurrentHashMap<>(
unversionedTargetGraphAndBuildTargets.getTargetGraph().getNodes().size() * 4,
0.75f,
pool.getParallelism());
this.rootActions =
new ConcurrentHashMap<>(
unversionedTargetGraphAndBuildTargets.getTargetGraph().getNodes().size() / 2,
0.75f,
pool.getParallelism());
this.versionInfo =
new ConcurrentHashMap<>(
2 * unversionedTargetGraphAndBuildTargets.getTargetGraph().getNodes().size(),
0.75f,
pool.getParallelism());
}
private TargetNode<?, ?> getNode(BuildTarget target) {
return unversionedTargetGraphAndBuildTargets.getTargetGraph().get(target);
}
private Optional<TargetNode<?, ?>> getNodeOptional(BuildTarget target) {
return unversionedTargetGraphAndBuildTargets.getTargetGraph().getOptional(target);
}
private TargetNode<?, ?> indexPutIfAbsent(TargetNode<?, ?> node) {
return index.putIfAbsent(node.getBuildTarget(), node);
}
  /**
   * Get/cache the transitive version info for this node.
   *
   * Recursively computes, and memoizes in {@code this.versionInfo}, the map of
   * versioned build targets reachable from {@code node} to their version domains.
   */
  private VersionInfo getVersionInfo(TargetNode<?, ?> node) {
    // Fast path: reuse a previously computed result for this target.
    VersionInfo info = this.versionInfo.get(node.getBuildTarget());
    if (info != null) {
      return info;
    }
    Map<BuildTarget, ImmutableSet<Version>> versionDomain = new HashMap<>();
    Optional<TargetNode<VersionedAliasDescription.Arg, ?>> versionedNode =
        TargetGraphVersionTransformations.getVersionedNode(node);
    if (versionedNode.isPresent()) {
      ImmutableMap<Version, BuildTarget> versions =
          versionedNode.get().getConstructorArg().versions;
      // Merge in the versioned deps and the version domain.
      versionDomain.put(node.getBuildTarget(), versions.keySet());
      // If this version has only one possible choice, there's no need to wrap the constraints from
      // its transitive deps in an implication constraint.
      if (versions.size() == 1) {
        Map.Entry<Version, BuildTarget> ent = versions.entrySet().iterator().next();
        VersionInfo depInfo = getVersionInfo(getNode(ent.getValue()));
        versionDomain.putAll(depInfo.getVersionDomain());
      } else {
        // For each version choice, inherit the transitive constraints by wrapping them in an
        // implication dependent on the specific version that pulls them in.
        // NOTE(review): both branches of this if/else currently merge dep domains
        // identically; the "implication" wrapping described above is not visible
        // here — confirm whether the distinction is still intended.
        for (Map.Entry<Version, BuildTarget> ent : versions.entrySet()) {
          VersionInfo depInfo = getVersionInfo(getNode(ent.getValue()));
          versionDomain.putAll(depInfo.getVersionDomain());
        }
      }
    } else {
      // Merge in the constraints and version domain/deps from transitive deps.
      for (BuildTarget depTarget : TargetGraphVersionTransformations.getDeps(node)) {
        TargetNode<?, ?> dep = getNode(depTarget);
        if (TargetGraphVersionTransformations.isVersionPropagator(dep) ||
            TargetGraphVersionTransformations.getVersionedNode(dep).isPresent()) {
          VersionInfo depInfo = getVersionInfo(dep);
          versionDomain.putAll(depInfo.getVersionDomain());
        }
      }
    }
    info = VersionInfo.of(versionDomain);
    // Benign race: concurrent callers may compute the same info; last write wins.
    this.versionInfo.put(node.getBuildTarget(), info);
    return info;
  }
/**
* @return a flavor to which summarizes the given version selections.
*/
static Flavor getVersionedFlavor(SortedMap<BuildTarget, Version> versions) {
Preconditions.checkArgument(!versions.isEmpty());
Hasher hasher = Hashing.md5().newHasher();
for (Map.Entry<BuildTarget, Version> ent : versions.entrySet()) {
hasher.putString(ent.getKey().toString(), Charsets.UTF_8);
hasher.putString(ent.getValue().getName(), Charsets.UTF_8);
}
return InternalFlavor.of("v" + hasher.hash().toString().substring(0, 7));
}
private TargetNode<?, ?> resolveVersions(
TargetNode<?, ?> node,
ImmutableMap<BuildTarget, Version> selectedVersions) {
Optional<TargetNode<VersionedAliasDescription.Arg, ?>> versionedNode =
node.castArg(VersionedAliasDescription.Arg.class);
if (versionedNode.isPresent()) {
node =
getNode(
Preconditions.checkNotNull(
versionedNode.get().getConstructorArg().versions.get(
selectedVersions.get(node.getBuildTarget()))));
}
return node;
}
/**
* @return the {@link BuildTarget} to use in the resolved target graph, formed by adding a
* flavor generated from the given version selections.
*/
private Optional<BuildTarget> getTranslateBuildTarget(
TargetNode<?, ?> node,
ImmutableMap<BuildTarget, Version> selectedVersions) {
BuildTarget originalTarget = node.getBuildTarget();
node = resolveVersions(node, selectedVersions);
BuildTarget newTarget = node.getBuildTarget();
if (TargetGraphVersionTransformations.isVersionPropagator(node)) {
VersionInfo info = getVersionInfo(node);
Collection<BuildTarget> versionedDeps = info.getVersionDomain().keySet();
TreeMap<BuildTarget, Version> versions = new TreeMap<>();
for (BuildTarget depTarget : versionedDeps) {
versions.put(depTarget, selectedVersions.get(depTarget));
}
if (!versions.isEmpty()) {
Flavor versionedFlavor = getVersionedFlavor(versions);
newTarget = node.getBuildTarget().withAppendedFlavors(versionedFlavor);
}
}
return newTarget.equals(originalTarget) ?
Optional.empty() :
Optional.of(newTarget);
}
public TargetGraph build() throws VersionException, InterruptedException {
LOG.debug(
"Starting version target graph transformation (nodes %d)",
unversionedTargetGraphAndBuildTargets.getTargetGraph().getNodes().size());
long start = System.currentTimeMillis();
// Walk through explicit built targets, separating them into root and non-root nodes.
ImmutableList<RootAction> actions =
unversionedTargetGraphAndBuildTargets.getBuildTargets().stream()
.map(this::getNode)
.map(RootAction::new)
.collect(MoreCollectors.toImmutableList());
// Add actions to the `rootActions` member for bookkeeping.
actions.forEach(a -> rootActions.put(a.getRoot().getBuildTarget(), a));
// Kick off the jobs to process the root nodes.
actions.forEach(pool::submit);
// Wait for actions to complete.
for (RootAction action : actions) {
action.getChecked();
}
long end = System.currentTimeMillis();
LOG.debug(
"Finished version target graph transformation in %.2f (nodes %d, roots: %d)",
(end - start) / 1000.0,
index.size(),
roots.get());
return targetGraphBuilder.build();
}
public static TargetGraphAndBuildTargets transform(
VersionSelector versionSelector,
TargetGraphAndBuildTargets unversionedTargetGraphAndBuildTargets,
ForkJoinPool pool)
throws VersionException, InterruptedException {
return unversionedTargetGraphAndBuildTargets.withTargetGraph(
new VersionedTargetGraphBuilder(
pool,
versionSelector,
unversionedTargetGraphAndBuildTargets)
.build());
}
/**
* Transform a version sub-graph at the given root node.
*/
private class RootAction extends RecursiveAction {
private final TargetNode<?, ?> node;
RootAction(TargetNode<?, ?> node) {
this.node = node;
}
private final Predicate<BuildTarget> isVersionPropagator =
target -> TargetGraphVersionTransformations.isVersionPropagator(getNode(target));
private final Predicate<BuildTarget> isVersioned =
target -> TargetGraphVersionTransformations.getVersionedNode(getNode(target)).isPresent();
/**
* Process a non-root node in the graph.
*/
private TargetNode<?, ?> processNode(TargetNode<?, ?> node) throws VersionException {
// If we've already processed this node, exit now.
TargetNode<?, ?> processed = index.get(node.getBuildTarget());
if (processed != null) {
return processed;
}
// Add the node to the graph and recurse on its deps.
TargetNode<?, ?> oldNode = indexPutIfAbsent(node);
if (oldNode != null) {
node = oldNode;
} else {
targetGraphBuilder.addNode(node.getBuildTarget().withFlavors(), node);
for (TargetNode<?, ?> dep : process(node.getParseDeps())) {
targetGraphBuilder.addEdge(node, dep);
}
}
return node;
}
/**
* Dispatch new jobs to transform the given nodes in parallel and wait for their results.
*/
private Iterable<TargetNode<?, ?>> process(Iterable<BuildTarget> targets)
throws VersionException {
int size = Iterables.size(targets);
List<RootAction> newActions = new ArrayList<>(size);
List<RootAction> oldActions = new ArrayList<>(size);
List<TargetNode<?, ?>> nonRootNodes = new ArrayList<>(size);
for (BuildTarget target : targets) {
TargetNode<?, ?> node = getNode(target);
// If we see a root node, create an action to process it using the pool, since it's
// potentially heavy-weight.
if (TargetGraphVersionTransformations.isVersionRoot(node)) {
RootAction oldAction = rootActions.get(target);
if (oldAction != null) {
oldActions.add(oldAction);
} else {
RootAction newAction = new RootAction(getNode(target));
oldAction = rootActions.putIfAbsent(target, newAction);
if (oldAction == null) {
newActions.add(newAction);
} else {
oldActions.add(oldAction);
}
}
} else {
nonRootNodes.add(node);
}
}
// Kick off all new rootActions in parallel.
invokeAll(newActions);
// For non-root nodes, just process them in-place, as they are inexpensive.
for (TargetNode<?, ?> node : nonRootNodes) {
processNode(node);
}
// Wait for any existing rootActions to finish.
for (RootAction action : oldActions) {
action.join();
}
// Now that everything is ready, return all the results.
return StreamSupport.stream(targets.spliterator(), false)
.map(index::get)
.collect(MoreCollectors.toImmutableList());
}
    /**
     * Blocks until this action completes, unwrapping the failure types that the
     * fork-join framework hides inside {@link ExecutionException}.
     *
     * @return always {@code null}; declared as {@link Void} to match {@code get()}
     * @throws VersionException if version resolution failed inside the pool
     * @throws InterruptedException if the waiting thread was interrupted
     */
    public Void getChecked() throws VersionException, InterruptedException {
      try {
        return get();
      } catch (ExecutionException e) {
        // Rethrow the original failure when it is a known type; anything else
        // indicates a programming error, so surface it as IllegalStateException.
        Throwable rootCause = Throwables.getRootCause(e);
        Throwables.throwIfInstanceOf(rootCause, VersionException.class);
        Throwables.throwIfInstanceOf(rootCause, RuntimeException.class);
        throw new IllegalStateException(
            String.format("Unexpected exception: %s: %s", e.getClass(), e.getMessage()),
            e);
      }
    }
@SuppressWarnings("unchecked")
private TargetNode<?, ?> processVersionSubGraphNode(
TargetNode<?, ?> node,
ImmutableMap<BuildTarget, Version> selectedVersions,
TargetNodeTranslator targetTranslator)
throws VersionException {
Optional<BuildTarget> newTarget =
targetTranslator.translateBuildTarget(node.getBuildTarget());
TargetNode<?, ?> processed = index.get(newTarget.orElse(node.getBuildTarget()));
if (processed != null) {
return processed;
}
// Create the new target node, with the new target and deps.
TargetNode<?, ?> newNode =
((Optional<TargetNode<?, ?>>) (Optional<?>) targetTranslator.translateNode(node))
.orElse(node);
LOG.verbose(
"%s: new node declared deps %s, extra deps %s, arg %s",
newNode.getBuildTarget(),
newNode.getDeclaredDeps(),
newNode.getExtraDeps(),
newNode.getConstructorArg());
// Add the new node, and it's dep edges, to the new graph.
TargetNode<?, ?> oldNode = indexPutIfAbsent(newNode);
if (oldNode != null) {
newNode = oldNode;
} else {
// Insert the node into the graph, indexing it by a base target containing only the version
// flavor, if one exists.
targetGraphBuilder.addNode(
node.getBuildTarget().withFlavors(
Sets.difference(
newNode.getBuildTarget().getFlavors(),
node.getBuildTarget().getFlavors())),
newNode);
for (BuildTarget depTarget :
FluentIterable.from(node.getParseDeps())
.filter(Predicates.or(isVersionPropagator, isVersioned))) {
targetGraphBuilder.addEdge(
newNode,
processVersionSubGraphNode(
resolveVersions(getNode(depTarget), selectedVersions),
selectedVersions,
targetTranslator));
}
for (TargetNode<?, ?> dep :
process(
FluentIterable.from(node.getParseDeps())
.filter(Predicates.not(Predicates.or(isVersionPropagator, isVersioned))))) {
targetGraphBuilder.addEdge(newNode, dep);
}
}
return newNode;
}
// Transform a root node and its version sub-graph.
private TargetNode<?, ?> processRoot(TargetNode<?, ?> root) throws VersionException {
// If we've already processed this root, exit now.
final TargetNode<?, ?> processedRoot = index.get(root.getBuildTarget());
if (processedRoot != null) {
return processedRoot;
}
// For stats collection.
roots.incrementAndGet();
VersionInfo versionInfo = getVersionInfo(root);
// Select the versions to use for this sub-graph.
final ImmutableMap<BuildTarget, Version> selectedVersions =
versionSelector.resolve(
root.getBuildTarget(),
versionInfo.getVersionDomain());
// Build a target translator object to translate build targets.
ImmutableList<TargetTranslator<?>> translators =
ImmutableList.of(
new QueryTargetTranslator());
TargetNodeTranslator targetTranslator =
new TargetNodeTranslator(translators) {
private final LoadingCache<BuildTarget, Optional<BuildTarget>> cache =
CacheBuilder.newBuilder()
.build(
CacheLoader.from(
target -> {
// If we're handling the root node, there's nothing to translate.
if (root.getBuildTarget().equals(target)) {
return Optional.empty();
}
// If this target isn't in the target graph, which can be the case
// of build targets in the `tests` parameter, don't do any
// translation.
Optional<TargetNode<?, ?>> node = getNodeOptional(target);
if (!node.isPresent()) {
return Optional.empty();
}
return getTranslateBuildTarget(getNode(target), selectedVersions);
}));
@Override
public Optional<BuildTarget> translateBuildTarget(BuildTarget target) {
return cache.getUnchecked(target);
}
@Override
public Optional<ImmutableMap<BuildTarget, Version>> getSelectedVersions(
BuildTarget target) {
ImmutableMap.Builder<BuildTarget, Version> builder = ImmutableMap.builder();
for (BuildTarget dep : getVersionInfo(getNode(target)).getVersionDomain().keySet()) {
builder.put(dep, selectedVersions.get(dep));
}
return Optional.of(builder.build());
}
};
return processVersionSubGraphNode(root, selectedVersions, targetTranslator);
}
@Override
protected void compute() {
try {
processRoot(node);
} catch (VersionException e) {
completeExceptionally(e);
}
}
public TargetNode<?, ?> getRoot() {
return node;
}
}
}
|
apache-2.0
|
rajeevanv89/developer-studio
|
esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/policies/MediatorFlowMediatorFlowCompartment14ItemSemanticEditPolicy.java
|
15236
|
package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies;
import org.eclipse.gef.commands.Command;
import org.eclipse.gmf.runtime.emf.type.core.requests.CreateElementRequest;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.APIResourceEndpointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.AddressEndPointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.AddressingEndpointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.AggregateMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.BAMMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.BeanMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.BuilderMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CacheMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CallMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CallTemplateMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CalloutMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ClassMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CloneMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CloudConnectorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CloudConnectorOperationCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.CommandMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ConditionalRouterMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DBLookupMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DBReportMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DataMapperMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DefaultEndPointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.DropMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EJBMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EnqueueMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EnrichMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EntitlementMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EventMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.FailoverEndPointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.FastXSLTMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.FaultMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.FilterMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.HTTPEndpointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.HeaderMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.IterateMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.LoadBalanceEndPointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.LogMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.LoopBackMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.NamedEndpointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.OAuthMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.PayloadFactoryMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.PropertyMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RMSequenceMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RecipientListEndPointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RespondMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RouterMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.RuleMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ScriptMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SendMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SequenceCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SmooksMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SpringMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.StoreMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.SwitchMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.TemplateEndpointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ThrottleMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.TransactionMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.URLRewriteMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.ValidateMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.WSDLEndPointCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.XQueryMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.XSLTMediatorCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
/**
 * GMF-generated semantic edit policy for the 14th mediator-flow compartment
 * of the ESB diagram. Its sole job is to translate a palette/creation
 * request for a given element type into the matching create command.
 *
 * NOTE(review): this class is tool-generated (@generated); regenerate via
 * the GMF tooling rather than editing the dispatch table by hand.
 *
 * @generated
 */
public class MediatorFlowMediatorFlowCompartment14ItemSemanticEditPolicy extends
EsbBaseItemSemanticEditPolicy {
/**
 * Installs this policy for the MediatorFlow_3627 compartment element type.
 *
 * @generated
 */
public MediatorFlowMediatorFlowCompartment14ItemSemanticEditPolicy() {
super(EsbElementTypes.MediatorFlow_3627);
}
/**
 * Returns a GEF-wrapped create command for the requested element type, or
 * delegates to the superclass when no type listed here matches.
 *
 * @generated
 */
protected Command getCreateCommand(CreateElementRequest req) {
// Mediator element types.
if (EsbElementTypes.DropMediator_3491 == req.getElementType()) {
return getGEFWrapper(new DropMediatorCreateCommand(req));
}
if (EsbElementTypes.PropertyMediator_3492 == req.getElementType()) {
return getGEFWrapper(new PropertyMediatorCreateCommand(req));
}
if (EsbElementTypes.ThrottleMediator_3493 == req.getElementType()) {
return getGEFWrapper(new ThrottleMediatorCreateCommand(req));
}
if (EsbElementTypes.FilterMediator_3494 == req.getElementType()) {
return getGEFWrapper(new FilterMediatorCreateCommand(req));
}
if (EsbElementTypes.LogMediator_3495 == req.getElementType()) {
return getGEFWrapper(new LogMediatorCreateCommand(req));
}
if (EsbElementTypes.EnrichMediator_3496 == req.getElementType()) {
return getGEFWrapper(new EnrichMediatorCreateCommand(req));
}
if (EsbElementTypes.XSLTMediator_3497 == req.getElementType()) {
return getGEFWrapper(new XSLTMediatorCreateCommand(req));
}
if (EsbElementTypes.SwitchMediator_3498 == req.getElementType()) {
return getGEFWrapper(new SwitchMediatorCreateCommand(req));
}
if (EsbElementTypes.Sequence_3503 == req.getElementType()) {
return getGEFWrapper(new SequenceCreateCommand(req));
}
if (EsbElementTypes.EventMediator_3504 == req.getElementType()) {
return getGEFWrapper(new EventMediatorCreateCommand(req));
}
if (EsbElementTypes.EntitlementMediator_3505 == req.getElementType()) {
return getGEFWrapper(new EntitlementMediatorCreateCommand(req));
}
if (EsbElementTypes.ClassMediator_3506 == req.getElementType()) {
return getGEFWrapper(new ClassMediatorCreateCommand(req));
}
if (EsbElementTypes.SpringMediator_3507 == req.getElementType()) {
return getGEFWrapper(new SpringMediatorCreateCommand(req));
}
if (EsbElementTypes.ScriptMediator_3508 == req.getElementType()) {
return getGEFWrapper(new ScriptMediatorCreateCommand(req));
}
if (EsbElementTypes.FaultMediator_3509 == req.getElementType()) {
return getGEFWrapper(new FaultMediatorCreateCommand(req));
}
if (EsbElementTypes.XQueryMediator_3510 == req.getElementType()) {
return getGEFWrapper(new XQueryMediatorCreateCommand(req));
}
if (EsbElementTypes.CommandMediator_3511 == req.getElementType()) {
return getGEFWrapper(new CommandMediatorCreateCommand(req));
}
if (EsbElementTypes.DBLookupMediator_3512 == req.getElementType()) {
return getGEFWrapper(new DBLookupMediatorCreateCommand(req));
}
if (EsbElementTypes.DBReportMediator_3513 == req.getElementType()) {
return getGEFWrapper(new DBReportMediatorCreateCommand(req));
}
if (EsbElementTypes.SmooksMediator_3514 == req.getElementType()) {
return getGEFWrapper(new SmooksMediatorCreateCommand(req));
}
if (EsbElementTypes.SendMediator_3515 == req.getElementType()) {
return getGEFWrapper(new SendMediatorCreateCommand(req));
}
if (EsbElementTypes.HeaderMediator_3516 == req.getElementType()) {
return getGEFWrapper(new HeaderMediatorCreateCommand(req));
}
if (EsbElementTypes.CloneMediator_3517 == req.getElementType()) {
return getGEFWrapper(new CloneMediatorCreateCommand(req));
}
if (EsbElementTypes.CacheMediator_3518 == req.getElementType()) {
return getGEFWrapper(new CacheMediatorCreateCommand(req));
}
if (EsbElementTypes.IterateMediator_3519 == req.getElementType()) {
return getGEFWrapper(new IterateMediatorCreateCommand(req));
}
if (EsbElementTypes.CalloutMediator_3520 == req.getElementType()) {
return getGEFWrapper(new CalloutMediatorCreateCommand(req));
}
if (EsbElementTypes.TransactionMediator_3521 == req.getElementType()) {
return getGEFWrapper(new TransactionMediatorCreateCommand(req));
}
if (EsbElementTypes.RMSequenceMediator_3522 == req.getElementType()) {
return getGEFWrapper(new RMSequenceMediatorCreateCommand(req));
}
if (EsbElementTypes.RuleMediator_3523 == req.getElementType()) {
return getGEFWrapper(new RuleMediatorCreateCommand(req));
}
if (EsbElementTypes.OAuthMediator_3524 == req.getElementType()) {
return getGEFWrapper(new OAuthMediatorCreateCommand(req));
}
if (EsbElementTypes.AggregateMediator_3525 == req.getElementType()) {
return getGEFWrapper(new AggregateMediatorCreateCommand(req));
}
if (EsbElementTypes.StoreMediator_3588 == req.getElementType()) {
return getGEFWrapper(new StoreMediatorCreateCommand(req));
}
if (EsbElementTypes.BuilderMediator_3591 == req.getElementType()) {
return getGEFWrapper(new BuilderMediatorCreateCommand(req));
}
if (EsbElementTypes.CallTemplateMediator_3594 == req.getElementType()) {
return getGEFWrapper(new CallTemplateMediatorCreateCommand(req));
}
if (EsbElementTypes.PayloadFactoryMediator_3597 == req.getElementType()) {
return getGEFWrapper(new PayloadFactoryMediatorCreateCommand(req));
}
if (EsbElementTypes.EnqueueMediator_3600 == req.getElementType()) {
return getGEFWrapper(new EnqueueMediatorCreateCommand(req));
}
if (EsbElementTypes.URLRewriteMediator_3620 == req.getElementType()) {
return getGEFWrapper(new URLRewriteMediatorCreateCommand(req));
}
if (EsbElementTypes.ValidateMediator_3623 == req.getElementType()) {
return getGEFWrapper(new ValidateMediatorCreateCommand(req));
}
if (EsbElementTypes.RouterMediator_3628 == req.getElementType()) {
return getGEFWrapper(new RouterMediatorCreateCommand(req));
}
if (EsbElementTypes.ConditionalRouterMediator_3635 == req
.getElementType()) {
return getGEFWrapper(new ConditionalRouterMediatorCreateCommand(req));
}
if (EsbElementTypes.BAMMediator_3680 == req.getElementType()) {
return getGEFWrapper(new BAMMediatorCreateCommand(req));
}
if (EsbElementTypes.BeanMediator_3683 == req.getElementType()) {
return getGEFWrapper(new BeanMediatorCreateCommand(req));
}
if (EsbElementTypes.EJBMediator_3686 == req.getElementType()) {
return getGEFWrapper(new EJBMediatorCreateCommand(req));
}
// Endpoint element types.
if (EsbElementTypes.DefaultEndPoint_3609 == req.getElementType()) {
return getGEFWrapper(new DefaultEndPointCreateCommand(req));
}
if (EsbElementTypes.AddressEndPoint_3610 == req.getElementType()) {
return getGEFWrapper(new AddressEndPointCreateCommand(req));
}
if (EsbElementTypes.FailoverEndPoint_3611 == req.getElementType()) {
return getGEFWrapper(new FailoverEndPointCreateCommand(req));
}
if (EsbElementTypes.RecipientListEndPoint_3692 == req.getElementType()) {
return getGEFWrapper(new RecipientListEndPointCreateCommand(req));
}
if (EsbElementTypes.WSDLEndPoint_3612 == req.getElementType()) {
return getGEFWrapper(new WSDLEndPointCreateCommand(req));
}
if (EsbElementTypes.NamedEndpoint_3660 == req.getElementType()) {
return getGEFWrapper(new NamedEndpointCreateCommand(req));
}
if (EsbElementTypes.LoadBalanceEndPoint_3613 == req.getElementType()) {
return getGEFWrapper(new LoadBalanceEndPointCreateCommand(req));
}
if (EsbElementTypes.APIResourceEndpoint_3674 == req.getElementType()) {
return getGEFWrapper(new APIResourceEndpointCreateCommand(req));
}
if (EsbElementTypes.AddressingEndpoint_3689 == req.getElementType()) {
return getGEFWrapper(new AddressingEndpointCreateCommand(req));
}
if (EsbElementTypes.HTTPEndpoint_3709 == req.getElementType()) {
return getGEFWrapper(new HTTPEndpointCreateCommand(req));
}
if (EsbElementTypes.TemplateEndpoint_3716 == req.getElementType()) {
return getGEFWrapper(new TemplateEndpointCreateCommand(req));
}
// Cloud connectors and remaining mediator types.
if (EsbElementTypes.CloudConnector_3719 == req.getElementType()) {
return getGEFWrapper(new CloudConnectorCreateCommand(req));
}
if (EsbElementTypes.CloudConnectorOperation_3722 == req
.getElementType()) {
return getGEFWrapper(new CloudConnectorOperationCreateCommand(req));
}
if (EsbElementTypes.LoopBackMediator_3736 == req.getElementType()) {
return getGEFWrapper(new LoopBackMediatorCreateCommand(req));
}
if (EsbElementTypes.RespondMediator_3739 == req.getElementType()) {
return getGEFWrapper(new RespondMediatorCreateCommand(req));
}
if (EsbElementTypes.CallMediator_3742 == req.getElementType()) {
return getGEFWrapper(new CallMediatorCreateCommand(req));
}
if (EsbElementTypes.DataMapperMediator_3761 == req.getElementType()) {
return getGEFWrapper(new DataMapperMediatorCreateCommand(req));
}
if (EsbElementTypes.FastXSLTMediator_3764 == req.getElementType()) {
return getGEFWrapper(new FastXSLTMediatorCreateCommand(req));
}
// No match: let the base policy decide.
return super.getCreateCommand(req);
}
}
|
apache-2.0
|
lijiangdong/lijiangdong.github.io
|
tags/测试/index.html
|
25006
|
<!doctype html>
<html class="theme-next mist use-motion" lang="zh-Hans">
<head>
<meta charset="UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=edge" />
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1"/>
<meta http-equiv="Cache-Control" content="no-transform" />
<meta http-equiv="Cache-Control" content="no-siteapp" />
<link href="/lib/fancybox/source/jquery.fancybox.css?v=2.1.5" rel="stylesheet" type="text/css" />
<link href="//fonts.googleapis.com/css?family=Lato:300,300italic,400,400italic,700,700italic&subset=latin,latin-ext" rel="stylesheet" type="text/css">
<link href="/lib/font-awesome/css/font-awesome.min.css?v=4.6.2" rel="stylesheet" type="text/css" />
<link href="/css/main.css?v=5.1.1" rel="stylesheet" type="text/css" />
<meta name="keywords" content="Android, Tensorflow" />
<link rel="shortcut icon" type="image/x-icon" href="/favicon.ico?v=5.1.1" />
<meta name="description" content="用代码书写这个世界">
<meta property="og:type" content="website">
<meta property="og:title" content="无嘴小呆子">
<meta property="og:url" content="http://lijiangdong.github.io/tags/测试/index.html">
<meta property="og:site_name" content="无嘴小呆子">
<meta property="og:description" content="用代码书写这个世界">
<meta property="og:locale" content="zh-Hans">
<meta name="twitter:card" content="summary">
<meta name="twitter:title" content="无嘴小呆子">
<meta name="twitter:description" content="用代码书写这个世界">
<script type="text/javascript" id="hexo.configurations">
var NexT = window.NexT || {};
var CONFIG = {
root: '/',
scheme: 'Mist',
sidebar: {"position":"left","display":"always","offset":12,"offset_float":0,"b2t":false,"scrollpercent":false},
fancybox: true,
motion: true,
duoshuo: {
userId: '0',
author: '博主'
},
algolia: {
applicationID: '',
apiKey: '',
indexName: '',
hits: {"per_page":10},
labels: {"input_placeholder":"Search for Posts","hits_empty":"We didn't find any results for the search: ${query}","hits_stats":"${hits} results found in ${time} ms"}
}
};
</script>
<link rel="canonical" href="http://lijiangdong.github.io/tags/测试/"/>
<title>标签: 测试 | 无嘴小呆子</title>
</head>
<body itemscope itemtype="http://schema.org/WebPage" lang="zh-Hans">
<script type="text/javascript">
var _hmt = _hmt || [];
(function() {
var hm = document.createElement("script");
hm.src = "https://hm.baidu.com/hm.js?75a5108fa849298412266b0c02b04616";
var s = document.getElementsByTagName("script")[0];
s.parentNode.insertBefore(hm, s);
})();
</script>
<div class="container sidebar-position-left ">
<div class="headband"></div>
<header id="header" class="header" itemscope itemtype="http://schema.org/WPHeader">
<div class="header-inner"><div class="site-brand-wrapper">
<div class="site-meta ">
<div class="custom-logo-site-title">
<a href="/" class="brand" rel="start">
<span class="logo-line-before"><i></i></span>
<span class="site-title">无嘴小呆子</span>
<span class="logo-line-after"><i></i></span>
</a>
</div>
<p class="site-subtitle"></p>
</div>
<div class="site-nav-toggle">
<button>
<span class="btn-bar"></span>
<span class="btn-bar"></span>
<span class="btn-bar"></span>
</button>
</div>
</div>
<nav class="site-nav">
<ul id="menu" class="menu">
<li class="menu-item menu-item-home">
<a href="/" rel="section">
<i class="menu-item-icon fa fa-fw fa-home"></i> <br />
首页
</a>
</li>
<li class="menu-item menu-item-categories">
<a href="/categories" rel="section">
<i class="menu-item-icon fa fa-fw fa-th"></i> <br />
分类
</a>
</li>
<li class="menu-item menu-item-about">
<a href="/about" rel="section">
<i class="menu-item-icon fa fa-fw fa-user"></i> <br />
关于
</a>
</li>
<li class="menu-item menu-item-archives">
<a href="/archives" rel="section">
<i class="menu-item-icon fa fa-fw fa-archive"></i> <br />
归档
</a>
</li>
<li class="menu-item menu-item-tags">
<a href="/tags" rel="section">
<i class="menu-item-icon fa fa-fw fa-tags"></i> <br />
标签
</a>
</li>
<li class="menu-item menu-item-search">
<a href="javascript:;" class="popup-trigger">
<i class="menu-item-icon fa fa-search fa-fw"></i> <br />
搜索
</a>
</li>
</ul>
<div class="site-search">
<div class="popup search-popup local-search-popup">
<div class="local-search-header clearfix">
<span class="search-icon">
<i class="fa fa-search"></i>
</span>
<span class="popup-btn-close">
<i class="fa fa-times-circle"></i>
</span>
<div class="local-search-input-wrapper">
<input autocapitalize="off" autocomplete="off" autocorrect="off"
placeholder="搜索..." spellcheck="false"
type="text" id="local-search-input">
</div>
</div>
<div id="local-search-result"></div>
</div>
</div>
</nav>
</div>
</header>
<main id="main" class="main">
<div class="main-inner">
<div class="content-wrap">
<div id="content" class="content">
<div id="posts" class="posts-collapse">
<div class="collection-title">
<h2 >
测试
<small>标签</small>
</h2>
</div>
<article class="post post-type-normal" itemscope itemtype="http://schema.org/Article">
<header class="post-header">
<h1 class="post-title">
<a class="post-title-link" href="/2017/01/15/20170115_Monkey/" itemprop="url">
<span itemprop="name">Android稳定性测试工具Monkey的使用</span>
</a>
</h1>
<div class="post-meta">
<time class="post-time" itemprop="dateCreated"
datetime="2017-01-15T13:07:00+08:00"
content="2017-01-15" >
01-15
</time>
</div>
</header>
</article>
</div>
</div>
</div>
<div class="sidebar-toggle">
<div class="sidebar-toggle-line-wrap">
<span class="sidebar-toggle-line sidebar-toggle-line-first"></span>
<span class="sidebar-toggle-line sidebar-toggle-line-middle"></span>
<span class="sidebar-toggle-line sidebar-toggle-line-last"></span>
</div>
</div>
<aside id="sidebar" class="sidebar">
<div class="sidebar-inner">
<section class="site-overview sidebar-panel sidebar-panel-active">
<div class="site-author motion-element" itemprop="author" itemscope itemtype="http://schema.org/Person">
<img class="site-author-image" itemprop="image"
src="/images/avatar.png"
alt="李江东" />
<p class="site-author-name" itemprop="name">李江东</p>
<p class="site-description motion-element" itemprop="description">用代码书写这个世界</p>
</div>
<nav class="site-state motion-element">
<div class="site-state-item site-state-posts">
<a href="/archives">
<span class="site-state-item-count">9</span>
<span class="site-state-item-name">日志</span>
</a>
</div>
<div class="site-state-item site-state-categories">
<a href="/categories/index.html">
<span class="site-state-item-count">2</span>
<span class="site-state-item-name">分类</span>
</a>
</div>
<div class="site-state-item site-state-tags">
<a href="/tags/index.html">
<span class="site-state-item-count">7</span>
<span class="site-state-item-name">标签</span>
</a>
</div>
</nav>
<div class="links-of-author motion-element">
<span class="links-of-author-item">
<a href="https://github.com/lijiangdong" target="_blank" title="GitHub">
<i class="fa fa-fw fa-github"></i>
GitHub
</a>
</span>
<span class="links-of-author-item">
<a href="http://weibo.com/3257022505" target="_blank" title="Weibo">
<i class="fa fa-fw fa-weibo"></i>
Weibo
</a>
</span>
</div>
</section>
</div>
</aside>
</div>
</main>
<footer id="footer" class="footer">
<div class="footer-inner">
<div class="copyright" >
©
<span itemprop="copyrightYear">2019</span>
<span class="with-love">
<i class="fa fa-heart"></i>
</span>
<span class="author" itemprop="copyrightHolder">李江东</span>
</div>
<div class="powered-by">
由 <a class="theme-link" href="https://hexo.io">Hexo</a> 强力驱动
</div>
<div class="theme-info">
主题 -
<a class="theme-link" href="https://github.com/iissnan/hexo-theme-next">
NexT.Mist
</a>
</div>
</div>
</footer>
<div class="back-to-top">
<i class="fa fa-arrow-up"></i>
</div>
</div>
<script type="text/javascript">
if (Object.prototype.toString.call(window.Promise) !== '[object Function]') {
window.Promise = null;
}
</script>
<script type="text/javascript" src="/lib/jquery/index.js?v=2.1.3"></script>
<script type="text/javascript" src="/lib/fastclick/lib/fastclick.min.js?v=1.0.6"></script>
<script type="text/javascript" src="/lib/jquery_lazyload/jquery.lazyload.js?v=1.9.7"></script>
<script type="text/javascript" src="/lib/velocity/velocity.min.js?v=1.2.1"></script>
<script type="text/javascript" src="/lib/velocity/velocity.ui.min.js?v=1.2.1"></script>
<script type="text/javascript" src="/lib/fancybox/source/jquery.fancybox.pack.js?v=2.1.5"></script>
<script type="text/javascript" src="/js/src/utils.js?v=5.1.1"></script>
<script type="text/javascript" src="/js/src/motion.js?v=5.1.1"></script>
<script type="text/javascript" src="/js/src/bootstrap.js?v=5.1.1"></script>
<script type="text/javascript">
// Popup Window;
// Client-side local search for the NexT theme: lazily fetches search.xml on
// first use, indexes post titles/contents, and renders highlighted matches
// in a popup as the user types.
var isfetched = false;
// Search DB path;
var search_path = "search.xml";
if (search_path.length === 0) {
search_path = "search.xml";
}
var path = "/" + search_path;
// monitor main search box;
// Reset and hide the search popup, clear the input/results and page overlay.
var onPopupClose = function (e) {
$('.popup').hide();
$('#local-search-input').val('');
$('.search-result-list').remove();
$(".local-search-pop-overlay").remove();
$('body').css('overflow', '');
}
// Show the popup with a full-page overlay and focus the search input.
function proceedsearch() {
$("body")
.append('<div class="search-popup-overlay local-search-pop-overlay"></div>')
.css('overflow', 'hidden');
$('.search-popup-overlay').click(onPopupClose);
$('.popup').toggle();
$('#local-search-input').focus();
}
// search function;
// Fetch the search XML once, build an in-memory list of {title, content, url}
// entries, then filter/highlight them live on every input event.
var searchFunc = function(path, search_id, content_id) {
'use strict';
$.ajax({
url: path,
dataType: "xml",
async: true,
success: function( xmlResponse ) {
// get the contents from search data
isfetched = true;
$('.popup').detach().appendTo('.header-inner');
var datas = $( "entry", xmlResponse ).map(function() {
return {
title: $( "title", this ).text(),
content: $("content",this).text(),
url: $( "url" , this).text()
};
}).get();
var $input = document.getElementById(search_id);
var $resultContent = document.getElementById(content_id);
$input.addEventListener('input', function(){
// Keywords are whitespace/hyphen-separated, matched case-insensitively.
var keywords = this.value.trim().toLowerCase().split(/[\s\-]+/);
var resultItems = [];
if (this.value.trim().length > 0) {
// perform local searching
datas.forEach(function(data) {
var isMatch = false;
var hitCountInArticle = 0;
var title = data.title.trim();
var titleInLowerCase = title.toLowerCase();
var content = data.content.trim().replace(/<[^>]+>/g,"");
var contentInLowerCase = content.toLowerCase();
var articleUrl = decodeURIComponent(data.url);
var indexOfTitle = [];
var indexOfContent = [];
// only match articles with not empty titles
if(title != '') {
keywords.forEach(function(keyword, i) {
// Returns every {position, word} occurrence of `word` in `text`.
function getIndexByWord(word, text, caseSensitive) {
var wordLen = word.length;
if (wordLen === 0) {
return [];
}
var startPosition = 0, position = [], index = [];
if (!caseSensitive) {
text = text.toLowerCase();
word = word.toLowerCase();
}
while ((position = text.indexOf(word, startPosition)) > -1) {
index.push({position: position, word: word});
startPosition = position + wordLen;
}
return index;
}
indexOfTitle = indexOfTitle.concat(getIndexByWord(keyword, titleInLowerCase, false));
indexOfContent = indexOfContent.concat(getIndexByWord(keyword, contentInLowerCase, false));
});
if (indexOfTitle.length > 0 || indexOfContent.length > 0) {
isMatch = true;
hitCountInArticle = indexOfTitle.length + indexOfContent.length;
}
}
// show search results
if (isMatch) {
var resultItem = '';
// Wraps each keyword hit inside text[start..end) in a <b> tag; consumes
// `index` (sorted by descending position) from the back as it goes.
function highlightKeyword(text, start, end, index) {
var item = index[index.length - 1];
var position = item.position;
var word = item.word;
var matchText = text.substring(start, end);
var matchResult = [];
var prevEnd = 0;
while (position + word.length <= end && index.length != 0) {
// highlight keyword
var wordBegin = position - start;
var wordEnd = position - start + word.length;
matchResult.push(matchText.substring(prevEnd, wordBegin));
matchResult.push("<b class=\"search-keyword\">" + matchText.substring(wordBegin, wordEnd) + "</b>");
// move to next position of hit
index.pop();
prevEnd = wordEnd;
while (index.length != 0) {
item = index[index.length - 1];
position = item.position;
word = item.word;
if (prevEnd > position - start) {
index.pop();
} else {
break;
}
}
}
matchResult.push(matchText.substring(prevEnd));
return matchResult.join('');
}
// sort index by position of keyword
indexOfTitle.sort(function (itemLeft, itemRight) {
return itemRight.position - itemLeft.position;
});
indexOfContent.sort(function (itemLeft, itemRight) {
return itemRight.position - itemLeft.position;
});
// highlight title
if (indexOfTitle.length != 0) {
resultItem += "<li><a href='" + articleUrl + "' class='search-result-title'>" + highlightKeyword(title, 0, title.length, indexOfTitle) + "</a>";
} else {
resultItem += "<li><a href='" + articleUrl + "' class='search-result-title'>" + title + "</a>";
}
// highlight content
var resultUpperBound = parseInt(1);
var withoutUpperBound = false;
if (resultUpperBound === -1) {
withoutUpperBound = true;
}
var currentResultNum = 0;
while (indexOfContent.length != 0 && (withoutUpperBound || (currentResultNum < resultUpperBound))) {
var item = indexOfContent[indexOfContent.length - 1];
var position = item.position;
var word = item.word;
// cut out 100 characters
var start = position - 20;
var end = position + 80;
if(start < 0){
start = 0;
}
if (end < position + word.length) {
end = position + word.length;
}
if(end > content.length){
end = content.length;
}
resultItem += "<a href='" + articleUrl + "'>" +
"<p class=\"search-result\">" + highlightKeyword(content, start, end, indexOfContent) +
"...</p>" + "</a>";
currentResultNum++;
}
resultItem += "</li>";
resultItems.push({item: resultItem, hitCount: hitCountInArticle, id: resultItems.length});
}
})
};
if (keywords.length === 1 && keywords[0] === "") {
$resultContent.innerHTML = '<div id="no-result"><i class="fa fa-search fa-5x" /></div>'
} else if (resultItems.length === 0) {
$resultContent.innerHTML = '<div id="no-result"><i class="fa fa-frown-o fa-5x" /></div>'
} else {
// Most hits first; ties broken by discovery order.
resultItems.sort(function (resultLeft, resultRight) {
if (resultLeft.hitCount != resultRight.hitCount) {
return resultRight.hitCount - resultLeft.hitCount;
} else {
return resultLeft.id - resultRight.id;
}
});
var searchResultList = '<ul class=\"search-result-list\">';
resultItems.forEach(function (result, i) {
searchResultList += result.item;
})
searchResultList += "</ul>";
$resultContent.innerHTML = searchResultList;
}
});
proceedsearch();
}
});}
// handle and trigger popup window;
// First click fetches and indexes the search data; later clicks just reopen.
$('.popup-trigger').click(function(e) {
e.stopPropagation();
if (isfetched === false) {
searchFunc(path, 'local-search-input', 'local-search-result');
} else {
proceedsearch();
};
});
$('.popup-btn-close').click(onPopupClose);
$('.popup').click(function(e){
e.stopPropagation();
});
</script>
<script src="https://cdn1.lncld.net/static/js/av-core-mini-0.6.1.js"></script>
<script>AV.initialize("WpQBIOFFVk2j3VShGdWrNzdx-gzGzoHsz", "N7O0UNa3oH14UGye19dyCBcm");</script>
<script>
// Batch-fetch the stored visit counters for every ".leancloud_visitors"
// element on the page (keyed by element id == post url) and render each
// count; entries with no stored record are displayed as 0.
// NOTE(review): relies on the LeanCloud JS SDK (AV) and jQuery being loaded.
function showTime(Counter) {
var query = new AV.Query(Counter);
var entries = [];
var $visitors = $(".leancloud_visitors");
$visitors.each(function () {
entries.push( $(this).attr("id").trim() );
});
query.containedIn('url', entries);
query.find()
.done(function (results) {
var COUNT_CONTAINER_REF = '.leancloud-visitors-count';
if (results.length === 0) {
$visitors.find(COUNT_CONTAINER_REF).text(0);
return;
}
// Fill in the counts that exist in storage.
for (var i = 0; i < results.length; i++) {
var item = results[i];
var url = item.get('url');
var time = item.get('time');
var element = document.getElementById(url);
$(element).find(COUNT_CONTAINER_REF).text(time);
}
// Any entry still blank had no stored record: show 0.
for(var i = 0; i < entries.length; i++) {
var url = entries[i];
var element = document.getElementById(url);
var countSpan = $(element).find(COUNT_CONTAINER_REF);
if( countSpan.text() == '') {
countSpan.text(0);
}
}
})
.fail(function (object, error) {
console.log("Error: " + error.code + " " + error.message);
});
}
// Increment (or create on first visit) the LeanCloud counter for the single
// ".leancloud_visitors" element on a post page, then render the new count.
// NOTE(review): assumes exactly one such element exists — verify caller.
function addCount(Counter) {
var $visitors = $(".leancloud_visitors");
var url = $visitors.attr('id').trim();
var title = $visitors.attr('data-flag-title').trim();
var query = new AV.Query(Counter);
query.equalTo("url", url);
query.find({
success: function(results) {
if (results.length > 0) {
// Existing record: atomically increment its "time" field.
var counter = results[0];
counter.fetchWhenSave(true);
counter.increment("time");
counter.save(null, {
success: function(counter) {
var $element = $(document.getElementById(url));
$element.find('.leancloud-visitors-count').text(counter.get('time'));
},
error: function(counter, error) {
console.log('Failed to save Visitor num, with error message: ' + error.message);
}
});
} else {
// First visit: create a publicly readable/writable record at 1.
var newcounter = new Counter();
/* Set ACL */
var acl = new AV.ACL();
acl.setPublicReadAccess(true);
acl.setPublicWriteAccess(true);
newcounter.setACL(acl);
/* End Set ACL */
newcounter.set("title", title);
newcounter.set("url", url);
newcounter.set("time", 1);
newcounter.save(null, {
success: function(newcounter) {
var $element = $(document.getElementById(url));
$element.find('.leancloud-visitors-count').text(newcounter.get('time'));
},
error: function(newcounter, error) {
console.log('Failed to create');
}
});
}
},
error: function(error) {
console.log('Error:' + error.code + " " + error.message);
}
});
}
$(function() {
var Counter = AV.Object.extend("Counter");
if ($('.leancloud_visitors').length == 1) {
addCount(Counter);
} else if ($('.post-title-link').length > 1) {
showTime(Counter);
}
});
</script>
<script type="text/x-mathjax-config">
MathJax.Hub.Config({
tex2jax: {
inlineMath: [ ['$','$'], ["\\(","\\)"] ],
processEscapes: true,
skipTags: ['script', 'noscript', 'style', 'textarea', 'pre', 'code']
}
});
</script>
<script type="text/x-mathjax-config">
MathJax.Hub.Queue(function() {
var all = MathJax.Hub.getAllJax(), i;
for (i=0; i < all.length; i += 1) {
all[i].SourceElement().parentNode.className += ' has-jax';
}
});
</script>
<script type="text/javascript" src="//cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
</body>
</html>
|
apache-2.0
|
redhat-dotnet-msa/aloha
|
HomeModule.cs
|
375
|
using Nancy;
/// <summary>
/// Nancy module that registers the application's two GET routes:
/// a root greeting and an OS-description probe.
/// </summary>
public class HomeModule : NancyModule
{
    public HomeModule()
    {
        // Root route: fixed greeting string.
        Get("/", _ => "Aloha from .NET, using the NancyFX framework. This is version 2.0 of this program.");

        // OS route: report the runtime's operating-system description.
        Get("/os", _ => System.Runtime.InteropServices.RuntimeInformation.OSDescription);
    }
}
|
apache-2.0
|
oriontribunal/CoffeeMud
|
com/planet_ink/coffee_mud/Abilities/Spells/Spell_CombatPrecognition.java
|
6760
|
package com.planet_ink.coffee_mud.Abilities.Spells;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2002-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Combat Precognition: a beneficial, self-targeting divination spell.
 * While affected, the MOB gets proficiency/dexterity-driven chances to
 * avoid incoming weapon attacks (never two in a row, via {@link #lastTime})
 * and to dodge malicious non-damaging effects (gas, cold, fire, etc.).
 */
public class Spell_CombatPrecognition extends Spell
{
    @Override public String ID() { return "Spell_CombatPrecognition"; }
    private final static String localizedName = CMLib.lang().L("Combat Precognition");
    @Override public String name() { return localizedName; }
    private final static String localizedStaticDisplay = CMLib.lang().L("(Combat Precognition)");
    @Override public String displayText() { return localizedStaticDisplay; }
    @Override public int abstractQuality(){return Ability.QUALITY_BENEFICIAL_SELF;}
    @Override protected int canAffectCode(){return CAN_MOBS;}
    @Override protected int overrideMana(){return 100;}
    // True when the previous weapon attack was avoided; prevents dodging two
    // consecutive attacks.
    boolean lastTime=false;
    @Override public int classificationCode(){ return Ability.ACODE_SPELL|Ability.DOMAIN_DIVINATION;}

    /**
     * Intercepts messages targeting the affected MOB. Returns false (and
     * emits an avoidance message) when an incoming weapon attack or a
     * malicious zero-value effect is dodged; true lets the message proceed.
     */
    @Override
    public boolean okMessage(final Environmental myHost, final CMMsg msg)
    {
        if(!(affected instanceof MOB))
            return true;
        final MOB mob=(MOB)affected;
        if(msg.amITarget(mob)
        &&(mob.location()!=null)
        &&(CMLib.flags().isAliveAwakeMobile(mob,true)))
        {
            if(msg.targetMinor()==CMMsg.TYP_WEAPONATTACK)
            {
                final CMMsg msg2=CMClass.getMsg(mob,msg.source(),null,CMMsg.MSG_QUIETMOVEMENT,L("<S-NAME> avoid(s) the attack by <T-NAME>!"));
                // Dexterity-based proficiency check; the attacker must be
                // engaged with us at melee range, and we can't have dodged
                // the immediately preceding attack.
                if((proficiencyCheck(null,mob.charStats().getStat(CharStats.STAT_DEXTERITY)-60,false))
                &&(!lastTime)
                &&(msg.source().getVictim()==mob)
                &&(msg.source().rangeToTarget()==0)
                &&(mob.location().okMessage(mob,msg2)))
                {
                    lastTime=true;
                    mob.location().send(mob,msg2);
                    helpProficiency(mob, 0);
                    return false;
                }
                lastTime=false;
            }
            else
            // Malicious, zero-value (not-yet-damaging) effects may also be avoided.
            if((msg.value()<=0)
            &&(CMath.bset(msg.targetMajor(),CMMsg.MASK_MALICIOUS))
            &&((mob.fetchAbility(ID())==null)||proficiencyCheck(null,mob.charStats().getStat(CharStats.STAT_DEXTERITY)-50,false)))
            {
                String tool=null;
                if((msg.tool() instanceof Ability))
                    tool=((Ability)msg.tool()).name();
                CMMsg msg2=null;
                switch(msg.targetMinor())
                {
                case CMMsg.TYP_JUSTICE:
                    if((CMath.bset(msg.targetMajor(),CMMsg.MASK_MOVE))
                    &&(tool!=null))
                        msg2=CMClass.getMsg(mob,msg.source(),CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> avoid(s) the @x1 from <T-NAME>.",tool));
                    break;
                case CMMsg.TYP_GAS:
                    msg2=CMClass.getMsg(mob,msg.source(),CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> avoid(s) the @x1 from <T-NAME>.",((tool==null)?"noxious fumes":tool)));
                    break;
                case CMMsg.TYP_COLD:
                    msg2=CMClass.getMsg(mob,msg.source(),CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> avoid(s) the @x1 from <T-NAME>.",((tool==null)?"cold blast":tool)));
                    break;
                case CMMsg.TYP_ELECTRIC:
                    msg2=CMClass.getMsg(mob,msg.source(),CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> avoid(s) the @x1 from <T-NAME>.",((tool==null)?"electrical attack":tool)));
                    break;
                case CMMsg.TYP_FIRE:
                    msg2=CMClass.getMsg(mob,msg.source(),CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> avoid(s) the @x1 from <T-NAME>.",((tool==null)?"blast of heat":tool)));
                    break;
                case CMMsg.TYP_WATER:
                    // Fixed typo: was "weat blast".
                    msg2=CMClass.getMsg(mob,msg.source(),CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> avoid(s) the @x1 from <T-NAME>.",((tool==null)?"wet blast":tool)));
                    break;
                case CMMsg.TYP_ACID:
                    msg2=CMClass.getMsg(mob,msg.source(),CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> avoid(s) the @x1 from <T-NAME>.",((tool==null)?"acid attack":tool)));
                    break;
                case CMMsg.TYP_SONIC:
                    msg2=CMClass.getMsg(mob,msg.source(),CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> avoid(s) the @x1 from <T-NAME>.",((tool==null)?"sonic attack":tool)));
                    break;
                case CMMsg.TYP_LASER:
                    msg2=CMClass.getMsg(mob,msg.source(),CMMsg.MSG_NOISYMOVEMENT,L("<S-NAME> avoid(s) the @x1 from <T-NAME>.",((tool==null)?"laser attack":tool)));
                    break;
                }
                if((msg2!=null)&&(mob.location()!=null)&&(mob.location().okMessage(mob,msg2)))
                {
                    mob.location().send(mob,msg2);
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Removes the effect and notifies the (MOB) host that it has faded.
     */
    @Override
    public void unInvoke()
    {
        // undo the affects of this spell
        if(!(affected instanceof MOB))
            return;
        final MOB mob=(MOB)affected;
        super.unInvoke();
        mob.tell(L("Your combat precognition fades away."));
    }

    /**
     * Casts the spell on the caster (or the given target when auto-invoked).
     * Fails if the target already carries the effect or the proficiency
     * check fizzles.
     */
    @Override
    public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel)
    {
        MOB target=mob;
        if((auto)&&(givenTarget!=null)&&(givenTarget instanceof MOB))
            target=(MOB)givenTarget;
        if(target.fetchEffect(ID())!=null)
        {
            mob.tell(target,null,null,L("<S-NAME> already <S-HAS-HAVE> the sight."));
            return false;
        }
        if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
            return false;
        final boolean success=proficiencyCheck(mob,0,auto);
        if(success)
        {
            invoker=mob;
            final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),L(auto?"<T-NAME> shout(s) combatively!":"^S<S-NAME> shout(s) a combative spell!^?"));
            if(mob.location().okMessage(mob,msg))
            {
                mob.location().send(mob,msg);
                beneficialAffect(mob,target,asLevel,0);
            }
        }
        else
            return beneficialWordsFizzle(mob,target,L("<S-NAME> shout(s) combatively, but nothing more happens."));
        // return whether it worked
        return success;
    }
}
|
apache-2.0
|
pingcap/docs
|
information-schema/client-errors-summary-by-user.md
|
3915
|
---
title: CLIENT_ERRORS_SUMMARY_BY_USER
summary: Learn about the `CLIENT_ERRORS_SUMMARY_BY_USER` information_schema table.
---
# CLIENT_ERRORS_SUMMARY_BY_USER
The table `CLIENT_ERRORS_SUMMARY_BY_USER` provides a summary of SQL errors and warnings that have been returned to clients that connect to a TiDB server. These include:
* Malformed SQL statements.
* Division by zero errors.
* The attempt to insert out-of-range or duplicate key values.
* Permission errors.
* A table that does not exist.
Client errors are returned to the client via the MySQL server protocol, where applications are expected to take appropriate action. The `information_schema`.`CLIENT_ERRORS_SUMMARY_BY_USER` table provides a useful method to inspect errors in the scenario where applications are not correctly handling (or logging) errors returned by the TiDB server.
Because `CLIENT_ERRORS_SUMMARY_BY_USER` summarizes the errors on a per-user basis, it can be useful to diagnose scenarios where one user is generating more errors than other users. Possible scenarios include:
* Permission errors.
* Missing tables, or relational objects.
* Incorrect SQL syntax, or incompatibilities between the application and the version of TiDB.
The summarized counts can be reset with the statement `FLUSH CLIENT_ERRORS_SUMMARY`. The summary is local to each TiDB server and is only retained in memory. Summaries will be lost if the TiDB server restarts.
{{< copyable "sql" >}}
```sql
USE information_schema;
DESC CLIENT_ERRORS_SUMMARY_BY_USER;
```
```sql
+---------------+---------------+------+------+---------+-------+
| Field | Type | Null | Key | Default | Extra |
+---------------+---------------+------+------+---------+-------+
| USER | varchar(64) | NO | | NULL | |
| ERROR_NUMBER | bigint(64) | NO | | NULL | |
| ERROR_MESSAGE | varchar(1024) | NO | | NULL | |
| ERROR_COUNT | bigint(64) | NO | | NULL | |
| WARNING_COUNT | bigint(64) | NO | | NULL | |
| FIRST_SEEN | timestamp | YES | | NULL | |
| LAST_SEEN | timestamp | YES | | NULL | |
+---------------+---------------+------+------+---------+-------+
7 rows in set (0.00 sec)
```
Field description:
* `USER`: The authenticated user.
* `ERROR_NUMBER`: The MySQL-compatible error number that was returned.
* `ERROR_MESSAGE`: The error message which matches the error number (in prepared statement form).
* `ERROR_COUNT`: The number of times this error was returned to the user.
* `WARNING_COUNT`: The number of times this warning was returned to the user.
* `FIRST_SEEN`: The first time this error (or warning) was sent to the user.
* `LAST_SEEN`: The most recent time this error (or warning) was sent to the user.
The following example shows a warning being generated when the client connects to a local TiDB server. The summary is reset after executing `FLUSH CLIENT_ERRORS_SUMMARY`:
{{< copyable "sql" >}}
```sql
SELECT 0/0;
SELECT * FROM CLIENT_ERRORS_SUMMARY_BY_USER;
FLUSH CLIENT_ERRORS_SUMMARY;
SELECT * FROM CLIENT_ERRORS_SUMMARY_BY_USER;
```
```sql
+-----+
| 0/0 |
+-----+
| NULL |
+-----+
1 row in set, 1 warning (0.00 sec)
+------+--------------+---------------+-------------+---------------+---------------------+---------------------+
| USER | ERROR_NUMBER | ERROR_MESSAGE | ERROR_COUNT | WARNING_COUNT | FIRST_SEEN | LAST_SEEN |
+------+--------------+---------------+-------------+---------------+---------------------+---------------------+
| root | 1365 | Division by 0 | 0 | 1 | 2021-03-18 13:05:36 | 2021-03-18 13:05:36 |
+------+--------------+---------------+-------------+---------------+---------------------+---------------------+
1 row in set (0.00 sec)
Query OK, 0 rows affected (0.00 sec)
Empty set (0.00 sec)
```
|
apache-2.0
|
magneticio/vamp
|
operation/src/main/scala/io/vamp/operation/sla/SlaActor.scala
|
5823
|
package io.vamp.operation.sla
import java.time.OffsetDateTime
import java.time.temporal.ChronoUnit
import akka.actor._
import akka.pattern.ask
import io.vamp.common.akka.IoC._
import io.vamp.common.akka._
import io.vamp.common.notification.Notification
import io.vamp.model.artifact._
import io.vamp.model.event.{ Aggregator, Event, EventQuery, TimeRange, _ }
import io.vamp.model.notification.{ DeEscalate, Escalate, SlaEvent }
import io.vamp.operation.notification._
import io.vamp.operation.sla.SlaActor.SlaProcessAll
import io.vamp.persistence.{ ArtifactPaginationSupport, EventPaginationSupport, PersistenceActor }
import io.vamp.pulse.PulseActor.Publish
import io.vamp.pulse.{ EventRequestEnvelope, PulseActor }
import scala.concurrent.Future
/** Periodic scheduler: each tick asks the SlaActor to run a full SLA check. */
class SlaSchedulerActor extends SchedulerActor with OperationNotificationProvider {
  def tick() = IoC.actorFor[SlaActor] ! SlaProcessAll
}
object SlaActor {
  /** Message requesting an SLA check across all deployments. */
  object SlaProcessAll
}
/**
 * Runs SLA checks over all deployments and raises Escalate / DeEscalate
 * notifications, which are also published to the pulse as "sla" events.
 */
class SlaActor extends SlaPulse with ArtifactPaginationSupport with EventPaginationSupport with CommonSupportForActors with OperationNotificationProvider {

  def receive: Receive = {
    case SlaProcessAll ⇒
      implicit val timeout = PersistenceActor.timeout()
      // Page through every persisted deployment and check each page.
      forAll(allArtifacts[Deployment], check)
  }

  override def info(notification: Notification): Unit = {
    notification match {
      // SLA notifications are additionally published to the pulse, tagged "sla".
      case se: SlaEvent ⇒ actorFor[PulseActor] ! Publish(Event(Event.defaultVersion, Set("sla") ++ se.tags, se.value, se.timestamp))
      case _ ⇒
    }
    super.info(notification)
  }

  // Check every cluster of every deployment; failures are reported, not fatal.
  private def check(deployments: List[Deployment]) = {
    deployments.foreach(deployment ⇒ {
      try {
        deployment.clusters.foreach(cluster ⇒
          cluster.sla match {
            case Some(sla: ResponseTimeSlidingWindowSla) ⇒ responseTimeSlidingWindow(deployment, cluster, sla)
            // Escalation-only SLAs have no check of their own here.
            case Some(s: EscalationOnlySla) ⇒
            case Some(s: GenericSla) ⇒ info(UnsupportedSlaType(s.`type`))
            case Some(s: Sla) ⇒ throwException(UnsupportedSlaType(s.name))
            case None ⇒
          })
      }
      catch {
        case any: Throwable ⇒ reportException(InternalServerError(any))
      }
    })
  }

  // Sliding-window check: compares the max average gateway response time
  // over the SLA interval against the SLA's upper/lower thresholds.
  private def responseTimeSlidingWindow(deployment: Deployment, cluster: DeploymentCluster, sla: ResponseTimeSlidingWindowSla) = {
    log.debug(s"response time sliding window sla check for: ${deployment.name}/${cluster.name}")
    // Only check clusters whose services are all fully deployed.
    if (cluster.services.forall(_.status.isDone)) {
      // Skip when an escalation already happened within interval + cooldown.
      val from = OffsetDateTime.now().minus((sla.interval + sla.cooldown).toSeconds, ChronoUnit.SECONDS)
      eventExists(deployment, cluster, from) map {
        case true ⇒ log.debug(s"escalation event found within cooldown + interval period for: ${deployment.name}/${cluster.name}.")
        case false ⇒
          log.debug(s"escalation event not found within cooldown + interval period for: ${deployment.name}/${cluster.name}.")
          val to = OffsetDateTime.now()
          val from = to.minus(sla.interval.toSeconds, ChronoUnit.SECONDS)
          // The last path segment of each gateway name is its port name.
          val portMapping = cluster.gateways.map { gateway ⇒ GatewayPath(gateway.name).segments.last }
          Future.sequence(portMapping.map({ portName ⇒
            responseTime(deployment, cluster, portName, from, to)
          })) map { optionalResponseTimes ⇒
            val responseTimes = optionalResponseTimes.flatten
            if (responseTimes.nonEmpty) {
              val maxResponseTimes = responseTimes.max
              log.debug(s"escalation max response time for ${deployment.name}/${cluster.name}: $maxResponseTimes.")
              // Escalate above the upper bound, de-escalate below the lower.
              if (maxResponseTimes > sla.upper.toMillis)
                info(Escalate(deployment, cluster))
              else if (maxResponseTimes < sla.lower.toMillis)
                info(DeEscalate(deployment, cluster))
            }
          }
      }
    }
  }
}
/** Pulse (event store) queries used by the SLA checks. */
trait SlaPulse {
  this: CommonSupportForActors ⇒

  implicit lazy val timeout = PulseActor.timeout()

  /** True if an SLA (de)escalation event for the cluster exists since `from`. */
  def eventExists(deployment: Deployment, cluster: DeploymentCluster, from: OffsetDateTime): Future[Boolean] = {
    eventCount(SlaEvent.slaTags(deployment, cluster), from, OffsetDateTime.now(), 1) map { count ⇒ count > 0 }
  }

  /**
   * Average response time of the gateway matching `portName` within
   * [from, to], or None when there is no such gateway or no data.
   */
  def responseTime(deployment: Deployment, cluster: DeploymentCluster, portName: String, from: OffsetDateTime, to: OffsetDateTime): Future[Option[Double]] = {
    cluster.gateways.find(gateway ⇒ GatewayPath(gateway.name).segments.last == portName) match {
      case Some(gateway) ⇒
        val tags = Set(s"gateways:${gateway.name}", "metrics:responseTime")
        eventCount(tags, from, to, -1) flatMap {
          case count if count >= 0 ⇒
            val eventQuery = EventQuery(tags, None, Some(TimeRange(Some(from), Some(to), includeLower = true, includeUpper = true)), Some(Aggregator(Aggregator.average, Option("metrics"))))
            actorFor[PulseActor] ? PulseActor.Query(EventRequestEnvelope(eventQuery, 1, 1)) map {
              case DoubleValueAggregationResult(value) ⇒ Some(value)
              case other ⇒ log.error(other.toString); None
            }
          case _ ⇒ Future.successful(None)
        }
      case None ⇒ Future.successful(None)
    }
  }

  /**
   * Count of events matching `tags` within [from, to]; returns `onError`
   * when the pulse replies with anything other than a count.
   */
  def eventCount(tags: Set[String], from: OffsetDateTime, to: OffsetDateTime, onError: Long): Future[Long] = {
    // Bug fix: the upper bound previously used OffsetDateTime.now() and
    // silently ignored the `to` argument, so responseTime() counted events
    // outside its window.
    val eventQuery = EventQuery(tags, None, Some(TimeRange(Some(from), Some(to), includeLower = true, includeUpper = true)), Some(Aggregator(Aggregator.count)))
    actorFor[PulseActor] ? PulseActor.Query(EventRequestEnvelope(eventQuery, 1, 1)) map {
      case LongValueAggregationResult(count) ⇒ count
      case _ ⇒ onError
    }
  }
}
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Hedysarum/Hedysarum boreale/ Syn. Hedysarum mackenziei fraseri/README.md
|
200
|
# Hedysarum mackenziei var. fraseri B. Boivin VARIETY
#### Status
SYNONYM
#### According to
Integrated Taxonomic Information System
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Dipsacales/Dipsacaceae/Scabiosa/Scabiosa officinarum-arvensis/README.md
|
186
|
# Scabiosa officinarum-arvensis Crantz SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Bacillariophyta/Bacillariophyceae/Surirellales/Surirellaceae/Surirella/Surirella fastuosa/Surirella fastuosa spinlifera/README.md
|
194
|
# Surirella fastuosa spinlifera A. Schmidt SUBSPECIES
#### Status
ACCEPTED
#### According to
The National Checklist of Taiwan
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Fungi/Basidiomycota/Ustilaginomycetes/Ustilaginales/Ustilaginaceae/Ustilago/Ustilago hordei/Ustilago hordei hordei/README.md
|
260
|
# Ustilago hordei f. hordei (Pers.) Lagerh. FORM
#### Status
ACCEPTED
#### According to
Index Fungorum
#### Published in
Mitteilungen des badischen botanischen Vereins 70 (1889)
#### Original name
Ustilago hordei f. hordei (Pers.) Lagerh.
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Pteridophyta/Polypodiopsida/Polypodiales/Pteridaceae/Pteris/Pteris lomariacea/README.md
|
173
|
# Pteris lomariacea Kunze SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Solanales/Solanaceae/Anthocercis/Anthocercis viscosa/README.md
|
187
|
# Anthocercis viscosa R.Br. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
Prodr. 448. 1810
#### Original name
null
### Remarks
null
|
apache-2.0
|
rchillyard/INFO6205
|
src/main/java/edu/neu/coe/info6205/TicketChecker.java
|
4562
|
package edu.neu.coe.info6205;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Scanner;
/*
* This is program to check whether the tickets have been used by students. It will take input all the tickets.
* After this user need to input the ticket number used by students.
* The program can be terminated by entering 7889- Exit code
*
* Input Format--
*
* [ticket_number_1,ticket_number_2,ticket_number_3,ticket_number_4,ticket_number_5....ticket_number_n ]
*
* Output--
*
* ========================================================================
* Final Tally of tickets
* Tickets Used Status
* ========================================================================
* 182051 1
* 167929 2
* 154421 Not Used
* 160561 Not Used
*
* */
/**
 * Interactive ticket validator: reads ticket numbers from stdin, tracks how
 * many times each valid ticket is presented, and prints a final tally.
 * Entering the exit code 7889 ends the input loop.
 */
class Checker {

    /**
     * Runs the interactive check loop against the given valid tickets.
     *
     * @param nums the list of valid ticket numbers
     */
    public void checkValid(int[] nums) {
        // Deliberately not closed: closing a Scanner over System.in would
        // close System.in itself.
        Scanner input = new Scanner(System.in);
        System.out.println("Total tickets: " + nums.length);
        HashMap<Integer, Integer> ticketCounter = new HashMap<>();
        for (int num : nums) {
            ticketCounter.put(num, 0);
        }
        while (true) {
            System.out.println("Enter the ticket number: ");
            int ticket = input.nextInt();
            if (ticket == 7889) break;
            if (!ticketCounter.containsKey(ticket)) {
                System.out.println("Invalid Ticket: " + ticket);
            } else {
                // Increment once via merge() instead of duplicating the
                // put(value + 1) call in both branches.
                int newValue = ticketCounter.merge(ticket, 1, Integer::sum);
                if (newValue == 1) {
                    System.out.println("Valid Ticket: " + ticket);
                } else {
                    System.out.println("Ticket already used by another User");
                    System.out.println("Number of user: " + newValue);
                }
            }
        }
        System.out.println("========================================================================");
        System.out.println("Final Tally of tickets");
        System.out.println("Tickets Used Status");
        System.out.println("========================================================================");
        for (int num : nums) {
            System.out.println(num + " " + (ticketCounter.get(num) == 0 ? "Not Used" : ticketCounter.get(num)));
        }
    }
}
/** Entry point and string/array conversion helpers for the ticket checker. */
public class TicketChecker {

    /**
     * Parses a bracketed, comma-separated list such as "[1, 2, 3]" into an
     * int array; "[]" yields an empty array.
     */
    public static int[] stringToIntegerArray(String input) {
        input = input.trim();
        input = input.substring(1, input.length() - 1);
        if (input.length() == 0) {
            return new int[0];
        }
        String[] parts = input.split(",");
        int[] output = new int[parts.length];
        for (int index = 0; index < parts.length; index++) {
            String part = parts[index].trim();
            output[index] = Integer.parseInt(part);
        }
        return output;
    }

    /**
     * Formats the first {@code length} elements of {@code nums} as
     * "[a, b, c]"; zero length yields "[]".
     */
    public static String integerArrayListToString(List<Integer> nums, int length) {
        if (length == 0) {
            return "[]";
        }
        StringBuilder result = new StringBuilder();
        for (int index = 0; index < length; index++) {
            Integer number = nums.get(index);
            result.append(number).append(", ");
        }
        return "[" + result.substring(0, result.length() - 2) + "]";
    }

    /** Formats the whole list as "[a, b, c]". */
    public static String integerArrayListToString(List<Integer> nums) {
        return integerArrayListToString(nums, nums.size());
    }

    /** Formats a collection of lists as "[[a, b],[c]]"; empty yields "[]". */
    public static String int2dListToString(Collection<List<Integer>> nums) {
        // Bug fix: the original returned "]" for an empty collection because
        // setCharAt() overwrote the lone opening bracket.
        if (nums.isEmpty()) {
            return "[]";
        }
        StringBuilder sb = new StringBuilder("[");
        for (List<Integer> list : nums) {
            sb.append(integerArrayListToString(list));
            sb.append(",");
        }
        sb.setCharAt(sb.length() - 1, ']');
        return sb.toString();
    }

    public static void main(String[] args) throws IOException {
        System.out.println("Enter the total tickets");
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        // The original while-loop broke after the first iteration; exactly
        // one line of input is consumed.
        String line = in.readLine();
        if (line != null) {
            int[] nums = stringToIntegerArray(line);
            new Checker().checkValid(nums);
        }
    }
}
|
apache-2.0
|
OpenXIP/xip-libraries
|
src/extern/inventor/lib/database/include/Inventor/sensors/SoFieldSensor.h
|
3684
|
/*
*
* Copyright (C) 2000 Silicon Graphics, Inc. All Rights Reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Further, this software is distributed without any warranty that it is
* free of the rightful claim of any third person regarding infringement
* or the like. Any license provided herein, whether implied or
* otherwise, applies only to this software file. Patent licenses, if
* any, provided herein do not apply to combinations of this program with
* other software, or any other product whatsoever.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Contact information: Silicon Graphics, Inc., 1600 Amphitheatre Pkwy,
* Mountain View, CA 94043, or:
*
* http://www.sgi.com
*
* For further information regarding this notice, see:
*
* http://oss.sgi.com/projects/GenInfo/NoticeExplan/
*
*/
// -*- C++ -*-
/*
* Copyright (C) 1990,91,92 Silicon Graphics, Inc.
*
_______________________________________________________________________
______________ S I L I C O N G R A P H I C S I N C . ____________
|
| $Revision: 1.1.1.1 $
|
| Description:
| Data sensor that is attached to a field in a node or elsewhere.
| The sensor is scheduled when a change is made to that field. Note:
| the field must be contained within a node or function, or
| attachment will not work.
|
| Author(s) : Paul Strauss
|
______________ S I L I C O N G R A P H I C S I N C . ____________
_______________________________________________________________________
*/
#ifndef _SO_FIELD_SENSOR_
#define _SO_FIELD_SENSOR_
#include <Inventor/sensors/SoDataSensor.h>
class SoFieldContainer;
// C-api: prefix=SoFieldSens
// Data sensor attached to a single SoField. The sensor is scheduled when
// the watched field changes. The field must be contained within a node or
// function, or attachment will not work.
class INVENTOR_API SoFieldSensor : public SoDataSensor {
  public:
    // Constructors. The second form takes standard callback function and data
    SoFieldSensor();
    // C-api: name=CreateCB
    SoFieldSensor(SoSensorCB *func, void *data);
    // Destructor
    virtual ~SoFieldSensor();
    // Attaches the sensor to the given field. Will not attach if the
    // field is not contained in a node or function.
    void attach(SoField *field);
    // Detaches the sensor if it is attached to a field
    void detach();
    // Returns the field to which the sensor is attached, or NULL if it
    // is not attached.
    SoField * getAttachedField() const { return field; }
  SoINTERNAL public:
    // Override trigger to evaluate the field we're connected to, just
    // in case the trigger method doesn't get the value.
    virtual void trigger();
  private:
    SoField * field; // Field sensor is attached to; NULL when detached
    // Propagates modification notification through an instance. This
    // checks to see if the field that triggered notification matches
    // the field we are attached to. (It also checks indices if necessary.)
    virtual void notify(SoNotList *list);
    // Called by the attached path when it (the path) is about to be deleted
    virtual void dyingReference();
};
#endif /* _SO_FIELD_SENSOR_ */
|
apache-2.0
|
wackerly/faucet
|
faucet/conf.py
|
5573
|
"""Base configuration implementation."""
# Copyright (C) 2015 Brad Cowie, Christopher Lorier and Joe Stringer.
# Copyright (C) 2015 Research and Education Advanced Network New Zealand Ltd.
# Copyright (C) 2015--2018 The Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class InvalidConfigError(Exception):
    """Raised when a FAUCET configuration file fails validation."""
def test_config_condition(cond, msg):
if cond:
raise InvalidConfigError(msg)
class Conf(object):
    """Base class for FAUCET configuration.

    Subclasses declare ``defaults`` (key -> default value) and
    ``defaults_types`` (key -> expected type). A supplied config dict is
    copied into the instance ``__dict__`` and then sanity checked.
    """

    # Per-subclass default values for config keys.
    defaults = {} # type: dict
    # Per-subclass expected types for config keys.
    defaults_types = {} # type: dict
    # Set True by finalize() once parsing is complete; enables hash caching.
    dyn_finalized = False
    # Cached hash value; only populated after finalization.
    dyn_hash = None

    def __init__(self, _id, dp_id, conf=None):
        """Apply the supplied conf dict, then defaults, then sanity checks.

        Args:
            _id: identifier for this config object (used in error messages).
            dp_id: datapath ID this config belongs to.
            conf: optional dict of config values parsed from YAML.
        """
        self._id = _id
        self.dp_id = dp_id
        if conf is None:
            conf = {}
        # TODO: handle conf as a sequence. # pylint: disable=fixme
        if isinstance(conf, dict):
            self.update(conf)
            self.set_defaults()
            self.check_config()

    def set_defaults(self):
        """Set default values and run any basic sanity checks."""
        for key, value in list(self.defaults.items()):
            self._set_default(key, value)

    def _check_unknown_conf(self, conf):
        """Check that supplied conf dict doesn't specify keys not defined."""
        sub_conf_names = set(conf.keys())
        unknown_conf_names = sub_conf_names - set(self.defaults.keys())
        test_config_condition(unknown_conf_names, '%s fields unknown in %s' % (
            unknown_conf_names, self._id))

    def _check_conf_types(self, conf, conf_types):
        """Check that conf value is of the correct type."""
        for conf_key, conf_value in list(conf.items()):
            test_config_condition(conf_key not in conf_types, '%s field unknown in %s (known types %s)' % (
                conf_key, self._id, conf_types))
            # None is always accepted, meaning "not set".
            if conf_value is not None:
                conf_type = conf_types[conf_key]
                test_config_condition(not isinstance(conf_value, conf_type), '%s value %s must be %s not %s' % (
                    conf_key, conf_value, conf_type, type(conf_value))) # pytype: disable=invalid-typevar

    @staticmethod
    def _set_unknown_conf(conf, conf_types):
        # Populate missing keys: list-typed keys default to [], others None.
        for conf_key, conf_type in list(conf_types.items()):
            if conf_key not in conf:
                if conf_type == list:
                    conf[conf_key] = []
                else:
                    conf[conf_key] = None
        return conf

    def update(self, conf):
        """Parse supplied YAML config and sanity check."""
        # NOTE: attributes are set before validation, so a failing check
        # raises InvalidConfigError after __dict__ has already been updated.
        self.__dict__.update(conf)
        self._check_unknown_conf(conf)
        self._check_conf_types(conf, self.defaults_types)

    def check_config(self):
        """Check config at instantiation time for errors, typically via assert."""
        return

    @staticmethod
    def _conf_keys(conf, dyn=False, subconf=True, ignore_keys=None):
        """Return a list of key/values of attributes with dyn/Conf attributes/filtered."""
        conf_keys = []
        for key, value in list(conf.__dict__.items()):
            # Skip dynamic ("dyn*") state unless explicitly requested.
            if not dyn and key.startswith('dyn'):
                continue
            # Optionally skip nested Conf objects.
            if not subconf and isinstance(value, Conf):
                continue
            if ignore_keys and key in ignore_keys:
                continue
            conf_keys.append((key, value))
        return conf_keys

    def merge_dyn(self, other_conf):
        """Merge dynamic state from other conf object."""
        # NOTE(review): dyn=True disables the dyn-prefix filter rather than
        # selecting only dyn keys, so ALL attributes of other_conf are
        # copied -- confirm this is intended.
        for key, value in self._conf_keys(other_conf, dyn=True):
            self.__dict__[key] = value

    def _set_default(self, key, value):
        # Only fill in a default when the key is absent or explicitly None.
        if key not in self.__dict__ or self.__dict__[key] is None:
            self.__dict__[key] = value

    def to_conf(self):
        """Return configuration as a dict."""
        result = {}
        for key in self.defaults:
            if key != 'name':
                result[key] = self.__dict__[str(key)]
        return result

    def conf_hash(self, dyn=False, subconf=True, ignore_keys=None):
        """Return hash of keys configurably filtering attributes."""
        return hash(frozenset(list(map(
            str, self._conf_keys(self, dyn=dyn, subconf=subconf, ignore_keys=ignore_keys)))))

    def __hash__(self):
        # Return the cached hash when available; only cache once finalized,
        # since further config mutation would invalidate the cached value.
        if self.dyn_hash is not None:
            return self.dyn_hash
        dyn_hash = self.conf_hash(dyn=False, subconf=True)
        if self.dyn_finalized:
            self.dyn_hash = dyn_hash
        return dyn_hash

    def finalize(self):
        """Configuration parsing marked complete."""
        self.dyn_finalized = True

    def ignore_subconf(self, other, ignore_keys=None):
        """Return True if this config same as other, ignoring sub config."""
        return (self.conf_hash(dyn=False, subconf=False, ignore_keys=ignore_keys)
                == other.conf_hash(dyn=False, subconf=False, ignore_keys=ignore_keys))

    def __eq__(self, other):
        # Equality is hash-based (see conf_hash); assumes `other` is also
        # hashable in the same scheme.
        return self.__hash__() == other.__hash__()

    def __ne__(self, other):
        return not self.__eq__(other)
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Liliopsida/Poales/Poaceae/Sasa/Sasa pubiculmis/ Syn. Neosasamorpha chitosensis/README.md
|
197
|
# Neosasamorpha chitosensis (Nakai) Tatew. SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
android-art-intel/marshmallow
|
art-extension/opttests/src/OptimizationTests/ShortLeafMethodsInlining/InvokeVirtual_add_int_lit16_001/Main.java
|
1083
|
/*
* Copyright (C) 2015 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package OptimizationTests.ShortLeafMethodsInlining.InvokeVirtual_add_int_lit16_001;
class Main {
final static int iterations = 10;
public static void main(String[] args) {
Test test = new Test();
int nextJ = -10;
System.out.println("Initial nextJ value is " + nextJ);
for(int i = 0; i < iterations; i++) {
nextJ = test.simple_method(i) + i;
}
System.out.println("Final nextJ value is " + nextJ);
}
}
|
apache-2.0
|
ihoneymon/spring-boot
|
spring-boot-project/spring-boot-actuator/src/test/java/org/springframework/boot/actuate/metrics/web/servlet/WebMvcMetricsFilterAutoTimedTests.java
|
3712
|
/*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.metrics.web.servlet;
import io.micrometer.core.instrument.Clock;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.MockClock;
import io.micrometer.core.instrument.simple.SimpleConfig;
import io.micrometer.core.instrument.simple.SimpleMeterRegistry;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.context.WebApplicationContext;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* @author Jon Schneider
*/
// Verifies that requests through WebMvcMetricsFilter are timed under
// "http.server.requests" even though the Controller below carries no
// explicit timing annotation.
@RunWith(SpringRunner.class)
@WebAppConfiguration
public class WebMvcMetricsFilterAutoTimedTests {
	@Autowired
	private MeterRegistry registry;
	@Autowired
	private WebApplicationContext context;
	private MockMvc mvc;
	@Autowired
	private WebMvcMetricsFilter filter;
	@Before
	public void setupMockMvc() {
		// Build MockMvc with the metrics filter in the filter chain.
		this.mvc = MockMvcBuilders.webAppContextSetup(this.context)
				.addFilters(this.filter).build();
	}
	@Test
	public void metricsCanBeAutoTimed() throws Exception {
		this.mvc.perform(get("/api/10")).andExpect(status().isOk());
		// Exactly one 200 response should be recorded against the timer.
		assertThat(
				this.registry.find("http.server.requests").tags("status", "200").timer())
				.hasValueSatisfying((t) -> assertThat(t.count()).isEqualTo(1));
	}
	@Configuration
	@EnableWebMvc
	@Import({ Controller.class })
	static class TestConfiguration {
		@Bean
		MockClock clock() {
			return new MockClock();
		}
		@Bean
		MeterRegistry meterRegistry(Clock clock) {
			return new SimpleMeterRegistry(SimpleConfig.DEFAULT, clock);
		}
		@Bean
		public WebMvcMetrics controllerMetrics(MeterRegistry registry) {
			// NOTE(review): the boolean flags appear to enable auto-timing
			// and disable percentile histograms -- confirm against the
			// WebMvcMetrics constructor signature.
			return new WebMvcMetrics(registry, new DefaultWebMvcTagsProvider(),
					"http.server.requests", true, false);
		}
		@Bean
		public WebMvcMetricsFilter webMetricsFilter(ApplicationContext context) {
			return new WebMvcMetricsFilter(context);
		}
	}
	// Minimal endpoint used by the test; echoes the path variable.
	@RestController
	@RequestMapping("/api")
	static class Controller {
		@GetMapping("/{id}")
		public String successful(@PathVariable Long id) {
			return id.toString();
		}
	}
}
|
apache-2.0
|
friendranjith/vizzly
|
jetty-runtime/javadoc/org/eclipse/jetty/io/bio/package-use.html
|
8615
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_03) on Mon Sep 10 14:26:00 CDT 2012 -->
<meta http-equiv="Content-Type" content="text/html" charset="UTF-8">
<title>Uses of Package org.eclipse.jetty.io.bio (Jetty :: Aggregate :: All core Jetty 8.1.7.v20120910 API)</title>
<meta name="date" content="2012-09-10">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Package org.eclipse.jetty.io.bio (Jetty :: Aggregate :: All core Jetty 8.1.7.v20120910 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/eclipse/jetty/io/bio/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="Uses of Package org.eclipse.jetty.io.bio" class="title">Uses of Package<br>org.eclipse.jetty.io.bio</h1>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../org/eclipse/jetty/io/bio/package-summary.html">org.eclipse.jetty.io.bio</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.eclipse.jetty.io.bio">org.eclipse.jetty.io.bio</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#org.eclipse.jetty.nested">org.eclipse.jetty.nested</a></td>
<td class="colLast"> </td>
</tr>
<tr class="altColor">
<td class="colFirst"><a href="#org.eclipse.jetty.server.bio">org.eclipse.jetty.server.bio</a></td>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="#org.eclipse.jetty.server.ssl">org.eclipse.jetty.server.ssl</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.eclipse.jetty.io.bio">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../org/eclipse/jetty/io/bio/package-summary.html">org.eclipse.jetty.io.bio</a> used by <a href="../../../../../org/eclipse/jetty/io/bio/package-summary.html">org.eclipse.jetty.io.bio</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../../org/eclipse/jetty/io/bio/class-use/StreamEndPoint.html#org.eclipse.jetty.io.bio">StreamEndPoint</a> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.eclipse.jetty.nested">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../org/eclipse/jetty/io/bio/package-summary.html">org.eclipse.jetty.io.bio</a> used by <a href="../../../../../org/eclipse/jetty/nested/package-summary.html">org.eclipse.jetty.nested</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../../org/eclipse/jetty/io/bio/class-use/StreamEndPoint.html#org.eclipse.jetty.nested">StreamEndPoint</a> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.eclipse.jetty.server.bio">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../org/eclipse/jetty/io/bio/package-summary.html">org.eclipse.jetty.io.bio</a> used by <a href="../../../../../org/eclipse/jetty/server/bio/package-summary.html">org.eclipse.jetty.server.bio</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../../org/eclipse/jetty/io/bio/class-use/SocketEndPoint.html#org.eclipse.jetty.server.bio">SocketEndPoint</a> </td>
</tr>
<tr class="rowColor">
<td class="colOne"><a href="../../../../../org/eclipse/jetty/io/bio/class-use/StreamEndPoint.html#org.eclipse.jetty.server.bio">StreamEndPoint</a> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.eclipse.jetty.server.ssl">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../org/eclipse/jetty/io/bio/package-summary.html">org.eclipse.jetty.io.bio</a> used by <a href="../../../../../org/eclipse/jetty/server/ssl/package-summary.html">org.eclipse.jetty.server.ssl</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../../org/eclipse/jetty/io/bio/class-use/SocketEndPoint.html#org.eclipse.jetty.server.ssl">SocketEndPoint</a> </td>
</tr>
<tr class="rowColor">
<td class="colOne"><a href="../../../../../org/eclipse/jetty/io/bio/class-use/StreamEndPoint.html#org.eclipse.jetty.server.ssl">StreamEndPoint</a> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?org/eclipse/jetty/io/bio/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 1995-2012 <a href="http://www.mortbay.com">Mort Bay Consulting</a>. All Rights Reserved.</small></p>
</body>
</html>
|
apache-2.0
|
minesh1291/Learning-C
|
pattern1.c
|
335
|
#include<stdio.h>
#include<conio.h>

/*
 * Reads a size n and prints an n x n hollow pattern of '*':
 * the first row, the last row, and the anti-diagonal (i + j == n + 1)
 * are starred; every other cell is a blank.
 *
 * Fixes: non-standard `void main` -> standard `int main` returning 0;
 * removed the unused variable `k`; collapsed the redundant
 * `else if (i != 1 && i != n)` (always true at that point) into `else`.
 */
int main()
{
    int i, j, n;

    clrscr();
    printf("enter");
    scanf("%d", &n);
    for (i = 1; i <= n; i++)
    {
        for (j = 1; j <= n; j++)
        {
            /* star the border rows and the anti-diagonal */
            if (i == 1 || i == n || i + j == n + 1)
                printf("*");
            else
                printf(" ");
        }
        printf("\n");
    }
    getch();
    return 0;
}
|
apache-2.0
|
Kuraikari/Modern-Times
|
Modern Time (J)RPG/Assets/Plugins/GUI Animator/GUI Animator FREE/Demo (CSharp)/Scripts/GA_FREE_OpenOtherScene.cs
|
4394
|
// GUI Animator FREE
// Version: 1.1.0
// Compatilble: Unity 5.4.0 or higher, see more info in Readme.txt file.
//
// Developer: Gold Experience Team (https://www.ge-team.com)
//
// Unity Asset Store: https://www.assetstore.unity3d.com/en/#!/content/58843
// GE Store: https://www.ge-team.com/en/products/gui-animator-free/
// Full version on Unity Asset Store: https://www.assetstore.unity3d.com/en/#!/content/28709
// Full version on GE Store: https://www.ge-team.com/en/products/gui-animator-for-unity-ui/
//
// Please direct any bugs/comments/suggestions to [email protected]
#region Namespaces
using UnityEngine;
using System.Collections;
#endregion // Namespaces
// ######################################################################
// GA_FREE_OpenOtherScene class
// This class handles 8 buttons for changing scene.
// ######################################################################
public class GA_FREE_OpenOtherScene : MonoBehaviour
{
	// NOTE: the empty Start()/Update() MonoBehaviour methods were removed.
	// Unity invokes them via reflection (Update once per frame); empty
	// bodies are pure overhead.

	// ########################################
	// UI Responder functions
	// ########################################

	#region UI Responder

	// Shared implementation for all eight demo-scene buttons:
	// disables every button so a second load cannot be triggered,
	// schedules the scene load after the 1.5s "Moving Out" animation,
	// then asks the GUI hierarchy to hide itself.
	private void OpenDemoScene (int sceneIndex)
	{
		// Disable all buttons
		GUIAnimSystemFREE.Instance.EnableAllButtons(false);

		// Waits 1.5 secs for Moving Out animation then load next level.
		// Scene names follow the fixed pattern "GA FREE - DemoNN (960x600px)".
		GUIAnimSystemFREE.Instance.LoadLevel(string.Format("GA FREE - Demo{0:00} (960x600px)", sceneIndex), 1.5f);

		gameObject.SendMessage("HideAllGUIs");
	}

	// Open Demo Scene 1
	public void ButtonOpenDemoScene1 ()
	{
		OpenDemoScene(1);
	}

	// Open Demo Scene 2
	public void ButtonOpenDemoScene2 ()
	{
		OpenDemoScene(2);
	}

	// Open Demo Scene 3
	public void ButtonOpenDemoScene3 ()
	{
		OpenDemoScene(3);
	}

	// Open Demo Scene 4
	public void ButtonOpenDemoScene4 ()
	{
		OpenDemoScene(4);
	}

	// Open Demo Scene 5
	public void ButtonOpenDemoScene5 ()
	{
		OpenDemoScene(5);
	}

	// Open Demo Scene 6
	public void ButtonOpenDemoScene6 ()
	{
		OpenDemoScene(6);
	}

	// Open Demo Scene 7
	public void ButtonOpenDemoScene7 ()
	{
		OpenDemoScene(7);
	}

	// Open Demo Scene 8
	public void ButtonOpenDemoScene8 ()
	{
		OpenDemoScene(8);
	}

	#endregion // UI Responder
}
|
apache-2.0
|
jamesrr39/goutil
|
notification/notification_linux.go
|
157
|
//go:build linux
// +build linux

package notification
import "os/exec"
// Send displays a desktop notification with the given title and summary
// by invoking the notify-send command-line utility. It returns whatever
// error running the command produces (including notify-send not being
// installed).
func Send(title, summary string) error {
	notify := exec.Command("notify-send", title, summary)
	return notify.Run()
}
|
apache-2.0
|
HunseopJeong/WATT
|
libs/brackets-server/embedded-ext/tau-document/tau-document-parser.js
|
5250
|
// AMD module for Brackets that maps the token under the cursor to a TAU
// (Tizen Advanced UI) documentation link. Supports HTML widget selectors
// and dotted `tau.*` JavaScript APIs, driven by tau-document-config.json.
define(function(require, exports, module) {
    var EditorManager = brackets.getModule("editor/EditorManager");
    var ExtensionUtils = brackets.getModule("utils/ExtensionUtils");
    var HTMLUtils = brackets.getModule("language/HTMLUtils");
    var PreferencesManager = brackets.getModule("preferences/PreferencesManager");

    // Ensures a tag fragment is wrapped in "<" and ">" so it can be fed to
    // $.parseHTML. Returns null for non-string input.
    function wrapBrackets(str) {
        if (typeof str !== "string") {
            return null;
        }
        var result = str;
        if (!result.startsWith("<")) {
            result = "<" + result;
        }
        if (!result.endsWith(">")) {
            result = result + ">";
        }
        return result;
    }

    var TauDocumentParser;

    module.exports = TauDocumentParser = (function() {
        // Constructor: initializes empty lookup tables and kicks off the
        // asynchronous config load. Guide data is NOT available until the
        // loadFile promise in readJson() resolves.
        function TauDocumentParser() {
            this.tauAPIs = {};       // nested object tree keyed by "tau.x.y" segments
            this.tauHTML = {};       // CSS-selector -> { href, name } map
            this.tauGuideData = {};  // raw config JSON, keyed by version then profile
            this.tauGuidePaths = {}; // doc base paths ({ local, network } per config)
            this.readJson();
        }

        // Loads tau-document-config.json relative to this module and, once
        // loaded, selects the tables for the current project profile/version.
        TauDocumentParser.prototype.readJson = function() {
            var self = this;
            ExtensionUtils.loadFile(module, "tau-document-config.json").done(
                function (data) {
                    self.tauGuideData = data;
                    self.setTauGuideData();
                }
            );
        };

        // Picks the doc/api/html tables matching the project's profile and
        // version (read from Brackets view state). Assumes
        // tauGuideData[version][profile] exists — TODO confirm config always
        // covers the active profile/version pair.
        TauDocumentParser.prototype.setTauGuideData = function() {
            var profile, version;

            profile = PreferencesManager.getViewState("projectProfile");
            version = PreferencesManager.getViewState("projectVersion");

            this.tauGuidePaths = this.tauGuideData[version][profile].doc;
            this.tauAPIs = this.tauGuideData[version][profile].api;
            this.tauHTML = this.tauGuideData[version][profile].html;

            return this.tauAPIs;
        };

        // Resolves the documentation link for the token at the current cursor
        // position in the focused editor. Returns { href, name } (both null
        // when nothing matches), or null early for a JS line without "tau.".
        TauDocumentParser.prototype.parse = function() {
            var api = this.tauAPIs;
            var html = this.tauHTML;
            var href = null;
            var name = null;
            var editor = EditorManager.getFocusedEditor();
            var language = editor.getLanguageForSelection();
            var langId = language.getId();
            var pos = editor.getSelection();
            var line = editor.document.getLine(pos.start.line);

            if (langId === "html") {
                var tagInfo = HTMLUtils.getTagInfo(editor, editor.getCursorPos());
                // Only react when the cursor sits on a tag name or attribute value.
                if (tagInfo.position.tokenType === HTMLUtils.TAG_NAME || tagInfo.position.tokenType === HTMLUtils.ATTR_VALUE) {
                    var start = 0;
                    var end = 0;
                    // Find a start tag: scan left from the cursor to "<".
                    for (var cur = pos.start.ch; cur >= 0; cur--) {
                        if (line[cur] === "<") {
                            start = cur;
                            break;
                        }
                    }
                    // Find a end tag: scan right from the cursor to ">" or "/".
                    for (var cur = pos.start.ch; cur < line.length; cur++) {
                        if (line[cur] === ">" || line[cur] === "/") {
                            end = cur;
                            break;
                        }
                    }
                    var result = line.slice(start, end);
                    result = wrapBrackets(result);
                    var element = $.parseHTML(result);
                    if (element && element.length > 0) {
                        // Match the parsed element against every configured
                        // selector; the last matching entry wins.
                        Object.keys(html).forEach((value) => {
                            if (element[0].matches(value)) {
                                if (html[value].href) {
                                    href = this.tauGuidePaths.local + html[value].href;
                                    name = html[value].name;
                                }
                            }
                        });
                    }
                }
            } else if (langId === "javascript") {
                // Walk the dotted path after the last "tau." on the line.
                var start = line.lastIndexOf("tau.");
                var end = 0;
                if (start === -1) {
                    return null;
                }
                // Scan right from the cursor to the end of the dotted name.
                for (var cur = pos.start.ch; cur < line.length; cur++) {
                    if (line[cur] === " " || line[cur] === "(" || line[cur] === ".") {
                        end = cur;
                        break;
                    }
                }
                var data = line.slice(start, end);
                if (data) {
                    // Descend the API tree one segment at a time; stop at the
                    // first unknown segment.
                    data = data.split(".");
                    for (var i=0; i<data.length; i++) {
                        api = api[data[i]];
                        if (!api) {
                            break;
                        }
                    }
                    if (api && api.href) {
                        // TODO: Should change the href to use the network
                        // href = this.tauGuidePaths.network + api.href;
                        href = this.tauGuidePaths.local + api.href;
                        name = api.name;
                    }
                }
            }
            return {
                href: href,
                name: name
            };
        };

        return TauDocumentParser;
    }());
});
|
apache-2.0
|
xasx/camunda-bpm-platform
|
javaee/ejb-service/src/main/java/org/camunda/bpm/container/impl/ejb/EjbProcessEngineService.java
|
2196
|
/*
* Copyright © 2013-2018 camunda services GmbH and various authors ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.container.impl.ejb;
import java.util.List;
import java.util.Set;
import javax.annotation.PostConstruct;
import javax.ejb.EJB;
import javax.ejb.Local;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import org.camunda.bpm.ProcessEngineService;
import org.camunda.bpm.engine.ProcessEngine;
/**
* <p>Exposes the {@link ProcessEngineService} as EJB inside the container.</p>
*
* @author Daniel Meyer
*
*/
@Stateless(name="ProcessEngineService", mappedName="ProcessEngineService")
@Local(ProcessEngineService.class)
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public class EjbProcessEngineService implements ProcessEngineService {
@EJB
protected EjbBpmPlatformBootstrap ejbBpmPlatform;
/** the processEngineServiceDelegate */
protected ProcessEngineService processEngineServiceDelegate;
@PostConstruct
protected void initProcessEngineServiceDelegate() {
processEngineServiceDelegate = ejbBpmPlatform.getProcessEngineService();
}
public ProcessEngine getDefaultProcessEngine() {
return processEngineServiceDelegate.getDefaultProcessEngine();
}
public List<ProcessEngine> getProcessEngines() {
return processEngineServiceDelegate.getProcessEngines();
}
public Set<String> getProcessEngineNames() {
return processEngineServiceDelegate.getProcessEngineNames();
}
public ProcessEngine getProcessEngine(String name) {
return processEngineServiceDelegate.getProcessEngine(name);
}
}
|
apache-2.0
|
groupe-sii/ogham
|
ogham-core/src/main/java/fr/sii/ogham/core/util/PriorizedList.java
|
2496
|
package fr.sii.ogham.core.util;
import static java.util.stream.Collectors.toList;
import java.util.ArrayList;
import java.util.List;
/**
* Helper class that registers objects with associated priority. Each registered
* object is then returned as list ordered by priority. The higher priority
* value comes first in the list.
*
* @author Aurélien Baudet
*
* @param <P>
* the type of priorized objects
*/
public class PriorizedList<P> {
    // Backing list. NOTE: it is sorted in place every time getPriorities()
    // or getOrdered() is called, and getPriorities() hands out this very
    // instance (no defensive copy) — callers share its state.
    private final List<WithPriority<P>> priorities;

    /**
     * Initializes with an empty list
     */
    public PriorizedList() {
        this(new ArrayList<>());
    }

    /**
     * Initializes with some priorized objects. The provided list is used
     * directly (not copied) and will be mutated by registrations and sorting.
     *
     * @param priorities
     *            the priorized objects
     */
    public PriorizedList(List<WithPriority<P>> priorities) {
        super();
        this.priorities = priorities;
    }

    /**
     * Registers a new priorized object
     *
     * @param priorized
     *            the wrapped object with its priority
     * @return this instance for fluent chaining
     */
    public PriorizedList<P> register(WithPriority<P> priorized) {
        priorities.add(priorized);
        return this;
    }

    /**
     * Registers an object with its priority
     *
     * @param priorized
     *            the object to register
     * @param priority
     *            the associated priority
     * @return this instance for fluent chaining
     */
    public PriorizedList<P> register(P priorized, int priority) {
        priorities.add(new WithPriority<>(priorized, priority));
        return this;
    }

    /**
     * Merge all priorities of another {@link PriorizedList} into this one.
     * The other list itself is left untouched.
     *
     * @param other
     *            the priority list
     * @return this instance for fluent chaining
     */
    public PriorizedList<P> register(PriorizedList<P> other) {
        priorities.addAll(other.getPriorities());
        return this;
    }

    /**
     * Returns true if this list contains no elements.
     *
     * @return if this list contains no elements
     */
    public boolean isEmpty() {
        return priorities.isEmpty();
    }

    /**
     * Get the list of priorities ordered by priority. Sorts the internal
     * list in place and returns it directly.
     *
     * @return ordered list of priorities
     */
    public List<WithPriority<P>> getPriorities() {
        return sort();
    }

    /**
     * Get the list of priorized objects ordered by highest priority.
     *
     * @return list of objects ordered by highest priority
     */
    public List<P> getOrdered() {
        return sort().stream().map(WithPriority::getPriorized).collect(toList());
    }

    // Sorts the backing list in place using WithPriority's comparator and
    // returns the same instance.
    private List<WithPriority<P>> sort() {
        priorities.sort(WithPriority.comparator());
        return priorities;
    }
}
|
apache-2.0
|
CarloMicieli/java8-for-hipsters
|
src/test/java/io/github/carlomicieli/java8/football/PlayerTests.java
|
5683
|
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.carlomicieli.java8.football;
import org.junit.Test;
import java.time.LocalDate;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
import static org.junit.Assert.assertThat;
/**
* @author Carlo Micieli
*/
// Unit tests for Player: construction, optional fields (team, jersey number,
// years pro), unit conversions, and equals/hashCode contract.
public class PlayerTests {
    @Test
    public void shouldCreateNewPlayers() {
        Player patrickWillis = createPlayerForPatrickWillis();
        assertThatPlayer_Is_PatrickWillis(patrickWillis);
    }

    @Test
    public void playersShouldHaveOptionalTeam() {
        Player withTeam = createPlayerWithTeam("SF");
        Player withoutTeam = createPlayerWithoutTeam();
        assertThat(withTeam.getTeam().get(), is(equalTo("SF")));
        assertThat(withoutTeam.getTeam().orElse(null), is(nullValue()));
    }

    @Test
    public void playersShouldHaveOptionalJerseyNumber() {
        Player withNumber = createPlayerWithNumber(99);
        Player withoutNumber = createPlayerWithoutNumber();
        assertThat(withNumber.getNumber().get(), is(equalTo(99)));
        assertThat(withoutNumber.getNumber().orElse(null), is(nullValue()));
    }

    @Test
    public void playersShouldHaveOptionalNumberOfYearsAmongPro() {
        Player withYearsPro = createPlayerWithYearsPro(9);
        Player withoutYearsPro = createPlayerWithoutYearsPro();
        assertThat(withYearsPro.getYearsPro().get(), is(equalTo(9)));
        assertThat(withoutYearsPro.getYearsPro().orElse(null), is(nullValue()));
    }

    // Height is stored in inches (73 in = 185 cm, rounded).
    @Test
    public void shouldReturnPlayersHeightInCentimeters() {
        Player patrickWillis = createPlayerForPatrickWillis();
        assertThat(patrickWillis.heightInCentimeters(), is(equalTo(185)));
    }

    // Weight is stored in pounds (240 lb = 109 kg, rounded).
    @Test
    public void shouldReturnPlayersWeightInKilograms() {
        Player patrickWillis = createPlayerForPatrickWillis();
        assertThat(patrickWillis.weightInKg(), is(equalTo(109)));
    }

    @Test
    public void shouldCheckWhetherTwoPlayersAreDifferent() {
        Player x = createPlayerForPatrickWillis();
        Player y = createAnotherPlayer();
        assertThat(x.equals(y), is(equalTo(false)));
    }

    // Checks reflexivity and equality of two identically-built players.
    @Test
    public void shouldCheckWhetherTwoPlayersAreEquals() {
        Player x = createPlayerForPatrickWillis();
        Player y = createPlayerForPatrickWillis();
        assertThat(x.equals(x), is(equalTo(true)));
        assertThat(x.equals(y), is(equalTo(true)));
    }

    // Equal players must produce equal hash codes (equals/hashCode contract).
    @Test
    public void shouldCalculateHashCodeForPlayers() {
        Player x = createPlayerForPatrickWillis();
        Player y = createPlayerForPatrickWillis();
        assertThat(x.hashCode(), is(equalTo(y.hashCode())));
    }

    // ---- fixture helpers: build a generic player varying one optional field ----

    private Player createPlayerWithNumber(int number) {
        return createPlayer(null, number, null);
    }

    private Player createPlayerWithYearsPro(int yearsPro) {
        return createPlayer(null, null, yearsPro);
    }

    private Player createPlayerWithTeam(String team) {
        return createPlayer(team, null, null);
    }

    private Player createPlayerWithoutYearsPro() {
        return createAnotherPlayer();
    }

    private Player createPlayerWithoutTeam() {
        return createAnotherPlayer();
    }

    private Player createPlayerWithoutNumber() {
        return createAnotherPlayer();
    }

    private Player createAnotherPlayer() {
        return createPlayer(null, null, null);
    }

    // Generic "John Doe" player with all optional fields supplied by the caller.
    private Player createPlayer(String team, Integer number, Integer yearsPro) {
        return new Player("John",
                "Doe",
                "FS",
                "Hogwarts",
                "00-0000000",
                team,
                "http://www.nfl.com/player/johndoe/profile",
                LocalDate.of(1981, 9, 9),
                73,
                240,
                number,
                yearsPro);
    }

    // Reference fixture with every field populated.
    private Player createPlayerForPatrickWillis() {
        return new Player("Patrick", "Willis",
                "ILB",
                "Mississippi",
                "00-0025398",
                "SF",
                "http://www.nfl.com/player/patrickwillis/2495781/profile",
                LocalDate.of(1985, 1, 25),
                73,
                240,
                52,
                8);
    }

    // Asserts every field of the reference fixture.
    private void assertThatPlayer_Is_PatrickWillis(Player p) {
        assertThat(p.getFirstName(), is(equalTo("Patrick")));
        assertThat(p.getLastName(), is(equalTo("Willis")));
        assertThat(p.getCollege(), is(equalTo("Mississippi")));
        assertThat(p.getPlayerId(), is(equalTo("00-0025398")));
        assertThat(p.getTeam().orElse(null), is(equalTo("SF")));
        assertThat(p.getProfileUrl(), is(equalTo("http://www.nfl.com/player/patrickwillis/2495781/profile")));
        assertThat(p.getBirthdate(), is(equalTo(LocalDate.of(1985, 1, 25))));
        assertThat(p.getHeight(), is(equalTo(73)));
        assertThat(p.getWeight(), is(equalTo(240)));
        assertThat(p.getNumber().orElse(null), is(equalTo(52)));
        assertThat(p.getYearsPro().orElse(null), is(equalTo(8)));
    }
}
|
apache-2.0
|
donNewtonAlpha/onos
|
drivers/lumentum/src/main/java/org/onosproject/drivers/lumentum/LumentumSdnRoadmFlowRuleProgrammable.java
|
17118
|
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.drivers.lumentum;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.tuple.Pair;
import org.onosproject.net.ChannelSpacing;
import org.onosproject.net.GridType;
import org.onosproject.net.OchSignal;
import org.onosproject.net.OchSignalType;
import org.onosproject.net.Port;
import org.onosproject.net.PortNumber;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.driver.AbstractHandlerBehaviour;
import org.onosproject.net.flow.DefaultFlowEntry;
import org.onosproject.net.flow.DefaultFlowRule;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.FlowEntry;
import org.onosproject.net.flow.FlowId;
import org.onosproject.net.flow.FlowRule;
import org.onosproject.net.flow.FlowRuleProgrammable;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.flow.criteria.Criteria;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.snmp4j.PDU;
import org.snmp4j.event.ResponseEvent;
import org.snmp4j.smi.Integer32;
import org.snmp4j.smi.OID;
import org.snmp4j.smi.UnsignedInteger32;
import org.snmp4j.smi.VariableBinding;
import org.snmp4j.util.TreeEvent;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import static com.google.common.base.Preconditions.checkArgument;
// TODO: need to convert between OChSignal and XC channel number
public class LumentumSdnRoadmFlowRuleProgrammable extends AbstractHandlerBehaviour implements FlowRuleProgrammable {
private static final Logger log =
LoggerFactory.getLogger(LumentumSdnRoadmFlowRuleProgrammable.class);
// Default values
private static final int DEFAULT_TARGET_GAIN_PREAMP = 150;
private static final int DEFAULT_TARGET_GAIN_BOOSTER = 200;
private static final int DISABLE_CHANNEL_TARGET_POWER = -650;
private static final int DEFAULT_CHANNEL_TARGET_POWER = -30;
private static final int DISABLE_CHANNEL_ABSOLUTE_ATTENUATION = 160;
private static final int DEFAULT_CHANNEL_ABSOLUTE_ATTENUATION = 50;
private static final int DISABLE_CHANNEL_ADD_DROP_PORT_INDEX = 1;
private static final int OUT_OF_SERVICE = 1;
private static final int IN_SERVICE = 2;
private static final int OPEN_LOOP = 1;
private static final int CLOSED_LOOP = 2;
// First 20 ports are add/mux ports, next 20 are drop/demux
private static final int DROP_PORT_OFFSET = 20;
// OIDs
private static final String CTRL_AMP_MODULE_SERVICE_STATE_PREAMP = ".1.3.6.1.4.1.46184.1.4.4.1.2.1";
private static final String CTRL_AMP_MODULE_SERVICE_STATE_BOOSTER = ".1.3.6.1.4.1.46184.1.4.4.1.2.2";
private static final String CTRL_AMP_MODULE_TARGET_GAIN_PREAMP = ".1.3.6.1.4.1.46184.1.4.4.1.8.1";
private static final String CTRL_AMP_MODULE_TARGET_GAIN_BOOSTER = ".1.3.6.1.4.1.46184.1.4.4.1.8.2";
private static final String CTRL_CHANNEL_STATE = ".1.3.6.1.4.1.46184.1.4.2.1.3.";
private static final String CTRL_CHANNEL_MODE = ".1.3.6.1.4.1.46184.1.4.2.1.4.";
private static final String CTRL_CHANNEL_TARGET_POWER = ".1.3.6.1.4.1.46184.1.4.2.1.6.";
private static final String CTRL_CHANNEL_ADD_DROP_PORT_INDEX = ".1.3.6.1.4.1.46184.1.4.2.1.13.";
private static final String CTRL_CHANNEL_ABSOLUTE_ATTENUATION = ".1.3.6.1.4.1.46184.1.4.2.1.5.";
private LumentumSnmpDevice snmp;
@Override
// Reads the installed cross-connects from the device over SNMP and maps
// them to FlowEntry objects. Returns an empty list when the device cannot
// be reached or exposes fewer than two ports.
public Collection<FlowEntry> getFlowEntries() {
    try {
        snmp = new LumentumSnmpDevice(handler().data().deviceId());
    } catch (IOException e) {
        log.error("Failed to connect to device: ", e);
        return Collections.emptyList();
    }

    // Line in is last but one port, line out is last
    DeviceService deviceService = this.handler().get(DeviceService.class);
    List<Port> ports = deviceService.getPorts(data().deviceId());
    if (ports.size() < 2) {
        return Collections.emptyList();
    }
    PortNumber lineIn = ports.get(ports.size() - 2).number();
    PortNumber lineOut = ports.get(ports.size() - 1).number();

    Collection<FlowEntry> entries = Lists.newLinkedList();

    // Add rules: channel-state OID bank "1" is the add/mux side (see the
    // "1."/"2." suffix convention used in installCrossConnect).
    OID addOid = new OID(CTRL_CHANNEL_STATE + "1");
    entries.addAll(
            fetchRules(addOid, true, lineOut).stream()
                    .map(fr -> new DefaultFlowEntry(fr, FlowEntry.FlowEntryState.ADDED, 0, 0, 0))
                    .collect(Collectors.toList())
    );

    // Drop rules: bank "2" is the drop/demux side.
    OID dropOid = new OID(CTRL_CHANNEL_STATE + "2");
    entries.addAll(
            fetchRules(dropOid, false, lineIn).stream()
                    .map(fr -> new DefaultFlowEntry(fr, FlowEntry.FlowEntryState.ADDED, 0, 0, 0))
                    .collect(Collectors.toList())
    );

    return entries;
}
@Override
// Installs the given flow rules on the device as cross-connects and
// returns the subset that was actually applied. Successfully applied
// rules are cached (cookie/priority) for later lookup.
public Collection<FlowRule> applyFlowRules(Collection<FlowRule> rules) {
    try {
        snmp = new LumentumSnmpDevice(data().deviceId());
    } catch (IOException e) {
        log.error("Failed to connect to device: ", e);
        // Bail out: without a connection installCrossConnect would
        // dereference a null/stale snmp handle (previously an NPE).
        return Collections.emptyList();
    }

    // Line ports: the two highest-numbered ports are the line side.
    DeviceService deviceService = this.handler().get(DeviceService.class);
    List<Port> ports = deviceService.getPorts(data().deviceId());
    if (ports.size() < 2) {
        // Guard subList below, consistent with getFlowEntries().
        return Collections.emptyList();
    }
    List<PortNumber> linePorts = ports.subList(ports.size() - 2, ports.size()).stream()
            .map(p -> p.number())
            .collect(Collectors.toList());

    // Apply the valid rules on the device
    Collection<FlowRule> added = rules.stream()
            .map(r -> new CrossConnectFlowRule(r, linePorts))
            .filter(xc -> installCrossConnect(xc))
            .collect(Collectors.toList());

    // Cache the cookie/priority
    CrossConnectCache cache = this.handler().get(CrossConnectCache.class);
    added.forEach(xc -> cache.set(
            Objects.hash(data().deviceId(), xc.selector(), xc.treatment()),
            xc.id(),
            xc.priority()));

    return added;
}
@Override
// Removes the given flow rules (cross-connects) from the device and
// returns the subset that was actually removed; their cache entries are
// evicted as well.
public Collection<FlowRule> removeFlowRules(Collection<FlowRule> rules) {
    try {
        snmp = new LumentumSnmpDevice(data().deviceId());
    } catch (IOException e) {
        log.error("Failed to connect to device: ", e);
        // Bail out: without a connection removeCrossConnect would
        // dereference a null/stale snmp handle (previously an NPE).
        return Collections.emptyList();
    }

    // Line ports: the two highest-numbered ports are the line side.
    DeviceService deviceService = this.handler().get(DeviceService.class);
    List<Port> ports = deviceService.getPorts(data().deviceId());
    if (ports.size() < 2) {
        // Guard subList below, consistent with getFlowEntries().
        return Collections.emptyList();
    }
    List<PortNumber> linePorts = ports.subList(ports.size() - 2, ports.size()).stream()
            .map(p -> p.number())
            .collect(Collectors.toList());

    // Apply the valid rules on the device
    Collection<FlowRule> removed = rules.stream()
            .map(r -> new CrossConnectFlowRule(r, linePorts))
            .filter(xc -> removeCrossConnect(xc))
            .collect(Collectors.toList());

    // Remove flow rule from cache
    CrossConnectCache cache = this.handler().get(CrossConnectCache.class);
    removed.forEach(xc -> cache.remove(
            Objects.hash(data().deviceId(), xc.selector(), xc.treatment())));

    return removed;
}
// Installs a cross connect on the device by building a single SNMP SET PDU
// that enables the amplifiers, sets gains, maps the channel to its add/drop
// port and finally enables the channel. NOTE: always returns true — the
// SNMP response is not parsed (see TODO below), so failures are only logged.
private boolean installCrossConnect(CrossConnectFlowRule xc) {
    int channel = toChannel(xc.ochSignal());
    long addDrop = xc.addDrop().toLong();
    // Drop (demux) ports are numbered after the 20 add ports; convert to
    // the device's 1-based drop index.
    if (!xc.isAddRule()) {
        addDrop -= DROP_PORT_OFFSET;
    }

    // Create the PDU object
    PDU pdu = new PDU();
    pdu.setType(PDU.SET);

    // Enable preamp & booster
    List<OID> oids = Arrays.asList(new OID(CTRL_AMP_MODULE_SERVICE_STATE_PREAMP),
            new OID(CTRL_AMP_MODULE_SERVICE_STATE_BOOSTER));
    oids.forEach(
            oid -> pdu.add(new VariableBinding(oid, new Integer32(IN_SERVICE)))
    );

    // Set target gain on preamp & booster
    OID ctrlAmpModuleTargetGainPreamp = new OID(CTRL_AMP_MODULE_TARGET_GAIN_PREAMP);
    pdu.add(new VariableBinding(ctrlAmpModuleTargetGainPreamp, new Integer32(DEFAULT_TARGET_GAIN_PREAMP)));
    OID ctrlAmpModuleTargetGainBooster = new OID(CTRL_AMP_MODULE_TARGET_GAIN_BOOSTER);
    pdu.add(new VariableBinding(ctrlAmpModuleTargetGainBooster, new Integer32(DEFAULT_TARGET_GAIN_BOOSTER)));

    // Make cross connect: OID suffix "1." addresses the add bank, "2." the
    // drop bank, followed by the channel number.
    OID ctrlChannelAddDropPortIndex = new OID(CTRL_CHANNEL_ADD_DROP_PORT_INDEX +
            (xc.isAddRule() ? "1." : "2.") + channel);
    pdu.add(new VariableBinding(ctrlChannelAddDropPortIndex, new UnsignedInteger32(addDrop)));

    // Add rules use closed loop, drop rules open loop
    // Add rules are set to target power, drop rules are attenuated
    if (xc.isAddRule()) {
        OID ctrlChannelMode = new OID(CTRL_CHANNEL_MODE + "1." + channel);
        pdu.add(new VariableBinding(ctrlChannelMode, new Integer32(CLOSED_LOOP)));

        OID ctrlChannelTargetPower = new OID(CTRL_CHANNEL_TARGET_POWER + "1." + channel);
        pdu.add(new VariableBinding(ctrlChannelTargetPower, new Integer32(DEFAULT_CHANNEL_TARGET_POWER)));
    } else {
        OID ctrlChannelMode = new OID(CTRL_CHANNEL_MODE + "2." + channel);
        pdu.add(new VariableBinding(ctrlChannelMode, new Integer32(OPEN_LOOP)));

        OID ctrlChannelAbsoluteAttenuation = new OID(CTRL_CHANNEL_ABSOLUTE_ATTENUATION + "2." + channel);
        pdu.add(new VariableBinding(
                ctrlChannelAbsoluteAttenuation, new UnsignedInteger32(DEFAULT_CHANNEL_ABSOLUTE_ATTENUATION)));
    }

    // Final step is to enable the channel
    OID ctrlChannelState = new OID(CTRL_CHANNEL_STATE + (xc.isAddRule() ? "1." : "2.") + channel);
    pdu.add(new VariableBinding(ctrlChannelState, new Integer32(IN_SERVICE)));

    try {
        ResponseEvent response = snmp.set(pdu);
        // TODO: parse response
    } catch (IOException e) {
        log.error("Failed to create cross connect, unable to connect to device: ", e);
    }

    return true;
}
// Removes a cross connect from the device: disables the channel, restores the
// default add/drop port, reverts to open-loop mode and resets power/attenuation.
// Returns false when the device cannot be reached.
private boolean removeCrossConnect(CrossConnectFlowRule xc) {
    int channel = toChannel(xc.ochSignal());
    // Create the PDU object carrying the whole teardown in one SET request.
    PDU pdu = new PDU();
    pdu.setType(PDU.SET);
    // Disable the channel ("1." = add side, "2." = drop side).
    OID ctrlChannelState = new OID(CTRL_CHANNEL_STATE + (xc.isAddRule() ? "1." : "2.") + channel);
    pdu.add(new VariableBinding(ctrlChannelState, new Integer32(OUT_OF_SERVICE)));
    // Put the cross connect back into the default port.
    OID ctrlChannelAddDropPortIndex = new OID(CTRL_CHANNEL_ADD_DROP_PORT_INDEX +
            (xc.isAddRule() ? "1." : "2.") + channel);
    pdu.add(new VariableBinding(ctrlChannelAddDropPortIndex,
            new UnsignedInteger32(DISABLE_CHANNEL_ADD_DROP_PORT_INDEX)));
    // Put port/channel back to open loop.
    OID ctrlChannelMode = new OID(CTRL_CHANNEL_MODE + (xc.isAddRule() ? "1." : "2.") + channel);
    pdu.add(new VariableBinding(ctrlChannelMode, new Integer32(OPEN_LOOP)));
    // Add rules reset the target power, drop rules reset the attenuation.
    if (xc.isAddRule()) {
        OID ctrlChannelTargetPower = new OID(CTRL_CHANNEL_TARGET_POWER + "1." + channel);
        pdu.add(new VariableBinding(ctrlChannelTargetPower, new Integer32(DISABLE_CHANNEL_TARGET_POWER)));
    } else {
        OID ctrlChannelAbsoluteAttenuation = new OID(CTRL_CHANNEL_ABSOLUTE_ATTENUATION + "2." + channel);
        pdu.add(new VariableBinding(
                ctrlChannelAbsoluteAttenuation, new UnsignedInteger32(DISABLE_CHANNEL_ABSOLUTE_ATTENUATION)));
    }
    try {
        ResponseEvent response = snmp.set(pdu);
        // TODO: parse response
    } catch (IOException e) {
        log.error("Failed to remove cross connect, unable to connect to device: ", e);
        return false;
    }
    return true;
}
/**
 * Converts an OCh signal to the Lumentum channel ID used in the MIB OIDs.
 * Only 50 GHz spacing within the device's supported frequency range is
 * accepted (enforced by the guards below).
 *
 * @param ochSignal OCh signal
 * @return Lumentum channel ID
 * @throws IllegalArgumentException if the spacing is not 50 GHz or the central
 *         frequency lies outside [START_CENTER_FREQ, END_CENTER_FREQ]
 */
public static int toChannel(OchSignal ochSignal) {
    // FIXME: move to cross connect validation
    checkArgument(ochSignal.channelSpacing() == ChannelSpacing.CHL_50GHZ);
    checkArgument(LumentumSnmpDevice.START_CENTER_FREQ.compareTo(ochSignal.centralFrequency()) <= 0);
    checkArgument(LumentumSnmpDevice.END_CENTER_FREQ.compareTo(ochSignal.centralFrequency()) >= 0);
    return ochSignal.spacingMultiplier() + LumentumSnmpDevice.MULTIPLIER_SHIFT;
}
/**
 * Converts a Lumentum channel ID back to an OCh signal (inverse of
 * {@link #toChannel(OchSignal)}), on the 50 GHz DWDM grid.
 *
 * @param channel Lumentum channel ID, in [1, 96]
 * @return OCh signal
 * @throws IllegalArgumentException if channel is outside [1, 96]
 */
public static OchSignal toOchSignal(int channel) {
    checkArgument(1 <= channel);
    checkArgument(channel <= 96);
    // Last argument is the slot granularity — presumably 4 x 12.5 GHz = 50 GHz;
    // NOTE(review): confirm against the OchSignal constructor contract.
    return new OchSignal(GridType.DWDM, ChannelSpacing.CHL_50GHZ,
            channel - LumentumSnmpDevice.MULTIPLIER_SHIFT, 4);
}
// Looks up the add/drop port currently configured for the given channel by
// walking the corresponding SNMP sub-tree; returns null when not found.
private PortNumber getAddDropPort(int channel, boolean isAddPort) {
    OID root = new OID(CTRL_CHANNEL_ADD_DROP_PORT_INDEX + (isAddPort ? "1" : "2"));
    for (TreeEvent event : snmp.get(root)) {
        if (event == null) {
            return null;
        }
        for (VariableBinding binding : event.getVariableBindings()) {
            if (binding.getOid().last() != channel) {
                continue;
            }
            int port = binding.getVariable().toInt();
            // Drop-side ports carry a fixed numbering offset.
            return PortNumber.portNumber(isAddPort ? port : port + DROP_PORT_OFFSET);
        }
    }
    return null;
}
// Reconstructs the flow entries currently installed on the device from every
// channel whose state OID under `oid` reports IN_SERVICE. Channels whose
// add/drop port cannot be resolved, or which have no cached flow ID/priority,
// are skipped.
private List<FlowRule> fetchRules(OID oid, boolean isAdd, PortNumber linePort) {
    List<FlowRule> rules = new LinkedList<>();
    for (TreeEvent event : snmp.get(oid)) {
        if (event == null) {
            continue;
        }
        VariableBinding[] varBindings = event.getVariableBindings();
        for (VariableBinding varBinding : varBindings) {
            CrossConnectCache cache = this.handler().get(CrossConnectCache.class);
            if (varBinding.getVariable().toInt() == IN_SERVICE) {
                // The channel number is the last sub-identifier of the OID.
                int channel = varBinding.getOid().removeLast();
                PortNumber addDropPort = getAddDropPort(channel, isAdd);
                if (addDropPort == null) {
                    continue;
                }
                // Rebuild the selector/treatment pair this cross connect represents:
                // add rules go add-port -> line-port, drop rules the reverse.
                TrafficSelector selector = DefaultTrafficSelector.builder()
                        .matchInPort(isAdd ? addDropPort : linePort)
                        .add(Criteria.matchOchSignalType(OchSignalType.FIXED_GRID))
                        .add(Criteria.matchLambda(toOchSignal(channel)))
                        .build();
                TrafficTreatment treatment = DefaultTrafficTreatment.builder()
                        .setOutput(isAdd ? linePort : addDropPort)
                        .build();
                // Lookup flow ID and priority previously stored by addFlowRules.
                int hash = Objects.hash(data().deviceId(), selector, treatment);
                Pair<FlowId, Integer> lookup = cache.get(hash);
                if (lookup == null) {
                    continue;
                }
                FlowRule fr = DefaultFlowRule.builder()
                        .forDevice(data().deviceId())
                        .makePermanent()
                        .withSelector(selector)
                        .withTreatment(treatment)
                        .withPriority(lookup.getRight())
                        .withCookie(lookup.getLeft().value())
                        .build();
                rules.add(fr);
            }
        }
    }
    return rules;
}
}
|
apache-2.0
|
iBotPeaches/Apktool
|
brut.apktool/apktool-lib/src/test/java/brut/androlib/aapt2/BuildAndDecodeTest.java
|
5032
|
/*
* Copyright (C) 2010 Ryszard Wiśniewski <[email protected]>
* Copyright (C) 2010 Connor Tumbleson <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package brut.androlib.aapt2;
import brut.androlib.*;
import brut.androlib.meta.MetaInfo;
import brut.androlib.options.BuildOptions;
import brut.common.BrutException;
import brut.directory.ExtFile;
import brut.util.OS;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import static org.junit.Assert.*;
/**
 * Round-trip test for the aapt2 pipeline: builds the bundled "testapp" sources
 * into an APK once, decodes the result, and then compares individual resources
 * of the decoded output against the originals.
 */
public class BuildAndDecodeTest extends BaseTest {

    /** Builds testapp.apk from the test resources and decodes it once for all tests. */
    @BeforeClass
    public static void beforeClass() throws Exception {
        TestUtils.cleanFrameworkFile();
        sTmpDir = new ExtFile(OS.createTempDirectory());
        sTestOrigDir = new ExtFile(sTmpDir, "testapp-orig");
        sTestNewDir = new ExtFile(sTmpDir, "testapp-new");
        LOGGER.info("Unpacking testapp...");
        TestUtils.copyResourceDir(BuildAndDecodeTest.class, "aapt2/testapp/", sTestOrigDir);
        // Force the aapt2 code path; verbose for easier CI debugging.
        BuildOptions buildOptions = new BuildOptions();
        buildOptions.useAapt2 = true;
        buildOptions.verbose = true;
        LOGGER.info("Building testapp.apk...");
        File testApk = new File(sTmpDir, "testapp.apk");
        new Androlib(buildOptions).build(sTestOrigDir, testApk);
        LOGGER.info("Decoding testapp.apk...");
        ApkDecoder apkDecoder = new ApkDecoder(testApk);
        apkDecoder.setOutDir(sTestNewDir);
        apkDecoder.decode();
    }

    /** Removes the shared temporary build/decode directory. */
    @AfterClass
    public static void afterClass() throws BrutException {
        OS.rmdir(sTmpDir);
    }

    // Sanity check: the decode step produced an output directory at all.
    @Test
    public void buildAndDecodeTest() {
        assertTrue(sTestNewDir.isDirectory());
    }

    // --- value resources: decoded XML must match the original sources ---
    @Test
    public void valuesStringsTest() throws BrutException {
        compareValuesFiles("values/strings.xml");
    }

    @Test
    public void valuesColorsTest() throws BrutException {
        compareValuesFiles("values/colors.xml");
    }

    @Test
    public void valuesBoolsTest() throws BrutException {
        compareValuesFiles("values/bools.xml");
    }

    @Test
    public void valuesMaxLengthTest() throws BrutException {
        compareValuesFiles("values-es/strings.xml");
    }

    // --- apktool.yml metadata around zero-byte files ---
    @Test
    public void confirmZeroByteFileExtensionIsNotStored() throws BrutException {
        MetaInfo metaInfo = new Androlib().readMetaFile(sTestNewDir);
        assertFalse(metaInfo.doNotCompress.contains("jpg"));
    }

    @Test
    public void confirmZeroByteFileIsStored() throws BrutException {
        MetaInfo metaInfo = new Androlib().readMetaFile(sTestNewDir);
        assertTrue(metaInfo.doNotCompress.contains("assets/0byte_file.jpg"));
    }

    // --- individual XML resources ---
    @Test
    public void navigationResourceTest() throws BrutException {
        compareXmlFiles("res/navigation/nav_graph.xml");
    }

    @Test
    public void xmlIdsEmptyTest() throws BrutException {
        compareXmlFiles("res/values/ids.xml");
    }

    // Resource names starting with '$' (generated AVD sub-drawables) must survive.
    @Test
    public void leadingDollarSignResourceNameTest() throws BrutException {
        compareXmlFiles("res/drawable/$avd_hide_password__0.xml");
        compareXmlFiles("res/drawable/$avd_show_password__0.xml");
        compareXmlFiles("res/drawable/$avd_show_password__1.xml");
        compareXmlFiles("res/drawable/$avd_show_password__2.xml");
        compareXmlFiles("res/drawable/avd_show_password.xml");
    }

    @Test
    public void samsungQmgFilesHandledTest() throws IOException, BrutException {
        compareBinaryFolder("drawable-xhdpi", true);
    }

    @Test
    public void confirmManifestStructureTest() throws BrutException {
        compareXmlFiles("AndroidManifest.xml");
    }

    @Test
    public void xmlXsdFileTest() throws BrutException {
        compareXmlFiles("res/xml/ww_box_styles_schema.xsd");
    }

    // --- dex handling: multi-dex and single-dex APKs round-trip ---
    @Test
    public void multipleDexTest() throws BrutException, IOException {
        compareBinaryFolder("/smali_classes2", false);
        compareBinaryFolder("/smali_classes3", false);
        File classes2Dex = new File(sTestOrigDir, "build/apk/classes2.dex");
        File classes3Dex = new File(sTestOrigDir, "build/apk/classes3.dex");
        assertTrue(classes2Dex.isFile());
        assertTrue(classes3Dex.isFile());
    }

    @Test
    public void singleDexTest() throws BrutException, IOException {
        compareBinaryFolder("/smali", false);
        File classesDex = new File(sTestOrigDir, "build/apk/classes.dex");
        assertTrue(classesDex.isFile());
    }
}
|
apache-2.0
|
dbflute-example/dbflute-example-with-non-rdb
|
etc/tools/rediscluster/redis-cli.sh
|
100
|
#!/bin/sh
# Open an interactive redis-cli session (cluster mode, port 7000) inside the
# project's Redis Cluster container.
. ./_project.sh
# NOTE(review): the variable is spelled DOKER_NAME_PREFIX — presumably defined
# with the same spelling in _project.sh; confirm there before renaming.
docker exec -i -t ${DOKER_NAME_PREFIX}redis-cluster redis-cli -p 7000 -c
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Orchidaceae/Bulbophyllum/Bulbophyllum orbiculare/README.md
|
189
|
# Bulbophyllum orbiculare J.J.Sm. SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
#### Remarks
null
|
apache-2.0
|
everttigchelaar/camel-svn
|
camel-core/src/test/java/org/apache/camel/impl/ShutdownRouteGracefulTimeoutTriggerTest.java
|
2779
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.impl;
import java.util.concurrent.TimeUnit;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
/**
* @version
*/
/**
 * Tests that a graceful route shutdown gives up once the timeout expires:
 * the slow "seda" route is stopped before all pending messages have been
 * processed, while the unrelated "bar" route keeps running.
 */
public class ShutdownRouteGracefulTimeoutTriggerTest extends ContextTestSupport {

    // Accumulates processed bodies; "stop" is appended right before shutdown,
    // so "stopABCDE" would mean every pending message still got through.
    private static String foo = "";

    public void testShutdownRouteGraceful() throws Exception {
        getMockEndpoint("mock:foo").expectedMessageCount(1);

        template.sendBody("seda:foo", "A");
        template.sendBody("seda:foo", "B");
        template.sendBody("seda:foo", "C");
        template.sendBody("seda:foo", "D");
        template.sendBody("seda:foo", "E");

        assertMockEndpointsSatisfied();

        // now stop the route before it is complete
        foo = foo + "stop";

        // timeout after 2 seconds
        context.shutdownRoute("seda", 2, TimeUnit.SECONDS);

        // Should NOT have completed all messages, as the timeout occurred first.
        // Bug fix: assertNotSame compares object identity and trivially passes
        // for a freshly concatenated String, making the assertion vacuous;
        // compare by value instead (also fixes "not able able" in the message).
        assertFalse("Should not be able to complete all pending messages", "stopABCDE".equals(foo));
        assertEquals("bar should still be running", true, context.getRouteStatus("bar").isStarted());
        assertEquals("Seda should be stopped", true, context.getRouteStatus("seda").isStopped());
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Each message takes ~1s (delay), so a 2s shutdown timeout
                // cannot drain the 5 pending messages.
                from("seda:foo").routeId("seda").to("mock:foo").delay(1000).process(new Processor() {
                    public void process(Exchange exchange) throws Exception {
                        foo = foo + exchange.getIn().getBody(String.class);
                    }
                });

                from("direct:bar").routeId("bar").to("mock:bar");
            }
        };
    }
}
|
apache-2.0
|
ruspl-afed/dbeaver
|
plugins/org.jkiss.dbeaver.ext.sqlite/src/org/jkiss/dbeaver/ext/sqlite/model/SQLiteSQLDialect.java
|
1391
|
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2017 Serge Rider ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.sqlite.model;
import org.jkiss.dbeaver.ext.generic.model.GenericSQLDialect;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCDatabaseMetaData;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCDataSource;
import org.jkiss.dbeaver.model.impl.sql.BasicSQLDialect;
import org.jkiss.dbeaver.model.sql.SQLConstants;
/**
 * SQL dialect for SQLite data sources. Behavior is inherited from the generic
 * dialect; only the identifier quote strings are pinned to the basic defaults.
 */
public class SQLiteSQLDialect extends GenericSQLDialect {

    public SQLiteSQLDialect() {
        super("SQLite");
    }

    // Delegates to the generic driver-settings initialization unchanged.
    public void initDriverSettings(JDBCDataSource dataSource, JDBCDatabaseMetaData metaData) {
        super.initDriverSettings(dataSource, metaData);
    }

    // SQLite uses the basic/default identifier quoting.
    public String[][] getIdentifierQuoteStrings() {
        return BasicSQLDialect.DEFAULT_QUOTE_STRINGS;
    }
}
|
apache-2.0
|
mongodb/mongo-cxx-driver
|
benchmark/multi_doc/gridfs_download.hpp
|
2561
|
// Copyright 2017 MongoDB Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include "../microbench.hpp"
#include <algorithm>
#include <fstream>
#include <bsoncxx/stdx/make_unique.hpp>
#include <bsoncxx/stdx/optional.hpp>
#include <mongocxx/client.hpp>
#include <mongocxx/gridfs/bucket.hpp>
#include <mongocxx/instance.hpp>
#include <mongocxx/uri.hpp>
namespace benchmark {
using bsoncxx::builder::basic::kvp;
using bsoncxx::builder::basic::make_document;
using bsoncxx::stdx::make_unique;
// Measures GridFS download throughput: setup() uploads one file into a fresh
// database, task() downloads it back chunk-by-chunk and discards the bytes.
class gridfs_download : public microbench {
   public:
    // The task size comes from the Driver Performance Benchmarking Reference Doc.
    gridfs_download(std::string file_name)
        : microbench{"TestGridFsDownload",
                     52.43,
                     std::set<benchmark_type>{benchmark_type::multi_bench,
                                              benchmark_type::read_bench}},
          _conn{mongocxx::uri{}},
          _file_name{std::move(file_name)} {}

    void setup();

    void teardown();

   protected:
    void task();

   private:
    mongocxx::client _conn;            // connection reused across iterations
    mongocxx::gridfs::bucket _bucket;  // bucket created in setup()
    // id of the uploaded file; set in setup(), read in task()
    bsoncxx::stdx::optional<bsoncxx::types::bson_value::view> _id;
    std::string _file_name;            // path of the file to upload/download
};
// Uploads _file_name into a fresh GridFS bucket in the "perftest" database
// (dropped first so reruns start clean) and records the resulting file id.
void gridfs_download::setup() {
    mongocxx::database db = _conn["perftest"];
    db.drop();

    std::ifstream stream{_file_name};

    _bucket = db.gridfs_bucket();
    auto result = _bucket.upload_from_stream(_file_name, &stream);
    _id = result.id();
}
// Drops the benchmark database created by setup().
void gridfs_download::teardown() {
    _conn["perftest"].drop();
}
// The timed operation: downloads the previously uploaded file by repeatedly
// reading into a single buffer until the stream is exhausted. The loop body
// is intentionally empty — only the transfer itself is being measured.
void gridfs_download::task() {
    auto downloader = _bucket.open_download_stream(_id.value());
    auto file_length = downloader.file_length();
    // One buffer, no larger than a single GridFS chunk (or the whole file if smaller).
    auto buffer_size = std::min(file_length, static_cast<std::int64_t>(downloader.chunk_size()));
    auto buffer = make_unique<std::uint8_t[]>(static_cast<std::size_t>(buffer_size));

    while (auto length_read =
               downloader.read(buffer.get(), static_cast<std::size_t>(buffer_size))) {
    }
}
} // namespace benchmark
|
apache-2.0
|
sadikovi/pulsar
|
analytics/tests/integrationtest_service.py
|
3628
|
#!/usr/bin/env python
# import libs
import unittest
import sys
import os
import random
import uuid
# import classes
import analytics.utils.misc as misc
import analytics.exceptions.exceptions as ex
import analytics.service as service
from analytics.datamanager.datamanager import DataManager
class IntegrationTestSequence(unittest.TestCase):
    """End-to-end tests that run queries through analytics.service against the
    sample datasets bundled under tests/datasets.

    NOTE(review): this module targets Python 2 (see the ``print`` statements
    at module level, and ``random.choice(self.datasets.keys())`` which needs
    ``keys()`` to return a list).
    """

    def setUp(self):
        # Load every dataset shipped next to this test module.
        filepath = os.path.dirname(os.path.realpath(__file__))
        self.integrationpath = os.path.join(filepath, "datasets")
        self.datamanager = DataManager()
        self.datamanager.loadDatasets(self.integrationpath)
        self.datasets = self.datamanager._datasets

    def test_service_default(self):
        # An empty query against a random dataset should succeed (code 200).
        query = ""
        datasetId = random.choice(self.datasets.keys())
        result = service.requestData(datasetId, query, self.datamanager)
        self.assertEqual(result["status"], "success")
        self.assertEqual(result["code"], 200)

    def test_service_wrongquery(self):
        # A random hex blob is not a valid query: expect an error (code 400).
        query = uuid.uuid4().hex
        datasetId = random.choice(self.datasets.keys())
        result = service.requestData(datasetId, query, self.datamanager)
        self.assertEqual(result["status"], "error")
        self.assertEqual(result["code"], 400)

    def test_service_simpleQuery(self):
        # A well-formed pulses query with static/dynamic conditions succeeds.
        query = """select from ${pulses}
where @1b4cf15c86ec31cd8838feab0f9856b1 |is| static
and @1b4cf15c86ec31cd8838feab0f9856b1 = 2
and @b6db26b3972932b2862dac41cbb1493d = [up]"""
        datasetId = random.choice(self.datasets.keys())
        result = service.requestData(datasetId, query, self.datamanager)
        self.assertEqual(result["status"], "success")
        self.assertEqual(result["code"], 200)

    def test_service_selectCluster(self):
        # Selecting a cluster by id succeeds.
        query = """select from ${clusters}
where @id = [bc27b4dbbc0f34f9ae8e4b72f2d51b60]"""
        datasetId = random.choice(self.datasets.keys())
        result = service.requestData(datasetId, query, self.datamanager)
        self.assertEqual(result["status"], "success")
        self.assertEqual(result["code"], 200)

    def service_warnings(self, warn=True):
        """Run a query expected to generate one warning; helper, not a test.

        `warn` toggles the service's warnings flag; returns the raw response.
        """
        query = """select from ${pulses}
where @f4b9ea9d3bf239f5a1c80578b0556a5e |is| dynamic"""
        datasetId = random.choice(self.datasets.keys())
        result = service.requestData(
            datasetId,
            query,
            self.datamanager,
            iswarnings=warn
        )
        # result should not fail and should generate warnings
        return result

    def test_service_warnings_on(self):
        # warnings are on by default: exactly one warning message expected
        result = self.service_warnings()
        self.assertEqual(result["status"], "success")
        self.assertEqual(result["code"], 200)
        self.assertEqual(len(result["messages"]), 1)

    def test_service_warnings_off(self):
        # a warning is expected, but we turn warnings off: no messages
        result = self.service_warnings(False)
        self.assertEqual(result["status"], "success")
        self.assertEqual(result["code"], 200)
        self.assertEqual(len(result["messages"]), 0)
# Load test suites
def _suites():
    """Return the TestCase classes that make up this module's suite."""
    return [
        IntegrationTestSequence
    ]
# Load tests
def loadSuites():
    """Build a single global TestSuite aggregating every suite in _suites()."""
    loader = unittest.TestLoader()
    gsuite = unittest.TestSuite()
    for case in _suites():
        gsuite.addTest(loader.loadTestsFromTestCase(case))
    return gsuite
if __name__ == '__main__':
    # Run the integration suite with verbose output when invoked directly.
    # NOTE(review): `print ""` is Python 2 statement syntax — this script will
    # not run under Python 3 as-is.
    suite = loadSuites()
    print ""
    print "### Running tests ###"
    print "-" * 70
    unittest.TextTestRunner(verbosity=2).run(suite)
|
apache-2.0
|
GlenKPeterson/UncleJim
|
src/main/java/org/organicdesign/fp/tuple/Tuple12.java
|
6112
|
// Copyright 2016 PlanBase Inc. & Glen Peterson
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.organicdesign.fp.tuple;
import java.io.Serializable;
import java.util.Objects;
import static org.organicdesign.fp.FunctionUtils.stringify;
// ======================================================================================
// THIS CLASS IS GENERATED BY /tupleGenerator/TupleGenerator.java. DO NOT EDIT MANUALLY!
// ======================================================================================
/**
 Holds 12 items of potentially different types.  Designed to let you easily create immutable
 subclasses (to give your data structures meaningful names) with correct equals(), hashCode(), and
 toString() methods.
 */
public class Tuple12<A,B,C,D,E,F,G,H,I,J,K,L> implements Serializable {

    // For serializable.  Make sure to change whenever internal data format changes.
    // Implemented because implementing serializable only on a sub-class of an
    // immutable class requires a serialization proxy.  That's probably worse than
    // the conceptual burden of all tuples being Serializable.
    // Bug fix: this declaration had been absorbed into the comment above, leaving
    // the Serializable class without an explicit serialVersionUID.
    private static final long serialVersionUID = 20160906065500L;

    // Fields are protected so that sub-classes can make accessor methods with meaningful names.
    protected final A _1;
    protected final B _2;
    protected final C _3;
    protected final D _4;
    protected final E _5;
    protected final F _6;
    protected final G _7;
    protected final H _8;
    protected final I _9;
    protected final J _10;
    protected final K _11;
    protected final L _12;

    /**
     Constructor is protected (not public) for easy inheritance.  Josh Bloch's "Item 1" says public
     static factory methods are better than constructors because they have names, they can return
     an existing object instead of a new one, and they can return a sub-type.  Therefore, you
     have more flexibility with a static factory as part of your public API then with a public
     constructor.
     */
    protected Tuple12(A a, B b, C c, D d, E e, F f, G g, H h, I i, J j, K k, L l) {
        _1 = a; _2 = b; _3 = c; _4 = d; _5 = e; _6 = f; _7 = g; _8 = h; _9 = i;
        _10 = j; _11 = k; _12 = l;
    }

    /** Public static factory method */
    public static <A,B,C,D,E,F,G,H,I,J,K,L> Tuple12<A,B,C,D,E,F,G,H,I,J,K,L>
    of(A a, B b, C c, D d, E e, F f, G g, H h, I i, J j, K k, L l) {
        return new Tuple12<>(a, b, c, d, e, f, g, h, i, j, k, l);
    }

    /** Returns the 1st field */
    public A _1() { return _1; }
    /** Returns the 2nd field */
    public B _2() { return _2; }
    /** Returns the 3rd field */
    public C _3() { return _3; }
    /** Returns the 4th field */
    public D _4() { return _4; }
    /** Returns the 5th field */
    public E _5() { return _5; }
    /** Returns the 6th field */
    public F _6() { return _6; }
    /** Returns the 7th field */
    public G _7() { return _7; }
    /** Returns the 8th field */
    public H _8() { return _8; }
    /** Returns the 9th field */
    public I _9() { return _9; }
    /** Returns the 10th field */
    public J _10() { return _10; }
    /** Returns the 11th field */
    public K _11() { return _11; }
    /** Returns the 12th field */
    public L _12() { return _12; }

    @Override
    public String toString() {
        return getClass().getSimpleName() + "(" +
               stringify(_1) + "," + stringify(_2) + "," +
               stringify(_3) + "," + stringify(_4) + "," + stringify(_5) + "," +
               stringify(_6) + "," + stringify(_7) + "," + stringify(_8) + "," +
               stringify(_9) + "," + stringify(_10) + "," + stringify(_11) + "," +
               stringify(_12) + ")";
    }

    @Override
    public boolean equals(Object other) {
        // Cheapest operation first...
        if (this == other) { return true; }
        if (!(other instanceof Tuple12)) { return false; }
        // Details...
        @SuppressWarnings("rawtypes") final Tuple12 that = (Tuple12) other;
        return Objects.equals(this._1, that._1()) &&
               Objects.equals(this._2, that._2()) &&
               Objects.equals(this._3, that._3()) &&
               Objects.equals(this._4, that._4()) &&
               Objects.equals(this._5, that._5()) &&
               Objects.equals(this._6, that._6()) &&
               Objects.equals(this._7, that._7()) &&
               Objects.equals(this._8, that._8()) &&
               Objects.equals(this._9, that._9()) &&
               Objects.equals(this._10, that._10()) &&
               Objects.equals(this._11, that._11()) &&
               Objects.equals(this._12, that._12());
    }

    @Override
    public int hashCode() {
        // First 2 fields match Tuple2 which implements java.util.Map.Entry as part of the map
        // contract and therefore must match java.util.HashMap.Node.hashCode().
        int ret = 0;
        if (_1 != null) { ret = _1.hashCode(); }
        if (_2 != null) { ret = ret ^ _2.hashCode(); }
        if (_3 != null) { ret = ret + _3.hashCode(); }
        if (_4 != null) { ret = ret + _4.hashCode(); }
        if (_5 != null) { ret = ret + _5.hashCode(); }
        if (_6 != null) { ret = ret + _6.hashCode(); }
        if (_7 != null) { ret = ret + _7.hashCode(); }
        if (_8 != null) { ret = ret + _8.hashCode(); }
        if (_9 != null) { ret = ret + _9.hashCode(); }
        if (_10 != null) { ret = ret + _10.hashCode(); }
        if (_11 != null) { ret = ret + _11.hashCode(); }
        if (_12 != null) { ret = ret + _12.hashCode(); }
        return ret;
    }
}
|
apache-2.0
|
dyzhxsl3897/goliveiptv
|
lobby/src/main/java/com/zhongdan/lobby/bl/ai/chinesechess/engine/SearchEngine.java
|
29025
|
package com.zhongdan.lobby.bl.ai.chinesechess.engine;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.RandomAccessFile;
import java.net.URL;
import java.util.Calendar;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class SearchEngine {
private static Log log = LogFactory.getLog(SearchEngine.class);
public static final int MaxBookMove = 40;// 使用开局库的最大步数
public static final int MaxKiller = 4;// 搜索杀着的最大步数
private static final int BookUnique = 1;// 指示结点类型,下同
private static final int BookMulti = 2;
private static final int HashAlpha = 4;
private static final int HashBeta = 8;
private static final int HashPv = 16;
private static final int ObsoleteValue = -CCEvalue.MaxValue - 1;
private static final int UnknownValue = -CCEvalue.MaxValue - 2;
// private static final int BookUniqueValue = CCEvalue.MaxValue + 1;
// private static final int BookMultiValue = CCEvalue.MaxValue + 2;//推荐使用开局库,值要足够大
public static final int CLOCK_S = 1000;// 1秒=1000毫秒
public static final int CLOCK_M = 1000 * 60;// 1分=60秒
private static final Random rand = new Random();
private MoveNode bestMove = null;
// for search control
private int depth;
private long properTimer, limitTimer;
// 搜索过程的全局变量,包括:
// 1. 搜索树和历史表
private ActiveBoard activeBoard;
private int histTab[][];
/** Sets the board/position this engine searches on. */
public void setActiveBoard(ActiveBoard activeBoard) {
    this.activeBoard = activeBoard;
}
// 2. 搜索选项
private int selectMask, style;// 下棋风格 default = EngineOption.Normal;
private boolean wideQuiesc, futility, nullMove;
// SelectMask:随机性 , WideQuiesc(保守true if Style == EngineOption.solid)
// Futility(true if Style == EngineOption.risky冒进)
// NullMove 是否空着剪裁
private boolean ponder;
// 3. 时间控制参数
private long startTimer, minTimer, maxTimer;
private int startMove;
private boolean stop;
// 4. 统计信息:Main Search Nodes, Quiescence Search Nodes and Hash Nodes
private int nodes, nullNodes, hashNodes, killerNodes, betaNodes, pvNodes, alphaNodes, mateNodes, leafNodes;
private int quiescNullNodes, quiescBetaNodes, quiescPvNodes, quiescAlphaNodes, quiescMateNodes;
private int hitBeta, hitPv, hitAlpha;
// 5. 搜索结果
private int lastScore, pvLineNum;
private MoveNode pvLine[] = new MoveNode[ActiveBoard.MAX_MOVE_NUM];
// 6. Hash and Book Structure
private int hashMask, maxBookPos, bookPosNum;
private HashRecord[] hashList;
private BookRecord[] bookList;
/**
 * Creates an engine with default options and attaches it to the given board.
 *
 * @param chessP the position/board to search on
 */
public SearchEngine(ActiveBoard chessP) {
    this();
    activeBoard = chessP;
}
/**
 * Creates an engine with default options: normal playing style, null-move
 * pruning on, an 8-ply depth limit and a 2-second time budget, plus a
 * 2^17-entry transposition table and 2^14-slot opening-book index.
 */
public SearchEngine() {
    // Position = new ChessPosition();
    histTab = new int[90][90];
    nodes = nullNodes = hashNodes = killerNodes = betaNodes = pvNodes = alphaNodes = mateNodes = leafNodes = 0;
    selectMask = 0; // randomness mask; e.g. (1 << 10) - 1 would enable random move selection
    style = EngineOption.Normal;
    wideQuiesc = style == EngineOption.Solid;   // conservative style widens quiescence search
    futility = style == EngineOption.Risky;     // risky style enables futility pruning
    nullMove = true;
    // Search results
    lastScore = 0;
    pvLineNum = 0;
    // Bug fix: the original filled a *local* array named PvLine and discarded
    // it, leaving the pvLine field's elements null; fill the field instead.
    for (int i = 0; i < ActiveBoard.MAX_MOVE_NUM; i++) {
        pvLine[i] = new MoveNode();
    }
    newHash(17, 14);
    // Search control defaults: depth limit and per-move time budget.
    depth = 8;
    properTimer = CLOCK_S * 2;
    limitTimer = CLOCK_S * 2;
}
// Begin History and Hash Table Procedures

// Allocates the history table, the transposition table (2^HashScale entries)
// and the opening-book slot array (2^BookScale slots), then clears them.
// NOTE(review): bookList elements are left null here (the init loop is
// commented out); they appear to be allocated later in loadBook — confirm
// before dereferencing elsewhere.
public void newHash(int HashScale, int BookScale) {
    histTab = new int[90][90];
    hashMask = (1 << HashScale) - 1;
    maxBookPos = 1 << BookScale;
    hashList = new HashRecord[hashMask + 1];
    for (int i = 0; i < hashMask + 1; i++) {
        hashList[i] = new HashRecord();
    }
    bookList = new BookRecord[maxBookPos];
    // for (int i=0; i< MaxBookPos; i++){
    // BookList[i]=new BookRecord();
    // }
    clearHistTab();
    clearHash();
    // BookRand = rand.nextLong();//(unsigned long) time(NULL);
}
// Releases the history, hash and book structures for garbage collection.
public void delHash() {
    histTab = null;
    hashList = null;
    bookList = null;
}
// Resets every history-heuristic counter to zero.
public void clearHistTab() {
    for (int[] row : histTab) {
        for (int col = 0; col < row.length; col++) {
            row[col] = 0;
        }
    }
}
// Marks every transposition-table slot as empty (flag == 0).
public void clearHash() {
    for (HashRecord slot : hashList) {
        slot.flag = 0;
    }
}
// Probes the transposition table for the current position. Returns a usable
// bound/exact score, ObsoleteValue when only a stored best move is available,
// or UnknownValue on a miss.
// NOTE(review): assigning `hashMove = tempHash.bestMove` rebinds the local
// parameter only — Java passes references by value, so the caller never sees
// the stored move. Confirm callers do not rely on it, or copy fields instead.
// (Unused debug locals tmpInt/tmpLong1/tmpLong2 from the original removed.)
private int probeHash(MoveNode hashMove, int alpha, int beta, int depth) {
    boolean mateNode;
    HashRecord tempHash = hashList[(int) (activeBoard.getZobristKey() & hashMask)];
    if (tempHash.flag != 0 && tempHash.zobristLock == activeBoard.getZobristLock()) {
        mateNode = false;
        // Mate scores are stored relative to the root; rebase them to the
        // current ply. NOTE(review): this mutates the cached record in place.
        if (tempHash.value > CCEvalue.MaxValue - ActiveBoard.MAX_MOVE_NUM / 2) {
            tempHash.value -= activeBoard.getMoveNum() - startMove;
            mateNode = true;
        } else if (tempHash.value < ActiveBoard.MAX_MOVE_NUM / 2 - CCEvalue.MaxValue) {
            tempHash.value += activeBoard.getMoveNum() - startMove;
            mateNode = true;
        }
        // A stored score is usable if it is a mate score or was searched at
        // least as deeply as requested.
        if (mateNode || tempHash.depth >= depth) {
            if ((tempHash.flag & HashBeta) != 0) {
                if (tempHash.value >= beta) {
                    hitBeta++;
                    return tempHash.value;
                }
            } else if ((tempHash.flag & HashAlpha) != 0) {
                if (tempHash.value <= alpha) {
                    hitAlpha++;
                    return tempHash.value;
                }
            } else if ((tempHash.flag & HashPv) != 0) {
                hitPv++;
                return tempHash.value;
            } else {
                return UnknownValue;
            }
        }
        // Score not usable at this depth: fall back to the stored best move.
        if (tempHash.bestMove.src == -1) {
            return UnknownValue;
        } else {
            hashMove = tempHash.bestMove;
            return ObsoleteValue;
        }
    }
    return UnknownValue;
}
// Stores (or overwrites) an entry for the current position in the
// transposition table, using a depth-preferred replacement scheme.
private void recordHash(MoveNode hashMove, int hashFlag, int value, int depth) {
    HashRecord tempHash;
    tempHash = hashList[(int) (activeBoard.getZobristKey() & hashMask)];
    // Depth-preferred replacement: keep the existing, deeper entry.
    if ((tempHash.flag != 0) && tempHash.depth > depth) {
        return;
    }
    tempHash.zobristLock = activeBoard.getZobristLock();
    tempHash.flag = hashFlag;
    tempHash.depth = depth;
    tempHash.value = value;
    // Convert mate scores to be relative to the root before storing.
    if (tempHash.value > CCEvalue.MaxValue - ActiveBoard.MAX_MOVE_NUM / 2) {
        tempHash.value += activeBoard.getMoveNum() - startMove;
    } else if (tempHash.value < ActiveBoard.MAX_MOVE_NUM / 2 - CCEvalue.MaxValue) {
        tempHash.value -= activeBoard.getMoveNum() - startMove;
    }
    tempHash.bestMove = hashMove;
    // Redundant write-back (tempHash already references this slot); kept as-is.
    hashList[(int) (activeBoard.getZobristKey() & hashMask)] = tempHash;
}
/**
 * Recursively reconstructs the principal variation by following best
 * moves stored in the transposition table, appending them to pvLine.
 * Stops on a table miss, a lock mismatch, or a detected repetition.
 */
private void GetPvLine() {
    HashRecord entry = hashList[(int) (activeBoard.getZobristKey() & hashMask)];
    boolean usable = entry.flag != 0
            && entry.bestMove.src != -1
            && entry.zobristLock == activeBoard.getZobristLock();
    if (!usable) {
        return;
    }
    pvLine[pvLineNum] = entry.bestMove;
    activeBoard.movePiece(entry.bestMove);
    pvLineNum++;
    // stop extending the line on repetition to avoid infinite recursion
    if (activeBoard.isLoop(1) == 0) {
        GetPvLine();
    }
    activeBoard.undoMove();
}
// record example: i0h0 4 rnbakabr1/9/4c1c1n/p1p1N3p/9/6p2/P1P1P3P/2N1C2C1/9/R1BAKAB1R w - - 0 7
// i0h0:Move , 4: evalue, other: FEN String
/**
 * Loads the opening book from a classpath resource into the transposition
 * table. Each line holds a move, a value and a FEN string (see the format
 * example above). Positions with a single book move are stored directly in
 * the hash entry (flag BookUnique); positions with several moves get a
 * BookRecord in bookList, referenced via the entry's value field (BookMulti).
 *
 * @param bookFile classpath resource path of the book file
 * @throws IOException if reading the resource fails
 */
public void loadBook(final String bookFile) throws IOException {// opening book
int bookMoveNum, value, i;
BufferedReader inFile;
String lineStr;
int index = 0;
MoveNode bookMove = new MoveNode();// NOTE(original): marked "wrong" by the author — TODO review
HashRecord tempHash;
ActiveBoard BookPos = new ActiveBoard();// NOTE(original): marked "wrong" by the author — TODO review
InputStream is = SearchEngine.class.getResourceAsStream(bookFile);
inFile = new BufferedReader(new InputStreamReader(is), 1024 * 1024);
if (inFile == null)
return;
bookPosNum = 0;
int recordedToHash = 0;// diagnostic counter (testing only)
while ((lineStr = inFile.readLine()) != null) {
bookMove = new MoveNode();
bookMove.move(lineStr);
index = 0;
// src == -1 means the move field could not be parsed; skip the line
if (bookMove.src != -1) {
// skip the 4-char move plus the separator, then any extra spaces
index += 5;
while (lineStr.charAt(index) == ' ') {
index++;
}
// the rest of the line is the FEN of the position the move belongs to
BookPos.loadFen(lineStr.substring(index));
long tmpZob = BookPos.getZobristKey();
int tmp = BookPos.getSquares(bookMove.src);// diagnostic (testing only)
// only accept the move if a piece actually stands on its source square
if (BookPos.getSquares(bookMove.src) != 0) {
tempHash = hashList[(int) (BookPos.getZobristKey() & hashMask)];
if (tempHash.flag != 0) {// slot already occupied
if (tempHash.zobristLock == BookPos.getZobristLock()) {// same position
if ((tempHash.flag & BookMulti) != 0) {// position already has several book moves
bookMoveNum = bookList[tempHash.value].moveNum;
if (bookMoveNum < MaxBookMove) {
bookList[tempHash.value].moveList[bookMoveNum] = bookMove;
bookList[tempHash.value].moveNum++;
recordedToHash++;// diagnostic
}
} else {
// second move for this position: promote the entry from
// BookUnique to BookMulti and move both moves into bookList
if (bookPosNum < maxBookPos) {
tempHash.flag = BookMulti;
bookList[bookPosNum] = new BookRecord();
bookList[bookPosNum].moveNum = 2;
bookList[bookPosNum].moveList[0] = tempHash.bestMove;
bookList[bookPosNum].moveList[1] = bookMove;
tempHash.value = bookPosNum;
bookPosNum++;
hashList[(int) (BookPos.getZobristKey() & hashMask)] = tempHash;
recordedToHash++;// diagnostic
}
}
}
// NOTE(review): a lock mismatch (different position in the same
// slot) silently drops the book move — presumably accepted as a
// rare collision; confirm.
} else {
// empty slot: store the position's first book move
tempHash.zobristLock = BookPos.getZobristLock();
tempHash.flag = BookUnique;
tempHash.depth = 0;
tempHash.value = 0;
tempHash.bestMove = bookMove;
hashList[(int) (BookPos.getZobristKey() & hashMask)] = tempHash;
recordedToHash++;
}
}
}
}
inFile.close();
}
// End History and Hash Tables Procedures
// Begin Search Procedures
// Search Procedures
/**
 * Adaptive null-move reduction factor R (Heinz, ICCA Journal 1999):
 * shallow searches use R = 2, deep searches R = 3; at intermediate
 * depths (7-8) fall back to R = 2 when either side is down to endgame
 * material, where null-move pruning is less reliable.
 *
 * @param depth remaining search depth
 * @return the reduction R to apply to the null-move search
 */
private int RAdapt(int depth) {
    if (depth <= 6) {
        return 2;
    }
    if (depth > 8) {
        return 3;
    }
    boolean nearEndgame = activeBoard.getEvalue(0) < CCEvalue.EndgameMargin
            || activeBoard.getEvalue(1) < CCEvalue.EndgameMargin;
    return nearEndgame ? 2 : 3;
}
/**
 * Quiescence search (fail-soft): searches only captures — or all evasions
 * while in check — so that the static evaluation is taken at a "quiet"
 * position.
 *
 * FIX: removed the leftover debug code that rebuilt a move-list string on
 * every single node and then discarded it; it dominated the cost of this
 * hot function without any observable effect.
 *
 * @param Alpha lower bound
 * @param Beta  upper bound
 * @return best score found for the side to move
 */
private int quiesc(int Alpha, int Beta) {
    int i, bestValue, thisAlpha, thisValue;
    boolean inCheck, movable;
    MoveNode thisMove;
    SortedMoveNodes moveSort = new SortedMoveNodes();
    // 1. Return immediately if a repetition is detected.
    if (activeBoard.getMoveNum() > startMove) {
        thisValue = activeBoard.isLoop(1);
        if (thisValue != 0) {
            return activeBoard.loopValue(thisValue, activeBoard.getMoveNum() - startMove);
        }
    }
    // 2. Initialize.
    inCheck = activeBoard.lastMove().chk;
    movable = false;
    bestValue = -CCEvalue.MaxValue;
    thisAlpha = Alpha;
    // 3. Not in check: "stand pat" on the static evaluation first
    //    (optionally jittered by selectMask for move variety).
    if (!inCheck) {
        movable = true;
        thisValue = activeBoard.evaluation() + (selectMask != 0 ? (rand.nextInt() & selectMask) - (rand.nextInt() & selectMask) : 0);
        if (thisValue > bestValue) {
            if (thisValue >= Beta) {
                quiescNullNodes++;
                return thisValue;
            }
            bestValue = thisValue;
            if (thisValue > thisAlpha) {
                thisAlpha = thisValue;
            }
        }
    }
    // 4. Generate and sort all moves while in check, otherwise only
    //    captures passing the narrowCap filter are searched.
    moveSort.GenMoves(activeBoard, inCheck ? histTab : null);
    for (i = 0; i < moveSort.MoveNum; i++) {
        moveSort.BubbleSortMax(i);
        thisMove = moveSort.MoveList[i];
        if (inCheck || activeBoard.narrowCap(thisMove, wideQuiesc)) {
            if (activeBoard.movePiece(thisMove)) {
                movable = true;
                // 5. Recurse for every legal move.
                thisValue = -quiesc(-Beta, -thisAlpha);
                activeBoard.undoMove();
                // 6. Fail-soft alpha-beta bookkeeping.
                if (thisValue > bestValue) {
                    if (thisValue >= Beta) {
                        quiescBetaNodes++;
                        return thisValue;
                    }
                    bestValue = thisValue;
                    if (thisValue > thisAlpha) {
                        thisAlpha = thisValue;
                    }
                }
            }
        }
    }
    // 7. No legal move at all: mated (score shortened by ply distance).
    if (!movable) {
        quiescMateNodes++;
        return activeBoard.getMoveNum() - startMove - CCEvalue.MaxValue;
    }
    if (thisAlpha > Alpha) {
        quiescPvNodes++;
    } else {
        quiescAlphaNodes++;
    }
    return bestValue;
}
// The search algorithm combines:
// 1. transposition (hash) table
// 2. fail-soft alpha-beta search
// 3. adaptive null-move pruning
// 4. selective extensions
// 5. iterative deepening driven by the hash table
// 6. killer-move table
// 7. check extension
// 8. principal variation search
// 9. history heuristic table
/**
 * Main recursive alpha-beta search (fail-soft, principal-variation style).
 * Combines transposition-table probing/storing, adaptive null-move
 * pruning, futility pruning / razoring, check extension, killer moves and
 * the history heuristic; drops into quiescence search at depth <= 0.
 *
 * @param KillerTab killer moves collected for this ply
 * @param Alpha     lower bound
 * @param Beta      upper bound
 * @param Depth     remaining depth
 * @return score of the position from the side to move
 */
private int search(KillerStruct KillerTab, int Alpha, int Beta, int Depth) {
int i, j, thisDepth, futPrune, hashFlag;
boolean inCheck, movable, searched;
int hashValue, bestValue, thisAlpha, thisValue, futValue = 0;
MoveNode thisMove = new MoveNode();
MoveNode bestMove = new MoveNode();
SortedMoveNodes moveSort = new SortedMoveNodes();
KillerStruct subKillerTab = new KillerStruct();
// Alpha-Beta Search:
// 1. Repetition detection: score the loop immediately.
if (activeBoard.getMoveNum() > startMove) {
thisValue = activeBoard.isLoop(1);
if (thisValue != 0) {
return activeBoard.loopValue(thisValue, activeBoard.getMoveNum() - startMove);
}
}
// 2. Check extension: search one ply deeper while in check.
inCheck = activeBoard.lastMove().chk;
thisDepth = Depth;
if (inCheck) {
thisDepth++;
}
// 3. Return at once on a usable transposition-table hit; on an
//    ObsoleteValue hit, thisMove is meant to receive the hash move.
hashValue = probeHash(thisMove, Alpha, Beta, thisDepth);
if (hashValue >= -CCEvalue.MaxValue && hashValue <= CCEvalue.MaxValue) {
return hashValue;
}
// 4. Return if interrupted or out of time.
if (interrupt()) {
return 0;
}
;
// 5. Full-width search starts here:
if (thisDepth > 0) {
movable = false;
searched = false;
bestValue = -CCEvalue.MaxValue;
thisAlpha = Alpha;
hashFlag = HashAlpha;
subKillerTab.moveNum = 0;
// 6. Futility pruning / razoring setup.
futPrune = 0;
if (futility) {
// razoring: drop a hopeless depth-3 node to depth 2
if (thisDepth == 3 && !inCheck && activeBoard.evaluation() + CCEvalue.RazorMargin <= Alpha
&& activeBoard.getEvalue(activeBoard.getOppPlayer()) > CCEvalue.EndgameMargin) {
thisDepth = 2;
}
// (extended) futility pruning at frontier depths 1-2
if (thisDepth < 3) {
futValue = activeBoard.evaluation() + (thisDepth == 2 ? CCEvalue.ExtFutMargin : CCEvalue.SelFutMargin);
if (!inCheck && futValue <= Alpha) {
futPrune = thisDepth;
bestValue = futValue;
}
}
}
// 7. Null-move pruning (skipped in check, after a null move, and in
//    endgames where zugzwang makes it unsound).
if (nullMove && futPrune == 0 && !inCheck && activeBoard.lastMove().src != -1
&& activeBoard.getEvalue(activeBoard.getPlayer()) > CCEvalue.EndgameMargin) {
activeBoard.nullMove();
thisValue = -search(subKillerTab, -Beta, 1 - Beta, thisDepth - 1 - RAdapt(thisDepth));
activeBoard.undoNull();
if (thisValue >= Beta) {
nullNodes++;
return Beta;
}
}
// 8. Try the hash move first when the table supplied one.
if (hashValue == ObsoleteValue) {
if (activeBoard.movePiece(thisMove)) {
movable = true;
if (futPrune != 0 && -activeBoard.evaluation() + (futPrune == 2 ? CCEvalue.ExtFutMargin : CCEvalue.SelFutMargin) <= Alpha
&& activeBoard.lastMove().chk) {
activeBoard.undoMove();
} else {
thisValue = -search(subKillerTab, -Beta, -thisAlpha, thisDepth - 1);
searched = true;
activeBoard.undoMove();
if (stop) {
return 0;
}
if (thisValue > bestValue) {
if (thisValue >= Beta) {
histTab[thisMove.src][thisMove.dst] += 1 << (thisDepth - 1);
recordHash(thisMove, HashBeta, Beta, thisDepth);
hashNodes++;
return thisValue;
}
bestValue = thisValue;
bestMove = thisMove;
if (thisValue > thisAlpha) {
thisAlpha = thisValue;
hashFlag = HashPv;
// at the root: publish the improved PV immediately
if (activeBoard.getMoveNum() == startMove) {
recordHash(bestMove, hashFlag, thisAlpha, thisDepth);
popInfo(thisAlpha, Depth);
}
}
}
}
}
}
// 9. Then try the killer moves of this ply.
for (i = 0; i < KillerTab.moveNum; i++) {
thisMove = KillerTab.moveList[i];
if (activeBoard.leagalMove(thisMove)) {
if (activeBoard.movePiece(thisMove)) {
movable = true;
if (futPrune != 0 && -activeBoard.evaluation() + (futPrune == 2 ? CCEvalue.ExtFutMargin : CCEvalue.SelFutMargin) <= Alpha
&& activeBoard.lastMove().chk) {
activeBoard.undoMove();
} else {
// PVS: once a move has been searched, probe with a null
// window first and only re-search on a fail-high inside it
if (searched) {
thisValue = -search(subKillerTab, -thisAlpha - 1, -thisAlpha, thisDepth - 1);
if (thisValue > thisAlpha && thisValue < Beta) {
thisValue = -search(subKillerTab, -Beta, -thisAlpha, thisDepth - 1);
}
} else {
thisValue = -search(subKillerTab, -Beta, -thisAlpha, thisDepth - 1);
searched = true;
}
activeBoard.undoMove();
if (stop) {
return 0;
}
if (thisValue > bestValue) {
if (thisValue >= Beta) {
killerNodes++;
histTab[thisMove.src][thisMove.dst] += 1 << (thisDepth - 1);
recordHash(thisMove, HashBeta, Beta, thisDepth);
return thisValue;
}
bestValue = thisValue;
bestMove = thisMove;
if (thisValue > thisAlpha) {
thisAlpha = thisValue;
hashFlag = HashPv;
if (activeBoard.getMoveNum() == startMove) {
recordHash(bestMove, hashFlag, thisAlpha, thisDepth);
popInfo(thisAlpha, Depth);
}
}
}
}
}
}
}
// 10. Generate and sort all remaining legal moves (history ordered).
moveSort.GenMoves(activeBoard, histTab);
nodes += moveSort.MoveNum;
for (i = 0; i < moveSort.MoveNum; i++) {
moveSort.BubbleSortMax(i);
thisMove = moveSort.MoveList[i];
if (activeBoard.movePiece(thisMove)) {
movable = true;
// 11. Alpha-beta on each legal move (PVS null-window as above).
if (futPrune != 0 && -activeBoard.evaluation() + (futPrune == 2 ? CCEvalue.ExtFutMargin : CCEvalue.SelFutMargin) <= Alpha
&& activeBoard.lastMove().chk) {
activeBoard.undoMove();
} else {
if (searched) {
thisValue = -search(subKillerTab, -thisAlpha - 1, -thisAlpha, thisDepth - 1);
if (thisValue > thisAlpha && thisValue < Beta) {
thisValue = -search(subKillerTab, -Beta, -thisAlpha, thisDepth - 1);
}
} else {
thisValue = -search(subKillerTab, -Beta, -thisAlpha, thisDepth - 1);
searched = true;
}
activeBoard.undoMove();
if (stop) {
return 0;
}
// 12. Fail-soft alpha-beta bookkeeping.
if (thisValue > bestValue) {
if (thisValue >= Beta) {
betaNodes++;
histTab[thisMove.src][thisMove.dst] += 1 << (thisDepth - 1);
recordHash(thisMove, HashBeta, Beta, thisDepth);
// remember the refutation as a killer for this ply
if (KillerTab.moveNum < MaxKiller) {
KillerTab.moveList[KillerTab.moveNum] = thisMove;
KillerTab.moveNum++;
}
return thisValue;
}
bestValue = thisValue;
bestMove = thisMove;
if (thisValue > thisAlpha) {
thisAlpha = thisValue;
hashFlag = HashPv;
if (activeBoard.getMoveNum() == startMove) {
recordHash(bestMove, hashFlag, thisAlpha, thisDepth);
popInfo(thisAlpha, Depth);
}
}
}
}
}
}
// 13. No legal move at all: mated (score shortened by ply distance).
if (!movable) {
mateNodes++;
return activeBoard.getMoveNum() - startMove - CCEvalue.MaxValue;
}
// 14. Update history and transposition tables. A futility-pruned node
//     that never beat futValue stores no best move.
if (futPrune != 0 && bestValue == futValue) {
bestMove.src = bestMove.dst = -1;
}
if ((hashFlag & HashAlpha) != 0) {
alphaNodes++;
} else {
pvNodes++;
histTab[bestMove.src][bestMove.dst] += 1 << (thisDepth - 1);
if (KillerTab.moveNum < MaxKiller) {
KillerTab.moveList[KillerTab.moveNum] = bestMove;
KillerTab.moveNum++;
}
}
recordHash(bestMove, hashFlag, thisAlpha, thisDepth);
return bestValue;
// 15. Depth exhausted: quiescence search, result stored with bound type.
} else {
thisValue = quiesc(Alpha, Beta);
// NOTE(review): "thisMove.src = bestMove.dst = -1" looks like a typo
// for "thisMove.src = thisMove.dst = -1" — thisMove.dst is left at its
// constructor default; confirm against MoveNode.
thisMove.src = bestMove.dst = -1;
if (thisValue <= Alpha) {
recordHash(thisMove, HashAlpha, Alpha, 0);
} else if (thisValue >= Beta) {
recordHash(thisMove, HashBeta, Beta, 0);
} else {
recordHash(thisMove, HashPv, thisValue, 0);
}
leafNodes++;
return thisValue;
}
}
// End Search Procedures
// Start Control Procedures
/**
 * Returns true when the running search should abort. Currently this only
 * reflects the externally set stop flag (see {@code stopSearch()});
 * time-limit checks would also belong here.
 */
private boolean interrupt() {
    return stop;
}
/**
 * Requests the running search to stop; the search polls this flag via
 * interrupt()/stop and unwinds at the next opportunity.
 */
public void stopSearch() {
this.stop = true;
}
/**
 * Publishes the current principal variation: rebuilds pvLine from the
 * transposition table, logs it at debug level, and — when pondering —
 * stops the search once enough time has passed and the score has not
 * dropped noticeably below the previous iteration's score.
 *
 * @param value score of the current iteration
 * @param depth nominal iteration depth (0 suppresses all output)
 */
private void popInfo(int value, int depth) {
    if (depth == 0) {
        return;
    }
    StringBuilder logString = new StringBuilder("PVNode: depth=")
            .append(depth).append(",score=").append(value).append(",Move: \n");
    pvLineNum = 0;
    GetPvLine();
    for (int i = 0; i < pvLineNum; i++) {
        logString.append(" ").append(String.copyValueOf(pvLine[i].location())).append("\n");
    }
    // While pondering, stop early once the minimum time is exceeded and
    // the score has not fallen too far below the previous best.
    if (ponder && System.currentTimeMillis() > minTimer && value + CCEvalue.InadequateValue > lastScore) {
        stop = true;
    }
    if (log.isDebugEnabled()) {
        log.debug(logString.toString());
    }
}
/**
 * Configures the search limits used by control().
 *
 * @param depth  maximum iterative-deepening depth (0 disables searching)
 * @param proper target thinking time in milliseconds
 * @param limit  hard upper bound on thinking time in milliseconds
 */
public void setupControl(int depth, long proper, long limit) {
this.depth = depth;
this.properTimer = proper;
this.limitTimer = limit;
}
/**
 * Top-level thinking routine: consults the opening book, handles forced
 * moves while in check, then runs iterative deepening over search() and
 * stores the chosen move in the bestMove field.
 *
 * @throws LostException when the position is lost or drawn by rule
 *                       (perpetual chase, or maximum move count reached)
 */
public void control() throws LostException {
// int Depth, int ProperTimer, int LimitTimer) throws IOException {
int i, MoveNum, ThisValue;
char[] MoveStr;
stop = false;
bestMove = null;
MoveNode ThisMove = new MoveNode(), UniqueMove = new MoveNode();
HashRecord TempHash;
SortedMoveNodes MoveSort = new SortedMoveNodes();
KillerStruct SubKillerTab = new KillerStruct();
// The computer thinking procedure:
// 1. Look up the position in the opening book (stored in hashList).
int tmpInt = (int) (activeBoard.getZobristKey() & hashMask);
TempHash = hashList[(int) (activeBoard.getZobristKey() & hashMask)];
if (TempHash.flag != 0 && TempHash.zobristLock == activeBoard.getZobristLock()) {
if ((TempHash.flag == BookUnique)) {
// single book move: play it
MoveStr = TempHash.bestMove.location();
bestMove = new MoveNode(String.copyValueOf(MoveStr));
return;
} else if (TempHash.flag == BookMulti) {
// several book moves: pick one at random from bookList
ThisValue = 0;
i = Math.abs(rand.nextInt()) % (bookList[TempHash.value].moveNum);
MoveStr = bookList[TempHash.value].moveList[i].location();
bestMove = new MoveNode(String.copyValueOf(MoveStr));
return;
}
}
// 2. Initialize timers and the per-search counters.
startTimer = System.currentTimeMillis();
minTimer = startTimer + (properTimer >> 1);
maxTimer = properTimer << 1;
if (maxTimer > limitTimer) {
maxTimer = limitTimer;
}
maxTimer += startTimer;
stop = false;
startMove = activeBoard.getMoveNum();
nodes = nullNodes = hashNodes = killerNodes = betaNodes = pvNodes = alphaNodes = mateNodes = leafNodes = 0;
quiescNullNodes = quiescBetaNodes = quiescPvNodes = quiescAlphaNodes = quiescMateNodes = 0;
hitBeta = hitPv = hitAlpha = 0;
pvLineNum = 0;
// 3. Illegal position: the side that just moved left itself in check.
if (activeBoard.checked(activeBoard.getOppPlayer())) {
return;
}
// Rule adjudication: perpetual chase loses, move limit draws.
ThisValue = activeBoard.isLoop(3);
if (ThisValue != 0) {
throw new LostException("不可常捉!");
}
if (activeBoard.getMoveNum() > ActiveBoard.MAX_CONSECUTIVE_MOVES) {
throw new LostException("最大步数,和棋!");
}
// 4. In check: count the legal evasions. No evasion -> mated;
//    exactly one -> play it without searching.
if (activeBoard.lastMove().chk) {
MoveNum = 0;
MoveSort.GenMoves(activeBoard, histTab);
for (i = 0; i < MoveSort.MoveNum; i++) {
ThisMove = MoveSort.MoveList[i];
if (activeBoard.movePiece(ThisMove)) {
activeBoard.undoMove();
UniqueMove = ThisMove;
MoveNum++;
if (MoveNum > 1) {
break;
}
}
}
if (MoveNum == 0) {
if (log.isDebugEnabled())
log.debug("score " + -CCEvalue.MaxValue + "\n");
}
if (MoveNum == 1) {
MoveStr = UniqueMove.location();
if (log.isDebugEnabled())
log.debug("bestmove " + String.copyValueOf(MoveStr) + "\n");
bestMove = new MoveNode(String.copyValueOf(MoveStr));
return;
}
}
// 5. Iterative deepening, starting at depth 4.
if (depth == 0) {
return;
}
for (i = 4; i <= depth; i++) {
if (log.isDebugEnabled())
log.debug("info depth " + i + "\n");
SubKillerTab.moveNum = 0;
ThisValue = search(SubKillerTab, -CCEvalue.MaxValue, CCEvalue.MaxValue, i);
popInfo(ThisValue, depth);
if (stop) {
break;
}
lastScore = ThisValue;
// 6. Stop deepening on timeout or when a forced mate is found.
if (!ponder && System.currentTimeMillis() > minTimer) {
break;
}
if (ThisValue > CCEvalue.MaxValue - ActiveBoard.MAX_MOVE_NUM / 2 || ThisValue < ActiveBoard.MAX_MOVE_NUM / 2 - CCEvalue.MaxValue) {
break;
}
}
// 7. Extract the best move (and ponder move) from the PV.
if (pvLineNum != 0) {
MoveStr = pvLine[0].location();
bestMove = new MoveNode(String.copyValueOf(MoveStr));
if (log.isDebugEnabled())
log.debug("bestmove: " + String.copyValueOf(MoveStr) + "\n");
if (pvLineNum > 1) {
MoveStr = pvLine[1].location();
if (log.isDebugEnabled())
log.debug("ponder:" + String.copyValueOf(MoveStr) + "\n");
}
} else {
// NOTE(review): log.info guarded by isDebugEnabled — likely meant
// log.debug or an isInfoEnabled guard; confirm intent.
if (log.isDebugEnabled())
log.info("score:" + ThisValue);
}
}
// End Control Procedures
/**
 * Runs the full thinking procedure (control()) and returns the chosen
 * move, or null when no move was produced (e.g. illegal position).
 *
 * @throws LostException if the position is lost or drawn by rule
 */
public MoveNode getBestMove() throws LostException {
    control();
    // (removed a dead local that aliased bestMove in the original)
    return bestMove;
}
// for test
/**
 * Stand-alone smoke test: loads a fixed FEN position and the opening
 * book, then appends counter statistics to data/test.log.
 * Improvements over the original: report built with StringBuilder
 * instead of ~20 full-string concatenations, the hash-occupancy loop now
 * starts at slot 0 (it previously skipped index 0), the log file is
 * closed in a finally block, and the pointless explicit GC was removed.
 */
public static void main(String[] args) throws IOException {
    log.info("begin search, please wait......");
    long start = System.currentTimeMillis();
    int steps = 8;
    ActiveBoard cp = new ActiveBoard();
    String FenStr = "1c1k1abR1/4a4/4b4/6NP1/4P4/2C1n1P2/r5p2/4B4/4A4/2BAK4 w - - 0 20";
    cp.loadFen(FenStr);
    SearchEngine searchMove = new SearchEngine(cp);
    searchMove.loadBook("/data/book.txt");
    // searchMove.Control(steps, CLOCK_M * 2, CLOCK_M * 4);
    log.info(FenStr);
    long end = System.currentTimeMillis();
    long second = (end - start) / 1000;
    if (second == 0) {
        second = 1; // avoid division by zero in the BPS figure below
    }
    long minutes = second / 60;
    URL url = SearchEngine.class.getResource("/data/test.log");
    String uri = url.toString().replaceAll("file:/", "");
    Calendar c = Calendar.getInstance();
    StringBuilder report = new StringBuilder();
    report.append("\n\n********************************************************************\n");
    report.append("[Test Time] ").append(c.getTime()).append("\n");
    report.append("[Fen String] ").append(FenStr).append("\n");
    report.append(" Deep =").append(steps).append(",Used Time:").append(minutes).append(":").append(second % 60).append("\n");
    report.append("[Nodes] ").append(searchMove.nodes).append("\n");
    report.append("[AlphaNodes] ").append(searchMove.alphaNodes).append("\n");
    report.append("[BetaNodes] ").append(searchMove.betaNodes).append("\n");
    report.append("[HashNodes] ").append(searchMove.hashNodes).append("\n");
    report.append("[KillerNodes] ").append(searchMove.killerNodes).append("\n");
    report.append("[LeafNodes] ").append(searchMove.leafNodes).append("\n");
    report.append("[NullNodes] ").append(searchMove.nullNodes).append("\n");
    report.append("[QuiescAlphaNodes] ").append(searchMove.quiescAlphaNodes).append("\n");
    report.append("[QuiescBetaNodesNodes] ").append(searchMove.quiescBetaNodes).append("\n");
    report.append("[QuiescMateNodes] ").append(searchMove.quiescMateNodes).append("\n");
    report.append("[QuiescNullNodes] ").append(searchMove.quiescNullNodes).append("\n");
    report.append("[QuiescPvNodes] ").append(searchMove.quiescPvNodes).append("\n");
    report.append("[HitAlpha] ").append(searchMove.hitAlpha).append("\n");
    report.append("[HitBeta] ").append(searchMove.hitBeta).append("\n");
    report.append("[HitPv] ").append(searchMove.hitPv).append("\n");
    report.append("[BetaNode] ").append(searchMove.betaNodes).append("\n");
    report.append("[BPS] ").append(searchMove.nodes / second);
    // Count occupied transposition-table slots (index 0 included; the
    // original loop started at 1, an off-by-one).
    int count = 0;
    for (int i = 0; i < searchMove.hashList.length; i++) {
        if (searchMove.hashList[i].flag != 0) {
            count++;
        }
    }
    report.append("[HashTable] length=").append(searchMove.hashList.length).append(", occupied=").append(count);
    String tmpStr = report.toString();
    RandomAccessFile testResult = new RandomAccessFile(uri, "rw");
    try {
        // append to the end of the existing log
        testResult.seek(testResult.length());
        testResult.writeBytes(tmpStr);
    } finally {
        testResult.close();
    }
    System.out.println(tmpStr);
}
}
/**
 * Opening-book entry holding all known book moves for one position.
 * Referenced from a transposition-table HashRecord (via its value field)
 * when that entry's flag is BookMulti.
 */
class BookRecord {
// number of valid entries at the front of moveList
int moveNum;
// book moves; capacity SearchEngine.MaxBookMove, slots assigned on demand
MoveNode[] moveList;// [MaxBookMove];
public BookRecord() {
moveList = new MoveNode[SearchEngine.MaxBookMove];
moveNum = 0;
}
};
/**
 * Killer-move table for one search ply: moves that recently caused a
 * beta cutoff and are tried early at sibling nodes.
 */
class KillerStruct {
// number of valid killers at the front of moveList
int moveNum;
// pre-allocated killer slots; capacity SearchEngine.MaxKiller
MoveNode[] moveList;// [MaxKiller];
public KillerStruct() {
moveList = new MoveNode[SearchEngine.MaxKiller];
for (int i = 0; i < SearchEngine.MaxKiller; i++)
moveList[i] = new MoveNode();
moveNum = 0;
}
};
/**
 * One transposition-table slot. flag encodes the bound type (HashAlpha /
 * HashBeta / HashPv) or an opening-book marker (BookUnique / BookMulti);
 * 0 means the slot is empty. zobristLock disambiguates index collisions.
 */
class HashRecord {
public HashRecord() {
flag = 0;
depth = 0;
value = 0;
zobristLock = 0;
bestMove = new MoveNode();
}
// secondary Zobrist key used to verify the position on a slot hit
long zobristLock;
// flag: bound/book type (0 = empty); depth: search depth of the stored score
int flag, depth;
// stored score — or a bookList index when flag is BookMulti
int value;
// best move found at this position (src == -1 when none)
MoveNode bestMove;
};
|
apache-2.0
|
aaronwalker/camel
|
components/camel-mail/src/test/java/org/apache/camel/component/mail/MailSplitAttachmentsTest.java
|
3594
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mail;
import javax.activation.DataHandler;
import javax.activation.FileDataSource;
import org.apache.camel.Endpoint;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Producer;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import org.jvnet.mock_javamail.Mailbox;
/**
*
*/
/**
 * Tests that a consumed mail message is split into one exchange per
 * attachment via {@link SplitAttachmentsExpression}.
 * Fixes over the original: the producer is now stopped (it was started
 * and leaked), and the fixed {@code Thread.sleep(2000)} was removed —
 * {@code MockEndpoint#assertIsSatisfied()} already waits for the
 * expected number of messages.
 */
public class MailSplitAttachmentsTest extends CamelTestSupport {

    @Test
    public void testSplitAttachments() throws Exception {
        // start from an empty in-memory mailbox
        Mailbox.clearAll();

        // create an exchange with a normal body and two attachments to be produced as email
        Endpoint endpoint = context.getEndpoint("smtp://[email protected]?password=secret");
        Exchange exchange = endpoint.createExchange();
        Message in = exchange.getIn();
        in.setBody("Hello World");
        in.addAttachment("logo.jpeg", new DataHandler(new FileDataSource("src/test/data/logo.jpeg")));
        in.addAttachment("license.txt", new DataHandler(new FileDataSource("src/main/resources/META-INF/LICENSE.txt")));

        Producer producer = endpoint.createProducer();
        producer.start();
        try {
            producer.process(exchange);
        } finally {
            producer.stop();
        }

        MockEndpoint mock = getMockEndpoint("mock:split");
        mock.expectedMessageCount(2);
        mock.assertIsSatisfied();

        Message first = mock.getReceivedExchanges().get(0).getIn();
        Message second = mock.getReceivedExchanges().get(1).getIn();
        assertEquals(1, first.getAttachments().size());
        assertEquals("logo.jpeg", first.getAttachments().keySet().iterator().next());
        assertEquals(1, second.getAttachments().size());
        assertEquals("license.txt", second.getAttachments().keySet().iterator().next());
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // START SNIPPET: e1
                from("pop3://[email protected]?password=secret&consumer.delay=1000")
                    .to("log:email")
                    // use the SplitAttachmentsExpression which will split the message per attachment
                    .split(new SplitAttachmentsExpression())
                    // each message going to this mock has a single attachment
                    .to("mock:split")
                    .end();
                // END SNIPPET: e1
            }
        };
    }
}
|
apache-2.0
|
jeltz/rust-debian-package
|
src/test/run-pass/unify-return-ty.rs
|
731
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that the tail expr in null() has its type
// unified with the type *T, and so the type variable
// in that type gets resolved.
extern mod std;
// Builds a null raw pointer by reinterpreting the integer 0; the test's
// point is that the tail expression's type unifies with the declared
// return type *T, resolving the type variable.
// (Pre-1.0 Rust: `*T`, `cast::reinterpret_cast` and `int` are
// long-removed syntax/APIs — this compiles only with that era's compiler.)
fn null<T>() -> *T {
unsafe {
cast::reinterpret_cast(&0)
}
}
// Instantiate null at a concrete type so the unification is exercised.
pub fn main() { null::<int>(); }
|
apache-2.0
|
svn2github/ImageMagick
|
www/api/MagickCore/widget_8h.html
|
3662
|
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.3.1"/>
<title>MagickCore: widget.h File Reference</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
$(window).load(resizeHeight);
</script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td style="padding-left: 0.5em;">
<div id="projectname">MagickCore
&#160;<span id="projectnumber">7.0.0</span>
</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.3.1 -->
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
<div id="nav-sync" class="sync"></div>
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('widget_8h.html','');});
</script>
<div id="doc-content">
<div class="header">
<div class="headertitle">
<div class="title">widget.h File Reference</div> </div>
</div><!--header-->
<div class="contents">
<div class="textblock"><div class="dynheader">
This graph shows which files directly or indirectly include this file:</div>
<div class="dyncontent">
<div class="center"><img src="widget_8h__dep__incl.png" border="0" usemap="#widget_8hdep" alt=""/></div>
<map name="widget_8hdep" id="widget_8hdep">
<area shape="rect" id="node3" href="animate_8c.html" title="animate.c" alt="" coords="5,80,91,107"/><area shape="rect" id="node5" href="display_8c.html" title="display.c" alt="" coords="115,80,191,107"/><area shape="rect" id="node7" href="widget_8c.html" title="widget.c" alt="" coords="215,80,291,107"/><area shape="rect" id="node9" href="xwindow_8c.html" title="xwindow.c" alt="" coords="316,80,407,107"/></map>
</div>
</div>
<p><a href="widget_8h_source.html">Go to the source code of this file.</a></p>
</div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="navelem"><a class="el" href="dir_43e0a1f539e00dcfa1a6bc4d4fee4fc2.html">home</a></li><li class="navelem"><a class="el" href="dir_68918867e77b0a45f29c43673f2273f8.html">cristy</a></li><li class="navelem"><a class="el" href="dir_ed73005e52fb9d11ccda84631fe63a06.html">ImageMagick</a></li><li class="navelem"><a class="el" href="dir_95a66e94b416f0a122e65928fcf2737e.html">ImageMagick</a></li><li class="navelem"><a class="el" href="dir_7e75216414136f366c55fef7840927ba.html">MagickCore</a></li><li class="navelem"><a class="el" href="widget_8h.html">widget.h</a></li>
<li class="footer">Generated by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.3.1 </li>
</ul>
</div>
</body>
</html>
|
apache-2.0
|
maheshgaya/lips-with-maps
|
android/src/main/java/edu/drake/research/android/lipswithmaps/adapter/WifiAdapter.java
|
2440
|
/*
* Copyright 2017 Mahesh Gaya
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.drake.research.android.lipswithmaps.adapter;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import edu.drake.research.android.lipswithmaps.R;
import edu.drake.research.lipswithmaps.WifiItem;
/**
* Created by Mahesh Gaya on 1/15/17.
*/
/**
 * RecyclerView adapter that renders scanned {@link WifiItem}s (signal
 * level, SSID and BSSID) using the list_wifi_item row layout.
 */
public class WifiAdapter extends RecyclerView.Adapter<WifiAdapter.ViewHolder> {

    private List<WifiItem> mWifiItemList;

    public WifiAdapter(List<WifiItem> wifiItemList) {
        this.mWifiItemList = wifiItemList;
    }

    @Override
    public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        LayoutInflater inflater = LayoutInflater.from(parent.getContext());
        View row = inflater.inflate(R.layout.list_wifi_item, parent, false);
        return new ViewHolder(row);
    }

    @Override
    public void onBindViewHolder(ViewHolder holder, int position) {
        WifiItem item = mWifiItemList.get(position);
        holder.levelTextView.setText(String.valueOf(item.getLevel()));
        holder.ssidTextView.setText(item.getSsid());
        holder.bssidTextView.setText(item.getBssid());
    }

    @Override
    public int getItemCount() {
        return mWifiItemList.size();
    }

    /** Row holder; child views are bound by ButterKnife. */
    public class ViewHolder extends RecyclerView.ViewHolder {
        @BindView(R.id.textview_wifi_level) TextView levelTextView;
        @BindView(R.id.textview_wifi_ssid) TextView ssidTextView;
        @BindView(R.id.textview_wifi_bssid) TextView bssidTextView;

        public ViewHolder(View itemView) {
            super(itemView);
            ButterKnife.bind(this, itemView);
        }
    }
}
|
apache-2.0
|
dgmpk/vue-music-app
|
src/assets/js/request.js
|
659
|
import Vue from 'vue';
import axios from 'axios';
import VueAxios from 'vue-axios';
Vue.use(VueAxios, axios);
/**
 * Wraps Vue.axios in a Promise that retries once after a login when the
 * request fails with a 401.
 *
 * BUG FIXES vs. the original:
 * - non-401 errors were swallowed (the .catch handler returned undefined,
 *   so the promise resolved with undefined instead of rejecting);
 * - on 401 the outer chain resolved with undefined before the
 *   login-and-retry finished, so callers never saw the retried response.
 */
let ajax = (options) => {
    return new Promise(function(resolve, reject) {
        Vue.axios(options)
            .then(data => resolve(data))
            .catch(err => {
                if (err.code === 401) {
                    // not logged in: log in, then replay the request once
                    // (login() is assumed to be defined/imported elsewhere — TODO confirm)
                    login()
                        .then(() => ajax(options))
                        .then(data => resolve(data))
                        .catch(retryErr => reject(retryErr));
                } else {
                    reject(err);
                }
            });
    });
};
ajax.decorator = function(promiseFn, {locked, animated}) {
    // not implemented in the original; intentionally left as a no-op
}
|
apache-2.0
|
dropbox/notouch
|
tests/api_tests/util.py
|
1077
|
import json

import requests
class Client(object):
    """Thin HTTP helper around ``requests`` for the test API server.

    ``tornado_server`` must expose a ``port`` attribute; every request is
    issued against ``http://localhost:<port>/api/v1``.
    """

    def __init__(self, tornado_server):
        self.tornado_server = tornado_server

    @property
    def base_url(self):
        # Base URL of the API exposed by the tornado test server.
        return "http://localhost:{}/api/v1".format(self.tornado_server.port)

    def request(self, method, url, **kwargs):
        """Issue an HTTP request; PUT/POST bodies are declared as JSON."""
        headers = {}
        if method.lower() in ("put", "post"):
            headers["Content-type"] = "application/json"
        return requests.request(
            method, self.base_url + url,
            headers=headers, **kwargs
        )

    def get(self, url, **kwargs):
        return self.request("GET", url, **kwargs)

    def post(self, url, **kwargs):
        return self.request("POST", url, **kwargs)

    def put(self, url, **kwargs):
        return self.request("PUT", url, **kwargs)

    def delete(self, url, **kwargs):
        return self.request("DELETE", url, **kwargs)

    def create(self, url, **kwargs):
        # BUG FIX: json was used without being imported anywhere in this
        # module, so create()/update() raised NameError when called; the
        # module-level ``import json`` was added.
        return self.post(url, data=json.dumps(kwargs))

    def update(self, url, **kwargs):
        return self.put(url, data=json.dumps(kwargs))
|
apache-2.0
|
phatboyg/Machete
|
src/Machete.HL7Schema/Generated/V26/Messages/ORL_O34.cs
|
1084
|
// This file was automatically generated and may be regenerated at any
// time. To ensure any changes are retained, modify the tool with any segment/component/group/field name
// or type changes.
namespace Machete.HL7Schema.V26
{
using HL7;
/// <summary>
/// ORL_O34 (Message) - HL7 v2.6 laboratory order response message layout.
/// NOTE: this file is generated — change the generator tool, not this file
/// (see the header comment at the top of the file).
/// </summary>
public interface ORL_O34 :
HL7V26Layout
{
/// <summary>
/// MSH - message header segment.
/// </summary>
Segment<MSH> MSH { get; }
/// <summary>
/// MSA - message acknowledgment segment.
/// </summary>
Segment<MSA> MSA { get; }
/// <summary>
/// ERR - error segments (repeating).
/// </summary>
SegmentList<ERR> ERR { get; }
/// <summary>
/// SFT - software segments (repeating).
/// </summary>
SegmentList<SFT> SFT { get; }
/// <summary>
/// UAC - user authentication credential segment.
/// </summary>
Segment<UAC> UAC { get; }
/// <summary>
/// NTE - notes and comments segments (repeating).
/// </summary>
SegmentList<NTE> NTE { get; }
/// <summary>
/// RESPONSE - nested response group layout.
/// </summary>
Layout<ORL_O34_RESPONSE> Response { get; }
}
|
apache-2.0
|
braulio94/Quadro
|
app/src/main/java/com/braulio/cassule/designfocus/fragment/PostListFragment.java
|
6726
|
package com.braulio.cassule.designfocus.fragment;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.braulio.cassule.designfocus.ui.PostViewHolder;
import com.braulio.cassule.designfocus.R;
import com.braulio.cassule.designfocus.activity.PostDetailActivity;
import com.braulio.cassule.designfocus.model.Post;
import com.firebase.ui.database.FirebaseRecyclerAdapter;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.MutableData;
import com.google.firebase.database.Query;
import com.google.firebase.database.Transaction;
import com.squareup.picasso.Picasso;
/**
 * Base {@link Fragment} that renders a list of {@link Post} items backed by a
 * Firebase Realtime Database query.  Subclasses decide which posts are shown
 * by implementing {@link #getQuery(DatabaseReference)}.
 */
public abstract class PostListFragment extends Fragment {

    private static final String TAG = "PostListFragment";

    // [START define_database_reference]
    // Root reference of the Realtime Database; all reads/writes hang off this.
    private DatabaseReference mDatabase;
    // [END define_database_reference]

    // Adapter that keeps the RecyclerView synchronized with the Firebase query.
    private FirebaseRecyclerAdapter<Post, PostViewHolder> mAdapter;
    private RecyclerView mRecycler;
    private LinearLayoutManager mManager;

    // Required empty public constructor for fragment re-instantiation.
    public PostListFragment() {}

    @Override
    public View onCreateView (LayoutInflater inflater, ViewGroup container,
                              Bundle savedInstanceState) {
        super.onCreateView(inflater, container, savedInstanceState);
        View rootView = inflater.inflate(R.layout.fragment_all_posts, container, false);

        // [START create_database_reference]
        mDatabase = FirebaseDatabase.getInstance().getReference();
        // [END create_database_reference]

        mRecycler = (RecyclerView) rootView.findViewById(R.id.messages_list);
        mRecycler.setHasFixedSize(true);

        return rootView;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        // Set up Layout Manager, reverse layout (newest items appear first)
        mManager = new LinearLayoutManager(getActivity());
        mManager.setReverseLayout(true);
        mManager.setStackFromEnd(true);
        mRecycler.setLayoutManager(mManager);

        // Set up FirebaseRecyclerAdapter with the Query
        Query postsQuery = getQuery(mDatabase);
        mAdapter = new FirebaseRecyclerAdapter<Post, PostViewHolder>(Post.class, R.layout.item_post,
                PostViewHolder.class, postsQuery) {
            @Override
            protected void populateViewHolder(final PostViewHolder viewHolder, final Post model, final int position) {
                final DatabaseReference postRef = getRef(position);

                // Set click listener for the whole post view
                final String postKey = postRef.getKey();
                viewHolder.itemView.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        // Launch PostDetailActivity
                        Intent intent = new Intent(getActivity(), PostDetailActivity.class);
                        intent.putExtra(PostDetailActivity.EXTRA_POST_KEY, postKey);
                        startActivity(intent);
                    }
                });

                // NOTE(review): this early return skips the star icon update and
                // bindToPost() for posts without an image — confirm image-less
                // posts are intentionally left unbound.
                if (model.image == null){
                    return;
                } else {
                    Picasso.with(getContext()).load(Uri.parse(model.image)).fit().centerCrop().into(viewHolder.imageView);
                }

                // Determine if the current user has liked this post and set UI accordingly
                if (model.stars.containsKey(getUid())) {
                    viewHolder.starView.setImageResource(R.drawable.ic_toggle_star_fill_24);
                } else {
                    viewHolder.starView.setImageResource(R.drawable.ic_toggle_star_outline_24);
                }

                // Bind Post to ViewHolder, setting OnClickListener for the star button
                viewHolder.bindToPost(model, new View.OnClickListener() {
                    @Override
                    public void onClick(View starView) {
                        // Need to write to both places the post is stored
                        DatabaseReference globalPostRef = mDatabase.child("posts").child(postRef.getKey());
                        DatabaseReference userPostRef = mDatabase.child("user-posts").child(model.uid).child(postRef.getKey());

                        // Run two transactions
                        onStarClicked(globalPostRef);
                        onStarClicked(userPostRef);
                    }
                });
            }
        };
        mRecycler.setAdapter(mAdapter);
    }

    // [START post_stars_transaction]
    /**
     * Toggle the current user's star on the post at {@code postRef} inside a
     * Firebase transaction, keeping {@code starCount} and the {@code stars}
     * map consistent under concurrent updates.
     */
    private void onStarClicked(DatabaseReference postRef) {
        postRef.runTransaction(new Transaction.Handler() {
            @Override
            public Transaction.Result doTransaction(MutableData mutableData) {
                Post p = mutableData.getValue(Post.class);
                if (p == null) {
                    // Post missing (deleted or not yet cached): nothing to do.
                    return Transaction.success(mutableData);
                }

                if (p.stars.containsKey(getUid())) {
                    // Unstar the post and remove self from stars
                    p.starCount = p.starCount - 1;
                    p.stars.remove(getUid());
                } else {
                    // Star the post and add self to stars
                    p.starCount = p.starCount + 1;
                    p.stars.put(getUid(), true);
                }

                // Set value and report transaction success
                mutableData.setValue(p);
                return Transaction.success(mutableData);
            }

            @Override
            public void onComplete(DatabaseError databaseError, boolean b,
                                   DataSnapshot dataSnapshot) {
                // Transaction completed
                Log.d(TAG, "postTransaction:onComplete:" + databaseError);
            }
        });
    }
    // [END post_stars_transaction]

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Detach the adapter's Firebase listeners so this fragment is not leaked.
        if (mAdapter != null) {
            mAdapter.cleanup();
        }
    }

    /**
     * UID of the currently signed-in user.
     * NOTE(review): throws NullPointerException if no user is signed in —
     * callers appear to assume authentication already happened; verify.
     */
    public String getUid() {
        return FirebaseAuth.getInstance().getCurrentUser().getUid();
    }

    /** Subclasses return the database query whose results this list displays. */
    public abstract Query getQuery(DatabaseReference databaseReference);
}
|
apache-2.0
|
thefactory/AndroidAsync-Sharp
|
Additions/IAsyncSocketWrapper.cs
|
141
|
using System;
namespace Com.Koushikdutta.Async.Wrapper {
    /// <summary>
    /// Hand-written additions to the generated wrapper interface.
    /// </summary>
    partial interface IAsyncSocketWrapper {
        /// <summary>
        /// Re-declares Close() with <c>new</c> to hide an inherited member of
        /// the same signature from the generated half of this partial interface.
        /// </summary>
        new void Close();
    }
}
|
apache-2.0
|
fpbfabio/river-raid-remake
|
js/game/player.js
|
1075
|
/*global Phaser, Assets, Screen*/
// Player entity: owns the player sprite and handles left/right keyboard movement.
var Player = function (game) {
    "use strict";
    this.game = game;
    this.sprite = null;
};

// Layout / movement tuning constants.
Player.DISTANCE_TO_BORDER = 50;
Player.VELOCITY_X = 300;
Player.SPRITE_ANCHOR_X = 0.5;
Player.SPRITE_ANCHOR_Y = 0.5;

Player.prototype = {
    // Spawn the player sprite horizontally centered near the bottom edge
    // and enable arcade physics on it.
    create: function () {
        "use strict";
        var spawnY = Screen.HEIGHT - Player.DISTANCE_TO_BORDER;
        this.sprite = this.game.add.sprite(this.game.world.centerX, spawnY,
            Assets.PLAYER_SPRITE_KEY);
        this.sprite.anchor.set(Player.SPRITE_ANCHOR_X, Player.SPRITE_ANCHOR_Y);
        this.game.physics.enable(this.sprite, Phaser.Physics.ARCADE);
    },
    // Per-frame input handling: arrow keys drive horizontal velocity,
    // no key pressed means stop.
    update: function () {
        "use strict";
        var keyboard = this.game.input.keyboard;
        var vx = 0;
        if (keyboard.isDown(Phaser.Keyboard.LEFT)) {
            vx = -Player.VELOCITY_X;
        } else if (keyboard.isDown(Phaser.Keyboard.RIGHT)) {
            vx = Player.VELOCITY_X;
        }
        this.sprite.body.velocity.x = vx;
    }
};
|
apache-2.0
|
gridcf/gct
|
gsi_openssh/source/ssh-keysign.c
|
8056
|
/* $OpenBSD: ssh-keysign.c,v 1.66 2020/12/17 23:10:27 djm Exp $ */
/*
* Copyright (c) 2002 Markus Friedl. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "includes.h"
#include <fcntl.h>
#ifdef HAVE_PATHS_H
#include <paths.h>
#endif
#include <pwd.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <errno.h>
#ifdef WITH_OPENSSL
#include <openssl/evp.h>
#include <openssl/rand.h>
#include <openssl/rsa.h>
#include "openbsd-compat/openssl-compat.h"
#endif
#include "xmalloc.h"
#include "log.h"
#include "sshkey.h"
#include "ssh.h"
#include "ssh2.h"
#include "misc.h"
#include "sshbuf.h"
#include "authfile.h"
#include "msg.h"
#include "canohost.h"
#include "pathnames.h"
#include "readconf.h"
#include "uidswap.h"
#include "ssherr.h"
extern char *__progname;
static int
valid_request(struct passwd *pw, char *host, struct sshkey **ret,
u_char *data, size_t datalen)
{
struct sshbuf *b;
struct sshkey *key = NULL;
u_char type, *pkblob;
char *p;
size_t blen, len;
char *pkalg, *luser;
int r, pktype, fail;
if (ret != NULL)
*ret = NULL;
fail = 0;
if ((b = sshbuf_from(data, datalen)) == NULL)
fatal_f("sshbuf_from failed");
/* session id, currently limited to SHA1 (20 bytes) or SHA256 (32) */
if ((r = sshbuf_get_string(b, NULL, &len)) != 0)
fatal_fr(r, "parse session ID");
if (len != 20 && len != 32)
fail++;
if ((r = sshbuf_get_u8(b, &type)) != 0)
fatal_fr(r, "parse type");
if (type != SSH2_MSG_USERAUTH_REQUEST)
fail++;
/* server user */
if ((r = sshbuf_skip_string(b)) != 0)
fatal_fr(r, "parse user");
/* service */
if ((r = sshbuf_get_cstring(b, &p, NULL)) != 0)
fatal_fr(r, "parse service");
if (strcmp("ssh-connection", p) != 0)
fail++;
free(p);
/* method */
if ((r = sshbuf_get_cstring(b, &p, NULL)) != 0)
fatal_fr(r, "parse method");
if (strcmp("hostbased", p) != 0)
fail++;
free(p);
/* pubkey */
if ((r = sshbuf_get_cstring(b, &pkalg, NULL)) != 0 ||
(r = sshbuf_get_string(b, &pkblob, &blen)) != 0)
fatal_fr(r, "parse pk");
pktype = sshkey_type_from_name(pkalg);
if (pktype == KEY_UNSPEC)
fail++;
else if ((r = sshkey_from_blob(pkblob, blen, &key)) != 0) {
error_fr(r, "decode key");
fail++;
} else if (key->type != pktype)
fail++;
free(pkalg);
free(pkblob);
/* client host name, handle trailing dot */
if ((r = sshbuf_get_cstring(b, &p, &len)) != 0)
fatal_fr(r, "parse hostname");
debug2_f("check expect chost %s got %s", host, p);
if (strlen(host) != len - 1)
fail++;
else if (p[len - 1] != '.')
fail++;
else if (strncasecmp(host, p, len - 1) != 0)
fail++;
free(p);
/* local user */
if ((r = sshbuf_get_cstring(b, &luser, NULL)) != 0)
fatal_fr(r, "parse luser");
if (strcmp(pw->pw_name, luser) != 0)
fail++;
free(luser);
/* end of message */
if (sshbuf_len(b) != 0)
fail++;
sshbuf_free(b);
debug3_f("fail %d", fail);
if (fail)
sshkey_free(key);
else if (ret != NULL)
*ret = key;
return (fail ? -1 : 0);
}
/*
 * ssh-keysign: setuid helper that signs a hostbased-authentication challenge
 * with one of the machine's host private keys on behalf of ssh(1).  Speaks a
 * simple length-prefixed message protocol (version 2) over stdin/stdout.
 */
int
main(int argc, char **argv)
{
	struct sshbuf *b;
	Options options;
#define NUM_KEYTYPES 5
	struct sshkey *keys[NUM_KEYTYPES], *key = NULL;
	struct passwd *pw;
	int r, key_fd[NUM_KEYTYPES], i, found, version = 2, fd;
	u_char *signature, *data, rver;
	char *host, *fp;
	size_t slen, dlen;

	if (pledge("stdio rpath getpw dns id", NULL) != 0)
		fatal("%s: pledge: %s", __progname, strerror(errno));

	/* Ensure that stdin and stdout are connected */
	if ((fd = open(_PATH_DEVNULL, O_RDWR)) < 2)
		exit(1);
	/* Leave /dev/null fd iff it is attached to stderr */
	if (fd > 2)
		close(fd);

	/* Open the host private keys while still privileged (setuid). */
	i = 0;
	/* XXX This really needs to read sshd_config for the paths */
	key_fd[i++] = open(_PATH_HOST_DSA_KEY_FILE, O_RDONLY);
	key_fd[i++] = open(_PATH_HOST_ECDSA_KEY_FILE, O_RDONLY);
	key_fd[i++] = open(_PATH_HOST_ED25519_KEY_FILE, O_RDONLY);
	key_fd[i++] = open(_PATH_HOST_XMSS_KEY_FILE, O_RDONLY);
	key_fd[i++] = open(_PATH_HOST_RSA_KEY_FILE, O_RDONLY);

	if ((pw = getpwuid(getuid())) == NULL)
		fatal("getpwuid failed");
	pw = pwcopy(pw);

	/* Drop privileges permanently now that the key files are open. */
	permanently_set_uid(pw);

	seed_rng();

#ifdef DEBUG_SSH_KEYSIGN
	log_init("ssh-keysign", SYSLOG_LEVEL_DEBUG3, SYSLOG_FACILITY_AUTH, 0);
#endif

	/* verify that ssh-keysign is enabled by the admin */
	initialize_options(&options);
	(void)read_config_file(_PATH_HOST_CONFIG_FILE, pw, "", "",
	    &options, 0, NULL);
	(void)fill_default_options(&options);
	if (options.enable_ssh_keysign != 1)
		fatal("ssh-keysign not enabled in %s",
		    _PATH_HOST_CONFIG_FILE);

	for (i = found = 0; i < NUM_KEYTYPES; i++) {
		if (key_fd[i] != -1)
			found = 1;
	}
	if (found == 0)
		fatal("could not open any host key");

	/* Parse whichever key files opened successfully. */
	found = 0;
	for (i = 0; i < NUM_KEYTYPES; i++) {
		keys[i] = NULL;
		if (key_fd[i] == -1)
			continue;
		r = sshkey_load_private_type_fd(key_fd[i], KEY_UNSPEC,
		    NULL, &key, NULL);
		close(key_fd[i]);
		if (r != 0)
			debug_r(r, "parse key %d", i);
		else if (key != NULL) {
			keys[i] = key;
			found = 1;
		}
	}
	if (!found)
		fatal("no hostkey found");

	/* Keys are loaded; narrow the pledge before touching untrusted input. */
	if (pledge("stdio dns", NULL) != 0)
		fatal("%s: pledge: %s", __progname, strerror(errno));

	if ((b = sshbuf_new()) == NULL)
		fatal("%s: sshbuf_new failed", __progname);
	if (ssh_msg_recv(STDIN_FILENO, b) < 0)
		fatal("%s: ssh_msg_recv failed", __progname);
	if ((r = sshbuf_get_u8(b, &rver)) != 0)
		fatal_r(r, "%s: buffer error", __progname);
	if (rver != version)
		fatal("%s: bad version: received %d, expected %d",
		    __progname, rver, version);
	if ((r = sshbuf_get_u32(b, (u_int *)&fd)) != 0)
		fatal_r(r, "%s: buffer error", __progname);
	if (fd < 0 || fd == STDIN_FILENO || fd == STDOUT_FILENO)
		fatal("%s: bad fd = %d", __progname, fd);
	/* Derive the local hostname from the connected socket fd ssh passed us. */
	if ((host = get_local_name(fd)) == NULL)
		fatal("%s: cannot get local name for fd", __progname);

	if ((r = sshbuf_get_string(b, &data, &dlen)) != 0)
		fatal_r(r, "%s: buffer error", __progname);
	if (valid_request(pw, host, &key, data, dlen) < 0)
		fatal("%s: not a valid request", __progname);
	free(host);

	/* The requested public key must match one of our loaded host keys. */
	found = 0;
	for (i = 0; i < NUM_KEYTYPES; i++) {
		if (keys[i] != NULL &&
		    sshkey_equal_public(key, keys[i])) {
			found = 1;
			break;
		}
	}
	if (!found) {
		if ((fp = sshkey_fingerprint(key, options.fingerprint_hash,
		    SSH_FP_DEFAULT)) == NULL)
			fatal("%s: sshkey_fingerprint failed", __progname);
		fatal("%s: no matching hostkey found for key %s %s", __progname,
		    sshkey_type(key), fp ? fp : "");
	}

	/* `i` still indexes the matching key from the loop above. */
	if ((r = sshkey_sign(keys[i], &signature, &slen, data, dlen,
	    NULL, NULL, NULL, 0)) != 0)
		fatal_r(r, "%s: sshkey_sign failed", __progname);
	free(data);

	/* send reply */
	sshbuf_reset(b);
	if ((r = sshbuf_put_string(b, signature, slen)) != 0)
		fatal_r(r, "%s: buffer error", __progname);
	if (ssh_msg_send(STDOUT_FILENO, version, b) == -1)
		fatal("%s: ssh_msg_send failed", __progname);

	return (0);
}
|
apache-2.0
|
jasonwee/videoOnCloud
|
pmd/pmd-doc-5.5.1/pmd-cpp/testapidocs/net/sourceforge/pmd/cpd/package-tree.html
|
5050
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_92) on Wed Jul 27 21:20:55 CEST 2016 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>net.sourceforge.pmd.cpd Class Hierarchy (PMD C++ 5.5.1 Test API)</title>
<meta name="date" content="2016-07-27">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="net.sourceforge.pmd.cpd Class Hierarchy (PMD C++ 5.5.1 Test API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li>Use</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../net/sourceforge/pmd/package-tree.html">Prev</a></li>
<li><a href="../../../../net/sourceforge/pmd/lang/cpp/package-tree.html">Next</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?net/sourceforge/pmd/cpd/package-tree.html" target="_top">Frames</a></li>
<li><a href="package-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 class="title">Hierarchy For Package net.sourceforge.pmd.cpd</h1>
<span class="packageHierarchyLabel">Package Hierarchies:</span>
<ul class="horizontal">
<li><a href="../../../../overview-tree.html">All Packages</a></li>
</ul>
</div>
<div class="contentContainer">
<h2 title="Class Hierarchy">Class Hierarchy</h2>
<ul>
<li type="circle">java.lang.<a href="http://docs.oracle.com/javase/8/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang"><span class="typeNameLink">Object</span></a>
<ul>
<li type="circle">net.sourceforge.pmd.cpd.<a href="../../../../net/sourceforge/pmd/cpd/CPPTokenizerTest.html" title="class in net.sourceforge.pmd.cpd"><span class="typeNameLink">CPPTokenizerTest</span></a></li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li>Use</li>
<li class="navBarCell1Rev">Tree</li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../net/sourceforge/pmd/package-tree.html">Prev</a></li>
<li><a href="../../../../net/sourceforge/pmd/lang/cpp/package-tree.html">Next</a></li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?net/sourceforge/pmd/cpd/package-tree.html" target="_top">Frames</a></li>
<li><a href="package-tree.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2002–2016 <a href="http://pmd.sourceforge.net/">InfoEther</a>. All rights reserved.</small></p>
</body>
</html>
|
apache-2.0
|
dkhwangbo/druid
|
processing/src/main/java/org/apache/druid/query/dimension/RegexFilteredDimensionSpec.java
|
4458
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.dimension;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import it.unimi.dsi.fastutil.ints.Int2IntMap;
import it.unimi.dsi.fastutil.ints.Int2IntOpenHashMap;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.filter.DimFilterUtils;
import org.apache.druid.segment.DimensionSelector;
import javax.annotation.Nullable;
import java.nio.ByteBuffer;
import java.util.regex.Pattern;
/**
 * A filtered {@link DimensionSpec} that only retains dimension values matching
 * the configured regular expression; rows whose value does not match are
 * discarded from the results.
 */
public class RegexFilteredDimensionSpec extends BaseFilteredDimensionSpec
{

  private static final byte CACHE_TYPE_ID = 0x2;

  private final String pattern;
  // Compiled once in the constructor so decorate() never recompiles per row.
  private final Pattern compiledRegex;

  public RegexFilteredDimensionSpec(
      @JsonProperty("delegate") DimensionSpec delegate,
      @JsonProperty("pattern") String pattern //rows not matching the pattern will be discarded
  )
  {
    super(delegate);
    this.pattern = Preconditions.checkNotNull(pattern, "pattern must not be null");
    this.compiledRegex = Pattern.compile(pattern);
  }

  @JsonProperty
  public String getPattern()
  {
    return pattern;
  }

  /**
   * Wraps {@code selector} so that only regex-matching values are visible.
   * Two strategies: a per-row predicate when the value dictionary is unknown,
   * or a precomputed id remapping when the cardinality is known up front.
   */
  @Override
  public DimensionSelector decorate(final DimensionSelector selector)
  {
    if (selector == null) {
      return null;
    }

    final int selectorCardinality = selector.getValueCardinality();
    // Unknown cardinality or values not enumerable in advance:
    // fall back to evaluating the regex on every row.
    if (selectorCardinality < 0 || !selector.nameLookupPossibleInAdvance()) {
      return new PredicateFilteredDimensionSelector(
          selector,
          new Predicate<String>()
          {
            @Override
            public boolean apply(@Nullable String input)
            {
              return compiledRegex.matcher(NullHandling.nullToEmptyIfNeeded(input)).matches();
            }
          }
      );
    }

    // Known dictionary: map each matching original dictionary id to a dense
    // id space that contains only the matching values.
    int count = 0;
    final Int2IntOpenHashMap forwardMapping = new Int2IntOpenHashMap();
    forwardMapping.defaultReturnValue(-1);
    for (int i = 0; i < selectorCardinality; i++) {
      // nullToEmptyIfNeeded can still return null — hence the extra check.
      String val = NullHandling.nullToEmptyIfNeeded(selector.lookupName(i));
      if (val != null && compiledRegex.matcher(val).matches()) {
        forwardMapping.put(i, count++);
      }
    }

    // Inverse of forwardMapping: dense id -> original dictionary id.
    final int[] reverseMapping = new int[forwardMapping.size()];
    for (Int2IntMap.Entry e : forwardMapping.int2IntEntrySet()) {
      reverseMapping[e.getIntValue()] = e.getIntKey();
    }

    return new ForwardingFilteredDimensionSpec
Selector(selector, forwardMapping, reverseMapping);
  }

  /**
   * Cache key layout: type id byte, delegate key, separator byte,
   * UTF-8 bytes of the regex pattern.
   */
  @Override
  public byte[] getCacheKey()
  {
    byte[] delegateCacheKey = delegate.getCacheKey();
    byte[] regexBytes = StringUtils.toUtf8(pattern);
    return ByteBuffer.allocate(2 + delegateCacheKey.length + regexBytes.length)
                     .put(CACHE_TYPE_ID)
                     .put(delegateCacheKey)
                     .put(DimFilterUtils.STRING_SEPARATOR)
                     .put(regexBytes)
                     .array();
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }

    RegexFilteredDimensionSpec that = (RegexFilteredDimensionSpec) o;

    // Equality considers the delegate spec and the raw pattern string.
    if (!delegate.equals(that.delegate)) {
      return false;
    }
    return pattern.equals(that.pattern);
  }

  @Override
  public int hashCode()
  {
    int result = delegate.hashCode();
    result = 31 * result + pattern.hashCode();
    return result;
  }

  @Override
  public String toString()
  {
    return "RegexFilteredDimensionSpec{" +
           "pattern='" + pattern + '\'' +
           '}';
  }
}
|
apache-2.0
|
goncha/django-accounts
|
urls.py
|
449
|
from django.conf.urls import patterns, url

# URL routes for the accounts app, dispatching to views in accounts.views.
# NOTE(review): patterns() with dotted-string view names is pre-Django-1.8
# style (removed in Django 1.10); this module targets an older Django.
urlpatterns = patterns('accounts.views',
    url(r'^$', 'home_view', name='home'),
    url(r'^login/$', 'login_view', name='login'),
    url(r'^logout/$', 'logout_view', name='logout'),
    url(r'^register/$', 'register_view', name='register'),
    url(r'^password/$', 'password_view', name='password'),
    url(r'^profile/$', 'profile_view', name='profile'),
    url(r'^hello/$', 'hello_view', name='hello'),
)
|
apache-2.0
|
symsoft/ecc-api-guide
|
change_a2p_voice_app_key.md
|
1219
|
### Change A2P Voice Application Key
Change the A2P Voice Application Key assigned to the Subscription by issuing a PATCH request on the _/ecc/v1/subscriptions/{type}:{id}_ path. The body of the request shall include the desired application key. Note that this feature is only applicable to subscriptions that have been enabled for A2P Voice support as part of the onboarding process.
This property can also be supplied when creating a subscription.
**Example Command:**
```
curl --request PATCH \
--data '{"a2p-voice-app-key" : "appkey"}' \
--header "Content-type: application/json" \
--header "Accept: application/json" \
https://user:[email protected]/ecc/v1/subscriptions/msisdn:46708421488
```
**Example Request:**
```
PATCH /ecc/v1/subscriptions/msisdn:46708421488 HTTP/1.1
Host: api.ecc.symsoft.com
Authorization: Basic c3VwZXI6c3VwZXI=
User-Agent: curl/7.43.0
Accept: application/json
Content-Type: application/json
Content-Length: 43
{
"a2p-voice-app-key" : "appkey"
}
```
**Example Response:**
```
HTTP/1.1 202 Accepted
Server: Nobill/5.3.0
Content-Type: application/json;charset=UTF-8
Date: Thu, 10 Mar 2016 15:39:18 GMT
Content-Length: 26
{
"orders": [
20144
]
}
```
|
apache-2.0
|
wesm/arrow
|
cpp/src/arrow/compute/kernels/test_util.h
|
6777
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#pragma once
// IWYU pragma: begin_exports
#include <gmock/gmock.h>
#include <memory>
#include <string>
#include <vector>
#include "arrow/array.h"
#include "arrow/compute/api_scalar.h"
#include "arrow/compute/kernel.h"
#include "arrow/datum.h"
#include "arrow/memory_pool.h"
#include "arrow/pretty_print.h"
#include "arrow/testing/gtest_util.h"
#include "arrow/testing/random.h"
#include "arrow/testing/util.h"
#include "arrow/type.h"
// IWYU pragma: end_exports
namespace arrow {
using internal::checked_cast;
namespace compute {
using DatumVector = std::vector<Datum>;
// Build an Array of `type` from C values, applying the validity vector when
// it is non-empty; an empty `is_valid` means "all elements valid".
// Relies on ArrayFromVector from arrow/testing/gtest_util.h.
template <typename Type, typename T>
std::shared_ptr<Array> _MakeArray(const std::shared_ptr<DataType>& type,
                                  const std::vector<T>& values,
                                  const std::vector<bool>& is_valid) {
  std::shared_ptr<Array> result;
  if (is_valid.size() > 0) {
    ArrayFromVector<Type, T>(type, is_valid, values, &result);
  } else {
    ArrayFromVector<Type, T>(type, values, &result);
  }
  return result;
}
// Map a CompareOperator to the name of the corresponding compute function.
// NOTE(review): indexes the array by the enumerator's integral value with no
// bounds check, so the entries must stay in CompareOperator declaration
// order — confirm when operators are added.
inline std::string CompareOperatorToFunctionName(CompareOperator op) {
  static std::string function_names[] = {
      "equal", "not_equal", "greater", "greater_equal", "less", "less_equal",
  };

  return function_names[op];
}
void CheckScalar(std::string func_name, const ScalarVector& inputs,
std::shared_ptr<Scalar> expected,
const FunctionOptions* options = nullptr);
void CheckScalar(std::string func_name, const DatumVector& inputs, Datum expected,
const FunctionOptions* options = nullptr);
void CheckScalarUnary(std::string func_name, std::shared_ptr<DataType> in_ty,
std::string json_input, std::shared_ptr<DataType> out_ty,
std::string json_expected,
const FunctionOptions* options = nullptr);
void CheckScalarUnary(std::string func_name, std::shared_ptr<Array> input,
std::shared_ptr<Array> expected,
const FunctionOptions* options = nullptr);
void CheckScalarUnary(std::string func_name, std::shared_ptr<Scalar> input,
std::shared_ptr<Scalar> expected,
const FunctionOptions* options = nullptr);
void CheckScalarBinary(std::string func_name, std::shared_ptr<Scalar> left_input,
std::shared_ptr<Scalar> right_input,
std::shared_ptr<Scalar> expected,
const FunctionOptions* options = nullptr);
void CheckScalarBinary(std::string func_name, std::shared_ptr<Array> left_input,
std::shared_ptr<Array> right_input,
std::shared_ptr<Array> expected,
const FunctionOptions* options = nullptr);
void CheckScalarBinary(std::string func_name, std::shared_ptr<Array> left_input,
std::shared_ptr<Scalar> right_input,
std::shared_ptr<Array> expected,
const FunctionOptions* options = nullptr);
void CheckScalarBinary(std::string func_name, std::shared_ptr<Scalar> left_input,
std::shared_ptr<Array> right_input,
std::shared_ptr<Array> expected,
const FunctionOptions* options = nullptr);
void CheckVectorUnary(std::string func_name, Datum input, std::shared_ptr<Array> expected,
const FunctionOptions* options = nullptr);
void ValidateOutput(const Datum& output);
using BinaryTypes =
::testing::Types<BinaryType, LargeBinaryType, StringType, LargeStringType>;
using StringTypes = ::testing::Types<StringType, LargeStringType>;
static constexpr random::SeedType kRandomSeed = 0x0ff1ce;
// Instantiate DoTestFunctor<ArrowType>::Test(...) for every primitive
// C-type-backed Arrow type (ints, floats, dates, times, timestamps,
// duration), covering representative time units where parameterized.
template <template <typename> class DoTestFunctor>
void TestRandomPrimitiveCTypes() {
  DoTestFunctor<Int8Type>::Test(int8());
  DoTestFunctor<Int16Type>::Test(int16());
  DoTestFunctor<Int32Type>::Test(int32());
  DoTestFunctor<Int64Type>::Test(int64());
  DoTestFunctor<UInt8Type>::Test(uint8());
  DoTestFunctor<UInt16Type>::Test(uint16());
  DoTestFunctor<UInt32Type>::Test(uint32());
  DoTestFunctor<UInt64Type>::Test(uint64());
  DoTestFunctor<FloatType>::Test(float32());
  DoTestFunctor<DoubleType>::Test(float64());
  DoTestFunctor<Date32Type>::Test(date32());
  DoTestFunctor<Date64Type>::Test(date64());
  DoTestFunctor<Time32Type>::Test(time32(TimeUnit::SECOND));
  DoTestFunctor<Time64Type>::Test(time64(TimeUnit::MICRO));
  DoTestFunctor<TimestampType>::Test(timestamp(TimeUnit::SECOND));
  DoTestFunctor<TimestampType>::Test(timestamp(TimeUnit::MICRO));
  DoTestFunctor<DurationType>::Test(duration(TimeUnit::MILLI));
}
// Check that DispatchBest on a given function yields the same Kernel as
// produced by DispatchExact on another set of ValueDescrs.
void CheckDispatchBest(std::string func_name, std::vector<ValueDescr> descrs,
std::vector<ValueDescr> exact_descrs);
// Check that function fails to produce a Kernel for the set of ValueDescrs.
void CheckDispatchFails(std::string func_name, std::vector<ValueDescr> descrs);
// Helper to get a default instance of a type, including parameterized types
// Non-parameterized types: return the shared singleton instance.
template <typename T>
enable_if_parameter_free<T, std::shared_ptr<DataType>> default_type_instance() {
  return TypeTraits<T>::type_singleton();
}

template <typename T>
enable_if_time<T, std::shared_ptr<DataType>> default_type_instance() {
  // Time32 requires second/milli, Time64 requires nano/micro
  if (bit_width(T::type_id) == 32) {
    return std::make_shared<T>(TimeUnit::type::SECOND);
  }
  return std::make_shared<T>(TimeUnit::type::NANO);
}

// Timestamps default to second precision.
template <typename T>
enable_if_timestamp<T, std::shared_ptr<DataType>> default_type_instance() {
  return std::make_shared<T>(TimeUnit::type::SECOND);
}

// Decimals require precision/scale; (5, 2) is an arbitrary small default.
template <typename T>
enable_if_decimal<T, std::shared_ptr<DataType>> default_type_instance() {
  return std::make_shared<T>(5, 2);
}
} // namespace compute
} // namespace arrow
|
apache-2.0
|
SunnyRx/SunnyRx.github.io
|
_posts/2018-09-19-Realize-the-main-function-first-or-realize-the-foundation-first.markdown
|
3061
|
---
layout: post
title: "先实现功能还是先搭建基础"
subtitle: "做项目的决策时遇到的问题分享"
date: 2018-09-19
author: "SunnyRx"
header-img: "img/post-bg-gamejam.webp"
catalog: true
tags:
- 游戏制作
- 分享
- 游戏策划
---
>这是笔者在和主程[老朱](http://zxtechart.com/)讨论“这个项目优先实现哪部分内容?”时遇到的问题。
直接看结论点[这里](/#结论)
# 问题
对于一个即将要开始开发的项目,哪些功能比较重要,需要优先实现,我们提出了两个方向。但是在这两个方向中,应该选哪个,成为了问题。这两个方向分别是:
- **先实现导入数据的功能,再实现主要的功能**
- **先实现主要的功能,再实现导入数据的功能**
# 过程
一开始笔者的想法是,先专注实现主要的功能,再实现导入数据的功能,可以更好的专注功能本身,而测试功能时录入的测试数据可以暂时使用硬编码之类粗暴方式解决先。
同时也想到,先做好数据导入的话,之后功能实现后可以同时测试大小规模的数据,同时主程序也是建议先做数据导入,花的时间也不多。
笔者当时认为这种时候听主程序的建议是最合适的,但是考虑到如果以后独自遇到这种问题需要自行作出决策的话,搞清楚怎么做出判断是很重要的。
针对这两个方向,笔者感到无从判断,于是向[蔡老师](https://www.weibo.com/acaciaforjesus?refer_flag=1005050006_&is_all=1)请教,在蔡老师的指引下和老朱的帮助下,整理出这两个方向的优缺点。
**先实现导入数据的功能,再实现主要的功能**:
优点:
1. 节省写硬编码或临时代码的时间
2. 可以测试大规模数据
缺点:
1. 不能很快看到主要功能的效果
2. 不能优先解决困难的地方
**先实现主要的功能,再实现导入数据的功能**:
优点:
1. 能很快看到主要功能的效果
2. 可以优先解决困难的地方
缺点:
1. 需要花费写硬编码或临时代码的时间
2. 测试大规模数据会很麻烦
老朱的意见是,在功能需求不怎么需要修改的情况下,**先实现导入数据的功能,再实现主要的功能**会更合适,当下就是这种情况。
# 结论
对于要开发的东西,功能已经相当明确,有具体的设计文档的时候,**先实现导入数据的功能,再实现主要的功能**是合适的选择。
> 前期将项目基础搭建好,后面可以节省很多不必要的时间,也不会造成代码浪费。这个选择对应的是**瀑布开发模式**。
对于要开发的东西,功能尚不完全确定,需要反复验证功能的时候,**先实现主要的功能,再实现导入数据的功能**是合适的选择。
> 例如做一个新的游戏,当不确定这个游戏的玩法是否可行时,需要快速搭建原型验证和反复修改。这个选择对应的是**敏捷开发模式**。
|
apache-2.0
|
fthomas/crjdt
|
modules/core/src/test/scala/eu/timepit/crjdt/core/ReplicaSpec.scala
|
2719
|
package eu.timepit.crjdt.core
import eu.timepit.crjdt.core.Cmd.{Assign, Delete}
import eu.timepit.crjdt.core.arbitrary._
import eu.timepit.crjdt.core.testUtil._
import org.scalacheck.Prop._
import org.scalacheck.Properties
object ReplicaSpec extends Properties("Replica") {
  // Pristine replicas with distinct ids; every property starts from these.
  val p0 = Replica.empty("p")
  val q0 = Replica.empty("q")
  val r0 = Replica.empty("r")

  // Applying commands locally and merging into a fresh replica converges.
  property("convergence 1") = forAll { (cmds: List[Cmd]) =>
    val pLocal = p0.applyCmds(cmds)
    val qMerged = merge(q0, pLocal)
    converged(pLocal, qMerged)
  }

  // Two replicas that exchange states pairwise end up converged.
  property("convergence 2") = forAll { (cmds1: List[Cmd], cmds2: List[Cmd]) =>
    val pLocal = p0.applyCmds(cmds1)
    val qLocal = q0.applyCmds(cmds2)
    val pSync = merge(pLocal, qLocal)
    val qSync = merge(qLocal, pLocal)
    converged(pSync, qSync)
  }

  // Three replicas that merge every peer's state also converge.
  property("convergence 3") = forAll {
    (cmds1: List[Cmd], cmds2: List[Cmd], cmds3: List[Cmd]) =>
      val pLocal = p0.applyCmds(cmds1)
      val qLocal = q0.applyCmds(cmds2)
      val rLocal = r0.applyCmds(cmds3)
      val pSync = merge(merge(pLocal, qLocal), rLocal)
      val qSync = merge(merge(qLocal, pLocal), rLocal)
      val rSync = merge(merge(rLocal, pLocal), qLocal)
      converged(pSync, qSync, rSync)
  }

  // Remote ops may be applied in any order and still reproduce the state.
  property("commutativity 1") = forAll { (cmds: List[Cmd]) =>
    val pLocal = p0.applyCmds(cmds)
    val qRemote = q0.applyRemoteOps(randomPermutation(pLocal.generatedOps))
    converged(pLocal, qRemote)
  }

  property("commutativity 2") = forAll { (cmds1: List[Cmd], cmds2: List[Cmd]) =>
    val pLocal = p0.applyCmds(cmds1)
    val qLocal = q0.applyCmds(cmds2)
    val pSync = pLocal.applyRemoteOps(randomPermutation(qLocal.generatedOps))
    val qSync = qLocal.applyRemoteOps(randomPermutation(pLocal.generatedOps))
    converged(pSync, qSync)
  }

  property("commutativity 3") = forAll {
    (cmds1: List[Cmd], cmds2: List[Cmd], cmds3: List[Cmd]) =>
      val pLocal = p0.applyCmds(cmds1)
      val qLocal = q0.applyCmds(cmds2)
      val rLocal = r0.applyCmds(cmds3)
      val pSync = pLocal.applyRemoteOps(
        randomPermutation(qLocal.generatedOps ++ rLocal.generatedOps))
      val qSync = qLocal.applyRemoteOps(
        randomPermutation(pLocal.generatedOps ++ rLocal.generatedOps))
      val rSync = rLocal.applyRemoteOps(
        randomPermutation(pLocal.generatedOps ++ qLocal.generatedOps))
      converged(pSync, qSync, rSync)
  }

  // Merging the same remote state twice is a no-op (idempotence).
  property("idempotence") = forAll { (cmd: Cmd) =>
    val pLocal = p0.applyCmd(cmd)
    val qOnce = merge(q0, pLocal)
    val qTwice = merge(qOnce, pLocal)
    converged(pLocal, qOnce, qTwice)
  }

  // Concurrent delete vs. an arbitrary command still converges (Lemma 6).
  property("Lemma 6") = forAll { (delete: Delete, cmd: Cmd) =>
    val pLocal = p0.applyCmd(delete)
    val qLocal = q0.applyCmd(cmd)
    converged(merge(pLocal, qLocal), merge(qLocal, pLocal))
  }

  // Concurrent assign vs. an arbitrary command still converges (Lemma 7).
  property("Lemma 7") = forAll { (assign: Assign, cmd: Cmd) =>
    val pLocal = p0.applyCmd(assign)
    val qLocal = q0.applyCmd(cmd)
    converged(merge(pLocal, qLocal), merge(qLocal, pLocal))
  }
}
|
apache-2.0
|
mustachesolutions/BboyJueces
|
web/HTML/css/admin/stimenu.css
|
1548
|
/*Sliding Text and Icon Menu Style*/
/* Menu container: shrink-wrapped (display:table) and centered horizontally. */
.sti-menu{
position:relative;
margin:60px auto 0 auto;
background: #9d9898;
display: table;
}
/* Each tile is a fixed 200x300 cell with a 1px gutter. */
.sti-menu li{
float:left;
background: black;
width:200px;
height:300px;
margin:1px;
}
/* Whole tile is clickable; overflow:hidden clips the sliding content.
   NOTE(review): the second `background` (#ebe9e9) overrides #fff above —
   the first declaration is dead. */
.sti-menu li a{
display:block;
overflow:hidden;
background:#fff;
text-align:center;
background: #ebe9e9;
height:100%;
width:100%;
position:relative;
-moz-box-shadow:1px 1px 2px #ddd;
-webkit-box-shadow:1px 1px 2px #ddd;
box-shadow:1px 1px 2px #ddd;
}
/* Tile title, absolutely positioned in the lower half of the tile. */
.sti-menu li a h2{
color:#000;
font-family: 'Wire One', arial, serif;
font-size:42px;
font-weight:bold;
text-transform:uppercase;
position:absolute;
padding:10px;
width:180px;
top:140px;
left:0px;
text-shadow: 0px 1px 1px black;
}
/* Subtitle rendered beneath the title. */
.sti-menu li a h3{
font-family: Baskerville, "Hoefler Text", Garamond, "Times New Roman", serif;
font-size:18px;
font-style:italic;
color: #111;
position:absolute;
top:248px;
left:0px;
width:180px;
padding:10px;
}
/* Shared 100x100 icon box; each modifier class below supplies the image. */
.sti-icon{
width:100px;
height:100px;
position:absolute;
background-position:top left;
background-repeat:no-repeat;
background-color:transparent;
left:50px;
top:30px;
}
.sti-icon-crew{
background-image:url(/images/crew.svg);
}
.sti-icon-batalla{
background-image:url(/images/batalla.svg);
}
.sti-icon-resultados{
background-image:url(/images/resultados.svg);
}
.sti-icon-info{
background-image:url(/images/info.png);
}
/* NOTE(review): relative path, unlike the absolute /images/ paths above —
   confirm this is intentional. */
.sti-icon-technology{
background-image:url(../images/technology.png);
}
|
apache-2.0
|
cory-work/amphtml
|
build-system/compile/bundles.config.js
|
26274
|
/**
* Copyright 2018 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const colors = require('ansi-colors');
const log = require('fancy-log');
const {VERSION: internalRuntimeVersion} = require('./internal-version');
/**
 * Extension "type" groups. The values appear to name the intermediate
 * `_base_*` bundle each extension builds on — confirm against the build
 * tooling that consumes them.
 * @enum {string}
 */
const TYPES = (exports.TYPES = {
  AD: '_base_ad',
  MEDIA: '_base_media',
  MISC: '_base_misc',
});
/**
* Used to generate top-level JS build targets
*/
exports.jsBundles = {
'polyfills.js': {
srcDir: './src/',
srcFilename: 'polyfills.js',
destDir: './build/',
minifiedDestDir: './build/',
},
'alp.max.js': {
srcDir: './ads/alp/',
srcFilename: 'install-alp.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'alp.max.js',
includePolyfills: true,
minifiedName: 'alp.js',
},
},
'examiner.max.js': {
srcDir: './src/examiner/',
srcFilename: 'examiner.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'examiner.max.js',
includePolyfills: true,
minifiedName: 'examiner.js',
},
},
'ww.max.js': {
srcDir: './src/web-worker/',
srcFilename: 'web-worker.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'ww.max.js',
minifiedName: 'ww.js',
includePolyfills: true,
},
},
'integration.js': {
srcDir: './3p/',
srcFilename: 'integration.js',
destDir: './dist.3p/current',
minifiedDestDir: './dist.3p/' + internalRuntimeVersion,
options: {
minifiedName: 'f.js',
externs: ['./ads/ads.extern.js'],
include3pDirectories: true,
includePolyfills: true,
},
},
'ampcontext-lib.js': {
srcDir: './3p/',
srcFilename: 'ampcontext-lib.js',
destDir: './dist.3p/current',
minifiedDestDir: './dist.3p/' + internalRuntimeVersion,
options: {
minifiedName: 'ampcontext-v0.js',
externs: ['./ads/ads.extern.js'],
include3pDirectories: true,
includePolyfills: false,
},
},
'iframe-transport-client-lib.js': {
srcDir: './3p/',
srcFilename: 'iframe-transport-client-lib.js',
destDir: './dist.3p/current',
minifiedDestDir: './dist.3p/' + internalRuntimeVersion,
options: {
minifiedName: 'iframe-transport-client-v0.js',
externs: ['./ads/ads.extern.js'],
include3pDirectories: true,
includePolyfills: false,
},
},
'recaptcha.js': {
srcDir: './3p/',
srcFilename: 'recaptcha.js',
destDir: './dist.3p/current',
minifiedDestDir: './dist.3p/' + internalRuntimeVersion,
options: {
minifiedName: 'recaptcha.js',
externs: [],
include3pDirectories: true,
includePolyfills: true,
},
},
'amp-viewer-host.max.js': {
srcDir: './extensions/amp-viewer-integration/0.1/examples/',
srcFilename: 'amp-viewer-host.js',
destDir: './dist/v0/examples',
minifiedDestDir: './dist/v0/examples',
options: {
toName: 'amp-viewer-host.max.js',
minifiedName: 'amp-viewer-host.js',
incudePolyfills: true,
extraGlobs: ['extensions/amp-viewer-integration/**/*.js'],
compilationLevel: 'WHITESPACE_ONLY',
skipUnknownDepsCheck: true,
},
},
'video-iframe-integration.js': {
srcDir: './src/',
srcFilename: 'video-iframe-integration.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
minifiedName: 'video-iframe-integration-v0.js',
includePolyfills: false,
},
},
'amp-story-player.js': {
srcDir: './src/',
srcFilename: 'amp-story-player.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
minifiedName: 'amp-story-player-v0.js',
includePolyfills: false,
},
},
'amp-inabox-host.js': {
srcDir: './ads/inabox/',
srcFilename: 'inabox-host.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'amp-inabox-host.js',
minifiedName: 'amp4ads-host-v0.js',
includePolyfills: false,
},
},
'amp.js': {
srcDir: './src/',
srcFilename: 'amp.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
minifiedName: 'v0.js',
includePolyfills: true,
},
},
'amp-shadow.js': {
srcDir: './src/',
srcFilename: 'amp-shadow.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
minifiedName: 'shadow-v0.js',
includePolyfills: true,
},
},
'amp-inabox.js': {
srcDir: './src/inabox/',
srcFilename: 'amp-inabox.js',
destDir: './dist',
minifiedDestDir: './dist',
options: {
toName: 'amp-inabox.js',
minifiedName: 'amp4ads-v0.js',
includePolyfills: true,
extraGlobs: ['src/inabox/*.js', '3p/iframe-messaging-client.js'],
},
},
};
/**
* Used to generate extension build targets
*/
exports.extensionBundles = [
{
name: 'amp-3d-gltf',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-3q-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-access',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-access-laterpay',
version: ['0.1', '0.2'],
latestVersion: '0.2',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-access-scroll',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-access-poool',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-accordion',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-action-macro',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-ad',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.AD,
},
{
name: 'amp-ad-custom',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-adsense-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-adzerk-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-doubleclick-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-fake-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-triplelift-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-cloudflare-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-gmossp-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-network-mytarget-impl',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-ad-exit',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-addthis',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-analytics',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-anim',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-animation',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-apester-media',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MEDIA,
},
{
name: 'amp-app-banner',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-audio',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-auto-ads',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-autocomplete',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-auto-lightbox',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-base-carousel',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-beopinion',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-bind',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-bodymovin-animation',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-brid-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-delight-player',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MEDIA,
},
{
name: 'amp-brightcove',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-byside-content',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-kaltura-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-call-tracking',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-carousel',
version: ['0.1', '0.2'],
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-consent',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-connatix-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-crypto-polyfill',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-dailymotion',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-date-countdown',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-date-display',
version: ['0.1', '0.2'],
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-google-document-embed',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-dynamic-css-classes',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-embedly-card',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-experiment',
version: ['0.1', '1.0'],
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-facebook',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-facebook-comments',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-facebook-like',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-facebook-page',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-fit-text',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-font',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-form',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-fx-collection',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-fx-flying-carpet',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-geo',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-gfycat',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-gist',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-gwd-animation',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-hulu',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-iframe',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-ima-video',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-image-lightbox',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-image-slider',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-imgur',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-inline-gallery',
version: '0.1',
latestVersion: '0.1',
options: {
hasCss: true,
cssBinaries: [
'amp-inline-gallery',
'amp-inline-gallery-pagination',
'amp-inline-gallery-thumbnails',
],
},
type: TYPES.MISC,
},
{
name: 'amp-inputmask',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
postPrepend: ['third_party/inputmask/bundle.js'],
},
{
name: 'amp-instagram',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-install-serviceworker',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-izlesene',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-jwplayer',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-lightbox',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-lightbox-gallery',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-list',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-live-list',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-loader',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-mathml',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-mega-menu',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-megaphone',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-mustache',
version: ['0.1', '0.2'],
latestVersion: '0.2',
type: TYPES.MISC,
},
{
name: 'amp-nested-menu',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-next-page',
version: ['0.1', '1.0'],
latestVersion: '1.0',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-nexxtv-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-o2-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-ooyala-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-pinterest',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-playbuzz',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MEDIA,
},
{
name: 'amp-reach-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-redbull-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-reddit',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-riddle-quiz',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-script',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-share-tracking',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-sidebar',
version: ['0.1', '0.2'],
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-skimlinks',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-smartlinks',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-soundcloud',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-springboard-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-standalone',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-sticky-ad',
version: '1.0',
latestVersion: '1.0',
options: {hasCss: true},
type: TYPES.AD,
},
{
name: 'amp-story',
version: '0.1',
latestVersion: '1.0',
options: {
hasCss: true,
cssBinaries: [
'amp-story-bookend',
'amp-story-consent',
'amp-story-hint',
'amp-story-unsupported-browser-layer',
'amp-story-viewport-warning-layer',
'amp-story-info-dialog',
'amp-story-share',
'amp-story-share-menu',
'amp-story-system-layer',
],
},
type: TYPES.MISC,
},
{
name: 'amp-story',
version: '1.0',
latestVersion: '1.0',
options: {
hasCss: true,
cssBinaries: [
'amp-story-bookend',
'amp-story-consent',
'amp-story-draggable-drawer-header',
'amp-story-hint',
'amp-story-info-dialog',
'amp-story-quiz',
'amp-story-share',
'amp-story-share-menu',
'amp-story-system-layer',
'amp-story-tooltip',
'amp-story-unsupported-browser-layer',
'amp-story-viewport-warning-layer',
],
},
type: TYPES.MISC,
},
{
name: 'amp-story-auto-ads',
version: '0.1',
latestVersion: '0.1',
options: {
hasCss: true,
cssBinaries: [
'amp-story-auto-ads-ad-badge',
'amp-story-auto-ads-attribution',
],
},
type: TYPES.MISC,
},
{
name: 'amp-stream-gallery',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-selector',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-web-push',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-wistia-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-position-observer',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-orientation-observer',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-date-picker',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
postPrepend: ['third_party/react-dates/bundle.js'],
},
{
name: 'amp-image-viewer',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-subscriptions',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-subscriptions-google',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-pan-zoom',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-recaptcha-input',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
/**
* @deprecated `amp-slides` is deprecated and will be deleted before 1.0.
* Please see {@link AmpCarousel} with `type=slides` attribute instead.
*/
{
name: 'amp-slides',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-social-share',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-timeago',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-truncate-text',
version: '0.1',
latestVersion: '0.1',
options: {
hasCss: true,
cssBinaries: ['amp-truncate-text', 'amp-truncate-text-shadow'],
},
type: TYPES.MISC,
},
{
name: 'amp-twitter',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-user-notification',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
},
{
name: 'amp-vimeo',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-vine',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-viz-vega',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MISC,
postPrepend: [
'third_party/d3/d3.js',
'third_party/d3-geo-projection/d3-geo-projection.js',
'third_party/vega/vega.js',
],
},
{
name: 'amp-google-vrview-image',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-viewer-assistance',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-viewer-integration',
version: '0.1',
latestVersion: '0.1',
options: {
// The viewer integration code needs to run asap, so that viewers
// can influence document state asap. Otherwise the document may take
// a long time to learn that it should start process other extensions
// faster.
loadPriority: 'high',
},
type: TYPES.MISC,
},
{
name: 'amp-video',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-video-docking',
version: '0.1',
latestVersion: '0.1',
options: {hasCss: true},
type: TYPES.MEDIA,
},
{
name: 'amp-video-iframe',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-viqeo-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-vk',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-yotpo',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-youtube',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-mowplayer',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-powr-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
{
name: 'amp-mraid',
version: '0.1',
latestVersion: '0.1',
type: TYPES.AD,
},
{
name: 'amp-link-rewriter',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MISC,
},
{
name: 'amp-minute-media-player',
version: '0.1',
latestVersion: '0.1',
type: TYPES.MEDIA,
},
];
/**
 * Used to alias a version of an extension to an older deprecated version.
 * NOTE(review): the direction of the alias (which version a request for the
 * aliased version is served) is applied by the build tooling — confirm there.
 */
exports.extensionAliasBundles = {
  'amp-sticky-ad': {
    version: '1.0',
    aliasedVersion: '0.1',
  },
};
/**
 * Used to generate alternative JS build targets
 * (runtime variants built alongside the main v0 binary).
 */
exports.altMainBundles = [
  // Shadow-DOM runtime: src/amp-shadow.js -> shadow-v0.
  {
    path: 'src/amp-shadow.js',
    name: 'shadow-v0',
    version: '0.1',
    latestVersion: '0.1',
  },
  // Inabox (AMP4ADS) runtime: src/inabox/amp-inabox.js -> amp4ads-v0.
  {
    path: 'src/inabox/amp-inabox.js',
    name: 'amp4ads-v0',
    version: '0.1',
    latestVersion: '0.1',
  },
];
/**
 * Asserts a single bundle invariant; when it does not hold, logs a
 * colorized error describing the offending field and bundle, then
 * terminates the process with exit code 1.
 * @param {boolean} condition
 * @param {string} field
 * @param {string} message
 * @param {string} name
 * @param {string} found
 */
function verifyBundle_(condition, field, message, name, found) {
  if (condition) {
    return;
  }
  log(
    colors.red('ERROR:'),
    colors.cyan(field),
    message,
    colors.cyan(name),
    '\n' + found
  );
  process.exit(1);
}
/**
 * Validates every entry of `exports.extensionBundles`: each bundle must
 * declare `name`, `version`, `latestVersion`, and a `type` drawn from TYPES,
 * and all entries sharing a name must agree on `latestVersion`. The first
 * violation aborts the process (via verifyBundle_).
 */
exports.verifyExtensionBundles = function() {
  exports.extensionBundles.forEach(bundle => {
    const bundleString = JSON.stringify(bundle, null, 2);
    // Required fields.
    verifyBundle_(
      'name' in bundle,
      'name',
      'is missing from',
      '',
      bundleString
    );
    verifyBundle_(
      'version' in bundle,
      'version',
      'is missing from',
      bundle.name,
      bundleString
    );
    verifyBundle_(
      'latestVersion' in bundle,
      'latestVersion',
      'is missing from',
      bundle.name,
      bundleString
    );
    // All bundles with the same name must advertise the same latest version.
    const duplicates = exports.extensionBundles.filter(
      duplicate => duplicate.name === bundle.name
    );
    verifyBundle_(
      duplicates.every(
        duplicate => duplicate.latestVersion === bundle.latestVersion
      ),
      'latestVersion',
      'is not the same for all versions of',
      bundle.name,
      JSON.stringify(duplicates, null, 2)
    );
    verifyBundle_(
      'type' in bundle,
      'type',
      'is missing from',
      bundle.name,
      bundleString
    );
    // `type` must be one of the TYPES enum values.
    const validTypes = Object.keys(TYPES).map(x => TYPES[x]);
    verifyBundle_(
      validTypes.some(validType => validType === bundle.type),
      'type',
      `is not one of ${validTypes.join(',')} in`,
      bundle.name,
      bundleString
    );
  });
};
|
apache-2.0
|
googleads/google-ads-java
|
google-ads-stubs-v10/src/main/java/com/google/ads/googleads/v10/resources/BiddingStrategySimulationName.java
|
10111
|
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v10.resources;
import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
@Generated("by gapic-generator-java")
public class BiddingStrategySimulationName implements ResourceName {
private static final PathTemplate
CUSTOMER_ID_BIDDING_STRATEGY_ID_TYPE_MODIFICATION_METHOD_START_DATE_END_DATE =
PathTemplate.createWithoutUrlEncoding(
"customers/{customer_id}/biddingStrategySimulations/{bidding_strategy_id}~{type}~{modification_method}~{start_date}~{end_date}");
private volatile Map<String, String> fieldValuesMap;
private final String customerId;
private final String biddingStrategyId;
private final String type;
private final String modificationMethod;
private final String startDate;
private final String endDate;
@Deprecated
protected BiddingStrategySimulationName() {
customerId = null;
biddingStrategyId = null;
type = null;
modificationMethod = null;
startDate = null;
endDate = null;
}
private BiddingStrategySimulationName(Builder builder) {
customerId = Preconditions.checkNotNull(builder.getCustomerId());
biddingStrategyId = Preconditions.checkNotNull(builder.getBiddingStrategyId());
type = Preconditions.checkNotNull(builder.getType());
modificationMethod = Preconditions.checkNotNull(builder.getModificationMethod());
startDate = Preconditions.checkNotNull(builder.getStartDate());
endDate = Preconditions.checkNotNull(builder.getEndDate());
}
public String getCustomerId() {
return customerId;
}
public String getBiddingStrategyId() {
return biddingStrategyId;
}
public String getType() {
return type;
}
public String getModificationMethod() {
return modificationMethod;
}
public String getStartDate() {
return startDate;
}
public String getEndDate() {
return endDate;
}
public static Builder newBuilder() {
return new Builder();
}
public Builder toBuilder() {
return new Builder(this);
}
public static BiddingStrategySimulationName of(
String customerId,
String biddingStrategyId,
String type,
String modificationMethod,
String startDate,
String endDate) {
return newBuilder()
.setCustomerId(customerId)
.setBiddingStrategyId(biddingStrategyId)
.setType(type)
.setModificationMethod(modificationMethod)
.setStartDate(startDate)
.setEndDate(endDate)
.build();
}
public static String format(
String customerId,
String biddingStrategyId,
String type,
String modificationMethod,
String startDate,
String endDate) {
return newBuilder()
.setCustomerId(customerId)
.setBiddingStrategyId(biddingStrategyId)
.setType(type)
.setModificationMethod(modificationMethod)
.setStartDate(startDate)
.setEndDate(endDate)
.build()
.toString();
}
/**
 * Parses a formatted resource-name string back into its components.
 *
 * @param formattedString string of the form
 *     customers/{customer_id}/biddingStrategySimulations/{bidding_strategy_id}~{type}~{modification_method}~{start_date}~{end_date}
 * @return the parsed name, or {@code null} when the input is empty
 *     (validatedMatch raises for strings that do not match the template —
 *     confirm the exact exception type against the PathTemplate API)
 */
public static BiddingStrategySimulationName parse(String formattedString) {
  if (formattedString.isEmpty()) {
    return null;
  }
  Map<String, String> matchMap =
      CUSTOMER_ID_BIDDING_STRATEGY_ID_TYPE_MODIFICATION_METHOD_START_DATE_END_DATE.validatedMatch(
          formattedString,
          "BiddingStrategySimulationName.parse: formattedString not in valid format");
  return of(
      matchMap.get("customer_id"),
      matchMap.get("bidding_strategy_id"),
      matchMap.get("type"),
      matchMap.get("modification_method"),
      matchMap.get("start_date"),
      matchMap.get("end_date"));
}
/**
 * Parses each string via {@link #parse(String)}, preserving order.
 * Note: empty input strings yield {@code null} entries in the result.
 */
public static List<BiddingStrategySimulationName> parseList(List<String> formattedStrings) {
  List<BiddingStrategySimulationName> list = new ArrayList<>(formattedStrings.size());
  for (String formattedString : formattedStrings) {
    list.add(parse(formattedString));
  }
  return list;
}
/**
 * Formats each name in {@code values} to its string form, preserving order;
 * {@code null} entries are rendered as empty strings.
 */
public static List<String> toStringList(List<BiddingStrategySimulationName> values) {
  List<String> result = new ArrayList<>(values.size());
  for (BiddingStrategySimulationName name : values) {
    result.add(name == null ? "" : name.toString());
  }
  return result;
}
/** Returns whether {@code formattedString} matches this resource name's path template. */
public static boolean isParsableFrom(String formattedString) {
  return CUSTOMER_ID_BIDDING_STRATEGY_ID_TYPE_MODIFICATION_METHOD_START_DATE_END_DATE.matches(
      formattedString);
}
/**
 * Returns a map from path-template field names to this name's components.
 *
 * <p>Built lazily on first access with double-checked locking (two null
 * checks around a {@code synchronized} block; the cache field is declared
 * {@code volatile}). Components that are {@code null} are omitted from the map.
 */
@Override
public Map<String, String> getFieldValuesMap() {
  if (fieldValuesMap == null) {
    synchronized (this) {
      if (fieldValuesMap == null) {
        ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
        if (customerId != null) {
          fieldMapBuilder.put("customer_id", customerId);
        }
        if (biddingStrategyId != null) {
          fieldMapBuilder.put("bidding_strategy_id", biddingStrategyId);
        }
        if (type != null) {
          fieldMapBuilder.put("type", type);
        }
        if (modificationMethod != null) {
          fieldMapBuilder.put("modification_method", modificationMethod);
        }
        if (startDate != null) {
          fieldMapBuilder.put("start_date", startDate);
        }
        if (endDate != null) {
          fieldMapBuilder.put("end_date", endDate);
        }
        fieldValuesMap = fieldMapBuilder.build();
      }
    }
  }
  return fieldValuesMap;
}
/** Returns the value of a single template field, or {@code null} if the field is absent. */
public String getFieldValue(String fieldName) {
  return getFieldValuesMap().get(fieldName);
}
/**
 * Renders this name back into its canonical
 * customers/{customer_id}/biddingStrategySimulations/... string form
 * by instantiating the path template with the stored components.
 */
@Override
public String toString() {
  return CUSTOMER_ID_BIDDING_STRATEGY_ID_TYPE_MODIFICATION_METHOD_START_DATE_END_DATE.instantiate(
      "customer_id",
      customerId,
      "bidding_strategy_id",
      biddingStrategyId,
      "type",
      type,
      "modification_method",
      modificationMethod,
      "start_date",
      startDate,
      "end_date",
      endDate);
}
/**
 * Component-wise equality over all six name parts.
 *
 * <p>Bug fix: the previous guard was {@code o != null || getClass() == o.getClass()},
 * which throws NullPointerException for {@code equals(null)} (the equals contract
 * requires {@code false}) and ClassCastException for arguments of a different
 * class. Changed {@code ||} to {@code &&} so both cases correctly return false.
 */
@Override
public boolean equals(Object o) {
  if (o == this) {
    return true;
  }
  if (o != null && getClass() == o.getClass()) {
    BiddingStrategySimulationName that = ((BiddingStrategySimulationName) o);
    return Objects.equals(this.customerId, that.customerId)
        && Objects.equals(this.biddingStrategyId, that.biddingStrategyId)
        && Objects.equals(this.type, that.type)
        && Objects.equals(this.modificationMethod, that.modificationMethod)
        && Objects.equals(this.startDate, that.startDate)
        && Objects.equals(this.endDate, that.endDate);
  }
  return false;
}
/**
 * Hash code consistent with {@link #equals}: folds each of the six path
 * components in declaration order using the multiply-by-1000003-then-XOR
 * scheme used throughout these generated resource names.
 */
@Override
public int hashCode() {
  Object[] components = {
    customerId, biddingStrategyId, type, modificationMethod, startDate, endDate,
  };
  int hash = 1;
  for (Object component : components) {
    // Multiplication binds tighter than XOR, so this is (hash * 1000003) ^ h(c)
    // — the exact arithmetic of the unrolled original.
    hash = hash * 1000003 ^ Objects.hashCode(component);
  }
  return hash;
}
/**
 * Builder for
 * customers/{customer_id}/biddingStrategySimulations/{bidding_strategy_id}~{type}~{modification_method}~{start_date}~{end_date}.
 *
 * <p>Each setter returns {@code this} for fluent chaining; call {@link #build}
 * to produce the immutable resource name.
 */
public static class Builder {

  private String customerId;
  private String biddingStrategyId;
  private String type;
  private String modificationMethod;
  private String startDate;
  private String endDate;

  /** Creates an empty builder; populate each segment via the setters. */
  protected Builder() {}

  /** Seeds this builder with every segment of an existing resource name. */
  private Builder(BiddingStrategySimulationName biddingStrategySimulationName) {
    this.customerId = biddingStrategySimulationName.customerId;
    this.biddingStrategyId = biddingStrategySimulationName.biddingStrategyId;
    this.type = biddingStrategySimulationName.type;
    this.modificationMethod = biddingStrategySimulationName.modificationMethod;
    this.startDate = biddingStrategySimulationName.startDate;
    this.endDate = biddingStrategySimulationName.endDate;
  }

  // --- customer_id ---
  public String getCustomerId() {
    return customerId;
  }

  public Builder setCustomerId(String customerId) {
    this.customerId = customerId;
    return this;
  }

  // --- bidding_strategy_id ---
  public String getBiddingStrategyId() {
    return biddingStrategyId;
  }

  public Builder setBiddingStrategyId(String biddingStrategyId) {
    this.biddingStrategyId = biddingStrategyId;
    return this;
  }

  // --- type ---
  public String getType() {
    return type;
  }

  public Builder setType(String type) {
    this.type = type;
    return this;
  }

  // --- modification_method ---
  public String getModificationMethod() {
    return modificationMethod;
  }

  public Builder setModificationMethod(String modificationMethod) {
    this.modificationMethod = modificationMethod;
    return this;
  }

  // --- start_date ---
  public String getStartDate() {
    return startDate;
  }

  public Builder setStartDate(String startDate) {
    this.startDate = startDate;
    return this;
  }

  // --- end_date ---
  public String getEndDate() {
    return endDate;
  }

  public Builder setEndDate(String endDate) {
    this.endDate = endDate;
    return this;
  }

  /** Builds the immutable resource name from the current segment values. */
  public BiddingStrategySimulationName build() {
    return new BiddingStrategySimulationName(this);
  }
}
}
|
apache-2.0
|
maheshika/carbon4-kernel
|
core/org.wso2.carbon.core/src/main/java/org/wso2/carbon/core/persistence/OperationPersistenceManager.java
|
5853
|
/*
* Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.core.persistence;
import org.apache.axis2.AxisFault;
import org.apache.axis2.description.AxisModule;
import org.apache.axis2.description.AxisOperation;
import org.apache.axis2.description.Parameter;
import org.apache.axis2.engine.AxisConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Persists operation-level configuration — module engagements/disengagements and
 * operation parameters — through the service group file persistence manager.
 *
 * <p>All persistence calls are keyed by the owning service group's name and the
 * operation's resource path as computed by {@link PersistenceUtils}.
 *
 * @deprecated retained for backward compatibility with the old persistence layer.
 */
@Deprecated
public class OperationPersistenceManager extends AbstractPersistenceManager {

    private static final Log log = LogFactory.getLog(OperationPersistenceManager.class);

    /**
     * Constructor gets the axis config and calls the super constructor.
     *
     * @param axisConfig - AxisConfiguration
     * @param pf - persistence factory supplying the service group file PM
     * @throws AxisFault - if the config registry is not found
     */
    public OperationPersistenceManager(AxisConfiguration axisConfig, PersistenceFactory pf) throws AxisFault {
        super(axisConfig, pf.getServiceGroupFilePM(), pf);
    }

    /**
     * Constructor gets the axis config and calls the super constructor.
     *
     * @param axisConfig - AxisConfiguration
     * @throws AxisFault - if the config registry is not found
     */
    public OperationPersistenceManager(AxisConfiguration axisConfig) throws AxisFault {
        super(axisConfig);
        try {
            if (this.pf == null) {
                this.pf = PersistenceFactory.getInstance(axisConfig);
            }
            this.fpm = this.pf.getServiceGroupFilePM();
        } catch (Exception e) {
            // Best-effort initialization preserved from the original: the error is
            // logged and construction continues. NOTE(review): this leaves fpm null
            // on failure, so later persistence calls may NPE — confirm callers
            // tolerate this before hardening.
            log.error("Error getting PersistenceFactory instance", e);
        }
    }

    /**
     * Handle the engagement of the module to operation at the registry level.
     *
     * @param module - AxisModule instance
     * @param operation - AxisOperation instance
     * @throws Exception - on error
     */
    public void engageModuleForOperation(AxisModule module, AxisOperation operation)
            throws Exception {
        try {
            handleModuleForAxisDescription(operation.getAxisService().getAxisServiceGroup().getServiceGroupName(),
                    module,
                    PersistenceUtils.getResourcePath(operation), true);
            if (log.isDebugEnabled()) {
                log.debug("Successfully engaged " + module.getName() +
                        " module for " + operation.getName() + " operation");
            }
        } catch (Throwable e) {
            // Bug fix: the original message interpolated module.getOperations()
            // (a Map of the module's operations) where the target operation's name
            // belongs; use operation.getName() as the debug path above does.
            handleExceptionWithRollback(module.getName(), "Unable to engage " + module.getName() +
                    " module to " + operation.getName() + " operation ", e);
        }
    }

    /**
     * Handle the dis-engagement of the module to operation at the registry level.
     *
     * @param module - AxisModule instance
     * @param operation - AxisOperation instance
     * @throws Exception - on error
     */
    public void disengageModuleForOperation(AxisModule module, AxisOperation operation)
            throws Exception {
        try {
            handleModuleForAxisDescription(operation.getAxisService().getAxisServiceGroup().getServiceGroupName(),
                    module, PersistenceUtils.getResourcePath(operation), false);
            if (log.isDebugEnabled()) {
                log.debug("Successfully disengaged " + module.getName() +
                        " module from " + operation.getName() + " operation");
            }
        } catch (Throwable e) {
            // Bug fix: same copy-paste error as engageModuleForOperation — report
            // the operation's name, not the module's operation map.
            handleExceptionWithRollback(module.getName(), "Unable to disengage " + module.getName() +
                    " module from " + operation.getName() + " operation ", e);
        }
    }

    /**
     * Remove the specified parameter from the given operation.
     *
     * @param operation - AxisOperation instance
     * @param parameter - parameter to remove
     * @throws Exception - on error
     */
    public void removeOperationParameter(AxisOperation operation, Parameter parameter)
            throws Exception {
        removeParameter(operation.getAxisService().getAxisServiceGroup().getServiceGroupName(),
                parameter.getName(), PersistenceUtils.getResourcePath(operation));
    }

    /**
     * Persist the given operation parameter. If the parameter already exists in
     * registry, update it. Otherwise, create a new parameter.
     *
     * @param operation - AxisOperation instance
     * @param parameter - parameter to persist
     * @throws Exception - on registry call errors
     */
    public void updateOperationParameter(AxisOperation operation, Parameter parameter)
            throws Exception {
        try {
            updateParameter(operation.getAxisService().getAxisServiceGroup().getServiceGroupName(),
                    parameter, PersistenceUtils.getResourcePath(operation));
        } catch (Throwable e) {
            handleExceptionWithRollback(operation.getAxisService().getAxisServiceGroup().getServiceGroupName(),
                    "Unable to update the operation parameter " +
                            parameter.getName() + " of operation " + operation.getName(), e);
        }
    }
}
|
apache-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.