repo_name (stringlengths 4-116) | path (stringlengths 3-942) | size (stringlengths 1-7) | content (stringlengths 3-1.05M) | license (stringclasses, 15 values)
---|---|---|---|---
mdoering/backbone | life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Orchidaceae/Masdevallia/Masdevallia ophioglossa/ Syn. Masdevallia grossa/README.md | 180 |
# Masdevallia grossa Luer SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
| apache-2.0
dreamfactorysoftware/df-core | database/migrations/2020_03_02_121555_create_instance_id_table.php | 719 |
<?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class CreateInstanceIdTable extends Migration
{
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
Schema::create('instance_id', function (Blueprint $t) {
$t->increments('id');
$t->string('instance_id');
$t->timestamp('created_date')->nullable();
$t->timestamp('last_modified_date')->useCurrent();
});
}
/**
* Reverse the migrations.
*
* @return void
*/
public function down()
{
Schema::dropIfExists('instance_id');
}
}
| apache-2.0
mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Malvales/Malvaceae/Wissadula/Wissadula ferruginea/README.md | 188 |
# Wissadula ferruginea Garcke & K.Schum. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
| apache-2.0
T4MVC/T4MVC | T4MVCHostMvcApp/T4MVC Files/Ctrl6Controller.generated.cs | 11849 |
// <auto-generated />
// This file was generated by a T4 template.
// Don't change it directly as your change would get overwritten. Instead, make changes
// to the .tt file (i.e. the T4 template) and save it to regenerate this file.
// Make sure the compiler doesn't complain about missing Xml comments and CLS compliance
// 0108: suppress "Foo hides inherited member Foo. Use the new keyword if hiding was intended." when a controller and its abstract parent are both processed
// 0114: suppress "Foo.BarController.Baz()' hides inherited member 'Qux.BarController.Baz()'. To make the current member override that implementation, add the override keyword. Otherwise add the new keyword." when an action (with an argument) overrides an action in a parent controller
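// Illustrative note (not part of the generated file): the strongly typed helpers below are
// typically consumed from hand-written controller code, e.g. a hypothetical call site:
//
//     return RedirectToAction(MVC.Ctrl6.Details(id));
//
// MVC.Ctrl6.Details(id) yields a T4MVC ActionResult carrying the route values, which the
// generated RedirectToAction(ActionResult) overload converts into a RedirectToRouteResult.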
#pragma warning disable 1591, 3008, 3009, 0108, 0114
#region T4MVC
using System;
using System.Diagnostics;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using System.Web;
using System.Web.Hosting;
using System.Web.Mvc;
using System.Web.Mvc.Ajax;
using System.Web.Mvc.Html;
using System.Web.Routing;
using T4MVC;
namespace T4MVCHostMvcApp.Controllers
{
public partial class Ctrl6Controller
{
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public Ctrl6Controller() { }
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
protected Ctrl6Controller(Dummy d) { }
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
protected RedirectToRouteResult RedirectToAction(ActionResult result)
{
var callInfo = result.GetT4MVCResult();
return RedirectToRoute(callInfo.RouteValueDictionary);
}
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
protected RedirectToRouteResult RedirectToAction(Task<ActionResult> taskResult)
{
return RedirectToAction(taskResult.Result);
}
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
protected RedirectToRouteResult RedirectToActionPermanent(ActionResult result)
{
var callInfo = result.GetT4MVCResult();
return RedirectToRoutePermanent(callInfo.RouteValueDictionary);
}
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
protected RedirectToRouteResult RedirectToActionPermanent(Task<ActionResult> taskResult)
{
return RedirectToActionPermanent(taskResult.Result);
}
[NonAction]
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public virtual System.Web.Mvc.ActionResult Details()
{
return new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Details);
}
[NonAction]
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public virtual System.Web.Mvc.ActionResult Edit()
{
return new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Edit);
}
[NonAction]
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public virtual System.Web.Mvc.ActionResult Delete()
{
return new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Delete);
}
[NonAction]
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public virtual System.Web.Mvc.ActionResult OtherAction()
{
return new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.OtherAction);
}
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public Ctrl6Controller Actions { get { return MVC.Ctrl6; } }
[GeneratedCode("T4MVC", "2.0")]
public readonly string Area = "";
[GeneratedCode("T4MVC", "2.0")]
public readonly string Name = "Ctrl6";
[GeneratedCode("T4MVC", "2.0")]
public const string NameConst = "Ctrl6";
[GeneratedCode("T4MVC", "2.0")]
static readonly ActionNamesClass s_actions = new ActionNamesClass();
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public ActionNamesClass ActionNames { get { return s_actions; } }
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public class ActionNamesClass
{
public readonly string Index = "Index";
public readonly string Details = "Details";
public readonly string Create = "Create";
public readonly string Edit = "Edit";
public readonly string Delete = "Delete";
public readonly string OtherAction = "OtherAction";
}
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public class ActionNameConstants
{
public const string Index = "Index";
public const string Details = "Details";
public const string Create = "Create";
public const string Edit = "Edit";
public const string Delete = "Delete";
public const string OtherAction = "OtherAction";
}
static readonly ActionParamsClass_Details s_params_Details = new ActionParamsClass_Details();
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public ActionParamsClass_Details DetailsParams { get { return s_params_Details; } }
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public class ActionParamsClass_Details
{
public readonly string id = "id";
}
static readonly ActionParamsClass_Create s_params_Create = new ActionParamsClass_Create();
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public ActionParamsClass_Create CreateParams { get { return s_params_Create; } }
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public class ActionParamsClass_Create
{
public readonly string data = "data";
}
static readonly ActionParamsClass_Edit s_params_Edit = new ActionParamsClass_Edit();
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public ActionParamsClass_Edit EditParams { get { return s_params_Edit; } }
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public class ActionParamsClass_Edit
{
public readonly string id = "id";
public readonly string data = "data";
}
static readonly ActionParamsClass_Delete s_params_Delete = new ActionParamsClass_Delete();
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public ActionParamsClass_Delete DeleteParams { get { return s_params_Delete; } }
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public class ActionParamsClass_Delete
{
public readonly string id = "id";
}
static readonly ActionParamsClass_OtherAction s_params_OtherAction = new ActionParamsClass_OtherAction();
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public ActionParamsClass_OtherAction OtherActionParams { get { return s_params_OtherAction; } }
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public class ActionParamsClass_OtherAction
{
public readonly string id = "id";
}
static readonly ViewsClass s_views = new ViewsClass();
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public ViewsClass Views { get { return s_views; } }
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public class ViewsClass
{
static readonly _ViewNamesClass s_ViewNames = new _ViewNamesClass();
public _ViewNamesClass ViewNames { get { return s_ViewNames; } }
public class _ViewNamesClass
{
}
}
}
[GeneratedCode("T4MVC", "2.0"), DebuggerNonUserCode]
public partial class T4MVC_Ctrl6Controller : T4MVCHostMvcApp.Controllers.Ctrl6Controller
{
public T4MVC_Ctrl6Controller() : base(Dummy.Instance) { }
[NonAction]
partial void IndexOverride(T4MVC_System_Web_Mvc_ActionResult callInfo);
[NonAction]
public override System.Web.Mvc.ActionResult Index()
{
var callInfo = new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Index);
IndexOverride(callInfo);
return callInfo;
}
[NonAction]
partial void DetailsOverride(T4MVC_System_Web_Mvc_ActionResult callInfo, long id);
[NonAction]
public override System.Web.Mvc.ActionResult Details(long id)
{
var callInfo = new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Details);
ModelUnbinderHelpers.AddRouteValues(callInfo.RouteValueDictionary, "id", id);
DetailsOverride(callInfo, id);
return callInfo;
}
[NonAction]
partial void CreateOverride(T4MVC_System_Web_Mvc_ActionResult callInfo);
[NonAction]
public override System.Web.Mvc.ActionResult Create()
{
var callInfo = new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Create);
CreateOverride(callInfo);
return callInfo;
}
[NonAction]
partial void CreateOverride(T4MVC_System_Web_Mvc_ActionResult callInfo, string data);
[NonAction]
public override System.Web.Mvc.ActionResult Create(string data)
{
var callInfo = new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Create);
ModelUnbinderHelpers.AddRouteValues(callInfo.RouteValueDictionary, "data", data);
CreateOverride(callInfo, data);
return callInfo;
}
[NonAction]
partial void EditOverride(T4MVC_System_Web_Mvc_ActionResult callInfo, long id);
[NonAction]
public override System.Web.Mvc.ActionResult Edit(long id)
{
var callInfo = new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Edit);
ModelUnbinderHelpers.AddRouteValues(callInfo.RouteValueDictionary, "id", id);
EditOverride(callInfo, id);
return callInfo;
}
[NonAction]
partial void EditOverride(T4MVC_System_Web_Mvc_ActionResult callInfo, long id, string data);
[NonAction]
public override System.Web.Mvc.ActionResult Edit(long id, string data)
{
var callInfo = new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Edit);
ModelUnbinderHelpers.AddRouteValues(callInfo.RouteValueDictionary, "id", id);
ModelUnbinderHelpers.AddRouteValues(callInfo.RouteValueDictionary, "data", data);
EditOverride(callInfo, id, data);
return callInfo;
}
[NonAction]
partial void DeleteOverride(T4MVC_System_Web_Mvc_ActionResult callInfo, long id);
[NonAction]
public override System.Web.Mvc.ActionResult Delete(long id)
{
var callInfo = new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.Delete);
ModelUnbinderHelpers.AddRouteValues(callInfo.RouteValueDictionary, "id", id);
DeleteOverride(callInfo, id);
return callInfo;
}
[NonAction]
partial void OtherActionOverride(T4MVC_System_Web_Mvc_ActionResult callInfo, long id);
[NonAction]
public override System.Web.Mvc.ActionResult OtherAction(long id)
{
var callInfo = new T4MVC_System_Web_Mvc_ActionResult(Area, Name, ActionNames.OtherAction);
ModelUnbinderHelpers.AddRouteValues(callInfo.RouteValueDictionary, "id", id);
OtherActionOverride(callInfo, id);
return callInfo;
}
}
}
#endregion T4MVC
#pragma warning restore 1591, 3008, 3009, 0108, 0114
| apache-2.0
mdoering/backbone | life/Plantae/Magnoliophyta/Magnoliopsida/Vitales/Vitaceae/Cissus/Cissus heyneana/README.md | 173 |
# Cissus heyneana Planch. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
| apache-2.0
raner/top.java.matrix | src/main/java/top/java/matrix/MatrixFactory.java | 1406 |
// //
// Copyright 2017 Mirko Raner //
// //
// Licensed under the Apache License, Version 2.0 (the "License"); //
// you may not use this file except in compliance with the License. //
// You may obtain a copy of the License at //
// //
// http://www.apache.org/licenses/LICENSE-2.0 //
// //
// Unless required by applicable law or agreed to in writing, software //
// distributed under the License is distributed on an "AS IS" BASIS, //
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //
// See the License for the specific language governing permissions and //
// limitations under the License. //
// //
package top.java.matrix;
public interface MatrixFactory
{
<M extends Dimension, N extends Dimension> Matrix<M, N> create(Dimension rows, Dimension columns, float[] columnMajorArray, MatrixOperation... operations);
}
| apache-2.0
practicalswift/swift | lib/SILOptimizer/IPO/LetPropertiesOpts.cpp | 22467 |
//===--- LetPropertiesOpts.cpp - Optimize let properties ------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
// Promote values of non-static let properties initialized by means
// of constant values of simple types into their uses.
//
// For any given non-static let property this optimization is only possible
// if this pass can prove that it has analyzed all assignments of an initial
// value to this property and all those assignments assign the same value
// to this property.
//===----------------------------------------------------------------------===//
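// Illustrative example (an assumption, not part of the original source): Swift code of
// the shape this pass targets, sketched here as a comment only:
//
//   final class Config {
//     let version = 3   // every initializer assigns the same constant
//   }
//
// Loads of `config.version` can then be replaced by the constant 3, and when the
// property is not visible outside the module its storage becomes a candidate for
// removal (see the TODOs below).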
#define DEBUG_TYPE "let-properties-opt"
#include "swift/SIL/DebugUtils.h"
#include "swift/SIL/InstructionUtils.h"
#include "swift/SIL/SILBasicBlock.h"
#include "swift/SIL/SILInstruction.h"
#include "swift/SIL/SILLinkage.h"
#include "swift/SILOptimizer/PassManager/Passes.h"
#include "swift/SILOptimizer/PassManager/Transforms.h"
#include "swift/SILOptimizer/Utils/Local.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
using namespace swift;
namespace {
using InstructionList = SmallVector<SILInstruction *, 8>;
struct InitSequence {
InstructionList Instructions;
SILValue Result;
bool isValid() const {
return (bool) Result;
}
};
/// Promote values of non-static let properties initialized by means
/// of constant values of simple types into their uses.
///
/// TODO: Don't occupy any storage for such let properties with constant
/// initializers.
///
/// Note: Storage from a let property can only be removed if this
/// property can never be referenced from another module.
class LetPropertiesOpt {
SILModule *Module;
typedef SmallVector<VarDecl *, 4> Properties;
llvm::SetVector<SILFunction *> ChangedFunctions;
// Map each let property to a set of instructions accessing it.
llvm::MapVector<VarDecl *, InstructionList> AccessMap;
// Map each let property to the instruction sequence which initializes it.
llvm::MapVector<VarDecl *, InitSequence> InitMap;
// Properties in this set should not be processed by this pass
// anymore.
llvm::SmallPtrSet<VarDecl *, 16> SkipProcessing;
// Types in this set should not be processed by this pass
// anymore.
llvm::SmallPtrSet<NominalTypeDecl *, 16> SkipTypeProcessing;
// Properties in this set cannot be removed.
llvm::SmallPtrSet<VarDecl *, 16> CannotRemove;
// Set of let properties in a given nominal type.
llvm::MapVector<NominalTypeDecl *, Properties> NominalTypeLetProperties;
// Set of properties which already fulfill all conditions, except
// the availability of a constant, statically known initializer.
llvm::SmallPtrSet<VarDecl *, 16> PotentialConstantLetProperty;
public:
LetPropertiesOpt(SILModule *M): Module(M) {}
void run(SILModuleTransform *T);
protected:
bool isConstantLetProperty(VarDecl *Property);
void collectPropertyAccess(SILInstruction *I, VarDecl *Property, bool NonRemovable);
void collectStructPropertiesAccess(StructInst *SI, bool NonRemovable);
void optimizeLetPropertyAccess(VarDecl *SILG, const InitSequence &Init);
bool analyzeInitValue(SILInstruction *I, VarDecl *Prop);
};
/// Helper class to copy only the set of SIL instructions provided in the
/// constructor.
class InitSequenceCloner : public SILClonerWithScopes<InitSequenceCloner> {
friend class SILInstructionVisitor<InitSequenceCloner>;
friend class SILCloner<InitSequenceCloner>;
const InitSequence &Init;
SILInstruction *DestIP;
public:
InitSequenceCloner(const InitSequence &init, SILInstruction *destIP)
: SILClonerWithScopes(*destIP->getFunction()), Init(init), DestIP(destIP) {}
void process(SILInstruction *I) { visit(I); }
SILBasicBlock *remapBasicBlock(SILBasicBlock *BB) { return BB; }
SILValue getMappedValue(SILValue Value) {
return SILCloner<InitSequenceCloner>::getMappedValue(Value);
}
void postProcess(SILInstruction *orig, SILInstruction *cloned) {
DestIP->getParent()->push_front(cloned);
cloned->moveBefore(DestIP);
SILClonerWithScopes<InitSequenceCloner>::postProcess(orig, cloned);
}
/// Clone all the instructions of the init sequence into the destination
/// function, immediately before the destination instruction, and return the value of
/// the result.
SILValue clone() {
for (auto I : Init.Instructions)
process(I);
return getMappedValue(Init.Result);
}
};
} // end anonymous namespace
#ifndef NDEBUG
// For debugging only.
static raw_ostream &operator<<(raw_ostream &OS, const VarDecl &decl) {
auto *Ty = dyn_cast<NominalTypeDecl>(decl.getDeclContext());
if (Ty)
OS << Ty->getName() << "::";
OS << decl.getName();
return OS;
}
#endif
/// Optimize access to the let property, which is known
/// to have a constant value. Replace all loads from the
/// property by its constant value.
void LetPropertiesOpt::optimizeLetPropertyAccess(VarDecl *Property,
const InitSequence &init) {
assert(init.isValid());
if (SkipProcessing.count(Property))
return;
auto *Ty = dyn_cast<NominalTypeDecl>(Property->getDeclContext());
if (SkipTypeProcessing.count(Ty))
return;
LLVM_DEBUG(llvm::dbgs() << "Replacing access to property '" << *Property
<< "' by its constant initializer\n");
auto PropertyAccess = Property->getEffectiveAccess();
auto TypeAccess = Ty->getEffectiveAccess();
auto CanRemove = false;
// Check if a given let property can be removed, because it
// is not accessible elsewhere. This can happen if this property
// is private or if it is internal and WMO mode is used.
if (TypeAccess <= AccessLevel::FilePrivate ||
PropertyAccess <= AccessLevel::FilePrivate
|| ((TypeAccess <= AccessLevel::Internal ||
PropertyAccess <= AccessLevel::Internal) &&
Module->isWholeModule())) {
CanRemove = true;
LLVM_DEBUG(llvm::dbgs() << "Storage for property '" << *Property
<< "' can be eliminated\n");
}
if (CannotRemove.count(Property))
CanRemove = false;
if (!AccessMap.count(Property)) {
LLVM_DEBUG(llvm::dbgs() << "Property '" << *Property <<"' is never read\n");
if (CanRemove) {
// TODO: Remove the let property, because it is never accessed.
}
return;
}
auto &Loads = AccessMap[Property];
unsigned NumReplaced = 0;
for (auto Load: Loads) {
SILFunction *F = Load->getFunction();
// A helper function to copy the initializer into the target function
// at the target insertion point.
auto cloneInitAt = [&](SILInstruction *insertionPoint) -> SILValue {
InitSequenceCloner cloner(init, insertionPoint);
return cloner.clone();
};
// Look for any instructions accessing let properties.
if (isa<RefElementAddrInst>(Load) || isa<StructElementAddrInst>(Load)
|| isa<BeginAccessInst>(Load)) {
auto proj = cast<SingleValueInstruction>(Load);
// Copy the initializer into the function
// Replace the access to a let property by the value
// computed by this initializer.
SILValue clonedInit = cloneInitAt(proj);
for (auto UI = proj->use_begin(), E = proj->use_end(); UI != E;) {
auto *User = UI->getUser();
++UI;
// A nested begin_access will be mapped as a separate "Load".
if (isa<BeginAccessInst>(User))
continue;
if (!canReplaceLoadSequence(User))
continue;
replaceLoadSequence(User, clonedInit);
eraseUsesOfInstruction(User);
User->eraseFromParent();
++NumReplaced;
}
ChangedFunctions.insert(F);
} else if (auto proj = dyn_cast<StructExtractInst>(Load)) {
// Copy the initializer into the function
// Replace the access to a let property by the value
// computed by this initializer.
SILValue clonedInit = cloneInitAt(proj);
proj->replaceAllUsesWith(clonedInit);
LLVM_DEBUG(llvm::dbgs() << "Access to " << *Property <<" was replaced:\n";
clonedInit->dumpInContext());
proj->eraseFromParent();
++NumReplaced;
ChangedFunctions.insert(F);
}
}
LLVM_DEBUG(llvm::dbgs() << "Access to " << *Property << " was replaced "
<< NumReplaced << " time(s)\n");
if (CanRemove) {
// TODO: Remove the let property, because it is never accessed.
}
}
/// Compare two SILValues structurally.
static bool isStructurallyIdentical(SILValue LHS, SILValue RHS) {
if (LHS == RHS)
return true;
if (LHS->getType() != RHS->getType())
return false;
auto lResult = LHS->getDefiningInstructionResult();
auto rResult = RHS->getDefiningInstructionResult();
assert(lResult && rResult &&
"operands of instructions approved by analyzeStaticInitializer "
"should always be defined by instructions");
return (lResult->ResultIndex == rResult->ResultIndex &&
lResult->Instruction->isIdenticalTo(rResult->Instruction,
isStructurallyIdentical));
};
/// Compare two sequences of SIL instructions. They should be structurally
/// equivalent.
static bool isSameInitSequence(const InitSequence &LHS,
const InitSequence &RHS) {
assert(LHS.isValid() && RHS.isValid());
// This will recursively check all the instructions. It's possible
// that they'll be composed slightly differently, but it shouldn't matter.
return isStructurallyIdentical(LHS.Result, RHS.Result);
}
/// Check if a given let property can be assigned externally.
static bool isAssignableExternally(VarDecl *Property, SILModule *Module) {
if (Module->isVisibleExternally(Property)) {
// If at least one of the properties of the enclosing type cannot be
// used externally, then no initializer can be implemented externally as
// it wouldn't be able to initialize such a property.
// Moreover, for classes, only the class itself can initialize its
// let properties. Subclasses and extensions cannot do it.
// For structs, external extensions may initialize let properties. But to do
// that they need to be able to initialize all properties, i.e. all
// properties should be accessible by the extension.
auto *Ty = dyn_cast<NominalTypeDecl>(Property->getDeclContext());
// Initializer for a let property of a class cannot exist externally.
// It cannot be defined by an extension or a derived class.
if (isa<ClassDecl>(Ty))
return false;
// Check if there are any private properties or any internal properties and
// it is a whole module compilation. In this case, no external initializer
// may exist.
for (auto SP : Ty->getStoredProperties()) {
auto storedPropertyAccess = SP->getEffectiveAccess();
if (storedPropertyAccess <= AccessLevel::FilePrivate ||
(storedPropertyAccess <= AccessLevel::Internal &&
Module->isWholeModule())) {
LLVM_DEBUG(llvm::dbgs() << "Property " << *Property
<< " cannot be set externally\n");
return false;
}
}
LLVM_DEBUG(llvm::dbgs() << "Property " << *Property
<< " can be used externally\n");
return true;
}
return false;
}
// Checks if a given property may have any unknown uses which cannot
// be analyzed by this pass.
static bool mayHaveUnknownUses(VarDecl *Property, SILModule *Module) {
if (Property->getDeclContext()->getParentModule() !=
Module->getSwiftModule()) {
LLVM_DEBUG(llvm::dbgs() << "Property " << *Property
<< " is defined in a different module\n");
// We don't see the bodies of initializers from a different module
// unless all of them are fragile.
// TODO: Support fragile initializers.
return true;
}
// If let properties can be assigned externally, we don't know
// the values they may get.
if (isAssignableExternally(Property, Module)) {
return true;
}
return false;
}
/// Check if a given property is a non-static let property
/// with known constant value.
bool LetPropertiesOpt::isConstantLetProperty(VarDecl *Property) {
// Process only non-static let properties here.
if (!Property->isLet() || Property->isStatic())
return false;
// Do not re-process already known properties.
if (SkipProcessing.count(Property))
return false;
// If these checks were performed already, no need to
// repeat them.
if (PotentialConstantLetProperty.count(Property))
return true;
// Check the visibility of this property. If its visibility
// implies that this optimization pass cannot analyze all uses,
// don't process it.
if (mayHaveUnknownUses(Property, Module)) {
LLVM_DEBUG(llvm::dbgs() << "Property '" << *Property
<< "' may have unknown uses\n");
SkipProcessing.insert(Property);
return false;
}
LLVM_DEBUG(llvm::dbgs() << "Property '" << *Property
<< "' has no unknown uses\n");
// Only properties of simple types can be optimized.
if (!isSimpleType(Module->Types.getLoweredType(Property->getType()), *Module)) {
LLVM_DEBUG(llvm::dbgs() << "Property '" << *Property
<< "' is not of trivial type\n");
SkipProcessing.insert(Property);
return false;
}
PotentialConstantLetProperty.insert(Property);
return true;
}
static bool isProjectionOfProperty(SILValue addr, VarDecl *Property) {
if (auto *REA = dyn_cast<RefElementAddrInst>(addr)) {
return REA->getField() == Property;
}
if (auto *SEA = dyn_cast<StructElementAddrInst>(addr)) {
return SEA->getField() == Property;
}
return false;
}
// Analyze the init value being stored by the instruction into a property.
bool
LetPropertiesOpt::analyzeInitValue(SILInstruction *I, VarDecl *Property) {
SILValue value;
if (auto SI = dyn_cast<StructInst>(I)) {
value = SI->getFieldValue(Property);
} else if (auto SI = dyn_cast<StoreInst>(I)) {
auto Dest = stripAddressAccess(SI->getDest());
assert(isProjectionOfProperty(stripAddressAccess(SI->getDest()), Property)
&& "Store instruction should store into a proper let property");
(void) Dest;
value = SI->getSrc();
}
// Check if it's just a copy from another instance of the struct.
if (auto *LI = dyn_cast<LoadInst>(value)) {
SILValue addr = LI->getOperand();
if (isProjectionOfProperty(addr, Property))
return true;
}
// Bail if a value of a property is not a statically known constant init.
InitSequence sequence;
sequence.Result = value;
if (!analyzeStaticInitializer(value, sequence.Instructions))
return false;
auto &cachedSequence = InitMap[Property];
if (cachedSequence.isValid() &&
!isSameInitSequence(cachedSequence, sequence)) {
// The found init value is different from the already seen init value.
return false;
} else {
LLVM_DEBUG(llvm::dbgs() << "The value of property '" << *Property
<< "' is statically known so far\n");
// Remember the statically known value.
cachedSequence = std::move(sequence);
return true;
}
}
// Analyze the 'struct' instruction and check if it initializes
// any let properties by statically known constant initializers.
void LetPropertiesOpt::collectStructPropertiesAccess(StructInst *SI,
bool NonRemovable) {
auto structDecl = SI->getStructDecl();
// Check if this struct has any let properties.
// Bail, if this struct is known to contain nothing interesting.
if (SkipTypeProcessing.count(structDecl))
return;
// Get the set of let properties defined by this struct.
if (!NominalTypeLetProperties.count(structDecl)) {
// Compute the let properties of this struct.
SmallVector<VarDecl *, 4> LetProps;
for (auto Prop : structDecl->getStoredProperties()) {
if (!isConstantLetProperty(Prop))
continue;
LetProps.push_back(Prop);
}
if (LetProps.empty()) {
// No interesting let properties in this struct.
SkipTypeProcessing.insert(structDecl);
return;
}
NominalTypeLetProperties[structDecl] = LetProps;
LLVM_DEBUG(llvm::dbgs() << "Computed set of let properties for struct '"
<< structDecl->getName() << "'\n");
}
auto &Props = NominalTypeLetProperties[structDecl];
LLVM_DEBUG(llvm::dbgs() << "Found a struct instruction initializing some "
"let properties: ";
SI->dumpInContext());
// Figure out the initializing sequence for each
// of the properties.
for (auto Prop : Props) {
if (SkipProcessing.count(Prop))
continue;
SILValue PropValue = SI->getOperandForField(Prop)->get();
LLVM_DEBUG(llvm::dbgs() << "Check the value of property '" << *Prop
<< "' :" << PropValue << "\n");
if (!analyzeInitValue(SI, Prop)) {
SkipProcessing.insert(Prop);
LLVM_DEBUG(llvm::dbgs() << "The value of a let property '" << *Prop
<< "' is not statically known\n");
}
(void) PropValue;
}
}
/// Check if I is a sequence of projections followed by a load.
/// Since it is supposed to be a load from a let property with
/// statically known constant initializer, only struct_element_addr
/// and tuple_element_addr projections are considered.
static bool isValidPropertyLoad(SILInstruction *I) {
if (isa<LoadInst>(I))
return true;
if (isa<StructElementAddrInst>(I) || isa<TupleElementAddrInst>(I)) {
auto projection = cast<SingleValueInstruction>(I);
for (auto Use : getNonDebugUses(projection)) {
if (isIncidentalUse(Use->getUser()))
continue;
if (!isValidPropertyLoad(Use->getUser()))
return false;
}
return true;
}
return false;
}
/// Remember where this property is accessed.
void LetPropertiesOpt::collectPropertyAccess(SILInstruction *I,
VarDecl *Property,
bool NonRemovable) {
if (!isConstantLetProperty(Property))
return;
LLVM_DEBUG(llvm::dbgs() << "Collecting property access for property '"
<< *Property << "':\n";
llvm::dbgs() << "The instructions are:\n"; I->dumpInContext());
if (isa<RefElementAddrInst>(I) || isa<StructElementAddrInst>(I)
|| isa<BeginAccessInst>(I)) {
// Check if there is a store to this property.
auto projection = cast<SingleValueInstruction>(I);
for (auto Use : getNonDebugUses(projection)) {
auto *User = Use->getUser();
if (isIncidentalUse(User))
continue;
// Each begin_access is analyzed as a separate property access. Do not
// consider a begin_access a use of the current projection.
if (isa<BeginAccessInst>(User))
continue;
if (auto *SI = dyn_cast<StoreInst>(User)) {
// There is a store into this property.
// Analyze the assigned value and check if it is a constant
// statically known initializer.
if (SI->getDest() != projection || !analyzeInitValue(SI, Property)) {
SkipProcessing.insert(Property);
return;
}
continue;
}
// Follow the chain of projections and check if it ends up with a load.
// If this is not the case, it is potentially a store into sub-property
// of a property.
// We cannot handle such cases yet, so bail.
if (!isValidPropertyLoad(User)) {
SkipProcessing.insert(Property);
return;
}
}
}
AccessMap[Property].push_back(I);
// If a property is marked as non-removable, its initialization
// and storage cannot be completely removed. But its constant
// value can still be propagated into its uses whenever possible.
if (NonRemovable)
CannotRemove.insert(Property);
}
void LetPropertiesOpt::run(SILModuleTransform *T) {
// Collect property access information for the whole module.
for (auto &F : *Module) {
// Take into account even those functions that should not be
// optimized, because they may contain access to the let
// properties.
bool NonRemovable = !F.shouldOptimize();
// FIXME: We should be able to handle ownership.
NonRemovable &= !F.hasOwnership();
for (auto &BB : F) {
for (auto &I : BB)
// Look for any instructions accessing let properties.
// It includes referencing this specific property (both reads and
// stores), as well as implicit stores by means of e.g.
// a struct instruction.
if (auto *BAI = dyn_cast<BeginAccessInst>(&I)) {
if (auto *REAI =
dyn_cast<RefElementAddrInst>(stripAddressAccess(BAI))) {
collectPropertyAccess(BAI, REAI->getField(), NonRemovable);
}
} else if (auto *REAI = dyn_cast<RefElementAddrInst>(&I)) {
collectPropertyAccess(REAI, REAI->getField(), NonRemovable);
} else if (auto *SEI = dyn_cast<StructExtractInst>(&I)) {
collectPropertyAccess(SEI, SEI->getField(), NonRemovable);
} else if (auto *SEAI = dyn_cast<StructElementAddrInst>(&I)) {
collectPropertyAccess(SEAI, SEAI->getField(), NonRemovable);
} else if (auto *SI = dyn_cast<StructInst>(&I)) {
collectStructPropertiesAccess(SI, NonRemovable);
}
}
}
for (auto &Init: InitMap) {
optimizeLetPropertyAccess(Init.first, Init.second);
}
for (SILFunction *ChangedFn : ChangedFunctions) {
// Program flow is not changed by this pass.
T->invalidateAnalysis(ChangedFn,
SILAnalysis::InvalidationKind::Instructions);
}
}
namespace {
class LetPropertiesOptPass : public SILModuleTransform
{
void run() override {
LetPropertiesOpt(getModule()).run(this);
}
};
} // end anonymous namespace
SILTransform *swift::createLetPropertiesOpt() {
return new LetPropertiesOptPass();
}
| apache-2.0
slightperturbation/Cobalt | ext/emsdk_portable/clang/tag-e1.34.1/src/lib/Transforms/InstCombine/InstCombineVectorOps.cpp | 49354 |
//===- InstCombineVectorOps.cpp -------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements instcombine for ExtractElement, InsertElement and
// ShuffleVector.
//
//===----------------------------------------------------------------------===//
#include "InstCombineInternal.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/IR/PatternMatch.h"
using namespace llvm;
using namespace PatternMatch;
#define DEBUG_TYPE "instcombine"
/// CheapToScalarize - Return true if the value is cheaper to scalarize than it
/// is to leave as a vector operation. isConstant indicates whether we're
/// extracting one known element. If false we're extracting a variable index.
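/// Illustrative example (an assumption, not from the original source): extracting from a
/// splat constant is cheap to scalarize even at a variable index, e.g.
///   %e = extractelement <4 x i32> <i32 7, i32 7, i32 7, i32 7>, i32 %idx
/// because all elements are the same value, so any one of them can be used.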
static bool CheapToScalarize(Value *V, bool isConstant) {
if (Constant *C = dyn_cast<Constant>(V)) {
if (isConstant) return true;
// If all elts are the same, we can extract it and use any of the values.
if (Constant *Op0 = C->getAggregateElement(0U)) {
for (unsigned i = 1, e = V->getType()->getVectorNumElements(); i != e;
++i)
if (C->getAggregateElement(i) != Op0)
return false;
return true;
}
}
Instruction *I = dyn_cast<Instruction>(V);
if (!I) return false;
// An insertelement gets simplified to the inserted element, or is deleted if
// this is a constant-index extractelement and it's a constant-index insertelement.
if (I->getOpcode() == Instruction::InsertElement && isConstant &&
isa<ConstantInt>(I->getOperand(2)))
return true;
if (I->getOpcode() == Instruction::Load && I->hasOneUse())
return true;
if (BinaryOperator *BO = dyn_cast<BinaryOperator>(I))
if (BO->hasOneUse() &&
(CheapToScalarize(BO->getOperand(0), isConstant) ||
CheapToScalarize(BO->getOperand(1), isConstant)))
return true;
if (CmpInst *CI = dyn_cast<CmpInst>(I))
if (CI->hasOneUse() &&
(CheapToScalarize(CI->getOperand(0), isConstant) ||
CheapToScalarize(CI->getOperand(1), isConstant)))
return true;
return false;
}
/// FindScalarElement - Given a vector and an element number, see if the scalar
/// value is already around as a register, for example if it were inserted then
/// extracted from the vector.
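/// Illustrative example (an assumption, not from the original source):
///   %v = insertelement <4 x float> %vec, float %x, i32 1
///   %e = extractelement <4 x float> %v, i32 1
/// Here FindScalarElement(%v, 1) returns %x, so %e can be replaced by %x without
/// touching the vector.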
static Value *FindScalarElement(Value *V, unsigned EltNo) {
assert(V->getType()->isVectorTy() && "Not looking at a vector?");
VectorType *VTy = cast<VectorType>(V->getType());
unsigned Width = VTy->getNumElements();
if (EltNo >= Width) // Out of range access.
return UndefValue::get(VTy->getElementType());
if (Constant *C = dyn_cast<Constant>(V))
return C->getAggregateElement(EltNo);
if (InsertElementInst *III = dyn_cast<InsertElementInst>(V)) {
// If this is an insert to a variable element, we don't know what it is.
if (!isa<ConstantInt>(III->getOperand(2)))
return nullptr;
unsigned IIElt = cast<ConstantInt>(III->getOperand(2))->getZExtValue();
// If this is an insert to the element we are looking for, return the
// inserted value.
if (EltNo == IIElt)
return III->getOperand(1);
// Otherwise, the insertelement doesn't modify the value, recurse on its
// vector input.
return FindScalarElement(III->getOperand(0), EltNo);
}
if (ShuffleVectorInst *SVI = dyn_cast<ShuffleVectorInst>(V)) {
unsigned LHSWidth = SVI->getOperand(0)->getType()->getVectorNumElements();
int InEl = SVI->getMaskValue(EltNo);
if (InEl < 0)
return UndefValue::get(VTy->getElementType());
if (InEl < (int)LHSWidth)
return FindScalarElement(SVI->getOperand(0), InEl);
return FindScalarElement(SVI->getOperand(1), InEl - LHSWidth);
}
// Extract a value from a vector add operation with a constant zero.
Value *Val = nullptr; Constant *Con = nullptr;
if (match(V, m_Add(m_Value(Val), m_Constant(Con)))) {
if (Con->getAggregateElement(EltNo)->isNullValue())
return FindScalarElement(Val, EltNo);
}
// Otherwise, we don't know.
return nullptr;
}
// If we have a PHI node with a vector type that has only 2 uses: feed
// itself and be an operand of extractelement at a constant location,
// try to replace the PHI of the vector type with a PHI of a scalar type.
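// Illustrative shape of the pattern (an assumption, not from the original source):
//   loop:
//     %v = phi <4 x i32> [ %init, %entry ], [ %v.next, %loop ]
//     %v.next = add <4 x i32> %v, <i32 1, i32 1, i32 1, i32 1>
//     %e = extractelement <4 x i32> %v, i32 0
// %v has exactly two uses (the add feeding the PHI back and the extractelement), so the
// vector PHI can be replaced by a scalar PHI of i32; the extractelement then becomes the
// scalar PHI itself and the add is rewritten on scalars.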
Instruction *InstCombiner::scalarizePHI(ExtractElementInst &EI, PHINode *PN) {
// Verify that the PHI node has exactly 2 uses. Otherwise return NULL.
if (!PN->hasNUses(2))
return nullptr;
// If so, it's known at this point that one operand is PHI and the other is
// an extractelement node. Find the PHI user that is not the extractelement
// node.
auto iu = PN->user_begin();
Instruction *PHIUser = dyn_cast<Instruction>(*iu);
if (PHIUser == cast<Instruction>(&EI))
PHIUser = cast<Instruction>(*(++iu));
// Verify that this PHI user has one use, which is the PHI itself,
// and that it is a binary operation which is cheap to scalarize.
// Otherwise return nullptr.
if (!PHIUser->hasOneUse() || !(PHIUser->user_back() == PN) ||
!(isa<BinaryOperator>(PHIUser)) || !CheapToScalarize(PHIUser, true))
return nullptr;
// Create a scalar PHI node that will replace the vector PHI node
// just before the current PHI node.
PHINode *scalarPHI = cast<PHINode>(InsertNewInstWith(
PHINode::Create(EI.getType(), PN->getNumIncomingValues(), ""), *PN));
// Scalarize each PHI operand.
for (unsigned i = 0; i < PN->getNumIncomingValues(); i++) {
Value *PHIInVal = PN->getIncomingValue(i);
BasicBlock *inBB = PN->getIncomingBlock(i);
Value *Elt = EI.getIndexOperand();
// If the operand is the PHI induction variable:
if (PHIInVal == PHIUser) {
// Scalarize the binary operation. Its first operand is the
// scalar PHI, and the second operand is extracted from the other
// vector operand.
BinaryOperator *B0 = cast<BinaryOperator>(PHIUser);
unsigned opId = (B0->getOperand(0) == PN) ? 1 : 0;
Value *Op = InsertNewInstWith(
ExtractElementInst::Create(B0->getOperand(opId), Elt,
B0->getOperand(opId)->getName() + ".Elt"),
*B0);
Value *newPHIUser = InsertNewInstWith(
BinaryOperator::Create(B0->getOpcode(), scalarPHI, Op), *B0);
scalarPHI->addIncoming(newPHIUser, inBB);
} else {
// Scalarize PHI input:
Instruction *newEI = ExtractElementInst::Create(PHIInVal, Elt, "");
// Insert the new instruction into the predecessor basic block.
Instruction *pos = dyn_cast<Instruction>(PHIInVal);
BasicBlock::iterator InsertPos;
if (pos && !isa<PHINode>(pos)) {
InsertPos = pos;
++InsertPos;
} else {
InsertPos = inBB->getFirstInsertionPt();
}
InsertNewInstWith(newEI, *InsertPos);
scalarPHI->addIncoming(newEI, inBB);
}
}
return ReplaceInstUsesWith(EI, scalarPHI);
}
Instruction *InstCombiner::visitExtractElementInst(ExtractElementInst &EI) {
// If vector val is constant with all elements the same, replace EI with
// that element. We handle a known element # below.
if (Constant *C = dyn_cast<Constant>(EI.getOperand(0)))
if (CheapToScalarize(C, false))
return ReplaceInstUsesWith(EI, C->getAggregateElement(0U));
// If extracting a specified index from the vector, see if we can recursively
// find a previously computed scalar that was inserted into the vector.
if (ConstantInt *IdxC = dyn_cast<ConstantInt>(EI.getOperand(1))) {
unsigned IndexVal = IdxC->getZExtValue();
unsigned VectorWidth = EI.getVectorOperandType()->getNumElements();
// If this is extracting an invalid index, turn this into undef, to avoid
// crashing the code below.
if (IndexVal >= VectorWidth)
return ReplaceInstUsesWith(EI, UndefValue::get(EI.getType()));
// This instruction only demands the single element from the input vector.
// If the input vector has a single use, simplify it based on this use
// property.
if (EI.getOperand(0)->hasOneUse() && VectorWidth != 1) {
APInt UndefElts(VectorWidth, 0);
APInt DemandedMask(VectorWidth, 0);
DemandedMask.setBit(IndexVal);
if (Value *V = SimplifyDemandedVectorElts(EI.getOperand(0), DemandedMask,
UndefElts)) {
EI.setOperand(0, V);
return &EI;
}
}
if (Value *Elt = FindScalarElement(EI.getOperand(0), IndexVal))
return ReplaceInstUsesWith(EI, Elt);
// If this extractelement is directly using a bitcast from a vector of
// the same number of elements, see if we can find the source element from
// it. In this case, we will end up needing to bitcast the scalars.
if (BitCastInst *BCI = dyn_cast<BitCastInst>(EI.getOperand(0))) {
if (VectorType *VT = dyn_cast<VectorType>(BCI->getOperand(0)->getType()))
if (VT->getNumElements() == VectorWidth)
if (Value *Elt = FindScalarElement(BCI->getOperand(0), IndexVal))
return new BitCastInst(Elt, EI.getType());
}
// If there's a vector PHI feeding a scalar use through this extractelement
// instruction, try to scalarize the PHI.
if (PHINode *PN = dyn_cast<PHINode>(EI.getOperand(0))) {
Instruction *scalarPHI = scalarizePHI(EI, PN);
if (scalarPHI)
return scalarPHI;
}
}
if (Instruction *I = dyn_cast<Instruction>(EI.getOperand(0))) {
// Push extractelement into predecessor operation if legal and
// profitable to do so
if (BinaryOperator *BO = dyn_cast<BinaryOperator>(I)) {
if (I->hasOneUse() &&
CheapToScalarize(BO, isa<ConstantInt>(EI.getOperand(1)))) {
Value *newEI0 =
Builder->CreateExtractElement(BO->getOperand(0), EI.getOperand(1),
EI.getName()+".lhs");
Value *newEI1 =
Builder->CreateExtractElement(BO->getOperand(1), EI.getOperand(1),
EI.getName()+".rhs");
return BinaryOperator::Create(BO->getOpcode(), newEI0, newEI1);
}
} else if (InsertElementInst *IE = dyn_cast<InsertElementInst>(I)) {
// Extracting the inserted element?
if (IE->getOperand(2) == EI.getOperand(1))
return ReplaceInstUsesWith(EI, IE->getOperand(1));
// If the inserted and extracted elements are constants, they must not
// be the same value, extract from the pre-inserted value instead.
if (isa<Constant>(IE->getOperand(2)) && isa<Constant>(EI.getOperand(1))) {
Worklist.AddValue(EI.getOperand(0));
EI.setOperand(0, IE->getOperand(0));
return &EI;
}
} else if (ShuffleVectorInst *SVI = dyn_cast<ShuffleVectorInst>(I)) {
// If this is extracting an element from a shufflevector, figure out where
// it came from and extract from the appropriate input element instead.
if (ConstantInt *Elt = dyn_cast<ConstantInt>(EI.getOperand(1))) {
int SrcIdx = SVI->getMaskValue(Elt->getZExtValue());
Value *Src;
unsigned LHSWidth =
SVI->getOperand(0)->getType()->getVectorNumElements();
if (SrcIdx < 0)
return ReplaceInstUsesWith(EI, UndefValue::get(EI.getType()));
if (SrcIdx < (int)LHSWidth)
Src = SVI->getOperand(0);
else {
SrcIdx -= LHSWidth;
Src = SVI->getOperand(1);
}
Type *Int32Ty = Type::getInt32Ty(EI.getContext());
return ExtractElementInst::Create(Src,
ConstantInt::get(Int32Ty,
SrcIdx, false));
}
} else if (CastInst *CI = dyn_cast<CastInst>(I)) {
// Canonicalize extractelement(cast) -> cast(extractelement). Bitcasts are
// excluded because they can change the number of vector elements and cost nothing.
if (CI->hasOneUse() && (CI->getOpcode() != Instruction::BitCast)) {
Value *EE = Builder->CreateExtractElement(CI->getOperand(0),
EI.getIndexOperand());
Worklist.AddValue(EE);
return CastInst::Create(CI->getOpcode(), EE, EI.getType());
}
} else if (SelectInst *SI = dyn_cast<SelectInst>(I)) {
if (SI->hasOneUse()) {
// TODO: For a select on vectors, it might be useful to do this if it
// has multiple extractelement uses. For vector select, that seems to
// fight the vectorizer.
// If we are extracting an element from a vector select or a select on
// vectors, create a select on the scalars extracted from the vector arguments.
Value *TrueVal = SI->getTrueValue();
Value *FalseVal = SI->getFalseValue();
Value *Cond = SI->getCondition();
if (Cond->getType()->isVectorTy()) {
Cond = Builder->CreateExtractElement(Cond,
EI.getIndexOperand(),
Cond->getName() + ".elt");
}
Value *V1Elem
= Builder->CreateExtractElement(TrueVal,
EI.getIndexOperand(),
TrueVal->getName() + ".elt");
Value *V2Elem
= Builder->CreateExtractElement(FalseVal,
EI.getIndexOperand(),
FalseVal->getName() + ".elt");
return SelectInst::Create(Cond,
V1Elem,
V2Elem,
SI->getName() + ".elt");
}
}
}
return nullptr;
}
/// CollectSingleShuffleElements - If V is a shuffle of values that ONLY returns
/// elements from either LHS or RHS, return the shuffle mask and true.
/// Otherwise, return false.
static bool CollectSingleShuffleElements(Value *V, Value *LHS, Value *RHS,
SmallVectorImpl<Constant*> &Mask) {
assert(LHS->getType() == RHS->getType() &&
"Invalid CollectSingleShuffleElements");
unsigned NumElts = V->getType()->getVectorNumElements();
if (isa<UndefValue>(V)) {
Mask.assign(NumElts, UndefValue::get(Type::getInt32Ty(V->getContext())));
return true;
}
if (V == LHS) {
for (unsigned i = 0; i != NumElts; ++i)
Mask.push_back(ConstantInt::get(Type::getInt32Ty(V->getContext()), i));
return true;
}
if (V == RHS) {
for (unsigned i = 0; i != NumElts; ++i)
Mask.push_back(ConstantInt::get(Type::getInt32Ty(V->getContext()),
i+NumElts));
return true;
}
if (InsertElementInst *IEI = dyn_cast<InsertElementInst>(V)) {
// If this is an insert of an extract from some other vector, include it.
Value *VecOp = IEI->getOperand(0);
Value *ScalarOp = IEI->getOperand(1);
Value *IdxOp = IEI->getOperand(2);
if (!isa<ConstantInt>(IdxOp))
return false;
unsigned InsertedIdx = cast<ConstantInt>(IdxOp)->getZExtValue();
if (isa<UndefValue>(ScalarOp)) { // inserting undef into vector.
// We can handle this if the vector we are inserting into is
// transitively ok.
if (CollectSingleShuffleElements(VecOp, LHS, RHS, Mask)) {
// If so, update the mask to reflect the inserted undef.
Mask[InsertedIdx] = UndefValue::get(Type::getInt32Ty(V->getContext()));
return true;
}
} else if (ExtractElementInst *EI = dyn_cast<ExtractElementInst>(ScalarOp)){
if (isa<ConstantInt>(EI->getOperand(1))) {
unsigned ExtractedIdx =
cast<ConstantInt>(EI->getOperand(1))->getZExtValue();
unsigned NumLHSElts = LHS->getType()->getVectorNumElements();
// This must be extracting from either LHS or RHS.
if (EI->getOperand(0) == LHS || EI->getOperand(0) == RHS) {
// We can handle this if the vector we are inserting into is
// transitively ok.
if (CollectSingleShuffleElements(VecOp, LHS, RHS, Mask)) {
// If so, update the mask to reflect the inserted value.
if (EI->getOperand(0) == LHS) {
Mask[InsertedIdx % NumElts] =
ConstantInt::get(Type::getInt32Ty(V->getContext()),
ExtractedIdx);
} else {
assert(EI->getOperand(0) == RHS);
Mask[InsertedIdx % NumElts] =
ConstantInt::get(Type::getInt32Ty(V->getContext()),
ExtractedIdx + NumLHSElts);
}
return true;
}
}
}
}
}
return false;
}
/// We are building a shuffle to create V, which is a sequence of insertelement,
/// extractelement pairs. If PermittedRHS is set, then we must either use it or
/// not rely on the second vector source. Return a std::pair containing the
/// left and right vectors of the proposed shuffle (or 0), and set the Mask
/// parameter as required.
///
/// Note: we intentionally don't try to fold earlier shuffles since they have
/// often been chosen carefully to be efficiently implementable on the target.
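/// Illustrative example (an assumption, not from the original source): a chain such as
///   %e = extractelement <2 x float> %b, i32 0
///   %v = insertelement <2 x float> %a, float %e, i32 1
/// can be expressed as
///   shufflevector <2 x float> %a, <2 x float> %b, <2 x i32> <i32 0, i32 2>
/// and this helper (driven from visitInsertElementInst) builds up exactly that kind of
/// shuffle via the returned operand pair and the Mask parameter.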
typedef std::pair<Value *, Value *> ShuffleOps;
static ShuffleOps CollectShuffleElements(Value *V,
SmallVectorImpl<Constant *> &Mask,
Value *PermittedRHS) {
assert(V->getType()->isVectorTy() && "Invalid shuffle!");
unsigned NumElts = cast<VectorType>(V->getType())->getNumElements();
if (isa<UndefValue>(V)) {
Mask.assign(NumElts, UndefValue::get(Type::getInt32Ty(V->getContext())));
return std::make_pair(
PermittedRHS ? UndefValue::get(PermittedRHS->getType()) : V, nullptr);
}
if (isa<ConstantAggregateZero>(V)) {
Mask.assign(NumElts, ConstantInt::get(Type::getInt32Ty(V->getContext()),0));
return std::make_pair(V, nullptr);
}
if (InsertElementInst *IEI = dyn_cast<InsertElementInst>(V)) {
// If this is an insert of an extract from some other vector, include it.
Value *VecOp = IEI->getOperand(0);
Value *ScalarOp = IEI->getOperand(1);
Value *IdxOp = IEI->getOperand(2);
if (ExtractElementInst *EI = dyn_cast<ExtractElementInst>(ScalarOp)) {
if (isa<ConstantInt>(EI->getOperand(1)) && isa<ConstantInt>(IdxOp)) {
unsigned ExtractedIdx =
cast<ConstantInt>(EI->getOperand(1))->getZExtValue();
unsigned InsertedIdx = cast<ConstantInt>(IdxOp)->getZExtValue();
// Either the extracted from or inserted into vector must be RHSVec,
// otherwise we'd end up with a shuffle of three inputs.
if (EI->getOperand(0) == PermittedRHS || PermittedRHS == nullptr) {
Value *RHS = EI->getOperand(0);
ShuffleOps LR = CollectShuffleElements(VecOp, Mask, RHS);
assert(LR.second == nullptr || LR.second == RHS);
if (LR.first->getType() != RHS->getType()) {
// We tried our best, but we can't find anything compatible with RHS
// further up the chain. Return a trivial shuffle.
for (unsigned i = 0; i < NumElts; ++i)
Mask[i] = ConstantInt::get(Type::getInt32Ty(V->getContext()), i);
return std::make_pair(V, nullptr);
}
unsigned NumLHSElts = RHS->getType()->getVectorNumElements();
Mask[InsertedIdx % NumElts] =
ConstantInt::get(Type::getInt32Ty(V->getContext()),
NumLHSElts+ExtractedIdx);
return std::make_pair(LR.first, RHS);
}
if (VecOp == PermittedRHS) {
// We've gone as far as we can: anything on the other side of the
// extractelement will already have been converted into a shuffle.
unsigned NumLHSElts =
EI->getOperand(0)->getType()->getVectorNumElements();
for (unsigned i = 0; i != NumElts; ++i)
Mask.push_back(ConstantInt::get(
Type::getInt32Ty(V->getContext()),
i == InsertedIdx ? ExtractedIdx : NumLHSElts + i));
return std::make_pair(EI->getOperand(0), PermittedRHS);
}
// If this insertelement is a chain that comes from exactly these two
// vectors, return the vector and the effective shuffle.
if (EI->getOperand(0)->getType() == PermittedRHS->getType() &&
CollectSingleShuffleElements(IEI, EI->getOperand(0), PermittedRHS,
Mask))
return std::make_pair(EI->getOperand(0), PermittedRHS);
}
}
}
// Otherwise, can't do anything fancy. Return an identity vector.
for (unsigned i = 0; i != NumElts; ++i)
Mask.push_back(ConstantInt::get(Type::getInt32Ty(V->getContext()), i));
return std::make_pair(V, nullptr);
}
/// Try to find redundant insertvalue instructions, like the following ones:
/// %0 = insertvalue { i8, i32 } undef, i8 %x, 0
/// %1 = insertvalue { i8, i32 } %0, i8 %y, 0
/// Here the second instruction inserts values at the same indices, as the
/// first one, making the first one redundant.
/// It should be transformed to:
/// %0 = insertvalue { i8, i32 } undef, i8 %y, 0
Instruction *InstCombiner::visitInsertValueInst(InsertValueInst &I) {
bool IsRedundant = false;
ArrayRef<unsigned int> FirstIndices = I.getIndices();
// If there is a chain of insertvalue instructions (each of them except the
// last one has only one use and it's another insertvalue insn from this
// chain), check if any of the 'children' uses the same indices as the first
// instruction. In this case, the first one is redundant.
Value *V = &I;
unsigned Depth = 0;
while (V->hasOneUse() && Depth < 10) {
User *U = V->user_back();
auto UserInsInst = dyn_cast<InsertValueInst>(U);
if (!UserInsInst || U->getOperand(0) != V)
break;
if (UserInsInst->getIndices() == FirstIndices) {
IsRedundant = true;
break;
}
V = UserInsInst;
Depth++;
}
if (IsRedundant)
return ReplaceInstUsesWith(I, I.getOperand(0));
return nullptr;
}
Instruction *InstCombiner::visitInsertElementInst(InsertElementInst &IE) {
Value *VecOp = IE.getOperand(0);
Value *ScalarOp = IE.getOperand(1);
Value *IdxOp = IE.getOperand(2);
// Inserting an undef or into an undefined place, remove this.
if (isa<UndefValue>(ScalarOp) || isa<UndefValue>(IdxOp))
ReplaceInstUsesWith(IE, VecOp);
// If the inserted element was extracted from some other vector, and if the
// indexes are constant, try to turn this into a shufflevector operation.
if (ExtractElementInst *EI = dyn_cast<ExtractElementInst>(ScalarOp)) {
if (isa<ConstantInt>(EI->getOperand(1)) && isa<ConstantInt>(IdxOp)) {
unsigned NumInsertVectorElts = IE.getType()->getNumElements();
unsigned NumExtractVectorElts =
EI->getOperand(0)->getType()->getVectorNumElements();
unsigned ExtractedIdx =
cast<ConstantInt>(EI->getOperand(1))->getZExtValue();
unsigned InsertedIdx = cast<ConstantInt>(IdxOp)->getZExtValue();
if (ExtractedIdx >= NumExtractVectorElts) // Out of range extract.
return ReplaceInstUsesWith(IE, VecOp);
if (InsertedIdx >= NumInsertVectorElts) // Out of range insert.
return ReplaceInstUsesWith(IE, UndefValue::get(IE.getType()));
// If we are extracting a value from a vector, then inserting it right
// back into the same place, just use the input vector.
if (EI->getOperand(0) == VecOp && ExtractedIdx == InsertedIdx)
return ReplaceInstUsesWith(IE, VecOp);
// If this insertelement isn't used by some other insertelement, turn it
// (and any insertelements it points to), into one big shuffle.
if (!IE.hasOneUse() || !isa<InsertElementInst>(IE.user_back())) {
SmallVector<Constant*, 16> Mask;
ShuffleOps LR = CollectShuffleElements(&IE, Mask, nullptr);
// The proposed shuffle may be trivial, in which case we shouldn't
// perform the combine.
if (LR.first != &IE && LR.second != &IE) {
// We now have a shuffle of LHS, RHS, Mask.
if (LR.second == nullptr)
LR.second = UndefValue::get(LR.first->getType());
return new ShuffleVectorInst(LR.first, LR.second,
ConstantVector::get(Mask));
}
}
}
}
unsigned VWidth = cast<VectorType>(VecOp->getType())->getNumElements();
APInt UndefElts(VWidth, 0);
APInt AllOnesEltMask(APInt::getAllOnesValue(VWidth));
if (Value *V = SimplifyDemandedVectorElts(&IE, AllOnesEltMask, UndefElts)) {
if (V != &IE)
return ReplaceInstUsesWith(IE, V);
return &IE;
}
return nullptr;
}
/// Return true if we can evaluate the specified expression tree if the vector
/// elements were shuffled in a different order.
static bool CanEvaluateShuffled(Value *V, ArrayRef<int> Mask,
unsigned Depth = 5) {
// We can always reorder the elements of a constant.
if (isa<Constant>(V))
return true;
// We won't reorder vector arguments. No IPO here.
Instruction *I = dyn_cast<Instruction>(V);
if (!I) return false;
// Two users may expect different orders of the elements. Don't try it.
if (!I->hasOneUse())
return false;
if (Depth == 0) return false;
switch (I->getOpcode()) {
case Instruction::Add:
case Instruction::FAdd:
case Instruction::Sub:
case Instruction::FSub:
case Instruction::Mul:
case Instruction::FMul:
case Instruction::UDiv:
case Instruction::SDiv:
case Instruction::FDiv:
case Instruction::URem:
case Instruction::SRem:
case Instruction::FRem:
case Instruction::Shl:
case Instruction::LShr:
case Instruction::AShr:
case Instruction::And:
case Instruction::Or:
case Instruction::Xor:
case Instruction::ICmp:
case Instruction::FCmp:
case Instruction::Trunc:
case Instruction::ZExt:
case Instruction::SExt:
case Instruction::FPToUI:
case Instruction::FPToSI:
case Instruction::UIToFP:
case Instruction::SIToFP:
case Instruction::FPTrunc:
case Instruction::FPExt:
case Instruction::GetElementPtr: {
for (int i = 0, e = I->getNumOperands(); i != e; ++i) {
if (!CanEvaluateShuffled(I->getOperand(i), Mask, Depth-1))
return false;
}
return true;
}
case Instruction::InsertElement: {
ConstantInt *CI = dyn_cast<ConstantInt>(I->getOperand(2));
if (!CI) return false;
int ElementNumber = CI->getLimitedValue();
// Verify that 'CI' does not occur twice in Mask. A single 'insertelement'
// can't put an element into multiple indices.
bool SeenOnce = false;
for (int i = 0, e = Mask.size(); i != e; ++i) {
if (Mask[i] == ElementNumber) {
if (SeenOnce)
return false;
SeenOnce = true;
}
}
return CanEvaluateShuffled(I->getOperand(0), Mask, Depth-1);
}
}
return false;
}
/// Rebuild a new instruction just like 'I' but with the new operands given.
/// In the event of a type mismatch, the types of the new operands take precedence.
static Value *BuildNew(Instruction *I, ArrayRef<Value*> NewOps) {
// We don't want to use the IRBuilder here because we want the replacement
// instructions to appear next to 'I', not the builder's insertion point.
switch (I->getOpcode()) {
case Instruction::Add:
case Instruction::FAdd:
case Instruction::Sub:
case Instruction::FSub:
case Instruction::Mul:
case Instruction::FMul:
case Instruction::UDiv:
case Instruction::SDiv:
case Instruction::FDiv:
case Instruction::URem:
case Instruction::SRem:
case Instruction::FRem:
case Instruction::Shl:
case Instruction::LShr:
case Instruction::AShr:
case Instruction::And:
case Instruction::Or:
case Instruction::Xor: {
BinaryOperator *BO = cast<BinaryOperator>(I);
assert(NewOps.size() == 2 && "binary operator with #ops != 2");
BinaryOperator *New =
BinaryOperator::Create(cast<BinaryOperator>(I)->getOpcode(),
NewOps[0], NewOps[1], "", BO);
if (isa<OverflowingBinaryOperator>(BO)) {
New->setHasNoUnsignedWrap(BO->hasNoUnsignedWrap());
New->setHasNoSignedWrap(BO->hasNoSignedWrap());
}
if (isa<PossiblyExactOperator>(BO)) {
New->setIsExact(BO->isExact());
}
if (isa<FPMathOperator>(BO))
New->copyFastMathFlags(I);
return New;
}
case Instruction::ICmp:
assert(NewOps.size() == 2 && "icmp with #ops != 2");
return new ICmpInst(I, cast<ICmpInst>(I)->getPredicate(),
NewOps[0], NewOps[1]);
case Instruction::FCmp:
assert(NewOps.size() == 2 && "fcmp with #ops != 2");
return new FCmpInst(I, cast<FCmpInst>(I)->getPredicate(),
NewOps[0], NewOps[1]);
case Instruction::Trunc:
case Instruction::ZExt:
case Instruction::SExt:
case Instruction::FPToUI:
case Instruction::FPToSI:
case Instruction::UIToFP:
case Instruction::SIToFP:
case Instruction::FPTrunc:
case Instruction::FPExt: {
// It's possible that the mask has a different number of elements from
// the original cast. We recompute the destination type to match the mask.
Type *DestTy =
VectorType::get(I->getType()->getScalarType(),
NewOps[0]->getType()->getVectorNumElements());
assert(NewOps.size() == 1 && "cast with #ops != 1");
return CastInst::Create(cast<CastInst>(I)->getOpcode(), NewOps[0], DestTy,
"", I);
}
case Instruction::GetElementPtr: {
Value *Ptr = NewOps[0];
ArrayRef<Value*> Idx = NewOps.slice(1);
GetElementPtrInst *GEP = GetElementPtrInst::Create(
cast<GetElementPtrInst>(I)->getSourceElementType(), Ptr, Idx, "", I);
GEP->setIsInBounds(cast<GetElementPtrInst>(I)->isInBounds());
return GEP;
}
}
llvm_unreachable("failed to rebuild vector instructions");
}
Value *
InstCombiner::EvaluateInDifferentElementOrder(Value *V, ArrayRef<int> Mask) {
// Mask.size() does not need to be equal to the number of vector elements.
assert(V->getType()->isVectorTy() && "can't reorder non-vector elements");
if (isa<UndefValue>(V)) {
return UndefValue::get(VectorType::get(V->getType()->getScalarType(),
Mask.size()));
}
if (isa<ConstantAggregateZero>(V)) {
return ConstantAggregateZero::get(
VectorType::get(V->getType()->getScalarType(),
Mask.size()));
}
if (Constant *C = dyn_cast<Constant>(V)) {
SmallVector<Constant *, 16> MaskValues;
for (int i = 0, e = Mask.size(); i != e; ++i) {
if (Mask[i] == -1)
MaskValues.push_back(UndefValue::get(Builder->getInt32Ty()));
else
MaskValues.push_back(Builder->getInt32(Mask[i]));
}
return ConstantExpr::getShuffleVector(C, UndefValue::get(C->getType()),
ConstantVector::get(MaskValues));
}
Instruction *I = cast<Instruction>(V);
switch (I->getOpcode()) {
case Instruction::Add:
case Instruction::FAdd:
case Instruction::Sub:
case Instruction::FSub:
case Instruction::Mul:
case Instruction::FMul:
case Instruction::UDiv:
case Instruction::SDiv:
case Instruction::FDiv:
case Instruction::URem:
case Instruction::SRem:
case Instruction::FRem:
case Instruction::Shl:
case Instruction::LShr:
case Instruction::AShr:
case Instruction::And:
case Instruction::Or:
case Instruction::Xor:
case Instruction::ICmp:
case Instruction::FCmp:
case Instruction::Trunc:
case Instruction::ZExt:
case Instruction::SExt:
case Instruction::FPToUI:
case Instruction::FPToSI:
case Instruction::UIToFP:
case Instruction::SIToFP:
case Instruction::FPTrunc:
case Instruction::FPExt:
case Instruction::Select:
case Instruction::GetElementPtr: {
SmallVector<Value*, 8> NewOps;
bool NeedsRebuild = (Mask.size() != I->getType()->getVectorNumElements());
for (int i = 0, e = I->getNumOperands(); i != e; ++i) {
Value *V = EvaluateInDifferentElementOrder(I->getOperand(i), Mask);
NewOps.push_back(V);
NeedsRebuild |= (V != I->getOperand(i));
}
if (NeedsRebuild) {
return BuildNew(I, NewOps);
}
return I;
}
case Instruction::InsertElement: {
int Element = cast<ConstantInt>(I->getOperand(2))->getLimitedValue();
// The insertelement was inserting at Element. Figure out which element
// that becomes after shuffling. The answer is guaranteed to be unique
// by CanEvaluateShuffled.
bool Found = false;
int Index = 0;
for (int e = Mask.size(); Index != e; ++Index) {
if (Mask[Index] == Element) {
Found = true;
break;
}
}
// If element is not in Mask, no need to handle the operand 1 (element to
// be inserted). Just evaluate values in operand 0 according to Mask.
if (!Found)
return EvaluateInDifferentElementOrder(I->getOperand(0), Mask);
Value *V = EvaluateInDifferentElementOrder(I->getOperand(0), Mask);
return InsertElementInst::Create(V, I->getOperand(1),
Builder->getInt32(Index), "", I);
}
}
llvm_unreachable("failed to reorder elements of vector instruction!");
}
static void RecognizeIdentityMask(const SmallVectorImpl<int> &Mask,
bool &isLHSID, bool &isRHSID) {
isLHSID = isRHSID = true;
for (unsigned i = 0, e = Mask.size(); i != e; ++i) {
if (Mask[i] < 0) continue; // Ignore undef values.
// Is this an identity shuffle of the LHS value?
isLHSID &= (Mask[i] == (int)i);
// Is this an identity shuffle of the RHS value?
isRHSID &= (Mask[i]-e == i);
}
}
// Returns true if the shuffle is extracting a contiguous range of values from
// LHS, for example:
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// Input: |AA|BB|CC|DD|EE|FF|GG|HH|II|JJ|KK|LL|MM|NN|OO|PP|
// Shuffles to: |EE|FF|GG|HH|
// +--+--+--+--+
static bool isShuffleExtractingFromLHS(ShuffleVectorInst &SVI,
SmallVector<int, 16> &Mask) {
unsigned LHSElems =
cast<VectorType>(SVI.getOperand(0)->getType())->getNumElements();
unsigned MaskElems = Mask.size();
unsigned BegIdx = Mask.front();
unsigned EndIdx = Mask.back();
if (BegIdx > EndIdx || EndIdx >= LHSElems || EndIdx - BegIdx != MaskElems - 1)
return false;
for (unsigned I = 0; I != MaskElems; ++I)
if (static_cast<unsigned>(Mask[I]) != BegIdx + I)
return false;
return true;
}
Instruction *InstCombiner::visitShuffleVectorInst(ShuffleVectorInst &SVI) {
Value *LHS = SVI.getOperand(0);
Value *RHS = SVI.getOperand(1);
SmallVector<int, 16> Mask = SVI.getShuffleMask();
Type *Int32Ty = Type::getInt32Ty(SVI.getContext());
bool MadeChange = false;
// Undefined shuffle mask -> undefined value.
if (isa<UndefValue>(SVI.getOperand(2)))
return ReplaceInstUsesWith(SVI, UndefValue::get(SVI.getType()));
unsigned VWidth = cast<VectorType>(SVI.getType())->getNumElements();
APInt UndefElts(VWidth, 0);
APInt AllOnesEltMask(APInt::getAllOnesValue(VWidth));
if (Value *V = SimplifyDemandedVectorElts(&SVI, AllOnesEltMask, UndefElts)) {
if (V != &SVI)
return ReplaceInstUsesWith(SVI, V);
LHS = SVI.getOperand(0);
RHS = SVI.getOperand(1);
MadeChange = true;
}
unsigned LHSWidth = cast<VectorType>(LHS->getType())->getNumElements();
// Canonicalize shuffle(x ,x,mask) -> shuffle(x, undef,mask')
// Canonicalize shuffle(undef,x,mask) -> shuffle(x, undef,mask').
if (LHS == RHS || isa<UndefValue>(LHS)) {
if (isa<UndefValue>(LHS) && LHS == RHS) {
// shuffle(undef,undef,mask) -> undef.
Value *Result = (VWidth == LHSWidth)
? LHS : UndefValue::get(SVI.getType());
return ReplaceInstUsesWith(SVI, Result);
}
// Remap any references to RHS to use LHS.
SmallVector<Constant*, 16> Elts;
for (unsigned i = 0, e = LHSWidth; i != VWidth; ++i) {
if (Mask[i] < 0) {
Elts.push_back(UndefValue::get(Int32Ty));
continue;
}
if ((Mask[i] >= (int)e && isa<UndefValue>(RHS)) ||
(Mask[i] < (int)e && isa<UndefValue>(LHS))) {
Mask[i] = -1; // Turn into undef.
Elts.push_back(UndefValue::get(Int32Ty));
} else {
Mask[i] = Mask[i] % e; // Force to LHS.
Elts.push_back(ConstantInt::get(Int32Ty, Mask[i]));
}
}
SVI.setOperand(0, SVI.getOperand(1));
SVI.setOperand(1, UndefValue::get(RHS->getType()));
SVI.setOperand(2, ConstantVector::get(Elts));
LHS = SVI.getOperand(0);
RHS = SVI.getOperand(1);
MadeChange = true;
}
if (VWidth == LHSWidth) {
// Analyze the shuffle: is the LHS or RHS an identity shuffle?
bool isLHSID, isRHSID;
RecognizeIdentityMask(Mask, isLHSID, isRHSID);
// Eliminate identity shuffles.
if (isLHSID) return ReplaceInstUsesWith(SVI, LHS);
if (isRHSID) return ReplaceInstUsesWith(SVI, RHS);
}
if (isa<UndefValue>(RHS) && CanEvaluateShuffled(LHS, Mask)) {
Value *V = EvaluateInDifferentElementOrder(LHS, Mask);
return ReplaceInstUsesWith(SVI, V);
}
// SROA generates shuffle+bitcast when the extracted sub-vector is bitcast to
// a non-vector type. We can instead bitcast the original vector followed by
// an extract of the desired element:
//
// %sroa = shufflevector <16 x i8> %in, <16 x i8> undef,
// <4 x i32> <i32 0, i32 1, i32 2, i32 3>
// %1 = bitcast <4 x i8> %sroa to i32
// Becomes:
// %bc = bitcast <16 x i8> %in to <4 x i32>
// %ext = extractelement <4 x i32> %bc, i32 0
//
// If the shuffle is extracting a contiguous range of values from the input
// vector then each use which is a bitcast of the extracted size can be
// replaced. This will work if the vector types are compatible, and the begin
// index is aligned to a value in the casted vector type. If the begin index
// isn't aligned then we can shuffle the original vector (keeping the same
// vector type) before extracting.
//
// This code will bail out if the target type is fundamentally incompatible
// with vectors of the source type.
//
// Example of <16 x i8>, target type i32:
// Index range [4,8): v-----------v Will work.
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// <16 x i8>: | | | | | | | | | | | | | | | | |
// <4 x i32>: | | | | |
// +-----------+-----------+-----------+-----------+
// Index range [6,10): ^-----------^ Needs an extra shuffle.
// Target type i40: ^--------------^ Won't work, bail.
if (isShuffleExtractingFromLHS(SVI, Mask)) {
Value *V = LHS;
unsigned MaskElems = Mask.size();
unsigned BegIdx = Mask.front();
VectorType *SrcTy = cast<VectorType>(V->getType());
unsigned VecBitWidth = SrcTy->getBitWidth();
unsigned SrcElemBitWidth = DL.getTypeSizeInBits(SrcTy->getElementType());
assert(SrcElemBitWidth && "vector elements must have a bitwidth");
unsigned SrcNumElems = SrcTy->getNumElements();
SmallVector<BitCastInst *, 8> BCs;
DenseMap<Type *, Value *> NewBCs;
for (User *U : SVI.users())
if (BitCastInst *BC = dyn_cast<BitCastInst>(U))
if (!BC->use_empty())
// Only visit bitcasts that weren't previously handled.
BCs.push_back(BC);
for (BitCastInst *BC : BCs) {
Type *TgtTy = BC->getDestTy();
unsigned TgtElemBitWidth = DL.getTypeSizeInBits(TgtTy);
if (!TgtElemBitWidth)
continue;
unsigned TgtNumElems = VecBitWidth / TgtElemBitWidth;
bool VecBitWidthsEqual = VecBitWidth == TgtNumElems * TgtElemBitWidth;
bool BegIsAligned = 0 == ((SrcElemBitWidth * BegIdx) % TgtElemBitWidth);
if (!VecBitWidthsEqual)
continue;
if (!VectorType::isValidElementType(TgtTy))
continue;
VectorType *CastSrcTy = VectorType::get(TgtTy, TgtNumElems);
if (!BegIsAligned) {
// Shuffle the input so [0,NumElements) contains the output, and
// [NumElems,SrcNumElems) is undef.
SmallVector<Constant *, 16> ShuffleMask(SrcNumElems,
UndefValue::get(Int32Ty));
for (unsigned I = 0, E = MaskElems, Idx = BegIdx; I != E; ++Idx, ++I)
ShuffleMask[I] = ConstantInt::get(Int32Ty, Idx);
V = Builder->CreateShuffleVector(V, UndefValue::get(V->getType()),
ConstantVector::get(ShuffleMask),
SVI.getName() + ".extract");
BegIdx = 0;
}
unsigned SrcElemsPerTgtElem = TgtElemBitWidth / SrcElemBitWidth;
assert(SrcElemsPerTgtElem);
BegIdx /= SrcElemsPerTgtElem;
bool BCAlreadyExists = NewBCs.find(CastSrcTy) != NewBCs.end();
auto *NewBC =
BCAlreadyExists
? NewBCs[CastSrcTy]
: Builder->CreateBitCast(V, CastSrcTy, SVI.getName() + ".bc");
if (!BCAlreadyExists)
NewBCs[CastSrcTy] = NewBC;
auto *Ext = Builder->CreateExtractElement(
NewBC, ConstantInt::get(Int32Ty, BegIdx), SVI.getName() + ".extract");
// The shufflevector isn't being replaced: the bitcast that used it
// is. InstCombine will visit the newly-created instructions.
ReplaceInstUsesWith(*BC, Ext);
MadeChange = true;
}
}
// If the LHS is a shufflevector itself, see if we can combine it with this
// one without producing an unusual shuffle.
// Cases that might be simplified:
// 1.
// x1=shuffle(v1,v2,mask1)
// x=shuffle(x1,undef,mask)
// ==>
// x=shuffle(v1,undef,newMask)
// newMask[i] = (mask[i] < x1.size()) ? mask1[mask[i]] : -1
// 2.
// x1=shuffle(v1,undef,mask1)
// x=shuffle(x1,x2,mask)
// where v1.size() == mask1.size()
// ==>
// x=shuffle(v1,x2,newMask)
// newMask[i] = (mask[i] < x1.size()) ? mask1[mask[i]] : mask[i]
// 3.
// x2=shuffle(v2,undef,mask2)
// x=shuffle(x1,x2,mask)
// where v2.size() == mask2.size()
// ==>
// x=shuffle(x1,v2,newMask)
// newMask[i] = (mask[i] < x1.size())
// ? mask[i] : mask2[mask[i]-x1.size()]+x1.size()
// 4.
// x1=shuffle(v1,undef,mask1)
// x2=shuffle(v2,undef,mask2)
// x=shuffle(x1,x2,mask)
// where v1.size() == v2.size()
// ==>
// x=shuffle(v1,v2,newMask)
// newMask[i] = (mask[i] < x1.size())
// ? mask1[mask[i]] : mask2[mask[i]-x1.size()]+v1.size()
//
// Here we are really conservative:
// we are absolutely afraid of producing a shuffle mask not in the input
// program, because the code gen may not be smart enough to turn a merged
// shuffle into two specific shuffles: it may produce worse code. As such,
// we only merge two shuffles if the result is either a splat or one of the
// input shuffle masks. In this case, merging the shuffles just removes
// one instruction, which we know is safe. This is good for things like
// turning: (splat(splat)) -> splat, or
// merge(V[0..n], V[n+1..2n]) -> V[0..2n]
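//
// As an illustrative sketch (not from the original source), assume two
// hypothetical 4-element shuffles:
//   %x1 = shufflevector <4 x i32> %v1, <4 x i32> %v2,
//                       <4 x i32> <i32 0, i32 2, i32 4, i32 6>
//   %x  = shufflevector <4 x i32> %x1, <4 x i32> undef,
//                       <4 x i32> <i32 1, i32 1, i32 1, i32 1>
// %x is a splat of %x1's element 1, which is element 2 of %v1, so the
// merged mask would be <2, 2, 2, 2> applied directly to %v1 -- the
// splat(splat) -> splat case mentioned above.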
ShuffleVectorInst* LHSShuffle = dyn_cast<ShuffleVectorInst>(LHS);
ShuffleVectorInst* RHSShuffle = dyn_cast<ShuffleVectorInst>(RHS);
if (LHSShuffle)
if (!isa<UndefValue>(LHSShuffle->getOperand(1)) && !isa<UndefValue>(RHS))
LHSShuffle = nullptr;
if (RHSShuffle)
if (!isa<UndefValue>(RHSShuffle->getOperand(1)))
RHSShuffle = nullptr;
if (!LHSShuffle && !RHSShuffle)
return MadeChange ? &SVI : nullptr;
Value* LHSOp0 = nullptr;
Value* LHSOp1 = nullptr;
Value* RHSOp0 = nullptr;
unsigned LHSOp0Width = 0;
unsigned RHSOp0Width = 0;
if (LHSShuffle) {
LHSOp0 = LHSShuffle->getOperand(0);
LHSOp1 = LHSShuffle->getOperand(1);
LHSOp0Width = cast<VectorType>(LHSOp0->getType())->getNumElements();
}
if (RHSShuffle) {
RHSOp0 = RHSShuffle->getOperand(0);
RHSOp0Width = cast<VectorType>(RHSOp0->getType())->getNumElements();
}
Value* newLHS = LHS;
Value* newRHS = RHS;
if (LHSShuffle) {
// case 1
if (isa<UndefValue>(RHS)) {
newLHS = LHSOp0;
newRHS = LHSOp1;
}
// case 2 or 4
else if (LHSOp0Width == LHSWidth) {
newLHS = LHSOp0;
}
}
// case 3 or 4
if (RHSShuffle && RHSOp0Width == LHSWidth) {
newRHS = RHSOp0;
}
// case 4
if (LHSOp0 == RHSOp0) {
newLHS = LHSOp0;
newRHS = nullptr;
}
if (newLHS == LHS && newRHS == RHS)
return MadeChange ? &SVI : nullptr;
SmallVector<int, 16> LHSMask;
SmallVector<int, 16> RHSMask;
if (newLHS != LHS)
LHSMask = LHSShuffle->getShuffleMask();
if (RHSShuffle && newRHS != RHS)
RHSMask = RHSShuffle->getShuffleMask();
unsigned newLHSWidth = (newLHS != LHS) ? LHSOp0Width : LHSWidth;
SmallVector<int, 16> newMask;
bool isSplat = true;
int SplatElt = -1;
// Create a new mask for the new ShuffleVectorInst so that the new
// ShuffleVectorInst is equivalent to the original one.
for (unsigned i = 0; i < VWidth; ++i) {
int eltMask;
if (Mask[i] < 0) {
// This element is an undef value.
eltMask = -1;
} else if (Mask[i] < (int)LHSWidth) {
// This element is from left hand side vector operand.
//
// If LHS is going to be replaced (case 1, 2, or 4), calculate the
// new mask value for the element.
if (newLHS != LHS) {
eltMask = LHSMask[Mask[i]];
// If the value selected is an undef value, explicitly specify it
// with a -1 mask value.
if (eltMask >= (int)LHSOp0Width && isa<UndefValue>(LHSOp1))
eltMask = -1;
} else
eltMask = Mask[i];
} else {
// This element is from right hand side vector operand
//
// If the value selected is an undef value, explicitly specify it
// with a -1 mask value. (case 1)
if (isa<UndefValue>(RHS))
eltMask = -1;
// If RHS is going to be replaced (case 3 or 4), calculate the
// new mask value for the element.
else if (newRHS != RHS) {
eltMask = RHSMask[Mask[i]-LHSWidth];
// If the value selected is an undef value, explicitly specify it
// with a -1 mask value.
if (eltMask >= (int)RHSOp0Width) {
assert(isa<UndefValue>(RHSShuffle->getOperand(1))
&& "should have been checked above");
eltMask = -1;
}
} else
eltMask = Mask[i]-LHSWidth;
// If LHS's width is changed, shift the mask value accordingly.
// If newRHS == NULL, i.e. LHSOp0 == RHSOp0, we want to remap any
// references from RHSOp0 to LHSOp0, so we don't need to shift the mask.
// If newRHS == newLHS, we want to remap any references from newRHS to
// newLHS so that we can properly identify splats that may occur due to
// obfuscation across the two vectors.
if (eltMask >= 0 && newRHS != nullptr && newLHS != newRHS)
eltMask += newLHSWidth;
}
// Check if this could still be a splat.
if (eltMask >= 0) {
if (SplatElt >= 0 && SplatElt != eltMask)
isSplat = false;
SplatElt = eltMask;
}
newMask.push_back(eltMask);
}
// If the result mask is equal to one of the original shuffle masks,
// or is a splat, do the replacement.
//
// XXX EMSCRIPTEN: Add '|| true' so that we always do the replacement.
// We're targeting SIMD.js, so there's less of an expectation that a
// particular shuffle mask will always map onto a particular instruction on
// a particular ISA, because we aren't targeting a particular ISA (what the
// JS engine does is another story). We may wish to re-evaluate this choice
// as we move on to higher-element-count vectors, but for now this
// is quite desirable.
if (isSplat || newMask == LHSMask || newMask == RHSMask || newMask == Mask ||
true)
{
SmallVector<Constant*, 16> Elts;
for (unsigned i = 0, e = newMask.size(); i != e; ++i) {
if (newMask[i] < 0) {
Elts.push_back(UndefValue::get(Int32Ty));
} else {
Elts.push_back(ConstantInt::get(Int32Ty, newMask[i]));
}
}
if (!newRHS)
newRHS = UndefValue::get(newLHS->getType());
return new ShuffleVectorInst(newLHS, newRHS, ConstantVector::get(Elts));
}
// If the result mask is an identity, replace uses of this instruction with
// corresponding argument.
bool isLHSID, isRHSID;
RecognizeIdentityMask(newMask, isLHSID, isRHSID);
if (isLHSID && VWidth == LHSOp0Width) return ReplaceInstUsesWith(SVI, newLHS);
if (isRHSID && VWidth == RHSOp0Width) return ReplaceInstUsesWith(SVI, newRHS);
return MadeChange ? &SVI : nullptr;
}
|
apache-2.0
|
alipourm/alipourm.github.io
|
04_teaching.md
|
839
|
---
layout: default
title: Teaching
permalink: /teaching/
published: true
---
* Spring 2020: COSC 6351 Software Engineering [Syllabus](https://uofh-my.sharepoint.com/:w:/g/personal/maalipou_cougarnet_uh_edu/EY6z-fNHWzJFpBRyEyz9xlwBRY2VMgzQv1BvOpkEvVV6uQ?e=9B1gah)
* Fall 2019: COSC 4353 Applied Software Engineering (Software Design) [Syllabus](https://docs.google.com/document/d/1XCe8KElJqudwGU4mKB6Xrw9KN2AXSBQqcQxsAiGelAE/edit?usp=sharing)
* Spring 2019: COSC 4315/6345: Programming Languages [Course Material](https://uofh-my.sharepoint.com/:f:/g/personal/maalipou_cougarnet_uh_edu/Es8z1E5pBm5FnGqbx2aeswMBsk06vybma9EBY7QlmOI1hQ?e=aZD68l)
* Fall 2018: COSC 4353: Software Design [Syllabus](https://docs.google.com/document/d/1sF8ahW1kgXDx8LJJzmqGuRgrfoQ19AbNm9KfV7x34Uw/edit?usp=sharing)
* Fall 2017: COSC 4353/6353: Software Design
|
apache-2.0
|
Pompeu/Jpaint
|
src/jpaint/model/bean/Figura.java
|
4031
|
package jpaint.model.bean;
import java.awt.Color;
import java.awt.Graphics;
import java.util.Date;
public abstract class Figura {
private int pkfigura = -1;
private int x;
private int y;
private int largura;
private int altura;
private int tipo;
private Color colorBorda;
private Color colorInternal;
/**
* Constructor of the abstract Figura class; it receives all of its
* attributes as parameters.
*
* @param x
* @param y
* @param largura
* @param altura
* @param tipo
* @param colorBorda
* @param colorInternal
*/
public Figura(int x, int y, int largura, int altura, int tipo, Color colorBorda, Color colorInternal) {
setX(x);
setY(y);
setLargura(largura);
setAltura(altura);
setTipo(tipo);
setColorBorda(colorBorda);
setColorInternal(colorInternal);
}
public int getX() {
return x;
}
public void setX(int x) {
if (x >= 0) {
this.x = x;
} else {
throw new RuntimeException("Invalid value");
}
}
public int getY() {
return y;
}
public void setY(int y) {
this.y = y;
}
public int getLargura() {
return largura;
}
/**
* This method works around the problem of the figure not being drawn
* correctly in the view.
*
* @param largura
*/
public void setLargura(int largura) {
if (largura < 0) {
largura *= -1;
x -= largura;
}
this.largura = largura;
}
public int getAltura() {
return altura;
}
/**
* This method works around the problem of the figure not being drawn
* correctly in the view.
*
* @param altura
*/
public void setAltura(int altura) {
if (altura < 0) {
altura *= -1;
y -= altura;
}
this.altura = altura;
}
public void setTipo(int tipo) {
if (tipo >= 0 && tipo <= 4) {
this.tipo = tipo;
} else {
this.tipo = 0;
}
}
public int getTipo() {
return tipo;
}
public int getPkfigura() {
return pkfigura;
}
public void setPkfigura(int pkfigura) {
this.pkfigura = pkfigura;
}
public Color getColorBorda() {
return colorBorda;
}
public void setColorBorda(Color colorBorda) {
this.colorBorda = colorBorda;
}
public Color getColorInternal() {
return colorInternal;
}
public void setColorInternal(Color colorInternal) {
this.colorInternal = colorInternal;
}
/**
* Template method implemented by every subclass; it paints the border of
* the drawing.
*
* @param g
*/
public abstract void desenheMe(Graphics g);
/**
* Method used to compute the height of a figure.
*
* @param y0
* @param y1
* @return the height (y1 - y0)
*/
public static int calcAltura(int y0, int y1) {
return y1 - y0;
}
/**
* Method used to compute the width of a figure.
*
* @param x0
* @param x1
* @return the width (x1 - x0)
*/
public static int calcLargura(int x0, int x1) {
return x1 - x0;
}
/**
* Overridden so that the figure's pixel coordinates can be inspected at
* run time.
*
* @return the figure's coordinates
*/
@Override
public String toString() {
return "Figura{" + "pkfigura=" + pkfigura
+ ", x=" + x + ", y=" + y + ", largura="
+ largura + ", altura=" + altura + ", tipo="
+ tipo + ", colorBorda=" + colorBorda
+ ", colorInternal=" + colorInternal + '}';
}
}
|
apache-2.0
|
chjw8016/minNotify
|
minnotify.css
|
3871
|
/* --------------------------------
Modules - reusable parts of our design
-------------------------------- */
.img-replace {
/* replace text with an image */
display: inline-block;
overflow: hidden;
text-indent: 100%;
color: transparent;
white-space: nowrap;
}
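/* Illustrative usage (an assumption, not taken from the original markup):
   <a href="#0" class="cd-popup-close img-replace">Close</a> */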
/* --------------------------------
xpopup
-------------------------------- */
.cd-popup {
position: fixed;
left: 0;
top: 0;
height: 100%;
width: 100%;
background-color: rgba(94, 110, 141, 0.9);
opacity: 0;
visibility: hidden;
-webkit-transition: opacity 0.3s 0s, visibility 0s 0.3s;
-moz-transition: opacity 0.3s 0s, visibility 0s 0.3s;
transition: opacity 0.3s 0s, visibility 0s 0.3s;
}
.cd-popup.is-visible {
opacity: 1;
visibility: visible;
-webkit-transition: opacity 0.3s 0s, visibility 0s 0s;
-moz-transition: opacity 0.3s 0s, visibility 0s 0s;
transition: opacity 0.3s 0s, visibility 0s 0s;
}
.cd-popup-container {
position: relative;
width: 90%;
max-width: 400px;
margin: 4em auto;
background: #FFF;
border-radius: .25em .25em .4em .4em;
text-align: center;
box-shadow: 0 0 20px rgba(0, 0, 0, 0.2);
-webkit-transform: translateY(-40px);
-moz-transform: translateY(-40px);
-ms-transform: translateY(-40px);
-o-transform: translateY(-40px);
transform: translateY(-40px);
/* Force Hardware Acceleration in WebKit */
-webkit-backface-visibility: hidden;
-webkit-transition-property: -webkit-transform;
-moz-transition-property: -moz-transform;
transition-property: transform;
-webkit-transition-duration: 0.3s;
-moz-transition-duration: 0.3s;
transition-duration: 0.3s;
}
.cd-popup-container p {
padding: 3em 1em;
line-height: 1.5em;
text-align: left;
font-size: 14px;
font-family: Helvetica;
color: #666;
}
.cd-popup-title {
position:relative;
left:1em;
top:1em;
text-align: left;
font-family: Helvetica;
font-size: 15px;
color: #6C7B97;
font-weight: bold;
}
.cd-buttons { position: relative; width: 100%; }
.cd-popup-container .cd-buttons a:last-child {
background: #b6bece;
border-radius: 0 0 .25em 0;
width: 50%;
right: 0;
}
.cd-popup-container .cd-buttons a:first-child {
background: #fc7169;
border-radius: 0 0 0 .25em;
width: 50%;
left: 0;
}
.cd-popup-container .cd-buttons a {
cursor: pointer;
display: inline-block;
height: 40px;
line-height: 40px;
text-transform: uppercase;
color: #FFF;
-webkit-transition: background-color 0.2s;
-moz-transition: background-color 0.2s;
transition: background-color 0.2s;
font-weight: bold;
}
.cd-popup-container .cd-popup-close {
position: absolute;
top: 8px;
right: 8px;
width: 30px;
height: 30px;
}
.cd-popup-container .cd-popup-close::before, .cd-popup-container .cd-popup-close::after {
content: '';
position: absolute;
top: 12px;
width: 14px;
height: 3px;
background-color: #8f9cb5;
}
.cd-popup-container .cd-popup-close::before {
-webkit-transform: rotate(45deg);
-moz-transform: rotate(45deg);
-ms-transform: rotate(45deg);
-o-transform: rotate(45deg);
transform: rotate(45deg);
left: 8px;
}
.cd-popup-container .cd-popup-close::after {
-webkit-transform: rotate(-45deg);
-moz-transform: rotate(-45deg);
-ms-transform: rotate(-45deg);
-o-transform: rotate(-45deg);
transform: rotate(-45deg);
right: 8px;
}
.is-visible .cd-popup-container {
-webkit-transform: translateY(0);
-moz-transform: translateY(0);
-ms-transform: translateY(0);
-o-transform: translateY(0);
transform: translateY(0);
}
@media only screen and (min-width: 1170px) {
.cd-popup-container {
margin: 8em auto;
}
}
|
apache-2.0
|
fexbraun/hipster-o-mat
|
src/main/java/com/ax/demo/entity/Hipster.java
|
2283
|
package com.ax.demo.entity;
import javax.validation.constraints.Min;
/**
* A Hipster. Used as an entity in this demo application.
*/
public class Hipster {
public enum JeansType {
SKINNY, SUPERSKINNY;
}
@Min(value = 0, message = "Id must be positive")
private int id;
private String name;
private JeansType jeans;
private boolean hornRimmedGlasses;
private String imagePath = "";
public Hipster() {
}
public Hipster(int id, String name, JeansType jeans,
boolean hornRimmedGlasses, String imagePath) {
super();
this.id = id;
this.name = name;
this.jeans = jeans;
this.hornRimmedGlasses = hornRimmedGlasses;
this.imagePath = imagePath;
}
public int getId() {
return id;
}
public String getName() {
return name;
}
public JeansType getJeans() {
return jeans;
}
public boolean isHornRimmedGlasses() {
return hornRimmedGlasses;
}
public void setImagePath(final String imagePath) {
this.imagePath = imagePath;
}
public String getImagePath() {
return imagePath;
}
@Override
public String toString() {
return "Hipster [id=" + id + ", name=" + name + ", jeans=" + jeans
+ ", hornRimmedGlasses=" + hornRimmedGlasses + ", imagePath="
+ imagePath + "]";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
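// 1231 and 1237 are the conventional primes used for boolean fields by
// IDE-generated hashCode() implementations.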
result = prime * result + (hornRimmedGlasses ? 1231 : 1237);
result = prime * result + id;
result = prime * result
+ ((imagePath == null) ? 0 : imagePath.hashCode());
result = prime * result + ((jeans == null) ? 0 : jeans.hashCode());
result = prime * result + ((name == null) ? 0 : name.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Hipster other = (Hipster) obj;
if (hornRimmedGlasses != other.hornRimmedGlasses)
return false;
if (id != other.id)
return false;
if (imagePath == null) {
if (other.imagePath != null)
return false;
} else if (!imagePath.equals(other.imagePath))
return false;
if (jeans != other.jeans)
return false;
if (name == null) {
if (other.name != null)
return false;
} else if (!name.equals(other.name))
return false;
return true;
}
}
|
apache-2.0
|
robinverduijn/gradle
|
subprojects/build-init/src/main/java/org/gradle/buildinit/plugins/internal/ModuleNameBuilder.java
|
830
|
/*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.buildinit.plugins.internal;
import org.gradle.util.GUtil;
public class ModuleNameBuilder {
public static String toModuleName(String name) {
return GUtil.toCamelCase(name);
}
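// Illustrative note (an assumption, not part of the original source): with
// GUtil.toCamelCase, a hypothetical name such as "my-app" would be expected
// to become "MyApp".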
}
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Rosales/Rosaceae/Physocarpus/Physocarpus opulifolius/ Syn. Spiraea caroliniana/README.md
|
185
|
# Spiraea caroliniana Marshall SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
Microsoft-CISL/hadoop-prototype
|
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlockStoragePolicy.java
|
61467
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.apache.hadoop.hdfs.protocol.HdfsConstants.BLOCK_STORAGE_POLICY_ID_UNSPECIFIED;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.protocol.*;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
import org.apache.hadoop.hdfs.server.blockmanagement.*;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.snapshot.SnapshotTestHelper;
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.PathUtils;
import org.junit.Assert;
import org.junit.Test;
/** Test {@link BlockStoragePolicy} */
public class TestBlockStoragePolicy {
public static final BlockStoragePolicySuite POLICY_SUITE;
public static final BlockStoragePolicy DEFAULT_STORAGE_POLICY;
public static final Configuration conf;
static {
conf = new HdfsConfiguration();
conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1);
POLICY_SUITE = BlockStoragePolicySuite.createDefaultSuite();
DEFAULT_STORAGE_POLICY = POLICY_SUITE.getDefaultPolicy();
}
static final EnumSet<StorageType> none = EnumSet.noneOf(StorageType.class);
static final EnumSet<StorageType> archive = EnumSet.of(StorageType.ARCHIVE);
static final EnumSet<StorageType> disk = EnumSet.of(StorageType.DISK);
static final EnumSet<StorageType> ssd = EnumSet.of(StorageType.SSD);
static final EnumSet<StorageType> disk_archive = EnumSet.of(StorageType.DISK,
StorageType.ARCHIVE);
static final EnumSet<StorageType> all = EnumSet.of(StorageType.SSD,
StorageType.DISK, StorageType.ARCHIVE);
static final long FILE_LEN = 1024;
static final short REPLICATION = 3;
static final byte COLD = HdfsConstants.COLD_STORAGE_POLICY_ID;
static final byte WARM = HdfsConstants.WARM_STORAGE_POLICY_ID;
static final byte HOT = HdfsConstants.HOT_STORAGE_POLICY_ID;
static final byte ONESSD = HdfsConstants.ONESSD_STORAGE_POLICY_ID;
static final byte ALLSSD = HdfsConstants.ALLSSD_STORAGE_POLICY_ID;
static final byte LAZY_PERSIST = HdfsConstants.MEMORY_STORAGE_POLICY_ID;
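// The byte IDs above mirror the built-in HDFS storage policies that the
// tests below exercise.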
@Test (timeout=300000)
public void testConfigKeyEnabled() throws IOException {
Configuration conf = new HdfsConfiguration();
conf.setBoolean(DFSConfigKeys.DFS_STORAGE_POLICY_ENABLED_KEY, true);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(1).build();
try {
cluster.waitActive();
cluster.getFileSystem().setStoragePolicy(new Path("/"),
HdfsConstants.COLD_STORAGE_POLICY_NAME);
} finally {
cluster.shutdown();
}
}
/**
* Ensure that setStoragePolicy throws IOException when
* dfs.storage.policy.enabled is set to false.
* @throws IOException
*/
@Test (timeout=300000, expected=IOException.class)
public void testConfigKeyDisabled() throws IOException {
Configuration conf = new HdfsConfiguration();
conf.setBoolean(DFSConfigKeys.DFS_STORAGE_POLICY_ENABLED_KEY, false);
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(1).build();
try {
cluster.waitActive();
cluster.getFileSystem().setStoragePolicy(new Path("/"),
HdfsConstants.COLD_STORAGE_POLICY_NAME);
} finally {
cluster.shutdown();
}
}
@Test
public void testDefaultPolicies() {
final Map<Byte, String> expectedPolicyStrings = new HashMap<Byte, String>();
expectedPolicyStrings.put(COLD,
"BlockStoragePolicy{COLD:" + COLD + ", storageTypes=[ARCHIVE], " +
"creationFallbacks=[], replicationFallbacks=[]}");
expectedPolicyStrings.put(WARM,
"BlockStoragePolicy{WARM:" + WARM + ", storageTypes=[DISK, ARCHIVE], " +
"creationFallbacks=[DISK, ARCHIVE], " +
"replicationFallbacks=[DISK, ARCHIVE]}");
expectedPolicyStrings.put(HOT,
"BlockStoragePolicy{HOT:" + HOT + ", storageTypes=[DISK], " +
"creationFallbacks=[], replicationFallbacks=[ARCHIVE]}");
expectedPolicyStrings.put(ONESSD, "BlockStoragePolicy{ONE_SSD:" + ONESSD +
", storageTypes=[SSD, DISK], creationFallbacks=[SSD, DISK], " +
"replicationFallbacks=[SSD, DISK]}");
expectedPolicyStrings.put(ALLSSD, "BlockStoragePolicy{ALL_SSD:" + ALLSSD +
", storageTypes=[SSD], creationFallbacks=[DISK], " +
"replicationFallbacks=[DISK]}");
expectedPolicyStrings.put(LAZY_PERSIST,
"BlockStoragePolicy{LAZY_PERSIST:" + LAZY_PERSIST + ", storageTypes=[RAM_DISK, DISK], " +
"creationFallbacks=[DISK], replicationFallbacks=[DISK]}");
for(byte i = 1; i < 16; i++) {
final BlockStoragePolicy policy = POLICY_SUITE.getPolicy(i);
if (policy != null) {
final String s = policy.toString();
Assert.assertEquals(expectedPolicyStrings.get(i), s);
}
}
Assert.assertEquals(POLICY_SUITE.getPolicy(HOT), POLICY_SUITE.getDefaultPolicy());
{ // check Cold policy
final BlockStoragePolicy cold = POLICY_SUITE.getPolicy(COLD);
for(short replication = 1; replication < 6; replication++) {
final List<StorageType> computed = cold.chooseStorageTypes(replication);
assertStorageType(computed, replication, StorageType.ARCHIVE);
}
assertCreationFallback(cold, null, null, null, null, null);
assertReplicationFallback(cold, null, null, null, null);
}
{ // check Warm policy
final BlockStoragePolicy warm = POLICY_SUITE.getPolicy(WARM);
for(short replication = 1; replication < 6; replication++) {
final List<StorageType> computed = warm.chooseStorageTypes(replication);
assertStorageType(computed, replication, StorageType.DISK, StorageType.ARCHIVE);
}
assertCreationFallback(warm, StorageType.DISK, StorageType.DISK,
StorageType.ARCHIVE, StorageType.DISK, null);
assertReplicationFallback(warm, StorageType.DISK, StorageType.DISK,
StorageType.ARCHIVE, StorageType.DISK);
}
{ // check Hot policy
final BlockStoragePolicy hot = POLICY_SUITE.getPolicy(HOT);
for(short replication = 1; replication < 6; replication++) {
final List<StorageType> computed = hot.chooseStorageTypes(replication);
assertStorageType(computed, replication, StorageType.DISK);
}
assertCreationFallback(hot, null, null, null, null, null);
assertReplicationFallback(hot, StorageType.ARCHIVE, null,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{ // check ONE_SSD policy
final BlockStoragePolicy onessd = POLICY_SUITE.getPolicy(ONESSD);
for (short replication = 1; replication < 6; replication++) {
final List<StorageType> computed = onessd
.chooseStorageTypes(replication);
assertStorageType(computed, replication, StorageType.SSD,
StorageType.DISK);
}
assertCreationFallback(onessd, StorageType.SSD, StorageType.SSD,
StorageType.SSD, StorageType.DISK, StorageType.SSD);
assertReplicationFallback(onessd, StorageType.SSD, StorageType.SSD,
StorageType.SSD, StorageType.DISK);
}
{ // check ALL_SSD policy
final BlockStoragePolicy allssd = POLICY_SUITE.getPolicy(ALLSSD);
for (short replication = 1; replication < 6; replication++) {
final List<StorageType> computed = allssd
.chooseStorageTypes(replication);
assertStorageType(computed, replication, StorageType.SSD);
}
assertCreationFallback(allssd, StorageType.DISK, StorageType.DISK, null,
StorageType.DISK, null);
assertReplicationFallback(allssd, StorageType.DISK, StorageType.DISK,
null, StorageType.DISK);
}
{ // check LAZY_PERSIST policy
final BlockStoragePolicy lazyPersist = POLICY_SUITE
.getPolicy(LAZY_PERSIST);
for (short replication = 1; replication < 6; replication++) {
final List<StorageType> computed = lazyPersist
.chooseStorageTypes(replication);
assertStorageType(computed, replication, StorageType.DISK);
}
assertCreationFallback(lazyPersist, StorageType.DISK, StorageType.DISK,
null, StorageType.DISK, null);
assertReplicationFallback(lazyPersist, StorageType.DISK,
StorageType.DISK, null, StorageType.DISK);
}
}
static StorageType[] newStorageTypes(int nDisk, int nArchive) {
final StorageType[] t = new StorageType[nDisk + nArchive];
Arrays.fill(t, 0, nDisk, StorageType.DISK);
Arrays.fill(t, nDisk, t.length, StorageType.ARCHIVE);
return t;
}
static List<StorageType> asList(int nDisk, int nArchive) {
return Arrays.asList(newStorageTypes(nDisk, nArchive));
}
static void assertStorageType(List<StorageType> computed, short replication,
StorageType... answers) {
Assert.assertEquals(replication, computed.size());
final StorageType last = answers[answers.length - 1];
for(int i = 0; i < computed.size(); i++) {
final StorageType expected = i < answers.length? answers[i]: last;
Assert.assertEquals(expected, computed.get(i));
}
}
static void assertCreationFallback(BlockStoragePolicy policy,
StorageType noneExpected, StorageType archiveExpected,
StorageType diskExpected, StorageType ssdExpected,
StorageType disk_archiveExpected) {
Assert.assertEquals(noneExpected, policy.getCreationFallback(none));
Assert.assertEquals(archiveExpected, policy.getCreationFallback(archive));
Assert.assertEquals(diskExpected, policy.getCreationFallback(disk));
Assert.assertEquals(ssdExpected, policy.getCreationFallback(ssd));
Assert.assertEquals(disk_archiveExpected,
policy.getCreationFallback(disk_archive));
Assert.assertEquals(null, policy.getCreationFallback(all));
}
static void assertReplicationFallback(BlockStoragePolicy policy,
StorageType noneExpected, StorageType archiveExpected,
StorageType diskExpected, StorageType ssdExpected) {
Assert.assertEquals(noneExpected, policy.getReplicationFallback(none));
Assert
.assertEquals(archiveExpected, policy.getReplicationFallback(archive));
Assert.assertEquals(diskExpected, policy.getReplicationFallback(disk));
Assert.assertEquals(ssdExpected, policy.getReplicationFallback(ssd));
Assert.assertEquals(null, policy.getReplicationFallback(all));
}
private static interface CheckChooseStorageTypes {
public void checkChooseStorageTypes(BlockStoragePolicy p, short replication,
List<StorageType> chosen, StorageType... expected);
/** Basic case: pass only replication and chosen */
static final CheckChooseStorageTypes Basic = new CheckChooseStorageTypes() {
@Override
public void checkChooseStorageTypes(BlockStoragePolicy p, short replication,
List<StorageType> chosen, StorageType... expected) {
final List<StorageType> types = p.chooseStorageTypes(replication, chosen);
assertStorageTypes(types, expected);
}
};
/** With empty unavailables and isNewBlock=true */
static final CheckChooseStorageTypes EmptyUnavailablesAndNewBlock
= new CheckChooseStorageTypes() {
@Override
public void checkChooseStorageTypes(BlockStoragePolicy p,
short replication, List<StorageType> chosen, StorageType... expected) {
final List<StorageType> types = p.chooseStorageTypes(replication,
chosen, none, true);
assertStorageTypes(types, expected);
}
};
/** With empty unavailables and isNewBlock=false */
static final CheckChooseStorageTypes EmptyUnavailablesAndNonNewBlock
= new CheckChooseStorageTypes() {
@Override
public void checkChooseStorageTypes(BlockStoragePolicy p,
short replication, List<StorageType> chosen, StorageType... expected) {
final List<StorageType> types = p.chooseStorageTypes(replication,
chosen, none, false);
assertStorageTypes(types, expected);
}
};
/** With both DISK and ARCHIVE unavailable and isNewBlock=true */
static final CheckChooseStorageTypes BothUnavailableAndNewBlock
= new CheckChooseStorageTypes() {
@Override
public void checkChooseStorageTypes(BlockStoragePolicy p,
short replication, List<StorageType> chosen, StorageType... expected) {
final List<StorageType> types = p.chooseStorageTypes(replication,
chosen, disk_archive, true);
assertStorageTypes(types, expected);
}
};
/** With both DISK and ARCHIVE unavailable and isNewBlock=false */
static final CheckChooseStorageTypes BothUnavailableAndNonNewBlock
= new CheckChooseStorageTypes() {
@Override
public void checkChooseStorageTypes(BlockStoragePolicy p,
short replication, List<StorageType> chosen, StorageType... expected) {
final List<StorageType> types = p.chooseStorageTypes(replication,
chosen, disk_archive, false);
assertStorageTypes(types, expected);
}
};
/** With ARCHIVE unavailable and isNewBlock=true */
static final CheckChooseStorageTypes ArchivalUnavailableAndNewBlock
= new CheckChooseStorageTypes() {
@Override
public void checkChooseStorageTypes(BlockStoragePolicy p,
short replication, List<StorageType> chosen, StorageType... expected) {
final List<StorageType> types = p.chooseStorageTypes(replication,
chosen, archive, true);
assertStorageTypes(types, expected);
}
};
/** With ARCHIVE unavailable and isNewBlock=false */
static final CheckChooseStorageTypes ArchivalUnavailableAndNonNewBlock
= new CheckChooseStorageTypes() {
@Override
public void checkChooseStorageTypes(BlockStoragePolicy p,
short replication, List<StorageType> chosen, StorageType... expected) {
final List<StorageType> types = p.chooseStorageTypes(replication,
chosen, archive, false);
assertStorageTypes(types, expected);
}
};
}
@Test
public void testChooseStorageTypes() {
run(CheckChooseStorageTypes.Basic);
run(CheckChooseStorageTypes.EmptyUnavailablesAndNewBlock);
run(CheckChooseStorageTypes.EmptyUnavailablesAndNonNewBlock);
}
private static void run(CheckChooseStorageTypes method) {
final BlockStoragePolicy hot = POLICY_SUITE.getPolicy(HOT);
final BlockStoragePolicy warm = POLICY_SUITE.getPolicy(WARM);
final BlockStoragePolicy cold = POLICY_SUITE.getPolicy(COLD);
final short replication = 3;
{
final List<StorageType> chosen = Lists.newArrayList();
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK, StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(cold, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(StorageType.DISK);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(cold, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK, StorageType.ARCHIVE);
method.checkChooseStorageTypes(cold, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(cold, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.ARCHIVE);
method.checkChooseStorageTypes(cold, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(cold, replication, chosen,
StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(hot, replication, chosen);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(cold, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.ARCHIVE);
method.checkChooseStorageTypes(cold, replication, chosen,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen);
method.checkChooseStorageTypes(cold, replication, chosen,
StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(cold, replication, chosen);
}
}
@Test
public void testChooseStorageTypesWithBothUnavailable() {
runWithBothUnavailable(CheckChooseStorageTypes.BothUnavailableAndNewBlock);
runWithBothUnavailable(CheckChooseStorageTypes.BothUnavailableAndNonNewBlock);
}
private static void runWithBothUnavailable(CheckChooseStorageTypes method) {
final BlockStoragePolicy hot = POLICY_SUITE.getPolicy(HOT);
final BlockStoragePolicy warm = POLICY_SUITE.getPolicy(WARM);
final BlockStoragePolicy cold = POLICY_SUITE.getPolicy(COLD);
final short replication = 3;
for(int n = 0; n <= 3; n++) {
for(int d = 0; d <= n; d++) {
final int a = n - d;
final List<StorageType> chosen = asList(d, a);
method.checkChooseStorageTypes(hot, replication, chosen);
method.checkChooseStorageTypes(warm, replication, chosen);
method.checkChooseStorageTypes(cold, replication, chosen);
}
}
}
@Test
public void testChooseStorageTypesWithDiskUnavailableAndNewBlock() {
final BlockStoragePolicy hot = POLICY_SUITE.getPolicy(HOT);
final BlockStoragePolicy warm = POLICY_SUITE.getPolicy(WARM);
final BlockStoragePolicy cold = POLICY_SUITE.getPolicy(COLD);
final short replication = 3;
final EnumSet<StorageType> unavailables = disk;
final boolean isNewBlock = true;
{
final List<StorageType> chosen = Lists.newArrayList();
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(StorageType.DISK);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK, StorageType.DISK);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock);
}
}
@Test
public void testChooseStorageTypesWithArchiveUnavailable() {
runWithArchiveUnavailable(CheckChooseStorageTypes.ArchivalUnavailableAndNewBlock);
runWithArchiveUnavailable(CheckChooseStorageTypes.ArchivalUnavailableAndNonNewBlock);
}
private static void runWithArchiveUnavailable(CheckChooseStorageTypes method) {
final BlockStoragePolicy hot = POLICY_SUITE.getPolicy(HOT);
final BlockStoragePolicy warm = POLICY_SUITE.getPolicy(WARM);
final BlockStoragePolicy cold = POLICY_SUITE.getPolicy(COLD);
final short replication = 3;
{
final List<StorageType> chosen = Lists.newArrayList();
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(cold, replication, chosen);
}
{
final List<StorageType> chosen = Arrays.asList(StorageType.DISK);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(cold, replication, chosen);
}
{
final List<StorageType> chosen = Arrays.asList(StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(cold, replication, chosen);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(cold, replication, chosen);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(cold, replication, chosen);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(cold, replication, chosen);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(hot, replication, chosen);
method.checkChooseStorageTypes(warm, replication, chosen);
method.checkChooseStorageTypes(cold, replication, chosen);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen);
method.checkChooseStorageTypes(cold, replication, chosen);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen);
method.checkChooseStorageTypes(cold, replication, chosen);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
method.checkChooseStorageTypes(hot, replication, chosen,
StorageType.DISK, StorageType.DISK, StorageType.DISK);
method.checkChooseStorageTypes(warm, replication, chosen,
StorageType.DISK);
method.checkChooseStorageTypes(cold, replication, chosen);
}
}
@Test
public void testChooseStorageTypesWithDiskUnavailableAndNonNewBlock() {
final BlockStoragePolicy hot = POLICY_SUITE.getPolicy(HOT);
final BlockStoragePolicy warm = POLICY_SUITE.getPolicy(WARM);
final BlockStoragePolicy cold = POLICY_SUITE.getPolicy(COLD);
final short replication = 3;
final EnumSet<StorageType> unavailables = disk;
final boolean isNewBlock = false;
{
final List<StorageType> chosen = Lists.newArrayList();
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(StorageType.DISK);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK, StorageType.DISK);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.DISK, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE, StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.DISK, StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock,
StorageType.ARCHIVE);
}
{
final List<StorageType> chosen = Arrays.asList(
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE);
checkChooseStorageTypes(hot, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(warm, replication, chosen, unavailables, isNewBlock);
checkChooseStorageTypes(cold, replication, chosen, unavailables, isNewBlock);
}
}
static void checkChooseStorageTypes(BlockStoragePolicy p, short replication,
List<StorageType> chosen, EnumSet<StorageType> unavailables,
boolean isNewBlock, StorageType... expected) {
final List<StorageType> types = p.chooseStorageTypes(replication, chosen,
unavailables, isNewBlock);
assertStorageTypes(types, expected);
}
static void assertStorageTypes(List<StorageType> computed, StorageType... expected) {
assertStorageTypes(computed.toArray(StorageType.EMPTY_ARRAY), expected);
}
static void assertStorageTypes(StorageType[] computed, StorageType... expected) {
Arrays.sort(expected);
Arrays.sort(computed);
Assert.assertArrayEquals(expected, computed);
}
@Test
public void testChooseExcess() {
final BlockStoragePolicy hot = POLICY_SUITE.getPolicy(HOT);
final BlockStoragePolicy warm = POLICY_SUITE.getPolicy(WARM);
final BlockStoragePolicy cold = POLICY_SUITE.getPolicy(COLD);
final short replication = 3;
for(int n = 0; n <= 6; n++) {
for(int d = 0; d <= n; d++) {
final int a = n - d;
final List<StorageType> chosen = asList(d, a);
{
final int nDisk = Math.max(0, d - replication);
final int nArchive = a;
final StorageType[] expected = newStorageTypes(nDisk, nArchive);
checkChooseExcess(hot, replication, chosen, expected);
}
{
final int nDisk = Math.max(0, d - 1);
final int nArchive = Math.max(0, a - replication + 1);
final StorageType[] expected = newStorageTypes(nDisk, nArchive);
checkChooseExcess(warm, replication, chosen, expected);
}
{
final int nDisk = d;
final int nArchive = Math.max(0, a - replication );
final StorageType[] expected = newStorageTypes(nDisk, nArchive);
checkChooseExcess(cold, replication, chosen, expected);
}
}
}
}
static void checkChooseExcess(BlockStoragePolicy p, short replication,
List<StorageType> chosen, StorageType... expected) {
final List<StorageType> types = p.chooseExcess(replication, chosen);
assertStorageTypes(types, expected);
}
private void checkDirectoryListing(HdfsFileStatus[] stats, byte... policies) {
Assert.assertEquals(stats.length, policies.length);
for (int i = 0; i < stats.length; i++) {
Assert.assertEquals(stats[i].getStoragePolicy(), policies[i]);
}
}
@Test
public void testSetStoragePolicy() throws Exception {
final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(REPLICATION).build();
cluster.waitActive();
final DistributedFileSystem fs = cluster.getFileSystem();
try {
final Path dir = new Path("/testSetStoragePolicy");
final Path fooFile = new Path(dir, "foo");
final Path barDir = new Path(dir, "bar");
final Path barFile1= new Path(barDir, "f1");
final Path barFile2= new Path(barDir, "f2");
DFSTestUtil.createFile(fs, fooFile, FILE_LEN, REPLICATION, 0L);
DFSTestUtil.createFile(fs, barFile1, FILE_LEN, REPLICATION, 0L);
DFSTestUtil.createFile(fs, barFile2, FILE_LEN, REPLICATION, 0L);
final String invalidPolicyName = "INVALID-POLICY";
try {
fs.setStoragePolicy(fooFile, invalidPolicyName);
Assert.fail("Should throw a HadoopIllegalArgumentException");
} catch (RemoteException e) {
GenericTestUtils.assertExceptionContains(invalidPolicyName, e);
}
// check storage policy
HdfsFileStatus[] dirList = fs.getClient().listPaths(dir.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
HdfsFileStatus[] barList = fs.getClient().listPaths(barDir.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
checkDirectoryListing(dirList, BLOCK_STORAGE_POLICY_ID_UNSPECIFIED,
BLOCK_STORAGE_POLICY_ID_UNSPECIFIED);
checkDirectoryListing(barList, BLOCK_STORAGE_POLICY_ID_UNSPECIFIED,
BLOCK_STORAGE_POLICY_ID_UNSPECIFIED);
final Path invalidPath = new Path("/invalidPath");
try {
fs.setStoragePolicy(invalidPath, HdfsConstants.WARM_STORAGE_POLICY_NAME);
Assert.fail("Should throw a FileNotFoundException");
} catch (FileNotFoundException e) {
GenericTestUtils.assertExceptionContains(invalidPath.toString(), e);
}
try {
fs.getStoragePolicy(invalidPath);
Assert.fail("Should throw a FileNotFoundException");
} catch (FileNotFoundException e) {
GenericTestUtils.assertExceptionContains(invalidPath.toString(), e);
}
fs.setStoragePolicy(fooFile, HdfsConstants.COLD_STORAGE_POLICY_NAME);
fs.setStoragePolicy(barDir, HdfsConstants.WARM_STORAGE_POLICY_NAME);
fs.setStoragePolicy(barFile2, HdfsConstants.HOT_STORAGE_POLICY_NAME);
Assert.assertEquals("File storage policy should be COLD",
HdfsConstants.COLD_STORAGE_POLICY_NAME,
fs.getStoragePolicy(fooFile).getName());
Assert.assertEquals("File storage policy should be WARM",
HdfsConstants.WARM_STORAGE_POLICY_NAME,
fs.getStoragePolicy(barDir).getName());
Assert.assertEquals("File storage policy should be HOT",
HdfsConstants.HOT_STORAGE_POLICY_NAME,
fs.getStoragePolicy(barFile2).getName());
dirList = fs.getClient().listPaths(dir.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing();
barList = fs.getClient().listPaths(barDir.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing();
checkDirectoryListing(dirList, WARM, COLD); // bar is warm, foo is cold
checkDirectoryListing(barList, WARM, HOT);
// restart namenode to make sure the editlog is correct
cluster.restartNameNode(true);
dirList = fs.getClient().listPaths(dir.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
barList = fs.getClient().listPaths(barDir.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
checkDirectoryListing(dirList, WARM, COLD); // bar is warm, foo is cold
checkDirectoryListing(barList, WARM, HOT);
// restart namenode with checkpoint to make sure the fsimage is correct
fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
fs.saveNamespace();
fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
cluster.restartNameNode(true);
dirList = fs.getClient().listPaths(dir.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing();
barList = fs.getClient().listPaths(barDir.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing();
checkDirectoryListing(dirList, WARM, COLD); // bar is warm, foo is cold
checkDirectoryListing(barList, WARM, HOT);
} finally {
cluster.shutdown();
}
}
@Test
public void testGetStoragePolicy() throws Exception {
final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(REPLICATION).build();
cluster.waitActive();
final DistributedFileSystem fs = cluster.getFileSystem();
try {
final Path dir = new Path("/testGetStoragePolicy");
final Path fooFile = new Path(dir, "foo");
DFSTestUtil.createFile(fs, fooFile, FILE_LEN, REPLICATION, 0L);
DFSClient client = new DFSClient(cluster.getNameNode(0)
.getNameNodeAddress(), conf);
client.setStoragePolicy("/testGetStoragePolicy/foo",
HdfsConstants.COLD_STORAGE_POLICY_NAME);
String policyName = client.getStoragePolicy("/testGetStoragePolicy/foo")
.getName();
Assert.assertEquals("File storage policy should be COLD",
HdfsConstants.COLD_STORAGE_POLICY_NAME, policyName);
} finally {
cluster.shutdown();
}
}
@Test
public void testSetStoragePolicyWithSnapshot() throws Exception {
final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(REPLICATION).build();
cluster.waitActive();
final DistributedFileSystem fs = cluster.getFileSystem();
try {
final Path dir = new Path("/testSetStoragePolicyWithSnapshot");
final Path fooDir = new Path(dir, "foo");
final Path fooFile1= new Path(fooDir, "f1");
final Path fooFile2= new Path(fooDir, "f2");
DFSTestUtil.createFile(fs, fooFile1, FILE_LEN, REPLICATION, 0L);
DFSTestUtil.createFile(fs, fooFile2, FILE_LEN, REPLICATION, 0L);
fs.setStoragePolicy(fooDir, HdfsConstants.WARM_STORAGE_POLICY_NAME);
HdfsFileStatus[] dirList = fs.getClient().listPaths(dir.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
checkDirectoryListing(dirList, WARM);
HdfsFileStatus[] fooList = fs.getClient().listPaths(fooDir.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
checkDirectoryListing(fooList, WARM, WARM);
// take snapshot
SnapshotTestHelper.createSnapshot(fs, dir, "s1");
// change the storage policy of fooFile1
fs.setStoragePolicy(fooFile1, HdfsConstants.COLD_STORAGE_POLICY_NAME);
fooList = fs.getClient().listPaths(fooDir.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing();
checkDirectoryListing(fooList, COLD, WARM);
// check the policy for /dir/.snapshot/s1/foo/f1. Note we always return
// the latest storage policy for a file/directory.
Path s1f1 = SnapshotTestHelper.getSnapshotPath(dir, "s1", "foo/f1");
DirectoryListing f1Listing = fs.getClient().listPaths(s1f1.toString(),
HdfsFileStatus.EMPTY_NAME);
checkDirectoryListing(f1Listing.getPartialListing(), COLD);
// delete f1
fs.delete(fooFile1, true);
fooList = fs.getClient().listPaths(fooDir.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing();
checkDirectoryListing(fooList, WARM);
// check the policy for /dir/.snapshot/s1/foo/f1 again after the deletion
checkDirectoryListing(fs.getClient().listPaths(s1f1.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing(), COLD);
// change the storage policy of foo dir
fs.setStoragePolicy(fooDir, HdfsConstants.HOT_STORAGE_POLICY_NAME);
// /dir/foo is now hot
dirList = fs.getClient().listPaths(dir.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
checkDirectoryListing(dirList, HOT);
// /dir/foo/f2 is hot
fooList = fs.getClient().listPaths(fooDir.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing();
checkDirectoryListing(fooList, HOT);
// check storage policy of snapshot path
Path s1 = SnapshotTestHelper.getSnapshotRoot(dir, "s1");
Path s1foo = SnapshotTestHelper.getSnapshotPath(dir, "s1", "foo");
checkDirectoryListing(fs.getClient().listPaths(s1.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing(), HOT);
      // /dir/.snapshot/s1/foo/f1 and /dir/.snapshot/s1/foo/f2 should still
      // follow the latest storage policy
checkDirectoryListing(fs.getClient().listPaths(s1foo.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing(), COLD, HOT);
// delete foo
fs.delete(fooDir, true);
checkDirectoryListing(fs.getClient().listPaths(s1.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing(), HOT);
checkDirectoryListing(fs.getClient().listPaths(s1foo.toString(),
HdfsFileStatus.EMPTY_NAME).getPartialListing(), COLD, HOT);
} finally {
cluster.shutdown();
}
}
private static StorageType[][] genStorageTypes(int numDataNodes) {
StorageType[][] types = new StorageType[numDataNodes][];
for (int i = 0; i < types.length; i++) {
types[i] = new StorageType[]{StorageType.DISK, StorageType.ARCHIVE};
}
return types;
}
private void checkLocatedBlocks(HdfsLocatedFileStatus status, int blockNum,
int replicaNum, StorageType... types) {
List<StorageType> typeList = Lists.newArrayList();
Collections.addAll(typeList, types);
LocatedBlocks lbs = status.getBlockLocations();
Assert.assertEquals(blockNum, lbs.getLocatedBlocks().size());
for (LocatedBlock lb : lbs.getLocatedBlocks()) {
Assert.assertEquals(replicaNum, lb.getStorageTypes().length);
for (StorageType type : lb.getStorageTypes()) {
Assert.assertTrue(typeList.remove(type));
}
}
Assert.assertTrue(typeList.isEmpty());
}
private void testChangeFileRep(String policyName, byte policyId,
StorageType[] before,
StorageType[] after) throws Exception {
final int numDataNodes = 5;
final StorageType[][] types = genStorageTypes(numDataNodes);
final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(numDataNodes).storageTypes(types).build();
cluster.waitActive();
final DistributedFileSystem fs = cluster.getFileSystem();
try {
final Path dir = new Path("/test");
fs.mkdirs(dir);
fs.setStoragePolicy(dir, policyName);
final Path foo = new Path(dir, "foo");
DFSTestUtil.createFile(fs, foo, FILE_LEN, REPLICATION, 0L);
HdfsFileStatus[] status = fs.getClient().listPaths(foo.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
checkDirectoryListing(status, policyId);
HdfsLocatedFileStatus fooStatus = (HdfsLocatedFileStatus) status[0];
checkLocatedBlocks(fooStatus, 1, 3, before);
// change the replication factor to 5
fs.setReplication(foo, (short) numDataNodes);
Thread.sleep(1000);
for (DataNode dn : cluster.getDataNodes()) {
DataNodeTestUtils.triggerHeartbeat(dn);
}
Thread.sleep(1000);
status = fs.getClient().listPaths(foo.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
checkDirectoryListing(status, policyId);
fooStatus = (HdfsLocatedFileStatus) status[0];
checkLocatedBlocks(fooStatus, 1, numDataNodes, after);
// change the replication factor back to 3
fs.setReplication(foo, REPLICATION);
Thread.sleep(1000);
for (DataNode dn : cluster.getDataNodes()) {
DataNodeTestUtils.triggerHeartbeat(dn);
}
Thread.sleep(1000);
for (DataNode dn : cluster.getDataNodes()) {
DataNodeTestUtils.triggerBlockReport(dn);
}
Thread.sleep(1000);
status = fs.getClient().listPaths(foo.toString(),
HdfsFileStatus.EMPTY_NAME, true).getPartialListing();
checkDirectoryListing(status, policyId);
fooStatus = (HdfsLocatedFileStatus) status[0];
checkLocatedBlocks(fooStatus, 1, REPLICATION, before);
} finally {
cluster.shutdown();
}
}
/**
* Consider a File with Hot storage policy. Increase replication factor of
* that file from 3 to 5. Make sure all replications are created in DISKS.
*/
@Test
public void testChangeHotFileRep() throws Exception {
testChangeFileRep(HdfsConstants.HOT_STORAGE_POLICY_NAME, HOT,
new StorageType[]{StorageType.DISK, StorageType.DISK,
StorageType.DISK},
new StorageType[]{StorageType.DISK, StorageType.DISK, StorageType.DISK,
StorageType.DISK, StorageType.DISK});
}
/**
* Consider a File with Warm temperature. Increase replication factor of
* that file from 3 to 5. Make sure all replicas are created in DISKS
* and ARCHIVE.
*/
@Test
public void testChangeWarmRep() throws Exception {
testChangeFileRep(HdfsConstants.WARM_STORAGE_POLICY_NAME, WARM,
new StorageType[]{StorageType.DISK, StorageType.ARCHIVE,
StorageType.ARCHIVE},
new StorageType[]{StorageType.DISK, StorageType.ARCHIVE,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE});
}
/**
* Consider a File with Cold temperature. Increase replication factor of
* that file from 3 to 5. Make sure all replicas are created in ARCHIVE.
*/
@Test
public void testChangeColdRep() throws Exception {
testChangeFileRep(HdfsConstants.COLD_STORAGE_POLICY_NAME, COLD,
new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,
StorageType.ARCHIVE},
new StorageType[]{StorageType.ARCHIVE, StorageType.ARCHIVE,
StorageType.ARCHIVE, StorageType.ARCHIVE, StorageType.ARCHIVE});
}
@Test
public void testChooseTargetWithTopology() throws Exception {
BlockStoragePolicy policy1 = new BlockStoragePolicy((byte) 9, "TEST1",
new StorageType[]{StorageType.SSD, StorageType.DISK,
StorageType.ARCHIVE}, new StorageType[]{}, new StorageType[]{});
BlockStoragePolicy policy2 = new BlockStoragePolicy((byte) 11, "TEST2",
new StorageType[]{StorageType.DISK, StorageType.SSD,
StorageType.ARCHIVE}, new StorageType[]{}, new StorageType[]{});
final String[] racks = {"/d1/r1", "/d1/r2", "/d1/r2"};
final String[] hosts = {"host1", "host2", "host3"};
final StorageType[] types = {StorageType.DISK, StorageType.SSD,
StorageType.ARCHIVE};
final DatanodeStorageInfo[] storages = DFSTestUtil
.createDatanodeStorageInfos(3, racks, hosts, types);
final DatanodeDescriptor[] dataNodes = DFSTestUtil
.toDatanodeDescriptor(storages);
FileSystem.setDefaultUri(conf, "hdfs://localhost:0");
conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "0.0.0.0:0");
File baseDir = PathUtils.getTestDir(TestReplicationPolicy.class);
conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
new File(baseDir, "name").getPath());
DFSTestUtil.formatNameNode(conf);
NameNode namenode = new NameNode(conf);
final BlockManager bm = namenode.getNamesystem().getBlockManager();
BlockPlacementPolicy replicator = bm.getBlockPlacementPolicy();
NetworkTopology cluster = bm.getDatanodeManager().getNetworkTopology();
for (DatanodeDescriptor datanode : dataNodes) {
cluster.add(datanode);
}
DatanodeStorageInfo[] targets = replicator.chooseTarget("/foo", 3,
dataNodes[0], Collections.<DatanodeStorageInfo>emptyList(), false,
new HashSet<Node>(), 0, policy1);
System.out.println(Arrays.asList(targets));
Assert.assertEquals(3, targets.length);
targets = replicator.chooseTarget("/foo", 3,
dataNodes[0], Collections.<DatanodeStorageInfo>emptyList(), false,
new HashSet<Node>(), 0, policy2);
System.out.println(Arrays.asList(targets));
Assert.assertEquals(3, targets.length);
}
/**
* Test getting all the storage policies from the namenode
*/
@Test
public void testGetAllStoragePolicies() throws Exception {
final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(0).build();
cluster.waitActive();
final DistributedFileSystem fs = cluster.getFileSystem();
try {
BlockStoragePolicy[] policies = fs.getStoragePolicies();
Assert.assertEquals(6, policies.length);
Assert.assertEquals(POLICY_SUITE.getPolicy(COLD).toString(),
policies[0].toString());
Assert.assertEquals(POLICY_SUITE.getPolicy(WARM).toString(),
policies[1].toString());
Assert.assertEquals(POLICY_SUITE.getPolicy(HOT).toString(),
policies[2].toString());
} finally {
IOUtils.cleanup(null, fs);
cluster.shutdown();
}
}
@Test
public void testChooseSsdOverDisk() throws Exception {
BlockStoragePolicy policy = new BlockStoragePolicy((byte) 9, "TEST1",
new StorageType[]{StorageType.SSD, StorageType.DISK,
StorageType.ARCHIVE}, new StorageType[]{}, new StorageType[]{});
final String[] racks = {"/d1/r1", "/d1/r1", "/d1/r1"};
final String[] hosts = {"host1", "host2", "host3"};
final StorageType[] disks = {StorageType.DISK, StorageType.DISK, StorageType.DISK};
final DatanodeStorageInfo[] diskStorages
= DFSTestUtil.createDatanodeStorageInfos(3, racks, hosts, disks);
final DatanodeDescriptor[] dataNodes
= DFSTestUtil.toDatanodeDescriptor(diskStorages);
for(int i = 0; i < dataNodes.length; i++) {
BlockManagerTestUtil.updateStorage(dataNodes[i],
new DatanodeStorage("ssd" + i, DatanodeStorage.State.NORMAL,
StorageType.SSD));
}
FileSystem.setDefaultUri(conf, "hdfs://localhost:0");
conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "0.0.0.0:0");
File baseDir = PathUtils.getTestDir(TestReplicationPolicy.class);
conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
new File(baseDir, "name").getPath());
DFSTestUtil.formatNameNode(conf);
NameNode namenode = new NameNode(conf);
final BlockManager bm = namenode.getNamesystem().getBlockManager();
BlockPlacementPolicy replicator = bm.getBlockPlacementPolicy();
NetworkTopology cluster = bm.getDatanodeManager().getNetworkTopology();
for (DatanodeDescriptor datanode : dataNodes) {
cluster.add(datanode);
}
DatanodeStorageInfo[] targets = replicator.chooseTarget("/foo", 3,
dataNodes[0], Collections.<DatanodeStorageInfo>emptyList(), false,
new HashSet<Node>(), 0, policy);
System.out.println(policy.getName() + ": " + Arrays.asList(targets));
Assert.assertEquals(2, targets.length);
Assert.assertEquals(StorageType.SSD, targets[0].getStorageType());
Assert.assertEquals(StorageType.DISK, targets[1].getStorageType());
}
@Test
public void testGetFileStoragePolicyAfterRestartNN() throws Exception {
    // HDFS-8219
final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(REPLICATION)
.storageTypes(
new StorageType[] {StorageType.DISK, StorageType.ARCHIVE})
.build();
cluster.waitActive();
final DistributedFileSystem fs = cluster.getFileSystem();
try {
final String file = "/testScheduleWithinSameNode/file";
Path dir = new Path("/testScheduleWithinSameNode");
fs.mkdirs(dir);
// 2. Set Dir policy
fs.setStoragePolicy(dir, "COLD");
// 3. Create file
final FSDataOutputStream out = fs.create(new Path(file));
out.writeChars("testScheduleWithinSameNode");
out.close();
// 4. Set Dir policy
fs.setStoragePolicy(dir, "HOT");
HdfsFileStatus status = fs.getClient().getFileInfo(file);
// 5. get file policy, it should be parent policy.
Assert
.assertTrue("File storage policy should be HOT",
status.getStoragePolicy() == HOT);
// 6. restart NameNode for reloading edits logs.
cluster.restartNameNode(true);
// 7. get file policy, it should be parent policy.
status = fs.getClient().getFileInfo(file);
Assert
.assertTrue("File storage policy should be HOT",
status.getStoragePolicy() == HOT);
} finally {
cluster.shutdown();
}
}
/**
* Verify that {@link FileSystem#getAllStoragePolicies} returns all
* known storage policies for DFS.
*
* @throws IOException
*/
@Test
public void testGetAllStoragePoliciesFromFs() throws IOException {
final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
.numDataNodes(REPLICATION)
.storageTypes(
new StorageType[] {StorageType.DISK, StorageType.ARCHIVE})
.build();
try {
cluster.waitActive();
// Get policies via {@link FileSystem#getAllStoragePolicies}
Set<String> policyNamesSet1 = new HashSet<>();
for (BlockStoragePolicySpi policy :
cluster.getFileSystem().getAllStoragePolicies()) {
policyNamesSet1.add(policy.getName());
}
// Get policies from the default BlockStoragePolicySuite.
BlockStoragePolicySuite suite = BlockStoragePolicySuite.createDefaultSuite();
Set<String> policyNamesSet2 = new HashSet<>();
for (BlockStoragePolicy policy : suite.getAllPolicies()) {
policyNamesSet2.add(policy.getName());
}
// Ensure that we got the same set of policies in both cases.
Assert.assertTrue(Sets.difference(policyNamesSet1, policyNamesSet2).isEmpty());
Assert.assertTrue(Sets.difference(policyNamesSet2, policyNamesSet1).isEmpty());
} finally {
cluster.shutdown();
}
}
@Test
public void testStorageType() {
final EnumMap<StorageType, Integer> map = new EnumMap<>(StorageType.class);
    // put storage types in reversed order
map.put(StorageType.ARCHIVE, 1);
map.put(StorageType.DISK, 1);
map.put(StorageType.SSD, 1);
map.put(StorageType.RAM_DISK, 1);
{
final Iterator<StorageType> i = map.keySet().iterator();
Assert.assertEquals(StorageType.RAM_DISK, i.next());
Assert.assertEquals(StorageType.SSD, i.next());
Assert.assertEquals(StorageType.DISK, i.next());
Assert.assertEquals(StorageType.ARCHIVE, i.next());
}
{
final Iterator<Map.Entry<StorageType, Integer>> i
= map.entrySet().iterator();
Assert.assertEquals(StorageType.RAM_DISK, i.next().getKey());
Assert.assertEquals(StorageType.SSD, i.next().getKey());
Assert.assertEquals(StorageType.DISK, i.next().getKey());
Assert.assertEquals(StorageType.ARCHIVE, i.next().getKey());
}
}
}
|
apache-2.0
|
rwitzel/streamflyer
|
streamflyer-core/src/main/java/com/github/rwitzel/streamflyer/regex/addons/util/EmbeddedFlagUtil.java
|
2915
|
/**
* Copyright (C) 2011 [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.rwitzel.streamflyer.regex.addons.util;
import java.util.regex.Pattern;
/**
* This class supports the creation of regular expressions with embedded flag expressions.
*
* @author rwoo
* @since 1.1.0
*/
public class EmbeddedFlagUtil {
/**
* Creates a regular expression with an embedded flag expression.
* <p>
* Supports all flags of JDK7 {@link java.util.regex.Pattern}, i.e. the following flags:
* <ul>
* <li>{@link Pattern#CASE_INSENSITIVE}
* <li>{@link Pattern#UNIX_LINES}
* <li>{@link Pattern#MULTILINE}
* <li>{@link Pattern#DOTALL}
* <li>{@link Pattern#UNICODE_CASE}
* <li>{@link Pattern#COMMENTS}
* </ul>
* <p>
* EXAMPLE:
* <ul>
* <li>For <code>("abc", Pattern.CASE_INSENSITIVE ^ Pattern.MULTILINE)</code> the method returns <code>
* "(?im:abc)"</code>.</li>
* <li>For <code>("abc", 0)</code> the method returns <code>"abc"</code>.</li>
* </ul>
*
* @param regex
* @param flags
* @return Returns the given regex enriched with an embedded flag expression that represents the given flags. If
* there is no flag given, the returned regex is equal to the given regex.
*/
public String embedFlags(String regex, int flags) {
if (flags == 0) {
return regex;
} else {
return "(?" + mapFlagsToEmbeddedFlags(flags) + ":" + regex + ")";
}
}
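    // Illustrative usage sketch (not part of the original class; the variable
    // names are just examples): the enriched pattern compiles like any other.
    //
    //   String regex = new EmbeddedFlagUtil().embedFlags("abc",
    //           Pattern.CASE_INSENSITIVE | Pattern.MULTILINE);
    //   // regex is "(?im:abc)", so Pattern.compile(regex).matcher("ABC").matches() is true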
/**
* See {@link #embedFlags(String, int)}.
*/
protected String mapFlagsToEmbeddedFlags(int flags) {
String flagsAsString = "";
if ((flags & Pattern.CASE_INSENSITIVE) != 0) {
flagsAsString += "i";
}
if ((flags & Pattern.UNIX_LINES) != 0) {
flagsAsString += "d";
}
if ((flags & Pattern.MULTILINE) != 0) {
flagsAsString += "m";
}
if ((flags & Pattern.DOTALL) != 0) {
flagsAsString += "s";
}
if ((flags & Pattern.UNICODE_CASE) != 0) {
flagsAsString += "u";
}
if ((flags & Pattern.COMMENTS) != 0) {
flagsAsString += "x";
}
return flagsAsString;
}
}
|
apache-2.0
|
kubernetes/gengo
|
namer/namer_test.go
|
2994
|
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package namer
import (
"reflect"
"testing"
"k8s.io/gengo/types"
)
func TestNameStrategy(t *testing.T) {
u := types.Universe{}
// Add some types.
base := u.Type(types.Name{Package: "foo/bar", Name: "Baz"})
base.Kind = types.Struct
tmp := u.Type(types.Name{Package: "", Name: "[]bar.Baz"})
tmp.Kind = types.Slice
tmp.Elem = base
tmp = u.Type(types.Name{Package: "", Name: "map[string]bar.Baz"})
tmp.Kind = types.Map
tmp.Key = types.String
tmp.Elem = base
tmp = u.Type(types.Name{Package: "foo/other", Name: "Baz"})
tmp.Kind = types.Struct
tmp.Members = []types.Member{{
Embedded: true,
Type: base,
}}
tmp = u.Type(types.Name{Package: "", Name: "chan Baz"})
tmp.Kind = types.Chan
tmp.Elem = base
tmp = u.Type(types.Name{Package: "", Name: "[4]Baz"})
tmp.Kind = types.Array
tmp.Elem = base
tmp.Len = 4
u.Type(types.Name{Package: "", Name: "string"})
o := Orderer{NewPublicNamer(0)}
order := o.OrderUniverse(u)
orderedNames := make([]string, len(order))
for i, t := range order {
orderedNames[i] = o.Name(t)
}
expect := []string{"Array4Baz", "Baz", "Baz", "ChanBaz", "MapStringToBaz", "SliceBaz", "String"}
if e, a := expect, orderedNames; !reflect.DeepEqual(e, a) {
t.Errorf("Wanted %#v, got %#v", e, a)
}
o = Orderer{NewRawNamer("my/package", nil)}
order = o.OrderUniverse(u)
orderedNames = make([]string, len(order))
for i, t := range order {
orderedNames[i] = o.Name(t)
}
expect = []string{"[4]bar.Baz", "[]bar.Baz", "bar.Baz", "chan bar.Baz", "map[string]bar.Baz", "other.Baz", "string"}
if e, a := expect, orderedNames; !reflect.DeepEqual(e, a) {
t.Errorf("Wanted %#v, got %#v", e, a)
}
o = Orderer{NewRawNamer("foo/bar", nil)}
order = o.OrderUniverse(u)
orderedNames = make([]string, len(order))
for i, t := range order {
orderedNames[i] = o.Name(t)
}
expect = []string{"Baz", "[4]Baz", "[]Baz", "chan Baz", "map[string]Baz", "other.Baz", "string"}
if e, a := expect, orderedNames; !reflect.DeepEqual(e, a) {
t.Errorf("Wanted %#v, got %#v", e, a)
}
o = Orderer{NewPublicNamer(1)}
order = o.OrderUniverse(u)
orderedNames = make([]string, len(order))
for i, t := range order {
orderedNames[i] = o.Name(t)
}
expect = []string{"Array4BarBaz", "BarBaz", "ChanBarBaz", "MapStringToBarBaz", "OtherBaz", "SliceBarBaz", "String"}
if e, a := expect, orderedNames; !reflect.DeepEqual(e, a) {
t.Errorf("Wanted %#v, got %#v", e, a)
}
}
|
apache-2.0
|
rog2/appinhouse
|
server/controllers/createapp.go
|
1269
|
package controllers
import (
. "appinhouse/server/constants"
"appinhouse/server/models"
"github.com/astaxie/beego"
)
type CreateAppController struct {
BaseController
}
func (c *CreateAppController) CreateApp() {
dto := NewSuccessResponseDto()
app := c.Ctx.Input.Param(":app")
desc := c.GetString("description")
alias := c.GetString("alias")
if app == "" || len(app) > App_Name_Len || desc == "" {
beego.Info("AddApp param name error !name:", app, "desc:", desc)
c.setError4Dto(ErrorParam, dto)
return
}
has, err := models.AppDao.Exist(app)
if err != nil {
beego.Info("AddApp Exist app error !name:", app, "error:", err.Error())
c.setError4Dto(ErrorParam, dto)
return
}
if !has {
appinfo := new(models.AppInfo)
appinfo.App = app
appinfo.Description = desc
appinfo.Alias = alias
err = models.AppDao.Save(appinfo)
if err != nil {
beego.Info("AddApp save app error !name:", app, "error:", err.Error())
c.setError4Dto(err, dto)
return
}
err = models.AppListDao.Save(app)
if err != nil {
beego.Info("AddApp save applist error !name:", app, "error:", err.Error())
c.setError4Dto(err, dto)
return
}
} else {
c.setError4Dto(ErrorAppExistError, dto)
return
}
c.Data["json"] = dto
c.ServeJSON()
}
|
apache-2.0
|
PRImA-Research-Lab/semantic-labelling
|
doc/org/primaresearch/clc/phd/workflow/data/class-use/DataCollectionImpl.html
|
7066
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_11) on Mon Jan 26 08:49:13 GMT 2015 -->
<title>Uses of Class org.primaresearch.clc.phd.workflow.data.DataCollectionImpl</title>
<meta name="date" content="2015-01-26">
<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.primaresearch.clc.phd.workflow.data.DataCollectionImpl";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../org/primaresearch/clc/phd/workflow/data/DataCollectionImpl.html" title="class in org.primaresearch.clc.phd.workflow.data">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?org/primaresearch/clc/phd/workflow/data/class-use/DataCollectionImpl.html" target="_top">Frames</a></li>
<li><a href="DataCollectionImpl.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.primaresearch.clc.phd.workflow.data.DataCollectionImpl" class="title">Uses of Class<br>org.primaresearch.clc.phd.workflow.data.DataCollectionImpl</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../../org/primaresearch/clc/phd/workflow/data/DataCollectionImpl.html" title="class in org.primaresearch.clc.phd.workflow.data">DataCollectionImpl</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.primaresearch.clc.phd.workflow.data">org.primaresearch.clc.phd.workflow.data</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.primaresearch.clc.phd.workflow.data">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../org/primaresearch/clc/phd/workflow/data/DataCollectionImpl.html" title="class in org.primaresearch.clc.phd.workflow.data">DataCollectionImpl</a> in <a href="../../../../../../../org/primaresearch/clc/phd/workflow/data/package-summary.html">org.primaresearch.clc.phd.workflow.data</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing constructors, and an explanation">
<caption><span>Constructors in <a href="../../../../../../../org/primaresearch/clc/phd/workflow/data/package-summary.html">org.primaresearch.clc.phd.workflow.data</a> with parameters of type <a href="../../../../../../../org/primaresearch/clc/phd/workflow/data/DataCollectionImpl.html" title="class in org.primaresearch.clc.phd.workflow.data">DataCollectionImpl</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colLast"><code><span class="memberNameLink"><a href="../../../../../../../org/primaresearch/clc/phd/workflow/data/DataCollectionImpl.html#DataCollectionImpl-org.primaresearch.clc.phd.workflow.data.DataCollectionImpl-">DataCollectionImpl</a></span>(<a href="../../../../../../../org/primaresearch/clc/phd/workflow/data/DataCollectionImpl.html" title="class in org.primaresearch.clc.phd.workflow.data">DataCollectionImpl</a> other)</code>
<div class="block">Copy constructor</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../org/primaresearch/clc/phd/workflow/data/DataCollectionImpl.html" title="class in org.primaresearch.clc.phd.workflow.data">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-files/index-1.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?org/primaresearch/clc/phd/workflow/data/class-use/DataCollectionImpl.html" target="_top">Frames</a></li>
<li><a href="DataCollectionImpl.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
apache-2.0
|
pjq/pushup
|
PushUp/src/main/java/me/pjq/pushup/utils/ScreenshotUtils.java
|
2814
|
package me.pjq.pushup.utils;
/**
* Created by pjq on 11/9/13.
*/
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Rect;
import android.view.View;
import me.pjq.pushup.LocalPathResolver;
public class ScreenshotUtils {
    /**
     * Takes a screenshot of the given activity's window, excluding the status bar.
     *
     * @param pActivity the activity whose decor view is captured
     * @return the captured bitmap
     */
public static Bitmap takeScreenShot(Activity pActivity) {
Bitmap bitmap = null;
View view = pActivity.getWindow().getDecorView();
view.setDrawingCacheEnabled(true);
view.buildDrawingCache();
bitmap = view.getDrawingCache();
        // Measure the status bar height so it can be cropped out of the capture.
        Rect frame = new Rect();
        view.getWindowVisibleDisplayFrame(frame);
        int statusHeight = frame.top;
        int width = pActivity.getWindowManager().getDefaultDisplay().getWidth();
        int height = pActivity.getWindowManager().getDefaultDisplay().getHeight();
        // Crop the status bar off the top of the decor view drawing cache.
        bitmap = Bitmap.createBitmap(bitmap, 0, statusHeight, width, height - statusHeight);
view.setDrawingCacheEnabled(false);
return bitmap;
}
    /**
     * Saves the given bitmap to the given path as a PNG file.
     *
     * @param pBitmap the bitmap to save
     * @param strName the target file path
     * @return true if the file was written successfully, false otherwise
     */
private static boolean savePic(Bitmap pBitmap, String strName) {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(strName);
if (null != fos) {
pBitmap.compress(Bitmap.CompressFormat.PNG, 90, fos);
fos.flush();
fos.close();
return true;
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
public static String getshotFilePath() {
String imagePath = LocalPathResolver.getCachePath("images");
File file = new File(imagePath);
if (!file.exists()) {
file.mkdirs();
}
return imagePath + System.currentTimeMillis() + ".png";
}
public static String getshotFilePathByDay() {
String imagePath = LocalPathResolver.getCachePath("images");
File file = new File(imagePath);
if (!file.exists()) {
file.mkdirs();
}
return imagePath + Utils.time2DateKey("" + System.currentTimeMillis()) + ".png";
}
    /**
     * Takes a screenshot of the given activity and saves it to the given file path.
     *
     * @param pActivity the activity to capture
     * @param filePath  the target file path
     * @return true if the screenshot was saved successfully
     */
public static boolean shotBitmap(Activity pActivity, String filePath) {
return ScreenshotUtils.savePic(takeScreenShot(pActivity), filePath);
}
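    // Illustrative usage sketch (not part of the original class): capture the
    // current activity into a timestamped PNG under the image cache directory.
    //
    //   boolean saved = ScreenshotUtils.shotBitmap(activity, ScreenshotUtils.getshotFilePath());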
public static Bitmap shotBitmap2(Activity pActivity, String filePath) {
Bitmap bitmap = takeScreenShot(pActivity);
ScreenshotUtils.savePic(bitmap, filePath);
return bitmap;
}
}
|
apache-2.0
|
GB-OL-168/ChinaMobileMS
|
src/main/webapp/assets/css/company-query-furniture.css
|
326
|
@CHARSET "UTF-8";
.officeFurnitureInfoId{
width:50px;
}
.furnitureType{
width:100px;
}
.brand{
width:150px;
}
.specification{
	width:150px;
}
.branchName{
width:200px;
}
.buildingName{
width:150px;
}
.floor{
width:100px;
}
.officeFurnitureInhouseIdfoId{
width:100px;
}
.officeFurnitureIncountfoId{
width:100px;
}
|
apache-2.0
|
hirohanin/pig7hadoop21
|
docs/api/org/apache/hadoop/zebra/io/TableInserter.html
|
11132
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_20) on Wed May 05 11:17:07 PDT 2010 -->
<TITLE>
TableInserter (Pig 0.7.0 API)
</TITLE>
<META NAME="date" CONTENT="2010-05-05">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="TableInserter (Pig 0.7.0 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/TableInserter.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../org/apache/hadoop/zebra/io/KeyDistribution.html" title="class in org.apache.hadoop.zebra.io"><B>PREV CLASS</B></A>
<A HREF="../../../../../org/apache/hadoop/zebra/io/TableScanner.html" title="interface in org.apache.hadoop.zebra.io"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/zebra/io/TableInserter.html" target="_top"><B>FRAMES</B></A>
<A HREF="TableInserter.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | FIELD | CONSTR | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | CONSTR | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
org.apache.hadoop.zebra.io</FONT>
<BR>
Interface TableInserter</H2>
<DL>
<DT><B>All Superinterfaces:</B> <DD><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</A></DD>
</DL>
<HR>
<DL>
<DT><PRE>public interface <B>TableInserter</B><DT>extends <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</A></DL>
</PRE>
<P>
Inserter interface allows application to to insert a number of rows into
table.
<P>
<P>
<HR>
<P>
<!-- ========== METHOD SUMMARY =========== -->
<A NAME="method_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Method Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../org/apache/hadoop/zebra/schema/Schema.html" title="class in org.apache.hadoop.zebra.schema">Schema</A></CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/zebra/io/TableInserter.html#getSchema()">getSchema</A></B>()</CODE>
<BR>
Get the schema of the underlying table we are writing to.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../../../org/apache/hadoop/zebra/io/TableInserter.html#insert(org.apache.hadoop.io.BytesWritable, org.apache.pig.data.Tuple)">insert</A></B>(org.apache.hadoop.io.BytesWritable key,
<A HREF="../../../../../org/apache/pig/data/Tuple.html" title="interface in org.apache.pig.data">Tuple</A> row)</CODE>
<BR>
Insert a new row into the table.</TD>
</TR>
</TABLE>
<A NAME="methods_inherited_from_class_java.io.Closeable"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#EEEEFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left"><B>Methods inherited from interface java.io.<A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/io/Closeable.html?is-external=true" title="class or interface in java.io">Closeable</A></B></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/io/Closeable.html?is-external=true#close()" title="class or interface in java.io">close</A></CODE></TD>
</TR>
</TABLE>
<P>
<!-- ============ METHOD DETAIL ========== -->
<A NAME="method_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Method Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="insert(org.apache.hadoop.io.BytesWritable, org.apache.pig.data.Tuple)"><!-- --></A><H3>
insert</H3>
<PRE>
void <B>insert</B>(org.apache.hadoop.io.BytesWritable key,
<A HREF="../../../../../org/apache/pig/data/Tuple.html" title="interface in org.apache.pig.data">Tuple</A> row)
throws <A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</A></PRE>
<DL>
<DD>Insert a new row into the table.
<P>
<DD><DL>
</DL>
</DD>
<DD><DL>
<DT><B>Parameters:</B><DD><CODE>key</CODE> - The row key.<DD><CODE>row</CODE> - The row.
<DT><B>Throws:</B>
<DD><CODE><A HREF="http://java.sun.com/j2se/1.5.0/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</A></CODE></DL>
</DD>
</DL>
<HR>
<A NAME="getSchema()"><!-- --></A><H3>
getSchema</H3>
<PRE>
<A HREF="../../../../../org/apache/hadoop/zebra/schema/Schema.html" title="class in org.apache.hadoop.zebra.schema">Schema</A> <B>getSchema</B>()</PRE>
<DL>
<DD>Get the schema of the underlying table we are writing to.
<P>
<DD><DL>
</DL>
</DD>
<DD><DL>
<DT><B>Returns:</B><DD>The schema of the underlying table.</DL>
</DD>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/TableInserter.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../org/apache/hadoop/zebra/io/KeyDistribution.html" title="class in org.apache.hadoop.zebra.io"><B>PREV CLASS</B></A>
<A HREF="../../../../../org/apache/hadoop/zebra/io/TableScanner.html" title="interface in org.apache.hadoop.zebra.io"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/zebra/io/TableInserter.html" target="_top"><B>FRAMES</B></A>
<A HREF="TableInserter.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | FIELD | CONSTR | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | CONSTR | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © ${year} The Apache Software Foundation
</BODY>
</HTML>
|
apache-2.0
|
mdoering/backbone
|
life/Fungi/Basidiomycota/Exobasidiomycetes/Entylomatales/Entylomataceae/Entyloma/Entyloma leontices/README.md
|
239
|
# Entyloma leontices Savul., 1931 SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Annls mycol. 29: 393 (1931)
#### Original name
Entyloma leontices Savul., 1931
### Remarks
null
|
apache-2.0
|
veelion/spf13-vim
|
README.markdown
|
23600
|
# spf13-vim : Steve Francia's Vim Distribution
__ _ _____ _
___ _ __ / _/ |___ / __ __(_)_ __ ___
/ __| '_ \| |_| | |_ \ _____\ \ / /| | '_ ` _ \
\__ \ |_) | _| |___) |_____|\ V / | | | | | | |
|___/ .__/|_| |_|____/ \_/ |_|_| |_| |_|
|_|
spf13-vim is a distribution of vim plugins and resources for Vim, Gvim and [MacVim].
It is a good starting point for anyone intending to use VIM for development, and it runs equally well on Windows, Linux, \*nix and Mac.
The distribution is completely customisable using the `~/.vimrc.local`, `~/.vimrc.bundles.local`, and `~/.vimrc.before.local` Vim RC files.
![spf13-vim image][spf13-vim-img]
Unlike the traditional VIM plugin structure, which, similar to UNIX, throws all files into common directories and makes updating or disabling plugins a real mess, spf13-vim 3 uses the [Vundle] plugin management system to keep a well organized vim directory (similar to Mac's app folders). Vundle also ensures that the latest versions of your plugins are installed and makes it easy to keep them up to date.
Great care has been taken to ensure that each plugin plays nicely with others, and optional configuration has been provided for what we believe is the most efficient use.
Lastly (and perhaps most importantly), it is completely cross platform. It works well on Windows, Linux and OSX without any modifications or additional configuration. If you are using [MacVim] or Gvim, additional features are enabled. So regardless of your environment, just clone and run.
# Installation
## Requirements
To make all the plugins work, specifically [neocomplete](https://github.com/Shougo/neocomplete.vim), you need [vim with lua](https://github.com/Shougo/neocomplete.vim#requirements).
## Linux, \*nix, Mac OSX Installation
The easiest way to install spf13-vim is to use our [automatic installer](https://raw.github.com/veelion/spf13-vim/3.0/bootstrap.sh) by simply copying and pasting the following line into a terminal. This will install spf13-vim and backup your existing vim configuration. If you are upgrading from a prior version (before 3.0) this is also the recommended installation.
```bash
curl https://raw.github.com/veelion/spf13-vim/3.0/bootstrap.sh -L > spf13-vim.sh && sh spf13-vim.sh
```
If you have a bash-compatible shell you can run the script directly:
```bash
sh <(curl https://raw.github.com/veelion/spf13-vim/3.0/bootstrap.sh -L)
```
## Installing on Windows
On Windows and \*nix [Git] and [Curl] are required. Also, if you haven't done so already, you'll need to install [Vim].
The quickest option to install all three dependencies ([Git], [Curl], [Vim] and [spf13-vim]) is via [Chocolatey] NuGet. After installing [Chocolatey], execute the following commands on the _command prompt_:
C:\> choco install spf13-vim
_Note: The [spf13-vim package] will install Vim also!_
If you want to install [msysgit], [Curl] and [spf13-vim] individually, follow the directions below.
### Installing dependencies
#### Install [Vim]
After installing Vim you must add its directory to your PATH environment variable so that it works with the spf13 installation script.
Open Vim and run the following command; it will show the installation directory:
:echo $VIMRUNTIME
C:\Program Files (X86)\Vim\vim74
Then add that directory to your PATH environment variable. After that, try executing `vim` within the command prompt (press Win-R, type `cmd`, press Enter) and you'll see the default vim page.
#### Install [msysgit]
After installation, try running `git --version` within the _command prompt_ (press Win-R, type `cmd`, press Enter) to make sure all is good:
C:\> git --version
git version 1.7.4.msysgit.0
#### Setup [Curl]
_Instructions blatantly copied from the vundle readme_
Installing Curl on Windows is easy, as [Curl] is bundled with [msysgit]!
But before it can be used with [Vundle], you need to make `curl` run in the _command prompt_.
The easiest way is to create `curl.cmd` with [this content](https://gist.github.com/912993)
@rem Do not use "echo off" to not affect any child calls.
@setlocal
@rem Get the absolute path to the parent directory, which is assumed to be the
@rem Git installation root.
@for /F "delims=" %%I in ("%~dp0..") do @set git_install_root=%%~fI
@set PATH=%git_install_root%\bin;%git_install_root%\mingw\bin;%PATH%
@if not exist "%HOME%" @set HOME=%HOMEDRIVE%%HOMEPATH%
@if not exist "%HOME%" @set HOME=%USERPROFILE%
@curl.exe %*
And copy it to `C:\Program Files\Git\cmd\curl.cmd`, assuming [msysgit] was installed to `c:\Program Files\Git`.
To verify all is good, run:
C:\> curl --version
curl 7.21.1 (i686-pc-mingw32) libcurl/7.21.1 OpenSSL/0.9.8k zlib/1.2.3
Protocols: dict file ftp ftps http https imap imaps ldap ldaps pop3 pop3s rtsp smtp smtps telnet tftp
Features: Largefile NTLM SSL SSPI libz
#### Installing spf13-vim on Windows
The easiest way is to download and run the spf13-vim-windows-install.cmd file. Remember to run this file in **Administrator Mode** if you want the symlinks to be created successfully.
## Updating to the latest version
The simplest (and safest) way to update is to simply rerun the installer. It will completely and non-destructively upgrade to the latest version.
```bash
curl https://j.mp/spf13-vim3 -L -o - | sh
```
Alternatively, you can manually perform the following steps. If anything has changed with the structure of the configuration, you will need to create the appropriate symlinks.
```bash
cd $HOME/to/spf13-vim/
git pull
vim +BundleInstall! +BundleClean +q
```
### Fork me on GitHub
I'm always happy to take pull requests from others. A good number of people are already [contributors] to [spf13-vim]. Go ahead and fork me.
# A highly optimized .vimrc config file
![spf13-vimrc image][spf13-vimrc-img]
The .vimrc file is suited to programming. It is extremely well organized and folds in sections.
Each section is labeled and each option is commented.
It fixes many of the inconveniences of vanilla vim, including:
* A single config can be used across Windows, Mac and linux
* Eliminates swap and backup files from littering directories, preferring to store in a central location.
* Fixes common typos like :W, :Q, etc.
* Sets up a solid set of settings for Formatting (change to meet your needs)
* Sets up the interface to take advantage of vim's features, including
* omnicomplete
* line numbers
* syntax highlighting
* A better ruler & status line
* & more
* Configuring included plugins
## Customization
Create `~/.vimrc.local` and `~/.gvimrc.local` for any local
customizations.
For example, to override the default color schemes:
```bash
echo colorscheme ir_black >> ~/.vimrc.local
```
### Before File
Create a `~/.vimrc.before.local` file to define any customizations
that get loaded *before* the spf13-vim `.vimrc`.
For example, to prevent autocd into a file directory:
```bash
echo let g:spf13_no_autochdir = 1 >> ~/.vimrc.before.local
```
For a list of available spf13-vim specific customization options, look at the `~/.vimrc.before` file.
### Fork Customization
There is an additional tier of customization available to those who want to maintain a
fork of spf13-vim specialized for a particular group. These users can create `.vimrc.fork`
and `.vimrc.bundles.fork` files in the root of their fork. The load order for the configuration is:
1. `.vimrc.before` - spf13-vim before configuration
2. `.vimrc.before.fork` - fork before configuration
3. `.vimrc.before.local` - before user configuration
4. `.vimrc.bundles` - spf13-vim bundle configuration
5. `.vimrc.bundles.fork` - fork bundle configuration
6. `.vimrc.bundles.local` - local user bundle configuration
7. `.vimrc` - spf13-vim vim configuration
8. `.vimrc.fork` - fork vim configuration
9. `.vimrc.local` - local user configuration
See `.vimrc.bundles` for specifics on what options can be set to override bundle configuration. See `.vimrc.before` for specifics
on what options can be overridden. Most vim configuration options should be set in your `.vimrc.fork` file, while bundle configuration
needs to be set in your `.vimrc.bundles.fork` file.
You can specify the default bundles for your fork using `.vimrc.before.fork` file. Here is how to create an example `.vimrc.before.fork` file
in a fork repo for the default bundles.
```bash
echo let g:spf13_bundle_groups=[\'general\', \'programming\', \'misc\', \'youcompleteme\'] >> .vimrc.before.fork
```
Once you have this file in your repo, only the bundles you specified will be installed during the first installation of your fork.
You may also want to update your `README.markdown` file so that the `bootstrap.sh` link points to your repository and your `bootstrap.sh`
file to pull down your fork.
For an example of a fork of spf13-vim that provides customization in this manner see [taxilian's fork](https://github.com/taxilian/spf13-vim).
### Easily Editing Your Configuration
`<Leader>ev` opens a new tab containing the .vimrc configuration files listed above. This makes it easier to get an overview of your
configuration and make customizations.
`<Leader>sv` sources the .vimrc file, instantly applying your customizations to the currently running vim instance.
These two mappings can themselves be customized by setting the following in .vimrc.before.local:
```bash
let g:spf13_edit_config_mapping='<Leader>ev'
let g:spf13_apply_config_mapping='<Leader>sv'
```
# Plugins
spf13-vim contains a curated set of popular vim plugins, colors, snippets and syntaxes. Great care has been made to ensure that these plugins play well together and have optimal configuration.
## Adding new plugins
Create `~/.vimrc.bundles.local` for any additional bundles.
To add a new bundle, just add one line for each bundle you want to install. The line should start with the word "Bundle" followed by a string of either the vim.org project name or the githubusername/githubprojectname. For example, the github project [spf13/vim-colors](https://github.com/spf13/vim-colors) can be added with the following command
```bash
echo Bundle \'spf13/vim-colors\' >> ~/.vimrc.bundles.local
```
Once new plugins are added, they have to be installed.
```bash
vim +BundleInstall! +BundleClean +q
```
## Removing (disabling) an included plugin
Create `~/.vimrc.bundles.local` if it doesn't already exist.
Add an UnBundle command to this file. It takes the same input as the Bundle line, so simply copy the line you want to disable and add 'Un' to the beginning.
For example, to disable the 'AutoClose' and 'scrooloose/syntastic' plugins:
```bash
echo UnBundle \'AutoClose\' >> ~/.vimrc.bundles.local
echo UnBundle \'scrooloose/syntastic\' >> ~/.vimrc.bundles.local
```
**Remember to run ':BundleClean!' after this to remove the existing directories**
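If you prefer to run that step non-interactively, the same command-line style used above for installing bundles should work here as well (a sketch):
```bash
vim +BundleClean! +q
```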
Here are a few of the plugins:
## [Undotree]
If you undo changes and then make a new change, in most editors the changes you undid are gone forever, as their undo-history is a simple list.
Since version 7.0 vim uses an undo-tree instead. If you make a new change after undoing changes, a new branch is created in that tree.
Combined with persistent undo, this is nearly as flexible and safe as git ;-)
Undotree makes that feature more accessible by creating a visual representation of said undo-tree.
**QuickStart** Launch using `<Leader>u`.
## [NERDTree]
NERDTree is a file explorer plugin that provides "project drawer"
functionality to your vim editing. You can learn more about it with
`:help NERDTree`.
**QuickStart** Launch using `<Leader>e`.
**Customizations**:
* Use `<C-E>` to toggle NERDTree
* Use `<leader>e` or `<leader>nt` to load NERDTreeFind which opens NERDTree where the current file is located.
* Hide clutter ('\.pyc', '\.git', '\.hg', '\.svn', '\.bzr')
* Treat NERDTree more like a panel than a split.
## [ctrlp]
Ctrlp replaces the Command-T plugin with a 100% viml plugin. It provides an intuitive and fast mechanism to load files from the file system (with regex and fuzzy find), from open buffers, and from recently used files.
**QuickStart** Launch using `<c-p>`.
## [Surround]
This plugin is a tool for dealing with pairs of "surroundings." Examples
of surroundings include parentheses, quotes, and HTML tags. They are
closely related to what Vim refers to as text-objects. Provided
are mappings to allow for removing, changing, and adding surroundings.
Details follow on the exact semantics, but first, consider the following
examples. An asterisk (*) is used to denote the cursor position.
Old text Command New text ~
"Hello *world!" ds" Hello world!
[123+4*56]/2 cs]) (123+456)/2
"Look ma, I'm *HTML!" cs"<q> <q>Look ma, I'm HTML!</q>
if *x>3 { ysW( if ( x>3 ) {
my $str = *whee!; vllllS' my $str = 'whee!';
For instance, if the cursor was inside `"foo bar"`, you could type
`cs"'` to convert the text to `'foo bar'`.
There's a lot more, check it out at `:help surround`
## [NERDCommenter]
NERDCommenter allows you to wrangle your code comments, regardless of
filetype. See `:help NERDCommenter` or check out my post on [NERDCommenter](http://spf13.com/post/vim-plugins-nerd-commenter).
**QuickStart** Toggle comments using `<Leader>c<space>` in Visual or Normal mode.
## [neocomplete]
Neocomplete is an amazing autocomplete plugin with additional support for snippets. It can complete simultaneously from the dictionary, buffer, omnicomplete and snippets. This is the one true plugin that brings Vim autocomplete on par with the best editors.
**QuickStart** Just start typing; it will autocomplete where possible.
**Customizations**:
* Automatically present the autocomplete menu
* Support tab and enter for autocomplete
* `<C-k>` for completing snippets using [Neosnippet](https://github.com/Shougo/neosnippet.vim).
![neocomplete image][autocomplete-img]
## [YouCompleteMe]
YouCompleteMe is another amazing completion engine. It is slightly more involved to set up, as it contains a binary component that the user needs to compile before it will work. As a result of this, however, it is very fast.
To enable YouCompleteMe, add `youcompleteme` to your list of groups by overriding it in your `.vimrc.before.local` like so: `let g:spf13_bundle_groups=['general', 'programming', 'misc', 'scala', 'youcompleteme']`. This is just an example; remember to choose the other groups you want here.
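For example, assuming you want the default groups plus YouCompleteMe, one way to persist this override (mirroring the echo pattern used elsewhere in this README) is:
```bash
echo let g:spf13_bundle_groups=[\'general\', \'programming\', \'misc\', \'youcompleteme\'] >> ~/.vimrc.before.local
```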
Once you have done this you will need to get Vundle to grab the latest code from git. You can do this by calling `:BundleInstall!`. You should see YouCompleteMe in the list.
You will now have the code in your bundles directory and can proceed to compile the core. Change to the directory it has been downloaded to. If you have a vanilla install then `cd ~/.spf13-vim-3/.vim/bundle/YouCompleteMe/` should do the trick. You should see a file in this directory called install.sh. There are a few options to consider before running the installer:
* Do you want clang support (if you don't know what this is then you likely don't need it)?
* Do you want to link against a local libclang or have the installer download the latest for you?
* Do you want support for c# via the omnisharp server?
The plugin is well documented on the site linked above. Be sure to give that a read and make sure you understand the options you require.
For Java users wanting to use eclim, be sure to add `let g:EclimCompletionMethod = 'omnifunc'` to your `.vimrc.local`.
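A minimal way to append that setting from a shell, following the same echo pattern as the other examples (adjust if you prefer to edit `.vimrc.local` by hand):
```bash
echo let g:EclimCompletionMethod = \'omnifunc\' >> ~/.vimrc.local
```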
## [Syntastic]
Syntastic is a syntax checking plugin that runs buffers through external syntax
checkers as they are saved and opened. If syntax errors are detected, the user
is notified and is happy because they didn't have to compile their code or
execute their script to find them.
## [AutoClose]
AutoClose does what you expect. It's simple: if you open a bracket, paren, brace, quote,
etc., it automatically closes it. It handles curlies correctly and doesn't get in the
way of double curlies for things like jinja and twig.
## [Fugitive]
Fugitive adds pervasive git support to git directories in vim. For more
information, use `:help fugitive`
Use `:Gstatus` to view `git status` and type `-` on any file to stage or
unstage it. Type `p` on a file to enter `git add -p` and stage specific
hunks in the file.
Use `:Gdiff` on an open file to see what changes have been made to that
file
**QuickStart** `<leader>gs` to bring up git status
**Customizations**:
* `<leader>gs` :Gstatus<CR>
* `<leader>gd` :Gdiff<CR>
* `<leader>gc` :Gcommit<CR>
* `<leader>gb` :Gblame<CR>
* `<leader>gl` :Glog<CR>
* `<leader>gp` :Git push<CR>
* `<leader>gw` :Gwrite<CR>
* :Git ___ will pass anything along to git.
![fugitive image][fugitive-img]
## [PIV]
The most feature-complete and up-to-date PHP integration for Vim, with proper support for PHP 5.3+ including the latest syntax, functions, better fold support, etc.
PIV provides:
* PHP 5.3 support
* Auto generation of PHP Doc (,pd on (function, variable, class) definition line)
* Autocomplete of classes, functions, variables, constants and language keywords
* Better indenting
* Full PHP documentation manual (hit K on any function for full docs)
![php vim integration image][phpmanual-img]
## [Ack.vim]
Ack.vim uses ack to search inside the current directory for a pattern.
You can learn more about it with `:help Ack`
**QuickStart** :Ack
## [Tabularize]
Tabularize lets you align statements on their equal signs and other characters
**Customizations**:
* `<Leader>a= :Tabularize /=<CR>`
* `<Leader>a: :Tabularize /:<CR>`
* `<Leader>a:: :Tabularize /:\zs<CR>`
* `<Leader>a, :Tabularize /,<CR>`
* `<Leader>a<Bar> :Tabularize /<Bar><CR>`
## [Tagbar]
spf13-vim includes the Tagbar plugin. This plugin requires exuberant-ctags and will automatically generate tags for your open files. It also provides a panel to navigate easily via tags
**QuickStart** `CTRL-]` while the cursor is on a keyword (such as a function name) to jump to its definition.
**Customizations**: spf13-vim binds `<Leader>tt` to toggle the tagbar panel
![tagbar image][tagbar-img]
**Note**: For full language support, run `brew install ctags` to install
exuberant-ctags.
**Tip**: Check out `:help ctags` for information about VIM's built-in
ctag support. Tag navigation creates a stack which can be traversed via
`Ctrl-]` (to find the source of a token) and `Ctrl-T` (to jump back up
one level).
## [EasyMotion]
EasyMotion provides an interactive way to use motions in Vim.
It quickly maps each possible jump destination to a key, allowing very fast and
straightforward movement.
**QuickStart** EasyMotion is triggered using the normal movements, but prefixing them with `<leader><leader>`
For example this screen shot demonstrates pressing `,,w`
![easymotion image][easymotion-img]
## [Airline]
Airline provides a lightweight themable statusline with no external dependencies. By default this configuration uses the symbols `‹` and `›` as separators for different statusline sections but can be configured to use the same symbols as [Powerline]. An example first without and then with powerline symbols is shown here:
![airline image][airline-img]
To enable powerline symbols, first install one of the [Powerline Fonts] or patch your favorite font using the provided instructions. Configure your terminal, MacVim, or Gvim to use the desired font. Finally, add `let g:airline_powerline_fonts=1` to your `.vimrc.before.local`.
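For example, one way to persist that setting from a shell, using the same echo approach as the other customization examples (a sketch):
```bash
echo let g:airline_powerline_fonts=1 >> ~/.vimrc.before.local
```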
## Additional Syntaxes
spf13-vim ships with a few additional syntaxes:
* Markdown (bound to \*.markdown, \*.md, and \*.mk)
* Twig
* Git commits (set your `EDITOR` to `mvim -f`)
## Amazing Colors
spf13-vim includes [solarized] and [spf13 vim color pack](https://github.com/spf13/vim-colors/):
* ir_black
* molokai
* peaksea
Use `:color molokai` to switch to a color scheme.
Terminal Vim users will benefit from solarizing their terminal emulators and setting solarized support to 16 colors:
let g:solarized_termcolors=16
color solarized
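These two lines belong in your vim configuration; for example, you could append them to `~/.vimrc.local` with the echo pattern used earlier (a sketch assuming the default spf13 layout):
```bash
echo let g:solarized_termcolors=16 >> ~/.vimrc.local
echo color solarized >> ~/.vimrc.local
```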
Terminal emulator colorschemes:
* http://ethanschoonover.com/solarized (iTerm2, Terminal.app)
* https://github.com/phiggins/konsole-colors-solarized (KDE Konsole)
* https://github.com/sigurdga/gnome-terminal-colors-solarized (Gnome Terminal)
## Snippets
It also contains a very complete set of [snippets](https://github.com/spf13/snipmate-snippets) for use with snipmate or [neocomplete].
# Intro to VIM
Here are some tips if you've never used VIM before:
## Tutorials
* Type `vimtutor` into a shell to go through a brief interactive
tutorial inside VIM.
* Read the slides at [VIM: Walking Without Crutches](https://walking-without-crutches.heroku.com/#1).
## Modes
* VIM has two (common) modes:
* insert mode - stuff you type is added to the buffer
* normal mode - keys you hit are interpreted as commands
* To enter insert mode, hit `i`
* To exit insert mode, hit `<ESC>`
## Useful commands
* Use `:q` to exit vim
* Certain commands are prefixed with a `<Leader>` key, which by default maps to `\`.
Spf13-vim uses `let mapleader = ","` to change this to `,` which is in a consistent and
convenient location.
* Keyboard [cheat sheet](http://www.viemu.com/vi-vim-cheat-sheet.gif).
[](https://github.com/igrigorik/ga-beacon)
[](https://bitdeli.com/free "Bitdeli Badge")
[Git]:http://git-scm.com
[Curl]:http://curl.haxx.se
[Vim]:http://www.vim.org/download.php#pc
[msysgit]:http://msysgit.github.io
[Chocolatey]: http://chocolatey.org/
[spf13-vim package]: https://chocolatey.org/packages/spf13-vim
[MacVim]:http://code.google.com/p/macvim/
[spf13-vim]:https://github.com/spf13/spf13-vim
[contributors]:https://github.com/spf13/spf13-vim/contributors
[Vundle]:https://github.com/gmarik/vundle
[PIV]:https://github.com/spf13/PIV
[NERDCommenter]:https://github.com/scrooloose/nerdcommenter
[Undotree]:https://github.com/mbbill/undotree
[NERDTree]:https://github.com/scrooloose/nerdtree
[ctrlp]:https://github.com/kien/ctrlp.vim
[solarized]:https://github.com/altercation/vim-colors-solarized
[neocomplete]:https://github.com/shougo/neocomplete
[Fugitive]:https://github.com/tpope/vim-fugitive
[Surround]:https://github.com/tpope/vim-surround
[Tagbar]:https://github.com/majutsushi/tagbar
[Syntastic]:https://github.com/scrooloose/syntastic
[vim-easymotion]:https://github.com/Lokaltog/vim-easymotion
[YouCompleteMe]:https://github.com/Valloric/YouCompleteMe
[Matchit]:http://www.vim.org/scripts/script.php?script_id=39
[Tabularize]:https://github.com/godlygeek/tabular
[EasyMotion]:https://github.com/Lokaltog/vim-easymotion
[Airline]:https://github.com/bling/vim-airline
[Powerline]:https://github.com/lokaltog/powerline
[Powerline Fonts]:https://github.com/Lokaltog/powerline-fonts
[AutoClose]:https://github.com/spf13/vim-autoclose
[Ack.vim]:https://github.com/mileszs/ack.vim
[spf13-vim-img]:https://i.imgur.com/UKToY.png
[spf13-vimrc-img]:https://i.imgur.com/kZWj1.png
[autocomplete-img]:https://i.imgur.com/90Gg7.png
[tagbar-img]:https://i.imgur.com/cjbrC.png
[fugitive-img]:https://i.imgur.com/4NrxV.png
[nerdtree-img]:https://i.imgur.com/9xIfu.png
[phpmanual-img]:https://i.imgur.com/c0GGP.png
[easymotion-img]:https://i.imgur.com/ZsrVL.png
[airline-img]:https://i.imgur.com/D4ZYADr.png
|
apache-2.0
|
jusjoken/gemstone2
|
src/main/java/Gemstone/CachingUserRecord.java
|
3416
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package Gemstone;
import java.io.File;
import java.util.ArrayList;
/**
*
* @author SBANTA
* - 04/04/2012 - updated for Gemstone
*/
public class CachingUserRecord {
private static String SName = sagex.api.Global.IsClient() ? "SageDiamondTeam" + sagex.api.Global.GetUIContextName() : "SageDiamondTeam";
public static void main(String[] args) {
Object[] stores = sagex.api.UserRecordAPI.GetAllUserRecords(SName);
for (Object curr : stores) {
String[] store = sagex.api.UserRecordAPI.GetUserRecordNames(curr);
for (String currs : store) {
System.out.println("CurrentStore=" + currs);
System.out.println("Value=" + sagex.api.UserRecordAPI.GetUserRecordData(curr, currs));
}
}
}
public static void DeleteStoredLocations() {
sagex.api.UserRecordAPI.DeleteAllUserRecords(SName);
}
public static Boolean HasStoredLocation(String ID, String Type) {
Object Record = sagex.api.UserRecordAPI.GetUserRecord(SName, ID);
if (Record == null) {
return false;
}
String Curr = sagex.api.UserRecordAPI.GetUserRecordData(Record, Type);
return Curr != null && !Curr.equals("");
}
public static Boolean HasStoredLocation(String ID) {
Object Record = sagex.api.UserRecordAPI.GetUserRecord(SName, ID);
System.out.println("Recordforfanaat=" + Record);
return Record != null;
}
public static String[] GetAllStoresForID(String ID) {
Object Record = sagex.api.UserRecordAPI.GetUserRecord(SName, ID);
String[] Stores = sagex.api.UserRecordAPI.GetUserRecordNames(Record);
return Stores;
}
public static void DeleteStoresForID(String ID) {
Object Record = sagex.api.UserRecordAPI.GetUserRecord(SName, ID);
sagex.api.UserRecordAPI.DeleteUserRecord(Record);
}
public static ArrayList<File> GetAllCacheLocationsForID(String ID) {
ArrayList<File> Cached = new ArrayList<File>();
Object Record = sagex.api.UserRecordAPI.GetUserRecord(SName, ID);
String[] Stores = sagex.api.UserRecordAPI.GetUserRecordNames(Record);
for (String curr : Stores) {
Cached.add(new File(sagex.api.UserRecordAPI.GetUserRecordData(Record, curr)));
}
return Cached;
}
public static String GetStoredLocation(String ID, String Type) {
Object Record = sagex.api.UserRecordAPI.GetUserRecord(SName, ID);
return sagex.api.UserRecordAPI.GetUserRecordData(Record, Type);
}
public static void setStoredLocation(String ID, String Type, String Location) {
sagex.api.UserRecordAPI.AddUserRecord(SName, ID);
Object Record = sagex.api.UserRecordAPI.GetUserRecord(SName, ID);
sagex.api.UserRecordAPI.SetUserRecordData(Record, Type, Location);
}
public static void deleteStoredLocation(String ID, String Type, String Location) {
Object Record = sagex.api.UserRecordAPI.GetUserRecord(SName, ID);
sagex.api.UserRecordAPI.DeleteUserRecord(Record);
}
public static String[] GetStoredFanart(String ID) {
Object Record = sagex.api.UserRecordAPI.GetUserRecord(SName, ID);
return sagex.api.UserRecordAPI.GetUserRecordNames(Record);
}
}
|
apache-2.0
|
yelhouti/springfox
|
springfox-spring-web/src/main/java/springfox/documentation/spring/web/readers/parameter/ParameterRequiredReader.java
|
4326
|
/*
*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package springfox.documentation.spring.web.readers.parameter;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.bind.annotation.ValueConstants;
import springfox.documentation.service.ResolvedMethodParameter;
import springfox.documentation.spi.DocumentationType;
import springfox.documentation.spi.service.ParameterBuilderPlugin;
import springfox.documentation.spi.service.contexts.ParameterContext;
import springfox.documentation.spring.web.DescriptionResolver;
import java.util.HashSet;
import java.util.Set;
import static com.google.common.base.Strings.*;
@Component
@Order(Ordered.HIGHEST_PRECEDENCE)
public class ParameterRequiredReader implements ParameterBuilderPlugin {
private final DescriptionResolver descriptions;
@Autowired
public ParameterRequiredReader(DescriptionResolver descriptions) {
this.descriptions = descriptions;
}
@Override
public void apply(ParameterContext context) {
ResolvedMethodParameter methodParameter = context.resolvedMethodParameter();
context.parameterBuilder().required(getAnnotatedRequired(methodParameter));
}
@Override
public boolean supports(DocumentationType delimiter) {
return true;
}
private Boolean getAnnotatedRequired(ResolvedMethodParameter methodParameter) {
Set<Boolean> requiredSet = new HashSet<Boolean>();
// when the type is Optional, the required property of @RequestParam/@RequestHeader doesn't matter,
// since the value is always a non-null Optional after conversion
boolean optional = isOptional(methodParameter);
Optional<RequestParam> requestParam = methodParameter.findAnnotation(RequestParam.class);
if (requestParam.isPresent()) {
requiredSet.add(!optional && isRequired(requestParam.get()));
}
Optional<RequestHeader> requestHeader = methodParameter.findAnnotation(RequestHeader.class);
if (requestHeader.isPresent()) {
requiredSet.add(!optional && requestHeader.get().required());
}
Optional<PathVariable> pathVariable = methodParameter.findAnnotation(PathVariable.class);
if (pathVariable.isPresent()) {
requiredSet.add(true);
}
Optional<RequestBody> requestBody = methodParameter.findAnnotation(RequestBody.class);
if (requestBody.isPresent()) {
requiredSet.add(!optional && requestBody.get().required());
}
Optional<RequestPart> requestPart = methodParameter.findAnnotation(RequestPart.class);
if (requestPart.isPresent()) {
requiredSet.add(!optional && requestPart.get().required());
}
return requiredSet.contains(true);
}
@VisibleForTesting
@SuppressWarnings("squid:S1872")
boolean isOptional(ResolvedMethodParameter methodParameter) {
return "java.util.Optional".equals(methodParameter.getParameterType().getErasedType().getName());
}
private boolean isRequired(RequestParam annotation) {
String defaultValue = descriptions.resolve(annotation.defaultValue());
boolean missingDefaultValue = ValueConstants.DEFAULT_NONE.equals(defaultValue) ||
isNullOrEmpty(defaultValue);
return annotation.required() && missingDefaultValue;
}
}
|
apache-2.0
|
sunpy1106/programmingruby
|
ext_ruby/simple/my_test.c
|
517
|
#include "ruby.h"
static ID id_push;
static VALUE t_init(VALUE self)
{
VALUE arr;
arr = rb_ary_new();
rb_iv_set(self, "@arr", arr);
return self;
}
static VALUE t_add(VALUE self, VALUE obj)
{
VALUE arr;
arr = rb_iv_get(self, "@arr");
rb_funcall(arr, id_push, 1, obj);
return arr;
}
VALUE cTest;
void Init_my_test() {
cTest = rb_define_class("MyTest", rb_cObject);
rb_define_method(cTest, "initialize", t_init, 0);
rb_define_method(cTest, "add", t_add, 1);
id_push = rb_intern("push");
}
|
apache-2.0
|
gextech/iptc-newsml
|
src/main/java/gex/newsml/nitf/DuKey.java
|
4635
|
package gex.newsml.nitf;
import lombok.ToString;
import java.util.HashMap;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import javax.xml.namespace.QName;
/**
* <p>
* Java class for anonymous complex type.
*
* <p>
* The following schema fragment specifies the expected content contained within
* this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <attGroup ref="{http://iptc.org/std/NITF/2006-10-18/}globalNITFAttributes"/>
* <attribute name="generation" type="{http://www.w3.org/2001/XMLSchema}NMTOKEN" />
* <attribute name="part" type="{http://www.w3.org/2001/XMLSchema}NMTOKEN" />
* <attribute name="version" type="{http://www.w3.org/2001/XMLSchema}NMTOKEN" />
* <attribute name="key" type="{http://www.w3.org/2001/XMLSchema}string" />
* <anyAttribute processContents='lax' namespace='##other'/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
@XmlRootElement(name = "du-key")
@ToString
public class DuKey {
@XmlAttribute(name = "generation")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "NMTOKEN")
protected String generation;
@XmlAttribute(name = "part")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "NMTOKEN")
protected String part;
@XmlAttribute(name = "version")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlSchemaType(name = "NMTOKEN")
protected String version;
@XmlAttribute(name = "key")
protected String key;
@XmlAttribute(name = "id")
@XmlJavaTypeAdapter(CollapsedStringAdapter.class)
@XmlID
@XmlSchemaType(name = "ID")
protected String id;
@XmlAnyAttribute
private Map<QName, String> otherAttributes = new HashMap<QName, String>();
/**
* Gets the value of the generation property.
*
* @return possible object is {@link String }
*
*/
public String getGeneration() {
return generation;
}
/**
* Sets the value of the generation property.
*
* @param value
* allowed object is {@link String }
*
*/
public void setGeneration(String value) {
this.generation = value;
}
/**
* Gets the value of the part property.
*
* @return possible object is {@link String }
*
*/
public String getPart() {
return part;
}
/**
* Sets the value of the part property.
*
* @param value
* allowed object is {@link String }
*
*/
public void setPart(String value) {
this.part = value;
}
/**
* Gets the value of the version property.
*
* @return possible object is {@link String }
*
*/
public String getVersion() {
return version;
}
/**
* Sets the value of the version property.
*
* @param value
* allowed object is {@link String }
*
*/
public void setVersion(String value) {
this.version = value;
}
/**
* Gets the value of the key property.
*
* @return possible object is {@link String }
*
*/
public String getKey() {
return key;
}
/**
* Sets the value of the key property.
*
* @param value
* allowed object is {@link String }
*
*/
public void setKey(String value) {
this.key = value;
}
/**
* Gets the value of the id property.
*
* @return possible object is {@link String }
*
*/
public String getId() {
return id;
}
/**
* Sets the value of the id property.
*
* @param value
* allowed object is {@link String }
*
*/
public void setId(String value) {
this.id = value;
}
/**
* Gets a map that contains attributes that aren't bound to any typed
* property on this class.
*
* <p>
* the map is keyed by the name of the attribute and the value is the string
* value of the attribute.
*
* the map returned by this method is live, and you can add new attribute by
* updating the map directly. Because of this design, there's no setter.
*
*
* @return always non-null
*/
public Map<QName, String> getOtherAttributes() {
return otherAttributes;
}
}
|
apache-2.0
|
OpenSextant/Xponents
|
Core/src/main/javadoc/org/opensextant/extractors/poli/package.html
|
1377
|
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<title></title>
</head>
<body>
<h2>Patterns Of Life (PoLi)</h2>
<p>Patterns are all around us. <br>
</p>
<p> We expanded the use cases of geographic coordinates and
date/time patterns into other common patterns that occur in daily
life and business: telephone numbers, URLs, money/currency, cyber
addresses, etc. These work best when you are trying to make
all of your entity extraction look similar to the standard
Xponents approach rendering things down to <code>TextMatch</code>
annotation objects<br>
</p>
<p>Since pattern development is required here, we strongly suggest
you start by looking at the tester TestPoLi, which has a main
program.<br>
</p>
<p><code> ant -f ./script/testing.xml
test-poli <args><br>
</code></p>
<code></code>Available patterns in this toolkit are listed at <a
href="https://github.com/OpenSextant/Xponents/blob/master/src/main/resources/poli_patterns.cfg">src/main/resources/poli_patterns.cfg</a><br>
<br>
You develop a pattern, a test case for it, and then iteratively
refine as you test -- using the TestPoLi tool to check whether your
patterns are working. <br>
<p> </p>
</body>
</html>
|
apache-2.0
|
go-swagger/go-swagger
|
generator/typeresolver_test.go
|
20309
|
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package generator
import (
"fmt"
"testing"
"github.com/go-openapi/loads"
"github.com/go-openapi/spec"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func schTypeVals() []struct{ Type, Format, Expected string } {
return []struct{ Type, Format, Expected string }{
{"boolean", "", "bool"},
{"string", "", "string"},
{"integer", "int8", "int8"},
{"integer", "int16", "int16"},
{"integer", "int32", "int32"},
{"integer", "int64", "int64"},
{"integer", "", "int64"},
{"integer", "uint8", "uint8"},
{"integer", "uint16", "uint16"},
{"integer", "uint32", "uint32"},
{"integer", "uint64", "uint64"},
{"number", "float", "float32"},
{"number", "double", "float64"},
{"number", "", "float64"},
{"string", "byte", "strfmt.Base64"},
{"string", "date", "strfmt.Date"},
{"string", "date-time", "strfmt.DateTime"},
{"string", "uri", "strfmt.URI"},
{"string", "email", "strfmt.Email"},
{"string", "hostname", "strfmt.Hostname"},
{"string", "ipv4", "strfmt.IPv4"},
{"string", "ipv6", "strfmt.IPv6"},
{"string", "mac", "strfmt.MAC"},
{"string", "uuid", "strfmt.UUID"},
{"string", "uuid3", "strfmt.UUID3"},
{"string", "uuid4", "strfmt.UUID4"},
{"string", "uuid5", "strfmt.UUID5"},
{"string", "isbn", "strfmt.ISBN"},
{"string", "isbn10", "strfmt.ISBN10"},
{"string", "isbn13", "strfmt.ISBN13"},
{"string", "creditcard", "strfmt.CreditCard"},
{"string", "ssn", "strfmt.SSN"},
{"string", "hexcolor", "strfmt.HexColor"},
{"string", "rgbcolor", "strfmt.RGBColor"},
{"string", "duration", "strfmt.Duration"},
{"string", "ObjectId", "strfmt.ObjectId"},
{"string", "password", "strfmt.Password"},
{"string", "uint8", "string"},
{"string", "uint16", "string"},
{"string", "uint32", "string"},
{"string", "uint64", "string"},
{"string", "int8", "string"},
{"string", "int16", "string"},
{"string", "int32", "string"},
{"string", "int64", "string"},
{"file", "", "io.ReadCloser"},
}
}
func schRefVals() []struct{ Type, GoType, Expected string } {
return []struct{ Type, GoType, Expected string }{
{"Comment", "", "models.Comment"},
{"UserCard", "UserItem", "models.UserItem"},
}
}
func TestTypeResolver_AdditionalItems(t *testing.T) {
_, resolver, e := basicTaskListResolver(t)
require.NoError(t, e)
tpe := spec.StringProperty()
// arrays of primitives and string formats with additional formats
for _, val := range schTypeVals() {
var sch spec.Schema
sch.Typed(val.Type, val.Format)
var coll spec.Schema
coll.Type = []string{"array"}
coll.Items = new(spec.SchemaOrArray)
coll.Items.Schema = tpe
coll.AdditionalItems = new(spec.SchemaOrBool)
coll.AdditionalItems.Schema = &sch
rt, err := resolver.ResolveSchema(&coll, true, true)
require.NoError(t, err)
require.True(t, rt.IsArray)
assert.True(t, rt.HasAdditionalItems)
assert.False(t, rt.IsNullable)
}
}
func TestTypeResolver_BasicTypes(t *testing.T) {
_, resolver, e := basicTaskListResolver(t)
require.NoError(t, e)
// primitives and string formats
for _, val := range schTypeVals() {
sch := new(spec.Schema)
sch.Typed(val.Type, val.Format)
rt, err := resolver.ResolveSchema(sch, true, false)
require.NoError(t, err)
assert.False(t, rt.IsNullable, "expected %s with format %q to not be nullable", val.Type, val.Format)
assertPrimitiveResolve(t, val.Type, val.Format, val.Expected, rt)
}
// arrays of primitives and string formats
for _, val := range schTypeVals() {
var sch spec.Schema
sch.Typed(val.Type, val.Format)
rt, err := resolver.ResolveSchema(new(spec.Schema).CollectionOf(sch), true, true)
require.NoError(t, err)
assert.True(t, rt.IsArray)
assert.False(t, rt.IsEmptyOmitted)
s := new(spec.Schema).CollectionOf(sch)
s.AddExtension(xOmitEmpty, false)
rt, err = resolver.ResolveSchema(s, true, true)
require.NoError(t, err)
assert.True(t, rt.IsArray)
assert.False(t, rt.IsEmptyOmitted)
s = new(spec.Schema).CollectionOf(sch)
s.AddExtension(xOmitEmpty, true)
rt, err = resolver.ResolveSchema(s, true, true)
require.NoError(t, err)
assert.True(t, rt.IsArray)
assert.True(t, rt.IsEmptyOmitted)
}
// primitives and string formats
for _, val := range schTypeVals() {
sch := new(spec.Schema)
sch.Typed(val.Type, val.Format)
sch.Extensions = make(spec.Extensions)
sch.Extensions[xIsNullable] = true
rt, err := resolver.ResolveSchema(sch, true, false)
require.NoError(t, err)
if val.Type == "file" {
assert.False(t, rt.IsNullable, "expected %q (%q) to not be nullable", val.Type, val.Format)
} else {
assert.True(t, rt.IsNullable, "expected %q (%q) to be nullable", val.Type, val.Format)
}
assertPrimitiveResolve(t, val.Type, val.Format, val.Expected, rt)
// Test x-nullable overrides x-isnullable
sch.Extensions[xIsNullable] = false
sch.Extensions[xNullable] = true
rt, err = resolver.ResolveSchema(sch, true, true)
require.NoError(t, err)
if val.Type == "file" {
assert.False(t, rt.IsNullable, "expected %q (%q) to not be nullable", val.Type, val.Format)
} else {
assert.True(t, rt.IsNullable, "expected %q (%q) to be nullable", val.Type, val.Format)
}
assertPrimitiveResolve(t, val.Type, val.Format, val.Expected, rt)
// Test x-nullable without x-isnullable
delete(sch.Extensions, xIsNullable)
sch.Extensions[xNullable] = true
rt, err = resolver.ResolveSchema(sch, true, true)
require.NoError(t, err)
if val.Type == "file" {
assert.False(t, rt.IsNullable, "expected %q (%q) to not be nullable", val.Type, val.Format)
} else {
assert.True(t, rt.IsNullable, "expected %q (%q) to be nullable", val.Type, val.Format)
}
assertPrimitiveResolve(t, val.Type, val.Format, val.Expected, rt)
}
// arrays of primitives and string formats
for _, val := range schTypeVals() {
var sch spec.Schema
sch.Typed(val.Type, val.Format)
sch.AddExtension(xIsNullable, true)
rt, err := resolver.ResolveSchema(new(spec.Schema).CollectionOf(sch), true, true)
require.NoError(t, err)
assert.True(t, rt.IsArray)
}
}
func TestTypeResolver_Refs(t *testing.T) {
_, resolver, e := basicTaskListResolver(t)
require.NoError(t, e)
// referenced objects
for _, val := range schRefVals() {
sch := new(spec.Schema)
sch.Ref, _ = spec.NewRef("#/definitions/" + val.Type)
rt, err := resolver.ResolveSchema(sch, true, true)
require.NoError(t, err)
assert.Equal(t, val.Expected, rt.GoType)
assert.False(t, rt.IsAnonymous)
assert.True(t, rt.IsNullable)
assert.Equal(t, "object", rt.SwaggerType)
}
// referenced array objects
for _, val := range schRefVals() {
sch := new(spec.Schema)
sch.Ref, _ = spec.NewRef("#/definitions/" + val.Type)
rt, err := resolver.ResolveSchema(new(spec.Schema).CollectionOf(*sch), true, true)
require.NoError(t, err)
assert.True(t, rt.IsArray)
// now this behavior has moved down to the type resolver:
// * it used to be hidden to the type resolver, but rendered like that eventually
assert.Equal(t, "[]*"+val.Expected, rt.GoType)
}
// for named objects
// referenced objects
for _, val := range schRefVals() {
sch := new(spec.Schema)
sch.Ref, _ = spec.NewRef("#/definitions/" + val.Type)
rt, err := resolver.ResolveSchema(sch, false, true)
require.NoError(t, err)
assert.Equal(t, val.Expected, rt.GoType)
assert.False(t, rt.IsAnonymous)
assert.True(t, rt.IsNullable)
assert.Equal(t, "object", rt.SwaggerType)
}
// referenced array objects
for _, val := range schRefVals() {
sch := new(spec.Schema)
sch.Ref, _ = spec.NewRef("#/definitions/" + val.Type)
rt, err := resolver.ResolveSchema(new(spec.Schema).CollectionOf(*sch), false, true)
require.NoError(t, err)
assert.True(t, rt.IsArray)
// now this behavior has moved down to the type resolver:
// * it used to be hidden to the type resolver, but rendered like that eventually
assert.Equal(t, "[]*"+val.Expected, rt.GoType)
}
}
func TestTypeResolver_AdditionalProperties(t *testing.T) {
_, resolver, err := basicTaskListResolver(t)
require.NoError(t, err)
// primitives as additional properties
for _, val := range schTypeVals() {
sch := new(spec.Schema)
sch.Typed(val.Type, val.Format)
parent := new(spec.Schema)
parent.AdditionalProperties = new(spec.SchemaOrBool)
parent.AdditionalProperties.Schema = sch
rt, err := resolver.ResolveSchema(parent, true, false)
require.NoError(t, err)
assert.True(t, rt.IsMap)
assert.False(t, rt.IsComplexObject)
assert.Equal(t, "map[string]"+val.Expected, rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
}
// array of primitives as additional properties
for _, val := range schTypeVals() {
sch := new(spec.Schema)
sch.Typed(val.Type, val.Format)
parent := new(spec.Schema)
parent.AdditionalProperties = new(spec.SchemaOrBool)
parent.AdditionalProperties.Schema = new(spec.Schema).CollectionOf(*sch)
rt, err := resolver.ResolveSchema(parent, true, false)
require.NoError(t, err)
assert.True(t, rt.IsMap)
assert.False(t, rt.IsComplexObject)
assert.Equal(t, "map[string][]"+val.Expected, rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
}
// refs as additional properties
for _, val := range schRefVals() {
sch := new(spec.Schema)
sch.Ref, _ = spec.NewRef("#/definitions/" + val.Type)
parent := new(spec.Schema)
parent.AdditionalProperties = new(spec.SchemaOrBool)
parent.AdditionalProperties.Schema = sch
rt, err := resolver.ResolveSchema(parent, true, true)
require.NoError(t, err)
assert.True(t, rt.IsMap)
assert.False(t, rt.IsComplexObject)
assert.Equal(t, "map[string]"+val.Expected, rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
}
// when additional properties and properties present, it's a complex object
// primitives as additional properties
for _, val := range schTypeVals() {
sch := new(spec.Schema)
sch.Typed(val.Type, val.Format)
parent := new(spec.Schema)
parent.Properties = make(map[string]spec.Schema)
parent.Properties["id"] = *spec.Int32Property()
parent.AdditionalProperties = new(spec.SchemaOrBool)
parent.AdditionalProperties.Schema = sch
rt, err := resolver.ResolveSchema(parent, true, true)
require.NoError(t, err)
assert.True(t, rt.IsComplexObject)
assert.False(t, rt.IsMap)
assert.Equal(t, "map[string]"+val.Expected, rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
}
// array of primitives as additional properties
for _, val := range schTypeVals() {
sch := new(spec.Schema)
sch.Typed(val.Type, val.Format)
parent := new(spec.Schema)
parent.Properties = make(map[string]spec.Schema)
parent.Properties["id"] = *spec.Int32Property()
parent.AdditionalProperties = new(spec.SchemaOrBool)
parent.AdditionalProperties.Schema = new(spec.Schema).CollectionOf(*sch)
rt, err := resolver.ResolveSchema(parent, true, true)
require.NoError(t, err)
assert.True(t, rt.IsComplexObject)
assert.False(t, rt.IsMap)
assert.Equal(t, "map[string][]"+val.Expected, rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
}
// refs as additional properties
for _, val := range schRefVals() {
sch := new(spec.Schema)
sch.Ref, _ = spec.NewRef("#/definitions/" + val.Type)
parent := new(spec.Schema)
parent.Properties = make(map[string]spec.Schema)
parent.Properties["id"] = *spec.Int32Property()
parent.AdditionalProperties = new(spec.SchemaOrBool)
parent.AdditionalProperties.Schema = sch
rt, err := resolver.ResolveSchema(parent, true, true)
require.NoError(t, err)
assert.True(t, rt.IsComplexObject)
assert.False(t, rt.IsMap)
assert.Equal(t, "map[string]"+val.Expected, rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
}
}
func TestTypeResolver_Notables(t *testing.T) {
doc, resolver, err := specResolver(t, "../fixtures/codegen/todolist.models.yml")
require.NoError(t, err)
def := doc.Spec().Definitions["Notables"]
rest, err := resolver.ResolveSchema(&def, false, true)
require.NoError(t, err)
assert.True(t, rest.IsArray)
assert.False(t, rest.IsAnonymous)
assert.False(t, rest.IsNullable)
assert.Equal(t, "[]*models.Notable", rest.GoType)
}
func specResolver(t testing.TB, path string) (*loads.Document, *typeResolver, error) {
tlb, err := loads.Spec(path)
if err != nil {
return nil, nil, err
}
resolver := &typeResolver{
Doc: tlb,
ModelsPackage: "models",
}
resolver.KnownDefs = make(map[string]struct{})
for k := range tlb.Spec().Definitions {
resolver.KnownDefs[k] = struct{}{}
}
return tlb, resolver, nil
}
func basicTaskListResolver(t testing.TB) (*loads.Document, *typeResolver, error) {
tlb, err := loads.Spec("../fixtures/codegen/tasklist.basic.yml")
if err != nil {
return nil, nil, err
}
swsp := tlb.Spec()
uc := swsp.Definitions["UserCard"]
uc.AddExtension(xGoName, "UserItem")
swsp.Definitions["UserCard"] = uc
resolver := &typeResolver{
Doc: tlb,
ModelsPackage: "models",
}
resolver.KnownDefs = make(map[string]struct{})
for k, sch := range swsp.Definitions {
resolver.KnownDefs[k] = struct{}{}
if nm, ok := sch.Extensions[xGoName]; ok {
resolver.KnownDefs[nm.(string)] = struct{}{}
}
}
return tlb, resolver, nil
}
func TestTypeResolver_TupleTypes(t *testing.T) {
_, resolver, err := basicTaskListResolver(t)
require.NoError(t, err)
// tuple type (items with multiple schemas)
parent := new(spec.Schema)
parent.Typed("array", "")
parent.Items = new(spec.SchemaOrArray)
parent.Items.Schemas = append(
parent.Items.Schemas,
*spec.StringProperty(),
*spec.Int64Property(),
*spec.Float64Property(),
*spec.BoolProperty(),
*spec.ArrayProperty(spec.StringProperty()),
*spec.RefProperty("#/definitions/Comment"),
)
rt, err := resolver.ResolveSchema(parent, true, true)
require.NoError(t, err)
assert.False(t, rt.IsArray)
assert.True(t, rt.IsTuple)
}
func TestTypeResolver_AnonymousStructs(t *testing.T) {
_, resolver, err := basicTaskListResolver(t)
require.NoError(t, err)
// anonymous structs should be accounted for
parent := new(spec.Schema)
parent.Typed("object", "")
parent.Properties = make(map[string]spec.Schema)
parent.Properties["name"] = *spec.StringProperty()
parent.Properties["age"] = *spec.Int32Property()
rt, err := resolver.ResolveSchema(parent, true, true)
require.NoError(t, err)
assert.True(t, rt.IsNullable)
assert.True(t, rt.IsAnonymous)
assert.True(t, rt.IsComplexObject)
parent.Extensions = make(spec.Extensions)
parent.Extensions[xIsNullable] = true
rt, err = resolver.ResolveSchema(parent, true, true)
require.NoError(t, err)
assert.True(t, rt.IsNullable)
assert.True(t, rt.IsAnonymous)
assert.True(t, rt.IsComplexObject)
// Also test that it's nullable with just x-nullable
parent.Extensions[xIsNullable] = false
parent.Extensions[xNullable] = false
rt, err = resolver.ResolveSchema(parent, true, true)
require.NoError(t, err)
assert.False(t, rt.IsNullable)
assert.True(t, rt.IsAnonymous)
assert.True(t, rt.IsComplexObject)
}
func TestTypeResolver_ObjectType(t *testing.T) {
_, resolver, e := basicTaskListResolver(t)
require.NoError(t, e)
resolver.ModelName = "TheModel"
resolver.KnownDefs["TheModel"] = struct{}{}
defer func() { resolver.ModelName = "" }()
// very poor schema definitions (as in none)
types := []string{"object", ""}
for _, tpe := range types {
sch := new(spec.Schema)
sch.Typed(tpe, "")
rt, err := resolver.ResolveSchema(sch, true, true)
require.NoError(t, err)
assert.True(t, rt.IsMap)
assert.False(t, rt.IsComplexObject)
assert.Equal(t, "interface{}", rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
sch.Properties = make(map[string]spec.Schema)
var ss spec.Schema
sch.Properties["tags"] = *(&ss).CollectionOf(*spec.StringProperty())
rt, err = resolver.ResolveSchema(sch, false, true)
require.NoError(t, err)
assert.True(t, rt.IsComplexObject)
assert.False(t, rt.IsMap)
assert.Equal(t, "models.TheModel", rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
sch.Properties = nil
nsch := new(spec.Schema)
nsch.Typed(tpe, "")
nsch.AllOf = []spec.Schema{*sch}
rt, err = resolver.ResolveSchema(nsch, false, true)
require.NoError(t, err)
assert.True(t, rt.IsComplexObject)
assert.False(t, rt.IsMap)
assert.Equal(t, "models.TheModel", rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
sch = new(spec.Schema)
rt, err = resolver.ResolveSchema(sch, true, true)
require.NoError(t, err)
assert.True(t, rt.IsMap)
assert.False(t, rt.IsComplexObject)
assert.Equal(t, "interface{}", rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
sch = new(spec.Schema)
var sp spec.Schema
sp.Typed("object", "")
sch.AllOf = []spec.Schema{sp}
rt, err = resolver.ResolveSchema(sch, true, true)
require.NoError(t, err)
assert.True(t, rt.IsComplexObject)
assert.False(t, rt.IsMap)
assert.Equal(t, "models.TheModel", rt.GoType)
assert.Equal(t, "object", rt.SwaggerType)
}
}
func TestTypeResolver_AliasTypes(t *testing.T) {
doc, resolver, err := basicTaskListResolver(t)
require.NoError(t, err)
resolver.ModelsPackage = ""
resolver.ModelName = "Currency"
defer func() {
resolver.ModelName = ""
resolver.ModelsPackage = "models"
}()
defs := doc.Spec().Definitions[resolver.ModelName]
rt, err := resolver.ResolveSchema(&defs, false, true)
require.NoError(t, err)
assert.False(t, rt.IsAnonymous)
assert.True(t, rt.IsAliased)
assert.True(t, rt.IsPrimitive)
assert.Equal(t, "Currency", rt.GoType)
assert.Equal(t, "string", rt.AliasedType)
}
func assertPrimitiveResolve(t testing.TB, tpe, tfmt, exp string, tr resolvedType) {
assert.Equal(t, tpe, tr.SwaggerType, fmt.Sprintf("expected %q (%q, %q) for the swagger type but got %q", tpe, tfmt, exp, tr.SwaggerType))
assert.Equal(t, tfmt, tr.SwaggerFormat, fmt.Sprintf("expected %q (%q, %q) for the swagger format but got %q", tfmt, tpe, exp, tr.SwaggerFormat))
assert.Equal(t, exp, tr.GoType, fmt.Sprintf("expected %q (%q, %q) for the go type but got %q", exp, tpe, tfmt, tr.GoType))
}
func TestTypeResolver_ExistingModel(t *testing.T) {
doc, err := loads.Spec("../fixtures/codegen/existing-model.yml")
resolver := newTypeResolver("model", "", doc)
require.NoError(t, err)
def := doc.Spec().Definitions["JsonWebKey"]
tpe, pkg, alias := resolver.knownDefGoType("JsonWebKey", def, nil)
assert.Equal(t, "jwk.Key", tpe)
assert.Equal(t, "github.com/user/package", pkg)
assert.Equal(t, "jwk", alias)
rest, err := resolver.ResolveSchema(&def, false, true)
require.NoError(t, err)
assert.False(t, rest.IsMap)
assert.False(t, rest.IsArray)
assert.False(t, rest.IsTuple)
assert.False(t, rest.IsStream)
assert.True(t, rest.IsAliased)
assert.False(t, rest.IsBaseType)
assert.False(t, rest.IsInterface)
assert.True(t, rest.IsNullable)
assert.False(t, rest.IsPrimitive)
assert.False(t, rest.IsAnonymous)
assert.True(t, rest.IsComplexObject)
assert.False(t, rest.IsCustomFormatter)
assert.Equal(t, "jwk.Key", rest.GoType)
assert.Equal(t, "github.com/user/package", rest.Pkg)
assert.Equal(t, "jwk", rest.PkgAlias)
def = doc.Spec().Definitions["JsonWebKeySet"].Properties["keys"]
rest, err = resolver.ResolveSchema(&def, false, true)
require.NoError(t, err)
assert.False(t, rest.IsMap)
assert.True(t, rest.IsArray)
assert.False(t, rest.IsTuple)
assert.False(t, rest.IsStream)
assert.False(t, rest.IsAliased)
assert.False(t, rest.IsBaseType)
assert.False(t, rest.IsInterface)
assert.False(t, rest.IsNullable)
assert.False(t, rest.IsPrimitive)
assert.False(t, rest.IsAnonymous)
assert.False(t, rest.IsComplexObject)
assert.False(t, rest.IsCustomFormatter)
assert.Equal(t, "[]*jwk.Key", rest.GoType)
assert.Equal(t, "", rest.Pkg)
assert.Equal(t, "", rest.PkgAlias)
}
|
apache-2.0
|
burmanm/origin
|
pkg/cmd/admin/router/router.go
|
23695
|
package router
import (
"fmt"
"io"
"io/ioutil"
"math/rand"
"os"
"strconv"
"strings"
"github.com/golang/glog"
"github.com/spf13/cobra"
kapi "k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/errors"
kclient "k8s.io/kubernetes/pkg/client/unversioned"
kclientcmd "k8s.io/kubernetes/pkg/client/unversioned/clientcmd"
"k8s.io/kubernetes/pkg/controller/serviceaccount"
"k8s.io/kubernetes/pkg/fields"
cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
"k8s.io/kubernetes/pkg/labels"
"k8s.io/kubernetes/pkg/runtime"
kutil "k8s.io/kubernetes/pkg/util"
"github.com/openshift/origin/pkg/cmd/util/clientcmd"
"github.com/openshift/origin/pkg/cmd/util/variable"
configcmd "github.com/openshift/origin/pkg/config/cmd"
dapi "github.com/openshift/origin/pkg/deploy/api"
"github.com/openshift/origin/pkg/generate/app"
"github.com/openshift/origin/pkg/security/admission"
)
const (
routerLong = `
Install or configure a router
This command helps to set up a router to take edge traffic and balance it to
your application. With no arguments, the command will check for an existing router
service called 'router' and create one if it does not exist. If you want to test whether
a router has already been created, add the --dry-run flag and the command will exit with
1 if the router does not exist.
If a router does not exist with the given name, this command will
create a deployment configuration and service that will run the router. If you are
running your router in production, you should pass --replicas=2 or higher to ensure
you have failover protection.`
routerExample = ` # Check the default router ("router")
$ %[1]s %[2]s --dry-run
# See what the router would look like if created
$ %[1]s %[2]s -o json --credentials=/path/to/openshift-router.kubeconfig --service-account=myserviceaccount
# Create a router if it does not exist
$ %[1]s %[2]s router-west --credentials=/path/to/openshift-router.kubeconfig --service-account=myserviceaccount --replicas=2
# Use a different router image and see the router configuration
$ %[1]s %[2]s region-west -o yaml --credentials=/path/to/openshift-router.kubeconfig --service-account=myserviceaccount --images=myrepo/somerouter:mytag
# Run the router with a hint to the underlying implementation to _not_ expose statistics.
$ %[1]s %[2]s router-west --credentials=/path/to/openshift-router.kubeconfig --service-account=myserviceaccount --stats-port=0
`
secretsVolumeName = "secret-volume"
secretsPath = "/etc/secret-volume"
privkeySecretName = "external-host-private-key-secret"
privkeyVolumeName = "external-host-private-key-volume"
privkeyName = "router.pem"
privkeyPath = secretsPath + "/" + privkeyName
)
// RouterConfig contains the configuration parameters necessary to
// launch a router, including general parameters, type of router, and
// type-specific parameters.
type RouterConfig struct {
// Type is the router type, which determines which plugin to use (f5
// or template).
Type string
// ImageTemplate specifies the image from which the router will be created.
ImageTemplate variable.ImageTemplate
// Ports specifies the container ports for the router.
Ports string
// Replicas specifies the initial replica count for the router.
Replicas int
// Labels specifies the label or labels that will be assigned to the router
// pod.
Labels string
// DryRun specifies that the router command should not launch a router but
// should instead exit with code 1 to indicate if a router is already running
// or code 0 otherwise.
DryRun bool
// Credentials specifies the path to a .kubeconfig file with the credentials
// with which the router may contact the master.
Credentials string
// DefaultCertificate holds the certificate that will be used if no more
// specific certificate is found. This is typically a wildcard certificate.
DefaultCertificate string
// Selector specifies a label or set of labels that determines the nodes on
// which the router pod can be scheduled.
Selector string
// StatsPort specifies a port at which the router can provide statistics.
StatsPort int
// StatsPassword specifies a password required to authenticate connections to
// the statistics port.
StatsPassword string
// StatsUsername specifies a username required to authenticate connections to
// the statistics port.
StatsUsername string
// HostNetwork specifies whether to configure the router pod to use the host's
// network namespace or the container's.
HostNetwork bool
// ServiceAccount specifies the service account under which the router will
// run.
ServiceAccount string
// ExternalHost specifies the hostname or IP address of an external host for
// router plugins that integrate with an external load balancer (such as f5).
ExternalHost string
// ExternalHostUsername specifies the username for authenticating with the
// external host.
ExternalHostUsername string
// ExternalHostPassword specifies the password for authenticating with the
// external host.
ExternalHostPassword string
// ExternalHostHttpVserver specifies the virtual server for HTTP connections.
ExternalHostHttpVserver string
// ExternalHostHttpsVserver specifies the virtual server for HTTPS connections.
ExternalHostHttpsVserver string
// ExternalHostPrivateKey specifies an SSH private key for authenticating with
// the external host.
ExternalHostPrivateKey string
// ExternalHostInsecure specifies that the router should skip strict
// certificate verification when connecting to the external host.
ExternalHostInsecure bool
// ExternalHostPartitionPath specifies the partition path to use.
	// This is used by some routers to create access control
// boundaries for users and applications.
ExternalHostPartitionPath string
}
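// Illustrative sketch (not part of the original file): a minimal RouterConfig as
// the flags registered in NewCmdRouter below would populate it. Type, Ports,
// StatsPort and HostNetwork mirror the defaults set in NewCmdRouter; Replicas,
// Credentials and ServiceAccount are assumptions.
//
//	exampleCfg := &RouterConfig{
//		Type:           "haproxy-router",
//		Ports:          "80:80,443:443",
//		Replicas:       2,
//		Credentials:    "/path/to/openshift-router.kubeconfig",
//		ServiceAccount: "router",
//		StatsPort:      1936,
//		HostNetwork:    true,
//	}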
var errExit = fmt.Errorf("exit")
const (
defaultLabel = "router=<name>"
// Default port numbers to expose and bind/listen on.
defaultPorts = "80:80,443:443"
)
// NewCmdRouter implements the OpenShift CLI router command.
func NewCmdRouter(f *clientcmd.Factory, parentName, name string, out io.Writer) *cobra.Command {
cfg := &RouterConfig{
ImageTemplate: variable.NewDefaultImageTemplate(),
Labels: defaultLabel,
Ports: defaultPorts,
Replicas: 1,
StatsUsername: "admin",
StatsPort: 1936,
HostNetwork: true,
}
cmd := &cobra.Command{
Use: fmt.Sprintf("%s [NAME]", name),
Short: "Install a router",
Long: routerLong,
Example: fmt.Sprintf(routerExample, parentName, name),
Run: func(cmd *cobra.Command, args []string) {
err := RunCmdRouter(f, cmd, out, cfg, args)
if err != errExit {
cmdutil.CheckErr(err)
} else {
os.Exit(1)
}
},
}
cmd.Flags().StringVar(&cfg.Type, "type", "haproxy-router", "The type of router to use - if you specify --images this flag may be ignored.")
cmd.Flags().StringVar(&cfg.ImageTemplate.Format, "images", cfg.ImageTemplate.Format, "The image to base this router on - ${component} will be replaced with --type")
cmd.Flags().BoolVar(&cfg.ImageTemplate.Latest, "latest-images", cfg.ImageTemplate.Latest, "If true, attempt to use the latest images for the router instead of the latest release.")
cmd.Flags().StringVar(&cfg.Ports, "ports", cfg.Ports, "A comma delimited list of ports or port pairs to expose on the router pod. The default is set for HAProxy.")
cmd.Flags().IntVar(&cfg.Replicas, "replicas", cfg.Replicas, "The replication factor of the router; commonly 2 when high availability is desired.")
cmd.Flags().StringVar(&cfg.Labels, "labels", cfg.Labels, "A set of labels to uniquely identify the router and its components.")
cmd.Flags().BoolVar(&cfg.DryRun, "dry-run", cfg.DryRun, "Exit with code 1 if the specified router does not exist.")
cmd.Flags().Bool("create", false, "deprecated; this is now the default behavior")
cmd.Flags().StringVar(&cfg.Credentials, "credentials", "", "Path to a .kubeconfig file that will contain the credentials the router should use to contact the master.")
	cmd.Flags().StringVar(&cfg.DefaultCertificate, "default-cert", cfg.DefaultCertificate, "Optional path to a certificate file that will be used as the default certificate. The file should contain the cert, key, and any CA certs necessary for the router to serve the certificate.")
cmd.Flags().StringVar(&cfg.Selector, "selector", cfg.Selector, "Selector used to filter nodes on deployment. Used to run routers on a specific set of nodes.")
cmd.Flags().StringVar(&cfg.ServiceAccount, "service-account", cfg.ServiceAccount, "Name of the service account to use to run the router pod.")
cmd.Flags().IntVar(&cfg.StatsPort, "stats-port", cfg.StatsPort, "If the underlying router implementation can provide statistics this is a hint to expose it on this port. Specify 0 if you want to turn off exposing the statistics.")
cmd.Flags().StringVar(&cfg.StatsPassword, "stats-password", cfg.StatsPassword, "If the underlying router implementation can provide statistics this is the requested password for auth. If not set a password will be generated.")
cmd.Flags().StringVar(&cfg.StatsUsername, "stats-user", cfg.StatsUsername, "If the underlying router implementation can provide statistics this is the requested username for auth.")
cmd.Flags().BoolVar(&cfg.HostNetwork, "host-network", cfg.HostNetwork, "If true (the default), then use host networking rather than using a separate container network stack.")
cmd.Flags().StringVar(&cfg.ExternalHost, "external-host", cfg.ExternalHost, "If the underlying router implementation connects with an external host, this is the external host's hostname.")
cmd.Flags().StringVar(&cfg.ExternalHostUsername, "external-host-username", cfg.ExternalHostUsername, "If the underlying router implementation connects with an external host, this is the username for authenticating with the external host.")
cmd.Flags().StringVar(&cfg.ExternalHostPassword, "external-host-password", cfg.ExternalHostPassword, "If the underlying router implementation connects with an external host, this is the password for authenticating with the external host.")
cmd.Flags().StringVar(&cfg.ExternalHostHttpVserver, "external-host-http-vserver", cfg.ExternalHostHttpVserver, "If the underlying router implementation uses virtual servers, this is the name of the virtual server for HTTP connections.")
cmd.Flags().StringVar(&cfg.ExternalHostHttpsVserver, "external-host-https-vserver", cfg.ExternalHostHttpsVserver, "If the underlying router implementation uses virtual servers, this is the name of the virtual server for HTTPS connections.")
cmd.Flags().StringVar(&cfg.ExternalHostPrivateKey, "external-host-private-key", cfg.ExternalHostPrivateKey, "If the underlying router implementation requires an SSH private key, this is the path to the private key file.")
cmd.Flags().BoolVar(&cfg.ExternalHostInsecure, "external-host-insecure", cfg.ExternalHostInsecure, "If the underlying router implementation connects with an external host over a secure connection, this causes the router to skip strict certificate verification with the external host.")
cmd.Flags().StringVar(&cfg.ExternalHostPartitionPath, "external-host-partition-path", cfg.ExternalHostPartitionPath, "If the underlying router implementation uses partitions for control boundaries, this is the path to use for that partition.")
cmd.MarkFlagFilename("credentials", "kubeconfig")
cmdutil.AddPrinterFlags(cmd)
return cmd
}
// Read the specified file and return it as a bytes array.
func loadData(file string) ([]byte, error) {
if len(file) == 0 {
return []byte{}, nil
}
bytes, err := ioutil.ReadFile(file)
if err != nil {
return []byte{}, err
}
return bytes, nil
}
// Read the specified certificate file and return it as a string.
func loadCert(file string) (string, error) {
bytes, err := loadData(file)
return string(bytes), err
}
// Read the specified key file and return it as a bytes array.
func loadKey(file string) ([]byte, error) {
return loadData(file)
}
// generateSecretsConfig generates any Secret and Volume objects, such
// as SSH private keys, that are necessary for the router container.
func generateSecretsConfig(cfg *RouterConfig, kClient *kclient.Client,
namespace string) ([]*kapi.Secret, []kapi.Volume, []kapi.VolumeMount,
error) {
secrets := []*kapi.Secret{}
volumes := []kapi.Volume{}
mounts := []kapi.VolumeMount{}
if len(cfg.ExternalHostPrivateKey) != 0 {
privkeyData, err := loadKey(cfg.ExternalHostPrivateKey)
if err != nil {
return secrets, volumes, mounts, fmt.Errorf("error reading private key"+
" for external host: %v", err)
}
serviceAccount, err := kClient.ServiceAccounts(namespace).Get(cfg.ServiceAccount)
if err != nil {
return secrets, volumes, mounts, fmt.Errorf("error looking up"+
" service account %s: %v", cfg.ServiceAccount, err)
}
privkeySecret := &kapi.Secret{
ObjectMeta: kapi.ObjectMeta{
Name: privkeySecretName,
Annotations: map[string]string{
kapi.ServiceAccountNameKey: serviceAccount.Name,
kapi.ServiceAccountUIDKey: string(serviceAccount.UID),
},
},
Data: map[string][]byte{privkeyName: privkeyData},
}
secrets = append(secrets, privkeySecret)
}
// We need a secrets volume and mount iff we have secrets.
if len(secrets) != 0 {
secretsVolume := kapi.Volume{
Name: secretsVolumeName,
VolumeSource: kapi.VolumeSource{
Secret: &kapi.SecretVolumeSource{
SecretName: privkeySecretName,
},
},
}
secretsMount := kapi.VolumeMount{
Name: secretsVolumeName,
ReadOnly: true,
MountPath: secretsPath,
}
volumes = []kapi.Volume{secretsVolume}
mounts = []kapi.VolumeMount{secretsMount}
}
return secrets, volumes, mounts, nil
}
func generateLivenessProbeConfig(cfg *RouterConfig,
ports []kapi.ContainerPort) *kapi.Probe {
var probe *kapi.Probe
if cfg.Type == "haproxy-router" {
probe = &kapi.Probe{
Handler: kapi.Handler{
TCPSocket: &kapi.TCPSocketAction{
Port: kutil.IntOrString{
IntVal: ports[0].ContainerPort,
},
},
},
InitialDelaySeconds: 10,
}
}
return probe
}
// RunCmdRouter contains all the necessary functionality for the
// OpenShift CLI router command.
func RunCmdRouter(f *clientcmd.Factory, cmd *cobra.Command, out io.Writer, cfg *RouterConfig, args []string) error {
var name string
switch len(args) {
case 0:
name = "router"
case 1:
name = args[0]
default:
return cmdutil.UsageError(cmd, "You may pass zero or one arguments to provide a name for the router")
}
if len(cfg.StatsUsername) > 0 {
if strings.Contains(cfg.StatsUsername, ":") {
return cmdutil.UsageError(cmd, "username %s must not contain ':'", cfg.StatsUsername)
}
}
ports, err := app.ContainerPortsFromString(cfg.Ports)
if err != nil {
glog.Fatal(err)
}
// For the host networking case, ensure the ports match.
if cfg.HostNetwork {
for i := 0; i < len(ports); i++ {
if ports[i].ContainerPort != ports[i].HostPort {
return cmdutil.UsageError(cmd, "For host networking mode, please ensure that the container [%v] and host [%v] ports match", ports[i].ContainerPort, ports[i].HostPort)
}
}
}
if cfg.StatsPort > 0 {
ports = append(ports, kapi.ContainerPort{
Name: "stats",
HostPort: cfg.StatsPort,
ContainerPort: cfg.StatsPort,
Protocol: kapi.ProtocolTCP,
})
}
label := map[string]string{"router": name}
if cfg.Labels != defaultLabel {
valid, remove, err := app.LabelsFromSpec(strings.Split(cfg.Labels, ","))
if err != nil {
glog.Fatal(err)
}
if len(remove) > 0 {
return cmdutil.UsageError(cmd, "You may not pass negative labels in %q", cfg.Labels)
}
label = valid
}
nodeSelector := map[string]string{}
if len(cfg.Selector) > 0 {
valid, remove, err := app.LabelsFromSpec(strings.Split(cfg.Selector, ","))
if err != nil {
glog.Fatal(err)
}
if len(remove) > 0 {
return cmdutil.UsageError(cmd, "You may not pass negative labels in selector %q", cfg.Selector)
}
nodeSelector = valid
}
image := cfg.ImageTemplate.ExpandOrDie(cfg.Type)
namespace, _, err := f.OpenShiftClientConfig.Namespace()
if err != nil {
return fmt.Errorf("error getting client: %v", err)
}
_, kClient, err := f.Clients()
if err != nil {
return fmt.Errorf("error getting client: %v", err)
}
_, output, err := cmdutil.PrinterForCommand(cmd)
if err != nil {
return fmt.Errorf("unable to configure printer: %v", err)
}
generate := output
if !generate {
_, err = kClient.Services(namespace).Get(name)
if err != nil {
if !errors.IsNotFound(err) {
return fmt.Errorf("can't check for existing router %q: %v", name, err)
}
generate = true
}
}
if generate {
if cfg.DryRun && !output {
return fmt.Errorf("router %q does not exist (no service)", name)
}
if len(cfg.ServiceAccount) == 0 {
return fmt.Errorf("router could not be created; you must specify a service account with --service-account")
}
err := validateServiceAccount(kClient, namespace, cfg.ServiceAccount)
if err != nil {
return fmt.Errorf("router could not be created; %v", err)
}
// create new router
if len(cfg.Credentials) == 0 {
return fmt.Errorf("router could not be created; you must specify a .kubeconfig file path containing credentials for connecting the router to the master with --credentials")
}
clientConfigLoadingRules := &kclientcmd.ClientConfigLoadingRules{ExplicitPath: cfg.Credentials, Precedence: []string{}}
credentials, err := clientConfigLoadingRules.Load()
if err != nil {
return fmt.Errorf("router could not be created; the provided credentials %q could not be loaded: %v", cfg.Credentials, err)
}
config, err := kclientcmd.NewDefaultClientConfig(*credentials, &kclientcmd.ConfigOverrides{}).ClientConfig()
if err != nil {
return fmt.Errorf("router could not be created; the provided credentials %q could not be used: %v", cfg.Credentials, err)
}
if err := kclient.LoadTLSFiles(config); err != nil {
return fmt.Errorf("router could not be created; the provided credentials %q could not load certificate info: %v", cfg.Credentials, err)
}
insecure := "false"
if config.Insecure {
insecure = "true"
}
defaultCert, err := loadCert(cfg.DefaultCertificate)
if err != nil {
return fmt.Errorf("router could not be created; error reading default certificate file: %v", err)
}
if len(cfg.StatsPassword) == 0 {
cfg.StatsPassword = generateStatsPassword()
fmt.Fprintf(out, "password for stats user %s has been set to %s\n", cfg.StatsUsername, cfg.StatsPassword)
}
env := app.Environment{
"OPENSHIFT_MASTER": config.Host,
"OPENSHIFT_CA_DATA": string(config.CAData),
"OPENSHIFT_KEY_DATA": string(config.KeyData),
"OPENSHIFT_CERT_DATA": string(config.CertData),
"OPENSHIFT_INSECURE": insecure,
"DEFAULT_CERTIFICATE": defaultCert,
"ROUTER_SERVICE_NAME": name,
"ROUTER_SERVICE_NAMESPACE": namespace,
"ROUTER_EXTERNAL_HOST_HOSTNAME": cfg.ExternalHost,
"ROUTER_EXTERNAL_HOST_USERNAME": cfg.ExternalHostUsername,
"ROUTER_EXTERNAL_HOST_PASSWORD": cfg.ExternalHostPassword,
"ROUTER_EXTERNAL_HOST_HTTP_VSERVER": cfg.ExternalHostHttpVserver,
"ROUTER_EXTERNAL_HOST_HTTPS_VSERVER": cfg.ExternalHostHttpsVserver,
"ROUTER_EXTERNAL_HOST_INSECURE": strconv.FormatBool(cfg.ExternalHostInsecure),
"ROUTER_EXTERNAL_HOST_PARTITION_PATH": cfg.ExternalHostPartitionPath,
"ROUTER_EXTERNAL_HOST_PRIVKEY": privkeyPath,
"STATS_PORT": strconv.Itoa(cfg.StatsPort),
"STATS_USERNAME": cfg.StatsUsername,
"STATS_PASSWORD": cfg.StatsPassword,
}
updatePercent := int(-25)
secrets, volumes, mounts, err := generateSecretsConfig(cfg, kClient,
namespace)
if err != nil {
return fmt.Errorf("router could not be created: %v", err)
}
livenessProbe := generateLivenessProbeConfig(cfg, ports)
objects := []runtime.Object{
&dapi.DeploymentConfig{
ObjectMeta: kapi.ObjectMeta{
Name: name,
Labels: label,
},
Triggers: []dapi.DeploymentTriggerPolicy{
{Type: dapi.DeploymentTriggerOnConfigChange},
},
Template: dapi.DeploymentTemplate{
Strategy: dapi.DeploymentStrategy{
Type: dapi.DeploymentStrategyTypeRolling,
RollingParams: &dapi.RollingDeploymentStrategyParams{UpdatePercent: &updatePercent},
},
ControllerTemplate: kapi.ReplicationControllerSpec{
Replicas: cfg.Replicas,
Selector: label,
Template: &kapi.PodTemplateSpec{
ObjectMeta: kapi.ObjectMeta{Labels: label},
Spec: kapi.PodSpec{
HostNetwork: cfg.HostNetwork,
ServiceAccountName: cfg.ServiceAccount,
NodeSelector: nodeSelector,
Containers: []kapi.Container{
{
Name: "router",
Image: image,
Ports: ports,
Env: env.List(),
LivenessProbe: livenessProbe,
ImagePullPolicy: kapi.PullIfNotPresent,
VolumeMounts: mounts,
},
},
Volumes: volumes,
},
},
},
},
},
}
if len(secrets) != 0 {
serviceAccount, err := kClient.ServiceAccounts(namespace).Get(cfg.ServiceAccount)
if err != nil {
return fmt.Errorf("error looking up service account %s: %v",
cfg.ServiceAccount, err)
}
for _, secret := range secrets {
objects = append(objects, secret)
serviceAccount.Secrets = append(serviceAccount.Secrets,
kapi.ObjectReference{Name: secret.Name})
}
_, err = kClient.ServiceAccounts(namespace).Update(serviceAccount)
if err != nil {
return fmt.Errorf("error adding secret key to service account %s: %v",
cfg.ServiceAccount, err)
}
}
objects = app.AddServices(objects, true)
// TODO: label all created objects with the same label - router=<name>
list := &kapi.List{Items: objects}
if output {
if err := f.PrintObject(cmd, list, out); err != nil {
return fmt.Errorf("Unable to print object: %v", err)
}
return nil
}
mapper, typer := f.Factory.Object()
bulk := configcmd.Bulk{
Mapper: mapper,
Typer: typer,
RESTClientFactory: f.Factory.RESTClient,
After: configcmd.NewPrintNameOrErrorAfter(out, os.Stderr),
}
if errs := bulk.Create(list, namespace); len(errs) != 0 {
return errExit
}
return nil
}
fmt.Fprintf(out, "Router %q service exists\n", name)
return nil
}
// generateStatsPassword creates a random password.
func generateStatsPassword() string {
	allowableChars := []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890")
allowableCharLength := len(allowableChars)
password := []string{}
for i := 0; i < 10; i++ {
char := allowableChars[rand.Intn(allowableCharLength)]
password = append(password, string(char))
}
return strings.Join(password, "")
}
func validateServiceAccount(kClient *kclient.Client, ns string, sa string) error {
// get cluster sccs
sccList, err := kClient.SecurityContextConstraints().List(labels.Everything(), fields.Everything())
if err != nil {
return fmt.Errorf("unable to validate service account %v", err)
}
// get set of sccs applicable to the service account
userInfo := serviceaccount.UserInfo(ns, sa, "")
for _, scc := range sccList.Items {
if admission.ConstraintAppliesTo(&scc, userInfo) {
if scc.AllowHostPorts {
return nil
}
}
}
return fmt.Errorf("unable to validate service account, host ports are forbidden")
}
|
apache-2.0
|
koichi626/hadoop-gpu
|
hadoop-gpu-0.20.1/docs/api/org/apache/hadoop/contrib/failmon/class-use/Executor.html
|
6087
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_07) on Tue Sep 01 20:57:03 UTC 2009 -->
<TITLE>
Uses of Class org.apache.hadoop.contrib.failmon.Executor (Hadoop 0.20.1 API)
</TITLE>
<META NAME="date" CONTENT="2009-09-01">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.contrib.failmon.Executor (Hadoop 0.20.1 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/hadoop/contrib/failmon/Executor.html" title="class in org.apache.hadoop.contrib.failmon"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/hadoop/contrib/failmon//class-useExecutor.html" target="_top"><B>FRAMES</B></A>
<A HREF="Executor.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.hadoop.contrib.failmon.Executor</B></H2>
</CENTER>
No usage of org.apache.hadoop.contrib.failmon.Executor
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/hadoop/contrib/failmon/Executor.html" title="class in org.apache.hadoop.contrib.failmon"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?org/apache/hadoop/contrib/failmon//class-useExecutor.html" target="_top"><B>FRAMES</B></A>
<A HREF="Executor.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2009 The Apache Software Foundation
</BODY>
</HTML>
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Campanulaceae/Campanula/Campanula griffinii/ Syn. Campanula angustiflora exilis/README.md
|
191
|
# Campanula angustiflora var. exilis VARIETY
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
stankovski/azure-sdk-for-net
|
sdk/compute/Azure.ResourceManager.Compute/src/Generated/Models/RecommendedMachineConfiguration.Serialization.cs
|
1897
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>
#nullable disable
using System.Text.Json;
using Azure.Core;
namespace Azure.ResourceManager.Compute.Models
{
public partial class RecommendedMachineConfiguration : IUtf8JsonSerializable
{
void IUtf8JsonSerializable.Write(Utf8JsonWriter writer)
{
writer.WriteStartObject();
if (VCPUs != null)
{
writer.WritePropertyName("vCPUs");
writer.WriteObjectValue(VCPUs);
}
if (Memory != null)
{
writer.WritePropertyName("memory");
writer.WriteObjectValue(Memory);
}
writer.WriteEndObject();
}
internal static RecommendedMachineConfiguration DeserializeRecommendedMachineConfiguration(JsonElement element)
{
ResourceRange vCPUs = default;
ResourceRange memory = default;
foreach (var property in element.EnumerateObject())
{
if (property.NameEquals("vCPUs"))
{
if (property.Value.ValueKind == JsonValueKind.Null)
{
continue;
}
vCPUs = ResourceRange.DeserializeResourceRange(property.Value);
continue;
}
if (property.NameEquals("memory"))
{
if (property.Value.ValueKind == JsonValueKind.Null)
{
continue;
}
memory = ResourceRange.DeserializeResourceRange(property.Value);
continue;
}
}
return new RecommendedMachineConfiguration(vCPUs, memory);
}
}
}
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Rubiaceae/Hydnophytum/Hydnophytum laurifolium/README.md
|
187
|
# Hydnophytum laurifolium Warb. SPECIES
#### Status
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
michaelbradley91/DFWin
|
DFWin/DFWin/Setup.cs
|
406
|
using Autofac;
using DFWin.Core;
namespace DFWin
{
public static class Setup
{
public static IContainer CreateIoC()
{
var containerBuilder = new ContainerBuilder();
containerBuilder.RegisterModule<GameModule>();
containerBuilder.RegisterModule<CoreModule>();
return containerBuilder.Build();
}
}
}
|
apache-2.0
|
gtache/intellij-lsp
|
intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/ScalaPsiElement.scala
|
3781
|
package org.jetbrains.plugins.scala.lang.psi
import com.intellij.psi.PsiElement
import com.intellij.psi.search.{LocalSearchScope, SearchScope}
import com.intellij.psi.tree.{IElementType, TokenSet}
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil.intersectScopes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.project.{ProjectContext, ProjectContextOwner}
trait ScalaPsiElement extends PsiElement with ProjectContextOwner {
protected var context: PsiElement = null
protected var child: PsiElement = null
implicit def elementScope: ElementScope = ElementScope(this)
implicit def projectContext: ProjectContext = this.getProject
def isInCompiledFile: Boolean =
this.containingScalaFile.exists {
_.isCompiled
}
def setContext(element: PsiElement, child: PsiElement) {
context = element
this.child = child
}
def getSameElementInContext: PsiElement =
child match {
case null => this
case _ => child
}
def getDeepSameElementInContext: PsiElement =
child match {
case null => this
case _ if child == context => this
case child: ScalaPsiElement => child.getDeepSameElementInContext
case _ => child
}
def startOffsetInParent: Int =
child match {
case s: ScalaPsiElement => s.startOffsetInParent
case _ => getStartOffsetInParent
}
protected def findChildByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): T
protected def findChildrenByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): Array[T]
protected def findChild[T >: Null <: ScalaPsiElement](clazz: Class[T]): Option[T] =
Option(findChildByClassScala(clazz))
def findLastChildByType[T <: PsiElement](t: IElementType): T = {
var node = getNode.getLastChildNode
while (node != null && node.getElementType != t) {
node = node.getTreePrev
}
if (node == null) null.asInstanceOf[T]
else node.getPsi.asInstanceOf[T]
}
def findFirstChildByType(t: IElementType): PsiElement = {
var node = getNode.getFirstChildNode
while (node != null && node.getElementType != t) {
node = node.getTreeNext
}
if (node == null) null else node.getPsi
}
def findChildrenByType(t: IElementType): List[PsiElement] = {
val buffer = new collection.mutable.ArrayBuffer[PsiElement]
var node = getNode.getFirstChildNode
while (node != null) {
if (node.getElementType == t) buffer += node.getPsi
node = node.getTreeNext
}
buffer.toList
}
def findLastChildByType(set: TokenSet): PsiElement = {
var node = getNode.getLastChildNode
while (node != null && !set.contains(node.getElementType)) {
node = node.getTreePrev
}
if (node == null) null else node.getPsi
}
protected def findLastChild[T >: Null <: ScalaPsiElement](clazz: Class[T]): Option[T] = {
var child = getLastChild
while (child != null && !clazz.isInstance(child)) {
child = child.getPrevSibling
}
if (child == null) None else Some(child.asInstanceOf[T])
}
/**
* Override in inheritors
*/
def accept(visitor: ScalaElementVisitor): Unit = {
visitor.visitElement(this)
}
/**
* Override in inheritors
*/
def acceptChildren(visitor: ScalaElementVisitor): Unit =
getChildren.collect {
case element: ScalaPsiElement => element
}.foreach {
_.accept(visitor)
}
abstract override def getUseScope: SearchScope = {
val maybeFileScope = this.containingScalaFile.filter { file =>
file.isWorksheetFile || file.isScriptFile
}.map {
new LocalSearchScope(_)
}
intersectScopes(super.getUseScope, maybeFileScope)
}
}
|
apache-2.0
|
buildit/bookit-web
|
webpack.config.babel.js
|
2535
|
import { resolve, join } from 'path'
import merge from 'webpack-merge'
import parts from './webpack/parts'
if (process.env.WDS_HOST === undefined) process.env.WDS_HOST = 'localhost'
if (process.env.WDS_PORT === undefined) process.env.WDS_PORT = 3001
const isVendor = ({ resource }) => resource && resource.indexOf('node_modules') >= 0 && resource.match(/\.js$/)
const PATHS = {
root: resolve(__dirname),
sources: join(__dirname, 'src'),
build: join(__dirname, 'build'),
exclude: [
join(__dirname, 'build'),
/node_modules/,
],
}
const commonConfig = merge([
{
context: PATHS.sources,
output: {
path: PATHS.build,
filename: '[name].js',
publicPath: '/',
},
},
parts.lintStyles({ include: PATHS.sources }),
parts.lintJavascript({ include: PATHS.sources }),
parts.loadHtml(),
parts.loadAssets(),
parts.loadJavascript({ include: PATHS.sources, exclude: PATHS.exclude }),
parts.namedModulesPlugin(),
parts.noErrorsPlugin(),
])
const developmentConfig = merge([
{
output: { pathinfo: true },
},
parts.loadStyles({ include: PATHS.sources, exclude: PATHS.exclude }),
parts.devServer({ host: 'localhost', port: 3001 }),
parts.generateSourceMaps('cheap-module-eval-source-map'),
])
const productionConfig = merge([
{
output: {
chunkFilename: '[name].[chunkhash:8].js',
filename: '[name].[chunkhash:8].js',
},
performance: {
hints: 'warning',
maxEntrypointSize: 100000,
maxAssetSize: 450000,
},
},
parts.cleanPlugin({ path: PATHS.build, root: PATHS.root }),
parts.definePlugin({
'process.env': {
NODE_ENV: JSON.stringify('production'),
},
}),
parts.minifyJavascript(),
parts.extractStyles(),
parts.extractJavascript([
{ name: 'vendor', chunks: [ 'app' ], minChunks: isVendor },
{ name: 'manifest', minChunks: Infinity },
]),
parts.hashedModuleIdsPlugin(),
parts.generateSourceMaps('source-map'),
])
export default (env) => {
process.env.NODE_ENV = env
process.env.BABEL_ENV = env
const isDevelopment = env === 'development'
const config = merge([
parts.page({
title: 'React Skellington Test',
template: 'index.ejs',
entry: {
app: (
isDevelopment ?
parts.hotloader() : []
).concat([ './client/index.js' ]),
},
}),
commonConfig,
isDevelopment ?
developmentConfig : productionConfig,
])
// console.dir(config, { depth: null, colors: true })
return config
}
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Rosales/Rosaceae/Morilandia juniperina/README.md
|
177
|
# Morilandia juniperina Neck. SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
zkidkid/elasticsearch
|
core/src/main/java/org/elasticsearch/index/analysis/AnalysisService.java
|
9751
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.TextFieldMapper;
import java.io.Closeable;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.unmodifiableMap;
/**
 * Holds the named analyzers, tokenizers, char filters and token filters that are
 * configured for a single index, along with the index's default analyzers.
 */
public class AnalysisService extends AbstractIndexComponent implements Closeable {
private final Map<String, NamedAnalyzer> analyzers;
private final Map<String, TokenizerFactory> tokenizers;
private final Map<String, CharFilterFactory> charFilters;
private final Map<String, TokenFilterFactory> tokenFilters;
private final NamedAnalyzer defaultIndexAnalyzer;
private final NamedAnalyzer defaultSearchAnalyzer;
private final NamedAnalyzer defaultSearchQuoteAnalyzer;
public AnalysisService(IndexSettings indexSettings,
Map<String, AnalyzerProvider<?>> analyzerProviders,
Map<String, TokenizerFactory> tokenizerFactoryFactories,
Map<String, CharFilterFactory> charFilterFactoryFactories,
Map<String, TokenFilterFactory> tokenFilterFactoryFactories) {
super(indexSettings);
this.tokenizers = unmodifiableMap(tokenizerFactoryFactories);
this.charFilters = unmodifiableMap(charFilterFactoryFactories);
this.tokenFilters = unmodifiableMap(tokenFilterFactoryFactories);
analyzerProviders = new HashMap<>(analyzerProviders);
Map<String, NamedAnalyzer> analyzerAliases = new HashMap<>();
Map<String, NamedAnalyzer> analyzers = new HashMap<>();
for (Map.Entry<String, AnalyzerProvider<?>> entry : analyzerProviders.entrySet()) {
processAnalyzerFactory(entry.getKey(), entry.getValue(), analyzerAliases, analyzers);
}
for (Map.Entry<String, NamedAnalyzer> entry : analyzerAliases.entrySet()) {
String key = entry.getKey();
if (analyzers.containsKey(key) &&
("default".equals(key) || "default_search".equals(key) || "default_search_quoted".equals(key)) == false) {
throw new IllegalStateException("already registered analyzer with name: " + key);
} else {
NamedAnalyzer configured = entry.getValue();
analyzers.put(key, configured);
}
}
if (!analyzers.containsKey("default")) {
processAnalyzerFactory("default", new StandardAnalyzerProvider(indexSettings, null, "default", Settings.Builder.EMPTY_SETTINGS),
analyzerAliases, analyzers);
}
if (!analyzers.containsKey("default_search")) {
analyzers.put("default_search", analyzers.get("default"));
}
if (!analyzers.containsKey("default_search_quoted")) {
analyzers.put("default_search_quoted", analyzers.get("default_search"));
}
NamedAnalyzer defaultAnalyzer = analyzers.get("default");
if (defaultAnalyzer == null) {
throw new IllegalArgumentException("no default analyzer configured");
}
if (analyzers.containsKey("default_index")) {
final Version createdVersion = indexSettings.getIndexVersionCreated();
if (createdVersion.onOrAfter(Version.V_5_0_0_alpha1)) {
throw new IllegalArgumentException("setting [index.analysis.analyzer.default_index] is not supported anymore, use [index.analysis.analyzer.default] instead for index [" + index().getName() + "]");
} else {
deprecationLogger.deprecated("setting [index.analysis.analyzer.default_index] is deprecated, use [index.analysis.analyzer.default] instead for index [{}]", index().getName());
}
}
defaultIndexAnalyzer = analyzers.containsKey("default_index") ? analyzers.get("default_index") : defaultAnalyzer;
defaultSearchAnalyzer = analyzers.containsKey("default_search") ? analyzers.get("default_search") : defaultAnalyzer;
defaultSearchQuoteAnalyzer = analyzers.containsKey("default_search_quote") ? analyzers.get("default_search_quote") : defaultSearchAnalyzer;
for (Map.Entry<String, NamedAnalyzer> analyzer : analyzers.entrySet()) {
if (analyzer.getKey().startsWith("_")) {
throw new IllegalArgumentException("analyzer name must not start with '_'. got \"" + analyzer.getKey() + "\"");
}
}
this.analyzers = unmodifiableMap(analyzers);
}
private void processAnalyzerFactory(String name, AnalyzerProvider<?> analyzerFactory, Map<String, NamedAnalyzer> analyzerAliases, Map<String, NamedAnalyzer> analyzers) {
/*
* Lucene defaults positionIncrementGap to 0 in all analyzers but
         * Elasticsearch defaults it to 0 only before version 2.0 and to
         * 100 afterwards, so we override the positionIncrementGap here if
         * it doesn't match.
*/
int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
if (analyzerFactory instanceof CustomAnalyzerProvider) {
((CustomAnalyzerProvider) analyzerFactory).build(this);
/*
* Custom analyzers already default to the correct, version
             * dependent positionIncrementGap and the user is able to
* configure the positionIncrementGap directly on the analyzer so
* we disable overriding the positionIncrementGap to preserve the
* user's setting.
*/
overridePositionIncrementGap = Integer.MIN_VALUE;
}
Analyzer analyzerF = analyzerFactory.get();
if (analyzerF == null) {
throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer");
}
NamedAnalyzer analyzer;
if (analyzerF instanceof NamedAnalyzer) {
// if we got a named analyzer back, use it...
analyzer = (NamedAnalyzer) analyzerF;
if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) {
// unless the positionIncrementGap needs to be overridden
analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap);
}
} else {
analyzer = new NamedAnalyzer(name, analyzerFactory.scope(), analyzerF, overridePositionIncrementGap);
}
if (analyzers.containsKey(name)) {
throw new IllegalStateException("already registered analyzer with name: " + name);
}
analyzers.put(name, analyzer);
String strAliases = this.indexSettings.getSettings().get("index.analysis.analyzer." + analyzerFactory.name() + ".alias");
Set<String> aliases = new HashSet<>();
if (strAliases != null) {
aliases.addAll(Strings.commaDelimitedListToSet(strAliases));
}
aliases.addAll(Arrays.asList(this.indexSettings.getSettings()
.getAsArray("index.analysis.analyzer." + analyzerFactory.name() + ".alias")));
for (String alias : aliases) {
if (analyzerAliases.putIfAbsent(alias, analyzer) != null) {
throw new IllegalStateException("alias [" + alias + "] is already used by [" + analyzerAliases.get(alias).name() + "]");
}
}
}
@Override
public void close() {
for (NamedAnalyzer analyzer : analyzers.values()) {
if (analyzer.scope() == AnalyzerScope.INDEX) {
try {
analyzer.close();
} catch (NullPointerException e) {
// because analyzers are aliased, they might be closed several times
// an NPE is thrown in this case, so ignore....
} catch (Exception e) {
logger.debug("failed to close analyzer {}", analyzer);
}
}
}
}
public NamedAnalyzer analyzer(String name) {
return analyzers.get(name);
}
public NamedAnalyzer defaultIndexAnalyzer() {
return defaultIndexAnalyzer;
}
public NamedAnalyzer defaultSearchAnalyzer() {
return defaultSearchAnalyzer;
}
public NamedAnalyzer defaultSearchQuoteAnalyzer() {
return defaultSearchQuoteAnalyzer;
}
public TokenizerFactory tokenizer(String name) {
return tokenizers.get(name);
}
public CharFilterFactory charFilter(String name) {
return charFilters.get(name);
}
public TokenFilterFactory tokenFilter(String name) {
return tokenFilters.get(name);
}
}
|
apache-2.0
|
stevebargelt/jenkinsWatcher
|
readme.md
|
210
|
# Jenkins Watcher
Libraries that watch a Jenkins server and return status codes for selected projects or for all projects.
Windows 10 IoT code for the Raspberry Pi that does the same and triggers an action via GPIO (turn on a light, for example).
|
apache-2.0
|
nricheton/java-utils
|
XmlCleanupInputStream.java
|
1495
|
package nricheton.utils.io;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
/**
 * This input stream removes invalid XML characters from the underlying stream.
 * As a result, you should be able to read otherwise invalid documents.
 * <p>
 * Feel free to submit patches for this class.
*
* @author Nicolas Richeton
*/
public class XmlCleanupInputStream extends InputStream {
private static final String ENCODING = "UTF-8";
private InputStream originalIS;
private BufferedReader originalReader;
private byte[] buffer = new byte[0];
private int position = 0;
UnsupportedEncodingException error = null;
public XmlCleanupInputStream(InputStream is) {
originalIS = is;
try {
originalReader = new BufferedReader(new InputStreamReader(originalIS, ENCODING));
} catch (UnsupportedEncodingException e) {
error = e;
}
}
@Override
public int read() throws IOException {
if (error != null) {
throw new IOException(error);
}
if (buffer == null) {
return -1;
}
while (position >= buffer.length) {
String temp = originalReader.readLine();
String temp2 = null;
if (temp != null) {
temp2 = temp.replaceAll("[^\\x20-\\x7e]", "");
buffer = temp2.getBytes(ENCODING);
position = 0;
} else {
buffer = null;
break;
}
}
if (buffer == null) {
return -1;
}
int result = buffer[position];
position++;
return result;
}
}
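// Illustrative usage sketch (not part of the original class); the file name and
// the JAXP parsing code are assumptions about how a caller might use the stream.
//
//   try (InputStream in = new XmlCleanupInputStream(new FileInputStream("dirty.xml"))) {
//       Document doc = DocumentBuilderFactory.newInstance()
//               .newDocumentBuilder()
//               .parse(in);
//   }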
|
apache-2.0
|
iobeam/iobeam-client-node
|
tests/resources/test_DataStore.js
|
6869
|
"use strict";
jest.autoMockOff();
const MockDate = require("mockdate");
const DataStore = require("../../src/resources/DataStore");
const FIELDS = ["foo", "bar", "baz"];
describe("DataStore construction", () => {
it("tests constructor fails", () => {
const check = (fields) => {
try {
new DataStore(fields);
expect(false).toBe(true);
} catch (Exception) {
expect(true).toBe(true);
}
};
check(null);
check(undefined);
check("this is not an array");
check(["col1", "time", "col2"]);
check(["TimE", "col1"]);
check(["col1", "time_offset", "col2"]);
check(["Time_OffseT", "col2"]);
check(["col1", "all", "col2"]);
check(["ALL", "none"]);
check(["fine", undefined]);
check(["fine", null]);
check(["fine", ""]);
check(["fine", 5]);
});
it("tests constructor", () => {
const batch = new DataStore(FIELDS);
const have = batch.fields();
expect(have.length).toBe(FIELDS.length);
expect(batch instanceof DataStore).toBe(true);
for (let i = 0; i < FIELDS.length; i++) {
expect(have[i]).toBe(FIELDS[i]);
}
});
it("tests constructor deep copies", () => {
const temp = ["a", "b", "c"];
const batch = new DataStore(temp);
let have = batch.fields();
expect(have.length).toBe(temp.length);
expect(batch instanceof DataStore).toBe(true);
for (let i = 0; i < temp.length; i++) {
expect(have[i]).toBe(temp[i]);
}
temp.push("d");
have = batch.fields();
expect(have.length).toBe(temp.length - 1);
});
});
describe("adding to batches", () => {
it("tests full row", () => {
const check = (have, wantTime, wantData) => {
expect(have.time).toBe(wantTime);
expect(have.foo).toBe(wantData.foo);
expect(have.bar).toBe(wantData.bar);
expect(have.baz).toBe(wantData.baz);
};
const batch = new DataStore(FIELDS);
const row1 = {foo: 0.0, bar: 2.0, baz: 3.0};
batch.add(0, row1);
let have = batch.rows();
expect(have.length).toBe(1);
check(have[0], 0, row1);
const row2 = {foo: 4.0, bar: 5.0, baz: 6.0};
batch.add(1000, row2);
have = batch.rows();
expect(have.length).toBe(2);
check(have[0], 0, row1);
check(have[1], 1000, row2);
});
it("tests sparse row", () => {
const batch = new DataStore(FIELDS);
const row1 = {foo: 1.0, baz: 3.0};
batch.add(0, row1);
let have = batch.rows();
expect(have.length).toBe(1);
expect(have[0].time).toBe(0);
expect(have[0].foo).toBe(row1.foo);
expect(have[0].bar).toBe(null);
expect(have[0].baz).toBe(row1.baz);
const row2 = {foo: 1.0};
batch.add(1000, row2);
have = batch.rows();
expect(have.length).toBe(2);
expect(have[1].time).toBe(1000);
expect(have[1].foo).toBe(row1.foo);
expect(have[1].bar).toBe(null);
expect(have[1].baz).toBe(null);
});
it("tests bad data errors", () => {
const check = (badRow) => {
try {
batch.add(0, badRow);
expect(false).toBe(true);
} catch (Exception) {
expect(batch.rows().length).toBe(0);
}
};
const batch = new DataStore(FIELDS);
check({wrong: 5.0});
check({foo: 1.0, bar: 2.0, baz: 3.0, bad: 5.0});
check(null);
check(undefined);
});
});
describe("add now", () => {
const check = (have, wantTime, wantData) => {
expect(have.time).toBe(wantTime);
expect(have.foo).toBe(wantData.foo);
expect(have.bar).toBe(wantData.bar);
expect(have.baz).toBe(wantData.baz);
};
it("tests functionality", () => {
let now = 1000;
MockDate.set(now);
const batch = new DataStore(FIELDS);
const row1 = {foo: 1.0, bar: 2.0, baz: 3.0};
batch.addNow(row1);
let have = batch.rows();
expect(have.length).toBe(1);
check(have[0], now, row1);
now = 2000;
MockDate.set(now);
const row2 = {foo: 4.0, bar: 5.0, baz: 6.0};
batch.addNow(row2);
have = batch.rows();
expect(have.length).toBe(2);
check(have[1], now, row2);
});
});
describe("adding too many to batch", () => {
const batch = new DataStore(FIELDS);
it("tests size function", () => {
for (let i = 0; i < 166; i++) {
batch.add(i, {foo: i, bar: i, baz: i});
expect(batch.size()).toBe((i + 1) * 3);
}
});
it("tests than > 500 fails", () => {
try {
batch.add(167, {foo: 167});
expect(false).toBe(true);
} catch (Exception) {
expect(batch.size()).toBe(498);
}
});
});
describe("tests reset", () => {
const batch = new DataStore(FIELDS);
it("tests reset removes all rows", () => {
for (let i = 0; i < 100; i++) {
batch.add(i, {foo: i, bar: i, baz: i});
expect(batch.size()).toBe((i + 1) * 3);
}
batch.reset();
expect(batch.size()).toBe(0);
expect(batch.rows().length).toBe(0);
batch.add(0, {foo: 0, bar: 0, baz: 0});
expect(batch.rows().length).toBe(1);
});
});
describe("tests snapshot", () => {
const batch = new DataStore(FIELDS);
for (let i = 0; i < 5; i++) {
batch.add(i, {foo: i, bar: i, baz: i});
}
it("tests snapshot is a copy", () => {
const batch2 = batch.snapshot();
expect(batch2 instanceof DataStore).toBe(true);
expect(batch2.size()).toBe(batch.size());
expect(batch2.rows().length).toBe(batch.rows().length);
for (let i = 0; i < batch2.fields().length; i++ ) {
expect(batch2.fields()[i]).toBe(batch.fields()[i]);
}
for (let i = 0; i < batch2.rows().length; i++ ) {
const t2 = batch2.rows()[i];
const t1 = batch.rows()[i];
expect(Object.keys(t2).length).toBe(Object.keys(t1).length);
Object.keys(t2).forEach((k) => {
expect(t2[k]).toBe(t1[k]);
});
}
});
it("tests snapshot is deep copy", () => {
const batch2 = batch.snapshot();
expect(batch2 instanceof DataStore).toBe(true);
expect(batch2.size()).toBe(batch.size());
expect(batch2.rows().length).toBe(batch.rows().length);
batch.add(6, {foo: 6});
expect(batch2.size()).toBe(batch.size() - 3);
expect(batch2.rows().length).toBe(batch.rows().length - 1);
});
});
|
apache-2.0
|
phensley/less-compiler
|
less-core/src/main/java/com/squarespace/less/model/UnicodeRange.java
|
1955
|
/**
* Copyright (c) 2014 SQUARESPACE, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squarespace.less.model;
import static com.squarespace.less.core.LessUtils.safeEquals;
import static com.squarespace.less.model.NodeType.UNICODE_RANGE;
import com.squarespace.less.core.Buffer;
/**
* A wrapper around a CSS Unicode character range.
*/
public class UnicodeRange extends BaseNode {
/**
* String representing the Unicode character range.
*/
protected final String value;
/**
* Constructs a Unicode range node wrapping the given string.
   * @param value the string representing the Unicode character range
*/
public UnicodeRange(String value) {
this.value = value;
}
/**
* Returns the Unicode range string.
*/
public String value() {
return value;
}
/**
* See {@link Node#type()}
*/
@Override
public NodeType type() {
return UNICODE_RANGE;
}
/**
* See {@link Node#repr(Buffer)}
*/
@Override
public void repr(Buffer buf) {
buf.append(value);
}
/**
* See {@link Node#modelRepr(Buffer)}
*/
@Override
public void modelRepr(Buffer buf) {
typeRepr(buf);
posRepr(buf);
buf.append('(').append(value).append(')');
}
@Override
public boolean equals(Object obj) {
return (obj instanceof UnicodeRange) ? safeEquals(value, ((UnicodeRange)obj).value) : false;
}
@Override
public int hashCode() {
return hashCode == 0 ? buildHashCode(value) : hashCode;
}
}
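// Illustrative sketch (not part of the original file); the range literal is an
// assumption. The node simply stores and reproduces the raw CSS range string:
//
//   UnicodeRange range = new UnicodeRange("U+0025-00FF");
//   String css = range.value();      // "U+0025-00FF"
//   NodeType type = range.type();    // NodeType.UNICODE_RANGE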
|
apache-2.0
|
sonntagsgesicht/regtest
|
.aux/venv/lib/python3.9/site-packages/coverage/annotate.py
|
3528
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Source file annotation for coverage.py."""
import os
import re
from coverage.files import flat_rootname
from coverage.misc import ensure_dir, isolate_module
from coverage.report import get_analysis_to_report
os = isolate_module(os)
class AnnotateReporter:
"""Generate annotated source files showing line coverage.
This reporter creates annotated copies of the measured source files. Each
.py file is copied as a .py,cover file, with a left-hand margin annotating
each line::
> def h(x):
- if 0: #pragma: no cover
- pass
> if x == 1:
! a = 1
> else:
> a = 2
> h(2)
Executed lines use '>', lines not executed use '!', lines excluded from
consideration use '-'.
"""
def __init__(self, coverage):
self.coverage = coverage
self.config = self.coverage.config
self.directory = None
blank_re = re.compile(r"\s*(#|$)")
else_re = re.compile(r"\s*else\s*:\s*(#|$)")
def report(self, morfs, directory=None):
"""Run the report.
See `coverage.report()` for arguments.
"""
self.directory = directory
self.coverage.get_data()
for fr, analysis in get_analysis_to_report(self.coverage, morfs):
self.annotate_file(fr, analysis)
def annotate_file(self, fr, analysis):
"""Annotate a single file.
`fr` is the FileReporter for the file to annotate.
"""
statements = sorted(analysis.statements)
missing = sorted(analysis.missing)
excluded = sorted(analysis.excluded)
if self.directory:
ensure_dir(self.directory)
dest_file = os.path.join(self.directory, flat_rootname(fr.relative_filename()))
if dest_file.endswith("_py"):
dest_file = dest_file[:-3] + ".py"
dest_file += ",cover"
else:
dest_file = fr.filename + ",cover"
with open(dest_file, 'w', encoding='utf8') as dest:
i = 0
j = 0
covered = True
source = fr.source()
for lineno, line in enumerate(source.splitlines(True), start=1):
while i < len(statements) and statements[i] < lineno:
i += 1
while j < len(missing) and missing[j] < lineno:
j += 1
if i < len(statements) and statements[i] == lineno:
covered = j >= len(missing) or missing[j] > lineno
if self.blank_re.match(line):
dest.write(' ')
elif self.else_re.match(line):
# Special logic for lines containing only 'else:'.
if i >= len(statements) and j >= len(missing):
dest.write('! ')
elif i >= len(statements) or j >= len(missing):
dest.write('> ')
elif statements[i] == missing[j]:
dest.write('! ')
else:
dest.write('> ')
elif lineno in excluded:
dest.write('- ')
elif covered:
dest.write('> ')
else:
dest.write('! ')
dest.write(line)
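# Illustrative usage sketch (not part of the original module); the module and
# directory names are assumptions. The reporter is normally driven through the
# public coverage API rather than instantiated directly:
#
#     import coverage
#
#     cov = coverage.Coverage()
#     cov.start()
#     import my_module              # code under measurement (assumed name)
#     cov.stop()
#     cov.save()
#     cov.annotate(directory="annotated")   # writes my_module.py,cover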
|
apache-2.0
|
aws/aws-sdk-java
|
aws-java-sdk-sesv2/src/main/java/com/amazonaws/services/simpleemailv2/model/ListSuppressedDestinationsResult.java
|
9507
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpleemailv2.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* A list of suppressed email addresses.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/sesv2-2019-09-27/ListSuppressedDestinations" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListSuppressedDestinationsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* A list of summaries, each containing a summary for a suppressed email destination.
* </p>
*/
private java.util.List<SuppressedDestinationSummary> suppressedDestinationSummaries;
/**
* <p>
* A token that indicates that there are additional email addresses on the suppression list for your account. To
* view additional suppressed addresses, issue another request to <code>ListSuppressedDestinations</code>, and pass
* this token in the <code>NextToken</code> parameter.
* </p>
*/
private String nextToken;
/**
* <p>
* A list of summaries, each containing a summary for a suppressed email destination.
* </p>
*
* @return A list of summaries, each containing a summary for a suppressed email destination.
*/
public java.util.List<SuppressedDestinationSummary> getSuppressedDestinationSummaries() {
return suppressedDestinationSummaries;
}
/**
* <p>
* A list of summaries, each containing a summary for a suppressed email destination.
* </p>
*
* @param suppressedDestinationSummaries
* A list of summaries, each containing a summary for a suppressed email destination.
*/
public void setSuppressedDestinationSummaries(java.util.Collection<SuppressedDestinationSummary> suppressedDestinationSummaries) {
if (suppressedDestinationSummaries == null) {
this.suppressedDestinationSummaries = null;
return;
}
this.suppressedDestinationSummaries = new java.util.ArrayList<SuppressedDestinationSummary>(suppressedDestinationSummaries);
}
/**
* <p>
* A list of summaries, each containing a summary for a suppressed email destination.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setSuppressedDestinationSummaries(java.util.Collection)} or
* {@link #withSuppressedDestinationSummaries(java.util.Collection)} if you want to override the existing values.
* </p>
*
* @param suppressedDestinationSummaries
* A list of summaries, each containing a summary for a suppressed email destination.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListSuppressedDestinationsResult withSuppressedDestinationSummaries(SuppressedDestinationSummary... suppressedDestinationSummaries) {
if (this.suppressedDestinationSummaries == null) {
setSuppressedDestinationSummaries(new java.util.ArrayList<SuppressedDestinationSummary>(suppressedDestinationSummaries.length));
}
for (SuppressedDestinationSummary ele : suppressedDestinationSummaries) {
this.suppressedDestinationSummaries.add(ele);
}
return this;
}
/**
* <p>
* A list of summaries, each containing a summary for a suppressed email destination.
* </p>
*
* @param suppressedDestinationSummaries
* A list of summaries, each containing a summary for a suppressed email destination.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListSuppressedDestinationsResult withSuppressedDestinationSummaries(java.util.Collection<SuppressedDestinationSummary> suppressedDestinationSummaries) {
setSuppressedDestinationSummaries(suppressedDestinationSummaries);
return this;
}
/**
* <p>
* A token that indicates that there are additional email addresses on the suppression list for your account. To
* view additional suppressed addresses, issue another request to <code>ListSuppressedDestinations</code>, and pass
* this token in the <code>NextToken</code> parameter.
* </p>
*
* @param nextToken
* A token that indicates that there are additional email addresses on the suppression list for your account.
* To view additional suppressed addresses, issue another request to <code>ListSuppressedDestinations</code>,
* and pass this token in the <code>NextToken</code> parameter.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* A token that indicates that there are additional email addresses on the suppression list for your account. To
* view additional suppressed addresses, issue another request to <code>ListSuppressedDestinations</code>, and pass
* this token in the <code>NextToken</code> parameter.
* </p>
*
* @return A token that indicates that there are additional email addresses on the suppression list for your
* account. To view additional suppressed addresses, issue another request to
* <code>ListSuppressedDestinations</code>, and pass this token in the <code>NextToken</code> parameter.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* A token that indicates that there are additional email addresses on the suppression list for your account. To
* view additional suppressed addresses, issue another request to <code>ListSuppressedDestinations</code>, and pass
* this token in the <code>NextToken</code> parameter.
* </p>
*
* @param nextToken
* A token that indicates that there are additional email addresses on the suppression list for your account.
* To view additional suppressed addresses, issue another request to <code>ListSuppressedDestinations</code>,
* and pass this token in the <code>NextToken</code> parameter.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public ListSuppressedDestinationsResult withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getSuppressedDestinationSummaries() != null)
sb.append("SuppressedDestinationSummaries: ").append(getSuppressedDestinationSummaries()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof ListSuppressedDestinationsResult == false)
return false;
ListSuppressedDestinationsResult other = (ListSuppressedDestinationsResult) obj;
if (other.getSuppressedDestinationSummaries() == null ^ this.getSuppressedDestinationSummaries() == null)
return false;
if (other.getSuppressedDestinationSummaries() != null
&& other.getSuppressedDestinationSummaries().equals(this.getSuppressedDestinationSummaries()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getSuppressedDestinationSummaries() == null) ? 0 : getSuppressedDestinationSummaries().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
return hashCode;
}
@Override
public ListSuppressedDestinationsResult clone() {
try {
return (ListSuppressedDestinationsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
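/*
 * Usage sketch (illustration only, not part of this generated class): paging
 * through the account suppression list with NextToken. The client variable
 * "sesV2" (an AmazonSimpleEmailServiceV2 client) and the companion
 * ListSuppressedDestinationsRequest class are assumed here for the example.
 *
 *   ListSuppressedDestinationsRequest request = new ListSuppressedDestinationsRequest();
 *   ListSuppressedDestinationsResult result;
 *   do {
 *       result = sesV2.listSuppressedDestinations(request);
 *       if (result.getSuppressedDestinationSummaries() != null) {
 *           for (SuppressedDestinationSummary summary : result.getSuppressedDestinationSummaries()) {
 *               System.out.println(summary);
 *           }
 *       }
 *       request.setNextToken(result.getNextToken()); // null once the last page is reached
 *   } while (result.getNextToken() != null);
 */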
|
apache-2.0
|
d3scomp/TimeSeriesStatistics
|
benchmark/Makefile
|
3799
|
TARGET:=smodes-eval
TOOLCHAIN_PREFIX:=arm-none-eabi-
TOOLCHAIN_ROOT:=/root/gcc-arm
# Optimization level, can be [0, 1, 2, 3, s].
OPTLVL:=0
DBG:=-g3
FREERTOS:=$(CURDIR)/FreeRTOS
STARTUP:=$(CURDIR)/hardware
LINKER_SCRIPT:=$(CURDIR)/stm32_flash.ld
TSRC = $(CURDIR)/../src
INCLUDE=-I$(CURDIR)/src
INCLUDE+=-I$(CURDIR)/hardware
INCLUDE+=-I$(TSRC)
INCLUDE+=-I$(FREERTOS)/include
INCLUDE+=-I$(FREERTOS)/portable/GCC/ARM_CM4F
INCLUDE+=-I$(CURDIR)/libraries/CMSIS/Device/ST/STM32F4xx/Include
INCLUDE+=-I$(CURDIR)/libraries/CMSIS/Include
INCLUDE+=-I$(CURDIR)/libraries/STM32F4xx_StdPeriph_Driver/inc
INCLUDE+=-I$(CURDIR)/config
BUILD_DIR = $(CURDIR)/build
BIN_DIR = $(CURDIR)/binary
# vpath tells make where to find the source files, so object files can be
# written to the build directory instead of next to their sources
vpath %.c $(CURDIR)/src $(CURDIR)/libraries/STM32F4xx_StdPeriph_Driver/src \
$(CURDIR)/libraries/syscall $(CURDIR)/hardware
vpath %.s $(STARTUP)
vpath %.cpp $(CURDIR)/src $(TSRC)
# Project Source Files
SRC+=startup_stm32f4xx.s
SRC+=stm32f4xx_it.c
SRC+=system_stm32f4xx.c
SRC+=syscalls.c
#SRC+=main2.cpp
SRC+=main.cpp
SRC+=hooks.cpp
SRC+=Button.cpp
SRC+=LED.cpp
SRC+=UART.cpp
SRC+=StudentsDistribution.cpp
SRC+=TTable.cpp
# Standard Peripheral Source Files
SRC+=stm32f4xx_syscfg.c
SRC+=misc.c
SRC+=stm32f4xx_adc.c
SRC+=stm32f4xx_dac.c
SRC+=stm32f4xx_dma.c
SRC+=stm32f4xx_exti.c
SRC+=stm32f4xx_flash.c
SRC+=stm32f4xx_gpio.c
SRC+=stm32f4xx_i2c.c
SRC+=stm32f4xx_rcc.c
SRC+=stm32f4xx_tim.c
SRC+=stm32f4xx_usart.c
SRC+=stm32f4xx_rng.c
CDEFS=-DUSE_STDPERIPH_DRIVER
CDEFS+=-DSTM32F4XX
CDEFS+=-DHSE_VALUE=8000000
CDEFS+=-D__FPU_PRESENT=1
CDEFS+=-D__FPU_USED=1
CDEFS+=-DARM_MATH_CM4
MCUFLAGS=-mcpu=cortex-m4 -mthumb -mfloat-abi=hard -mfpu=fpv4-sp-d16 -mthumb-interwork -MMD -MP -mlittle-endian
COMMONFLAGS=-O$(OPTLVL) $(DBG) -Wall
CFLAGS=$(COMMONFLAGS) $(MCUFLAGS) $(INCLUDE) $(CDEFS)
CPPFLAGS = $(CFLAGS) -fno-exceptions -fno-rtti -std=c++11 -fno-use-cxa-atexit
#LDFLAGS=$(COMMONFLAGS) $(MCUFLAGS) -fno-exceptions -ffunction-sections -fdata-sections -nostartfiles -Wl,--gc-sections,-T$(LINKER_SCRIPT)
LDFLAGS=$(COMMONFLAGS) -T$(LINKER_SCRIPT) -Wl,-Map,$(BIN_DIR)/$(TARGET).map $(CPPFLAGS)
CC=$(TOOLCHAIN_PREFIX)gcc
CPP=$(TOOLCHAIN_PREFIX)g++
LD=$(TOOLCHAIN_PREFIX)g++
OBJCOPY=$(TOOLCHAIN_PREFIX)objcopy
OSIZE=$(TOOLCHAIN_PREFIX)size
AS=$(TOOLCHAIN_PREFIX)as
OBJ = $(patsubst %.c,$(BUILD_DIR)/%.o,$(SRC))
OBJ := $(patsubst %.cpp,$(BUILD_DIR)/%.o,$(OBJ))
OBJ := $(patsubst %.s,$(BUILD_DIR)/%.o,$(OBJ))
DEP = $(patsubst %.c,$(BUILD_DIR)/%.d,$(SRC))
DEP := $(patsubst %.cpp,$(BUILD_DIR)/%.d,$(DEP))
DEP := $(patsubst %.s,,$(DEP))
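# OBJ and DEP map every source file to a matching object/dependency file under
# $(BUILD_DIR); assembly sources are dropped from DEP since no .d file is
# generated for them.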
$(BUILD_DIR)/%.o: %.c
@echo [CC] $(notdir $<)
$(CC) $(CFLAGS) $< -c -o $@
$(BUILD_DIR)/%.o: %.cpp
@echo [C++] $(notdir $<)
$(CPP) $(CPPFLAGS) $< -c -o $@
$(BUILD_DIR)/%.o: %.s
@echo [AS] $(notdir $<)
$(AS) $< -o $@
$(BUILD_DIR)/%.dep: %.c
$(CC) -M $(CFLAGS) "$<" > "$@"
$(BUILD_DIR)/%.dep: %.cpp
$(CPP) -M $(CPPFLAGS) "$<" > "$@"
all: $(BIN_DIR)/$(TARGET).elf
$(BIN_DIR)/$(TARGET).elf: $(OBJ)
@echo [LD] $(TARGET).elf
$(LD) -o $(BIN_DIR)/$(TARGET).elf $(LDFLAGS) $(OBJ) $(LDLIBS)
@echo [OBJCOPY] $(TARGET).hex
@$(OBJCOPY) -O ihex $(BIN_DIR)/$(TARGET).elf $(BIN_DIR)/$(TARGET).hex
@$(OSIZE) --format=berkeley $(BIN_DIR)/$(TARGET).elf
# @echo [OBJCOPY] $(TARGET).bin
# @$(OBJCOPY) -O binary $(BIN_DIR)/$(TARGET).elf $(BIN_DIR)/$(TARGET).bin
.PHONY: clean
clean:
@echo [RM] OBJ
@rm -f $(OBJ)
@echo [RM] BIN
@rm -f $(BIN_DIR)/$(TARGET).elf
@rm -f $(BIN_DIR)/$(TARGET).hex
@rm -f $(BIN_DIR)/$(TARGET).bin
openocd:
#openocd -f interface/stlink-v2.cfg -f target/stm32f4x_stlink.cfg
openocd -f board/stm32f4discovery.cfg
flash: all
openocd -f board/stm32f4discovery.cfg -c "program $(BIN_DIR)/$(TARGET).elf verify reset"
-include $(DEP)
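# Usage sketch (derived from the rules above):
#   make         - build binary/$(TARGET).elf and .hex
#   make flash   - program the STM32F4-Discovery board through OpenOCD
#   make clean   - remove object files and binaries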
|
apache-2.0
|
Shmuma/hadoop
|
docs/api/org/apache/hadoop/record/class-use/BinaryRecordInput.html
|
8350
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_26) on Mon May 07 13:00:00 PDT 2012 -->
<TITLE>
Uses of Class org.apache.hadoop.record.BinaryRecordInput (Hadoop 0.20.2-cdh3u4 API)
</TITLE>
<META NAME="date" CONTENT="2012-05-07">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.record.BinaryRecordInput (Hadoop 0.20.2-cdh3u4 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/record/BinaryRecordInput.html" title="class in org.apache.hadoop.record"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/record//class-useBinaryRecordInput.html" target="_top"><B>FRAMES</B></A>
<A HREF="BinaryRecordInput.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.hadoop.record.BinaryRecordInput</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../org/apache/hadoop/record/BinaryRecordInput.html" title="class in org.apache.hadoop.record">BinaryRecordInput</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.hadoop.record"><B>org.apache.hadoop.record</B></A></TD>
<TD>Hadoop record I/O contains classes and a record description language
translator for simplifying serialization and deserialization of records in a
language-neutral manner. </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.hadoop.record"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../org/apache/hadoop/record/BinaryRecordInput.html" title="class in org.apache.hadoop.record">BinaryRecordInput</A> in <A HREF="../../../../../org/apache/hadoop/record/package-summary.html">org.apache.hadoop.record</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../org/apache/hadoop/record/package-summary.html">org.apache.hadoop.record</A> that return <A HREF="../../../../../org/apache/hadoop/record/BinaryRecordInput.html" title="class in org.apache.hadoop.record">BinaryRecordInput</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="../../../../../org/apache/hadoop/record/BinaryRecordInput.html" title="class in org.apache.hadoop.record">BinaryRecordInput</A></CODE></FONT></TD>
<TD><CODE><B>BinaryRecordInput.</B><B><A HREF="../../../../../org/apache/hadoop/record/BinaryRecordInput.html#get(java.io.DataInput)">get</A></B>(<A HREF="http://java.sun.com/javase/6/docs/api/java/io/DataInput.html?is-external=true" title="class or interface in java.io">DataInput</A> inp)</CODE>
<BR>
Get a thread-local record input for the supplied DataInput.</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/hadoop/record/BinaryRecordInput.html" title="class in org.apache.hadoop.record"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/hadoop/record//class-useBinaryRecordInput.html" target="_top"><B>FRAMES</B></A>
<A HREF="BinaryRecordInput.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2009 The Apache Software Foundation
</BODY>
</HTML>
|
apache-2.0
|
masayukig/tempest
|
tempest/tests/cmd/test_verify_tempest_config.py
|
29474
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import fixtures
import mock
from oslo_serialization import jsonutils as json
from tempest import clients
from tempest.cmd import init
from tempest.cmd import verify_tempest_config
from tempest.common import credentials_factory
from tempest import config
from tempest.lib.common import rest_client
from tempest.lib.common.utils import data_utils
from tempest.lib import exceptions as lib_exc
from tempest.tests import base
from tempest.tests import fake_config
class TestGetAPIVersions(base.TestCase):
def test_remove_version_project(self):
f = verify_tempest_config._remove_version_project
self.assertEqual('/', f('/v2.1/%s/' % data_utils.rand_uuid_hex()))
self.assertEqual('', f('/v2.1/tenant_id'))
self.assertEqual('', f('/v3'))
self.assertEqual('/', f('/v3/'))
self.assertEqual('/something/', f('/something/v2.1/tenant_id/'))
self.assertEqual('/something', f('/something/v2.1/tenant_id'))
self.assertEqual('/something', f('/something/v3'))
self.assertEqual('/something/', f('/something/v3/'))
self.assertEqual('/', f('/')) # http://localhost/
self.assertEqual('', f('')) # http://localhost
def test_url_grab_versioned_nova_nossl(self):
base_url = 'http://127.0.0.1:8774/v2/'
endpoint = verify_tempest_config._get_unversioned_endpoint(base_url)
self.assertEqual('http://127.0.0.1:8774/', endpoint)
def test_url_grab_versioned_nova_ssl(self):
base_url = 'https://127.0.0.1:8774/v3/'
endpoint = verify_tempest_config._get_unversioned_endpoint(base_url)
self.assertEqual('https://127.0.0.1:8774/', endpoint)
def test_get_unversioned_endpoint_base(self):
base_url = 'https://127.0.0.1:5000/'
endpoint = verify_tempest_config._get_unversioned_endpoint(base_url)
self.assertEqual('https://127.0.0.1:5000/', endpoint)
def test_get_unversioned_endpoint_subpath(self):
base_url = 'https://127.0.0.1/identity/v3'
endpoint = verify_tempest_config._get_unversioned_endpoint(base_url)
self.assertEqual('https://127.0.0.1/identity', endpoint)
def test_get_unversioned_endpoint_subpath_trailing_solidus(self):
base_url = 'https://127.0.0.1/identity/v3/'
endpoint = verify_tempest_config._get_unversioned_endpoint(base_url)
self.assertEqual('https://127.0.0.1/identity/', endpoint)
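    # The assertions above pin down the contract of _get_unversioned_endpoint:
    # the trailing /vN[.M] segment (and optional trailing slash) is stripped,
    # while the scheme, host, port and any sub-path such as /identity are kept.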
class TestDiscovery(base.TestCase):
def setUp(self):
super(TestDiscovery, self).setUp()
self.useFixture(fake_config.ConfigFixture())
self.patchobject(config, 'TempestConfigPrivate',
fake_config.FakePrivate)
def test_get_keystone_api_versions(self):
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, '_get_unversioned_endpoint',
return_value='http://fake_endpoint:5000'))
fake_resp = {'versions': {'values': [{'id': 'v2.0'}, {'id': 'v3.0'}]}}
fake_resp = json.dumps(fake_resp)
self.useFixture(fixtures.MockPatch(
'tempest.lib.common.http.ClosingHttp.request',
return_value=(None, fake_resp)))
fake_os = mock.MagicMock()
versions = verify_tempest_config._get_api_versions(fake_os, 'keystone')
self.assertIn('v2.0', versions)
self.assertIn('v3.0', versions)
def test_get_cinder_api_versions(self):
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, '_get_unversioned_endpoint',
return_value='http://fake_endpoint:5000'))
fake_resp = {'versions': [{'id': 'v1.0'}, {'id': 'v2.0'}]}
fake_resp = json.dumps(fake_resp)
self.useFixture(fixtures.MockPatch(
'tempest.lib.common.http.ClosingHttp.request',
return_value=(None, fake_resp)))
fake_os = mock.MagicMock()
versions = verify_tempest_config._get_api_versions(fake_os, 'cinder')
self.assertIn('v1.0', versions)
self.assertIn('v2.0', versions)
def test_get_nova_versions(self):
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, '_get_unversioned_endpoint',
return_value='http://fake_endpoint:5000'))
fake_resp = {'versions': [{'id': 'v2.0'}, {'id': 'v3.0'}]}
fake_resp = json.dumps(fake_resp)
self.useFixture(fixtures.MockPatch(
'tempest.lib.common.http.ClosingHttp.request',
return_value=(None, fake_resp)))
fake_os = mock.MagicMock()
versions = verify_tempest_config._get_api_versions(fake_os, 'nova')
self.assertIn('v2.0', versions)
self.assertIn('v3.0', versions)
def test_get_versions_invalid_response(self):
        # When the response body is not valid JSON, an error is logged.
mock_log_error = self.useFixture(fixtures.MockPatchObject(
verify_tempest_config.LOG, 'error')).mock
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, '_get_unversioned_endpoint'))
# Simulated response is not JSON.
sample_body = (
'<html><head>Sample Response</head><body>This is the sample page '
'for the web server. Why are you requesting it?</body></html>')
self.useFixture(fixtures.MockPatch(
'tempest.lib.common.http.ClosingHttp.request',
return_value=(None, sample_body)))
# service value doesn't matter, just needs to match what
# _get_api_versions puts in its client_dict.
self.assertRaises(ValueError, verify_tempest_config._get_api_versions,
os=mock.MagicMock(), service='keystone')
self.assertTrue(mock_log_error.called)
def test_verify_api_versions(self):
api_services = ['cinder', 'glance', 'keystone']
fake_os = mock.MagicMock()
for svc in api_services:
m = 'verify_%s_api_versions' % svc
with mock.patch.object(verify_tempest_config, m) as verify_mock:
verify_tempest_config.verify_api_versions(fake_os, svc, True)
verify_mock.assert_called_once_with(fake_os, True)
def test_verify_api_versions_not_implemented(self):
api_services = ['cinder', 'glance', 'keystone']
fake_os = mock.MagicMock()
for svc in api_services:
m = 'verify_%s_api_versions' % svc
with mock.patch.object(verify_tempest_config, m) as verify_mock:
verify_tempest_config.verify_api_versions(fake_os, 'foo', True)
self.assertFalse(verify_mock.called)
@mock.patch('tempest.lib.common.http.ClosingHttp.request')
def test_verify_keystone_api_versions_no_v3(self, mock_request):
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, '_get_unversioned_endpoint',
return_value='http://fake_endpoint:5000'))
fake_resp = {'versions': {'values': [{'id': 'v2.0'}]}}
fake_resp = json.dumps(fake_resp)
mock_request.return_value = (None, fake_resp)
fake_os = mock.MagicMock()
with mock.patch.object(verify_tempest_config,
'print_and_or_update') as print_mock:
verify_tempest_config.verify_keystone_api_versions(fake_os, True)
print_mock.assert_called_once_with('api_v3',
'identity-feature-enabled',
False, True)
@mock.patch('tempest.lib.common.http.ClosingHttp.request')
def test_verify_cinder_api_versions_no_v3(self, mock_request):
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, '_get_unversioned_endpoint',
return_value='http://fake_endpoint:5000'))
fake_resp = {'versions': [{'id': 'v2.0'}]}
fake_resp = json.dumps(fake_resp)
mock_request.return_value = (None, fake_resp)
fake_os = mock.MagicMock()
with mock.patch.object(verify_tempest_config,
'print_and_or_update') as print_mock:
verify_tempest_config.verify_cinder_api_versions(fake_os, True)
print_mock.assert_any_call('api_v3', 'volume-feature-enabled',
False, True)
self.assertEqual(1, print_mock.call_count)
@mock.patch('tempest.lib.common.http.ClosingHttp.request')
def test_verify_cinder_api_versions_no_v2(self, mock_request):
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, '_get_unversioned_endpoint',
return_value='http://fake_endpoint:5000'))
fake_resp = {'versions': [{'id': 'v3.0'}]}
fake_resp = json.dumps(fake_resp)
mock_request.return_value = (None, fake_resp)
fake_os = mock.MagicMock()
with mock.patch.object(verify_tempest_config,
'print_and_or_update') as print_mock:
verify_tempest_config.verify_cinder_api_versions(fake_os, True)
print_mock.assert_any_call('api_v2', 'volume-feature-enabled',
False, True)
self.assertEqual(1, print_mock.call_count)
@mock.patch('tempest.lib.common.http.ClosingHttp.request')
def test_verify_cinder_api_versions_no_v1(self, mock_request):
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, '_get_unversioned_endpoint',
return_value='http://fake_endpoint:5000'))
fake_resp = {'versions': [{'id': 'v2.0'}, {'id': 'v3.0'}]}
fake_resp = json.dumps(fake_resp)
mock_request.return_value = (None, fake_resp)
fake_os = mock.MagicMock()
with mock.patch.object(verify_tempest_config,
'print_and_or_update') as print_mock:
verify_tempest_config.verify_cinder_api_versions(fake_os, True)
print_mock.assert_not_called()
def test_verify_glance_version_no_v2_with_v1_1(self):
# This test verifies that wrong config api_v2 = True is detected
class FakeClient(object):
def get_versions(self):
return (None, ['v1.1'])
fake_os = mock.MagicMock()
fake_module = mock.MagicMock()
fake_module.ImagesClient = FakeClient
fake_os.image_v1 = fake_module
with mock.patch.object(verify_tempest_config,
'print_and_or_update') as print_mock:
verify_tempest_config.verify_glance_api_versions(fake_os, True)
print_mock.assert_called_with('api_v2', 'image-feature-enabled',
False, True)
def test_verify_glance_version_no_v2_with_v1_0(self):
# This test verifies that wrong config api_v2 = True is detected
class FakeClient(object):
def get_versions(self):
return (None, ['v1.0'])
fake_os = mock.MagicMock()
fake_module = mock.MagicMock()
fake_module.ImagesClient = FakeClient
fake_os.image_v1 = fake_module
with mock.patch.object(verify_tempest_config,
'print_and_or_update') as print_mock:
verify_tempest_config.verify_glance_api_versions(fake_os, True)
print_mock.assert_called_with('api_v2', 'image-feature-enabled',
False, True)
def test_verify_glance_version_no_v1(self):
# This test verifies that wrong config api_v1 = True is detected
class FakeClient(object):
def get_versions(self):
raise lib_exc.NotFound()
def list_versions(self):
return {'versions': [{'id': 'v2.0'}]}
fake_os = mock.MagicMock()
fake_module = mock.MagicMock()
fake_module.ImagesClient = FakeClient
fake_module.VersionsClient = FakeClient
fake_os.image_v1 = fake_module
fake_os.image_v2 = fake_module
with mock.patch.object(verify_tempest_config,
'print_and_or_update') as print_mock:
verify_tempest_config.verify_glance_api_versions(fake_os, True)
print_mock.assert_not_called()
def test_verify_glance_version_no_version(self):
# This test verifies that wrong config api_v1 = True is detected
class FakeClient(object):
def get_versions(self):
raise lib_exc.NotFound()
def list_versions(self):
raise lib_exc.NotFound()
fake_os = mock.MagicMock()
fake_module = mock.MagicMock()
fake_module.ImagesClient = FakeClient
fake_module.VersionsClient = FakeClient
fake_os.image_v1 = fake_module
fake_os.image_v2 = fake_module
with mock.patch.object(verify_tempest_config,
'print_and_or_update') as print_mock:
verify_tempest_config.verify_glance_api_versions(fake_os, True)
print_mock.assert_called_once_with('glance',
'service-available',
False, True)
def test_verify_extensions_neutron(self):
def fake_list_extensions():
return {'extensions': [{'alias': 'fake1'},
{'alias': 'fake2'},
{'alias': 'not_fake'}]}
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_extensions = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['fake1', 'fake2', 'fake3'])))
results = verify_tempest_config.verify_extensions(fake_os,
'neutron', {})
self.assertIn('neutron', results)
self.assertIn('fake1', results['neutron'])
self.assertTrue(results['neutron']['fake1'])
self.assertIn('fake2', results['neutron'])
self.assertTrue(results['neutron']['fake2'])
self.assertIn('fake3', results['neutron'])
self.assertFalse(results['neutron']['fake3'])
self.assertIn('not_fake', results['neutron'])
self.assertFalse(results['neutron']['not_fake'])
def test_verify_extensions_neutron_all(self):
def fake_list_extensions():
return {'extensions': [{'alias': 'fake1'},
{'alias': 'fake2'},
{'alias': 'not_fake'}]}
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_extensions = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['all'])))
results = verify_tempest_config.verify_extensions(fake_os,
'neutron', {})
self.assertIn('neutron', results)
self.assertIn('extensions', results['neutron'])
self.assertEqual(sorted(['fake1', 'fake2', 'not_fake']),
sorted(results['neutron']['extensions']))
def test_verify_extensions_neutron_none(self):
def fake_list_extensions():
return {'extensions': []}
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_extensions = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['all'])))
results = verify_tempest_config.verify_extensions(fake_os,
'neutron', {})
self.assertIn('neutron', results)
self.assertIn('extensions', results['neutron'])
self.assertEqual([], results['neutron']['extensions'])
def test_verify_extensions_cinder(self):
def fake_list_extensions():
return {'extensions': [{'alias': 'fake1'},
{'alias': 'fake2'},
{'alias': 'not_fake'}]}
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_extensions = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['fake1', 'fake2', 'fake3'])))
results = verify_tempest_config.verify_extensions(fake_os,
'cinder', {})
self.assertIn('cinder', results)
self.assertIn('fake1', results['cinder'])
self.assertTrue(results['cinder']['fake1'])
self.assertIn('fake2', results['cinder'])
self.assertTrue(results['cinder']['fake2'])
self.assertIn('fake3', results['cinder'])
self.assertFalse(results['cinder']['fake3'])
self.assertIn('not_fake', results['cinder'])
self.assertFalse(results['cinder']['not_fake'])
def test_verify_extensions_cinder_all(self):
def fake_list_extensions():
return {'extensions': [{'alias': 'fake1'},
{'alias': 'fake2'},
{'alias': 'not_fake'}]}
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_extensions = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['all'])))
results = verify_tempest_config.verify_extensions(fake_os,
'cinder', {})
self.assertIn('cinder', results)
self.assertIn('extensions', results['cinder'])
self.assertEqual(sorted(['fake1', 'fake2', 'not_fake']),
sorted(results['cinder']['extensions']))
def test_verify_extensions_cinder_none(self):
def fake_list_extensions():
return {'extensions': []}
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_extensions = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['all'])))
results = verify_tempest_config.verify_extensions(fake_os,
'cinder', {})
self.assertIn('cinder', results)
self.assertIn('extensions', results['cinder'])
self.assertEqual([], results['cinder']['extensions'])
def test_verify_extensions_nova(self):
def fake_list_extensions():
return ([{'alias': 'fake1'}, {'alias': 'fake2'},
{'alias': 'not_fake'}])
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_extensions = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['fake1', 'fake2', 'fake3'])))
results = verify_tempest_config.verify_extensions(fake_os,
'nova', {})
self.assertIn('nova', results)
self.assertIn('fake1', results['nova'])
self.assertTrue(results['nova']['fake1'])
self.assertIn('fake2', results['nova'])
self.assertTrue(results['nova']['fake2'])
self.assertIn('fake3', results['nova'])
self.assertFalse(results['nova']['fake3'])
self.assertIn('not_fake', results['nova'])
self.assertFalse(results['nova']['not_fake'])
def test_verify_extensions_nova_all(self):
def fake_list_extensions():
return ({'extensions': [{'alias': 'fake1'},
{'alias': 'fake2'},
{'alias': 'not_fake'}]})
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_extensions = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['all'])))
results = verify_tempest_config.verify_extensions(fake_os,
'nova', {})
self.assertIn('nova', results)
self.assertIn('extensions', results['nova'])
self.assertEqual(sorted(['fake1', 'fake2', 'not_fake']),
sorted(results['nova']['extensions']))
def test_verify_extensions_nova_none(self):
def fake_list_extensions():
return ({'extensions': []})
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_extensions = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['all'])))
results = verify_tempest_config.verify_extensions(fake_os,
'nova', {})
self.assertIn('nova', results)
self.assertIn('extensions', results['nova'])
self.assertEqual([], results['nova']['extensions'])
def test_verify_extensions_swift(self):
def fake_list_extensions():
return {'fake1': 'metadata',
'fake2': 'metadata',
'not_fake': 'metadata',
'swift': 'metadata'}
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_capabilities = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['fake1', 'fake2', 'fake3'])))
results = verify_tempest_config.verify_extensions(fake_os, 'swift', {})
self.assertIn('swift', results)
self.assertIn('fake1', results['swift'])
self.assertTrue(results['swift']['fake1'])
self.assertIn('fake2', results['swift'])
self.assertTrue(results['swift']['fake2'])
self.assertIn('fake3', results['swift'])
self.assertFalse(results['swift']['fake3'])
self.assertIn('not_fake', results['swift'])
self.assertFalse(results['swift']['not_fake'])
def test_verify_extensions_swift_all(self):
def fake_list_extensions():
return {'fake1': 'metadata',
'fake2': 'metadata',
'not_fake': 'metadata',
'swift': 'metadata'}
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_capabilities = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['all'])))
results = verify_tempest_config.verify_extensions(fake_os,
'swift', {})
self.assertIn('swift', results)
self.assertIn('extensions', results['swift'])
self.assertEqual(sorted(['not_fake', 'fake1', 'fake2']),
sorted(results['swift']['extensions']))
def test_verify_extensions_swift_none(self):
def fake_list_extensions():
return {'swift': 'metadata'}
fake_os = mock.MagicMock()
fake_client = mock.MagicMock()
fake_client.list_capabilities = fake_list_extensions
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_extension_client',
return_value=fake_client))
self.useFixture(fixtures.MockPatchObject(
verify_tempest_config, 'get_enabled_extensions',
return_value=(['all'])))
results = verify_tempest_config.verify_extensions(fake_os,
'swift', {})
self.assertIn('swift', results)
self.assertIn('extensions', results['swift'])
self.assertEqual([], results['swift']['extensions'])
def test_get_extension_client(self):
creds = credentials_factory.get_credentials(
fill_in=False, username='fake_user', project_name='fake_project',
password='fake_password')
os = clients.Manager(creds)
for service in ['nova', 'neutron', 'swift', 'cinder']:
extensions_client = verify_tempest_config.get_extension_client(
os, service)
self.assertIsInstance(extensions_client, rest_client.RestClient)
def test_get_extension_client_sysexit(self):
creds = credentials_factory.get_credentials(
fill_in=False, username='fake_user', project_name='fake_project',
password='fake_password')
os = clients.Manager(creds)
self.assertRaises(SystemExit,
verify_tempest_config.get_extension_client,
os, 'fakeservice')
def test_get_config_file(self):
conf_dir = os.path.join(os.getcwd(), 'etc/')
conf_file = "tempest.conf.sample"
local_sample_conf_file = os.path.join(conf_dir, conf_file)
def fake_environ_get(key, default=None):
if key == 'TEMPEST_CONFIG_DIR':
return conf_dir
elif key == 'TEMPEST_CONFIG':
return 'tempest.conf.sample'
return default
with mock.patch('os.environ.get', side_effect=fake_environ_get,
autospec=True):
init_cmd = init.TempestInit(None, None)
init_cmd.generate_sample_config(os.path.join(conf_dir, os.pardir))
self.assertTrue(os.path.isfile(local_sample_conf_file),
local_sample_conf_file)
file_pointer = verify_tempest_config._get_config_file()
self.assertEqual(local_sample_conf_file, file_pointer.name)
with open(local_sample_conf_file, 'r+') as f:
local_sample_conf_contents = f.read()
self.assertEqual(local_sample_conf_contents, file_pointer.read())
if file_pointer:
file_pointer.close()
def test_print_and_or_update_true(self):
with mock.patch.object(
verify_tempest_config, 'change_option') as test_mock:
verify_tempest_config.print_and_or_update(
'fakeservice', 'fake-service-available', False, True)
test_mock.assert_called_once_with(
'fakeservice', 'fake-service-available', False)
def test_print_and_or_update_false(self):
with mock.patch.object(
verify_tempest_config, 'change_option') as test_mock:
verify_tempest_config.print_and_or_update(
'fakeservice', 'fake-service-available', False, False)
test_mock.assert_not_called()
def test_contains_version_positive_data(self):
self.assertTrue(
verify_tempest_config.contains_version('v1.', ['v1.0', 'v2.0']))
def test_contains_version_negative_data(self):
self.assertFalse(
verify_tempest_config.contains_version('v5.', ['v1.0', 'v2.0']))
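# Usage sketch (assumption, not part of the module): these unit tests are
# normally run through tox/stestr, e.g.
#   stestr run tempest.tests.cmd.test_verify_tempest_config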
|
apache-2.0
|
glahiru/airavata
|
modules/commons/workflow-execution-context/src/main/java/org/apache/airavata/common/workflow/execution/context/WorkflowContextHeaderBuilder.java
|
20575
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.common.workflow.execution.context;
import org.apache.airavata.common.utils.XMLUtil;
import org.apache.airavata.schemas.wec.ApplicationOutputDataHandlingDocument;
import org.apache.airavata.schemas.wec.ApplicationSchedulingContextDocument;
import org.apache.airavata.schemas.wec.ContextHeaderDocument;
import org.apache.airavata.schemas.wec.NameValuePairType;
import org.apache.airavata.schemas.wec.SecurityContextDocument;
import org.apache.airavata.schemas.wec.SoaServiceEprsDocument;
import org.apache.airavata.schemas.wec.WorkflowMonitoringContextDocument;
import org.apache.airavata.schemas.wec.WorkflowOutputDataHandlingDocument;
import org.apache.airavata.schemas.wec.WorkflowSchedulingContextDocument;
import org.apache.xmlbeans.XmlException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmlpull.v1.builder.XmlElement;
public class WorkflowContextHeaderBuilder {
protected static final Logger log = LoggerFactory.getLogger(WorkflowContextHeaderBuilder.class);
private WorkflowMonitoringContextDocument.WorkflowMonitoringContext workflowMonitoringContext = null;
private SecurityContextDocument.SecurityContext securityContext = null;
private SoaServiceEprsDocument.SoaServiceEprs soaServiceEprs = null;
private String userIdentifier = null;
private String submissionUser = null;
private WorkflowOutputDataHandlingDocument.WorkflowOutputDataHandling workflowOutputDataHandling = null;
private ContextHeaderDocument.ContextHeader contextHeader = null;
private WorkflowSchedulingContextDocument.WorkflowSchedulingContext workflowSchedulingContext = null;
public static ThreadLocal<ContextHeaderDocument.ContextHeader> currentContextHeader = new ThreadLocal<ContextHeaderDocument.ContextHeader>();
public WorkflowContextHeaderBuilder(ContextHeaderDocument.ContextHeader document){
ContextHeaderDocument parse = null;
try {
ContextHeaderDocument doc = ContextHeaderDocument.Factory.newInstance();
doc.setContextHeader(document);
parse = ContextHeaderDocument.Factory.parse(doc.xmlText());
} catch (XmlException e) {
e.printStackTrace();
}
this.contextHeader = parse.getContextHeader();
if (contextHeader!=null) {
this.securityContext = contextHeader.getSecurityContext();
this.workflowSchedulingContext=contextHeader.getWorkflowSchedulingContext();
this.soaServiceEprs=contextHeader.getSoaServiceEprs();
this.workflowMonitoringContext=contextHeader.getWorkflowMonitoringContext();
this.workflowOutputDataHandling=contextHeader.getWorkflowOutputDataHandling();
this.userIdentifier=contextHeader.getUserIdentifier();
}
}
public WorkflowContextHeaderBuilder(String brokerUrl, String gfacUrl, String registryUrl, String experimentId,
String workflowId, String msgBoxUrl) {
this.contextHeader = ContextHeaderDocument.ContextHeader.Factory.newInstance();
this.soaServiceEprs = SoaServiceEprsDocument.SoaServiceEprs.Factory.newInstance();
this.soaServiceEprs.setGfacUrl(gfacUrl);
this.soaServiceEprs.setRegistryUrl(registryUrl);
addWorkflowMonitoringContext(brokerUrl, experimentId, workflowId, msgBoxUrl);
this.contextHeader.setSoaServiceEprs(this.soaServiceEprs);
this.contextHeader.setSecurityContext(SecurityContextDocument.SecurityContext.Factory.newInstance());
this.contextHeader
.setWorkflowSchedulingContext(WorkflowSchedulingContextDocument.WorkflowSchedulingContext.Factory
.newInstance());
}
public static void setCurrentContextHeader(ContextHeaderDocument.ContextHeader contextHeader){
currentContextHeader.set(contextHeader);
}
public static ContextHeaderDocument.ContextHeader getCurrentContextHeader(){
if(currentContextHeader.get() == null){
log.warn("Null WorkflowContext Header, if you are directly using GFacAPI you will be fine !");
// This is a fix done to fix test failures
ContextHeaderDocument.ContextHeader contextHeader1 = ContextHeaderDocument.ContextHeader.Factory.newInstance();
WorkflowMonitoringContextDocument.WorkflowMonitoringContext workflowMonitoringContext1 = contextHeader1.addNewWorkflowMonitoringContext();
workflowMonitoringContext1.setExperimentId("");
return contextHeader1;
}else{
return currentContextHeader.get();
}
}
public void addWorkflowMonitoringContext(String brokerUrl, String experimentId, String workflowId, String msgBoxUrl) {
this.workflowMonitoringContext = WorkflowMonitoringContextDocument.WorkflowMonitoringContext.Factory
.newInstance();
this.workflowMonitoringContext.setEventPublishEpr(brokerUrl);
this.workflowMonitoringContext.setWorkflowInstanceId(workflowId);
this.workflowMonitoringContext.setExperimentId(experimentId);
this.workflowMonitoringContext.setMsgBoxEpr(msgBoxUrl);
this.contextHeader.setWorkflowMonitoringContext(this.workflowMonitoringContext);
}
public WorkflowContextHeaderBuilder setWorkflowMonitoringContext(
WorkflowMonitoringContextDocument.WorkflowMonitoringContext workflowMonitoringContext) {
this.workflowMonitoringContext = workflowMonitoringContext;
return this;
}
public WorkflowContextHeaderBuilder setSecurityContext(SecurityContextDocument.SecurityContext securityContext) {
this.securityContext = securityContext;
return this;
}
public WorkflowContextHeaderBuilder setWorkflowOutputDataHandling(
WorkflowOutputDataHandlingDocument.WorkflowOutputDataHandling workflowOutputDataHandling) {
this.workflowOutputDataHandling = workflowOutputDataHandling;
return this;
}
public WorkflowContextHeaderBuilder setUserIdentifier(String userIdentifier) {
this.userIdentifier = userIdentifier;
return this;
}
public WorkflowContextHeaderBuilder setContextHeader(ContextHeaderDocument.ContextHeader contextHeader) {
this.contextHeader = contextHeader;
return this;
}
public WorkflowContextHeaderBuilder setWorkflowSchedulingContext(
WorkflowSchedulingContextDocument.WorkflowSchedulingContext workflowSchedulingContext) {
this.workflowSchedulingContext = workflowSchedulingContext;
return this;
}
public ContextHeaderDocument.ContextHeader getContextHeader() {
return contextHeader;
}
public WorkflowSchedulingContextDocument.WorkflowSchedulingContext getWorkflowSchedulingContext() {
return workflowSchedulingContext;
}
public SecurityContextDocument.SecurityContext getSecurityContext() {
return securityContext;
}
public WorkflowOutputDataHandlingDocument.WorkflowOutputDataHandling getWorkflowOutputDataHandling() {
return workflowOutputDataHandling;
}
public SoaServiceEprsDocument.SoaServiceEprs getSoaServiceEprs() {
return soaServiceEprs;
}
public String getUserIdentifier() {
return userIdentifier;
}
public WorkflowMonitoringContextDocument.WorkflowMonitoringContext getWorkflowMonitoringContext() {
return workflowMonitoringContext;
}
public XmlElement getXml() {
ContextHeaderDocument document = getDocument();
return XMLUtil.stringToXmlElement3(document.xmlText());
}
private ContextHeaderDocument getDocument() {
ContextHeaderDocument document = ContextHeaderDocument.Factory.newInstance();
if (this.workflowMonitoringContext != null) {
this.contextHeader.setWorkflowMonitoringContext(this.workflowMonitoringContext);
}
if (this.soaServiceEprs != null) {
this.contextHeader.setSoaServiceEprs(this.soaServiceEprs);
}
if (this.securityContext != null) {
this.contextHeader.setSecurityContext(this.securityContext);
}
if (this.workflowSchedulingContext != null) {
this.contextHeader.setWorkflowSchedulingContext(this.workflowSchedulingContext);
}
if (this.userIdentifier != null) {
this.contextHeader.setUserIdentifier(this.userIdentifier);
}
if (this.workflowOutputDataHandling != null) {
this.contextHeader.setWorkflowOutputDataHandling(this.workflowOutputDataHandling);
}
document.setContextHeader(this.contextHeader);
return document;
}
public WorkflowContextHeaderBuilder setResourceSchedularUrl(String resourceSchedular) {
this.soaServiceEprs.setResourceSchedulerUrl(resourceSchedular);
return this;
}
public WorkflowContextHeaderBuilder setWorkflowTemplateId(String template) {
this.workflowMonitoringContext.setWorkflowTemplateId(template);
return this;
}
public WorkflowContextHeaderBuilder setWorkflowNodeId(String node) {
this.workflowMonitoringContext.setWorkflowNodeId(node);
return this;
}
public WorkflowContextHeaderBuilder setWorkflowTimeStep(int timestep) {
this.workflowMonitoringContext.setWorkflowTimeStep(timestep);
return this;
}
public WorkflowContextHeaderBuilder setServiceInstanceId(String node) {
this.workflowMonitoringContext.setServiceInstanceId(node);
return this;
}
public WorkflowContextHeaderBuilder setServiceReplicaId(String node) {
this.workflowMonitoringContext.setServiceReplicaId(node);
return this;
}
public WorkflowContextHeaderBuilder setEventPublishEpr(String node) {
this.workflowMonitoringContext.setEventPublishEpr(node);
return this;
}
public WorkflowContextHeaderBuilder setErrorPublishEpr(String node) {
this.workflowMonitoringContext.setErrorPublishEpr(node);
return this;
}
public WorkflowContextHeaderBuilder setNotificationTopic(String node) {
this.workflowMonitoringContext.setNotificationTopic(node);
return this;
}
public WorkflowContextHeaderBuilder setGridProxy(byte[] gridProxy) {
if (this.securityContext == null) {
this.securityContext = SecurityContextDocument.SecurityContext.Factory.newInstance();
}
this.securityContext.setGridProxy(gridProxy);
return this;
}
public WorkflowContextHeaderBuilder setGridMyProxyRepository(String myProxyServer, String userName,
String password, int lifeTimeInHours) {
if (this.securityContext == null) {
this.securityContext = SecurityContextDocument.SecurityContext.Factory.newInstance();
}
SecurityContextDocument.SecurityContext.GridMyproxyRepository gridMyproxyRepository = this.securityContext
.addNewGridMyproxyRepository();
gridMyproxyRepository.setMyproxyServer(myProxyServer);
gridMyproxyRepository.setUsername(userName);
gridMyproxyRepository.setPassword(password);
gridMyproxyRepository.setLifeTimeInhours(lifeTimeInHours);
return this;
}
public WorkflowContextHeaderBuilder setSSHAuthentication(String accessKeyId, String secretKeyId) {
if (this.securityContext == null) {
this.securityContext = SecurityContextDocument.SecurityContext.Factory.newInstance();
}
SecurityContextDocument.SecurityContext.SshAuthentication sshAuthentication = this.securityContext
.addNewSshAuthentication();
sshAuthentication.setAccessKeyId(accessKeyId);
sshAuthentication.setSecretAccessKey(secretKeyId);
return this;
}
public WorkflowContextHeaderBuilder setCredentialManagementService(String tokenId, String portalUser,
String gatewayId) {
if (this.securityContext == null) {
this.securityContext = SecurityContextDocument.SecurityContext.Factory.newInstance();
}
SecurityContextDocument.SecurityContext.CredentialManagementService credentialManagementService = this.securityContext
.addNewCredentialManagementService();
credentialManagementService.setTokenId(tokenId);
credentialManagementService.setPortalUser(portalUser);
credentialManagementService.setGatewayId(gatewayId);
return this;
}
public WorkflowContextHeaderBuilder setAmazonWebServices(String accessKeyId, String secretAccesKey) {
if (this.securityContext == null) {
this.securityContext = SecurityContextDocument.SecurityContext.Factory.newInstance();
}
SecurityContextDocument.SecurityContext.AmazonWebservices amazonWebservices = this.securityContext
.addNewAmazonWebservices();
amazonWebservices.setAccessKeyId(accessKeyId);
amazonWebservices.setSecretAccessKey(secretAccesKey);
return this;
}
public WorkflowContextHeaderBuilder addApplicationOutputDataHandling(String nodeId, String outputDir, String outputDataRegistry,
Boolean dataPersistence) {
if (this.workflowOutputDataHandling == null) {
this.workflowOutputDataHandling = WorkflowOutputDataHandlingDocument.WorkflowOutputDataHandling.Factory
.newInstance();
}
if (nodeId!=null) {
ApplicationOutputDataHandlingDocument.ApplicationOutputDataHandling applicationOutputDataHandling = this.workflowOutputDataHandling
.addNewApplicationOutputDataHandling();
applicationOutputDataHandling.setNodeId(nodeId);
if (outputDir!=null) {
applicationOutputDataHandling.setOutputDataDirectory(outputDir);
}
if (outputDataRegistry!=null) {
applicationOutputDataHandling
.setDataRegistryUrl(outputDataRegistry);
}
if (dataPersistence!=null) {
applicationOutputDataHandling
.setDataPersistance(dataPersistence);
}
}
return this;
}
/**
* @deprecated - Use <code>addApplicationOutputDataHandling(String,String,String,boolean)</code> instead
* @param outputDir
* @param outputDataRegistry
* @param dataPersistence
* @return
*/
public WorkflowContextHeaderBuilder addApplicationOutputDataHandling(String outputDir, String outputDataRegistry,
Boolean dataPersistence) {
return addApplicationOutputDataHandling(null, outputDir, outputDataRegistry, dataPersistence);
}
public WorkflowContextHeaderBuilder addApplicationSchedulingKeyPair(String workflowNodeId, String name, String value, String description){
if (this.workflowSchedulingContext == null) {
this.workflowSchedulingContext = WorkflowSchedulingContextDocument.WorkflowSchedulingContext.Factory
.newInstance();
}
NameValuePairType nameValuePair = workflowSchedulingContext.addNewNameValuePair();
if(workflowNodeId != null){
nameValuePair.setWorkflowNodeId(workflowNodeId);
}
if(name != null && value != null){
nameValuePair.setName(name);
nameValuePair.setValue(value);
}
if(description != null){
nameValuePair.setDescription(description);
}
return this;
}
/**
* Add Application scheduling information to workflow context per node
* @param workflowNodeId
* @param serviceId
* @param hostName
* @param wsGramPreffered
* @param gateKeepersEpr
* @param jobManager
* @param cpuCount
* @param nodeCount
* @param qName
* @param maxWalTime
* @return
*/
public WorkflowContextHeaderBuilder addApplicationSchedulingContext(String workflowNodeId, String serviceId,
String hostName, Boolean wsGramPreffered, String gateKeepersEpr, String jobManager, Integer cpuCount,
Integer nodeCount, String qName, Integer maxWalTime) {
if (this.workflowSchedulingContext == null) {
this.workflowSchedulingContext = WorkflowSchedulingContextDocument.WorkflowSchedulingContext.Factory
.newInstance();
}
if (workflowNodeId!=null) {
ApplicationSchedulingContextDocument.ApplicationSchedulingContext applicationSchedulingContext = this.workflowSchedulingContext
.addNewApplicationSchedulingContext();
applicationSchedulingContext.setWorkflowNodeId(workflowNodeId);
if (cpuCount!=null) {
applicationSchedulingContext.setCpuCount(cpuCount);
}
if (gateKeepersEpr!=null) {
applicationSchedulingContext.setGatekeeperEpr(gateKeepersEpr);
}
if (hostName!=null) {
applicationSchedulingContext.setHostName(hostName);
}
if (jobManager!=null) {
applicationSchedulingContext.setJobManager(jobManager);
}
if (maxWalTime!=null) {
applicationSchedulingContext.setMaxWallTime(maxWalTime);
}
if (serviceId!=null) {
applicationSchedulingContext.setServiceId(serviceId);
}
if (nodeCount!=null) {
applicationSchedulingContext.setNodeCount(nodeCount);
}
if (qName!=null) {
applicationSchedulingContext.setQueueName(qName);
}
if (wsGramPreffered!=null) {
applicationSchedulingContext
.setWsgramPreferred(wsGramPreffered);
}
}
return this;
}
public static ContextHeaderDocument.ContextHeader removeOtherSchedulingConfig(String nodeID, ContextHeaderDocument.ContextHeader header) {
WorkflowContextHeaderBuilder.setCurrentContextHeader(header);
header=new WorkflowContextHeaderBuilder(header).getContextHeader();
try {
ApplicationSchedulingContextDocument.ApplicationSchedulingContext[] applicationSchedulingContextArray =
header.getWorkflowSchedulingContext().getApplicationSchedulingContextArray();
int index = 0;
if (applicationSchedulingContextArray != null) {
for (ApplicationSchedulingContextDocument.ApplicationSchedulingContext context : applicationSchedulingContextArray) {
if (context.getWorkflowNodeId().equals(nodeID)) {
index++;
header.getWorkflowSchedulingContext().setApplicationSchedulingContextArray(new ApplicationSchedulingContextDocument.ApplicationSchedulingContext[]{context});
break;
} else {
header.getWorkflowSchedulingContext().removeApplicationSchedulingContext(index);
}
}
}
ApplicationOutputDataHandlingDocument.ApplicationOutputDataHandling[] pdh =
header.getWorkflowOutputDataHandling().getApplicationOutputDataHandlingArray();
index = 0;
            if (pdh != null) {
for(ApplicationOutputDataHandlingDocument.ApplicationOutputDataHandling aODH:pdh){
if(nodeID.equals(aODH.getNodeId())){
index++;
header.getWorkflowOutputDataHandling().setApplicationOutputDataHandlingArray(new ApplicationOutputDataHandlingDocument.ApplicationOutputDataHandling[]{aODH});
break;
}else {
header.getWorkflowOutputDataHandling().removeApplicationOutputDataHandling(index);
}
}
}
} catch (NullPointerException e) {
return header;
}
return header;
}
public String getSubmissionUser() {
return submissionUser;
}
public void setSubmissionUser(String submissionUser) {
this.submissionUser = submissionUser;
}
}
|
apache-2.0
|
0x6e6562/astyanax
|
doc/com/netflix/astyanax/connectionpool/class-use/LatencyScoreStrategy.Listener.html
|
12599
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_29) on Thu May 24 11:27:50 PDT 2012 -->
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<TITLE>
Uses of Interface com.netflix.astyanax.connectionpool.LatencyScoreStrategy.Listener (astyanax 1.0.3 API)
</TITLE>
<META NAME="date" CONTENT="2012-05-24">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Interface com.netflix.astyanax.connectionpool.LatencyScoreStrategy.Listener (astyanax 1.0.3 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?com/netflix/astyanax/connectionpool//class-useLatencyScoreStrategy.Listener.html" target="_top"><B>FRAMES</B></A>
<A HREF="LatencyScoreStrategy.Listener.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Interface<br>com.netflix.astyanax.connectionpool.LatencyScoreStrategy.Listener</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#com.netflix.astyanax.connectionpool"><B>com.netflix.astyanax.connectionpool</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#com.netflix.astyanax.connectionpool.impl"><B>com.netflix.astyanax.connectionpool.impl</B></A></TD>
<TD> </TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#com.netflix.astyanax.shallows"><B>com.netflix.astyanax.shallows</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="com.netflix.astyanax.connectionpool"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A> in <A HREF="../../../../../com/netflix/astyanax/connectionpool/package-summary.html">com.netflix.astyanax.connectionpool</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../com/netflix/astyanax/connectionpool/package-summary.html">com.netflix.astyanax.connectionpool</A> with parameters of type <A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>LatencyScoreStrategy.</B><B><A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.html#start(com.netflix.astyanax.connectionpool.LatencyScoreStrategy.Listener)">start</A></B>(<A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A> listener)</CODE>
<BR>
Start updating stats for instances created using createInstance.</TD>
</TR>
</TABLE>
<P>
<A NAME="com.netflix.astyanax.connectionpool.impl"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A> in <A HREF="../../../../../com/netflix/astyanax/connectionpool/impl/package-summary.html">com.netflix.astyanax.connectionpool.impl</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../com/netflix/astyanax/connectionpool/impl/package-summary.html">com.netflix.astyanax.connectionpool.impl</A> with parameters of type <A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>SmaLatencyScoreStrategyImpl.</B><B><A HREF="../../../../../com/netflix/astyanax/connectionpool/impl/SmaLatencyScoreStrategyImpl.html#start(com.netflix.astyanax.connectionpool.LatencyScoreStrategy.Listener)">start</A></B>(<A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A> listener)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<A NAME="com.netflix.astyanax.shallows"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A> in <A HREF="../../../../../com/netflix/astyanax/shallows/package-summary.html">com.netflix.astyanax.shallows</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../com/netflix/astyanax/shallows/package-summary.html">com.netflix.astyanax.shallows</A> with parameters of type <A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> void</CODE></FONT></TD>
<TD><CODE><B>EmptyLatencyScoreStrategyImpl.</B><B><A HREF="../../../../../com/netflix/astyanax/shallows/EmptyLatencyScoreStrategyImpl.html#start(com.netflix.astyanax.connectionpool.LatencyScoreStrategy.Listener)">start</A></B>(<A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool">LatencyScoreStrategy.Listener</A> listener)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../com/netflix/astyanax/connectionpool/LatencyScoreStrategy.Listener.html" title="interface in com.netflix.astyanax.connectionpool"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?com/netflix/astyanax/connectionpool//class-useLatencyScoreStrategy.Listener.html" target="_top"><B>FRAMES</B></A>
<A HREF="LatencyScoreStrategy.Listener.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2012. All Rights Reserved.
</BODY>
</HTML>
|
apache-2.0
|
Stratio/Explorer
|
web/app/scripts/directives/ngenter.js
|
1295
|
/*
* Licensed to STRATIO (C) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. The STRATIO (C) licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
'use strict';
/**
* @ngdoc directive
* @name explorerWebApp.directive:ngEnter
* @description
* # ngEnter
 * Binds the enter key (key code 13) and evaluates the expression bound via the ng-enter attribute
*
* @author anthonycorbacho
*/
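// Usage sketch (illustrative only -- "query" and "search" are assumed names from an
// enclosing controller scope, not part of this directive):
//   <input type="text" ng-model="query" ng-enter="search(query)">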
angular.module('explorerWebApp').directive('ngEnter', function() {
return function(scope, element, attrs) {
element.bind('keydown keypress', function(event) {
if (event.which === 13) {
scope.$apply(function() {
scope.$eval(attrs.ngEnter);
});
event.preventDefault();
}
});
};
});
|
apache-2.0
|
kenzanlabs/deck
|
app/scripts/modules/core/src/pipeline/config/triggers/cron/cronPicker.html
|
12723
|
<div class="cron-gen-main form-inline no-spel" ng-init="validation={messages: {}}">
<div class="row">
<div class="col-md-12">
<select class="form-control input-sm"
ng-model="$ctrl.activeTab"
ng-change="$ctrl.regenerateCron()"
ng-options="tab for tab in ['minutes', 'hourly', 'daily', 'weekly', 'monthly', 'advanced']"></select>
</div>
</div>
<div class="cron-gen-container">
<div ng-if="$ctrl.activeTab === 'minutes'">
<div class="row" ng-init="$ctrl.regenerateCron()">
<div class="col-md-12">
<input class="form-control input-sm"
type="number"
min="1"
max="59"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.minutes.minutes"
ng-required="$ctrl.activeTab === 'minutes'">
minute<span ng-if="$ctrl.state.minutes.minutes > 1">s</span>
</div>
</div>
</div>
<div ng-if="$ctrl.activeTab === 'hourly'">
<div class="row">
<div class="col-md-12">
Every
<input class="form-control input-sm"
type="number"
min="1"
max="23"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.hourly.hours"
ng-required="$ctrl.activeTab === 'hourly'">
hour<span ng-if="$ctrl.state.hourly.hours > 1">s</span>
on minute
<input class="form-control input-sm"
type="number"
min="0"
max="59"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.hourly.minutes"
ng-required="$ctrl.activeTab === 'hourly'">
</div>
</div>
</div>
<div ng-if="$ctrl.activeTab === 'daily'">
<div class="row">
<div class="col-md-12">
<input type="radio"
value="everyDays"
name="daily-radio-{{$ctrl.name}}"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.daily.subTab"
checked="checked">
Every
<input class="form-control input-sm"
type="number"
min="1"
max="31"
ng-disabled="$ctrl.state.daily.subTab !== 'everyDays'"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.daily.everyDays.days"
ng-required="$ctrl.activeTab === 'daily' && $ctrl.state.daily.subTab === 'everyDays'">
day<span ng-if="$ctrl.state.daily.everyDays.days > 1">s</span>
</div>
</div>
<div class="row">
<div class="col-md-12">
<input type="radio"
value="everyWeekDay"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.daily.subTab"
name="daily-radio-{{$ctrl.name}}">
Every week day
</div>
</div>
<div class="row">
<div class="col-md-12">
Start time
<cron-gen-time-select class="visible-xs-inline-block visible-sm-inline-block visible-md-inline-block visible-lg-inline-block"
ng-if="$ctrl.state.daily.subTab === 'everyDays'"
is-required="state.activeTab === 'daily'"
select-class="$ctrl.parsedOptions.formSelectClass"
is-disabled="$ctrl.activeTab !== 'daily'"
on-change="$ctrl.regenerateCron()"
model="$ctrl.state.daily.everyDays"
use-24-hour-time="$ctrl.parsedOptions.use24HourTime"
hide-seconds="$ctrl.parsedOptions.hideSeconds">
</cron-gen-time-select>
<cron-gen-time-select class="visible-xs-inline-block visible-sm-inline-block visible-md-inline-block visible-lg-inline-block"
ng-if="$ctrl.state.daily.subTab === 'everyWeekDay'"
is-required="state.activeTab === 'daily'"
select-class="$ctrl.parsedOptions.formSelectClass"
is-disabled="$ctrl.activeTab !== 'daily'"
on-change="$ctrl.regenerateCron()"
model="$ctrl.state.daily.everyWeekDay"
use-24-hour-time="$ctrl.parsedOptions.use24HourTime"
hide-seconds="$ctrl.parsedOptions.hideSeconds">
</cron-gen-time-select>
<system-timezone></system-timezone>
</div>
</div>
</div>
<div ng-if="$ctrl.activeTab === 'weekly'">
<div class="row">
<div class="col-md-12">
<div class="btn-group">
<label ng-repeat="day in [{k: 'SUN', l: 'Sun'}, {k: 'MON', l: 'Mon'}, {k: 'TUE', l: 'Tue'}, {k: 'WED', l: 'Wed'}, {k: 'THU', l: 'Thu'}, {k: 'FRI', l: 'Fri'}, {k: 'SAT', l: 'Sat'}]"
class="btn btn-default"
uib-btn-checkbox
ng-class="{active: $ctrl.state.weekly[day.k]}"
ng-click="$ctrl.regenerateCron()"
ng-model="$ctrl.state.weekly[day.k]">{{day.l}}</label>
</div>
</div>
</div>
<div class="row">
<div class="col-md-12">
Start time
<cron-gen-time-select class="visible-xs-inline-block visible-sm-inline-block visible-md-inline-block visible-lg-inline-block"
is-required="state.activeTab === 'weekly'"
select-class="$ctrl.parsedOptions.formSelectClass"
is-disabled="$ctrl.activeTab !== 'weekly'"
on-change="$ctrl.regenerateCron();"
model="$ctrl.state.weekly"
use-24-hour-time="$ctrl.parsedOptions.use24HourTime"
hide-seconds="$ctrl.parsedOptions.hideSeconds">
</cron-gen-time-select>
<system-timezone></system-timezone>
</div>
</div>
</div>
<div ng-if="$ctrl.activeTab === 'monthly'">
<div class="row">
<div class="col-md-12">
<input type="radio"
value="specificDay"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.monthly.subTab"
name="monthly-radio-{{$ctrl.name}}"
checked="checked">
On the
<select class="month-days"
ng-disabled="$ctrl.state.monthly.subTab !== 'specificDay'"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.monthly.specificDay.day"
ng-required="$ctrl.activeTab === 'monthly' && $ctrl.state.monthly.subTab === 'specificDay'"
ng-options="monthDaysWithLast as $ctrl.monthDayDisplay(monthDaysWithLast) for monthDaysWithLast in $ctrl.selectOptions.monthDaysWithLasts"
ng-class="$ctrl.parsedOptions.formSelectClass">
</select>
of every
<input class="form-control input-sm"
type="number"
min="1"
max="11"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.monthly.specificDay.months"
ng-required="$ctrl.activeTab === 'monthly' && $ctrl.state.monthly.subTab === 'specificDay'"
ng-disabled="$ctrl.state.monthly.subTab !== 'specificDay'">
month<span ng-if="$ctrl.state.monthly.specificDay.months > 1">s</span>
</div>
</div>
<div class="row">
<div class="col-md-12">
<input type="radio"
value="specificWeekDay"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.monthly.subTab"
name="monthly-radio-{{$ctrl.name}}">
<select class="form-control input-sm"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.monthly.specificWeekDay.monthWeek"
ng-required="$ctrl.activeTab === 'monthly' && $ctrl.state.monthly.subTab === 'specificWeekDay'"
ng-options="monthWeek as $ctrl.monthWeekDisplay(monthWeek) for monthWeek in $ctrl.selectOptions.monthWeeks"
ng-disabled="$ctrl.state.monthly.subTab !== 'specificWeekDay'">
</select>
<select class="form-control input-sm"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.monthly.specificWeekDay.day"
ng-required="$ctrl.activeTab === 'monthly' && $ctrl.state.monthly.subTab === 'specificWeekDay'"
ng-options="day as $ctrl.dayDisplay(day) for day in $ctrl.selectOptions.days"
ng-disabled="$ctrl.state.monthly.subTab !== 'specificWeekDay'">
</select>
of every
<input class="form-control input-sm"
type="number"
min="1"
max="11"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.monthly.specificWeekDay.months"
ng-required="$ctrl.activeTab === 'monthly' && $ctrl.state.monthly.subTab === 'specificWeekDay'"
ng-disabled="$ctrl.state.monthly.subTab !== 'specificWeekDay'">
month<span ng-if="$ctrl.state.monthly.specificWeekDay.months > 1">s</span>
</div>
</div>
<div class="row">
<div class="col-md-12">
Start time
<cron-gen-time-select class="visible-xs-inline-block visible-sm-inline-block visible-md-inline-block visible-lg-inline-block"
ng-if="$ctrl.state.monthly.subTab === 'specificDay'"
is-required="state.activeTab === 'monthly'"
select-class="$ctrl.parsedOptions.formSelectClass"
is-disabled="$ctrl.activeTab !== 'monthly'"
on-change="$ctrl.regenerateCron();"
model="$ctrl.state.monthly.specificDay"
use-24-hour-time="$ctrl.parsedOptions.use24HourTime"
hide-seconds="$ctrl.parsedOptions.hideSeconds">
</cron-gen-time-select>
<cron-gen-time-select class="visible-xs-inline-block visible-sm-inline-block visible-md-inline-block visible-lg-inline-block"
ng-if="$ctrl.state.monthly.subTab === 'specificWeekDay'"
is-required="state.activeTab === 'monthly'"
select-class="$ctrl.parsedOptions.formSelectClass"
is-disabled="$ctrl.activeTab !== 'monthly'"
on-change="$ctrl.regenerateCron();"
model="$ctrl.state.monthly.specificWeekDay"
use-24-hour-time="$ctrl.parsedOptions.use24HourTime"
hide-seconds="$ctrl.parsedOptions.hideSeconds">
</cron-gen-time-select>
<system-timezone></system-timezone>
</div>
</div>
</div>
<div ng-if="$ctrl.activeTab === 'advanced'">
<div class="row">
<div class="col-md-12">
<strong>Expression</strong>
<help-field key="pipeline.config.cron.expression"></help-field>
<input type="text"
class="form-control input-sm"
cron-validator
cron-validation-messages="validation.messages"
ng-change="$ctrl.regenerateCron()"
ng-model="$ctrl.state.advanced.expression">
</div>
</div>
<div class="row">
<div class="col-md-12">
<p>More details about how to create these expressions can be found
<a href="http://www.quartz-scheduler.org/documentation/quartz-2.x/tutorials/crontrigger.html"
target="_blank">here</a>.</p>
</div>
</div>
<div class="row" ng-if="validation.messages.description && !validation.messages.error">
<div class="col-md-12">
<p><strong>Will run {{validation.messages.description}}</strong></p>
</div>
</div>
<div class="row slide-in" ng-if="validation.messages.error">
<div class="col-md-12 error-message">
{{validation.messages.error}}
</div>
</div>
</div>
</div>
</div>
|
apache-2.0
|
rcuvgd/Ivona---Text-to-speach
|
src/main/java/com/ivona/services/tts/model/transform/createspeech/CreateSpeechPostRequestMarshaller.java
|
9116
|
/*
* Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.ivona.services.tts.model.transform.createspeech;
import com.amazonaws.AmazonClientException;
import com.amazonaws.DefaultRequest;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringInputStream;
import com.amazonaws.util.json.JSONException;
import com.amazonaws.util.json.JSONWriter;
import com.ivona.services.tts.IvonaSpeechCloudClient;
import com.ivona.services.tts.model.CreateSpeechRequest;
import com.ivona.services.tts.model.Input;
import com.ivona.services.tts.model.OutputFormat;
import com.ivona.services.tts.model.Parameters;
import com.ivona.services.tts.model.SpeechMarks;
import com.ivona.services.tts.model.Voice;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.util.List;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
 * CreateSpeechRequest Marshaller - transforms a CreateSpeechRequest into a JSON POST request
*/
public class CreateSpeechPostRequestMarshaller implements Marshaller<Request<CreateSpeechRequest>, CreateSpeechRequest>,
CreateSpeechRequestMarshaller {
protected final static String RESOURCE_PATH = "CreateSpeech";
protected final static String JSON_KEY_INPUT = "Input";
protected final static String JSON_KEY_INPUT_DATA = "Data";
protected final static String JSON_KEY_INPUT_TYPE = "Type";
protected final static String JSON_KEY_OUTPUT_FORMAT = "OutputFormat";
protected final static String JSON_KEY_OUTPUT_FORMAT_CODEC = "Codec";
protected final static String JSON_KEY_OUTPUT_FORMAT_SAMPLE_RATE = "SampleRate";
protected final static String JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS = "SpeechMarks";
protected final static String JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS_SENTENCE = "Sentence";
protected final static String JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS_SSML = "Ssml";
protected final static String JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS_VISEME = "Viseme";
protected final static String JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS_WORD = "Word";
protected final static String JSON_KEY_PARAMETERS = "Parameters";
protected final static String JSON_KEY_LEXICONS = "LexiconNames";
protected final static String JSON_KEY_PARAMETERS_RATE = "Rate";
protected final static String JSON_KEY_PARAMETERS_VOLUME = "Volume";
protected final static String JSON_KEY_PARAMETERS_PARAGRAPH_BREAK = "ParagraphBreak";
protected final static String JSON_KEY_PARAMETERS_SENTENCE_BREAK = "SentenceBreak";
protected final static String JSON_KEY_VOICE = "Voice";
protected final static String JSON_KEY_VOICE_NAME = "Name";
protected final static String JSON_KEY_VOICE_LANGUAGE = "Language";
protected final static String JSON_KEY_VOICE_GENDER = "Gender";
public Request<CreateSpeechRequest> marshall(CreateSpeechRequest createSpeechRequest) {
if (createSpeechRequest == null) {
throw new AmazonClientException("null createSpeechRequest passed to marshall(...)");
}
Request<CreateSpeechRequest> request = new DefaultRequest<CreateSpeechRequest>(createSpeechRequest,
IvonaSpeechCloudClient.SERVICE_NAME);
request.setHttpMethod(HttpMethodName.POST);
setRequestPayload(request, createSpeechRequest);
request.setResourcePath(RESOURCE_PATH);
return request;
}
private void setRequestPayload(Request<CreateSpeechRequest> request, CreateSpeechRequest createSpeechRequest) {
try {
StringWriter stringWriter = new StringWriter();
JSONWriter jsonWriter = new JSONWriter(stringWriter);
jsonWriter.object();
if (createSpeechRequest.getInput() != null) {
Input input = createSpeechRequest.getInput();
jsonWriter.key(JSON_KEY_INPUT);
jsonWriter.object();
if (input.getData() != null) {
jsonWriter.key(JSON_KEY_INPUT_DATA).value(input.getData());
}
if (input.getType() != null) {
jsonWriter.key(JSON_KEY_INPUT_TYPE).value(input.getType());
}
jsonWriter.endObject();
}
if (createSpeechRequest.getOutputFormat() != null) {
OutputFormat outputFormat = createSpeechRequest.getOutputFormat();
jsonWriter.key(JSON_KEY_OUTPUT_FORMAT);
jsonWriter.object();
if (outputFormat.getCodec() != null) {
jsonWriter.key(JSON_KEY_OUTPUT_FORMAT_CODEC).value(outputFormat.getCodec());
}
if (outputFormat.getSampleRate() != null && outputFormat.getSampleRate() > 0) {
jsonWriter.key(JSON_KEY_OUTPUT_FORMAT_SAMPLE_RATE).value((long) outputFormat.getSampleRate());
}
if (outputFormat.getSpeechMarks() != null) {
jsonWriter.key(JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS);
jsonWriter.object();
SpeechMarks speechMarks = outputFormat.getSpeechMarks();
if (speechMarks != null) {
if (speechMarks.isSentence()) {
jsonWriter.key(JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS_SENTENCE).value(true);
}
if (speechMarks.isSsml()) {
jsonWriter.key(JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS_SSML).value(true);
}
if (speechMarks.isViseme()) {
jsonWriter.key(JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS_VISEME).value(true);
}
if (speechMarks.isWord()) {
jsonWriter.key(JSON_KEY_OUTPUT_FORMAT_SPEECHMARKS_WORD).value(true);
}
}
jsonWriter.endObject();
}
jsonWriter.endObject();
}
if (createSpeechRequest.getParameters() != null) {
Parameters parameters = createSpeechRequest.getParameters();
jsonWriter.key(JSON_KEY_PARAMETERS);
jsonWriter.object();
if (parameters.getRate() != null) {
jsonWriter.key(JSON_KEY_PARAMETERS_RATE).value(parameters.getRate());
}
if (parameters.getVolume() != null) {
jsonWriter.key(JSON_KEY_PARAMETERS_VOLUME).value(parameters.getVolume());
}
if (parameters.getSentenceBreak() != null) {
jsonWriter.key(JSON_KEY_PARAMETERS_SENTENCE_BREAK).value((long) parameters.getSentenceBreak());
}
if (parameters.getParagraphBreak() != null) {
jsonWriter.key(JSON_KEY_PARAMETERS_PARAGRAPH_BREAK).value((long) parameters.getParagraphBreak());
}
jsonWriter.endObject();
}
if (createSpeechRequest.getLexiconNames() != null) {
List<String> names = createSpeechRequest.getLexiconNames();
jsonWriter.key(JSON_KEY_LEXICONS).value(names);
}
if (createSpeechRequest.getVoice() != null) {
Voice voice = createSpeechRequest.getVoice();
jsonWriter.key(JSON_KEY_VOICE);
jsonWriter.object();
if (voice.getGender() != null) {
jsonWriter.key(JSON_KEY_VOICE_GENDER).value(voice.getGender());
}
if (voice.getLanguage() != null) {
jsonWriter.key(JSON_KEY_VOICE_LANGUAGE).value(voice.getLanguage());
}
if (voice.getName() != null) {
jsonWriter.key(JSON_KEY_VOICE_NAME).value(voice.getName());
}
jsonWriter.endObject();
}
jsonWriter.endObject();
String snippet = stringWriter.toString();
byte[] content = snippet.getBytes(UTF_8);
request.setContent(new StringInputStream(snippet));
request.addHeader("Content-Length", Integer.toString(content.length));
} catch (JSONException e) {
throw new AmazonClientException("Unable to marshall request to JSON", e);
} catch (UnsupportedEncodingException e) {
throw new AmazonClientException("Unable to marshall request to JSON", e);
}
}
}
|
apache-2.0
|
fewwind/InCarMedia
|
Myapp/src/main/java/com/example/fewwind/myfirst/Util/DownUtil.java
|
2986
|
package com.example.fewwind.myfirst.Util;
import android.util.Log;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* Created by fewwind on 2015/11/13.
*/
public class DownUtil {
private String filePath;
private String urlLoad;
private int ThreadNum;
DownThread[] threads ;
private int fileSize;
public DownUtil(String filePath,String url,int num){
this.filePath = filePath;
this.urlLoad = url;
this.ThreadNum =num;
threads = new DownThread[num];
}
public void downLoad() throws Exception{
URL url = new URL(urlLoad);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setRequestMethod("GET");
conn.connect();
fileSize = conn.getContentLength();
conn.disconnect();
Log.e("tag","文件大小::"+fileSize);
int currentPart = fileSize/ThreadNum+1;
RandomAccessFile file = new RandomAccessFile(filePath,"rw");
file.setLength(fileSize);
file.close();
for (int i=0;i<ThreadNum;i++){
int startPos = i*currentPart;
RandomAccessFile partfile = new RandomAccessFile(filePath,"rw");
partfile.seek(startPos);
threads[i] = new DownThread(startPos,currentPart,partfile);
threads[i].start();
}
}
public double getPercent(){
int sumSize=0;
for (int i=0;i<ThreadNum;i++){
sumSize+=threads[i].length;
}
Log.e("tag","下载的大小::"+sumSize);
return sumSize*1.0/fileSize;
}
class DownThread extends Thread {
private int startPos;
private int currentPartSize;
private RandomAccessFile randomFile;
public int length=0;
public DownThread(int pos, int currentPartSize, RandomAccessFile file) {
this.startPos = pos;
this.currentPartSize = currentPartSize;
this.randomFile = file;
}
@Override
public void run() {
try {
URL url = new URL(urlLoad);
HttpURLConnection conn = (HttpURLConnection) url.openConnection();
conn.setConnectTimeout(5000);
conn.setRequestMethod("GET");
conn.connect();
InputStream inputStream = conn.getInputStream();
                // skip() may skip fewer bytes than requested, so loop until the start offset is reached.
                for (long skipped = 0; skipped < this.startPos; ) {
                    long n = inputStream.skip(this.startPos - skipped);
                    if (n <= 0) break;
                    skipped += n;
                }
                byte[] buffer = new byte[1024];
                int hasRead = 0;
                while (length < currentPartSize && (hasRead = inputStream.read(buffer)) > 0) {
                    // Never write past this thread's part boundary, otherwise the start of the next part would be overwritten.
                    int toWrite = Math.min(hasRead, currentPartSize - length);
                    randomFile.write(buffer, 0, toWrite);
                    length += toWrite;
                }
randomFile.close();
inputStream.close();
            } catch (Exception e) {
e.printStackTrace();
}
}
}
}
|
apache-2.0
|
trasa/aws-sdk-java
|
aws-java-sdk-sns/src/main/java/com/amazonaws/services/sns/model/transform/ListEndpointsByPlatformApplicationRequestMarshaller.java
|
2634
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.sns.model.transform;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.amazonaws.AmazonClientException;
import com.amazonaws.Request;
import com.amazonaws.DefaultRequest;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.sns.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.StringUtils;
/**
* ListEndpointsByPlatformApplicationRequest Marshaller
*/
public class ListEndpointsByPlatformApplicationRequestMarshaller
implements
Marshaller<Request<ListEndpointsByPlatformApplicationRequest>, ListEndpointsByPlatformApplicationRequest> {
public Request<ListEndpointsByPlatformApplicationRequest> marshall(
ListEndpointsByPlatformApplicationRequest listEndpointsByPlatformApplicationRequest) {
if (listEndpointsByPlatformApplicationRequest == null) {
throw new AmazonClientException(
"Invalid argument passed to marshall(...)");
}
Request<ListEndpointsByPlatformApplicationRequest> request = new DefaultRequest<ListEndpointsByPlatformApplicationRequest>(
listEndpointsByPlatformApplicationRequest, "AmazonSNS");
request.addParameter("Action", "ListEndpointsByPlatformApplication");
request.addParameter("Version", "2010-03-31");
request.setHttpMethod(HttpMethodName.POST);
if (listEndpointsByPlatformApplicationRequest
.getPlatformApplicationArn() != null) {
request.addParameter("PlatformApplicationArn", StringUtils
.fromString(listEndpointsByPlatformApplicationRequest
.getPlatformApplicationArn()));
}
if (listEndpointsByPlatformApplicationRequest.getNextToken() != null) {
request.addParameter("NextToken", StringUtils
.fromString(listEndpointsByPlatformApplicationRequest
.getNextToken()));
}
return request;
}
}
|
apache-2.0
|
consulo/consulo-google-gwt
|
plugin/src/main/java/com/intellij/gwt/inspections/BaseGwtLocalQuickFix.java
|
540
|
package com.intellij.gwt.inspections;
import javax.annotation.Nonnull;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.gwt.GwtBundle;
/**
* @author nik
*/
public abstract class BaseGwtLocalQuickFix implements LocalQuickFix
{
private String myName;
protected BaseGwtLocalQuickFix(final String name)
{
myName = name;
}
@Override
@Nonnull
public String getName()
{
return myName;
}
@Override
@Nonnull
public String getFamilyName()
{
return GwtBundle.message("quick.fixes.gwt.family.name");
}
}
|
apache-2.0
|
WarlockD/arm-cortex-v7-unix
|
cube/test_F746G/Src/stm32f7xx_it.c
|
4237
|
/**
******************************************************************************
* @file stm32f7xx_it.c
* @brief Interrupt Service Routines.
******************************************************************************
*
* COPYRIGHT(c) 2017 STMicroelectronics
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of STMicroelectronics nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
******************************************************************************
*/
/* Includes ------------------------------------------------------------------*/
#include "stm32f7xx_hal.h"
#include "stm32f7xx.h"
#include "stm32f7xx_it.h"
#include "cmsis_os.h"
/* USER CODE BEGIN 0 */
/* USER CODE END 0 */
/* External variables --------------------------------------------------------*/
extern ETH_HandleTypeDef heth;
extern LTDC_HandleTypeDef hltdc;
extern TIM_HandleTypeDef htim3;
/******************************************************************************/
/* Cortex-M7 Processor Interruption and Exception Handlers */
/******************************************************************************/
/**
* @brief This function handles System tick timer.
*/
void SysTick_Handler(void)
{
/* USER CODE BEGIN SysTick_IRQn 0 */
/* USER CODE END SysTick_IRQn 0 */
osSystickHandler();
/* USER CODE BEGIN SysTick_IRQn 1 */
/* USER CODE END SysTick_IRQn 1 */
}
/******************************************************************************/
/* STM32F7xx Peripheral Interrupt Handlers */
/* Add here the Interrupt Handlers for the used peripherals. */
/* For the available peripheral interrupt handler names, */
/* please refer to the startup file (startup_stm32f7xx.s). */
/******************************************************************************/
/**
* @brief This function handles TIM3 global interrupt.
*/
void TIM3_IRQHandler(void)
{
/* USER CODE BEGIN TIM3_IRQn 0 */
/* USER CODE END TIM3_IRQn 0 */
HAL_TIM_IRQHandler(&htim3);
/* USER CODE BEGIN TIM3_IRQn 1 */
/* USER CODE END TIM3_IRQn 1 */
}
/**
* @brief This function handles Ethernet global interrupt.
*/
void ETH_IRQHandler(void)
{
/* USER CODE BEGIN ETH_IRQn 0 */
/* USER CODE END ETH_IRQn 0 */
HAL_ETH_IRQHandler(&heth);
/* USER CODE BEGIN ETH_IRQn 1 */
/* USER CODE END ETH_IRQn 1 */
}
/**
* @brief This function handles LTDC global interrupt.
*/
void LTDC_IRQHandler(void)
{
/* USER CODE BEGIN LTDC_IRQn 0 */
/* USER CODE END LTDC_IRQn 0 */
HAL_LTDC_IRQHandler(&hltdc);
/* USER CODE BEGIN LTDC_IRQn 1 */
/* USER CODE END LTDC_IRQn 1 */
}
/* USER CODE BEGIN 1 */
/* USER CODE END 1 */
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
|
apache-2.0
|
kwilczynski/packer-templates
|
scripts/common/docker.sh
|
8760
|
#!/bin/bash
set -e
export PATH='/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin'
source /var/tmp/helpers/default.sh
readonly DOCKER_FILES='/var/tmp/docker'
readonly UBUNTU_RELEASE=$(detect_ubuntu_release)
readonly UBUNTU_VERSION=$(detect_ubuntu_version)
readonly AMAZON_EC2=$(detect_amazon_ec2 && echo 'true')
[[ -d $DOCKER_FILES ]] || mkdir -p "$DOCKER_FILES"
# Old package repository has been shut down, see:
# https://www.docker.com/blog/changes-dockerproject-org-apt-yum-repositories/
cat <<EOF > /etc/apt/sources.list.d/docker.list
$(if [[ $UBUNTU_VERSION == '12.04' ]]; then
echo "deb [arch=amd64] https://ftp.yandex.ru/mirrors/docker ubuntu-${UBUNTU_RELEASE} main"
else
if [[ $UBUNTU_VERSION =~ ^(14|16|18).04$ ]]; then
echo "deb [arch=amd64] https://download.docker.com/linux/ubuntu ${UBUNTU_RELEASE} stable"
else
    # Starting from 20.04, Docker no longer provides packages for this release, so fall back to the bionic repository.
echo "deb [arch=amd64] https://download.docker.com/linux/ubuntu bionic stable"
fi
fi)
EOF
chown root: /etc/apt/sources.list.d/docker.list
chmod 644 /etc/apt/sources.list.d/docker.list
if [[ $UBUNTU_VERSION == '12.04' ]]; then
if [[ ! -f "${DOCKER_FILES}/12.04/docker.key" ]]; then
# Download key directly from Docker project.
wget -O "${DOCKER_FILES}/docker.key" \
"https://ftp.yandex.ru/mirrors/docker/gpg"
else
cp -f "${DOCKER_FILES}/12.04/docker.key" \
"${DOCKER_FILES}/docker.key"
fi
else
if [[ ! -f "${DOCKER_FILES}/docker.key" ]]; then
# Download key directly from Docker project.
wget -O "${DOCKER_FILES}/docker.key" \
"https://download.docker.com/linux/ubuntu/gpg"
fi
fi
apt-key add "${DOCKER_FILES}/docker.key"
apt_get_update
# Only refresh the package index from Docker's repository.
apt-get --assume-yes update \
-o Dir::Etc::SourceList='/etc/apt/sources.list.d/docker.list' \
-o Dir::Etc::SourceParts='-' -o APT::Get::List-Cleanup='0'
# Dependencies needed by Docker, etc.
PACKAGES=(
'pciutils'
'procps'
'xfsprogs'
'git'
)
if [[ $UBUNTU_VERSION =~ ^(12|14|16|18).04$ ]]; then
PACKAGES+=(
'btrfs-tools'
)
else
# Starting from 20.04, btrfs-progs is no longer a virtual package.
PACKAGES+=(
'btrfs-progs'
)
fi
DOCKER_PACKAGE='docker-ce'
if [[ $UBUNTU_VERSION == '12.04' ]]; then
DOCKER_PACKAGE='docker-engine'
fi
if [[ -n $DOCKER_VERSION ]]; then
    # The package name and version are now a little bit awkward to work with,
    # which is why we rely on a wildcard match for a given version of Docker,
    # for example:
# - Old packages e.g., docker-engine_17.05.0~ce-0~ubuntu-trusty_amd64.deb;
# - New packages e.g., docker-ce_17.12.0~ce-0~ubuntu_amd64.deb.
PACKAGES+=( $(printf '%s=%s~ce*' "$DOCKER_PACKAGE" "$DOCKER_VERSION") )
else
PACKAGES+=( "$DOCKER_PACKAGE" )
fi
for package in "${PACKAGES[@]}"; do
apt-get --assume-yes install "$package"
done
{
if [[ ! $UBUNTU_VERSION =~ ^(12|14).04$ ]]; then
systemctl stop docker
else
service docker stop
fi
} || true
# Do not start Docker automatically when
# running on Amazon EC2, as it might be
# desirable to relocate /var/lib/docker
# onto a separate mount point, etc.
if [[ -n $AMAZON_EC2 ]]; then
{
if [[ ! $UBUNTU_VERSION =~ ^(12|14).04$ ]]; then
systemctl disable docker
else
update-rc.d -f docker disable
# Disable when using upstart.
echo 'manual' | sudo tee /etc/init/docker.override
fi
} || true
fi
if ! getent group docker &>/dev/null; then
groupadd --system docker
fi
for user in $(echo "root vagrant ubuntu ${USER}" | tr ' ' '\n' | sort -u); do
if getent passwd "$user" &>/dev/null; then
usermod -a -G docker "$user"
fi
done
# Add Bash shell completion for Docker and Docker Compose.
for file in docker docker-compose; do
REPOSITORY='docker-ce'
FILE_PATH='components/cli/contrib/completion/bash'
if [[ $file =~ ^docker-compose$ ]]; then
REPOSITORY='compose'
FILE_PATH='contrib/completion/bash'
fi
if [[ ! -f "${DOCKER_FILES}/${file}" ]]; then
wget -O "${DOCKER_FILES}/${file}" \
"https://raw.githubusercontent.com/docker/${REPOSITORY}/master/${FILE_PATH}/${file}"
fi
cp -f "${DOCKER_FILES}/${file}" \
"/etc/bash_completion.d/${file}"
chown root: "/etc/bash_completion.d/${file}"
chmod 644 "/etc/bash_completion.d/${file}"
done
sed -i -e \
's/.*DOCKER_OPTS="\(.*\)"/DOCKER_OPTS="--config-file=\/etc\/docker\/daemon.json"/g' \
/etc/default/docker
# Shouldn't the package create this?
if [[ ! -d /etc/docker ]]; then
mkdir -p /etc/docker
chown root: /etc/docker
chmod 755 /etc/docker
fi
# For now, the "userns-remap" option is disabled,
# since it breaks almost everything at the moment.
cat <<EOF > /etc/docker/daemon.json
{
"debug": false,
$(if [[ $UBUNTU_VERSION == '12.04' ]]; then
# No support for overlay2 file system in the
# Linux kernel on older versions of Ubuntu.
cat <<'EOS'
"graph": "/var/lib/docker",
"storage-driver": "aufs",
EOS
else
cat <<'EOS'
"data-root": "/var/lib/docker",
"storage-driver": "overlay2",
EOS
fi)
"ipv6": false,
"dns": [
"1.1.1.1",
"8.8.8.8",
"4.2.2.2"
],
"icc": false,
"live-restore": true,
"userland-proxy": false,
"experimental": true
}
EOF
chown root: /etc/docker/daemon.json
chmod 644 /etc/docker/daemon.json
# We can install docker-compose with pip, but it has to be done
# inside a virtualenv, as it has specific version requirements on
# its dependencies that often cause other things to break.
virtualenv /opt/docker-compose
pushd /opt/docker-compose &>/dev/null
# Make sure to switch into the virtualenv.
. /opt/docker-compose/bin/activate
# This is needed, as virtualenv by default will install
# some really old version (e.g. 12.0.x, etc.), sadly.
if [[ $UBUNTU_VERSION =~ '12.04' ]]; then
pip install --upgrade setuptools==43.0.0
else
pip install --upgrade setuptools
fi
# Resolve the "InsecurePlatformWarning" warning.
pip install --upgrade ndg-httpsclient
# The "--install-scripts" option is to make sure that binary
# will be placed in the system-wide directory, rather than
# inside the virtualenv environment only.
if [[ -n $DOCKER_COMPOSE_VERSION ]]; then
pip install \
--install-option='--install-scripts=/usr/local/bin' \
docker-compose=="${DOCKER_COMPOSE_VERSION}"
else
pip install \
--install-option='--install-scripts=/usr/local/bin' \
docker-compose
fi
deactivate
popd &>/dev/null
hash -r
ln -sf /usr/local/bin/docker-compose \
/usr/bin/docker-compose
if [[ -f /usr/local/bin/wsdump.py ]]; then
ln -sf /usr/local/bin/wsdump.py \
/usr/local/bin/wsdump
fi
hash -r
KERNEL_OPTIONS=(
'cgroup_enable=memory'
'swapaccount=1'
)
# Support both grub and grub2 style configuration.
if detect_grub2; then
# Remove any repeated (de-duplicate) Kernel options.
OPTIONS=$(sed -e \
"s/GRUB_CMDLINE_LINUX=\"\(.*\)\"/GRUB_CMDLINE_LINUX=\"\1 ${KERNEL_OPTIONS[*]}\"/" \
/etc/default/grub | \
grep -E '^GRUB_CMDLINE_LINUX=' | \
sed -e 's/GRUB_CMDLINE_LINUX=\"\(.*\)\"/\1/' | \
tr ' ' '\n' | sort -u | tr '\n' ' ' | xargs)
sed -i -e \
"s/GRUB_CMDLINE_LINUX=\"\(.*\)\"/GRUB_CMDLINE_LINUX=\"${OPTIONS}\"/" \
/etc/default/grub
else
# Remove any repeated (de-duplicate) Kernel options.
OPTIONS=$(sed -e \
"s/^#\sdefoptions=\(.*\)/# defoptions=\1 ${KERNEL_OPTIONS[*]}/" \
/boot/grub/menu.lst | \
grep -E '^#\sdefoptions=' | \
sed -e 's/.*defoptions=//' | \
tr ' ' '\n' | sort -u | tr '\n' ' ' | xargs)
sed -i -e \
"s/^#\sdefoptions=.*/# defoptions=${OPTIONS}/" \
/boot/grub/menu.lst
fi
if [[ -f /etc/default/ufw ]]; then
sed -i -e \
's/DEFAULT_FORWARD_POLICY=.*/DEFAULT_FORWARD_POLICY="ACCEPT"/g' \
/etc/default/ufw
fi
grep 'docker' /proc/mounts | awk '{ print length, $2 }' | \
sort -g -r | cut -d' ' -f2- | xargs umount -l -f 2> /dev/null || true
# This would normally be on a separate volume,
# and most likely formatted to use "btrfs".
for directory in /srv/docker /var/lib/docker; do
[[ -d $directory ]] || mkdir -p "$directory"
rm -Rf ${directory:?}/*
chown root: "$directory"
chmod 755 "$directory"
done
# A bind-mount for the Docker root directory.
cat <<'EOS' | sed -e 's/\s\+/\t/g' >> /etc/fstab
/srv/docker /var/lib/docker none bind 0 0
EOS
rm -f ${DOCKER_FILES}/docker{.key,-compose}
|
apache-2.0
|
danielelic/OpenNI2-Linux-Arm-Cubieboard2
|
Documentation/java/org/openni/android/package-summary.html
|
5519
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.6.0_31) on Thu Jul 17 13:05:41 CEST 2014 -->
<title>org.openni.android</title>
<meta name="date" content="2014-07-17">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="org.openni.android";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/openni/package-summary.html">PREV PACKAGE</a></li>
<li>NEXT PACKAGE</li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/openni/android/package-summary.html" target="_top">FRAMES</a></li>
<li><a href="package-summary.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="Package" class="title">Package org.openni.android</h1>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Interface Summary table, listing interfaces, and an explanation">
<caption><span>Interface Summary</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Interface</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/openni/android/OpenNIHelper.DeviceOpenListener.html" title="interface in org.openni.android">OpenNIHelper.DeviceOpenListener</a></td>
<td class="colLast">
<div class="block">Used for receiving the result of <a href="../../../org/openni/android/OpenNIHelper.html#requestDeviceOpen(java.lang.String, org.openni.android.OpenNIHelper.DeviceOpenListener)"><code>OpenNIHelper.requestDeviceOpen(String uri, DeviceOpenListener listener)</code></a>.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation">
<caption><span>Class Summary</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Class</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="../../../org/openni/android/OpenNIHelper.html" title="class in org.openni.android">OpenNIHelper</a></td>
<td class="colLast">
<div class="block">Provides facilities needed for using OpenNI in Android applications.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><a href="../../../org/openni/android/OpenNIView.html" title="class in org.openni.android">OpenNIView</a></td>
<td class="colLast">
<div class="block">A View that displays OpenNI streams.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../overview-summary.html">Overview</a></li>
<li class="navBarCell1Rev">Package</li>
<li>Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../index-all.html">Index</a></li>
<li><a href="../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../org/openni/package-summary.html">PREV PACKAGE</a></li>
<li>NEXT PACKAGE</li>
</ul>
<ul class="navList">
<li><a href="../../../index.html?org/openni/android/package-summary.html" target="_top">FRAMES</a></li>
<li><a href="package-summary.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
apache-2.0
|
mmkal/handy-redis
|
docs/redis-doc/commands/asking.md
|
525
|
When a cluster client receives an `-ASK` redirect, the `ASKING` command is sent to the target node followed by the command which was redirected.
This is normally done automatically by cluster clients.
If an `-ASK` redirect is received during a transaction, only one ASKING command needs to be sent to the target node before sending the complete transaction to the target node.
See [ASK redirection in the Redis Cluster Specification](/topics/cluster-spec#ask-redirection) for details.
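A minimal sketch of the exchange, assuming the key `foo` currently hashes to a slot that is being migrated to `127.0.0.1:6381` (the slot number and addresses here are illustrative only):

    GET foo
    (error) ASK 3999 127.0.0.1:6381

    # the client then connects to 127.0.0.1:6381 and sends:
    ASKING       # replies OK
    GET foo      # served by the target node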
@return
@simple-string-reply: `OK`.
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Cactaceae/Melocactus/Melocactus inconcinnus/README.md
|
191
|
# Melocactus inconcinnus Buining & Brederoo SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
Lyndir/Opal
|
opal-wicket/src/main/java/com/lyndir/lhunath/opal/wayward/navigation/TabState.java
|
835
|
package com.lyndir.lhunath.opal.wayward.navigation;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.wicket.markup.html.panel.Panel;
/**
* <h2>{@link TabState}<br> <sub>[in short] (TODO).</sub></h2>
*
* <p> <i>07 10, 2010</i> </p>
*
* @author lhunath
*/
public interface TabState<P extends Panel> {
/**
* @return The representation of this state as it should be used in the URL's fragment for navigation.
*/
@Nullable
String toFragment();
/**
* Apply this state to the given panel.
*
* @param panel The panel to apply the state to.
*
* @throws IncompatibleStateException If the state is incompatible with the current state and can not be applied.
*/
void apply(@Nonnull P panel)
throws IncompatibleStateException;
}
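// Illustrative sketch only -- UserTabState and UserPanel are invented names, not types from
// this codebase: a state that encodes the shown user's name in the URL fragment.
//
//   public class UserTabState implements TabState<UserPanel> {
//       private final String userName;
//       public UserTabState(String userName) { this.userName = userName; }
//       @Override public String toFragment() { return userName; }
//       @Override public void apply(@Nonnull UserPanel panel) throws IncompatibleStateException {
//           panel.showUser(userName);
//       }
//   }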
|
apache-2.0
|
dstreev/hdp-mac-utils
|
bin/beewrap.sh
|
847
|
#!/bin/bash
CUR_DIR=`pwd`
APP_DIR=`dirname $0`
HIVE_USER=$USER
. $APP_DIR/bee-env.sh
TARGET_ENV=$1
if [ "$TARGET_ENV" == "" ]; then
. $APP_DIR/beepass-default.sh
else
if [ -f $APP_DIR/beepass-$TARGET_ENV.sh ]; then
. $APP_DIR/beepass-$TARGET_ENV.sh
else
echo "Create a 'beepass-<target>.sh' (use beepass-default.sh as template) file in the beewrap.sh directory and add the following:"
echo "Omit HIVE_USER is you user id matches your HIVE User id"
echo " HIVE_USER=<your hive user>"
echo " HS2_PASSWORD=<your Hive Password>"
echo " URL=jdbc:hive2://lnx21116.csxt.csx.com:10000"
echo ""
echo "chmod the file 700"
fi
fi
beeline -u $URL -n $HIVE_USER -p $HS2_PASSWORD --hivevar USER=$HIVE_USER --hivevar EXEC_ENGINE=$EXEC_ENGINE -i $APP_DIR/beeline_init.sql "$@"
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Gentianaceae/Brachycodon/Brachycodon pumilus/README.md
|
176
|
# Brachycodon pumilus Progel SPECIES
#### Status
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
zhaowenjian/CCCourse
|
front/output/js/coffee-js/net_manager.js
|
2423
|
(function() {
var CusPromise, root;
root = typeof exports !== "undefined" && exports !== null ? exports : this;
CusPromise = function(promise_) {
var obj;
obj = {
to: function(target, attr, callback) {
this.promise.then(function(data) {
if (data.data !== void 0 && data.result !== void 0 && data.msg !== void 0) {
target[attr] = data.data;
return callback && callback.call(this, target[attr], data.result, data.msg);
} else {
target[attr] = data;
return callback && callback.call(this, target[attr]);
}
});
return promise_;
},
then: function(callback) {
return this.promise.then(callback);
},
promise: promise_
};
return obj;
};
ng_app.factory("NetManager", [
'$q', '$http', 'SERVER_HOST', '$timeout', function($q, $http, SERVER_HOST, $timeout) {
var net;
net = {
get: function(url, params_, flag) {
var base, counter, deferred, try_connect;
deferred = $q.defer();
base = SERVER_HOST;
counter = 0;
try_connect = function() {
return $http.get(base + url, {
params: params_
}).success(function(data) {
return deferred.resolve(data);
}).error(function(data) {
if (counter > 4) {
return alert('Operation failed!');
} else {
counter++;
return $timeout(try_connect, counter * 500);
}
});
};
try_connect();
return CusPromise(deferred.promise);
},
post: function(url, data_, flag) {
var base, counter, deferred, try_connect;
deferred = $q.defer();
base = SERVER_HOST;
counter = 0;
try_connect = function() {
return $http.post(base + url, data_).success(function(data) {
return deferred.resolve(data);
}).error(function(data) {
if (counter > 4) {
return alert('Operation failed!');
} else {
counter++;
return $timeout(try_connect, counter * 500);
}
});
};
try_connect();
return deferred.promise;
}
};
return net;
}
]);
}).call(this);
|
apache-2.0
|
fdecampredon/jsx-typescript-old-version
|
tests/baselines/reference/varArgParamTypeCheck.js
|
785
|
//// [varArgParamTypeCheck.ts]
function sequence(...sequences:{():void;}[]) {
}
function callback(clb:()=>void) {
}
sequence(
function bar() {
},
function foo() {
callback(()=>{
this();
});
},
function baz() {
callback(()=>{
this();
});
}
);
//// [varArgParamTypeCheck.js]
function sequence() {
var sequences = [];
for (var _i = 0; _i < (arguments.length - 0); _i++) {
sequences[_i] = arguments[_i + 0];
}
}
function callback(clb) {
}
sequence(function bar() {
}, function foo() {
var _this = this;
callback(function () {
_this();
});
}, function baz() {
var _this = this;
callback(function () {
_this();
});
});
|
apache-2.0
|
netbear/CloudAnts
|
test/unit/voldemort/store/compress/CompressingStoreTest.java
|
3533
|
package voldemort.store.compress;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import voldemort.ServerTestUtils;
import voldemort.VoldemortTestConstants;
import voldemort.client.ClientConfig;
import voldemort.client.SocketStoreClientFactory;
import voldemort.client.StoreClient;
import voldemort.serialization.Compression;
import voldemort.server.AbstractSocketService;
import voldemort.store.AbstractByteArrayStoreTest;
import voldemort.store.Store;
import voldemort.store.memory.InMemoryStorageEngine;
import voldemort.utils.ByteArray;
@RunWith(Parameterized.class)
public class CompressingStoreTest extends AbstractByteArrayStoreTest {
private CompressingStore store;
private final boolean useNio;
private final Compression compression;
private final CompressionStrategyFactory compressionFactory = new CompressionStrategyFactory();
public CompressingStoreTest(boolean useNio, String compressionType) {
this.useNio = useNio;
this.compression = new Compression(compressionType, null);
}
@Parameters
public static Collection<Object[]> configs() {
return Arrays.asList(new Object[][] { { true, "gzip" }, { false, "gzip" }, { true, "lzf" },
{ false, "lzf" } });
}
@Override
@Before
public void setUp() throws Exception {
this.store = new CompressingStore(new InMemoryStorageEngine<ByteArray, byte[]>("test"),
compressionFactory.get(compression),
compressionFactory.get(compression));
}
@Override
public Store<ByteArray, byte[]> getStore() {
return store;
}
@Test
public void testPutGetWithSocketService() throws Exception {
int freePort = ServerTestUtils.findFreePort();
String clusterXml = VoldemortTestConstants.getOneNodeClusterXml();
clusterXml = clusterXml.replace("<socket-port>6666</socket-port>", "<socket-port>"
+ freePort
+ "</socket-port>");
AbstractSocketService socketService = ServerTestUtils.getSocketService(useNio,
clusterXml,
VoldemortTestConstants.getCompressedStoreDefinitionsXml(),
"test",
freePort);
socketService.start();
Thread.sleep(1000);
SocketStoreClientFactory storeClientFactory = new SocketStoreClientFactory(new ClientConfig().setBootstrapUrls("tcp://localhost:"
+ freePort)
.setMaxBootstrapRetries(10));
StoreClient<String, String> storeClient = storeClientFactory.getStoreClient("test");
storeClient.put("someKey", "someValue");
assertEquals(storeClient.getValue("someKey"), "someValue");
socketService.stop();
}
}
|
apache-2.0
|
niean/gotools
|
rpool/conn/simple_conn_pool/simple_conn_pool.go
|
2962
|
package simple_conn_pool
import (
"fmt"
"io"
"sync"
"time"
)
//TODO: keep track of all connections instead of only keeping a connection count
var ErrMaxConn = fmt.Errorf("maximum connections reached")
//
type NConn interface {
io.Closer
Name() string
Closed() bool
}
type ConnPool struct {
sync.RWMutex
Name string
Address string
MaxConns int32
MaxIdle int32
Cnt int64
Ts int64
New func(name string) (NConn, error)
active int32
free []NConn
all map[string]NConn
}
func NewConnPool(name string, address string, maxConns int32, maxIdle int32) *ConnPool {
return &ConnPool{Name: name, Address: address, MaxConns: maxConns, MaxIdle: maxIdle,
Cnt: 0, Ts: time.Now().Unix(), all: make(map[string]NConn)}
}
func (this *ConnPool) Proc() string {
this.RLock()
defer this.RUnlock()
return fmt.Sprintf("Name:%s,Ts:%s,Cnt:%d,active:%d,all:%d,free:%d",
this.Name, time.Unix(this.Ts, 0).Format("2006-01-02T15:04:05Z"), this.Cnt, this.active, len(this.all), len(this.free))
}
func (this *ConnPool) Fetch() (NConn, error) {
this.Lock()
defer this.Unlock()
// get from free
conn := this.fetchFree()
if conn != nil {
return conn, nil
}
if this.overMax() {
return nil, ErrMaxConn
}
// create new conn
conn, err := this.newConn()
if err != nil {
return nil, err
}
this.increActive()
return conn, nil
}
func (this *ConnPool) Release(conn NConn) {
this.Lock()
defer this.Unlock()
if this.overMaxIdle() {
this.deleteConn(conn)
this.decreActive()
} else {
this.addFree(conn)
}
}
func (this *ConnPool) ForceClose(conn NConn) {
this.Lock()
defer this.Unlock()
this.deleteConn(conn)
this.decreActive()
}
func (this *ConnPool) Destroy() {
this.Lock()
defer this.Unlock()
for _, conn := range this.free {
if conn != nil && !conn.Closed() {
conn.Close()
}
}
for _, conn := range this.all {
if conn != nil && !conn.Closed() {
conn.Close()
}
}
this.active = 0
this.free = []NConn{}
this.all = map[string]NConn{}
}
// internal, concurrently unsafe
func (this *ConnPool) newConn() (NConn, error) {
name := fmt.Sprintf("%s_%d_%d", this.Name, this.Cnt, time.Now().Unix())
conn, err := this.New(name)
if err != nil {
if conn != nil {
conn.Close()
}
return nil, err
}
this.Cnt++
this.all[conn.Name()] = conn
return conn, nil
}
func (this *ConnPool) deleteConn(conn NConn) {
if conn != nil {
conn.Close()
// Only touch the map when conn is non-nil; calling conn.Name() on a nil conn would panic.
delete(this.all, conn.Name())
}
}
func (this *ConnPool) addFree(conn NConn) {
this.free = append(this.free, conn)
}
func (this *ConnPool) fetchFree() NConn {
if len(this.free) == 0 {
return nil
}
conn := this.free[0]
this.free = this.free[1:]
return conn
}
func (this *ConnPool) increActive() {
this.active += 1
}
func (this *ConnPool) decreActive() {
this.active -= 1
}
func (this *ConnPool) overMax() bool {
return this.active >= this.MaxConns
}
func (this *ConnPool) overMaxIdle() bool {
return int32(len(this.free)) >= this.MaxIdle
}
|
apache-2.0
|
lift/framework
|
core/util/src/test/scala/net/liftweb/util/SoftReferenceCacheSpec.scala
|
604
|
package net.liftweb.util
import org.specs2.mutable._
import net.liftweb.common._
class SoftReferenceCacheSpec extends Specification {
sequential
object cache extends SoftReferenceCache[String, String](1)
"SoftReferenceCache " should {
"Accept additions" in {
cache += ("test" -> "test")
cache.keys.size() must_== 1
}
"Allow objects to be retrieved" in {
val cached = cache("test")
cached must beLike { case Full("test") => ok }
}
"Properly age out entries" in {
cache += ("test2" -> "test2")
cache("test") must_== Empty
}
}
}
|
apache-2.0
|
shardingjdbc/sharding-jdbc
|
sharding-core/sharding-core-merge/src/test/java/org/apache/shardingsphere/sharding/merge/dql/orderby/OrderByStreamMergedResultTest.java
|
11739
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.sharding.merge.dql.orderby;
import com.google.common.collect.ImmutableMap;
import org.apache.shardingsphere.sharding.merge.dql.ShardingDQLResultMerger;
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.table.TableMetaData;
import org.apache.shardingsphere.sql.parser.sql.constant.OrderDirection;
import org.apache.shardingsphere.sql.parser.binder.segment.select.groupby.GroupByContext;
import org.apache.shardingsphere.sql.parser.binder.segment.select.orderby.OrderByContext;
import org.apache.shardingsphere.sql.parser.binder.segment.select.orderby.OrderByItem;
import org.apache.shardingsphere.sql.parser.binder.segment.select.pagination.PaginationContext;
import org.apache.shardingsphere.sql.parser.binder.segment.select.projection.ProjectionsContext;
import org.apache.shardingsphere.sql.parser.binder.statement.dml.SelectStatementContext;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.TableFactorSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.TableReferenceSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.item.ProjectionsSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.dml.order.item.IndexOrderByItemSegment;
import org.apache.shardingsphere.sql.parser.sql.segment.generic.table.SimpleTableSegment;
import org.apache.shardingsphere.sql.parser.sql.statement.dml.SelectStatement;
import org.apache.shardingsphere.sql.parser.sql.value.identifier.IdentifierValue;
import org.apache.shardingsphere.underlying.common.database.type.DatabaseTypes;
import org.apache.shardingsphere.underlying.executor.QueryResult;
import org.apache.shardingsphere.underlying.merge.result.MergedResult;
import org.junit.Before;
import org.junit.Test;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public final class OrderByStreamMergedResultTest {
private SelectStatementContext selectStatementContext;
@Before
public void setUp() {
SelectStatement selectStatement = new SelectStatement();
SimpleTableSegment tableSegment = new SimpleTableSegment(10, 13, new IdentifierValue("tbl"));
TableReferenceSegment tableReferenceSegment = new TableReferenceSegment();
TableFactorSegment tableFactorSegment = new TableFactorSegment();
tableFactorSegment.setTable(tableSegment);
tableReferenceSegment.setTableFactor(tableFactorSegment);
selectStatement.getTableReferences().add(tableReferenceSegment);
ProjectionsSegment projectionsSegment = new ProjectionsSegment(0, 0);
selectStatement.setProjections(projectionsSegment);
OrderByContext orderByContext = new OrderByContext(Arrays.asList(
new OrderByItem(new IndexOrderByItemSegment(0, 0, 1, OrderDirection.ASC, OrderDirection.ASC)),
new OrderByItem(new IndexOrderByItemSegment(0, 0, 2, OrderDirection.ASC, OrderDirection.ASC))), false);
selectStatementContext = new SelectStatementContext(selectStatement, new GroupByContext(Collections.emptyList(), 0), orderByContext,
new ProjectionsContext(0, 0, false, Collections.emptyList()), new PaginationContext(null, null, Collections.emptyList()));
}
@Test
public void assertNextForResultSetsAllEmpty() throws SQLException {
List<QueryResult> queryResults = Arrays.asList(mock(QueryResult.class), mock(QueryResult.class), mock(QueryResult.class));
ShardingDQLResultMerger resultMerger = new ShardingDQLResultMerger(DatabaseTypes.getActualDatabaseType("MySQL"));
MergedResult actual = resultMerger.merge(queryResults, selectStatementContext, createSchemaMetaData());
assertFalse(actual.next());
}
@Test
public void assertNextForSomeResultSetsEmpty() throws SQLException {
List<QueryResult> queryResults = Arrays.asList(mock(QueryResult.class), mock(QueryResult.class), mock(QueryResult.class));
for (int i = 0; i < 3; i++) {
when(queryResults.get(i).getColumnName(1)).thenReturn("col1");
when(queryResults.get(i).getColumnName(2)).thenReturn("col2");
}
ShardingDQLResultMerger resultMerger = new ShardingDQLResultMerger(DatabaseTypes.getActualDatabaseType("MySQL"));
when(queryResults.get(0).next()).thenReturn(true, false);
when(queryResults.get(0).getValue(1, Object.class)).thenReturn("2");
when(queryResults.get(2).next()).thenReturn(true, true, false);
when(queryResults.get(2).getValue(1, Object.class)).thenReturn("1", "1", "3", "3");
MergedResult actual = resultMerger.merge(queryResults, selectStatementContext, createSchemaMetaData());
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("1"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("2"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("3"));
assertFalse(actual.next());
}
@Test
public void assertNextForMix() throws SQLException {
List<QueryResult> queryResults = Arrays.asList(mock(QueryResult.class), mock(QueryResult.class), mock(QueryResult.class));
for (int i = 0; i < 3; i++) {
when(queryResults.get(i).getColumnName(1)).thenReturn("col1");
when(queryResults.get(i).getColumnName(2)).thenReturn("col2");
}
ShardingDQLResultMerger resultMerger = new ShardingDQLResultMerger(DatabaseTypes.getActualDatabaseType("MySQL"));
when(queryResults.get(0).next()).thenReturn(true, false);
when(queryResults.get(0).getValue(1, Object.class)).thenReturn("2");
when(queryResults.get(1).next()).thenReturn(true, true, true, false);
when(queryResults.get(1).getValue(1, Object.class)).thenReturn("2", "2", "3", "3", "4", "4");
when(queryResults.get(2).next()).thenReturn(true, true, false);
when(queryResults.get(2).getValue(1, Object.class)).thenReturn("1", "1", "3", "3");
MergedResult actual = resultMerger.merge(queryResults, selectStatementContext, createSchemaMetaData());
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("1"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("2"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("2"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("3"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("3"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("4"));
assertFalse(actual.next());
}
@Test
public void assertNextForCaseSensitive() throws SQLException {
List<QueryResult> queryResults = Arrays.asList(mock(QueryResult.class), mock(QueryResult.class), mock(QueryResult.class));
for (int i = 0; i < 3; i++) {
when(queryResults.get(i).getColumnName(1)).thenReturn("col1");
when(queryResults.get(i).getColumnName(2)).thenReturn("col2");
}
when(queryResults.get(0).next()).thenReturn(true, false);
when(queryResults.get(0).getValue(1, Object.class)).thenReturn("b");
when(queryResults.get(1).next()).thenReturn(true, true, false);
when(queryResults.get(1).getValue(1, Object.class)).thenReturn("B", "B", "a", "a");
when(queryResults.get(2).next()).thenReturn(true, false);
when(queryResults.get(2).getValue(1, Object.class)).thenReturn("A");
ShardingDQLResultMerger resultMerger = new ShardingDQLResultMerger(DatabaseTypes.getActualDatabaseType("MySQL"));
MergedResult actual = resultMerger.merge(queryResults, selectStatementContext, createSchemaMetaData());
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("A"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("B"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("a"));
assertTrue(actual.next());
assertThat(actual.getValue(1, Object.class).toString(), is("b"));
assertFalse(actual.next());
}
@Test
public void assertNextForCaseInsensitive() throws SQLException {
List<QueryResult> queryResults = Arrays.asList(mock(QueryResult.class), mock(QueryResult.class), mock(QueryResult.class));
for (int i = 0; i < 3; i++) {
when(queryResults.get(i).getColumnName(1)).thenReturn("col1");
when(queryResults.get(i).getColumnName(2)).thenReturn("col2");
}
when(queryResults.get(0).next()).thenReturn(true, false);
when(queryResults.get(0).getValue(2, Object.class)).thenReturn("b");
when(queryResults.get(1).next()).thenReturn(true, true, false);
when(queryResults.get(1).getValue(2, Object.class)).thenReturn("a", "a", "B", "B");
when(queryResults.get(2).next()).thenReturn(true, false);
when(queryResults.get(2).getValue(2, Object.class)).thenReturn("A");
ShardingDQLResultMerger resultMerger = new ShardingDQLResultMerger(DatabaseTypes.getActualDatabaseType("MySQL"));
MergedResult actual = resultMerger.merge(queryResults, selectStatementContext, createSchemaMetaData());
assertTrue(actual.next());
assertThat(actual.getValue(2, Object.class).toString(), is("a"));
assertTrue(actual.next());
assertThat(actual.getValue(2, Object.class).toString(), is("A"));
assertTrue(actual.next());
assertThat(actual.getValue(2, Object.class).toString(), is("B"));
assertTrue(actual.next());
assertThat(actual.getValue(2, Object.class).toString(), is("b"));
assertFalse(actual.next());
}
private SchemaMetaData createSchemaMetaData() {
ColumnMetaData columnMetaData1 = new ColumnMetaData("col1", 0, "dataType", false, false, true);
ColumnMetaData columnMetaData2 = new ColumnMetaData("col2", 0, "dataType", false, false, false);
TableMetaData tableMetaData = new TableMetaData(Arrays.asList(columnMetaData1, columnMetaData2), Collections.emptyList());
return new SchemaMetaData(ImmutableMap.of("tbl", tableMetaData));
}
}
|
apache-2.0
|
Algy/tempy
|
tempy/env.py
|
9404
|
import py_compile
import time
import marshal
import errno
import traceback
from os.path import join as path_join, isfile, isdir, getmtime
from translate import translate_file, translate_string, pystmts_to_string
from struct import unpack
from errors import TempyError, TempyImportError, TempyCompileError, TempyNativeCompileError
TEMPY_EXT = "tpy"
TEMPYC_EXT = "tpyc"
class TempyModule:
def __init__(self, name, env, _dir, _global=None):
self.__name__ = name
self.__env__ = env
self.__dir__ = _dir
self.__global__ = _global or {}
self.__submodule__ = {}
def __repr__(self):
return "<TempyModule %s at %s>"%(repr(self.__name__), self.__dir__)
def __getattr__(self, key):
try:
return self.__global__[key]
except KeyError:
raise AttributeError("%s has no attribute '%s'"%(repr(self), key))
class _Importer:
def __init__(self, env, current_module_name, visited):
self.env = env
self.current_module_name = current_module_name
self.visited = visited
def __call__(self, *names):
return self.env._module(names, self.visited, self.current_module_name)
def _write_code(filename, codeobject):
with open(filename, "wb") as fc:
fc.write('\0\0\0\0')
py_compile.wr_long(fc, long(time.time()))
marshal.dump(codeobject, fc)
fc.flush()
fc.seek(0, 0)
fc.write(py_compile.MAGIC)
def _naive_logger(x): print("[TempyEnvironmentLog]", x)
class CompileOption:
def __init__(self, use_tpyc=True, write_py=False, verbose=False, logger=_naive_logger):
self.use_tpyc = use_tpyc
self.write_py = write_py
self.verbose = verbose
self.logger = logger
def log(self, x):
if self.verbose:
self.logger(x)
class ModuleFetcher:
def __init__(self, systemdir=None, extradirs=None):
self.systemdir = systemdir
self.extradirs = extradirs or []
def _find(self, where, module_name):
file_path = path_join(where, module_name + "." + TEMPY_EXT)
if isfile(file_path):
return file_path
dir_path = path_join(where, module_name)
dir_init_path = path_join(where, module_name, "__init__" + "." + TEMPY_EXT)
if isdir(dir_path) and isfile(dir_init_path):
return dir_init_path
return None
def fetch_dir_by_name(self, pwd, module_name):
'''
Return (tpy filepath, if it is shared), according to given module_name.
If not found, None should be returned.
'''
first = self._find(pwd, module_name)
if first is not None:
return (first, False)
for where in [self.systemdir] + self.extradirs:
res = self._find(where, module_name)
if res is not None:
return (res, True)
return None
def _exchange_ext(s, new_ext):
rdot_idx = s.rfind(".")
if rdot_idx == -1:
return s + "." + new_ext
else:
return s[:rdot_idx] + "." + new_ext
class Environment:
def __init__(self, pwd, cache_module=True, main_name="__main__", module_fetcher=None, compile_option=None):
self.cache_module = cache_module
self.module_fetcher = module_fetcher or ModuleFetcher(pwd)
self.main_module = TempyModule(main_name, self, pwd)
self.shared_dict = {}
self.compile_option = compile_option if compile_option else CompileOption()
def _code_generation(self, tpy_path, tpyc_path, write_to_pyc=True):
if self.compile_option.write_py:
py_path = _exchange_ext(tpyc_path, "py")
try:
with open(py_path, "w") as f:
f.write(pystmts_to_string(translate_file(tpy_path)))
except IOError as err:
self.compile_option.log("IOError occured while writing .py file(%s): %s"%(tpyc_path, str(err)))
code = compile_file(tpy_path)
if write_to_pyc:
try:
_write_code(tpyc_path, code)
except IOError as err:
self.compile_option.log("IOError occured while writing codeobject to .tpyc file(%s): %s"%(tpyc_path, str(err)))
return code
def _retrieve_code(self, tpy_path, tpyc_path):
if self.compile_option.use_tpyc:
if isfile(tpyc_path):
try:
f = open(tpyc_path, "rb")
magic_str = f.read(4)
if len(magic_str) < 4 or py_compile.MAGIC != magic_str:
return self._code_generation(tpy_path, tpyc_path)
timestamp_str = f.read(4)
if len(timestamp_str) < 4:
return self._code_generation(tpy_path, tpyc_path)
tpyc_timestamp = unpack("<I", timestamp_str)[0]
try:
tpy_timestamp = long(getmtime(tpy_path))
except IOError:
tpy_timestamp = 0
if tpyc_timestamp <= tpy_timestamp: # outdated
return self._code_generation(tpy_path, tpyc_path)
code = marshal.load(f)
return code
except IOError as err:
if err.errno == errno.ENOENT: # No such file
self.compile_option.log("Failed to locate .pyc file(%s) even though It was assured that it should be present"%tpyc_path)
return self._code_generation(tpy_path, tpyc_path)
else:
raise
finally:
f.close()
else:
return self._code_generation(tpy_path, tpyc_path)
else:
return self._code_generation(tpy_path, tpyc_path, write_to_pyc=False)
def _import(self, parent_module, module_name, visited=None, invoker_module_name=None):
if module_name in parent_module.__submodule__:
return parent_module.__submodule__[module_name]
elif module_name in self.shared_dict:
return self.shared_dict[module_name]
else:
if visited is None:
visited = set()
pair = self.module_fetcher.fetch_dir_by_name(parent_module.__dir__, module_name)
if pair is None:
raise TempyImportError("No such module named '%s'"%module_name)
tpy_path, is_shared = pair
tpyc_path = _exchange_ext(tpy_path, TEMPYC_EXT)
try:
code = self._retrieve_code(tpy_path, tpyc_path)
except TempyError:
raise
except Exception as error:
err_info = str(error)
err_msg = "Cannot import the module named '%s': %s\n%s"%(module_name, err_info, traceback.format_exc())
raise TempyImportError(err_msg)
else:
lcl = {} # local
gbl = {} # global
exec(code, gbl, lcl)
if is_shared:
current_module_name = module_name
else:
current_module_name = parent_module.__name__ + "." + module_name
if current_module_name in visited:
raise TempyImportError("circular dependency: in module '%s', tried to import '%s'"%(invoker_module_name, module_name))
exec_result = lcl['__tempy_main__'](None,
_Importer(self,
current_module_name,
visited.union([current_module_name])
),
None)
mod = TempyModule(current_module_name, self, path_join(parent_module.__dir__, module_name), exec_result)
if self.cache_module:
if is_shared:
self.shared_dict[module_name] = mod
else:
parent_module.__submodule__[module_name] = mod
return mod
def _module(self, names, visited=None, invoker_module_name=None):
iter_module = self.main_module
invoker_module_name = invoker_module_name or self.main_module.__name__
for module_name in names:
iter_module = self._import(iter_module, module_name, visited, invoker_module_name)
return iter_module
def module(self, dotted_str):
return self._module(dotted_str.split("."))
def _compile_kont(stmts, filename):
src = pystmts_to_string(stmts)
try:
code = compile(src, filename, "exec")
return code
except SyntaxError as error:
raise TempyNativeCompileError(error.args)
def compile_string(path, filename="<string>"):
'''
compile tempy string into compiled python bytecode(.pyc file)
'''
stmts = translate_string(path, filename=filename)
return _compile_kont(stmts, filename)
def compile_file(path, filename=None):
'''
compile tempy file into compiled python bytecode(.pyc file)
'''
if filename is None:
filename = path
stmts = translate_file(path, filename=filename)
return _compile_kont(stmts, filename)
|
apache-2.0
|
kiddinn/plaso
|
tests/parsers/sqlite_plugins/twitter_ios.py
|
3854
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for Twitter on iOS 8+ plugin."""
import unittest
from plaso.lib import definitions
from plaso.parsers.sqlite_plugins import twitter_ios
from tests.parsers.sqlite_plugins import test_lib
class TwitterIOSTest(test_lib.SQLitePluginTestCase):
"""Tests for Twitter on iOS 8+ database plugin."""
def testProcess(self):
"""Test the Process function on a Twitter iOS file."""
plugin = twitter_ios.TwitterIOSPlugin()
storage_writer = self._ParseDatabaseFileWithPlugin(
['twitter_ios.db'], plugin)
# We should have 184 events in total.
# * 25 Contacts creation events.
# * 25 Contacts update events.
# * 67 Status creation events.
# * 67 Status update events.
self.assertEqual(storage_writer.number_of_events, 184)
self.assertEqual(storage_writer.number_of_extraction_warnings, 0)
self.assertEqual(storage_writer.number_of_recovery_warnings, 0)
events = list(storage_writer.GetEvents())
# Test the first contact creation event.
expected_event_values = {
'data_type': 'twitter:ios:contact',
'date_time': '2007-04-22 14:42:37',
'description': (
'Breaking news alerts and updates from the BBC. For news, '
'features, analysis follow @BBCWorld (international) or @BBCNews '
'(UK). Latest sport news @BBCSport.'),
'followers_count': 19466932,
'following': 0,
'following_count': 3,
'location': 'London, UK',
'name': 'BBC Breaking News',
'profile_url': (
'https://pbs.twimg.com/profile_images/460740982498013184/'
'wIPwMwru_normal.png'),
'screen_name': 'BBCBreaking',
'timestamp_desc': definitions.TIME_DESCRIPTION_CREATION,
'url': 'http://www.bbc.co.uk/news'}
self.CheckEventValues(storage_writer, events[0], expected_event_values)
# Test a contact modification event.
expected_event_values = {
'data_type': 'twitter:ios:contact',
'date_time': '2015-12-02 15:35:44',
'description': (
'Breaking news alerts and updates from the BBC. For news, '
'features, analysis follow @BBCWorld (international) or @BBCNews '
'(UK). Latest sport news @BBCSport.'),
'followers_count': 19466932,
'following': 0,
'following_count': 3,
'location': 'London, UK',
'name': 'BBC Breaking News',
'profile_url': (
'https://pbs.twimg.com/profile_images/'
'460740982498013184/wIPwMwru_normal.png'),
'screen_name': 'BBCBreaking',
'timestamp_desc': definitions.TIME_DESCRIPTION_UPDATE,
'url': 'http://www.bbc.co.uk/news'}
self.CheckEventValues(storage_writer, events[1], expected_event_values)
# Test a status creation event.
expected_event_values = {
'data_type': 'twitter:ios:status',
'date_time': '2014-09-11 11:46:16',
'favorite_count': 3,
'favorited': 0,
'name': 'Heather Mahalik',
'retweet_count': 2,
'text': 'Never forget. http://t.co/L7bjWue1A2',
'timestamp_desc': definitions.TIME_DESCRIPTION_CREATION,
'user_id': 475222380}
self.CheckEventValues(storage_writer, events[50], expected_event_values)
# Test a status update event.
expected_event_values = {
'data_type': 'twitter:ios:status',
'date_time': '2015-12-02 15:39:37',
'favorite_count': 3,
'favorited': 0,
'name': 'Heather Mahalik',
'retweet_count': 2,
'text': 'Never forget. http://t.co/L7bjWue1A2',
'timestamp_desc': definitions.TIME_DESCRIPTION_UPDATE,
'user_id': 475222380}
self.CheckEventValues(storage_writer, events[51], expected_event_values)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
INAETICS/Drones-Simulator
|
implementation/pubsub/inaetics/inaetics/test/src/test/java/org/inaetics/dronessimulator/pubsub/inaetics/Demo.java
|
1661
|
package org.inaetics.dronessimulator.pubsub.inaetics;
import org.apache.felix.dm.annotation.api.Destroy;
import org.apache.felix.dm.annotation.api.Init;
import org.apache.felix.dm.annotation.api.Start;
import org.apache.felix.dm.annotation.api.Stop;
import org.inaetics.dronessimulator.pubsub.inaetics.subscriber.DemoSubscriber;
import org.inaetics.pubsub.api.pubsub.*;
import org.osgi.framework.BundleContext;
import org.osgi.framework.FrameworkUtil;
import org.osgi.framework.ServiceRegistration;
import java.util.Dictionary;
import java.util.Hashtable;
public class Demo {
public static final String SERVICE_PID = Demo.class.getName();
private Subscriber subscriber;
private Publisher publisher;
private BundleContext bundleContext = FrameworkUtil.getBundle(Demo.class).getBundleContext();
private ServiceRegistration registration;
@Init
protected final void init(){
System.out.println("INITIALIZED " + this.getClass().getName());
this.subscriber = new DemoSubscriber();
}
@Start
protected final void start(){
System.out.println("STARTED " + this.getClass().getName());
Dictionary<String, String> properties = new Hashtable<>();
properties.put(Subscriber.PUBSUB_TOPIC, "test");
registration = bundleContext.registerService(Subscriber.class.getName(), subscriber, properties);
}
@Stop
protected final void stop(){
System.out.println("STOPPED " + this.getClass().getName());
registration.unregister();
}
@Destroy
protected final void destroy(){
System.out.println("DESTROYED " + this.getClass().getName());
}
}
|
apache-2.0
|
ceineke/astyanax-jpa
|
astyanax-jpa-impl/src/test/java/ei/ne/ke/cassandra/cql3/template/BatchStatementBuilderTest.java
|
1820
|
/*
* Copyright 2013 EK3 Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ei.ne.ke.cassandra.cql3.template;
// CHECKSTYLE:OFF
import static ei.ne.ke.cassandra.cql3.template.Statement.batch;
import static ei.ne.ke.cassandra.cql3.template.Statement.delete;
import static ei.ne.ke.cassandra.cql3.template.Statement.insert;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
// CHECKSTYLE:ON
/**
*
*/
public class BatchStatementBuilderTest {
@Test(expected = IllegalArgumentException.class)
public void buildWithInsertTimestamp() {
Mutating s = insert().into("foo").columns("foo", "bar").usingTimestamp(100);
batch().statements(s).build();
}
@Test(expected = IllegalArgumentException.class)
public void buildWithDeleteTimestamp() {
Mutating s = delete().from("foo").columns("a", "b").usingTimestamp(100);
batch().statements(s).build();
}
@Test
public void buildWithTimestampStatement() {
Mutating s1 = insert().into("foo").columns("a", "b");
Mutating s2 = delete().from("foo").columns("a", "b");
String cql = batch().statements(s1, s2).build();
assertEquals("BEGIN BATCH INSERT INTO foo (a, b) VALUES (?, ?); DELETE a, b FROM foo; APPLY BATCH;", cql);
}
}
|
apache-2.0
|
witcxc/saiku
|
saiku-ui/css/saiku/src/styles.css
|
48755
|
/*
* Copyright 2012 OSBI Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileOverview Saiku User Interface (UI) Default Styles
* @author Prashant Raju and Mark Cahill
* @version 2.0.0
*/
/** Globals */
html, body {
background: #FFF;
height: 100%;
min-width: 960px;
min-height: 600px;
overflow: auto;
text-align: left;
width: 100%;
font-family: "Helvetica Neue", Arial, Helvetica, sans-serif;
font-size: 11px;
line-height: 1.5;
margin:0;
padding:0;
border:0;
}
.clearfix:before,
.clearfix:after {
content: " ";
display: table;
}
.clearfix:after {
clear: both;
}
.clearfix {
*zoom: 1;
}
input[type=text], input[type=password],
input.text, input.title,
textarea {
background-color:#fff;
border:1px solid #bbb;
}
input[type=text]:focus, input[type=password]:focus,
input.text:focus, input.title:focus,
textarea:focus {
border-color:#666;
outline: 1;
}
input[disabled],
select[disabled] {
background-color: #f0f0f0;
cursor: not-allowed;
}
select { background-color:#fff; border-width:1px; border-style:solid; }
input[type=text], input[type=password],
input.text, input.title,
textarea, select {
margin:0.5em 0;
}
:focus { outline: 0; }
.clear {
clear: both;
}
.hide {
display: none;
}
.calign {
text-align: center;
}
.lalign {
text-align: left;
}
.ralign {
text-align: right;
}
.used {
font-weight: bold;
}
.sprite, .ui-dialog-titlebar-close,.ui-dialog-titlebar-close button, .ui-dialog-titlebar-close:hover, .dialog_icon {
background: url(../../../images/sprite.png) no-repeat;
}
.truncate {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.accordion-toggle {cursor: pointer;}
.accordion-content {display: none;padding:10px;}
.accordion-content.default {display: block;}
/** Header */
#header {
background: #F0F0F0;
border-bottom: 3px solid #F0F0F0;
}
.refresh_cubes {
background: url('../../../images/src/arrow_refresh.png') no-repeat;
border: 1px solid transparent;
display: block;
height: 26px;
padding: 4px;
width: 26px;
}
.refresh_cubes_nav {
float:right;
margin-top:-30px;
margin-right:8px;
}
.refresh_cubes_nav a.button {
display: block;
height: 12px;
padding: 3px;
width: 12px;
border-radius: 3px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.refresh_cubes_nav a.button:hover {
border: 1px solid #CCC;
}
.admin_console {
background: url('../../../images/src/admin_console.png') no-repeat;
border: 1px solid transparent;
display: block;
height: 26px;
padding: 4px;
width: 26px;
}
.admin_console_nav {
float:right;
margin-top:-30px;
margin-right:8px;
}
.admin_console_nav a.button {
display: block;
height: 12px;
padding: 3px;
width: 12px;
border-radius: 3px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.admin_console_nav a.button:hover {
border: 1px solid #ccc;
}
/** Toolbar */
#toolbar {
background: #EEEEEE;
background: -webkit-gradient(linear, left top, left bottom, from(#EEEEEE), to(#DDDDDD));
border-bottom: 1px solid #BBB;
height: 50px;
}
#toolbar ul {
list-style-type: none;
margin: 0;
padding: 7px 10px 7px 10px;
}
#toolbar ul li {
display: inline-block;
float: left;
margin: 0;
padding-right: 6px;
}
#toolbar ul li.separator {
border-left: 1px solid #CCC;
display: block;
height: 26px;
padding: 4px 4px 6px 4px;
width: 1px;
}
#toolbar ul li a {
border: 1px solid transparent;
display: block;
height: 26px;
padding: 4px;
width: 26px;
}
#toolbar ul li a:hover {
background-color: #F0F0F0;
border: 1px solid #CCC;
padding: 4px;
border-radius: 2px;
-moz-border-radius: 2px;
-webkit-border-radius: 2px;
}
#toolbar ul li a.new_tab {
background-position: -1554px 5px;
}
#toolbar ul li a.open_query {
background-position: -1628px 5px;
}
#toolbar ul li a.schema_designer {
background: url(../../../images/src/main_toolbar_schema_designer.png) no-repeat center;
}
#toolbar ul li a.logout {
background-position: -1776px 5px;
}
#toolbar ul li a.about {
background-position: -1702px 5px;
}
#toolbar ul li a.bug {
background-position: -1480px 5px;
}
#toolbar ul li a.translate {
background-position: -1848px -86px;
color: #FFF;
text-align: right;
text-decoration: none;
text-shadow: 1px 1px 1px #000;
}
#toolbar ul li a.admin {
background: url(../../../images/src/admin.png) no-repeat center;
}
#toolbar ul li a.dashboard {
background: url(../../../images/src/dashboard.png) no-repeat center;
}
/** Logo */
#logo a {
background-position: -1275px 1px;
float: right;
width: 166px;
height: 28px;
margin: -15px 10px 0 0;
text-indent: -9999px;
}
/** Tabs */
.tabs {
background: #F5F5F5;
border-bottom: 1px solid #CCC;
height: 28px;
/*margin-bottom: 3px;*/
padding-top: 3px;
text-align: left;
overflow: hidden;
}
.tabs a.pager {
background-position: -1919px -86px;
background-color: #DDD;
border: 1px solid #BBB;
float: right;
font-size: 120%;
margin-right: 5px;
width: 20px;
height: 20px;
text-decoration: none;
}
.pager_contents {
position: absolute;
top: 75px;
right: 5px;
width: 200px;
height: 300px;
overflow: auto;
background-color: #EEE;
border: 1px solid #BBB;
}
.pager_contents a {
color: #666;
text-decoration: none;
}
.tabs ul {
background: #F8F8F8;
margin: 0;
padding: 0 0 0 10px;
}
.tabs ul li {
float: left;
list-style: none;
padding-right: 4px;
background: #F2F2F2;
color: #888;
border-left: 1px solid #DDD;
border-top: 1px solid #DDD;
border-right: 1px solid #DDD;
display: block;
margin:1px 1px -1px 0;
padding: 4px 0px 4px 0px;
text-decoration: none;
border-radius: 2px 2px 0 0;
-moz-border-radius: 2px 2px 0 0;
-webkit-border-radius: 2px 2px 0 0;
}
.tabs ul li.selected {
background: #F0F0F0;
border-left: 1px solid #CCC;
border-top: 1px solid #CCC;
border-right: 1px solid #CCC;
background: -webkit-gradient(linear, left top, left bottom, from(#FFFFFF), to(#F0F0F0));
border-bottom: 1px solid #F0F0F0 !important;
font-weight: bold;
}
.tabs ul li:hover {
border-left: 1px solid #BBB;
border-top: 1px solid #BBB;
border-right: 1px solid #BBB;
}
.tabs ul li a {
color: #777;
cursor: pointer;
padding: 5px 2px 5px 10px;
text-decoration: none;
text-shadow: 1px 1px 0 #FFFFFF;
}
.tabs ul li.selected a {
color: #222;
}
.tabs ul li a:hover{
color: #222;
}
.tabs ul li span.close_tab {
-moz-background-origin: 0px 2px;
background-position: -624px -96px;
display: inline-block;
height: 18px;
text-indent: -9999px;
width: 20px;
}
.tabs ul li span.close_tab:hover {
background-position: -690px -96px;
cursor: pointer !important;
display: inline-block;
height: 18px;
width: 20px;
-moz-background-origin: 0px 2px;
}
/** Sidebar */
.sidebar {
float: left;
overflow: auto;
width: 240px;
}
.sidebar h3 {
background: #F9F9F9;
/*
background: -webkit-gradient(linear, left top, left bottom, from(#FFFFFF), to(#F0F0F0));
background: -moz-linear-gradient(top, #FFFFFF 0%, #F0F0F0 100%);
*/
border-top: 1px solid #CCC;
border-bottom: 1px solid #CCC;
font-size: 12px;
font-weight: bold;
line-height: 1.6em;
margin: 0;
padding: 4px 10px 6px;
text-shadow: 1px 1px 0 #FFFFFF;
}
.sidebar h3.top {
border-top: none !important;
margin: 0;
padding-top: 10px;
padding-bottom: 10px;
}
.sidebar .sidebar_inner {
padding: 10px;
}
.sidebar .sidebar_inner select {
border: 1px solid #CCC;
font-family: "Helvetica Neue", Arial, Helvetica, sans-serif;
font-size: 100.01%;
margin: 0;
padding: 2px;
width: 100%;
}
.sidebar .sidebar_inner ul {
margin: 0;
}
/** Sidebar Separator */
.sidebar_separator {
background: #F0F0F0;
border-left: 1px solid #CCC;
cursor: pointer;
float: left;
/*
border-right: 1px solid #CCC;
width: 3px;
*/
}
.sidebar_separator:hover {
background: #E0E0E0;
border-right: 1px solid #CCC;
border-left: 1px solid #CCC;
width: 3px;
}
/** Tree */
#query_info {
list-style-type: none;
padding-left: 0;
}
.dimension_tree ul,
.measure_tree ul {
margin: 0;
padding: 0;
}
.dimension_tree ul li,
.measure_tree ul li {
line-height: 19px;
list-style-type: none;
margin: 0;
padding-left: 0;
}
.dimension_tree ul li span.collapsed,
.measure_tree ul li span.collapsed {
background-position: -1409px -95px;
cursor: pointer;
padding: 5px 6px;
/*
FLAT UI - no folders
background-position: -1406px -95px;
cursor: pointer;
padding: 5px 17px;
*/
}
.dimension_tree ul li span.expand,
.measure_tree ul li span.expand {
background-position: -1541px -95px;
cursor: pointer;
padding: 5px 6px;
/*
FLAT UI - no folders
background-position: -1538px -95px;
cursor: pointer;
padding: 5px 17px;
*/
}
.dimension_tree ul li a,
.measure_tree ul li a {
color: #222;
padding: 5px;
text-decoration: none;
}
.dimension_tree ul li a.folder_expand,
.measure_tree ul li a.folder_expand {
background: #fff;
}
.dimension_tree ul li a.folder_collapsed,
.measure_tree ul li a.folder_collapsed {
background: #fff;
}
.dimension_tree ul li ul,
.measure_tree ul li ul {
padding: 0 0 0 10px;
margin: 0;
}
.measure_tree ul li ul li a.measure {
color: #983030;
}
.dimension_tree ul li ul li a.level {
color: #2A6085;
/*
padding-left: 20px;
background: url('../../../images/src/bullet_blue.png') 0px 4px no-repeat;
*/
}
.measure_tree ul li ul li a {
padding-left: 10px;
/*
padding-left: 20px;
background: url('../../../images/src/bullet_red.png') 0px 4px no-repeat;
*/
}
.measure_caption a.button {
display: block;
height: 16px;
padding: 1px 10px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.measure_caption a.button:hover {
border: 1px solid #CCC;
padding: 0px 9px;
cursor: pointer;
}
.measure_caption a.button.addMeasure {
float: right;
font-weight: normal;
}
.dimension_tree .hierarchy {
padding-left: 0px;
}
.dimension_tree .hierarchy a {
cursor: default;
padding-left: 20px;
background: url('../../../images/src/bullet_green.png') 0px 4px no-repeat;
}
.dimension_tree .treehidden, .measure_tree .treehidden {
display: none;
}
.sidebar h3 {
line-height: 1.5em;
margin: 0;
padding: 5px 10px 6px;
}
/** Tab Panel */
#tab_panel {
border-top: 1px solid #CCC;
}
/** Tab Workspace */
.workspace {
background: #FFFFFF;
float: right;
margin-left: -261px;
width: 100%;
}
/** Tab Workspace Inner */
.workspace_inner {
margin-left: 241px;
/*overflow: auto;*/
}
/** Draggables, Sortables and Droppables */
.ui-dialog.ui-draggable {
margin: 0px !important;
}
.ui-dialog.ui-draggable-dragging {
display: block !important;
height: auto !important;
text-align: left !important;
padding: 0.2em !important;
line-height: 16px !important;
}
.ui-dialog.ui-draggable-dragging .form_button {
padding: 5px 5px !important;
background: #F0F0F0 !important;
background: -webkit-gradient(linear, left top, left bottom, from(#FFFFFF), to(#F0F0F0)) !important;
}
.ui-dialog.ui-draggable-dragging .ui-dialog-titlebar-close {
background-image: url(../../../images/sprite.png) !important;
padding: 1px !important;
}
.ui-selectable-helper {
position:absolute;
z-index:100;
border:2px solid #86fe00;
background-color: rgba(203, 239, 163, 0.6);
}
/** Table */
.datatable {
background: #FFF;
}
.datatable .int {
text-align: right;
}
.datatable .str {
text-align: left;
}
.datatable tr th {
background: #FFF;
border-bottom: 1px solid #CCC;
border-right: 1px solid #CCC;
font-weight: bold;
}
.datatable tr td {
background: #FFF !important;
border-bottom: 1px solid #CCC;
border-right: 1px solid #CCC;
}
/** Dialog */
.ui-widget {
font-family: inherit !important;
font-size: inherit !important;
}
.ui-widget-header {
border: 0 !important;
font-weight: normal !important;
}
.ui-dialog {
background: #FFF;
border: 3px solid #EEE;
min-width: 400px !important;
min-height: auto !important;
border-radius: 3px;
border: 1px solid #CCC;
z-index: 100;
position: absolute;
}
#login_form label, #add_folder label {
display: block;
}
#password, #username, #email, input.newfolder {
display: block;
padding: 4px;
width: 240px;
}
#login_form .eval-panel {
width: 250px;
}
#login_form #eval-login {
float: right;
}
#login_form .eval-panel-user ul {
padding: 0;
float: left;
list-style: none;
}
#login_form .eval-panel-user ul li:first-child {
font-weight: bold;
}
.context-menu-item {
line-height: 16.5px;
}
.ui-dialog-content {
padding: 0 !important;
}
.ui-dialog-titlebar {
background: #F0F0F0;
background: -webkit-gradient(linear, left top, left bottom, from(#FFFFFF), to(#F0F0F0));
border-bottom: 1px solid #CCC;
height: 18px;
padding: 5px 5px 6px 10px !important;
border-radius: 0 !important;
box-shadow: 0 1px 3px #DDDDDD;
-moz-box-shadow:0 1px 3px #DDDDDD;
-webkit-box-shadow:0 1px 3px #DDDDDD;
font-family: inherit !important;
}
.ui-dialog-titlebar .ui-dialog-title {
float: left;
font-size: 12px;
font-weight: bold;
line-height: 18px;
margin: 0;
text-shadow: 1px 1px 0 #FFFFFF;
}
.ui-dialog .ui-dialog-titlebar-close {
background-color: inherit;
border: none;
background-position: -690px -97px;
display: inline-block;
float: right;
height: 18px;
text-indent: -9999px;
width: 20px;
}
.ui-dialog .ui-dialog-titlebar-close:hover {
padding: 1px;
background-position: -690px -97px;
background-color: transparent;
cursor: pointer;
}
.ui-icon {
background: none !important;
}
.ui-state-hover {
border: 0px !important;
}
.ui-dialog-content {
padding: 10px;
}
.dialog_body {
padding: 5px;
overflow: auto !important;
}
.dialog_icon {
width: 100px;
height: 100px;
float: left;
}
.dialog_info .dialog_icon {
background-position: -910px -87px;
width: 50px;
}
.dialog_info .dialog_body {
padding: 10px;
}
.dialog_mdx .dialog_icon {
background-position: -1075px -87px;
width: 40px;
}
.dialog_mdx .dialog_body textarea {
width: 450px;
height: 400px;
font-size: 11px;
}
.dialog_save .dialog_icon {
background-position: -1315px -85px;
width: 50px;
}
.dialog_delete .dialog_icon {
background-position: -741px -85px;
width: 60px;
}
.dialog_delete .dialog_body {
padding-top: 10px;
}
.dialog_login .dialog_icon {
background-position: -666px 15px;
}
.dialog_selections {
min-width: 800px !important;
height: 490px !important;
}
.dialog_selections .dialog_icon,
.dialog_date-filter .dialog_icon {
display: none;
}
.dialog_selections .dialog_body,
.dialog_date-filter .dialog_body {
height: 410px;
position: relative;
}
.dialog_selections .dialog_body .available_selections {
padding-left: 5px;
float: left;
}
.dialog_selections .dialog_body .selection_buttons {
float: left;
padding-left: 25px;
padding-top: 85px;
text-align: center;
vertical-align: middle;
}
.dialog_selections .header_selections {
padding-bottom: 10px;
}
.dialog_selections .header_selections input {
margin-left: 5px;
height: inherit;
}
.dialog_selections .dialog_body .used_selections {
padding-left: 25px;
float: left;
}
.dialog_selections .dialog_body .updown_buttons {
float: left;
padding-left: 25px;
padding-top: 145px;
text-align: center;
vertical-align: middle;
display: none;
}
.dialog_selections .dialog_body .selection_options {
height: 270px;
width: 430px;
font-size: 11px;
overflow: auto;
border: 1px solid #CCC;
}
.dialog_selections .dialog_body .selection_options ul {
list-style: none;
padding-left: 3px;
margin: 0;
white-space: nowrap;
}
.dialog_selections .dialog_body .selection_options ul li.all_options {
border-bottom: 1px solid #CCC;
margin-bottom: 2px;
padding-bottom: 2px;
}
.dialog_selections .dialog_body .selection_options ul li label,
.dialog_selections .dialog_body .selection_options ul li input {
cursor: pointer;
}
.dialog_selections .available_selections.unique label.unique,
.dialog_selections .used_selections.unique label.unique {
display: inline;
}
.dialog_selections .available_selections.unique label.caption,
.dialog_selections .used_selections.unique label.caption {
display: none;
}
.dialog_selections .available_selections.caption label.unique,
.dialog_selections .used_selections.caption label.unique {
display: none;
}
.dialog_selections .available_selections.caption label.caption,
.dialog_selections .used_selections.caption label.caption {
display: inline;
}
.totals_container {
margin:4px;
margin-bottom:0px;
}
.totals_container label{
margin-right:4px;
}
#show_totals {
width: 140px;
height: 20px;
}
.dialog_selections .filterbox {
width: 300px;
}
.ui-autocomplete {
max-height: 300px;
overflow-x: hidden;
overflow-y: auto;
z-index: 1000;
position: absolute;
}
.dialog_selections .options:before {
clear:both;
}
.dialog_selections .options, .dialog_selections .hint, .dialog_selections .info {
float: left;
width: 250px;
text-align: left;
}
.dialog_selections .info {
width: 400px;
}
.dialog_selections .available_selections input[type="text"] {
width: 300px;
}
.dialog_date-filter {
min-width: 500px !important;
height: 490px !important;
}
.dialog_date-filter .dialog_body .box-selections {
position: relative;
margin: 20px auto;
width: 100%;
/*border: 1px solid #000;*/
}
.dialog_date-filter .dialog_body .available-selections {
margin: 0 auto;
width: 80%;
/*border: 1px solid #000;*/
}
.dialog_date-filter .dialog_body .selection-option {
position: absolute;
top: 50%;
left: 18px;
transform: translateY(-50%);
}
.dialog_date-filter .dialog_body .selection-options {
padding: 10px;
width: 430px;
height: auto;
font-size: 11px;
border: 1px solid #ccc;
overflow: auto;
}
.dialog_date-filter .dialog_body .form-group-selection {
display: inline-block;
width: 100px;
}
.dialog_date-filter .dialog_body .form-group {
margin-top: 5px;
padding-right: 10px;
float: left;
/*border: 1px solid #000;*/
}
.dialog_date-filter .dialog_body .inline-form-group {
display: inline-block;
}
.dialog_date-filter .dialog_body .form-group label,
.dialog_date-filter .dialog_body .form-group select {
padding-right: 10px;
}
.dialog_date-filter .dialog_body .form-group #selection-date {
width: 100px;
}
.dialog_date-filter .dialog_body .form-group #add-date {
margin-left: 5px;
cursor: pointer;
}
.dialog_date-filter .dialog_body #div-selected-date fieldset {
width: 150px;
border: 1px solid #ccc;
}
.dialog_date-filter .dialog_body .form-group #selected-date {
padding: 0;
max-height: 50px;
list-style: none;
overflow: auto;
}
.dialog_date-filter .dialog_body .form-group #selected-date a {
margin-left: 5px;
color: blue;
text-decoration: none;
}
.dialog_date-filter .dialog_body .form-group #selected-date a:hover {
text-decoration: underline;
}
.dialog_permissions .dialog_body {
width: 370px;
height: 380px;
}
.dialog_permissions .dialog_icon {
display: none;
}
.dialog_permissions .dialog_icon {
display: none;
}
.dialog_permissions .select_roles {
width: 350px;
height: 130px;
}
.dialog_filter .dialog_icon {
display: none;
}
.dialog_filter .dialog_body {
margin: 15px;
line-height: 30px;
}
.dialog_filter .dialog_body .n {
width: 40px;
}
.dialog_filter td.col1 {
width: 240px;
}
.dialog_filter textarea.sortliteral {
width: 200px;
height: 40px;
}
.dialog_filter textarea.filter_expression {
width: 300px;
height: 60px;
}
.dialog_filter textarea.measureFormula {
width: 300px;
height: 60px;
}
.dialog_drillthrough .dialog_icon {
display: none;
}
.dialog_drillthrough .dialog_body .maxrows {
width: 60px;
}
.dialog_drillthrough .dialog_body .row_limit {
text-align: center;
}
.ui-menu {
list-style:none;
padding: 2px;
margin: 0;
display:block;
float: left;
}
.ui-menu .ui-menu {
margin-top: -3px;
}
.ui-menu .ui-menu-item {
margin:0;
padding: 0px;
zoom: 1;
float: left;
clear: left;
width: 100%;
}
.ui-menu .ui-menu-item a {
text-decoration:none;
}
.ui-menu .ui-menu-item a.description {
font-size: 8px;
color: #999;
padding: 0;
}
.dialog_footer {
border-top: 1px solid #CCC;
padding: 10px 10px;
text-align: right;
clear: both;
}
.dialog_footer .form_button {
margin-left: 5px;
}
/** Workspace Area */
.workspace_area {
overflow: auto;
}
.workspace_editor {
padding-left: 5px;
}
.mdx_input {
border: 1px solid #CCCCCC;
margin-top: 4px;
/* BECAUSE OF QUERY TOOLBAR */
margin-right: 112px;
}
.workspace_editor .editor_info {
color: #999999;
}
.query_processing {
border: 1px dashed #CCC;
margin: 15px;
overflow: hidden;
padding: 15px;
}
/** Workspace Results */
.workspace_results_info {
color: #999999;
padding-right: 20px;
overflow: hidden;
}
.parameter_input {
padding-left: 5px;
}
.parameter_input span, .parameter_input input {
margin-right: 5px;
}
/** Save Dialog */
.error_msg {
color: #CC0000;
}
/** blockUI */
.processing_container, .blockOverlay {
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
background-color: #fff;
opacity: 0.5;
z-index: 9999;
padding: 20px;
}
.processing_content {
padding: 5px;
display: inline;
}
.processing, .blockElement {
border-radius: 3px;
border: 1px solid #CCC;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.10);
-moz-box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.10);
-webkit-box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.10);
z-index: 10001;
background-color: #fff;
position: fixed;
width: 20%;
min-width: 300px;
left: 40%;
top: 45%;
padding: 20px;
}
.processing_image {
display: inline-block;
background-image: url('../../../images/src/waiting.gif');
background-repeat: no-repeat;
width: 16px;
height: 16px;
margin-right: 5px;
}
.saiku_logo {
display: inline-block;
background-image: url('../../../images/src/saiku_32x32.png');
background-repeat: no-repeat;
background-position: 50% 50%;
width: 32px;
height: 32px;
padding: 5px;
margin-right: 20px;
}
/** Workspace toolbar. */
.workspace_toolbar {
background: #F9F9F9;
border-bottom: 1px solid #CCC;
font-weight: bold;
height: 32px;
margin: 0 0 3px 0;
padding: 3px 10px 3px 5px;
text-shadow: 1px 1px 0 #FFFFFF;
overflow: hidden;
box-shadow: 2px 1px 3px rgba(0, 0, 0, 0.1);
-moz-box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.10);
-webkit-box-shadow: 2px 2px 4px rgba(0, 0, 0, 0.10);
}
.workspace_toolbar h3.top {
border-top: none !important;
margin: 0;
padding-top: 10px;
padding-bottom: 10px;
}
a.disabled_toolbar {
-ms-filter: "progid:DXImageTransform.Microsoft.Alpha(Opacity=30)";
filter: alpha(opacity = 30);
opacity: 0.3 !important;
}
.disabled {
-ms-filter: "progid:DXImageTransform.Microsoft.Alpha(Opacity=30)";
filter: alpha(opacity = 30);
opacity: 0.3 !important;
}
a.disabled_toolbar:hover {
border: none !important;
padding: 9px 8px !important;
}
.workspace_toolbar ul {
margin: 0;
padding: 0;
}
.workspace_toolbar ul li {
display: inline;
float: left;
padding-left: 5px;
}
.workspace_toolbar ul li.hide {
display: none;
}
.workspace_toolbar ul li.seperator {
border-left: 1px solid #D5D5D5;
margin-left: 5px;
}
.workspace_toolbar a.button {
display: block;
height: 16px;
padding: 8px;
width: 16px;
border-radius: 3px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
background-repeat:no-repeat;
}
.workspace_toolbar a.button:hover {
border: 1px solid #CCC;
padding: 7px;
}
.workspace_toolbar a.on {
border: 1px solid #CCC;
background-color: #E0E0E0;
padding: 7px !important;
}
.workspace_toolbar .open {
background-position: -1040px 7px;
}
.workspace_toolbar .edit_folder {
background-image: url('../../../images/src/folder_edit.png');
background-position: 7px 7px;
}
.workspace_toolbar .edit_permissions {
background-image: url('../../../images/src/group_edit.png');
background-position: 7px 7px;
}
.workspace_toolbar .edit {
background-image: url('../../../images/src/pencil.png');
background-position: 7px 7px;
}
.workspace_toolbar .delete {
background-position: -521px 7px;
}
.workspace_toolbar .save {
background-position: -485px -90px;
}
.workspace_toolbar .new {
background-image: url('../../../images/src/page_white.png');
background-position: 7px 7px;
}
.workspace_toolbar .run {
background-position: -123px -90px;
}
.workspace_toolbar .auto {
background-position: 7px 7px;
}
.workspace_toolbar .non_empty {
background-position: -1914px 7px;
}
.workspace_toolbar .group_parents {
background-image: url('../../../images/src/hide_parents.png');
background-position: 7px 7px;
}
.workspace_toolbar .swap_axis {
background-position: -551px -90px;
}
.workspace_toolbar .mdx {
background-position: -1848px 7px;
}
.workspace_toolbar .explain_query {
background-image: url('../../../images/src/information.png');
background-position: 7px 7px;
}
.workspace_toolbar .export_csv {
background-position: -455px 7px;
}
.workspace_toolbar .export_xls {
background-position: -847px 7px;
}
.workspace_toolbar .export_pdf {
background-image: url('../../../images/src/button_pdf.png');
background-position: 7px 7px;
}
.workspace_toolbar .toggle_fields {
background-position: -913px 7px;
}
.workspace_toolbar .toggle_sidebar {
background-position: -1635px -90px;
}
.workspace_toolbar .drillthrough {
background-position: -781px 7px;
}
.workspace_toolbar .drillthrough_export {
background-image: url('../../../images/src/export_drillthrough.png');
background-position: 7px 7px;
}
.workspace_toolbar .query_scenario {
background-image: url('../../../images/src/query_scenario.png');
background-repeat:no-repeat;
background-position: 7px 7px;
}
.workspace_toolbar .zoom_mode {
background-image: url('../../../images/src/zoom.png');
background-position: 7px 7px;
}
.workspace_toolbar .switch_to_mdx {
background-image: url('../../../images/src/switch_to_mdx.png');
background-position: 7px 7px;
}
.workspace_toolbar .chart {
background-image: url('../../../js/saiku/plugins/CCC_Chart/chart.png');
background-position: 7px 7px;
}
/** query toolbar */
.query_toolbar_vertical {
background: #FEFEFE;
border-left: 1px solid #CCC;
font-weight: bold;
height: 732px;
width: 70px;
float: right;
margin: 0 0 3px 0;
padding: 3px 10px 3px 1px;
text-shadow: 1px 1px 0 #FFFFFF;
overflow: auto;
}
.query_toolbar_vertical ul.options li {
width: 100%;
}
.query_toolbar_horizontal {
background: #FEFEFE;
border-left: 1px solid #CCC;
font-weight: bold;
height: 32px;
margin: 0 0 3px 0;
padding: 3px 10px 3px 5px;
text-shadow: 1px 1px 0 #FFFFFF;
}
.query_toolbar ul.renderer {
padding-top: 8px;
padding-left: 3px;
}
.query_toolbar ul.renderer li {
padding-left: 1px;
}
.query_toolbar ul {
margin: 0;
padding: 0;
}
.query_toolbar ul li {
display: inline;
float: left;
padding-left: 5px;
margin-bottom: 6px;
}
.query_toolbar ul li.hide {
display: none;
}
.query_toolbar ul li.label {
width: 99%;
}
.query_toolbar ul li.seperator {
border-left: 1px solid #D5D5D5;
margin-left: 5px;
}
.query_toolbar ul li.seperator_vertical {
border-top: 1px solid #D5D5D5;
margin-top: 5px;
padding-top: 5px;
}
.query_toolbar a {
display: block;
height: 16px;
padding: 8px;
text-decoration: none;
color: black;
font-weight: normal;
border-radius: 3px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.query_toolbar a.label {
font-weight: bold;
}
.query_toolbar a.button {
text-align: center;
}
.query_toolbar a.button:hover {
border: 1px solid #CCC;
padding: 7px;
cursor: pointer;
}
.query_toolbar a.on {
border: 1px solid #CCC;
background-color: #E0E0E0 !important;
padding: 7px !important;
}
.query_toolbar a.render_chart {
background-image: url('../../../js/saiku/plugins/CCC_Chart/chart.png');
background-repeat: no-repeat;
background-position: 7px 7px;
width: 16px;
}
.query_toolbar a.render_table {
background-image: url('../../../images/src/table2.png');
background-repeat: no-repeat;
background-position: 7px 7px;
width: 16px;
}
.query_toolbar a.spark_line {
background: url('../../../images/chart/gray/trendline.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.spark_line:hover,
.query_toolbar a.spark_line.on {
background: url('../../../images/chart/trendline.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.spark_bar {
background: url('../../../images/chart/gray/trendbars.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.spark_bar:hover,
.query_toolbar a.spark_bar.on {
background: url('../../../images/chart/trendbars.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.bar {
background: url('../../../images/chart/gray/bar.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.stackedBar100 {
background: url('../../../images/chart/gray/100bar.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.stackedBar {
background: url('../../../images/chart/gray/stackedbar.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.multiple {
background: url('../../../images/chart/gray/multiple.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.pie {
background: url('../../../images/chart/gray/pie.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.line {
background: url('../../../images/chart/gray/line.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.waterfall {
background: url('../../../images/chart/gray/waterfall.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.dot {
background: url('../../../images/chart/gray/dot.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.heatgrid {
background: url('../../../images/chart/gray/area2.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.area {
background: url('../../../images/chart/gray/area.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.bar:hover,
.query_toolbar a.bar.on {
background: url('../../../images/chart/bar.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.stackedBar100:hover,
.query_toolbar a.stackedBar100.on {
background: url('../../../images/chart/100bar.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.stackedBar:hover,
.query_toolbar a.stackedBar.on {
background: url('../../../images/chart/stackedbar.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.multiple:hover,
.query_toolbar a.multiple.on {
background: url('../../../images/chart/multiple.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.pie:hover,
.query_toolbar a.pie.on {
background: url('../../../images/chart/pie.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.line:hover,
.query_toolbar a.line.on {
background: url('../../../images/chart/line.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.waterfall:hover,
.query_toolbar a.waterfall.on {
background: url('../../../images/chart/waterfall.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.dot:hover,
.query_toolbar a.dot.on {
background: url('../../../images/chart/dot.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.heatgrid:hover,
.query_toolbar a.heatgrid.on {
background: url('../../../images/chart/area2.png') no-repeat 21px -1px;
height: 16px;
}
.query_toolbar a.area:hover,
.query_toolbar a.area.on {
background: url('../../../images/chart/area.png') no-repeat 21px -1px;
height: 16px;
}
.form_button {
background: #F0F0F0;
background: -webkit-gradient(linear, left top, left bottom, from(#FFFFFF), to(#F0F0F0));
border: 1px solid #CCCCCC;
box-shadow: none;
color: #222222 !important;
font-size: 1em;
height: 18px;
margin-bottom: 2px;
padding: 5px 5px;
text-decoration: none;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.form_button:hover {
border: 1px solid #999;
color: #222222;
}
#simplemodal-overlay {
background: #FFFFFF;
opacity: 0.5 !important;
-ms-filter: "progid:DXImageTransform.Microsoft.Alpha(Opacity=50)";
}
.bucket_items {
background: #FCFCFC;
border: 1px solid #CCC;
height: 32px;
margin: 0 0 10px 0;
padding: 3px 10px 3px 5px;
text-shadow: 1px 1px 0 #FFFFFF;
}
.bucket_items ul {
margin: 0;
padding: 0;
}
.bucket_items ul li {
display: inline;
float: left;
padding-left: 5px;
}
.bucket_items ul li.seperator {
border-left: 1px solid #D5D5D5;
margin-left: 5px;
}
.bucket_items a.button {
text-decoration: none;
font-weight: bold;
color:black;
display: block;
height: 16px;
padding: 8px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.bucket_items a.add_bucket {
background-image: url('../../../js/saiku/plugins/Buckets/tag_blue_add.png');
background-repeat:no-repeat;
width: 16px;
background-position: 50% 50%;
}
.bucket_items a.button:hover {
border: 1px solid #CCC;
padding: 7px;
}
.bucket_items a.delete {
text-decoration:none;
color:black;
}
.bucket_items a.on {
border: 1px solid #CCC;
background-color: #E0E0E0;
padding: 7px !important;
}
.bucket_items .save {
background-position: -485px -90px;
width: 16px;
}
.bucket_items .export {
background-image: url('../../../images/src/export_drillthrough.png');
background-repeat:no-repeat;
background-position: 7px 7px;
width: 16px;
}
/* Synchronized Filter Stuff */
.filter_items {
background: #FCFCFC;
border: 1px solid #CCC;
height: 32px;
margin: 0 0 10px 0;
padding: 3px 10px 3px 5px;
text-shadow: 1px 1px 0 #FFFFFF;
}
.filter_items ul {
margin: 0;
padding: 0;
}
.filter_items ul li {
display: inline;
float: left;
padding-left: 5px;
}
.filter_items ul li.seperator {
border-left: 1px solid #D5D5D5;
margin-left: 5px;
}
.filter_items a.button {
text-decoration: none;
font-weight: bold;
color:black;
display: block;
height: 16px;
padding: 8px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.filter_items a.add_filter {
background-image: url('../../../js/saiku/plugins/filters/tag_blue_add.png');
background-repeat:no-repeat;
width: 16px;
background-position: 50% 50%;
}
.filter_items a.edit_filter {
background-image: url('../../../js/saiku/plugins/filters/tag_blue_edit.png');
background-repeat:no-repeat;
width: 16px;
background-position: 50% 50%;
}
.filter_items a.refresh_filters {
background: url('../../../images/src/arrow_refresh.png') no-repeat;
width: 16px;
background-position: 50% 50%;
}
.filter_items a.button:hover {
border: 1px solid #CCC;
padding: 7px;
}
.filter_items a.delete {
text-decoration:none;
color:black;
}
.filter_items a.on {
border: 1px solid #CCC;
background-color: #E0E0E0;
padding: 7px !important;
}
.filter_items .save {
background-position: -485px -90px;
width: 16px;
}
.filter_items .export {
background-image: url('../../../images/src/export_drillthrough.png');
background-repeat:no-repeat;
background-position: 7px 7px;
width: 16px;
}
/* CCC styles */
.tipsy { pointer-events: none; padding: 5px; font-size: 10px; font-family: Arial,Helvetica,sans-serif; position: absolute; z-index: 100000; }
.tipsy-inner { padding: 5px 8px 4px 8px; background-color: black; color: white; max-width: 200px; text-align: center; }
.tipsy-inner { border-radius: 3px; -moz-border-radius:3px; -webkit-border-radius:3px; }
.tipsy-arrow { position: absolute; width: 9px; height: 5px; }
.tipsy-n .tipsy-arrow { top: 0; left: 50%; margin-left: -4px; }
.tipsy-nw .tipsy-arrow { top: 0; left: 10px; }
.tipsy-ne .tipsy-arrow { top: 0; right: 10px; }
.tipsy-s .tipsy-arrow { bottom: 0; left: 50%; margin-left: -4px; background-position: bottom left; }
.tipsy-sw .tipsy-arrow { bottom: 0; left: 10px; background-position: bottom left; }
.tipsy-se .tipsy-arrow { bottom: 0; right: 10px; background-position: bottom left; }
.tipsy-e .tipsy-arrow { top: 50%; margin-top: -4px; right: 0; width: 5px; height: 9px; background-position: top right; }
.tipsy-w .tipsy-arrow { top: 50%; margin-top: -4px; left: 0; width: 5px; height: 9px; }
#tooltip {
position: absolute;
z-index: 3000;
border: 1px solid #111;
background-color: #eee;
padding: 5px;
opacity: 0.85;
}
#tooltip h3, #tooltip div { margin: 0; }
/* OOCSS - reusable classes */
span.search {
position: relative;
top: -16px;
}
input.search_file {
width:350px;
margin-left:10px;
height: 18px;
border-radius: 9px;
-moz-border-radius: 9px;
-webkit-border-radius: 9px;
padding: 1px 7px;
position: absolute;
left: 0px;
z-index: 0;
top: 8px;
}
span.cancel_search {
background: url("../../../images/src/cancel3.png") no-repeat scroll 0 0 transparent;
box-shadow: 0 0 10px #EEEEEE;
-moz-box-shadow: 0 0 2px #EEE;
-webkit-box-shadow: 0 0 10px #EEE;
cursor: pointer;
float: right;
height: 26px;
left: 175px;
margin-top: 5px;
position: absolute;
top: 13px;
width: 25px;
z-index: 1;
opacity: 0.7;
display: none;
}
span.cancel_search:hover {
opacity: 1;
}
.back_query {
background: url('../../../images/src/arrow_up_bnw.png') no-repeat;
width: 16px;
height: 16px;
-webkit-transform: rotate(-90deg);
-ms-transform: rotate(-90deg);
transform: rotate(-90deg);
}
.RepositoryObjects {
margin: 0;
padding: 0;
-webkit-transform: scale3d(1,1,1);
}
.RepositoryObjects li {
cursor: pointer;
line-height: 17px;
list-style-type: none;
margin: 0;
padding-bottom: 1px;
padding-top: 1px;
}
.RepositoryObjects li.query {
margin-left: 16px;
}
.RepositoryObjects li a {
color: #222;
text-decoration: none;
}
.RepositoryObjects li a:hover {
background: #f0f0f0;
}
.RepositoryObjects ul.folder_content {
margin-top: 5px;
margin-left: 13px;
padding-left: 0px;
}
.RepositoryObjects .folder_row.selected {
background: #f0f0f0;
}
.RepositoryObjects li.selected.query {
background: #f0f0f0;
}
.RepositoryObjects li.query .icon {
background-image: url('../../../images/src/saiku_16.png');
padding: 1px 8px 1px;
margin-right: 4px;
}
.RepositoryObjects li .sprite.folder {
background-position: -1538px -95px;
cursor: pointer;
padding: 5px 17px;
}
.RepositoryObjects li .sprite.folder.collapsed {
background-position: -1406px -95px;
cursor: pointer;
padding: 5px 17px;
}
/* custom */
.export_zip {
background-image: url('../../../images/src/page_white_compressed.png');
padding: 1px 8px 1px;
margin-right: 4px;
}
.export_btn, .import_btn {
float: right;
margin-right: 80px;
}
.dialog_body .RepositoryObjects {
padding: 10px;
margin-top: 5px;
border: 1px solid #cccccc;
width: 411px;
height: 200px;
overflow: auto;
}
#save_query_form [name="name"] {
width: 390px;
}
.queries h3 {
position: relative;
height: 33px;
line-height: 33px;
}
.queries .add_folder {
position: absolute;
top: 12px;
right: 10px;
display: block;
height: 16px;
width: 16px;
background-image: url(../../../images/src/folder_add.png);
}
/* Print styles */
@media print {
#header, .sidebar, .workspace_fields, .workspace_toolbar { display: none !important; }
}
/* Chart toolbar */
.chart_toolbar {
background: #F9F9F9;
border: 1px solid #CCC;
font-weight: bold;
height: 38px;
margin: 0 0 3px 0;
padding: 3px 10px 3px 5px;
text-shadow: 1px 1px 0 #FFFFFF;
overflow: hidden;
position: fixed;
left: 3px;
top: 3px;
width: 100%;
}
.chart_toolbar ul {
margin: 0;
padding: 0;
}
.chart_toolbar ul li {
display: inline;
float: left;
padding-left: 5px;
}
.chart_toolbar ul li.hide {
display: none;
}
.chart_toolbar ul li.seperator {
border-left: 1px solid #D5D5D5;
margin-left: 5px;
}
.chart_toolbar a.button {
display: block;
height: 16px;
padding: 8px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.chart_toolbar a.button:hover {
border: 1px solid #CCC;
padding: 7px;
}
.chart_toolbar a.on {
border: 1px solid #CCC;
background-color: #E0E0E0;
padding: 7px !important;
}
.chart_toolbar span.logo a.saiku {
background-position: -1275px 1px;
float: right;
width: 166px;
height: 28px;
margin: -2px 10px 0 0;
text-indent: -9999px;
}
.chart_toolbar span.logo a.ccc {
background: url('../../../images/src/ccc3.png') no-repeat;
float: right;
width: 80px;
height: 28px;
margin: 0px 10px 0 0;
text-indent: -9999px;
}
.chartworkspace {
overflow: hidden;
}
.canvas_wrapper a.button {
display: block;
height: 16px;
padding: 8px;
width: 16px;
border-radius: 3px;
-moz-border-radius: 3px;
-webkit-border-radius: 3px;
}
.canvas_wrapper a.button:hover {
border: 1px solid #CCC;
padding: 7px;
}
.canvas_wrapper .zoomin {
background: url('../../../images/src/arrow_in.png') no-repeat;
background-repeat: no-repeat;
background-position: 7px 7px;
}
.canvas_wrapper .zoomout {
background: url('../../../images/src/arrow_up_bnw.png') no-repeat;
background-repeat: no-repeat;
background-position: 7px 7px;
}
.canvas_wrapper .rerender {
background: url('../../../images/src/arrow_refresh_bnw.png') no-repeat;
background-repeat: no-repeat;
background-position: 7px 7px;
}
.chartworkspace_inner {
overflow: auto;
margin-left: 400px;
}
.upgradeheader {
background-color:#F0F0F0;
padding: 3px;
text-align:center;
margin: 0;
border: 1px solid #CCC;
overflow: hidden;
}
.upgradeheader span.close_tab {
-moz-background-origin: 0px 2px;
background-position: -624px -96px;
display: inline-block;
height: 18px;
text-indent: -9999px;
width: 20px;
}
.upgradeheader span.close_tab:hover {
background-position: -690px -96px;
cursor: pointer !important;
display: inline-block;
height: 18px;
width: 20px;
-moz-background-origin: 0px 2px;
}
.controls {
margin-bottom: 20px;
}
.gridster ul {
background-color: #EFEFEF;
list-style-type: none;
}
.gridster li {
font-size: 1em;
font-weight: bold;
line-height: 100%;
text-align: center;
}
.gridster {
margin: 0 auto;
opacity: 0.8;
-webkit-transition: opacity 0.6s ease 0s;
transition: opacity 0.6s ease 0s;
}
.gridster .gs-w {
background: none repeat scroll 0 0 #DDDDDD;
cursor: pointer;
}
.gridster .player {
background: none repeat scroll 0 0 #BBBBBB;
}
.gridster .preview-holder {
background: none repeat scroll 0 0 #FF0000 !important;
border: medium none !important;
}
.user_info {
width:600px;
}
/*label, input {
display: inline-block;
vertical-align: baseline;
width: 125px;
}
label {
color: #2D2D2D;
font-size: 15px;
}
form, input {
box-sizing: border-box;
-moz-box-sizing: border-box;
-webkit-box-sizing: border-box;
}
form {
width: 300px;
}*/
.conn_forms label {
display: inline-block; width: 70px; text-align: right;
}
.conn_forms select {
border: 1px solid #CCC;
font-family: "Helvetica Neue", Arial, Helvetica, sans-serif;
font-size: 100.01%;
margin: 0;
padding: 2px;
width: 32px;
}
.conn_forms input[type="password"] {
margin:0;
border: 2px solid #EEE;
}
.conn_forms input{
display:inline;
width: 150px;
}
.conn_forms input:focus {
border: 2px solid #900;
}
.keyrow label{
width:90px !important;
}
.keyrow select{
width:70px !important;
}
.keyrow input{
width:70px !important;
}
.user_info label {
display: inline-block; width: 140px; text-align: right;
}
.user_info select {
border: 1px solid #CCC;
font-family: "Helvetica Neue", Arial, Helvetica, sans-serif;
font-size: 100.01%;
margin: 0;
padding: 2px;
width: 302px;
}
.user_info input{
display:inline;
width: 300px;
}
.user_info input:focus {
border: 2px solid #900;
}
.user_info .user_button{
float:right;
margin-right:50px;
}
.user_info .upload_button{
padding:3px;
width:auto;
height:auto;
}
.user_info h3 {
background: #F9F9F9;
/*
background: -webkit-gradient(linear, left top, left bottom, from(#FFFFFF), to(#F0F0F0));
background: -moz-linear-gradient(top, #FFFFFF 0%, #F0F0F0 100%);
*/
border-top: 1px solid #CCC;
border-bottom: 1px solid #CCC;
font-size: 12px;
font-weight: bold;
line-height: 1.6em;
margin: 0;
padding: 4px 10px 6px;
text-shadow: 1px 1px 0 #FFFFFF;
}
.dialog_response {
font-weight: bold;
text-align: left;
color: red;
padding-top:5px;
}
#navlist table
{
border: 1px solid black;
float:left;
width:148px;
table-layout: inherit;
}
#navlist {
left:0;
right:0;
margin:0 auto;
display: inline-block;
white-space: nowrap;
width:auto;
}
/*.dbtable{
white-space: nowrap;
-moz-border-radius: 15px;
border-radius: 15px;
vertical-align: top;
display: inline-block;
padding:5px;
}*/
.tablewrapper{
white-space: nowrap;
border-radius: 15px;
vertical-align: top;
display: inline-block;
padding:5px;
}
.tablewrapper > table{
border-collapse:separate;
border:solid black 1px;
border-radius:6px;
-moz-border-radius:6px;
}
.tablewrapper > table th {
background-color: #ffc3c3;
}
.glowing-border {
border: 2px solid #dadada;
border-radius: 7px;
}
.glowing-border:focus {
outline: none;
border-color: #9ecaed;
box-shadow: 0 0 10px #9ecaed;
}
.ui-datepicker {
padding: 10px;
display: none;
font-size: 11px;
background: #fcfcfc;
background: #fff -moz-linear-gradient(top, #fcfcfc 0%, #fff 100%);
background: #fff -webkit-gradient(linear, left top, left bottom, color-stop(0%,#fcfcfc), color-stop(100%,#fff));
background: #fff -webkit-linear-gradient(top, #fcfcfc 0%, #fff 100%);
background: #fff -o-linear-gradient(top, #fcfcfc 0%, #fff 100%);
background: #fff -ms-linear-gradient(top, #fcfcfc 0%, #fff 100%);
background: #fff linear-gradient(to bottom, #fcfcfc 0%, #fff 100%);
border: 1px solid #ccc;
}
.ui-datepicker table {
width: 100%;
}
.ui-datepicker table td {
text-align: center;
}
.ui-datepicker a {
text-decoration: none;
cursor: pointer;
}
.ui-datepicker-title {
text-align: center;
font-weight: bold;
}
.ui-datepicker-prev {
}
.ui-datepicker-next {
}
#date-input {
width:50px;
}
|
apache-2.0
|
pmac1965/proteus
|
source/display/prBackgroundManager.h
|
2202
|
// File: prBackgroundManager.h
/**
* Copyright 2014 Paul Michael McNab
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include "../core/prCoreSystem.h"
// Forward declarations.
class prBackground;
#define MAX_BACKGROUNDS 8
// Class: prBackgroundManager
// Background management class
//
// Notes:
// This class is used to create and destroy backgrounds.
// Backgrounds cannot be constructed directly; they must be
// created and released through this manager.
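//
// Example:
//      A minimal usage sketch. The manager pointer and the filename are
//      hypothetical; only Create and Release below come from this header.
//
//      prBackground *bg = pBackgroundManager->Create("title.bg");
//      if (bg)
//      {
//          // ... draw the background ...
//          pBackgroundManager->Release(bg);
//      }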
class prBackgroundManager : public prCoreSystem
{
public:
// Method: prBackgroundManager
// Constructs the background manager.
prBackgroundManager();
// Method: ~prBackgroundManager
// Destructor.
~prBackgroundManager();
// Method: Create
// Creates a background.
//
// Parameters:
// filename - Name of the background to load
//
// Returns:
// A constructed background or NULL
prBackground *Create(const char *filename);
// Method: Release
// Releases a background and any associated assets.
//
// Parameters:
// bg - The background to release
void Release(prBackground *bg);
// Method: ReleaseAll
// Releases all backgrounds and their associated assets.
void ReleaseAll();
// Method: DisplayUsage
// Shows all the assets being used by the background manager.
void DisplayUsage();
private:
// Prevents copying and assignment.
prBackgroundManager(const prBackgroundManager&);
const prBackgroundManager& operator = (const prBackgroundManager&);
private:
prBackground *backgrounds[MAX_BACKGROUNDS];
};
|
apache-2.0
|
ExplorViz/ExplorViz
|
src-external/de/cau/cs/kieler/klay/layered/p3order/ForsterConstraintResolver.java
|
11578
|
/*
* KIELER - Kiel Integrated Environment for Layout Eclipse RichClient
*
* http://www.informatik.uni-kiel.de/rtsys/kieler/
*
* Copyright 2010 by
* + Christian-Albrechts-University of Kiel
* + Department of Computer Science
* + Real-Time and Embedded Systems Group
*
* This code is provided under the terms of the Eclipse Public License (EPL).
* See the file epl-v10.html for the license text.
*/
package de.cau.cs.kieler.klay.layered.p3order;
import java.util.List;
import java.util.ListIterator;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import de.cau.cs.kieler.core.util.Pair;
import de.cau.cs.kieler.klay.layered.graph.LNode;
import de.cau.cs.kieler.klay.layered.graph.LNode.NodeType;
import de.cau.cs.kieler.klay.layered.properties.InternalProperties;
/**
* Detects and resolves violated constraints. Inspired by
* <ul>
* <li>Michael Forster. A fast and simple heuristic for constrained two-level crossing reduction. In
* <i>Graph Drawing</i>, volume 3383 of LNCS, pp. 206-216. Springer, 2005.</li>
* </ul>
* This constraint resolver relies on the assumption that all node groups have well-defined barycenter
* values and are already sorted by these barycenter values.
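* <p>
* A small worked example (the barycenter values are hypothetical): if node group {@code a}
* with barycenter 2.0 has an in-layer successor constraint towards node group {@code b}
* with barycenter 1.5, the sorted order places {@code b} before {@code a} and the constraint
* is violated; the resolver merges the two into a single node group whose barycenter lies
* between 1.5 and 2.0, so the list remains sorted.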
*
* @author cds
* @author ima
* @author msp
* @kieler.design proposed by msp
* @kieler.rating proposed yellow by msp
*/
public final class ForsterConstraintResolver implements IConstraintResolver {
/** the layout units for handling dummy nodes for north / south ports. */
private final Multimap<LNode, LNode> layoutUnits;
/**
* Constructs a Forster constraint resolver.
*
* @param layoutUnits
* a map associating layout units with their respective members
*/
public ForsterConstraintResolver(final Multimap<LNode, LNode> layoutUnits) {
this.layoutUnits = layoutUnits;
}
/**
* {@inheritDoc}
*/
public void processConstraints(final List<NodeGroup> nodeGroups) {
// Build the constraints graph
buildConstraintsGraph(nodeGroups);
// Find violated vertices
Pair<NodeGroup, NodeGroup> violatedConstraint = null;
while ((violatedConstraint = findViolatedConstraint(nodeGroups)) != null) {
handleViolatedConstraint(violatedConstraint.getFirst(), violatedConstraint.getSecond(),
nodeGroups);
}
}
/**
* Build the constraint graph for the given vertices. The constraint graph is created from
* the predefined <em>in-layer successor constraints</em> and the <em>layout units</em>.
*
* @param nodeGroups
* the list of single-node vertices sorted by their barycenter values.
*/
private void buildConstraintsGraph(final List<NodeGroup> nodeGroups) {
// Reset the constraint fields
for (NodeGroup nodeGroup : nodeGroups) {
nodeGroup.resetOutgoingConstraints();
nodeGroup.incomingConstraintsCount = 0;
}
// Iterate through the vertices, adding the necessary constraints
LNode lastNonDummyNode = null;
for (NodeGroup nodeGroup : nodeGroups) {
LNode node = nodeGroup.getNode();
// Add the constraints given by the vertex's node
for (LNode successor : node.getProperty(
InternalProperties.IN_LAYER_SUCCESSOR_CONSTRAINTS)) {
NodeGroup successorNodeGroup = successor.getProperty(InternalProperties.NODE_GROUP);
nodeGroup.getOutgoingConstraints().add(successorNodeGroup);
successorNodeGroup.incomingConstraintsCount++;
}
// Check if we're processing a normal, non-dummy node
if (node.getNodeType() == NodeType.NORMAL) {
// If we already processed another normal, non-dummy node, we need to add
// constraints from all of that other node's layout unit's vertices to this
// node's layout unit's vertices
if (lastNonDummyNode != null) {
for (LNode lastUnitNode : layoutUnits.get(lastNonDummyNode)) {
NodeGroup lastUnitNodeGroup = lastUnitNode.getProperty(
InternalProperties.NODE_GROUP);
for (LNode currentUnitNode : layoutUnits.get(node)) {
NodeGroup currentUnitNodeGroup = currentUnitNode.getProperty(
InternalProperties.NODE_GROUP);
lastUnitNodeGroup.getOutgoingConstraints().add(currentUnitNodeGroup);
currentUnitNodeGroup.incomingConstraintsCount++;
}
}
}
lastNonDummyNode = node;
}
}
}
/**
* Returns a violated constraint, if any is left. Constraint violation detection is based
* on the barycenter values of the node groups, hence it is a critical requirement that
* the node groups are sorted by their barycenter values.
*
* @param nodeGroups
* list of vertices.
* @return the two vertices whose constraint is violated, or {@code null} if none could be
* found. The two vertices are returned in the order they should appear in, not in the
* order that violates their constraint.
*/
private Pair<NodeGroup, NodeGroup> findViolatedConstraint(final List<NodeGroup> nodeGroups) {
List<NodeGroup> activeNodeGroups = null;
// Iterate through the constrained vertices
double lastValue = Short.MIN_VALUE;
for (NodeGroup nodeGroup : nodeGroups) {
assert nodeGroup.barycenter != null && nodeGroup.barycenter >= lastValue;
lastValue = nodeGroup.barycenter;
nodeGroup.resetIncomingConstraints();
// Find sources of the constraint graph to start the constraints check
if (nodeGroup.hasOutgoingConstraints() && nodeGroup.incomingConstraintsCount == 0) {
if (activeNodeGroups == null) {
activeNodeGroups = Lists.newArrayList();
}
activeNodeGroups.add(nodeGroup);
}
}
// Iterate through the active node groups to find one with violated constraints
if (activeNodeGroups != null) {
while (!activeNodeGroups.isEmpty()) {
NodeGroup nodeGroup = activeNodeGroups.remove(0);
// See if we can find a violated constraint
if (nodeGroup.hasIncomingConstraints()) {
for (NodeGroup predecessor : nodeGroup.getIncomingConstraints()) {
if (predecessor.barycenter.floatValue() == nodeGroup.barycenter.floatValue()) {
if (nodeGroups.indexOf(predecessor) > nodeGroups.indexOf(nodeGroup)) {
// The predecessor has equal barycenter, but higher index
return Pair.of(predecessor, nodeGroup);
}
} else if (predecessor.barycenter > nodeGroup.barycenter) {
// The predecessor has greater barycenter and thus also higher index
return Pair.of(predecessor, nodeGroup);
}
}
}
// No violated constraints; add outgoing constraints to the respective incoming list
for (NodeGroup successor : nodeGroup.getOutgoingConstraints()) {
List<NodeGroup> successorIncomingList = successor.getIncomingConstraints();
successorIncomingList.add(0, nodeGroup);
if (successor.incomingConstraintsCount == successorIncomingList.size()) {
activeNodeGroups.add(successor);
}
}
}
}
// No violated constraints found
return null;
}
/** Delta that two barycenters can differ by to still be considered equal. */
private static final float BARYCENTER_EQUALITY_DELTA = 0.0001F;
/**
* Handles the case of a violated constraint. The node groups must be sorted by their
* barycenter values. After this method has finished, the list of node groups is smaller
* by one element, since two node groups have been unified, but the list is still correctly
* sorted by barycenter values.
*
* @param firstNodeGroup
* the node group with violated outgoing constraint
* @param secondNodeGroup
* the node group with violated incoming constraint
* @param nodeGroups
* the list of vertices
*/
private void handleViolatedConstraint(final NodeGroup firstNodeGroup,
final NodeGroup secondNodeGroup, final List<NodeGroup> nodeGroups) {
// Create a new vertex from the two constraint-violating vertices; this also
// automatically calculates the new vertex's barycenter value
NodeGroup newNodeGroup = new NodeGroup(firstNodeGroup, secondNodeGroup);
assert newNodeGroup.barycenter + BARYCENTER_EQUALITY_DELTA >= secondNodeGroup.barycenter;
assert newNodeGroup.barycenter - BARYCENTER_EQUALITY_DELTA <= firstNodeGroup.barycenter;
// Iterate through the vertices. Remove the old vertices. Insert the new one
// according to the barycenter value, thereby keeping the list sorted. Along
// the way, constraint relationships will be updated
ListIterator<NodeGroup> nodeGroupIterator = nodeGroups.listIterator();
boolean alreadyInserted = false;
while (nodeGroupIterator.hasNext()) {
NodeGroup nodeGroup = nodeGroupIterator.next();
if (nodeGroup == firstNodeGroup || nodeGroup == secondNodeGroup) {
// Remove the two node groups with violated constraint from the list
nodeGroupIterator.remove();
} else if (!alreadyInserted && nodeGroup.barycenter > newNodeGroup.barycenter) {
// If we haven't inserted the new node group into the list already, do that now.
// Note: we're not calling next() again. This means that during the next iteration,
// we will again be looking at the current node group. But then, alreadyInserted will
// be true and we can look at that node group's outgoing constraints.
nodeGroupIterator.previous();
nodeGroupIterator.add(newNodeGroup);
alreadyInserted = true;
} else if (nodeGroup.hasOutgoingConstraints()) {
// Check if the vertex has any constraints with the former two vertices
boolean firstNodeGroupConstraint = nodeGroup.getOutgoingConstraints()
.remove(firstNodeGroup);
boolean secondNodeGroupConstraint = nodeGroup.getOutgoingConstraints()
.remove(secondNodeGroup);
if (firstNodeGroupConstraint || secondNodeGroupConstraint) {
nodeGroup.getOutgoingConstraints().add(newNodeGroup);
newNodeGroup.incomingConstraintsCount++;
}
}
}
// If we haven't inserted the new node group already, add it to the end
if (!alreadyInserted) {
nodeGroups.add(newNodeGroup);
}
}
}
|
apache-2.0
|
UniversalDependencies/docs
|
treebanks/kfm_aha/kfm_aha-pos-VERB.md
|
4507
|
---
layout: base
title: 'Statistics of VERB in UD_Khunsari-AHA'
udver: '2'
---
## Treebank Statistics: UD_Khunsari-AHA: POS Tags: `VERB`
There are 12 `VERB` lemmas (21%), 14 `VERB` types (23%) and 14 `VERB` tokens (19%).
Out of 11 observed tags, the rank of `VERB` is: 2 in number of lemmas, 2 in number of types and 2 in number of tokens.
The 10 most frequent `VERB` lemmas: <em>چ, کِر, او, جیر, داج, دِ, دک, دی, ش, کَ</em>
The 10 most frequent `VERB` types: <em>آکَ, ئُ, اِداجِن, اِمِگوا, اِچُ, اِکِرُ, بشتون, جیر, دَرکَفتُن, دِ</em>
The 10 most frequent ambiguous lemmas: <em>او</em> (<tt><a href="kfm_aha-pos-PRON.html">PRON</a></tt> 1, <tt><a href="kfm_aha-pos-VERB.html">VERB</a></tt> 1), <em>ش</em> (<tt><a href="kfm_aha-pos-PRON.html">PRON</a></tt> 1, <tt><a href="kfm_aha-pos-VERB.html">VERB</a></tt> 1)
The 10 most frequent ambiguous types:
## Morphology
The form / lemma ratio of `VERB` is 1.166667 (the average of all parts of speech is 1.051724).
The 1st highest number of forms (2) was observed with the lemma “چ”: <em>اِچُ, نَچو</em>.
The 2nd highest number of forms (2) was observed with the lemma “کِر”: <em>اِکِرُ, مِکِرُن</em>.
The 3rd highest number of forms (1) was observed with the lemma “او”: <em>ئُ</em>.
`VERB` occurs with 6 features: <tt><a href="kfm_aha-feat-Number.html">Number</a></tt> (14; 100% instances), <tt><a href="kfm_aha-feat-Person.html">Person</a></tt> (14; 100% instances), <tt><a href="kfm_aha-feat-Tense.html">Tense</a></tt> (11; 79% instances), <tt><a href="kfm_aha-feat-Mood.html">Mood</a></tt> (2; 14% instances), <tt><a href="kfm_aha-feat-Polarity.html">Polarity</a></tt> (2; 14% instances), <tt><a href="kfm_aha-feat-VerbForm.html">VerbForm</a></tt> (2; 14% instances)
`VERB` occurs with 11 feature-value pairs: `Mood=Imp`, `Mood=Sub`, `Number=Plur`, `Number=Sing`, `Person=1`, `Person=2`, `Person=3`, `Polarity=Neg`, `Tense=Past`, `Tense=Pres`, `VerbForm=Part`
`VERB` occurs with 9 feature combinations.
The most frequent feature combination is `Number=Sing|Person=3|Tense=Pres` (4 tokens).
Examples: <em>ئُ, اِچُ, اِکِرُ, کَ</em>
## Relations
`VERB` nodes are attached to their parents using 2 different relations: <tt><a href="kfm_aha-dep-root.html">root</a></tt> (10; 71% instances), <tt><a href="kfm_aha-dep-ccomp.html">ccomp</a></tt> (4; 29% instances)
Parents of `VERB` nodes belong to 2 different parts of speech: (10; 71% instances), <tt><a href="kfm_aha-pos-VERB.html">VERB</a></tt> (4; 29% instances)
0 (0%) `VERB` nodes are leaves.
2 (14%) `VERB` nodes have one child.
1 (7%) `VERB` nodes have two children.
11 (79%) `VERB` nodes have three or more children.
The highest child degree of a `VERB` node is 6.
Children of `VERB` nodes are attached using 13 different relations: <tt><a href="kfm_aha-dep-punct.html">punct</a></tt> (10; 22% instances), <tt><a href="kfm_aha-dep-nsubj.html">nsubj</a></tt> (7; 16% instances), <tt><a href="kfm_aha-dep-obl.html">obl</a></tt> (5; 11% instances), <tt><a href="kfm_aha-dep-ccomp.html">ccomp</a></tt> (4; 9% instances), <tt><a href="kfm_aha-dep-obj.html">obj</a></tt> (4; 9% instances), <tt><a href="kfm_aha-dep-advcl.html">advcl</a></tt> (3; 7% instances), <tt><a href="kfm_aha-dep-advmod.html">advmod</a></tt> (3; 7% instances), <tt><a href="kfm_aha-dep-compound-lvc.html">compound:lvc</a></tt> (3; 7% instances), <tt><a href="kfm_aha-dep-nmod.html">nmod</a></tt> (2; 4% instances), <tt><a href="kfm_aha-dep-aux.html">aux</a></tt> (1; 2% instances), <tt><a href="kfm_aha-dep-cc.html">cc</a></tt> (1; 2% instances), <tt><a href="kfm_aha-dep-mark.html">mark</a></tt> (1; 2% instances), <tt><a href="kfm_aha-dep-nummod.html">nummod</a></tt> (1; 2% instances)
Children of `VERB` nodes belong to 9 different parts of speech: <tt><a href="kfm_aha-pos-NOUN.html">NOUN</a></tt> (16; 36% instances), <tt><a href="kfm_aha-pos-PUNCT.html">PUNCT</a></tt> (10; 22% instances), <tt><a href="kfm_aha-pos-ADV.html">ADV</a></tt> (6; 13% instances), <tt><a href="kfm_aha-pos-PRON.html">PRON</a></tt> (5; 11% instances), <tt><a href="kfm_aha-pos-VERB.html">VERB</a></tt> (4; 9% instances), <tt><a href="kfm_aha-pos-AUX.html">AUX</a></tt> (1; 2% instances), <tt><a href="kfm_aha-pos-CCONJ.html">CCONJ</a></tt> (1; 2% instances), <tt><a href="kfm_aha-pos-NUM.html">NUM</a></tt> (1; 2% instances), <tt><a href="kfm_aha-pos-SCONJ.html">SCONJ</a></tt> (1; 2% instances)
|
apache-2.0
|
DailyHotel/Watchman
|
src/main/java/com/dailyhotel/watchman/CacheClient.java
|
254
|
package com.dailyhotel.watchman;
import java.util.concurrent.TimeUnit;
/**
* Created by tywin on 15/11/2016.
*/
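// A hypothetical usage sketch; only get/set below are part of this interface,
// while the concrete implementation, the key format and the recordCall()
// helper are assumptions for illustration:
//
//     CacheClient cache = ...;                        // e.g. a map- or Redis-backed implementation
//     MethodCall call = cache.get("service#method");  // null when nothing is cached
//     if (call == null) {
//         call = recordCall();                        // hypothetical helper
//         cache.set("service#method", call, 30, TimeUnit.SECONDS);
//     }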
public interface CacheClient {
MethodCall get(String key);
void set(String key, MethodCall value, long timeout, TimeUnit unit);
}
|
apache-2.0
|
chacaa/DoApp
|
app/src/main/java/com/xmartlabs/scasas/doapp/model/AuthResponse.java
|
272
|
package com.xmartlabs.scasas.doapp.model;
import org.parceler.Parcel;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* Created by remer on 08/12/15.
*/
@Builder
@Data
@NoArgsConstructor
@Parcel
public class AuthResponse {
// TODO
}
|
apache-2.0
|
MissionCriticalCloud/cosmic
|
cosmic-core/server/src/main/java/com/cloud/hypervisor/HypervisorGuruManagerImpl.java
|
2240
|
package com.cloud.hypervisor;
import com.cloud.host.dao.HostDao;
import com.cloud.legacymodel.communication.command.Command;
import com.cloud.legacymodel.utils.Pair;
import com.cloud.model.enumeration.HypervisorType;
import com.cloud.utils.component.ManagerBase;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
@Component
public class HypervisorGuruManagerImpl extends ManagerBase implements HypervisorGuruManager {
public static final Logger s_logger = LoggerFactory.getLogger(HypervisorGuruManagerImpl.class.getName());
@Inject
HostDao _hostDao;
List<HypervisorGuru> _hvGuruList;
Map<HypervisorType, HypervisorGuru> _hvGurus = new ConcurrentHashMap<>();
@PostConstruct
public void init() {
for (final HypervisorGuru guru : _hvGuruList) {
_hvGurus.put(guru.getHypervisorType(), guru);
}
}
@Override
public HypervisorGuru getGuru(final HypervisorType hypervisorType) {
if (hypervisorType == null) {
return null;
}
HypervisorGuru result = _hvGurus.get(hypervisorType);
if (result == null) {
for (final HypervisorGuru guru : _hvGuruList) {
if (guru.getHypervisorType() == hypervisorType) {
_hvGurus.put(hypervisorType, guru);
result = guru;
break;
}
}
}
return result;
}
@Override
public long getGuruProcessedCommandTargetHost(final long hostId, final Command cmd) {
for (final HypervisorGuru guru : _hvGuruList) {
final Pair<Boolean, Long> result = guru.getCommandHostDelegation(hostId, cmd);
if (result.first()) {
return result.second();
}
}
return hostId;
}
public List<HypervisorGuru> getHvGuruList() {
return _hvGuruList;
}
@Inject
public void setHvGuruList(final List<HypervisorGuru> hvGuruList) {
this._hvGuruList = hvGuruList;
}
}
|
apache-2.0
|
maxmert/reflecti
|
test/unit/middlewares/emitter.js
|
2160
|
import { expect } from 'chai';
import sinon from 'sinon';
import flatMap from 'lodash/flatMap';
import createStore from '../../../store';
import emitterMiddleware from '../../../middlewares/emitter';
/* eslint-disable no-unused-expressions */
describe('Middleware', () => {
let store;
let callback;
beforeEach(() => {
store = createStore(10);
callback = sinon.spy();
emitterMiddleware(store);
});
describe('Emitter', () => {
it('should add on and off methods to the store', () => {
expect(store.on).to.exist;
expect(store.on).to.be.a('function');
expect(store.off).to.exist;
expect(store.off).to.be.a('function');
});
it('should emit if data in the store changed', () => {
store.on('data', callback);
store
.dispatch((value) => value - 4)
.dispatch((value) => value * 10);
expect(callback.callCount).to.be.equal(2);
expect(flatMap(callback.args)).to.eql([6, 60]);
store.off('data', callback);
});
it('should not emit, if data after change is the same', () => {
store.on('data', callback);
store
.dispatch((value) => value - 4)
.dispatch((value) => value * 1)
.dispatch((value) => value + 0);
expect(callback.callCount).to.be.equal(1);
expect(flatMap(callback.args)).to.eql([6]);
store.off('data', callback);
});
it('should remove listeners', () => {
store.on('data', callback);
store
.dispatch((value) => value - 4)
.dispatch((value) => value * 10);
expect(callback.callCount).to.be.equal(2);
expect(flatMap(callback.args)).to.eql([6, 60]);
store.off('data', callback);
store.dispatch((value) => value - 4);
expect(callback.callCount).to.be.equal(2);
expect(flatMap(callback.args)).to.eql([6, 60]);
});
});
});
/* eslint-enable no-unused-expressions */
|
apache-2.0
|
kironuniversity/main-website
|
plugins/rainlab/pages/lang/fr/lang.php
|
6039
|
<?php
return [
'plugin' => [
'name' => 'Pages',
'description' => 'Fonctionnalités de pages et menus statiques.',
],
'page' => [
'menu_label' => 'Pages',
'delete_confirmation' => 'Confirmez-vous la suppression des pages sélectionnées ? Les sous-pages seront également supprimées.',
'no_records' => 'Aucune page trouvée',
'delete_confirm_single' => 'Confirmez-vous la suppression de cette page ? Les sous-pages seront également supprimées.',
'new' => 'Nouvelle page',
'add_subpage' => 'Ajouter une sous-page',
'invalid_url' => 'Le format d’URL est invalide. L’URL doit commencer par un / et peut contenir des chiffres, des lettres et les symboles suivants : _-/.',
'url_not_unique' => 'Cette URL est déjà utilisée par une autre page.',
'layout' => 'Maquette',
'layouts_not_found' => 'Aucune maquette trouvée',
'saved' => 'La page a été sauvegardée avec succès.',
'tab' => 'Pages',
'manage_pages' => 'Gérer les pages statiques',
'manage_menus' => 'Gérer les menus statiques',
'access_snippets' => 'Accès aux fragments',
'manage_content' => 'Gérer le contenu statique'
],
'menu' => [
'menu_label' => 'Menus',
'delete_confirmation' => 'Confirmez-vous la suppression des menus sélectionnés ?',
'no_records' => 'Aucun menu trouvé',
'new' => 'Nouveau menu',
'new_name' => 'Nouveau menu',
'new_code' => 'nouveau-menu',
'delete_confirm_single' => 'Confirmez-vous la suppression de ce menu ?',
'saved' => 'Le menu a été sauvegardé avec succès.',
'name' => 'Nom',
'code' => 'Code',
'items' => 'Éléments du menu',
'add_subitem' => 'Ajouter un élément',
'code_required' => 'Le Code est requis',
'invalid_code' => 'Le format du Code est invalide. Le Code peut contenir des chiffres, des lettres et les symboles suivants : _-'
],
'menuitem' => [
'title' => 'Titre',
'editor_title' => 'Modifier l’élément du menu',
'type' => 'Type',
'allow_nested_items' => 'Autoriser les sous-éléments',
'allow_nested_items_comment' => 'Les sous-éléments peuvent être générés dynamiquement par les pages statiques et certains des autres types d’élément',
'url' => 'URL',
'reference' => 'Référence',
'title_required' => 'Le Titre est requis',
'unknown_type' => 'Type d’élément du menu inconnu',
'unnamed' => 'Élément de menu sans nom',
'add_item' => 'Ajouter un élément',
'new_item' => 'Nouvel élément du menu',
'replace' => 'Remplacer cet élément par ses sous-éléments générés',
'replace_comment' => 'Utiliser cette case à cocher pour envoyer les sous-éléments générés au même niveau que cet élément. Cet élément sera lui-même masqué.',
'cms_page' => 'Page CMS',
'cms_page_comment' => 'Sélectionnez une page à ouvrir lors d’un clic sur cet élément du menu.',
'reference_required' => 'La référence de l’élément du menu est requise.',
'url_required' => 'L’URL est requise',
'cms_page_required' => 'Sélectionnez une page CMS s’il vous plaît',
'code' => 'Code',
'code_comment' => 'Entrez le code de l’élément du menu si vous souhaitez y accéder via l’API.'
],
'content' => [
'menu_label' => 'Contenu',
'cant_save_to_dir' => 'L’enregistrement des fichiers de contenu dans le répertoire des pages statiques n’est pas autorisé.'
],
'sidebar' => [
'add' => 'Ajouter',
'search' => 'Rechercher...'
],
'object' => [
'invalid_type' => 'Type d’objet inconnu',
'not_found' => 'L’objet demandé n’a pas été trouvé.'
],
'editor' => [
'title' => 'Titre',
'new_title' => 'Nouveau titre de la page',
'content' => 'Contenu',
'url' => 'URL',
'filename' => 'Nom du fichier',
'layout' => 'Maquette',
'description' => 'Description',
'preview' => 'Aperçu',
'enter_fullscreen' => 'Activer le mode plein écran',
'exit_fullscreen' => 'Annuler le mode plein écran',
'hidden' => 'Caché',
'hidden_comment' => 'Les pages cachées sont seulement accessibles aux administrateurs connectés.',
'navigation_hidden' => 'Masquer dans la navigation',
'navigation_hidden_comment' => 'Cochez cette case pour masquer cette page dans les menus et le fil d’ariane générés automatiquement.',
],
'snippet' => [
'partialtab' => 'Fragment',
'code' => 'Code du fragment',
'code_comment' => 'Entrez un code pour rendre ce contenu partiel disponible en tant que fragment dans le plugin des Pages Statiques.',
'name' => 'Nom',
'name_comment' => 'Le nom est affiché dans la liste des fragments dans le menu latéral des Pages Statiques et dans une Page lorsque qu’un fragment y est ajouté.',
'no_records' => 'Aucun fragment trouvé',
'menu_label' => 'Fragments',
'column_property' => 'Nom de la propriété',
'column_type' => 'Type',
'column_code' => 'Code',
'column_default' => 'Valeur par défaut',
'column_options' => 'Options',
'column_type_string' => 'Chaîne de caractères',
'column_type_checkbox' => 'Case à cocher',
'column_type_dropdown' => 'Menu déroulant',
'not_found' => 'Le fragment demandé avec le code :code n’a pas été trouvé dans le thème.',
'property_format_error' => 'Le code de la propriété devrait commencer par une lettre et ne peut contenir que des lettres et des chiffres',
'invalid_option_key' => 'Clé de l’option de la liste déroulante invalide. Les clés des options ne peuvent contenir que des chiffres, des lettres et les symboles _ et -'
]
];
|
apache-2.0
|
raistlic/raistlic-lib-commons-core
|
src/test/java/org/raistlic/common/precondition/PreconditionForStringStateTest.java
|
4232
|
/*
* Copyright 2015 Lei CHEN ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.raistlic.common.precondition;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* @author Lei CHEN (2015-11-23)
*/
@RunWith(JUnit4.class)
public class PreconditionForStringStateTest {
@Test
public void testIsNullWithNullState() {
Precondition.context((String) null).isNull();
Precondition.context((String) null).isNull("message");
}
@Test(expected = InvalidContextException.class)
public void testIsNullWithNonNullStateNoNameNoMessage() {
Precondition.context("abc").isNull();
}
@Test(expected = InvalidContextException.class)
public void testIsNullWithNonNullStateNoNameWithMessage() {
Precondition.context("abc").isNull("message");
}
@Test
public void testNotNullWithNonNullState() {
Precondition.context("abc").isNotNull();
Precondition.context("abc").isNotNull("message");
}
@Test(expected = InvalidContextException.class)
public void testNotNullWithNullStateNoNameNoMessage() {
Precondition.context((String) null).isNotNull();
}
@Test(expected = InvalidContextException.class)
public void testNotNullWithNullStateNoNameWithMessage() {
Precondition.context((String) null).isNotNull("message");
}
@Test
public void testEqualToWithEqualStates() {
String state1 = "abc";
String state2 = "abc";
Precondition.context(state1).isEqualTo(state2);
Precondition.context(state1).isEqualTo(state2, "message");
}
@Test(expected = InvalidContextException.class)
public void testEqualToWithNotEqualStatesNoNameNoMessage() {
String state1 = "abc";
String state2 = "def";
Precondition.context(state1).isEqualTo(state2);
}
@Test(expected = InvalidContextException.class)
public void testEqualToWithNotEqualStatesNoNameWithMessage() {
String state1 = "abc";
String state2 = "def";
Precondition.context(state1).isEqualTo(state2, "message");
}
@Test
public void testNotEqualToWithNotEqualStates() {
String state1 = "abc";
String state2 = "def";
Precondition.context(state1).isNotEqualTo(state2);
Precondition.context(state1).isNotEqualTo(state2, "message");
}
@Test(expected = InvalidContextException.class)
public void testNotEqualToWithEqualStatesNoNameNoMessage() {
String state1 = "abc";
String state2 = "abc";
Precondition.context(state1).isNotEqualTo(state2);
}
@Test(expected = InvalidContextException.class)
public void testNotEqualToWithEqualStatesNoNameWithMessage() {
String state1 = "abc";
String state2 = "abc";
Precondition.context(state1).isNotEqualTo(state2, "message");
}
@Test
public void testIsEmptyWithEmptyString() {
Precondition.context("").isEmpty();
Precondition.context("").isEmpty("message");
}
@Test(expected = InvalidContextException.class)
public void testIsEmptyWithNonEmptyStringNoNameNoMessage() {
Precondition.context("abc").isEmpty();
}
@Test(expected = InvalidContextException.class)
public void testIsEmptyWithNonEmptyStringNoNameWithMessage() {
Precondition.context("abc").isEmpty("message");
}
@Test
public void testNotEmptyWithNonEmptyState() {
Precondition.context("abc").isNotEmpty();
Precondition.context("abc").isNotEmpty("message");
}
@Test(expected = InvalidContextException.class)
public void testNotEmptyWithEmptyStateNoNameNoMessage() {
Precondition.context("").isNotEmpty();
}
@Test(expected = InvalidContextException.class)
public void testNotEmptyWithEmptyStateNoNameWithMessage() {
Precondition.context("").isNotEmpty("message");
}
}
|
apache-2.0
|
shenki/hostboot
|
src/kernel/devicesegment.C
|
6488
|
/* IBM_PROLOG_BEGIN_TAG */
/* This is an automatically generated prolog. */
/* */
/* $Source: src/kernel/devicesegment.C $ */
/* */
/* OpenPOWER HostBoot Project */
/* */
/* COPYRIGHT International Business Machines Corp. 2011,2014 */
/* */
/* Licensed under the Apache License, Version 2.0 (the "License"); */
/* you may not use this file except in compliance with the License. */
/* You may obtain a copy of the License at */
/* */
/* http://www.apache.org/licenses/LICENSE-2.0 */
/* */
/* Unless required by applicable law or agreed to in writing, software */
/* distributed under the License is distributed on an "AS IS" BASIS, */
/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or */
/* implied. See the License for the specific language governing */
/* permissions and limitations under the License. */
/* */
/* IBM_PROLOG_END_TAG */
#include <util/singleton.H>
#include <limits.h>
#include <assert.h>
#include <kernel/vmmmgr.H>
#include <kernel/ptmgr.H>
#include <kernel/devicesegment.H>
#include <kernel/segmentmgr.H>
#include <kernel/console.H>
/**
* @brief Add the device segment to the SegmentManager.
*/
void DeviceSegment::init(size_t segId)
{
kassert((segId >= SegmentManager::MMIO_FIRST_SEGMENT_ID) &&
(segId <= SegmentManager::MMIO_LAST_SEGMENT_ID));
SegmentManager::addSegment(this, segId);
}
/**
* @brief Handle a page fault for a device address access
* @param i_task[in] - Task pointer to the task requiring the page
* @param i_addr[in] - 64-bit address needed to be paged
* @return bool - TRUE: Page added to page table
* FALSE: Not a valid address to be paged
*/
bool DeviceSegment::handlePageFault(task_t* i_task, uint64_t i_addr,
bool i_store)
{
//Verify input address falls within this segment's address range
if (i_addr < this->getBaseAddress() ||
i_addr >= (this->getBaseAddress() + (1ull << SLBE_s)))
{
return false;
}
//Verify the device is mapped
uint64_t segment_ea = i_addr - this->getBaseAddress();
size_t idx = segment_ea / ((1ull << SLBE_s) / MMIO_MAP_DEVICES);
uint64_t device_offset = segment_ea -
(idx * (1ull << SLBE_s) / MMIO_MAP_DEVICES);
if (device_offset >= (uint64_t)iv_mmioMap[idx].size)
{
return false;
}
PageTableManager::addEntry((i_addr / PAGESIZE) * PAGESIZE,
(iv_mmioMap[idx].addr + device_offset) / PAGESIZE,
(iv_mmioMap[idx].no_ci ?
(BYPASS_HRMOR | WRITABLE) :
SegmentManager::CI_ACCESS)
);
return true;
}
/**
* @brief Map a device into the device segment.
* @param ra[in] - Void pointer to real address to be mapped in
* @param i_devDataSize[in] - Size of device segment block
* @param i_nonCI[in] - Device should be mapped cacheable instead of CI
* @return void* - Pointer to beginning virtual address, NULL otherwise
*/
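// Example (all names and values below are hypothetical; only devMap and
// devUnmap are taken from this file): map a small cache-inhibited register
// window on a constructed DeviceSegment 'segment' and release it when done.
//
//     void *va = segment.devMap(reinterpret_cast<void*>(0x3FE0000000ull),
//                               4 * PAGESIZE, false);
//     if (NULL != va)
//     {
//         // ... access the device registers through va ...
//         segment.devUnmap(va);
//     }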
void *DeviceSegment::devMap(void *ra, uint64_t i_devDataSize, bool i_nonCI)
{
void *segBlock = NULL;
if (i_devDataSize <= THIRTYTWO_GB)
{
for (size_t i = 0; i < MMIO_MAP_DEVICES; i++)
{
if ((0 == iv_mmioMap[i].addr) && (0 == iv_mmioMap[i].size))
{
iv_mmioMap[i].no_ci = i_nonCI;
iv_mmioMap[i].size = i_devDataSize;
iv_mmioMap[i].addr = reinterpret_cast<uint64_t>(ra);
segBlock = reinterpret_cast<void*>(i *
((1ull << SLBE_s) / MMIO_MAP_DEVICES) +
this->getBaseAddress());
break;
}
}
}
else
{
printk("Unsupported device segment size(0x%lX), ",i_devDataSize);
printk("for address 0x%lX\n",reinterpret_cast<uint64_t>(ra));
}
return segBlock;
}
int DeviceSegment::devUnmap(void *ea)
{
int rc = -EINVAL;
uint64_t segment_ea = reinterpret_cast<uint64_t>(ea);
//Verify input address falls within this segment's address range
if (segment_ea < this->getBaseAddress() ||
segment_ea >= (this->getBaseAddress() + (1ull << SLBE_s)))
{
return rc;
}
segment_ea = segment_ea - this->getBaseAddress();
size_t idx = segment_ea / ((1ull << SLBE_s) / MMIO_MAP_DEVICES);
if ((0 != iv_mmioMap[idx].addr) || (0 != iv_mmioMap[idx].size))
{
//Remove all of the defined block's size (<= 32GB)
PageTableManager::delRangePN(iv_mmioMap[idx].addr / PAGESIZE,
(iv_mmioMap[idx].addr + iv_mmioMap[idx].size) / PAGESIZE,
false);
iv_mmioMap[idx].addr = 0;
iv_mmioMap[idx].size = 0;
rc = 0;
}
return rc;
}
/**
* Locate the physical address of the given virtual address
*/
uint64_t DeviceSegment::findPhysicalAddress(uint64_t i_vaddr) const
{
uint64_t rc = -EFAULT;
uint64_t segment_ea = i_vaddr;
//Verify input address falls within this segment's address range
if (segment_ea < this->getBaseAddress() ||
segment_ea >= (this->getBaseAddress() + (1ull << SLBE_s)))
{
return rc;
}
segment_ea = segment_ea - this->getBaseAddress();
size_t idx = segment_ea / ((1ull << SLBE_s) / MMIO_MAP_DEVICES);
if ((0 != iv_mmioMap[idx].addr) || (0 != iv_mmioMap[idx].size))
{
//memory offset within this device's window
uint64_t offset = segment_ea -
idx*((1ull << SLBE_s) / MMIO_MAP_DEVICES);
return (iv_mmioMap[idx].addr + offset);
}
return rc;
}
|
apache-2.0
|
SamsungARTIK/demokit
|
test/test_gpioctrl.js
|
965
|
/*
* Copyright (c) 2016 Samsung Electronics Co., Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var assert = require('assert')
var GpioCtrl = require('../libs/gpioctrl').GpioCtrl
describe('gpioctrl', function () {
var ctrl = new GpioCtrl(0, true)
it('setOn with on event', function (done) {
ctrl.on('on', done)
ctrl.setOn()
})
it('setOff with off event', function (done) {
ctrl.on('off', done)
ctrl.setOff()
})
})
|
apache-2.0
|
mdoering/backbone
|
life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Rubiaceae/Spermacoce/Spermacoce setidens/ Syn. Bigelovia setidens/README.md
|
180
|
# Bigelovia setidens Miq. SPECIES
#### Status
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
apache-2.0
|
sardine/mina-ja
|
src/mina-core/target/site/apidocs/org/apache/mina/filter/reqres/class-use/ResponseType.html
|
11485
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_20) on Sat May 29 01:41:14 JST 2010 -->
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<TITLE>
クラス org.apache.mina.filter.reqres.ResponseType の使用 (Apache MINA Core 2.0.0-RC1 API)
</TITLE>
<META NAME="date" CONTENT="2010-05-29">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="クラス org.apache.mina.filter.reqres.ResponseType の使用 (Apache MINA Core 2.0.0-RC1 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="ナビゲーションリンクをスキップ"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>概要</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>パッケージ</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型"><FONT CLASS="NavBarFont1"><B>クラス</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>使用</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>階層ツリー</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>非推奨 API</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>索引</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>ヘルプ</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
 PREV
 NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../../../../../../index.html?org/apache/mina/filter/reqres//class-useResponseType.html" target="_top"><B>FRAMES</B></A>
  <A HREF="ResponseType.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>すべてのクラス</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.mina.filter.reqres.ResponseType</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型">ResponseType</A> を使用しているパッケージ</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.mina.filter.reqres"><B>org.apache.mina.filter.reqres</B></A></TD>
<TD> </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.mina.filter.reqres"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<A HREF="../../../../../../org/apache/mina/filter/reqres/package-summary.html">org.apache.mina.filter.reqres</A> での <A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型">ResponseType</A> の使用</FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2"><A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型">ResponseType</A> を返す <A HREF="../../../../../../org/apache/mina/filter/reqres/package-summary.html">org.apache.mina.filter.reqres</A> のメソッド</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型">ResponseType</A></CODE></FONT></TD>
<TD><CODE><B>ResponseInspector.</B><B><A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseInspector.html#getResponseType(java.lang.Object)">getResponseType</A></B>(java.lang.Object message)</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型">ResponseType</A></CODE></FONT></TD>
<TD><CODE><B>Response.</B><B><A HREF="../../../../../../org/apache/mina/filter/reqres/Response.html#getType()">getType</A></B>()</CODE>
<BR>
</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型">ResponseType</A></CODE></FONT></TD>
<TD><CODE><B>ResponseType.</B><B><A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html#valueOf(java.lang.String)">valueOf</A></B>(java.lang.String name)</CODE>
<BR>
Returns the enum constant of this type with the specified name.</TD>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE>static <A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型">ResponseType</A>[]</CODE></FONT></TD>
<TD><CODE><B>ResponseType.</B><B><A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html#values()">values</A></B>()</CODE>
<BR>
Returns an array containing the constants of this enum type, in the order they are declared.</TD>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2"><A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型">ResponseType</A> 型のパラメータを持つ <A HREF="../../../../../../org/apache/mina/filter/reqres/package-summary.html">org.apache.mina.filter.reqres</A> のコンストラクタ</FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><CODE><B><A HREF="../../../../../../org/apache/mina/filter/reqres/Response.html#Response(org.apache.mina.filter.reqres.Request, java.lang.Object, org.apache.mina.filter.reqres.ResponseType)">Response</A></B>(<A HREF="../../../../../../org/apache/mina/filter/reqres/Request.html" title="org.apache.mina.filter.reqres 内のクラス">Request</A> request,
java.lang.Object message,
<A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型">ResponseType</A> type)</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="ナビゲーションリンクをスキップ"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>概要</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>パッケージ</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../org/apache/mina/filter/reqres/ResponseType.html" title="org.apache.mina.filter.reqres 内の列挙型"><FONT CLASS="NavBarFont1"><B>クラス</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>使用</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>階層ツリー</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>非推奨 API</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>索引</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>ヘルプ</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
 PREV
 NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../../../../../../index.html?org/apache/mina/filter/reqres//class-useResponseType.html" target="_top"><B>FRAMES</B></A>
  <A HREF="ResponseType.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>すべてのクラス</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2004-2010 <a href="http://mina.apache.org/">Apache MINA Project</a>. All Rights Reserved.
</BODY>
</HTML>
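The generated page above lists the two static helpers every Java enum provides. A minimal, hypothetical sketch of how they behave (the demo class is an assumption for illustration; only the ResponseType import refers to the real MINA type):
import org.apache.mina.filter.reqres.ResponseType;

public class ResponseTypeDemo {
    public static void main(String[] args) {
        // values() returns the enum constants in declaration order.
        for (ResponseType type : ResponseType.values()) {
            // valueOf(String) maps a constant's name back to the constant itself.
            ResponseType roundTripped = ResponseType.valueOf(type.name());
            System.out.println(type + " -> " + roundTripped);
        }
    }
}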
|
apache-2.0
|
anton-johansson/ip-to-geolocation-service
|
src/main/java/com/antonjohansson/geolocation/source/es/UpdateDocumentRequest.java
|
1810
|
/**
* Copyright (c) Anton Johansson <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.antonjohansson.geolocation.source.es;
import java.math.BigDecimal;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Defines a request to update a document.
*
* @param <T> The type of the document.
*/
public class UpdateDocumentRequest<T>
{
private @JsonProperty("doc") T document;
public T getDocument()
{
return document;
}
public void setDocument(T document)
{
this.document = document;
}
/**
* The actual coordinates to update.
*/
public static class UpdateDocumentLocation
{
private @JsonProperty("lon") BigDecimal longitude;
private @JsonProperty("lat") BigDecimal latitude;
public BigDecimal getLongitude()
{
return longitude;
}
public void setLongitude(BigDecimal longitude)
{
this.longitude = longitude;
}
public BigDecimal getLatitude()
{
return latitude;
}
public void setLatitude(BigDecimal latitude)
{
this.latitude = latitude;
}
}
}
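A minimal usage sketch for the wrapper above; the Jackson ObjectMapper call and the coordinate values are illustrative assumptions, not taken from the project:
// Hypothetical demo: produce the {"doc":{...}} body for an Elasticsearch partial update.
import java.math.BigDecimal;
import com.fasterxml.jackson.databind.ObjectMapper;

public class UpdateDocumentRequestDemo
{
    public static void main(String[] args) throws Exception
    {
        UpdateDocumentRequest.UpdateDocumentLocation location = new UpdateDocumentRequest.UpdateDocumentLocation();
        location.setLongitude(new BigDecimal("18.0686"));
        location.setLatitude(new BigDecimal("59.3293"));

        UpdateDocumentRequest<UpdateDocumentRequest.UpdateDocumentLocation> request = new UpdateDocumentRequest<>();
        request.setDocument(location);

        // Serializes to something like: {"doc":{"lon":18.0686,"lat":59.3293}}
        System.out.println(new ObjectMapper().writeValueAsString(request));
    }
}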
|
apache-2.0
|
JulianChina/ParkingApp
|
app/src/main/java/overlay/BusLineOverlay.java
|
7848
|
package overlay;
import java.util.ArrayList;
import java.util.List;
import android.content.Context;
import android.graphics.Color;
import com.amap.api.maps.AMap;
import com.amap.api.maps.CameraUpdateFactory;
import com.amap.api.maps.model.BitmapDescriptor;
import com.amap.api.maps.model.BitmapDescriptorFactory;
import com.amap.api.maps.model.LatLng;
import com.amap.api.maps.model.LatLngBounds;
import com.amap.api.maps.model.Marker;
import com.amap.api.maps.model.MarkerOptions;
import com.amap.api.maps.model.Polyline;
import com.amap.api.maps.model.PolylineOptions;
import com.amap.api.services.busline.BusLineItem;
import com.amap.api.services.busline.BusStationItem;
import com.amap.api.services.core.LatLonPoint;
import com.run.sg.amap3d.R;
/**
 * Bus line overlay class. In the AMap (AutoNavi) Maps API, this class can be used to create a bus line overlay when a bus route needs to be displayed. If it does not meet your needs, you can also build a custom bus line overlay of your own.
 *
 * @since V2.1.0
 */
public class BusLineOverlay {
private BusLineItem mBusLineItem;
private AMap mAMap;
private ArrayList<Marker> mBusStationMarks = new ArrayList<Marker>();
private Polyline mBusLinePolyline;
private List<BusStationItem> mBusStations;
private BitmapDescriptor startBit, endBit, busBit;
private Context mContext;
/**
 * Creates a bus line overlay through this constructor.
 *
 * @param context the current activity.
 * @param amap the map object.
 * @param busLineItem the bus line. See the class <strong><a href="../../../../../../Search/com/amap/api/services/busline/BusStationItem.html" title="class in com.amap.api.services.busline">BusStationItem</a></strong> in the bus line and bus station package (com.amap.api.services.busline) of the search service module.
 * @since V2.1.0
 */
public BusLineOverlay(Context context, AMap amap, BusLineItem busLineItem) {
mContext = context;
mBusLineItem = busLineItem;
this.mAMap = amap;
mBusStations = mBusLineItem.getBusStations();
}
/**
 * Adds the bus line to the map.
 *
 * @since V2.1.0
 */
public void addToMap() {
try {
List<LatLonPoint> pointList = mBusLineItem.getDirectionsCoordinates();
List<LatLng> listPolyline = AMapServicesUtil.convertArrList(pointList);
mBusLinePolyline = mAMap.addPolyline(new PolylineOptions()
.addAll(listPolyline).color(getBusColor())
.width(getBuslineWidth()));
if (mBusStations.size() < 1) {
return;
}
for (int i = 1; i < mBusStations.size() - 1; i++) {
Marker marker = mAMap.addMarker(getMarkerOptions(i));
mBusStationMarks.add(marker);
}
Marker markerStart = mAMap.addMarker(getMarkerOptions(0));
mBusStationMarks.add(markerStart);
Marker markerEnd = mAMap
.addMarker(getMarkerOptions(mBusStations.size() - 1));
mBusStationMarks.add(markerEnd);
} catch (Throwable e) {
e.printStackTrace();
}
}
/**
 * Removes all the markers of this BusLineOverlay from the map.
 *
 * @since V2.1.0
 */
public void removeFromMap() {
if (mBusLinePolyline != null) {
mBusLinePolyline.remove();
}
try {
for (Marker mark : mBusStationMarks) {
mark.remove();
}
destroyBit();
} catch (Throwable e) {
e.printStackTrace();
}
}
private void destroyBit() {
if (startBit != null) {
startBit.recycle();
startBit = null;
}
if (endBit != null) {
endBit.recycle();
endBit = null;
}
if (busBit != null) {
busBit.recycle();
busBit = null;
}
}
/**
 * Moves the camera so that the whole bus line is in view.
 *
 * @since V2.1.0
 */
public void zoomToSpan() {
if (mAMap == null)
return;
try {
List<LatLonPoint> coordin = mBusLineItem.getDirectionsCoordinates();
if (coordin != null && coordin.size() > 0) {
LatLngBounds bounds = getLatLngBounds(coordin);
mAMap.moveCamera(CameraUpdateFactory.newLatLngBounds(bounds, 5));
}
} catch (Throwable e) {
e.printStackTrace();
}
}
private LatLngBounds getLatLngBounds(List<LatLonPoint> coordin) {
LatLngBounds.Builder b = LatLngBounds.builder();
for (int i = 0; i < coordin.size(); i++) {
b.include(new LatLng(coordin.get(i).getLatitude(), coordin.get(i)
.getLongitude()));
}
return b.build();
}
private MarkerOptions getMarkerOptions(int index) {
MarkerOptions options = new MarkerOptions()
.position(
new LatLng(mBusStations.get(index).getLatLonPoint()
.getLatitude(), mBusStations.get(index)
.getLatLonPoint().getLongitude()))
.title(getTitle(index)).snippet(getSnippet(index));
if (index == 0) {
options.icon(getStartBitmapDescriptor());
} else if (index == mBusStations.size() - 1) {
options.icon(getEndBitmapDescriptor());
} else {
options.anchor(0.5f, 0.5f);
options.icon(getBusBitmapDescriptor());
}
return options;
}
protected BitmapDescriptor getStartBitmapDescriptor() {
startBit = BitmapDescriptorFactory.fromResource(R.drawable.amap_start);
return startBit;
}
protected BitmapDescriptor getEndBitmapDescriptor() {
endBit = BitmapDescriptorFactory.fromResource(R.drawable.amap_end);
return endBit;
}
protected BitmapDescriptor getBusBitmapDescriptor() {
busBit = BitmapDescriptorFactory.fromResource(R.drawable.amap_bus);
return busBit;
}
/**
 * Returns the title of the marker at the given index.
 *
 * @param index the index of the marker.
 * @return the title of the marker.
 * @since V2.1.0
 */
protected String getTitle(int index) {
return mBusStations.get(index).getBusStationName();
}
/**
 * Returns the snippet (details) of the marker at the given index.
 *
 * @param index the index of the marker.
 * @return the snippet of the marker.
 * @since V2.1.0
 */
protected String getSnippet(int index) {
return "";
}
/**
 * Gets the position in the station list of the bus station that corresponds to the given marker.
 *
 * @param marker a marker object.
 * @return the index in the list of the bus station for this marker, or -1 if no match is found.
 * @since V2.1.0
 */
public int getBusStationIndex(Marker marker) {
for (int i = 0; i < mBusStationMarks.size(); i++) {
if (mBusStationMarks.get(i).equals(marker)) {
return i;
}
}
return -1;
}
/**
 * Returns the information of the bus station at the given index.
 *
 * @param index the index of the bus station.
 * @return the bus station information, or null if the index is out of range. See the class <strong><a href="../../../../../../Search/com/amap/api/services/busline/BusStationItem.html" title="class in com.amap.api.services.busline">BusStationItem</a></strong> in the bus line and bus station package (com.amap.api.services.busline) of the search service module.
 * @since V2.1.0
 */
public BusStationItem getBusStationItem(int index) {
if (index < 0 || index >= mBusStations.size()) {
return null;
}
return mBusStations.get(index);
}
protected int getBusColor() {
return Color.parseColor("#537edc");
}
protected float getBuslineWidth() {
return 18f;
}
}
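A minimal usage sketch, assuming an already-initialized AMap and a BusLineItem returned by a bus-line search; the helper name and call site are illustrative, not part of the class:
// Hypothetical helper, e.g. invoked from an onBusLineSearched(...) callback.
private BusLineOverlay showBusLine(Context context, AMap aMap, BusLineItem busLineItem) {
    BusLineOverlay overlay = new BusLineOverlay(context, aMap, busLineItem);
    overlay.addToMap();   // draws the polyline plus start/end/stop markers
    overlay.zoomToSpan(); // frames the whole route
    return overlay;       // keep the reference so removeFromMap() can clean up later
}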
|
apache-2.0
|
leopardoooo/cambodia
|
ycsoft-lib/src/main/java/com/ycsoft/beans/device/RDeviceUseRecords.java
|
2301
|
/**
* RDevice.java 2013/01/06
*/
package com.ycsoft.beans.device;
import java.io.Serializable;
import java.util.Date;
import com.ycsoft.beans.base.OptrBase;
import com.ycsoft.commons.constants.DictKey;
import com.ycsoft.commons.store.MemoryDict;
import com.ycsoft.daos.config.POJO;
/**
* RDeviceUseRecords -> R_DEVICE_USE_RECORDS mapping
*/
@POJO(tn = "R_DEVICE_USE_RECORDS", sn = "", pk = "")
public class RDeviceUseRecords extends OptrBase implements Serializable {
/**
*
*/
private static final long serialVersionUID = 192137922276462709L;
// RDeviceUseRecords all properties
private Integer done_code;
private String device_id;
private String device_type;
private String device_code;
private String busi_code;
private String cust_id;
private String cust_no;
private Date done_date;
private String busi_name;
private String cust_name;
public Integer getDone_code() {
return done_code;
}
public void setDone_code(Integer done_code) {
this.done_code = done_code;
}
public String getDevice_id() {
return device_id;
}
public void setDevice_id(String device_id) {
this.device_id = device_id;
}
public String getDevice_type() {
return device_type;
}
public void setDevice_type(String device_type) {
this.device_type = device_type;
}
public String getDevice_code() {
return device_code;
}
public void setDevice_code(String device_code) {
this.device_code = device_code;
}
public String getBusi_code() {
return busi_code;
}
public void setBusi_code(String busi_code) {
this.busi_code = busi_code;
this.busi_name = MemoryDict.getDictName(DictKey.BUSI_CODE, busi_code);
}
public String getCust_id() {
return cust_id;
}
public void setCust_id(String cust_id) {
this.cust_id = cust_id;
}
public String getCust_no() {
return cust_no;
}
public void setCust_no(String cust_no) {
this.cust_no = cust_no;
}
public Date getDone_date() {
return done_date;
}
public void setDone_date(Date done_date) {
this.done_date = done_date;
}
public String getCust_name() {
return cust_name;
}
public void setCust_name(String cust_name) {
this.cust_name = cust_name;
}
public String getBusi_name() {
return busi_name;
}
}
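Note from the setters above that setBusi_code() also resolves busi_name through MemoryDict, so callers only need to supply the code. A small illustrative sketch (the field values and helper name are assumptions):
// Hypothetical construction of a usage record before handing it to the DAO layer.
private RDeviceUseRecords newUseRecord(String deviceId, String busiCode) {
    RDeviceUseRecords record = new RDeviceUseRecords();
    record.setDevice_id(deviceId);
    record.setBusi_code(busiCode);          // busi_name is derived here via MemoryDict
    record.setDone_date(new java.util.Date());
    return record;
}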
|
apache-2.0
|
google-coral/edgetpu
|
src/cpp/basic/edgetpu_resource_manager_test.cc
|
6349
|
#include "src/cpp/basic/edgetpu_resource_manager.h"
#include <chrono> // NOLINT
#include <random>
#include <thread> // NOLINT
#include "absl/flags/parse.h"
#include "glog/logging.h"
#include "gtest/gtest.h"
namespace coral {
namespace {
using EdgeTpuState = EdgeTpuResourceManager::EdgeTpuState;
class EdgeTpuResourceManagerTest : public ::testing::Test {
protected:
void SetUp() override {
resource_manager_ = EdgeTpuResourceManager::GetSingleton();
ASSERT_TRUE(resource_manager_);
unassigned_devices_ =
resource_manager_->ListEdgeTpuPaths(EdgeTpuState::kUnassigned);
ASSERT_GE(unassigned_devices_.size(), 1);
}
void TearDown() override {
EXPECT_EQ(
0, resource_manager_->ListEdgeTpuPaths(EdgeTpuState::kAssigned).size());
}
EdgeTpuResourceManager* resource_manager_;
std::vector<std::string> unassigned_devices_;
};
TEST_F(EdgeTpuResourceManagerTest, GetAllEdgeTpuOnce) {
std::vector<std::unique_ptr<EdgeTpuResource>> edgetpu_resources(
unassigned_devices_.size());
for (int i = 0; i < unassigned_devices_.size(); ++i) {
EXPECT_EQ(
i, resource_manager_->ListEdgeTpuPaths(EdgeTpuState::kAssigned).size());
ASSERT_EQ(kEdgeTpuApiOk,
resource_manager_->GetEdgeTpuResource(&edgetpu_resources[i]));
}
}
TEST_F(EdgeTpuResourceManagerTest, GetSameTpuContextRepeatedly) {
std::unique_ptr<EdgeTpuResource> edgetpu_once;
EXPECT_EQ(kEdgeTpuApiOk, resource_manager_->GetEdgeTpuResource(
unassigned_devices_[0], &edgetpu_once));
std::unique_ptr<EdgeTpuResource> edgetpu_twice;
ASSERT_TRUE(edgetpu_once != nullptr);
EXPECT_EQ(kEdgeTpuApiOk, resource_manager_->GetEdgeTpuResource(
unassigned_devices_[0], &edgetpu_twice));
ASSERT_TRUE(edgetpu_twice != nullptr);
EXPECT_EQ(edgetpu_once->path(), edgetpu_twice->path());
EXPECT_EQ(edgetpu_once->context(), edgetpu_twice->context());
std::unique_ptr<EdgeTpuResource> edgetpu_thrice;
EXPECT_EQ(kEdgeTpuApiOk, resource_manager_->GetEdgeTpuResource(
unassigned_devices_[0], &edgetpu_thrice));
ASSERT_TRUE(edgetpu_thrice != nullptr);
EXPECT_EQ(edgetpu_once->path(), edgetpu_thrice->path());
EXPECT_EQ(edgetpu_once->context(), edgetpu_thrice->context());
}
TEST_F(EdgeTpuResourceManagerTest, CheckDevicePath) {
std::unique_ptr<EdgeTpuResource> edgetpu_resource;
EXPECT_EQ(kEdgeTpuApiOk, resource_manager_->GetEdgeTpuResource(
unassigned_devices_[0], &edgetpu_resource));
EXPECT_EQ(unassigned_devices_[0], edgetpu_resource->path());
}
TEST_F(EdgeTpuResourceManagerTest, DeviceNotExistError) {
std::unique_ptr<EdgeTpuResource> edgetpu_resource;
EXPECT_EQ(kEdgeTpuApiError, resource_manager_->GetEdgeTpuResource(
"invalid_path", &edgetpu_resource));
EXPECT_EQ("Path invalid_path does not map to an Edge TPU device.",
resource_manager_->get_error_message());
}
TEST_F(EdgeTpuResourceManagerTest, ReclaimUnassignedDeviceError) {
EXPECT_EQ(kEdgeTpuApiError,
resource_manager_->ReclaimEdgeTpuResource("unassigned_device"));
EXPECT_EQ("Trying to reclaim unassigned device: unassigned_device.",
resource_manager_->get_error_message());
}
TEST_F(EdgeTpuResourceManagerTest, ExhaustAllEdgeTpu) {
// No need to run this test if there's only one Edge TPU detected.
if (unassigned_devices_.size() <= 1) {
return;
}
// Exhaust all Edge TPU.
std::vector<std::unique_ptr<EdgeTpuResource>> edgetpu_resources(
unassigned_devices_.size());
for (int i = 0; i < edgetpu_resources.size(); ++i) {
EXPECT_EQ(kEdgeTpuApiOk,
resource_manager_->GetEdgeTpuResource(&edgetpu_resources[i]));
VLOG(1) << "assigned: " << edgetpu_resources[i]->path();
}
// Request one more Edge TPU to trigger the error.
const std::string expected_error_message =
"Multiple Edge TPUs detected and all have been mapped to at least one "
"model. If you want to share one Edge TPU with multiple models, specify "
"`device_path` name.";
std::unique_ptr<EdgeTpuResource> another_resource;
EXPECT_EQ(kEdgeTpuApiError,
resource_manager_->GetEdgeTpuResource(&another_resource));
EXPECT_EQ(expected_error_message, resource_manager_->get_error_message());
}
TEST_F(EdgeTpuResourceManagerTest, MultithreadTest) {
const int num_devices = unassigned_devices_.size();
const int num_threads = 3 * num_devices;
// Each thread is randomly assigned to use a device.
std::mt19937 generator(123456);
auto get_device_assignments = [&generator, &num_devices,
&num_threads]() -> std::vector<int> {
std::vector<int> result(num_threads);
std::uniform_int_distribution<> dis(0, num_devices - 1);
for (int i = 0; i < num_threads; ++i) {
result[i] = dis(generator);
}
return result;
};
std::vector<int> device_assignments = get_device_assignments();
// Each thread will randomly sleep 100ms ~ 500ms then release the device.
auto get_sleep_times = [&generator, &num_threads]() -> std::vector<int> {
std::vector<int> result(num_threads);
std::uniform_int_distribution<> dis(100, 500);
for (int i = 0; i < num_threads; ++i) {
result[i] = dis(generator);
}
return result;
};
std::vector<int> sleep_times = get_sleep_times();
auto thread_job = [this](const std::string& device_path, int sleep_time) {
std::unique_ptr<EdgeTpuResource> edgetpu_resource;
EXPECT_EQ(kEdgeTpuApiOk, resource_manager_->GetEdgeTpuResource(
device_path, &edgetpu_resource));
EXPECT_EQ(device_path, edgetpu_resource->path());
EXPECT_TRUE(edgetpu_resource->context());
std::this_thread::sleep_for(std::chrono::milliseconds(sleep_time));
};
std::vector<std::thread> workers(num_threads);
for (int i = 0; i < num_threads; ++i) {
const auto& device_path = unassigned_devices_[device_assignments[i]];
workers[i] = std::thread(thread_job, device_path, sleep_times[i]);
}
for (int i = 0; i < num_threads; ++i) {
workers[i].join();
}
}
} // namespace
} // namespace coral
int main(int argc, char** argv) {
::testing::InitGoogleTest(&argc, argv);
absl::ParseCommandLine(argc, argv);
return RUN_ALL_TESTS();
}
|
apache-2.0
|
phakhruddin/allowance
|
build/iphone/Classes/defines.h
|
3228
|
//
// these are dynamic defines used by the allowance JS compiler
//
// when compiling in a real project these are dynamically generated
// these are defined here mainly for when running inside Xcode
//
// MODULES
#define USE_TI_STREAM
#define USE_TI_CODEC
#define USE_TI_UTILS
#define USE_TI_XML
#define USE_TI_ACCELEROMETER
#define USE_TI_API
#define USE_TI_APP
#define USE_TI_CALENDAR
#define USE_TI_CONTACTS
#define USE_TI_DATABASE
#define USE_TI_FILESYSTEM
#define USE_TI_GEOLOCATION
#define USE_TI_GESTURE
#define USE_TI_MEDIA
#define USE_TI_NETWORK
#define USE_TI_NETWORKSOCKET
#define USE_TI_PLATFORM
#define USE_TI_YAHOO
#define USE_TI_UI
#define USE_TI_UITAB
#define USE_TI_UILABEL
#define USE_TI_UIBUTTON
#define USE_TI_UIPROGRESSBAR
#define USE_TI_UISEARCHBAR
#define USE_TI_UIACTIVITYINDICATOR
#define USE_TI_UISLIDER
#define USE_TI_UISWITCH
#define USE_TI_UIPICKER
#define USE_TI_UITOOLBAR //DEPRECATED
#define USE_TI_UITEXTAREA
#define USE_TI_UITEXTFIELD
#define USE_TI_UIIMAGEVIEW
#define USE_TI_UIMASKEDIMAGE
#define USE_TI_UIWEBVIEW
#define USE_TI_UIWINDOW
#define USE_TI_UIVIEW
#define USE_TI_UIOPTIONDIALOG
#define USE_TI_UIEMAILDIALOG
#define USE_TI_UIDASHBOARDVIEW
#define USE_TI_UISCROLLVIEW
#define USE_TI_UISCROLLABLEVIEW
#define USE_TI_UITABLEVIEW
#define USE_TI_UILISTVIEW
#define USE_TI_UI2DMATRIX
#define USE_TI_UI3DMATRIX
#define USE_TI_UIANIMATION
#define USE_TI_UICOVERFLOWVIEW //DEPRECATED
#define USE_TI_UITABBEDBAR //DEPRECATED
#define USE_TI_UIATTRIBUTEDSTRING
#define USE_TI_UIIPHONE
#define USE_TI_UIIPHONEROWANIMATIONSTYLE
#define USE_TI_UIIPHONESTATUSBAR
#define USE_TI_UIIPHONESYSTEMICON
#define USE_TI_UIIPHONESYSTEMBUTTONSTYLE
#define USE_TI_UIIPHONESYSTEMBUTTON
#define USE_TI_UIIPHONEACTIVITYINDICATORSTYLE
#define USE_TI_UIIPHONEANIMATIONSTYLE
#define USE_TI_UIIPHONEPROGRESSBARSTYLE
#define USE_TI_UIIPHONESCROLLINDICATORSTYLE
#define USE_TI_UIIPHONETABLEVIEWSTYLE
#define USE_TI_UIIPHONETABLEVIEWSEPARATORSTYLE
#define USE_TI_UIIPHONETABLEVIEWSCROLLPOSITION
#define USE_TI_UIIPHONETABLEVIEWCELLSELECTIONSTYLE
#define USE_TI_UIIPHONEALERTDIALOGSTYLE
#define USE_TI_UIIPHONELISTVIEWSTYLE
#define USE_TI_UIIPHONELISTVIEWSCROLLPOSITION
#define USE_TI_UIIPHONELISTVIEWCELLSELECTIONSTYLE
#define USE_TI_UIIPHONELISTVIEWSEPARATORSTYLE
#define USE_TI_UICLIPBOARD
#define USE_TI_UIIPAD
#define USE_TI_UIIPADPOPOVER
#define USE_TI_UIIPADSPLITWINDOW
#define USE_TI_UIIPADDOCUMENTVIEWER
#define USE_TI_UIIPADSPLITWINDOWBUTTON
#define USE_TI_UIIOS
#define USE_TI_UIIOSADVIEW
#define USE_TI_UIIOS3DMATRIX
#define USE_TI_UIIOSCOVERFLOWVIEW
#define USE_TI_UIIOSTOOLBAR
#define USE_TI_UIIOSTABBEDBAR
#define USE_TI_UIIOSDOCUMENTVIEWER
#define USE_TI_UIIOSNAVIGATIONWINDOW
#define USE_TI_UIIOSATTRIBUTEDSTRING //DEPRECATED
#define USE_TI_UIIOSSPLITWINDOW
#define USE_TI_APPIOS
#define USE_TI_UIIOSANIMATOR
#define USE_TI_UIIOSSNAPBEHAVIOR
#define USE_TI_UIIOSPUSHBEHAVIOR
#define USE_TI_UIIOSGRAVITYBEHAVIOR
#define USE_TI_UIIOSANCHORATTACHMENTBEHAVIOR
#define USE_TI_UIIOSVIEWATTACHMENTBEHAVIOR
#define USE_TI_UIIOSCOLLISIONBEHAVIOR
#define USE_TI_UIIOSDYNAMICITEMBEHAVIOR
#define USE_TI_UIIOSTRANSITIONANIMATION
#define USE_TI_UIREFRESHCONTROL
#define USE_TI_SILENTPUSH
#define USE_TI_FETCH
|
apache-2.0
|
zstackio/zstack-woodpecker
|
integrationtest/vm/mini/multiclusters/paths/multi_path88.py
|
2871
|
import zstackwoodpecker.test_state as ts_header
import os
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template5", checking_point=1, faild_point=100000, path_list=[
[TestAction.create_mini_vm, 'vm1', 'cluster=cluster2'],
[TestAction.reboot_vm, 'vm1'],
[TestAction.create_vm_backup, 'vm1', 'vm1-backup1'],
[TestAction.create_mini_vm, 'vm2', 'cpu=random', 'cluster=cluster1'],
[TestAction.migrate_vm, 'vm1'],
[TestAction.poweroff_only, 'cluster=cluster2'],
[TestAction.create_volume, 'volume1', 'size=random', 'cluster=cluster2', 'flag=scsi'],
[TestAction.create_volume, 'volume2', 'cluster=cluster2', 'flag=thick,scsi'],
[TestAction.add_image, 'image1', 'root', 'http://172.20.1.28/mirror/diskimages/centos_vdbench.qcow2'],
[TestAction.delete_vm_backup, 'vm1-backup1'],
[TestAction.delete_image, 'image1'],
[TestAction.recover_image, 'image1'],
[TestAction.delete_image, 'image1'],
[TestAction.expunge_image, 'image1'],
[TestAction.start_vm, 'vm1'],
[TestAction.create_vm_backup, 'vm1', 'vm1-backup2'],
[TestAction.stop_vm, 'vm1'],
[TestAction.change_vm_ha, 'vm2'],
[TestAction.poweroff_only, 'cluster=cluster2'],
[TestAction.resize_data_volume, 'volume1', 5*1024*1024],
[TestAction.attach_volume, 'vm1', 'volume1'],
[TestAction.detach_volume, 'volume1'],
[TestAction.create_volume, 'volume3', 'cluster=cluster1', 'flag=scsi'],
[TestAction.delete_volume, 'volume3'],
[TestAction.create_volume, 'volume4', 'cluster=cluster1', 'flag=scsi'],
[TestAction.attach_volume, 'vm2', 'volume4'],
[TestAction.create_volume_backup, 'volume4', 'volume4-backup3'],
[TestAction.change_vm_ha, 'vm2'],
[TestAction.stop_vm, 'vm2'],
[TestAction.use_volume_backup, 'volume4-backup3'],
[TestAction.start_vm, 'vm2'],
[TestAction.change_vm_ha, 'vm2'],
[TestAction.create_mini_vm, 'vm3', 'memory=random', 'cluster=cluster2'],
[TestAction.delete_volume, 'volume1'],
[TestAction.expunge_volume, 'volume1'],
[TestAction.destroy_vm, 'vm2'],
[TestAction.recover_vm, 'vm2'],
[TestAction.start_vm, 'vm1'],
[TestAction.create_vm_backup, 'vm1', 'vm1-backup4'],
[TestAction.stop_vm, 'vm1'],
[TestAction.create_image_from_volume, 'vm1', 'vm1-image2'],
[TestAction.poweroff_only, 'cluster=cluster2'],
[TestAction.attach_volume, 'vm2', 'volume4'],
[TestAction.start_vm, 'vm2'],
[TestAction.create_volume_backup, 'volume4', 'volume4-backup5'],
[TestAction.stop_vm, 'vm2'],
[TestAction.use_volume_backup, 'volume4-backup5'],
])
'''
The final status:
Running:[]
Stopped:['vm1', 'vm3', 'vm2']
Enabled:['vm1-backup2', 'volume4-backup3', 'vm1-backup4', 'volume4-backup5', 'vm1-image2']
attached:['volume4']
Detached:['volume2']
Deleted:['volume3', 'vm1-backup1']
Expunged:['volume1', 'image1']
Ha:[]
Group:
vm_backup2:['vm1-backup4']---vm1@
vm_backup1:['vm1-backup2']---vm1@
'''
|
apache-2.0
|
pashkobohdan/Music-Player
|
src/controllers/dialogControllers/ControllerCounterStop.java
|
3338
|
package controllers.dialogControllers;
import controllers.Controller;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.Node;
import javafx.scene.control.Button;
import javafx.scene.control.ComboBox;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import objects.Song;
import objects.StopCount;
import java.net.URL;
import java.util.ResourceBundle;
public class ControllerCounterStop extends DialogController implements Initializable {
public static final int maxComboBox = 100;
@FXML
private Button buttonOk, buttonCancel, buttonDelete;
@FXML
private ComboBox<Integer> comboBoxCount;
@FXML
private TextField textFieldCount;
@FXML
private Label labelCount;
private StopCount stopCount;
private boolean isCancel = false;
@Override
public void initialize(URL location, ResourceBundle resources) {
for (int i = Song.INDEX_ZERO; i < maxComboBox; i++) {
comboBoxCount.getItems().add(i);
}
}
public void setResourceBundle(ResourceBundle resourceBundle) {
labelCount.setText(resourceBundle.getString("key.dialog.countStop.labelCount"));
buttonOk.setText(resourceBundle.getString("key.button.ok"));
buttonCancel.setText(resourceBundle.getString("key.button.cancel"));
buttonDelete.setText(resourceBundle.getString("key.button.delete"));
textFieldCount.setText(resourceBundle.getString("key,dialog.countStopDialog.textField.defaultText"));
}
public void close(ActionEvent actionEvent) {
(((Node) actionEvent.getSource()).getScene().getWindow()).hide();
}
public void refreshStopCountInfo() {
try {
stopCount.setCount(Integer.parseInt(textFieldCount.getText()));
textFieldCount.setText(Data.EMPTY_STRING);
} catch (NumberFormatException e) {
stopCount.setCount(comboBoxCount.getValue());
}
}
public void deleteStopCount() {
stopCount.setCount(StopCount.DEFAULT_COUNT);
comboBoxCount.getSelectionModel().select(StopCount.DEFAULT_COUNT);
textFieldCount.setText(StopCount.DEFAULT_COUNT + Data.EMPTY_STRING);
refreshStopCountInfo();
}
public void actionButtonClicked(ActionEvent actionEvent) {
if (!(actionEvent.getSource() instanceof Button)) {
return;
}
switch (((Button) actionEvent.getSource()).getId()) {
case "buttonOk":
refreshStopCountInfo();
close(actionEvent);
break;
case "buttonCancel":
isCancel = true;
close(actionEvent);
break;
case "buttonDelete":
deleteStopCount();
close(actionEvent);
break;
}
}
public void setStopCount(StopCount stopCount) {
this.stopCount = stopCount;
isCancel = false;
textFieldCount.setText(stopCount.getCount() + Data.EMPTY_STRING);
comboBoxCount.getSelectionModel().select(stopCount.getCount());
}
public boolean isCancel() {
return isCancel;
}
}
|
apache-2.0
|
CenturyLinkCloud/clc-net-sdk
|
src/CenturyLinkCloudSDK/ServiceModels/Domain/Activity.cs
|
687
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace CenturyLinkCloudSDK.ServiceModels
{
public class Activity
{
public string AccountAlias { get; set; }
public string Body { get; set; }
public string AccountDescription { get; set; }
public int EntityId { get; set; }
public string ReferenceId { get; set; }
public string EntityType { get; set; }
public string LocationAlias { get; set; }
public string CreatedBy { get; set; }
public DateTime CreatedDate { get; set; }
public string Subject { get; set; }
}
}
|
apache-2.0
|
karma4u101/FoBo-Demo
|
fobo-lift-template-demo/src/main/webapp/foboapi/older/v1.7/net/liftmodules/FoBoAJSRes/package$$Resource$$AJMaterial101$.html
|
27949
|
<!DOCTYPE html >
<html>
<head>
<title>AJMaterial101 - net.liftmodules.FoBoAJSRes.Resource.AJMaterial101</title>
<meta name="description" content="AJMaterial101 - net.liftmodules.FoBoAJSRes.Resource.AJMaterial101" />
<meta name="keywords" content="AJMaterial101 net.liftmodules.FoBoAJSRes.Resource.AJMaterial101" />
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<link href="../../../lib/template.css" media="screen" type="text/css" rel="stylesheet" />
<link href="../../../lib/diagrams.css" media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
<script type="text/javascript" src="../../../lib/jquery.js" id="jquery-js"></script>
<script type="text/javascript" src="../../../lib/jquery-ui.js"></script>
<script type="text/javascript" src="../../../lib/template.js"></script>
<script type="text/javascript" src="../../../lib/tools.tooltip.js"></script>
<script type="text/javascript">
if(top === self) {
var url = '../../../index.html';
var hash = 'net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$';
var anchor = window.location.hash;
var anchor_opt = '';
if (anchor.length >= 1)
anchor_opt = '@' + anchor.substring(1);
window.location.href = url + '#' + hash + anchor_opt;
}
</script>
</head>
<body class="value">
<div id="definition">
<img alt="Object" src="../../../lib/object_big.png" />
<p id="owner"><a href="../../package.html" class="extype" name="net">net</a>.<a href="../package.html" class="extype" name="net.liftmodules">liftmodules</a>.<a href="package.html" class="extype" name="net.liftmodules.FoBoAJSRes">FoBoAJSRes</a>.<a href="package$$Resource$.html" class="extype" name="net.liftmodules.FoBoAJSRes.Resource">Resource</a></p>
<h1>AJMaterial101</h1><h3><span class="morelinks"><div>Related Doc:
<a href="package$$Resource$.html" class="extype" name="net.liftmodules.FoBoAJSRes.Resource">package Resource</a>
</div></span></h3><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
</div>
<h4 id="signature" class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">object</span>
</span>
<span class="symbol">
<span class="name deprecated" title="Deprecated: (Since version 1.6.0) Use AJMaterial108 or later">AJMaterial101</span><span class="result"> extends <a href="package$$Resource.html" class="extype" name="net.liftmodules.FoBoAJSRes.Resource">Resource</a> with <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Product" class="extype" target="_top">Product</a> with <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Serializable" class="extype" target="_top">Serializable</a></span>
</span>
</h4>
<div id="comment" class="fullcommenttop"><div class="comment cmt"><p>Enable usage of Angular Material version 1․0․1 resource files in your bootstrap liftweb Boot.</p></div><dl class="attributes block"> <dt>Annotations</dt><dd>
<span class="name">@deprecated</span>
</dd><dt>Deprecated</dt><dd class="cmt"><p><i>(Since version 1.6.0)</i> Use AJMaterial108 or later</p></dd><dt>Version</dt><dd><p>1.0.1
<b>Example:</b></p><pre><span class="kw">import</span> net.liftmodules.{FoBoAJSRes <span class="kw">=></span> FoBo}
:
FoBo.Resource.Init=FoBo.Resource.AJMaterial101</pre></dd></dl><div class="toggleContainer block">
<span class="toggle">Linear Supertypes</span>
<div class="superTypes hiddenContent"><a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Serializable" class="extype" target="_top">Serializable</a>, <span class="extype" name="java.io.Serializable">Serializable</span>, <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Product" class="extype" target="_top">Product</a>, <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Equals" class="extype" target="_top">Equals</a>, <a href="package$$Resource.html" class="extype" name="net.liftmodules.FoBoAJSRes.Resource">Resource</a>, <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.AnyRef" class="extype" target="_top">AnyRef</a>, <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Any" class="extype" target="_top">Any</a></div>
</div></div>
<div id="mbrsel">
<div id="textfilter"><span class="pre"></span><span class="input"><input id="mbrsel-input" type="text" accesskey="/" /></span><span class="post"></span></div>
<div id="order">
<span class="filtertype">Ordering</span>
<ol>
<li class="alpha in"><span>Alphabetic</span></li>
<li class="inherit out"><span>By inheritance</span></li>
</ol>
</div>
<div id="ancestors">
<span class="filtertype">Inherited<br />
</span>
<ol id="linearization">
<li class="in" name="net.liftmodules.FoBoAJSRes.Resource.AJMaterial101"><span>AJMaterial101</span></li><li class="in" name="scala.Serializable"><span>Serializable</span></li><li class="in" name="java.io.Serializable"><span>Serializable</span></li><li class="in" name="scala.Product"><span>Product</span></li><li class="in" name="scala.Equals"><span>Equals</span></li><li class="in" name="net.liftmodules.FoBoAJSRes.Resource"><span>Resource</span></li><li class="in" name="scala.AnyRef"><span>AnyRef</span></li><li class="in" name="scala.Any"><span>Any</span></li>
</ol>
</div><div id="ancestors">
<span class="filtertype"></span>
<ol>
<li class="hideall out"><span>Hide All</span></li>
<li class="showall in"><span>Show all</span></li>
</ol>
</div>
<div id="visbl">
<span class="filtertype">Visibility</span>
<ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
</div>
</div>
<div id="template">
<div id="allMembers">
<div id="values" class="values members">
<h3>Value Members</h3>
<ol><li name="scala.AnyRef#!=" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="!=(x$1:Any):Boolean"></a>
<a id="!=(Any):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $bang$eq" class="name">!=</span><span class="params">(<span name="arg0">arg0: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Any" class="extype" target="_top">Any</a></span>)</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Boolean" class="extype" target="_top">Boolean</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@!=(x$1:Any):Boolean" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef###" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="##():Int"></a>
<a id="##():Int"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $hash$hash" class="name">##</span><span class="params">()</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Int" class="extype" target="_top">Int</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@##():Int" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#==" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="==(x$1:Any):Boolean"></a>
<a id="==(Any):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span title="gt4s: $eq$eq" class="name">==</span><span class="params">(<span name="arg0">arg0: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Any" class="extype" target="_top">Any</a></span>)</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Boolean" class="extype" target="_top">Boolean</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@==(x$1:Any):Boolean" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.Any#asInstanceOf" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="asInstanceOf[T0]:T0"></a>
<a id="asInstanceOf[T0]:T0"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">asInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <span class="extype" name="scala.Any.asInstanceOf.T0">T0</span></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@asInstanceOf[T0]:T0" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef#clone" visbl="prt" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="clone():Object"></a>
<a id="clone():AnyRef"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">clone</span><span class="params">()</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.AnyRef" class="extype" target="_top">AnyRef</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@clone():Object" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<a href="../../../java$lang.html" class="extype" name="java.lang">java.lang</a>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.CloneNotSupportedException]">...</span>
</span>)</span>
</dd></dl></div>
</li><li name="scala.AnyRef#eq" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="eq(x$1:AnyRef):Boolean"></a>
<a id="eq(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">eq</span><span class="params">(<span name="arg0">arg0: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.AnyRef" class="extype" target="_top">AnyRef</a></span>)</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Boolean" class="extype" target="_top">Boolean</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@eq(x$1:AnyRef):Boolean" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#equals" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="equals(x$1:Any):Boolean"></a>
<a id="equals(Any):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">equals</span><span class="params">(<span name="arg0">arg0: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Any" class="extype" target="_top">Any</a></span>)</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Boolean" class="extype" target="_top">Boolean</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@equals(x$1:Any):Boolean" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.AnyRef#finalize" visbl="prt" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="finalize():Unit"></a>
<a id="finalize():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">finalize</span><span class="params">()</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Unit" class="extype" target="_top">Unit</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@finalize():Unit" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Attributes</dt><dd>protected[<a href="../../../java$lang.html" class="extype" name="java.lang">java.lang</a>] </dd><dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="symbol">classOf[java.lang.Throwable]</span>
</span>)</span>
</dd></dl></div>
</li><li name="scala.AnyRef#getClass" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="getClass():Class[_]"></a>
<a id="getClass():Class[_]"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">getClass</span><span class="params">()</span><span class="result">: <span class="extype" name="java.lang.Class">Class</span>[_]</span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@getClass():Class[_]" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef → Any</dd></dl></div>
</li><li name="scala.Any#isInstanceOf" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="isInstanceOf[T0]:Boolean"></a>
<a id="isInstanceOf[T0]:Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">isInstanceOf</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Boolean" class="extype" target="_top">Boolean</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@isInstanceOf[T0]:Boolean" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>Any</dd></dl></div>
</li><li name="scala.AnyRef#ne" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="ne(x$1:AnyRef):Boolean"></a>
<a id="ne(AnyRef):Boolean"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">ne</span><span class="params">(<span name="arg0">arg0: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.AnyRef" class="extype" target="_top">AnyRef</a></span>)</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Boolean" class="extype" target="_top">Boolean</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@ne(x$1:AnyRef):Boolean" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#notify" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notify():Unit"></a>
<a id="notify():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notify</span><span class="params">()</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Unit" class="extype" target="_top">Unit</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@notify():Unit" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#notifyAll" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="notifyAll():Unit"></a>
<a id="notifyAll():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">notifyAll</span><span class="params">()</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Unit" class="extype" target="_top">Unit</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@notifyAll():Unit" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#synchronized" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="synchronized[T0](x$1:=>T0):T0"></a>
<a id="synchronized[T0](⇒T0):T0"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">synchronized</span><span class="tparams">[<span name="T0">T0</span>]</span><span class="params">(<span name="arg0">arg0: ⇒ <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>)</span><span class="result">: <span class="extype" name="java.lang.AnyRef.synchronized.T0">T0</span></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@synchronized[T0](x$1:=>T0):T0" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait():Unit"></a>
<a id="wait():Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">()</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Unit" class="extype" target="_top">Unit</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@wait():Unit" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.InterruptedException]">...</span>
</span>)</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long,x$2:Int):Unit"></a>
<a id="wait(Long,Int):Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Long" class="extype" target="_top">Long</a></span>, <span name="arg1">arg1: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Int" class="extype" target="_top">Int</a></span>)</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Unit" class="extype" target="_top">Unit</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@wait(x$1:Long,x$2:Int):Unit" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.InterruptedException]">...</span>
</span>)</span>
</dd></dl></div>
</li><li name="scala.AnyRef#wait" visbl="pub" data-isabs="false" fullComment="yes" group="Ungrouped">
<a id="wait(x$1:Long):Unit"></a>
<a id="wait(Long):Unit"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier">final </span>
<span class="kind">def</span>
</span>
<span class="symbol">
<span class="name">wait</span><span class="params">(<span name="arg0">arg0: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Long" class="extype" target="_top">Long</a></span>)</span><span class="result">: <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Unit" class="extype" target="_top">Unit</a></span>
</span>
</h4><span class="permalink">
<a href="../../../index.html#net.liftmodules.FoBoAJSRes.package$$Resource$$AJMaterial101$@wait(x$1:Long):Unit" title="Permalink" target="_top">
<img src="../../../lib/permalink.png" alt="Permalink" />
</a>
</span>
<div class="fullcomment"><dl class="attributes block"> <dt>Definition Classes</dt><dd>AnyRef</dd><dt>Annotations</dt><dd>
<span class="name">@throws</span><span class="args">(<span>
<span class="defval" name="classOf[java.lang.InterruptedException]">...</span>
</span>)</span>
</dd></dl></div>
</li></ol>
</div>
</div>
<div id="inheritedMembers">
<div class="parent" name="scala.Serializable">
<h3>Inherited from <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Serializable" class="extype" target="_top">Serializable</a></h3>
</div><div class="parent" name="java.io.Serializable">
<h3>Inherited from <span class="extype" name="java.io.Serializable">Serializable</span></h3>
</div><div class="parent" name="scala.Product">
<h3>Inherited from <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Product" class="extype" target="_top">Product</a></h3>
</div><div class="parent" name="scala.Equals">
<h3>Inherited from <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Equals" class="extype" target="_top">Equals</a></h3>
</div><div class="parent" name="net.liftmodules.FoBoAJSRes.Resource">
<h3>Inherited from <a href="package$$Resource.html" class="extype" name="net.liftmodules.FoBoAJSRes.Resource">Resource</a></h3>
</div><div class="parent" name="scala.AnyRef">
<h3>Inherited from <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.AnyRef" class="extype" target="_top">AnyRef</a></h3>
</div><div class="parent" name="scala.Any">
<h3>Inherited from <a href="http://www.scala-lang.org/api/2.11.7/index.html#scala.Any" class="extype" target="_top">Any</a></h3>
</div>
</div>
<div id="groupedMembers">
<div class="group" name="Ungrouped">
<h3>Ungrouped</h3>
</div>
</div>
</div>
<div id="tooltip"></div>
<div id="footer"> </div>
</body>
</html>
|
apache-2.0
|
alessandrojean/manga-no-keiei
|
src/api/mcd/model/ImageCover.java
|
3429
|
package api.mcd.model;
import java.io.File;
import java.net.URL;
import com.google.gson.annotations.SerializedName;
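/**
 * Gson model for a single cover-image entry returned by the MCD API
 * (package api.mcd). A cover comes in three variants (normal, raw and
 * thumbnail), each with its URL, byte size and pixel dimensions, plus the
 * volume number and a side identifier. The File and Serie fields at the
 * bottom are local state; see the note above them.
 */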
public class ImageCover
{
@SerializedName("MIME")
private String mime;
@SerializedName("Normal")
private URL normal;
@SerializedName("NormalSize")
private int normalSize;
@SerializedName("NormalX")
private int normalX;
@SerializedName("NormalY")
private int normalY;
@SerializedName("Raw")
private URL raw;
@SerializedName("RawSize")
private int rawSize;
@SerializedName("RawX")
private int rawX;
@SerializedName("RawY")
private int rawY;
@SerializedName("Side")
private String side;
@SerializedName("Thumbnail")
private URL thumbnail;
@SerializedName("ThumbnailSize")
private int thumbnailSize;
@SerializedName("ThumbnailX")
private int thumbnailX;
@SerializedName("ThumbnailY")
private int thumbnailY;
@SerializedName("Volume")
private int volume;
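	// Presumably filled in locally by the application (downloaded image files
	// and the owning Serie) rather than deserialized from the API response.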
private File normalFile;
private File thumbnailFile;
private Serie parent;
public String getMime()
{
return mime;
}
public void setMime(String mime)
{
this.mime = mime;
}
public URL getNormal()
{
return normal;
}
public void setNormal(URL normal)
{
this.normal = normal;
}
public int getNormalSize()
{
return normalSize;
}
public void setNormalSize(int normalSize)
{
this.normalSize = normalSize;
}
public int getNormalX()
{
return normalX;
}
public void setNormalX(int normalX)
{
this.normalX = normalX;
}
public int getNormalY()
{
return normalY;
}
public void setNormalY(int normalY)
{
this.normalY = normalY;
}
public URL getRaw()
{
return raw;
}
public void setRaw(URL raw)
{
this.raw = raw;
}
public int getRawSize()
{
return rawSize;
}
public void setRawSize(int rawSize)
{
this.rawSize = rawSize;
}
public int getRawX()
{
return rawX;
}
public void setRawX(int rawX)
{
this.rawX = rawX;
}
public int getRawY()
{
return rawY;
}
public void setRawY(int rawY)
{
this.rawY = rawY;
}
public String getSide()
{
return side;
}
public void setSide(String side)
{
this.side = side;
}
public URL getThumbnail()
{
return thumbnail;
}
public void setThumbnail(URL thumbnail)
{
this.thumbnail = thumbnail;
}
public int getThumbnailSize()
{
return thumbnailSize;
}
public void setThumbnailSize(int thumbnailSize)
{
this.thumbnailSize = thumbnailSize;
}
public int getThumbnailX()
{
return thumbnailX;
}
public void setThumbnailX(int thumbnailX)
{
this.thumbnailX = thumbnailX;
}
public int getThumbnailY()
{
return thumbnailY;
}
public void setThumbnailY(int thumbnailY)
{
this.thumbnailY = thumbnailY;
}
public int getVolume()
{
return volume;
}
public void setVolume(int volume)
{
this.volume = volume;
}
public File getNormalFile()
{
return normalFile;
}
public void setNormalFile(File normalFile)
{
this.normalFile = normalFile;
}
public Serie getParent()
{
return parent;
}
public void setParent(Serie parent)
{
this.parent = parent;
}
public File getThumbnailFile()
{
return thumbnailFile;
}
public void setThumbnailFile(File thumbnailFile)
{
this.thumbnailFile = thumbnailFile;
}
}
|
apache-2.0
|
jivesoftware/tasmo
|
code/com/jivesoftware/os/tasmo/tasmo-lib/src/test/java/com/jivesoftware/os/tasmo/lib/MultiViewsValuesTest.java
|
2433
|
/*
* $Revision$
* $Date$
*
* Copyright (C) 1999-$year$ Jive Software. All rights reserved.
*
* This software is the proprietary information of Jive Software. Use is subject to license terms.
*/
package com.jivesoftware.os.tasmo.lib;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.jivesoftware.os.jive.utils.id.Id;
import com.jivesoftware.os.jive.utils.id.ObjectId;
import com.jivesoftware.os.tasmo.event.api.write.EventBuilder;
import com.jivesoftware.os.tasmo.model.Views;
import org.testng.annotations.Test;
/**
 * Verifies that one User write event is materialized into several independent
 * view classes that all project the same User fields (userName and age), and
 * that each resulting view exposes the written userName value.
 */
public class MultiViewsValuesTest extends BaseTest {
@Test (dataProvider = "tasmoMaterializer", invocationCount = 1, singleThreaded = true)
public void testValues(TasmoMaterializerHarness t) throws Exception {
String viewClassName1 = "Values1";
String viewFieldName1 = "userInfo1";
String viewClassName2 = "Values2";
String viewFieldName2 = "userInfo2";
String viewClassName3 = "Values3";
String viewFieldName3 = "userInfo3";
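        // Three structurally identical view definitions, all projecting
        // User.userName and age into their own view class.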
Views views = TasmoModelFactory.modelToViews(
viewClassName1 + "::" + viewFieldName1 + "::User.userName,age",
viewClassName2 + "::" + viewFieldName2 + "::User.userName,age",
viewClassName3 + "::" + viewFieldName3 + "::User.userName,age");
t.initModel(views);
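        // A single User write should be fanned out to all three views.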
ObjectId user1 = t.write(
EventBuilder.create(t.idProvider(), "User", tenantId, actorId)
.set("userName", "ted")
.build());
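        // Read each materialized view back and dump it for inspection.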
ObjectNode view = t.readView(tenantId, actorId, new ObjectId(viewClassName1, user1.getId()), Id.NULL);
System.out.println(mapper.writeValueAsString(view));
view = t.readView(tenantId, actorId, new ObjectId(viewClassName2, user1.getId()), Id.NULL);
System.out.println(mapper.writeValueAsString(view));
view = t.readView(tenantId, actorId, new ObjectId(viewClassName3, user1.getId()), Id.NULL);
System.out.println(mapper.writeValueAsString(view));
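        // Every view class is expected to carry the userName written above.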
t.addExpectation(user1, viewClassName1, viewFieldName1, new ObjectId[]{ user1 }, "userName", "ted");
t.addExpectation(user1, viewClassName2, viewFieldName2, new ObjectId[]{ user1 }, "userName", "ted");
t.addExpectation(user1, viewClassName3, viewFieldName3, new ObjectId[]{ user1 }, "userName", "ted");
t.assertExpectation(tenantIdAndCentricId);
t.clearExpectations();
}
}
|
apache-2.0
|