text
stringlengths 2
100k
| meta
dict |
---|---|
---
---
------------------------
------------------------
1 : p0(a):-[p10(a)]
1 : p0(a):-[undefined]
1 : p0(b):-[p10(b)]
1 : p0(b):-[undefined]
1 : p10(a):-[p9(a)]
1 : p10(b):-[p9(b)]
1 : p1(a):-[p0(a)]
1 : p1(b):-[p0(b)]
1 : p2(a):-[p1(a)]
1 : p2(b):-[p1(b)]
1 : p3(a):-[p2(a)]
1 : p3(b):-[p2(b)]
1 : p4(a):-[p3(a)]
1 : p4(b):-[p3(b)]
1 : p5(a):-[p4(a)]
1 : p5(b):-[p4(b)]
1 : p6(a):-[p5(a)]
1 : p6(b):-[p5(b)]
1 : p7(a):-[p6(a)]
1 : p7(b):-[p6(b)]
1 : p8(a):-[p7(a)]
1 : p8(b):-[p7(b)]
1 : p9(a):-[p8(a)]
1 : p9(b):-[p8(b)]
1 : undefined:-[tnot undefined]
2 : undefined:-[tnot undefined]
| {
"pile_set_name": "Github"
} |
/*============================================================================
The Medical Imaging Interaction Toolkit (MITK)
Copyright (c) German Cancer Research Center (DKFZ)
All rights reserved.
Use of this source code is governed by a 3-clause BSD license that can be
found in the LICENSE file.
============================================================================*/
#ifndef MITKABSTRACTFILEIO_H
#define MITKABSTRACTFILEIO_H
#include "mitkAbstractFileReader.h"
#include "mitkAbstractFileWriter.h"
namespace mitk
{
#ifndef DOXYGEN_SKIP
// Skip this code during Doxygen processing, because it only
// exists to resolve name clashes when inheriting from both
// AbstractFileReader and AbstractFileWriter.
// Helper base class that resolves the GetConfidenceLevel()/Clone() name
// clashes arising when a class (AbstractFileIO below) inherits from both
// AbstractFileReader and AbstractFileWriter: the reader-side virtuals are
// renamed to GetReaderConfidenceLevel()/ReaderClone() and the inherited
// names simply forward to them.
class AbstractFileIOReader : public AbstractFileReader
{
public:
// Reader-specific confidence query; by default delegates to the base class.
virtual ConfidenceLevel GetReaderConfidenceLevel() const { return AbstractFileReader::GetConfidenceLevel(); }
// Disambiguated override: routes the inherited name to the renamed virtual.
ConfidenceLevel GetConfidenceLevel() const override { return this->GetReaderConfidenceLevel(); }
protected:
AbstractFileIOReader() {}
AbstractFileIOReader(const CustomMimeType &mimeType, const std::string &description)
: AbstractFileReader(mimeType, description)
{
}
private:
// Subclasses implement cloning under this name instead of Clone().
virtual IFileReader *ReaderClone() const = 0;
IFileReader *Clone() const override { return ReaderClone(); }
};
// Writer-side counterpart of AbstractFileIOReader: renames the
// GetConfidenceLevel()/Clone() virtuals to writer-specific names so that
// AbstractFileIO can inherit from both reader and writer without ambiguity.
struct AbstractFileIOWriter : public AbstractFileWriter
{
// Writer-specific confidence query; by default delegates to the base class.
virtual ConfidenceLevel GetWriterConfidenceLevel() const { return AbstractFileWriter::GetConfidenceLevel(); }
// Disambiguated override: routes the inherited name to the renamed virtual.
ConfidenceLevel GetConfidenceLevel() const override { return this->GetWriterConfidenceLevel(); }
protected:
AbstractFileIOWriter(const std::string &baseDataType) : AbstractFileWriter(baseDataType) {}
AbstractFileIOWriter(const std::string &baseDataType,
const CustomMimeType &mimeType,
const std::string &description)
: AbstractFileWriter(baseDataType, mimeType, description)
{
}
private:
// Subclasses implement cloning under this name instead of Clone().
virtual IFileWriter *WriterClone() const = 0;
IFileWriter *Clone() const override { return WriterClone(); }
};
#endif // DOXYGEN_SKIP
/**
* @ingroup IO
*
* @brief Abstract class for implementing a reader and writer.
*/
class MITKCORE_EXPORT AbstractFileIO : public AbstractFileIOReader, public AbstractFileIOWriter
{
public:
// Reader-side option accessors (key/value configuration of the reader).
Options GetReaderOptions() const;
us::Any GetReaderOption(const std::string &name) const;
void SetReaderOptions(const Options &options);
void SetReaderOption(const std::string &name, const us::Any &value);
// Writer-side option accessors (key/value configuration of the writer).
Options GetWriterOptions() const;
us::Any GetWriterOption(const std::string &name) const;
void SetWriterOptions(const Options &options);
void SetWriterOption(const std::string &name, const us::Any &value);
ConfidenceLevel GetReaderConfidenceLevel() const override;
ConfidenceLevel GetWriterConfidenceLevel() const override;
// Registers this object as both an IFileReader and an IFileWriter service
// and returns both service registrations.
std::pair<us::ServiceRegistration<IFileReader>, us::ServiceRegistration<IFileWriter>> RegisterService(
us::ModuleContext *context = us::GetModuleContext());
protected:
AbstractFileIO(const AbstractFileIO &other);
AbstractFileIO(const std::string &baseDataType);
/**
* Associate this reader instance with the given MIME type.
*
* If the given MIME type has nothing but its name set, the according MIME type
* is looked up in the service registry.
*
* @param mimeType The MIME type this reader can read.
* @param description A human readable description of this reader.
*
* @throws std::invalid_argument if \c mimeType is empty.
*
* @see RegisterService
*/
explicit AbstractFileIO(const std::string &baseDataType,
const CustomMimeType &mimeType,
const std::string &description);
void SetMimeType(const CustomMimeType &mimeType);
/**
* @return The mime-type this reader can handle.
*/
const CustomMimeType *GetMimeType() const;
// Human-readable descriptions, kept separately for the reader and writer role.
void SetReaderDescription(const std::string &description);
std::string GetReaderDescription() const;
void SetWriterDescription(const std::string &description);
std::string GetWriterDescription() const;
// Default option sets used when the caller does not supply any.
void SetDefaultReaderOptions(const Options &defaultOptions);
Options GetDefaultReaderOptions() const;
void SetDefaultWriterOptions(const Options &defaultOptions);
Options GetDefaultWriterOptions() const;
/**
* \brief Set the service ranking for this file reader.
*
* Default is zero and should only be chosen differently for a reason.
* The ranking is used to determine which reader to use if several
* equivalent readers have been found.
* It may be used to replace a default reader from MITK in your own project.
* E.g. if you want to use your own reader for nrrd files instead of the default,
* implement it and give it a higher ranking than zero.
*/
void SetReaderRanking(int ranking);
int GetReaderRanking() const;
void SetWriterRanking(int ranking);
int GetWriterRanking() const;
private:
// Copy-assignment is private: instances are cloned, not assigned.
AbstractFileIO &operator=(const AbstractFileIO &other);
// Subclasses clone themselves once; reader/writer clones forward to it.
virtual AbstractFileIO *IOClone() const = 0;
IFileReader *ReaderClone() const override;
IFileWriter *WriterClone() const override;
};
}
#endif // MITKABSTRACTFILEIO_H
| {
"pile_set_name": "Github"
} |
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// The LazyInstance<Type, Traits> class manages a single instance of Type,
// which will be lazily created on the first time it's accessed. This class is
// useful for places you would normally use a function-level static, but you
// need to have guaranteed thread-safety. The Type constructor will only ever
// be called once, even if two threads are racing to create the object. Get()
// and Pointer() will always return the same, completely initialized instance.
//
// LazyInstance is completely thread safe, assuming that you create it safely.
// The class was designed to be POD initialized, so it shouldn't require a
// static constructor. It really only makes sense to declare a LazyInstance as
// a global variable using the LAZY_INSTANCE_INITIALIZER initializer.
//
// LazyInstance is similar to Singleton, except it does not have the singleton
// property. You can have multiple LazyInstance's of the same type, and each
// will manage a unique instance. It also preallocates the space for Type, as
// to avoid allocating the Type instance on the heap. This may help with the
// performance of creating the instance, and reducing heap fragmentation. This
// requires that Type be a complete type so we can determine the size. See
// notes for advanced users below for more explanations.
//
// Example usage:
// static LazyInstance<MyClass>::type my_instance = LAZY_INSTANCE_INITIALIZER;
// void SomeMethod() {
// my_instance.Get().SomeMethod(); // MyClass::SomeMethod()
//
// MyClass* ptr = my_instance.Pointer();
// ptr->DoDoDo(); // MyClass::DoDoDo
// }
//
// Additionally you can override the way your instance is constructed by
// providing your own trait:
// Example usage:
// struct MyCreateTrait {
// static void Construct(MyClass* allocated_ptr) {
// new (allocated_ptr) MyClass(/* extra parameters... */);
// }
// };
// static LazyInstance<MyClass, MyCreateTrait>::type my_instance =
// LAZY_INSTANCE_INITIALIZER;
//
// WARNINGS:
// - This implementation of LazyInstance is NOT THREAD-SAFE by default. See
// ThreadSafeInitOnceTrait declared below for that.
// - Lazy initialization comes with a cost. Make sure that you don't use it on
// critical path. Consider adding your initialization code to a function
// which is explicitly called once.
//
// Notes for advanced users:
// LazyInstance can actually be used in two different ways:
//
// - "Static mode" which is the default mode since it is the most efficient
// (no extra heap allocation). In this mode, the instance is statically
// allocated (stored in the global data section at compile time).
// The macro LAZY_STATIC_INSTANCE_INITIALIZER (= LAZY_INSTANCE_INITIALIZER)
// must be used to initialize static lazy instances.
//
// - "Dynamic mode". In this mode, the instance is dynamically allocated and
// constructed (using new) by default. This mode is useful if you have to
// deal with some code already allocating the instance for you (e.g.
// OS::Mutex() which returns a new private OS-dependent subclass of Mutex).
// The macro LAZY_DYNAMIC_INSTANCE_INITIALIZER must be used to initialize
// dynamic lazy instances.
#ifndef V8_LAZY_INSTANCE_H_
#define V8_LAZY_INSTANCE_H_
#include "once.h"
namespace v8 {
namespace internal {
#define LAZY_STATIC_INSTANCE_INITIALIZER { V8_ONCE_INIT, {} }
#define LAZY_DYNAMIC_INSTANCE_INITIALIZER { V8_ONCE_INIT, 0 }
// Default to static mode.
#define LAZY_INSTANCE_INITIALIZER LAZY_STATIC_INSTANCE_INITIALIZER
// Destruction trait that intentionally leaks the instance: Destroy() is a
// no-op, so the managed object lives for the remainder of the process.
template <typename Type>
struct LeakyInstanceTrait {
  static void Destroy(Type* /* instance */) {
    // Deliberately empty -- leaky by design.
  }
};
// Traits that define how an instance is allocated and accessed.
// TODO(kalmard): __alignof__ is only defined for GCC > 4.2. Fix alignment issue
// on MIPS with other compilers.
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ > 2))
#define LAZY_ALIGN(x) __attribute__((aligned(__alignof__(x))))
#else
#define LAZY_ALIGN(x)
#endif
// Allocation trait for "static mode": the instance lives inside a raw,
// suitably aligned char buffer embedded in the LazyInstance itself, so no
// heap allocation is ever performed.
template <typename T>
struct StaticallyAllocatedInstanceTrait {
// Raw storage large enough for a T; LAZY_ALIGN enforces T's alignment on
// compilers that support __alignof__ (see the #if above).
typedef char StorageType[sizeof(T)] LAZY_ALIGN(T);
static T* MutableInstance(StorageType* storage) {
return reinterpret_cast<T*>(storage);
}
// Constructs the instance in-place inside the buffer via the given trait.
template <typename ConstructTrait>
static void InitStorageUsingTrait(StorageType* storage) {
ConstructTrait::Construct(MutableInstance(storage));
}
};
#undef LAZY_ALIGN
// Allocation trait for "dynamic mode": the storage slot is simply a pointer,
// and the instance itself is produced (heap-allocated) by CreateTrait.
template <typename T>
struct DynamicallyAllocatedInstanceTrait {
  typedef T* StorageType;

  // The slot holds the pointer; dereference it to reach the instance.
  static T* MutableInstance(StorageType* slot) {
    return *slot;
  }

  // Fills the slot with a freshly created instance.
  template <typename CreateTrait>
  static void InitStorageUsingTrait(StorageType* slot) {
    *slot = CreateTrait::Create();
  }
};
// Default construction trait for static mode: placement-news a
// default-constructed T into memory that was already set aside.
template <typename T>
struct DefaultConstructTrait {
  // Constructs the provided object which was already allocated.
  static void Construct(T* allocated_ptr) {
    new (allocated_ptr) T();
  }
};
// Default creation trait for dynamic mode: heap-allocates a
// default-constructed T. Ownership passes to the caller (the lazy instance).
template <typename T>
struct DefaultCreateTrait {
  static T* Create() {
    return new T();
  }
};
// Thread-safe once-initialization trait: delegates to CallOnce (from
// "once.h"), so the init function runs exactly once even under races.
struct ThreadSafeInitOnceTrait {
template <typename Function, typename Storage>
static void Init(OnceType* once, Function function, Storage storage) {
CallOnce(once, function, storage);
}
};
// Initialization trait for users who don't care about thread-safety.
// Performs a plain (non-atomic) check-and-run: only correct when every
// access to the lazy instance happens on a single thread.
struct SingleThreadInitOnceTrait {
  template <typename Function, typename Storage>
  static void Init(OnceType* once, Function function, Storage storage) {
    if (*once != ONCE_STATE_UNINITIALIZED) return;
    function(storage);
    *once = ONCE_STATE_DONE;
  }
};
// TODO(pliard): Handle instances destruction (using global destructors).
// Core implementation shared by the LazyStaticInstance / LazyDynamicInstance
// typedefs below. Behavior is fully determined by the traits:
//  - AllocationTrait: where the instance lives and how to reach it,
//  - CreateTrait:     how the instance is constructed/created,
//  - InitOnceTrait:   single- vs multi-threaded init guarding,
//  - DestroyTrait:    currently unused (instances are never destroyed).
// Must stay POD-initializable via LAZY_INSTANCE_INITIALIZER (no ctor).
template <typename T, typename AllocationTrait, typename CreateTrait,
typename InitOnceTrait, typename DestroyTrait /* not used yet. */>
struct LazyInstanceImpl {
public:
typedef typename AllocationTrait::StorageType StorageType;
private:
// Adapter with a plain function signature so its address can be handed to
// the once-init machinery below.
static void InitInstance(StorageType* storage) {
AllocationTrait::template InitStorageUsingTrait<CreateTrait>(storage);
}
// const so it can be called from Get() const; the mutable fields below make
// the lazy initialization legal.
void Init() const {
InitOnceTrait::Init(
&once_,
// Casts to void* are needed here to avoid breaking strict aliasing
// rules.
reinterpret_cast<void(*)(void*)>(&InitInstance), // NOLINT
reinterpret_cast<void*>(&storage_));
}
public:
// Both accessors initialize on first use and always return the same,
// fully constructed instance afterwards.
T* Pointer() {
Init();
return AllocationTrait::MutableInstance(&storage_);
}
const T& Get() const {
Init();
return *AllocationTrait::MutableInstance(&storage_);
}
mutable OnceType once_;
// Note that the previous field, OnceType, is an AtomicWord which guarantees
// 4-byte alignment of the storage field below. If compiling with GCC (>4.2),
// the LAZY_ALIGN macro above will guarantee correctness for any alignment.
mutable StorageType storage_;
};
// Convenience alias: lazy instance stored in static (in-object) storage,
// default-constructed in place. Initialize with
// LAZY_STATIC_INSTANCE_INITIALIZER.
template <typename T,
typename CreateTrait = DefaultConstructTrait<T>,
typename InitOnceTrait = SingleThreadInitOnceTrait,
typename DestroyTrait = LeakyInstanceTrait<T> >
struct LazyStaticInstance {
typedef LazyInstanceImpl<T, StaticallyAllocatedInstanceTrait<T>,
CreateTrait, InitOnceTrait, DestroyTrait> type;
};
// The default LazyInstance alias; identical to LazyStaticInstance (static
// mode), matching LAZY_INSTANCE_INITIALIZER above.
template <typename T,
typename CreateTrait = DefaultConstructTrait<T>,
typename InitOnceTrait = SingleThreadInitOnceTrait,
typename DestroyTrait = LeakyInstanceTrait<T> >
struct LazyInstance {
// A LazyInstance is a LazyStaticInstance.
typedef typename LazyStaticInstance<T, CreateTrait, InitOnceTrait,
DestroyTrait>::type type;
};
// Convenience alias: lazy instance allocated on the heap by CreateTrait
// (note the different default, DefaultCreateTrait). Initialize with
// LAZY_DYNAMIC_INSTANCE_INITIALIZER.
template <typename T,
typename CreateTrait = DefaultCreateTrait<T>,
typename InitOnceTrait = SingleThreadInitOnceTrait,
typename DestroyTrait = LeakyInstanceTrait<T> >
struct LazyDynamicInstance {
typedef LazyInstanceImpl<T, DynamicallyAllocatedInstanceTrait<T>,
CreateTrait, InitOnceTrait, DestroyTrait> type;
};
} } // namespace v8::internal
#endif // V8_LAZY_INSTANCE_H_
| {
"pile_set_name": "Github"
} |
//
// MenuItemCollection.cs
//
// Author:
// Lluis Sanchez <[email protected]>
//
// Copyright (c) 2011 Xamarin Inc
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections.ObjectModel;
namespace Xwt
{
/// <summary>
/// Item collection that keeps its owning <see cref="Menu"/> in sync:
/// every structural change made through the collection (insert, remove,
/// replace, clear) is mirrored onto the parent menu.
/// </summary>
public class MenuItemCollection: Collection<MenuItem>
{
	Menu parent;

	internal MenuItemCollection (Menu parent)
	{
		this.parent = parent;
	}

	protected override void InsertItem (int index, MenuItem item)
	{
		// Update the backing list first, then notify the owning menu.
		base.InsertItem (index, item);
		parent.InsertItem (index, item);
	}

	protected override void RemoveItem (int index)
	{
		// Capture the item before the base class drops it from the list.
		MenuItem removed = this[index];
		base.RemoveItem (index);
		parent.RemoveItem (removed);
	}

	protected override void SetItem (int index, MenuItem item)
	{
		// Replacement = detach the old item, attach the new one in its slot.
		MenuItem replaced = this[index];
		base.SetItem (index, item);
		parent.RemoveItem (replaced);
		parent.InsertItem (index, item);
	}

	protected override void ClearItems ()
	{
		// Detach every item from the parent menu before clearing the list.
		for (int i = 0; i < Count; i++)
			parent.RemoveItem (this[i]);
		base.ClearItems ();
	}
}
}
| {
"pile_set_name": "Github"
} |
/* -*- mode: c++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
/*
Copyright (C) 2009, 2012 StatPro Italia srl
This file is part of QuantLib, a free-software/open-source library
for financial quantitative analysts and developers - http://quantlib.org/
QuantLib is free software: you can redistribute it and/or modify it
under the terms of the QuantLib license. You should have received a
copy of the license along with this program; if not, please email
<[email protected]>. The license is also available online at
<http://quantlib.org/license.shtml>.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the license for more details.
*/
#include "cashflows.hpp"
#include "utilities.hpp"
#include <ql/cashflows/cashflows.hpp>
#include <ql/cashflows/simplecashflow.hpp>
#include <ql/cashflows/fixedratecoupon.hpp>
#include <ql/cashflows/floatingratecoupon.hpp>
#include <ql/cashflows/iborcoupon.hpp>
#include <ql/cashflows/couponpricer.hpp>
#include <ql/termstructures/volatility/optionlet/constantoptionletvol.hpp>
#include <ql/quotes/simplequote.hpp>
#include <ql/time/calendars/target.hpp>
#include <ql/time/daycounters/actualactual.hpp>
#include <ql/time/schedule.hpp>
#include <ql/indexes/ibor/euribor.hpp>
#include <ql/indexes/ibor/usdlibor.hpp>
#include <ql/settings.hpp>
using namespace QuantLib;
using namespace boost::unit_test_framework;
using boost::none;
// Verifies that the global Settings flags (includeReferenceDateEvents and
// the per-date includeTodaysCashFlows override) control both hasOccurred()
// and CashFlows::npv() as documented. Uses three unit cash flows at T+0,
// T+1, T+2 and checks inclusion from several settlement dates.
void CashFlowsTest::testSettings() {
BOOST_TEST_MESSAGE("Testing cash-flow settings...");
SavedSettings backup;
Date today = Date::todaysDate();
Settings::instance().evaluationDate() = today;
// cash flows at T+0, T+1, T+2
std::vector<ext::shared_ptr<CashFlow> > leg;
leg.reserve(3);
for (Integer i = 0; i < 3; ++i)
leg.push_back(ext::shared_ptr<CashFlow>(new SimpleCashFlow(1.0, today+i)));
// Checks whether the cash flow at T+n is (not) seen as still outstanding
// when settling at T+days; "included" means hasOccurred() returns false.
#define CHECK_INCLUSION(n, days, expected) \
if ((!leg[n]->hasOccurred(today+days)) != expected) { \
BOOST_ERROR("cashflow at T+" << n << " " \
<< (expected ? "not" : "") << "included" \
<< " at T+" << days); \
}
// case 1: don't include reference-date payments, no override at
// today's date
Settings::instance().includeReferenceDateEvents() = false;
Settings::instance().includeTodaysCashFlows() = none;
CHECK_INCLUSION(0, 0, false);
CHECK_INCLUSION(0, 1, false);
CHECK_INCLUSION(1, 0, true);
CHECK_INCLUSION(1, 1, false);
CHECK_INCLUSION(1, 2, false);
CHECK_INCLUSION(2, 1, true);
CHECK_INCLUSION(2, 2, false);
CHECK_INCLUSION(2, 3, false);
// case 2: same, but with explicit setting at today's date
Settings::instance().includeReferenceDateEvents() = false;
Settings::instance().includeTodaysCashFlows() = false;
CHECK_INCLUSION(0, 0, false);
CHECK_INCLUSION(0, 1, false);
CHECK_INCLUSION(1, 0, true);
CHECK_INCLUSION(1, 1, false);
CHECK_INCLUSION(1, 2, false);
CHECK_INCLUSION(2, 1, true);
CHECK_INCLUSION(2, 2, false);
CHECK_INCLUSION(2, 3, false);
// case 3: do include reference-date payments, no override at
// today's date
Settings::instance().includeReferenceDateEvents() = true;
Settings::instance().includeTodaysCashFlows() = none;
CHECK_INCLUSION(0, 0, true);
CHECK_INCLUSION(0, 1, false);
CHECK_INCLUSION(1, 0, true);
CHECK_INCLUSION(1, 1, true);
CHECK_INCLUSION(1, 2, false);
CHECK_INCLUSION(2, 1, true);
CHECK_INCLUSION(2, 2, true);
CHECK_INCLUSION(2, 3, false);
// case 4: do include reference-date payments, explicit (and same)
// setting at today's date
Settings::instance().includeReferenceDateEvents() = true;
Settings::instance().includeTodaysCashFlows() = true;
CHECK_INCLUSION(0, 0, true);
CHECK_INCLUSION(0, 1, false);
CHECK_INCLUSION(1, 0, true);
CHECK_INCLUSION(1, 1, true);
CHECK_INCLUSION(1, 2, false);
CHECK_INCLUSION(2, 1, true);
CHECK_INCLUSION(2, 2, true);
CHECK_INCLUSION(2, 3, false);
// case 5: do include reference-date payments, override at
// today's date
Settings::instance().includeReferenceDateEvents() = true;
Settings::instance().includeTodaysCashFlows() = false;
CHECK_INCLUSION(0, 0, false);
CHECK_INCLUSION(0, 1, false);
CHECK_INCLUSION(1, 0, true);
CHECK_INCLUSION(1, 1, true);
CHECK_INCLUSION(1, 2, false);
CHECK_INCLUSION(2, 1, true);
CHECK_INCLUSION(2, 2, true);
CHECK_INCLUSION(2, 3, false);
// no discount to make calculations easier
InterestRate no_discount(0.0, Actual365Fixed(), Continuous, Annual);
// With zero rates the NPV is just the count of included unit cash flows,
// so 2.0 vs 3.0 distinguishes "today's flow excluded" from "included".
#define CHECK_NPV(includeRef, expected) \
do { \
Real NPV = CashFlows::npv(leg, no_discount, includeRef, today); \
if (std::fabs(NPV - expected) > 1e-6) { \
BOOST_ERROR("NPV mismatch:\n" \
<< " calculated: " << NPV << "\n" \
<< " expected: " << expected); \
} \
} while (false);
// no override
Settings::instance().includeTodaysCashFlows() = none;
CHECK_NPV(false, 2.0);
CHECK_NPV(true, 3.0);
// override
Settings::instance().includeTodaysCashFlows() = false;
CHECK_NPV(false, 2.0);
CHECK_NPV(true, 2.0);
}
// Regression test: pricing a plain FloatingRateCoupon (not an IborCoupon)
// with BlackIborCouponPricer caused an access violation in QuantLib 1.0;
// the pricer is now expected to throw a proper Error instead of crashing.
void CashFlowsTest::testAccessViolation() {
BOOST_TEST_MESSAGE("Testing dynamic cast of coupon in Black pricer...");
SavedSettings backup;
Date todaysDate(7, April, 2010);
Date settlementDate(9, April, 2010);
Settings::instance().evaluationDate() = todaysDate;
Calendar calendar = TARGET();
Handle<YieldTermStructure> rhTermStructure(
flatRate(settlementDate, 0.04875825, Actual365Fixed()));
Volatility volatility = 0.10;
Handle<OptionletVolatilityStructure> vol;
vol = Handle<OptionletVolatilityStructure>(
ext::shared_ptr<OptionletVolatilityStructure>(
new ConstantOptionletVolatility(
2,
calendar,
ModifiedFollowing,
volatility,
Actual365Fixed())));
ext::shared_ptr<IborIndex> index3m (new USDLibor(3*Months,
rhTermStructure));
Date payDate(20, December, 2013);
Date startDate(20, September, 2013);
Date endDate(20, December, 2013);
Rate spread = 0.0115;
ext::shared_ptr<IborCouponPricer> pricer(new BlackIborCouponPricer(vol));
// Note: a base-class FloatingRateCoupon is built on purpose -- the bug was
// triggered by the pricer's dynamic cast to the more derived coupon type.
ext::shared_ptr<FloatingRateCoupon> coupon(
new FloatingRateCoupon(payDate,100, startDate, endDate, 2,
index3m, 1.0 , spread / 100));
coupon->setPricer(pricer);
try {
// this caused an access violation in version 1.0
coupon->amount();
} catch (Error&) {
// ok; proper exception thrown
}
}
// Checks that the accrual inspectors (accruedPeriod/accruedDays/
// accruedAmount) fall back to a sensible default settlement date when none
// is passed: for a leg already two months into its first period, all three
// must be non-zero.
void CashFlowsTest::testDefaultSettlementDate() {
BOOST_TEST_MESSAGE("Testing default evaluation date in cashflows methods...");
Date today = Settings::instance().evaluationDate();
Schedule schedule =
MakeSchedule()
.from(today-2*Months).to(today+4*Months)
.withFrequency(Semiannual)
.withCalendar(TARGET())
.withConvention(Unadjusted)
.backwards();
Leg leg = FixedRateLeg(schedule)
.withNotionals(100.0)
.withCouponRates(0.03, Actual360())
.withPaymentCalendar(TARGET())
.withPaymentAdjustment(Following);
Time accruedPeriod = CashFlows::accruedPeriod(leg, false);
if (accruedPeriod == 0.0)
BOOST_ERROR("null accrued period with default settlement date");
Date::serial_type accruedDays = CashFlows::accruedDays(leg, false);
if (accruedDays == 0)
BOOST_ERROR("no accrued days with default settlement date");
Real accruedAmount = CashFlows::accruedAmount(leg, false);
if (accruedAmount == 0.0)
BOOST_ERROR("null accrued amount with default settlement date");
}
// Regression test: building an IborLeg with fixingDays = Null<Natural>()
// used to throw; now the construction itself must succeed (no assertions
// needed -- not throwing is the pass condition).
void CashFlowsTest::testNullFixingDays() {
BOOST_TEST_MESSAGE("Testing ibor leg construction with null fixing days...");
Date today = Settings::instance().evaluationDate();
Schedule schedule =
MakeSchedule()
.from(today-2*Months).to(today+4*Months)
.withFrequency(Semiannual)
.withCalendar(TARGET())
.withConvention(Following)
.backwards();
ext::shared_ptr<IborIndex> index(new USDLibor(6*Months));
Leg leg = IborLeg(schedule, index)
.withNotionals(100.0)
// this can happen with default values, and caused an
// exception when the null was not managed properly
.withFixingDays(Null<Natural>());
}
// Checks ex-coupon date calculation on fixed and floating legs:
// no ex-coupon period => null date; a 2-calendar-day period => accrual end
// minus 2; a 2-business-day period => TARGET-adjusted accrual end minus 2.
// NOTE(review): the dynamic_pointer_cast results are dereferenced without a
// null check; presumably safe because FixedRateLeg/IborLeg produce only
// Coupon instances, but a BOOST_REQUIRE on c would make failures clearer.
void CashFlowsTest::testExCouponDates() {
BOOST_TEST_MESSAGE("Testing ex-coupon date calculation...");
Date today = Date::todaysDate();
Schedule schedule = MakeSchedule()
.from(today)
.to(today + 5 * Years)
.withFrequency(Monthly)
.withCalendar(TARGET())
.withConvention(Following);
// no ex-coupon dates
Leg l1 = FixedRateLeg(schedule).withNotionals(100.0).withCouponRates(0.03, Actual360());
for (Size i = 0; i < l1.size(); ++i) {
ext::shared_ptr<Coupon> c = ext::dynamic_pointer_cast<Coupon>(l1[i]);
if (c->exCouponDate() != Date()) {
BOOST_ERROR("ex-coupon date found (none expected)");
}
}
// same for floating legs
ext::shared_ptr<IborIndex> index(new Euribor3M);
Leg l2 = IborLeg(schedule, index).withNotionals(100.0);
for (Size i = 0; i < l2.size(); ++i) {
ext::shared_ptr<Coupon> c = ext::dynamic_pointer_cast<Coupon>(l2[i]);
if (c->exCouponDate() != Date()) {
BOOST_ERROR("ex-coupon date found (none expected)");
}
}
// calendar days
Leg l5 = FixedRateLeg(schedule)
.withNotionals(100.0)
.withCouponRates(0.03, Actual360())
.withExCouponPeriod(Period(2, Days), NullCalendar(), Unadjusted, false);
for (Size i = 0; i < l5.size(); ++i) {
ext::shared_ptr<Coupon> c = ext::dynamic_pointer_cast<Coupon>(l5[i]);
Date expected = c->accrualEndDate() - 2;
if (c->exCouponDate() != expected) {
BOOST_ERROR("ex-coupon date = " << c->exCouponDate() << " (" << expected
<< " expected)");
}
}
Leg l6 = IborLeg(schedule, index)
.withNotionals(100.0)
.withExCouponPeriod(Period(2, Days), NullCalendar(), Unadjusted, false);
for (Size i = 0; i < l6.size(); ++i) {
ext::shared_ptr<Coupon> c = ext::dynamic_pointer_cast<Coupon>(l6[i]);
Date expected = c->accrualEndDate() - 2;
if (c->exCouponDate() != expected) {
BOOST_ERROR("ex-coupon date = " << c->exCouponDate() << " (" << expected
<< " expected)");
}
}
// business days
Leg l7 = FixedRateLeg(schedule)
.withNotionals(100.0)
.withCouponRates(0.03, Actual360())
.withExCouponPeriod(Period(2, Days), TARGET(), Preceding, false);
for (Size i = 0; i < l7.size(); ++i) {
ext::shared_ptr<Coupon> c = ext::dynamic_pointer_cast<Coupon>(l7[i]);
Date expected = TARGET().advance(c->accrualEndDate(), -2, Days);
if (c->exCouponDate() != expected) {
BOOST_ERROR("ex-coupon date = " << c->exCouponDate() << " (" << expected
<< " expected)");
}
}
Leg l8 = IborLeg(schedule, index)
.withNotionals(100.0)
.withExCouponPeriod(Period(2, Days), TARGET(), Preceding, false);
for (Size i = 0; i < l8.size(); ++i) {
ext::shared_ptr<Coupon> c = ext::dynamic_pointer_cast<Coupon>(l8[i]);
Date expected = TARGET().advance(c->accrualEndDate(), -2, Days);
if (c->exCouponDate() != expected) {
BOOST_ERROR("ex-coupon date = " << c->exCouponDate() << " (" << expected
<< " expected)");
}
}
}
// With endOfMonth() and a backwards-generated semiannual schedule, the
// irregular first coupon's notional reference period must start on an
// end-of-month date (31 Aug 2016), not on a plain 6-month offset.
void CashFlowsTest::testIrregularFirstCouponReferenceDatesAtEndOfMonth() {
BOOST_TEST_MESSAGE("Testing irregular first coupon reference dates with end of month enabled...");
Schedule schedule =
MakeSchedule()
.from(Date(17, January, 2017)).to(Date(28, February, 2018))
.withFrequency(Semiannual)
.withConvention(Unadjusted)
.endOfMonth()
.backwards();
Leg leg = FixedRateLeg(schedule)
.withNotionals(100.0)
.withCouponRates(0.01, Actual360());
ext::shared_ptr<Coupon> firstCoupon =
ext::dynamic_pointer_cast<Coupon>(leg.front());
if (firstCoupon->referencePeriodStart() != Date(31, August, 2016))
BOOST_ERROR("Expected reference start date at end of month, "
"got " << firstCoupon->referencePeriodStart());
}
// Mirror of the previous test for the trailing stub: with endOfMonth() and
// a next-to-last date of 28 Feb 2018, the irregular last coupon's reference
// period must end on an end-of-month date (31 Aug 2018).
void CashFlowsTest::testIrregularLastCouponReferenceDatesAtEndOfMonth() {
BOOST_TEST_MESSAGE("Testing irregular last coupon reference dates with end of month enabled...");
Schedule schedule =
MakeSchedule()
.from(Date(17, January, 2017)).to(Date(15, September, 2018))
.withNextToLastDate(Date(28, February, 2018))
.withFrequency(Semiannual)
.withConvention(Unadjusted)
.endOfMonth()
.backwards();
Leg leg = FixedRateLeg(schedule)
.withNotionals(100.0)
.withCouponRates(0.01, Actual360());
ext::shared_ptr<Coupon> lastCoupon =
ext::dynamic_pointer_cast<Coupon>(leg.back());
if (lastCoupon->referencePeriodEnd() != Date(31, August, 2018))
BOOST_ERROR("Expected reference end date at end of month, "
"got " << lastCoupon->referencePeriodEnd());
}
void CashFlowsTest::testPartialScheduleLegConstruction() {
BOOST_TEST_MESSAGE("Testing leg construction with partial schedule...");
// schedule with irregular first and last period
Schedule schedule = MakeSchedule()
.from(Date(15, September, 2017))
.to(Date(30, September, 2020))
.withNextToLastDate(Date(25, September, 2020))
.withFrequency(Semiannual)
.backwards();
// same schedule, date based, with metadata
Schedule schedule2(schedule.dates(), NullCalendar(), Unadjusted, Unadjusted,
6 * Months, boost::none, schedule.endOfMonth(),
schedule.isRegular());
// same schedule, date based, without metadata
Schedule schedule3(schedule.dates());
// fixed rate legs based on the three schedule
Leg leg = FixedRateLeg(schedule).withNotionals(100.0).withCouponRates(
0.01, ActualActual(ActualActual::ISMA));
Leg leg2 = FixedRateLeg(schedule2).withNotionals(100.0).withCouponRates(
0.01, ActualActual(ActualActual::ISMA));
Leg leg3 = FixedRateLeg(schedule3).withNotionals(100.0).withCouponRates(
0.01, ActualActual(ActualActual::ISMA));
// check reference period of first and last coupon in all variants
// for the first two we expect a 6M reference period, for the
// third it can not be constructed, so should be equal to the
// respective schedule period
ext::shared_ptr<FixedRateCoupon> firstCpn =
ext::dynamic_pointer_cast<FixedRateCoupon>(leg.front());
ext::shared_ptr<FixedRateCoupon> lastCpn =
ext::dynamic_pointer_cast<FixedRateCoupon>(leg.back());
BOOST_REQUIRE(firstCpn != NULL);
BOOST_REQUIRE(lastCpn != NULL);
BOOST_CHECK_EQUAL(firstCpn->referencePeriodStart(), Date(25, Mar, 2017));
BOOST_CHECK_EQUAL(firstCpn->referencePeriodEnd(), Date(25, Sep, 2017));
BOOST_CHECK_EQUAL(lastCpn->referencePeriodStart(), Date(25, Sep, 2020));
BOOST_CHECK_EQUAL(lastCpn->referencePeriodEnd(), Date(25, Mar, 2021));
ext::shared_ptr<FixedRateCoupon> firstCpn2 =
ext::dynamic_pointer_cast<FixedRateCoupon>(leg2.front());
ext::shared_ptr<FixedRateCoupon> lastCpn2 =
ext::dynamic_pointer_cast<FixedRateCoupon>(leg2.back());
BOOST_REQUIRE(firstCpn2 != NULL);
BOOST_REQUIRE(lastCpn2 != NULL);
BOOST_CHECK_EQUAL(firstCpn2->referencePeriodStart(), Date(25, Mar, 2017));
BOOST_CHECK_EQUAL(firstCpn2->referencePeriodEnd(), Date(25, Sep, 2017));
BOOST_CHECK_EQUAL(lastCpn2->referencePeriodStart(), Date(25, Sep, 2020));
BOOST_CHECK_EQUAL(lastCpn2->referencePeriodEnd(), Date(25, Mar, 2021));
ext::shared_ptr<FixedRateCoupon> firstCpn3 =
ext::dynamic_pointer_cast<FixedRateCoupon>(leg3.front());
ext::shared_ptr<FixedRateCoupon> lastCpn3 =
ext::dynamic_pointer_cast<FixedRateCoupon>(leg3.back());
BOOST_REQUIRE(firstCpn3 != NULL);
BOOST_REQUIRE(lastCpn3 != NULL);
BOOST_CHECK_EQUAL(firstCpn3->referencePeriodStart(), Date(15, Sep, 2017));
BOOST_CHECK_EQUAL(firstCpn3->referencePeriodEnd(), Date(25, Sep, 2017));
BOOST_CHECK_EQUAL(lastCpn3->referencePeriodStart(), Date(25, Sep, 2020));
BOOST_CHECK_EQUAL(lastCpn3->referencePeriodEnd(), Date(30, Sep, 2020));
// same check as above for a floating leg
ext::shared_ptr<IborIndex> iborIndex =
ext::make_shared<USDLibor>(3 * Months);
Leg legf = IborLeg(schedule, iborIndex)
.withNotionals(100.0)
.withPaymentDayCounter(ActualActual(ActualActual::ISMA));
Leg legf2 = IborLeg(schedule2, iborIndex)
.withNotionals(100.0)
.withPaymentDayCounter(ActualActual(ActualActual::ISMA));
Leg legf3 = IborLeg(schedule3, iborIndex)
.withNotionals(100.0)
.withPaymentDayCounter(ActualActual(ActualActual::ISMA));
ext::shared_ptr<FloatingRateCoupon> firstCpnF =
ext::dynamic_pointer_cast<FloatingRateCoupon>(legf.front());
ext::shared_ptr<FloatingRateCoupon> lastCpnF =
ext::dynamic_pointer_cast<FloatingRateCoupon>(legf.back());
BOOST_REQUIRE(firstCpnF != NULL);
BOOST_REQUIRE(lastCpnF != NULL);
BOOST_CHECK_EQUAL(firstCpnF->referencePeriodStart(), Date(25, Mar, 2017));
BOOST_CHECK_EQUAL(firstCpnF->referencePeriodEnd(), Date(25, Sep, 2017));
BOOST_CHECK_EQUAL(lastCpnF->referencePeriodStart(), Date(25, Sep, 2020));
BOOST_CHECK_EQUAL(lastCpnF->referencePeriodEnd(), Date(25, Mar, 2021));
ext::shared_ptr<FloatingRateCoupon> firstCpnF2 =
ext::dynamic_pointer_cast<FloatingRateCoupon>(legf2.front());
ext::shared_ptr<FloatingRateCoupon> lastCpnF2 =
ext::dynamic_pointer_cast<FloatingRateCoupon>(legf2.back());
BOOST_REQUIRE(firstCpnF2 != NULL);
BOOST_REQUIRE(lastCpnF2 != NULL);
BOOST_CHECK_EQUAL(firstCpnF2->referencePeriodStart(), Date(25, Mar, 2017));
BOOST_CHECK_EQUAL(firstCpnF2->referencePeriodEnd(), Date(25, Sep, 2017));
BOOST_CHECK_EQUAL(lastCpnF2->referencePeriodStart(), Date(25, Sep, 2020));
BOOST_CHECK_EQUAL(lastCpnF2->referencePeriodEnd(), Date(25, Mar, 2021));
ext::shared_ptr<FloatingRateCoupon> firstCpnF3 =
ext::dynamic_pointer_cast<FloatingRateCoupon>(legf3.front());
ext::shared_ptr<FloatingRateCoupon> lastCpnF3 =
ext::dynamic_pointer_cast<FloatingRateCoupon>(legf3.back());
BOOST_REQUIRE(firstCpnF3 != NULL);
BOOST_REQUIRE(lastCpnF3 != NULL);
BOOST_CHECK_EQUAL(firstCpnF3->referencePeriodStart(), Date(15, Sep, 2017));
BOOST_CHECK_EQUAL(firstCpnF3->referencePeriodEnd(), Date(25, Sep, 2017));
BOOST_CHECK_EQUAL(lastCpnF3->referencePeriodStart(), Date(25, Sep, 2020));
BOOST_CHECK_EQUAL(lastCpnF3->referencePeriodEnd(), Date(30, Sep, 2020));
}
// Assembles the Boost.Test suite containing all cash-flow test cases.
// Registration order here is the execution order of the suite.
test_suite* CashFlowsTest::suite() {
    test_suite* suite = BOOST_TEST_SUITE("Cash flows tests");
    suite->add(QUANTLIB_TEST_CASE(&CashFlowsTest::testSettings));
    suite->add(QUANTLIB_TEST_CASE(&CashFlowsTest::testAccessViolation));
    suite->add(QUANTLIB_TEST_CASE(&CashFlowsTest::testDefaultSettlementDate));
    suite->add(QUANTLIB_TEST_CASE(&CashFlowsTest::testExCouponDates));
    // Null fixing days are only meaningful when Ibor coupons are priced
    // at par, so this case is registered conditionally.
    if (IborCoupon::usingAtParCoupons())
        suite->add(QUANTLIB_TEST_CASE(&CashFlowsTest::testNullFixingDays));
    suite->add(QUANTLIB_TEST_CASE(
        &CashFlowsTest::testIrregularFirstCouponReferenceDatesAtEndOfMonth));
    suite->add(QUANTLIB_TEST_CASE(
        &CashFlowsTest::testIrregularLastCouponReferenceDatesAtEndOfMonth));
    suite->add(QUANTLIB_TEST_CASE(
        &CashFlowsTest::testPartialScheduleLegConstruction));
    return suite;
}
| {
"pile_set_name": "Github"
} |
<?php
/**
* Access English lexicon topic
*
* @language en
* @package modx
* @subpackage lexicon
*/
// --- Category ACLs ------------------------------------------------------
$_lang['access_category_management_msg'] = 'Manage User Group member access to Elements via Categories and optionally apply access policies.';
$_lang['access_category_err_ae'] = 'An ACL for that Category already exists!';
$_lang['access_category_err_nf'] = 'Category ACL not found.';
$_lang['access_category_err_ns'] = 'Category ACL not specified.';
$_lang['access_category_err_remove'] = 'An error occurred while trying to remove the Category ACL.';
$_lang['access_category_remove'] = 'Remove Category Access';
$_lang['access_category_update'] = 'Update Category Access';
$_lang['access_confirm_remove'] = 'Are you sure you want to remove this security access control record?';
// --- Context ACLs -------------------------------------------------------
$_lang['access_context_management_msg'] = 'Manage User Group member access to Contexts and optionally apply access policies.';
$_lang['access_context_err_ae'] = 'An ACL for that Context already exists!';
$_lang['access_context_err_nf'] = 'Context ACL not found.';
$_lang['access_context_err_ns'] = 'Context ACL not specified.';
$_lang['access_context_err_remove'] = 'An error occurred while trying to remove the Context ACL.';
$_lang['access_context_remove'] = 'Remove Context Access';
$_lang['access_context_update'] = 'Update Context Access';
// --- Generic ACL errors and grid strings --------------------------------
$_lang['access_err_ae'] = 'ACL already exists!';
$_lang['access_err_create_md'] = 'Cannot create ACL; missing data!';
$_lang['access_err_nf'] = 'Could not find specified ACL!';
$_lang['access_err_remove'] = 'Error removing ACL!';
$_lang['access_err_save'] = 'Error saving ACL!';
$_lang['access_grid_empty'] = 'No ACLs to display';
$_lang['access_grid_paginate'] = 'Displaying ACLs {0} - {1} of {2}';
// --- Legacy "Access Permissions" screens --------------------------------
$_lang['access_permissions'] = 'Access Permissions';
$_lang['access_permissions_add_document_group'] = 'Create a new document group';
$_lang['access_permissions_add_user_group'] = 'Create a new user group';
$_lang['access_permissions_documents_in_group'] = '<strong>Documents in group:</strong> ';
$_lang['access_permissions_documents_tab'] = 'Here you can see which document groups have been set up. You can also create new groups, rename groups, delete groups and see which documents are in the different groups (hover over the id of the document to see its name). To add a document to a group or remove a document from a group, edit the document directly.';
$_lang['access_permissions_document_groups'] = 'Document groups';
$_lang['access_permissions_introtext'] = 'Here you can manage the user groups and document groups used for access permissions. To add user groups or users to user groups, use the left tree and right click. To link User Groups to Document Groups, simply drag user groups from the left tree into document groups in the right tree.';
$_lang['access_permissions_links'] = 'User/Document group links';
$_lang['access_permissions_links_tab'] = 'This is where we specify which user groups are given access (i.e. can edit or create children) for the document groups. To link a document group to a user group, select the group from the dropdown, and click on \'Link\'. To remove the link for a certain group, press \'Remove ->\'. This will immediately remove the link.';
$_lang['access_permissions_no_documents_in_group'] = 'None.';
$_lang['access_permissions_no_users_in_group'] = 'None.';
$_lang['access_permissions_off'] = '<span class="warning">Access Permissions are not activated.</span> This means any changes made here will not have any effect until Access Permissions are activated.';
$_lang['access_permissions_users_in_group'] = '<strong>Users in group:</strong> ';
$_lang['access_permissions_users_tab'] = 'Here you can see which user groups have been set up. You can also create new groups, rename groups, delete groups and see which users are members of the different groups. To add a new user to a group or to remove a user from a group, edit the user directly.';
$_lang['access_permissions_user_group'] = 'User group:';
$_lang['access_permissions_user_groups'] = 'User groups';
$_lang['access_permissions_user_group_access'] = 'Resource Groups this User Group has access to:';
$_lang['access_permissions_user_message'] = 'Here you can select which User Groups this user belongs to. You can drag and drop each row to reorder the rank of each User Group. The User Group that has a rank of 0 will be declared the User\'s Primary Group, and will be the User Group that determines the User\'s Dashboard.';
$_lang['access_permission_denied'] = 'You do not have the correct permissions for this document.';
$_lang['access_permission_parent_denied'] = 'You do not have permission to create or move a document here! Please choose another location.';
// --- Access Policies ----------------------------------------------------
$_lang['access_policy_err_nf'] = 'Access Policy not found.';
$_lang['access_policy_err_ns'] = 'Access Policy not specified.';
$_lang['access_policy_grid_empty'] = 'No policies to display';
$_lang['access_policy_grid_paginate'] = 'Displaying policies {0} - {1} of {2}';
// --- Resource Group ACLs ------------------------------------------------
$_lang['access_resourcegroup_management_msg'] = 'Manage User Group member access to Resource Groups and optionally apply access policies.';
$_lang['access_rgroup_err_ae'] = 'An ACL for that Resource Group already exists!';
$_lang['access_rgroup_err_nf'] = 'Resource Group ACL not found.';
$_lang['access_rgroup_err_ns'] = 'Resource Group ACL not specified.';
$_lang['access_rgroup_err_remove'] = 'An error occurred while trying to remove the Resource Group ACL.';
$_lang['access_rgroup_remove'] = 'Remove Resource Group Access';
$_lang['access_rgroup_update'] = 'Update Resource Group Access';
// --- Media Source ACLs --------------------------------------------------
$_lang['access_source_err_ae'] = 'An ACL for that Media Source already exists.';
$_lang['access_source_remove'] = 'Remove Media Source Access';
$_lang['access_source_update'] = 'Update Media Source Access';
// --- Miscellaneous ------------------------------------------------------
$_lang['access_to_contexts'] = 'Access to Contexts';
$_lang['access_to_resource_groups'] = 'Access to Resource Groups';
$_lang['access_type_err_ns'] = 'Target type or id not specified!';
// --- Shared form labels and grid filters --------------------------------
$_lang['acl_add'] = 'Add Access Control';
$_lang['authority'] = 'Authority';
$_lang['authority_err_ns'] = 'Please specify a Minimum Role.';
$_lang['category'] = 'Category';
$_lang['category_add'] = 'Add Category';
$_lang['filter_by_context'] = 'Filter by Context...';
$_lang['filter_by_policy'] = 'Filter by Policy...';
$_lang['filter_by_resource_group'] = 'Filter by Resource Group...';
$_lang['filter_by_category'] = 'Filter by Category...';
$_lang['filter_by_source'] = 'Filter by Media Source...';
$_lang['no_policy_option'] = ' (no policy) ';
$_lang['permissions_in_policy'] = 'Permissions in Selected Policy';
// --- Resource Group CRUD ------------------------------------------------
$_lang['resource_group'] = 'Resource Group';
$_lang['resource_group_add'] = 'Add Resource Group';
$_lang['resource_group_access_remove'] = 'Remove Resource from Group';
$_lang['resource_group_access_remove_confirm'] = 'Are you sure you want to remove this resource from the resource group?';
$_lang['resource_group_create'] = 'Create Resource Group';
$_lang['resource_group_err_ae'] = 'A resource group with that name already exists.';
$_lang['resource_group_err_create'] = 'An error occurred while trying to create the resource group.';
$_lang['resource_group_err_nf'] = 'Resource group not found.';
$_lang['resource_group_err_ns'] = 'Resource group not specified.';
$_lang['resource_group_err_remove'] = 'An error occurred while trying to remove the resource group.';
$_lang['resource_group_remove'] = 'Delete Resource Group';
$_lang['resource_group_remove_confirm'] = 'Are you sure you want to remove this resource group?';
$_lang['resource_group_resource_err_ae'] = 'The resource is already a part of that resource group.';
$_lang['resource_group_resource_err_create'] = 'An error occurred while trying to put the resource in the resource group.';
$_lang['resource_group_resource_err_nf'] = 'The resource is not a part of that resource group.';
$_lang['resource_group_resource_err_remove'] = 'An error occurred while trying to remove the resource from the resource group.';
$_lang['resource_group_untitled'] = 'Untitled Resource Group';
$_lang['resource_group_update'] = 'Update Resource Group';
$_lang['resource_group_access_contexts'] = 'A comma-separated list of Contexts that the Resource Group should pertain to for the following options.';
// --- Resource Group "Access Wizard" panel -------------------------------
$_lang['resource_group_automatic_access'] = 'Access Wizard';
$_lang['resource_group_automatic_access_desc'] = 'Note: You may have to flush sessions after adding the Resource Group if you check any of the options below.';
// Fixed typo in user-facing text: "Adminstrator" -> "Administrator".
$_lang['resource_group_access_admin'] = 'Automatically Give Administrator Group Access';
$_lang['resource_group_access_admin_desc'] = 'If checked, will give the Administrator Group view and editing access to this Resource Group for the above contexts.';
$_lang['resource_group_access_anon'] = 'Automatically Give Anonymous Access';
$_lang['resource_group_access_anon_desc'] = 'If checked, will give non-logged-in users view access to this Resource Group for the above contexts.';
$_lang['resource_group_access_parallel'] = 'Create Parallel User Group';
$_lang['resource_group_access_parallel_desc'] = 'If checked, will automatically create a User Group with the same name, and give it view access to this Resource Group for the above contexts.';
$_lang['resource_group_access_ugs'] = 'Automatically Give Other User Groups Access';
$_lang['resource_group_access_ugs_desc'] = 'A comma-separated list of User Group names. If non-blank, will give the User Groups specified here view access to this Resource Group for the above contexts.';
// --- Roles --------------------------------------------------------------
$_lang['roles_msg'] = 'A role is, by definition, a position or status one holds within a certain situation. They can be used to group Users into a position or status within a User Group. Roles in MODX also have what is called "Authority". This is a number value that can be any valid integer. Authority levels are "inheritable downward", in the sense that a Role with Authority 1 will inherit any and all Group Policies assigned to itself, and to any Roles with higher Authority level than 1.';
// --- Media Sources and Namespaces ---------------------------------------
$_lang['source_add'] = 'Add Media Source';
$_lang['namespace_add'] = 'Add Namespace';
$_lang['access_namespace_update'] = 'Update Namespace Access';
$_lang['access_namespace_remove'] = 'Remove Namespace Access';
$_lang['filter_by_namespace'] = 'Filter by Namespace...';
// --- User Group "Access Wizard" panel -----------------------------------
$_lang['user_group_aw'] = 'Access Wizard';
$_lang['user_group_aw_desc'] = 'Note: You may have to flush sessions after adding the User Group if you check any of the options below.';
$_lang['user_group_aw_contexts'] = 'Contexts';
$_lang['user_group_aw_contexts_desc'] = 'A comma-separated list of Contexts that this User Group should be able to view.';
$_lang['user_group_aw_manager_policy'] = 'Manager Policy';
$_lang['user_group_aw_manager_policy_desc'] = 'The Policy to give the User Group for editing in the manager. Select (no policy) if you do not want to grant manager access.';
$_lang['user_group_aw_users'] = 'Users';
$_lang['user_group_aw_users_desc'] = 'A comma-separated list of usernames to add to this User Group. You can use the format username:role to set the role; otherwise, Member will be assumed.';
$_lang['user_group_aw_resource_groups'] = 'Resource Groups';
$_lang['user_group_aw_resource_groups_desc'] = 'A comma-separated list of Resource Groups to give access to in the Contexts specified above.';
$_lang['user_group_aw_categories'] = 'Element Categories';
$_lang['user_group_aw_categories_desc'] = 'A comma-separated list of Element Categories to give access to in the Contexts specified above.';
$_lang['user_group_aw_parallel'] = 'Create Parallel Resource Group';
$_lang['user_group_aw_parallel_desc'] = 'If checked, will automatically create a Resource Group with the same name, and give this User Group view access to it in the Contexts specified above.';
// --- User Group: Element Category access --------------------------------
$_lang['user_group_category_access'] = 'Element Category Access';
$_lang['user_group_category_access_msg'] = 'Here you can set which Elements this User Group can access by the Categories the Elements are in.';
$_lang['user_group_category_authority_desc'] = 'The minimum Role that will have access to the Permissions in the selected Policy for this context. Roles with stronger Authority (lower numbers) will inherit this access as well. Most situations can leave this at "Member".';
$_lang['user_group_category_category_desc'] = 'The Category to grant access to.';
$_lang['user_group_category_context_desc'] = 'The Context to target for Elements with the above Category associated to them.';
$_lang['user_group_category_err_ae'] = 'User Group already has access to that Category.';
$_lang['user_group_category_policy_desc'] = 'The Policy to apply to this Context with Elements in the Category for this User Group. This will grant all Users in this User Group with the selected minimum Role all the Permissions in the Policy.';
$_lang['user_group_category_remove_confirm'] = 'Are you sure you want to remove this Category from this User Group?';
// --- User Group: Context access -----------------------------------------
$_lang['user_group_context_access'] = 'Context Access';
$_lang['user_group_context_access_msg'] = 'Here you can set which Contexts this User Group can access.';
$_lang['user_group_context_authority_desc'] = 'The minimum Role that will have access to the Permissions in the selected Policy for this context. Roles with stronger Authority (lower numbers) will inherit this access as well. Most situations can leave this at "Member".';
$_lang['user_group_context_context_desc'] = 'The Context to grant access to.';
$_lang['user_group_context_policy_desc'] = 'The Policy to apply to this Context for this User Group. This will grant all Users in this User Group with the selected minimum Role all the Permissions in the Policy.';
$_lang['user_group_context_err_ae'] = 'User Group already has access to that context.';
$_lang['user_group_context_remove_confirm'] = 'Are you sure you want to remove this Context from this User Group?';
// --- User Group: Resource Group access ----------------------------------
$_lang['user_group_resourcegroup_access'] = 'Resource Group Access';
$_lang['user_group_resourcegroup_access_msg'] = 'Here you can set which Resource Groups this User Group can access.';
$_lang['user_group_resourcegroup_authority_desc'] = 'The minimum Role that will have access to the Permissions in the selected Policy for this context. Roles with stronger Authority (lower numbers) will inherit this access as well. Most situations can leave this at "Member".';
$_lang['user_group_resourcegroup_context_desc'] = 'The Context to target for items with the above Resource Group associated to them.';
$_lang['user_group_resourcegroup_err_ae'] = 'User Group already has access to that Resource Group.';
$_lang['user_group_resourcegroup_policy_desc'] = 'The Policy to apply to this Context with Resources in the Resource Group for this User Group. This will grant all Users in this User Group with the selected minimum Role all the Permissions in the Policy.';
$_lang['user_group_resourcegroup_remove_confirm'] = 'Are you sure you want to remove this Resource Group from this User Group?';
$_lang['user_group_resourcegroup_resource_group_desc'] = 'The Resource Group to grant access to.';
// --- User Group: Media Source access ------------------------------------
$_lang['user_group_source_access'] = 'Media Source Access';
$_lang['user_group_source_access_msg'] = 'Here you can set which Media Sources this User Group can access.';
$_lang['user_group_source_authority_desc'] = 'The minimum Role that will have access to the Permissions in the selected Policy. Roles with stronger Authority (lower numbers) will inherit this access as well. Most situations can leave this at "Member".';
$_lang['user_group_source_err_ae'] = 'User Group already has access to that Media Source.';
$_lang['user_group_source_policy_desc'] = 'The Policy to apply to this Media Source for this User Group. This will grant all Users in this User Group with the selected minimum Role all the Permissions in the Policy.';
$_lang['user_group_source_remove_confirm'] = 'Are you sure you want to remove this Media Source from this User Group?';
$_lang['user_group_source_source_desc'] = 'The Media Source to grant access to.';
// --- User Group: Users and Namespaces -----------------------------------
$_lang['user_group_user_access_msg'] = 'Select which users you want in this User Group.';
$_lang['user_group_namespace_access'] = 'Namespace access';
$_lang['user_group_namespace_access_desc'] = 'Here you can set which Namespaces this User Group can access.';
| {
"pile_set_name": "Github"
} |
#!/bin/sh
# Called by the kernel (cgroup release agent) when the last process in a
# Finit control group exits; removes the now-empty cgroup directory.
#
# $1 is the cgroup path relative to the hierarchy root, e.g. "/myservice".
BASE=/sys/fs/cgroup/finit
# Quote all expansions so paths containing spaces or glob characters are
# passed through intact (the original left them unquoted).
PROC=$(basename "$1")
DIR="$BASE$1"
logit -p daemon.info -t finit "$PROC stopped, cleaning up control group $DIR"
rmdir "$DIR"
# Always report success to the kernel, as before (rmdir failure is benign here).
exit 0
| {
"pile_set_name": "Github"
} |
package gorm
import (
"fmt"
"strings"
)
// Define callbacks for creating
// init registers the default chain of create callbacks. The registration
// order is the execution order: transaction begin, before-hooks, owned
// associations, timestamps, the INSERT itself, default-value reload,
// dependent associations, after-hooks, then commit/rollback.
func init() {
	DefaultCallback.Create().Register("gorm:begin_transaction", beginTransactionCallback)
	DefaultCallback.Create().Register("gorm:before_create", beforeCreateCallback)
	DefaultCallback.Create().Register("gorm:save_before_associations", saveBeforeAssociationsCallback)
	DefaultCallback.Create().Register("gorm:update_time_stamp", updateTimeStampForCreateCallback)
	DefaultCallback.Create().Register("gorm:create", createCallback)
	DefaultCallback.Create().Register("gorm:force_reload_after_create", forceReloadAfterCreateCallback)
	DefaultCallback.Create().Register("gorm:save_after_associations", saveAfterAssociationsCallback)
	DefaultCallback.Create().Register("gorm:after_create", afterCreateCallback)
	DefaultCallback.Create().Register("gorm:commit_or_rollback_transaction", commitOrRollbackTransactionCallback)
}
// beforeCreateCallback will invoke `BeforeSave`, `BeforeCreate` method before creating
func beforeCreateCallback(scope *Scope) {
if !scope.HasError() {
scope.CallMethod("BeforeSave")
}
if !scope.HasError() {
scope.CallMethod("BeforeCreate")
}
}
// updateTimeStampForCreateCallback will set `CreatedAt`, `UpdatedAt` when creating
func updateTimeStampForCreateCallback(scope *Scope) {
if !scope.HasError() {
now := NowFunc()
if createdAtField, ok := scope.FieldByName("CreatedAt"); ok {
if createdAtField.IsBlank {
createdAtField.Set(now)
}
}
if updatedAtField, ok := scope.FieldByName("UpdatedAt"); ok {
if updatedAtField.IsBlank {
updatedAtField.Set(now)
}
}
}
}
// createCallback the callback used to insert data into database
func createCallback(scope *Scope) {
if !scope.HasError() {
defer scope.trace(NowFunc())
var (
columns, placeholders []string
blankColumnsWithDefaultValue []string
)
for _, field := range scope.Fields() {
if scope.changeableField(field) {
if field.IsNormal {
if field.IsBlank && field.HasDefaultValue {
blankColumnsWithDefaultValue = append(blankColumnsWithDefaultValue, scope.Quote(field.DBName))
scope.InstanceSet("gorm:blank_columns_with_default_value", blankColumnsWithDefaultValue)
} else if !field.IsPrimaryKey || !field.IsBlank {
columns = append(columns, scope.Quote(field.DBName))
placeholders = append(placeholders, scope.AddToVars(field.Field.Interface()))
}
} else if field.Relationship != nil && field.Relationship.Kind == "belongs_to" {
for _, foreignKey := range field.Relationship.ForeignDBNames {
if foreignField, ok := scope.FieldByName(foreignKey); ok && !scope.changeableField(foreignField) {
columns = append(columns, scope.Quote(foreignField.DBName))
placeholders = append(placeholders, scope.AddToVars(foreignField.Field.Interface()))
}
}
}
}
}
var (
returningColumn = "*"
quotedTableName = scope.QuotedTableName()
primaryField = scope.PrimaryField()
extraOption string
)
if str, ok := scope.Get("gorm:insert_option"); ok {
extraOption = fmt.Sprint(str)
}
if primaryField != nil {
returningColumn = scope.Quote(primaryField.DBName)
}
lastInsertIDReturningSuffix := scope.Dialect().LastInsertIDReturningSuffix(quotedTableName, returningColumn)
if len(columns) == 0 {
scope.Raw(fmt.Sprintf(
"INSERT INTO %v DEFAULT VALUES%v%v",
quotedTableName,
addExtraSpaceIfExist(extraOption),
addExtraSpaceIfExist(lastInsertIDReturningSuffix),
))
} else {
scope.Raw(fmt.Sprintf(
"INSERT INTO %v (%v) VALUES (%v)%v%v",
scope.QuotedTableName(),
strings.Join(columns, ","),
strings.Join(placeholders, ","),
addExtraSpaceIfExist(extraOption),
addExtraSpaceIfExist(lastInsertIDReturningSuffix),
))
}
// execute create sql
if lastInsertIDReturningSuffix == "" || primaryField == nil {
if result, err := scope.SQLDB().Exec(scope.SQL, scope.SQLVars...); scope.Err(err) == nil {
// set rows affected count
scope.db.RowsAffected, _ = result.RowsAffected()
// set primary value to primary field
if primaryField != nil && primaryField.IsBlank {
if primaryValue, err := result.LastInsertId(); scope.Err(err) == nil {
scope.Err(primaryField.Set(primaryValue))
}
}
}
} else {
if primaryField.Field.CanAddr() {
if err := scope.SQLDB().QueryRow(scope.SQL, scope.SQLVars...).Scan(primaryField.Field.Addr().Interface()); scope.Err(err) == nil {
primaryField.IsBlank = false
scope.db.RowsAffected = 1
}
} else {
scope.Err(ErrUnaddressable)
}
}
}
}
// forceReloadAfterCreateCallback will reload columns that having default value, and set it back to current object
func forceReloadAfterCreateCallback(scope *Scope) {
if blankColumnsWithDefaultValue, ok := scope.InstanceGet("gorm:blank_columns_with_default_value"); ok {
db := scope.DB().New().Table(scope.TableName()).Select(blankColumnsWithDefaultValue.([]string))
for _, field := range scope.Fields() {
if field.IsPrimaryKey && !field.IsBlank {
db = db.Where(fmt.Sprintf("%v = ?", field.DBName), field.Field.Interface())
}
}
db.Scan(scope.Value)
}
}
// afterCreateCallback will invoke `AfterCreate`, `AfterSave` method after creating
func afterCreateCallback(scope *Scope) {
if !scope.HasError() {
scope.CallMethod("AfterCreate")
}
if !scope.HasError() {
scope.CallMethod("AfterSave")
}
}
| {
"pile_set_name": "Github"
} |
<!-- "Add book" form (Angular template): each input is two-way bound to a
     field of the `books` model via ngModel; the button passes the populated
     model to add(). Labels are in Chinese (title/author/id/cover URL). -->
<div class="add">
<h2>添加书本:</h2>
<label>标题:
<input [(ngModel)]="books.title" placeholder="请输入标题">
</label>
<label>作者:
<input [(ngModel)]="books.author" placeholder="请输入作者">
</label>
<label>书本id:
<input [(ngModel)]="books.id" placeholder="请输入书本id">
</label>
<label>封面地址:
<input [(ngModel)]="books.url" placeholder="请输入封面地址">
</label>
<!-- Submits the current `books` object; the component is responsible for
     validation and resetting the form. -->
<div><button (click)="add(books)">添加</button></div>
</div>
"pile_set_name": "Github"
} |
return $this->successResponse(
[% model_was_added %],
$this->transform($[% model_name_singular_variable %])
); | {
"pile_set_name": "Github"
} |
package org.hexworks.zircon.internal.component.renderer
import org.hexworks.zircon.api.component.renderer.ComponentRenderContext
import org.hexworks.zircon.api.component.renderer.ComponentRenderer
import org.hexworks.zircon.api.data.Position
import org.hexworks.zircon.api.graphics.TileGraphics
import org.hexworks.zircon.internal.component.impl.DefaultIcon
/**
 * Renders a [DefaultIcon] by drawing its icon tile at the origin of the
 * component's tile graphics (an icon occupies a single tile, so (0, 0)
 * is always the correct position).
 */
class DefaultIconRenderer : ComponentRenderer<DefaultIcon> {
    override fun render(tileGraphics: TileGraphics, context: ComponentRenderContext<DefaultIcon>) {
        tileGraphics.draw(context.component.icon, Position.zero())
    }
}
| {
"pile_set_name": "Github"
} |
package test.assertk.assertions.support
import assertk.assertions.support.ListDiffer
import kotlin.test.Test
import kotlin.test.assertEquals
/**
 * Unit tests for [ListDiffer.diff], covering the edit scripts produced for
 * empty inputs, unchanged elements, insertions and deletions.
 *
 * Fixed misspelled test names: "singe_*" -> "single_*" (test methods are
 * discovered by the framework, so renaming is safe for callers).
 */
class ListDifferTest {
    @Test fun empty_diff() {
        val diff = ListDiffer.diff(emptyList<Any>(), emptyList<Any>())
        assertEquals(emptyList(), diff)
    }
    @Test fun single_item_no_change() {
        val diff = ListDiffer.diff(listOf(1), listOf(1))
        assertEquals(listOf(ListDiffer.Edit.Eq(oldIndex = 0, oldValue = 1, newIndex = 0, newValue = 1)), diff)
    }
    @Test fun single_insert() {
        val diff = ListDiffer.diff(emptyList<Int>(), listOf(1))
        assertEquals(listOf(ListDiffer.Edit.Ins(newIndex = 0, newValue = 1)), diff)
    }
    @Test fun single_delete() {
        val diff = ListDiffer.diff(listOf(1), emptyList<Int>())
        assertEquals(listOf(ListDiffer.Edit.Del(oldIndex = 0, oldValue = 1)), diff)
    }
    @Test fun single_insert_middle() {
        val diff = ListDiffer.diff(listOf(1, 3), listOf(1, 2, 3))
        assertEquals(
            listOf(
                ListDiffer.Edit.Eq(oldIndex = 0, oldValue = 1, newIndex = 0, newValue = 1),
                ListDiffer.Edit.Ins(newIndex = 1, newValue = 2),
                ListDiffer.Edit.Eq(oldIndex = 1, oldValue = 3, newIndex = 2, newValue = 3)
            ), diff
        )
    }
    @Test fun single_delete_middle() {
        val diff = ListDiffer.diff(listOf(1, 2, 3), listOf(1, 3))
        assertEquals(
            listOf(
                ListDiffer.Edit.Eq(oldIndex = 0, oldValue = 1, newIndex = 0, newValue = 1),
                ListDiffer.Edit.Del(oldIndex = 1, oldValue = 2),
                ListDiffer.Edit.Eq(oldIndex = 2, oldValue = 3, newIndex = 1, newValue = 3)
            ), diff
        )
    }
    @Test fun single_delete_multiple_inserts() {
        val diff = ListDiffer.diff(listOf(3), listOf(1, 2))
        assertEquals(
            listOf(
                ListDiffer.Edit.Del(oldIndex = 0, oldValue = 3),
                ListDiffer.Edit.Ins(newIndex = 0, newValue = 1),
                ListDiffer.Edit.Ins(newIndex = 1, newValue = 2)
            ), diff
        )
    }
}
"pile_set_name": "Github"
} |
from TensorMol import *
import time
import random
# Keep at most three TensorFlow checkpoints on disk, and restrict the
# process to GPU 0.
PARAMS["max_checkpoints"] = 3
os.environ["CUDA_VISIBLE_DEVICES"]="0"
# Takes two nearly identical crystal lattices and interpolates a core/shell
# structure; the lattices must be oriented identically and be stoichiometric.
def InterpolateGeometries():
	"""Loads the cspbbr3_tess set, aligns the two lattices, runs a
	force-interpolated optimization between them, and writes the result
	to ./results/cspbbr3_tess.

	NOTE(review): relies on a previously saved 'cspbbr3_tess' MSet being
	present on disk (the ReadGDB9Unpacked/Save lines are commented out).
	"""
	a=MSet('cspbbr3_tess')
	#a.ReadGDB9Unpacked(path='/media/sdb2/jeherr/TensorMol/datasets/cspbbr3/pb_tess_6sc/')
	#a.Save()
	a.Load()
	mol1 = a.mols[0]
	mol2 = a.mols[1]
	# Rotate the second lattice and align atom ordering before interpolating.
	mol2.RotateX()
	mol1.AlignAtoms(mol2)
	optimizer = Optimizer(None)
	optimizer.Interpolate_OptForce(mol1, mol2)
	mol1.WriteXYZfile(fpath='./results/cspbbr3_tess', fname='cspbbr3_6sc_pb_tess_goopt', mode='w')
	# mol2.WriteXYZfile(fpath='./results/cspbbr3_tess', fname='cspbbr3_6sc_ortho_rot', mode='w')
def read_unpacked_set(set_name="chemspider12", paths="/media/sdb2/jeherr/TensorMol/datasets/chemspider12/*/", properties=["name", "energy", "gradients", "dipole"]):
import glob
a=MSet(set_name)
for path in glob.iglob(paths):
a.read_xyz_set_with_properties(paths, properties)
print len(a.mols), " Molecules"
a.Save()
def TrainKRR(set_ = "SmallMols", dig_ = "GauSH", OType_ ="Force"):
	"""Trains a kernel ridge regression model on the given molecule set.

	set_: name of the MSet to load and train on.
	dig_: digester (descriptor) name.
	OType_: output type the digester should produce.

	Bug fix: the original ignored its parameters, hard-coding
	"SmallMols_rand" for the load and "SmallMols" for the training build;
	all three parameters are now honored.
	"""
	a = MSet(set_)
	a.Load()
	TreatedAtoms = a.AtomTypes()
	d = Digester(TreatedAtoms, name_=dig_, OType_=OType_)
	tset = TensorData(a, d)
	tset.BuildTrainMolwise(set_, TreatedAtoms)
	manager = TFManage("", tset, True, "KRR_sqdiff")
	return
def RandomSmallSet(set_, size_):
	"""Returns an MSet of random molecules chosen from a larger set.

	set_: name of the existing MSet to sample from.
	size_: number of molecules to select (without replacement).

	The subset is saved under the name "<set_>_rand".
	"""
	print "Selecting a subset of "+str(set_)+" of size "+str(size_)
	a=MSet(set_)
	a.Load()
	b=MSet(set_+"_rand")
	# Sample distinct indices so no molecule is picked twice.
	mols = random.sample(range(len(a.mols)), size_)
	for i in mols:
		b.mols.append(a.mols[i])
	b.Save()
	return b
def TestMetadynamics():
	"""Propagates nicotine with the MetaDynamics driver, using a pretrained
	Behler-Parrinello symmetry-function network as the force field.

	NOTE(review): MetaMDBumpHeight is 0.00, so no bias is ever deposited --
	this effectively runs plain thermostatted MD through the metadynamics
	machinery; confirm whether that is intended.
	"""
	a = MSet("nicotine_opt")
	a.ReadXYZ()
	m = a.mols[-1]
	# Alternative ab-initio force field (kept for reference):
	# ForceField = lambda x: QchemDFT(Mol(m.atoms,x),basis_ = '6-311g**',xc_='wB97X-D', jobtype_='force', filename_='jmols2', path_='./qchem/', threads=8)
	manager = TFMolManageDirect(name="BehlerParinelloDirectSymFunc_nicotine_vib_Tue_Nov_21_09.11.26_2017", network_type = "BehlerParinelloDirectSymFunc")
	def force_field(coords):
		# Returns (energy, forces); forces converted from Hartree to Joules
		# via the JOULEPERHARTREE factor.
		energy, forces = manager.evaluate_mol(Mol(m.atoms, coords), True)
		return energy, forces * JOULEPERHARTREE
	masses = np.array(list(map(lambda x: ATOMICMASSESAMU[x-1],m.atoms)))
	print "Masses:", masses
	# MD settings: 0.5 (presumably fs) timestep, Andersen thermostat at 300 K,
	# thermal initial velocities -- TODO confirm units against the MD driver.
	PARAMS["MDdt"] = 0.5
	PARAMS["RemoveInvariant"]=True
	PARAMS["MDMaxStep"] = 50000
	PARAMS["MDThermostat"] = "Andersen"
	PARAMS["MDTemp"]= 300.0
	PARAMS["MDV0"] = "Thermal"
	PARAMS["MetaMDBumpHeight"] = 0.00
	PARAMS["MetaMDBumpWidth"] = 0.01
	meta = MetaDynamics(force_field, m, EandF_=force_field)
	meta.Prop()
def test_md():
	"""Runs velocity-Verlet molecular dynamics on the fifth molecule of the
	OptMols set, using a pretrained GauSH direct network for forces.
	"""
	# Radial basis centers/widths for the Gaussian-spherical-harmonic
	# embedding, and Pauling electronegativities used as atomic embedding
	# factors; must match the settings the network was trained with.
	PARAMS["RBFS"] = np.array([[0.35, 0.35], [0.70, 0.35], [1.05, 0.35], [1.40, 0.35], [1.75, 0.35], [2.10, 0.35], [2.45, 0.35],
	[2.80, 0.35], [3.15, 0.35], [3.50, 0.35], [3.85, 0.35], [4.20, 0.35], [4.55, 0.35], [4.90, 0.35]])
	PARAMS["ANES"] = np.array([2.20, 1.0, 1.0, 1.0, 1.0, 2.55, 3.04, 3.44]) #pauling electronegativity
	PARAMS["SH_NRAD"] = 14
	PARAMS["SH_LMAX"] = 4
	a = MSet("OptMols")
	a.ReadXYZ()
	mol = a.mols[4]
	manager=TFManage(Name_="SmallMols_GauSH_fc_sqdiff_GauSH_direct",Train_=False,NetType_="fc_sqdiff_GauSH_direct")
	force_field = lambda x: manager.evaluate_mol_forces_direct(x)
	masses = np.array(map(lambda x: ATOMICMASSESAMU[x-1], mol.atoms))
	print "Masses:", masses
	# 0.2 timestep (presumably fs), Nose thermostat at 300 K -- TODO confirm
	# units against the VelocityVerlet driver.
	PARAMS["MDdt"] = 0.2
	PARAMS["RemoveInvariant"]=True
	PARAMS["MDMaxStep"] = 20000
	PARAMS["MDThermostat"] = "Nose"
	PARAMS["MDTemp"]= 300.0
	md = VelocityVerlet(force_field, mol)
	md.Prop()
def TestTFBond():
	"""Trains a bond-wise network ("fc_sqdiff_BPBond_Direct") on the
	chemspider_all_rand set, targeting atomization energies with the
	Coulomb-Z ("CZ") bond digester.
	"""
	a=MSet("chemspider_all_rand")
	a.Load()
	d = MolDigester(a.BondTypes(), name_="CZ", OType_="AtomizationEnergy")
	tset = TensorMolData_BPBond_Direct(a,d)
	# Third argument True starts training immediately.
	manager=TFMolManage("",tset,True,"fc_sqdiff_BPBond_Direct")
def TestTFGauSH():
	"""Pad the first 1000 SmallMols_rand molecules into fixed-size tensors,
	evaluate the GauSH spherical-harmonic embedding in a TF session, and dump
	a Chrome trace of the run for profiling."""
	tf_precision = eval(PARAMS["tf_prec"])
	TensorMol.RawEmbeddings.data_precision = tf_precision
	np.set_printoptions(threshold=100000)
	a=MSet("SmallMols_rand")
	a.Load()
	maxnatoms = a.MaxNAtoms()
	zlist = []
	xyzlist = []
	labelslist = []
	natomlist = []
	# Zero-pad coordinates, atomic numbers and force labels up to maxnatoms
	# so all molecules stack into one dense batch tensor.
	for i, mol in enumerate(a.mols):
		paddedxyz = np.zeros((maxnatoms,3), dtype=np.float32)
		paddedxyz[:mol.atoms.shape[0]] = mol.coords
		paddedz = np.zeros((maxnatoms), dtype=np.int32)
		paddedz[:mol.atoms.shape[0]] = mol.atoms
		paddedlabels = np.zeros((maxnatoms, 3), dtype=np.float32)
		paddedlabels[:mol.atoms.shape[0]] = mol.properties["forces"]
		xyzlist.append(paddedxyz)
		zlist.append(paddedz)
		labelslist.append(paddedlabels)
		natomlist.append(mol.NAtoms())
		# Only embed the first 1000 molecules.
		if i == 999:
			break
	xyzstack = tf.stack(xyzlist)
	zstack = tf.stack(zlist)
	labelstack = tf.stack(labelslist)
	natomstack = tf.stack(natomlist)
	gaussian_params = tf.Variable(PARAMS["RBFS"], trainable=True, dtype=tf.float32)
	atomic_embed_factors = tf.Variable(PARAMS["ANES"], trainable=True, dtype=tf.float32)
	elements = tf.constant([1, 6, 7, 8], dtype=tf.int32)
	# Embedding op; trailing 4 is presumably the angular momentum cutoff (l_max) -- TODO confirm.
	tmp = tf_gaussian_spherical_harmonics_channel(xyzstack, zstack, elements, gaussian_params, 4)
	sess = tf.Session()
	sess.run(tf.global_variables_initializer())
	# Full-trace run metadata so a timeline can be emitted below.
	options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
	run_metadata = tf.RunMetadata()
	# for i in range(a.mols[0].atoms.shape[0]):
	# 	print a.mols[0].atoms[i], " ", a.mols[0].coords[i,0], " ", a.mols[0].coords[i,1], " ", a.mols[0].coords[i,2]
	tmp2 = sess.run(tmp, options=options, run_metadata=run_metadata)
	print tmp2
	# print tmp2[1]
	# print tmp2.shape
	# print tmp3
	# Write a Chrome-format profiling trace (chrome://tracing).
	fetched_timeline = timeline.Timeline(run_metadata.step_stats)
	chrome_trace = fetched_timeline.generate_chrome_trace_format()
	with open('timeline_step_tmp_tm_nocheck_h2o.json', 'w') as f:
		f.write(chrome_trace)
	# print tmp2[3].shape
	# print a.mols[0].atoms.shape
	# TreatedAtoms = a.AtomTypes()
	# d = Digester(TreatedAtoms, name_="GauSH", OType_="Force")
	# # tset = TensorData(a,d)
	# mol_ = a.mols[0]
	# print d.Emb(mol_, -1, mol_.coords[0], MakeOutputs=False)[0]
	# print mol_.atoms[0]
def test_gaussian_overlap():
gaussian_params = tf.Variable(PARAMS["RBFS"], trainable=True, dtype=tf.float32)
tf_precision = eval(PARAMS["tf_prec"])
TensorMol.RawEmbeddings.data_precision = tf_precision
tmp = tf_gaussian_overlap(gaussian_params)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
tmp2 = sess.run(tmp)
print tmp2
def train_forces_GauSH_direct(set_ = "SmallMols"):
PARAMS["RBFS"] = np.array([[0.35, 0.35], [0.70, 0.35], [1.05, 0.35], [1.40, 0.35], [1.75, 0.35], [2.10, 0.35], [2.45, 0.35],[2.80, 0.35], [3.15, 0.35], [3.50, 0.35], [3.85, 0.35], [4.20, 0.35], [4.55, 0.35], [4.90, 0.35]])
PARAMS["ANES"] = np.array([2.20, 1.0, 1.0, 1.0, 1.0, 2.55, 3.04, 3.44]) #pauling electronegativity
PARAMS["SH_NRAD"] = 14
PARAMS["SH_LMAX"] = 4
PARAMS["HiddenLayers"] = [512, 512, 512, 512, 512, 512, 512]
PARAMS["max_steps"] = 20000
PARAMS["test_freq"] = 5
PARAMS["batch_size"] = 200
PARAMS["NeuronType"] = "elu"
PARAMS["learning_rate"] = 0.0001
a=MSet(set_)
a.Load()
TreatedAtoms = a.AtomTypes()
print "Number of Mols: ", len(a.mols)
d = Digester(TreatedAtoms, name_="GauSH", OType_="Force")
tset = TensorDataDirect(a,d)
manager=TFManage("",tset,True,"fc_sqdiff_GauSH_direct")
def test_tf_neighbor():
	"""Build padded batch tensors for 100 SmallMols_rand molecules, run the
	TF triples-list neighbor construction, and write a Chrome profiling
	trace of the session run."""
	np.set_printoptions(threshold=100000)
	a=MSet("SmallMols_rand")
	a.Load()
	maxnatoms = a.MaxNAtoms()
	zlist = []
	xyzlist = []
	labelslist = []
	# Zero-pad coordinates, atomic numbers and force labels to maxnatoms rows.
	for i, mol in enumerate(a.mols):
		paddedxyz = np.zeros((maxnatoms,3), dtype=np.float32)
		paddedxyz[:mol.atoms.shape[0]] = mol.coords
		paddedz = np.zeros((maxnatoms), dtype=np.int32)
		paddedz[:mol.atoms.shape[0]] = mol.atoms
		paddedlabels = np.zeros((maxnatoms, 3), dtype=np.float32)
		paddedlabels[:mol.atoms.shape[0]] = mol.properties["forces"]
		xyzlist.append(paddedxyz)
		zlist.append(paddedz)
		labelslist.append(paddedlabels)
		# Only the first 100 molecules are used.
		if i == 99:
			break
	xyzstack = tf.stack(xyzlist)
	zstack = tf.stack(zlist)
	labelstack = tf.stack(labelslist)
	gaussian_params = tf.Variable(PARAMS["RBFS"], trainable=True, dtype=tf.float32)
	atomic_embed_factors = tf.Variable(PARAMS["ANES"], trainable=True, dtype=tf.float32)
	element = tf.constant(1, dtype=tf.int32)
	r_cutoff = tf.constant(5.0, dtype=tf.float32)
	# All unordered element triples over H, C, N, O.
	element_pairs = tf.constant([[1,1,1], [1,1,6], [1,1,7], [1,1,8], [1,6,6], [1,6,7], [1,6,8], [1,7,7], [1,7,8], [1,8,8],
						[6,6,6], [6,6,7], [6,6,8], [6,7,7], [6,7,8], [6,8,8], [7,7,7], [7,7,8], [7,8,8], [8,8,8]], dtype=tf.int32)
	tmp = tf_triples_list(xyzstack, zstack, r_cutoff, element_pairs)
	sess = tf.Session()
	sess.run(tf.global_variables_initializer())
	# Full-trace metadata so a timeline can be dumped below.
	options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
	run_metadata = tf.RunMetadata()
	# for i in range(a.mols[0].atoms.shape[0]):
	# 	print a.mols[0].atoms[i], " ", a.mols[0].coords[i,0], " ", a.mols[0].coords[i,1], " ", a.mols[0].coords[i,2]
	tmp3 = sess.run([tmp], options=options, run_metadata=run_metadata)
	# print tmp3
	fetched_timeline = timeline.Timeline(run_metadata.step_stats)
	chrome_trace = fetched_timeline.generate_chrome_trace_format()
	with open('timeline_step_tmp_tm_nocheck_h2o.json', 'w') as f:
		f.write(chrome_trace)
	print tmp3
	# print tmp4[1]
	# print tmp4
	# TreatedAtoms = a.AtomTypes()
	# d = Digester(TreatedAtoms, name_="GauSH", OType_="Force")
	# # tset = TensorData(a,d)
	# mol_ = a.mols[0]
	# print d.Emb(mol_, -1, mol_.coords[0], MakeOutputs=False)[0]
	# print mol_.atoms[0]
def train_energy_pairs_triples():
PARAMS["HiddenLayers"] = [512, 512, 512]
PARAMS["learning_rate"] = 0.0001
PARAMS["max_steps"] = 1000
PARAMS["test_freq"] = 5
PARAMS["batch_size"] = 200
PARAMS["NeuronType"] = "relu"
# PARAMS["tf_prec"] = "tf.float64"
# PARAMS["self.profiling"] = True
a=MSet("SmallMols")
a.Load()
TreatedAtoms = a.AtomTypes()
print "Number of Mols: ", len(a.mols)
d = Digester(TreatedAtoms, name_="GauSH", OType_="AtomizationEnergy")
tset = TensorMolData_BP_Direct(a,d)
manager=TFMolManage("",tset,True,"pairs_triples", Trainable_=True)
def train_energy_symm_func(mset):
PARAMS["train_energy_gradients"] = False
PARAMS["weight_decay"] = None
PARAMS["HiddenLayers"] = [512, 512, 512]
PARAMS["learning_rate"] = 0.0001
PARAMS["max_steps"] = 1000
PARAMS["test_freq"] = 5
PARAMS["batch_size"] = 200
PARAMS["NeuronType"] = "elu"
PARAMS["tf_prec"] = "tf.float64"
a=MSet(mset)
a.Load()
print "Number of Mols: ", len(a.mols)
manager = TFMolManageDirect(a, network_type = "BPSymFunc")
def train_energy_GauSH(mset):
	"""Train a direct GauSH energy network; mset is handed straight to
	TFMolManageDirect, which is expected to handle the data set."""
	# Embedding configuration: 16 radial gaussians, l_max 5.
	PARAMS["RBFS"] = np.stack((np.linspace(0.1, 6.0, 16), np.repeat(0.30, 16)), axis=1)
	PARAMS["SH_NRAD"] = 16
	PARAMS["SH_LMAX"] = 5
	PARAMS["SH_rot_invar"] = False
	PARAMS["EECutoffOn"] = 0.0
	PARAMS["Elu_Width"] = 6.0
	# Train on gradients and rotations, but not dipoles.
	PARAMS["train_gradients"] = True
	PARAMS["train_dipole"] = False
	PARAMS["train_rotation"] = True
	PARAMS["weight_decay"] = None
	# Network and optimizer hyperparameters.
	PARAMS["HiddenLayers"] = [512, 512, 512]
	PARAMS["learning_rate"] = 0.00005
	PARAMS["max_steps"] = 1000
	PARAMS["test_freq"] = 5
	PARAMS["batch_size"] = 100
	PARAMS["NeuronType"] = "shifted_softplus"
	PARAMS["tf_prec"] = "tf.float64"
	PARAMS["Profiling"] = False
	manager = TFMolManageDirect(mset, network_type = "BPGauSH")
def test_h2o():
	"""Conjugate-gradient optimize a distorted water geometry using a trained
	direct GauSH network for energies and forces."""
	# NOTE(review): the first OptMaxCycles assignment is immediately
	# overwritten by the second; only 500 takes effect.
	PARAMS["OptMaxCycles"]=60
	PARAMS["OptMaxCycles"]=500
	PARAMS["OptStepSize"] = 0.1
	PARAMS["OptThresh"]=0.0001
	PARAMS["MDAnnealT0"] = 20.0
	PARAMS["MDAnnealSteps"] = 2000
	a = MSet("water")
	# a.ReadXYZ()
	# Hand-built, slightly distorted water molecule (H, H, O).
	a.mols.append(Mol(np.array([1,1,8]),np.array([[0.9,0.1,0.1],[1.,0.9,1.],[0.1,0.1,0.1]])))
	mol = a.mols[0]
	manager = TFMolManageDirect(name="BehlerParinelloDirectGauSH_H2O_wb97xd_1to21_with_prontonated_Mon_Nov_13_11.11.15_2017", network_type = "BehlerParinelloDirectGauSH")
	def force_field(mol, eval_forces=True):
		# Returns (energy, forces) with net translation/rotation projected out,
		# or just energy when eval_forces is False.
		if eval_forces:
			energy, forces = manager.evaluate_mol(mol, True)
			forces = RemoveInvariantForce(mol.coords, forces, mol.atoms)
			return energy, forces
		else:
			energy = manager.evaluate_mol(mol, False)
			return energy
	Opt = GeometryOptimizer(force_field)
	opt_mol = Opt.opt_conjugate_gradient(mol)
def evaluate_BPSymFunc(mset):
	"""Evaluate a trained BP symmetry-function network on mset in batches of
	100 and print MAE/RMSE (kcal/mol) against the stored atomization labels."""
	a=MSet(mset)
	a.Load()
	output, labels = [], []
	manager = TFMolManageDirect(name="BehlerParinelloDirectSymFunc_nicotine_metamd_10000_Tue_Nov_07_22.35.07_2017", network_type = "BehlerParinelloDirectSymFunc")
	random.shuffle(a.mols)
	batch = []
	# Python 2 integer division: any remainder beyond a multiple of 100
	# molecules is silently dropped from the evaluation.
	for i in range(len(a.mols) / 100):
		for j in range(100):
			labels.append(a.mols[i*100+j].properties["atomization"])
			batch.append(a.mols[i*100+j])
		output.append(manager.evaluate_batch(batch, eval_forces=False))
		batch = []
	output = np.concatenate(output)
	labels = np.array(labels)
	# 627.509 converts hartree to kcal/mol.
	print "MAE:", np.mean(np.abs(output-labels))*627.509
	print "RMSE:",np.sqrt(np.mean(np.square(output-labels)))*627.509
def water_dimer_plot():
PARAMS["RBFS"] = np.stack((np.linspace(0.1, 5.0, 32), np.repeat(0.25, 32)), axis=1)
PARAMS["SH_NRAD"] = 32
PARAMS["SH_LMAX"] = 4
def qchemdft(m_,ghostatoms,basis_ = '6-31g*',xc_='b3lyp', jobtype_='force', filename_='tmp', path_='./qchem/', threads=False):
istring = '$molecule\n0 1 \n'
crds = m_.coords.copy()
crds[abs(crds)<0.0000] *=0.0
for j in range(len(m_.atoms)):
if j in ghostatoms:
istring=istring+"@"+itoa[m_.atoms[j]]+' '+str(crds[j,0])+' '+str(crds[j,1])+' '+str(crds[j,2])+'\n'
else:
istring=istring+itoa[m_.atoms[j]]+' '+str(crds[j,0])+' '+str(crds[j,1])+' '+str(crds[j,2])+'\n'
if jobtype_ == "dipole":
istring =istring + '$end\n\n$rem\njobtype sp\nbasis '+basis_+'\nmethod '+xc_+'\nthresh 11\nsymmetry false\nsym_ignore true\n$end\n'
else:
istring =istring + '$end\n\n$rem\njobtype '+jobtype_+'\nbasis '+basis_+'\nmethod '+xc_+'\nthresh 11\nsymmetry false\nsym_ignore true\n$end\n'
with open(path_+filename_+'.in','w') as fin:
fin.write(istring)
with open(path_+filename_+'.out','a') as fout:
if threads:
proc = subprocess.Popen(['qchem', '-nt', str(threads), path_+filename_+'.in'], stdout=subprocess.PIPE, stderr=subprocess.PIPE,shell=False)
else:
proc = subprocess.Popen(['qchem', path_+filename_+'.in'], stdout=subprocess.PIPE, stderr=subprocess.PIPE,shell=False)
out, err = proc.communicate()
fout.write(out)
lines = out.split('\n')
if jobtype_ == 'force':
Forces = np.zeros((m_.atoms.shape[0],3))
for i, line in enumerate(lines):
if line.count('Convergence criterion met')>0:
Energy = float(line.split()[1])
if line.count("Gradient of SCF Energy") > 0:
k = 0
l = 0
for j in range(1, m_.atoms.shape[0]+1):
Forces[j-1,:] = float(lines[i+k+2].split()[l+1]), float(lines[i+k+3].split()[l+1]), float(lines[i+k+4].split()[l+1])
l += 1
if (j % 6) == 0:
k += 4
l = 0
# return Energy, Forces
return Energy, -Forces*JOULEPERHARTREE/BOHRPERA
elif jobtype_ == 'sp':
for line in lines:
if line.count('Convergence criterion met')>0:
Energy = float(line.split()[1])
return Energy
else:
raise Exception("jobtype needs formatted for return variables")
a = MSet("water_dimer")
a.ReadXYZ()
manager = TFMolManageDirect(name="BehlerParinelloDirectGauSH_H2O_wb97xd_1to21_with_prontonated_Mon_Nov_13_11.35.07_2017", network_type = "BehlerParinelloDirectGauSH")
qchemff = lambda x, y: qchemdft(x, y, basis_ = '6-311g**',xc_='wb97x-d', jobtype_='sp', filename_='tmp', path_='./qchem/', threads=8)
cp_correction = []
for mol in a.mols:
h2o1 = qchemff(Mol(mol.atoms[:3], mol.coords[:3]), [])
h2o2 = qchemff(Mol(mol.atoms[3:], mol.coords[3:]), [])
# h2o1cp = qchemff(mol, [3, 4, 5])
# h2o2cp = qchemff(mol, [0, 1, 2])
dimer = qchemff(mol, [])
# cpc = h2o1cp - h2o1 + h2o2cp - h2o2
# cp_correction.append(cpc)
bond_e = dimer - h2o1 - h2o2
print "{%.10f, %.10f}," % (np.linalg.norm(mol.coords[1] - mol.coords[3]), bond_e * 627.509)
print "TensorMol evaluation"
for i, mol in enumerate(a.mols):
h2o1 = manager.evaluate_mol(Mol(mol.atoms[:3], mol.coords[:3]), False)
h2o2 = manager.evaluate_mol(Mol(mol.atoms[3:], mol.coords[3:]), False)
dimer = manager.evaluate_mol(mol, False)
bond_e = dimer - h2o1 - h2o2
print "{%.10f, %.10f}," % (np.linalg.norm(mol.coords[1] - mol.coords[3]), bond_e * 627.509)
def nicotine_cc_stretch_plot():
	"""Write network energies along the nicotine vibrational-collision scan.

	Evaluates each geometry in the nicotine_vib_collision set with a trained
	BP symmetry-function network and writes "time energy(kcal/mol)" lines to
	vib_collision_energies.dat. A Q-Chem single-point driver is kept below for
	producing the reference curve (currently disabled in the loop).
	"""
	def qchemdft(m_,ghostatoms,basis_ = '6-31g*',xc_='b3lyp', jobtype_='force', filename_='tmp', path_='./qchem/', threads=False):
		# Write a Q-Chem input for m_ (atoms in ghostatoms become ghost "@"
		# centers), run qchem, and parse energy (and forces for jobtype force).
		istring = '$molecule\n0 1 \n'
		crds = m_.coords.copy()
		crds[abs(crds)<0.0000] *=0.0
		for j in range(len(m_.atoms)):
			if j in ghostatoms:
				istring=istring+"@"+itoa[m_.atoms[j]]+' '+str(crds[j,0])+' '+str(crds[j,1])+' '+str(crds[j,2])+'\n'
			else:
				istring=istring+itoa[m_.atoms[j]]+' '+str(crds[j,0])+' '+str(crds[j,1])+' '+str(crds[j,2])+'\n'
		if jobtype_ == "dipole":
			istring =istring + '$end\n\n$rem\njobtype sp\nbasis '+basis_+'\nmethod '+xc_+'\nthresh 11\nsymmetry false\nsym_ignore true\n$end\n'
		else:
			istring =istring + '$end\n\n$rem\njobtype '+jobtype_+'\nbasis '+basis_+'\nmethod '+xc_+'\nthresh 11\nsymmetry false\nsym_ignore true\n$end\n'
		with open(path_+filename_+'.in','w') as fin:
			fin.write(istring)
		with open(path_+filename_+'.out','a') as fout:
			if threads:
				proc = subprocess.Popen(['qchem', '-nt', str(threads), path_+filename_+'.in'], stdout=subprocess.PIPE, stderr=subprocess.PIPE,shell=False)
			else:
				proc = subprocess.Popen(['qchem', path_+filename_+'.in'], stdout=subprocess.PIPE, stderr=subprocess.PIPE,shell=False)
			out, err = proc.communicate()
			fout.write(out)
		lines = out.split('\n')
		if jobtype_ == 'force':
			Forces = np.zeros((m_.atoms.shape[0],3))
			# Q-Chem prints the gradient in 6-atom column groups; (k, l) track
			# the group offset and the column within the group.
			# NOTE(review): Energy is only bound when a 'Convergence criterion
			# met' line is found; an unconverged job raises NameError here.
			for i, line in enumerate(lines):
				if line.count('Convergence criterion met')>0:
					Energy = float(line.split()[1])
				if line.count("Gradient of SCF Energy") > 0:
					k = 0
					l = 0
					for j in range(1, m_.atoms.shape[0]+1):
						Forces[j-1,:] = float(lines[i+k+2].split()[l+1]), float(lines[i+k+3].split()[l+1]), float(lines[i+k+4].split()[l+1])
						l += 1
						if (j % 6) == 0:
							k += 4
							l = 0
			# return Energy, Forces
			return Energy, -Forces*JOULEPERHARTREE/BOHRPERA
		elif jobtype_ == 'sp':
			for line in lines:
				if line.count('Convergence criterion met')>0:
					Energy = float(line.split()[1])
			return Energy
		else:
			raise Exception("jobtype needs formatted for return variables")
	a = MSet("nicotine_vib_collision")
	a.ReadXYZ()
	manager = TFMolManageDirect(name="BehlerParinelloDirectSymFunc_nicotine_full_Fri_Nov_24_15.52.20_2017", network_type = "BehlerParinelloDirectSymFunc")
	qchemff = lambda x, y: qchemdft(x, y, basis_ = '6-311g**',xc_='wb97x-d', jobtype_='sp', filename_='tmp', path_='./qchem/', threads=8)
	# BUG FIX: the output file was opened but never closed, so the final
	# buffered lines could be lost; a context manager guarantees flush/close.
	with open("vib_collision_energies.dat", "w") as f:
		for i, mol in enumerate(a.mols):
			# energy = qchemff(mol, [])
			energy = manager.evaluate_mol(mol, eval_forces=False)
			# 0.06 is the scan step along the trajectory; 627.509 converts
			# hartree to kcal/mol.
			f.write(str(i * 0.06)+" "+str(energy[0] * 627.509)+"\n")
	# print "TensorMol evaluation"
	# for i, mol in enumerate(a.mols):
	# 	h2o1 = manager.evaluate_mol(Mol(mol.atoms[:3], mol.coords[:3]), False)
	# 	h2o2 = manager.evaluate_mol(Mol(mol.atoms[3:], mol.coords[3:]), False)
	# 	dimer = manager.evaluate_mol(mol, False)
	# 	bond_e = dimer - h2o1 - h2o2
	# 	print "{%.10f, %.10f}," % (np.linalg.norm(mol.coords[1] - mol.coords[3]), bond_e * 627.509)
def meta_statistics():
	"""Propagate nicotine metadynamics trajectories over a set of bump
	height/width parameter pairs for later statistics."""
	# MD controls: 0.5 fs step, 20000 steps, Andersen thermostat at 300 K.
	PARAMS["MDdt"] = 0.5 # In fs.
	PARAMS["MDMaxStep"] = 20000
	PARAMS["MetaBumpTime"] = 10.0
	PARAMS["MetaMaxBumps"] = 1000
	PARAMS["MetaBowlK"] = 0.0
	PARAMS["MDThermostat"] = "Andersen"
	PARAMS["MDTemp"] = 300.0
	PARAMS["MDV0"] = None
	mol_set = MSet("nicotine_opt")
	mol_set.ReadXYZ()
	mol = mol_set.mols[0]
	net = TFMolManageDirect(name="BehlerParinelloDirectSymFunc_nicotine_full_Fri_Nov_24_15.52.20_2017", network_type = "BehlerParinelloDirectSymFunc")
	def force_field(coords):
		energy, forces = net.evaluate_mol(Mol(mol.atoms, coords), True)
		return energy, forces * JOULEPERHARTREE
	# for metap in [[0.5, 0.5], [1.0, 0.5], [0.5, 1.0], [1.0, 1.0], [0.5, 1.5], [1.5, 0.5], [1.5, 1.5], [1.0, 2.0], [2.0, 1.0], [2.0, 2.0], [3.0, 3.0]]:
	for bump_height, bump_width in [[0.0, 0.01]]:
		PARAMS["MetaMDBumpHeight"] = bump_height
		PARAMS["MetaMDBumpWidth"] = bump_width
		traj = MetaDynamics(None, mol, "MetaMD_nicotine_aimd_sample"+str(bump_height)+"_"+str(bump_width), force_field)
		traj.Prop()
def meta_stat_plot():
	"""Split recorded metadynamics logs into per-parameter data files:
	an energy-histogram column and a distance-variance column."""
	for metap in [[0.0, 0.01], [0.5, 2.0], [0.5, 0.5], [1.0, 1.0], [0.5, 1.5], [1.5, 0.5], [1.5, 1.5], [1.0, 2.0], [2.0, 1.0]]:
		suffix = str(metap[0]) + "_" + str(metap[1])
		hist_file = open("nicotine_metastat_ehist_" + suffix + ".dat", "w")
		dvar_file = open("nicotine_metastat_dvar_" + suffix + ".dat", "w")
		log_file = open("./results/MDLogMetaMD_nicotine_" + suffix + ".txt", "r")
		# Only the first 19501 log lines are kept.
		for line in log_file.readlines()[:19501]:
			fields = line.split()
			hist_file.write(str(fields[7]) + "\n")
			dvar_file.write(str(fields[0]) + " " + str(fields[10]) + "\n")
		log_file.close()
		hist_file.close()
		dvar_file.close()
def harmonic_freq():
	"""Optimize nicotine with a trained BP symmetry-function network, write
	the optimized geometry, and compute harmonic normal modes/frequencies
	with a fixed-point-charge dipole model for IR intensities."""
	# PARAMS["RBFS"] = np.stack((np.linspace(0.1, 6.0, 16), np.repeat(0.35, 16)), axis=1)
	# PARAMS["SH_NRAD"] = 16
	# PARAMS["SH_LMAX"] = 4
	# PARAMS["EECutoffOn"] = 0.0
	# PARAMS["Elu_Width"] = 6.0
	# PARAMS["HiddenLayers"] = [512, 512, 512]
	# PARAMS["NeuronType"] = "shifted_softplus"
	# PARAMS["tf_prec"] = "tf.float32"
	# manager = TFMolManageDirect(name="BehlerParinelloDirectGauSH_H2O_wb97xd_1to21_with_prontonated_Mon_Dec_11_11.43.09_2017", network_type = "BehlerParinelloDirectGauSH")
	# def GetChemSpiderNetwork(a, Solvation_=False):
	# 	TreatedAtoms = np.array([1,6,7,8], dtype=np.uint8)
	# 	PARAMS["tf_prec"] = "tf.float64"
	# 	PARAMS["NeuronType"] = "sigmoid_with_param"
	# 	PARAMS["sigmoid_alpha"] = 100.0
	# 	PARAMS["HiddenLayers"] = [2000, 2000, 2000]
	# 	PARAMS["EECutoff"] = 15.0
	# 	PARAMS["EECutoffOn"] = 0
	# 	PARAMS["Elu_Width"] = 4.6 # when elu is used EECutoffOn should always equal to 0
	# 	PARAMS["EECutoffOff"] = 15.0
	# 	PARAMS["AddEcc"] = True
	# 	PARAMS["KeepProb"] = [1.0, 1.0, 1.0, 0.7]
	# 	d = MolDigester(TreatedAtoms, name_="ANI1_Sym_Direct", OType_="EnergyAndDipole") # Initialize a digester that apply descriptor for the fragme
	# 	tset = TensorMolData_BP_Direct_EE_WithEle(a, d, order_=1, num_indis_=1, type_="mol", WithGrad_ = True)
	# 	if Solvation_:
	# 		PARAMS["DSFAlpha"] = 0.18
	# 		manager=TFMolManage("chemspider12_solvation", tset,False,"fc_sqdiff_BP_Direct_EE_ChargeEncode_Update_vdw_DSF_elu_Normalize_Dropout",False,False)
	# 	else:
	# 		PARAMS["DSFAlpha"] = 0.18*BOHRPERA
	# 		manager=TFMolManage("chemspider12_nosolvation", tset,False,"fc_sqdiff_BP_Direct_EE_ChargeEncode_Update_vdw_DSF_elu_Normalize_Dropout",False,False)
	# 	return manager
	PARAMS["OptMaxCycles"]= 2000
	PARAMS["OptThresh"] =0.0001
	a=MSet("nicotine_opt_qcorder")
	a.ReadXYZ()
	# m=a.mols[0]
	manager = TFMolManageDirect(name="BehlerParinelloDirectSymFunc_nicotine_vib_Tue_Nov_21_09.11.26_2017", network_type = "BehlerParinelloDirectSymFunc")
	# dipole_manager = GetChemSpiderNetwork(a, False)
	def force_field(coords, eval_forces=True):
		# Energy (and unit-converted forces) at the given coordinates.
		if eval_forces:
			energy, forces = manager.evaluate_mol(Mol(a.mols[0].atoms, coords), True)
			return energy, forces * JOULEPERHARTREE
		else:
			energy = manager.evaluate_mol(Mol(a.mols[0].atoms, coords), False)
			return energy
	def energy_field(coords):
		# Energy-only closure used by the Hessian/normal-mode routine.
		energy = manager.evaluate_mol(Mol(a.mols[0].atoms, coords), False)
		return energy
	# def ChargeField(x_):
	# 	mtmp = Mol(m.atoms,x_)
	# 	Etotal, Ebp, Ebp_atom, Ecc, Evdw, mol_dipole, atom_charge, gradient = dipole_manager.EvalBPDirectEEUpdateSingle(mtmp, PARAMS["AN1_r_Rc"], PARAMS["AN1_a_Rc"], PARAMS["EECutoffOff"], True)
	# 	energy = Etotal[0]
	# 	force = gradient[0]
	# 	return atom_charge[0]
	def dipole_field(coords):
		# Dipole from fixed per-atom point charges (hard-coded for this
		# nicotine atom ordering; presumably precomputed network charges --
		# TODO confirm source).
		q = np.array([-0.355885, -0.306275, -0.138541, -0.129072, -0.199879, 0.092443, -0.073758, 0.004807, -0.280214,
			-0.207116, -0.201989, 0.060910, 0.142512, 0.138947, 0.136766, 0.118485
			, 0.101182, 0.127422, 0.123743, 0.136352, 0.126561, 0.111861, 0.118059, 0.121731, 0.107663, 0.123283])
		# q = np.asarray(ChargeField(coords))
		dipole = np.zeros(3)
		for i in range(0, q.shape[0]):
			dipole += q[i]*coords[i]*BOHRPERA
		return dipole
	Opt = GeomOptimizer(force_field)
	m=Opt.Opt(a.mols[0],"nicotine_nn_opt")
	m.WriteXYZfile("./results/", "optimized_nicotine")
	masses = np.array(map(lambda x: ATOMICMASSESAMU[x-1],m.atoms))
	w,v = HarmonicSpectra(energy_field, m.coords, m.atoms, WriteNM_=True, Mu_ = dipole_field)
def water_ir():
	"""Tessellate a single water into a small periodic box, distort it, and
	optimize the cell geometry with a trained direct GauSH network."""
	PARAMS["RBFS"] = np.stack((np.linspace(0.1, 6.0, 16), np.repeat(0.35, 16)), axis=1)
	PARAMS["SH_NRAD"] = 16
	PARAMS["SH_LMAX"] = 4
	PARAMS["EECutoffOn"] = 0.0
	PARAMS["Elu_Width"] = 6.0
	PARAMS["HiddenLayers"] = [512, 512, 512]
	PARAMS["NeuronType"] = "shifted_softplus"
	PARAMS["tf_prec"] = "tf.float32"
	PARAMS["OptMaxCycles"]= 2000
	PARAMS["OptThresh"] =0.001
	a = MSet()
	# Hand-built water geometry (H, H, O).
	a.mols.append(Mol(np.array([1,1,8]),np.array([[1.02068516794,-0.0953531498283,-0.117982957286],[0.697763661362,0.883054985795,0.981867638617],[0.282216817502,0.305964294644,0.341190303806]])))
	# m = a.mols[0]
	manager = TFMolManageDirect(name="BehlerParinelloDirectGauSH_H2O_wb97xd_1to21_with_prontonated_Mon_Dec_11_11.43.09_2017",
			network_type = "BehlerParinelloDirectGauSH")
	# def force_field(coords, eval_forces=True):
	# 	if eval_forces:
	# 		energy, forces = manager.evaluate_mol(Mol(a.mols[0].atoms, coords), True)
	# 		return energy, forces * JOULEPERHARTREE
	# 	else:
	# 		energy = manager.evaluate_mol(Mol(a.mols[0].atoms, coords), False)
	# 		return energy
	#
	# Opt = GeomOptimizer(force_field)
	# mo = Opt.Opt(m)
	# Tesselate that water to create a box
	ntess = 4
	latv = 2.8*np.eye(3)
	# Start with a water in a ten angstrom box.
	lat = Lattice(latv)
	mc = lat.CenteredInLattice(a.mols[-1])
	mt = Mol(*lat.TessNTimes(mc.atoms,mc.coords,ntess))
	print mt.NAtoms()
	nreal = mt.NAtoms()
	# Small random distortion before optimizing the tessellated cell.
	mt.Distort(0.01)
	def force_field(coords, eval_forces=True):
		# Energy (and unit-converted forces) of the tessellated system.
		if eval_forces:
			energy, forces = manager.evaluate_mol(Mol(mt.atoms, coords), True)
			return energy, forces * JOULEPERHARTREE
		else:
			energy = manager.evaluate_mol(Mol(mt.atoms, coords), False)
			return energy
	Opt = GeomOptimizer(force_field)
	mt = Opt.Opt(mt,"UCopt")
def train_Poly_GauSH():
	"""Train a BehlerParinelloDirectGauSH network on the augmented water
	force data set."""
	# GauSH embedding configuration.
	PARAMS["RBFS"] = np.stack((np.linspace(0.1, 6.0, 16), np.repeat(0.35, 16)), axis=1)
	PARAMS["SH_NRAD"] = 16
	PARAMS["SH_LMAX"] = 4
	PARAMS["EECutoffOn"] = 0.0
	PARAMS["Elu_Width"] = 6.0
	# Train rotations only; no gradient or dipole targets.
	PARAMS["train_gradients"] = False
	PARAMS["train_dipole"] = False
	PARAMS["train_rotation"] = True
	PARAMS["weight_decay"] = None
	# Network and optimizer hyperparameters.
	PARAMS["HiddenLayers"] = [512, 512, 512]
	PARAMS["learning_rate"] = 0.0001
	PARAMS["max_steps"] = 500
	PARAMS["test_freq"] = 5
	PARAMS["batch_size"] = 400
	PARAMS["NeuronType"] = "shifted_softplus"
	PARAMS["tf_prec"] = "tf.float32"
	PARAMS["Profiling"] = False
	mol_set = MSet("H2O_augmented_more_cutoff5_b3lyp_force")
	mol_set.Load()
	manager = TFMolManageDirect(mol_set, network_type = "BehlerParinelloDirectGauSH")
def GetWaterNetwork():
	"""Load the trained electrostatics-embedded water network, optimize the
	water hexamer, then run metadynamics from the optimized geometry."""
	a=MSet("water_hexamer_bag")
	a.ReadXYZ()
	TreatedAtoms = a.AtomTypes()
	# MD / metadynamics controls.
	PARAMS["MDdt"] = 0.5
	PARAMS["RemoveInvariant"]=True
	PARAMS["MDMaxStep"] = 50000
	PARAMS["MDThermostat"] = "Andersen"
	PARAMS["MDTemp"]= 300.0
	PARAMS["MDV0"] = "Random"
	PARAMS["MetaMDBumpHeight"] = 1.0
	PARAMS["MetaMDBumpWidth"] = 2.0
	PARAMS["MetaBowlK"] = 0.2
	PARAMS["MetaBumpTime"] = 5.0
	# Network configuration matching the saved "water_network" manager.
	PARAMS["tf_prec"] = "tf.float64"
	PARAMS["NeuronType"] = "sigmoid_with_param"
	PARAMS["sigmoid_alpha"] = 100.0
	PARAMS["HiddenLayers"] = [500, 500, 500]
	PARAMS["EECutoff"] = 15.0
	PARAMS["EECutoffOn"] = 0
	PARAMS["Elu_Width"] = 4.6 # when elu is used EECutoffOn should always equal to 0
	PARAMS["EECutoffOff"] = 15.0
	PARAMS["DSFAlpha"] = 0.18
	PARAMS["AddEcc"] = True
	PARAMS["KeepProb"] = [1.0, 1.0, 1.0, 1.0]
	d = MolDigester(TreatedAtoms, name_="ANI1_Sym_Direct", OType_="EnergyAndDipole")
	tset = TensorMolData_BP_Direct_EE_WithEle(a, d, order_=1, num_indis_=1, type_="mol", WithGrad_ = True)
	manager=TFMolManage("water_network",tset,False,"fc_sqdiff_BP_Direct_EE_ChargeEncode_Update_vdw_DSF_elu_Normalize_Dropout",False,False)
	def EnAndForce(x_, DoForce=True):
		# Energy/force closure over m (bound below, before first use).
		mtmp = Mol(m.atoms,x_)
		Etotal, Ebp, Ebp_atom, Ecc, Evdw, mol_dipole, atom_charge, gradient = manager.EvalBPDirectEEUpdateSingle(mtmp, PARAMS["AN1_r_Rc"], PARAMS["AN1_a_Rc"], PARAMS["EECutoffOff"], True)
		energy = Etotal[0]
		force = gradient[0]
		if DoForce:
			return energy, force
		else:
			return energy
	m=a.mols[0]
	PARAMS["OptMaxCycles"]= 2000
	PARAMS["OptThresh"] =0.00002
	Opt = GeomOptimizer(EnAndForce)
	mo=Opt.Opt(a.mols[0],"morphine_tm_opt")
	mo.WriteXYZfile("./results/", "opt_h2o_hex_bag")
	masses = np.array(list(map(lambda x: ATOMICMASSESAMU[x-1],mo.atoms)))
	meta = MetaDynamics(EnAndForce, mo, EandF_=EnAndForce, name_="water_hexamer")
	meta.Prop()
def water_meta_opt():
	"""Set up the water-10 electrostatics-embedded BP network for
	metadynamics/optimization work.

	Loads the water10 geometry, configures MD/metadynamics and network
	hyperparameters, builds the trained "water_network" manager and defines
	the EnAndForce closure over the first molecule. As written (and as in the
	sibling water_meta_react), no propagation is launched here.
	"""
	a=MSet("water10")
	a.ReadXYZ()
	TreatedAtoms = a.AtomTypes()
	m=a.mols[0]
	# MD / metadynamics controls.
	PARAMS["MDdt"] = 0.5
	PARAMS["RemoveInvariant"] = True
	PARAMS["MDMaxStep"] = 50000
	PARAMS["MDThermostat"] = "Andersen"
	PARAMS["MDTemp"]= 600.0
	PARAMS["MDV0"] = "Random"
	PARAMS["MetaMDBumpHeight"] = 1.0
	PARAMS["MetaMDBumpWidth"] = 2.0
	PARAMS["MetaBowlK"] = 0.2
	PARAMS["MetaBumpTime"] = 5.0
	# Network configuration matching the saved "water_network" manager.
	PARAMS["tf_prec"] = "tf.float64"
	PARAMS["NeuronType"] = "sigmoid_with_param"
	PARAMS["sigmoid_alpha"] = 100.0
	PARAMS["HiddenLayers"] = [500, 500, 500]
	PARAMS["EECutoff"] = 15.0
	PARAMS["EECutoffOn"] = 0
	PARAMS["Elu_Width"] = 4.6 # when elu is used EECutoffOn should always equal to 0
	PARAMS["EECutoffOff"] = 15.0
	PARAMS["DSFAlpha"] = 0.18
	PARAMS["AddEcc"] = True
	PARAMS["KeepProb"] = [1.0, 1.0, 1.0, 1.0]
	PARAMS["OptMaxCycles"]= 2000
	PARAMS["OptThresh"] =0.00002
	d = MolDigester(TreatedAtoms, name_="ANI1_Sym_Direct", OType_="EnergyAndDipole")
	tset = TensorMolData_BP_Direct_EE_WithEle(a, d, order_=1, num_indis_=1, type_="mol", WithGrad_ = True)
	manager=TFMolManage("water_network",tset,False,"fc_sqdiff_BP_Direct_EE_ChargeEncode_Update_vdw_DSF_elu_Normalize_Dropout",False,False)
	# Sum of per-element reference energies for the atomization shift.
	atomization_energy = 0.0
	# BUG FIX: this loop previously iterated `mol.atoms`, but `mol` is not
	# defined in this function (NameError at runtime); the sibling routines
	# water_meta_react/meta_opt/water_web all iterate `m.atoms`.
	for atom in m.atoms:
		if atom in ele_U:
			atomization_energy += ele_U[atom]
	def EnAndForce(x_, DoForce=True):
		# Shifted energy (and force) of molecule m at coordinates x_.
		mtmp = Mol(m.atoms,x_)
		Etotal, Ebp, Ebp_atom, Ecc, Evdw, mol_dipole, atom_charge, gradient = manager.EvalBPDirectEEUpdateSingle(mtmp, PARAMS["AN1_r_Rc"], PARAMS["AN1_a_Rc"], PARAMS["EECutoffOff"], True)
		energy = Etotal[0] + atomization_energy
		force = gradient[0]
		if DoForce:
			return energy, force
		else:
			return energy
def water_meta_react():
	"""Run reactive metadynamics on the water-10 cluster with the trained
	electrostatics-embedded BP water network."""
	a=MSet("water10")
	a.ReadXYZ()
	TreatedAtoms = a.AtomTypes()
	m=a.mols[0]
	# MD / metadynamics controls (larger bumps than water_meta_opt to drive reaction).
	PARAMS["MDdt"] = 0.5
	PARAMS["RemoveInvariant"] = True
	PARAMS["MDMaxStep"] = 50000
	PARAMS["MDThermostat"] = "Andersen"
	PARAMS["MDTemp"]= 600.0
	PARAMS["MDV0"] = "Random"
	PARAMS["MetaMDBumpHeight"] = 2.0
	PARAMS["MetaMDBumpWidth"] = 3.0
	PARAMS["MetaBowlK"] = 0.2
	PARAMS["MetaBumpTime"] = 5.0
	# Network configuration matching the saved "water_network" manager.
	PARAMS["tf_prec"] = "tf.float64"
	PARAMS["NeuronType"] = "sigmoid_with_param"
	PARAMS["sigmoid_alpha"] = 100.0
	PARAMS["HiddenLayers"] = [500, 500, 500]
	PARAMS["EECutoff"] = 15.0
	PARAMS["EECutoffOn"] = 0
	PARAMS["Elu_Width"] = 4.6 # when elu is used EECutoffOn should always equal to 0
	PARAMS["EECutoffOff"] = 15.0
	PARAMS["DSFAlpha"] = 0.18
	PARAMS["AddEcc"] = True
	PARAMS["KeepProb"] = [1.0, 1.0, 1.0, 1.0]
	PARAMS["OptMaxCycles"]= 2000
	PARAMS["OptThresh"] =0.00002
	d = MolDigester(TreatedAtoms, name_="ANI1_Sym_Direct", OType_="EnergyAndDipole")
	tset = TensorMolData_BP_Direct_EE_WithEle(a, d, order_=1, num_indis_=1, type_="mol", WithGrad_ = True)
	manager=TFMolManage("water_network",tset,False,"fc_sqdiff_BP_Direct_EE_ChargeEncode_Update_vdw_DSF_elu_Normalize_Dropout",False,False)
	# Sum of per-element reference energies for the atomization shift.
	atomization_energy = 0.0
	for atom in m.atoms:
		if atom in ele_U:
			atomization_energy += ele_U[atom]
	def EnAndForce(x_, DoForce=True):
		# Shifted energy (and force) of molecule m at coordinates x_.
		mtmp = Mol(m.atoms,x_)
		Etotal, Ebp, Ebp_atom, Ecc, Evdw, mol_dipole, atom_charge, gradient = manager.EvalBPDirectEEUpdateSingle(mtmp, PARAMS["AN1_r_Rc"], PARAMS["AN1_a_Rc"], PARAMS["EECutoffOff"], True)
		energy = Etotal[0] + atomization_energy
		force = gradient[0]
		if DoForce:
			return energy, force
		else:
			return energy
	meta = MetaDynamics(EnAndForce, m,name_="water_10react", EandF_=EnAndForce)
	meta.Prop()
def meta_opt():
	"""Explore local reactions of the water-10 cluster via LocalReactions
	using the trained electrostatics-embedded BP water network."""
	a=MSet("water10")
	a.ReadXYZ()
	TreatedAtoms = a.AtomTypes()
	m=a.mols[0]
	# MD / metadynamics controls.
	PARAMS["MDdt"] = 0.5
	PARAMS["RemoveInvariant"] = True
	PARAMS["MDMaxStep"] = 50000
	PARAMS["MDThermostat"] = "Andersen"
	PARAMS["MDTemp"]= 600.0
	PARAMS["MDV0"] = "Random"
	PARAMS["MetaMDBumpHeight"] = 2.0
	PARAMS["MetaMDBumpWidth"] = 3.0
	PARAMS["MetaMaxBumps"] = 2000
	PARAMS["MetaBowlK"] = 0.2
	PARAMS["MetaBumpTime"] = 5.0
	# Network configuration matching the saved "water_network" manager.
	PARAMS["tf_prec"] = "tf.float64"
	PARAMS["NeuronType"] = "sigmoid_with_param"
	PARAMS["sigmoid_alpha"] = 100.0
	PARAMS["HiddenLayers"] = [500, 500, 500]
	PARAMS["EECutoff"] = 15.0
	PARAMS["EECutoffOn"] = 0
	PARAMS["Elu_Width"] = 4.6 # when elu is used EECutoffOn should always equal to 0
	PARAMS["EECutoffOff"] = 15.0
	PARAMS["DSFAlpha"] = 0.18
	PARAMS["AddEcc"] = True
	PARAMS["KeepProb"] = [1.0, 1.0, 1.0, 1.0]
	PARAMS["OptMaxCycles"]= 2000
	PARAMS["OptThresh"] =0.00002
	d = MolDigester(TreatedAtoms, name_="ANI1_Sym_Direct", OType_="EnergyAndDipole")
	tset = TensorMolData_BP_Direct_EE_WithEle(a, d, order_=1, num_indis_=1, type_="mol", WithGrad_ = True)
	manager=TFMolManage("water_network",tset,False,"fc_sqdiff_BP_Direct_EE_ChargeEncode_Update_vdw_DSF_elu_Normalize_Dropout",False,False)
	# Sum of per-element reference energies for the atomization shift.
	atomization_energy = 0.0
	for atom in m.atoms:
		if atom in ele_U:
			atomization_energy += ele_U[atom]
	def EnAndForce(x_, DoForce=True):
		# Shifted energy (and force) of molecule m at coordinates x_.
		mtmp = Mol(m.atoms,x_)
		Etotal, Ebp, Ebp_atom, Ecc, Evdw, mol_dipole, atom_charge, gradient = manager.EvalBPDirectEEUpdateSingle(mtmp, PARAMS["AN1_r_Rc"], PARAMS["AN1_a_Rc"], PARAMS["EECutoffOff"], True)
		energy = Etotal[0] + atomization_energy
		force = gradient[0]
		if DoForce:
			return energy, force
		else:
			return energy
	# PARAMS["OptMaxCycles"]=500
	web = LocalReactions(EnAndForce,m,50)
	# NOTE(review): exit(0) terminates the process here, so the
	# MetaOptimizer code below is unreachable as written.
	exit(0)
	Opt = MetaOptimizer(EnAndForce,m,Box_=False)
	Opt.MetaOpt(m)
def water_web():
	"""Evaluate the trained water network along a stored reaction-web path
	(WebPath) and write per-frame energies in kcal/mol to web_energies.dat."""
	a=MSet("WebPath")
	a.ReadXYZ()
	TreatedAtoms = a.AtomTypes()
	m=a.mols[0]
	# MD / metadynamics controls (unused by the evaluation loop below, but
	# kept consistent with the sibling water routines).
	PARAMS["MDdt"] = 0.5
	PARAMS["RemoveInvariant"] = True
	PARAMS["MDMaxStep"] = 50000
	PARAMS["MDThermostat"] = "Andersen"
	PARAMS["MDTemp"]= 600.0
	PARAMS["MDV0"] = "Random"
	PARAMS["MetaMDBumpHeight"] = 2.0
	PARAMS["MetaMDBumpWidth"] = 3.0
	PARAMS["MetaMaxBumps"] = 2000
	PARAMS["MetaBowlK"] = 0.2
	PARAMS["MetaBumpTime"] = 5.0
	# Network configuration matching the saved "water_network" manager.
	PARAMS["tf_prec"] = "tf.float64"
	PARAMS["NeuronType"] = "sigmoid_with_param"
	PARAMS["sigmoid_alpha"] = 100.0
	PARAMS["HiddenLayers"] = [500, 500, 500]
	PARAMS["EECutoff"] = 15.0
	PARAMS["EECutoffOn"] = 0
	PARAMS["Elu_Width"] = 4.6 # when elu is used EECutoffOn should always equal to 0
	PARAMS["EECutoffOff"] = 15.0
	PARAMS["DSFAlpha"] = 0.18
	PARAMS["AddEcc"] = True
	PARAMS["KeepProb"] = [1.0, 1.0, 1.0, 1.0]
	PARAMS["OptMaxCycles"]= 2000
	PARAMS["OptThresh"] =0.00002
	d = MolDigester(TreatedAtoms, name_="ANI1_Sym_Direct", OType_="EnergyAndDipole")
	tset = TensorMolData_BP_Direct_EE_WithEle(a, d, order_=1, num_indis_=1, type_="mol", WithGrad_ = True)
	manager=TFMolManage("water_network",tset,False,"fc_sqdiff_BP_Direct_EE_ChargeEncode_Update_vdw_DSF_elu_Normalize_Dropout",False,False)
	# Sum of per-element reference energies for the atomization shift.
	atomization_energy = 0.0
	for atom in m.atoms:
		if atom in ele_U:
			atomization_energy += ele_U[atom]
	def EnAndForce(x_, DoForce=True):
		# Shifted energy (and force) of molecule m at coordinates x_.
		# NOTE(review): the closure always uses m.atoms, so every frame in the
		# loop below is assumed to share the first molecule's atom ordering.
		mtmp = Mol(m.atoms,x_)
		Etotal, Ebp, Ebp_atom, Ecc, Evdw, mol_dipole, atom_charge, gradient = manager.EvalBPDirectEEUpdateSingle(mtmp, PARAMS["AN1_r_Rc"], PARAMS["AN1_a_Rc"], PARAMS["EECutoffOff"], True)
		energy = Etotal[0] + atomization_energy
		force = gradient[0]
		if DoForce:
			return energy, force
		else:
			return energy
	f=open("web_energies.dat", "w")
	# 627.509 converts hartree to kcal/mol.
	for i, mol in enumerate(a.mols):
		en = EnAndForce(mol.coords, DoForce=False)
		f.write(str(i)+" "+str(en*627.509)+"\n")
	f.close()
# PARAMS["RBFS"] = np.stack((np.linspace(0.1, 5.0, 32), np.repeat(0.25, 32)), axis=1)
# PARAMS["SH_NRAD"] = 32
# PARAMS["SH_LMAX"] = 4
# a = MSet("water_dimer")
# a.ReadXYZ()
# manager = TFMolManageDirect(name="BehlerParinelloDirectGauSH_H2O_wb97xd_1to21_with_prontonated_Mon_Nov_13_11.35.07_2017", network_type = "BehlerParinelloDirectGauSH")
# print manager.evaluate_mol(a.mols[0], False).shape
# for i in range(100):
# mol = Mol(a.mols[0].atoms, rot_coords[0,i])
# mol.WriteXYZfile()
# InterpoleGeometries()
# read_unpacked_set()
# TrainKRR(set_="SmallMols_rand", dig_ = "GauSH", OType_="Force")
# RandomSmallSet("SmallMols", 10000)
# TestMetadynamics()
# test_md()
# TestTFBond()
# TestTFGauSH()
# test_gaussian_overlap()
# train_forces_GauSH_direct("SmallMols_rand")
# test_tf_neighbor()
# train_energy_pairs_triples()
# train_energy_symm_func("water_wb97xd_6311gss")
# Active entry point: train the direct GauSH energy network on the water set.
train_energy_GauSH("water_wb97xd_6311gss")
# test_h2o()
# evaluate_BPSymFunc("nicotine_vib")
# water_dimer_plot()
# nicotine_cc_stretch_plot()
# meta_statistics()
# meta_stat_plot()
# harmonic_freq()
# train_Poly_GauSH()
#water_ir()
# GetWaterNetwork()
# water_meta_opt()
# water_meta_react()
# meta_opt()
# water_web()
# f=open("nicotine_md_aimd_log.dat","r")
# f2=open("nicotine_md_aimd_energies.dat", "w")
# lines=f.readlines()
# for line in lines:
# f2.write(str(float(line.split()[0])/1000.0)+" "+str(float(line.split()[7]) * 627.509)+"\n")
# f.close()
# f2.close()
# import pickle
# water_data = pickle.load(open("./datasets/H2O_wbxd_1to21_with_prontonated.dat","rb"))
# a=MSet("water_clusters")
# for i, mol in enumerate(water_data):
# a.mols.append(Mol(np.array(mol["atoms"]), mol["xyz"]))
# a.mols[-1].properties["name"] = mol["name"]
# a.mols[-1].properties["energy"] = mol["scf_energy"]
# a.mols[-1].properties["dipole"] = np.array(mol["dipole"])
# a.mols[-1].properties["gradients"] = mol["gradients"]
# try:
# a.mols[-1].properties["quadrupole"] = np.array(mol["quad"])
# a.mols[-1].properties["mulliken_charges"] = np.array(mol["charges"])
# except Exception as Ex:
# print Ex
# print i
# pass
# a.Save()
# PARAMS["tf_prec"] = "tf.float32"
# PARAMS["RBFS"] = np.stack((np.linspace(0.1, 6.0, 16), np.repeat(0.35, 16)), axis=1)
# PARAMS["SH_NRAD"] = 16
# a = MSet("SmallMols_rand")
# a.Load()
# # a.mols.append(Mol(np.array([1,1,8]),np.array([[0.9,0.1,0.1],[1.,0.9,1.],[0.1,0.1,0.1]])))
# # # # Tesselate that water to create a box
# # ntess = 16
# # latv = 2.8*np.eye(3)
# # # # # Start with a water in a ten angstrom box.
# # lat = Lattice(latv)
# # mc = lat.CenteredInLattice(a.mols[0])
# # mt = Mol(*lat.TessNTimes(mc.atoms,mc.coords,ntess))
# # # # mt.WriteXYZfile()
# b=MSet()
# for i in range(1):
# b.mols.append(a.mols[i])
# new_mol = copy.deepcopy(a.mols[i])
# new_mol.RotateRandomUniform()
# b.mols.append(new_mol)
# # # a=MSet("SmallMols_rand")
# # # a.Load()
# maxnatoms = b.MaxNAtoms()
# zlist = []
# xyzlist = []
# n_atoms_list = []
# for i, mol in enumerate(b.mols):
# paddedxyz = np.zeros((maxnatoms,3), dtype=np.float32)
# paddedxyz[:mol.atoms.shape[0]] = mol.coords
# paddedz = np.zeros((maxnatoms), dtype=np.int32)
# paddedz[:mol.atoms.shape[0]] = mol.atoms
# xyzlist.append(paddedxyz)
# zlist.append(paddedz)
# n_atoms_list.append(mol.NAtoms())
# if i == 99:
# break
# xyzstack = tf.stack(xyzlist)
# zstack = tf.stack(zlist)
# natomsstack = tf.stack(n_atoms_list)
# r_cutoff = 7.0
# gaussian_params = tf.Variable(PARAMS["RBFS"], trainable=True, dtype=tf.float32)
# # atomic_embed_factors = tf.Variable(PARAMS["ANES"], trainable=True, dtype=tf.float32)
# elements = tf.constant([1, 8], dtype=tf.int32)
# # tmp = tf_neighbor_list_sort(xyzstack, zstack, natomsstack, elements, r_cutoff)
# # tmp = tf_sparse_gauss_harmonics_echannel(xyzstack, zstack, natomsstack, elements, gaussian_params, 4, r_cutoff)
# tmp2 = tf_gauss_harmonics_echannel(xyzstack, zstack, elements, gaussian_params, 8)
# sess = tf.Session()
# sess.run(tf.global_variables_initializer())
# # options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
# # run_metadata = tf.RunMetadata()
# # # for i in range(a.mols[0].atoms.shape[0]):
# # # print a.mols[0].atoms[i], " ", a.mols[0].coords[i,0], " ", a.mols[0].coords[i,1], " ", a.mols[0].coords[i,2]
# @TMTiming("test")
# def get_pairs():
# tmp3 = sess.run(tmp2)
# return tmp3
# tmp5 = get_pairs()
# print tmp5[:13].shape
# print tmp5[13:].shape
# print np.allclose(tmp5[:13], tmp5[13:], 1e-03)
# # print np.isclose(tmp5[0][0], tmp6[0][0], 1e-01)
# # fetched_timeline = timeline.Timeline(run_metadata.step_stats)
# # chrome_trace = fetched_timeline.generate_chrome_trace_format()
# # with open('timeline_step_tmp_tm_nocheck_h2o.json', 'w') as f:
# # f.write(chrome_trace)
| {
"pile_set_name": "Github"
} |
<wmts>
<id>WMTSInfoImpl-f946a9e:16005080bca:-8000</id>
<enabled>true</enabled>
<name>WMTS</name>
<title>GeoServer Web Map Tile Service</title>
<maintainer>http://geoserver.org/com</maintainer>
<abstrct>A compliant implementation of WMTS service.</abstrct>
<accessConstraints>NONE</accessConstraints>
<fees>NONE</fees>
<versions>
<org.geotools.util.Version>
<version>1.0.0</version>
</org.geotools.util.Version>
</versions>
<keywords>
<string>WMTS\@language=en\;</string>
</keywords>
<citeCompliant>false</citeCompliant>
<onlineResource>http://geoserver.org</onlineResource>
<schemaBaseURL>http://schemas.opengis.net</schemaBaseURL>
<verbose>false</verbose>
</wmts> | {
"pile_set_name": "Github"
} |
/*BEGIN_LEGAL
Intel Open Source License
Copyright (c) 2002-2015 Intel Corporation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer. Redistributions
in binary form must reproduce the above copyright notice, this list of
conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution. Neither the name of
the Intel Corporation nor the names of its contributors may be used to
endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE INTEL OR
ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
END_LEGAL */
/*
@ORIGINAL_AUTHOR: Daniel Lemel
*/
/*! @file
* Limit the size of traces in the code cache, then check random traces in it.
*/
#include "pin_tests_util.H"
// CODECACHE init callback: cap every trace in the code cache at a single
// instruction. Installed via CODECACHE_AddCacheInitFunction() in main();
// the Trace() callback below verifies the limit is honored.
VOID LimitTraces()
{
    CODECACHE_ChangeMaxInsPerTrace(1);
}
// Instrumentation-time callback: for a random ~20% sample of traces, count
// the instructions they contain and assert that the one-instruction-per-trace
// limit set by LimitTraces() was respected.
VOID Trace(TRACE trace, VOID *v)
{
    // Skip roughly four out of five traces at random.
    if (rand() % 100 <= 80) return;

    UINT32 numIns = 0;
    for (BBL bbl = TRACE_BblHead(trace); BBL_Valid(bbl); bbl = BBL_Next(bbl))
    {
        for (INS ins = BBL_InsHead(bbl); INS_Valid(ins); ins = INS_Next(ins))
        {
            numIns++;
        }
    }
    TEST(numIns == 1, "CODECACHE_ChangeMaxInsPerTrace failed");
}
int main(INT32 argc, CHAR **argv)
{
    // Initialize Pin before any other Pin API call.
    PIN_Init(argc, argv);
    // Seed rand() so Trace() samples a different subset of traces each run.
    srand(time(0));
    // Apply the one-instruction-per-trace limit when the code cache is
    // initialized, then check randomly chosen traces as they are built.
    CODECACHE_AddCacheInitFunction(LimitTraces, 0);
    TRACE_AddInstrumentFunction(Trace, 0);
    // Never returns
    PIN_StartProgram();
    return 0;
}
| {
"pile_set_name": "Github"
} |
{
"name": "tinyproxy",
"full_name": "tinyproxy",
"oldname": null,
"aliases": [
],
"versioned_formulae": [
],
"desc": "HTTP/HTTPS proxy for POSIX systems",
"license": "GPL-2.0",
"homepage": "https://tinyproxy.github.io/",
"versions": {
"stable": "1.10.0",
"head": null,
"bottle": true
},
"urls": {
"stable": {
"url": "https://github.com/tinyproxy/tinyproxy/releases/download/1.10.0/tinyproxy-1.10.0.tar.xz",
"tag": null,
"revision": null
}
},
"revision": 1,
"version_scheme": 0,
"bottle": {
"stable": {
"rebuild": 0,
"cellar": "/home/linuxbrew/.linuxbrew/Cellar",
"prefix": "/home/linuxbrew/.linuxbrew",
"root_url": "https://linuxbrew.bintray.com/bottles",
"files": {
"catalina": {
"url": "https://linuxbrew.bintray.com/bottles/tinyproxy-1.10.0_1.catalina.bottle.tar.gz",
"sha256": "e5a6e416b7f80da4a8e3af8ebaaf4e4c30d5f375845e44e72878170eeabffac0"
},
"mojave": {
"url": "https://linuxbrew.bintray.com/bottles/tinyproxy-1.10.0_1.mojave.bottle.tar.gz",
"sha256": "fdf164a29e4730795b6b66fdabb34a35f34b91e4d8c896fa461542ec356d464d"
},
"high_sierra": {
"url": "https://linuxbrew.bintray.com/bottles/tinyproxy-1.10.0_1.high_sierra.bottle.tar.gz",
"sha256": "05aed7a81fe9f92f043fe55ac10dba2474df664f710c01ee92283e5cf7fe0324"
},
"sierra": {
"url": "https://linuxbrew.bintray.com/bottles/tinyproxy-1.10.0_1.sierra.bottle.tar.gz",
"sha256": "97cefacaaf1aa12eabe102ad86cee01c24f50f2a3ec07ca1eb17799319f02385"
}
}
}
},
"keg_only": false,
"bottle_disabled": false,
"options": [
],
"build_dependencies": [
"asciidoc",
"docbook-xsl"
],
"dependencies": [
],
"recommended_dependencies": [
],
"optional_dependencies": [
],
"uses_from_macos": [
],
"requirements": [
],
"conflicts_with": [
],
"caveats": null,
"installed": [
],
"linked_keg": null,
"pinned": false,
"outdated": false,
"deprecated": false,
"disabled": false
}
| {
"pile_set_name": "Github"
} |
<!-- [START maps_combining_data_iframe] -->
<script src="https://polyfill.io/v3/polyfill.min.js?features=default"></script>
<script
src="https://maps.googleapis.com/maps/api/js?key=AIzaSyBIwzALxUPNbatRBj3Xi1Uhp0fFzwWNBkE&callback=initMap&libraries=&v=weekly"
defer
></script>
<style type="text/css">
html,
body,
#map {
height: 100%;
margin: 0;
padding: 0;
overflow: hidden;
}
.nicebox {
position: absolute;
text-align: center;
font-family: "Roboto", "Arial", sans-serif;
font-size: 13px;
z-index: 5;
box-shadow: 0 4px 6px -4px #333;
padding: 5px 10px;
background: white;
background: linear-gradient(to bottom, white 0%, whitesmoke 100%);
border: #e5e5e5 1px solid;
}
#controls {
top: 10px;
left: 110px;
width: 360px;
height: 45px;
}
#data-box {
top: 10px;
left: 500px;
height: 45px;
line-height: 45px;
display: none;
}
#census-variable {
width: 360px;
height: 20px;
}
#legend {
display: flex;
display: -webkit-box;
padding-top: 7px;
}
.color-key {
background: linear-gradient(
to right,
#db4639 0%,
#db7f29 17%,
#d1bf1f 33%,
#92c51b 50%,
#48ba17 67%,
#12ab24 83%,
#0f9f59 100%
);
flex: 1;
-webkit-box-flex: 1;
margin: 0 5px;
text-align: left;
font-size: 1em;
line-height: 1em;
}
#data-value {
font-size: 2em;
font-weight: bold;
}
#data-label {
font-size: 2em;
font-weight: normal;
padding-right: 10px;
}
#data-label:after {
content: ":";
}
#data-caret {
margin-left: -5px;
display: none;
font-size: 14px;
width: 14px;
}
</style>
<script>
"use strict";
// Base-map style: hide every default feature, then re-enable land and
// water geometry with flat, muted colors so the data overlay stands out.
var mapStyle = [
  {
    stylers: [
      {
        visibility: "off",
      },
    ],
  },
  {
    featureType: "landscape",
    elementType: "geometry",
    stylers: [
      {
        visibility: "on",
      },
      {
        color: "#fcfcfc",
      },
    ],
  },
  {
    featureType: "water",
    elementType: "geometry",
    stylers: [
      {
        visibility: "on",
      },
      {
        color: "#bfd4ff",
      },
    ],
  },
];
// The google.maps.Map instance created by initMap().
var map;
// Min/max of the currently loaded census variable; used to normalize
// values for the color gradient and as the legend endpoints.
var censusMin = Number.MAX_VALUE,
  censusMax = -Number.MAX_VALUE;
/**
 * Entry point invoked by the Maps JavaScript API once it has loaded.
 * Creates the styled map, installs the data-layer styling and hover
 * handlers, wires the census-variable selector, and loads the state
 * polygons.
 */
function initMap() {
  // load the map
  map = new google.maps.Map(document.getElementById("map"), {
    center: {
      lat: 40,
      lng: -100,
    },
    zoom: 4,
    styles: mapStyle,
  }); // set up the style rules and events for google.maps.Data
  map.data.setStyle(styleFeature);
  map.data.addListener("mouseover", mouseInToRegion);
  map.data.addListener("mouseout", mouseOutOfRegion); // wire up the button
  var selectBox = document.getElementById("census-variable");
  google.maps.event.addDomListener(selectBox, "change", function () {
    // Selecting a new variable resets the current data before reloading.
    clearCensusData();
    loadCensusData(selectBox.options[selectBox.selectedIndex].value);
  }); // state polygons only need to be loaded once, do them now
  loadMapShapes();
}
/** Loads the state boundary polygons from a GeoJSON source. */
function loadMapShapes() {
  // load US state outline polygons from a GeoJson file; idPropertyName
  // lets loadCensusData() look features up later by their STATE code.
  map.data.loadGeoJson(
    "https://storage.googleapis.com/mapsdevsite/json/states.js",
    {
      idPropertyName: "STATE",
    }
  ); // wait for the request to complete by listening for the first feature to be
  // added
  google.maps.event.addListenerOnce(map.data, "addfeature", function () {
    // Fire a census-data load for the currently selected variable.
    google.maps.event.trigger(
      document.getElementById("census-variable"),
      "change"
    );
  });
}
/**
 * Loads the census data from a simulated API call to the US Census API
 * and folds it into the map's data layer, updating the legend bounds.
 *
 * @param {string} variable base URL of the JSON resource holding one
 *     census variable; rows are [value, stateId] pairs after a header row.
 */
function loadCensusData(variable) {
  // load the requested variable from the census API (using local copies)
  var xhr = new XMLHttpRequest();
  xhr.open("GET", variable + ".json");
  xhr.onload = function () {
    var censusData = JSON.parse(xhr.responseText);
    censusData.shift(); // the first row contains column names
    censusData.forEach(function (row) {
      var censusVariable = parseFloat(row[0]);
      var stateId = row[1]; // keep track of min and max values
      if (censusVariable < censusMin) {
        censusMin = censusVariable;
      }
      if (censusVariable > censusMax) {
        censusMax = censusVariable;
      } // update the existing row with the new data
      map.data
        .getFeatureById(stateId)
        .setProperty("census_variable", censusVariable);
    }); // update and display the legend
    document.getElementById(
      "census-min"
    ).textContent = censusMin.toLocaleString();
    document.getElementById(
      "census-max"
    ).textContent = censusMax.toLocaleString();
  };
  // Surface network failures instead of silently leaving the map empty.
  xhr.onerror = function () {
    console.error("Failed to load census data from " + variable + ".json");
  };
  xhr.send();
}
/**
 * Removes the census values from every state feature and resets the UI
 * (legend bounds and data readout) ahead of loading a new variable.
 */
function clearCensusData() {
  censusMin = Number.MAX_VALUE;
  censusMax = -Number.MAX_VALUE;
  map.data.forEach(function (feature) {
    feature.setProperty("census_variable", undefined);
  });
  var hidden = ["data-box", "data-caret"];
  for (var i = 0; i < hidden.length; i++) {
    document.getElementById(hidden[i]).style.display = "none";
  }
}
/**
 * Data-layer styling callback: colors each state along an HSL gradient
 * according to its 'census_variable' value, hides states with no numeric
 * value, and thickens the outline of the feature being hovered.
 * This is the callback passed to data.setStyle() and is called for each
 * row in the data set; see the docs for Data.StylingFunction.
 *
 * @param {google.maps.Data.Feature} feature
 */
function styleFeature(feature) {
  var gradientLow = [5, 69, 54]; // HSL endpoint for the smallest datum
  var gradientHigh = [151, 83, 34]; // HSL endpoint for the largest datum
  var value = feature.getProperty("census_variable");
  // Fractional position of this value between the data min and max.
  var delta = (value - censusMin) / (censusMax - censusMin);
  // Interpolate each HSL channel between the two gradient endpoints.
  var channels = gradientLow.map(function (lowComponent, i) {
    return (gradientHigh[i] - lowComponent) * delta + lowComponent;
  });
  // States with a missing or non-numeric value are hidden entirely.
  var visible = !(value == null || isNaN(value));
  var isHovered = feature.getProperty("state") === "hover";
  return {
    strokeWeight: isHovered ? 2 : 0.5,
    strokeColor: "#fff",
    zIndex: isHovered ? 2 : 1,
    fillColor:
      "hsl(" + channels[0] + "," + channels[1] + "%," + channels[2] + "%)",
    fillOpacity: 0.75,
    visible: visible,
  };
}
/**
 * Responds to the mouse-in event on a map shape (state): marks the
 * feature as hovered, fills in the data readout, and positions the
 * legend caret at the value's relative position.
 *
 * @param {?google.maps.MouseEvent} e
 */
function mouseInToRegion(e) {
  var feature = e.feature;
  // Set the hover state so styleFeature() thickens this shape's border.
  feature.setProperty("state", "hover");
  var value = feature.getProperty("census_variable");
  // Position of this value within [censusMin, censusMax], as a percentage.
  var percent = ((value - censusMin) / (censusMax - censusMin)) * 100;
  // Update the data readout with the state's name and value.
  document.getElementById("data-label").textContent =
    feature.getProperty("NAME");
  document.getElementById("data-value").textContent = value.toLocaleString();
  document.getElementById("data-box").style.display = "block";
  var caret = document.getElementById("data-caret");
  caret.style.display = "block";
  // Slide the caret along the legend gradient to mark this value.
  caret.style.paddingLeft = percent + "%";
}
/**
 * Responds to the mouse-out event on a map shape (state).
 *
 * @param {?google.maps.MouseEvent} e
 */
function mouseOutOfRegion(e) {
  // reset the hover state, returning the border to normal
  e.feature.setProperty("state", "normal");
}
</script>
<div id="iframe-contents">
<div id="controls" class="nicebox">
<div>
<select id="census-variable">
<option
value="https://storage.googleapis.com/mapsdevsite/json/DP02_0066PE"
>
Percent of population over 25 that completed high school
</option>
<option
value="https://storage.googleapis.com/mapsdevsite/json/DP05_0017E"
>
Median age
</option>
<option
value="https://storage.googleapis.com/mapsdevsite/json/DP05_0001E"
>
Total population
</option>
<option
value="https://storage.googleapis.com/mapsdevsite/json/DP02_0016E"
>
Average family size
</option>
<option
value="https://storage.googleapis.com/mapsdevsite/json/DP03_0088E"
>
Per-capita income
</option>
</select>
</div>
<div id="legend">
<div id="census-min">min</div>
<div class="color-key"><span id="data-caret">◆</span></div>
<div id="census-max">max</div>
</div>
</div>
<div id="data-box" class="nicebox">
<label id="data-label" for="data-value"></label>
<span id="data-value"></span>
</div>
<div id="map"></div>
</div>
<!-- [END maps_combining_data_iframe] -->
| {
"pile_set_name": "Github"
} |
// Test TCP_MAXSEG works on passive/server TCP connections.
`../common/defaults.sh`
// Set up a listening socket.
0 socket(..., SOCK_STREAM, IPPROTO_TCP) = 3
+0 setsockopt(3, SOL_SOCKET, SO_REUSEADDR, [1], 4) = 0
+0 bind(3, ..., ...) = 0
+0 listen(3, 1) = 0
// Set MSS to 1100.
+.01 setsockopt(3, SOL_TCP, TCP_MAXSEG, [1100], 4) = 0
// Note: the unconnected listener still reports the default MSS (536)
// even though 1100 was just configured above.
+.01 getsockopt(3, SOL_TCP, TCP_MAXSEG, [536], [4]) = 0
// Establish a connection with an outgoing advertised MSS of 1100.
// The peer offers 1300, but our SYN-ACK advertises the configured 1100.
+.08 < S 0:0(0) win 32792 <mss 1300,nop,wscale 7>
+0 > S. 0:0(0) ack 1 <mss 1100,nop,wscale 8>
+.1 < . 1:1(0) ack 1 win 257
+.1 accept(3, ..., ...) = 4
// The accepted socket inherits the configured MSS of 1100.
+.1 getsockopt(4, SOL_TCP, TCP_MAXSEG, [1100], [4]) = 0
+.1 %{ assert tcpi_advmss == 1100, tcpi_advmss; assert tcpi_snd_mss == 1100, tcpi_snd_mss }%
// IW10 MSS should yield outgoing TSO packet with 10*1100 == 11000 bytes:
+.1 write(4, ..., 12000) = 12000
+0 > P. 1:11001(11000) ack 1
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 5578b82f17c954843901e8b68ca21dcc
labels:
- lowpoly
- rock
- low-poly
- rocks
- art
- pack
- set
- free
timeCreated: 1457638041
licenseType: Store
NativeFormatImporter:
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2000, 2020, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* http://oss.oracle.com/licenses/upl.
*/
package com.tangosol.io.pof.reflect;
import com.tangosol.io.ReadBuffer;
import com.tangosol.io.pof.PofContext;
/**
 * PofUniformArray is {@link PofValue} implementation for uniform arrays.
 *
 * @author as  2009.03.06
 * @since Coherence 3.5
 */
public class PofUniformArray
        extends PofArray
    {
    // ----- constructors ---------------------------------------------------

    /**
     * Construct a PofUniformArray instance wrapping the supplied buffer.
     *
     * @param valueParent   parent value within the POF stream
     * @param bufValue      buffer containing POF representation of this value
     * @param ctx           POF context to use when reading or writing properties
     * @param of            offset of this value from the beginning of POF stream
     * @param nType         POF type identifier for this value
     * @param ofChildren    offset of the first child element within this value
     * @param cElements     the length of this array
     * @param nElementType  a POF type identifier for this value's elements
     */
    public PofUniformArray(PofValue valueParent, ReadBuffer bufValue, PofContext ctx,
            int of, int nType, int ofChildren, int cElements, int nElementType)
        {
        super(valueParent, bufValue, ctx, of, nType, ofChildren, cElements);

        // In a uniform array all elements share a single type, so record the
        // shared element type id once for the whole array.
        setUniformElementType(nElementType);
        }
    }
"pile_set_name": "Github"
} |
{
"author": "jbenet",
"bugs": {},
"gx": {
"dvcsimport": "github.com/libp2p/go-msgio"
},
"gxDependencies": [
{
"hash": "QmYNGtJHgaGZkpzq8yG6Wxqm6EQTKqgpBfnyyGBKbZeDUi",
"name": "go-randbuf",
"version": "0.0.0"
},
{
"author": "Stebalien",
"hash": "QmQDvJoB6aJWN3sjr3xsgXqKCXf4jU5zdMXpDMsBkYVNqa",
"name": "go-buffer-pool",
"version": "0.1.3"
}
],
"gxVersion": "0.11.0",
"language": "go",
"license": "",
"name": "go-msgio",
"releaseCmd": "git commit -a -m \"gx publish $VERSION\"",
"version": "0.0.6"
}
| {
"pile_set_name": "Github"
} |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*! \page getdescendants
\addindex getdescendants
\section getdescendants getdescendants
\subsection operation Operation
Get descendants.
\subsection format Format
\code
getdescendants
index
\endcode
\subsection forms Forms
\code
getdescendants = 89 (0x59)
\endcode
\subsection stack Stack
\code
..., obj, [ns], [name] => ..., value
\endcode
\subsection description Description
index is a u30 that must be an index into the multiname constant pool. If the multiname at
that index is a runtime multiname the name and/or namespace will also appear on the stack
so that the multiname can be constructed correctly at runtime.
obj is the object to find the descendants in. This will invoke the [[Descendants]] property
on obj with the multiname specified by index. For a description of the [[Descendants]] operator,
see the E4X spec (ECMA-357) sections 9.1.1.8 (for the XML type) and 9.2.1.8 (for the XMLList type).
\subsection rt_execeptions Runtime exceptions
A TypeError is thrown if obj is not of type XML or XMLList.
\subsection notes Notes
None.
*/
| {
"pile_set_name": "Github"
} |
// WARNING: DO NOT EDIT THIS FILE. THIS FILE IS MANAGED BY SPRING ROO.
// You may push code into the target .java compilation unit if you wish to edit any member(s).
package nl.bzk.brp.model.data.kern;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import nl.bzk.brp.model.data.kern.HisPersoverlijden;
import org.springframework.transaction.annotation.Transactional;
// AspectJ inter-type declarations generated by Spring Roo: they graft the
// standard JPA "active record" members (an injected EntityManager plus
// CRUD helpers) onto the HisPersoverlijden entity.
// NOTE(review): this file is Roo-managed; comments here may be regenerated
// away. Push members into the target .java unit to edit them permanently.
privileged aspect HisPersoverlijden_Roo_Jpa_ActiveRecord {

    // Container-injected JPA EntityManager; transient so it is excluded
    // from serialization.
    @PersistenceContext
    transient EntityManager HisPersoverlijden.entityManager;

    // Obtains a usable EntityManager by instantiating a throwaway entity and
    // reading its injected field; fails fast if injection is not configured.
    public static final EntityManager HisPersoverlijden.entityManager() {
        EntityManager em = new HisPersoverlijden().entityManager;
        if (em == null) throw new IllegalStateException("Entity manager has not been injected (is the Spring Aspects JAR configured as an AJC/AJDT aspects library?)");
        return em;
    }

    // Returns the total number of HisPersoverlijden rows.
    public static long HisPersoverlijden.countHisPersoverlijdens() {
        return entityManager().createQuery("SELECT COUNT(o) FROM HisPersoverlijden o", Long.class).getSingleResult();
    }

    // Loads every HisPersoverlijden entity.
    public static List<HisPersoverlijden> HisPersoverlijden.findAllHisPersoverlijdens() {
        return entityManager().createQuery("SELECT o FROM HisPersoverlijden o", HisPersoverlijden.class).getResultList();
    }

    // Looks an entity up by primary key; returns null when id is null.
    public static HisPersoverlijden HisPersoverlijden.findHisPersoverlijden(Long id) {
        if (id == null) return null;
        return entityManager().find(HisPersoverlijden.class, id);
    }

    // Returns one page of entities, for paged listings.
    public static List<HisPersoverlijden> HisPersoverlijden.findHisPersoverlijdenEntries(int firstResult, int maxResults) {
        return entityManager().createQuery("SELECT o FROM HisPersoverlijden o", HisPersoverlijden.class).setFirstResult(firstResult).setMaxResults(maxResults).getResultList();
    }

    // Persists this (new) instance.
    @Transactional
    public void HisPersoverlijden.persist() {
        if (this.entityManager == null) this.entityManager = entityManager();
        this.entityManager.persist(this);
    }

    // Removes this instance; re-attaches it first when it is detached.
    @Transactional
    public void HisPersoverlijden.remove() {
        if (this.entityManager == null) this.entityManager = entityManager();
        if (this.entityManager.contains(this)) {
            this.entityManager.remove(this);
        } else {
            HisPersoverlijden attached = HisPersoverlijden.findHisPersoverlijden(this.id);
            this.entityManager.remove(attached);
        }
    }

    // Flushes pending changes to the database.
    @Transactional
    public void HisPersoverlijden.flush() {
        if (this.entityManager == null) this.entityManager = entityManager();
        this.entityManager.flush();
    }

    // Clears the persistence context, detaching all managed entities.
    @Transactional
    public void HisPersoverlijden.clear() {
        if (this.entityManager == null) this.entityManager = entityManager();
        this.entityManager.clear();
    }

    // Merges this (possibly detached) instance and returns the managed copy.
    @Transactional
    public HisPersoverlijden HisPersoverlijden.merge() {
        if (this.entityManager == null) this.entityManager = entityManager();
        HisPersoverlijden merged = this.entityManager.merge(this);
        this.entityManager.flush();
        return merged;
    }
}
| {
"pile_set_name": "Github"
} |
Reparsing block
----------
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)
----------
start qq4
end qq4
----------
After typing
----------
sub something {
say 'sub start';
say <<'MARKER_Q1';
start q1
MARKER_Q1
say <<'MARKER_Q2';
start q2
MARKER_Q2
say <<~'MARKER_Q1';
start q3
MARKER_Q1
say <<~'MARKER_Q3';
start q4
MARKER_Q3
# dq quoted
say <<"MARKER_QQ1";
start qq1
MARKER_QQ1
say <<"MARKER_QQ2";
start qq2
MARKER_QQ2
say <<~"MARKER_QQ1";
start qq3
MARKER_QQ1
say <<~"MARKER_QQ3";
start qq4
MARKER_QX1<caret>
end qq4
MARKER_QQ3
# dq bare
say <<MARKER_QQ1;
start qq5
MARKER_QQ1
say <<MARKER_QQ2;
start qq6
MARKER_QQ2
say <<~MARKER_QQ1;
start qq7
MARKER_QQ1
say <<~MARKER_QQ3;
start qq8
MARKER_QQ3
# xq quoted
say <<`MARKER_QX1`;
start qx1
MARKER_QX1
say <<`MARKER_QX2`;
start qx2
MARKER_QX2
say <<~`MARKER_QX1`;
start qx3
MARKER_QX1
say <<~`MARKER_QX3`;
start qx4
MARKER_QX3
say 'sub end';
}
----------
Psi structure
----------
Perl5
PsiPerlSubDefinitionImpl(SUB_DEFINITION)@main::something
PsiElement(Perl5: sub)('sub')
PsiWhiteSpace(' ')
PerlSubNameElementImpl(Perl5: subname)('something')
PsiWhiteSpace(' ')
PsiPerlBlockImpl(Perl5: BLOCK)
PsiElement(Perl5: {)('{')
PsiWhiteSpace('\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlStringSqImpl(Perl5: STRING_SQ)
PsiElement(Perl5: QUOTE_SINGLE_OPEN)(''')
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('sub start')
PsiElement(Perl5: QUOTE_SINGLE_CLOSE)(''')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<')
PsiPerlStringSqImpl(Perl5: STRING_SQ)
PsiElement(Perl5: QUOTE_SINGLE_OPEN)(''')
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('MARKER_Q1')
PsiElement(Perl5: QUOTE_SINGLE_CLOSE)(''')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC)
PerlStringContentElementImpl(Perl5: STRING_CONTENT)(' start q1\n')
PsiComment(Perl5: HEREDOC_END)('MARKER_Q1')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<')
PsiPerlStringSqImpl(Perl5: STRING_SQ)
PsiElement(Perl5: QUOTE_SINGLE_OPEN)(''')
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('MARKER_Q2')
PsiElement(Perl5: QUOTE_SINGLE_CLOSE)(''')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC)
PerlStringContentElementImpl(Perl5: STRING_CONTENT)(' start q2\n')
PsiComment(Perl5: HEREDOC_END)('MARKER_Q2')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<~')
PsiPerlStringSqImpl(Perl5: STRING_SQ)
PsiElement(Perl5: QUOTE_SINGLE_OPEN)(''')
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('MARKER_Q1')
PsiElement(Perl5: QUOTE_SINGLE_CLOSE)(''')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC)
PerlStringContentElementImpl(Perl5: STRING_CONTENT)(' start q3\n')
PsiWhiteSpace(' ')
PsiComment(Perl5: HEREDOC_END_INDENTABLE)('MARKER_Q1')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<~')
PsiPerlStringSqImpl(Perl5: STRING_SQ)
PsiElement(Perl5: QUOTE_SINGLE_OPEN)(''')
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('MARKER_Q3')
PsiElement(Perl5: QUOTE_SINGLE_CLOSE)(''')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC)
PerlStringContentElementImpl(Perl5: STRING_CONTENT)(' start q4\n')
PsiWhiteSpace(' ')
PsiComment(Perl5: HEREDOC_END_INDENTABLE)('MARKER_Q3')
PsiWhiteSpace('\n\n ')
PsiComment(Perl5: COMMENT_LINE)('# dq quoted')
PsiWhiteSpace('\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<')
PsiPerlStringDqImpl(Perl5: STRING_DQ)
PsiElement(Perl5: QUOTE_DOUBLE_OPEN)('"')
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)('MARKER_QQ1')
PsiElement(Perl5: QUOTE_DOUBLE_CLOSE)('"')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QQ)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)(' start qq1\n')
PsiComment(Perl5: HEREDOC_END)('MARKER_QQ1')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<')
PsiPerlStringDqImpl(Perl5: STRING_DQ)
PsiElement(Perl5: QUOTE_DOUBLE_OPEN)('"')
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)('MARKER_QQ2')
PsiElement(Perl5: QUOTE_DOUBLE_CLOSE)('"')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QQ)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)(' start qq2\n')
PsiComment(Perl5: HEREDOC_END)('MARKER_QQ2')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<~')
PsiPerlStringDqImpl(Perl5: STRING_DQ)
PsiElement(Perl5: QUOTE_DOUBLE_OPEN)('"')
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)('MARKER_QQ1')
PsiElement(Perl5: QUOTE_DOUBLE_CLOSE)('"')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QQ)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)(' start qq3\n')
PsiWhiteSpace(' ')
PsiComment(Perl5: HEREDOC_END_INDENTABLE)('MARKER_QQ1')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<~')
PsiPerlStringDqImpl(Perl5: STRING_DQ)
PsiElement(Perl5: QUOTE_DOUBLE_OPEN)('"')
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)('MARKER_QQ3')
PsiElement(Perl5: QUOTE_DOUBLE_CLOSE)('"')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QQ)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)(' start qq4\nMARKER_QX1\n end qq4\n')
PsiWhiteSpace(' ')
PsiComment(Perl5: HEREDOC_END_INDENTABLE)('MARKER_QQ3')
PsiWhiteSpace('\n\n ')
PsiComment(Perl5: COMMENT_LINE)('# dq bare')
PsiWhiteSpace('\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<')
PsiPerlStringBareImpl(Perl5: STRING_BARE)
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('MARKER_QQ1')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QQ)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)(' start qq5\n')
PsiComment(Perl5: HEREDOC_END)('MARKER_QQ1')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<')
PsiPerlStringBareImpl(Perl5: STRING_BARE)
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('MARKER_QQ2')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QQ)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)(' start qq6\n')
PsiComment(Perl5: HEREDOC_END)('MARKER_QQ2')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<~')
PsiPerlStringBareImpl(Perl5: STRING_BARE)
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('MARKER_QQ1')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QQ)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)(' start qq7\n')
PsiWhiteSpace(' ')
PsiComment(Perl5: HEREDOC_END_INDENTABLE)('MARKER_QQ1')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<~')
PsiPerlStringBareImpl(Perl5: STRING_BARE)
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('MARKER_QQ3')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QQ)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_QQ)(' start qq8\n')
PsiWhiteSpace(' ')
PsiComment(Perl5: HEREDOC_END_INDENTABLE)('MARKER_QQ3')
PsiWhiteSpace('\n\n ')
PsiComment(Perl5: COMMENT_LINE)('# xq quoted')
PsiWhiteSpace('\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<')
PsiPerlStringXqImpl(Perl5: STRING_XQ)
PsiElement(Perl5: QUOTE_TICK_OPEN)('`')
PerlStringContentElementImpl(Perl5: STRING_CONTENT_XQ)('MARKER_QX1')
PsiElement(Perl5: QUOTE_TICK_CLOSE)('`')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QX)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_XQ)(' start qx1\n')
PsiComment(Perl5: HEREDOC_END)('MARKER_QX1')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<')
PsiPerlStringXqImpl(Perl5: STRING_XQ)
PsiElement(Perl5: QUOTE_TICK_OPEN)('`')
PerlStringContentElementImpl(Perl5: STRING_CONTENT_XQ)('MARKER_QX2')
PsiElement(Perl5: QUOTE_TICK_CLOSE)('`')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QX)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_XQ)(' start qx2\n')
PsiComment(Perl5: HEREDOC_END)('MARKER_QX2')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<~')
PsiPerlStringXqImpl(Perl5: STRING_XQ)
PsiElement(Perl5: QUOTE_TICK_OPEN)('`')
PerlStringContentElementImpl(Perl5: STRING_CONTENT_XQ)('MARKER_QX1')
PsiElement(Perl5: QUOTE_TICK_CLOSE)('`')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QX)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_XQ)(' start qx3\n')
PsiWhiteSpace(' ')
PsiComment(Perl5: HEREDOC_END_INDENTABLE)('MARKER_QX1')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlHeredocOpenerImpl(Perl5: HEREDOC_OPENER)
PsiElement(Perl5: heredoc<<)('<<~')
PsiPerlStringXqImpl(Perl5: STRING_XQ)
PsiElement(Perl5: QUOTE_TICK_OPEN)('`')
PerlStringContentElementImpl(Perl5: STRING_CONTENT_XQ)('MARKER_QX3')
PsiElement(Perl5: QUOTE_TICK_CLOSE)('`')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PerlHeredocElementImpl(Perl5: HEREDOC_QX)
PerlStringContentElementImpl(Perl5: STRING_CONTENT_XQ)(' start qx4\n')
PsiWhiteSpace(' ')
PsiComment(Perl5: HEREDOC_END_INDENTABLE)('MARKER_QX3')
PsiWhiteSpace('\n\n ')
PsiPerlStatementImpl(Perl5: STATEMENT)
PsiPerlPrintExprImpl(Perl5: PRINT_EXPR)
PsiElement(Perl5: say)('say')
PsiWhiteSpace(' ')
PsiPerlCallArgumentsImpl(Perl5: CALL_ARGUMENTS)
PsiPerlStringSqImpl(Perl5: STRING_SQ)
PsiElement(Perl5: QUOTE_SINGLE_OPEN)(''')
PerlStringContentElementImpl(Perl5: STRING_CONTENT)('sub end')
PsiElement(Perl5: QUOTE_SINGLE_CLOSE)(''')
PsiElement(Perl5: ;)(';')
PsiWhiteSpace('\n')
PsiElement(Perl5: })('}')
| {
"pile_set_name": "Github"
} |
{
"images" : [
{
"idiom" : "universal",
"filename" : "ic_routing_get_light.png",
"scale" : "1x"
},
{
"idiom" : "universal",
"filename" : "[email protected]",
"scale" : "2x"
},
{
"idiom" : "universal",
"filename" : "[email protected]",
"scale" : "3x"
}
],
"info" : {
"version" : 1,
"author" : "xcode"
}
} | {
"pile_set_name": "Github"
} |
# -*- mode: snippet -*-
# name: se
# key: se
# --
import sys; sys.exit(-1)
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:background="#00000000">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:orientation="vertical"
android:background="#00000000"
android:paddingLeft="5dp"
android:id="@+id/linearLayout">
<LinearLayout
android:layout_width="match_parent"
android:layout_height="32dp"
android:layout_marginBottom="5dp"
android:orientation="horizontal"
android:weightSum="4">
<TextView
android:id="@+id/bgm_filter"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:gravity="center"
android:background="#11123456"
android:text="bgm升降调"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</TextView>
<TextView
android:id="@+id/click_to_select_audio_filter"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:gravity="center"
android:background="#11123456"
android:text="@string/audio_filter_selected"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</TextView>
<CheckBox
android:id="@+id/paint"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="false"
android:text="手绘"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
<CheckBox
android:id="@+id/bg_image"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="false"
android:text="背景图推流"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="5dp"
android:orientation="horizontal">
<CheckBox
android:id="@+id/watermark"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="true"
android:text="@string/watermark"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
<CheckBox
android:id="@+id/mute"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="false"
android:text="@string/mute"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
<CheckBox
android:id="@+id/ns"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="false"
android:text="降噪"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
<CheckBox
android:id="@+id/bgm"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="false"
android:text="@string/bgm"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
<CheckBox
android:id="@+id/audio_ld"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:background="#11123456"
android:checked="false"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:text="AudioLD"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="5dp"
android:orientation="horizontal">
<CheckBox
android:id="@+id/ear_mirror"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="false"
android:text="@string/ear_mirror"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
<CheckBox
android:id="@+id/audio_only"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="false"
android:text="@string/audio_only"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
<CheckBox
android:id="@+id/front_camera_mirror"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="true"
android:text="@string/front_mirror"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
<CheckBox
android:id="@+id/click_to_switch_beauty"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:background="#11123456"
android:checked="false"
android:text="@string/beauty_switcher"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</CheckBox>
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="36dp"
android:layout_marginBottom="5dp"
android:orientation="horizontal"
android:weightSum="4">
<ImageView
android:id="@+id/backoff"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:paddingLeft="12dp"
android:paddingRight="12dp"
android:scaleType="centerCrop"
android:src="@drawable/recorder_delete_icon"/>
<TextView
android:id="@+id/click_to_shoot"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:gravity="center"
android:background="#11123456"
android:text="@string/click_to_shoot"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</TextView>
<TextView
android:id="@+id/click_to_record"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:gravity="center"
android:background="#11123456"
android:text="@string/start_recording"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</TextView>
<TextView
android:id="@+id/click_to_capture_screenshot"
android:layout_width="0dp"
android:layout_height="match_parent"
android:layout_gravity="center"
android:layout_marginLeft="10dp"
android:layout_weight="1"
android:gravity="center"
android:background="#11123456"
android:text="@string/capture_thumbnail"
android:textColor="@color/font_color_35"
android:textSize="@dimen/font_size_28px">
</TextView>
</LinearLayout>
</LinearLayout>
</RelativeLayout> | {
"pile_set_name": "Github"
} |
249.642 150.897 599.615 213.566 0.363847 0.925449 HessianAffine RootSIFT 0
270.133 114.316 481.291 142.384 0.38751 0.38751 MSER RootSIFT 0
668.297 308.817 274.305 455.039 0.402631 0.402631 HessianAffine RootSIFT 0
23.7186 194.456 540.396 317.712 0.407226 0.407226 HessianAffine RootSIFT 0
186.456 143.079 695.443 250.05 0.44958 0.44958 HessianAffine RootSIFT 0
416.678 266.947 386.421 245.579 0.457912 0.959695 MSER RootSIFT 0
166.211 130.271 665.371 238.603 0.459756 0.9066 MSER RootSIFT 0
139.863 109.741 631.952 221.469 0.485567 0.485567 HessianAffine RootSIFT 0
382.571 255.449 272.446 225.534 0.495076 0.929438 MSER RootSIFT 1
470.77 286.674 565.233 280.625 0.504713 0.504713 HessianAffine RootSIFT 0
139.912 215.903 684.453 331.525 0.507688 0.507688 HessianAffine RootSIFT 0
170.806 135.231 672.611 244.019 0.50935 0.50935 MSER RootSIFT 0
130.869 217.169 670.377 332.467 0.521365 0.927422 HessianAffine RootSIFT 0
187.983 137.112 696.542 243.537 0.522703 0.91467 HessianAffine RootSIFT 0
91.0857 54.508 695.443 250.05 0.52527 0.52527 HessianAffine RootSIFT 0
469.295 283.545 573.045 277.941 0.53154 0.993247 MSER RootSIFT 1
148.096 261.077 694.09 383.481 0.533477 0.533477 MSER RootSIFT 0
144.255 215.158 689.685 329.31 0.545252 0.929359 HessianAffine RootSIFT 0
27.1883 189.906 542.949 312.529 0.553864 0.849823 HessianAffine RootSIFT 0
313.485 260.826 299.975 264.815 0.555662 0.976047 MSER RootSIFT 0
353.292 245.116 191.395 210.307 0.568245 0.568245 MSER RootSIFT 1
116.168 98.2115 599.615 213.566 0.56984 0.905292 HessianAffine RootSIFT 0
260.447 159.129 614.021 220.735 0.573215 0.573215 HessianAffine RootSIFT 0
242.354 145.116 588.004 207.814 0.577935 0.577935 HessianAffine RootSIFT 0
396.575 259.455 323.353 232.716 0.578718 0.578718 MSER RootSIFT 1
134.175 233.244 677.216 353.353 0.578745 0.578745 HessianAffine RootSIFT 0
466.128 272.244 622.811 270.153 0.58392 0.58392 HessianAffine RootSIFT 0
441.535 263.801 533.633 253.832 0.583992 0.583992 HessianAffine RootSIFT 1
365.724 203.643 669.3 238.69 0.584786 0.878654 MSER RootSIFT 0
205.869 148.709 717.282 252.536 0.586463 0.842045 HessianAffine RootSIFT 0
459.15 284.3 512.838 273.203 0.588327 0.588327 MSER RootSIFT 0
110.949 35.6986 223.01 86.6516 0.58982 0.58982 HessianAffine RootSIFT 1
142.105 218.381 688.379 334.928 0.59188 0.996922 HessianAffine RootSIFT 0
119.896 225.398 655.261 350.476 0.592063 0.990208 HessianAffine RootSIFT 0
194.965 136.162 703.539 240.607 0.595389 0.921088 HessianAffine RootSIFT 0
445.254 275.908 485.692 264.78 0.59814 0.59814 MSER RootSIFT 0
331.127 210.34 727.583 269.593 0.60239 0.915198 MSER RootSIFT 0
295.397 260.934 557.6 315.027 0.602613 0.602613 HessianAffine RootSIFT 0
209.008 151.669 722.583 255.051 0.605193 0.925872 HessianAffine RootSIFT 0
59.4354 36.0839 545.664 170.858 0.60698 0.60698 HessianAffine RootSIFT 0
137.382 237.385 683.086 357.728 0.608972 0.817037 HessianAffine RootSIFT 0
351.559 146.479 654.323 169.995 0.617577 0.617577 HessianAffine RootSIFT 0
65.268 52.1435 737.882 219.38 0.620577 0.620577 HessianAffine RootSIFT 0
265.958 336.335 444.976 369.15 0.627041 0.627041 HessianAffine RootSIFT 0
184.288 127.622 691.717 233.562 0.627714 0.627714 HessianAffine RootSIFT 0
706.888 84.1086 613.785 497.656 0.632901 0.632901 HessianAffine RootSIFT 0
123.899 228.728 666.323 350.449 0.637588 0.637588 HessianAffine RootSIFT 0
568.212 102.717 274.305 455.039 0.63852 0.63852 HessianAffine RootSIFT 0
457.243 358.245 444.597 364.353 0.639511 0.639511 HessianAffine RootSIFT 0
507.238 231.637 274.305 455.039 0.640292 0.640292 HessianAffine RootSIFT 0
411.5 264.337 367.706 242.775 0.642319 0.642319 MSER RootSIFT 1
509.18 314.32 304.254 265.275 0.646348 0.975992 MSER RootSIFT 0
175.728 136.303 671.172 240.643 0.646423 0.852159 HessianAffine RootSIFT 0
473.779 291.5 548.556 283.685 0.646927 0.993806 MSER RootSIFT 1
233.47 131.906 575.484 191.061 0.648298 0.648298 HessianAffine RootSIFT 0
600.308 169.709 55.5934 444.254 0.650345 0.966146 HessianAffine RootSIFT 0
121.494 210.308 657.616 325.076 0.65039 0.833411 HessianAffine RootSIFT 0
456.309 302.366 668.325 331.555 0.653818 0.936056 HessianAffine RootSIFT 0
343.178 191.16 614.021 220.735 0.655726 0.655726 HessianAffine RootSIFT 0
547.937 104.798 274.305 455.039 0.658449 0.658449 HessianAffine RootSIFT 0
506.22 322.53 534.164 322.081 0.659703 0.659703 HessianAffine RootSIFT 0
369.315 238.364 565.726 258.883 0.663856 0.663856 HessianAffine RootSIFT 0
274.017 162.693 460.163 189.982 0.668286 0.668286 HessianAffine RootSIFT 0
113.944 48.8056 359.577 125.09 0.668795 0.916376 MSER RootSIFT 0
205.869 148.709 713.862 248.05 0.668922 0.867019 HessianAffine RootSIFT 0
495.486 375.903 522.507 385.327 0.669908 0.669908 HessianAffine RootSIFT 0
77.3704 199.83 608.944 317.521 0.670582 0.670582 HessianAffine RootSIFT 0
261.878 155.278 619.678 217.85 0.671611 0.671611 HessianAffine RootSIFT 0
299.039 172.175 689.804 233.029 0.673411 0.673411 HessianAffine RootSIFT 0
168.633 136.9 666.226 243.581 0.673675 0.673675 MSER RootSIFT 0
195.3 135.322 709.532 242.301 0.673746 0.947275 HessianAffine RootSIFT 0
260.657 105.531 466.646 134.58 0.676137 0.676137 HessianAffine RootSIFT 1
194.119 122.947 702.321 224.766 0.678744 0.913716 HessianAffine RootSIFT 0
395.071 173.908 761.101 198.646 0.678919 0.678919 HessianAffine RootSIFT 0
180.979 139.553 688.883 247.325 0.679037 0.679037 HessianAffine RootSIFT 0
705.838 85.1901 274.305 455.039 0.68137 0.68137 HessianAffine RootSIFT 0
113.932 220.956 655.261 350.476 0.681898 0.681898 HessianAffine RootSIFT 0
466.309 239.832 591.215 216.203 0.6828 0.82672 HessianAffine RootSIFT 0
438.409 294.718 304.254 265.275 0.68484 0.981449 MSER RootSIFT 0
65.8738 119.501 375.9 206.706 0.685209 0.685209 HessianAffine RootSIFT 0
460.584 289.118 482.245 276.669 0.685368 0.925724 HessianAffine RootSIFT 0
583.657 118.759 153.997 458.902 0.685381 0.896268 HessianAffine RootSIFT 0
250.238 146.667 599.103 206.788 0.685485 0.685485 HessianAffine RootSIFT 0
129.716 226.608 671.28 346.901 0.687166 0.687166 HessianAffine RootSIFT 0
220.659 165.071 550.985 229.985 0.690907 0.970134 MSER RootSIFT 0
709.328 124.685 295.57 433.492 0.691779 0.97035 HessianAffine RootSIFT 0
427.723 251.277 525.854 240.672 0.69243 0.996467 MSER RootSIFT 1
287.733 259.532 533.23 312.193 0.692565 0.692565 HessianAffine RootSIFT 0
184.726 159.699 678.716 261.762 0.694585 0.694585 HessianAffine RootSIFT 0
197.825 154.178 706.935 257.911 0.695261 0.695261 HessianAffine RootSIFT 0
227.009 162.044 558.57 226.43 0.695958 0.888143 MSER RootSIFT 0
143.208 111.376 624.967 219.641 0.698097 0.698097 HessianAffine RootSIFT 0
413.56 231.783 557.058 226.315 0.700858 0.930049 MSER RootSIFT 1
91.961 202.611 608.944 317.521 0.700929 0.700929 HessianAffine RootSIFT 0
81.6238 265.832 223.677 321.473 0.70126 0.70126 HessianAffine RootSIFT 0
346.797 273.717 672.006 332.87 0.701497 0.701497 HessianAffine RootSIFT 0
352.88 273.099 687.898 333.31 0.70389 0.920981 HessianAffine RootSIFT 0
192.064 130.649 703.843 236.792 0.704628 0.704628 HessianAffine RootSIFT 0
364.405 252.51 194.406 216.42 0.705491 0.931819 HessianAffine RootSIFT 0
94.4918 270.953 256.957 329.5 0.706134 0.982069 MSER RootSIFT 0
432.117 260.677 500.837 248.353 0.706157 0.874703 HessianAffine RootSIFT 1
337.924 265.047 658.197 322.043 0.706954 0.92696 HessianAffine RootSIFT 0
434.918 368.076 461.248 379.318 0.707238 0.707238 HessianAffine RootSIFT 0
203.033 83.5167 481.291 142.384 0.70795 0.70795 MSER RootSIFT 0
171.344 133.279 667.464 238.702 0.70817 0.70817 MSER RootSIFT 0
429.088 228.059 667.464 238.702 0.710854 0.936439 MSER RootSIFT 0
140.705 258.681 685.477 382.92 0.713846 0.713846 MSER RootSIFT 0
117.162 93.0854 599.103 206.788 0.714434 0.714434 HessianAffine RootSIFT 0
339.292 205.799 561.387 229.556 0.715245 0.960602 MSER RootSIFT 0
285.061 180.329 443.071 200.533 0.716925 0.716925 HessianAffine RootSIFT 0
499.756 315.529 536.206 312.673 0.719099 0.719099 HessianAffine RootSIFT 0
148.192 219.424 700.05 337.437 0.719554 0.975583 HessianAffine RootSIFT 0
213.049 160.078 716.289 254.401 0.721608 0.885165 HessianAffine RootSIFT 0
463.003 303.079 688.379 334.928 0.722633 0.722633 HessianAffine RootSIFT 0
474.727 283.098 603.577 280.657 0.723546 0.723546 HessianAffine RootSIFT 1
133.354 203.045 388.809 275.515 0.7237 0.7237 HessianAffine RootSIFT 0
430.291 223.868 679.037 230.004 0.724689 0.724689 HessianAffine RootSIFT 1
90.9471 44.0335 750.463 207.552 0.727906 0.904849 HessianAffine RootSIFT 0
420.29 294.273 533.23 312.193 0.728009 0.992144 HessianAffine RootSIFT 0
294.873 180.493 672.611 244.019 0.728259 0.728259 MSER RootSIFT 0
521.65 327.962 665.371 238.603 0.731177 0.927564 MSER RootSIFT 0
657.865 237.752 274.305 455.039 0.731375 0.731375 HessianAffine RootSIFT 0
439.291 239.949 294.898 240.395 0.73321 0.73321 HessianAffine RootSIFT 0
200.888 338.776 547.753 380.409 0.733329 0.885436 HessianAffine RootSIFT 0
129.88 92.9301 612.026 204.366 0.734095 0.827801 HessianAffine RootSIFT 0
323.815 194.799 722.583 255.051 0.735704 0.973452 HessianAffine RootSIFT 0
245.179 151.369 591.393 215.128 0.736021 0.736021 HessianAffine RootSIFT 0
464.023 235.422 588.004 207.814 0.737588 0.911108 HessianAffine RootSIFT 0
464.233 371.032 435.073 369.096 0.7378 0.835914 HessianAffine RootSIFT 0
351.275 282.696 683.086 357.728 0.738219 0.872538 HessianAffine RootSIFT 0
188.526 159.816 684.717 262.283 0.738501 0.738501 MSER RootSIFT 0
201.124 166.795 700.022 265.879 0.738962 0.955824 HessianAffine RootSIFT 0
128.376 239.728 665.849 374.427 0.740581 0.740581 HessianAffine RootSIFT 0
382.992 206.471 551.288 208.606 0.740685 0.991585 HessianAffine RootSIFT 0
396.824 259.346 318.135 231.864 0.741529 0.741529 HessianAffine RootSIFT 0
207.301 97.0332 462.348 150.949 0.742033 0.742033 HessianAffine RootSIFT 0
328.93 184.86 588.222 215.282 0.742285 0.800536 HessianAffine RootSIFT 0
207.767 89.1444 210.827 94.4388 0.742328 0.742328 MSER RootSIFT 0
640.241 338.596 46.8375 447.105 0.74266 0.855387 HessianAffine RootSIFT 0
773.326 194.4 279.425 421.05 0.743849 0.999477 HessianAffine RootSIFT 0
188.814 162.723 678.716 261.762 0.744986 0.999373 HessianAffine RootSIFT 0
646.975 81.4427 274.305 455.039 0.746036 0.746036 HessianAffine RootSIFT 0
437.847 303.832 721.695 205.094 0.746444 0.864862 HessianAffine RootSIFT 0
527.858 320.219 391.941 285.068 0.746661 0.828534 HessianAffine RootSIFT 0
252.618 147.801 611.207 210.063 0.747023 0.747023 HessianAffine RootSIFT 0
322.701 194.728 715.977 250.177 0.747778 0.918155 HessianAffine RootSIFT 0
158.85 73.8938 527.764 163.938 0.747901 0.747901 HessianAffine RootSIFT 0
603.693 165.224 62.2771 449.028 0.747984 0.747984 HessianAffine RootSIFT 0
599.491 177.505 274.305 455.039 0.749889 0.749889 HessianAffine RootSIFT 0
432.117 260.677 533.633 253.832 0.750092 0.750092 HessianAffine RootSIFT 0
164.454 285.352 718.157 264.433 0.75011 0.75011 HessianAffine RootSIFT 0
312.078 177.947 551.288 208.606 0.750861 0.987398 HessianAffine RootSIFT 0
62.379 195.336 608.944 317.521 0.751041 0.751041 HessianAffine RootSIFT 0
336.868 229.181 500.837 248.353 0.751286 0.751286 HessianAffine RootSIFT 0
299.142 81.1226 574.875 99.2708 0.751905 0.751905 MSER RootSIFT 0
627.776 391.638 249.428 478.73 0.752567 0.8653 HessianAffine RootSIFT 0
703.661 84.0053 243.684 173.178 0.753708 0.841152 HessianAffine RootSIFT 0
711.73 356.59 392.932 342.324 0.754012 0.959834 HessianAffine RootSIFT 0
105.347 97.9515 588.222 215.282 0.755115 0.903242 HessianAffine RootSIFT 0
249.6 187.643 563.114 245.742 0.756072 0.992078 HessianAffine RootSIFT 0
624.228 90.0672 44.3486 447.733 0.756355 0.861253 HessianAffine RootSIFT 0
302.553 180.998 696.542 243.537 0.756359 0.850918 HessianAffine RootSIFT 0
371.362 357.596 547.753 380.409 0.756386 0.939094 HessianAffine RootSIFT 0
201.44 144.457 715.977 250.177 0.756502 0.939178 HessianAffine RootSIFT 0
454.062 250.687 718.157 264.433 0.757198 0.757198 HessianAffine RootSIFT 0
295.276 369.638 544.734 416.829 0.757442 0.960208 MSER RootSIFT 0
171.966 267.171 717.282 252.536 0.757581 0.927999 HessianAffine RootSIFT 0
744.913 263.644 601.744 389.101 0.757751 0.836772 HessianAffine RootSIFT 0
473.437 255.059 557.058 226.315 0.757837 0.971324 MSER RootSIFT 0
503.661 375.854 547.134 389.432 0.758309 0.758309 HessianAffine RootSIFT 0
281.822 251.947 524.485 301.848 0.758378 0.907983 HessianAffine RootSIFT 0
352.88 273.099 391.941 285.068 0.75846 0.849876 HessianAffine RootSIFT 0
679.388 299.345 56.5958 439.542 0.758986 0.975243 HessianAffine RootSIFT 0
346.529 154.868 632 179.548 0.75918 0.75918 MSER RootSIFT 0
678.92 316.018 219.586 122.951 0.759503 0.759503 HessianAffine RootSIFT 0
275.643 174.798 629.723 234.988 0.759881 0.759881 HessianAffine RootSIFT 0
316.633 283.73 607.077 394.895 0.760291 0.760291 HessianAffine RootSIFT 0
436.998 226.346 709.532 242.301 0.760352 0.913516 HessianAffine RootSIFT 0
129.88 92.9301 422.357 169.205 0.762071 0.762071 HessianAffine RootSIFT 0
107.016 57.8495 472.228 156.386 0.762415 0.762415 MSER RootSIFT 0
394.537 273.104 491.666 267.82 0.763886 0.763886 HessianAffine RootSIFT 0
687.411 235.98 781.938 235.014 0.764183 0.764183 HessianAffine RootSIFT 0
343.64 184.902 469.053 182.667 0.764246 0.88586 MSER RootSIFT 0
121.521 68.1483 567.144 235.707 0.764307 0.806791 HessianAffine RootSIFT 0
401.341 207.592 599.103 206.788 0.764385 0.764385 HessianAffine RootSIFT 0
351.275 282.696 677.216 353.353 0.76602 0.76602 HessianAffine RootSIFT 0
460.93 318.57 695.443 250.05 0.766408 0.766408 HessianAffine RootSIFT 0
164.56 143.937 648.432 247.076 0.767677 0.946041 MSER RootSIFT 0
592.297 115.807 248.459 472.966 0.767913 0.974248 MSER RootSIFT 0
193.898 278.213 410.045 329.332 0.767919 0.963849 MSER RootSIFT 0
653.903 210.364 716.627 404.9 0.768334 0.768334 HessianAffine RootSIFT 0
358.997 279.762 688.379 334.928 0.769034 0.882216 HessianAffine RootSIFT 0
427.855 354.676 461.248 379.318 0.769815 0.769815 HessianAffine RootSIFT 0
394.248 206.997 588.004 207.814 0.77073 0.897679 HessianAffine RootSIFT 1
198.68 154.189 711.775 259.319 0.770841 0.770841 HessianAffine RootSIFT 0
405.862 126.572 56.5958 439.542 0.771288 0.903473 HessianAffine RootSIFT 0
179.607 115.749 519.62 186.596 0.772651 0.772651 HessianAffine RootSIFT 0
105.678 79.0153 584.137 193.119 0.773185 0.773185 HessianAffine RootSIFT 0
585.105 108.52 57.9119 445.014 0.773235 0.907259 HessianAffine RootSIFT 0
325.52 178.973 588.004 207.814 0.773911 0.889231 HessianAffine RootSIFT 0
685.54 376.154 245.239 470.678 0.774096 0.774096 MSER RootSIFT 0
124.857 96.7466 611.207 210.063 0.774143 0.871142 HessianAffine RootSIFT 0
776.788 114.152 340.909 348.824 0.774739 0.988016 MSER RootSIFT 0
333.376 179.667 599.103 206.788 0.774901 0.774901 HessianAffine RootSIFT 0
120.728 237.803 650.311 358.134 0.775751 0.808482 HessianAffine RootSIFT 0
730.472 91.7701 284.722 211.674 0.777107 0.857122 HessianAffine RootSIFT 0
585.594 119.987 251.306 368.364 0.777417 0.889341 HessianAffine RootSIFT 0
434.622 220.06 702.321 224.766 0.778217 0.967964 HessianAffine RootSIFT 0
120.387 87.0719 603.37 199.143 0.779075 0.779075 HessianAffine RootSIFT 0
736.58 42.4618 484.109 259.93 0.779388 0.779388 HessianAffine RootSIFT 0
188.72 335.268 447.076 376.966 0.779437 0.779437 HessianAffine RootSIFT 0
106.652 92.9329 587.904 210.286 0.77953 0.77953 HessianAffine RootSIFT 0
261.936 336.311 220.485 160.173 0.779834 0.918307 HessianAffine RootSIFT 0
529.929 105.042 274.305 455.039 0.780222 0.780222 HessianAffine RootSIFT 0
352.21 263.452 697.536 320.153 0.780286 0.780286 HessianAffine RootSIFT 0
497.211 290.789 692.422 298.061 0.780372 0.780372 MSER RootSIFT 1
704.984 78.3951 274.305 455.039 0.781155 0.781155 HessianAffine RootSIFT 0
726.028 343.807 246.838 311.842 0.781187 0.781187 HessianAffine RootSIFT 0
408.539 156.97 128.794 408.843 0.781584 0.99158 HessianAffine RootSIFT 0
340.062 205.075 553.301 226.494 0.782446 0.782446 MSER RootSIFT 0
524.326 320.61 672.006 332.87 0.78259 0.78259 HessianAffine RootSIFT 0
264.182 141.898 605 194.611 0.78273 0.78273 MSER RootSIFT 0
86.3667 345.711 375.639 393.402 0.782989 0.782989 MSER RootSIFT 0
745.796 233.562 323.109 419.641 0.785057 0.960088 HessianAffine RootSIFT 0
366.215 358.12 547.753 380.409 0.785153 0.968079 HessianAffine RootSIFT 0
199.127 161.439 700.842 265.245 0.785923 0.785923 HessianAffine RootSIFT 0
649.458 189.827 252.715 485.401 0.785991 0.785991 HessianAffine RootSIFT 0
29.2995 195.034 225.619 264.171 0.786252 0.786252 HessianAffine RootSIFT 0
385.828 321.934 718.157 264.433 0.786753 0.786753 HessianAffine RootSIFT 0
75.544 50.7338 742.723 213.171 0.786945 0.786945 HessianAffine RootSIFT 0
135.862 212.066 678.652 327.57 0.787466 0.787466 HessianAffine RootSIFT 0
637.104 96.1891 55.5934 444.254 0.787758 0.874744 HessianAffine RootSIFT 0
534.225 326.02 685.102 332.301 0.787789 0.84409 HessianAffine RootSIFT 0
179.935 325.228 249.428 478.73 0.788441 0.868261 HessianAffine RootSIFT 0
63.7605 345.217 185.783 411.812 0.789635 0.923577 HessianAffine RootSIFT 0
630.256 162.845 687.076 376.108 0.789728 0.789728 HessianAffine RootSIFT 0
392.022 137.703 95.7482 411.743 0.790077 0.790077 HessianAffine RootSIFT 0
189.063 164.282 455.617 223.136 0.790218 0.790218 HessianAffine RootSIFT 0
506.486 340.04 410.045 329.332 0.791089 0.951712 MSER RootSIFT 0
119.839 71.2534 64.4394 86.4415 0.791205 0.791205 HessianAffine RootSIFT 0
722.501 341.002 206.065 306.796 0.791455 0.878345 HessianAffine RootSIFT 0
661.723 397.165 442.983 375.992 0.791758 0.909949 HessianAffine RootSIFT 0
218.732 108.115 609.379 185.115 0.79217 0.872328 MSER RootSIFT 0
288.79 174.801 663.429 238.307 0.792274 0.978814 MSER RootSIFT 0
227.996 135.15 779.8 240.338 0.792443 0.792443 HessianAffine RootSIFT 0
232.44 137.158 581.506 200.446 0.792763 0.792763 HessianAffine RootSIFT 0
732.432 349.802 290.021 170.141 0.792971 0.792971 HessianAffine RootSIFT 0
616.859 132.017 183.45 212.505 0.793199 0.942164 MSER RootSIFT 0
191.316 152.734 680.38 393.477 0.79332 0.79332 HessianAffine RootSIFT 0
304.17 267.845 575.806 325.345 0.793378 0.793378 HessianAffine RootSIFT 0
652.818 479.383 301.555 455.291 0.793523 0.793523 HessianAffine RootSIFT 0
409.117 325.404 404.342 334.562 0.793699 0.968738 MSER RootSIFT 0
175.883 334.631 442.983 375.992 0.794048 0.913006 HessianAffine RootSIFT 0
219.781 106.173 475.057 156.893 0.794315 0.794315 MSER RootSIFT 0
267.913 160.741 339.453 324.135 0.795161 0.980257 HessianAffine RootSIFT 0
356.508 150.424 654.323 169.995 0.795218 0.864028 HessianAffine RootSIFT 0
72.9571 242.329 297.324 136.623 0.795273 0.996333 MSER RootSIFT 0
722.83 292.105 183.432 209.653 0.795544 0.937199 MSER RootSIFT 0
125.929 229.22 367.622 304.479 0.795554 0.907833 HessianAffine RootSIFT 0
143.065 250.027 685.313 365.227 0.795683 0.795683 HessianAffine RootSIFT 0
668.901 460.518 287.783 172.434 0.795699 0.795699 HessianAffine RootSIFT 0
725.278 76.5544 171.558 202.665 0.795857 0.795857 HessianAffine RootSIFT 0
92.6695 76.2178 218.171 267.491 0.795966 0.81125 HessianAffine RootSIFT 0
60.3286 124.539 367.703 212.327 0.795996 0.937522 HessianAffine RootSIFT 0
219.781 106.173 557.058 226.315 0.796568 0.977746 MSER RootSIFT 0
327.525 201.327 539.885 222.566 0.796748 0.919443 HessianAffine RootSIFT 0
734.21 398.814 442.983 375.992 0.797114 0.857628 HessianAffine RootSIFT 0
407.732 231.259 561.387 229.556 0.797137 0.886912 MSER RootSIFT 0
92.6695 76.2178 570.021 195.987 0.797247 0.797247 HessianAffine RootSIFT 0
199.653 139.098 709.532 242.301 0.797725 0.818647 HessianAffine RootSIFT 0
260.447 159.129 607.712 217.782 0.798249 0.881956 HessianAffine RootSIFT 0
305.717 174.149 535.019 205.536 0.798368 0.935957 HessianAffine RootSIFT 0
603.575 153.229 668.262 351.877 0.79882 0.917826 HessianAffine RootSIFT 0
364.365 203.539 663.429 238.307 0.798825 0.941894 MSER RootSIFT 0
200.306 282.284 410.045 329.332 0.79886 0.85885 MSER RootSIFT 0
212.927 137.644 556.9 208.919 0.798977 0.798977 HessianAffine RootSIFT 0
92.6695 76.2178 781.938 235.014 0.799337 0.799337 HessianAffine RootSIFT 0
641.045 162.471 20.4121 187.209 0.799379 0.799379 HessianAffine RootSIFT 0
111.731 99.1038 591.393 215.128 0.799587 0.799587 HessianAffine RootSIFT 0
394.866 222.332 715.977 250.177 0.799726 0.799726 HessianAffine RootSIFT 0
192.601 282.95 401.236 333.553 0.800614 0.980098 MSER RootSIFT 0
242.743 363.339 544.734 416.829 0.80147 0.996936 MSER RootSIFT 0
727.283 296.466 183.45 212.505 0.801815 0.999209 MSER RootSIFT 0
573.439 339.612 92.1973 422.139 0.802772 0.944104 MSER RootSIFT 0
497.211 290.789 688.54 294.599 0.803908 0.90172 MSER RootSIFT 0
353.783 165.548 557.058 226.315 0.804563 0.977569 MSER RootSIFT 0
290.465 141.721 500.864 163.179 0.805044 0.937905 MSER RootSIFT 0
745.423 282.077 248.459 472.966 0.806484 0.968269 MSER RootSIFT 0
273.472 153.624 300.47 149.701 0.806626 0.996201 MSER RootSIFT 0
94.325 274.975 622.627 397.506 0.806707 0.806707 MSER RootSIFT 0
699.591 214.045 194.151 210.593 0.807117 0.807117 MSER RootSIFT 0
165.313 141.794 654.526 247.284 0.807802 0.9024 MSER RootSIFT 0
414.6 206.52 469.637 171.342 0.81108 0.91144 MSER RootSIFT 0
182.855 331.097 694.09 383.481 0.812778 0.812778 MSER RootSIFT 0
192.336 152.5 698.257 258.374 0.813795 0.813795 MSER RootSIFT 0
351.874 224.879 297.196 199.845 0.814481 0.98232 MSER RootSIFT 1
210.14 92.16 359.577 125.09 0.815235 0.815235 MSER RootSIFT 0
218.673 104.918 480.123 158.333 0.815762 0.977158 MSER RootSIFT 0
225.782 181.391 297.196 199.845 0.815903 0.939928 MSER RootSIFT 0
193.873 96.3673 505.394 161.155 0.817312 0.817312 MSER RootSIFT 0
616 322.125 302.17 265.711 0.818126 0.972841 MSER RootSIFT 0
140.562 275.362 685.036 399.882 0.818678 0.920908 MSER RootSIFT 0
105.283 418.696 367.688 391.388 0.818874 0.991215 MSER RootSIFT 0
72.3438 41.3542 484.708 208.463 0.818925 0.907936 MSER RootSIFT 0
410.128 260.57 386.182 239.076 0.819399 0.918848 MSER RootSIFT 1
111.461 59.3118 557.058 226.315 0.819914 0.996161 MSER RootSIFT 0
291.955 175.682 669.3 238.69 0.82007 0.82007 MSER RootSIFT 0
352.053 184.904 525.013 241.86 0.821564 0.821564 MSER RootSIFT 0
467.016 253.329 561.387 229.556 0.823069 0.900629 MSER RootSIFT 0
618.267 366.251 406.026 329.289 0.824156 0.90743 MSER RootSIFT 0
354.344 376.59 168 363.711 0.824242 0.824242 MSER RootSIFT 0
188.263 286.899 400.885 329.34 0.825579 0.856942 MSER RootSIFT 0
570.534 356.28 406.026 329.289 0.826873 0.884535 MSER RootSIFT 0
349.378 186.562 483.388 180.988 0.827008 0.827008 MSER RootSIFT 0
667.99 408.969 369.921 394.434 0.827619 0.827619 MSER RootSIFT 0
400.521 227.864 501.964 222.756 0.827705 0.970883 MSER RootSIFT 0
613.782 136.37 183.45 212.505 0.82876 0.889143 MSER RootSIFT 0
406.692 321.976 410.045 329.332 0.829937 0.862533 MSER RootSIFT 0
457.832 249.742 644.827 404.955 0.830059 0.830059 MSER RootSIFT 0
691.989 319.16 253.849 432.993 0.83034 0.951241 MSER RootSIFT 0
463.174 257.267 181.941 287.74 0.832276 0.908942 MSER RootSIFT 0
148.087 309.846 423.44 330.035 0.832299 0.832299 MSER RootSIFT 0
108.059 62.7712 475.616 161.895 0.83322 0.980119 MSER RootSIFT 0
489.557 273.129 525.854 240.672 0.833813 0.833813 MSER RootSIFT 0
563.588 398.225 544.236 417.949 0.834604 0.834604 MSER RootSIFT 0
717.044 336.704 298.39 332.418 0.835068 0.989039 MSER RootSIFT 0
401.697 235.739 727.583 269.593 0.835647 0.948974 MSER RootSIFT 0
691.434 492.795 692.559 297.819 0.83637 0.970594 MSER RootSIFT 0
238.459 362.757 538.439 417.745 0.836731 0.836731 MSER RootSIFT 0
105.375 347.931 685.477 382.92 0.83699 0.83699 MSER RootSIFT 0
279.235 123.969 359.577 125.09 0.837639 0.837639 MSER RootSIFT 0
448.263 309.754 663.429 238.307 0.838834 0.951188 MSER RootSIFT 0
194.316 296.31 402.993 335.092 0.838846 0.973943 MSER RootSIFT 0
385.618 345.211 195.411 296.794 0.839105 0.839105 MSER RootSIFT 0
691.989 319.16 584.059 475.9 0.839604 0.925726 MSER RootSIFT 0
523.768 329.841 624.342 367.132 0.839648 0.95985 MSER RootSIFT 0
623.047 330.46 247.41 470.603 0.839791 0.988656 MSER RootSIFT 0
351.282 185.429 473.05 176.131 0.839844 0.865295 MSER RootSIFT 0
190.038 277.766 185.418 289.925 0.84011 0.875501 MSER RootSIFT 0
291.952 186.891 236.511 167.747 0.840366 0.986734 MSER RootSIFT 0
489.669 289.89 654.087 292.992 0.841528 0.898708 MSER RootSIFT 1
97.6795 77.3462 575.859 195.808 0.841559 0.841559 MSER RootSIFT 0
215.473 169.979 727.583 269.593 0.842544 0.974625 MSER RootSIFT 0
371.792 315.903 513.143 225.476 0.842637 0.958907 MSER RootSIFT 0
113.5 59.1889 482.4 157.6 0.842981 0.894887 MSER RootSIFT 0
197.688 284.161 402.993 335.092 0.843331 0.843331 MSER RootSIFT 0
592.518 345.452 689.515 294.387 0.84361 0.983758 MSER RootSIFT 0
409.737 209.558 473.05 176.131 0.844401 0.895594 MSER RootSIFT 0
197.253 296.591 407.512 342.447 0.844496 0.966485 MSER RootSIFT 0
578.982 343.523 665.371 238.603 0.845156 0.965676 MSER RootSIFT 0
349.378 186.562 466.197 177.796 0.8461 0.919954 MSER RootSIFT 0
652.081 335.047 441.48 207.459 0.846685 0.937388 MSER RootSIFT 0
267.333 126.111 667.464 238.702 0.847672 0.847672 MSER RootSIFT 0
578.561 106.227 248.459 472.966 0.848335 0.904601 MSER RootSIFT 0
55.4016 230.107 292.329 460.619 0.848449 0.848449 MSER RootSIFT 0
243.341 106.5 423.663 137.093 0.849631 0.994297 MSER RootSIFT 0
397.286 312.857 410.045 329.332 0.849784 0.849784 MSER RootSIFT 0
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<title>Warning Messages</title>
<link rel="stylesheet" type="text/css" href="style.css">
</head>
<body bgcolor="#ffffff">
<H1><a name="Warnings"></a>15 Warning Messages</H1>
<!-- INDEX -->
<div class="sectiontoc">
<ul>
<li><a href="#Warnings_nn2">Introduction</a>
<li><a href="#Warnings_suppression">Warning message suppression</a>
<li><a href="#Warnings_nn4">Enabling extra warnings</a>
<li><a href="#Warnings_nn5">Issuing a warning message</a>
<li><a href="#Warnings_symbolic_symbols">Symbolic symbols</a>
<li><a href="#Warnings_nn6">Commentary</a>
<li><a href="#Warnings_nn7">Warnings as errors</a>
<li><a href="#Warnings_nn8">Message output format</a>
<li><a href="#Warnings_nn9">Warning number reference</a>
<ul>
<li><a href="#Warnings_nn10">Deprecated features (100-199)</a>
<li><a href="#Warnings_nn11">Preprocessor (200-299)</a>
<li><a href="#Warnings_nn12">C/C++ Parser (300-399)</a>
<li><a href="#Warnings_nn13">Types and typemaps (400-499) </a>
<li><a href="#Warnings_nn14">Code generation (500-599)</a>
<li><a href="#Warnings_nn15">Language module specific (700-899) </a>
<li><a href="#Warnings_nn16">User defined (900-999)</a>
</ul>
<li><a href="#Warnings_nn17">History</a>
</ul>
</div>
<!-- INDEX -->
<H2><a name="Warnings_nn2"></a>15.1 Introduction</H2>
<p>
During compilation, SWIG may generate a variety of warning messages. For example:
</p>
<div class="shell">
<pre>
example.i:16: Warning 501: Overloaded declaration ignored. bar(double)
example.i:15: Warning 501: Previous declaration is bar(int)
</pre>
</div>
<p>
Typically, warning messages indicate non-fatal problems with the input
where the generated wrapper code will probably compile, but it may not
work like you expect.
</p>
<H2><a name="Warnings_suppression"></a>15.2 Warning message suppression</H2>
<p>
All warning messages have a numeric code that is shown in the warning message itself.
To suppress the printing of a warning message, a number of techniques can be used.
First, you can run SWIG with the <tt>-w</tt> command line option. For example:
</p>
<div class="shell">
<pre>
% swig -python -w501 example.i
% swig -python -w501,505,401 example.i
</pre>
</div>
<p>
Alternatively, warnings can be suppressed by inserting a special preprocessor pragma
into the input file:
</p>
<div class="code">
<pre>
%module example
#pragma SWIG nowarn=501
#pragma SWIG nowarn=501,505,401
</pre>
</div>
<p>
Finally, code-generation warnings can be disabled on a declaration by declaration basis using
the <tt>%warnfilter</tt> directive. For example:
</p>
<div class="code">
<pre>
%module example
%warnfilter(501) foo;
...
int foo(int);
int foo(double); // Silently ignored.
</pre>
</div>
<p>
The <tt>%warnfilter</tt> directive has the same semantics as other declaration modifiers like
<tt>%rename</tt>, <tt>%ignore</tt> and <tt>%feature</tt>, see the
<a href="Customization.html#Customization_features">%feature directive</a> section. For example, if you wanted to
suppress a warning for a method in a class hierarchy, you could do this:
</p>
<div class="code">
<pre>
%warnfilter(501) Object::foo;
class Object {
public:
int foo(int);
int foo(double); // Silently ignored
...
};
class Derived : public Object {
public:
int foo(int);
int foo(double); // Silently ignored
...
};
</pre>
</div>
<p>
Warnings can be suppressed for an entire class by supplying a class name. For example:
</p>
<div class="code">
<pre>
%warnfilter(501) Object;
class Object {
public:
... // All 501 warnings ignored in class
};
</pre>
</div>
<p>
There is no option to suppress all SWIG warning messages. The warning messages are there
for a reason---to tell you that something may be <em>broken</em> in
your interface. Ignore the warning messages at your own peril.
</p>
<H2><a name="Warnings_nn4"></a>15.3 Enabling extra warnings</H2>
<p>
Some warning messages are disabled by default and are generated only
to provide additional diagnostics. These warnings can be turned on using the
<tt>-Wextra</tt> option. For example:
</p>
<div class="shell">
<pre>
% swig -Wextra -python example.i
</pre>
</div>
<p>
To selectively turn on extra warning messages, you can use the directives and options in the
previous section--simply add a "+" to all warning numbers. For example:
</p>
<div class="shell">
<pre>
% swig -w+309,+452 example.i
</pre>
</div>
<p>
or in your interface file use either
</p>
<div class="code">
<pre>
#pragma SWIG nowarn=+309,+452
</pre>
</div>
<p>
or
</p>
<div class="code">
<pre>
%warnfilter(+309,+452) foo;
</pre>
</div>
<p>
Note: selective enabling of warnings with <tt>%warnfilter</tt> overrides any global settings you might have
made using <tt>-w</tt> or <tt>#pragma</tt>.
</p>
<p>
You can of course also enable all warnings and suppress a select few, for example:
</p>
<div class="shell">
<pre>
% swig -Wextra -w309,452 example.i
</pre>
</div>
<p>
The warnings on the right take precedence over the warnings on the left, so in the above example <tt>-Wextra</tt> adds numerous warnings including 452, but then <tt>-w309,452</tt> overrides this and so 452 is suppressed.
</p>
<p>
If you would like all warnings to appear, regardless of the warning filters used, then use the <tt>-Wall</tt> option.
The <tt>-Wall</tt> option also turns on the extra warnings that <tt>-Wextra</tt> adds, however, it is subtly different.
When <tt>-Wall</tt> is used, it also disables all other warning filters,
that is, any warnings suppressed or added in <tt>%warnfilter</tt>, <tt>#pragma SWIG nowarn</tt>
or the <tt>-w</tt> option.
</p>
<H2><a name="Warnings_nn5"></a>15.4 Issuing a warning message</H2>
<p>
Warning messages can be issued from an interface file using a number of directives. The
<tt>%warn</tt> directive is the most simple:
</p>
<div class="code">
<pre>
%warn "900:This is your last warning!"
</pre>
</div>
<p>
All warning messages are optionally prefixed by the warning number to use. If you are generating
your own warnings, make sure you don't use numbers defined in the table at the end of this section.
</p>
<p>
The <tt>%ignorewarn</tt> directive is the same as <tt>%ignore</tt> except that it issues a
warning message whenever a matching declaration is found. For example:
</p>
<div class="code">
<pre>
%ignorewarn("362:operator= ignored") operator=;
</pre>
</div>
<p>
Warning messages can be associated with typemaps using the
<tt>warning</tt> attribute of a typemap declaration. For example:
</p>
<div class="code">
<pre>
%typemap(in, warning="901:You are really going to regret this usage of $1_type $1_name") blah * {
...
}
</pre>
</div>
<p>
In this case, the warning message will be printed whenever the typemap is actually used and the <a href="Typemaps.html#Typemaps_special_variables">special variables</a> will be expanded as appropriate, for example:
</p>
<div class="shell">
<pre>
example.i:23: Warning 901: You are really going to regret this usage of blah * self
example.i:24: Warning 901: You are really going to regret this usage of blah * stuff
</pre>
</div>
<H2><a name="Warnings_symbolic_symbols"></a>15.5 Symbolic symbols</H2>
<p>
The <tt>swigwarn.swg</tt> file that is installed with SWIG contains symbol constants that could also be
used in <tt>%warnfilter</tt> and <tt>#pragma SWIG nowarn</tt>.
For example this file contains the following line:
</p>
<div class="code">
<pre>
%define SWIGWARN_TYPE_UNDEFINED_CLASS 401 %enddef
</pre>
</div>
<p>
so <tt>SWIGWARN_TYPE_UNDEFINED_CLASS</tt> could be used instead of 401, for example:
</p>
<div class="code">
<pre>
#pragma SWIG nowarn=SWIGWARN_TYPE_UNDEFINED_CLASS
</pre>
</div>
<p>
or
</p>
<div class="code">
<pre>
%warnfilter(SWIGWARN_TYPE_UNDEFINED_CLASS) Foo;
</pre>
</div>
<H2><a name="Warnings_nn6"></a>15.6 Commentary</H2>
<p>
The ability to suppress warning messages is really only provided for
advanced users and is not recommended in normal use. You are advised
to modify your interface to fix the problems highlighted by the warnings
wherever possible instead of suppressing warnings.
</p>
<p>
Certain types of SWIG problems are errors. These usually arise due to
parsing errors (bad syntax) or semantic problems for which there is
no obvious recovery. There is no mechanism for suppressing error
messages.
</p>
<H2><a name="Warnings_nn7"></a>15.7 Warnings as errors</H2>
<p>
Warnings can be handled as errors by using the <tt>-Werror</tt> command line
option. This will cause SWIG to exit with a non successful exit code if a
warning is encountered.
</p>
<H2><a name="Warnings_nn8"></a>15.8 Message output format</H2>
<p>
The output format for both warnings and errors can be selected for
integration with your favourite IDE/editor. Editors and IDEs can usually parse
error messages and if in the appropriate format will easily take you
directly to the source of the error. The standard format is used by
default except on Windows where the Microsoft format is used by default.
These can be overridden using command line options, for example:
</p>
<div class="shell"><pre>
$ swig -python -Fstandard example.i
example.i:4: Syntax error in input.
$ swig -python -Fmicrosoft example.i
example.i(4) : Syntax error in input.
</pre></div>
<H2><a name="Warnings_nn9"></a>15.9 Warning number reference</H2>
<H3><a name="Warnings_nn10"></a>15.9.1 Deprecated features (100-199)</H3>
<ul>
<li>101. Deprecated <tt>%extern</tt> directive.
<li>102. Deprecated <tt>%val</tt> directive.
<li>103. Deprecated <tt>%out</tt> directive.
<li>104. Deprecated <tt>%disabledoc</tt> directive.
<li>105. Deprecated <tt>%enabledoc</tt> directive.
<li>106. Deprecated <tt>%doconly</tt> directive.
<li>107. Deprecated <tt>%style</tt> directive.
<li>108. Deprecated <tt>%localstyle</tt> directive.
<li>109. Deprecated <tt>%title</tt> directive.
<li>110. Deprecated <tt>%section</tt> directive.
<li>111. Deprecated <tt>%subsection</tt> directive.
<li>112. Deprecated <tt>%subsubsection</tt> directive.
<li>113. Deprecated <tt>%addmethods</tt> directive.
<li>114. Deprecated <tt>%readonly</tt> directive.
<li>115. Deprecated <tt>%readwrite</tt> directive.
<li>116. Deprecated <tt>%except</tt> directive.
<li>117. Deprecated <tt>%new</tt> directive.
<li>118. Deprecated <tt>%typemap(except)</tt>.
<li>119. Deprecated <tt>%typemap(ignore)</tt>.
<li>120. Deprecated command line option (-runtime, -noruntime).
<li>121. Deprecated <tt>%name</tt> directive.
<li>126. The 'nestedworkaround' feature is deprecated.
</ul>
<H3><a name="Warnings_nn11"></a>15.9.2 Preprocessor (200-299)</H3>
<ul>
<li>201. Unable to find <em>filename</em>.
<li>202. Could not evaluate expression <em>expr</em>.
<li>203. Both includeall and importall are defined: using includeall.
<li>204. CPP #warning, "<em>warning</em>".
<li>205. CPP #error, "<em>error</em>".
<li>206. Unexpected tokens after #<em>directive</em> directive.
</ul>
<H3><a name="Warnings_nn12"></a>15.9.3 C/C++ Parser (300-399)</H3>
<ul>
<li>301. <tt>class</tt> keyword used, but not in C++ mode.
<li>302. Identifier '<em>name</em>' redefined (ignored).
<li>303. <tt>%extend</tt> defined for an undeclared class '<em>name</em>'.
<li>304. Unsupported constant value (ignored).
<li>305. Bad constant value (ignored).
<li>306. '<em>identifier</em>' is private in this context.
<li>307. Can't set default argument value (ignored)
<li>308. Namespace alias '<em>name</em>' not allowed here. Assuming '<em>name</em>'
<li>309. [private | protected] inheritance ignored.
<li>310. Template '<em>name</em>' was already wrapped as '<em>name</em>' (ignored)
<li>312. Unnamed nested class not currently supported (ignored).
<li>313. Unrecognized extern type "<em>name</em>" (ignored).
<li>314. '<em>identifier</em>' is a <em>lang</em> keyword.
<li>315. Nothing known about '<em>identifier</em>'.
<li>316. Repeated %module directive.
<li>317. Specialization of non-template '<em>name</em>'.
<li>318. Instantiation of template '<em>name</em>' is ambiguous, instantiation <em>templ</em> used, instantiation <em>templ</em> ignored.
<li>319. No access specifier given for base class <em>name</em> (ignored).
<li>320. Explicit template instantiation ignored.
<li>321. <em>identifier</em> conflicts with a built-in name.
<li>322. Redundant redeclaration of '<em>name</em>'.
<li>323. Recursive scope inheritance of '<em>name</em>'.
<li>324. Named nested template instantiations not supported. Processing as if no name was given to %template().
<li>325. Nested <em>kind</em> not currently supported (<em>name</em> ignored).
<li>326. Deprecated %extend name used - the <em>kind</em> name '<em>name</em>' should be used instead of the typedef name '<em>name</em>'.
<li>350. operator new ignored.
<li>351. operator delete ignored.
<li>352. operator+ ignored.
<li>353. operator- ignored.
<li>354. operator* ignored.
<li>355. operator/ ignored.
<li>356. operator% ignored.
<li>357. operator^ ignored.
<li>358. operator& ignored.
<li>359. operator| ignored.
<li>360. operator~ ignored.
<li>361. operator! ignored.
<li>362. operator= ignored.
<li>363. operator< ignored.
<li>364. operator> ignored.
<li>365. operator+= ignored.
<li>366. operator-= ignored.
<li>367. operator*= ignored.
<li>368. operator/= ignored.
<li>369. operator%= ignored.
<li>370. operator^= ignored.
<li>371. operator&= ignored.
<li>372. operator|= ignored.
<li>373. operator<< ignored.
<li>374. operator>> ignored.
<li>375. operator<<= ignored.
<li>376. operator>>= ignored.
<li>377. operator== ignored.
<li>378. operator!= ignored.
<li>379. operator<= ignored.
<li>380. operator>= ignored.
<li>381. operator&& ignored.
<li>382. operator|| ignored.
<li>383. operator++ ignored.
<li>384. operator-- ignored.
<li>385. operator, ignored.
<li>386. operator->* ignored.
<li>387. operator-> ignored.
<li>388. operator() ignored.
<li>389. operator[] ignored.
<li>390. operator+ ignored (unary).
<li>391. operator- ignored (unary).
<li>392. operator* ignored (unary).
<li>393. operator& ignored (unary).
<li>394. operator new[] ignored.
<li>395. operator delete[] ignored.
</ul>
<H3><a name="Warnings_nn13"></a>15.9.4 Types and typemaps (400-499) </H3>
<ul>
<li>401. Nothing known about class 'name'. Ignored.
<li>402. Base class 'name' is incomplete.
<li>403. Class 'name' might be abstract.
<li>450. Deprecated typemap feature ($source/$target).
<li>451. Setting const char * variable may leak memory.
<li>452. Reserved
<li>453. Can't apply (pattern). No typemaps are defined.
<li>460. Unable to use type <em>type</em> as a function argument.
<li>461. Unable to use return type <em>type</em> in function <em>name</em>.
<li>462. Unable to set variable of type <em>type</em>.
<li>463. Unable to read variable of type <em>type</em>.
<li>464. Unsupported constant value.
<li>465. Unable to handle type <em>type</em>.
<li>466. Unsupported variable type <em>type</em>.
<li>467. Overloaded <em>declaration</em> not supported (no type checking rule for '<em>type</em>')
<li>468. No 'throw' typemap defined for exception type <em>type</em>
<li>469. No or improper directorin typemap defined for <em>type</em>
<li>470. Thread/reentrant unsafe wrapping, consider returning by value instead.
<li>471. Unable to use return type <em>type</em> in director method
<li>474. Method <em>method</em> usage of the optimal attribute ignored in the out typemap as the following cannot be used to generate optimal code: <em>code</em>
<li>475. Multiple calls to <em>method</em> might be generated due to optimal attribute usage in the out typemap.
<li>476. Initialization using std::initializer_list.
<li>477. No directorthrows typemap defined for <em>type</em>
</ul>
<H3><a name="Warnings_nn14"></a>15.9.5 Code generation (500-599)</H3>
<ul>
<li>501. Overloaded declaration ignored. <em>decl</em>. Previous declaration is <em>decl</em>.
<li>502. Overloaded constructor ignored. <em>decl</em>. Previous declaration is <em>decl</em>.
<li>503. Can't wrap '<em>identifier</em>' unless renamed to a valid identifier.
<li>504. Function <em>name</em> must have a return type. Ignored.
<li>505. Variable length arguments discarded.
<li>506. Can't wrap varargs with keyword arguments enabled.
<li>507. Adding native function <em>name</em> not supported (ignored).
<li>508. Declaration of '<em>name</em>' shadows declaration accessible via operator->(), previous declaration of '<em>declaration</em>'.
<li>509. Overloaded method <em>declaration</em> effectively ignored, as it is shadowed by <em>declaration</em>.
<li>510. Friend function '<em>name</em>' ignored.
<li>511. Can't use keyword arguments with overloaded functions.
<li>512. Overloaded method <em>declaration</em> ignored, using non-const method <em>declaration</em> instead.
<li>513. Can't generate wrappers for unnamed struct/class.
<li>514.
<li>515.
<li>516. Overloaded method <em>declaration</em> ignored, using <em>declaration</em> instead.
<li>517.
<li>518. Portability warning: File <em>file1</em> will be overwritten by <em>file2</em> on case insensitive filesystems such as Windows' FAT32 and NTFS unless the class/module name is renamed.
<li>519. %template() contains no name. Template method ignored: <em>declaration</em>
<li>520. <em>Base/Derived</em> class '<em>classname1</em>' of '<em>classname2</em>' is not similarly marked as a smart pointer.
<li>521. Illegal destructor name <em>name</em>. Ignored.
<li>522. Use of an illegal constructor name '<em>name</em>' in %extend is deprecated, the constructor name should be '<em>name</em>'.
<li>523. Use of an illegal destructor name '<em>name</em>' in %extend is deprecated, the destructor name should be '<em>name</em>'.
</ul>
<H3><a name="Warnings_nn15"></a>15.9.6 Language module specific (700-899) </H3>
<ul>
<li>801. Wrong name (corrected to '<em>name</em>'). (Ruby).
</ul>
<ul>
<li>810. No jni typemap defined for <em>type</em> (Java).
<li>811. No jtype typemap defined for <em>type</em> (Java).
<li>812. No jstype typemap defined for <em>type</em> (Java).
<li>813. Warning for <em>classname</em>: Base <em>baseclass</em> ignored. Multiple inheritance is not supported in Java. (Java).
<li>814.
<li>815. No javafinalize typemap defined for <em>type</em> (Java).
<li>816. No javabody typemap defined for <em>type</em> (Java).
<li>817. No javaout typemap defined for <em>type</em> (Java).
<li>818. No javain typemap defined for <em>type</em> (Java).
<li>819. No javadirectorin typemap defined for <em>type</em> (Java).
<li>820. No javadirectorout typemap defined for <em>type</em> (Java).
<li>821.
<li>822. Covariant return types not supported in Java. Proxy method will return <em>basetype</em> (Java).
<li>823. No javaconstruct typemap defined for <em>type</em> (Java).
<li>824. Missing JNI descriptor in directorin typemap defined for <em>type</em> (Java).
<li>825. "directorconnect" attribute missing in <em>type</em> "javaconstruct" typemap. (Java).
<li>826. The nspace feature is used on '<em>type</em>' without -package. The generated code may not compile as Java does not support types declared in a named package accessing types declared in an unnamed package. (Java).
</ul>
<ul>
<li>830. No ctype typemap defined for <em>type</em> (C#).
<li>831. No cstype typemap defined for <em>type</em> (C#).
<li>832. No cswtype typemap defined for <em>type</em> (C#).
<li>833. Warning for <em>classname</em>: Base <em>baseclass</em> ignored. Multiple inheritance is not supported in C#. (C#).
<li>834.
<li>835. No csfinalize typemap defined for <em>type</em> (C#).
<li>836. No csbody typemap defined for <em>type</em> (C#).
<li>837. No csout typemap defined for <em>type</em> (C#).
<li>838. No csin typemap defined for <em>type</em> (C#).
<li>839.
<li>840.
<li>841.
<li>842. Covariant return types not supported in C#. Proxy method will return <em>basetype</em> (C#).
<li>843. No csconstruct typemap defined for <em>type</em> (C#).
<li>844. C# exception may not be thrown - no $excode or excode attribute in <em>typemap</em> typemap. (C#).
<li>845. Unmanaged code contains a call to a SWIG_CSharpSetPendingException method and C# code does not handle pending exceptions via the canthrow attribute. (C#).
</ul>
<ul>
<li>870. Warning for <em>classname</em>: Base <em>baseclass</em> ignored. Multiple inheritance is not supported in PHP.
<li>871. Unrecognized pragma <em>pragma</em>. (Php).
</ul>
<H3><a name="Warnings_nn16"></a>15.9.7 User defined (900-999)</H3>
<p>
These numbers can be used by your own application.
</p>
<H2><a name="Warnings_nn17"></a>15.10 History</H2>
<p>
The ability to control warning messages was first added to SWIG-1.3.12.
</p>
</body>
</html>
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
"""
This is a generic fuzz testing tool, see --help for more information.
"""
import os
import sys
import random
import subprocess
import itertools
class TestGenerator:
    """Enumerates single-character mutations (delete/insert/replace) over a
    set of input files and maps a flat test index to a concrete mutation."""

    def __init__(self, inputs, delete, insert, replace,
                 insert_strings, pick_input):
        """
        inputs         -- paths of the files to fuzz; contents are snapshotted.
        delete         -- enable single-character deletion tests.
        insert         -- enable insertion of each string in insert_strings.
        replace        -- enable replacement by each string in insert_strings.
        insert_strings -- strings used by the insert/replace strategies.
        pick_input     -- additionally enumerate a "picked" input position for
                          each test (multiplies the total test count).
        """
        # Snapshot file contents up front; close handles promptly instead of
        # relying on GC (the original leaked open file objects).
        self.inputs = []
        for path in inputs:
            with open(path) as f:
                self.inputs.append((path, f.read()))
        self.delete = bool(delete)
        self.insert = bool(insert)
        self.replace = bool(replace)
        self.pick_input = bool(pick_input)
        self.insert_strings = list(insert_strings)

        # Total number of mutable character positions across all inputs.
        self.num_positions = sum(len(d) for _, d in self.inputs)
        self.num_insert_strings = len(insert_strings)
        # One test per (strategy, position); insert/replace contribute one
        # test per insert string.  The extra "+ 1" is the identity test.
        self.num_tests = ((delete + (insert + replace)*self.num_insert_strings)
                          * self.num_positions)
        self.num_tests += 1
        if self.pick_input:
            self.num_tests *= self.num_positions

    def position_to_source_index(self, position):
        """Map a flat character position to an (input-index, offset) pair."""
        for i, (s, d) in enumerate(self.inputs):
            n = len(d)
            if position < n:
                return (i, position)
            position -= n
        # Py2/py3-compatible raise (original used the Python 2-only comma form).
        raise ValueError('Invalid position.')

    def get_test(self, index):
        """Decode test 'index' into a (strategy, position, insert-string,
        picked-position) tuple; strategy is one of 'nothing', 'delete',
        'insert' or 'replace'."""
        assert 0 <= index < self.num_tests

        picked_position = None
        if self.pick_input:
            index, picked_position = divmod(index, self.num_positions)
            picked_position = self.position_to_source_index(picked_position)

        # Index 0 is always the identity ("nothing") test.
        if index == 0:
            return ('nothing', None, None, picked_position)
        index -= 1

        index, position = divmod(index, self.num_positions)
        position = self.position_to_source_index(position)
        if self.delete:
            if index == 0:
                return ('delete', position, None, picked_position)
            index -= 1

        index, insert_index = divmod(index, self.num_insert_strings)
        insert_str = self.insert_strings[insert_index]
        if self.insert:
            if index == 0:
                return ('insert', position, insert_str, picked_position)
            index -= 1

        assert self.replace
        assert index == 0
        return ('replace', position, insert_str, picked_position)
class TestApplication:
    """Applies a single fuzz test (as produced by TestGenerator.get_test) to
    the input files on disk, and can revert them to their pristine contents."""

    def __init__(self, tg, test):
        # tg   -- the TestGenerator holding the pristine file contents.
        # test -- (strategy, position, insert-string, picked-position) tuple.
        self.tg = tg
        self.test = test

    @staticmethod
    def _write_file(name, data):
        # Binary mode matches the original tool's behavior for byte strings;
        # fall back to text mode so str data also round-trips on Python 3.
        # Using a context manager fixes the original's leaked file handles.
        mode = 'wb' if isinstance(data, bytes) else 'w'
        with open(name, mode) as f:
            f.write(data)

    def apply(self):
        """Overwrite the mutated file in place according to self.test."""
        if self.test[0] == 'nothing':
            return
        i, j = self.test[1]
        name, data = self.tg.inputs[i]
        if self.test[0] == 'delete':
            data = data[:j] + data[j+1:]
        elif self.test[0] == 'insert':
            data = data[:j] + self.test[2] + data[j:]
        elif self.test[0] == 'replace':
            data = data[:j] + self.test[2] + data[j+1:]
        else:
            # Py2/py3-compatible raise; the tuple must be wrapped, otherwise
            # '%r' % self.test would try to consume the 4-tuple as arguments.
            raise ValueError('Invalid test %r' % (self.test,))
        self._write_file(name, data)

    def revert(self):
        """Restore the target file to its original snapshotted contents."""
        if self.test[0] != 'nothing':
            i, j = self.test[1]
            name, data = self.tg.inputs[i]
            self._write_file(name, data)
def quote(str):
    """Return the argument wrapped in double quotes (no escaping is done)."""
    return ''.join(['"', str, '"'])
def run_one_test(test_application, index, input_files, args):
    """Run the test command for one fuzzed input and report pass/fail.

    test_application -- TestApplication whose .test describes the mutation.
    index            -- flat test index (used for logging and interpolation).
    input_files      -- paths of the (already mutated) input files.
    args             -- command argv; each element is %-interpolated with
                        'index', 'inputs' and (with --pick-input) the
                        'picked_input*' variables.

    Returns True if the command exited with an accepted exit code.
    Relies on the module-global 'opts' set up by main().
    """
    test = test_application.test
    # Interpolate arguments.
    options = { 'index' : index,
                'inputs' : ' '.join(quote(f) for f in input_files) }

    # Add picked input interpolation arguments, if used.
    if test[3] is not None:
        pos = test[3][1]
        options['picked_input'] = input_files[test[3][0]]
        options['picked_input_pos'] = pos

        # Compute the 1-based line and column of the picked position.
        file_data = test_application.tg.inputs[test[3][0]][1]
        line = column = 1
        for i in range(pos):
            if file_data[i] == '\n':
                line += 1
                column = 1
            else:
                column += 1
        options['picked_input_line'] = line
        options['picked_input_col'] = column

    test_args = [a % options for a in args]
    if opts.verbose:
        # print() form is valid in both Python 2 and 3
        # (the original used Python 2-only print statements).
        print('%s: note: executing %r' % (sys.argv[0], test_args))

    stdout = None
    stderr = None
    if opts.log_dir:
        stdout_log_path = os.path.join(opts.log_dir, '%s.out' % index)
        stderr_log_path = os.path.join(opts.log_dir, '%s.err' % index)
        stdout = open(stdout_log_path, 'wb')
        stderr = open(stderr_log_path, 'wb')
    else:
        sys.stdout.flush()
    try:
        p = subprocess.Popen(test_args, stdout=stdout, stderr=stderr)
        p.communicate()
        exit_code = p.wait()

        test_result = (exit_code == opts.expected_exit_code or
                       exit_code in opts.extra_exit_codes)
    finally:
        # Always close the log handles, even if Popen itself raises
        # (the original leaked them on error).
        if stdout is not None:
            stdout.close()
            stderr.close()

    # Remove the logs for passes, unless logging all results.
    if stdout is not None and not opts.log_all and test_result:
        os.remove(stdout_log_path)
        os.remove(stderr_log_path)

    if not test_result:
        print('FAIL: %d' % index)
    elif not opts.succinct:
        print('PASS: %d' % index)
    return test_result
def main():
    # Entry point: parse options, collect inputs and replacement strings,
    # build the test generator, then run either one test, a bounded random
    # sample, or tests forever.
    global opts
    from optparse import OptionParser, OptionGroup
    parser = OptionParser("""%prog [options] ... test command args ...
%prog is a tool for fuzzing inputs and testing them.
The most basic usage is something like:
  $ %prog --file foo.txt ./test.sh
which will run a default list of fuzzing strategies on the input. For each
fuzzed input, it will overwrite the input files (in place), run the test script,
then restore the files back to their original contents.
NOTE: You should make sure you have a backup copy of your inputs, in case
something goes wrong!!!
You can cause the fuzzing to not restore the original files with
'--no-revert'. Generally this is used with '--test <index>' to run one failing
test and then leave the fuzzed inputs in place to examine the failure.
For each fuzzed input, %prog will run the test command given on the command
line. Each argument in the command is subject to string interpolation before
being executed. The syntax is "%(VARIABLE)FORMAT" where FORMAT is a standard
printf format, and VARIABLE is one of:
  'index' - the test index being run
  'inputs' - the full list of test inputs
  'picked_input' - (with --pick-input) the selected input file
  'picked_input_pos' - (with --pick-input) the selected input position
  'picked_input_line' - (with --pick-input) the selected input line
  'picked_input_col' - (with --pick-input) the selected input column
By default, the script will run forever continually picking new tests to
run. You can limit the number of tests that are run with '--max-tests <number>',
and you can run a particular test with '--test <index>'.
You can specify '--stop-on-fail' to stop the script on the first failure
without reverting the changes.
""")
    parser.add_option("-v", "--verbose", help="Show more output",
                      action='store_true', dest="verbose", default=False)
    parser.add_option("-s", "--succinct",  help="Reduce amount of output",
                      action="store_true", dest="succinct", default=False)
    group = OptionGroup(parser, "Test Execution")
    group.add_option("", "--expected-exit-code", help="Set expected exit code",
                     type=int, dest="expected_exit_code",
                     default=0)
    group.add_option("", "--extra-exit-code",
                     help="Set additional expected exit code",
                     type=int, action="append", dest="extra_exit_codes",
                     default=[])
    group.add_option("", "--log-dir",
                     help="Capture test logs to an output directory",
                     type=str, dest="log_dir",
                     default=None)
    group.add_option("", "--log-all",
                     help="Log all outputs (not just failures)",
                     action="store_true", dest="log_all", default=False)
    parser.add_option_group(group)
    group = OptionGroup(parser, "Input Files")
    group.add_option("", "--file", metavar="PATH",
                     help="Add an input file to fuzz",
                     type=str, action="append", dest="input_files", default=[])
    group.add_option("", "--filelist", metavar="LIST",
                     help="Add a list of inputs files to fuzz (one per line)",
                     type=str, action="append", dest="filelists", default=[])
    parser.add_option_group(group)
    group = OptionGroup(parser, "Fuzz Options")
    group.add_option("", "--replacement-chars", dest="replacement_chars",
                     help="Characters to insert/replace",
                     default="0{}[]<>\;@#$^%& ")
    group.add_option("", "--replacement-string", dest="replacement_strings",
                     action="append", help="Add a replacement string to use",
                     default=[])
    group.add_option("", "--replacement-list", dest="replacement_lists",
                     help="Add a list of replacement strings (one per line)",
                     action="append", default=[])
    group.add_option("", "--no-delete", help="Don't delete characters",
                     action='store_false', dest="enable_delete", default=True)
    group.add_option("", "--no-insert", help="Don't insert strings",
                     action='store_false', dest="enable_insert", default=True)
    group.add_option("", "--no-replace", help="Don't replace strings",
                     action='store_false', dest="enable_replace", default=True)
    group.add_option("", "--no-revert", help="Don't revert changes",
                     action='store_false', dest="revert", default=True)
    group.add_option("", "--stop-on-fail", help="Stop on first failure",
                     action='store_true', dest="stop_on_fail", default=False)
    parser.add_option_group(group)
    group = OptionGroup(parser, "Test Selection")
    group.add_option("", "--test", help="Run a particular test",
                     type=int, dest="test", default=None, metavar="INDEX")
    group.add_option("", "--max-tests", help="Maximum number of tests",
                     type=int, dest="max_tests", default=None, metavar="COUNT")
    group.add_option("", "--pick-input",
                     help="Randomly select an input byte as well as fuzzing",
                     action='store_true', dest="pick_input", default=False)
    parser.add_option_group(group)
    # Everything after the options is the test command; don't parse into it.
    parser.disable_interspersed_args()
    (opts, args) = parser.parse_args()
    if not args:
        parser.error("Invalid number of arguments")
    # Collect the list of inputs.
    input_files = list(opts.input_files)
    for filelist in opts.filelists:
        f = open(filelist)
        try:
            for ln in f:
                ln = ln.strip()
                if ln:
                    input_files.append(ln)
        finally:
            f.close()
    # Sort for a deterministic test numbering across runs.
    input_files.sort()
    if not input_files:
        parser.error("No input files!")
    print '%s: note: fuzzing %d files.' % (sys.argv[0], len(input_files))
    # Make sure the log directory exists if used.
    if opts.log_dir:
        if not os.path.exists(opts.log_dir):
            try:
                os.mkdir(opts.log_dir)
            except OSError:
                print "%s: error: log directory couldn't be created!" % (
                    sys.argv[0],)
                raise SystemExit,1
    # Get the list of insert/replacement strings.
    replacements = list(opts.replacement_chars)
    replacements.extend(opts.replacement_strings)
    for replacement_list in opts.replacement_lists:
        f = open(replacement_list)
        try:
            for ln in f:
                # Strip only the trailing newline; embedded whitespace counts.
                ln = ln[:-1]
                if ln:
                    replacements.append(ln)
        finally:
            f.close()
    # Unique and order the replacement list.
    replacements = list(set(replacements))
    replacements.sort()
    # Create the test generator.
    tg = TestGenerator(input_files, opts.enable_delete, opts.enable_insert,
                       opts.enable_replace, replacements, opts.pick_input)
    print '%s: note: %d input bytes.' % (sys.argv[0], tg.num_positions)
    print '%s: note: %d total tests.' % (sys.argv[0], tg.num_tests)
    # Choose the iteration strategy: one test, N random tests, or forever.
    if opts.test is not None:
        it = [opts.test]
    elif opts.max_tests is not None:
        it = itertools.imap(random.randrange,
                            itertools.repeat(tg.num_tests, opts.max_tests))
    else:
        it = itertools.imap(random.randrange, itertools.repeat(tg.num_tests))
    for test in it:
        t = tg.get_test(test)
        if opts.verbose:
            print '%s: note: running test %d: %r' % (sys.argv[0], test, t)
        ta = TestApplication(tg, t)
        try:
            ta.apply()
            test_result = run_one_test(ta, test, input_files, args)
            if not test_result and opts.stop_on_fail:
                # Leave the fuzzed inputs in place for examination.
                opts.revert = False
                sys.exit(1)
        finally:
            # Restore the inputs unless reverting has been disabled.
            if opts.revert:
                ta.revert()
        sys.stdout.flush()
# Allow use both as a script and as an importable module.
if __name__ == '__main__':
    main()
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2011 Jan Kaluza
* Licensed under the Simplified BSD license.
* See Documentation/Licenses/BSD-simplified.txt for more information.
*/
#include <Swiften/Elements/SpectrumErrorPayload.h>
namespace Swift {

// Construct the payload carrying the given Spectrum transport error code.
SpectrumErrorPayload::SpectrumErrorPayload(Error error) : error_(error) { }

}
| {
"pile_set_name": "Github"
} |
<?php
// Render each form element: optional markup before/after, the field itself,
// and (when present) its function buttons inside a Bootstrap input group.
$html = '';
foreach ($this->elements as $element) {
    $field = $element['field'] ?? '';
    $functionButtons = $element['functionButtons'] ?? '';
    $before = $element['before'] ?? '';
    $after = $element['after'] ?? '';
    $buttons = '';
    if ($functionButtons != '') {
        $buttons = '<span class="input-group-btn">' . $functionButtons . '</span>';
    }
    $html .= $before
        . '<div class="input-group">'
        . $field
        . $buttons
        . '</div>'
        . $after;
}
echo $html;
| {
"pile_set_name": "Github"
} |
CreateIPSecConnectionDetails
============================
.. currentmodule:: oci.core.models
.. autoclass:: CreateIPSecConnectionDetails
:show-inheritance:
:special-members: __init__
:members:
:undoc-members:
:inherited-members: | {
"pile_set_name": "Github"
} |
/*
+----------------------------------------------------------------------+
| Zend Engine |
+----------------------------------------------------------------------+
| Copyright (c) 1998-2011 Zend Technologies Ltd. (http://www.zend.com) |
+----------------------------------------------------------------------+
| This source file is subject to version 2.00 of the Zend license, |
| that is bundled with this package in the file LICENSE, and is |
| available through the world-wide-web at the following url: |
| http://www.zend.com/license/2_00.txt. |
| If you did not receive a copy of the Zend license and are unable to |
| obtain it through the world-wide-web, please send a note to |
| [email protected] so we can mail you a copy immediately. |
+----------------------------------------------------------------------+
| Authors: Andi Gutmans <[email protected]> |
| Zeev Suraski <[email protected]> |
+----------------------------------------------------------------------+
*/
/* $Id: zend_objects_API.c 306939 2011-01-01 02:19:59Z felipe $ */
#include "zend.h"
#include "zend_globals.h"
#include "zend_variables.h"
#include "zend_API.h"
#include "zend_objects_API.h"
#define ZEND_DEBUG_OBJECTS  0

/* Initialize the object store with room for init_size buckets.  Bucket 0 is
 * reserved (zeroed) so that every valid handle is non-zero/truthy. */
ZEND_API void zend_objects_store_init(zend_objects_store *objects, zend_uint init_size)
{
    objects->object_buckets = (zend_object_store_bucket *) emalloc(init_size * sizeof(zend_object_store_bucket));
    objects->top = 1; /* Skip 0 so that handles are true */
    objects->size = init_size;
    objects->free_list_head = -1;
    memset(&objects->object_buckets[0], 0, sizeof(zend_object_store_bucket));
}
/* Release the bucket array itself; the objects in it must already have been
 * freed (see zend_objects_store_free_object_storage). */
ZEND_API void zend_objects_store_destroy(zend_objects_store *objects)
{
    efree(objects->object_buckets);
    objects->object_buckets = NULL;
}
/* Call the destructor of every live object that has not had it called yet.
 * The refcount is bumped around the dtor call so the storage cannot be
 * released while the destructor is still running. */
ZEND_API void zend_objects_store_call_destructors(zend_objects_store *objects TSRMLS_DC)
{
    zend_uint i = 1;

    for (i = 1; i < objects->top ; i++) {
        if (objects->object_buckets[i].valid) {
            struct _store_object *obj = &objects->object_buckets[i].bucket.obj;

            if (!objects->object_buckets[i].destructor_called) {
                objects->object_buckets[i].destructor_called = 1;

                if (obj->dtor && obj->object) {
                    obj->refcount++;
                    obj->dtor(obj->object, i TSRMLS_CC);
                    /* The dtor may have caused the bucket array to be
                     * reallocated; re-fetch the pointer before touching it. */
                    obj = &objects->object_buckets[i].bucket.obj;
                    obj->refcount--;
                }
            }
        }
    }
}
/* Mark every live object as already destructed so that later teardown will
 * not run destructors again. */
ZEND_API void zend_objects_store_mark_destructed(zend_objects_store *objects TSRMLS_DC)
{
    zend_uint i;

    if (!objects->object_buckets) {
        return;
    }
    for (i = 1; i < objects->top ; i++) {
        if (objects->object_buckets[i].valid) {
            objects->object_buckets[i].destructor_called = 1;
        }
    }
}
/* Free the storage of every remaining object at shutdown.  Buckets are not
 * returned to the free list because the whole store is about to go away. */
ZEND_API void zend_objects_store_free_object_storage(zend_objects_store *objects TSRMLS_DC)
{
    zend_uint i = 1;

    for (i = 1; i < objects->top ; i++) {
        if (objects->object_buckets[i].valid) {
            struct _store_object *obj = &objects->object_buckets[i].bucket.obj;

            GC_REMOVE_ZOBJ_FROM_BUFFER(obj);
            objects->object_buckets[i].valid = 0;
            if (obj->free_storage) {
                obj->free_storage(obj->object TSRMLS_CC);
            }
            /* Not adding to free list as we are shutting down anyway */
        }
    }
}
/* Store objects API */

/* Insert an object into the store and return its handle.  A bucket is
 * reused from the free list when available; otherwise the bucket array is
 * grown (doubling) as needed.  The new entry starts with refcount 1, and a
 * NULL dtor defaults to zend_objects_destroy_object. */
ZEND_API zend_object_handle zend_objects_store_put(void *object, zend_objects_store_dtor_t dtor, zend_objects_free_object_storage_t free_storage, zend_objects_store_clone_t clone TSRMLS_DC)
{
    zend_object_handle handle;
    struct _store_object *obj;

    if (EG(objects_store).free_list_head != -1) {
        handle = EG(objects_store).free_list_head;
        EG(objects_store).free_list_head = EG(objects_store).object_buckets[handle].bucket.free_list.next;
    } else {
        if (EG(objects_store).top == EG(objects_store).size) {
            EG(objects_store).size <<= 1;
            EG(objects_store).object_buckets = (zend_object_store_bucket *) erealloc(EG(objects_store).object_buckets, EG(objects_store).size * sizeof(zend_object_store_bucket));
        }
        handle = EG(objects_store).top++;
    }
    obj = &EG(objects_store).object_buckets[handle].bucket.obj;
    EG(objects_store).object_buckets[handle].destructor_called = 0;
    EG(objects_store).object_buckets[handle].valid = 1;

    obj->refcount = 1;
    GC_OBJ_INIT(obj);
    obj->object = object;
    obj->dtor = dtor?dtor:(zend_objects_store_dtor_t)zend_objects_destroy_object;
    obj->free_storage = free_storage;
    obj->clone = clone;
    obj->handlers = NULL;

#if ZEND_DEBUG_OBJECTS
    fprintf(stderr, "Allocated object id #%d\n", handle);
#endif
    return handle;
}
/* Return the store refcount of the object referenced by the given zval. */
ZEND_API zend_uint zend_objects_store_get_refcount(zval *object TSRMLS_DC)
{
    zend_object_handle handle = Z_OBJ_HANDLE_P(object);

    return EG(objects_store).object_buckets[handle].bucket.obj.refcount;
}
/* Increment the store refcount of the object referenced by the given zval. */
ZEND_API void zend_objects_store_add_ref(zval *object TSRMLS_DC)
{
    zend_object_handle handle = Z_OBJ_HANDLE_P(object);

    EG(objects_store).object_buckets[handle].bucket.obj.refcount++;
#if ZEND_DEBUG_OBJECTS
    fprintf(stderr, "Increased refcount of object id #%d\n", handle);
#endif
}
/*
 * Add a reference to an objects store entry given the object handle.
 */
ZEND_API void zend_objects_store_add_ref_by_handle(zend_object_handle handle TSRMLS_DC)
{
    EG(objects_store).object_buckets[handle].bucket.obj.refcount++;
}
/* Push the bucket identified by the local variable 'handle' onto the free
 * list and mark it invalid.  Expects 'handle' to be in scope at expansion. */
#define ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST() \
    EG(objects_store).object_buckets[handle].bucket.free_list.next = EG(objects_store).free_list_head;  \
    EG(objects_store).free_list_head = handle; \
    EG(objects_store).object_buckets[handle].valid = 0;
/* Drop a reference via a zval.  The zval's refcount is held across the call
 * so the destructor cannot free the zval out from under us. */
ZEND_API void zend_objects_store_del_ref(zval *zobject TSRMLS_DC)
{
    zend_object_handle handle;

    handle = Z_OBJ_HANDLE_P(zobject);

    Z_ADDREF_P(zobject);
    zend_objects_store_del_ref_by_handle_ex(handle, Z_OBJ_HT_P(zobject) TSRMLS_CC);
    Z_DELREF_P(zobject);

    GC_ZOBJ_CHECK_POSSIBLE_ROOT(zobject);
}
/*
 * Delete a reference to an objects store entry given the object handle.
 *
 * When the last reference is dropped this runs the destructor (once) and
 * frees the storage, returning the bucket to the free list.  Bailouts from
 * the dtor/free callbacks are caught and re-raised only after bookkeeping
 * is complete, so the store is never left inconsistent.
 */
ZEND_API void zend_objects_store_del_ref_by_handle_ex(zend_object_handle handle, const zend_object_handlers *handlers TSRMLS_DC) /* {{{ */
{
    struct _store_object *obj;
    int failure = 0;

    if (!EG(objects_store).object_buckets) {
        return;
    }

    obj = &EG(objects_store).object_buckets[handle].bucket.obj;

    /* Make sure we hold a reference count during the destructor call
       otherwise, when the destructor ends the storage might be freed
       when the refcount reaches 0 a second time
     */
    if (EG(objects_store).object_buckets[handle].valid) {
        if (obj->refcount == 1) {
            if (!EG(objects_store).object_buckets[handle].destructor_called) {
                EG(objects_store).object_buckets[handle].destructor_called = 1;

                if (obj->dtor) {
                    if (handlers && !obj->handlers) {
                        obj->handlers = handlers;
                    }
                    zend_try {
                        obj->dtor(obj->object, handle TSRMLS_CC);
                    } zend_catch {
                        failure = 1;
                    } zend_end_try();
                }
            }

            /* re-read the object from the object store as the store might have been reallocated in the dtor */
            obj = &EG(objects_store).object_buckets[handle].bucket.obj;

            /* The dtor may have added references; only free if still last. */
            if (obj->refcount == 1) {
                GC_REMOVE_ZOBJ_FROM_BUFFER(obj);
                if (obj->free_storage) {
                    zend_try {
                        obj->free_storage(obj->object TSRMLS_CC);
                    } zend_catch {
                        failure = 1;
                    } zend_end_try();
                }
                ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST();
            }
        }
    }

    obj->refcount--;

#if ZEND_DEBUG_OBJECTS
    if (obj->refcount == 0) {
        fprintf(stderr, "Deallocated object id #%d\n", handle);
    } else {
        fprintf(stderr, "Decreased refcount of object id #%d\n", handle);
    }
#endif

    /* Re-raise any bailout captured above, now that the store is consistent. */
    if (failure) {
        zend_bailout();
    }
}
/* }}} */
/* Clone the object behind zobject via its clone callback and register the
 * copy in the store, sharing the original's dtor/free/clone callbacks and
 * zval handlers. */
ZEND_API zend_object_value zend_objects_store_clone_obj(zval *zobject TSRMLS_DC)
{
    zend_object_value retval;
    void *new_object;
    struct _store_object *obj;
    zend_object_handle handle = Z_OBJ_HANDLE_P(zobject);

    obj = &EG(objects_store).object_buckets[handle].bucket.obj;

    if (obj->clone == NULL) {
        /* E_CORE_ERROR aborts the request; execution does not continue. */
        zend_error(E_CORE_ERROR, "Trying to clone uncloneable object of class %s", Z_OBJCE_P(zobject)->name);
    }

    obj->clone(obj->object, &new_object TSRMLS_CC);

    /* The clone callback may have reallocated the store; re-fetch. */
    obj = &EG(objects_store).object_buckets[handle].bucket.obj;
    retval.handle = zend_objects_store_put(new_object, obj->dtor, obj->free_storage, obj->clone TSRMLS_CC);
    retval.handlers = Z_OBJ_HT_P(zobject);
    EG(objects_store).object_buckets[handle].bucket.obj.handlers = retval.handlers;
    return retval;
}
/* Fetch the raw object pointer for the object referenced by the zval. */
ZEND_API void *zend_object_store_get_object(const zval *zobject TSRMLS_DC)
{
    zend_object_handle handle = Z_OBJ_HANDLE_P(zobject);

    return EG(objects_store).object_buckets[handle].bucket.obj.object;
}
/*
 * Retrieve an entry from the objects store given the object handle.
 */
ZEND_API void *zend_object_store_get_object_by_handle(zend_object_handle handle TSRMLS_DC)
{
    return EG(objects_store).object_buckets[handle].bucket.obj.object;
}
/* zend_object_store_set_object:
 * It is ONLY valid to call this function from within the constructor of an
 * overloaded object.  Its purpose is to set the object pointer for the object
 * when you can't possibly know its value until you have parsed the arguments
 * from the constructor function.  You MUST NOT use this function for any other
 * weird games, or call it at any other time after the object is constructed.
 * */
ZEND_API void zend_object_store_set_object(zval *zobject, void *object TSRMLS_DC)
{
    zend_object_handle handle = Z_OBJ_HANDLE_P(zobject);

    EG(objects_store).object_buckets[handle].bucket.obj.object = object;
}
/* Called when the ctor was terminated by an exception.  Records the object's
 * zval handlers and marks the destructor as already called so it will not be
 * run on the partially constructed object. */
ZEND_API void zend_object_store_ctor_failed(zval *zobject TSRMLS_DC)
{
    zend_object_handle handle = Z_OBJ_HANDLE_P(zobject);
    zend_object_store_bucket *obj_bucket = &EG(objects_store).object_buckets[handle];

    /* Fixed: the original line ended in a stray double semicolon. */
    obj_bucket->bucket.obj.handlers = Z_OBJ_HT_P(zobject);
    obj_bucket->destructor_called = 1;
}
/* Proxy objects workings */

/* A proxy pairs an object zval with the property zval it stands in for;
 * reads/writes through the proxy are forwarded to that property. */
typedef struct _zend_proxy_object {
    zval *object;
    zval *property;
} zend_proxy_object;

/* Handler table for proxy objects; defined at the bottom of this file. */
static zend_object_handlers zend_object_proxy_handlers;
/* Release a proxy: drop its references to the target object and property,
 * then free the proxy struct itself. */
ZEND_API void zend_objects_proxy_free_storage(zend_proxy_object *object TSRMLS_DC)
{
    zval_ptr_dtor(&object->object);
    zval_ptr_dtor(&object->property);
    efree(object);
}
/* Shallow-clone a proxy: the copy shares the same object/property zvals,
 * with their refcounts bumped accordingly. */
ZEND_API void zend_objects_proxy_clone(zend_proxy_object *object, zend_proxy_object **object_clone TSRMLS_DC)
{
    *object_clone = emalloc(sizeof(zend_proxy_object));
    (*object_clone)->object = object->object;
    (*object_clone)->property = object->property;
    zval_add_ref(&(*object_clone)->property);
    zval_add_ref(&(*object_clone)->object);
}
/* Build a new proxy zval wrapping (object, member), registered in the store
 * with the proxy free/clone callbacks and the proxy handler table. */
ZEND_API zval *zend_object_create_proxy(zval *object, zval *member TSRMLS_DC)
{
    zend_proxy_object *pobj = emalloc(sizeof(zend_proxy_object));
    zval *retval;

    pobj->object = object;
    pobj->property = member;
    zval_add_ref(&pobj->property);
    zval_add_ref(&pobj->object);

    MAKE_STD_ZVAL(retval);
    Z_TYPE_P(retval) = IS_OBJECT;
    Z_OBJ_HANDLE_P(retval) = zend_objects_store_put(pobj, NULL, (zend_objects_free_object_storage_t) zend_objects_proxy_free_storage, (zend_objects_store_clone_t) zend_objects_proxy_clone TSRMLS_CC);
    Z_OBJ_HT_P(retval) = &zend_object_proxy_handlers;

    return retval;
}
/* Forward an assignment through the proxy to the target object's
 * write_property handler; warn if the target has none. */
ZEND_API void zend_object_proxy_set(zval **property, zval *value TSRMLS_DC)
{
    zend_proxy_object *probj = zend_object_store_get_object(*property TSRMLS_CC);

    if (Z_OBJ_HT_P(probj->object) && Z_OBJ_HT_P(probj->object)->write_property) {
        Z_OBJ_HT_P(probj->object)->write_property(probj->object, probj->property, value TSRMLS_CC);
    } else {
        zend_error(E_WARNING, "Cannot write property of object - no write handler defined");
    }
}
/* Forward a read through the proxy to the target object's read_property
 * handler; warns and returns NULL if the target has none. */
ZEND_API zval* zend_object_proxy_get(zval *property TSRMLS_DC)
{
    zend_proxy_object *probj = zend_object_store_get_object(property TSRMLS_CC);

    if (Z_OBJ_HT_P(probj->object) && Z_OBJ_HT_P(probj->object)->read_property) {
        return Z_OBJ_HT_P(probj->object)->read_property(probj->object, probj->property, BP_VAR_R TSRMLS_CC);
    } else {
        zend_error(E_WARNING, "Cannot read property of object - no read handler defined");
    }
    return NULL;
}
/* Expose the default (standard) object handler table. */
ZEND_API zend_object_handlers *zend_get_std_object_handlers(void)
{
    return &std_object_handlers;
}
/* Proxy handler table: only the get/set hooks are implemented; everything
 * else is NULL because a proxy is never accessed as a full object. */
static zend_object_handlers zend_object_proxy_handlers = {
    ZEND_OBJECTS_STORE_HANDLERS,
    NULL,                       /* read_property */
    NULL,                       /* write_property */
    NULL,                       /* read dimension */
    NULL,                       /* write_dimension */
    NULL,                       /* get_property_ptr_ptr */
    zend_object_proxy_get,      /* get */
    zend_object_proxy_set,      /* set */
    NULL,                       /* has_property */
    NULL,                       /* unset_property */
    NULL,                       /* has_dimension */
    NULL,                       /* unset_dimension */
    NULL,                       /* get_properties */
    NULL,                       /* get_method */
    NULL,                       /* call_method */
    NULL,                       /* get_constructor */
    NULL,                       /* get_class_entry */
    NULL,                       /* get_class_name */
    NULL,                       /* compare_objects */
    NULL,                       /* cast_object */
    NULL,                       /* count_elements */
};
/*
* Local variables:
* tab-width: 4
* c-basic-offset: 4
* indent-tabs-mode: t
* End:
*/
| {
"pile_set_name": "Github"
} |
#import <Foundation/Foundation.h>
/**
@brief Instance used by subclasses to provide access to service response dictionary entries using methods
with strict return type.
@author Sergey Mamontov
@since 4.0
@copyright © 2010-2018 PubNub, Inc.
*/
@interface PNServiceData : NSObject

// Marker used to group members in Xcode's jump bar; subclasses add their
// strictly-typed accessors below it.
#pragma mark -

@end
| {
"pile_set_name": "Github"
} |
//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// <random>
// template<class Engine, size_t k>
// class shuffle_order_engine
// {
// public:
// // types
// typedef typename Engine::result_type result_type;
#include <random>
#include <type_traits>
// Minimal engine stub satisfying the uniform random number generator
// requirements: it yields Min..Max in sequence, wrapping around.  Used only
// to probe the adaptor's typedefs, not for randomness quality.
template <class UIntType, UIntType Min, UIntType Max>
class rand1
{
public:
    // types
    typedef UIntType result_type;

private:
    result_type x_;

    static_assert(Min < Max, "rand1 invalid parameters");

public:
#ifdef _LIBCPP_HAS_NO_CONSTEXPR
    // Workaround for lack of constexpr in C++03
    static const result_type _Min = Min;
    static const result_type _Max = Max;
#endif

    static _LIBCPP_CONSTEXPR result_type min() {return Min;}
    static _LIBCPP_CONSTEXPR result_type max() {return Max;}

    // Seed is clamped into [Min, Max].
    explicit rand1(result_type sd = Min) : x_(sd)
    {
        if (x_ < Min)
            x_ = Min;
        if (x_ > Max)
            x_ = Max;
    }

    // Return the current value, then advance (wrapping from Max to Min).
    result_type operator()()
    {
        result_type r = x_;
        if (x_ < Max)
            ++x_;
        else
            x_ = Min;
        return r;
    }
};
void
test1()
{
static_assert((std::is_same<
std::shuffle_order_engine<rand1<unsigned long, 0, 10>, 16>::result_type,
unsigned long>::value), "");
}
void
test2()
{
static_assert((std::is_same<
std::shuffle_order_engine<rand1<unsigned long long, 0, 10>, 16>::result_type,
unsigned long long>::value), "");
}
// The checks are compile-time static_asserts; running is a formality.
int main()
{
    test1();
    test2();
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html>
<head>
<meta content="text/html; charset=UTF-8" http-equiv="Content-Type">
<title>module Jekyll::Commands - jekyll-watch-1.1.0 Documentation</title>
<link type="text/css" media="screen" href="../rdoc.css" rel="stylesheet">
<script type="text/javascript">
var rdoc_rel_prefix = "../";
</script>
<script type="text/javascript" charset="utf-8" src="../js/jquery.js"></script>
<script type="text/javascript" charset="utf-8" src="../js/navigation.js"></script>
<script type="text/javascript" charset="utf-8" src="../js/search_index.js"></script>
<script type="text/javascript" charset="utf-8" src="../js/search.js"></script>
<script type="text/javascript" charset="utf-8" src="../js/searcher.js"></script>
<script type="text/javascript" charset="utf-8" src="../js/darkfish.js"></script>
<body id="top" class="module">
<nav id="metadata">
<nav id="home-section" class="section">
<h3 class="section-header">
<a href="../index.html">Home</a>
<a href="../table_of_contents.html#classes">Classes</a>
<a href="../table_of_contents.html#methods">Methods</a>
</h3>
</nav>
<nav id="search-section" class="section project-section">
<form action="#" method="get" accept-charset="utf-8">
<h3 class="section-header">
<input type="text" name="search" placeholder="Search" id="search-field"
title="Type to search, Up and Down to navigate, Enter to load">
</h3>
</form>
<ul id="search-results" class="initially-hidden"></ul>
</nav>
<div id="file-metadata">
<nav id="file-list-section" class="section">
<h3 class="section-header">Defined In</h3>
<ul>
<li>lib/jekyll/commands/watch.rb
</ul>
</nav>
</div>
<div id="class-metadata">
</div>
<div id="project-metadata">
<nav id="classindex-section" class="section project-section">
<h3 class="section-header">Class and Module Index</h3>
<ul class="link-list">
<li><a href="../Jekyll.html">Jekyll</a>
<li><a href="../Jekyll/Commands.html">Jekyll::Commands</a>
<li><a href="../Jekyll/Commands/Watch.html">Jekyll::Commands::Watch</a>
<li><a href="../Jekyll/Watcher.html">Jekyll::Watcher</a>
</ul>
</nav>
</div>
</nav>
<div id="documentation">
<h1 class="module">module Jekyll::Commands</h1>
<div id="description" class="description">
</div><!-- description -->
<section id="5Buntitled-5D" class="documentation-section">
<!-- Methods -->
</section><!-- 5Buntitled-5D -->
</div><!-- documentation -->
<footer id="validator-badges">
<p><a href="http://validator.w3.org/check/referer">[Validate]</a>
<p>Generated by <a href="https://github.com/rdoc/rdoc">RDoc</a> 4.0.0.
<p>Generated with the <a href="http://deveiate.org/projects/Darkfish-Rdoc/">Darkfish Rdoc Generator</a> 3.
</footer>
| {
"pile_set_name": "Github"
} |
using AutoMapper;
using Grand.Core.Infrastructure.Mapper;
using Grand.Web.Areas.Admin.Models.Localization;
using System.Collections.Generic;
using System.Linq;
namespace Grand.Web.Areas.Admin.Infrastructure.Mapper.Profiles
{
/// <summary>
/// AutoMapper profile for admin language models.  UI-only collections
/// (store/currency pickers, flag file names) are ignored here and filled
/// in by the controller; the reverse map turns the selected store ids
/// back into the entity's store list.
/// </summary>
public class LanguageProfile : Profile, IMapperProfile
{
    public LanguageProfile()
    {
        // Entity -> admin model.
        CreateMap<Domain.Localization.Language, LanguageModel>()
            .ForMember(dest => dest.AvailableStores, mo => mo.Ignore())
            .ForMember(dest => dest.AvailableCurrencies, mo => mo.Ignore())
            .ForMember(dest => dest.SelectedStoreIds, mo => mo.Ignore())
            .ForMember(dest => dest.FlagFileNames, mo => mo.Ignore());

        // Admin model -> entity.  Id is never overwritten from the model.
        CreateMap<LanguageModel, Domain.Localization.Language>()
            .ForMember(dest => dest.Id, mo => mo.Ignore())
            .ForMember(dest => dest.Stores, mo => mo.MapFrom(x => x.SelectedStoreIds != null ? x.SelectedStoreIds.ToList() : new List<string>()));
    }

    // Registration order among profiles; 0 = default.
    public int Order => 0;
}
} | {
"pile_set_name": "Github"
} |
/*
* AMD Cryptographic Coprocessor (CCP) driver
*
* Copyright (C) 2013,2016 Advanced Micro Devices, Inc.
*
* Author: Tom Lendacky <[email protected]>
* Author: Gary R Hook <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*/
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/device.h>
#include <linux/pci.h>
#include <linux/pci_ids.h>
#include <linux/dma-mapping.h>
#include <linux/kthread.h>
#include <linux/sched.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/delay.h>
#include <linux/ccp.h>
#include "ccp-dev.h"
#define MSIX_VECTORS			2

/* Per-vector MSI-X bookkeeping: vector number plus the name buffer handed
 * to request_irq() (which keeps a reference to it). */
struct ccp_msix {
	u32 vector;
	char name[16];
};

/* PCI-specific device data, hung off ccp_device->dev_specific. */
struct ccp_pci {
	int msix_count;
	struct ccp_msix msix[MSIX_VECTORS];
};
/* Enable MSI-X (1..MSIX_VECTORS vectors) and request an irq per vector.
 * On any request_irq() failure, already-acquired irqs are released and
 * MSI-X is disabled again.  Returns 0 on success or a negative errno. */
static int ccp_get_msix_irqs(struct ccp_device *ccp)
{
	struct ccp_pci *ccp_pci = ccp->dev_specific;
	struct device *dev = ccp->dev;
	struct pci_dev *pdev = to_pci_dev(dev);
	struct msix_entry msix_entry[MSIX_VECTORS];
	unsigned int name_len = sizeof(ccp_pci->msix[0].name) - 1;
	int v, ret;

	for (v = 0; v < ARRAY_SIZE(msix_entry); v++)
		msix_entry[v].entry = v;

	ret = pci_enable_msix_range(pdev, msix_entry, 1, v);
	if (ret < 0)
		return ret;

	/* On success, ret is the number of vectors actually allocated. */
	ccp_pci->msix_count = ret;
	for (v = 0; v < ccp_pci->msix_count; v++) {
		/* Set the interrupt names and request the irqs */
		snprintf(ccp_pci->msix[v].name, name_len, "%s-%u",
			 ccp->name, v);
		ccp_pci->msix[v].vector = msix_entry[v].vector;
		ret = request_irq(ccp_pci->msix[v].vector,
				  ccp->vdata->perform->irqhandler,
				  0, ccp_pci->msix[v].name, dev);
		if (ret) {
			dev_notice(dev, "unable to allocate MSI-X IRQ (%d)\n",
				   ret);
			goto e_irq;
		}
	}

	return 0;

e_irq:
	/* Unwind: free the irqs acquired so far, then turn MSI-X off. */
	while (v--)
		free_irq(ccp_pci->msix[v].vector, dev);

	pci_disable_msix(pdev);

	ccp_pci->msix_count = 0;

	return ret;
}
/* Fallback path: enable a single MSI vector and request its irq.
 * Returns 0 on success or a negative errno. */
static int ccp_get_msi_irq(struct ccp_device *ccp)
{
	struct device *dev = ccp->dev;
	struct pci_dev *pdev = to_pci_dev(dev);
	int ret;

	ret = pci_enable_msi(pdev);
	if (ret)
		return ret;

	ccp->irq = pdev->irq;
	ret = request_irq(ccp->irq, ccp->vdata->perform->irqhandler, 0,
			  ccp->name, dev);
	if (ret) {
		dev_notice(dev, "unable to allocate MSI IRQ (%d)\n", ret);
		goto e_msi;
	}

	return 0;

e_msi:
	pci_disable_msi(pdev);

	return ret;
}
/* Acquire device interrupts, preferring MSI-X and falling back to MSI. */
static int ccp_get_irqs(struct ccp_device *ccp)
{
	struct device *dev = ccp->dev;
	int ret;

	ret = ccp_get_msix_irqs(ccp);
	if (!ret)
		return 0;

	/* Couldn't get MSI-X vectors, try MSI */
	dev_notice(dev, "could not enable MSI-X (%d), trying MSI\n", ret);
	ret = ccp_get_msi_irq(ccp);
	if (!ret)
		return 0;

	/* Couldn't get MSI interrupt */
	dev_notice(dev, "could not enable MSI (%d)\n", ret);

	return ret;
}
/* Release whichever interrupt mode was set up: all MSI-X vectors, or the
 * single MSI irq.  Safe to call if neither was acquired. */
static void ccp_free_irqs(struct ccp_device *ccp)
{
	struct ccp_pci *ccp_pci = ccp->dev_specific;
	struct device *dev = ccp->dev;
	struct pci_dev *pdev = to_pci_dev(dev);

	if (ccp_pci->msix_count) {
		while (ccp_pci->msix_count--)
			free_irq(ccp_pci->msix[ccp_pci->msix_count].vector,
				 dev);
		pci_disable_msix(pdev);
	} else if (ccp->irq) {
		free_irq(ccp->irq, dev);
		pci_disable_msi(pdev);
	}
	ccp->irq = 0;
}
/* Validate the device's register BAR: it must be a memory resource large
 * enough to hold the version-specific register block.  Returns the BAR
 * index on success, -EIO otherwise. */
static int ccp_find_mmio_area(struct ccp_device *ccp)
{
	struct device *dev = ccp->dev;
	struct pci_dev *pdev = to_pci_dev(dev);
	resource_size_t io_len;
	unsigned long io_flags;

	io_flags = pci_resource_flags(pdev, ccp->vdata->bar);
	io_len = pci_resource_len(pdev, ccp->vdata->bar);
	if ((io_flags & IORESOURCE_MEM) &&
	    (io_len >= (ccp->vdata->offset + 0x800)))
		return ccp->vdata->bar;

	return -EIO;
}
/* PCI probe: allocate the ccp device, enable the PCI device, map the
 * register BAR, set the DMA mask (48-bit preferred, 32-bit fallback) and
 * hand off to the version-specific init routine.  Error paths unwind in
 * reverse order of acquisition. */
static int ccp_pci_probe(struct pci_dev *pdev, const struct pci_device_id *id)
{
	struct ccp_device *ccp;
	struct ccp_pci *ccp_pci;
	struct device *dev = &pdev->dev;
	unsigned int bar;
	int ret;

	ret = -ENOMEM;
	ccp = ccp_alloc_struct(dev);
	if (!ccp)
		goto e_err;

	ccp_pci = devm_kzalloc(dev, sizeof(*ccp_pci), GFP_KERNEL);
	if (!ccp_pci)
		goto e_err;

	ccp->dev_specific = ccp_pci;
	/* Version data comes from the matching pci_device_id entry. */
	ccp->vdata = (struct ccp_vdata *)id->driver_data;
	if (!ccp->vdata || !ccp->vdata->version) {
		ret = -ENODEV;
		dev_err(dev, "missing driver data\n");
		goto e_err;
	}
	ccp->get_irq = ccp_get_irqs;
	ccp->free_irq = ccp_free_irqs;

	ret = pci_request_regions(pdev, "ccp");
	if (ret) {
		dev_err(dev, "pci_request_regions failed (%d)\n", ret);
		goto e_err;
	}

	ret = pci_enable_device(pdev);
	if (ret) {
		dev_err(dev, "pci_enable_device failed (%d)\n", ret);
		goto e_regions;
	}

	pci_set_master(pdev);

	ret = ccp_find_mmio_area(ccp);
	if (ret < 0)
		goto e_device;
	bar = ret;

	ret = -EIO;
	ccp->io_map = pci_iomap(pdev, bar, 0);
	if (!ccp->io_map) {
		dev_err(dev, "pci_iomap failed\n");
		goto e_device;
	}
	/* Registers start at the version-specific offset within the BAR. */
	ccp->io_regs = ccp->io_map + ccp->vdata->offset;

	ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(48));
	if (ret) {
		ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));
		if (ret) {
			dev_err(dev, "dma_set_mask_and_coherent failed (%d)\n",
				ret);
			goto e_iomap;
		}
	}

	dev_set_drvdata(dev, ccp);

	if (ccp->vdata->setup)
		ccp->vdata->setup(ccp);

	ret = ccp->vdata->perform->init(ccp);
	if (ret)
		goto e_iomap;

	dev_notice(dev, "enabled\n");

	return 0;

e_iomap:
	pci_iounmap(pdev, ccp->io_map);

e_device:
	pci_disable_device(pdev);

e_regions:
	pci_release_regions(pdev);

e_err:
	dev_notice(dev, "initialization failed\n");
	return ret;
}
/* PCI remove: tear down the version-specific state, then undo the PCI
 * setup performed in probe. */
static void ccp_pci_remove(struct pci_dev *pdev)
{
	struct device *dev = &pdev->dev;
	struct ccp_device *ccp = dev_get_drvdata(dev);

	if (!ccp)
		return;

	ccp->vdata->perform->destroy(ccp);

	pci_iounmap(pdev, ccp->io_map);

	pci_disable_device(pdev);

	pci_release_regions(pdev);

	dev_notice(dev, "disabled\n");
}
#ifdef CONFIG_PM
/* Suspend: flag the device as suspending, wake every queue kthread so it
 * can notice the flag, then wait until all queues report suspended. */
static int ccp_pci_suspend(struct pci_dev *pdev, pm_message_t state)
{
	struct device *dev = &pdev->dev;
	struct ccp_device *ccp = dev_get_drvdata(dev);
	unsigned long flags;
	unsigned int i;

	spin_lock_irqsave(&ccp->cmd_lock, flags);

	ccp->suspending = 1;

	/* Wake all the queue kthreads to prepare for suspend */
	for (i = 0; i < ccp->cmd_q_count; i++)
		wake_up_process(ccp->cmd_q[i].kthread);

	spin_unlock_irqrestore(&ccp->cmd_lock, flags);

	/* Wait for all queue kthreads to say they're done */
	while (!ccp_queues_suspended(ccp))
		wait_event_interruptible(ccp->suspend_queue,
					 ccp_queues_suspended(ccp));

	return 0;
}

/* Resume: clear the suspend flags and wake the queue kthreads back up. */
static int ccp_pci_resume(struct pci_dev *pdev)
{
	struct device *dev = &pdev->dev;
	struct ccp_device *ccp = dev_get_drvdata(dev);
	unsigned long flags;
	unsigned int i;

	spin_lock_irqsave(&ccp->cmd_lock, flags);

	ccp->suspending = 0;

	/* Wake up all the kthreads */
	for (i = 0; i < ccp->cmd_q_count; i++) {
		ccp->cmd_q[i].suspended = 0;
		wake_up_process(ccp->cmd_q[i].kthread);
	}

	spin_unlock_irqrestore(&ccp->cmd_lock, flags);

	return 0;
}
#endif
/*
 * PCI IDs handled by this driver. The driver_data slot carries a pointer
 * to per-version data (probe stores it as ccp->vdata and uses its
 * ->offset, ->setup and ->perform members).
 */
static const struct pci_device_id ccp_pci_table[] = {
	{ PCI_VDEVICE(AMD, 0x1537), (kernel_ulong_t)&ccpv3 },	/* CCP v3 */
	{ PCI_VDEVICE(AMD, 0x1456), (kernel_ulong_t)&ccpv5a },	/* CCP v5a */
	{ PCI_VDEVICE(AMD, 0x1468), (kernel_ulong_t)&ccpv5b },	/* CCP v5b */
	/* Last entry must be zero */
	{ 0, }
};
/* Export the ID table so module autoloading can match these devices. */
MODULE_DEVICE_TABLE(pci, ccp_pci_table);
/*
 * PCI driver glue: binds the probe/remove callbacks (and, when CONFIG_PM
 * is set, the legacy suspend/resume hooks) defined above to the "ccp"
 * driver name and its device ID table.
 */
static struct pci_driver ccp_pci_driver = {
	.name = "ccp",
	.id_table = ccp_pci_table,
	.probe = ccp_pci_probe,
	.remove = ccp_pci_remove,
#ifdef CONFIG_PM
	.suspend = ccp_pci_suspend,
	.resume = ccp_pci_resume,
#endif
};
/*
 * ccp_pci_init - register the CCP PCI driver with the PCI core
 *
 * Return: 0 on success or a negative errno from pci_register_driver().
 */
int ccp_pci_init(void)
{
	return pci_register_driver(&ccp_pci_driver);
}
/*
 * ccp_pci_exit - unregister the CCP PCI driver from the PCI core
 */
void ccp_pci_exit(void)
{
	pci_unregister_driver(&ccp_pci_driver);
}
| {
"pile_set_name": "Github"
} |
version: 1
dn: m-oid=2.5.6.21,ou=objectClasses,cn=core,ou=schema
creatorsname: uid=admin,ou=system
objectclass: metaObjectClass
objectclass: metaTop
objectclass: top
m-oid: 2.5.6.21
m-name: pkiUser
m-description: RFC2587: a PKI user
m-obsolete: FALSE
m-supobjectclass: top
entryUUID: 6b2f2a96-b5de-4d9a-86d5-e6b2d3f1e1a4
m-may: userCertificate
m-typeobjectclass: AUXILIARY
entryCSN: 20130919081858.611000Z#000001#000#000000
entryParentId: 0472417b-d9d5-4773-adb4-bd63258ecb89
createTimestamp: 20130919081908.882Z
| {
"pile_set_name": "Github"
} |
// NOTICE!! DO NOT USE ANY OF THIS JAVASCRIPT
// IT'S ALL JUST JUNK FOR OUR DOCS!
// ++++++++++++++++++++++++++++++++++++++++++
// Docs-only bootstrap script: wires up demo widgets on DOM-ready and
// registers a custom "jsonpi" ajax transport used by the customizer.
!function ($) {
  $(function(){
    var $window = $(window)
    // Disable certain links in docs
    $('section [href^=#]').click(function (e) {
      e.preventDefault()
    })
    // side bar: pin the nav, with a breakpoint-dependent top offset
    $('.bs-docs-sidenav').affix({
      offset: {
        top: function () { return $window.width() <= 980 ? 290 : 210 }
      , bottom: 270
      }
    })
    // make code pretty (only if the prettify library is loaded)
    window.prettyPrint && prettyPrint()
    // add-ons: mirror the checkbox state onto the wrapper's .active class
    // NOTE(review): .attr('checked') reflects the attribute, not the live
    // property, in jQuery >= 1.6 — verify against the bundled jQuery version
    $('.add-on :checkbox').on('click', function () {
      var $this = $(this)
        , method = $this.attr('checked') ? 'addClass' : 'removeClass'
      $(this).parents('.add-on')[method]('active')
    })
    // add tipsies to grid for scaffolding: show each column's pixel width
    if ($('#gridSystem').length) {
      $('#gridSystem').tooltip({
        selector: '.show-grid > div'
      , title: function () { return $(this).width() + 'px' }
      })
    }
    // tooltip demo (delegated to anchors marked rel=tooltip)
    $('.tooltip-demo').tooltip({
      selector: "a[rel=tooltip]"
    })
    $('.tooltip-test').tooltip()
    $('.popover-test').popover()
    // popover demo
    $("a[rel=popover]")
      .popover()
      .click(function(e) {
        e.preventDefault()
      })
    // button state demo: show "loading" for 3s, then reset
    $('#fat-btn')
      .click(function () {
        var btn = $(this)
        btn.button('loading')
        setTimeout(function () {
          btn.button('reset')
        }, 3000)
      })
    // carousel demo
    $('#myCarousel').carousel()
    // javascript build logic: cached input sets for the customizer panes
    var inputsComponent = $("#components.download input")
      , inputsPlugin = $("#plugins.download input")
      , inputsVariables = $("#variables.download input")
    // toggle all plugin checkboxes
    $('#components.download .toggle-all').on('click', function (e) {
      e.preventDefault()
      inputsComponent.attr('checked', !inputsComponent.is(':checked'))
    })
    $('#plugins.download .toggle-all').on('click', function (e) {
      e.preventDefault()
      inputsPlugin.attr('checked', !inputsPlugin.is(':checked'))
    })
    // "toggle all" for variables just clears every text field
    $('#variables.download .toggle-all').on('click', function (e) {
      e.preventDefault()
      inputsVariables.val('')
    })
    // request built javascript: collect selections and POST them to the
    // build server via the jsonpi transport registered below
    $('.download-btn').on('click', function () {
      var css = $("#components.download input:checked")
            .map(function () { return this.value })
            .toArray()
        , js = $("#plugins.download input:checked")
            .map(function () { return this.value })
            .toArray()
        , vars = {}
        , img = ['glyphicons-halflings.png', 'glyphicons-halflings-white.png']
      // non-empty variable fields become { label-text: value } pairs
      $("#variables.download input")
        .each(function () {
          $(this).val() && (vars[ $(this).prev().text() ] = $(this).val())
        })
      $.ajax({
        type: 'POST'
        // "?dev" in the URL targets a local build server instead
      , url: /\?dev/.test(window.location) ? 'http://localhost:3000' : 'http://bootstrap.herokuapp.com'
      , dataType: 'jsonpi'
      , params: {
          js: js
        , css: css
        , vars: vars
        , img: img
        }
      })
    })
  })
  // Modified from the original jsonpi https://github.com/benvinegar/jquery-jsonpi
  // Transport that submits a hidden form into a throwaway iframe, letting a
  // cross-origin POST happen without XHR (no response is read back).
  $.ajaxTransport('jsonpi', function(opts, originalOptions, jqXHR) {
    var url = opts.url;
    return {
      send: function(_, completeCallback) {
        var name = 'jQuery_iframe_' + jQuery.now()
          , iframe, form
        iframe = $('<iframe>')
          .attr('name', name)
          .appendTo('head')
        form = $('<form>')
          .attr('method', opts.type) // GET or POST
          .attr('action', url)
          .attr('target', name)
        // each entry in opts.params becomes a hidden input; non-string
        // values are JSON-encoded
        $.each(opts.params, function(k, v) {
          $('<input>')
            .attr('type', 'hidden')
            .attr('name', k)
            .attr('value', typeof v == 'string' ? v : JSON.stringify(v))
            .appendTo(form)
        })
        form.appendTo('body').submit()
      }
    }
  })
}(window.jQuery)
"pile_set_name": "Github"
} |
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% %
% SU2 configuration file %
% Case description: Adjoint transonic inviscid flow around a NACA0012 airfoil %
% Author: Thomas D. Economon %
% Institution: Stanford University %
% Date: 2011.11.02 %
% File Version 7.0.6 "Blackbird" %
% %
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% ------------- DIRECT, ADJOINT, AND LINEARIZED PROBLEM DEFINITION ------------%
%
% Physical governing equations (EULER, NAVIER_STOKES,
% WAVE_EQUATION, HEAT_EQUATION, FEM_ELASTICITY,
% POISSON_EQUATION)
SOLVER= EULER
%
% Mathematical problem (DIRECT, CONTINUOUS_ADJOINT)
MATH_PROBLEM= CONTINUOUS_ADJOINT
%
% Restart solution (NO, YES)
RESTART_SOL= YES
%
% Write binary restart files (YES, NO)
WRT_BINARY_RESTART= NO
%
% Read binary restart files (YES, NO)
READ_BINARY_RESTART= NO
%
% Number of zones
NZONES= 1
% ----------- COMPRESSIBLE AND INCOMPRESSIBLE FREE-STREAM DEFINITION ----------%
%
% Mach number (non-dimensional, based on the free-stream values)
MACH_NUMBER= 0.8
%
% Angle of attack (degrees)
AOA= 1.25
%
% Free-stream pressure (101325.0 N/m^2 by default, only Euler flows)
FREESTREAM_PRESSURE= 101325.0
%
% Free-stream temperature (288.15 K by default)
FREESTREAM_TEMPERATURE= 288.15
% ---------------------- REFERENCE VALUE DEFINITION ---------------------------%
%
% Reference origin for moment computation
REF_ORIGIN_MOMENT_X = 0.25
REF_ORIGIN_MOMENT_Y = 0.00
REF_ORIGIN_MOMENT_Z = 0.00
%
% Reference length for pitching, rolling, and yawing non-dimensional moment
REF_LENGTH= 1.0
%
% Reference area for force coefficients (0 implies automatic calculation)
REF_AREA= 1.0
% ----------------------- BOUNDARY CONDITION DEFINITION -----------------------%
%
% Marker of the Euler boundary (0 = no marker)
MARKER_EULER= ( airfoil )
%
% Marker of the far field (0 = no marker)
MARKER_FAR= ( farfield )
%
% Marker of the surface which is going to be plotted or designed
MARKER_PLOTTING= ( airfoil )
%
% Marker of the surface where the functional (Cd, Cl, etc.) will be evaluated
MARKER_MONITORING= ( airfoil )
% ------------- COMMON PARAMETERS TO DEFINE THE NUMERICAL METHOD --------------%
% Numerical method for spatial gradients (GREEN_GAUSS, LEAST_SQUARES,
% WEIGHTED_LEAST_SQUARES)
NUM_METHOD_GRAD= GREEN_GAUSS
%
% Adjoint problem boundary condition (DRAG, LIFT, SIDEFORCE, MOMENT_X,
% MOMENT_Y, MOMENT_Z, EFFICIENCY,
% EQUIVALENT_AREA, NEARFIELD_PRESSURE,
% FORCE_X, FORCE_Y, FORCE_Z, THRUST,
% TORQUE, FREE_SURFACE, TOTAL_HEAT,
% MAXIMUM_HEATFLUX, INVERSE_DESIGN_PRESSURE,
% INVERSE_DESIGN_HEATFLUX)
OBJECTIVE_FUNCTION= DRAG
%
% Courant-Friedrichs-Lewy condition of the finest grid
CFL_NUMBER= 5.0
%
% Adaptive CFL number (NO, YES)
CFL_ADAPT= NO
%
% Parameters of the adaptive CFL number (factor down, factor up, CFL min value,
% CFL max value )
CFL_ADAPT_PARAM= ( 1.5, 0.5, 1.0, 100.0 )
%
% Runge-Kutta alpha coefficients
RK_ALPHA_COEFF= ( 0.66667, 0.66667, 1.000000 )
%
% Number of total iterations
ITER= 150
% ------------------------ LINEAR SOLVER DEFINITION ---------------------------%
%
% Linear solver or smoother for implicit formulations (BCGSTAB, FGMRES, SMOOTHER)
LINEAR_SOLVER= FGMRES
%
% Preconditioner of the Krylov linear solver (ILU, LU_SGS, LINELET, JACOBI)
LINEAR_SOLVER_PREC= LU_SGS
%
% Minimum error of the linear solver for implicit formulations
LINEAR_SOLVER_ERROR= 1E-6
%
% Max number of iterations of the linear solver for the implicit formulation
LINEAR_SOLVER_ITER= 5
%
% Linear solver ILU preconditioner fill-in level (1 by default)
LINEAR_SOLVER_ILU_FILL_IN= 0
% -------------------------- MULTIGRID PARAMETERS -----------------------------%
%
% Multi-Grid Levels (0 = no multi-grid)
MGLEVEL= 2
%
% Multi-grid cycle (V_CYCLE, W_CYCLE, FULLMG_CYCLE)
MGCYCLE= W_CYCLE
%
% Multi-Grid PreSmoothing Level
MG_PRE_SMOOTH= ( 1, 2, 2, 2 )
%
% Multi-Grid PostSmoothing Level
MG_POST_SMOOTH= ( 1, 1, 1, 1 )
%
% Jacobi implicit smoothing of the correction
MG_CORRECTION_SMOOTH= ( 1, 1, 1, 1 )
%
% Damping factor for the residual restriction
MG_DAMP_RESTRICTION= 1.0
%
% Damping factor for the correction prolongation
MG_DAMP_PROLONGATION= 1.0
% -------------------- FLOW NUMERICAL METHOD DEFINITION -----------------------%
%
% Convective numerical method (JST, LAX-FRIEDRICH, CUSP, ROE, AUSM, HLLC,
% TURKEL_PREC, MSW)
CONV_NUM_METHOD_FLOW= JST
%
% Monotonic Upwind Scheme for Conservation Laws (TVD) in the flow equations.
% Required for 2nd order upwind schemes (NO, YES)
MUSCL_FLOW= YES
%
% Slope limiter (VENKATAKRISHNAN, MINMOD)
SLOPE_LIMITER_FLOW= VENKATAKRISHNAN
%
% Coefficient for the limiter
VENKAT_LIMITER_COEFF= 0.03
%
% 2nd and 4th order artificial dissipation coefficients
JST_SENSOR_COEFF= ( 0.5, 0.02 )
%
% Time discretization (RUNGE-KUTTA_EXPLICIT, EULER_IMPLICIT, EULER_EXPLICIT)
TIME_DISCRE_FLOW= EULER_IMPLICIT
% ---------------- ADJOINT-FLOW NUMERICAL METHOD DEFINITION -------------------%
%
% Convective numerical method (JST, LAX-FRIEDRICH, ROE)
CONV_NUM_METHOD_ADJFLOW= ROE
%
% Monotonic Upwind Scheme for Conservation Laws (TVD) in the adjoint flow equations.
% Required for 2nd order upwind schemes (NO, YES)
MUSCL_ADJFLOW= YES
%
% Slope limiter (NONE, VENKATAKRISHNAN, BARTH_JESPERSEN, VAN_ALBADA_EDGE,
% SHARP_EDGES, WALL_DISTANCE)
SLOPE_LIMITER_ADJFLOW= NONE
%
% Coefficient for the sharp edges limiter
ADJ_SHARP_LIMITER_COEFF= 3.0
%
% 2nd, and 4th order artificial dissipation coefficients
ADJ_JST_SENSOR_COEFF= ( 0.5, 0.02 )
%
% Reduction factor of the CFL coefficient in the adjoint problem
CFL_REDUCTION_ADJFLOW= 0.5
%
% Time discretization (RUNGE-KUTTA_EXPLICIT, EULER_IMPLICIT)
TIME_DISCRE_ADJFLOW= EULER_IMPLICIT
% ----------------------- DESIGN VARIABLE PARAMETERS --------------------------%
%
% Kind of deformation (FFD_SETTING, HICKS_HENNE, HICKS_HENNE_NORMAL, PARABOLIC,
% HICKS_HENNE_SHOCK, NACA_4DIGITS, DISPLACEMENT, ROTATION,
% FFD_CONTROL_POINT, FFD_DIHEDRAL_ANGLE, FFD_TWIST_ANGLE,
% FFD_ROTATION)
DV_KIND= HICKS_HENNE
%
% Marker of the surface in which we are going apply the shape deformation
DV_MARKER= ( airfoil )
%
% Parameters of the shape deformation
% - HICKS_HENNE_FAMILY ( Lower(0)/Upper(1) side, x_Loc )
% - NACA_4DIGITS ( 1st digit, 2nd digit, 3rd and 4th digit )
% - PARABOLIC ( 1st digit, 2nd and 3rd digit )
% - DISPLACEMENT ( x_Disp, y_Disp, z_Disp )
% - ROTATION ( x_Orig, y_Orig, z_Orig, x_End, y_End, z_End )
DV_PARAM= ( 1, 0.5 )
%
% Value of the shape deformation
DV_VALUE= 0.01
% ------------------------ GRID DEFORMATION PARAMETERS ------------------------%
%
% Linear solver or smoother for implicit formulations (FGMRES, RESTARTED_FGMRES, BCGSTAB)
DEFORM_LINEAR_SOLVER= FGMRES
%
% Preconditioner of the Krylov linear solver (ILU, LU_SGS, JACOBI)
DEFORM_LINEAR_SOLVER_PREC= ILU
%
% Number of smoothing iterations for mesh deformation
DEFORM_LINEAR_SOLVER_ITER= 1000
%
% Number of nonlinear deformation iterations (surface deformation increments)
DEFORM_NONLINEAR_ITER= 1
%
% Print the residuals during mesh deformation to the console (YES, NO)
DEFORM_CONSOLE_OUTPUT= YES
%
% Minimum residual criteria for the linear solver convergence of grid deformation
DEFORM_LINEAR_SOLVER_ERROR= 1E-14
%
% Deformation coefficient (in theory from -1.0 to 0.5, a large value is also valid)
DEFORM_COEFF = 1E6
%
% Type of element stiffness imposed for FEA mesh deformation (INVERSE_VOLUME,
% WALL_DISTANCE, CONSTANT_STIFFNESS)
DEFORM_STIFFNESS_TYPE= WALL_DISTANCE
%
% Visualize the surface deformation (NO, YES)
VISUALIZE_SURFACE_DEF= NO
%
% Visualize the volume deformation (NO, YES)
VISUALIZE_VOLUME_DEF= NO
% --------------------------- CONVERGENCE PARAMETERS --------------------------%
% Convergence criteria (CAUCHY, RESIDUAL)
%
CONV_CRITERIA= RESIDUAL
%
%
% Min value of the residual (log10 of the residual)
CONV_RESIDUAL_MINVAL= -12
%
% Start Cauchy criteria at iteration number
CONV_STARTITER= 10
%
% Number of elements to apply the criteria
CONV_CAUCHY_ELEMS= 100
%
% Epsilon to control the series convergence
CONV_CAUCHY_EPS= 1E-6
%
% ------------------------- INPUT/OUTPUT INFORMATION --------------------------%
%
% Mesh input file
MESH_FILENAME= mesh_NACA0012_inv.su2
%
% Mesh input file format (SU2, CGNS, NETCDF_ASCII)
MESH_FORMAT= SU2
%
% Mesh output file
MESH_OUT_FILENAME= mesh_out.su2
%
% Restart flow input file
SOLUTION_FILENAME= solution_flow.dat
%
% Restart adjoint input file
SOLUTION_ADJ_FILENAME= solution_adj.dat
%
% Output tabular format (CSV, TECPLOT)
TABULAR_FORMAT= TECPLOT
%
% Output file convergence history (w/o extension)
CONV_FILENAME= history
%
% Output file restart flow
RESTART_FILENAME= restart_flow.dat
%
% Output file restart adjoint
RESTART_ADJ_FILENAME= restart_adj.dat
%
% Output file flow (w/o extension) variables
VOLUME_FILENAME= flow
%
% Output file adjoint (w/o extension) variables
VOLUME_ADJ_FILENAME= adjoint
%
% Output Objective function gradient (using continuous adjoint)
GRAD_OBJFUNC_FILENAME= of_grad.dat
%
% Output file surface flow coefficient (w/o extension)
SURFACE_FILENAME= surface_flow
%
% Output file surface adjoint coefficient (w/o extension)
SURFACE_ADJ_FILENAME= surface_adjoint
%
% Writing solution file frequency
WRT_SOL_FREQ= 250
%
% Writing convergence history frequency
WRT_CON_FREQ= 1
%
% Screen output
SCREEN_OUTPUT = (INNER_ITER, RMS_ADJ_DENSITY, RMS_ADJ_ENERGY, SENS_GEO, SENS_AOA, RMS_DENSITY, RMS_ENERGY, LIFT, DRAG)
HISTORY_OUTPUT= (ITER, RMS_RES, AERO_COEFF, SENSITIVITY)
VOLUME_OUTPUT= (COORDINATES, SOLUTION, PRESSURE, TEMPERATURE, SENSITIVITY)
OUTPUT_FILES=(RESTART_ASCII, PARAVIEW, SURFACE_CSV)
% --------------------- OPTIMAL SHAPE DESIGN DEFINITION -----------------------%
%
% Available flow based objective functions or constraint functions
% DRAG, LIFT, SIDEFORCE, EFFICIENCY, BUFFET,
% FORCE_X, FORCE_Y, FORCE_Z,
% MOMENT_X, MOMENT_Y, MOMENT_Z,
% THRUST, TORQUE, FIGURE_OF_MERIT,
% EQUIVALENT_AREA, NEARFIELD_PRESSURE,
% TOTAL_HEATFLUX, MAXIMUM_HEATFLUX,
% INVERSE_DESIGN_PRESSURE, INVERSE_DESIGN_HEATFLUX,
% SURFACE_TOTAL_PRESSURE, SURFACE_MASSFLOW
% SURFACE_STATIC_PRESSURE, SURFACE_MACH
%
% Available geometrical based objective functions or constraint functions
% AIRFOIL_AREA, AIRFOIL_THICKNESS, AIRFOIL_CHORD, AIRFOIL_TOC, AIRFOIL_AOA,
% WING_VOLUME, WING_MIN_THICKNESS, WING_MAX_THICKNESS, WING_MAX_CHORD, WING_MIN_TOC, WING_MAX_TWIST, WING_MAX_CURVATURE, WING_MAX_DIHEDRAL
% STATION#_WIDTH, STATION#_AREA, STATION#_THICKNESS, STATION#_CHORD, STATION#_TOC,
% STATION#_TWIST (where # is the index of the station defined in GEO_LOCATION_STATIONS)
%
% Available design variables
% 2D Design variables
% FFD_CONTROL_POINT_2D ( 19, Scale | Mark. List | FFD_BoxTag, i_Ind, j_Ind, x_Mov, y_Mov )
% FFD_CAMBER_2D ( 20, Scale | Mark. List | FFD_BoxTag, i_Ind )
% FFD_THICKNESS_2D ( 21, Scale | Mark. List | FFD_BoxTag, i_Ind )
% FFD_TWIST_2D ( 22, Scale | Mark. List | FFD_BoxTag, x_Orig, y_Orig )
% HICKS_HENNE ( 30, Scale | Mark. List | Lower(0)/Upper(1) side, x_Loc )
% ANGLE_OF_ATTACK ( 101, Scale | Mark. List | 1.0 )
%
% 3D Design variables
% FFD_CONTROL_POINT ( 11, Scale | Mark. List | FFD_BoxTag, i_Ind, j_Ind, k_Ind, x_Mov, y_Mov, z_Mov )
% FFD_NACELLE ( 12, Scale | Mark. List | FFD_BoxTag, rho_Ind, theta_Ind, phi_Ind, rho_Mov, phi_Mov )
% FFD_GULL ( 13, Scale | Mark. List | FFD_BoxTag, j_Ind )
% FFD_CAMBER ( 14, Scale | Mark. List | FFD_BoxTag, i_Ind, j_Ind )
% FFD_TWIST ( 15, Scale | Mark. List | FFD_BoxTag, j_Ind, x_Orig, y_Orig, z_Orig, x_End, y_End, z_End )
% FFD_THICKNESS ( 16, Scale | Mark. List | FFD_BoxTag, i_Ind, j_Ind )
% FFD_ROTATION ( 18, Scale | Mark. List | FFD_BoxTag, x_Axis, y_Axis, z_Axis, x_Turn, y_Turn, z_Turn )
% FFD_ANGLE_OF_ATTACK ( 24, Scale | Mark. List | FFD_BoxTag, 1.0 )
%
% Global design variables
% TRANSLATION ( 1, Scale | Mark. List | x_Disp, y_Disp, z_Disp )
% ROTATION ( 2, Scale | Mark. List | x_Axis, y_Axis, z_Axis, x_Turn, y_Turn, z_Turn )
%
% Optimization objective function with scaling factor
% ex= Objective * Scale
OPT_OBJECTIVE= DRAG * 0.001
%
% Optimization constraint functions with scaling factors, separated by semicolons
% ex= (Objective = Value ) * Scale, use '>','<','='
OPT_CONSTRAINT= ( LIFT > 0.327 ) * 0.001; ( MOMENT_Z > 0.0 ) * 0.001; ( AIRFOIL_THICKNESS > 0.12 ) * 0.001
%
% Optimization design variables, separated by semicolons
DEFINITION_DV= ( 30, 1.0 | airfoil | 0, 0.05 ); ( 30, 1.0 | airfoil | 0, 0.10 ); ( 30, 1.0 | airfoil | 0, 0.15 ); ( 30, 1.0 | airfoil | 0, 0.20 ); ( 30, 1.0 | airfoil | 0, 0.25 ); ( 30, 1.0 | airfoil | 0, 0.30 ); ( 30, 1.0 | airfoil | 0, 0.35 ); ( 30, 1.0 | airfoil | 0, 0.40 ); ( 30, 1.0 | airfoil | 0, 0.45 ); ( 30, 1.0 | airfoil | 0, 0.50 ); ( 30, 1.0 | airfoil | 0, 0.55 ); ( 30, 1.0 | airfoil | 0, 0.60 ); ( 30, 1.0 | airfoil | 0, 0.65 ); ( 30, 1.0 | airfoil | 0, 0.70 ); ( 30, 1.0 | airfoil | 0, 0.75 ); ( 30, 1.0 | airfoil | 0, 0.80 ); ( 30, 1.0 | airfoil | 0, 0.85 ); ( 30, 1.0 | airfoil | 0, 0.90 ); ( 30, 1.0 | airfoil | 0, 0.95 ); ( 30, 1.0 | airfoil | 1, 0.05 ); ( 30, 1.0 | airfoil | 1, 0.10 ); ( 30, 1.0 | airfoil | 1, 0.15 ); ( 30, 1.0 | airfoil | 1, 0.20 ); ( 30, 1.0 | airfoil | 1, 0.25 ); ( 30, 1.0 | airfoil | 1, 0.30 ); ( 30, 1.0 | airfoil | 1, 0.35 ); ( 30, 1.0 | airfoil | 1, 0.40 ); ( 30, 1.0 | airfoil | 1, 0.45 ); ( 30, 1.0 | airfoil | 1, 0.50 ); ( 30, 1.0 | airfoil | 1, 0.55 ); ( 30, 1.0 | airfoil | 1, 0.60 ); ( 30, 1.0 | airfoil | 1, 0.65 ); ( 30, 1.0 | airfoil | 1, 0.70 ); ( 30, 1.0 | airfoil | 1, 0.75 ); ( 30, 1.0 | airfoil | 1, 0.80 ); ( 30, 1.0 | airfoil | 1, 0.85 ); ( 30, 1.0 | airfoil | 1, 0.90 ); ( 30, 1.0 | airfoil | 1, 0.95 )
| {
"pile_set_name": "Github"
} |
arXiv arxiv a ar arX arXi v iv Xiv rXiv BLOCKSTART LINESTART ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 1 0 1 0
: : : : : : : : : : BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 PUNCT 1 0 1 0
cond cond c co con cond d nd ond cond BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 1 0 1 0
- - - - - - - - - - BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 HYPHEN 1 0 1 0
mat mat m ma mat mat t at mat mat BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 1 0 1 0
/ / / / / / / / / / BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 NOPUNCT 1 0 1 0
0112448v1 0112448v1 0 01 011 0112 1 v1 8v1 48v1 BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS CONTAINSDIGITS 0 0 0 1 0 0 0 0 NOPUNCT 1 0 1 0
[ [ [ [ [ [ [ [ [ [ BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 OPENBRACKET 1 0 1 0
cond cond c co con cond d nd ond cond BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 1 0 1 0
- - - - - - - - - - BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 HYPHEN 1 0 1 0
mat mat m ma mat mat t at mat mat BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 1 0 1 0
. . . . . . . . . . BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 DOT 1 0 1 0
supr supr s su sup supr r pr upr supr BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 1 0 1 0
- - - - - - - - - - BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 HYPHEN 1 0 1 0
con con c co con con n on con con BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 1 1 0 0 0 0 0 NOPUNCT 1 0 1 0
] ] ] ] ] ] ] ] ] ] BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 ENDBRACKET 1 0 1 0
25 25 2 25 25 25 5 25 25 25 BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 0 0 0 0 0 NOPUNCT 1 0 1 0
Dec dec D De Dec Dec c ec Dec Dec BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 1 0 0 0 NOPUNCT 1 0 1 0
2001 2001 2 20 200 2001 1 01 001 2001 BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 1 0 0 0 0 NOPUNCT 1 0 1 0
Quasiparticle quasiparticle Q Qu Qua Quas e le cle icle BLOCKSTART LINESTART ALIGNEDLEFT NEWFONT LOWERFONT 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Excitation excitation E Ex Exc Exci n on ion tion BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
in in i in in in n in in in BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 1 1 0 0 1 0 0 NOPUNCT 0 0 1 0
the the t th the the e he the the BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Superconducting superconducting S Su Sup Supe g ng ing ting BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Pyrochlore pyrochlore P Py Pyr Pyro e re ore lore BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Cd cd C Cd Cd Cd d Cd Cd Cd BLOCKSTART LINESTART ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
2 2 2 2 2 2 2 2 2 2 BLOCKIN LINEIN ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
Re re R Re Re Re e Re Re Re BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
2 2 2 2 2 2 2 2 2 2 BLOCKIN LINEIN ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
O o O O O O O O O O BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
7 7 7 7 7 7 7 7 7 7 BLOCKIN LINEIN ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
Probed probed P Pr Pro Prob d ed bed obed BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
by by b by by by y by by by BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
Muon muon M Mu Muo Muon n on uon Muon BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Spin spin S Sp Spi Spin n in pin Spin BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Rotation rotation R Ro Rot Rota n on ion tion BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Ryosuke ryosuke R Ry Ryo Ryos e ke uke suke BLOCKSTART LINESTART ALIGNEDLEFT NEWFONT LOWERFONT 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Kadono kadono K Ka Kad Kado o no ono dono BLOCKIN LINEIN ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
* * * * * * * * * * BLOCKIN LINEIN ALIGNEDLEFT NEWFONT LOWERFONT 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
Wataru wataru W Wa Wat Wata u ru aru taru BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Higemoto higemoto H Hi Hig Hige o to oto moto BLOCKIN LINEIN ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
Akihiro akihiro A Ak Aki Akih o ro iro hiro BLOCKIN LINEIN ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Koda koda K Ko Kod Koda a da oda Koda BLOCKIN LINEIN ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
Yu yu Y Yu Yu Yu u Yu Yu Yu BLOCKIN LINEIN ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 1 0 0 0 1 0 0 NOPUNCT 0 0 1 0
Kawasaki kawasaki K Ka Kaw Kawa i ki aki saki BLOCKIN LINEIN ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
1 1 1 1 1 1 1 1 1 1 BLOCKIN LINEIN ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
Masashi masashi M Ma Mas Masa i hi shi ashi BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Hanawa hanawa H Ha Han Hana a wa awa nawa BLOCKSTART LINESTART ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
2 2 2 2 2 2 2 2 2 2 BLOCKIN LINEIN ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
and and a an and and d nd and and BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Zenji zenji Z Ze Zen Zenj i ji nji enji BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Hiroi hiroi H Hi Hir Hiro i oi roi iroi BLOCKIN LINEIN ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
2 2 2 2 2 2 2 2 2 2 BLOCKEND LINEEND ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
Institute institute I In Ins Inst e te ute tute BLOCKSTART LINESTART ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
of of o of of of f of of of BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
Materials materials M Ma Mat Mate s ls als ials BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Structure structure S St Str Stru e re ure ture BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Science science S Sc Sci Scie e ce nce ence BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
High high H Hi Hig High h gh igh High BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 1 1 0 0 1 0 0 NOPUNCT 0 0 1 0
Energy energy E En Ene Ener y gy rgy ergy BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
Accelerator accelerator A Ac Acc Acce r or tor ator BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Research research R Re Res Rese h ch rch arch BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 1 1 0 0 1 0 0 NOPUNCT 0 0 1 0
Organization organization O Or Org Orga n on ion tion BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 1 1 0 0 0 0 0 NOPUNCT 0 0 1 0
( ( ( ( ( ( ( ( ( ( BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 OPENBRACKET 0 0 1 0
KEK kek K KE KEK KEK K EK KEK KEK BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
) ) ) ) ) ) ) ) ) ) BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 ENDBRACKET 0 0 1 0
, , , , , , , , , , BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
Tsukuba tsukuba T Ts Tsu Tsuk a ba uba kuba BLOCKSTART LINESTART ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 1 0 0 COMMA 0 0 1 0
Ibaraki ibaraki I Ib Iba Ibar i ki aki raki BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
305 305 3 30 305 305 5 05 305 305 BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
- - - - - - - - - - BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 HYPHEN 0 0 1 0
0801 0801 0 08 080 0801 1 01 801 0801 BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
1 1 1 1 1 1 1 1 1 1 BLOCKSTART LINESTART ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
Graduate graduate G Gr Gra Grad e te ate uate BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
School school S Sc Sch Scho l ol ool hool BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
of of o of of of f of of of BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
Engineering engineering E En Eng Engi g ng ing ring BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Science science S Sc Sci Scie e ce nce ence BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
Osaka osaka O Os Osa Osak a ka aka saka BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
University university U Un Uni Univ y ty ity sity BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
Toyonaka toyonaka T To Toy Toyo a ka aka naka BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 1 0 0 COMMA 0 0 1 0
Osaka osaka O Os Osa Osak a ka aka saka BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
560 560 5 56 560 560 0 60 560 560 BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
- - - - - - - - - - BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 HYPHEN 0 0 1 0
8531 8531 8 85 853 8531 1 31 531 8531 BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
2 2 2 2 2 2 2 2 2 2 BLOCKSTART LINESTART ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
Institute institute I In Ins Inst e te ute tute BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
for for f fo for for r or for for BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Solid solid S So Sol Soli d id lid olid BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
State state S St Sta Stat e te ate tate BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Physics physics P Ph Phy Phys s cs ics sics BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
University university U Un Uni Univ y ty ity sity BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
of of o of of of f of of of BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
Tokyo tokyo T To Tok Toky o yo kyo okyo BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
Kashiwa kashiwa K Ka Kas Kash a wa iwa hiwa BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 1 0 0 COMMA 0 0 1 0
Chiba chiba C Ch Chi Chib a ba iba hiba BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
277 277 2 27 277 277 7 77 277 277 BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
- - - - - - - - - - BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 HYPHEN 0 0 1 0
8581 8581 8 85 858 8581 1 81 581 8581 BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
( ( ( ( ( ( ( ( ( ( BLOCKSTART LINESTART ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 OPENBRACKET 0 0 1 0
Received received R Re Rec Rece d ed ved ived BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
September september S Se Sep Sept r er ber mber BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 1 0 0 1 0 0 0 NOPUNCT 0 0 1 0
12 12 1 12 12 12 2 12 12 12 BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
2001 2001 2 20 200 2001 1 01 001 2001 BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS ALLDIGIT 0 0 0 1 0 0 0 0 NOPUNCT 0 0 1 0
) ) ) ) ) ) ) ) ) ) BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 ENDBRACKET 0 0 1 0
Abstract abstract A Ab Abs Abst t ct act ract BLOCKEND LINEEND ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
The the T Th The The e he The The BLOCKSTART LINESTART ALIGNEDLEFT NEWFONT LOWERFONT 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
quasiparticle quasiparticle q qu qua quas e le cle icle BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
excitations excitations e ex exc exci s ns ons ions BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
in in i in in in n in in in BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 1 1 0 0 1 0 0 NOPUNCT 0 0 1 0
the the t th the the e he the the BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
mixed mixed m mi mix mixe d ed xed ixed BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
state state s st sta stat e te ate tate BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
of of o of of of f of of of BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
Cd cd C Cd Cd Cd d Cd Cd Cd BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
2 2 2 2 2 2 2 2 2 2 BLOCKIN LINEIN ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
Re re R Re Re Re e Re Re Re BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
2 2 2 2 2 2 2 2 2 2 BLOCKIN LINEIN ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
O o O O O O O O O O BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
7 7 7 7 7 7 7 7 7 7 BLOCKIN LINEIN ALIGNEDLEFT NEWFONT LOWERFONT 0 0 NOCAPS ALLDIGIT 1 0 0 0 0 1 0 0 NOPUNCT 0 0 1 0
have have h ha hav have e ve ave have BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
been been b be bee been n en een been BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
studied studied s st stu stud d ed ied died BLOCKSTART LINESTART ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
by by b by by by y by by by BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
means means m me mea mean s ns ans eans BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 1 1 0 0 0 0 0 NOPUNCT 0 0 1 0
of of o of of of f of of of BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
muon muon m mu muo muon n on uon muon BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
spin spin s sp spi spin n in pin spin BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
rotation rotation r ro rot rota n on ion tion BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
/ / / / / / / / / / BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
relaxation relaxation r re rel rela n on ion tion BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
( ( ( ( ( ( ( ( ( ( BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 OPENBRACKET 0 0 1 0
µSR µsr µ µS µSR µSR R SR µSR µSR BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
) ) ) ) ) ) ) ) ) ) BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 ENDBRACKET 0 0 1 0
. . . . . . . . . . BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 DOT 0 0 1 0
The the T Th The The e he The The BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
tempera tempera t te tem temp a ra era pera BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
- - - - - - - - - - BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 HYPHEN 0 0 1 0
ture ture t tu tur ture e re ure ture BLOCKSTART LINESTART ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
dependence dependence d de dep depe e ce nce ence BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
of of o of of of f of of of BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
the the t th the the e he the the BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
magnetic magnetic m ma mag magn c ic tic etic BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
penetration penetration p pe pen pene n on ion tion BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
depth depth d de dep dept h th pth epth BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
( ( ( ( ( ( ( ( ( ( BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 OPENBRACKET 0 0 1 0
λ λ λ λ λ λ λ λ λ λ BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 1 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
) ) ) ) ) ) ) ) ) ) BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 ENDBRACKET 0 0 1 0
is is i is is is s is is is BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
consistent consistent c co con cons t nt ent tent BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
with with w wi wit with h th ith with BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
a a a a a a a a a a BLOCKSTART LINESTART ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 1 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
nearly nearly n ne nea near y ly rly arly BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
isotropic isotropic i is iso isot c ic pic opic BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
superconducting superconducting s su sup supe g ng ing ting BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
order order o or ord orde r er der rder BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
parameter parameter p pa par para r er ter eter BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
although although a al alt alth h gh ugh ough BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
a a a a a a a a a a BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 1 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
slight slight s sl sli slig t ht ght ight BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
dis dis d di dis dis s is dis dis BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
- - - - - - - - - - BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 HYPHEN 0 0 1 0
crepancy crepancy c cr cre crep y cy ncy ancy BLOCKSTART LINESTART ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
which which w wh whi whic h ch ich hich BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
is is i is is is s is is is BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
dependent dependent d de dep depe t nt ent dent BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
on on o on on on n on on on BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
the the t th the the e he the the BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
details details d de det deta s ls ils ails BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
in in i in in in n in in in BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 1 1 0 0 1 0 0 NOPUNCT 0 0 1 0
the the t th the the e he the the BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
analysis analysis a an ana anal s is sis ysis BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
may may m ma may may y ay may may BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 1 1 0 1 1 0 0 NOPUNCT 0 0 1 0
be be b be be be e be be be BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
present present p pr pre pres t nt ent sent BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
. . . . . . . . . . BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 DOT 0 0 1 0
This this T Th Thi This s is his This BLOCKSTART LINESTART ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
is is i is is is s is is is BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
also also a al als also o so lso also BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
supported supported s su sup supp d ed ted rted BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
by by b by by by y by by by BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
the the t th the the e he the the BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
relatively relatively r re rel rela y ly ely vely BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
weak weak w we wea weak k ak eak weak BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
field field f fi fie fiel d ld eld ield BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 1 1 0 0 1 0 0 NOPUNCT 0 0 1 0
dependence dependence d de dep depe e ce nce ence BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
of of o of of of f of of of BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
λ λ λ λ λ λ λ λ λ λ BLOCKIN LINEIN ALIGNEDLEFT NEWFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 1 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
. . . . . . . . . . BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 DOT 0 0 1 0
* * * * * * * * * * BLOCKIN LINESTART ALIGNEDLEFT NEWFONT LOWERFONT 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 NOPUNCT 0 0 1 0
Also also A Al Als Also o so lso Also BLOCKIN LINEIN ALIGNEDLEFT NEWFONT HIGHERFONT 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
at at a at at at t at at at BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
School school S Sc Sch Scho l ol ool hool BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
of of o of of of f of of of BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
Mathematical mathematical M Ma Mat Math l al cal ical BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
and and a an and and d nd and and BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Physical physical P Ph Phy Phys l al cal ical BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Science science S Sc Sci Scie e ce nce ence BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
, , , , , , , , , , BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 ALLCAP NODIGIT 1 0 0 0 0 0 0 0 COMMA 0 0 1 0
The the T Th The The e he The The BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Graduate graduate G Gr Gra Grad e te ate uate BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
University university U Un Uni Univ y ty ity sity BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 1 0 0 NOPUNCT 0 0 1 0
for for f fo for for r or for for BLOCKIN LINEIN ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 NOCAPS NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Advanced advanced A Ad Adv Adva d ed ced nced BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
Studies studies S St Stu Stud s es ies dies BLOCKEND LINEEND ALIGNEDLEFT SAMEFONT SAMEFONTSIZE 0 0 INITCAP NODIGIT 0 0 1 0 0 0 0 0 NOPUNCT 0 0 1 0
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RunConfigurationProducerService">
<option name="ignoredProducers">
<set>
<option value="org.jetbrains.plugins.gradle.execution.test.runner.AllInPackageGradleConfigurationProducer" />
<option value="org.jetbrains.plugins.gradle.execution.test.runner.TestClassGradleConfigurationProducer" />
<option value="org.jetbrains.plugins.gradle.execution.test.runner.TestMethodGradleConfigurationProducer" />
</set>
</option>
</component>
</project> | {
"pile_set_name": "Github"
} |
# MMSegmentation config: FCN with an HRNetV2-W48 backbone, inheriting the
# dataset/schedule settings from the HRNet-W18 ADE20K base config and only
# overriding the channel widths for the wider backbone.
_base_ = './fcn_hr18_512x512_160k_ade20k.py'
model = dict(
    # ImageNet-pretrained HRNetV2-W48 weights from the open-mmlab model zoo.
    pretrained='open-mmlab://msra/hrnetv2_w48',
    backbone=dict(
        extra=dict(
            # Per-branch channel widths for the W48 variant; these override
            # the narrower widths of the inherited W18 base config.
            stage2=dict(num_channels=(48, 96)),
            stage3=dict(num_channels=(48, 96, 192)),
            stage4=dict(num_channels=(48, 96, 192, 384)))),
    decode_head=dict(
        # The head takes all four branch outputs; `channels` equals the sum
        # of the branch widths (i.e. the concatenated feature dimension).
        in_channels=[48, 96, 192, 384], channels=sum([48, 96, 192, 384])))
| {
"pile_set_name": "Github"
} |
# This workflow will build all flavors of your React Native for Windows app
# project every time changes are pushed to master. This checks that every
# change builds on Windows.
name: App CI
on:
push:
branches:
- master
jobs:
# This job builds and deploys both Debug and Release versions of the app,
# for both x86 and x64 architectures.
build_and_deploy:
name: Build and Deploy
runs-on: windows-latest
strategy:
matrix:
architecture: [x86, x64]
configuration: [Debug, Release]
steps:
- uses: actions/checkout@v2
- name: Gather environment info
run: npx envinfo
- name: Yarn install
run: yarn install
- name: Build ${{ matrix.architecture }} Debug
if: ${{ matrix.configuration == 'Debug' }}
run: npx react-native run-windows --no-launch --no-packager --logging --arch ${{ matrix.architecture }}
- name: Build ${{ matrix.architecture }} Release
if: ${{ matrix.configuration == 'Release' }}
run: npx react-native run-windows --no-launch --no-packager --logging --release --arch ${{ matrix.architecture }}
# This job builds (but does not deploy) both Debug and Release versions of
# the app, for both ARM and ARM64 architectures. They are not deployed
# because the build machines are x64 and the deploy would fail.
build:
name: Build
runs-on: windows-latest
strategy:
matrix:
architecture: [ARM, ARM64]
configuration: [Debug, Release]
steps:
- uses: actions/checkout@v2
- name: Gather environment info
run: npx envinfo
- name: Yarn install
run: yarn install
- name: Build ${{ matrix.architecture }} Debug
if: ${{ matrix.configuration == 'Debug' }}
run: npx react-native run-windows --no-deploy --no-launch --no-packager --logging --arch ${{ matrix.architecture }}
- name: Build ${{ matrix.architecture }} Release
if: ${{ matrix.configuration == 'Release' }}
run: npx react-native run-windows --no-deploy --no-launch --no-packager --logging --release --arch ${{ matrix.architecture }}
| {
"pile_set_name": "Github"
} |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>net.anotheria</groupId>
<artifactId>moskito-integration</artifactId>
<version>2.10.0-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>moskito-inspect-remote</artifactId>
<version>2.10.0-SNAPSHOT</version>
<name>moskito inspect support for remote inspect connection</name>
<description>
        This project contains the dependencies and configuration (web-fragment) needed to enable moskito-inspect remotely. To use it, simply add it as a dependency.
</description>
<dependencies>
<dependency>
<groupId>net.anotheria</groupId>
<artifactId>moskito-webui</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</project> | {
"pile_set_name": "Github"
} |
#!/usr/bin/env ruby
require 'rubygems' # or use Bundler.setup
require 'eventmachine'
# Minimal EventMachine connection handler that keeps a class-level
# registry of every live client connection.
class SimpleChatServer < EM::Connection
  # Shared across all instances: one entry per open connection.
  @@connected_clients = []

  # --- EventMachine lifecycle callbacks ---

  # Called by EventMachine once a new connection has been accepted.
  def post_init
    @@connected_clients << self
    puts "A client has connected..."
  end

  # Called by EventMachine when a connection is closed or lost.
  def unbind
    @@connected_clients.delete(self)
    puts "A client has left..."
  end

  # --- Helpers ---

  # Every connected client except this one.
  def other_peers
    @@connected_clients - [self]
  end
end
EventMachine.run do
  # hit Control + C to stop
  %w[INT TERM].each { |signal| Signal.trap(signal) { EventMachine.stop } }
  EventMachine.start_server("0.0.0.0", 10000, SimpleChatServer)
end
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.jpa</groupId>
<artifactId>HibernateSpringBootOrderByRandom</artifactId>
<version>1.0</version>
<packaging>jar</packaging>
<name>HibernateSpringBootOrderByRandom</name>
<description>JPA project for Spring Boot</description>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.4.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-jpa</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
| {
"pile_set_name": "Github"
} |
# $NetBSD: md.sparc64,v 1.254 2015/07/03 18:28:37 matt Exp $
./libexec/ld.elf_so-sparc base-sysutil-bin compat,pic
./sbin/edlabel base-sysutil-root obsolete
./usr/bin/fdformat base-util-bin
./usr/libexec/ld.elf_so-sparc base-sysutil-bin compat,pic
./usr/lkm/compat_netbsd32.o base-obsolete obsolete
./usr/lkm/compat_sunos32.o base-obsolete obsolete
./usr/lkm/compat_svr4.o base-obsolete obsolete
./usr/lkm/compat_svr4_32.o base-obsolete obsolete
./usr/lkm/exec_elf.o base-obsolete obsolete
./usr/lkm/exec_netbsd32_aout.o base-obsolete obsolete
./usr/lkm/exec_netbsd32_elf.o base-obsolete obsolete
./usr/lkm/exec_sunos32_aout.o base-obsolete obsolete
./usr/lkm/exec_svr4_32_elf.o base-obsolete obsolete
./usr/mdec/binstall base-sysutil-bin
./usr/mdec/bootblk base-sysutil-bin
./usr/mdec/installboot base-obsolete obsolete
./usr/mdec/ofwboot base-sysutil-bin
./usr/mdec/ofwboot.net base-sysutil-bin
./usr/sbin/eeprom base-sysutil-bin
./usr/sbin/ofctl base-sysutil-bin
| {
"pile_set_name": "Github"
} |
jobs:
- job: Build
pool:
vmImage: 'Ubuntu 16.04'
strategy:
maxParallel: 10
matrix:
%%VERSIONS%%
steps:
- template: ../templates/azure-steps.yml | {
"pile_set_name": "Github"
} |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A module target for TraverseTest.test_module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.tools.common import test_module2
class ModuleClass1(object):
  """Test fixture traversed by TraverseTest.test_module.

  Holds a reference into ``test_module2`` so the traversal exercises
  objects defined in a different module.
  """
  def __init__(self):
    # Instance attribute pointing at a class from the sibling test module.
    self._m2 = test_module2.ModuleClass2()
  def __model_class1_method__(self):
    # Deliberately dunder-styled no-op method; presumably present so the
    # traversal test covers custom dunder names -- TODO confirm in the test.
    pass
| {
"pile_set_name": "Github"
} |
From baeca7ea594fe9faa1d40d35e76066c6002ca638 Mon Sep 17 00:00:00 2001
From: Thomas Petazzoni <[email protected]>
Date: Sat, 10 Nov 2012 17:45:03 +0100
Subject: [PATCH] Add more XDR files needed to build rpcbind on top of libtirpc
Signed-off-by: Thomas Petazzoni <[email protected]>
[[email protected]: update for 0.3.1]
Signed-off-by: "Yann E. MORIN" <[email protected]>
---
Makefile.am | 6 +
tirpc/rpcsvc/mount.x | 257 ++++++++++
tirpc/rpcsvc/nfs_prot.x | 1266 +++++++++++++++++++++++++++++++++++++++++++++++
tirpc/rpcsvc/rquota.x | 67 +++
4 files changed, 1596 insertions(+)
create mode 100644 tirpc/rpcsvc/mount.x
create mode 100644 tirpc/rpcsvc/nfs_prot.x
create mode 100644 tirpc/rpcsvc/rquota.x
diff --git a/Makefile.am b/Makefile.am
index aa5908e..4999066 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -2,6 +2,9 @@ SUBDIRS = src man doc rpcgen
ACLOCAL_AMFLAGS = -I m4
GENFILES = tirpc/rpcsvc/crypt.h \
+ tirpc/rpcsvc/mount.h \
+ tirpc/rpcsvc/nfs_prot.h \
+ tirpc/rpcsvc/rquota.h \
tirpc/rpc/rpcb_prot.h
noinst_HEADERS = tirpc/reentrant.h \
@@ -11,6 +14,9 @@ noinst_HEADERS = tirpc/reentrant.h \
nobase_include_HEADERS = tirpc/netconfig.h \
tirpc/rpcsvc/crypt.x \
+ tirpc/rpcsvc/mount.x \
+ tirpc/rpcsvc/nfs_prot.x \
+ tirpc/rpcsvc/rquota.x \
tirpc/rpc/xdr.h \
tirpc/rpc/types.h \
tirpc/rpc/svc_soc.h \
diff --git a/tirpc/rpcsvc/mount.x b/tirpc/rpcsvc/mount.x
new file mode 100644
index 0000000..f68a06f
--- /dev/null
+++ b/tirpc/rpcsvc/mount.x
@@ -0,0 +1,257 @@
+/*
+ * Sun RPC is a product of Sun Microsystems, Inc. and is provided for
+ * unrestricted use provided that this legend is included on all tape
+ * media and as a part of the software program in whole or part. Users
+ * may copy or modify Sun RPC without charge, but are not authorized
+ * to license or distribute it to anyone else except as part of a product or
+ * program developed by the user.
+ *
+ * SUN RPC IS PROVIDED AS IS WITH NO WARRANTIES OF ANY KIND INCLUDING THE
+ * WARRANTIES OF DESIGN, MERCHANTIBILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE, OR ARISING FROM A COURSE OF DEALING, USAGE OR TRADE PRACTICE.
+ *
+ * Sun RPC is provided with no support and without any obligation on the
+ * part of Sun Microsystems, Inc. to assist in its use, correction,
+ * modification or enhancement.
+ *
+ * SUN MICROSYSTEMS, INC. SHALL HAVE NO LIABILITY WITH RESPECT TO THE
+ * INFRINGEMENT OF COPYRIGHTS, TRADE SECRETS OR ANY PATENTS BY SUN RPC
+ * OR ANY PART THEREOF.
+ *
+ * In no event will Sun Microsystems, Inc. be liable for any lost revenue
+ * or profits or other special, indirect and consequential damages, even if
+ * Sun has been advised of the possibility of such damages.
+ *
+ * Sun Microsystems, Inc.
+ * 2550 Garcia Avenue
+ * Mountain View, California 94043
+ */
+
+/*
+ * Protocol description for the mount program
+ */
+
+#ifndef RPC_HDR
+%#ifndef lint
+%/*static char sccsid[] = "from: @(#)mount.x 1.2 87/09/18 Copyr 1987 Sun Micro";*/
+%/*static char sccsid[] = "from: @(#)mount.x 2.1 88/08/01 4.0 RPCSRC";*/
+%static const char rcsid[] =
+% "$FreeBSD: src/include/rpcsvc/mount.x,v 1.6 1999/08/27 23:45:08 peter Exp $";
+%#endif /* not lint */
+#endif
+
+const MNTPATHLEN = 1024; /* maximum bytes in a pathname argument */
+const MNTNAMLEN = 255; /* maximum bytes in a name argument */
+const FHSIZE = 32; /* size in bytes of a file handle */
+#ifdef WANT_NFS3
+const FHSIZE3 = 64; /* size in bytes of a file handle (v3) */
+#endif
+
+/*
+ * The fhandle is the file handle that the server passes to the client.
+ * All file operations are done using the file handles to refer to a file
+ * or a directory. The file handle can contain whatever information the
+ * server needs to distinguish an individual file.
+ */
+typedef opaque fhandle[FHSIZE];
+#ifdef WANT_NFS3
+typedef opaque fhandle3<FHSIZE3>;
+#endif
+
+/*
+ * If a status of zero is returned, the call completed successfully, and
+ * a file handle for the directory follows. A non-zero status indicates
+ * some sort of error. The status corresponds with UNIX error numbers.
+ */
+union fhstatus switch (unsigned fhs_status) {
+case 0:
+ fhandle fhs_fhandle;
+default:
+ void;
+};
+
+#ifdef WANT_NFS3
+/*
+ * Status codes returned by the version 3 mount call.
+ */
+enum mountstat3 {
+ MNT3_OK = 0, /* no error */
+ MNT3ERR_PERM = 1, /* Not owner */
+ MNT3ERR_NOENT = 2, /* No such file or directory */
+ MNT3ERR_IO = 5, /* I/O error */
+ MNT3ERR_ACCES = 13, /* Permission denied */
+ MNT3ERR_NOTDIR = 20, /* Not a directory */
+ MNT3ERR_INVAL = 22, /* Invalid argument */
+ MNT3ERR_NAMETOOLONG = 63, /* Filename too long */
+ MNT3ERR_NOTSUPP = 10004, /* Operation not supported */
+ MNT3ERR_SERVERFAULT = 10006 /* A failure on the server */
+};
+
+struct mountres3_ok {
+ fhandle3 fhandle;
+ int auth_flavors<>;
+};
+
+union mountres3 switch (mountstat3 fhs_status) {
+case 0:
+ mountres3_ok mountinfo;
+default:
+ void;
+};
+#endif
+
+/*
+ * The type dirpath is the pathname of a directory
+ */
+typedef string dirpath<MNTPATHLEN>;
+
+/*
+ * The type name is used for arbitrary names (hostnames, groupnames)
+ */
+typedef string name<MNTNAMLEN>;
+
+/*
+ * A list of who has what mounted
+ */
+typedef struct mountbody *mountlist;
+struct mountbody {
+ name ml_hostname;
+ dirpath ml_directory;
+ mountlist ml_next;
+};
+
+/*
+ * A list of netgroups
+ */
+typedef struct groupnode *groups;
+struct groupnode {
+ name gr_name;
+ groups gr_next;
+};
+
+/*
+ * A list of what is exported and to whom
+ */
+typedef struct exportnode *exports;
+struct exportnode {
+ dirpath ex_dir;
+ groups ex_groups;
+ exports ex_next;
+};
+
+program MOUNTPROG {
+ /*
+ * Version one of the mount protocol communicates with version two
+ * of the NFS protocol. Version three communicates with
+ * version three of the NFS protocol. The only connecting
+ * point is the fhandle structure, which is the same for both
+ * protocols.
+ */
+ version MOUNTVERS {
+ /*
+ * Does no work. It is made available in all RPC services
+ * to allow server reponse testing and timing
+ */
+ void
+ MOUNTPROC_NULL(void) = 0;
+
+ /*
+ * If fhs_status is 0, then fhs_fhandle contains the
+ * file handle for the directory. This file handle may
+ * be used in the NFS protocol. This procedure also adds
+ * a new entry to the mount list for this client mounting
+ * the directory.
+ * Unix authentication required.
+ */
+ fhstatus
+ MOUNTPROC_MNT(dirpath) = 1;
+
+ /*
+ * Returns the list of remotely mounted filesystems. The
+ * mountlist contains one entry for each hostname and
+ * directory pair.
+ */
+ mountlist
+ MOUNTPROC_DUMP(void) = 2;
+
+ /*
+ * Removes the mount list entry for the directory
+ * Unix authentication required.
+ */
+ void
+ MOUNTPROC_UMNT(dirpath) = 3;
+
+ /*
+ * Removes all of the mount list entries for this client
+ * Unix authentication required.
+ */
+ void
+ MOUNTPROC_UMNTALL(void) = 4;
+
+ /*
+ * Returns a list of all the exported filesystems, and which
+ * machines are allowed to import it.
+ */
+ exports
+ MOUNTPROC_EXPORT(void) = 5;
+
+ /*
+ * Identical to MOUNTPROC_EXPORT above
+ */
+ exports
+ MOUNTPROC_EXPORTALL(void) = 6;
+ } = 1;
+#ifdef WANT_NFS3
+ version MOUNTVERS3 {
+ /*
+ * Does no work. It is made available in all RPC services
+ * to allow server reponse testing and timing
+ */
+ void
+ MOUNTPROC_NULL(void) = 0;
+
+ /*
+ * If mountres3.fhs_status is MNT3_OK, then
+ * mountres3.mountinfo contains the file handle for
+ * the directory and a list of acceptable
+ * authentication flavors. This file handle may only
+ * be used in the NFS version 3 protocol. This
+ * procedure also results in the server adding a new
+ * entry to its mount list recording that this client
+ * has mounted the directory. AUTH_UNIX authentication
+ * or better is required.
+ */
+ mountres3
+ MOUNTPROC_MNT(dirpath) = 1;
+
+ /*
+ * Returns the list of remotely mounted filesystems. The
+ * mountlist contains one entry for each hostname and
+ * directory pair.
+ */
+ mountlist
+ MOUNTPROC_DUMP(void) = 2;
+
+ /*
+ * Removes the mount list entry for the directory
+ * Unix authentication required.
+ */
+ void
+ MOUNTPROC_UMNT(dirpath) = 3;
+
+ /*
+ * Removes all of the mount list entries for this client
+ * Unix authentication required.
+ */
+ void
+ MOUNTPROC_UMNTALL(void) = 4;
+
+ /*
+ * Returns a list of all the exported filesystems, and which
+ * machines are allowed to import it.
+ */
+ exports
+ MOUNTPROC_EXPORT(void) = 5;
+ } = 3;
+#endif
+} = 100005;
diff --git a/tirpc/rpcsvc/nfs_prot.x b/tirpc/rpcsvc/nfs_prot.x
new file mode 100644
index 0000000..01564f8
--- /dev/null
+++ b/tirpc/rpcsvc/nfs_prot.x
@@ -0,0 +1,1266 @@
+/*
+ * Sun RPC is a product of Sun Microsystems, Inc. and is provided for
+ * unrestricted use provided that this legend is included on all tape
+ * media and as a part of the software program in whole or part. Users
+ * may copy or modify Sun RPC without charge, but are not authorized
+ * to license or distribute it to anyone else except as part of a product or
+ * program developed by the user.
+ *
+ * SUN RPC IS PROVIDED AS IS WITH NO WARRANTIES OF ANY KIND INCLUDING THE
+ * WARRANTIES OF DESIGN, MERCHANTIBILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE, OR ARISING FROM A COURSE OF DEALING, USAGE OR TRADE PRACTICE.
+ *
+ * Sun RPC is provided with no support and without any obligation on the
+ * part of Sun Microsystems, Inc. to assist in its use, correction,
+ * modification or enhancement.
+ *
+ * SUN MICROSYSTEMS, INC. SHALL HAVE NO LIABILITY WITH RESPECT TO THE
+ * INFRINGEMENT OF COPYRIGHTS, TRADE SECRETS OR ANY PATENTS BY SUN RPC
+ * OR ANY PART THEREOF.
+ *
+ * In no event will Sun Microsystems, Inc. be liable for any lost revenue
+ * or profits or other special, indirect and consequential damages, even if
+ * Sun has been advised of the possibility of such damages.
+ *
+ * Sun Microsystems, Inc.
+ * 2550 Garcia Avenue
+ * Mountain View, California 94043
+ */
+
+#ifndef RPC_HDR
+%#ifndef lint
+%/*static char sccsid[] = "from: @(#)nfs_prot.x 1.2 87/10/12 Copyr 1987 Sun Micro";*/
+%/*static char sccsid[] = "from: @(#)nfs_prot.x 2.1 88/08/01 4.0 RPCSRC";*/
+%static const char rcsid[] =
+% "$FreeBSD: src/include/rpcsvc/nfs_prot.x,v 1.7 1999/08/27 23:45:08 peter Exp $";
+%#endif /* not lint */
+#endif
+
+const NFS_PORT = 2049;
+const NFS_MAXDATA = 8192;
+const NFS_MAXPATHLEN = 1024;
+const NFS_MAXNAMLEN = 255;
+const NFS_FHSIZE = 32;
+const NFS_COOKIESIZE = 4;
+const NFS_FIFO_DEV = -1; /* size kludge for named pipes */
+
+/*
+ * File types
+ */
+const NFSMODE_FMT = 0170000; /* type of file */
+const NFSMODE_DIR = 0040000; /* directory */
+const NFSMODE_CHR = 0020000; /* character special */
+const NFSMODE_BLK = 0060000; /* block special */
+const NFSMODE_REG = 0100000; /* regular */
+const NFSMODE_LNK = 0120000; /* symbolic link */
+const NFSMODE_SOCK = 0140000; /* socket */
+const NFSMODE_FIFO = 0010000; /* fifo */
+
+/*
+ * Error status
+ */
+enum nfsstat {
+ NFS_OK= 0, /* no error */
+ NFSERR_PERM=1, /* Not owner */
+ NFSERR_NOENT=2, /* No such file or directory */
+ NFSERR_IO=5, /* I/O error */
+ NFSERR_NXIO=6, /* No such device or address */
+ NFSERR_ACCES=13, /* Permission denied */
+ NFSERR_EXIST=17, /* File exists */
+ NFSERR_NODEV=19, /* No such device */
+ NFSERR_NOTDIR=20, /* Not a directory*/
+ NFSERR_ISDIR=21, /* Is a directory */
+ NFSERR_FBIG=27, /* File too large */
+ NFSERR_NOSPC=28, /* No space left on device */
+ NFSERR_ROFS=30, /* Read-only file system */
+ NFSERR_NAMETOOLONG=63, /* File name too long */
+ NFSERR_NOTEMPTY=66, /* Directory not empty */
+ NFSERR_DQUOT=69, /* Disc quota exceeded */
+ NFSERR_STALE=70, /* Stale NFS file handle */
+ NFSERR_WFLUSH=99 /* write cache flushed */
+};
+
+/*
+ * File types
+ */
+enum ftype {
+ NFNON = 0, /* non-file */
+ NFREG = 1, /* regular file */
+ NFDIR = 2, /* directory */
+ NFBLK = 3, /* block special */
+ NFCHR = 4, /* character special */
+ NFLNK = 5, /* symbolic link */
+ NFSOCK = 6, /* unix domain sockets */
+ NFBAD = 7, /* unused */
+ NFFIFO = 8 /* named pipe */
+};
+
+/*
+ * File access handle
+ */
+struct nfs_fh {
+ opaque data[NFS_FHSIZE];
+};
+
+/*
+ * Timeval
+ */
+struct nfstime {
+ unsigned seconds;
+ unsigned useconds;
+};
+
+
+/*
+ * File attributes
+ */
+struct fattr {
+ ftype type; /* file type */
+ unsigned mode; /* protection mode bits */
+ unsigned nlink; /* # hard links */
+ unsigned uid; /* owner user id */
+ unsigned gid; /* owner group id */
+ unsigned size; /* file size in bytes */
+ unsigned blocksize; /* prefered block size */
+ unsigned rdev; /* special device # */
+ unsigned blocks; /* Kb of disk used by file */
+ unsigned fsid; /* device # */
+ unsigned fileid; /* inode # */
+ nfstime atime; /* time of last access */
+ nfstime mtime; /* time of last modification */
+ nfstime ctime; /* time of last change */
+};
+
+/*
+ * File attributes which can be set
+ */
+struct sattr {
+ unsigned mode; /* protection mode bits */
+ unsigned uid; /* owner user id */
+ unsigned gid; /* owner group id */
+ unsigned size; /* file size in bytes */
+ nfstime atime; /* time of last access */
+ nfstime mtime; /* time of last modification */
+};
+
+
+typedef string filename<NFS_MAXNAMLEN>;
+typedef string nfspath<NFS_MAXPATHLEN>;
+
+/*
+ * Reply status with file attributes
+ */
+union attrstat switch (nfsstat status) {
+case NFS_OK:
+ fattr attributes;
+default:
+ void;
+};
+
+struct sattrargs {
+ nfs_fh file;
+ sattr attributes;
+};
+
+/*
+ * Arguments for directory operations
+ */
+struct diropargs {
+ nfs_fh dir; /* directory file handle */
+ filename name; /* name (up to NFS_MAXNAMLEN bytes) */
+};
+
+struct diropokres {
+ nfs_fh file;
+ fattr attributes;
+};
+
+/*
+ * Results from directory operation
+ */
+union diropres switch (nfsstat status) {
+case NFS_OK:
+ diropokres diropres;
+default:
+ void;
+};
+
+union readlinkres switch (nfsstat status) {
+case NFS_OK:
+ nfspath data;
+default:
+ void;
+};
+
+/*
+ * Arguments to remote read
+ */
+struct readargs {
+ nfs_fh file; /* handle for file */
+ unsigned offset; /* byte offset in file */
+ unsigned count; /* immediate read count */
+ unsigned totalcount; /* total read count (from this offset)*/
+};
+
+/*
+ * Status OK portion of remote read reply
+ */
+struct readokres {
+ fattr attributes; /* attributes, need for pagin*/
+ opaque data<NFS_MAXDATA>;
+};
+
+union readres switch (nfsstat status) {
+case NFS_OK:
+ readokres reply;
+default:
+ void;
+};
+
+/*
+ * Arguments to remote write
+ */
+struct writeargs {
+ nfs_fh file; /* handle for file */
+ unsigned beginoffset; /* beginning byte offset in file */
+ unsigned offset; /* current byte offset in file */
+ unsigned totalcount; /* total write count (to this offset)*/
+ opaque data<NFS_MAXDATA>;
+};
+
+struct createargs {
+ diropargs where;
+ sattr attributes;
+};
+
+struct renameargs {
+ diropargs from;
+ diropargs to;
+};
+
+struct linkargs {
+ nfs_fh from;
+ diropargs to;
+};
+
+struct symlinkargs {
+ diropargs from;
+ nfspath to;
+ sattr attributes;
+};
+
+
+typedef opaque nfscookie[NFS_COOKIESIZE];
+
+/*
+ * Arguments to readdir
+ */
+struct readdirargs {
+ nfs_fh dir; /* directory handle */
+ nfscookie cookie;
+ unsigned count; /* number of directory bytes to read */
+};
+
+struct entry {
+ unsigned fileid;
+ filename name;
+ nfscookie cookie;
+ entry *nextentry;
+};
+
+struct dirlist {
+ entry *entries;
+ bool eof;
+};
+
+union readdirres switch (nfsstat status) {
+case NFS_OK:
+ dirlist reply;
+default:
+ void;
+};
+
+struct statfsokres {
+ unsigned tsize; /* preferred transfer size in bytes */
+ unsigned bsize; /* fundamental file system block size */
+ unsigned blocks; /* total blocks in file system */
+ unsigned bfree; /* free blocks in fs */
+ unsigned bavail; /* free blocks avail to non-superuser */
+};
+
+union statfsres switch (nfsstat status) {
+case NFS_OK:
+ statfsokres reply;
+default:
+ void;
+};
+
+#ifdef WANT_NFS3
+
+/*
+ * NFSv3 constants and types
+ */
+const NFS3_FHSIZE = 64; /* maximum size in bytes of a file handle */
+const NFS3_COOKIEVERFSIZE = 8; /* size of a cookie verifier for READDIR */
+const NFS3_CREATEVERFSIZE = 8; /* size of the verifier used for CREATE */
+const NFS3_WRITEVERFSIZE = 8; /* size of the verifier used for WRITE */
+
+typedef unsigned hyper uint64;
+typedef hyper int64;
+typedef unsigned long uint32;
+typedef long int32;
+typedef string filename3<>;
+typedef string nfspath3<>;
+typedef uint64 fileid3;
+typedef uint64 cookie3;
+typedef opaque cookieverf3[NFS3_COOKIEVERFSIZE];
+typedef opaque createverf3[NFS3_CREATEVERFSIZE];
+typedef opaque writeverf3[NFS3_WRITEVERFSIZE];
+typedef uint32 uid3;
+typedef uint32 gid3;
+typedef uint64 size3;
+typedef uint64 offset3;
+typedef uint32 mode3;
+typedef uint32 count3;
+
+/*
+ * Error status (v3)
+ */
+enum nfsstat3 {
+ NFS3_OK = 0,
+ NFS3ERR_PERM = 1,
+ NFS3ERR_NOENT = 2,
+ NFS3ERR_IO = 5,
+ NFS3ERR_NXIO = 6,
+ NFS3ERR_ACCES = 13,
+ NFS3ERR_EXIST = 17,
+ NFS3ERR_XDEV = 18,
+ NFS3ERR_NODEV = 19,
+ NFS3ERR_NOTDIR = 20,
+ NFS3ERR_ISDIR = 21,
+ NFS3ERR_INVAL = 22,
+ NFS3ERR_FBIG = 27,
+ NFS3ERR_NOSPC = 28,
+ NFS3ERR_ROFS = 30,
+ NFS3ERR_MLINK = 31,
+ NFS3ERR_NAMETOOLONG = 63,
+ NFS3ERR_NOTEMPTY = 66,
+ NFS3ERR_DQUOT = 69,
+ NFS3ERR_STALE = 70,
+ NFS3ERR_REMOTE = 71,
+ NFS3ERR_BADHANDLE = 10001,
+ NFS3ERR_NOT_SYNC = 10002,
+ NFS3ERR_BAD_COOKIE = 10003,
+ NFS3ERR_NOTSUPP = 10004,
+ NFS3ERR_TOOSMALL = 10005,
+ NFS3ERR_SERVERFAULT = 10006,
+ NFS3ERR_BADTYPE = 10007,
+ NFS3ERR_JUKEBOX = 10008
+};
+
+/*
+ * File types (v3)
+ */
+enum ftype3 {
+ NF3REG = 1, /* regular file */
+ NF3DIR = 2, /* directory */
+ NF3BLK = 3, /* block special */
+ NF3CHR = 4, /* character special */
+ NF3LNK = 5, /* symbolic link */
+ NF3SOCK = 6, /* unix domain sockets */
+ NF3FIFO = 7 /* named pipe */
+};
+
+struct specdata3 {
+ uint32 specdata1;
+ uint32 specdata2;
+};
+
+/*
+ * File access handle (v3)
+ */
+struct nfs_fh3 {
+ opaque data<NFS3_FHSIZE>;
+};
+
+/*
+ * Timeval (v3)
+ */
+struct nfstime3 {
+ uint32 seconds;
+ uint32 nseconds;
+};
+
+
+/*
+ * File attributes (v3)
+ */
+struct fattr3 {
+ ftype3 type; /* file type */
+ mode3 mode; /* protection mode bits */
+ uint32 nlink; /* # hard links */
+ uid3 uid; /* owner user id */
+ gid3 gid; /* owner group id */
+ size3 size; /* file size in bytes */
+ size3 used; /* prefered block size */
+ specdata3 rdev; /* special device # */
+ uint64 fsid; /* device # */
+ fileid3 fileid; /* inode # */
+ nfstime3 atime; /* time of last access */
+ nfstime3 mtime; /* time of last modification */
+ nfstime3 ctime; /* time of last change */
+};
+
+union post_op_attr switch (bool attributes_follow) {
+case TRUE:
+ fattr3 attributes;
+case FALSE:
+ void;
+};
+
+struct wcc_attr {
+ size3 size;
+ nfstime3 mtime;
+ nfstime3 ctime;
+};
+
+union pre_op_attr switch (bool attributes_follow) {
+case TRUE:
+ wcc_attr attributes;
+case FALSE:
+ void;
+};
+
+struct wcc_data {
+ pre_op_attr before;
+ post_op_attr after;
+};
+
+union post_op_fh3 switch (bool handle_follows) {
+case TRUE:
+ nfs_fh3 handle;
+case FALSE:
+ void;
+};
+
+/*
+ * File attributes which can be set (v3)
+ */
+enum time_how {
+ DONT_CHANGE = 0,
+ SET_TO_SERVER_TIME = 1,
+ SET_TO_CLIENT_TIME = 2
+};
+
+union set_mode3 switch (bool set_it) {
+case TRUE:
+ mode3 mode;
+default:
+ void;
+};
+
+union set_uid3 switch (bool set_it) {
+case TRUE:
+ uid3 uid;
+default:
+ void;
+};
+
+union set_gid3 switch (bool set_it) {
+case TRUE:
+ gid3 gid;
+default:
+ void;
+};
+
+union set_size3 switch (bool set_it) {
+case TRUE:
+ size3 size;
+default:
+ void;
+};
+
+union set_atime switch (time_how set_it) {
+case SET_TO_CLIENT_TIME:
+ nfstime3 atime;
+default:
+ void;
+};
+
+union set_mtime switch (time_how set_it) {
+case SET_TO_CLIENT_TIME:
+ nfstime3 mtime;
+default:
+ void;
+};
+
+struct sattr3 {
+ set_mode3 mode;
+ set_uid3 uid;
+ set_gid3 gid;
+ set_size3 size;
+ set_atime atime;
+ set_mtime mtime;
+};
+
+/*
+ * Arguments for directory operations (v3)
+ */
+struct diropargs3 {
+ nfs_fh3 dir; /* directory file handle */
+ filename3 name; /* name (up to NFS_MAXNAMLEN bytes) */
+};
+
+/*
+ * Arguments to getattr (v3).
+ */
+struct GETATTR3args {
+ nfs_fh3 object;
+};
+
+struct GETATTR3resok {
+ fattr3 obj_attributes;
+};
+
+union GETATTR3res switch (nfsstat3 status) {
+case NFS3_OK:
+ GETATTR3resok resok;
+default:
+ void;
+};
+
+/*
+ * Arguments to setattr (v3).
+ */
+union sattrguard3 switch (bool check) {
+case TRUE:
+ nfstime3 obj_ctime;
+case FALSE:
+ void;
+};
+
+struct SETATTR3args {
+ nfs_fh3 object;
+ sattr3 new_attributes;
+ sattrguard3 guard;
+};
+
+struct SETATTR3resok {
+ wcc_data obj_wcc;
+};
+
+struct SETATTR3resfail {
+ wcc_data obj_wcc;
+};
+
+union SETATTR3res switch (nfsstat3 status) {
+case NFS3_OK:
+ SETATTR3resok resok;
+default:
+ SETATTR3resfail resfail;
+};
+
+/*
+ * Arguments to lookup (v3).
+ */
+struct LOOKUP3args {
+ diropargs3 what;
+};
+
+struct LOOKUP3resok {
+ nfs_fh3 object;
+ post_op_attr obj_attributes;
+ post_op_attr dir_attributes;
+};
+
+struct LOOKUP3resfail {
+ post_op_attr dir_attributes;
+};
+
+union LOOKUP3res switch (nfsstat3 status) {
+case NFS3_OK:
+ LOOKUP3resok resok;
+default:
+ LOOKUP3resfail resfail;
+};
+
+/*
+ * Arguments to access (v3).
+ */
+const ACCESS3_READ = 0x0001;
+const ACCESS3_LOOKUP = 0x0002;
+const ACCESS3_MODIFY = 0x0004;
+const ACCESS3_EXTEND = 0x0008;
+const ACCESS3_DELETE = 0x0010;
+const ACCESS3_EXECUTE = 0x0020;
+
+struct ACCESS3args {
+ nfs_fh3 object;
+ uint32 access;
+};
+
+struct ACCESS3resok {
+ post_op_attr obj_attributes;
+ uint32 access;
+};
+
+struct ACCESS3resfail {
+ post_op_attr obj_attributes;
+};
+
+union ACCESS3res switch (nfsstat3 status) {
+case NFS3_OK:
+ ACCESS3resok resok;
+default:
+ ACCESS3resfail resfail;
+};
+
+/*
+ * Arguments to readlink (v3).
+ */
+struct READLINK3args {
+ nfs_fh3 symlink;
+};
+
+struct READLINK3resok {
+ post_op_attr symlink_attributes;
+ nfspath3 data;
+};
+
+struct READLINK3resfail {
+ post_op_attr symlink_attributes;
+};
+
+union READLINK3res switch (nfsstat3 status) {
+case NFS3_OK:
+ READLINK3resok resok;
+default:
+ READLINK3resfail resfail;
+};
+
+/*
+ * Arguments to read (v3).
+ */
+struct READ3args {
+ nfs_fh3 file;
+ offset3 offset;
+ count3 count;
+};
+
+struct READ3resok {
+ post_op_attr file_attributes;
+ count3 count;
+ bool eof;
+ opaque data<>;
+};
+
+struct READ3resfail {
+ post_op_attr file_attributes;
+};
+
+/* XXX: solaris 2.6 uses ``nfsstat'' here */
+union READ3res switch (nfsstat3 status) {
+case NFS3_OK:
+ READ3resok resok;
+default:
+ READ3resfail resfail;
+};
+
+/*
+ * Arguments to write (v3).
+ */
+enum stable_how {
+ UNSTABLE = 0,
+ DATA_SYNC = 1,
+ FILE_SYNC = 2
+};
+
+struct WRITE3args {
+ nfs_fh3 file;
+ offset3 offset;
+ count3 count;
+ stable_how stable;
+ opaque data<>;
+};
+
+struct WRITE3resok {
+ wcc_data file_wcc;
+ count3 count;
+ stable_how committed;
+ writeverf3 verf;
+};
+
+struct WRITE3resfail {
+ wcc_data file_wcc;
+};
+
+union WRITE3res switch (nfsstat3 status) {
+case NFS3_OK:
+ WRITE3resok resok;
+default:
+ WRITE3resfail resfail;
+};
+
+/*
+ * Arguments to create (v3).
+ */
+enum createmode3 {
+ UNCHECKED = 0,
+ GUARDED = 1,
+ EXCLUSIVE = 2
+};
+
+union createhow3 switch (createmode3 mode) {
+case UNCHECKED:
+case GUARDED:
+ sattr3 obj_attributes;
+case EXCLUSIVE:
+ createverf3 verf;
+};
+
+struct CREATE3args {
+ diropargs3 where;
+ createhow3 how;
+};
+
+struct CREATE3resok {
+ post_op_fh3 obj;
+ post_op_attr obj_attributes;
+ wcc_data dir_wcc;
+};
+
+struct CREATE3resfail {
+ wcc_data dir_wcc;
+};
+
+union CREATE3res switch (nfsstat3 status) {
+case NFS3_OK:
+ CREATE3resok resok;
+default:
+ CREATE3resfail resfail;
+};
+
+/*
+ * Arguments to mkdir (v3).
+ */
+struct MKDIR3args {
+ diropargs3 where;
+ sattr3 attributes;
+};
+
+struct MKDIR3resok {
+ post_op_fh3 obj;
+ post_op_attr obj_attributes;
+ wcc_data dir_wcc;
+};
+
+struct MKDIR3resfail {
+ wcc_data dir_wcc;
+};
+
+union MKDIR3res switch (nfsstat3 status) {
+case NFS3_OK:
+ MKDIR3resok resok;
+default:
+ MKDIR3resfail resfail;
+};
+
+/*
+ * Arguments to symlink (v3).
+ */
+struct symlinkdata3 {
+ sattr3 symlink_attributes;
+ nfspath3 symlink_data;
+};
+
+struct SYMLINK3args {
+ diropargs3 where;
+ symlinkdata3 symlink;
+};
+
+struct SYMLINK3resok {
+ post_op_fh3 obj;
+ post_op_attr obj_attributes;
+ wcc_data dir_wcc;
+};
+
+struct SYMLINK3resfail {
+ wcc_data dir_wcc;
+};
+
+union SYMLINK3res switch (nfsstat3 status) {
+case NFS3_OK:
+ SYMLINK3resok resok;
+default:
+ SYMLINK3resfail resfail;
+};
+
+/*
+ * Arguments to mknod (v3).
+ */
+struct devicedata3 {
+ sattr3 dev_attributes;
+ specdata3 spec;
+};
+
+union mknoddata3 switch (ftype3 type) {
+case NF3CHR:
+case NF3BLK:
+ devicedata3 device;
+case NF3SOCK:
+case NF3FIFO:
+ sattr3 pipe_attributes;
+default:
+ void;
+};
+
+struct MKNOD3args {
+ diropargs3 where;
+ mknoddata3 what;
+};
+
+struct MKNOD3resok {
+ post_op_fh3 obj;
+ post_op_attr obj_attributes;
+ wcc_data dir_wcc;
+};
+
+struct MKNOD3resfail {
+ wcc_data dir_wcc;
+};
+
+union MKNOD3res switch (nfsstat3 status) {
+case NFS3_OK:
+ MKNOD3resok resok;
+default:
+ MKNOD3resfail resfail;
+};
+
+/*
+ * Arguments to remove (v3).
+ */
+struct REMOVE3args {
+ diropargs3 object;
+};
+
+struct REMOVE3resok {
+ wcc_data dir_wcc;
+};
+
+struct REMOVE3resfail {
+ wcc_data dir_wcc;
+};
+
+union REMOVE3res switch (nfsstat3 status) {
+case NFS3_OK:
+ REMOVE3resok resok;
+default:
+ REMOVE3resfail resfail;
+};
+
+/*
+ * Arguments to rmdir (v3).
+ */
+struct RMDIR3args {
+ diropargs3 object;
+};
+
+struct RMDIR3resok {
+ wcc_data dir_wcc;
+};
+
+struct RMDIR3resfail {
+ wcc_data dir_wcc;
+};
+
+union RMDIR3res switch (nfsstat3 status) {
+case NFS3_OK:
+ RMDIR3resok resok;
+default:
+ RMDIR3resfail resfail;
+};
+
+/*
+ * Arguments to rename (v3).
+ */
+struct RENAME3args {
+ diropargs3 from;
+ diropargs3 to;
+};
+
+struct RENAME3resok {
+ wcc_data fromdir_wcc;
+ wcc_data todir_wcc;
+};
+
+struct RENAME3resfail {
+ wcc_data fromdir_wcc;
+ wcc_data todir_wcc;
+};
+
+union RENAME3res switch (nfsstat3 status) {
+case NFS3_OK:
+ RENAME3resok resok;
+default:
+ RENAME3resfail resfail;
+};
+
+/*
+ * Arguments to link (v3).
+ */
+struct LINK3args {
+ nfs_fh3 file;
+ diropargs3 link;
+};
+
+struct LINK3resok {
+ post_op_attr file_attributes;
+ wcc_data linkdir_wcc;
+};
+
+struct LINK3resfail {
+ post_op_attr file_attributes;
+ wcc_data linkdir_wcc;
+};
+
+union LINK3res switch (nfsstat3 status) {
+case NFS3_OK:
+ LINK3resok resok;
+default:
+ LINK3resfail resfail;
+};
+
+/*
+ * Arguments to readdir (v3).
+ */
+struct READDIR3args {
+ nfs_fh3 dir;
+ cookie3 cookie;
+ cookieverf3 cookieverf;
+ count3 count;
+};
+
+struct entry3 {
+ fileid3 fileid;
+ filename3 name;
+ cookie3 cookie;
+ entry3 *nextentry;
+};
+
+struct dirlist3 {
+ entry3 *entries;
+ bool eof;
+};
+
+struct READDIR3resok {
+ post_op_attr dir_attributes;
+ cookieverf3 cookieverf;
+ dirlist3 reply;
+};
+
+struct READDIR3resfail {
+ post_op_attr dir_attributes;
+};
+
+union READDIR3res switch (nfsstat3 status) {
+case NFS3_OK:
+ READDIR3resok resok;
+default:
+ READDIR3resfail resfail;
+};
+
+/*
+ * Arguments to readdirplus (v3).
+ */
+struct READDIRPLUS3args {
+ nfs_fh3 dir;
+ cookie3 cookie;
+ cookieverf3 cookieverf;
+ count3 dircount;
+ count3 maxcount;
+};
+
+struct entryplus3 {
+ fileid3 fileid;
+ filename3 name;
+ cookie3 cookie;
+ post_op_attr name_attributes;
+ post_op_fh3 name_handle;
+ entryplus3 *nextentry;
+};
+
+struct dirlistplus3 {
+ entryplus3 *entries;
+ bool eof;
+};
+
+struct READDIRPLUS3resok {
+ post_op_attr dir_attributes;
+ cookieverf3 cookieverf;
+ dirlistplus3 reply;
+};
+
+struct READDIRPLUS3resfail {
+ post_op_attr dir_attributes;
+};
+
+union READDIRPLUS3res switch (nfsstat3 status) {
+case NFS3_OK:
+ READDIRPLUS3resok resok;
+default:
+ READDIRPLUS3resfail resfail;
+};
+
+/*
+ * Arguments to fsstat (v3).
+ */
+struct FSSTAT3args {
+ nfs_fh3 fsroot;
+};
+
+struct FSSTAT3resok {
+ post_op_attr obj_attributes;
+ size3 tbytes;
+ size3 fbytes;
+ size3 abytes;
+ size3 tfiles;
+ size3 ffiles;
+ size3 afiles;
+ uint32 invarsec;
+};
+
+struct FSSTAT3resfail {
+ post_op_attr obj_attributes;
+};
+
+union FSSTAT3res switch (nfsstat3 status) {
+case NFS3_OK:
+ FSSTAT3resok resok;
+default:
+ FSSTAT3resfail resfail;
+};
+
+/*
+ * Arguments to fsinfo (v3).
+ */
+const FSF3_LINK = 0x0001;
+const FSF3_SYMLINK = 0x0002;
+const FSF3_HOMOGENEOUS = 0x0008;
+const FSF3_CANSETTIME = 0x0010;
+
+struct FSINFO3args {
+ nfs_fh3 fsroot;
+};
+
+struct FSINFO3resok {
+ post_op_attr obj_attributes;
+ uint32 rtmax;
+ uint32 rtpref;
+ uint32 rtmult;
+ uint32 wtmax;
+ uint32 wtpref;
+ uint32 wtmult;
+ uint32 dtpref;
+ size3 maxfilesize;
+ nfstime3 time_delta;
+ uint32 properties;
+};
+
+struct FSINFO3resfail {
+ post_op_attr obj_attributes;
+};
+
+union FSINFO3res switch (nfsstat3 status) {
+case NFS3_OK:
+ FSINFO3resok resok;
+default:
+ FSINFO3resfail resfail;
+};
+
+/*
+ * Arguments to pathconf (v3).
+ */
+struct PATHCONF3args {
+ nfs_fh3 object;
+};
+
+struct PATHCONF3resok {
+ post_op_attr obj_attributes;
+ uint32 linkmax;
+ uint32 name_max;
+ bool no_trunc;
+ bool chown_restricted;
+ bool case_insensitive;
+ bool case_preserving;
+};
+
+struct PATHCONF3resfail {
+ post_op_attr obj_attributes;
+};
+
+union PATHCONF3res switch (nfsstat3 status) {
+case NFS3_OK:
+ PATHCONF3resok resok;
+default:
+ PATHCONF3resfail resfail;
+};
+
+/*
+ * Arguments to commit (v3).
+ */
+struct COMMIT3args {
+ nfs_fh3 file;
+ offset3 offset;
+ count3 count;
+};
+
+struct COMMIT3resok {
+ wcc_data file_wcc;
+ writeverf3 verf;
+};
+
+struct COMMIT3resfail {
+ wcc_data file_wcc;
+};
+
+union COMMIT3res switch (nfsstat3 status) {
+case NFS3_OK:
+ COMMIT3resok resok;
+default:
+ COMMIT3resfail resfail;
+};
+
+#endif /* WANT_NFS3 */
+
+/*
+ * Remote file service routines
+ */
+program NFS_PROGRAM {
+ version NFS_VERSION {
+ void
+ NFSPROC_NULL(void) = 0;
+
+ attrstat
+ NFSPROC_GETATTR(nfs_fh) = 1;
+
+ attrstat
+ NFSPROC_SETATTR(sattrargs) = 2;
+
+ void
+ NFSPROC_ROOT(void) = 3;
+
+ diropres
+ NFSPROC_LOOKUP(diropargs) = 4;
+
+ readlinkres
+ NFSPROC_READLINK(nfs_fh) = 5;
+
+ readres
+ NFSPROC_READ(readargs) = 6;
+
+ void
+ NFSPROC_WRITECACHE(void) = 7;
+
+ attrstat
+ NFSPROC_WRITE(writeargs) = 8;
+
+ diropres
+ NFSPROC_CREATE(createargs) = 9;
+
+ nfsstat
+ NFSPROC_REMOVE(diropargs) = 10;
+
+ nfsstat
+ NFSPROC_RENAME(renameargs) = 11;
+
+ nfsstat
+ NFSPROC_LINK(linkargs) = 12;
+
+ nfsstat
+ NFSPROC_SYMLINK(symlinkargs) = 13;
+
+ diropres
+ NFSPROC_MKDIR(createargs) = 14;
+
+ nfsstat
+ NFSPROC_RMDIR(diropargs) = 15;
+
+ readdirres
+ NFSPROC_READDIR(readdirargs) = 16;
+
+ statfsres
+ NFSPROC_STATFS(nfs_fh) = 17;
+ } = 2;
+} = 100003;
+#ifdef WANT_NFS3
+program NFS3_PROGRAM {
+ version NFS_V3 {
+ void
+ NFSPROC3_NULL(void) = 0;
+
+ GETATTR3res
+ NFSPROC3_GETATTR(GETATTR3args) = 1;
+
+ SETATTR3res
+ NFSPROC3_SETATTR(SETATTR3args) = 2;
+
+ LOOKUP3res
+ NFSPROC3_LOOKUP(LOOKUP3args) = 3;
+
+ ACCESS3res
+ NFSPROC3_ACCESS(ACCESS3args) = 4;
+
+ READLINK3res
+ NFSPROC3_READLINK(READLINK3args) = 5;
+
+ READ3res
+ NFSPROC3_READ(READ3args) = 6;
+
+ WRITE3res
+ NFSPROC3_WRITE(WRITE3args) = 7;
+
+ CREATE3res
+ NFSPROC3_CREATE(CREATE3args) = 8;
+
+ MKDIR3res
+ NFSPROC3_MKDIR(MKDIR3args) = 9;
+
+ SYMLINK3res
+ NFSPROC3_SYMLINK(SYMLINK3args) = 10;
+
+ MKNOD3res
+ NFSPROC3_MKNOD(MKNOD3args) = 11;
+
+ REMOVE3res
+ NFSPROC3_REMOVE(REMOVE3args) = 12;
+
+ RMDIR3res
+ NFSPROC3_RMDIR(RMDIR3args) = 13;
+
+ RENAME3res
+ NFSPROC3_RENAME(RENAME3args) = 14;
+
+ LINK3res
+ NFSPROC3_LINK(LINK3args) = 15;
+
+ READDIR3res
+ NFSPROC3_READDIR(READDIR3args) = 16;
+
+ READDIRPLUS3res
+ NFSPROC3_READDIRPLUS(READDIRPLUS3args) = 17;
+
+ FSSTAT3res
+ NFSPROC3_FSSTAT(FSSTAT3args) = 18;
+
+ FSINFO3res
+ NFSPROC3_FSINFO(FSINFO3args) = 19;
+
+ PATHCONF3res
+ NFSPROC3_PATHCONF(PATHCONF3args) = 20;
+
+ COMMIT3res
+ NFSPROC3_COMMIT(COMMIT3args) = 21;
+ } = 3;
+} = 100003;
+#endif
+
diff --git a/tirpc/rpcsvc/rquota.x b/tirpc/rpcsvc/rquota.x
new file mode 100644
index 0000000..72864d1
--- /dev/null
+++ b/tirpc/rpcsvc/rquota.x
@@ -0,0 +1,67 @@
+/*
+ * Remote quota protocol
+ * Requires unix authentication
+ */
+
+#ifndef RPC_HDR
+%#ifndef lint
+%/*static char sccsid[] = "from: @(#)rquota.x 1.2 87/09/20 Copyr 1987 Sun Micro";*/
+%/*static char sccsid[] = "from: @(#)rquota.x 2.1 88/08/01 4.0 RPCSRC";*/
+%static const char rcsid[] =
+% "$FreeBSD: src/include/rpcsvc/rquota.x,v 1.6 1999/08/27 23:45:10 peter Exp $";
+%#endif /* not lint */
+#endif
+
+const RQ_PATHLEN = 1024;
+
+struct getquota_args {
+ string gqa_pathp<RQ_PATHLEN>; /* path to filesystem of interest */
+ int gqa_uid; /* inquire about quota for uid */
+};
+
+/*
+ * remote quota structure
+ */
+struct rquota {
+ int rq_bsize; /* block size for block counts */
+ bool rq_active; /* indicates whether quota is active */
+ unsigned int rq_bhardlimit; /* absolute limit on disk blks alloc */
+ unsigned int rq_bsoftlimit; /* preferred limit on disk blks */
+ unsigned int rq_curblocks; /* current block count */
+ unsigned int rq_fhardlimit; /* absolute limit on allocated files */
+ unsigned int rq_fsoftlimit; /* preferred file limit */
+ unsigned int rq_curfiles; /* current # allocated files */
+ unsigned int rq_btimeleft; /* time left for excessive disk use */
+ unsigned int rq_ftimeleft; /* time left for excessive files */
+};
+
+enum gqr_status {
+ Q_OK = 1, /* quota returned */
+ Q_NOQUOTA = 2, /* noquota for uid */
+ Q_EPERM = 3 /* no permission to access quota */
+};
+
+union getquota_rslt switch (gqr_status status) {
+case Q_OK:
+ rquota gqr_rquota; /* valid if status == Q_OK */
+case Q_NOQUOTA:
+ void;
+case Q_EPERM:
+ void;
+};
+
+program RQUOTAPROG {
+ version RQUOTAVERS {
+ /*
+ * Get all quotas
+ */
+ getquota_rslt
+ RQUOTAPROC_GETQUOTA(getquota_args) = 1;
+
+ /*
+ * Get active quotas only
+ */
+ getquota_rslt
+ RQUOTAPROC_GETACTIVEQUOTA(getquota_args) = 2;
+ } = 1;
+} = 100011;
--
1.9.1
| {
"pile_set_name": "Github"
} |
//OpenCL variables
#include <oclUtils.h>
cl_platform_id cpPlatform;       // OpenCL platform chosen by oclGetPlatformID
cl_context cxGPUContext;         // context created on one GPU device (see InitOpenCL)
cl_command_queue cqCommandQueue; // command queue on that device
cl_program hProgram;             // program built from verlet_kernel.cl
cl_kernel hKernel;               // "verlet" kernel entry point
cl_mem hVbo;                     // kernel output buffer (arg 0), read back by ReadBuffer()
cl_mem X[2];                     // ping-pong device buffers: current positions
cl_mem X_last[2];                // ping-pong device buffers: previous positions
extern int readID, writeID;      // ping-pong indices, defined in another translation unit
// Loads the OpenCL C source file, compiles it for the global context, and
// returns the program and the named kernel through the out-parameters.
// On any error the ocl/shr helpers log and the process exits; on a build
// failure the build log and PTX are dumped first.
void createProgramAndKernel(const char * clSourcefile, const char * clKernelName, cl_program & cpProgram, cl_kernel & ckKernel)
{
	cl_int ciErrNum;
	// Program Setup: read the kernel source from disk. oclLoadProgSource
	// allocates the buffer; it is freed once clCreateProgramWithSource has
	// taken a copy of the text.
	size_t program_length;
	char * source = oclLoadProgSource(clSourcefile, "", &program_length);
	oclCheckError(source != NULL, shrTRUE);
	// create the program from the in-memory source
	cpProgram = clCreateProgramWithSource(cxGPUContext, 1,(const char **) &source, &program_length, &ciErrNum);
	oclCheckError(ciErrNum, CL_SUCCESS);
	free(source);
	// build the program (no build options; the fast-math flag is deliberately left commented out)
	ciErrNum = clBuildProgram(cpProgram, 0, NULL, NULL/*"-cl-fast-relaxed-math"*/, NULL, NULL);
	if (ciErrNum != CL_SUCCESS)
	{
		// write out standard error, Build Log and PTX, then cleanup and exit
		oclLogBuildInfo(cpProgram, oclGetFirstDev(cxGPUContext));
		oclLogPtx(cpProgram, oclGetFirstDev(cxGPUContext), "oclVerlet.ptx");
		// Cleanup(EXIT_FAILURE);
		shrLogEx(LOGBOTH | CLOSELOG, 0, "GPGPU cloth exiting...\nPress <Enter> to Quit\n");
		shrLogEx(LOGBOTH | ERRORMSG, ciErrNum, STDERROR);
		exit(EXIT_FAILURE);
	}
	// create the kernel object for the requested entry point
	ckKernel = clCreateKernel(cpProgram, clKernelName, &ciErrNum);
	oclCheckError(ciErrNum, CL_SUCCESS);
}
// Allocates the device buffer that the kernel writes its output positions to
// (bound as kernel arg 0 in InitOpenCL).  CL_MEM_WRITE_ONLY restricts
// kernel-side access only; the host still reads the buffer back via
// ReadBuffer().  size is in bytes.
void CreateCL_VBO(const int size) {
	cl_int ciErrNum;
	hVbo=clCreateBuffer(cxGPUContext, CL_MEM_WRITE_ONLY, size, NULL, &ciErrNum);
	oclCheckError(ciErrNum, CL_SUCCESS);
}
// One-time OpenCL setup: creates the context and command queue on the first
// GPU device, allocates the two ping-pong position buffer pairs (4 floats
// per particle), builds the "verlet" kernel from verlet_kernel.cl, creates
// the output buffer, and binds all frame-constant kernel arguments
// (args 0 and 5-13).  Args 1-4 (the ping-pong buffers) are rebound each
// frame by UploadOpenCL.  size is the particle count; the remaining
// parameters are cloth-simulation constants forwarded to the kernel.
void InitOpenCL(const unsigned int size, int texture_size_x, int texture_size_y,float stepX, float stepY, float damp, float mass, float dt, float inv_cloth_sizeX, float inv_cloth_sizeY) {
	//Setup OpenCL context
	//get device ID
	cl_device_id cdDevice;
	cl_int ciErrNum;
	oclCheckError(oclGetPlatformID(&cpPlatform), CL_SUCCESS);
	oclCheckError(clGetDeviceIDs(cpPlatform, CL_DEVICE_TYPE_GPU, 1, &cdDevice, NULL), CL_SUCCESS);
	cl_context_properties props[] = {CL_CONTEXT_PLATFORM, (cl_context_properties)cpPlatform, 0};
	cxGPUContext = clCreateContext(0, 1, &cdDevice, NULL, NULL, &ciErrNum);
	oclCheckError(ciErrNum, CL_SUCCESS);
	oclPrintDevInfo(LOGBOTH, cdDevice);
	cqCommandQueue = clCreateCommandQueue(cxGPUContext, cdDevice, 0, &ciErrNum);
	oclCheckError(ciErrNum, CL_SUCCESS);
	oclPrintDevName(LOGBOTH, cdDevice);
	const unsigned int num_threads = size;
	// 4 floats (x, y, z, w) per particle
	const unsigned int mem_size = 4*sizeof(float) * num_threads;
	//create 4 buffers for current and previous positions
	X[0] = clCreateBuffer(cxGPUContext, CL_MEM_READ_WRITE, mem_size , NULL, &ciErrNum);
	oclCheckError(ciErrNum, CL_SUCCESS);
	X_last[0] = clCreateBuffer(cxGPUContext, CL_MEM_READ_WRITE, mem_size , NULL, &ciErrNum);
	oclCheckError(ciErrNum, CL_SUCCESS);
	X[1] = clCreateBuffer(cxGPUContext, CL_MEM_READ_WRITE, mem_size , NULL, &ciErrNum);
	oclCheckError(ciErrNum, CL_SUCCESS);
	X_last[1] = clCreateBuffer(cxGPUContext, CL_MEM_READ_WRITE, mem_size , NULL, &ciErrNum);
	oclCheckError(ciErrNum, CL_SUCCESS);
	const char* clSourcefile = "verlet_kernel.cl";
	const char* clKernelName = "verlet";
	createProgramAndKernel(clSourcefile, clKernelName, hProgram, hKernel);
	CreateCL_VBO(mem_size);
	// bind the output buffer and the simulation constants that never change
	ciErrNum |= clSetKernelArg(hKernel, 0, sizeof(cl_mem), (void *)&(hVbo));
	ciErrNum |= clSetKernelArg(hKernel, 5, sizeof(int), (void *)&(texture_size_x));
	ciErrNum |= clSetKernelArg(hKernel, 6, sizeof(int), (void *)&(texture_size_y));
	ciErrNum |= clSetKernelArg(hKernel, 7, sizeof(float), (void *)&(stepX));
	ciErrNum |= clSetKernelArg(hKernel, 8, sizeof(float), (void *)&(stepY));
	ciErrNum |= clSetKernelArg(hKernel, 9, sizeof(float), (void *)&(damp));
	ciErrNum |= clSetKernelArg(hKernel, 10, sizeof(float), (void *)&(mass));
	ciErrNum |= clSetKernelArg(hKernel, 11, sizeof(float), (void *)&(dt));
	ciErrNum |= clSetKernelArg(hKernel, 12, sizeof(float), (void *)&(inv_cloth_sizeX));
	ciErrNum |= clSetKernelArg(hKernel, 13, sizeof(float), (void *)&(inv_cloth_sizeY));
	oclCheckError(ciErrNum, CL_SUCCESS);
}
// Releases every OpenCL object created by InitOpenCL (ping-pong buffers,
// output buffer, program, kernel, queue, context) and nulls the buffer
// handles so a double shutdown does not release them twice.
void ShutdownOpenCL()
{
	cl_int ciErrNum = CL_SUCCESS;
	// cleanup memory
	if (X[0] != NULL)
	{
		ciErrNum |= clReleaseMemObject(X[0]);
		ciErrNum |= clReleaseMemObject(X[1]);
		X[0] = NULL;
		X[1] = NULL;
	}
	if (X_last[0] != NULL)
	{
		ciErrNum |= clReleaseMemObject(X_last[0]);
		ciErrNum |= clReleaseMemObject(X_last[1]);
		X_last[0] = NULL;
		X_last[1] = NULL;
	}
	oclCheckError(ciErrNum, CL_SUCCESS);
	ciErrNum |= clReleaseMemObject(hVbo);
	oclCheckError(ciErrNum, CL_SUCCESS);
	ciErrNum |= clReleaseProgram(hProgram);
	oclCheckError(ciErrNum, CL_SUCCESS);
	ciErrNum |= clReleaseKernel(hKernel);
	oclCheckError(ciErrNum, CL_SUCCESS);
	ciErrNum |= clReleaseCommandQueue(cqCommandQueue);
	oclCheckError(ciErrNum, CL_SUCCESS);
	ciErrNum |= clReleaseContext(cxGPUContext);
	oclCheckError(ciErrNum, CL_SUCCESS);
}
// Per-frame buffer bookkeeping: binds the current read pair (X/X_last[readID])
// and write pair (X/X_last[writeID]) as kernel args 1-4, then swaps
// readID/writeID so the next frame consumes what this frame produces.
// On the very first call only, the host-side position arrays are copied into
// the device buffers with blocking writes; afterwards the positions stay
// resident on the device and the host arrays are ignored.
// size is the particle count (4 floats per particle).
void UploadOpenCL(float * positions, float * positions_old, const int size)
{
	// one-time upload guard; NOTE(review): static flag makes this
	// single-cloth/single-thread only -- confirm if multiple instances needed
	static bool start = true;
	const unsigned int mem_size = 4*sizeof(float) * size;
	cl_int ciErrNum=CL_SUCCESS;
	assert(X[readID] != NULL);
	assert(X_last[readID] != NULL);
	if (start)
	{
		//Copy the current positions
		ciErrNum |= clEnqueueWriteBuffer(cqCommandQueue, X[readID], CL_TRUE, 0, mem_size, positions, 0, NULL, NULL);
		oclCheckError(ciErrNum, CL_SUCCESS);
		ciErrNum |= clEnqueueWriteBuffer(cqCommandQueue, X_last[readID], CL_TRUE, 0, mem_size, positions_old, 0, NULL, NULL);
		oclCheckError(ciErrNum, CL_SUCCESS);
		start=false;
	}
	// args 1/2: input (current/previous), args 3/4: output (current/previous)
	ciErrNum |= clSetKernelArg(hKernel, 1, sizeof(cl_mem), (void *)&X[readID]);
	ciErrNum |= clSetKernelArg(hKernel, 2, sizeof(cl_mem), (void *)&X_last[readID]);
	ciErrNum |= clSetKernelArg(hKernel, 3, sizeof(cl_mem), (void *)&X[writeID]);
	ciErrNum |= clSetKernelArg(hKernel, 4, sizeof(cl_mem), (void *)&X_last[writeID]);
	oclCheckError(ciErrNum, CL_SUCCESS);
	// ping-pong: swap read and write roles for the next frame
	int tmp=readID;
	readID = writeID;
	writeID=tmp;
}
// Round a up to the nearest multiple of b (b must be non-zero).
// Used to pad the global work size to a whole number of work-groups.
size_t uSnap(size_t a, size_t b)
{
    const size_t remainder = a % b;
    return (remainder == 0) ? a : a + (b - remainder);
}
// Runs one verlet integration step over all texsizeX*texsizeY particles and
// blocks until the kernel has finished (clFinish).
// The global work size is rounded up to a multiple of the 256-item
// work-group size, so it can exceed the particle count; presumably the
// kernel guards out-of-range work-items -- TODO confirm in verlet_kernel.cl.
void VerletOpenCL(int texsizeX, int texsizeY)
{
	// setup execution parameters
	//uint numThreads, numBlocks;
	int numParticles = texsizeX*texsizeY;
	//computeGridSize(numParticles, 256, numBlocks, numThreads);
	size_t _szLocalWorkSize = 256;
	size_t _szGlobalWorkSize = uSnap(numParticles, _szLocalWorkSize);
	// printf("%3d particles, %3d blocks, %3d threads\n", numParticles, numBlocks, numThreads);
	// execute the kernel
	// printf("numParticles: %d, numThreads: %d numBlocks: %d\n", numParticles, numThreads, numBlocks);
	//verlet<<< numBlocks, numThreads >>>(pos_vbo, X_in, X_last_in, X_out, X_last_out, texsize, step, damp, mass, dt, inv_cloth_size);
	cl_int ciErrNum = clEnqueueNDRangeKernel(cqCommandQueue, hKernel, 1, NULL, &_szGlobalWorkSize, &_szLocalWorkSize, 0,0,0 );
	oclCheckError(ciErrNum, CL_SUCCESS);
	// stop the CPU until the kernel has been executed
	//cudaThreadSynchronize();
	ciErrNum |= clFinish(cqCommandQueue);
	// check if kernel execution generated and error
	oclCheckError(ciErrNum, CL_SUCCESS);
}
// Synchronously copies `size` bytes of the shared vertex buffer object
// (hVbo) back into host memory at `ptr` (blocking read: CL_TRUE).
void ReadBuffer(float* ptr, int size) {
	cl_int ciErrNum = clEnqueueReadBuffer(cqCommandQueue, hVbo, CL_TRUE, 0, size, ptr, 0, NULL, NULL);
	oclCheckError(ciErrNum, CL_SUCCESS);
} | {
"pile_set_name": "Github"
} |
# Mac 命令行相关技巧
## nano 编辑器
+ ctrl 表示 ^
+ F3保存,F2退出
## 切换默认 bash
1. `sudo nano /etc/shells`
2. add `/usr/local/bin/fish` to your list of shells
3. `chsh -s /usr/local/bin/fish`

切换回来大概也就是这样,换对应路径即可
| {
"pile_set_name": "Github"
} |
#pragma once
#include "public.h"
// Fixed-capacity circular queue of Block elements. One slot is always kept
// unused so that front == rear unambiguously means "empty".
typedef struct {
	Block *base;  // heap-allocated storage for MAXQSIZE elements
	int front;    // index of the first (oldest) element
	int rear;     // index one past the last element
}SqQueue;
// Allocates the backing storage for Q and marks the queue as empty.
// On allocation failure the function returns with Q.base left NULL.
void initQueue(SqQueue &Q)
{
    Q.base = (Block*)malloc(MAXQSIZE * sizeof(Block));
    if (Q.base == NULL)
        return;  // out of memory: leave the queue unusable
    Q.front = 0;
    Q.rear = 0;
}
// Appends e at the tail of Q. When the queue is full the element is
// silently dropped (matching the queue's best-effort contract).
void enQueue(SqQueue &Q, Block e)
{
    const int nextRear = (Q.rear + 1) % MAXQSIZE;
    if (nextRear == Q.front)
        return;  // full: one slot is always left unused
    Q.base[Q.rear] = e;
    Q.rear = nextRear;
}
// Returns the number of elements currently stored in Q.
int getLength(SqQueue Q)
{
    int diff = Q.rear - Q.front;
    if (diff < 0)
        diff += MAXQSIZE;  // rear has wrapped around the ring
    return diff;
}
// Removes a uniformly random element from Q and returns it through e.
// Implementation: swap the chosen element with the last one, then shrink
// the queue by one (Fisher-Yates-style random extraction).
//
// Fixes over the previous version:
//  - srand() is now called once instead of on every dequeue; reseeding
//    with time(NULL) per call made all picks within the same second
//    identical.
//  - The random slot and the last slot are computed relative to Q.front
//    with wraparound, so the function is also correct after the ring has
//    wrapped (previously Q.base[Q.rear - 1] indexed out of bounds and
//    (Q.rear - 1) % MAXQSIZE went negative when Q.rear == 0).
void deQueue(SqQueue &Q, Block &e)
{
    if (Q.front == Q.rear)
        return;  // empty: nothing to extract

    // Seed the PRNG exactly once for the process.
    static bool seeded = false;
    if (!seeded)
    {
        srand((unsigned)time(NULL));
        seeded = true;
    }

    // Pick a random occupied slot, measured as an offset from front.
    int offset = rand() % getLength(Q);
    int pickIdx = (Q.front + offset) % MAXQSIZE;
    int lastIdx = (Q.rear - 1 + MAXQSIZE) % MAXQSIZE;

    // Swap picked and last element; e carries the picked one out.
    e = Q.base[pickIdx];
    Q.base[pickIdx] = Q.base[lastIdx];
    Q.base[lastIdx] = e;
    Q.rear = lastIdx;
}
// Returns true when Q holds no elements (front has caught up with rear).
bool isEmpty(SqQueue Q)
{
	if (Q.front == Q.rear)
		return true;
	else
		return false;
} | {
"pile_set_name": "Github"
} |
namespace Unity.IO.Compression {
    using System;
    using System.Diagnostics;

    /// <summary>
    /// Bit-oriented writer used by the Deflate encoder. Whole bytes are
    /// emitted into a caller-supplied buffer while partial bit runs are
    /// accumulated until a full 16-bit chunk (or an explicit flush) is
    /// available.
    /// </summary>
    /// <remarks>
    /// Fix over the previous version: the Debug.Assert messages in
    /// WriteBits/WriteUInt16 were inverted or ungrammatical
    /// ("length must be larger than 16!" guarded n &lt;= 16); they now
    /// describe the actual condition, matching the wording used in
    /// WriteBytes.
    /// </remarks>
    internal class OutputBuffer {
        private byte[] byteBuffer;  // destination buffer for whole bytes
        private int pos;            // write position within byteBuffer
        private uint bitBuf;        // stores incomplete (pending) bits
        private int bitCount;       // number of valid bits in bitBuf

        /// <summary>Sets the output buffer we will be writing into and resets the write position.</summary>
        internal void UpdateBuffer(byte[] output) {
            byteBuffer = output;
            pos = 0;
        }

        /// <summary>Number of whole bytes written so far.</summary>
        internal int BytesWritten {
            get {
                return pos;
            }
        }

        /// <summary>Remaining capacity, in bytes, of the current output buffer.</summary>
        internal int FreeBytes {
            get {
                return byteBuffer.Length - pos;
            }
        }

        /// <summary>Writes a 16-bit value in little-endian byte order.</summary>
        internal void WriteUInt16(ushort value) {
            Debug.Assert(FreeBytes >= 2, "Not enough space in output buffer!");

            byteBuffer[pos++] = (byte)value;
            byteBuffer[pos++] = (byte)(value >> 8);
        }

        /// <summary>
        /// Appends the low <paramref name="n"/> bits of <paramref name="bits"/>
        /// to the bit accumulator, spilling two bytes to the output whenever
        /// 16 or more bits are pending.
        /// </summary>
        internal void WriteBits(int n, uint bits) {
            Debug.Assert(n <= 16, "n must not be larger than 16!");
            bitBuf |= bits << bitCount;
            bitCount += n;
            if (bitCount >= 16) {
                Debug.Assert(byteBuffer.Length - pos >= 2, "Not enough space in output buffer!");
                byteBuffer[pos++] = unchecked((byte)bitBuf);
                byteBuffer[pos++] = unchecked((byte)(bitBuf >> 8));
                bitCount -= 16;
                bitBuf >>= 16;
            }
        }

        // Write the bits left in the accumulator out as bytes; a trailing
        // partial byte is zero-padded in its high bits.
        internal void FlushBits() {
            // flush bits from bit buffer to output buffer
            while (bitCount >= 8) {
                byteBuffer[pos++] = unchecked((byte)bitBuf);
                bitCount -= 8;
                bitBuf >>= 8;
            }

            if (bitCount > 0) {
                byteBuffer[pos++] = unchecked((byte)bitBuf);
                bitBuf = 0;
                bitCount = 0;
            }
        }

        /// <summary>
        /// Copies raw bytes into the output. Fast path (Array.Copy) when the
        /// writer is byte-aligned; otherwise each byte is funneled through
        /// the bit accumulator.
        /// </summary>
        internal void WriteBytes(byte[] byteArray, int offset, int count) {
            Debug.Assert(FreeBytes >= count, "Not enough space in output buffer!");
            // faster
            if (bitCount == 0) {
                Array.Copy(byteArray, offset, byteBuffer, pos, count);
                pos += count;
            }
            else {
                WriteBytesUnaligned(byteArray, offset, count);
            }
        }

        private void WriteBytesUnaligned(byte[] byteArray, int offset, int count) {
            for (int i = 0; i < count; i++) {
                byte b = byteArray[offset + i];
                WriteByteUnaligned(b);
            }
        }

        private void WriteByteUnaligned(byte b) {
            WriteBits(8, b);
        }

        /// <summary>
        /// Bytes needed to hold the pending bits, plus one.
        /// NOTE(review): this returns 1 even when bitCount == 0 — presumably
        /// a deliberate over-estimate used for free-space checks; confirm
        /// against callers before changing.
        /// </summary>
        internal int BitsInBuffer {
            get {
                return (bitCount / 8) + 1;
            }
        }

        /// <summary>Captures the writer state so it can be rolled back via RestoreState.</summary>
        internal OutputBuffer.BufferState DumpState() {
            OutputBuffer.BufferState savedState;
            savedState.pos = pos;
            savedState.bitBuf = bitBuf;
            savedState.bitCount = bitCount;
            return savedState;
        }

        /// <summary>Rolls the writer back to a state previously captured by DumpState.</summary>
        internal void RestoreState(OutputBuffer.BufferState state) {
            pos = state.pos;
            bitBuf = state.bitBuf;
            bitCount = state.bitCount;
        }

        internal struct BufferState {
            internal int pos;      // write position
            internal uint bitBuf;  // pending bits
            internal int bitCount; // number of valid bits in bitBuf
        }
    }
}
| {
"pile_set_name": "Github"
} |
interactions:
- request:
body: !!python/unicode '{}'
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '2'
Content-Type:
- application/json
User-Agent:
- python-requests/2.22.0
method: GET
uri: https://my.rcodezero.at/api/v1/zones/lexicon-test.at
response:
body:
string: !!python/unicode '{"id":544219,"domain":"lexicon-test.at","type":"MASTER","masters":[""],"serial":2019093203,"created":"2019-09-30T12:57:51Z","last_check":null,"dnssec_status":"Unsigned","dnssec_status_detail":"Unsigned"}'
headers:
cache-control:
- no-cache, private
connection:
- Keep-Alive
content-length:
- '203'
content-type:
- application/json
date:
- Mon, 30 Sep 2019 13:25:44 GMT
keep-alive:
- timeout=5, max=100
server:
- Apache
strict-transport-security:
- max-age=15768000
vary:
- Authorization,Accept-Encoding
x-frame-options:
- SAMEORIGIN
x-ratelimit-limit:
- '200'
x-ratelimit-remaining:
- '179'
status:
code: 200
message: OK
- request:
body: !!python/unicode '{}'
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '2'
Content-Type:
- application/json
User-Agent:
- python-requests/2.22.0
method: GET
uri: https://my.rcodezero.at/api/v1/zones/lexicon-test.at/rrsets?page_size=-1
response:
body:
string: !!python/unicode '{"current_page":1,"data":[{"name":"localhost.lexicon-test.at.","type":"A","ttl":3600,"records":[{"content":"127.0.0.1","disabled":false}]},{"name":"random.fqdntest.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"random.fulltest.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"orig.nameonly.test.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"updated\"","disabled":false}]},{"name":"random.test.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"_acme-challenge.test.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"updated.test.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"_acme-challenge.deleterecordinset.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken2\"","disabled":false}]},{"name":"_acme-challenge.listrecordset.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken2\"","disabled":false},{"content":"\"challengetoken1\"","disabled":false}]},{"name":"_acme-challenge.createrecordset.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken2\"","disabled":false},{"content":"\"challengetoken1\"","disabled":false}]},{"name":"docs.lexicon-test.at.","type":"CNAME","ttl":3600,"records":[{"content":"docs.example.com.lexicon-test.at.","disabled":false}]},{"name":"_acme-challenge.noop.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"updated.testfqdn.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"ttl.fqdn.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"ttlshouldbe3600\"","disabled":f
alse}]},{"name":"_acme-challenge.fqdn.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"updated.testfull.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"_acme-challenge.full.lexicon-test.at.","type":"TXT","ttl":3600,"records":[{"content":"\"challengetoken\"","disabled":false}]},{"name":"lexicon-test.at.","type":"SOA","ttl":3600,"records":[{"content":"sec1.rcode0.net.
rcodezero-soa.ipcom.at. 2019093203 10800 3600 604800 3600","disabled":false}]}],"first_page_url":"https:\/\/my.rcodezero.at\/api\/v1\/zones\/lexicon-test.at\/rrsets?page=1","from":1,"last_page":1,"last_page_url":"https:\/\/my.rcodezero.at\/api\/v1\/zones\/lexicon-test.at\/rrsets?page=1","next_page_url":null,"path":"https:\/\/my.rcodezero.at\/api\/v1\/zones\/lexicon-test.at\/rrsets","per_page":-1,"prev_page_url":null,"to":18,"total":19}'
headers:
cache-control:
- no-cache, private
connection:
- Keep-Alive
content-length:
- '2925'
content-type:
- application/json
date:
- Mon, 30 Sep 2019 13:25:44 GMT
keep-alive:
- timeout=5, max=100
server:
- Apache
strict-transport-security:
- max-age=15768000
vary:
- Authorization,Accept-Encoding
x-frame-options:
- SAMEORIGIN
x-ratelimit-limit:
- '200'
x-ratelimit-remaining:
- '178'
status:
code: 200
message: OK
- request:
body: !!python/unicode '[{"records": [{"content": "\"challengetoken\"", "disabled":
false}], "changetype": "UPDATE", "type": "TXT", "name": "_acme-challenge.fqdn.lexicon-test.at.",
"ttl": 3600}]'
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate
Connection:
- keep-alive
Content-Length:
- '170'
Content-Type:
- application/json
User-Agent:
- python-requests/2.22.0
method: PATCH
uri: https://my.rcodezero.at/api/v1/zones/lexicon-test.at/rrsets
response:
body:
string: !!python/unicode '{"status":"ok","message":"RRsets updated"}'
headers:
cache-control:
- no-cache, private
connection:
- Keep-Alive
content-length:
- '42'
content-type:
- application/json
date:
- Mon, 30 Sep 2019 13:25:45 GMT
keep-alive:
- timeout=5, max=100
server:
- Apache
strict-transport-security:
- max-age=15768000
vary:
- Authorization
x-frame-options:
- SAMEORIGIN
x-ratelimit-limit:
- '200'
x-ratelimit-remaining:
- '177'
status:
code: 200
message: OK
version: 1
| {
"pile_set_name": "Github"
} |
// +build windows
package fs
import (
"os"
"syscall"
"unsafe"
)
// Lazily resolved kernel32.dll entry point used by lockfile for advisory
// file locking (LockFileEx has no wrapper in the syscall package).
var (
	modkernel32    = syscall.NewLazyDLL("kernel32.dll")
	procLockFileEx = modkernel32.NewProc("LockFileEx")
)
const (
	// ERROR_LOCK_VIOLATION (0x21): LockFileEx failed because another
	// process holds a conflicting lock on the requested region.
	errorLockViolation = 0x21
)
// mmap creates a read-only memory mapping covering the first fileSize
// bytes of f and returns the mapped bytes together with the mapped length.
// mmapSize is accepted for interface symmetry with other platforms but is
// not used on Windows.
func mmap(f *os.File, fileSize int64, mmapSize int64) ([]byte, int64, error) {
	length := fileSize
	lowOrder := uint32(length)
	highOrder := uint32(length >> 32)

	mapping, err := syscall.CreateFileMapping(syscall.Handle(f.Fd()), nil, syscall.PAGE_READONLY, highOrder, lowOrder, nil)
	if err != nil {
		return nil, 0, err
	}
	// The view keeps the pages alive; the mapping handle itself can be
	// closed as soon as the view exists.
	defer syscall.CloseHandle(mapping)

	view, err := syscall.MapViewOfFile(mapping, syscall.FILE_MAP_READ, 0, 0, uintptr(length))
	if err != nil {
		return nil, 0, err
	}

	buf := (*[maxMmapSize]byte)(unsafe.Pointer(view))[:length]
	return buf, length, nil
}
// munmap releases a view previously obtained from mmap.
func munmap(data []byte) error {
	addr := uintptr(unsafe.Pointer(&data[0]))
	return syscall.UnmapViewOfFile(addr)
}
// madviceRandom is a no-op on Windows: there is no madvise equivalent for
// hinting a random access pattern, so the request is silently accepted.
func madviceRandom(data []byte) error {
	return nil
}
// lockfile takes an exclusive, non-blocking advisory lock on f via
// LockFileEx. It reports os.ErrExist when another process already holds a
// conflicting lock.
// NOTE(review): any other LockFileEx failure is swallowed and reported as
// success — this looks like deliberate best-effort behavior, but confirm
// that callers do not depend on those errors being surfaced.
func lockfile(f *os.File) error {
	var ol syscall.Overlapped
	r1, _, err := syscall.Syscall6(
		procLockFileEx.Addr(),
		6,
		uintptr(f.Fd()), // handle
		uintptr(0x0003), // LOCKFILE_FAIL_IMMEDIATELY | LOCKFILE_EXCLUSIVE_LOCK
		uintptr(0), // reserved
		uintptr(1), // locklow: lock a single byte
		uintptr(0), // lockhigh
		uintptr(unsafe.Pointer(&ol)),
	)
	if r1 == 0 && (err == syscall.ERROR_FILE_EXISTS || err == errorLockViolation) {
		return os.ErrExist
	}
	return nil
}
// createLockFile creates (or recreates) the file at name, takes an
// exclusive advisory lock on it, and returns the lock handle. The boolean
// result reports whether a file with that name already existed before it
// was recreated. perm is accepted for interface symmetry but not applied
// on Windows.
func createLockFile(name string, perm os.FileMode) (LockFile, bool, error) {
	acquiredExisting := false
	if _, err := os.Stat(name); err == nil {
		acquiredExisting = true
	}
	fd, err := syscall.CreateFile(&(syscall.StringToUTF16(name)[0]),
		syscall.GENERIC_READ|syscall.GENERIC_WRITE,
		syscall.FILE_SHARE_READ|syscall.FILE_SHARE_WRITE|syscall.FILE_SHARE_DELETE,
		nil,
		syscall.CREATE_ALWAYS, // always recreate; prior existence was recorded above
		syscall.FILE_ATTRIBUTE_NORMAL,
		0)
	if err != nil {
		// NOTE(review): every CreateFile failure is reported as os.ErrExist,
		// masking the underlying error — confirm this is intentional.
		return nil, false, os.ErrExist
	}
	f := os.NewFile(uintptr(fd), name)
	if err := lockfile(f); err != nil {
		f.Close()
		return nil, false, err
	}
	return &oslockfile{f, name}, acquiredExisting, nil
}
| {
"pile_set_name": "Github"
} |
\sum_{m = 1}^{\infty}\sum_{n = 1}^{\infty}\frac{m^{2}\, n}{3^{m}\left( m\, 3^{n} + n\, 3^{m} \right)}
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Linq.Expressions;
namespace TZM.XFramework.Data
{
/// <summary>
/// Semantic representation of a SELECT (query) operation against a data
/// source whose element type is not known at compile time.
/// </summary>
public interface IDbQueryableInfo_Select
{
    /// <summary>
    /// Collection of JOIN expressions.
    /// </summary>
    List<DbExpression> Joins { get; set; }

    /// <summary>
    /// Collection of ORDER BY expressions.
    /// </summary>
    List<DbExpression> OrderBys { get; set; }

    /// <summary>
    /// Collection of Include (eager-loaded navigation) expressions.
    /// </summary>
    List<DbExpression> Includes { get; set; }

    /// <summary>
    /// GROUP BY expression.
    /// </summary>
    DbExpression GroupBy { get; set; }

    /// <summary>
    /// Whether the SQL command contains DISTINCT.
    /// </summary>
    bool HasDistinct { get; set; }

    /// <summary>
    /// Whether the expression is an Any expression.
    /// </summary>
    bool HasAny { get; set; }

    /// <summary>
    /// Number of elements to skip from the start of the sequence.
    /// </summary>
    int Skip { get; set; }

    /// <summary>
    /// Number of contiguous elements to return from the start of the sequence.
    /// </summary>
    int Take { get; set; }

    /// <summary>
    /// The CLR type that the table in the SELECT ... FROM clause maps to.
    /// </summary>
    Type FromType { get; set; }

    /// <summary>
    /// SELECT field expression; null means all fields are selected.
    /// </summary>
    DbExpression Select { get; set; }

    /// <summary>
    /// WHERE expression.
    /// </summary>
    DbExpression Where { get; set; }

    /// <summary>
    /// HAVING expression.
    /// </summary>
    DbExpression Having { get; set; }

    /// <summary>
    /// Aggregate function expression: COUNT, MAX, MIN, AVG or SUM.
    /// </summary>
    DbExpression Aggregate { get; set; }

    /// <summary>
    /// Whether this nested query was produced by a one-to-many navigation;
    /// when true, this query is necessarily a subquery.
    /// </summary>
    bool IsParsedByMany { get; set; }

    /// <summary>
    /// Whether the expression contains 1:n navigation properties.
    /// </summary>
    bool HasMany { get; set; }

    /// <summary>
    /// Union operations, translated to UNION ALL.
    /// </summary>
    List<IDbQueryableInfo_Select> Unions { get; set; }

    /// <summary>
    /// The subquery.
    /// </summary>
    IDbQueryableInfo_Select Subquery { get; set; }
}
} | {
"pile_set_name": "Github"
} |
#region License
// Copyright (c) 2013, ClearCanvas Inc.
// All rights reserved.
// http://www.clearcanvas.ca
//
// This file is part of the ClearCanvas RIS/PACS open source project.
//
// The ClearCanvas RIS/PACS open source project is free software: you can
// redistribute it and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// The ClearCanvas RIS/PACS open source project is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
// Public License for more details.
//
// You should have received a copy of the GNU General Public License along with
// the ClearCanvas RIS/PACS open source project. If not, see
// <http://www.gnu.org/licenses/>.
#endregion
using System;
using System.ServiceModel;
using ClearCanvas.Enterprise.Common;
namespace ClearCanvas.Ris.Application.Common.Admin.PatientAdmin
{
[RisApplicationService]
[ServiceContract]
public interface IPatientAdminService
{
    /// <summary>
    /// Loads the data needed to populate the patient-profile editor form.
    /// </summary>
    [OperationContract]
    LoadPatientProfileEditorFormDataResponse LoadPatientProfileEditorFormData(LoadPatientProfileEditorFormDataRequest request);

    /// <summary>
    /// Loads an existing patient profile for editing.
    /// </summary>
    [OperationContract]
    LoadPatientProfileForEditResponse LoadPatientProfileForEdit(LoadPatientProfileForEditRequest request);

    /// <summary>
    /// Updates a patient profile.
    /// </summary>
    /// <param name="request"></param>
    /// <returns></returns>
    [OperationContract]
    [FaultContract(typeof(ConcurrentModificationException))]
    [FaultContract(typeof(RequestValidationException))]
    UpdatePatientProfileResponse UpdatePatientProfile(UpdatePatientProfileRequest request);

    /// <summary>
    /// Adds a new patient to the system.
    /// </summary>
    /// <param name="request"></param>
    /// <returns></returns>
    [OperationContract]
    [FaultContract(typeof(RequestValidationException))]
    AddPatientResponse AddPatient(AddPatientRequest request);
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2012, Cameron Rich
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the axTLS project nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @file os_int.h
*
* Ensure a consistent bit size
*/
#ifndef HEADER_OS_INT_H
#define HEADER_OS_INT_H

#ifdef __cplusplus
extern "C" {
#endif

#if defined(WIN32)
/* Map the Windows SDK integer typedefs onto the C99 fixed-width names,
 * since <stdint.h> cannot be assumed to exist on this toolchain. */
typedef UINT8 uint8_t;
typedef INT8 int8_t;
typedef UINT16 uint16_t;
typedef INT16 int16_t;
typedef UINT32 uint32_t;
typedef INT32 int32_t;
typedef UINT64 uint64_t;
typedef INT64 int64_t;
#else /* Not Win32 */

#ifdef CONFIG_PLATFORM_SOLARIS
/* Solaris exposes the fixed-width types via <inttypes.h>. */
#include <inttypes.h>
#else
#include <stdint.h>
#endif /* Not Solaris */

#endif /* Not Win32 */

#ifdef __cplusplus
}
#endif

#endif /* HEADER_OS_INT_H */
"pile_set_name": "Github"
} |
// Copyright (c) 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// ----
// Author: Matt Austern
//
// This code is compiled directly on many platforms, including client
// platforms like Windows, Mac, and embedded systems. Before making
// any changes here, make sure that you're not breaking any platforms.
//
// Define a small subset of tr1 type traits. The traits we define are:
// is_integral
// is_floating_point
// is_pointer
// is_enum
// is_reference
// is_pod
// has_trivial_constructor
// has_trivial_copy
// has_trivial_assign
// has_trivial_destructor
// remove_const
// remove_volatile
// remove_cv
// remove_reference
// add_reference
// remove_pointer
// is_same
// is_convertible
// We can add more type traits as required.
#ifndef GOOGLE_PROTOBUF_TYPE_TRAITS_H_
#define GOOGLE_PROTOBUF_TYPE_TRAITS_H_

#include <utility>  // For pair
#include <google/protobuf/stubs/template_util.h>  // For true_type and false_type

namespace google {
namespace protobuf {
namespace internal {

// Forward declarations of every trait defined in this header, so the
// definitions below may refer to one another regardless of order.
template <class T> struct is_integral;
template <class T> struct is_floating_point;
template <class T> struct is_pointer;
// MSVC can't compile this correctly, and neither can gcc 3.3.5 (at least)
#if !defined(_MSC_VER) && !(defined(__GNUC__) && __GNUC__ <= 3)
// is_enum uses is_convertible, which is not available on MSVC.
template <class T> struct is_enum;
#endif
template <class T> struct is_reference;
template <class T> struct is_pod;
template <class T> struct has_trivial_constructor;
template <class T> struct has_trivial_copy;
template <class T> struct has_trivial_assign;
template <class T> struct has_trivial_destructor;
template <class T> struct remove_const;
template <class T> struct remove_volatile;
template <class T> struct remove_cv;
template <class T> struct remove_reference;
template <class T> struct add_reference;
template <class T> struct remove_pointer;
template <class T, class U> struct is_same;
#if !defined(_MSC_VER) && !(defined(__GNUC__) && __GNUC__ <= 3)
template <class From, class To> struct is_convertible;
#endif
// is_integral is false except for the built-in integer types. A
// cv-qualified type is integral if and only if the underlying type is.
template <class T> struct is_integral : false_type { };
template<> struct is_integral<bool> : true_type { };
template<> struct is_integral<char> : true_type { };
template<> struct is_integral<unsigned char> : true_type { };
template<> struct is_integral<signed char> : true_type { };
#if defined(_MSC_VER)
// wchar_t is not by default a distinct type from unsigned short in
// Microsoft C.
// See http://msdn2.microsoft.com/en-us/library/dh8che7s(VS.80).aspx
template<> struct is_integral<__wchar_t> : true_type { };
#else
template<> struct is_integral<wchar_t> : true_type { };
#endif
template<> struct is_integral<short> : true_type { };
template<> struct is_integral<unsigned short> : true_type { };
template<> struct is_integral<int> : true_type { };
template<> struct is_integral<unsigned int> : true_type { };
template<> struct is_integral<long> : true_type { };
template<> struct is_integral<unsigned long> : true_type { };
#ifdef HAVE_LONG_LONG
template<> struct is_integral<long long> : true_type { };
template<> struct is_integral<unsigned long long> : true_type { };
#endif
// cv-qualified variants delegate to the underlying unqualified type.
template <class T> struct is_integral<const T> : is_integral<T> { };
template <class T> struct is_integral<volatile T> : is_integral<T> { };
template <class T> struct is_integral<const volatile T> : is_integral<T> { };

// is_floating_point is false except for the built-in floating-point types.
// A cv-qualified type is floating point if and only if the underlying
// type is.
template <class T> struct is_floating_point : false_type { };
template<> struct is_floating_point<float> : true_type { };
template<> struct is_floating_point<double> : true_type { };
template<> struct is_floating_point<long double> : true_type { };
template <class T> struct is_floating_point<const T>
    : is_floating_point<T> { };
template <class T> struct is_floating_point<volatile T>
    : is_floating_point<T> { };
template <class T> struct is_floating_point<const volatile T>
    : is_floating_point<T> { };

// is_pointer is false except for pointer types. A cv-qualified type (e.g.
// "int* const", as opposed to "int const*") is cv-qualified if and only if
// the underlying type is.
template <class T> struct is_pointer : false_type { };
template <class T> struct is_pointer<T*> : true_type { };
template <class T> struct is_pointer<const T> : is_pointer<T> { };
template <class T> struct is_pointer<volatile T> : is_pointer<T> { };
template <class T> struct is_pointer<const volatile T> : is_pointer<T> { };
#if !defined(_MSC_VER) && !(defined(__GNUC__) && __GNUC__ <= 3)

namespace internal {

// Detects class/union types: only classes and unions can have member
// functions, so the pointer-to-member overload is viable exactly for them.
template <class T> struct is_class_or_union {
  template <class U> static small_ tester(void (U::*)());
  template <class U> static big_ tester(...);
  static const bool value = sizeof(tester<T>(0)) == sizeof(small_);
};

// is_convertible chokes if the first argument is an array. That's why
// we use add_reference here.
template <bool NotUnum, class T> struct is_enum_impl
    : is_convertible<typename add_reference<T>::type, int> { };

template <class T> struct is_enum_impl<true, T> : false_type { };

}  // namespace internal

// Specified by TR1 [4.5.1] primary type categories.

// Implementation note:
//
// Each type is either void, integral, floating point, array, pointer,
// reference, member object pointer, member function pointer, enum,
// union or class. Out of these, only integral, floating point, reference,
// class and enum types are potentially convertible to int. Therefore,
// if a type is not a reference, integral, floating point or class and
// is convertible to int, it's a enum. Adding cv-qualification to a type
// does not change whether it's an enum.
//
// Is-convertible-to-int check is done only if all other checks pass,
// because it can't be used with some types (e.g. void or classes with
// inaccessible conversion operators).
template <class T> struct is_enum
    : internal::is_enum_impl<
          is_same<T, void>::value ||
              is_integral<T>::value ||
              is_floating_point<T>::value ||
              is_reference<T>::value ||
              internal::is_class_or_union<T>::value,
          T> { };

template <class T> struct is_enum<const T> : is_enum<T> { };
template <class T> struct is_enum<volatile T> : is_enum<T> { };
template <class T> struct is_enum<const volatile T> : is_enum<T> { };

#endif
// is_reference is false except for reference types.
template<typename T> struct is_reference : false_type {};
template<typename T> struct is_reference<T&> : true_type {};

// We can't get is_pod right without compiler help, so fail conservatively.
// We will assume it's false except for arithmetic types, enumerations,
// pointers and cv-qualified versions thereof. Note that std::pair<T,U>
// is not a POD even if T and U are PODs.
template <class T> struct is_pod
 : integral_constant<bool, (is_integral<T>::value ||
                            is_floating_point<T>::value ||
#if !defined(_MSC_VER) && !(defined(__GNUC__) && __GNUC__ <= 3)
                            // is_enum is not available on MSVC.
                            is_enum<T>::value ||
#endif
                            is_pointer<T>::value)> { };
template <class T> struct is_pod<const T> : is_pod<T> { };
template <class T> struct is_pod<volatile T> : is_pod<T> { };
template <class T> struct is_pod<const volatile T> : is_pod<T> { };

// We can't get has_trivial_constructor right without compiler help, so
// fail conservatively. We will assume it's false except for: (1) types
// for which is_pod is true. (2) std::pair of types with trivial
// constructors. (3) array of a type with a trivial constructor.
// (4) const versions thereof.
template <class T> struct has_trivial_constructor : is_pod<T> { };
template <class T, class U> struct has_trivial_constructor<std::pair<T, U> >
    : integral_constant<bool,
                        (has_trivial_constructor<T>::value &&
                         has_trivial_constructor<U>::value)> { };
template <class A, int N> struct has_trivial_constructor<A[N]>
    : has_trivial_constructor<A> { };
template <class T> struct has_trivial_constructor<const T>
    : has_trivial_constructor<T> { };

// We can't get has_trivial_copy right without compiler help, so fail
// conservatively. We will assume it's false except for: (1) types
// for which is_pod is true. (2) std::pair of types with trivial copy
// constructors. (3) array of a type with a trivial copy constructor.
// (4) const versions thereof.
template <class T> struct has_trivial_copy : is_pod<T> { };
template <class T, class U> struct has_trivial_copy<std::pair<T, U> >
    : integral_constant<bool,
                        (has_trivial_copy<T>::value &&
                         has_trivial_copy<U>::value)> { };
template <class A, int N> struct has_trivial_copy<A[N]>
    : has_trivial_copy<A> { };
template <class T> struct has_trivial_copy<const T> : has_trivial_copy<T> { };

// We can't get has_trivial_assign right without compiler help, so fail
// conservatively. We will assume it's false except for: (1) types
// for which is_pod is true. (2) std::pair of types with trivial copy
// constructors. (3) array of a type with a trivial assign constructor.
// (No const version: a const object cannot be assigned to.)
template <class T> struct has_trivial_assign : is_pod<T> { };
template <class T, class U> struct has_trivial_assign<std::pair<T, U> >
    : integral_constant<bool,
                        (has_trivial_assign<T>::value &&
                         has_trivial_assign<U>::value)> { };
template <class A, int N> struct has_trivial_assign<A[N]>
    : has_trivial_assign<A> { };

// We can't get has_trivial_destructor right without compiler help, so
// fail conservatively. We will assume it's false except for: (1) types
// for which is_pod is true. (2) std::pair of types with trivial
// destructors. (3) array of a type with a trivial destructor.
// (4) const versions thereof.
template <class T> struct has_trivial_destructor : is_pod<T> { };
template <class T, class U> struct has_trivial_destructor<std::pair<T, U> >
    : integral_constant<bool,
                        (has_trivial_destructor<T>::value &&
                         has_trivial_destructor<U>::value)> { };
template <class A, int N> struct has_trivial_destructor<A[N]>
    : has_trivial_destructor<A> { };
template <class T> struct has_trivial_destructor<const T>
    : has_trivial_destructor<T> { };
// Specified by TR1 [4.7.1] Const-volatile modifications.
template<typename T> struct remove_const { typedef T type; };
template<typename T> struct remove_const<T const> { typedef T type; };
template<typename T> struct remove_volatile { typedef T type; };
template<typename T> struct remove_volatile<T volatile> { typedef T type; };
template<typename T> struct remove_cv {
  typedef typename remove_const<typename remove_volatile<T>::type>::type type;
};

// Specified by TR1 [4.7.2] Reference modifications.
template<typename T> struct remove_reference { typedef T type; };
template<typename T> struct remove_reference<T&> { typedef T type; };

template <typename T> struct add_reference { typedef T& type; };
template <typename T> struct add_reference<T&> { typedef T& type; };

// Specified by TR1 [4.7.4] Pointer modifications.
template<typename T> struct remove_pointer { typedef T type; };
template<typename T> struct remove_pointer<T*> { typedef T type; };
template<typename T> struct remove_pointer<T* const> { typedef T type; };
template<typename T> struct remove_pointer<T* volatile> { typedef T type; };
template<typename T> struct remove_pointer<T* const volatile> {
  typedef T type; };

// Specified by TR1 [4.6] Relationships between types
template<typename T, typename U> struct is_same : public false_type { };
template<typename T> struct is_same<T, T> : public true_type { };

// Specified by TR1 [4.6] Relationships between types
#if !defined(_MSC_VER) && !(defined(__GNUC__) && __GNUC__ <= 3)
namespace internal {

// This class is an implementation detail for is_convertible, and you
// don't need to know how it works to use is_convertible. For those
// who care: we declare two different functions, one whose argument is
// of type To and one with a variadic argument list. We give them
// return types of different size, so we can use sizeof to trick the
// compiler into telling us which function it would have chosen if we
// had called it with an argument of type From. See Alexandrescu's
// _Modern C++ Design_ for more details on this sort of trick.
template <typename From, typename To>
struct ConvertHelper {
  static small_ Test(To);
  static big_ Test(...);
  static From Create();
};
}  // namespace internal

// Inherits from true_type if From is convertible to To, false_type otherwise.
template <typename From, typename To>
struct is_convertible
    : integral_constant<bool,
                        sizeof(internal::ConvertHelper<From, To>::Test(
                            internal::ConvertHelper<From, To>::Create()))
                        == sizeof(small_)> {
};
#endif

}  // namespace internal
}  // namespace protobuf
}  // namespace google

#endif  // GOOGLE_PROTOBUF_TYPE_TRAITS_H_
| {
"pile_set_name": "Github"
} |
# Copyright (c) 2012 Giorgos Verigakis <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import print_function
# ANSI/VT100 escape sequences for hiding and restoring the terminal cursor
# while a single-line progress display is being redrawn in place.
HIDE_CURSOR = '\x1b[?25l'
SHOW_CURSOR = '\x1b[?25h'
class WriteMixin(object):
    """Mixin that redraws a single line of output in place on a TTY.

    Expects the host class to provide a ``file`` attribute (the output
    stream).  All output is suppressed when the stream is missing or is
    not a terminal.
    """
    hide_cursor = False

    def __init__(self, message=None, **kwargs):
        super(WriteMixin, self).__init__(**kwargs)
        self._width = 0
        if message:
            self.message = message

        stream = self.file
        if stream and stream.isatty():
            if self.hide_cursor:
                print(HIDE_CURSOR, end='', file=stream)
            print(self.message, end='', file=stream)
            stream.flush()

    def write(self, s):
        """Overwrite the previously written text with ``s``."""
        stream = self.file
        if not (stream and stream.isatty()):
            return
        erase = '\b' * self._width
        padded = s.ljust(self._width)  # blank out leftovers of a longer line
        print(erase + padded, end='', file=stream)
        self._width = max(self._width, len(s))
        stream.flush()

    def finish(self):
        """Restore the cursor if it was hidden at construction time."""
        stream = self.file
        if stream and stream.isatty() and self.hide_cursor:
            print(SHOW_CURSOR, end='', file=stream)
class WritelnMixin(object):
    """Mixin that rewrites the current terminal line for each update.

    Uses carriage-return plus erase-line instead of backspaces, and emits
    a final newline in :meth:`finish`.  Requires a ``file`` attribute on
    the host class; output is skipped for non-TTY streams.
    """
    hide_cursor = False

    def __init__(self, message=None, **kwargs):
        super(WritelnMixin, self).__init__(**kwargs)
        if message:
            self.message = message

        stream = self.file
        if stream and stream.isatty() and self.hide_cursor:
            print(HIDE_CURSOR, end='', file=stream)

    def clearln(self):
        """Move to column 0 and erase the whole line."""
        stream = self.file
        if stream and stream.isatty():
            print('\r\x1b[K', end='', file=stream)

    def writeln(self, line):
        """Replace the current line with ``line``."""
        stream = self.file
        if stream and stream.isatty():
            self.clearln()
            print(line, end='', file=stream)
            stream.flush()

    def finish(self):
        """Terminate the line and restore the cursor if it was hidden."""
        stream = self.file
        if stream and stream.isatty():
            print(file=stream)
            if self.hide_cursor:
                print(SHOW_CURSOR, end='', file=stream)
from signal import signal, SIGINT
from sys import exit
class SigIntMixin(object):
    """Registers a signal handler that calls finish on SIGINT"""

    def __init__(self, *args, **kwargs):
        super(SigIntMixin, self).__init__(*args, **kwargs)
        # Installed at construction time; this replaces any previously
        # registered SIGINT handler for the whole process.
        signal(SIGINT, self._sigint_handler)

    def _sigint_handler(self, signum, frame):
        # Let the host class clean up its display (e.g. restore the
        # cursor via finish()), then exit with status 0 rather than the
        # default KeyboardInterrupt traceback.
        self.finish()
        exit(0)
| {
"pile_set_name": "Github"
} |
# Demonstrate the usage of templates with docker service create
## Official Docker Documentation
[Create Docker Swarm Services using templates](https://docs.docker.com/engine/reference/commandline/service_create/#create-services-using-templates)
## Asciinema Examples
[Demonstrate the usage of templates with docker service create](https://asciinema.org/a/zZnpejurCS7bE7F5BQUobLHVo)
| {
"pile_set_name": "Github"
} |
//
// Wire
// Copyright (C) 2016 Wire Swiss GmbH
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see http://www.gnu.org/licenses/.
//
#import <XCTest/XCTest.h>
/// Pass "-XCTestObserverClass MemoryLeaksObserver" as launch arguments to use
///
/// N.B.: XCTestObserver is deprecated.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdeprecated-declarations"
/// XCTest observer activated via the "-XCTestObserverClass MemoryLeaksObserver"
/// launch argument (see note above). Presumably tracks leaked objects during
/// test runs — implementation is not visible here; confirm in the matching .m.
@interface MemoryLeaksObserver : XCTestObserver
@end
#pragma clang diagnostic pop
| {
"pile_set_name": "Github"
} |
* The `yb-docker-ctl` utility initializes the YEDIS API automatically.
* Run redis-cli to connect to the service.
```sh
$ docker exec -it yb-tserver-n1 /home/yugabyte/bin/redis-cli
```
```
127.0.0.1:6379>
```
* Run a Redis command to verify it is working.
```sh
127.0.0.1:6379> PING
```
```
"PONG"
```
| {
"pile_set_name": "Github"
} |
/**
*
*/
| {
"pile_set_name": "Github"
} |
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package unversioned
import (
api "k8s.io/kubernetes/pkg/api"
policy "k8s.io/kubernetes/pkg/apis/policy"
watch "k8s.io/kubernetes/pkg/watch"
)
// PodDisruptionBudgetsGetter has a method to return a PodDisruptionBudgetInterface.
// A group's client should implement this interface.
type PodDisruptionBudgetsGetter interface {
	// PodDisruptionBudgets returns a client scoped to the given namespace.
	PodDisruptionBudgets(namespace string) PodDisruptionBudgetInterface
}
// PodDisruptionBudgetInterface has methods to work with PodDisruptionBudget resources.
type PodDisruptionBudgetInterface interface {
	// Create, Update and UpdateStatus send the given object to the API
	// server and return the server's representation of it.
	Create(*policy.PodDisruptionBudget) (*policy.PodDisruptionBudget, error)
	Update(*policy.PodDisruptionBudget) (*policy.PodDisruptionBudget, error)
	UpdateStatus(*policy.PodDisruptionBudget) (*policy.PodDisruptionBudget, error)
	// Delete removes a single object by name; DeleteCollection removes
	// every object matching listOptions.
	Delete(name string, options *api.DeleteOptions) error
	DeleteCollection(options *api.DeleteOptions, listOptions api.ListOptions) error
	// Get, List and Watch are the read-side operations.
	Get(name string) (*policy.PodDisruptionBudget, error)
	List(opts api.ListOptions) (*policy.PodDisruptionBudgetList, error)
	Watch(opts api.ListOptions) (watch.Interface, error)
	// Patch applies a partial update of the given patch type, optionally
	// to a subresource.
	Patch(name string, pt api.PatchType, data []byte, subresources ...string) (result *policy.PodDisruptionBudget, err error)
	PodDisruptionBudgetExpansion
}
// podDisruptionBudgets implements PodDisruptionBudgetInterface
type podDisruptionBudgets struct {
	client *PolicyClient // REST client used to build every request
	ns     string        // namespace that scopes all requests
}
// newPodDisruptionBudgets returns a PodDisruptionBudgets client bound to
// the given namespace.
func newPodDisruptionBudgets(c *PolicyClient, namespace string) *podDisruptionBudgets {
	pdb := &podDisruptionBudgets{client: c, ns: namespace}
	return pdb
}
// Create takes the representation of a podDisruptionBudget and creates it.
// Returns the server's representation of the podDisruptionBudget, and an
// error, if there is any.
func (c *podDisruptionBudgets) Create(podDisruptionBudget *policy.PodDisruptionBudget) (result *policy.PodDisruptionBudget, err error) {
	result = &policy.PodDisruptionBudget{}
	req := c.client.Post().
		Namespace(c.ns).
		Resource("poddisruptionbudgets").
		Body(podDisruptionBudget)
	err = req.Do().Into(result)
	return result, err
}
// Update takes the representation of a podDisruptionBudget and updates it.
// Returns the server's representation of the podDisruptionBudget, and an
// error, if there is any.
func (c *podDisruptionBudgets) Update(podDisruptionBudget *policy.PodDisruptionBudget) (result *policy.PodDisruptionBudget, err error) {
	result = &policy.PodDisruptionBudget{}
	req := c.client.Put().
		Namespace(c.ns).
		Resource("poddisruptionbudgets").
		Name(podDisruptionBudget.Name).
		Body(podDisruptionBudget)
	err = req.Do().Into(result)
	return result, err
}
// UpdateStatus updates only the status subresource of the given
// podDisruptionBudget and returns the server's representation of it.
func (c *podDisruptionBudgets) UpdateStatus(podDisruptionBudget *policy.PodDisruptionBudget) (result *policy.PodDisruptionBudget, err error) {
	result = &policy.PodDisruptionBudget{}
	req := c.client.Put().
		Namespace(c.ns).
		Resource("poddisruptionbudgets").
		Name(podDisruptionBudget.Name).
		SubResource("status").
		Body(podDisruptionBudget)
	err = req.Do().Into(result)
	return result, err
}
// Delete takes name of the podDisruptionBudget and deletes it. Returns an
// error if one occurs.
func (c *podDisruptionBudgets) Delete(name string, options *api.DeleteOptions) error {
	req := c.client.Delete().
		Namespace(c.ns).
		Resource("poddisruptionbudgets").
		Name(name).
		Body(options)
	return req.Do().Error()
}
// DeleteCollection deletes every podDisruptionBudget in the namespace that
// matches listOptions.
func (c *podDisruptionBudgets) DeleteCollection(options *api.DeleteOptions, listOptions api.ListOptions) error {
	req := c.client.Delete().
		Namespace(c.ns).
		Resource("poddisruptionbudgets").
		VersionedParams(&listOptions, api.ParameterCodec).
		Body(options)
	return req.Do().Error()
}
// Get takes name of the podDisruptionBudget, and returns the corresponding
// podDisruptionBudget object, and an error if there is any.
func (c *podDisruptionBudgets) Get(name string) (result *policy.PodDisruptionBudget, err error) {
	result = &policy.PodDisruptionBudget{}
	req := c.client.Get().
		Namespace(c.ns).
		Resource("poddisruptionbudgets").
		Name(name)
	err = req.Do().Into(result)
	return result, err
}
// List takes label and field selectors, and returns the list of
// PodDisruptionBudgets that match those selectors.
func (c *podDisruptionBudgets) List(opts api.ListOptions) (result *policy.PodDisruptionBudgetList, err error) {
	result = &policy.PodDisruptionBudgetList{}
	req := c.client.Get().
		Namespace(c.ns).
		Resource("poddisruptionbudgets").
		VersionedParams(&opts, api.ParameterCodec)
	err = req.Do().Into(result)
	return result, err
}
// Watch returns a watch.Interface that watches the requested
// podDisruptionBudgets.
func (c *podDisruptionBudgets) Watch(opts api.ListOptions) (watch.Interface, error) {
	req := c.client.Get().
		Prefix("watch").
		Namespace(c.ns).
		Resource("poddisruptionbudgets").
		VersionedParams(&opts, api.ParameterCodec)
	return req.Watch()
}
// Patch applies the patch and returns the patched podDisruptionBudget.
func (c *podDisruptionBudgets) Patch(name string, pt api.PatchType, data []byte, subresources ...string) (result *policy.PodDisruptionBudget, err error) {
	result = &policy.PodDisruptionBudget{}
	req := c.client.Patch(pt).
		Namespace(c.ns).
		Resource("poddisruptionbudgets").
		SubResource(subresources...).
		Name(name).
		Body(data)
	err = req.Do().Into(result)
	return result, err
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<odoo noupdate="True">
<!-- We add l10n_latam_document_number on a separate line because we need l10n_latam_document_type_id to be auto-assigned so that account.move.name can be computed with the _inverse_l10n_latam_document_number -->
<!-- Invoice from gritti support service, auto fiscal position set VAT Not Applicable -->
<record id="demo_sup_invoice_1" model="account.move">
<field name="partner_id" ref="res_partner_gritti_agrimensura"/>
<field name="invoice_user_id" ref="base.user_demo"/>
<field name="invoice_payment_term_id" ref="account.account_payment_term_end_following_month"/>
<field name="type">in_invoice</field>
<field name="invoice_date" eval="time.strftime('%Y-%m')+'-01'"/>
<field name="company_id" ref="company_ri"/>
<field name="invoice_line_ids" eval="[
(0, 0, {'product_id': ref('product.product_product_2'), 'price_unit': 642.0, 'quantity': 1}),
(0, 0, {'product_id': ref('product.product_product_27'), 'price_unit': 642.0, 'quantity': 5}),
(0, 0, {'product_id': ref('product_product_telefonia'), 'price_unit': 250.0, 'quantity': 1}),
(0, 0, {'product_id': ref('product_product_no_gravado'), 'price_unit': 50.0, 'quantity': 10}),
(0, 0, {'product_id': ref('product_product_cero'), 'price_unit': 200.0, 'quantity': 1}),
(0, 0, {'product_id': ref('product_product_exento'), 'price_unit': 100.0, 'quantity': 1}),
]"/>
</record>
<!-- Invoice from Foreign with vat 21, 27 and 10,5 -->
<record id="demo_sup_invoice_2" model="account.move">
<field name="partner_id" ref="res_partner_foreign"/>
<field name="invoice_user_id" ref="base.user_demo"/>
<field name="invoice_payment_term_id" ref="account.account_payment_term_end_following_month"/>
<field name="type">in_invoice</field>
<field name="invoice_date" eval="time.strftime('%Y-%m')+'-01'"/>
<field name="company_id" ref="company_ri"/>
<field name="invoice_line_ids" eval="[
(0, 0, {'product_id': ref('product.product_product_27'), 'price_unit': 642.0, 'quantity': 5}),
(0, 0, {'product_id': ref('product_product_telefonia'), 'price_unit': 250.0, 'quantity': 1}),
(0, 0, {'product_id': ref('product.product_product_25'), 'price_unit': 3245.0, 'quantity': 2}),
]"/>
</record>
<!-- Invoice from Foreign with vat zero and 21 -->
<record id="demo_sup_invoice_3" model="account.move">
<field name="partner_id" ref="res_partner_foreign"/>
<field name="invoice_user_id" ref="base.user_demo"/>
<field name="invoice_payment_term_id" ref="account.account_payment_term_end_following_month"/>
<field name="type">in_invoice</field>
<field name="invoice_date" eval="time.strftime('%Y-%m')+'-11'"/>
<field name="company_id" ref="company_ri"/>
<field name="invoice_line_ids" eval="[
(0, 0, {'product_id': ref('product.product_product_27'), 'price_unit': 642.0, 'quantity': 5}),
(0, 0, {'product_id': ref('product_product_cero'), 'price_unit': 200.0, 'quantity': 1}),
]"/>
</record>
<!-- Invoice to Foreign with vat exempt and 21 -->
<record id="demo_sup_invoice_4" model="account.move">
<field name="partner_id" ref="res_partner_foreign"/>
<field name="invoice_user_id" ref="base.user_demo"/>
<field name="invoice_payment_term_id" ref="account.account_payment_term_end_following_month"/>
<field name="type">in_invoice</field>
<field name="invoice_date" eval="time.strftime('%Y-%m')+'-15'"/>
<field name="company_id" ref="company_ri"/>
<field name="invoice_line_ids" eval="[
(0, 0, {'product_id': ref('product.product_product_27'), 'price_unit': 642.0, 'quantity': 5}),
(0, 0, {'product_id': ref('product_product_exento'), 'price_unit': 100.0, 'quantity': 1}),
]"/>
</record>
<!-- Invoice to Foreign with all type of taxes -->
<record id="demo_sup_invoice_5" model="account.move">
<field name="partner_id" ref="res_partner_foreign"/>
<field name="invoice_user_id" ref="base.user_demo"/>
<field name="invoice_payment_term_id" ref="account.account_payment_term_end_following_month"/>
<field name="type">in_invoice</field>
<field name="invoice_date" eval="time.strftime('%Y-%m')+'-18'"/>
<field name="company_id" ref="company_ri"/>
<field name="invoice_line_ids" eval="[
(0, 0, {'product_id': ref('product.product_product_27'), 'price_unit': 642.0, 'quantity': 5}),
(0, 0, {'product_id': ref('product_product_telefonia'), 'price_unit': 250.0, 'quantity': 1}),
(0, 0, {'product_id': ref('product.product_product_25'), 'price_unit': 3245.0, 'quantity': 2}),
(0, 0, {'product_id': ref('product_product_no_gravado'), 'price_unit': 50.0, 'quantity': 10}),
(0, 0, {'product_id': ref('product_product_cero'), 'price_unit': 200.0, 'quantity': 1}),
(0, 0, {'product_id': ref('product_product_exento'), 'price_unit': 100.0, 'quantity': 1}),
]"/>
</record>
<!-- Service Import to Odoo, fiscal position changes tax not correspond -->
<record id="demo_sup_invoice_6" model="account.move">
<field name="partner_id" ref="res_partner_odoo"/>
<field name="invoice_user_id" ref="base.user_demo"/>
<field name="invoice_payment_term_id" ref="account.account_payment_term_end_following_month"/>
<field name="type">in_invoice</field>
<field name="invoice_date" eval="time.strftime('%Y-%m')+'-26'"/>
<field name="company_id" ref="company_ri"/>
<field name="invoice_line_ids" eval="[
(0, 0, {'product_id': ref('product.product_product_2'), 'price_unit': 1642.0, 'quantity': 1}),
]"/>
</record>
<!-- Similar to the previous one, but with a "tax not correspond" line that has a negative amount -->
<record id="demo_sup_invoice_7" model="account.move">
<field name="partner_id" ref="res_partner_odoo"/>
<field name="invoice_user_id" ref="base.user_demo"/>
<field name="invoice_payment_term_id" ref="account.account_payment_term_end_following_month"/>
<field name="type">in_invoice</field>
<field name="invoice_date" eval="time.strftime('%Y-%m')+'-27'"/>
<field name="company_id" ref="company_ri"/>
<field name="invoice_line_ids" eval="[
(0, 0, {'product_id': ref('product.product_product_2'), 'price_unit': 1642.0, 'quantity': 1}),
(0, 0, {'product_id': ref('product_product_no_gravado'), 'price_unit': -50.0, 'quantity': 10}),
]"/>
</record>
<!-- Invoice to ADHOC with multiple taxes and perceptions -->
<record id="demo_sup_invoice_8" model="account.move">
<field name="partner_id" ref="l10n_ar.res_partner_adhoc"/>
<field name="invoice_user_id" ref="base.user_demo"/>
<field name="invoice_payment_term_id" ref="account.account_payment_term_end_following_month"/>
<field name="type">in_invoice</field>
<field name="invoice_date" eval="time.strftime('%Y-%m')+'-01'"/>
<field name="company_id" ref="company_ri"/>
<field name="invoice_line_ids" eval="[
(0, 0, {'product_id': ref('product.product_product_27'), 'price_unit': 642.0, 'quantity': 5}),
(0, 0, {'product_id': ref('product_product_telefonia'), 'price_unit': 250.0, 'quantity': 1}),
(0, 0, {'product_id': ref('product.product_product_25'), 'price_unit': 3245.0, 'quantity': 2}),
]"/>
</record>
<!-- Import Clearance -->
<record id="demo_despacho_1" model="account.move">
<field name="partner_id" ref="l10n_ar.partner_afip"/>
<field name="invoice_user_id" ref="base.user_demo"/>
<field name="invoice_payment_term_id" ref="account.account_payment_term_end_following_month"/>
<field name="type">in_invoice</field>
<!-- As we create the lines separately, we need to set the journal explicitly; otherwise the miscellaneous journal is selected -->
<field name="journal_id" model="account.journal" search="[('type', '=', 'purchase'), ('company_id', '=', obj().env.company.id)]"/>
<field name="invoice_date" eval="time.strftime('%Y-%m')+'-13'"/>
<field name="company_id" ref="company_ri"/>
</record>
<!-- create these lines manually to set taxes and prices -->
<record id="demo_despacho_1_line_1" model="account.move.line" context="{'check_move_validity': False}">
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">5064.98</field>
<field name="name">[AFIP_DESPACHO] Despacho de importación</field>
<field name="quantity">1</field>
<field name="product_id" ref="l10n_ar.product_product_quote_despacho"/>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA 21%'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('l10n_ar.product_product_quote_despacho').categ_id.property_account_income_categ_id.id"/>
</record>
<record id="demo_despacho_1_line_2" model="account.move.line" context="{'check_move_validity': False}">
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">152.08</field>
<field name="name">[AFIP_TASA_EST] Tasa Estadística</field>
<field name="quantity">1</field>
<field name="product_id" ref="l10n_ar.product_product_tasa_estadistica"/>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA 21%'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('l10n_ar.product_product_tasa_estadistica').categ_id.property_account_income_categ_id.id"/>
</record>
<record id="demo_despacho_1_line_3" model="account.move.line" context="{'check_move_validity': False}">
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">10.0</field>
<field name="name">[AFIP_ARANCEL] Arancel</field>
<field name="quantity">1</field>
<field name="product_id" ref="l10n_ar.product_product_arancel"/>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA No Gravado'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('l10n_ar.product_product_arancel').categ_id.property_account_income_categ_id.id"/>
</record>
<record id="demo_despacho_1_line_4" model="account.move.line" context="{'check_move_validity': False}">
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">28.00</field>
<field name="name">[AFIP_SERV_GUARDA] Servicio de Guarda</field>
<field name="quantity">1</field>
<field name="product_id" ref="l10n_ar.product_product_servicio_de_guarda"/>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA No Gravado'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('l10n_ar.product_product_servicio_de_guarda').categ_id.property_account_income_categ_id.id"/>
</record>
<record id="demo_despacho_1_line_5" model="account.move.line" context="{'check_move_validity': False}">
<field name="name">FOB Total</field>
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">28936.06</field>
<field name="quantity">1</field>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA 21%'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('product.product_category_all').property_account_income_categ_id.id"/>
</record>
<record id="demo_despacho_1_line_6" model="account.move.line" context="{'check_move_validity': False}">
<field name="name">Flete</field>
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">1350.00</field>
<field name="quantity">1</field>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA 21%'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('product.product_category_all').property_account_income_categ_id.id"/>
</record>
<record id="demo_despacho_1_line_7" model="account.move.line" context="{'check_move_validity': False}">
<field name="name">Seguro</field>
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">130.21</field>
<field name="quantity">1</field>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA 21%'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('product.product_category_all').property_account_income_categ_id.id"/>
</record>
<record id="demo_despacho_1_line_8" model="account.move.line" context="{'check_move_validity': False}">
<field name="name">-FOB Total</field>
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">-28936.06</field>
<field name="quantity">1</field>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA No Gravado'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('product.product_category_all').property_account_income_categ_id.id"/>
</record>
<record id="demo_despacho_1_line_9" model="account.move.line" context="{'check_move_validity': False}">
<field name="name">-Flete</field>
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">-1350.00</field>
<field name="quantity">1</field>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA No Gravado'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('product.product_category_all').property_account_income_categ_id.id"/>
</record>
<record id="demo_despacho_1_line_10" model="account.move.line" context="{'check_move_validity': False}">
<field name="name">-Seguro</field>
<field name="move_id" ref="demo_despacho_1"/>
<field name="price_unit">-130.21</field>
<field name="quantity">1</field>
<field name="product_uom_id" ref="uom.product_uom_unit"/>
<field name="tax_ids" model="account.tax" eval="[(6, 0, obj().search([('company_id', '=', obj().env.ref('l10n_ar.company_ri').id), ('name', '=', 'IVA No Gravado'), ('type_tax_use', '=', 'purchase')], limit=1).ids)]"/>
<field name="account_id" model="account.move.line" eval="obj().env.ref('product.product_category_all').property_account_income_categ_id.id"/>
</record>
<record id="demo_sup_invoice_1" model="account.move">
<field name="l10n_latam_document_number">0001-00000008</field>
</record>
<record id="demo_sup_invoice_2" model="account.move">
<field name="l10n_latam_document_number">0002-00000123</field>
</record>
<record id="demo_sup_invoice_3" model="account.move">
<field name="l10n_latam_document_number">0003-00000312</field>
</record>
<record id="demo_sup_invoice_4" model="account.move">
<field name="l10n_latam_document_number">0001-00000200</field>
</record>
<record id="demo_sup_invoice_5" model="account.move">
<field name="l10n_latam_document_number">0001-00000222</field>
</record>
<record id="demo_sup_invoice_6" model="account.move">
<field name="l10n_latam_document_number">0001-00000333</field>
</record>
<record id="demo_sup_invoice_7" model="account.move">
<field name="l10n_latam_document_number">0001-00000334</field>
</record>
<record id="demo_sup_invoice_8" model="account.move">
<field name="l10n_latam_document_number">0001-00000335</field>
</record>
<record id="demo_despacho_1" model="account.move">
<field name="l10n_latam_document_number">16052IC04000605L</field>
</record>
<function model="account.move" name="_onchange_partner_id" context="{'check_move_validity': False}">
<value eval="[ref('demo_sup_invoice_1')]"/>
</function>
<function model="account.move" name="_onchange_partner_id" context="{'check_move_validity': False}">
<value eval="[ref('demo_sup_invoice_2')]"/>
</function>
<function model="account.move" name="_onchange_partner_id" context="{'check_move_validity': False}">
<value eval="[ref('demo_sup_invoice_3')]"/>
</function>
<function model="account.move" name="_onchange_partner_id" context="{'check_move_validity': False}">
<value eval="[ref('demo_sup_invoice_4')]"/>
</function>
<function model="account.move" name="_onchange_partner_id" context="{'check_move_validity': False}">
<value eval="[ref('demo_sup_invoice_5')]"/>
</function>
<function model="account.move" name="_onchange_partner_id" context="{'check_move_validity': False}">
<value eval="[ref('demo_sup_invoice_6')]"/>
</function>
<function model="account.move" name="_onchange_partner_id" context="{'check_move_validity': False}">
<value eval="[ref('demo_sup_invoice_7')]"/>
</function>
<function model="account.move" name="_onchange_partner_id" context="{'check_move_validity': False}">
<value eval="[ref('demo_despacho_1')]"/>
</function>
<function model="account.move.line" name="_onchange_product_id" context="{'check_move_validity': False}">
<value model="account.move.line" eval="obj().search([('move_id', 'in', [ref('demo_sup_invoice_1'), ref('demo_sup_invoice_2'), ref('demo_sup_invoice_3'), ref('demo_sup_invoice_4'), ref('demo_sup_invoice_5'), ref('demo_sup_invoice_6'), ref('demo_sup_invoice_7'), ref('demo_sup_invoice_8')])]).ids"/>
</function>
<function model="account.move.line" name="write" context="{'check_move_validity': False, 'active_test': False}">
<value model="account.move.line" search="[('move_id', '=', ref('demo_sup_invoice_8')), ('product_id', '=', ref('product.product_product_27'))]"/>
<value model="account.tax" eval="{'tax_ids': [(4, obj().search([('company_id', '=', ref('company_ri')), ('type_tax_use', '=', 'purchase'), ('tax_group_id.l10n_ar_tribute_afip_code', '=', '06')], limit=1).id)]}"/>
</function>
<function model="account.move.line" name="write" context="{'check_move_validity': False, 'active_test': False}">
<value model="account.move.line" search="[('move_id', '=', ref('demo_sup_invoice_8')), ('product_id', '=', ref('product_product_telefonia'))]"/>
<value model="account.tax" eval="{'tax_ids': [(4, obj().search([('company_id', '=', ref('company_ri')), ('type_tax_use', '=', 'purchase'), ('tax_group_id.l10n_ar_tribute_afip_code', '=', '07')], limit=1).id)]}"/>
</function>
<function model="account.move.line" name="write" context="{'check_move_validity': False, 'active_test': False}">
<value model="account.move.line" search="[('move_id', '=', ref('demo_sup_invoice_8')), ('product_id', '=', ref('product.product_product_25'))]"/>
<value model="account.tax" eval="{'tax_ids': [(4, obj().search([('company_id', '=', ref('company_ri')), ('type_tax_use', '=', 'purchase'), ('tax_group_id.l10n_ar_tribute_afip_code', '=', '99')], limit=1).id)]}"/>
</function>
<function model="account.move" name="_recompute_dynamic_lines" context="{'check_move_validity': False}">
<value eval="[ref('demo_sup_invoice_1'), ref('demo_sup_invoice_2'), ref('demo_sup_invoice_3'), ref('demo_sup_invoice_4'), ref('demo_sup_invoice_5'), ref('demo_sup_invoice_6'), ref('demo_sup_invoice_7'), ref('demo_sup_invoice_8'), ref('demo_despacho_1')]"/>
<value eval="True"/>
</function>
<function model="account.move" name="post">
<value eval="[ref('demo_sup_invoice_1'), ref('demo_sup_invoice_2'), ref('demo_sup_invoice_3'), ref('demo_sup_invoice_4'), ref('demo_sup_invoice_5'), ref('demo_sup_invoice_6'), ref('demo_sup_invoice_7'), ref('demo_sup_invoice_8'), ref('demo_despacho_1')]"/>
</function>
</odoo>
| {
"pile_set_name": "Github"
} |
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package generator
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"golang.org/x/tools/imports"
"k8s.io/gengo/namer"
"k8s.io/gengo/types"
"k8s.io/klog"
)
func errs2strings(errors []error) []string {
strs := make([]string, len(errors))
for i := range errors {
strs[i] = errors[i].Error()
}
return strs
}
// ExecutePackages runs the generators for every package in 'packages'.
// 'outDir' is the base directory in which to place all the generated
// packages; it should be a physical path on disk, not an import path, e.g.:
// /path/to/home/path/to/gopath/src/
// Each package has its import path already; it is appended to 'outDir'.
// Failures are collected so every package is attempted before reporting.
func (c *Context) ExecutePackages(outDir string, packages Packages) error {
	var errors []error
	for _, p := range packages {
		err := c.ExecutePackage(outDir, p)
		if err != nil {
			errors = append(errors, err)
		}
	}
	if len(errors) == 0 {
		return nil
	}
	return fmt.Errorf("some packages had errors:\n%v\n", strings.Join(errs2strings(errors), "\n"))
}
// DefaultFileType assembles and verifies generated files via two hooks:
// Assemble renders a *File into a writer, and Format post-processes the
// assembled bytes (e.g. running them through a code formatter).
type DefaultFileType struct {
	Format   func([]byte) ([]byte, error)
	Assemble func(io.Writer, *File)
}
// AssembleFile renders f, formats the result, and writes it to pathname.
// If formatting fails, the unformatted output is still written so the
// generator bug can be inspected, and the formatting error is returned.
func (ft DefaultFileType) AssembleFile(f *File, pathname string) error {
	klog.V(2).Infof("Assembling file %q", pathname)

	destFile, err := os.Create(pathname)
	if err != nil {
		return err
	}
	defer destFile.Close()

	buf := &bytes.Buffer{}
	tracker := NewErrorTracker(buf)
	ft.Assemble(tracker, f)
	if trackErr := tracker.Error(); trackErr != nil {
		return trackErr
	}

	formatted, fmtErr := ft.Format(buf.Bytes())
	if fmtErr != nil {
		fmtErr = fmt.Errorf("unable to format file %q (%v).", pathname, fmtErr)
		// Write the file anyway, so they can see what's going wrong and fix the generator.
		if _, writeErr := destFile.Write(buf.Bytes()); writeErr != nil {
			return writeErr
		}
		return fmtErr
	}
	_, err = destFile.Write(formatted)
	return err
}
// VerifyFile assembles and formats f, then compares the result against the
// content already on disk at pathname without writing anything.
// It returns nil when the two are byte-identical; otherwise it returns an
// error quoting the first point of divergence (truncated to 100 bytes per
// side) to help diagnose stale generated files.
func (ft DefaultFileType) VerifyFile(f *File, pathname string) error {
	klog.V(2).Infof("Verifying file %q", pathname)
	friendlyName := filepath.Join(f.PackageName, f.Name)
	b := &bytes.Buffer{}
	et := NewErrorTracker(b)
	ft.Assemble(et, f)
	if et.Error() != nil {
		return et.Error()
	}
	formatted, err := ft.Format(b.Bytes())
	if err != nil {
		return fmt.Errorf("unable to format the output for %q: %v", friendlyName, err)
	}
	existing, err := ioutil.ReadFile(pathname)
	if err != nil {
		return fmt.Errorf("unable to read file %q for comparison: %v", friendlyName, err)
	}
	// bytes.Equal is the idiomatic (and clearer) equality check; the old
	// bytes.Compare(...) == 0 form computed an ordering we never used.
	if bytes.Equal(formatted, existing) {
		return nil
	}
	// Be nice and find the first place where they differ
	i := 0
	for i < len(formatted) && i < len(existing) && formatted[i] == existing[i] {
		i++
	}
	eDiff, fDiff := existing[i:], formatted[i:]
	if len(eDiff) > 100 {
		eDiff = eDiff[:100]
	}
	if len(fDiff) > 100 {
		fDiff = fDiff[:100]
	}
	return fmt.Errorf("output for %q differs; first existing/expected diff: \n %q\n %q", friendlyName, string(eDiff), string(fDiff))
}
// assembleGolangFile writes a complete Go source file to w: the license
// header, the package clause, the import block (if any), then the var,
// const and body sections. Write errors are intentionally not checked here;
// w is expected to be an ErrorTracker whose caller inspects et.Error().
func assembleGolangFile(w io.Writer, f *File) {
	w.Write(f.Header)
	fmt.Fprintf(w, "package %v\n\n", f.PackageName)

	if len(f.Imports) > 0 {
		fmt.Fprint(w, "import (\n")
		for i := range f.Imports {
			if strings.Contains(i, "\"") {
				// they included quotes, or are using the
				// `name "path/to/pkg"` format.
				fmt.Fprintf(w, "\t%s\n", i)
			} else {
				// Bare import path: add the quotes ourselves.
				fmt.Fprintf(w, "\t%q\n", i)
			}
		}
		fmt.Fprint(w, ")\n\n")
	}

	// var/const blocks are only emitted when a generator contributed to them.
	if f.Vars.Len() > 0 {
		fmt.Fprint(w, "var (\n")
		w.Write(f.Vars.Bytes())
		fmt.Fprint(w, ")\n\n")
	}

	if f.Consts.Len() > 0 {
		fmt.Fprint(w, "const (\n")
		w.Write(f.Consts.Bytes())
		fmt.Fprint(w, ")\n\n")
	}

	w.Write(f.Body.Bytes())
}
// importsWrapper adapts golang.org/x/tools/imports.Process to the
// DefaultFileType.Format signature: it gofmt-formats src and adds/removes
// import lines as needed (empty filename, default options).
func importsWrapper(src []byte) ([]byte, error) {
	return imports.Process("", src, nil)
}
// NewGolangFile returns the file type used for generated Go sources:
// assembled by assembleGolangFile and formatted with goimports.
func NewGolangFile() *DefaultFileType {
	return &DefaultFileType{
		Format:   importsWrapper,
		Assemble: assembleGolangFile,
	}
}
// format should be one line only, and not end with \n.
func addIndentHeaderComment(b *bytes.Buffer, format string, args ...interface{}) {
if b.Len() > 0 {
fmt.Fprintf(b, "\n// "+format+"\n", args...)
} else {
fmt.Fprintf(b, "// "+format+"\n", args...)
}
}
// filteredBy returns a shallow copy of c whose Order contains only the
// types accepted by the predicate f. The receiver itself is not modified.
func (c *Context) filteredBy(f func(*Context, *types.Type) bool) *Context {
	filtered := *c
	filtered.Order = []*types.Type{}
	for _, typ := range c.Order {
		if !f(c, typ) {
			continue
		}
		filtered.Order = append(filtered.Order, typ)
	}
	return &filtered
}
// addNameSystems makes a new context; it inherits c.Namers, but adds on
// 'namers'. In case of a name collision, the namer in 'namers' wins.
func (c *Context) addNameSystems(namers namer.NameSystems) *Context {
	if namers == nil {
		return c
	}
	c2 := *c
	// Copy the existing name systems so we don't corrupt a parent context
	c2.Namers = namer.NameSystems{}
	for k, v := range c.Namers {
		c2.Namers[k] = v
	}
	// Overlay the new systems last so they win on collision. Fixed: the loop
	// variable was named "namer", shadowing the imported namer package.
	for name, ns := range namers {
		c2.Namers[name] = ns
	}
	return &c2
}
// ExecutePackage executes a single package. 'outDir' is the base directory in
// which to place the package; it should be a physical path on disk, not an
// import path. e.g.: '/path/to/home/path/to/gopath/src/' The package knows its
// import path already, this will be appended to 'outDir'.
//
// Every generator in the package is run; generators writing to the same
// filename share one *File (and must agree on its file type). Assembly/verify
// errors are collected per file and returned as one aggregated error.
func (c *Context) ExecutePackage(outDir string, p Package) error {
	path := filepath.Join(outDir, p.Path())
	klog.V(2).Infof("Processing package %q, disk location %q", p.Name(), path)
	// Filter out any types the *package* doesn't care about.
	packageContext := c.filteredBy(p.Filter)
	// Fixed: the MkdirAll error was silently discarded; fail early with the
	// real cause instead of a confusing os.Create failure later.
	if err := os.MkdirAll(path, 0755); err != nil {
		return err
	}
	files := map[string]*File{}
	for _, g := range p.Generators(packageContext) {
		// Filter out types the *generator* doesn't care about.
		genContext := packageContext.filteredBy(g.Filter)
		// Now add any extra name systems defined by this generator
		genContext = genContext.addNameSystems(g.Namers(genContext))

		fileType := g.FileType()
		if len(fileType) == 0 {
			return fmt.Errorf("generator %q must specify a file type", g.Name())
		}
		f := files[g.Filename()]
		if f == nil {
			// This is the first generator to reference this file, so start it.
			f = &File{
				Name:              g.Filename(),
				FileType:          fileType,
				PackageName:       p.Name(),
				PackagePath:       p.Path(),
				PackageSourcePath: p.SourcePath(),
				Header:            p.Header(g.Filename()),
				Imports:           map[string]struct{}{},
			}
			files[f.Name] = f
		} else {
			// A later generator may append to an existing file, but only
			// if it agrees about how that file is assembled.
			if f.FileType != g.FileType() {
				return fmt.Errorf("file %q already has type %q, but generator %q wants to use type %q", f.Name, f.FileType, g.Name(), g.FileType())
			}
		}

		if vars := g.PackageVars(genContext); len(vars) > 0 {
			addIndentHeaderComment(&f.Vars, "Package-wide variables from generator %q.", g.Name())
			for _, v := range vars {
				if _, err := fmt.Fprintf(&f.Vars, "%s\n", v); err != nil {
					return err
				}
			}
		}
		if consts := g.PackageConsts(genContext); len(consts) > 0 {
			addIndentHeaderComment(&f.Consts, "Package-wide consts from generator %q.", g.Name())
			for _, v := range consts {
				if _, err := fmt.Fprintf(&f.Consts, "%s\n", v); err != nil {
					return err
				}
			}
		}
		if err := genContext.executeBody(&f.Body, g); err != nil {
			return err
		}
		if imports := g.Imports(genContext); len(imports) > 0 {
			for _, i := range imports {
				f.Imports[i] = struct{}{}
			}
		}
	}

	// Assemble (or, in verify mode, just compare) every file that was built.
	var errors []error
	for _, f := range files {
		finalPath := filepath.Join(path, f.Name)
		assembler, ok := c.FileTypes[f.FileType]
		if !ok {
			return fmt.Errorf("the file type %q registered for file %q does not exist in the context", f.FileType, f.Name)
		}
		var err error
		if c.Verify {
			err = assembler.VerifyFile(f, finalPath)
		} else {
			err = assembler.AssembleFile(f, finalPath)
		}
		if err != nil {
			errors = append(errors, err)
		}
	}
	if len(errors) > 0 {
		return fmt.Errorf("errors in package %q:\n%v\n", p.Path(), strings.Join(errs2strings(errors), "\n"))
	}
	return nil
}
// executeBody runs one generator's Init / GenerateType (once per type in
// c.Order) / Finalize sequence, writing output to w. Generator errors abort
// immediately; write errors on w are accumulated by the ErrorTracker and
// surfaced only at the end.
func (c *Context) executeBody(w io.Writer, generator Generator) error {
	et := NewErrorTracker(w)
	if err := generator.Init(c, et); err != nil {
		return err
	}
	for _, t := range c.Order {
		if err := generator.GenerateType(c, t, et); err != nil {
			return err
		}
	}
	if err := generator.Finalize(c, et); err != nil {
		return err
	}
	// Any write failure observed by the tracker during the steps above.
	return et.Error()
}
| {
"pile_set_name": "Github"
} |
// Navi route table for the site: maps URL paths to lazily-loaded MDX views.
import { mount, route } from 'navi'

export default mount({
  // Landing page, loaded on demand.
  '/': route({
    title: "React Site",
    getView: () => import('./index.mdx')
  }),
  '/getting-started': route({
    title: "Getting Started",
    getView: async () => {
      // This simulates some async content loading, so that
      // you can test the site's loading bar.
      await new Promise(resolve => setTimeout(resolve, 1000))
      return import('./getting-started.mdx')
    }
  }),
})
"pile_set_name": "Github"
} |
# 蒸馏通道剪裁模型教程
该文档介绍如何使用[PaddleSlim](https://paddlepaddle.github.io/PaddleSlim)的蒸馏接口和卷积通道剪裁接口对检测库中的模型进行卷积层的通道剪裁并使用较高精度模型对其蒸馏。
在阅读该示例前,建议您先了解以下内容:
- [检测库的使用方法](https://github.com/PaddlePaddle/PaddleDetection)
- [PaddleSlim通道剪裁API文档](https://paddlepaddle.github.io/PaddleSlim/api/prune_api/)
- [PaddleSlim蒸馏API文档](https://paddlepaddle.github.io/PaddleSlim/api/single_distiller_api/)
- [检测库模型通道剪裁文档](../../prune/README.md)
- [检测库模型蒸馏文档](../../distillation/README.md)
请确保已正确[安装PaddleDetection](../../../docs/tutorials/INSTALL_cn.md)及其依赖。
已发布蒸馏通道剪裁模型见[压缩模型库](../../README.md)
蒸馏通道剪裁模型示例见[Ipython notebook示例](./distill_pruned_model_demo.ipynb)
## 1. 数据准备
请参考检测库[数据下载](../../../docs/tutorials/INSTALL_cn.md)文档准备数据。
## 2. 模型选择
通过`-c`选项指定待剪裁模型的配置文件的相对路径,更多可选配置文件请参考: [检测库配置文件](../../../configs)。
蒸馏通道剪裁模型中,我们使用原模型全量权重来初始化待剪裁模型,已发布模型的权重可在[模型库](../../../docs/MODEL_ZOO_cn.md)中获取。
通过`-o pretrain_weights`指定待剪裁模型的预训练权重,可以指定url或本地文件系统的路径。如下所示:
```
-o pretrain_weights=https://paddlemodels.bj.bcebos.com/object_detection/yolov3_mobilenet_v1_voc.tar
```
或
```
-o pretrain_weights=output/yolov3_mobilenet_v1_voc/model_final
```
## 4. 启动蒸馏剪裁任务
使用`distill_pruned_model.py`启动蒸馏剪裁任务时,通过`--pruned_params`选项指定待剪裁的参数名称列表,参数名之间用空格分隔,通过`--pruned_ratios`选项指定各个参数被裁掉的比例。 获取待裁剪模型参数名称方法可参考[通道剪裁模型教程](../../prune/README.md)。
通过`-t`参数指定teacher模型配置文件,`--teacher_pretrained`指定teacher模型权重,更多关于蒸馏模型设置可参考[模型蒸馏文档](../../distillation/README.md)。
蒸馏通道检测模型脚本目前只支持使用YOLOv3细粒度损失训练,即训练过程中须指定`-o use_fine_grained_loss=true`。
```
python distill_pruned_model.py \
-c ../../../configs/yolov3_mobilenet_v1_voc.yml \
-t ../../../configs/yolov3_r34_voc.yml \
--teacher_pretrained=https://paddlemodels.bj.bcebos.com/object_detection/yolov3_r34_voc.tar \
--pruned_params "yolo_block.0.0.0.conv.weights,yolo_block.0.0.1.conv.weights,yolo_block.0.1.0.conv.weights" \
--pruned_ratios="0.2,0.3,0.4" \
-o use_fine_grained_loss=true pretrain_weights=https://paddlemodels.bj.bcebos.com/object_detection/yolov3_mobilenet_v1_voc.tar
```
## 5. 评估模型
由于产出模型为通道剪裁模型,训练完成后,可通过通道剪裁中提供的评估脚本`../../prune/eval.py`评估模型精度,通过`--pruned_params`和`--pruned_ratios`指定剪裁的参数名称列表和各参数剪裁比例。
```
python ../../prune/eval.py \
-c ../../../configs/yolov3_mobilenet_v1_voc.yml \
--pruned_params "yolo_block.0.0.0.conv.weights,yolo_block.0.0.1.conv.weights,yolo_block.0.1.0.conv.weights" \
--pruned_ratios="0.2,0.3,0.4" \
-o weights=output/yolov3_mobilenet_v1_voc/model_final
```
## 6. 模型导出
如果想要将剪裁模型接入到C++预测库或者Serving服务,可通过`../../prune/export_model.py`导出该模型。
```
python ../../prune/export_model.py \
-c ../../../configs/yolov3_mobilenet_v1_voc.yml \
--pruned_params "yolo_block.0.0.0.conv.weights,yolo_block.0.0.1.conv.weights,yolo_block.0.1.0.conv.weights" \
--pruned_ratios="0.2,0.3,0.4" \
-o weights=output/yolov3_mobilenet_v1_voc/model_final
```
| {
"pile_set_name": "Github"
} |
/* EpicEditor preview pane — dark theme: black background, light grey text. */
html { padding:0 10px; }
body {
  margin:0;
  padding:10px 0;
  background:#000;
}

/* Reset default margins/padding on the preview's block elements. */
#epiceditor-preview h1,
#epiceditor-preview h2,
#epiceditor-preview h3,
#epiceditor-preview h4,
#epiceditor-preview h5,
#epiceditor-preview h6,
#epiceditor-preview p,
#epiceditor-preview blockquote {
  margin: 0;
  padding: 0;
}

/* Base typography for the rendered-markdown container. */
#epiceditor-preview {
  background:#000;
  font-family: "Helvetica Neue", Helvetica, "Hiragino Sans GB", Arial, sans-serif;
  font-size: 13px;
  line-height: 18px;
  color: #ccc;
}

/* Links: white by default, green on hover, no border on linked images. */
#epiceditor-preview a {
  color: #fff;
}
#epiceditor-preview a:hover {
  color: #00ff00;
  text-decoration: none;
}
#epiceditor-preview a img {
  border: none;
}
#epiceditor-preview p {
  margin-bottom: 9px;
}

/* Headings: shared color/line-height, font size steps down from h1 to h6. */
#epiceditor-preview h1,
#epiceditor-preview h2,
#epiceditor-preview h3,
#epiceditor-preview h4,
#epiceditor-preview h5,
#epiceditor-preview h6 {
  color: #cdcdcd;
  line-height: 36px;
}
#epiceditor-preview h1 {
  margin-bottom: 18px;
  font-size: 30px;
}
#epiceditor-preview h2 {
  font-size: 24px;
}
#epiceditor-preview h3 {
  font-size: 18px;
}
#epiceditor-preview h4 {
  font-size: 16px;
}
#epiceditor-preview h5 {
  font-size: 14px;
}
#epiceditor-preview h6 {
  font-size: 13px;
}
#epiceditor-preview hr {
  margin: 0 0 19px;
  border: 0;
  border-bottom: 1px solid #ccc;
}

/* Blockquotes: serif italic with a decorative opening quote mark. */
#epiceditor-preview blockquote {
  padding: 13px 13px 21px 15px;
  margin-bottom: 18px;
  font-family:georgia,serif;
  font-style: italic;
}
#epiceditor-preview blockquote:before {
  content:"\201C"; /* left double quotation mark */
  font-size:40px;
  margin-left:-10px;
  font-family:georgia,serif;
  color:#eee;
}
#epiceditor-preview blockquote p {
  font-size: 14px;
  font-weight: 300;
  line-height: 18px;
  margin-bottom: 0;
  font-style: italic;
}

/* Code: inline spans are pink on black; fenced blocks get a bordered box. */
#epiceditor-preview code, #epiceditor-preview pre {
  font-family: Monaco, Andale Mono, Courier New, monospace;
}
#epiceditor-preview code {
  background-color: #000;
  color: #f92672;
  padding: 1px 3px;
  font-size: 12px;
  -webkit-border-radius: 3px;
  -moz-border-radius: 3px;
  border-radius: 3px;
}
#epiceditor-preview pre {
  display: block;
  padding: 14px;
  color:#66d9ef;
  margin: 0 0 18px;
  line-height: 16px;
  font-size: 11px;
  border: 1px solid #d9d9d9;
  white-space: pre-wrap;
  word-wrap: break-word;
}
/* Code inside a pre block: undo the inline-code accent color/padding. */
#epiceditor-preview pre code {
  background-color: #000;
  color:#ccc;
  font-size: 11px;
  padding: 0;
}
| {
"pile_set_name": "Github"
} |
/*
* OpenSpeedMonitor (OSM)
* Copyright 2014 iteratec GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.iteratec.osm.csi
/**
 * Bookkeeping record for one nightly calculation run: how many values were
 * calculated and the time window the run covered.
 *
 * NOTE(review): "Mv" presumably abbreviates "measured value" — confirm
 * against the CSI calculation code that persists this domain class.
 */
class NightlyMvCalculation {
    // Number of values calculated by this nightly run.
    Integer calculatedMvs
    // Start of the calculated time window.
    Date calculatedFrom
    // End of the calculated time window.
    Date calculatedUntil
}
| {
"pile_set_name": "Github"
} |
# -*- coding: utf-8 -*-
from __future__ import division, absolute_import, print_function
import sys
import gc
import pytest
import numpy as np
from numpy.testing import (
assert_, assert_equal, assert_raises, assert_warns, HAS_REFCOUNT,
assert_raises_regex,
)
import textwrap
class TestArrayRepr(object):
    """Tests for repr()/str() of ndarrays: nan/inf, ndarray subclasses,
    object arrays (including self-referential ones), void scalars and
    fieldless structured dtypes."""

    def test_nan_inf(self):
        # nan/inf print without a sign column when all values are "positive".
        x = np.array([np.nan, np.inf])
        assert_equal(repr(x), 'array([nan, inf])')

    def test_subclass(self):
        class sub(np.ndarray): pass

        # one dimensional
        x1d = np.array([1, 2]).view(sub)
        assert_equal(repr(x1d), 'sub([1, 2])')

        # two dimensional
        x2d = np.array([[1, 2], [3, 4]]).view(sub)
        assert_equal(repr(x2d),
            'sub([[1, 2],\n'
            ' [3, 4]])')

        # two dimensional with flexible dtype
        xstruct = np.ones((2,2), dtype=[('a', '<i4')]).view(sub)
        assert_equal(repr(xstruct),
            "sub([[(1,), (1,)],\n"
            " [(1,), (1,)]], dtype=[('a', '<i4')])"
        )

    @pytest.mark.xfail(reason="See gh-10544")
    def test_object_subclass(self):
        # Subclass whose __getitem__ re-wraps results in the subclass.
        class sub(np.ndarray):
            def __new__(cls, inp):
                obj = np.asarray(inp).view(cls)
                return obj

            def __getitem__(self, ind):
                ret = super(sub, self).__getitem__(ind)
                return sub(ret)

        # test that object + subclass is OK:
        x = sub([None, None])
        assert_equal(repr(x), 'sub([None, None], dtype=object)')
        assert_equal(str(x), '[None None]')

        x = sub([None, sub([None, None])])
        assert_equal(repr(x),
            'sub([None, sub([None, None], dtype=object)], dtype=object)')
        assert_equal(str(x), '[None sub([None, None], dtype=object)]')

    def test_0d_object_subclass(self):
        # make sure that subclasses which return 0ds instead
        # of scalars don't cause infinite recursion in str
        class sub(np.ndarray):
            def __new__(cls, inp):
                obj = np.asarray(inp).view(cls)
                return obj

            def __getitem__(self, ind):
                ret = super(sub, self).__getitem__(ind)
                return sub(ret)

        x = sub(1)
        assert_equal(repr(x), 'sub(1)')
        assert_equal(str(x), '1')

        x = sub([1, 1])
        assert_equal(repr(x), 'sub([1, 1])')
        assert_equal(str(x), '[1 1]')

        # check it works properly with object arrays too
        x = sub(None)
        assert_equal(repr(x), 'sub(None, dtype=object)')
        assert_equal(str(x), 'None')

        # plus recursive object arrays (even depth > 1)
        y = sub(None)
        x[()] = y
        y[()] = x
        assert_equal(repr(x),
            'sub(sub(sub(..., dtype=object), dtype=object), dtype=object)')
        assert_equal(str(x), '...')
        x[()] = 0  # resolve circular references for garbage collector

        # nested 0d-subclass-object
        x = sub(None)
        x[()] = sub(None)
        assert_equal(repr(x), 'sub(sub(None, dtype=object), dtype=object)')
        assert_equal(str(x), 'None')

        # gh-10663
        class DuckCounter(np.ndarray):
            def __getitem__(self, item):
                result = super(DuckCounter, self).__getitem__(item)
                if not isinstance(result, DuckCounter):
                    result = result[...].view(DuckCounter)
                return result

            def to_string(self):
                return {0: 'zero', 1: 'one', 2: 'two'}.get(self.item(), 'many')

            def __str__(self):
                if self.shape == ():
                    return self.to_string()
                else:
                    fmt = {'all': lambda x: x.to_string()}
                    return np.array2string(self, formatter=fmt)

        dc = np.arange(5).view(DuckCounter)
        assert_equal(str(dc), "[zero one two many many]")
        assert_equal(str(dc[0]), "zero")

    def test_self_containing(self):
        # Arrays containing themselves must print '...' instead of recursing.
        arr0d = np.array(None)
        arr0d[()] = arr0d
        assert_equal(repr(arr0d),
            'array(array(..., dtype=object), dtype=object)')
        arr0d[()] = 0  # resolve recursion for garbage collector

        arr1d = np.array([None, None])
        arr1d[1] = arr1d
        assert_equal(repr(arr1d),
            'array([None, array(..., dtype=object)], dtype=object)')
        arr1d[1] = 0  # resolve recursion for garbage collector

        first = np.array(None)
        second = np.array(None)
        first[()] = second
        second[()] = first
        assert_equal(repr(first),
            'array(array(array(..., dtype=object), dtype=object), dtype=object)')
        first[()] = 0  # resolve circular references for garbage collector

    def test_containing_list(self):
        # printing square brackets directly would be ambiguous
        arr1d = np.array([None, None])
        arr1d[0] = [1, 2]
        arr1d[1] = [3]
        assert_equal(repr(arr1d),
            'array([list([1, 2]), list([3])], dtype=object)')

    def test_void_scalar_recursion(self):
        # gh-9345
        repr(np.void(b'test'))  # RecursionError ?

    def test_fieldless_structured(self):
        # gh-10366
        no_fields = np.dtype([])
        arr_no_fields = np.empty(4, dtype=no_fields)
        assert_equal(repr(arr_no_fields), 'array([(), (), (), ()], dtype=[])')
class TestComplexArray(object):
    """Check str() of 1-element complex arrays for every combination of
    special values (0, ±1, ±inf, nan) in real and imaginary parts, for each
    of the three complex dtypes (which should all format identically)."""

    def test_str(self):
        rvals = [0, 1, -1, np.inf, -np.inf, np.nan]
        cvals = [complex(rp, ip) for rp in rvals for ip in rvals]
        dtypes = [np.complex64, np.cdouble, np.clongdouble]
        actual = [str(np.array([c], dt)) for c in cvals for dt in dtypes]
        # Expected strings, three per complex value (one per dtype).
        wanted = [
            '[0.+0.j]', '[0.+0.j]', '[0.+0.j]',
            '[0.+1.j]', '[0.+1.j]', '[0.+1.j]',
            '[0.-1.j]', '[0.-1.j]', '[0.-1.j]',
            '[0.+infj]', '[0.+infj]', '[0.+infj]',
            '[0.-infj]', '[0.-infj]', '[0.-infj]',
            '[0.+nanj]', '[0.+nanj]', '[0.+nanj]',
            '[1.+0.j]', '[1.+0.j]', '[1.+0.j]',
            '[1.+1.j]', '[1.+1.j]', '[1.+1.j]',
            '[1.-1.j]', '[1.-1.j]', '[1.-1.j]',
            '[1.+infj]', '[1.+infj]', '[1.+infj]',
            '[1.-infj]', '[1.-infj]', '[1.-infj]',
            '[1.+nanj]', '[1.+nanj]', '[1.+nanj]',
            '[-1.+0.j]', '[-1.+0.j]', '[-1.+0.j]',
            '[-1.+1.j]', '[-1.+1.j]', '[-1.+1.j]',
            '[-1.-1.j]', '[-1.-1.j]', '[-1.-1.j]',
            '[-1.+infj]', '[-1.+infj]', '[-1.+infj]',
            '[-1.-infj]', '[-1.-infj]', '[-1.-infj]',
            '[-1.+nanj]', '[-1.+nanj]', '[-1.+nanj]',
            '[inf+0.j]', '[inf+0.j]', '[inf+0.j]',
            '[inf+1.j]', '[inf+1.j]', '[inf+1.j]',
            '[inf-1.j]', '[inf-1.j]', '[inf-1.j]',
            '[inf+infj]', '[inf+infj]', '[inf+infj]',
            '[inf-infj]', '[inf-infj]', '[inf-infj]',
            '[inf+nanj]', '[inf+nanj]', '[inf+nanj]',
            '[-inf+0.j]', '[-inf+0.j]', '[-inf+0.j]',
            '[-inf+1.j]', '[-inf+1.j]', '[-inf+1.j]',
            '[-inf-1.j]', '[-inf-1.j]', '[-inf-1.j]',
            '[-inf+infj]', '[-inf+infj]', '[-inf+infj]',
            '[-inf-infj]', '[-inf-infj]', '[-inf-infj]',
            '[-inf+nanj]', '[-inf+nanj]', '[-inf+nanj]',
            '[nan+0.j]', '[nan+0.j]', '[nan+0.j]',
            '[nan+1.j]', '[nan+1.j]', '[nan+1.j]',
            '[nan-1.j]', '[nan-1.j]', '[nan-1.j]',
            '[nan+infj]', '[nan+infj]', '[nan+infj]',
            '[nan-infj]', '[nan-infj]', '[nan-infj]',
            '[nan+nanj]', '[nan+nanj]', '[nan+nanj]']

        for res, val in zip(actual, wanted):
            assert_equal(res, val)
class TestArray2String(object):
    """Tests for np.array2string: kwargs validation, custom formatters,
    structured dtypes, summarization, line wrapping, and refcount safety."""

    def test_basic(self):
        """Basic test of array2string."""
        a = np.arange(3)
        assert_(np.array2string(a) == '[0 1 2]')
        assert_(np.array2string(a, max_line_width=4, legacy='1.13') == '[0 1\n 2]')
        assert_(np.array2string(a, max_line_width=4) == '[0\n 1\n 2]')

    def test_unexpected_kwarg(self):
        # ensure that an appropriate TypeError
        # is raised when array2string receives
        # an unexpected kwarg
        with assert_raises_regex(TypeError, 'nonsense'):
            np.array2string(np.array([1, 2, 3]),
                nonsense=None)

    def test_format_function(self):
        """Test custom format function for each element in array."""
        def _format_function(x):
            if np.abs(x) < 1:
                return '.'
            elif np.abs(x) < 2:
                return 'o'
            else:
                return 'O'

        x = np.arange(3)
        # Python 2 longs repr with a trailing 'L'.
        if sys.version_info[0] >= 3:
            x_hex = "[0x0 0x1 0x2]"
            x_oct = "[0o0 0o1 0o2]"
        else:
            x_hex = "[0x0L 0x1L 0x2L]"
            x_oct = "[0L 01L 02L]"
        assert_(np.array2string(x, formatter={'all':_format_function}) ==
                "[. o O]")
        assert_(np.array2string(x, formatter={'int_kind':_format_function}) ==
                "[. o O]")
        assert_(np.array2string(x, formatter={'all':lambda x: "%.4f" % x}) ==
                "[0.0000 1.0000 2.0000]")
        assert_equal(np.array2string(x, formatter={'int':lambda x: hex(x)}),
                x_hex)
        assert_equal(np.array2string(x, formatter={'int':lambda x: oct(x)}),
                x_oct)

        x = np.arange(3.)
        assert_(np.array2string(x, formatter={'float_kind':lambda x: "%.2f" % x}) ==
                "[0.00 1.00 2.00]")
        assert_(np.array2string(x, formatter={'float':lambda x: "%.2f" % x}) ==
                "[0.00 1.00 2.00]")

        s = np.array(['abc', 'def'])
        assert_(np.array2string(s, formatter={'numpystr':lambda s: s*2}) ==
                '[abcabc defdef]')

    def test_structure_format(self):
        dt = np.dtype([('name', np.str_, 16), ('grades', np.float64, (2,))])
        x = np.array([('Sarah', (8.0, 7.0)), ('John', (6.0, 7.0))], dtype=dt)
        assert_equal(np.array2string(x),
                "[('Sarah', [8., 7.]) ('John', [6., 7.])]")

        np.set_printoptions(legacy='1.13')
        try:
            # for issue #5692
            A = np.zeros(shape=10, dtype=[("A", "M8[s]")])
            A[5:].fill(np.datetime64('NaT'))
            assert_equal(
                np.array2string(A),
                textwrap.dedent("""\
                [('1970-01-01T00:00:00',) ('1970-01-01T00:00:00',) ('1970-01-01T00:00:00',)
                ('1970-01-01T00:00:00',) ('1970-01-01T00:00:00',) ('NaT',) ('NaT',)
                ('NaT',) ('NaT',) ('NaT',)]""")
            )
        finally:
            np.set_printoptions(legacy=False)

        # same again, but with non-legacy behavior
        assert_equal(
            np.array2string(A),
            textwrap.dedent("""\
            [('1970-01-01T00:00:00',) ('1970-01-01T00:00:00',)
            ('1970-01-01T00:00:00',) ('1970-01-01T00:00:00',)
            ('1970-01-01T00:00:00',) ( 'NaT',)
            ( 'NaT',) ( 'NaT',)
            ( 'NaT',) ( 'NaT',)]""")
        )

        # and again, with timedeltas
        A = np.full(10, 123456, dtype=[("A", "m8[s]")])
        A[5:].fill(np.datetime64('NaT'))
        assert_equal(
            np.array2string(A),
            textwrap.dedent("""\
            [(123456,) (123456,) (123456,) (123456,) (123456,) ( 'NaT',) ( 'NaT',)
            ( 'NaT',) ( 'NaT',) ( 'NaT',)]""")
        )

        # See #8160
        struct_int = np.array([([1, -1],), ([123, 1],)], dtype=[('B', 'i4', 2)])
        assert_equal(np.array2string(struct_int),
                "[([ 1, -1],) ([123, 1],)]")
        struct_2dint = np.array([([[0, 1], [2, 3]],), ([[12, 0], [0, 0]],)],
                dtype=[('B', 'i4', (2, 2))])
        assert_equal(np.array2string(struct_2dint),
                "[([[ 0, 1], [ 2, 3]],) ([[12, 0], [ 0, 0]],)]")

        # See #8172
        array_scalar = np.array(
                (1., 2.1234567890123456789, 3.), dtype=('f8,f8,f8'))
        assert_equal(np.array2string(array_scalar), "(1., 2.12345679, 3.)")

    def test_unstructured_void_repr(self):
        # Unstructured voids round-trip through repr/eval.
        a = np.array([27, 91, 50, 75, 7, 65, 10, 8,
                      27, 91, 51, 49,109, 82,101,100], dtype='u1').view('V8')
        assert_equal(repr(a[0]), r"void(b'\x1B\x5B\x32\x4B\x07\x41\x0A\x08')")
        assert_equal(str(a[0]), r"b'\x1B\x5B\x32\x4B\x07\x41\x0A\x08'")
        assert_equal(repr(a),
            r"array([b'\x1B\x5B\x32\x4B\x07\x41\x0A\x08'," "\n"
            r" b'\x1B\x5B\x33\x31\x6D\x52\x65\x64'], dtype='|V8')")

        assert_equal(eval(repr(a), vars(np)), a)
        assert_equal(eval(repr(a[0]), vars(np)), a[0])

    def test_edgeitems_kwarg(self):
        # previously the global print options would be taken over the kwarg
        arr = np.zeros(3, int)
        assert_equal(
            np.array2string(arr, edgeitems=1, threshold=0),
            "[0 ... 0]"
        )

    def test_summarize_1d(self):
        A = np.arange(1001)
        strA = '[ 0 1 2 ... 998 999 1000]'
        assert_equal(str(A), strA)

        reprA = 'array([ 0, 1, 2, ..., 998, 999, 1000])'
        assert_equal(repr(A), reprA)

    def test_summarize_2d(self):
        A = np.arange(1002).reshape(2, 501)
        strA = '[[ 0 1 2 ... 498 499 500]\n' \
               ' [ 501 502 503 ... 999 1000 1001]]'
        assert_equal(str(A), strA)

        reprA = 'array([[ 0, 1, 2, ..., 498, 499, 500],\n' \
                ' [ 501, 502, 503, ..., 999, 1000, 1001]])'
        assert_equal(repr(A), reprA)

    def test_linewidth(self):
        a = np.full(6, 1)

        def make_str(a, width, **kw):
            return np.array2string(a, separator="", max_line_width=width, **kw)

        assert_equal(make_str(a, 8, legacy='1.13'), '[111111]')
        assert_equal(make_str(a, 7, legacy='1.13'), '[111111]')
        assert_equal(make_str(a, 5, legacy='1.13'), '[1111\n'
                                                    ' 11]')

        assert_equal(make_str(a, 8), '[111111]')
        assert_equal(make_str(a, 7), '[11111\n'
                                     ' 1]')
        assert_equal(make_str(a, 5), '[111\n'
                                     ' 111]')

        b = a[None,None,:]

        assert_equal(make_str(b, 12, legacy='1.13'), '[[[111111]]]')
        assert_equal(make_str(b, 9, legacy='1.13'), '[[[111111]]]')
        assert_equal(make_str(b, 8, legacy='1.13'), '[[[11111\n'
                                                    ' 1]]]')

        assert_equal(make_str(b, 12), '[[[111111]]]')
        assert_equal(make_str(b, 9), '[[[111\n'
                                     ' 111]]]')
        assert_equal(make_str(b, 8), '[[[11\n'
                                     ' 11\n'
                                     ' 11]]]')

    def test_wide_element(self):
        a = np.array(['xxxxx'])
        assert_equal(
            np.array2string(a, max_line_width=5),
            "['xxxxx']"
        )
        assert_equal(
            np.array2string(a, max_line_width=5, legacy='1.13'),
            "[ 'xxxxx']"
        )

    @pytest.mark.skipif(not HAS_REFCOUNT, reason="Python lacks refcounts")
    def test_refcount(self):
        # make sure we do not hold references to the array due to a recursive
        # closure (gh-10620)
        gc.disable()
        a = np.arange(2)
        r1 = sys.getrefcount(a)
        np.array2string(a)
        np.array2string(a)
        r2 = sys.getrefcount(a)
        gc.collect()
        gc.enable()
        assert_(r1 == r2)
class TestPrintOptions(object):
"""Test getting and setting global print options."""
def setup(self):
self.oldopts = np.get_printoptions()
def teardown(self):
np.set_printoptions(**self.oldopts)
def test_basic(self):
x = np.array([1.5, 0, 1.234567890])
assert_equal(repr(x), "array([1.5 , 0. , 1.23456789])")
np.set_printoptions(precision=4)
assert_equal(repr(x), "array([1.5 , 0. , 1.2346])")
def test_precision_zero(self):
np.set_printoptions(precision=0)
for values, string in (
([0.], "0."), ([.3], "0."), ([-.3], "-0."), ([.7], "1."),
([1.5], "2."), ([-1.5], "-2."), ([-15.34], "-15."),
([100.], "100."), ([.2, -1, 122.51], " 0., -1., 123."),
([0], "0"), ([-12], "-12"), ([complex(.3, -.7)], "0.-1.j")):
x = np.array(values)
assert_equal(repr(x), "array([%s])" % string)
def test_formatter(self):
x = np.arange(3)
np.set_printoptions(formatter={'all':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1, 0, 1])")
def test_formatter_reset(self):
x = np.arange(3)
np.set_printoptions(formatter={'all':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1, 0, 1])")
np.set_printoptions(formatter={'int':None})
assert_equal(repr(x), "array([0, 1, 2])")
np.set_printoptions(formatter={'all':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1, 0, 1])")
np.set_printoptions(formatter={'all':None})
assert_equal(repr(x), "array([0, 1, 2])")
np.set_printoptions(formatter={'int':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1, 0, 1])")
np.set_printoptions(formatter={'int_kind':None})
assert_equal(repr(x), "array([0, 1, 2])")
x = np.arange(3.)
np.set_printoptions(formatter={'float':lambda x: str(x-1)})
assert_equal(repr(x), "array([-1.0, 0.0, 1.0])")
np.set_printoptions(formatter={'float_kind':None})
assert_equal(repr(x), "array([0., 1., 2.])")
def test_0d_arrays(self):
unicode = type(u'')
assert_equal(unicode(np.array(u'café', '<U4')), u'café')
if sys.version_info[0] >= 3:
assert_equal(repr(np.array('café', '<U4')),
"array('café', dtype='<U4')")
else:
assert_equal(repr(np.array(u'café', '<U4')),
"array(u'caf\\xe9', dtype='<U4')")
assert_equal(str(np.array('test', np.str_)), 'test')
a = np.zeros(1, dtype=[('a', '<i4', (3,))])
assert_equal(str(a[0]), '([0, 0, 0],)')
assert_equal(repr(np.datetime64('2005-02-25')[...]),
"array('2005-02-25', dtype='datetime64[D]')")
assert_equal(repr(np.timedelta64('10', 'Y')[...]),
"array(10, dtype='timedelta64[Y]')")
# repr of 0d arrays is affected by printoptions
x = np.array(1)
np.set_printoptions(formatter={'all':lambda x: "test"})
assert_equal(repr(x), "array(test)")
# str is unaffected
assert_equal(str(x), "1")
# check `style` arg raises
assert_warns(DeprecationWarning, np.array2string,
np.array(1.), style=repr)
# but not in legacy mode
np.array2string(np.array(1.), style=repr, legacy='1.13')
# gh-10934 style was broken in legacy mode, check it works
np.array2string(np.array(1.), legacy='1.13')
def test_float_spacing(self):
x = np.array([1., 2., 3.])
y = np.array([1., 2., -10.])
z = np.array([100., 2., -1.])
w = np.array([-100., 2., 1.])
assert_equal(repr(x), 'array([1., 2., 3.])')
assert_equal(repr(y), 'array([ 1., 2., -10.])')
assert_equal(repr(np.array(y[0])), 'array(1.)')
assert_equal(repr(np.array(y[-1])), 'array(-10.)')
assert_equal(repr(z), 'array([100., 2., -1.])')
assert_equal(repr(w), 'array([-100., 2., 1.])')
assert_equal(repr(np.array([np.nan, np.inf])), 'array([nan, inf])')
assert_equal(repr(np.array([np.nan, -np.inf])), 'array([ nan, -inf])')
x = np.array([np.inf, 100000, 1.1234])
y = np.array([np.inf, 100000, -1.1234])
z = np.array([np.inf, 1.1234, -1e120])
np.set_printoptions(precision=2)
assert_equal(repr(x), 'array([ inf, 1.00e+05, 1.12e+00])')
assert_equal(repr(y), 'array([ inf, 1.00e+05, -1.12e+00])')
assert_equal(repr(z), 'array([ inf, 1.12e+000, -1.00e+120])')
def test_bool_spacing(self):
assert_equal(repr(np.array([True, True])),
'array([ True, True])')
assert_equal(repr(np.array([True, False])),
'array([ True, False])')
assert_equal(repr(np.array([True])),
'array([ True])')
assert_equal(repr(np.array(True)),
'array(True)')
assert_equal(repr(np.array(False)),
'array(False)')
def test_sign_spacing(self):
a = np.arange(4.)
b = np.array([1.234e9])
c = np.array([1.0 + 1.0j, 1.123456789 + 1.123456789j], dtype='c16')
assert_equal(repr(a), 'array([0., 1., 2., 3.])')
assert_equal(repr(np.array(1.)), 'array(1.)')
assert_equal(repr(b), 'array([1.234e+09])')
assert_equal(repr(np.array([0.])), 'array([0.])')
assert_equal(repr(c),
"array([1. +1.j , 1.12345679+1.12345679j])")
assert_equal(repr(np.array([0., -0.])), 'array([ 0., -0.])')
np.set_printoptions(sign=' ')
assert_equal(repr(a), 'array([ 0., 1., 2., 3.])')
assert_equal(repr(np.array(1.)), 'array( 1.)')
assert_equal(repr(b), 'array([ 1.234e+09])')
assert_equal(repr(c),
"array([ 1. +1.j , 1.12345679+1.12345679j])")
assert_equal(repr(np.array([0., -0.])), 'array([ 0., -0.])')
np.set_printoptions(sign='+')
assert_equal(repr(a), 'array([+0., +1., +2., +3.])')
assert_equal(repr(np.array(1.)), 'array(+1.)')
assert_equal(repr(b), 'array([+1.234e+09])')
assert_equal(repr(c),
"array([+1. +1.j , +1.12345679+1.12345679j])")
np.set_printoptions(legacy='1.13')
assert_equal(repr(a), 'array([ 0., 1., 2., 3.])')
assert_equal(repr(b), 'array([ 1.23400000e+09])')
assert_equal(repr(-b), 'array([ -1.23400000e+09])')
assert_equal(repr(np.array(1.)), 'array(1.0)')
assert_equal(repr(np.array([0.])), 'array([ 0.])')
assert_equal(repr(c),
"array([ 1.00000000+1.j , 1.12345679+1.12345679j])")
# gh-10383
assert_equal(str(np.array([-1., 10])), "[ -1. 10.]")
assert_raises(TypeError, np.set_printoptions, wrongarg=True)
def test_float_overflow_nowarn(self):
# make sure internal computations in FloatingFormat don't
# warn about overflow
repr(np.array([1e4, 0.1], dtype='f2'))
def test_sign_spacing_structured(self):
a = np.ones(2, dtype='<f,<f')
assert_equal(repr(a),
"array([(1., 1.), (1., 1.)], dtype=[('f0', '<f4'), ('f1', '<f4')])")
assert_equal(repr(a[0]), "(1., 1.)")
def test_floatmode(self):
x = np.array([0.6104, 0.922, 0.457, 0.0906, 0.3733, 0.007244,
0.5933, 0.947, 0.2383, 0.4226], dtype=np.float16)
y = np.array([0.2918820979355541, 0.5064172631089138,
0.2848750619642916, 0.4342965294660567,
0.7326538397312751, 0.3459503329096204,
0.0862072768214508, 0.39112753029631175],
dtype=np.float64)
z = np.arange(6, dtype=np.float16)/10
c = np.array([1.0 + 1.0j, 1.123456789 + 1.123456789j], dtype='c16')
# also make sure 1e23 is right (is between two fp numbers)
w = np.array(['1e{}'.format(i) for i in range(25)], dtype=np.float64)
# note: we construct w from the strings `1eXX` instead of doing
# `10.**arange(24)` because it turns out the two are not equivalent in
# python. On some architectures `1e23 != 10.**23`.
wp = np.array([1.234e1, 1e2, 1e123])
# unique mode
np.set_printoptions(floatmode='unique')
assert_equal(repr(x),
"array([0.6104 , 0.922 , 0.457 , 0.0906 , 0.3733 , 0.007244,\n"
" 0.5933 , 0.947 , 0.2383 , 0.4226 ], dtype=float16)")
assert_equal(repr(y),
"array([0.2918820979355541 , 0.5064172631089138 , 0.2848750619642916 ,\n"
" 0.4342965294660567 , 0.7326538397312751 , 0.3459503329096204 ,\n"
" 0.0862072768214508 , 0.39112753029631175])")
assert_equal(repr(z),
"array([0. , 0.1, 0.2, 0.3, 0.4, 0.5], dtype=float16)")
assert_equal(repr(w),
"array([1.e+00, 1.e+01, 1.e+02, 1.e+03, 1.e+04, 1.e+05, 1.e+06, 1.e+07,\n"
" 1.e+08, 1.e+09, 1.e+10, 1.e+11, 1.e+12, 1.e+13, 1.e+14, 1.e+15,\n"
" 1.e+16, 1.e+17, 1.e+18, 1.e+19, 1.e+20, 1.e+21, 1.e+22, 1.e+23,\n"
" 1.e+24])")
assert_equal(repr(wp), "array([1.234e+001, 1.000e+002, 1.000e+123])")
assert_equal(repr(c),
"array([1. +1.j , 1.123456789+1.123456789j])")
# maxprec mode, precision=8
np.set_printoptions(floatmode='maxprec', precision=8)
assert_equal(repr(x),
"array([0.6104 , 0.922 , 0.457 , 0.0906 , 0.3733 , 0.007244,\n"
" 0.5933 , 0.947 , 0.2383 , 0.4226 ], dtype=float16)")
assert_equal(repr(y),
"array([0.2918821 , 0.50641726, 0.28487506, 0.43429653, 0.73265384,\n"
" 0.34595033, 0.08620728, 0.39112753])")
assert_equal(repr(z),
"array([0. , 0.1, 0.2, 0.3, 0.4, 0.5], dtype=float16)")
assert_equal(repr(w[::5]),
"array([1.e+00, 1.e+05, 1.e+10, 1.e+15, 1.e+20])")
assert_equal(repr(wp), "array([1.234e+001, 1.000e+002, 1.000e+123])")
assert_equal(repr(c),
"array([1. +1.j , 1.12345679+1.12345679j])")
# fixed mode, precision=4
np.set_printoptions(floatmode='fixed', precision=4)
assert_equal(repr(x),
"array([0.6104, 0.9219, 0.4570, 0.0906, 0.3733, 0.0072, 0.5933, 0.9468,\n"
" 0.2383, 0.4226], dtype=float16)")
assert_equal(repr(y),
"array([0.2919, 0.5064, 0.2849, 0.4343, 0.7327, 0.3460, 0.0862, 0.3911])")
assert_equal(repr(z),
"array([0.0000, 0.1000, 0.2000, 0.3000, 0.3999, 0.5000], dtype=float16)")
assert_equal(repr(w[::5]),
"array([1.0000e+00, 1.0000e+05, 1.0000e+10, 1.0000e+15, 1.0000e+20])")
assert_equal(repr(wp), "array([1.2340e+001, 1.0000e+002, 1.0000e+123])")
assert_equal(repr(np.zeros(3)), "array([0.0000, 0.0000, 0.0000])")
assert_equal(repr(c),
"array([1.0000+1.0000j, 1.1235+1.1235j])")
# for larger precision, representation error becomes more apparent:
np.set_printoptions(floatmode='fixed', precision=8)
assert_equal(repr(z),
"array([0.00000000, 0.09997559, 0.19995117, 0.30004883, 0.39990234,\n"
" 0.50000000], dtype=float16)")
# maxprec_equal mode, precision=8
np.set_printoptions(floatmode='maxprec_equal', precision=8)
assert_equal(repr(x),
"array([0.610352, 0.921875, 0.457031, 0.090576, 0.373291, 0.007244,\n"
" 0.593262, 0.946777, 0.238281, 0.422607], dtype=float16)")
assert_equal(repr(y),
"array([0.29188210, 0.50641726, 0.28487506, 0.43429653, 0.73265384,\n"
" 0.34595033, 0.08620728, 0.39112753])")
assert_equal(repr(z),
"array([0.0, 0.1, 0.2, 0.3, 0.4, 0.5], dtype=float16)")
assert_equal(repr(w[::5]),
"array([1.e+00, 1.e+05, 1.e+10, 1.e+15, 1.e+20])")
assert_equal(repr(wp), "array([1.234e+001, 1.000e+002, 1.000e+123])")
assert_equal(repr(c),
"array([1.00000000+1.00000000j, 1.12345679+1.12345679j])")
def test_legacy_mode_scalars(self):
    # in legacy mode, str of floats get truncated, and complex scalars
    # use * for non-finite imaginary part
    np.set_printoptions(legacy='1.13')
    assert_equal(str(np.float64(1.123456789123456789)), '1.12345678912')
    assert_equal(str(np.complex128(complex(1, np.nan))), '(1+nan*j)')

    # modern mode prints the full repr precision and a plain 'nanj'
    np.set_printoptions(legacy=False)
    assert_equal(str(np.float64(1.123456789123456789)),
                 '1.1234567891234568')
    assert_equal(str(np.complex128(complex(1, np.nan))), '(1+nanj)')
def test_legacy_stray_comma(self):
    # numpy < 1.14 printed a stray comma before the summarization ellipsis
    np.set_printoptions(legacy='1.13')
    assert_equal(str(np.arange(10000)), '[ 0 1 2 ..., 9997 9998 9999]')

    # modern mode drops that comma
    np.set_printoptions(legacy=False)
    assert_equal(str(np.arange(10000)), '[ 0 1 2 ... 9997 9998 9999]')
def test_dtype_linewidth_wrapping(self):
    # the trailing ", dtype=..." suffix participates in linewidth wrapping:
    # the first array fits on one line, the second must wrap before it
    np.set_printoptions(linewidth=75)
    assert_equal(repr(np.arange(10,20., dtype='f4')),
        "array([10., 11., 12., 13., 14., 15., 16., 17., 18., 19.], dtype=float32)")
    assert_equal(repr(np.arange(10,23., dtype='f4')), textwrap.dedent("""\
        array([10., 11., 12., 13., 14., 15., 16., 17., 18., 19., 20., 21., 22.],
              dtype=float32)"""))

    # same check for a string dtype; Python 2 arrays default to bytes strings
    styp = '<U4' if sys.version_info[0] >= 3 else '|S4'
    assert_equal(repr(np.ones(3, dtype=styp)),
        "array(['1', '1', '1'], dtype='{}')".format(styp))
    assert_equal(repr(np.ones(12, dtype=styp)), textwrap.dedent("""\
        array(['1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1', '1'],
              dtype='{}')""".format(styp)))
def test_linewidth_repr(self):
    # repr wrapping honors `linewidth` including the closing bracket;
    # legacy 1.13 mode allowed the last line to run one element wider
    a = np.full(7, fill_value=2)
    np.set_printoptions(linewidth=17)
    assert_equal(
        repr(a),
        textwrap.dedent("""\
        array([2, 2, 2,
               2, 2, 2,
               2])""")
    )
    np.set_printoptions(linewidth=17, legacy='1.13')
    assert_equal(
        repr(a),
        textwrap.dedent("""\
        array([2, 2, 2,
               2, 2, 2, 2])""")
    )

    # an 8-element array at linewidth=18 exercises the same boundary
    a = np.full(8, fill_value=2)
    np.set_printoptions(linewidth=18, legacy=False)
    assert_equal(
        repr(a),
        textwrap.dedent("""\
        array([2, 2, 2,
               2, 2, 2,
               2, 2])""")
    )
    np.set_printoptions(linewidth=18, legacy='1.13')
    assert_equal(
        repr(a),
        textwrap.dedent("""\
        array([2, 2, 2, 2,
               2, 2, 2, 2])""")
    )
def test_linewidth_str(self):
    # str() wrapping: modern mode wraps strictly within `linewidth`,
    # legacy 1.13 mode could exceed it by one element per line
    a = np.full(18, fill_value=2)
    np.set_printoptions(linewidth=18)
    assert_equal(
        str(a),
        textwrap.dedent("""\
        [2 2 2 2 2 2 2 2
         2 2 2 2 2 2 2 2
         2 2]""")
    )
    np.set_printoptions(linewidth=18, legacy='1.13')
    assert_equal(
        str(a),
        textwrap.dedent("""\
        [2 2 2 2 2 2 2 2 2
         2 2 2 2 2 2 2 2 2]""")
    )
def test_edgeitems(self):
    # summarization with the smallest possible edgeitems/threshold values.
    # NOTE(review): the multi-line expected reprs below appear to have lost
    # blank lines and trailing spaces in this copy of the file -- verify the
    # literals against actual numpy output before relying on them.
    np.set_printoptions(edgeitems=1, threshold=1)
    a = np.arange(27).reshape((3, 3, 3))
    assert_equal(
        repr(a),
        textwrap.dedent("""\
        array([[[ 0, ..., 2],
                ...,
                [ 6, ..., 8]],
               ...,
               [[18, ..., 20],
                ...,
                [24, ..., 26]]])""")
    )

    b = np.zeros((3, 3, 1, 1))
    assert_equal(
        repr(b),
        textwrap.dedent("""\
        array([[[[0.]],
                ...,
                [[0.]]],
               ...,
               [[[0.]],
                ...,
                [[0.]]]])""")
    )

    # 1.13 had extra trailing spaces, and was missing newlines
    np.set_printoptions(legacy='1.13')
    assert_equal(
        repr(a),
        textwrap.dedent("""\
        array([[[ 0, ..., 2],
                ...,
                [ 6, ..., 8]],
               ...,
               [[18, ..., 20],
                ...,
                [24, ..., 26]]])""")
    )
    assert_equal(
        repr(b),
        textwrap.dedent("""\
        array([[[[ 0.]],
                ...,
                [[ 0.]]],
               ...,
               [[[ 0.]],
                ...,
                [[ 0.]]]])""")
    )
def test_bad_args(self):
assert_raises(ValueError, np.set_printoptions, threshold=float('nan'))
assert_raises(TypeError, np.set_printoptions, threshold='1')
assert_raises(TypeError, np.set_printoptions, threshold=b'1')
def test_unicode_object_array():
    # repr of an object array holding a unicode scalar: Python 3 shows the
    # character itself, Python 2 an escaped u-literal.
    import sys
    is_py3 = sys.version_info[0] >= 3
    expected = ("array(['é'], dtype=object)" if is_py3
                else "array([u'\\xe9'], dtype=object)")
    arr = np.array([u'\xe9'], dtype=object)
    assert_equal(repr(arr), expected)
class TestContextManager(object):
    """Tests for the ``np.printoptions`` context manager."""

    def test_ctx_mgr(self):
        # the context manager actually applies the requested options
        with np.printoptions(precision=2):
            rendered = str(np.array([2.0]) / 3)
        assert_equal(rendered, '[0.67]')

    def test_ctx_mgr_restores(self):
        # print options are restored to their previous values on exit
        before = np.get_printoptions()
        with np.printoptions(precision=before['precision'] - 1,
                             linewidth=before['linewidth'] - 4):
            pass
        assert_equal(np.get_printoptions(), before)

    def test_ctx_mgr_exceptions(self):
        # print options are restored even if the body raises
        before = np.get_printoptions()
        try:
            with np.printoptions(precision=2, linewidth=11):
                raise ValueError
        except ValueError:
            pass
        assert_equal(np.get_printoptions(), before)

    def test_ctx_mgr_as_smth(self):
        # the context manager yields the options dict in effect inside it
        requested = {"precision": 2}
        with np.printoptions(**requested) as ctx:
            snapshot = ctx.copy()
        assert_equal({k: snapshot[k] for k in requested}, requested)
| {
"pile_set_name": "Github"
} |
package cmd
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"sort"
"strings"
"time"
"github.com/MakeNowJust/heredoc"
docker "github.com/fsouza/go-dockerclient"
"github.com/golang/glog"
"github.com/spf13/cobra"
kapi "k8s.io/kubernetes/pkg/api"
kapierrors "k8s.io/kubernetes/pkg/api/errors"
"k8s.io/kubernetes/pkg/client/restclient"
kclient "k8s.io/kubernetes/pkg/client/unversioned"
ctl "k8s.io/kubernetes/pkg/kubectl"
kcmd "k8s.io/kubernetes/pkg/kubectl/cmd"
kcmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
"k8s.io/kubernetes/pkg/kubectl/resource"
"k8s.io/kubernetes/pkg/runtime"
"k8s.io/kubernetes/pkg/util/errors"
"k8s.io/kubernetes/pkg/util/sets"
"k8s.io/kubernetes/pkg/util/wait"
buildapi "github.com/openshift/origin/pkg/build/api"
cmdutil "github.com/openshift/origin/pkg/cmd/util"
"github.com/openshift/origin/pkg/cmd/util/clientcmd"
dockerutil "github.com/openshift/origin/pkg/cmd/util/docker"
configcmd "github.com/openshift/origin/pkg/config/cmd"
newapp "github.com/openshift/origin/pkg/generate/app"
newcmd "github.com/openshift/origin/pkg/generate/app/cmd"
"github.com/openshift/origin/pkg/generate/git"
imageapi "github.com/openshift/origin/pkg/image/api"
"github.com/openshift/origin/pkg/util"
)
// usage describes types that can produce a usage-error message for the
// named command.
type usage interface {
	UsageError(commandName string) string
}
const (
	// newAppLong is the long help text for 'new-app'; %[1]s is substituted
	// with the root command name.
	// NOTE(review): paragraph-separating blank lines may have been lost in
	// this copy of the file -- verify against the rendered help output.
	newAppLong = `
Create a new application by specifying source code, templates, and/or images
This command will try to build up the components of an application using images, templates,
or code that has a public repository. It will lookup the images on the local Docker installation
(if available), a Docker registry, an integrated image stream, or stored templates.
If you specify a source code URL, it will set up a build that takes your source code and converts
it into an image that can run inside of a pod. Local source must be in a git repository that has a
remote repository that the server can see. The images will be deployed via a deployment
configuration, and a service will be connected to the first public port of the app. You may either specify
components using the various existing flags or let new-app autodetect what kind of components
you have provided.
If you provide source code, a new build will be automatically triggered.
You can use '%[1]s status' to check the progress.`

	// newAppExample holds the examples shown with --help; %[1]s is the
	// root command name.
	newAppExample = `
  # List all local templates and image streams that can be used to create an app
  %[1]s new-app --list

  # Create an application based on the source code in the current git repository (with a public remote)
  # and a Docker image
  %[1]s new-app . --docker-image=repo/langimage

  # Create a Ruby application based on the provided [image]~[source code] combination
  %[1]s new-app centos/ruby-22-centos7~https://github.com/openshift/ruby-ex.git

  # Use the public Docker Hub MySQL image to create an app. Generated artifacts will be labeled with db=mysql
  %[1]s new-app mysql MYSQL_USER=user MYSQL_PASSWORD=pass MYSQL_DATABASE=testdb -l db=mysql

  # Use a MySQL image in a private registry to create an app and override application artifacts' names
  %[1]s new-app --docker-image=myregistry.com/mycompany/mysql --name=private

  # Create an application from a remote repository using its beta4 branch
  %[1]s new-app https://github.com/openshift/ruby-hello-world#beta4

  # Create an application based on a stored template, explicitly setting a parameter value
  %[1]s new-app --template=ruby-helloworld-sample --param=MYSQL_USER=admin

  # Create an application from a remote repository and specify a context directory
  %[1]s new-app https://github.com/youruser/yourgitrepo --context-dir=src/build

  # Create an application based on a template file, explicitly setting a parameter value
  %[1]s new-app --file=./example/myapp/template.json --param=MYSQL_USER=admin

  # Search all templates, image streams, and Docker images for the ones that match "ruby"
  %[1]s new-app --search ruby

  # Search for "ruby", but only in stored templates (--template, --image-stream and --docker-image
  # can be used to filter search results)
  %[1]s new-app --search --template=ruby

  # Search for "ruby" in stored templates and print the output as an YAML
  %[1]s new-app --search --template=ruby --output=yaml`

	// newAppNoInput is printed when the command is invoked with no
	// components at all.
	newAppNoInput = `You must specify one or more images, image streams, templates, or source code locations to create an application.
To list all local templates and image streams, use:
  %[1]s new-app -L
To search templates, image streams, and Docker images that match the arguments provided, use:
  %[1]s new-app -S php
  %[1]s new-app -S --template=ruby
  %[1]s new-app -S --image-stream=mysql
  %[1]s new-app -S --docker-image=python
`
)
// NewAppOptions carries the resolved configuration and output plumbing for
// a single 'new-app' invocation.
type NewAppOptions struct {
	// Action drives bulk creation of the generated objects.
	Action configcmd.BulkAction

	// Config accumulates the user-specified sources, images, and templates.
	Config *newcmd.AppConfig

	// CommandPath is the full invoked command path; CommandName is the root
	// command name used in user-facing messages.
	CommandPath string
	CommandName string

	Out, ErrOut io.Writer
	// Output is the value of the -o/--output flag ("" means human output).
	Output string
	// PrintObject renders created objects when -o/--output is requested.
	PrintObject func(obj runtime.Object) error
	// LogsForObject builds a log-streaming request for an object.
	LogsForObject LogsForObjectFunc
}
// NewCmdNewApplication implements the OpenShift cli new-app command. It
// constructs the cobra command, registers all flags against a fresh
// AppConfig, and wires execution through NewAppOptions.Complete/Run.
func NewCmdNewApplication(commandName string, f *clientcmd.Factory, out io.Writer) *cobra.Command {
	config := newcmd.NewAppConfig()
	config.Deploy = true
	options := &NewAppOptions{Config: config}

	cmd := &cobra.Command{
		Use:        "new-app (IMAGE | IMAGESTREAM | TEMPLATE | PATH | URL ...)",
		Short:      "Create a new application",
		Long:       fmt.Sprintf(newAppLong, commandName),
		Example:    fmt.Sprintf(newAppExample, commandName),
		SuggestFor: []string{"app", "application"},
		Run: func(c *cobra.Command, args []string) {
			kcmdutil.CheckErr(options.Complete(commandName, f, c, args, out))
			err := options.Run()
			// ErrExit signals "already reported"; exit non-zero quietly.
			if err == cmdutil.ErrExit {
				os.Exit(1)
			}
			kcmdutil.CheckErr(err)
		},
	}

	cmd.Flags().BoolVar(&config.AsTestDeployment, "as-test", config.AsTestDeployment, "If true create this application as a test deployment, which validates that the deployment succeeds and then scales down.")
	cmd.Flags().StringSliceVar(&config.SourceRepositories, "code", config.SourceRepositories, "Source code to use to build this application.")
	cmd.Flags().StringVar(&config.ContextDir, "context-dir", "", "Context directory to be used for the build.")
	// --image is the deprecated spelling of --image-stream; both bind to
	// the same slice.
	cmd.Flags().StringSliceVarP(&config.ImageStreams, "image", "", config.ImageStreams, "Name of an image stream to use in the app. (deprecated)")
	cmd.Flags().MarkDeprecated("image", "use --image-stream instead")
	cmd.Flags().StringSliceVarP(&config.ImageStreams, "image-stream", "i", config.ImageStreams, "Name of an image stream to use in the app.")
	cmd.Flags().StringSliceVar(&config.DockerImages, "docker-image", config.DockerImages, "Name of a Docker image to include in the app.")
	cmd.Flags().StringSliceVar(&config.Templates, "template", config.Templates, "Name of a stored template to use in the app.")
	cmd.Flags().StringSliceVarP(&config.TemplateFiles, "file", "f", config.TemplateFiles, "Path to a template file to use for the app.")
	cmd.MarkFlagFilename("file", "yaml", "yml", "json")
	cmd.Flags().StringSliceVarP(&config.TemplateParameters, "param", "p", config.TemplateParameters, "Specify a list of key value pairs (e.g., -p FOO=BAR,BAR=FOO) to set/override parameter values in the template.")
	cmd.Flags().StringSliceVar(&config.Groups, "group", config.Groups, "Indicate components that should be grouped together as <comp1>+<comp2>.")
	cmd.Flags().StringSliceVarP(&config.Environment, "env", "e", config.Environment, "Specify key-value pairs of environment variables to set into each container. This doesn't apply to objects created from a template, use parameters instead.")
	cmd.Flags().StringVar(&config.Name, "name", "", "Set name to use for generated application artifacts")
	cmd.Flags().StringVar(&config.Strategy, "strategy", "", "Specify the build strategy to use if you don't want to detect (docker|source).")
	cmd.Flags().StringP("labels", "l", "", "Label to set in all resources for this application.")
	cmd.Flags().BoolVar(&config.InsecureRegistry, "insecure-registry", false, "If true, indicates that the referenced Docker images are on insecure registries and should bypass certificate checking")
	cmd.Flags().BoolVarP(&config.AsList, "list", "L", false, "List all local templates and image streams that can be used to create.")
	cmd.Flags().BoolVarP(&config.AsSearch, "search", "S", false, "Search all templates, image streams, and Docker images that match the arguments provided.")
	cmd.Flags().BoolVar(&config.AllowMissingImages, "allow-missing-images", false, "If true, indicates that referenced Docker images that cannot be found locally or in a registry should still be used.")
	cmd.Flags().BoolVar(&config.AllowMissingImageStreamTags, "allow-missing-imagestream-tags", false, "If true, indicates that image stream tags that don't exist should still be used.")
	cmd.Flags().BoolVar(&config.AllowSecretUse, "grant-install-rights", false, "If true, a component that requires access to your account may use your token to install software into your project. Only grant images you trust the right to run with your token.")
	cmd.Flags().BoolVar(&config.SkipGeneration, "no-install", false, "Do not attempt to run images that describe themselves as being installable")

	options.Action.BindForOutput(cmd.Flags())
	cmd.Flags().String("output-version", "", "The preferred API versions of the output objects")

	return cmd
}
// Complete sets any default behavior for the command: it wires output
// streams, configures the bulk-create action, and resolves flags and
// arguments into the app config. Returns an error if the arguments or
// label flag cannot be parsed.
func (o *NewAppOptions) Complete(commandName string, f *clientcmd.Factory, c *cobra.Command, args []string, out io.Writer) error {
	o.Out = out
	o.ErrOut = c.OutOrStderr()
	o.Output = kcmdutil.GetFlagString(c, "output")
	// Only output="" should print descriptions of intermediate steps. Everything
	// else should print only some specific output (json, yaml, go-template, ...)
	if len(o.Output) == 0 {
		o.Config.Out = o.Out
	} else {
		o.Config.Out = ioutil.Discard
	}
	o.Config.ErrOut = o.ErrOut

	o.Action.Out, o.Action.ErrOut = o.Out, o.ErrOut
	o.Action.Bulk.Mapper = clientcmd.ResourceMapper(f)
	o.Action.Bulk.Op = configcmd.Create
	// Retry is used to support previous versions of the API server that will
	// consider the presence of an unknown trigger type to be an error.
	o.Action.Bulk.Retry = retryBuildConfig

	o.Config.DryRun = o.Action.DryRun
	o.CommandPath = c.CommandPath()
	o.CommandName = commandName
	mapper, _ := f.Object(false)
	o.PrintObject = cmdutil.VersionedPrintObject(f.PrintObject, c, mapper, out)
	o.LogsForObject = f.LogsForObject
	// Resolve all sources/images/templates from args, then apply any
	// -l/--labels value to the config.
	if err := CompleteAppConfig(o.Config, f, c, args); err != nil {
		return err
	}
	if err := setAppConfigLabels(c, o.Config); err != nil {
		return err
	}
	return nil
}
// Run contains all the necessary functionality for the OpenShift cli
// new-app command. In query mode (--list/--search) it only prints matches;
// otherwise it generates the application objects, applies labels and
// annotations, creates the objects on the server, and prints follow-up
// hints for builds and installer pods. Returns cmdutil.ErrExit when errors
// have already been reported to the user.
func (o *NewAppOptions) Run() error {
	config := o.Config
	out := o.Out

	// Query mode: report matching templates/images and create nothing.
	if config.Querying() {
		result, err := config.RunQuery()
		if err != nil {
			return handleRunError(err, o.CommandName, o.CommandPath)
		}

		if o.Action.ShouldPrint() {
			return o.PrintObject(result.List)
		}

		return printHumanReadableQueryResult(result, out, o.CommandName)
	}

	checkGitInstalled(out)

	result, err := config.Run()
	if err := handleRunError(err, o.CommandName, o.CommandPath); err != nil {
		return err
	}

	// if the user has set the "app" label explicitly on their objects in the template,
	// we should not return a failure when we can't set it ourselves.
	ignoreLabelFailure := false
	if len(config.Labels) == 0 && len(result.Name) > 0 {
		config.Labels = map[string]string{"app": result.Name}
		ignoreLabelFailure = true
	}

	if err := setLabels(config.Labels, result, ignoreLabelFailure); err != nil {
		return err
	}
	if err := setAnnotations(map[string]string{newcmd.GeneratedByNamespace: newcmd.GeneratedByNewApp}, result); err != nil {
		return err
	}

	if o.Action.ShouldPrint() {
		return o.PrintObject(result.List)
	}

	if result.GeneratedJobs {
		o.Action.Compact()
	}

	if errs := o.Action.WithMessage(configcmd.CreateMessage(config.Labels), "created").Run(result.List, result.Namespace); len(errs) > 0 {
		return cmdutil.ErrExit
	}

	if !o.Action.Verbose() || o.Action.DryRun {
		return nil
	}

	hasMissingRepo := false
	installing := []*kapi.Pod{}
	indent := o.Action.DefaultIndent()
	for _, item := range result.List.Items {
		switch t := item.(type) {
		case *kapi.Pod:
			// Collect installer pods generated for a job so their logs can
			// be followed below.
			if t.Annotations[newcmd.GeneratedForJob] == "true" {
				installing = append(installing, t)
			}
		case *buildapi.BuildConfig:
			// Determine whether the build will start automatically. The
			// previous code used `break` inside a switch case, which only
			// exited the switch; this form short-circuits the scan loop.
			triggered := false
			for _, trigger := range t.Spec.Triggers {
				if trigger.Type == buildapi.ImageChangeBuildTriggerType || trigger.Type == buildapi.ConfigChangeBuildTriggerType {
					triggered = true
					break
				}
			}
			if triggered {
				fmt.Fprintf(out, "%[1]sBuild scheduled, use '%[3]s logs -f bc/%[2]s' to track its progress.\n", indent, t.Name, o.CommandName)
			} else {
				fmt.Fprintf(out, "%[1]sUse '%[3]s start-build %[2]s' to start a build.\n", indent, t.Name, o.CommandName)
			}
		case *imageapi.ImageStream:
			// Warn at most once when no integrated registry is configured.
			if len(t.Status.DockerImageRepository) == 0 {
				if hasMissingRepo {
					continue
				}
				hasMissingRepo = true
				fmt.Fprintf(out, "%sWARNING: No Docker registry has been configured with the server. Automatic builds and deployments may not function.\n", indent)
			}
		}
	}

	switch {
	case len(installing) == 1:
		// A single installer pod: stream its logs until completion.
		jobInput := installing[0].Annotations[newcmd.GeneratedForJobFor]
		return followInstallation(config, jobInput, installing[0], o.LogsForObject)
	case len(installing) > 1:
		for i := range installing {
			fmt.Fprintf(out, "%sTrack installation of %s with '%s logs %s'.\n", indent, installing[i].Name, o.CommandName, installing[i].Name)
		}
	case len(result.List.Items) > 0:
		fmt.Fprintf(out, "%sRun '%s %s' to view your app.\n", indent, o.CommandName, StatusRecommendedName)
	}
	return nil
}
type LogsForObjectFunc func(object, options runtime.Object) (*restclient.Request, error)
// followInstallation waits for the installer pod created for `input` to
// leave the Pending phase, streams its logs to config.Out, and then waits
// for it to terminate, reporting failure if the pod did not complete.
func followInstallation(config *newcmd.AppConfig, input string, pod *kapi.Pod, logsForObjectFn LogsForObjectFunc) error {
	fmt.Fprintf(config.Out, "--> Installing ...\n")

	// we cannot retrieve logs until the pod is out of pending
	// TODO: move this to the server side
	podClient := config.KubeClient.Pods(pod.Namespace)
	if err := wait.PollImmediate(500*time.Millisecond, 60*time.Second, installationStarted(podClient, pod.Name, config.KubeClient.Secrets(pod.Namespace))); err != nil {
		return err
	}

	// Stream logs from the pod's first container.
	opts := &kcmd.LogsOptions{
		Namespace:   pod.Namespace,
		ResourceArg: pod.Name,
		Options: &kapi.PodLogOptions{
			Follow:    true,
			Container: pod.Spec.Containers[0].Name,
		},
		Mapper:        config.Mapper,
		Typer:         config.Typer,
		ClientMapper:  config.ClientMapper,
		LogsForObject: logsForObjectFn,

		Out: config.Out,
	}
	_, logErr := opts.RunLogs()

	// status of the pod may take tens of seconds to propagate
	if err := wait.PollImmediate(500*time.Millisecond, 30*time.Second, installationComplete(podClient, pod.Name, config.Out)); err != nil {
		if err == wait.ErrWaitTimeout {
			if logErr != nil {
				// output the log error if one occurred
				err = logErr
			} else {
				err = fmt.Errorf("installation may not have completed, see logs for %q for more information", pod.Name)
			}
		}
		return err
	}

	return nil
}
// installationStarted returns a wait.ConditionFunc that reports true once
// the named pod has left the Pending phase. As a side effect, it deletes a
// same-named install secret that was generated for the same job, once the
// pod has started.
func installationStarted(c kclient.PodInterface, name string, s kclient.SecretsInterface) wait.ConditionFunc {
	return func() (bool, error) {
		pod, err := c.Get(name)
		if err != nil {
			return false, err
		}
		if pod.Status.Phase == kapi.PodPending {
			return false, nil
		}
		// delete a secret named the same as the pod if it exists
		if secret, err := s.Get(name); err == nil {
			if secret.Annotations[newcmd.GeneratedForJob] == "true" &&
				secret.Annotations[newcmd.GeneratedForJobFor] == pod.Annotations[newcmd.GeneratedForJobFor] {
				// best-effort delete; failure is only logged
				if err := s.Delete(name); err != nil {
					glog.V(4).Infof("Failed to delete install secret %s: %v", name, err)
				}
			}
		}
		return true, nil
	}
}
// installationComplete returns a wait.ConditionFunc that resolves once the
// named pod terminates: success (pod is then deleted best-effort), failure
// (returned as an error), or an error if the pod disappears while waiting.
func installationComplete(c kclient.PodInterface, name string, out io.Writer) wait.ConditionFunc {
	return func() (bool, error) {
		pod, err := c.Get(name)
		if err != nil {
			if kapierrors.IsNotFound(err) {
				return false, fmt.Errorf("installation pod was deleted; unable to determine whether it completed successfully")
			}
			// transient lookup error: keep polling
			return false, nil
		}
		switch pod.Status.Phase {
		case kapi.PodSucceeded:
			fmt.Fprintf(out, "--> Success\n")
			// best-effort cleanup of the finished installer pod
			if err := c.Delete(name, nil); err != nil {
				glog.V(4).Infof("Failed to delete install pod %s: %v", name, err)
			}
			return true, nil
		case kapi.PodFailed:
			return true, fmt.Errorf("installation of %q did not complete successfully", name)
		default:
			return false, nil
		}
	}
}
// setAppConfigLabels parses the -l/--labels flag value, if provided, and
// stores the resulting label map on the app config.
func setAppConfigLabels(c *cobra.Command, config *newcmd.AppConfig) error {
	labelStr := kcmdutil.GetFlagString(c, "labels")
	if len(labelStr) == 0 {
		return nil
	}
	labels, err := ctl.ParseLabels(labelStr)
	config.Labels = labels
	return err
}
// getDockerClient returns a client capable of communicating with the local
// docker daemon. If an error occurs (such as no local daemon being available),
// it will return nil.
func getDockerClient() (*docker.Client, error) {
	client, _, err := dockerutil.NewHelper().GetClient()
	if err != nil {
		glog.V(2).Infof("No local Docker daemon detected: %v", err)
		return nil, err
	}
	// Verify the daemon actually answers before handing the client out.
	if pingErr := client.Ping(); pingErr != nil {
		glog.V(4).Infof("Docker client did not respond to a ping: %v", pingErr)
		return nil, pingErr
	}
	return client, nil
}
// CompleteAppConfig fills in the app config from the factory (mappers,
// namespace, clients, optional Docker daemon, optional secret accessor),
// consumes the positional arguments, and validates mutually exclusive
// flag combinations.
func CompleteAppConfig(config *newcmd.AppConfig, f *clientcmd.Factory, c *cobra.Command, args []string) error {
	mapper, typer := f.Object(false)
	if config.Mapper == nil {
		config.Mapper = mapper
	}
	if config.Typer == nil {
		config.Typer = typer
	}
	if config.ClientMapper == nil {
		config.ClientMapper = resource.ClientMapperFunc(f.ClientForMapping)
	}

	namespace, _, err := f.DefaultNamespace()
	if err != nil {
		return err
	}

	osclient, kclient, err := f.Clients()
	if err != nil {
		return err
	}
	config.KubeClient = kclient
	// A local Docker daemon is optional; errors are ignored here and the
	// client may be nil.
	dockerClient, _ := getDockerClient()
	config.SetOpenShiftClient(osclient, namespace, dockerClient)

	// --grant-install-rights needs access to the caller's token.
	if config.AllowSecretUse {
		cfg, err := f.OpenShiftClientConfig.ClientConfig()
		if err != nil {
			return err
		}
		config.SecretAccessor = newConfigSecretRetriever(cfg)
	}

	unknown := config.AddArguments(args)
	if len(unknown) != 0 {
		return kcmdutil.UsageError(c, "Did not recognize the following arguments: %v", unknown)
	}

	if config.AllowMissingImages && config.AsSearch {
		return kcmdutil.UsageError(c, "--allow-missing-images and --search are mutually exclusive.")
	}

	// --source-image and --source-image-path must be given together.
	if len(config.SourceImage) != 0 && len(config.SourceImagePath) == 0 {
		return kcmdutil.UsageError(c, "--source-image-path must be specified when --source-image is specified.")
	}
	if len(config.SourceImage) == 0 && len(config.SourceImagePath) != 0 {
		return kcmdutil.UsageError(c, "--source-image must be specified when --source-image-path is specified.")
	}
	return nil
}
// setAnnotations stamps every object in the result list with the given
// annotations, stopping at the first failure.
func setAnnotations(annotations map[string]string, result *newcmd.AppResult) error {
	for i := range result.List.Items {
		if err := util.AddObjectAnnotations(result.List.Items[i], annotations); err != nil {
			return err
		}
	}
	return nil
}
// setLabels applies the given labels to every object in the result list.
// When ignoreFailure is true, per-object labeling errors are swallowed.
func setLabels(labels map[string]string, result *newcmd.AppResult, ignoreFailure bool) error {
	for i := range result.List.Items {
		if err := util.AddObjectLabels(result.List.Items[i], labels); err != nil && !ignoreFailure {
			return err
		}
	}
	return nil
}
// isInvalidTriggerError reports whether err is a validation StatusError
// whose message contains 'invalid trigger type'. Older servers return this
// when they consider the presence of unknown trigger types to be an error.
func isInvalidTriggerError(err error) bool {
	statusErr, isStatus := err.(*kapierrors.StatusError)
	return isStatus &&
		kapierrors.IsInvalid(err) &&
		strings.Contains(statusErr.Status().Message, "invalid trigger type")
}
// retryBuildConfig determines if the given error is caused by an invalid trigger
// error on a BuildConfig. If that is the case, it will remove all triggers with a
// type that is not in the whitelist for an older server, and return the
// modified object for a retry; otherwise it returns nil (no retry).
func retryBuildConfig(info *resource.Info, err error) runtime.Object {
	// trigger types accepted by older API servers
	triggerTypeWhiteList := map[buildapi.BuildTriggerType]struct{}{
		buildapi.GitHubWebHookBuildTriggerType:  {},
		buildapi.GenericWebHookBuildTriggerType: {},
		buildapi.ImageChangeBuildTriggerType:    {},
	}
	if info.Mapping.GroupVersionKind.GroupKind() == buildapi.Kind("BuildConfig") && isInvalidTriggerError(err) {
		bc, ok := info.Object.(*buildapi.BuildConfig)
		if !ok {
			return nil
		}
		// keep only whitelisted triggers
		triggers := []buildapi.BuildTriggerPolicy{}
		for _, t := range bc.Spec.Triggers {
			if _, inList := triggerTypeWhiteList[t.Type]; inList {
				triggers = append(triggers, t)
			}
		}
		bc.Spec.Triggers = triggers
		return bc
	}
	return nil
}
// handleRunError normalizes errors produced by new-app into grouped,
// user-facing messages followed by their suggestions. Aggregate errors are
// unwrapped so each sub-error is classified individually by transformError.
// Returns nil when err is nil.
func handleRunError(err error, commandName, commandPath string) error {
	if err == nil {
		return nil
	}
	errs := []error{err}
	if agg, ok := err.(errors.Aggregate); ok {
		errs = agg.Errors()
	}
	groups := errorGroups{}
	for _, err := range errs {
		transformError(err, commandName, commandPath, groups)
	}
	buf := &bytes.Buffer{}
	for _, group := range groups {
		fmt.Fprint(buf, kcmdutil.MultipleErrors("error: ", group.errs))
		if len(group.suggestion) > 0 {
			fmt.Fprintln(buf)
		}
		fmt.Fprint(buf, group.suggestion)
	}
	// Use an explicit %s verb: the collected text may itself contain '%'
	// characters, which fmt.Errorf would otherwise misinterpret as
	// formatting directives and corrupt the message.
	return fmt.Errorf("%s", buf.String())
}
// errorGroup collects related errors plus a single suggestion that is
// printed after them.
type errorGroup struct {
	errs       []error
	suggestion string
}

// errorGroups indexes error groups by a category key ("" is the catch-all).
type errorGroups map[string]errorGroup
// Add appends err (plus any extra errs) to the named group and records the
// group's suggestion text, overwriting any previous suggestion.
func (g errorGroups) Add(group string, suggestion string, err error, errs ...error) {
	entry := g[group]
	entry.errs = append(entry.errs, errs...)
	entry.errs = append(entry.errs, err)
	entry.suggestion = suggestion
	g[group] = entry
}
// transformError classifies a single error from new-app into the groups
// map, attaching a human-readable suggestion per category. Errors that do
// not match a known category land in the "" catch-all group.
// NOTE(review): blank lines inside the heredoc suggestion texts may have
// been lost in this copy of the file.
func transformError(err error, commandName, commandPath string, groups errorGroups) {
	switch t := err.(type) {
	case newcmd.ErrRequiresExplicitAccess:
		// installable component needs either a service account or the
		// caller's own credentials
		if t.Input.Token != nil && t.Input.Token.ServiceAccount {
			groups.Add(
				"explicit-access-installer",
				heredoc.Doc(`
					WARNING: This will allow the pod to create and manage resources within your namespace -
					ensure you trust the image with those permissions before you continue.
					You can see more information about the image by adding the --dry-run flag.
					If you trust the provided image, include the flag --grant-install-rights.`,
				),
				fmt.Errorf("installing %q requires an 'installer' service account with project editor access", t.Match.Value),
			)
		} else {
			groups.Add(
				"explicit-access-you",
				heredoc.Doc(`
					WARNING: This will allow the pod to act as you across the entire cluster - ensure you
					trust the image with those permissions before you continue.
					You can see more information about the image by adding the --dry-run flag.
					If you trust the provided image, include the flag --grant-install-rights.`,
				),
				fmt.Errorf("installing %q requires that you grant the image access to run with your credentials", t.Match.Value),
			)
		}
		return
	case newapp.ErrNoMatch:
		// nothing matched the argument at all
		groups.Add(
			"no-matches",
			heredoc.Docf(`
				The '%[1]s' command will match arguments to the following types:
				  1. Images tagged into image streams in the current project or the 'openshift' project
				     - if you don't specify a tag, we'll add ':latest'
				  2. Images in the Docker Hub, on remote registries, or on the local Docker engine
				  3. Templates in the current project or the 'openshift' project
				  4. Git repository URLs or local paths that point to Git repositories
				--allow-missing-images can be used to point to an image that does not exist yet.
				See '%[1]s -h' for examples.`, commandPath,
			),
			t,
			t.Errs...,
		)
		return
	case newapp.ErrMultipleMatches:
		buf := &bytes.Buffer{}
		for i, match := range t.Matches {
			// If we have more than 5 matches, stop output and recommend searching
			// after the fifth
			if i >= 5 {
				groups.Add(
					"multiple-matches",
					heredoc.Docf(`
						The argument %[1]q could apply to the following Docker images, OpenShift image streams, or templates:
						%[2]sTo view a full list of matches, use '%[3]s new-app -S %[1]s'`, t.Value, buf.String(), commandName,
					),
					t,
					t.Errs...,
				)
				return
			}

			fmt.Fprintf(buf, "* %s\n", match.Description)
			fmt.Fprintf(buf, "  Use %[1]s to specify this image or template\n\n", match.Argument)
		}

		groups.Add(
			"multiple-matches",
			heredoc.Docf(`
				The argument %[1]q could apply to the following Docker images, OpenShift image streams, or templates:
				%[2]s`, t.Value, buf.String(),
			),
			t,
			t.Errs...,
		)
		return
	case newapp.ErrPartialMatch:
		buf := &bytes.Buffer{}
		fmt.Fprintf(buf, "* %s\n", t.Match.Description)
		fmt.Fprintf(buf, "  Use %[1]s to specify this image or template\n\n", t.Match.Argument)

		groups.Add(
			"partial-match",
			heredoc.Docf(`
				The argument %[1]q only partially matched the following Docker image, OpenShift image stream, or template:
				%[2]s`, t.Value, buf.String(),
			),
			t,
			t.Errs...,
		)
		return
	case newapp.ErrNoTagsFound:
		buf := &bytes.Buffer{}
		fmt.Fprintf(buf, "  Use --allow-missing-imagestream-tags to use this image stream\n\n")

		groups.Add(
			"no-tags",
			heredoc.Docf(`
				The image stream %[1]q exists, but it has no tags.
				%[2]s`, t.Match.Name, buf.String(),
			),
			t,
			t.Errs...,
		)
		return
	}
	switch err {
	case errNoTokenAvailable:
		// TODO: improve by allowing token generation
		groups.Add("", "", fmt.Errorf("to install components you must be logged in with an OAuth token (instead of only a certificate)"))
	case newcmd.ErrNoInputs:
		// TODO: suggest things to the user
		groups.Add("", "", usageError(commandPath, newAppNoInput, commandName))
	default:
		groups.Add("", "", err)
	}
}
// usageError formats the message and appends a pointer at the command's
// --help output.
func usageError(commandPath, format string, args ...interface{}) error {
	return fmt.Errorf("%s\nSee '%s -h' for help and examples.",
		fmt.Sprintf(format, args...), commandPath)
}
// printHumanReadableQueryResult renders --list/--search results to out,
// grouped into templates, image streams, and Docker images, each sorted by
// match score. Returns an error when there are no matches at all.
func printHumanReadableQueryResult(r *newcmd.QueryResult, out io.Writer, commandName string) error {
	if len(r.Matches) == 0 {
		return fmt.Errorf("no matches found")
	}

	// bucket matches by kind
	templates := newapp.ComponentMatches{}
	imageStreams := newapp.ComponentMatches{}
	dockerImages := newapp.ComponentMatches{}

	for _, match := range r.Matches {
		switch {
		case match.IsTemplate():
			templates = append(templates, match)
		case match.IsImage() && match.ImageStream != nil:
			imageStreams = append(imageStreams, match)
		case match.IsImage() && match.Image != nil:
			dockerImages = append(dockerImages, match)
		}
	}

	sort.Sort(newapp.ScoredComponentMatches(templates))
	sort.Sort(newapp.ScoredComponentMatches(imageStreams))
	sort.Sort(newapp.ScoredComponentMatches(dockerImages))

	if len(templates) > 0 {
		fmt.Fprintf(out, "Templates (%s new-app --template=<template>)\n", commandName)
		fmt.Fprintln(out, "-----")
		for _, match := range templates {
			template := match.Template
			description := template.ObjectMeta.Annotations["description"]

			fmt.Fprintln(out, template.Name)
			fmt.Fprintf(out, "  Project: %v\n", template.Namespace)
			if len(description) > 0 {
				fmt.Fprintf(out, "  %v\n", description)
			}
		}
		fmt.Fprintln(out)
	}

	if len(imageStreams) > 0 {
		fmt.Fprintf(out, "Image streams (%s new-app --image-stream=<image-stream> [--code=<source>])\n", commandName)
		fmt.Fprintln(out, "-----")
		for _, match := range imageStreams {
			imageStream := match.ImageStream
			description := imageStream.ObjectMeta.Annotations["description"]
			tags := "<none>"
			// collect the stream's tag names in sorted order
			if len(imageStream.Status.Tags) > 0 {
				set := sets.NewString()
				for tag := range imageStream.Status.Tags {
					set.Insert(tag)
				}
				tags = strings.Join(set.List(), ", ")
			}

			fmt.Fprintln(out, imageStream.Name)
			fmt.Fprintf(out, "  Project: %v\n", imageStream.Namespace)
			if len(imageStream.Spec.DockerImageRepository) > 0 {
				fmt.Fprintf(out, "  Tracks:  %v\n", imageStream.Spec.DockerImageRepository)
			}
			fmt.Fprintf(out, "  Tags:    %v\n", tags)
			if len(description) > 0 {
				fmt.Fprintf(out, "  %v\n", description)
			}
		}
		fmt.Fprintln(out)
	}

	if len(dockerImages) > 0 {
		fmt.Fprintf(out, "Docker images (%s new-app --docker-image=<docker-image> [--code=<source>])\n", commandName)
		fmt.Fprintln(out, "-----")
		for _, match := range dockerImages {
			image := match.Image

			name, tag, ok := imageapi.SplitImageStreamTag(match.Name)
			if !ok {
				name = match.Name
				tag = match.ImageTag
			}

			fmt.Fprintln(out, name)
			fmt.Fprintf(out, "  Registry: %v\n", match.Meta["registry"])
			fmt.Fprintf(out, "  Tags:     %v\n", tag)

			if len(image.Comment) > 0 {
				fmt.Fprintf(out, "  %v\n", image.Comment)
			}
		}
		fmt.Fprintln(out)
	}

	return nil
}
// configSecretRetriever exposes credentials from a rest client config to
// installable components.
type configSecretRetriever struct {
	config *restclient.Config
}

// newConfigSecretRetriever wraps cfg as a newapp.SecretAccessor.
func newConfigSecretRetriever(config *restclient.Config) newapp.SecretAccessor {
	return &configSecretRetriever{config}
}

// errNoTokenAvailable is returned when the client config carries no bearer
// token (e.g. certificate-only login).
var errNoTokenAvailable = fmt.Errorf("you are not logged in with a token - unable to provide a secret to the installable component")
// Token returns the bearer token from the REST config, or
// errNoTokenAvailable when the config does not carry one.
func (r *configSecretRetriever) Token() (string, error) {
	token := r.config.BearerToken
	if len(token) == 0 {
		return "", errNoTokenAvailable
	}
	return token, nil
}
// CACert returns the cluster CA certificate in PEM form: inline CAData takes
// precedence, otherwise CAFile is read from disk; an empty string (and nil
// error) is returned when neither is configured.
func (r *configSecretRetriever) CACert() (string, error) {
	if ca := r.config.CAData; len(ca) > 0 {
		return string(ca), nil
	}
	caFile := r.config.CAFile
	if len(caFile) == 0 {
		return "", nil
	}
	data, err := ioutil.ReadFile(caFile)
	if err != nil {
		return "", fmt.Errorf("unable to read CA cert from config %s: %v", caFile, err)
	}
	return string(data), nil
}
// checkGitInstalled writes a warning to w when no git binary can be found;
// git is required for working with source repositories.
func checkGitInstalled(w io.Writer) {
	if git.IsGitInstalled() {
		return
	}
	fmt.Fprintf(w, "warning: Cannot find git. Ensure that it is installed and in your path. Git is required to work with git repositories.\n")
}
| {
"pile_set_name": "Github"
} |
// Shared popover chrome. Each `popover--<side>` modifier draws a 7px
// CSS-triangle arrow with a :before pseudo-element on the matching edge
// (the solid side of the border supplies the arrow color, getColor(n4));
// the inner panel carries the themed background, border and padding.
@mixin popover {
  z-index: 300;
  // Arrow pointing down: popover rendered above its target.
  &.popover--top:before {
    content: '';
    display: block;
    position: absolute;
    bottom: -8px;
    left: 50%;
    border-color: getColor(n4) transparent transparent transparent;
    border-style: solid;
    border-width: 7px;
    margin-left: -7px; // re-center: pull back half the arrow width
  }
  // Arrow pointing left: popover rendered to the right of its target.
  &.popover--right:before {
    content: '';
    display: block;
    position: absolute;
    top: 50%;
    left: -8px;
    border-color: transparent getColor(n4) transparent transparent;
    border-style: solid;
    border-width: 7px;
    margin-top: -7px; // re-center: pull up half the arrow height
  }
  // Arrow pointing up: popover rendered below its target.
  &.popover--bottom:before {
    content: '';
    display: block;
    position: absolute;
    top: -8px;
    left: 50%;
    border-color: transparent transparent getColor(n4) transparent;
    border-style: solid;
    border-width: 7px;
    margin-left: -7px;
  }
  // Arrow pointing right: popover rendered to the left of its target.
  &.popover--left:before {
    content: '';
    display: block;
    position: absolute;
    top: 50%;
    right: -8px;
    border-color: transparent transparent transparent getColor(n4);
    border-style: solid;
    border-width: 7px;
    margin-top: -7px;
  }
  // Themed content panel inside the popover.
  .popover-inner {
    background-color: getColor(n0);
    color: getColor(n8);
    border: 1px solid getColor(n4);
    text-align: left;
    padding: 5px 10px;
  }
}
// Overrides for the library-rendered (Bootstrap-style) .popover-arrow:
// recolor each placement's arrow with the theme background (getColor(n0))
// and size the visible border triangle per direction.
.popover.top .popover-arrow {
  border-top-color: getColor(n0);
  border-width: 6px 5px 0;
}
.popover.top-left .popover-arrow {
  border-top-color: getColor(n0);
}
.popover.top-right .popover-arrow {
  border-top-color: getColor(n0);
}
.popover.right .popover-arrow {
  border-right-color: getColor(n0);
  border-width: 5px 6px 5px 0;
}
.popover.left .popover-arrow {
  border-left-color: getColor(n0);
  border-width: 5px 0 5px 6px;
}
.popover.bottom .popover-arrow {
  border-bottom-color: getColor(n0);
  border-width: 0 5px 6px;
}
.popover.bottom-left .popover-arrow {
  border-bottom-color: getColor(n0);
}
.popover.bottom-right .popover-arrow {
  border-bottom-color: getColor(n0);
}
// Apply the shared chrome to the base class.
.popover {
  @include popover;
}
| {
"pile_set_name": "Github"
} |
namespace App.Metrics
{
    /// <summary>
    /// Static ambient access point for the application's configured metrics root.
    /// </summary>
    public static class Metrics
    {
        /// <summary>
        /// The globally installed metrics root; null until <see cref="SetInstance"/> runs.
        /// </summary>
        public static IMetricsRoot Instance { get; private set; }

        // Internal so only bootstrap code within this assembly can install the singleton.
        internal static void SetInstance(IMetricsRoot metricsRoot)
        {
            Instance = metricsRoot;
        }
    }
}
"pile_set_name": "Github"
} |
/* Collapse the vertical spacing the UI framework adds around the tab menu
   and its text menus; !important is needed to beat the framework rules. */
.tab-menu {
  margin-bottom: 0!important;
}
.main .menu.text {
  margin-top: 0!important;
  margin-bottom: 0!important;
}
/* Restore a little breathing room above forms in the main area. */
.main .form {
  margin-top: 1em;
}
"pile_set_name": "Github"
} |
(function($) {
    'use strict';
    // SelectBox backs Django admin's filtered multi-select widget. It keeps a
    // per-select cache of {value, text, displayed} entries so the visible
    // <option> list can be filtered/rebuilt without losing hidden choices.
    var SelectBox = {
        cache: {},
        // Build the cache for the <select> with the given id from its options.
        init: function(id) {
            var box = document.getElementById(id);
            var node;
            SelectBox.cache[id] = [];
            var cache = SelectBox.cache[id];
            var boxOptions = box.options;
            var boxOptionsLength = boxOptions.length;
            for (var i = 0, j = boxOptionsLength; i < j; i++) {
                node = boxOptions[i];
                cache.push({value: node.value, text: node.text, displayed: 1});
            }
        },
        redisplay: function(id) {
            // Repopulate HTML select box from cache
            var box = document.getElementById(id);
            var node;
            $(box).empty(); // clear all options
            var new_options = box.outerHTML.slice(0, -9); // grab just the opening tag
            var cache = SelectBox.cache[id];
            for (var i = 0, j = cache.length; i < j; i++) {
                node = cache[i];
                if (node.displayed) {
                    var new_option = new Option(node.text, node.value, false, false);
                    // Shows a tooltip when hovering over the option
                    new_option.setAttribute("title", node.text);
                    new_options += new_option.outerHTML;
                }
            }
            new_options += '</select>';
            box.outerHTML = new_options;
        },
        filter: function(id, text) {
            // Redisplay the HTML select box, displaying only the choices containing ALL
            // the words in text. (It's an AND search.)
            var tokens = text.toLowerCase().split(/\s+/);
            var node, token;
            var cache = SelectBox.cache[id];
            for (var i = 0, j = cache.length; i < j; i++) {
                node = cache[i];
                node.displayed = 1;
                var node_text = node.text.toLowerCase();
                var numTokens = tokens.length;
                for (var k = 0; k < numTokens; k++) {
                    token = tokens[k];
                    if (node_text.indexOf(token) === -1) {
                        node.displayed = 0;
                        break; // Once the first token isn't found we're done
                    }
                }
            }
            SelectBox.redisplay(id);
        },
        // Remove the entry with the given value from the cache, if present.
        delete_from_cache: function(id, value) {
            var node, delete_index = null;
            var cache = SelectBox.cache[id];
            for (var i = 0, j = cache.length; i < j; i++) {
                node = cache[i];
                if (node.value === value) {
                    delete_index = i;
                    break;
                }
            }
            // Bug fix: only splice when the value was actually found.
            // splice(null, 1) coerces null to 0 and would wrongly delete
            // the first cached option for an absent value.
            if (delete_index !== null) {
                cache.splice(delete_index, 1);
            }
        },
        // Append an option object ({value, text}) to the cache as displayed.
        add_to_cache: function(id, option) {
            SelectBox.cache[id].push({value: option.value, text: option.text, displayed: 1});
        },
        cache_contains: function(id, value) {
            // Check if an item is contained in the cache
            var node;
            var cache = SelectBox.cache[id];
            for (var i = 0, j = cache.length; i < j; i++) {
                node = cache[i];
                if (node.value === value) {
                    return true;
                }
            }
            return false;
        },
        // Move the *selected* options of `from` into `to` (cache and DOM).
        move: function(from, to) {
            var from_box = document.getElementById(from);
            var option;
            var boxOptions = from_box.options;
            var boxOptionsLength = boxOptions.length;
            for (var i = 0, j = boxOptionsLength; i < j; i++) {
                option = boxOptions[i];
                var option_value = option.value;
                if (option.selected && SelectBox.cache_contains(from, option_value)) {
                    SelectBox.add_to_cache(to, {value: option_value, text: option.text, displayed: 1});
                    SelectBox.delete_from_cache(from, option_value);
                }
            }
            SelectBox.redisplay(from);
            SelectBox.redisplay(to);
        },
        // Move *all* options of `from` into `to` (cache and DOM).
        move_all: function(from, to) {
            var from_box = document.getElementById(from);
            var option;
            var boxOptions = from_box.options;
            var boxOptionsLength = boxOptions.length;
            for (var i = 0, j = boxOptionsLength; i < j; i++) {
                option = boxOptions[i];
                var option_value = option.value;
                if (SelectBox.cache_contains(from, option_value)) {
                    SelectBox.add_to_cache(to, {value: option_value, text: option.text, displayed: 1});
                    SelectBox.delete_from_cache(from, option_value);
                }
            }
            SelectBox.redisplay(from);
            SelectBox.redisplay(to);
        },
        // Sort the cache case-insensitively by option text.
        sort: function(id) {
            SelectBox.cache[id].sort(function(a, b) {
                a = a.text.toLowerCase();
                b = b.text.toLowerCase();
                try {
                    if (a > b) {
                        return 1;
                    }
                    if (a < b) {
                        return -1;
                    }
                }
                catch (e) {
                    // silently fail on IE 'unknown' exception
                }
                return 0;
            } );
        },
        // Mark every option in the box as selected (used before form submit).
        select_all: function(id) {
            var box = document.getElementById(id);
            var boxOptions = box.options;
            var boxOptionsLength = boxOptions.length;
            for (var i = 0; i < boxOptionsLength; i++) {
                boxOptions[i].selected = 'selected';
            }
        }
    };
    window.SelectBox = SelectBox;
})(django.jQuery);
| {
"pile_set_name": "Github"
} |
// (C) Copyright Edward Diener 2015
// Use, modification and distribution are subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt).

#if !defined(BOOST_VMD_ARRAY_HPP)
#define BOOST_VMD_ARRAY_HPP

#include <boost/vmd/detail/setup.hpp>

// Umbrella header: pulls in the Boost.VMD array-to-seq and array-to-tuple
// conversion macros, available only when the preprocessor supports variadics.
#if BOOST_PP_VARIADICS

#include <boost/vmd/array/to_seq.hpp>
#include <boost/vmd/array/to_tuple.hpp>

#endif /* BOOST_PP_VARIADICS */
#endif /* BOOST_VMD_ARRAY_HPP */
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- Rounded white card background: 20dp corner radius with uniform 10dp
     content padding on all sides. -->
<shape xmlns:android="http://schemas.android.com/apk/res/android"
    android:shape="rectangle">
    <corners android:radius="20dp" />
    <padding
        android:bottom="10dp"
        android:left="10dp"
        android:right="10dp"
        android:top="10dp" />
    <solid android:color="@android:color/white" />
</shape>
"pile_set_name": "Github"
} |
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
using namespace ktl;
using namespace Data::Utilities;
using namespace TxnReplicator;
using namespace Data::StateManager;
// Sentinel used where no meaningful error code is available.
#define INVALID_ERROR_CODE 0

// Function-name literals passed to the trace events when an IStateProvider2
// API call fails, so traces identify the failing API without string building.
Common::WStringLiteral const ApiDispatcher::OpenAsync_FunctionName(L"OpenAsync");
Common::WStringLiteral const ApiDispatcher::CloseAsync_FunctionName(L"CloseAsync");
Common::WStringLiteral const ApiDispatcher::RecoverCheckpointAsync_FunctionName(L"RecoverCheckpointAsync");
Common::WStringLiteral const ApiDispatcher::PerformCheckpointAsync_FunctionName(L"PerformCheckpointAsync");
Common::WStringLiteral const ApiDispatcher::CompleteCheckpointAsync_FunctionName(L"CompleteCheckpointAsync");
Common::WStringLiteral const ApiDispatcher::RestoreCheckpointAsync_FunctionName(L"RestoreCheckpointAsync");
Common::WStringLiteral const ApiDispatcher::BeginSettingCurrentState_FunctionName(L"BeginSettingCurrentState");
Common::WStringLiteral const ApiDispatcher::EndSettingCurrentStateAsync_FunctionName(L"EndSettingCurrentStateAsync");
Common::WStringLiteral const ApiDispatcher::PrepareForRemoveAsync_FunctionName(L"PrepareForRemoveAsync");
Common::WStringLiteral const ApiDispatcher::RemoveStateAsync_FunctionName(L"RemoveStateAsync");
// Factory method: allocates an ApiDispatcher with the given allocator and
// throws (via THROW_ON_ALLOCATION_FAILURE) if the allocation fails.
ApiDispatcher::SPtr ApiDispatcher::Create(
    __in PartitionedReplicaId const & traceId,
    __in IStateProvider2Factory & stateProviderFactory,
    __in KAllocator& allocator)
{
    ApiDispatcher * result = _new(API_DISPATCHER_TAG, allocator) ApiDispatcher(
        traceId,
        stateProviderFactory);
    THROW_ON_ALLOCATION_FAILURE(result);

    return ApiDispatcher::SPtr(result);
}
// Creates a state provider instance through the injected user factory.
// On failure the error is traced and outStateProvider is set to nullptr.
NTSTATUS ApiDispatcher::CreateStateProvider(
    __in KUri const & name,
    __in FABRIC_STATE_PROVIDER_ID stateProviderId,
    __in KString const & typeString,
    __in_opt OperationData const * const initializationParameters,
    __out TxnReplicator::IStateProvider2::SPtr & outStateProvider) noexcept
{
    NTSTATUS status = STATUS_UNSUCCESSFUL;

    IStateProvider2::SPtr stateProviderSPtr = nullptr;
    FactoryArguments factoryArguments(name, stateProviderId, typeString, PartitionId, ReplicaId, initializationParameters);

    status = stateProviderFactorySPtr_->Create(factoryArguments, stateProviderSPtr);
    if (NT_SUCCESS(status) == false)
    {
        StateManagerEventSource::Events->ISP2_Factory_ApiError(
            TracePartitionId,
            ReplicaId,
            stateProviderId,
            ToStringLiteral(name),
            ToStringLiteral(typeString),
            status);

        outStateProvider = nullptr;
        return status;
    }

    outStateProvider = stateProviderSPtr;
    return status;
}
// Initializes a state provider with its replicator weak-ref, work folder and
// children, converting a thrown ktl::Exception into a traced NTSTATUS.
NTSTATUS ApiDispatcher::Initialize(
    __in Metadata const & metadata,
    __in KWeakRef<TxnReplicator::ITransactionalReplicator> & transactionalReplicatorWRef,
    __in KStringView const & workFolder,
    __in_opt KSharedArray<TxnReplicator::IStateProvider2::SPtr> const * const children) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_SUCCESS;

    try
    {
        metadata.StateProvider->Initialize(
            transactionalReplicatorWRef,
            workFolder,
            children);
    }
    catch (Exception const & exception)
    {
        ULONG32 numberOfChildren = children == nullptr ? 0 : children->Count();
        StateManagerEventSource::Events->ISP2_Initialize_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            ToStringLiteral(workFolder),
            numberOfChildren,
            exception.GetStatus());

        status = exception.GetStatus();
    }

    return status;
}
// Opens a single state provider, converting a thrown ktl::Exception into a
// traced NTSTATUS.
Awaitable<NTSTATUS> ApiDispatcher::OpenAsync(
    __in Metadata const & metadata,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_SUCCESS;

    try
    {
        co_await metadata.StateProvider->OpenAsync(cancellationToken);
    }
    catch (Exception const & exception)
    {
        StateManagerEventSource::Events->ISP2_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            OpenAsync_FunctionName,
            exception.GetStatus());

        status = exception.GetStatus();
    }

    co_return status;
}
// Changes a single state provider's replica role, converting a thrown
// ktl::Exception into a traced NTSTATUS.
Awaitable<NTSTATUS> ApiDispatcher::ChangeRoleAsync(
    __in Metadata const & metadata,
    __in FABRIC_REPLICA_ROLE role,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_SUCCESS;

    try
    {
        co_await metadata.StateProvider->ChangeRoleAsync(role, cancellationToken);
    }
    catch (Exception const & exception)
    {
        StateManagerEventSource::Events->ISP2_ChangeRoleAsync_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            role,
            exception.GetStatus());

        status = exception.GetStatus();
    }

    co_return status;
}
// Closes a given state provider. On failure, FailureAction::AbortStateProvider
// falls back to Abort and reports success (the provider is considered handled);
// any other action propagates the failing status to the caller.
// A non-ktl exception from CloseAsync is a contract violation and fails fast.
Awaitable<NTSTATUS> ApiDispatcher::CloseAsync(
    __in Metadata const & metadata,
    __in FailureAction failureAction,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_SUCCESS;

    try
    {
        co_await metadata.StateProvider->CloseAsync(cancellationToken);
    }
    catch (Exception const & exception)
    {
        StateManagerEventSource::Events->ISP2_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            CloseAsync_FunctionName,
            exception.GetStatus());

        status = exception.GetStatus();
    }
    catch (...)
    {
        ASSERT_IFNOT(
            false,
            "{0}: SPid: {1} threw non-ktl::Exception in CloseAsync.",
            TraceId,
            metadata.StateProviderId);
    }

    if (NT_SUCCESS(status) == false)
    {
        if (failureAction == FailureAction::AbortStateProvider)
        {
            Abort(metadata);
        }
        else
        {
            co_return status;
        }
    }

    co_return STATUS_SUCCESS;
}
// Aborts a single state provider. Abort must not throw: a ktl::Exception
// escaping here is a state-provider contract violation and fails fast.
void ApiDispatcher::Abort(
    __in Metadata const & metadata) noexcept
{
    try
    {
        metadata.StateProvider->Abort();
    }
    catch (Exception const & e)
    {
        ASSERT_IFNOT(
            false,
            "{0}: ISP2_Abort threw ktl::Exception. SPid: {1}. Error Code: {2}",
            TraceId,
            metadata.StateProviderId,
            e.GetStatus());
    }

    return;
}
// Recovers a single state provider's checkpoint, converting a thrown
// ktl::Exception into a traced NTSTATUS.
Awaitable<NTSTATUS> ApiDispatcher::RecoverCheckpointAsync(
    __in Metadata const & metadata,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    try
    {
        co_await metadata.StateProvider->RecoverCheckpointAsync(cancellationToken);
    }
    catch (Exception const & exception)
    {
        StateManagerEventSource::Events->ISP2_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            RecoverCheckpointAsync_FunctionName,
            exception.GetStatus());

        co_return exception.GetStatus();
    }

    co_return STATUS_SUCCESS;
}
// Notifies a state provider that the copy (set-current-state) sequence is
// starting, converting a thrown ktl::Exception into a traced NTSTATUS.
// Consistency fix: take the KShared$ApiEntry keep-alive like every other
// coroutine in this dispatcher so the object cannot be destructed while the
// awaitable is suspended across the co_await.
ktl::Awaitable<NTSTATUS> ApiDispatcher::BeginSettingCurrentStateAsync(
    __in Metadata const & metadata) noexcept
{
    KShared$ApiEntry();

    try
    {
        co_await metadata.StateProvider->BeginSettingCurrentStateAsync();
    }
    catch (Exception const & exception)
    {
        StateManagerEventSource::Events->ISP2_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            BeginSettingCurrentState_FunctionName,
            exception.GetStatus());

        co_return exception.GetStatus();
    }

    co_return STATUS_SUCCESS;
}
// Applies one copy-state record to a state provider, converting a thrown
// ktl::Exception into a traced NTSTATUS (trace includes record number and
// buffer count for diagnosis).
Awaitable<NTSTATUS> ApiDispatcher::SetCurrentStateAsync(
    __in Metadata const & metadata,
    __in LONG64 stateRecordNumber,
    __in OperationData const & data,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    try
    {
        co_await metadata.StateProvider->SetCurrentStateAsync(stateRecordNumber, data, cancellationToken);
    }
    catch (Exception const & exception)
    {
        StateManagerEventSource::Events->ISP2_SetCurrentStateAsync_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            stateRecordNumber,
            data.BufferCount,
            exception.GetStatus());

        co_return exception.GetStatus();
    }

    co_return STATUS_SUCCESS;
}
// Notifies a state provider that the copy (set-current-state) sequence has
// finished, converting a thrown ktl::Exception into a traced NTSTATUS.
Awaitable<NTSTATUS> ApiDispatcher::EndSettingCurrentStateAsync(
    __in Metadata const & metadata,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    try
    {
        co_await metadata.StateProvider->EndSettingCurrentStateAsync(cancellationToken);
    }
    catch (Exception const & exception)
    {
        StateManagerEventSource::Events->ISP2_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            EndSettingCurrentStateAsync_FunctionName,
            exception.GetStatus());

        co_return exception.GetStatus();
    }

    co_return STATUS_SUCCESS;
}
// Opens all given state providers in parallel and waits for every awaitable.
// On any failure, the providers that did open are aborted so no provider is
// left open after a failed bulk open.
// Cleanup: removed the unused `sharedException` local and made the array
// allocation assert report the array's own status rather than the stale
// STATUS_UNSUCCESSFUL initializer.
Awaitable<NTSTATUS> ApiDispatcher::OpenAsync(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_UNSUCCESSFUL;

    KArray<Awaitable<NTSTATUS>> awaitableArray(GetThisAllocator(), metadataArray.Count());
    ASSERT_IFNOT(NT_SUCCESS(awaitableArray.Status()), "{0}: Failed to create KArray. Status: {1}", TraceId, awaitableArray.Status());

    for (Metadata::CSPtr metadata : metadataArray)
    {
        // Assumption: Below code assumes that if the exception is not a ktl::exception, the process will go down.
        status = awaitableArray.Append(OpenAsync(*metadata, cancellationToken));
        ASSERT_IFNOT(
            NT_SUCCESS(status),
            "{0}: Failed to append with code {1}. Array is correctly sized. This is not expected",
            TraceId,
            status);
    }

    // Even if one of the sync paths of OpenAsync failed, we need to ensure that all existing Awaitables are co_awaited.
    // We expect the KThreadPool and scheduler to ensure the optimal number of Awaitables execute asynchronously.
    status = co_await Utilities::TaskUtilities<NTSTATUS>::WhenAll_NoException(awaitableArray);
    if (NT_SUCCESS(status) == false)
    {
        ASSERT_IFNOT(
            awaitableArray.Count() <= metadataArray.Count(),
            "{0}: AwaitableArray count {1} <= metadataArray count {2}",
            TraceId,
            awaitableArray.Count(),
            metadataArray.Count());

        // Clean opened state providers.
        // #11908301: As an additional feature we can close the opened state providers.
        for (ULONG index = 0; index < awaitableArray.Count(); index++)
        {
            NTSTATUS awaitableStatus = co_await awaitableArray[index];
            if (NT_SUCCESS(awaitableStatus))
            {
                Abort(*metadataArray[index]);
            }
        }
    }

    co_return status;
}
// Changes the role of all given state providers in parallel and waits for
// every awaitable, returning the combined status.
// Cleanup: removed the unused `sharedException` local, fixed the comment
// copy-pasted from OpenAsync, and made the array allocation assert report the
// array's own status rather than the stale STATUS_UNSUCCESSFUL initializer.
Awaitable<NTSTATUS> ApiDispatcher::ChangeRoleAsync(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in FABRIC_REPLICA_ROLE role,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_UNSUCCESSFUL;

    KArray<Awaitable<NTSTATUS>> awaitableArray(GetThisAllocator(), metadataArray.Count());
    ASSERT_IFNOT(NT_SUCCESS(awaitableArray.Status()), "{0}: Failed to create KArray. Status: {1}", TraceId, awaitableArray.Status());

    for (Metadata::CSPtr metadata : metadataArray)
    {
        // Assumption: Below code assumes that if the exception is not a ktl::exception, the process will go down.
        status = awaitableArray.Append(ChangeRoleAsync(*metadata, role, cancellationToken));
        ASSERT_IFNOT(
            NT_SUCCESS(status),
            "{0}: Failed to append with code {1}. Array is correctly sized. This is not expected",
            TraceId,
            status);
    }

    // Even if one of the sync paths of ChangeRoleAsync failed, we need to ensure that all existing Awaitables are co_awaited.
    // We expect the KThreadPool and scheduler to ensure the optimal number of Awaitables execute asynchronously.
    status = co_await Utilities::TaskUtilities<NTSTATUS>::WhenAll_NoException(awaitableArray);
    co_return status;
}
// Closes all given state providers in parallel. Only
// FailureAction::AbortStateProvider is supported, so the aggregate close
// must succeed (failed providers are aborted inside the per-provider call);
// the asserts enforce that invariant.
Awaitable<NTSTATUS> ApiDispatcher::CloseAsync(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in FailureAction failureAction,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_UNSUCCESSFUL;

    // Note: Since we only support one action, we could have chosen not to pass in a failure action.
    // I have it there to make it explicit and easily readable.
    ASSERT_IFNOT(
        failureAction == FailureAction::AbortStateProvider,
        "{0}: ApiDispatcher::CloseAsync only supports AbortStateProvider. FailureAction: {1}",
        TraceId,
        static_cast<int>(failureAction));

    KArray<Awaitable<NTSTATUS>> awaitableArray(GetThisAllocator(), metadataArray.Count());
    ASSERT_IFNOT(NT_SUCCESS(awaitableArray.Status()), "{0}: Failed to create KArray. Status: {1}", TraceId, status);

    for (Metadata::CSPtr metadata : metadataArray)
    {
        Awaitable<NTSTATUS> closeAwaitable = CloseAsync(*metadata, failureAction, cancellationToken);
        status = awaitableArray.Append(Ktl::Move(closeAwaitable));
        ASSERT_IFNOT(
            NT_SUCCESS(status),
            "{0}: Failed to append with code {1}. Array is correctly sized. This is not expected",
            TraceId,
            status);
    }

    // Even if one of the sync paths of CloseAsync failed, we need to ensure that all existing awaitables are co_awaited.
    // We expect the KThreadPool and scheduler to ensure the optimal number of awaitables execute asynchronously.
    status = co_await Utilities::TaskUtilities<NTSTATUS>::WhenAll_NoException(awaitableArray);
    ASSERT_IFNOT(
        NT_SUCCESS(status),
        "{0} ApiDispatcher::CloseAsync failed even though FailureAction is AbortStateProvider. Status: {1}",
        TraceId,
        status);

#ifdef DBG
    // Debug-only: verify every individual close succeeded, not just the aggregate.
    for (ULONG index = 0; index < awaitableArray.Count(); index++)
    {
        NTSTATUS awaitableStatus = co_await awaitableArray[index];
        ASSERT_IFNOT(
            NT_SUCCESS(awaitableStatus),
            "{0} ApiDispatcher::CloseAsync failed even though FailureAction is AbortStateProvider. Status: {1}",
            TraceId,
            status);
    }
#endif

    co_return status;
}
// Aborts `count` state providers starting at `startingIndex`. Abort must not
// throw; any escaping exception is a contract violation and fails fast.
void ApiDispatcher::Abort(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in ULONG startingIndex,
    __in ULONG count) noexcept
{
    KShared$ApiEntry();

    for (ULONG index = startingIndex; index < startingIndex + count; index++)
    {
        Metadata::CSPtr current = metadataArray[index];

        try
        {
            // Rule: Abort cannot throw.
            Abort(*current);
        }
        catch (Exception & e)
        {
            ASSERT_IFNOT(false,
                "{0}: State Provider {1} threw ktl::Exception from Abort. Error Code: {2}.",
                TraceId,
                current->StateProviderId,
                e.GetStatus());
        }
        catch (...)
        {
            ASSERT_IFNOT(false,
                "{0}: State Provider {1} threw non-ktl::Exception from Abort.",
                TraceId,
                current->StateProviderId);
        }
    }

    return;
}
// Calls EndSettingCurrentStateAsync on every state provider in parallel and
// waits for all awaitables, returning the combined status.
// Consistency fix: use TaskUtilities<NTSTATUS> like the seven sibling
// WhenAll_NoException call sites in this file — the awaitables being joined
// are Awaitable<NTSTATUS>, not Awaitable<void>. Also made the allocation
// assert report the array's own status.
Awaitable<NTSTATUS> ApiDispatcher::EndSettingCurrentStateAsync(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_UNSUCCESSFUL;

    KArray<Awaitable<NTSTATUS>> awaitableArray(GetThisAllocator(), metadataArray.Count());
    ASSERT_IFNOT(NT_SUCCESS(awaitableArray.Status()), "{0}: Failed to create KArray. Status: {1}", TraceId, awaitableArray.Status());

    for (Metadata::CSPtr metadata : metadataArray)
    {
        // Assumption: Below code assumes that if the exception is not a ktl::exception, the process will go down.
        status = awaitableArray.Append(EndSettingCurrentStateAsync(*metadata, cancellationToken));
        ASSERT_IFNOT(
            NT_SUCCESS(status),
            "{0}: Failed to append with code {1}. Array is correctly sized. This is not expected",
            TraceId,
            status);
    }

    // We expect the KThreadPool and scheduler to ensure the optimal number of Awaitables execute asynchronously.
    status = co_await Utilities::TaskUtilities<NTSTATUS>::WhenAll_NoException(awaitableArray);
    co_return status;
}
// Calls PrepareCheckpoint sequentially on every state provider, stopping at
// the first failure (fail fast: remaining providers are not prepared).
NTSTATUS ApiDispatcher::PrepareCheckpoint(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in FABRIC_SEQUENCE_NUMBER checkpointLSN) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_SUCCESS;

    for (Metadata::CSPtr current : metadataArray)
    {
        // Per-provider overload traces the failure before returning it.
        status = PrepareCheckpoint(*current, checkpointLSN);
        if (NT_SUCCESS(status) == false)
        {
            return status;
        }
    }

    return status;
}
// Performs the checkpoint on every state provider in parallel and waits for
// all awaitables, returning the combined status.
Awaitable<NTSTATUS> ApiDispatcher::PerformCheckpointAsync(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_UNSUCCESSFUL;

    KArray<Awaitable<NTSTATUS>> awaitableArray(GetThisAllocator(), metadataArray.Count());
    ASSERT_IFNOT(NT_SUCCESS(awaitableArray.Status()), "{0}: Failed to create KArray. Status: {1}", TraceId, status);

    for (Metadata::CSPtr metadata : metadataArray)
    {
        Awaitable<NTSTATUS> performCheckpointAwaitable = PerformCheckpointAsync(*metadata, cancellationToken);
        status = awaitableArray.Append(Ktl::Move(performCheckpointAwaitable));
        ASSERT_IFNOT(
            NT_SUCCESS(status),
            "{0}: Failed to append with code {1}. Array is correctly sized. This is not expected",
            TraceId,
            status);
    }

    // We expect the KThreadPool and scheduler to ensure the optimal number of awaitables execute asynchronously.
    status = co_await Utilities::TaskUtilities<NTSTATUS>::WhenAll_NoException(awaitableArray);
    co_return status;
}
// Completes the checkpoint on every state provider in parallel and waits for
// all awaitables, returning the combined status.
Awaitable<NTSTATUS> ApiDispatcher::CompleteCheckpointAsync(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_UNSUCCESSFUL;

    KArray<Awaitable<NTSTATUS>> awaitableArray(GetThisAllocator(), metadataArray.Count());
    ASSERT_IFNOT(NT_SUCCESS(awaitableArray.Status()), "{0}: Failed to create KArray. Status: {1}", TraceId, status);

    for (Metadata::CSPtr metadata : metadataArray)
    {
        Awaitable<NTSTATUS> completeAwaitable = CompleteCheckpointAsync(*metadata, cancellationToken);
        status = awaitableArray.Append(Ktl::Move(completeAwaitable));
        ASSERT_IFNOT(
            NT_SUCCESS(status),
            "{0}: Failed to append with code {1}. Array is correctly sized. This is not expected",
            TraceId,
            status);
    }

    // We expect the KThreadPool and scheduler to ensure the optimal number of awaitables execute asynchronously.
    status = co_await Utilities::TaskUtilities<NTSTATUS>::WhenAll_NoException(awaitableArray);
    co_return status;
}
// Recovers the checkpoint on every state provider in parallel and waits for
// all awaitables, returning the combined status.
Awaitable<NTSTATUS> ApiDispatcher::RecoverCheckpointAsync(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_UNSUCCESSFUL;

    KArray<Awaitable<NTSTATUS>> awaitableArray(GetThisAllocator(), metadataArray.Count());
    ASSERT_IFNOT(NT_SUCCESS(awaitableArray.Status()), "{0}: Failed to create KArray. Status: {1}", TraceId, status);

    for (Metadata::CSPtr metadata : metadataArray)
    {
        Awaitable<NTSTATUS> recoverAwaitable = RecoverCheckpointAsync(*metadata, cancellationToken);
        status = awaitableArray.Append(Ktl::Move(recoverAwaitable));
        ASSERT_IFNOT(
            NT_SUCCESS(status),
            "{0}: Failed to append with code {1}. Array is correctly sized. This is not expected",
            TraceId,
            status);
    }

    // We expect the KThreadPool and scheduler to ensure the optimal number of awaitables execute asynchronously.
    status = co_await Utilities::TaskUtilities<NTSTATUS>::WhenAll_NoException(awaitableArray);
    co_return status;
}
// Restores every state provider's checkpoint from its paired backup
// directory (arrays are index-aligned, enforced by the count assert),
// running all restores in parallel and returning the combined status.
Awaitable<NTSTATUS> ApiDispatcher::RestoreCheckpointAsync(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in KArray<KString::CSPtr> const & backupDirectoryArray,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_UNSUCCESSFUL;

    ASSERT_IFNOT(
        metadataArray.Count() == backupDirectoryArray.Count(),
        "{0}: Input array counts do not match. Metadata Count: {1} Directory Count: {2}",
        TraceId,
        metadataArray.Count(),
        backupDirectoryArray.Count());

    KArray<Awaitable<NTSTATUS>> awaitableArray(GetThisAllocator(), metadataArray.Count());
    ASSERT_IFNOT(NT_SUCCESS(awaitableArray.Status()), "{0}: Failed to create KArray. Status: {1}", TraceId, status);

    for (ULONG i = 0; i < metadataArray.Count(); i++)
    {
        Awaitable<NTSTATUS> recoverAwaitable = RestoreCheckpointAsync(*metadataArray[i], *backupDirectoryArray[i], cancellationToken);
        status = awaitableArray.Append(Ktl::Move(recoverAwaitable));
        ASSERT_IFNOT(
            NT_SUCCESS(status),
            "{0}: Failed to append with code {1}. Array is correctly sized. This is not expected",
            TraceId,
            status);
    }

    // We expect the KThreadPool and scheduler to ensure the optimal number of Awaitables execute asynchronously.
    status = co_await Utilities::TaskUtilities<NTSTATUS>::WhenAll_NoException(awaitableArray);
    co_return status;
}
// Removes the persisted state of every state provider in parallel and waits
// for all awaitables, returning the combined status.
Awaitable<NTSTATUS> ApiDispatcher::RemoveStateAsync(
    __in KArray<Metadata::CSPtr> const & metadataArray,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_UNSUCCESSFUL;

    KArray<Awaitable<NTSTATUS>> awaitableArray(GetThisAllocator(), metadataArray.Count());
    ASSERT_IFNOT(NT_SUCCESS(awaitableArray.Status()), "{0}: Failed to create KArray. Status: {1}", TraceId, status);

    for (Metadata::CSPtr metadata : metadataArray)
    {
        Awaitable<NTSTATUS> recoverAwaitable = RemoveStateAsync(*metadata, cancellationToken);
        status = awaitableArray.Append(Ktl::Move(recoverAwaitable));
        ASSERT_IFNOT(
            NT_SUCCESS(status),
            "{0}: Failed to append with code {1}. Array is correctly sized. This is not expected",
            TraceId,
            status);
    }

    // We expect the KThreadPool and scheduler to ensure the optimal number of awaitables execute asynchronously.
    status = co_await Utilities::TaskUtilities<NTSTATUS>::WhenAll_NoException(awaitableArray);
    co_return status;
}
// Prepares a state provider for removal under the given transaction,
// converting a thrown ktl::Exception into a traced NTSTATUS. A non-ktl
// exception is a contract violation and fails fast.
ktl::Awaitable<NTSTATUS> ApiDispatcher::PrepareForRemoveAsync(
    __in Metadata const & metadata,
    __in TxnReplicator::Transaction const & transaction,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_SUCCESS;

    try
    {
        co_await metadata.StateProvider->PrepareForRemoveAsync(transaction, cancellationToken);
    }
    catch (Exception const & exception)
    {
        StateManagerEventSource::Events->ISP2_PrepareForRemoveAsync_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            transaction.TransactionId,
            exception.GetStatus());

        status = exception.GetStatus();
    }
    catch (...)
    {
        ASSERT_IFNOT(
            false,
            "{0}: SPid: {1} threw non-ktl::Exception in PrepareForRemoveAsync.",
            TraceId,
            metadata.StateProviderId);
    }

    co_return status;
}
// Removes a single state provider's persisted state, converting a thrown
// ktl::Exception into a traced NTSTATUS. A non-ktl exception is a contract
// violation and fails fast.
ktl::Awaitable<NTSTATUS> ApiDispatcher::RemoveStateAsync(
    __in Metadata const & metadata,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    NTSTATUS status = STATUS_SUCCESS;

    try
    {
        co_await metadata.StateProvider->RemoveStateAsync(cancellationToken);
    }
    catch (Exception const & exception)
    {
        StateManagerEventSource::Events->ISP2_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            RemoveStateAsync_FunctionName,
            exception.GetStatus());

        status = exception.GetStatus();
    }
    catch (...)
    {
        ASSERT_IFNOT(
            false,
            "{0}: SPid: {1} threw non-ktl::Exception in {2}.",
            TraceId,
            metadata.StateProviderId,
            RemoveStateAsync_FunctionName);
    }

    co_return status;
}
// Calls PrepareCheckpoint on a single state provider, converting a thrown
// ktl::Exception into a traced NTSTATUS.
// Cleanup: removed the stray double semicolon after STATUS_SUCCESS.
NTSTATUS ApiDispatcher::PrepareCheckpoint(
    __in Metadata const & metadata,
    __in FABRIC_SEQUENCE_NUMBER checkpointLSN) noexcept
{
    try
    {
        metadata.StateProvider->PrepareCheckpoint(checkpointLSN);
    }
    catch (Exception const & e)
    {
        StateManagerEventSource::Events->ISP2_PrepareCheckpoint_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            checkpointLSN,
            e.GetStatus());

        return e.GetStatus();
    }

    return STATUS_SUCCESS;
}
// Performs the checkpoint on a single state provider, converting a thrown
// ktl::Exception into a traced NTSTATUS.
// Cleanup: removed the stray double semicolon after STATUS_SUCCESS.
Awaitable<NTSTATUS> ApiDispatcher::PerformCheckpointAsync(
    __in Metadata const & metadata,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    try
    {
        co_await metadata.StateProvider->PerformCheckpointAsync(cancellationToken);
    }
    catch (Exception const & e)
    {
        StateManagerEventSource::Events->ISP2_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            PerformCheckpointAsync_FunctionName,
            e.GetStatus());

        co_return e.GetStatus();
    }

    co_return STATUS_SUCCESS;
}
// Completes the checkpoint on a single state provider, converting a thrown
// ktl::Exception into a traced NTSTATUS.
// Cleanup: catch by const reference like every sibling handler in this file,
// and removed the stray double semicolon after STATUS_SUCCESS.
Awaitable<NTSTATUS> ApiDispatcher::CompleteCheckpointAsync(
    __in Metadata const & metadata,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    try
    {
        co_await metadata.StateProvider->CompleteCheckpointAsync(cancellationToken);
    }
    catch (Exception const & e)
    {
        StateManagerEventSource::Events->ISP2_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            CompleteCheckpointAsync_FunctionName,
            e.GetStatus());

        co_return e.GetStatus();
    }

    co_return STATUS_SUCCESS;
}
// Restores a single state provider's checkpoint from the given backup
// directory, converting a thrown ktl::Exception into a traced NTSTATUS.
Awaitable<NTSTATUS> ApiDispatcher::RestoreCheckpointAsync(
    __in Metadata const & metadata,
    __in KString const & backupDirectory,
    __in ktl::CancellationToken const & cancellationToken) noexcept
{
    KShared$ApiEntry();

    try
    {
        co_await metadata.StateProvider->RestoreCheckpointAsync(backupDirectory, cancellationToken);
    }
    catch (Exception const & e)
    {
        StateManagerEventSource::Events->ISP2_ApiError(
            TracePartitionId,
            ReplicaId,
            metadata.StateProviderId,
            RestoreCheckpointAsync_FunctionName,
            e.GetStatus());

        co_return e.GetStatus();
    }

    co_return STATUS_SUCCESS;
}
// Constructor: stores the trace identity and keeps a shared reference to the
// user-supplied state provider factory. NOFAIL: construction cannot fail.
NOFAIL ApiDispatcher::ApiDispatcher(
    __in PartitionedReplicaId const & traceId,
    __in IStateProvider2Factory & stateProviderFactory) noexcept
    : KObject()
    , KShared()
    , PartitionedReplicaTraceComponent(traceId)
    , stateProviderFactorySPtr_(&stateProviderFactory)
{
}
// Destructor: smart pointers release the factory reference automatically.
ApiDispatcher::~ApiDispatcher()
{
}
| {
"pile_set_name": "Github"
} |
### 【友盟+】统计、推送、分享三合一组件化SDK集成Demo
---
* 本Demo工程仅适用于【友盟+】组件化版本SDK,即包含基础组件库umeng-common-x.x.x.jar的SDK版本。
* 集成v2.0.0(umeng-common-2.0.0.jar)及更高版本基础组件库的客户,请参照分支"master"代码集成。
* 集成v2.0.0以下(不包含2.0.0,如:umeng-common-1.5.4.jar)版本基础组件库的客户,请参照分支"common_1.5.4"代码集成。
* 集成v9.0.0及以上(包含9.0.0)版本统计、游戏统计的客户,请参照分支"master"代码集成。
* 集成9.2.x版本统计、游戏统计、推送、分享 SDK的客户,请参照分支"beta"代码集成。
| {
"pile_set_name": "Github"
} |
// Fixture class enumerating Java literal forms: decimal/hex/octal integers,
// long, decimal and hexadecimal floating point, float suffix, boolean, null.
class C001_NumberLiterals {
// Instance initializer block: each declaration exercises one literal syntax.
{
int x = 0; // decimal int
int y = 1000000;
long z = 100000000000L; // long literal (L suffix)
int a = 0x1fa1; // hexadecimal
int b = 0100; // octal (leading zero) == 64 decimal
double c = 100.0;
double d = 0.0;
double e = 123e10; // decimal exponent
double f = 123e-10; // negative exponent
double g = 123e+10; // explicit positive exponent
double h = 0x1p1; // hexadecimal float with binary exponent == 2.0
float i = 123F; // float suffix
float j = 0xap-1F; // hex float: 0xa * 2^-1 == 5.0f
boolean b1 = true;
boolean b2 = false;
Object o = null; // null literal
}
} | {
"pile_set_name": "Github"
} |
/*
* The MIT License
*
* Copyright (c) 2020, Mahmoud Ben Hassine ([email protected])
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.jeasy.batch.core.converter;
import org.junit.Before;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Unit tests for {@link ByteTypeConverter}: string input converted to a Byte.
 */
public class ByteTypeConverterTest extends BaseConverterTest<Byte> {

    @Before
    public void setUp() {
        converter = new ByteTypeConverter();
    }

    @Test
    public void whenInputIsLegalValue_ThenShouldReturnByte() {
        // Fix: the method was named "...ThenShouldReturnBigInteger" — a
        // copy-paste slip from another converter test; this converter
        // produces Byte.  Both integral and decimal forms convert to 5.
        assertThat(converter.convert("5")).isEqualTo((byte) 5);
        assertThat(converter.convert("5.0")).isEqualTo((byte) 5);
    }
}
| {
"pile_set_name": "Github"
} |
// Exposes the combination of the sibling modules' exports.  The result of
// B + C depends on the operand types (numeric sum or string concatenation)
// — determined by sourceB/sourceC, which are not visible here.
var B = require("./sourceB");
var C = require("./sourceC");
module.exports = B + C; | {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<!--
Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/
-->
<html>
<head>
<meta charset="utf-8">
<title>CSS Test: various 'object-position' values on a fixed-size embed element, with a PNG image and 'object-fit:contain'.</title>
<link rel="author" title="Daniel Holbert" href="mailto:[email protected]">
<link rel="help" href="http://www.w3.org/TR/css3-images/#sizing">
<link rel="help" href="http://www.w3.org/TR/css3-images/#the-object-fit">
<link rel="help" href="http://www.w3.org/TR/css3-images/#the-object-position">
<link rel="match" href="object-position-png-002-ref.html">
<style type="text/css">
embed {
background: lightgray;
margin-right: 2px;
object-fit: contain;
float: left;
width: 20px;
height: 20px;
}
.op_x-7 { object-position: -7% 50% }
.op_x13 { object-position: 13% 50% }
.op_x23 { object-position: 23% 50% }
.op_x50 { object-position: 50% 50% }
.op_x75 { object-position: 75% 50% }
.op_x88 { object-position: 88% 50% }
.op_x111 { object-position: 111% 50% }
</style>
</head>
<body>
<embed src="support/colors-8x16.png" class="op_x-7">
<embed src="support/colors-8x16.png" class="op_x13">
<embed src="support/colors-8x16.png" class="op_x23">
<embed src="support/colors-8x16.png" class="op_x50">
<embed src="support/colors-8x16.png" class="op_x75">
<embed src="support/colors-8x16.png" class="op_x88">
<embed src="support/colors-8x16.png" class="op_x111">
</body>
</html>
| {
"pile_set_name": "Github"
} |
{
"desc": "Dell 5520",
"control": 1,
"data": 0
}
| {
"pile_set_name": "Github"
} |
function kernel = traceKernelKPCASystemsSqrtHist(sysParams1, sysParams2, lambda)
% (c) Rizwan Chaudhry - JHU Vision Lab
% Trace kernel between two kernel-PCA dynamical systems whose observations
% are histograms; the square root maps histogram inner products to the
% Bhattacharyya affinity.  lambda is the discount factor of the trace sum.
% NOTE(review): assumes each sysParams* carries fields Yoriginal, alpha, A,
% Q, X produced by the upstream system-identification step -- confirm there.
Y_12 = [sysParams1.Yoriginal, sysParams2.Yoriginal];
% Evaluate the kernel on these vectors;
Y_12 = sqrt(Y_12);
kernel_12 = Y_12'*Y_12;
N1 = size(sysParams1.Yoriginal,2);
N2 = size(sysParams2.Yoriginal,2);
% NOTE(review): e1/e2 are computed but never used below.
e1 = ones(1,N1)';
e2 = ones(1,N2)';
% Center the KPCA coefficient matrices (remove per-column mean contribution).
alphaPrime1 = sysParams1.alpha-1/N1*(repmat(sum(sysParams1.alpha,1),size(sysParams1.alpha,1),1));
alphaPrime2 = sysParams2.alpha-1/N2*(repmat(sum(sysParams2.alpha,1),size(sysParams2.alpha,1),1));
% Cross term through the off-diagonal block of the joint kernel matrix.
F_12 = alphaPrime1'*kernel_12(1:N1,N1+1:end)*alphaPrime2;
% Discrete Lyapunov equation gives the closed form of the discounted sum.
M_12 = dlyap(lambda*sysParams1.A',sysParams2.A,F_12);
M_12 = real(M_12);
% Cholesky factors of the process-noise covariances.
B1 = real(chol(sysParams1.Q,'lower'));
B2 = real(chol(sysParams2.Q,'lower'));
% Kernel = initial-state term + discounted noise term.
kernel = sysParams1.X(:,1)'*M_12*sysParams2.X(:,1) + lambda/(1-lambda)*trace(B1'*M_12*B2); | {
"pile_set_name": "Github"
} |
object DatePickerForm: TDatePickerForm
Left = 0
Top = 0
ClientHeight = 567
ClientWidth = 384
FormFactor.Width = 1920
FormFactor.Height = 1022
FormFactor.Devices = [dkDesktop]
DesignerMobile = True
DesignerWidth = 384
DesignerHeight = 592
DesignerDeviceName = 'Google Nexus 4'
DesignerOrientation = 0
DesignerOSVersion = ''
object ListBox1: TListBox
Align = alTop
Height = 93.000000000000000000
Margins.Top = 10.000000000000000000
Position.Y = 58.000000000000000000
ShowScrollBars = False
StyleLookup = 'transparentlistboxstyle'
TabOrder = 0
Width = 384.000000000000000000
DefaultItemStyles.ItemStyle = 'listboxitemnodetail'
DefaultItemStyles.GroupHeaderStyle = ''
DefaultItemStyles.GroupFooterStyle = ''
GroupingKind = gsGrouped
object ListBoxItem5: TListBoxItem
Height = 44.000000000000000000
StyleLookup = 'listboxitemnodetail'
Text = 'Pick a Time'
Width = 344.000000000000000000
object TimeEdit1: TTimeEdit
Time = 0.415277777777777800
OnTimeChanged = TimeEdit1TimeChanged
Align = alRight
Height = 32.000000000000000000
Margins.Right = 5.000000000000000000
Position.X = 242.000000000000000000
Position.Y = 6.000000000000000000
TabOrder = 0
Width = 97.000000000000000000
end
end
object ListBoxItem6: TListBoxItem
Height = 44.000000000000000000
Position.Y = 44.000000000000000000
StyleLookup = 'listboxitemnodetail'
Text = 'Picked'
Width = 344.000000000000000000
end
end
object ToolBar1: TToolBar
Height = 48.000000000000000000
TabOrder = 1
Width = 384.000000000000000000
object Label1: TLabel
Align = alClient
Height = 48.000000000000000000
StyleLookup = 'toollabel'
Text = 'Time Picker'
TextAlign = taCenter
Width = 384.000000000000000000
Trimming = ttCharacter
end
end
end
| {
"pile_set_name": "Github"
} |
坂井里美最新番号
【GAH-100】萌えコス8時間 2
【GAH-097】絶対的最強美少女30人8時間 3
【GAH-087】俺たちのいいなりマネージャー8時間
【GNE-177】俺達のいいなり女子マネージャー 3
【GNE-165】最強のAV女優お貸しします。20人4時間 4
【ABP-468】女子マネージャーは、僕達の性処理ペット。 018 坂井里美
【CHN-102】新・絶対的美少女、お貸しします。 ACT.56 坂井里美
【ABP-442】坂井里美の、いっぱいコスって萌えてイこう!
【BGN-033】新人 プレステージ専属デビュー 坂井里美</a>2016-01-08プレステージ$$$beginning173分钟 | {
"pile_set_name": "Github"
} |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/machinelearning/model/DeleteTagsResult.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/UnreferencedParam.h>
#include <utility>
using namespace Aws::MachineLearning::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
using namespace Aws;
// Default result: no resource id, resource type not set.
DeleteTagsResult::DeleteTagsResult() :
m_resourceType(TaggableResourceType::NOT_SET)
{
}
// Deserializing constructor: initializes defaults, then delegates the JSON
// payload parsing to operator=.
DeleteTagsResult::DeleteTagsResult(const Aws::AmazonWebServiceResult<JsonValue>& result) :
m_resourceType(TaggableResourceType::NOT_SET)
{
*this = result;
}
// Populates this result from the JSON payload of the service response.
// Fields absent from the payload keep their current values.
DeleteTagsResult& DeleteTagsResult::operator =(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
JsonView jsonValue = result.GetPayload().View();
if(jsonValue.ValueExists("ResourceId"))
{
m_resourceId = jsonValue.GetString("ResourceId");
}
if(jsonValue.ValueExists("ResourceType"))
{
// Map the string form onto the TaggableResourceType enum.
m_resourceType = TaggableResourceTypeMapper::GetTaggableResourceTypeForName(jsonValue.GetString("ResourceType"));
}
return *this;
}
| {
"pile_set_name": "Github"
} |
# determine path to here, package name
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import os
def rreplace(s, old, new, occurrence):
    """Return `s` with its last `occurrence` occurrences of `old` replaced by `new`."""
    pieces = s.rsplit(old, occurrence)
    return new.join(pieces)
# Determine the build location and select the operating mode: under Madagascar
# the SCons Import() of the environment succeeds; in a standalone checkout it
# raises, and the except branch configures autonomous-mode variables instead.
buildpath = (os.popen('pwd').read()).strip('\n')
buildname = os.path.basename(buildpath)
try: # madagascar version
    Import('env root pkgdir libdir bindir')
    # replace the last occurrence
    thispath = rreplace(buildpath,'build/','',1)
    thisname = rreplace(buildname,'build/','',1)
    testflag = 'no'
    docflag = 'no'
except: # autonomous version
    # import compiler variables, testflag
    Import('vars', 'testflag', 'docflag')
    thispath = buildpath
    thisname = buildname
    root = None
#print 'thispath = '+thispath
#print 'thisname = '+thisname
def getlist(filename):
    """Read `filename` (resolved against the module-level `thispath`) and
    return its contents as a colon-separated list; [] when the file is absent.
    """
    pathfile = os.path.join(thispath, filename)
    if not os.path.exists(pathfile):
        return []
    # `with` guarantees the handle is closed even if the read raises
    # (the original leaked the descriptor on error).
    with open(pathfile, 'r') as f:
        return (f.read().strip('\n')).split(':')
# -- Dependency discovery and SCons export -----------------------------------
# Builds the include-path, library-path and library lists for this package,
# then exports them to the lib/main/test sub-SConscripts as appropriate.

# extract package dependencies from hclasspath file
deplist=getlist('hclasspath')
#print ['deplist = ']+deplist
# extract nonstandard external header dependencies from hincpath file
extincs=getlist('hincpath')
# extract nonstandard external library dependencies from hlibpath file
extlibs=getlist('hlibpath')
#print ['extlibs = ']+extlibs
# extract nonstandard source search path from hsrcpath file, if it exists,
# else set up standard search in src and/or lib if either/both exist
srclist=getlist('hsrcpath')
if not srclist:
    if os.path.exists(os.path.join(thispath,'src')):
        srclist.append('src')
    if os.path.exists(os.path.join(thispath,'lib')):
        srclist.append('lib')
#print ['srclist = ']+srclist
# Accumulators: include dirs, library search dirs, libraries to link.
cpplist = []
libdirlist = []
liblist = []
# start include list with this package
if os.path.exists(thispath+'/include'):
    cpplist.append(thispath+'/include')
# start library list with this package
if os.path.exists(thispath+'/lib'):
    libdirlist.append(buildpath+'/lib')
    liblist.append('lib'+thisname+'.a')
# add other standard packages - headers in "include", library in "lib" with title
# = lib[basename of package].a
for n in deplist:
    # if root undefined, then not in madagascar environment
    # Madagascar branch
    if root:
        testpath=os.path.abspath(os.path.expandvars(os.path.join(thispath,n)))
    else:
        testpath=os.path.abspath(os.path.expandvars(n))
    testincl=testpath+'/include'
    if os.path.exists(testincl):
        cpplist.append(testincl)
    testbuildpath=os.path.abspath(os.path.expandvars(os.path.join(buildpath,n)))
    testlibdir=testpath+'/lib'
    testlib='lib'+os.path.basename(testpath)+'.a'
    # print 'testing for '+testlibdir
    # only test for lib dir - lib may not yet be built!
    if os.path.exists(testlibdir):
        liblist.append(testlib)
        # Madagascar branch
        if root:
            libdirlist.append(testbuildpath + '/lib')
        # Standalone branch
        else:
            libdirlist.append(testpath + '/lib')
# add other possibly nonstandard sources of headers
# it is ALWAYS PRESUMED that header files live in final leaves
# of directory trees titled "include", so only path to parent dir
# should be included
for n in extincs:
    testpath=os.path.abspath(os.path.expandvars(n))
    testincl=testpath+'/include'
    if os.path.exists(testincl):
        cpplist.append(testincl)
# add other possibly nonstandard sources of libraries
# it CANNOT BE PRESUMED that library files live in any particular
# place, so full pathnames to external libraries must be given
for n in extlibs:
    libpath=os.path.abspath(os.path.expandvars(n))
    liblist.append(os.path.basename(libpath))
    libdirlist.append(os.path.dirname(libpath))
# finish with -lm - always!
liblist.append('-lm')
# determine base name of library - same as package name
libname = thisname
# test for lib - build library
if os.path.exists(thispath+'/lib'):
    # export environment and basename
    if root:
        Export('env root pkgdir libdir libname cpplist srclist')
    else:
        Export('vars libname cpplist srclist')
    # build in lib
    SConscript('lib/SConscript')
#test for main - build executables
if os.path.exists(thispath+'/main'):
    if root:
        Export('env root pkgdir bindir cpplist liblist libname libdirlist')
    else:
        Export('vars cpplist liblist libname libdirlist')
    SConscript('main/SConscript')
#test for test flag, test source directories - build executables, run tests
if testflag == 'yes':
    if os.path.exists('testsrc'):
        Export('vars', 'cpplist','liblist','libdirlist')
        SConscript('testsrc/SConscript')
    if os.path.exists('test'):
        Export('vars', 'cpplist','liblist','libdirlist')
        SConscript('test/SConscript')
#test for doc flag, attempt to build docs if it exists
if docflag == 'yes':
    if os.path.exists('doc'):
        os.system('cd doc; doxygen')
| {
"pile_set_name": "Github"
} |
// Load the core.dict module for its side effect (registers Dict on the
// internal core object), then re-export the Dict constructor from it.
require('../modules/core.dict');
module.exports = require('../modules/_core').Dict;
| {
"pile_set_name": "Github"
} |
/*
jQuery grab
Ported from Jin.js::gestures
Created by Jussi Kalliokoski
Licensed under MIT License.
*/
(function($){
    // Module-scoped aliases for event-type strings: shorter references and no
    // typos at the call sites below.  Several (gesture*/MozTouch*/wheel) are
    // declared but not used in this port.
    var extend = $.extend,
        mousedown = 'mousedown',
        mousemove = 'mousemove',
        mouseup = 'mouseup',
        touchstart = 'touchstart',
        touchmove = 'touchmove',
        touchend = 'touchend',
        touchcancel = 'touchcancel',
        gesturestart = 'gesturestart',
        gesturechange = 'gesturechange',
        gestureend = 'gestureend',
        gesturecancel = 'gesturecancel',
        MozTouchDown = 'MozTouchDown',
        MozTouchMove = 'MozTouchMove',
        MozTouchUp = 'MozTouchUp',
        mousewheel = 'mousewheel',
        DOMMouseScroll = 'DOMMouseScroll';
function unbind(elem, type, func){
var fnc, i;
for (i=0; i<bind._binds.length; i++){
if (bind._binds[i].elem === elem && bind._binds[i].type === type && bind._binds[i].func === func){
if (document.addEventListener){
elem.removeEventListener(type, bind._binds[i].fnc, false);
} else {
elem.detachEvent('on'+type, bind._binds[i].fnc);
}
bind._binds.splice(i--, 1);
}
}
}
function bind(elem, type, func, pass){
var fnc, i;
if (bind[type]){
return bind[type].bind(elem, type, func, pass);
}
fnc = function(e){
if (!e){ // Fix some ie bugs...
e = window.event;
}
if (!e.stopPropagation){
e.stopPropagation = function(){ this.cancelBubble = true; };
}
e.data = pass;
func.call(elem, e);
};
if (document.addEventListener){
elem.addEventListener(type, fnc, false);
} else {
elem.attachEvent('on' + type, fnc);
}
bind._binds.push({elem: elem, type: type, func: func, fnc: fnc});
}
function grab(elem, options)
{
var data = {
move: {x: 0, y: 0},
offset: {x: 0, y: 0},
position: {x: 0, y: 0},
start: {x: 0, y: 0},
affects: document.documentElement,
stopPropagation: false,
preventDefault: true,
touch: true // Implementation unfinished, and doesn't support multitouch
};
extend(data, options);
data.element = elem;
bind(elem, mousedown, mouseDown, data);
if (data.touch){
bind(elem, touchstart, touchStart, data);
}
}
function ungrab(elem){
unbind(elem, mousedown, mousedown);
}
function mouseDown(e){
e.data.position.x = e.pageX;
e.data.position.y = e.pageY;
e.data.start.x = e.pageX;
e.data.start.y = e.pageY;
e.data.event = e;
if (e.data.onstart && e.data.onstart.call(e.data.element, e.data)){
return;
}
if (e.preventDefault && e.data.preventDefault){
e.preventDefault();
}
if (e.stopPropagation && e.data.stopPropagation){
e.stopPropagation();
}
bind(e.data.affects, mousemove, mouseMove, e.data);
bind(e.data.affects, mouseup, mouseUp, e.data);
}
function mouseMove(e){
if (e.preventDefault && e.data.preventDefault){
e.preventDefault();
}
if (e.stopPropagation && e.data.preventDefault){
e.stopPropagation();
}
e.data.move.x = e.pageX - e.data.position.x;
e.data.move.y = e.pageY - e.data.position.y;
e.data.position.x = e.pageX;
e.data.position.y = e.pageY;
e.data.offset.x = e.pageX - e.data.start.x;
e.data.offset.y = e.pageY - e.data.start.y;
e.data.event = e;
if (e.data.onmove){
e.data.onmove.call(e.data.element, e.data);
}
}
function mouseUp(e){
if (e.preventDefault && e.data.preventDefault){
e.preventDefault();
}
if (e.stopPropagation && e.data.stopPropagation){
e.stopPropagation();
}
unbind(e.data.affects, mousemove, mouseMove);
unbind(e.data.affects, mouseup, mouseUp);
e.data.event = e;
if (e.data.onfinish){
e.data.onfinish.call(e.data.element, e.data);
}
}
function touchStart(e){
e.data.position.x = e.touches[0].pageX;
e.data.position.y = e.touches[0].pageY;
e.data.start.x = e.touches[0].pageX;
e.data.start.y = e.touches[0].pageY;
e.data.event = e;
if (e.data.onstart && e.data.onstart.call(e.data.element, e.data)){
return;
}
if (e.preventDefault && e.data.preventDefault){
e.preventDefault();
}
if (e.stopPropagation && e.data.stopPropagation){
e.stopPropagation();
}
bind(e.data.affects, touchmove, touchMove, e.data);
bind(e.data.affects, touchend, touchEnd, e.data);
}
function touchMove(e){
if (e.preventDefault && e.data.preventDefault){
e.preventDefault();
}
if (e.stopPropagation && e.data.stopPropagation){
e.stopPropagation();
}
e.data.move.x = e.touches[0].pageX - e.data.position.x;
e.data.move.y = e.touches[0].pageY - e.data.position.y;
e.data.position.x = e.touches[0].pageX;
e.data.position.y = e.touches[0].pageY;
e.data.offset.x = e.touches[0].pageX - e.data.start.x;
e.data.offset.y = e.touches[0].pageY - e.data.start.y;
e.data.event = e;
if (e.data.onmove){
e.data.onmove.call(e.data.elem, e.data);
}
}
function touchEnd(e){
if (e.preventDefault && e.data.preventDefault){
e.preventDefault();
}
if (e.stopPropagation && e.data.stopPropagation){
e.stopPropagation();
}
unbind(e.data.affects, touchmove, touchMove);
unbind(e.data.affects, touchend, touchEnd);
e.data.event = e;
if (e.data.onfinish){
e.data.onfinish.call(e.data.element, e.data);
}
}
    // Registry of every live binding created through bind().
    bind._binds = [];

    // jQuery plugin entry points.
    $.fn.grab = function(a, b){
        return this.each(function(){
            return grab(this, a, b); // grab() uses (elem, options); b is ignored
        });
    };
    $.fn.ungrab = function(a){
        return this.each(function(){
            return ungrab(this, a); // ungrab() uses only elem; a is ignored
        });
    };
})(jQuery);
| {
"pile_set_name": "Github"
} |
---
title: "工欲善其事必先利其器--Java Web快速开发框架--Springside"
wordpress_id: 1188
wordpress_url: http://www.wsria.com/?p=1188
date: 2010-10-12 00:45:38 +08:00
tags:
- springside
---
今天开始“工欲善其事必先利其器”系列第一篇,介绍目前可以快速开发并且有规范可维护性的开源框架——<span style="background-color: #00ff00;">Springside</span>。
<h3>一、老板,来二两点废话:</h3>
如果你从事Java Web编程你会发现怎么那么多的技术或者框架,还有那么多关于的Java规范、协议;框架比如最著名的MVC框架Struts,持久化框架Hibernate,和解耦合Struts和Hibernate的Spring(虽然它现在不再是单一功能);这些框架的学习不是很困难,但是对于初学者或者需要快速构建项目的团队或者个人来说就需要花费成本构建新的项目,这无疑是项目初期的拦路虎。
关于这一点我是有过深的体会,去年给一个小公司开发一个小系统使用的架构就是我之前发布过的《<a href="http://www.wsria.com/archives/136" target="_blank">个人账务管理系统</a>》的架构,由于当时写这个架构的时候工作经验比较少,遇到的问题也不多,所以很多地方功能欠缺,而且不够灵活还有BUG……惭愧
<h3>二、推荐Springside</h3>
去年年底的一次偶然机会让我发现了<a href="http://www.springside.org.cn" target="_blank">Springside</a>,以下简称为SS;关于SS的详细资料大家请到SS的官网查看,有这非常详细的WIKI文档而且在最新的3.3.4版本中<a href="http://www.wsria.com/msg#comment-1553" target="_blank">白衣</a>重新检查、调整了代码注释,使使用者可以很容易理解代码的意图。
<pre>SpringSide是以<a href="http://springside.org.cn/www.springframework.org" target="_blank">Spring Framework</a>为核心,提供Pragmatic之中又适合工业化大规模开发的企业应用Kickstart。
如果说Sun的代码规范规定了大家在代码中如何命名变量,怎么编写注释的话,SpringSide则是以主流的技术选型、Pragmatic的编程实践来开发JavaEE企业级应用的总结与演示。</pre>
<!--more-->
刚刚说到了spring是Struts和Hibernate的解耦合框架,如果说spring是struts和hibernate的<strong>粘合剂</strong>,那么SS就是sturts、spring、hibernate的粘合剂,但是幸运的告诉你中奖了,springside中还提供了对于<span style="background-color: #ffcc00;">团队开发</span>和企业应用开发中经常使用的用例演示,很多功能你可以直接搬运过来再自己的项目中实施,包含了我们平常经常使用<strong>CRUD</strong>操作的-<a href="http://wiki.springside.org.cn/display/SpringSide3/SpringSide+Mini-Example" target="_blank">MiniWeb</a>模块,千奇百怪的企业应用演示-<a href="http://wiki.springside.org.cn/display/SpringSide3/SpringSide+Showcase" target="_blank">Showcase</a>模块,这两个模块都依赖于一个功能的模块——<a href="http://wiki.springside.org.cn/display/SpringSide3/Springside+Modules" target="_blank">Modules</a>,在Modules中就会看到了SS的核心功能<span style="background-color: #ff0000;">Core</span>、Showcase的依赖<span style="background-color: #ff0000;">Extension</span>,还有SS的的<span style="background-color: #ff0000;">Parent</span>
<span style="font-size: medium;">这里就简单的说明SS的大体结构,引导,详细还是请看全面的<a href="http://wiki.springside.org.cn/display/SpringSide3/Home" target="_blank">SS WIKI文档</a>。</span>
<pre>如果你只是想快速开发,那么你需要做的仅仅是<strong>拿来主义</strong>。如果想看源码白衣做了很详细的源码说明和详细的WIKI文档,各种企业应用的:选型、使用、注意事项、问题列表、规范等等尽在<a href="http://wiki.springside.org.cn/display/SpringSide3/Reference" target="_blank">WIKI</a>中写明。</pre>
<h3>三、学习使用Springside</h3>
既然推荐了SS那就要说说怎么使用了;
首先说一下SS的整体结构,SS是以<a href="http://maven.apache.org" target="_blank">Maven</a>为<strong>构建</strong>工具,以Eclipse With JAVAEE为<strong>开发</strong>工具(外加各种插件),目前最新的版本要求使用JDK6+
<ol>
<li>
<h4>转变你的思想</h4>
就比如一个C(面向过程)语言程序员转换到Java(面向对象)程序员一个道理,加入你一直使用C语言开发,然后为了发展你需要学习Java语言,那么你必须学会<strong>强奸式</strong>的接受方式,你必须遵守面向对象规范……
到了这里同样适用,使用Springside会进一步提升应用的开发进度和稳定性,毕竟是经过全方位测试的核心框架;对于提供的示例你只需要按照Springside团队的代码风格来写,当然如果需要你也可以修改源码扩展一些<span title="我在SS基础上扩展了jqGrid的CRUD功能,以后会写教程"><strong>功能</strong></span></li>
<li>
<h4>Springside使用的构建工具——Maven</h4>
<pre><strong>程序员的固定思想</strong>:在做某件事情之前会想想有没有工具可以帮我实现(自动化、代工)?很明显的体现出程序员的懒惰……</pre>
<pre>关于Maven是说明东东或者刚刚接触的同学请看《<a href="http://www.wsria.com/archives/1072" target="_blank">Maven学习引导</a>》</pre>
如果你一直处于传统的开发模式或者公司技术落后的话你可能感觉怎么怪怪的呢,如果你没有接触过Maven首先会感觉目录结构为什么是这样的,和之前使用的WebRoot方式完全不一样,这个就是Maven的标准结构,Maven的设计思想是以<strong><span style="color: #800080;">约定优于配置</span></strong>为中心的,如果你接触过ruby on rails或者Struts2的话就能理解什么是约定优于配置,简单来说就是由工具、架构的设计者来制定一系列规则,开发人员只需要按照预设的规则书写、配置自己的功能,把注意力集中到业务上而不是分散部分注意力到架构或工具上,维基上有<a href="http://zh.wikipedia.org/zh/%E7%BA%A6%E5%AE%9A%E4%BC%98%E4%BA%8E%E9%85%8D%E7%BD%AE" target="_blank">详细解释</a>。
<pre>一个设计(工具)的出现必定有它存在的道理,所以<strong>告诫</strong>一些人不要盲目的反驳,先研究一下这样的结构设计对自己有利还是有弊,或者是否是利大于弊!</pre>
</li>
<li>
<h4>万变不离其宗</h4>
<strong>声明</strong>:再次强调一下SS只是SSH框架的粘合剂,外加一些企业应用的示例集合;
<strong>基础</strong>:所以使用SS必须有Struts(严格来说是Struts2)、Spring、Hibernate基础
<strong>变化</strong>:SS的每次版本升级目的都是怎么包裹SSH框架使得其在应用中更稳定、更易用</li>
<li>
<h4>不入虎穴焉得虎子</h4>
如果你有时间那么我建议你研究(或者浏览)一下SS的源码,有两个目的:
<strong>一、看其概貌</strong>
把SS的大体结构储存于脑海中,看点是对于Struts、Spring、Hibernate各个框架是怎么封装并整合在一起的,这是以后在开发过程中解决问题的最牢靠的线索(当然你可以到QQ群和论坛)
<strong>二、使用方法</strong>
上面提到了SS的几个Modules,例如你只需要普通的CRUD操作,那么就要仔细的浏览<a href="http://wiki.springside.org.cn/display/SpringSide3/SpringSide+Mini-Example" target="_blank">Mini-Web</a>模块的结构、代码实现,如果有必要可以Debug;虽然是拿来主义但是也要做<strong>有底气的拿来主义。</strong>
Mini-Service和Showcase模块也是同样的道理
除了代码部分还需要了解一些SS的parent模块的<a href="http://code.google.com/p/springside/source/browse/springside3/trunk/modules/parent/pom.xml?spec=svn1105&r=1105">pom.xml</a>配置,这里配置的SS的最外层约束配置,每个框架、插件的版本和团队配置都在这里面,如果你懂Maven就不用多说了。</li>
</ol>
<h3>四、结束语</h3>
可以想象你现在已经大体了解了SS是什么了,或许也和我一样对它产生了浓厚的兴趣,然后就是<a href="http://www.springside.org.cn/download.php" target="_blank">下载</a>源码、浏览<a href="http://wiki.springside.org.cn/display/SpringSide3/Home" target="_blank">WIKI文档</a>,想在自己的项目中实验一下效果,那么恭喜你咱们在同一行列了……
我写此文章的目的在于怎么提高企业应用的开发效率和稳定性,还有一点就是<strong>规范</strong>;当然如果你公司自己封装的比SS好你可以忽略我的存在。
PS:接下来会和大家分享一些使用SS开发过程中积累的一些技术知识和一些DEMO。
如果需要联系我请看<strong><a href="http://www.wsria.com/about">这里</a></strong>。
| {
"pile_set_name": "Github"
} |
# Bazel build rules for the card service HTTP server package.
# Rules are tagged "automanaged": regenerated by the repo's BUILD generator.
package(default_visibility = ["//visibility:public"])

load(
    "@io_bazel_rules_go//go:def.bzl",
    "go_library",
)

# Go library containing the HTTP handlers of the card service.
go_library(
    name = "go_default_library",
    srcs = [
        "card.go",
        "http.go",
    ],
    importpath = "go-common/app/service/main/card/server/http",
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
    deps = [
        "//app/service/main/card/conf:go_default_library",
        "//app/service/main/card/model:go_default_library",
        "//app/service/main/card/service:go_default_library",
        "//library/log:go_default_library",
        "//library/net/http/blademaster:go_default_library",
        "//library/net/http/blademaster/middleware/verify:go_default_library",
    ],
)

# Source groups consumed by the repository-wide aggregation targets.
filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.