repo_name (string, 4–116 chars) | path (string, 3–942 chars) | size (string, 1–7 chars) | content (string, 3–1.05M chars) | license (string, 15 classes)
---|---|---|---|---|
rajmahesh/magento2-master | vendor/magento/module-paypal/Test/Unit/Model/Payflow/Service/Response/Validator/ResponseValidatorTest.php | 3762 | <?php
/**
* Copyright © 2016 Magento. All rights reserved.
* See COPYING.txt for license details.
*/
namespace Magento\Paypal\Test\Unit\Model\Payflow\Service\Response\Validator;
use Magento\Framework\DataObject;
use Magento\Paypal\Model\Payflow\Transparent;
use Magento\Paypal\Model\Payflowpro;
use Magento\Paypal\Model\Payflow\Service\Response\ValidatorInterface;
use Magento\Paypal\Model\Payflow\Service\Response\Validator\ResponseValidator;
/**
* Class ResponseValidatorTest
*
* Test for class \Magento\Paypal\Model\Payflow\Service\Response\Validator\ResponseValidator
*/
class ResponseValidatorTest extends \PHPUnit_Framework_TestCase
{
/**
* @var ResponseValidator
*/
protected $responseValidator;
/**
* @var ValidatorInterface|\PHPUnit_Framework_MockObject_MockObject
*/
protected $validatorMock;
/**
* @var Transparent|\PHPUnit_Framework_MockObject_MockObject
*/
protected $payflowFacade;
protected function setUp()
{
$this->validatorMock = $this->getMockBuilder(
'Magento\Paypal\Model\Payflow\Service\Response\ValidatorInterface'
)
->setMethods(['validate'])
->getMockForAbstractClass();
$this->payflowFacade = $this->getMockBuilder(Transparent::class)
->disableOriginalConstructor()
->setMethods([])
->getMock();
$this->responseValidator = new ResponseValidator([$this->validatorMock]);
}
/**
* @param DataObject $response
* @param int $exactlyCount
*
* @dataProvider dataProviderForTestValidate
*/
public function testValidate(DataObject $response, $exactlyCount)
{
$this->validatorMock->expects($this->exactly($exactlyCount))
->method('validate')
->with($response);
$this->responseValidator->validate($response, $this->payflowFacade);
}
/**
* @return array
*/
public function dataProviderForTestValidate()
{
return [
[
'response' => new DataObject(['result' => Payflowpro::RESPONSE_CODE_APPROVED]),
'exactlyCount' => 1
],
[
'response' => new DataObject(['result' => Payflowpro::RESPONSE_CODE_FRAUDSERVICE_FILTER]),
'exactlyCount' => 1
],
[
'response' => new DataObject(['result' => Payflowpro::RESPONSE_CODE_INVALID_AMOUNT]),
'exactlyCount' => 0
]
];
}
/**
* @expectedException \Magento\Framework\Exception\LocalizedException
* @expectedExceptionMessage Transaction has been declined
*/
public function testValidateFail()
{
$response = new DataObject(
[
'result' => Payflowpro::RESPONSE_CODE_APPROVED,
'respmsg' => 'Test error msg',
]
);
$this->validatorMock->expects($this->once())
->method('validate')
->with($response)
->willReturn(false);
$this->responseValidator->validate($response, $this->payflowFacade);
}
/**
* @expectedException \Magento\Framework\Exception\LocalizedException
* @expectedExceptionMessage Transaction has been declined
*/
public function testValidateUnknownCode()
{
$response = new DataObject(
[
'result' => 7777777777,
'respmsg' => 'Test error msg',
]
);
$this->validatorMock->expects($this->never())
->method('validate')
->with($response)
->willReturn(false);
$this->responseValidator->validate($response, $this->payflowFacade);
}
}
| gpl-3.0 |
zdavis/manifold | api/db/migrate/20161112145302_create_project_subject_table.rb | 224 | class CreateProjectSubjectTable < ActiveRecord::Migration[5.0]
def change
create_table :project_subjects do |t|
t.uuid :project_id, foreign_key: true
t.uuid :subject_id, foreign_key: true
end
end
end
| gpl-3.0 |
alvinhsian/Opencart1556 | upload/admin/language/zh-TW/extension/total.php | 473 | <?php
// Heading
$_['heading_title'] = '訂單計算項目(Order Totals)';
// Text
$_['text_install'] = '安裝(Install)';
$_['text_uninstall'] = '解除安裝(Uninstall)';
// Column
$_['column_name'] = '項目(Order Totals)';
$_['column_status'] = '狀態(Status)';
$_['column_sort_order'] = '排序(Sort Order)';
$_['column_action'] = '動作(Action)';
// Error
$_['error_permission'] = '你沒有權限更改訂單計算項目的設置';
?> | gpl-3.0 |
nistormihai/superdesk | client/spec/desks_management_spec.js | 386 |
'use strict';
var desks = require('./helpers/desks');
describe('desks management', function () {
beforeEach(function() {
desks.openDesksSettings();
});
it('lists macros under the Macro tab for new desks', function () {
desks.newDeskBtn.click();
desks.showTab('macros');
expect(desks.listedMacros.count()).toBeGreaterThan(0);
});
});
| gpl-3.0 |
moravianlibrary/kramerius | common/src/main/java/cz/incad/kramerius/security/impl/criteria/AbstractIPAddressFilter.java | 3192 | /*
* Copyright (C) 2010 Pavel Stastny
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package cz.incad.kramerius.security.impl.criteria;
import java.util.logging.Level;
import java.util.regex.Pattern;
import cz.incad.kramerius.security.RightCriterium;
public abstract class AbstractIPAddressFilter extends AbstractCriterium implements RightCriterium {
static java.util.logging.Logger LOGGER = java.util.logging.Logger.getLogger(AbstractIPAddressFilter.class.getName());
protected boolean matchIPAddresses(Object[] objs) {
String remoteAddr = this.getEvaluateContext().getRemoteAddr();
return matchIPAddresses(objs, remoteAddr);
}
protected boolean matchIPAddresses(Object[] objs, String remoteAddr) {
for (Object pattern : objs) {
boolean negativePattern = false;
String patternStr = pattern.toString();
if (patternStr.startsWith("!")) {
patternStr = patternStr.substring(1);
negativePattern = true;
}
boolean matched = remoteAddr.matches(patternStr);
if ((matched) && (!negativePattern)) {
LOGGER.fine("\t regexpattern '"+patternStr+"' trying to match with address '"+remoteAddr+"' - ACCEPTING");
return true;
} else if ((!matched) && (negativePattern)) {
LOGGER.fine("\t regexpattern '"+patternStr+"' trying to match with address '"+remoteAddr+"' - (negative pattern) ACCEPTING");
return true;
}
// only debug
if ((!matched) && (!negativePattern)) {
LOGGER.fine("\t regexpattern '"+patternStr+"' trying to match with address '"+remoteAddr+"' - NOT ACCEPTING");
} else if ((matched) && (negativePattern)) {
LOGGER.fine("\t regexpattern '"+patternStr+"' trying to match with address '"+remoteAddr+"' -(negative pattern) NOT ACCEPTING");
}
}
return false;
}
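// Illustrative criterium parameter values (hypothetical, not taken from any real configuration):
//   "127\.0\.0\.1"  accepts requests coming from localhost only
//   "!10\..*"       accepts any address that does NOT start with "10." (negative pattern)
// Each value is evaluated as a Java regular expression; a leading '!' negates the pattern.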
@Override
public boolean isParamsNecessary() {
return true;
}
@Override
public boolean validateParams(Object[] vals) {
try {
for (Object pattern : vals) {
String patternStr = pattern.toString();
Pattern compiled = Pattern.compile(patternStr);
if (compiled == null) return false;
}
return true;
} catch (Exception e) {
LOGGER.log(Level.SEVERE, e.getMessage(), e);
return false;
}
}
}
| gpl-3.0 |
joergoster/cutechess | projects/lib/src/enginebuilder.h | 1472 | /*
This file is part of Cute Chess.
Copyright (C) 2008-2018 Cute Chess authors
Cute Chess is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Cute Chess is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Cute Chess. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef ENGINEBUILDER_H
#define ENGINEBUILDER_H
#include "playerbuilder.h"
#include <QCoreApplication>
#include "engineconfiguration.h"
/*! \brief A class for constructing local chess engines. */
class LIB_EXPORT EngineBuilder : public PlayerBuilder
{
Q_DECLARE_TR_FUNCTIONS(EngineBuilder)
public:
/*! Creates a new EngineBuilder. */
EngineBuilder(const EngineConfiguration& config);
// Inherited from PlayerBuilder
virtual bool isHuman() const;
virtual ChessPlayer* create(QObject* receiver,
const char* method,
QObject* parent,
QString* error) const;
private:
void setError(QString* error, const QString& message) const;
EngineConfiguration m_config;
};
#endif // ENGINEBUILDER_H
| gpl-3.0 |
vvrs/vvrs.github.io | hydejack/_posts/2018-06-30-introducing-hydejack-8.md | 2379 | ---
title: Introducing Hydejack 8
image: /assets/img/blog/hydejack-8.png
description: >
Hydejack 8 is the best version of Hydejack yet.
It introduces Cover Pages, Offline Support, and soon... Dark Mode.
---
After a long wait, Hydejack 8 finally sees the light of day. It makes Hydejack look more elegant (no more super bold headings) and introduces features that make your site more impressive to first-time visitors, while loading faster for repeat visitors.
### Cover Pages
The new design doubles down on Hydejack as your personal site. The new Cover Pages let visitors know what you're about at a glance, showing your logo or profile picture, tagline or description, and a selection of social media icons.
{:data-width="1440" data-height="836"}
Sliding over a cover page will reveal the content below.
{:.figure}
### Lazy-Loading Images
Using lots of images can severely impact the performance of a site. It can also cause layout quirks when images pop into existence.
In Hydejack 8 you have the option to provide `width` and `height` information for your images and let Hydejack lazy-load them as readers scroll the page.
{:data-width="1440" data-height="836"}
Images are loaded as they are scrolled into view.
{:.figure}
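To make this concrete, here is a minimal sketch of how a lazy-loaded image could be written in a post, reusing the kramdown attribute syntax shown in the figures of this post; the image path, alt text, and caption below are placeholders rather than values from Hydejack's documentation.
```md
![Wide screenshot of a demo page](/assets/img/blog/example.png){:data-width="1440" data-height="836"}

A short caption for the lazy-loaded screenshot.
{:.figure}
```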
### Better Blog Layout
The `blog` layout finally catches up to modern design standards and renders a post's `image` as part of the preview, giving it a tastier look that makes visitors more likely to engage.
{:data-width="1440" data-height="836"}
Hydejack's improved blog layout renders each post's image.
{:.figure}
### Dark Mode 🌗 (coming soon)
In 8.1, buyers of the PRO version will have access to Dark Mode. At your choosing, it will be enabled by default, enabled based on visitors' local time, or flipped on by a switch.
{:data-width="1440" data-height="836"}
Hydejack switches between light and dark mode fluidly.
{:.figure}
### Offline Support ⚡️
Version 8 introduces experimental offline support. This allows visitors to navigate your site while offline and continue reading articles when connectivity is lost. It also improves loading times for repeat visitors dramatically.
| gpl-3.0 |
satta/GeneDB | ng/src/org/genedb/web/mvc/controller/BasketController.java | 3651 | /*
* Copyright (c) 2006 Genome Research Limited.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Library General Public License as published
* by the Free Software Foundation; either version 2 of the License or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this program; see the file COPYING.LIB. If not, write to
* the Free Software Foundation Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307 USA
*/
package org.genedb.web.mvc.controller;
import org.genedb.db.dao.SequenceDao;
import org.genedb.querying.history.HistoryManager;
import org.genedb.querying.history.HistoryType;
import org.gmod.schema.feature.Transcript;
import org.gmod.schema.mapped.Feature;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
/**
* Looks up a feature by unique name
*
* @author Chinmay Patel (cp2)
* @author Adrian Tivey (art)
*/
@Controller
@RequestMapping("/Basket")
public class BasketController {
private static final Logger logger = Logger.getLogger(BasketController.class);
private SequenceDao sequenceDao;
private HistoryManagerFactory hmFactory;
//private ModelBuilder modelBuilder;
@RequestMapping(method=RequestMethod.GET, value="/{name}")
protected void addFeatureToBasket(
@PathVariable("name") String name,
@RequestParam(value="historyType", required=true) HistoryType historyType,
HttpSession session,
HttpServletResponse response
) throws Exception {
logger.info("Trying to store in basket " + name + ", history type " + historyType + " for session " + session.getId());
Feature feature = sequenceDao.getFeatureByUniqueName(name, Feature.class);
if (feature == null) {
logger.warn(String.format("Failed to find feature '%s'", name));
return;
}
// Transcript transcript = modelBuilder.findTranscriptForFeature(feature);
// if (transcript == null) {
// // If feature isn't transcript redirect - include model
// // is it part of a gene
// logger.warn(String.format("Failed to find transcript for an id of '%s'", name));
// //be.reject("no.results");
// return;
// }
//logger.trace("dto cache hit for '"+feature.getUniqueName());
HistoryManager hm = hmFactory.getHistoryManager(session);
hm.addHistoryItem(historyType, feature.getUniqueName());
// Add message
response.setStatus(HttpServletResponse.SC_OK);
return;
}
// public void setModelBuilder(ModelBuilder modelBuilder) {
// this.modelBuilder = modelBuilder;
// }
public void setHistoryManagerFactory(HistoryManagerFactory hmFactory) {
this.hmFactory = hmFactory;
}
public void setSequenceDao(SequenceDao sequenceDao) {
this.sequenceDao = sequenceDao;
}
}
| gpl-3.0 |
lavima/MLLib | src/torch/aten/src/ATen/native/quantized/cpu/qnnpack/src/qnnpack/u8lut32norm.h | 686 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#pragma once
#include <stddef.h>
#include <stdint.h>
#include <qnnpack/common.h>
#include <qnnpack/params.h>
#ifdef __cplusplus
extern "C" {
#endif
#define DECLARE_PYTORCH_X8LUT32NORM_UKERNEL_FUNCTION(fn_name) \
PYTORCH_QNNP_INTERNAL void fn_name( \
size_t n, const uint8_t* x, const uint32_t* t, uint8_t* y);
DECLARE_PYTORCH_X8LUT32NORM_UKERNEL_FUNCTION(pytorch_u8lut32norm_ukernel__scalar)
#ifdef __cplusplus
} /* extern "C" */
#endif
| gpl-3.0 |
cernvm/cernvm-config | etc/cernvm/cernvm.d/S90reinstallpackages.sh | 568 | ################################################################################
# Add user packages to RPM database after update
################################################################################
cernvm_start() {
if ls /var/lib/cernvm-update/run/*.rpm >/dev/null 2>&1; then
echo
for PKG in /var/lib/cernvm-update/run/*.rpm; do
echo -n "Re-registering ${PKG}... "
rpm -i --justdb --replacefiles --oldpackage "$PKG"
if [ $? -eq 0 ]; then
rm -f "$PKG"
echo "OK"
fi
done
fi
}
cernvm_stop() {
:
}
| gpl-3.0 |
sangwook236/SWDT | sw_dev/cpp/rnd/test/object_representation/pictorial_structures_revisited_lib/libPictStruct/HypothesisList.pb.h | 15720 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: HypothesisList.proto
#ifndef PROTOBUF_HypothesisList_2eproto__INCLUDED
#define PROTOBUF_HypothesisList_2eproto__INCLUDED
#include <string>
#include <google/protobuf/stubs/common.h>
#if GOOGLE_PROTOBUF_VERSION < 3001000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3001000 < GOOGLE_PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/metadata.h>
#include <google/protobuf/message.h>
#include <google/protobuf/repeated_field.h>
#include <google/protobuf/extension_set.h>
#include <google/protobuf/unknown_field_set.h>
// @@protoc_insertion_point(includes)
// Internal implementation detail -- do not call these.
void protobuf_AddDesc_HypothesisList_2eproto();
void protobuf_InitDefaults_HypothesisList_2eproto();
void protobuf_AssignDesc_HypothesisList_2eproto();
void protobuf_ShutdownFile_HypothesisList_2eproto();
class HypothesisList;
class HypothesisList_ObjectHypothesis;
// ===================================================================
class HypothesisList_ObjectHypothesis : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:HypothesisList.ObjectHypothesis) */ {
public:
HypothesisList_ObjectHypothesis();
virtual ~HypothesisList_ObjectHypothesis();
HypothesisList_ObjectHypothesis(const HypothesisList_ObjectHypothesis& from);
inline HypothesisList_ObjectHypothesis& operator=(const HypothesisList_ObjectHypothesis& from) {
CopyFrom(from);
return *this;
}
inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const {
return _internal_metadata_.unknown_fields();
}
inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() {
return _internal_metadata_.mutable_unknown_fields();
}
static const ::google::protobuf::Descriptor* descriptor();
static const HypothesisList_ObjectHypothesis& default_instance();
static const HypothesisList_ObjectHypothesis* internal_default_instance();
void Swap(HypothesisList_ObjectHypothesis* other);
// implements Message ----------------------------------------------
inline HypothesisList_ObjectHypothesis* New() const { return New(NULL); }
HypothesisList_ObjectHypothesis* New(::google::protobuf::Arena* arena) const;
void CopyFrom(const ::google::protobuf::Message& from);
void MergeFrom(const ::google::protobuf::Message& from);
void CopyFrom(const HypothesisList_ObjectHypothesis& from);
void MergeFrom(const HypothesisList_ObjectHypothesis& from);
void Clear();
bool IsInitialized() const;
size_t ByteSizeLong() const;
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input);
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* output) const;
::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const {
return InternalSerializeWithCachedSizesToArray(false, output);
}
int GetCachedSize() const { return _cached_size_; }
private:
void SharedCtor();
void SharedDtor();
void SetCachedSize(int size) const;
void InternalSwap(HypothesisList_ObjectHypothesis* other);
void UnsafeMergeFrom(const HypothesisList_ObjectHypothesis& from);
private:
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
return _internal_metadata_.arena();
}
inline void* MaybeArenaPtr() const {
return _internal_metadata_.raw_arena_ptr();
}
public:
::google::protobuf::Metadata GetMetadata() const;
// nested types ----------------------------------------------------
// accessors -------------------------------------------------------
// optional float x = 1;
bool has_x() const;
void clear_x();
static const int kXFieldNumber = 1;
float x() const;
void set_x(float value);
// optional float y = 2;
bool has_y() const;
void clear_y();
static const int kYFieldNumber = 2;
float y() const;
void set_y(float value);
// optional float scale = 3;
bool has_scale() const;
void clear_scale();
static const int kScaleFieldNumber = 3;
float scale() const;
void set_scale(float value);
// optional float score = 4;
bool has_score() const;
void clear_score();
static const int kScoreFieldNumber = 4;
float score() const;
void set_score(float value);
// optional bool flip = 5 [default = false];
bool has_flip() const;
void clear_flip();
static const int kFlipFieldNumber = 5;
bool flip() const;
void set_flip(bool value);
// @@protoc_insertion_point(class_scope:HypothesisList.ObjectHypothesis)
private:
inline void set_has_x();
inline void clear_has_x();
inline void set_has_y();
inline void clear_has_y();
inline void set_has_scale();
inline void clear_has_scale();
inline void set_has_score();
inline void clear_has_score();
inline void set_has_flip();
inline void clear_has_flip();
::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
::google::protobuf::internal::HasBits<1> _has_bits_;
mutable int _cached_size_;
float x_;
float y_;
float scale_;
float score_;
bool flip_;
friend void protobuf_InitDefaults_HypothesisList_2eproto_impl();
friend void protobuf_AddDesc_HypothesisList_2eproto_impl();
friend void protobuf_AssignDesc_HypothesisList_2eproto();
friend void protobuf_ShutdownFile_HypothesisList_2eproto();
void InitAsDefaultInstance();
};
extern ::google::protobuf::internal::ExplicitlyConstructed<HypothesisList_ObjectHypothesis> HypothesisList_ObjectHypothesis_default_instance_;
// -------------------------------------------------------------------
class HypothesisList : public ::google::protobuf::Message /* @@protoc_insertion_point(class_definition:HypothesisList) */ {
public:
HypothesisList();
virtual ~HypothesisList();
HypothesisList(const HypothesisList& from);
inline HypothesisList& operator=(const HypothesisList& from) {
CopyFrom(from);
return *this;
}
inline const ::google::protobuf::UnknownFieldSet& unknown_fields() const {
return _internal_metadata_.unknown_fields();
}
inline ::google::protobuf::UnknownFieldSet* mutable_unknown_fields() {
return _internal_metadata_.mutable_unknown_fields();
}
static const ::google::protobuf::Descriptor* descriptor();
static const HypothesisList& default_instance();
static const HypothesisList* internal_default_instance();
void Swap(HypothesisList* other);
// implements Message ----------------------------------------------
inline HypothesisList* New() const { return New(NULL); }
HypothesisList* New(::google::protobuf::Arena* arena) const;
void CopyFrom(const ::google::protobuf::Message& from);
void MergeFrom(const ::google::protobuf::Message& from);
void CopyFrom(const HypothesisList& from);
void MergeFrom(const HypothesisList& from);
void Clear();
bool IsInitialized() const;
size_t ByteSizeLong() const;
bool MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input);
void SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const;
::google::protobuf::uint8* InternalSerializeWithCachedSizesToArray(
bool deterministic, ::google::protobuf::uint8* output) const;
::google::protobuf::uint8* SerializeWithCachedSizesToArray(::google::protobuf::uint8* output) const {
return InternalSerializeWithCachedSizesToArray(false, output);
}
int GetCachedSize() const { return _cached_size_; }
private:
void SharedCtor();
void SharedDtor();
void SetCachedSize(int size) const;
void InternalSwap(HypothesisList* other);
void UnsafeMergeFrom(const HypothesisList& from);
private:
inline ::google::protobuf::Arena* GetArenaNoVirtual() const {
return _internal_metadata_.arena();
}
inline void* MaybeArenaPtr() const {
return _internal_metadata_.raw_arena_ptr();
}
public:
::google::protobuf::Metadata GetMetadata() const;
// nested types ----------------------------------------------------
typedef HypothesisList_ObjectHypothesis ObjectHypothesis;
// accessors -------------------------------------------------------
// repeated .HypothesisList.ObjectHypothesis hyp = 1;
int hyp_size() const;
void clear_hyp();
static const int kHypFieldNumber = 1;
const ::HypothesisList_ObjectHypothesis& hyp(int index) const;
::HypothesisList_ObjectHypothesis* mutable_hyp(int index);
::HypothesisList_ObjectHypothesis* add_hyp();
::google::protobuf::RepeatedPtrField< ::HypothesisList_ObjectHypothesis >*
mutable_hyp();
const ::google::protobuf::RepeatedPtrField< ::HypothesisList_ObjectHypothesis >&
hyp() const;
// @@protoc_insertion_point(class_scope:HypothesisList)
private:
::google::protobuf::internal::InternalMetadataWithArena _internal_metadata_;
::google::protobuf::internal::HasBits<1> _has_bits_;
mutable int _cached_size_;
::google::protobuf::RepeatedPtrField< ::HypothesisList_ObjectHypothesis > hyp_;
friend void protobuf_InitDefaults_HypothesisList_2eproto_impl();
friend void protobuf_AddDesc_HypothesisList_2eproto_impl();
friend void protobuf_AssignDesc_HypothesisList_2eproto();
friend void protobuf_ShutdownFile_HypothesisList_2eproto();
void InitAsDefaultInstance();
};
extern ::google::protobuf::internal::ExplicitlyConstructed<HypothesisList> HypothesisList_default_instance_;
// ===================================================================
// ===================================================================
#if !PROTOBUF_INLINE_NOT_IN_HEADERS
// HypothesisList_ObjectHypothesis
// optional float x = 1;
inline bool HypothesisList_ObjectHypothesis::has_x() const {
return (_has_bits_[0] & 0x00000001u) != 0;
}
inline void HypothesisList_ObjectHypothesis::set_has_x() {
_has_bits_[0] |= 0x00000001u;
}
inline void HypothesisList_ObjectHypothesis::clear_has_x() {
_has_bits_[0] &= ~0x00000001u;
}
inline void HypothesisList_ObjectHypothesis::clear_x() {
x_ = 0;
clear_has_x();
}
inline float HypothesisList_ObjectHypothesis::x() const {
// @@protoc_insertion_point(field_get:HypothesisList.ObjectHypothesis.x)
return x_;
}
inline void HypothesisList_ObjectHypothesis::set_x(float value) {
set_has_x();
x_ = value;
// @@protoc_insertion_point(field_set:HypothesisList.ObjectHypothesis.x)
}
// optional float y = 2;
inline bool HypothesisList_ObjectHypothesis::has_y() const {
return (_has_bits_[0] & 0x00000002u) != 0;
}
inline void HypothesisList_ObjectHypothesis::set_has_y() {
_has_bits_[0] |= 0x00000002u;
}
inline void HypothesisList_ObjectHypothesis::clear_has_y() {
_has_bits_[0] &= ~0x00000002u;
}
inline void HypothesisList_ObjectHypothesis::clear_y() {
y_ = 0;
clear_has_y();
}
inline float HypothesisList_ObjectHypothesis::y() const {
// @@protoc_insertion_point(field_get:HypothesisList.ObjectHypothesis.y)
return y_;
}
inline void HypothesisList_ObjectHypothesis::set_y(float value) {
set_has_y();
y_ = value;
// @@protoc_insertion_point(field_set:HypothesisList.ObjectHypothesis.y)
}
// optional float scale = 3;
inline bool HypothesisList_ObjectHypothesis::has_scale() const {
return (_has_bits_[0] & 0x00000004u) != 0;
}
inline void HypothesisList_ObjectHypothesis::set_has_scale() {
_has_bits_[0] |= 0x00000004u;
}
inline void HypothesisList_ObjectHypothesis::clear_has_scale() {
_has_bits_[0] &= ~0x00000004u;
}
inline void HypothesisList_ObjectHypothesis::clear_scale() {
scale_ = 0;
clear_has_scale();
}
inline float HypothesisList_ObjectHypothesis::scale() const {
// @@protoc_insertion_point(field_get:HypothesisList.ObjectHypothesis.scale)
return scale_;
}
inline void HypothesisList_ObjectHypothesis::set_scale(float value) {
set_has_scale();
scale_ = value;
// @@protoc_insertion_point(field_set:HypothesisList.ObjectHypothesis.scale)
}
// optional float score = 4;
inline bool HypothesisList_ObjectHypothesis::has_score() const {
return (_has_bits_[0] & 0x00000008u) != 0;
}
inline void HypothesisList_ObjectHypothesis::set_has_score() {
_has_bits_[0] |= 0x00000008u;
}
inline void HypothesisList_ObjectHypothesis::clear_has_score() {
_has_bits_[0] &= ~0x00000008u;
}
inline void HypothesisList_ObjectHypothesis::clear_score() {
score_ = 0;
clear_has_score();
}
inline float HypothesisList_ObjectHypothesis::score() const {
// @@protoc_insertion_point(field_get:HypothesisList.ObjectHypothesis.score)
return score_;
}
inline void HypothesisList_ObjectHypothesis::set_score(float value) {
set_has_score();
score_ = value;
// @@protoc_insertion_point(field_set:HypothesisList.ObjectHypothesis.score)
}
// optional bool flip = 5 [default = false];
inline bool HypothesisList_ObjectHypothesis::has_flip() const {
return (_has_bits_[0] & 0x00000010u) != 0;
}
inline void HypothesisList_ObjectHypothesis::set_has_flip() {
_has_bits_[0] |= 0x00000010u;
}
inline void HypothesisList_ObjectHypothesis::clear_has_flip() {
_has_bits_[0] &= ~0x00000010u;
}
inline void HypothesisList_ObjectHypothesis::clear_flip() {
flip_ = false;
clear_has_flip();
}
inline bool HypothesisList_ObjectHypothesis::flip() const {
// @@protoc_insertion_point(field_get:HypothesisList.ObjectHypothesis.flip)
return flip_;
}
inline void HypothesisList_ObjectHypothesis::set_flip(bool value) {
set_has_flip();
flip_ = value;
// @@protoc_insertion_point(field_set:HypothesisList.ObjectHypothesis.flip)
}
inline const HypothesisList_ObjectHypothesis* HypothesisList_ObjectHypothesis::internal_default_instance() {
return &HypothesisList_ObjectHypothesis_default_instance_.get();
}
// -------------------------------------------------------------------
// HypothesisList
// repeated .HypothesisList.ObjectHypothesis hyp = 1;
inline int HypothesisList::hyp_size() const {
return hyp_.size();
}
inline void HypothesisList::clear_hyp() {
hyp_.Clear();
}
inline const ::HypothesisList_ObjectHypothesis& HypothesisList::hyp(int index) const {
// @@protoc_insertion_point(field_get:HypothesisList.hyp)
return hyp_.Get(index);
}
inline ::HypothesisList_ObjectHypothesis* HypothesisList::mutable_hyp(int index) {
// @@protoc_insertion_point(field_mutable:HypothesisList.hyp)
return hyp_.Mutable(index);
}
inline ::HypothesisList_ObjectHypothesis* HypothesisList::add_hyp() {
// @@protoc_insertion_point(field_add:HypothesisList.hyp)
return hyp_.Add();
}
inline ::google::protobuf::RepeatedPtrField< ::HypothesisList_ObjectHypothesis >*
HypothesisList::mutable_hyp() {
// @@protoc_insertion_point(field_mutable_list:HypothesisList.hyp)
return &hyp_;
}
inline const ::google::protobuf::RepeatedPtrField< ::HypothesisList_ObjectHypothesis >&
HypothesisList::hyp() const {
// @@protoc_insertion_point(field_list:HypothesisList.hyp)
return hyp_;
}
inline const HypothesisList* HypothesisList::internal_default_instance() {
return &HypothesisList_default_instance_.get();
}
#endif // !PROTOBUF_INLINE_NOT_IN_HEADERS
// -------------------------------------------------------------------
// @@protoc_insertion_point(namespace_scope)
// @@protoc_insertion_point(global_scope)
#endif // PROTOBUF_HypothesisList_2eproto__INCLUDED
| gpl-3.0 |
rvega/pd-bbb-gpio | vendors/FreeBASIC-0.90.0-source/src/rtlib/strw_bin_lng.c | 1188 | /* binw$ routine for long long's */
#include "fb.h"
/*:::::*/
FBCALL FB_WCHAR *fb_WstrBinEx_l ( unsigned long long num, int digits )
{
FB_WCHAR *dst, *buf;
int i, totdigs;
if( digits > 0 )
{
totdigs = (digits < sizeof( long long ) << 3? digits: sizeof( long long ) << 3);
if( digits > sizeof( long long ) << 3 )
digits = sizeof( long long ) << 3;
}
else
totdigs = sizeof( long long ) << 3;
/* alloc temp string */
dst = fb_wstr_AllocTemp( totdigs );
if( dst == NULL )
return NULL;
/* convert */
buf = dst;
if( num == 0ULL )
{
if( digits <= 0 )
digits = 1;
while( digits-- )
*buf++ = _LC('0');
}
else
{
num <<= ((sizeof( long long ) << 3) - totdigs);
for( i = 0; i < totdigs; i++, num <<= 1 )
if( num & 0x8000000000000000ULL )
break;
if( digits > 0 )
{
digits -= totdigs - i;
while( digits-- )
*buf++ = _LC('0');
}
for( ; i < totdigs; i++, num <<= 1 )
if( num & 0x8000000000000000ULL )
*buf++ = _LC('1');
else
*buf++ = _LC('0');
}
/* add null-term */
*buf = _LC('\0');
return dst;
}
/*:::::*/
FBCALL FB_WCHAR *fb_WstrBin_l ( unsigned long long num )
{
return fb_WstrBinEx_l( num, 0 );
}
| gpl-3.0 |
joergoster/cutechess | projects/lib/src/board/pocketknightboard.h | 2056 | /*
This file is part of Cute Chess.
Copyright (C) 2008-2018 Cute Chess authors
Cute Chess is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Cute Chess is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Cute Chess. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef POCKETKNIGHTBOARD_H
#define POCKETKNIGHTBOARD_H
#include "westernboard.h"
namespace Chess {
/*!
* \brief A board for Pocket Knight Chess
*
* Pocket Knight Chess is a variant of standard chess.
* Each side has an additional Knight in their reserve that can be
* dropped onto an empty square during the game instead of making a
* normal move.
*
* This variant originates from the early 20th century and is also known
* under the name Tombola Chess.
*
* \note Rules: http://www.chessvariants.com/other.dir/pocket.html
*
*/
class LIB_EXPORT PocketKnightBoard : public WesternBoard
{
public:
/*! Creates a new PocketKnightBoard object. */
PocketKnightBoard();
// Inherited from WesternBoard
virtual Board* copy() const;
virtual QList< Piece > reservePieceTypes() const;
virtual QString variant() const;
virtual QString defaultFenString() const;
virtual bool variantHasDrops() const;
protected:
// Inherited from WesternBoard
virtual int reserveType(int pieceType) const;
virtual void vMakeMove(const Move& move,
BoardTransition* transition);
virtual void vUndoMove(const Move& move);
virtual void generateMovesForPiece(QVarLengthArray< Move >& moves,
int pieceType,
int square) const;
};
} // namespace Chess
#endif // POCKETKNIGHTBOARD_H
| gpl-3.0 |
kwemi/Phraseanet | tests/Alchemy/Tests/Phrasea/SearchEngine/SearchEngineOptionsTest.php | 5841 | <?php
namespace Alchemy\Tests\Phrasea\SearchEngine;
use Alchemy\Phrasea\SearchEngine\SearchEngineOptions;
use Symfony\Component\HttpFoundation\Request;
/**
* @group functional
* @group legacy
*/
class SearchEngineOptionsTest extends \PhraseanetTestCase
{
/**
* @covers Alchemy\Phrasea\SearchEngine\SearchEngineOptions
*/
public function testSerialize()
{
$options = new SearchEngineOptions(self::$DI['app']);
$options->onCollections([self::$DI['collection']]);
$options->allowBusinessFieldsOn([self::$DI['collection']]);
foreach (self::$DI['collection']->get_databox()->get_meta_structure() as $field) {
$options->setFields([$field]);
$options->setDateFields([$field]);
break;
}
$min_date = new \DateTime('-5 days');
$max_date = new \DateTime('+5 days');
$options->setMinDate(\DateTime::createFromFormat(DATE_ATOM, $min_date->format(DATE_ATOM)));
$options->setMaxDate(\DateTime::createFromFormat(DATE_ATOM, $max_date->format(DATE_ATOM)));
$serialized = $options->serialize();
$this->assertEquals($options, SearchEngineOptions::hydrate(self::$DI['app'], $serialized));
}
/**
* @covers Alchemy\Phrasea\SearchEngine\SearchEngineOptions::fromRequest
*/
public function testFromRequest()
{
$this->authenticate(self::$DI['app']);
foreach ($this->provideRequestData() as $pack) {
list ($query, $request, $field, $dateField) = $pack;
$httpRequest = new Request($query, $request);
$options = SearchEngineOptions::fromRequest(self::$DI['app'], $httpRequest);
// Check done this way because returned array can be indexed differently
$collections = $options->getCollections();
$this->assertCount(1, $collections);
$this->assertContains(self::$DI['collection'], $collections);
$this->assertEquals([$field], $options->getFields());
$this->assertEquals('video', $options->getRecordType());
$this->assertEquals('1', $options->getSearchType());
$this->assertEquals('2012/12/21', $options->getMaxDate()->format('Y/m/d'));
$this->assertEquals('2009/04/24', $options->getMinDate()->format('Y/m/d'));
$this->assertEquals([$dateField], $options->getDateFields());
$this->assertEquals('asc', $options->getSortOrder());
$this->assertEquals('topinambour', $options->getSortBy());
$this->assertEquals(true, $options->isStemmed());
}
}
/**
* @covers Alchemy\Phrasea\SearchEngine\SearchEngineOptions::fromRequest
*/
public function testFromRequestUnauthenticated()
{
foreach ($this->provideRequestData() as $pack) {
list ($query, $request, $field, $dateField) = $pack;
$httpRequest = new Request($query, $request);
$options = SearchEngineOptions::fromRequest(self::$DI['app'], $httpRequest);
$this->assertEquals([], $options->getCollections());
$this->assertEquals([], $options->getFields());
$this->assertEquals('video', $options->getRecordType());
$this->assertEquals('1', $options->getSearchType());
$this->assertEquals('2012/12/21', $options->getMaxDate()->format('Y/m/d'));
$this->assertEquals('2009/04/24', $options->getMinDate()->format('Y/m/d'));
$this->assertEquals([], $options->getDateFields());
$this->assertEquals('asc', $options->getSortOrder());
$this->assertEquals('topinambour', $options->getSortBy());
$this->assertEquals(true, $options->isStemmed());
}
}
/**
* @covers Alchemy\Phrasea\SearchEngine\SearchEngineOptions::fromRequest
*/
public function testFromRequestEmptyUnauthenticated()
{
$options = SearchEngineOptions::fromRequest(self::$DI['app'], new Request());
$this->assertEquals([], $options->getCollections());
$this->assertEquals([], $options->getFields());
$this->assertEquals(null, $options->getRecordType());
$this->assertEquals('0', $options->getSearchType());
$this->assertEquals(null, $options->getMaxDate());
$this->assertEquals(null, $options->getMinDate());
$this->assertEquals([], $options->getDateFields());
$this->assertEquals('desc', $options->getSortOrder());
$this->assertEquals(null, $options->getSortBy());
$this->assertEquals(false, $options->isStemmed());
}
private function provideRequestData()
{
$field = $dateField = null;
foreach (self::$DI['collection']->get_databox()->get_meta_structure() as $db_field) {
if (!$field) {
$field = $db_field;
} elseif (!$dateField) {
$dateField = $db_field;
} else {
break;
}
}
if (!$field || !$dateField) {
$this->fail('Unable to get a field');
}
$data = [
'bases' => [self::$DI['collection']->get_base_id()],
'status' => ['4' => ['on' => [self::$DI['collection']->get_databox()->get_sbas_id()]]],
'fields' => [$field->get_name()],
'record_type' => 'video',
'search_type' => '1',
'date_min' => '2009/04/24',
'date_max' => '2012/12/21',
'date_field' => $dateField->get_name(),
'ord' => 'asc',
'sort' => 'topinambour',
'stemme' => 'true',
];
$dataWithoutBases = $data;
unset($dataWithoutBases['bases']);
return [
[[], $data, $field, $dateField],
[$data, [], $field, $dateField],
];
}
}
| gpl-3.0 |
DragonZX/fdm | Gecko.SDK/30/include/nsCycleCollector.h | 2645 | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef nsCycleCollector_h__
#define nsCycleCollector_h__
class nsICycleCollectorListener;
class nsISupports;
#include "nsError.h"
#include "nsID.h"
namespace mozilla {
class CycleCollectedJSRuntime;
// See the comments in nsContentUtils.h for explanations of these functions.
typedef void* (*DeferredFinalizeAppendFunction)(void* pointers, void* thing);
typedef bool (*DeferredFinalizeFunction)(uint32_t slice, void* data);
}
bool nsCycleCollector_init();
void nsCycleCollector_startup();
typedef void (*CC_BeforeUnlinkCallback)(void);
void nsCycleCollector_setBeforeUnlinkCallback(CC_BeforeUnlinkCallback aCB);
typedef void (*CC_ForgetSkippableCallback)(void);
void nsCycleCollector_setForgetSkippableCallback(CC_ForgetSkippableCallback aCB);
void nsCycleCollector_forgetSkippable(bool aRemoveChildlessNodes = false,
bool aAsyncSnowWhiteFreeing = false);
void nsCycleCollector_prepareForGarbageCollection();
void nsCycleCollector_dispatchDeferredDeletion(bool aContinuation = false);
bool nsCycleCollector_doDeferredDeletion();
void nsCycleCollector_collect(nsICycleCollectorListener *aManualListener);
// If aSliceTime is negative, the CC will run to completion. If aSliceTime
// is 0, only a minimum quantum of work will be done. Otherwise, aSliceTime
// will be used as the time budget for the slice, in ms.
void nsCycleCollector_collectSlice(int64_t aSliceTime);
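// Illustrative calls only (hypothetical caller code, not part of this header):
//   nsCycleCollector_collectSlice(5);   // incremental slice with a ~5 ms budget
//   nsCycleCollector_collectSlice(0);   // do only a minimum quantum of work
//   nsCycleCollector_collectSlice(-1);  // negative budget: run to completion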
uint32_t nsCycleCollector_suspectedCount();
void nsCycleCollector_shutdown();
// Helpers for interacting with JS
void nsCycleCollector_registerJSRuntime(mozilla::CycleCollectedJSRuntime *aRt);
void nsCycleCollector_forgetJSRuntime();
#define NS_CYCLE_COLLECTOR_LOGGER_CID \
{ 0x58be81b4, 0x39d2, 0x437c, \
{ 0x94, 0xea, 0xae, 0xde, 0x2c, 0x62, 0x08, 0xd3 } }
extern nsresult
nsCycleCollectorLoggerConstructor(nsISupports* outer,
const nsIID& aIID,
void* *aInstancePtr);
namespace mozilla {
namespace cyclecollector {
#ifdef DEBUG
bool IsJSHolder(void* aHolder);
#endif
void DeferredFinalize(DeferredFinalizeAppendFunction aAppendFunc,
DeferredFinalizeFunction aFunc,
void* aThing);
void DeferredFinalize(nsISupports* aSupports);
} // namespace cyclecollector
} // namespace mozilla
#endif // nsCycleCollector_h__
| gpl-3.0 |
gohdan/DFC | known_files/hashes/bitrix/modules/photogallery/install/components/bitrix/photogallery_user/templates/old/bitrix/photogallery.section.edit/.default/lang/ru/template.php | 61 | Bitrix 16.5 Business Demo = be1a97cd8e66f427f52f0ba0dc4d6ffb
| gpl-3.0 |
pramithkm/moodle | user/lib.php | 54890 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* External user API
*
* @package core_user
* @copyright 2009 Moodle Pty Ltd (http://moodle.com)
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
/**
* Creates a user
*
* @throws moodle_exception
* @param stdClass $user user to create
* @param bool $updatepassword if true, authentication plugin will update password.
* @param bool $triggerevent set false if user_created event should not be triggered.
* This will not affect user_password_updated event triggering.
* @return int id of the newly created user
*/
function user_create_user($user, $updatepassword = true, $triggerevent = true) {
global $DB;
// Set the timecreated field to the current time.
if (!is_object($user)) {
$user = (object) $user;
}
// Check username.
if ($user->username !== core_text::strtolower($user->username)) {
throw new moodle_exception('usernamelowercase');
} else {
if ($user->username !== core_user::clean_field($user->username, 'username')) {
throw new moodle_exception('invalidusername');
}
}
// Save the password in a temp value for later.
if ($updatepassword && isset($user->password)) {
// Check password toward the password policy.
if (!check_password_policy($user->password, $errmsg)) {
throw new moodle_exception($errmsg);
}
$userpassword = $user->password;
unset($user->password);
}
// Apply default values for user preferences that are stored in users table.
if (!isset($user->calendartype)) {
$user->calendartype = core_user::get_property_default('calendartype');
}
if (!isset($user->maildisplay)) {
$user->maildisplay = core_user::get_property_default('maildisplay');
}
if (!isset($user->mailformat)) {
$user->mailformat = core_user::get_property_default('mailformat');
}
if (!isset($user->maildigest)) {
$user->maildigest = core_user::get_property_default('maildigest');
}
if (!isset($user->autosubscribe)) {
$user->autosubscribe = core_user::get_property_default('autosubscribe');
}
if (!isset($user->trackforums)) {
$user->trackforums = core_user::get_property_default('trackforums');
}
if (!isset($user->lang)) {
$user->lang = core_user::get_property_default('lang');
}
$user->timecreated = time();
$user->timemodified = $user->timecreated;
// Validate user data object.
$uservalidation = core_user::validate($user);
if ($uservalidation !== true) {
foreach ($uservalidation as $field => $message) {
debugging("The property '$field' has invalid data and has been cleaned.", DEBUG_DEVELOPER);
$user->$field = core_user::clean_field($user->$field, $field);
}
}
// Insert the user into the database.
$newuserid = $DB->insert_record('user', $user);
// Create USER context for this user.
$usercontext = context_user::instance($newuserid);
// Update user password if necessary.
if (isset($userpassword)) {
// Get full database user row, in case auth is default.
$newuser = $DB->get_record('user', array('id' => $newuserid));
$authplugin = get_auth_plugin($newuser->auth);
$authplugin->user_update_password($newuser, $userpassword);
}
// Trigger event If required.
if ($triggerevent) {
\core\event\user_created::create_from_userid($newuserid)->trigger();
}
return $newuserid;
}
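// Illustrative usage sketch (comment only; the field values are hypothetical and not
// exhaustive - a real call typically also needs fields such as 'auth', 'confirmed'
// and 'mnethostid'):
// $user = (object) [
//     'username'  => 'jdoe',
//     'firstname' => 'Jane',
//     'lastname'  => 'Doe',
//     'email'     => 'jdoe@example.com',
//     'password'  => 'An-example-Passw0rd!',
// ];
// $newuserid = user_create_user($user);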
/**
* Update a user with a user object (will compare against the ID)
*
* @throws moodle_exception
* @param stdClass $user the user to update
* @param bool $updatepassword if true, authentication plugin will update password.
* @param bool $triggerevent set false if user_updated event should not be triggered.
* This will not affect user_password_updated event triggering.
*/
function user_update_user($user, $updatepassword = true, $triggerevent = true) {
global $DB;
// Set the timecreate field to the current time.
if (!is_object($user)) {
$user = (object) $user;
}
// Check username.
if (isset($user->username)) {
if ($user->username !== core_text::strtolower($user->username)) {
throw new moodle_exception('usernamelowercase');
} else {
if ($user->username !== core_user::clean_field($user->username, 'username')) {
throw new moodle_exception('invalidusername');
}
}
}
// Unset password here, for updating later, if password update is required.
if ($updatepassword && isset($user->password)) {
// Check password toward the password policy.
if (!check_password_policy($user->password, $errmsg)) {
throw new moodle_exception($errmsg);
}
$passwd = $user->password;
unset($user->password);
}
// Make sure calendartype, if set, is valid.
if (empty($user->calendartype)) {
// Unset this variable, must be an empty string, which we do not want to update the calendartype to.
unset($user->calendartype);
}
$user->timemodified = time();
// Validate user data object.
$uservalidation = core_user::validate($user);
if ($uservalidation !== true) {
foreach ($uservalidation as $field => $message) {
debugging("The property '$field' has invalid data and has been cleaned.", DEBUG_DEVELOPER);
$user->$field = core_user::clean_field($user->$field, $field);
}
}
$DB->update_record('user', $user);
if ($updatepassword) {
// Get full user record.
$updateduser = $DB->get_record('user', array('id' => $user->id));
// If password was set, then update its hash.
if (isset($passwd)) {
$authplugin = get_auth_plugin($updateduser->auth);
if ($authplugin->can_change_password()) {
$authplugin->user_update_password($updateduser, $passwd);
}
}
}
// Trigger event if required.
if ($triggerevent) {
\core\event\user_updated::create_from_userid($user->id)->trigger();
}
}
/**
* Marks user deleted in internal user database and notifies the auth plugin.
* Also unenrols user from all roles and does other cleanup.
*
* @todo Decide if this transaction is really needed (look for internal TODO:)
* @param object $user Userobject before delete (without system magic quotes)
* @return boolean success
*/
function user_delete_user($user) {
return delete_user($user);
}
/**
* Get users by id
*
* @param array $userids id of users to retrieve
* @return array
*/
function user_get_users_by_id($userids) {
global $DB;
return $DB->get_records_list('user', 'id', $userids);
}
/**
* Returns the list of default 'displayable' fields
*
* Contains database field names but also names used to generate information, such as enrolledcourses
*
* @return array of user fields
*/
function user_get_default_fields() {
return array( 'id', 'username', 'fullname', 'firstname', 'lastname', 'email',
'address', 'phone1', 'phone2', 'icq', 'skype', 'yahoo', 'aim', 'msn', 'department',
'institution', 'interests', 'firstaccess', 'lastaccess', 'auth', 'confirmed',
'idnumber', 'lang', 'theme', 'timezone', 'mailformat', 'description', 'descriptionformat',
'city', 'url', 'country', 'profileimageurlsmall', 'profileimageurl', 'customfields',
'groups', 'roles', 'preferences', 'enrolledcourses', 'suspended'
);
}
/**
*
* Given a user record from mdl_user, build an array containing all user details.
*
* Warning: description file URLs use 'webservice/pluginfile.php';
* this can be changed with $CFG->moodlewstextformatlinkstoimagesfile.
*
* @throws moodle_exception
* @param stdClass $user user record from mdl_user
* @param stdClass $course moodle course
* @param array $userfields required fields
* @return array|null
*/
function user_get_user_details($user, $course = null, array $userfields = array()) {
global $USER, $DB, $CFG, $PAGE;
require_once($CFG->dirroot . "/user/profile/lib.php"); // Custom field library.
require_once($CFG->dirroot . "/lib/filelib.php"); // File handling on description and friends.
$defaultfields = user_get_default_fields();
if (empty($userfields)) {
$userfields = $defaultfields;
}
foreach ($userfields as $thefield) {
if (!in_array($thefield, $defaultfields)) {
throw new moodle_exception('invaliduserfield', 'error', '', $thefield);
}
}
// Make sure id and fullname are included.
if (!in_array('id', $userfields)) {
$userfields[] = 'id';
}
if (!in_array('fullname', $userfields)) {
$userfields[] = 'fullname';
}
if (!empty($course)) {
$context = context_course::instance($course->id);
$usercontext = context_user::instance($user->id);
$canviewdetailscap = (has_capability('moodle/user:viewdetails', $context) || has_capability('moodle/user:viewdetails', $usercontext));
} else {
$context = context_user::instance($user->id);
$usercontext = $context;
$canviewdetailscap = has_capability('moodle/user:viewdetails', $usercontext);
}
$currentuser = ($user->id == $USER->id);
$isadmin = is_siteadmin($USER);
$showuseridentityfields = get_extra_user_fields($context);
if (!empty($course)) {
$canviewhiddenuserfields = has_capability('moodle/course:viewhiddenuserfields', $context);
} else {
$canviewhiddenuserfields = has_capability('moodle/user:viewhiddendetails', $context);
}
$canviewfullnames = has_capability('moodle/site:viewfullnames', $context);
if (!empty($course)) {
$canviewuseremail = has_capability('moodle/course:useremail', $context);
} else {
$canviewuseremail = false;
}
$cannotviewdescription = !empty($CFG->profilesforenrolledusersonly) && !$currentuser && !$DB->record_exists('role_assignments', array('userid' => $user->id));
if (!empty($course)) {
$canaccessallgroups = has_capability('moodle/site:accessallgroups', $context);
} else {
$canaccessallgroups = false;
}
if (!$currentuser && !$canviewdetailscap && !has_coursecontact_role($user->id)) {
// Skip this user details.
return null;
}
$userdetails = array();
$userdetails['id'] = $user->id;
if (in_array('username', $userfields)) {
if ($currentuser or has_capability('moodle/user:viewalldetails', $context)) {
$userdetails['username'] = $user->username;
}
}
if ($isadmin or $canviewfullnames) {
if (in_array('firstname', $userfields)) {
$userdetails['firstname'] = $user->firstname;
}
if (in_array('lastname', $userfields)) {
$userdetails['lastname'] = $user->lastname;
}
}
$userdetails['fullname'] = fullname($user);
if (in_array('customfields', $userfields)) {
$fields = $DB->get_recordset_sql("SELECT f.*
FROM {user_info_field} f
JOIN {user_info_category} c
ON f.categoryid=c.id
ORDER BY c.sortorder ASC, f.sortorder ASC");
$userdetails['customfields'] = array();
foreach ($fields as $field) {
require_once($CFG->dirroot.'/user/profile/field/'.$field->datatype.'/field.class.php');
$newfield = 'profile_field_'.$field->datatype;
$formfield = new $newfield($field->id, $user->id);
if ($formfield->is_visible() and !$formfield->is_empty()) {
// TODO: Part of MDL-50728, this conditional coding must be moved to
// proper profile fields API so they are self-contained.
// We only use display_data in fields that require text formatting.
if ($field->datatype == 'text' or $field->datatype == 'textarea') {
$fieldvalue = $formfield->display_data();
} else {
// Cases: datetime, checkbox and menu.
$fieldvalue = $formfield->data;
}
$userdetails['customfields'][] =
array('name' => $formfield->field->name, 'value' => $fieldvalue,
'type' => $field->datatype, 'shortname' => $formfield->field->shortname);
}
}
$fields->close();
// Unset customfields if it's empty.
if (empty($userdetails['customfields'])) {
unset($userdetails['customfields']);
}
}
// Profile image.
if (in_array('profileimageurl', $userfields)) {
$userpicture = new user_picture($user);
$userpicture->size = 1; // Size f1.
$userdetails['profileimageurl'] = $userpicture->get_url($PAGE)->out(false);
}
if (in_array('profileimageurlsmall', $userfields)) {
if (!isset($userpicture)) {
$userpicture = new user_picture($user);
}
$userpicture->size = 0; // Size f2.
$userdetails['profileimageurlsmall'] = $userpicture->get_url($PAGE)->out(false);
}
// Hidden user field.
if ($canviewhiddenuserfields) {
$hiddenfields = array();
// Address, phone1 and phone2 not appears in hidden fields list but require viewhiddenfields capability
// according to user/profile.php.
if (!empty($user->address) && in_array('address', $userfields)) {
$userdetails['address'] = $user->address;
}
} else {
$hiddenfields = array_flip(explode(',', $CFG->hiddenuserfields));
}
if (!empty($user->phone1) && in_array('phone1', $userfields) &&
(in_array('phone1', $showuseridentityfields) or $canviewhiddenuserfields)) {
$userdetails['phone1'] = $user->phone1;
}
if (!empty($user->phone2) && in_array('phone2', $userfields) &&
(in_array('phone2', $showuseridentityfields) or $canviewhiddenuserfields)) {
$userdetails['phone2'] = $user->phone2;
}
if (isset($user->description) &&
((!isset($hiddenfields['description']) && !$cannotviewdescription) or $isadmin)) {
if (in_array('description', $userfields)) {
// Always return the descriptionformat if description is requested.
list($userdetails['description'], $userdetails['descriptionformat']) =
external_format_text($user->description, $user->descriptionformat,
$usercontext->id, 'user', 'profile', null);
}
}
if (in_array('country', $userfields) && (!isset($hiddenfields['country']) or $isadmin) && $user->country) {
$userdetails['country'] = $user->country;
}
if (in_array('city', $userfields) && (!isset($hiddenfields['city']) or $isadmin) && $user->city) {
$userdetails['city'] = $user->city;
}
if (in_array('url', $userfields) && $user->url && (!isset($hiddenfields['webpage']) or $isadmin)) {
$url = $user->url;
if (strpos($user->url, '://') === false) {
$url = 'http://'. $url;
}
$user->url = clean_param($user->url, PARAM_URL);
$userdetails['url'] = $user->url;
}
if (in_array('icq', $userfields) && $user->icq && (!isset($hiddenfields['icqnumber']) or $isadmin)) {
$userdetails['icq'] = $user->icq;
}
if (in_array('skype', $userfields) && $user->skype && (!isset($hiddenfields['skypeid']) or $isadmin)) {
$userdetails['skype'] = $user->skype;
}
if (in_array('yahoo', $userfields) && $user->yahoo && (!isset($hiddenfields['yahooid']) or $isadmin)) {
$userdetails['yahoo'] = $user->yahoo;
}
if (in_array('aim', $userfields) && $user->aim && (!isset($hiddenfields['aimid']) or $isadmin)) {
$userdetails['aim'] = $user->aim;
}
if (in_array('msn', $userfields) && $user->msn && (!isset($hiddenfields['msnid']) or $isadmin)) {
$userdetails['msn'] = $user->msn;
}
if (in_array('suspended', $userfields) && (!isset($hiddenfields['suspended']) or $isadmin)) {
$userdetails['suspended'] = (bool)$user->suspended;
}
if (in_array('firstaccess', $userfields) && (!isset($hiddenfields['firstaccess']) or $isadmin)) {
if ($user->firstaccess) {
$userdetails['firstaccess'] = $user->firstaccess;
} else {
$userdetails['firstaccess'] = 0;
}
}
if (in_array('lastaccess', $userfields) && (!isset($hiddenfields['lastaccess']) or $isadmin)) {
if ($user->lastaccess) {
$userdetails['lastaccess'] = $user->lastaccess;
} else {
$userdetails['lastaccess'] = 0;
}
}
if (in_array('email', $userfields) && ($isadmin // The admin is allowed the users email.
or $currentuser // Of course the current user is as well.
or $canviewuseremail // This is a capability in course context, it will be false in usercontext.
or in_array('email', $showuseridentityfields)
or $user->maildisplay == 1
or ($user->maildisplay == 2 and enrol_sharing_course($user, $USER)))) {
$userdetails['email'] = $user->email;
}
if (in_array('interests', $userfields)) {
$interests = core_tag_tag::get_item_tags_array('core', 'user', $user->id, core_tag_tag::BOTH_STANDARD_AND_NOT, 0, false);
if ($interests) {
$userdetails['interests'] = join(', ', $interests);
}
}
    // Department/Institution/Idnumber are not displayed on any profile; however, you can get them when editing the profile.
if (in_array('idnumber', $userfields) && $user->idnumber) {
if (in_array('idnumber', $showuseridentityfields) or $currentuser or
has_capability('moodle/user:viewalldetails', $context)) {
$userdetails['idnumber'] = $user->idnumber;
}
}
if (in_array('institution', $userfields) && $user->institution) {
if (in_array('institution', $showuseridentityfields) or $currentuser or
has_capability('moodle/user:viewalldetails', $context)) {
$userdetails['institution'] = $user->institution;
}
}
// Isset because it's ok to have department 0.
if (in_array('department', $userfields) && isset($user->department)) {
if (in_array('department', $showuseridentityfields) or $currentuser or
has_capability('moodle/user:viewalldetails', $context)) {
$userdetails['department'] = $user->department;
}
}
if (in_array('roles', $userfields) && !empty($course)) {
// Not a big secret.
$roles = get_user_roles($context, $user->id, false);
$userdetails['roles'] = array();
foreach ($roles as $role) {
$userdetails['roles'][] = array(
'roleid' => $role->roleid,
'name' => $role->name,
'shortname' => $role->shortname,
'sortorder' => $role->sortorder
);
}
}
// If groups are in use and enforced throughout the course, then make sure we can meet in at least one course level group.
if (in_array('groups', $userfields) && !empty($course) && $canaccessallgroups) {
$usergroups = groups_get_all_groups($course->id, $user->id, $course->defaultgroupingid,
'g.id, g.name,g.description,g.descriptionformat');
$userdetails['groups'] = array();
foreach ($usergroups as $group) {
list($group->description, $group->descriptionformat) =
external_format_text($group->description, $group->descriptionformat,
$context->id, 'group', 'description', $group->id);
$userdetails['groups'][] = array('id' => $group->id, 'name' => $group->name,
'description' => $group->description, 'descriptionformat' => $group->descriptionformat);
}
}
// List of courses where the user is enrolled.
if (in_array('enrolledcourses', $userfields) && !isset($hiddenfields['mycourses'])) {
$enrolledcourses = array();
if ($mycourses = enrol_get_users_courses($user->id, true)) {
foreach ($mycourses as $mycourse) {
if ($mycourse->category) {
$coursecontext = context_course::instance($mycourse->id);
$enrolledcourse = array();
$enrolledcourse['id'] = $mycourse->id;
$enrolledcourse['fullname'] = format_string($mycourse->fullname, true, array('context' => $coursecontext));
$enrolledcourse['shortname'] = format_string($mycourse->shortname, true, array('context' => $coursecontext));
$enrolledcourses[] = $enrolledcourse;
}
}
$userdetails['enrolledcourses'] = $enrolledcourses;
}
}
// User preferences.
if (in_array('preferences', $userfields) && $currentuser) {
$preferences = array();
$userpreferences = get_user_preferences();
foreach ($userpreferences as $prefname => $prefvalue) {
$preferences[] = array('name' => $prefname, 'value' => $prefvalue);
}
$userdetails['preferences'] = $preferences;
}
if ($currentuser or has_capability('moodle/user:viewalldetails', $context)) {
$extrafields = ['auth', 'confirmed', 'lang', 'theme', 'timezone', 'mailformat'];
foreach ($extrafields as $extrafield) {
if (in_array($extrafield, $userfields) && isset($user->$extrafield)) {
$userdetails[$extrafield] = $user->$extrafield;
}
}
}
return $userdetails;
}
/**
 * Tries to obtain user details, either referring directly to the user's system profile
 * or through one of the user's course enrolments (course profile).
 *
 * @param stdClass $user The user.
 * @return array|null null if unsuccessful, or the array of allowed user details.
*/
function user_get_user_details_courses($user) {
global $USER;
$userdetails = null;
// Get the courses that the user is enrolled in (only active).
$courses = enrol_get_users_courses($user->id, true);
$systemprofile = false;
if (can_view_user_details_cap($user) || ($user->id == $USER->id) || has_coursecontact_role($user->id)) {
$systemprofile = true;
}
// Try using system profile.
if ($systemprofile) {
$userdetails = user_get_user_details($user, null);
} else {
// Try through course profile.
foreach ($courses as $course) {
if (can_view_user_details_cap($user, $course) || ($user->id == $USER->id) || has_coursecontact_role($user->id)) {
$userdetails = user_get_user_details($user, $course);
}
}
}
return $userdetails;
}
/**
* Check if $USER have the necessary capabilities to obtain user details.
*
* @param stdClass $user
* @param stdClass $course if null then only consider system profile otherwise also consider the course's profile.
* @return bool true if $USER can view user details.
*/
function can_view_user_details_cap($user, $course = null) {
// Check $USER has the capability to view the user details at user context.
$usercontext = context_user::instance($user->id);
$result = has_capability('moodle/user:viewdetails', $usercontext);
// Otherwise can $USER see them at course context.
if (!$result && !empty($course)) {
$context = context_course::instance($course->id);
$result = has_capability('moodle/user:viewdetails', $context);
}
return $result;
}
/**
* Return a list of page types
* @param string $pagetype current page type
* @param stdClass $parentcontext Block's parent context
* @param stdClass $currentcontext Current context of block
* @return array
*/
function user_page_type_list($pagetype, $parentcontext, $currentcontext) {
return array('user-profile' => get_string('page-user-profile', 'pagetype'));
}
/**
* Count the number of failed login attempts for the given user, since last successful login.
*
* @param int|stdclass $user user id or object.
* @param bool $reset Resets failed login count, if set to true.
*
* @return int number of failed login attempts since the last successful login.
*/
function user_count_login_failures($user, $reset = true) {
global $DB;
if (!is_object($user)) {
$user = $DB->get_record('user', array('id' => $user), '*', MUST_EXIST);
}
if ($user->deleted) {
// Deleted user, nothing to do.
return 0;
}
$count = get_user_preferences('login_failed_count_since_success', 0, $user);
if ($reset) {
set_user_preference('login_failed_count_since_success', 0, $user);
}
return $count;
}
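/*
 * Editor's sketch (not part of the original file): the second argument controls whether the
 * counter is cleared, so a "show once" warning resets it while login_info() style callers
 * peek without clearing.
 *
 *   $failures = user_count_login_failures($USER);          // returns the count and resets it
 *   $peek     = user_count_login_failures($USER, false);   // returns the count, leaves it set
 */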
/**
* Converts a string into a flat array of menu items, where each menu items is a
* stdClass with fields type, url, title, pix, and imgsrc.
*
* @param string $text the menu items definition
* @param moodle_page $page the current page
* @return array
*/
function user_convert_text_to_menu_items($text, $page) {
global $OUTPUT, $CFG;
$lines = explode("\n", $text);
$items = array();
$lastchild = null;
$lastdepth = null;
$lastsort = 0;
$children = array();
foreach ($lines as $line) {
$line = trim($line);
$bits = explode('|', $line, 3);
$itemtype = 'link';
if (preg_match("/^#+$/", $line)) {
$itemtype = 'divider';
} else if (!array_key_exists(0, $bits) or empty($bits[0])) {
// Every item must have a name to be valid.
continue;
} else {
$bits[0] = ltrim($bits[0], '-');
}
// Create the child.
$child = new stdClass();
$child->itemtype = $itemtype;
if ($itemtype === 'divider') {
// Add the divider to the list of children and skip link
// processing.
$children[] = $child;
continue;
}
// Name processing.
$namebits = explode(',', $bits[0], 2);
if (count($namebits) == 2) {
// Check the validity of the identifier part of the string.
if (clean_param($namebits[0], PARAM_STRINGID) !== '') {
// Treat this as a language string.
$child->title = get_string($namebits[0], $namebits[1]);
$child->titleidentifier = implode(',', $namebits);
}
}
if (empty($child->title)) {
// Use it as is, don't even clean it.
$child->title = $bits[0];
$child->titleidentifier = str_replace(" ", "-", $bits[0]);
}
// URL processing.
if (!array_key_exists(1, $bits) or empty($bits[1])) {
// Set the url to null, and set the itemtype to invalid.
$bits[1] = null;
$child->itemtype = "invalid";
} else {
// Nasty hack to replace the grades with the direct url.
if (strpos($bits[1], '/grade/report/mygrades.php') !== false) {
$bits[1] = user_mygrades_url();
}
// Make sure the url is a moodle url.
$bits[1] = new moodle_url(trim($bits[1]));
}
$child->url = $bits[1];
// PIX processing.
$pixpath = "t/edit";
if (!array_key_exists(2, $bits) or empty($bits[2])) {
// Use the default.
$child->pix = $pixpath;
} else {
// Check for the specified image existing.
$pixpath = "t/" . $bits[2];
if ($page->theme->resolve_image_location($pixpath, 'moodle', true)) {
// Use the image.
$child->pix = $pixpath;
} else {
// Treat it like a URL.
$child->pix = null;
$child->imgsrc = $bits[2];
}
}
// Add this child to the list of children.
$children[] = $child;
}
return $children;
}
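/*
 * Editor's note (illustrative, not part of the original docs): the text parsed above is the
 * $CFG->customusermenuitems admin setting, one item per line in the form "title|url|pix".
 * The lines below are made-up examples close to, but not guaranteed to match, any shipped
 * default.
 *
 *   grades,grades|/grade/report/mygrades.php
 *   messages,message|/message/index.php
 *   #######
 *   preferences,moodle|/user/preferences.php|preferences
 *
 * A line made only of "#" characters becomes a divider, "grades,grades" is resolved with
 * get_string('grades', 'grades'), a missing third field falls back to the t/edit icon, and a
 * third field that does not resolve under pix/t/ is treated as an image URL instead.
 */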
/**
* Get a list of essential user navigation items.
*
* @param stdclass $user user object.
* @param moodle_page $page page object.
* @param array $options associative array.
* options are:
* - avatarsize=35 (size of avatar image)
* @return stdClass $returnobj navigation information object, where:
*
* $returnobj->navitems array array of links where each link is a
* stdClass with fields url, title, and
* pix
* $returnobj->metadata array array of useful user metadata to be
* used when constructing navigation;
* fields include:
*
* ROLE FIELDS
* asotherrole bool whether viewing as another role
* rolename string name of the role
*
* USER FIELDS
* These fields are for the currently-logged in user, or for
* the user that the real user is currently logged in as.
*
* userid int the id of the user in question
* userfullname string the user's full name
* userprofileurl moodle_url the url of the user's profile
* useravatar string a HTML fragment - the rendered
* user_picture for this user
* userloginfail string an error string denoting the number
* of login failures since last login
*
* "REAL USER" FIELDS
* These fields are for when asotheruser is true, and
* correspond to the underlying "real user".
*
* asotheruser bool whether viewing as another user
* realuserid int the id of the user in question
* realuserfullname string the user's full name
* realuserprofileurl moodle_url the url of the user's profile
* realuseravatar string a HTML fragment - the rendered
* user_picture for this user
*
* MNET PROVIDER FIELDS
* asmnetuser bool whether viewing as a user from an
* MNet provider
* mnetidprovidername string name of the MNet provider
* mnetidproviderwwwroot string URL of the MNet provider
*/
function user_get_user_navigation_info($user, $page, $options = array()) {
global $OUTPUT, $DB, $SESSION, $CFG;
$returnobject = new stdClass();
$returnobject->navitems = array();
$returnobject->metadata = array();
$course = $page->course;
// Query the environment.
$context = context_course::instance($course->id);
// Get basic user metadata.
$returnobject->metadata['userid'] = $user->id;
$returnobject->metadata['userfullname'] = fullname($user, true);
$returnobject->metadata['userprofileurl'] = new moodle_url('/user/profile.php', array(
'id' => $user->id
));
$avataroptions = array('link' => false, 'visibletoscreenreaders' => false);
if (!empty($options['avatarsize'])) {
$avataroptions['size'] = $options['avatarsize'];
}
$returnobject->metadata['useravatar'] = $OUTPUT->user_picture (
$user, $avataroptions
);
// Build a list of items for a regular user.
// Query MNet status.
if ($returnobject->metadata['asmnetuser'] = is_mnet_remote_user($user)) {
$mnetidprovider = $DB->get_record('mnet_host', array('id' => $user->mnethostid));
$returnobject->metadata['mnetidprovidername'] = $mnetidprovider->name;
$returnobject->metadata['mnetidproviderwwwroot'] = $mnetidprovider->wwwroot;
}
// Did the user just log in?
if (isset($SESSION->justloggedin)) {
// Don't unset this flag as login_info still needs it.
if (!empty($CFG->displayloginfailures)) {
// Don't reset the count either, as login_info() still needs it too.
if ($count = user_count_login_failures($user, false)) {
// Get login failures string.
$a = new stdClass();
$a->attempts = html_writer::tag('span', $count, array('class' => 'value'));
$returnobject->metadata['userloginfail'] =
get_string('failedloginattempts', '', $a);
}
}
}
// Links: Dashboard.
$myhome = new stdClass();
$myhome->itemtype = 'link';
$myhome->url = new moodle_url('/my/');
$myhome->title = get_string('mymoodle', 'admin');
$myhome->titleidentifier = 'mymoodle,admin';
$myhome->pix = "i/dashboard";
$returnobject->navitems[] = $myhome;
// Links: My Profile.
$myprofile = new stdClass();
$myprofile->itemtype = 'link';
$myprofile->url = new moodle_url('/user/profile.php', array('id' => $user->id));
$myprofile->title = get_string('profile');
$myprofile->titleidentifier = 'profile,moodle';
$myprofile->pix = "i/user";
$returnobject->navitems[] = $myprofile;
$returnobject->metadata['asotherrole'] = false;
// Before we add the last items (usually a logout + switch role link), add any
// custom-defined items.
$customitems = user_convert_text_to_menu_items($CFG->customusermenuitems, $page);
foreach ($customitems as $item) {
$returnobject->navitems[] = $item;
}
if ($returnobject->metadata['asotheruser'] = \core\session\manager::is_loggedinas()) {
$realuser = \core\session\manager::get_realuser();
        // Save values for the real user, as $user will be full of data for the
        // user that the real user is currently logged in as.
$returnobject->metadata['realuserid'] = $realuser->id;
$returnobject->metadata['realuserfullname'] = fullname($realuser, true);
$returnobject->metadata['realuserprofileurl'] = new moodle_url('/user/profile.php', array(
'id' => $realuser->id
));
$returnobject->metadata['realuseravatar'] = $OUTPUT->user_picture($realuser, $avataroptions);
// Build a user-revert link.
$userrevert = new stdClass();
$userrevert->itemtype = 'link';
$userrevert->url = new moodle_url('/course/loginas.php', array(
'id' => $course->id,
'sesskey' => sesskey()
));
$userrevert->pix = "a/logout";
$userrevert->title = get_string('logout');
$userrevert->titleidentifier = 'logout,moodle';
$returnobject->navitems[] = $userrevert;
} else {
// Build a logout link.
$logout = new stdClass();
$logout->itemtype = 'link';
$logout->url = new moodle_url('/login/logout.php', array('sesskey' => sesskey()));
$logout->pix = "a/logout";
$logout->title = get_string('logout');
$logout->titleidentifier = 'logout,moodle';
$returnobject->navitems[] = $logout;
}
if (is_role_switched($course->id)) {
if ($role = $DB->get_record('role', array('id' => $user->access['rsw'][$context->path]))) {
// Build role-return link instead of logout link.
$rolereturn = new stdClass();
$rolereturn->itemtype = 'link';
$rolereturn->url = new moodle_url('/course/switchrole.php', array(
'id' => $course->id,
'sesskey' => sesskey(),
'switchrole' => 0,
'returnurl' => $page->url->out_as_local_url(false)
));
$rolereturn->pix = "a/logout";
$rolereturn->title = get_string('switchrolereturn');
$rolereturn->titleidentifier = 'switchrolereturn,moodle';
$returnobject->navitems[] = $rolereturn;
$returnobject->metadata['asotherrole'] = true;
$returnobject->metadata['rolename'] = role_get_name($role, $context);
}
} else {
// Build switch role link.
$roles = get_switchable_roles($context);
if (is_array($roles) && (count($roles) > 0)) {
$switchrole = new stdClass();
$switchrole->itemtype = 'link';
$switchrole->url = new moodle_url('/course/switchrole.php', array(
'id' => $course->id,
'switchrole' => -1,
'returnurl' => $page->url->out_as_local_url(false)
));
$switchrole->pix = "i/switchrole";
$switchrole->title = get_string('switchroleto');
$switchrole->titleidentifier = 'switchroleto,moodle';
$returnobject->navitems[] = $switchrole;
}
}
return $returnobject;
}
/**
* Add password to the list of used hashes for this user.
*
* This is supposed to be used from:
* 1/ change own password form
* 2/ password reset process
* 3/ user signup in auth plugins if password changing supported
*
* @param int $userid user id
* @param string $password plaintext password
* @return void
*/
function user_add_password_history($userid, $password) {
global $CFG, $DB;
if (empty($CFG->passwordreuselimit) or $CFG->passwordreuselimit < 0) {
return;
}
    // Note: this is using separate code from normal password hashing because
// we need to have this under control in the future. Also the auth
// plugin might not store the passwords locally at all.
$record = new stdClass();
$record->userid = $userid;
$record->hash = password_hash($password, PASSWORD_DEFAULT);
$record->timecreated = time();
$DB->insert_record('user_password_history', $record);
$i = 0;
$records = $DB->get_records('user_password_history', array('userid' => $userid), 'timecreated DESC, id DESC');
foreach ($records as $record) {
$i++;
if ($i > $CFG->passwordreuselimit) {
$DB->delete_records('user_password_history', array('id' => $record->id));
}
}
}
/**
* Was this password used before on change or reset password page?
*
 * The $CFG->passwordreuselimit setting determines
 * how many different passwords need to be used
 * before a previously used password is allowed again.
*
* @param int $userid user id
* @param string $password plaintext password
* @return bool true if password reused
*/
function user_is_previously_used_password($userid, $password) {
global $CFG, $DB;
if (empty($CFG->passwordreuselimit) or $CFG->passwordreuselimit < 0) {
return false;
}
$reused = false;
$i = 0;
$records = $DB->get_records('user_password_history', array('userid' => $userid), 'timecreated DESC, id DESC');
foreach ($records as $record) {
$i++;
if ($i > $CFG->passwordreuselimit) {
$DB->delete_records('user_password_history', array('id' => $record->id));
continue;
}
// NOTE: this is slow but we cannot compare the hashes directly any more.
if (password_verify($password, $record->hash)) {
$reused = true;
}
}
return $reused;
}
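/*
 * Editor's sketch (not part of core): a change-password handler would combine the two
 * helpers above roughly as follows, assuming $user and the validated plaintext $newpassword
 * come from the submitted form.
 *
 *   if (user_is_previously_used_password($user->id, $newpassword)) {
 *       // Reject the change; how the error is reported is up to the caller.
 *   } else {
 *       // ... update the account password via the auth plugin ...
 *       user_add_password_history($user->id, $newpassword);
 *   }
 */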
/**
* Remove a user device from the Moodle database (for PUSH notifications usually).
*
* @param string $uuid The device UUID.
* @param string $appid The app id. If empty all the devices matching the UUID for the user will be removed.
 * @return bool true if removed, false if the device didn't exist in the database
* @since Moodle 2.9
*/
function user_remove_user_device($uuid, $appid = "") {
global $DB, $USER;
$conditions = array('uuid' => $uuid, 'userid' => $USER->id);
if (!empty($appid)) {
$conditions['appid'] = $appid;
}
if (!$DB->count_records('user_devices', $conditions)) {
return false;
}
$DB->delete_records('user_devices', $conditions);
return true;
}
/**
* Trigger user_list_viewed event.
*
* @param stdClass $course course object
* @param stdClass $context course context object
* @since Moodle 2.9
*/
function user_list_view($course, $context) {
$event = \core\event\user_list_viewed::create(array(
'objectid' => $course->id,
'courseid' => $course->id,
'context' => $context,
'other' => array(
'courseshortname' => $course->shortname,
'coursefullname' => $course->fullname
)
));
$event->trigger();
}
/**
* Returns the url to use for the "Grades" link in the user navigation.
*
* @param int $userid The user's ID.
* @param int $courseid The course ID if available.
* @return mixed A URL to be directed to for "Grades".
*/
function user_mygrades_url($userid = null, $courseid = SITEID) {
global $CFG, $USER;
$url = null;
if (isset($CFG->grade_mygrades_report) && $CFG->grade_mygrades_report != 'external') {
if (isset($userid) && $USER->id != $userid) {
// Send to the gradebook report.
$url = new moodle_url('/grade/report/' . $CFG->grade_mygrades_report . '/index.php',
array('id' => $courseid, 'userid' => $userid));
} else {
$url = new moodle_url('/grade/report/' . $CFG->grade_mygrades_report . '/index.php');
}
} else if (isset($CFG->grade_mygrades_report) && $CFG->grade_mygrades_report == 'external'
&& !empty($CFG->gradereport_mygradeurl)) {
$url = $CFG->gradereport_mygradeurl;
} else {
$url = $CFG->wwwroot;
}
return $url;
}
/**
* Check if a user has the permission to viewdetails in a shared course's context.
*
* @param object $user The other user's details.
* @param object $course Use this course to see if we have permission to see this user's profile.
* @param context $usercontext The user context if available.
* @return bool true for ability to view this user, else false.
*/
function user_can_view_profile($user, $course = null, $usercontext = null) {
global $USER, $CFG;
if ($user->deleted) {
return false;
}
// Perform some quick checks and eventually return early.
// Number 1.
if (empty($CFG->forceloginforprofiles)) {
return true;
} else {
if (!isloggedin() || isguestuser()) {
// User is not logged in and forceloginforprofile is set, we need to return now.
return false;
}
}
// Number 2.
if ($USER->id == $user->id) {
return true;
}
if (empty($usercontext)) {
$usercontext = context_user::instance($user->id);
}
// Number 3.
if (has_capability('moodle/user:viewdetails', $usercontext) || has_capability('moodle/user:viewalldetails', $usercontext)) {
return true;
}
// Number 4.
if (has_coursecontact_role($user->id)) {
return true;
}
if (isset($course)) {
$sharedcourses = array($course);
} else {
$sharedcourses = enrol_get_shared_courses($USER->id, $user->id, true);
}
if (empty($sharedcourses)) {
return false;
}
foreach ($sharedcourses as $sharedcourse) {
$coursecontext = context_course::instance($sharedcourse->id);
if (has_capability('moodle/user:viewdetails', $coursecontext)) {
if (!groups_user_groups_visible($sharedcourse, $user->id)) {
// Not a member of the same group.
continue;
}
return true;
}
}
return false;
}
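/*
 * Editor's sketch (not part of core): a typical guard before rendering a profile page,
 * assuming $user and optionally $course are already loaded; the error identifier is a
 * made-up placeholder rather than a known language string.
 *
 *   if (!user_can_view_profile($user, $course)) {
 *       throw new moodle_exception('cannotviewuser');
 *   }
 */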
/**
* Returns users tagged with a specified tag.
*
* @param core_tag_tag $tag
* @param bool $exclusivemode if set to true it means that no other entities tagged with this tag
* are displayed on the page and the per-page limit may be bigger
* @param int $fromctx context id where the link was displayed, may be used by callbacks
* to display items in the same context first
* @param int $ctx context id where to search for records
* @param bool $rec search in subcontexts as well
* @param int $page 0-based number of page being displayed
* @return \core_tag\output\tagindex
*/
function user_get_tagged_users($tag, $exclusivemode = false, $fromctx = 0, $ctx = 0, $rec = 1, $page = 0) {
global $PAGE;
if ($ctx && $ctx != context_system::instance()->id) {
$usercount = 0;
} else {
// Users can only be displayed in system context.
$usercount = $tag->count_tagged_items('core', 'user',
'it.deleted=:notdeleted', array('notdeleted' => 0));
}
$perpage = $exclusivemode ? 24 : 5;
$content = '';
$totalpages = ceil($usercount / $perpage);
if ($usercount) {
$userlist = $tag->get_tagged_items('core', 'user', $page * $perpage, $perpage,
'it.deleted=:notdeleted', array('notdeleted' => 0));
$renderer = $PAGE->get_renderer('core', 'user');
$content .= $renderer->user_list($userlist, $exclusivemode);
}
return new core_tag\output\tagindex($tag, 'core', 'user', $content,
$exclusivemode, $fromctx, $ctx, $rec, $page, $totalpages);
}
/**
* Returns the SQL used by the participants table.
*
* @param int $courseid The course id
* @param int $groupid The groupid, 0 means all groups
* @param int $accesssince The time since last access, 0 means any time
* @param int $roleid The role id, 0 means all roles
* @param int $enrolid The enrolment id, 0 means all enrolment methods will be returned.
* @param int $statusid The user enrolment status, -1 means all enrolments regardless of the status will be returned, if allowed.
* @param string|array $search The search that was performed, empty means perform no search
* @param string $additionalwhere Any additional SQL to add to where
* @param array $additionalparams The additional params
* @return array
*/
function user_get_participants_sql($courseid, $groupid = 0, $accesssince = 0, $roleid = 0, $enrolid = 0, $statusid = -1,
$search = '', $additionalwhere = '', $additionalparams = array()) {
global $DB;
// Get the context.
$context = \context_course::instance($courseid, MUST_EXIST);
$isfrontpage = ($courseid == SITEID);
// Default filter settings. We only show active by default, especially if the user has no capability to review enrolments.
$onlyactive = true;
$onlysuspended = false;
if (has_capability('moodle/course:enrolreview', $context)) {
switch ($statusid) {
case ENROL_USER_ACTIVE:
// Nothing to do here.
break;
case ENROL_USER_SUSPENDED:
$onlyactive = false;
$onlysuspended = true;
break;
default:
// If the user has capability to review user enrolments, but statusid is set to -1, set $onlyactive to false.
$onlyactive = false;
break;
}
}
list($esql, $params) = get_enrolled_sql($context, null, $groupid, $onlyactive, $onlysuspended, $enrolid);
$joins = array('FROM {user} u');
$wheres = array();
$userfields = get_extra_user_fields($context, array('username', 'lang', 'timezone', 'maildisplay'));
$userfieldssql = user_picture::fields('u', $userfields);
if ($isfrontpage) {
$select = "SELECT $userfieldssql, u.lastaccess";
$joins[] = "JOIN ($esql) e ON e.id = u.id"; // Everybody on the frontpage usually.
if ($accesssince) {
$wheres[] = user_get_user_lastaccess_sql($accesssince);
}
} else {
$select = "SELECT $userfieldssql, COALESCE(ul.timeaccess, 0) AS lastaccess";
$joins[] = "JOIN ($esql) e ON e.id = u.id"; // Course enrolled users only.
// Not everybody has accessed the course yet.
$joins[] = 'LEFT JOIN {user_lastaccess} ul ON (ul.userid = u.id AND ul.courseid = :courseid)';
$params['courseid'] = $courseid;
if ($accesssince) {
$wheres[] = user_get_course_lastaccess_sql($accesssince);
}
}
// Performance hacks - we preload user contexts together with accounts.
$ccselect = ', ' . context_helper::get_preload_record_columns_sql('ctx');
$ccjoin = 'LEFT JOIN {context} ctx ON (ctx.instanceid = u.id AND ctx.contextlevel = :contextlevel)';
$params['contextlevel'] = CONTEXT_USER;
$select .= $ccselect;
$joins[] = $ccjoin;
// Limit list to users with some role only.
if ($roleid) {
// We want to query both the current context and parent contexts.
list($relatedctxsql, $relatedctxparams) = $DB->get_in_or_equal($context->get_parent_context_ids(true),
SQL_PARAMS_NAMED, 'relatedctx');
$wheres[] = "u.id IN (SELECT userid FROM {role_assignments} WHERE roleid = :roleid AND contextid $relatedctxsql)";
$params = array_merge($params, array('roleid' => $roleid), $relatedctxparams);
}
if (!empty($search)) {
if (!is_array($search)) {
$search = [$search];
}
foreach ($search as $index => $keyword) {
$searchkey1 = 'search' . $index . '1';
$searchkey2 = 'search' . $index . '2';
$searchkey3 = 'search' . $index . '3';
$fullname = $DB->sql_fullname('u.firstname', 'u.lastname');
$wheres[] = '(' . $DB->sql_like($fullname, ':' . $searchkey1, false, false) .
' OR ' . $DB->sql_like('email', ':' . $searchkey2, false, false) .
' OR ' . $DB->sql_like('idnumber', ':' . $searchkey3, false, false) . ') ';
$params[$searchkey1] = "%$keyword%";
$params[$searchkey2] = "%$keyword%";
$params[$searchkey3] = "%$keyword%";
}
}
if (!empty($additionalwhere)) {
$wheres[] = $additionalwhere;
$params = array_merge($params, $additionalparams);
}
$from = implode("\n", $joins);
if ($wheres) {
$where = 'WHERE ' . implode(' AND ', $wheres);
} else {
$where = '';
}
return array($select, $from, $where, $params);
}
/**
* Returns the total number of participants for a given course.
*
* @param int $courseid The course id
* @param int $groupid The groupid, 0 means all groups
* @param int $accesssince The time since last access, 0 means any time
* @param int $roleid The role id, 0 means all roles
* @param int $enrolid The applied filter for the user enrolment ID.
 * @param int $statusid The applied filter for the user's enrolment status.
* @param string|array $search The search that was performed, empty means perform no search
* @param string $additionalwhere Any additional SQL to add to where
* @param array $additionalparams The additional params
* @return int
*/
function user_get_total_participants($courseid, $groupid = 0, $accesssince = 0, $roleid = 0, $enrolid = 0, $statusid = -1,
$search = '', $additionalwhere = '', $additionalparams = array()) {
global $DB;
list($select, $from, $where, $params) = user_get_participants_sql($courseid, $groupid, $accesssince, $roleid, $enrolid,
$statusid, $search, $additionalwhere, $additionalparams);
return $DB->count_records_sql("SELECT COUNT(u.id) $from $where", $params);
}
/**
* Returns the participants for a given course.
*
* @param int $courseid The course id
* @param int $groupid The group id
* @param int $accesssince The time since last access
* @param int $roleid The role id
* @param int $enrolid The applied filter for the user enrolment ID.
 * @param int $statusid The applied filter for the user's enrolment status.
* @param string $search The search that was performed
* @param string $additionalwhere Any additional SQL to add to where
* @param array $additionalparams The additional params
* @param string $sort The SQL sort
* @param int $limitfrom return a subset of records, starting at this point (optional).
* @param int $limitnum return a subset comprising this many records (optional, required if $limitfrom is set).
* @return moodle_recordset
*/
function user_get_participants($courseid, $groupid = 0, $accesssince, $roleid, $enrolid = 0, $statusid, $search,
$additionalwhere = '', $additionalparams = array(), $sort = '', $limitfrom = 0, $limitnum = 0) {
global $DB;
list($select, $from, $where, $params) = user_get_participants_sql($courseid, $groupid, $accesssince, $roleid, $enrolid,
$statusid, $search, $additionalwhere, $additionalparams);
return $DB->get_recordset_sql("$select $from $where $sort", $params, $limitfrom, $limitnum);
}
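/*
 * Editor's sketch (values are arbitrary examples, not from the original file): fetch the
 * first 20 active participants of a course ordered by last name, then close the recordset.
 *
 *   $rs = user_get_participants($course->id, 0, 0, 0, 0, -1, '', '', array(),
 *       'ORDER BY u.lastname ASC', 0, 20);
 *   foreach ($rs as $participant) {
 *       // ... render $participant ...
 *   }
 *   $rs->close();
 */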
/**
* Returns SQL that can be used to limit a query to a period where the user last accessed a course.
*
* @param int $accesssince The time since last access
* @param string $tableprefix
* @return string
*/
function user_get_course_lastaccess_sql($accesssince = null, $tableprefix = 'ul') {
if (empty($accesssince)) {
return '';
}
if ($accesssince == -1) { // Never.
return $tableprefix . '.timeaccess = 0';
} else {
return $tableprefix . '.timeaccess != 0 AND ul.timeaccess < ' . $accesssince;
}
}
/**
* Returns SQL that can be used to limit a query to a period where the user last accessed the system.
*
* @param int $accesssince The time since last access
* @param string $tableprefix
* @return string
*/
function user_get_user_lastaccess_sql($accesssince = null, $tableprefix = 'u') {
if (empty($accesssince)) {
return '';
}
if ($accesssince == -1) { // Never.
return $tableprefix . '.lastaccess = 0';
} else {
return $tableprefix . '.lastaccess != 0 AND u.lastaccess < ' . $accesssince;
}
}
/**
* Callback for inplace editable API.
*
* @param string $itemtype - Only user_roles is supported.
* @param string $itemid - Courseid and userid separated by a :
* @param string $newvalue - json encoded list of roleids.
* @return \core\output\inplace_editable
*/
function core_user_inplace_editable($itemtype, $itemid, $newvalue) {
if ($itemtype === 'user_roles') {
return \core_user\output\user_roles_editable::update($itemid, $newvalue);
}
}
| gpl-3.0 |
ttomasini/AWS | dist/Doxygen/html/search/functions_0.js | 510 | var searchData=
[
['booltobyte',['boolToByte',['../convert_8cpp.html#a86cb12eecbe381c8a16ab8334bb3d96f',1,'boolToByte(bool b): convert.cpp'],['../convert_8h.html#a86cb12eecbe381c8a16ab8334bb3d96f',1,'boolToByte(bool b): convert.cpp']]],
['bytetoint',['byteToInt',['../convert_8cpp.html#a5ceb93f16a36822ce84fe75152be3876',1,'byteToInt(uint8_t b1, uint8_t b2): convert.cpp'],['../convert_8h.html#a5ceb93f16a36822ce84fe75152be3876',1,'byteToInt(uint8_t b1, uint8_t b2): convert.cpp']]]
];
| gpl-3.0 |
TeoTwawki/darkstar | scripts/zones/Abyssea-Vunkerl/Zone.lua | 808 | -----------------------------------
--
-- Zone: Abyssea - Vunkerl
--
-----------------------------------
local ID = require("scripts/zones/Abyssea-Vunkerl/IDs")
require("scripts/globals/quests")
-----------------------------------
function onInitialize(zone)
end
function onZoneIn(player,prevZone)
local cs = -1
if player:getXPos() == 0 and player:getYPos() == 0 and player:getZPos() == 0 then
player:setPos(-351,-46.750,699.5,10)
end
if player:getQuestStatus(ABYSSEA, dsp.quest.id.abyssea.THE_TRUTH_BECKONS) == QUEST_ACCEPTED and player:getVar("1stTimeAbyssea") == 0 then
player:setVar("1stTimeAbyssea",1)
end
return cs
end
function onRegionEnter(player,region)
end
function onEventUpdate(player,csid,option)
end
function onEventFinish(player,csid,option)
end | gpl-3.0 |
null--/graviton | doc/GraVitoN/html/search/classes_66.js | 109 | var searchData=
[
['file',['File',['../class_gra_vito_n_1_1_utils_1_1_file.html',1,'GraVitoN::Utils']]]
];
| gpl-3.0 |
R-Lefebvre/ardupilot | libraries/AP_RangeFinder/AP_RangeFinder_UAVCAN.cpp | 6923 | #include <AP_HAL/AP_HAL.h>
#if HAL_WITH_UAVCAN
#include "AP_RangeFinder_UAVCAN.h"
#include <AP_BoardConfig/AP_BoardConfig_CAN.h>
#include <AP_UAVCAN/AP_UAVCAN.h>
#include <uavcan/equipment/range_sensor/Measurement.hpp>
extern const AP_HAL::HAL& hal;
#define debug_range_finder_uavcan(level_debug, can_driver, fmt, args...) do { if ((level_debug) <= AP::can().get_debug_level_driver(can_driver)) { hal.console->printf(fmt, ##args); }} while (0)
//UAVCAN Frontend Registry Binder
UC_REGISTRY_BINDER(MeasurementCb, uavcan::equipment::range_sensor::Measurement);
/*
constructor - registers instance at top RangeFinder driver
*/
AP_RangeFinder_UAVCAN::AP_RangeFinder_UAVCAN(RangeFinder::RangeFinder_State &_state, AP_RangeFinder_Params &_params) :
AP_RangeFinder_Backend(_state, _params)
{}
//links the rangefinder uavcan message to this backend
void AP_RangeFinder_UAVCAN::subscribe_msgs(AP_UAVCAN* ap_uavcan)
{
if (ap_uavcan == nullptr) {
return;
}
auto* node = ap_uavcan->get_node();
uavcan::Subscriber<uavcan::equipment::range_sensor::Measurement, MeasurementCb> *measurement_listener;
measurement_listener = new uavcan::Subscriber<uavcan::equipment::range_sensor::Measurement, MeasurementCb>(*node);
// Register method to handle incoming RangeFinder measurement
const int measurement_listener_res = measurement_listener->start(MeasurementCb(ap_uavcan, &handle_measurement));
if (measurement_listener_res < 0) {
AP_HAL::panic("UAVCAN RangeFinder subscriber start problem\n\r");
return;
}
}
//Method to find the backend relating to the node id
AP_RangeFinder_UAVCAN* AP_RangeFinder_UAVCAN::get_uavcan_backend(AP_UAVCAN* ap_uavcan, uint8_t node_id, uint8_t address, bool create_new)
{
if (ap_uavcan == nullptr) {
return nullptr;
}
AP_RangeFinder_UAVCAN* driver = nullptr;
//Scan through the Rangefinder params to find UAVCAN RFND with matching address.
for (uint8_t i = 0; i < RANGEFINDER_MAX_INSTANCES; i++) {
if ((RangeFinder::Type)AP::rangefinder()->params[i].type.get() == RangeFinder::Type::UAVCAN &&
AP::rangefinder()->params[i].address == address) {
driver = (AP_RangeFinder_UAVCAN*)AP::rangefinder()->drivers[i];
}
//Double check if the driver was initialised as UAVCAN Type
if (driver != nullptr && (driver->_backend_type == RangeFinder::Type::UAVCAN)) {
if (driver->_ap_uavcan == ap_uavcan &&
driver->_node_id == node_id) {
return driver;
} else {
//we found a possible duplicate addressed sensor
//we return nothing in such scenario
return nullptr;
}
}
}
if (create_new) {
for (uint8_t i = 0; i < RANGEFINDER_MAX_INSTANCES; i++) {
if ((RangeFinder::Type)AP::rangefinder()->params[i].type.get() == RangeFinder::Type::UAVCAN &&
AP::rangefinder()->params[i].address == address) {
if (AP::rangefinder()->drivers[i] != nullptr) {
//we probably initialised this driver as something else, reboot is required for setting
//it up as UAVCAN type
return nullptr;
}
AP::rangefinder()->drivers[i] = new AP_RangeFinder_UAVCAN(AP::rangefinder()->state[i], AP::rangefinder()->params[i]);
driver = (AP_RangeFinder_UAVCAN*)AP::rangefinder()->drivers[i];
if (driver == nullptr) {
break;
}
AP::rangefinder()->num_instances = MAX(i+1, AP::rangefinder()->num_instances);
//Assign node id and respective uavcan driver, for identification
if (driver->_ap_uavcan == nullptr) {
driver->_ap_uavcan = ap_uavcan;
driver->_node_id = node_id;
break;
}
}
}
}
return driver;
}
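//Editor's note (illustrative, not from the original source): a backend is only created here
//when a rangefinder instance is already configured as the UAVCAN type with an address equal
//to the sensor_id the node publishes. A rough example, where the parameter names and the
//numeric type value are assumptions that depend on the firmware version:
//  RNGFND1_TYPE = 24   (RangeFinder::Type::UAVCAN)
//  RNGFND1_ADDR = 10   (must match the sensor_id field of the Measurement message)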
//Called from frontend to update with the readings received by handler
void AP_RangeFinder_UAVCAN::update()
{
WITH_SEMAPHORE(_sem);
if ((AP_HAL::millis() - _last_reading_ms) > 500) {
//if data is older than 500ms, report NoData
set_status(RangeFinder::Status::NoData);
} else if (_status == RangeFinder::Status::Good && new_data) {
//copy over states
state.distance_cm = _distance_cm;
state.last_reading_ms = _last_reading_ms;
update_status();
new_data = false;
} else if (_status != RangeFinder::Status::Good) {
//handle additional states received by measurement handler
set_status(_status);
}
}
//RangeFinder message handler
void AP_RangeFinder_UAVCAN::handle_measurement(AP_UAVCAN* ap_uavcan, uint8_t node_id, const MeasurementCb &cb)
{
//fetch the matching uavcan driver, node id and sensor id backend instance
AP_RangeFinder_UAVCAN* driver = get_uavcan_backend(ap_uavcan, node_id, cb.msg->sensor_id, true);
if (driver == nullptr) {
return;
}
WITH_SEMAPHORE(driver->_sem);
switch (cb.msg->reading_type) {
case uavcan::equipment::range_sensor::Measurement::READING_TYPE_VALID_RANGE:
{
//update the states in backend instance
driver->_distance_cm = cb.msg->range*100.0f;
driver->_last_reading_ms = AP_HAL::millis();
driver->_status = RangeFinder::Status::Good;
driver->new_data = true;
break;
}
//Additional states supported by RFND message
case uavcan::equipment::range_sensor::Measurement::READING_TYPE_TOO_CLOSE:
{
driver->_last_reading_ms = AP_HAL::millis();
driver->_status = RangeFinder::Status::OutOfRangeLow;
break;
}
case uavcan::equipment::range_sensor::Measurement::READING_TYPE_TOO_FAR:
{
driver->_last_reading_ms = AP_HAL::millis();
driver->_status = RangeFinder::Status::OutOfRangeHigh;
break;
}
default:
{
break;
}
}
//copy over the sensor type of Rangefinder
switch (cb.msg->sensor_type) {
case uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_SONAR:
{
driver->_sensor_type = MAV_DISTANCE_SENSOR_ULTRASOUND;
break;
}
case uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_LIDAR:
{
driver->_sensor_type = MAV_DISTANCE_SENSOR_LASER;
break;
}
case uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_RADAR:
{
driver->_sensor_type = MAV_DISTANCE_SENSOR_RADAR;
break;
}
default:
{
driver->_sensor_type = MAV_DISTANCE_SENSOR_UNKNOWN;
break;
}
}
}
#endif // HAL_WITH_UAVCAN
| gpl-3.0 |
hellasmoon/graylog2-server | graylog2-server/src/main/java/org/graylog2/alerts/AlertScanner.java | 4756 | /**
* This file is part of Graylog.
*
* Graylog is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Graylog is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Graylog. If not, see <http://www.gnu.org/licenses/>.
*/
package org.graylog2.alerts;
import org.graylog2.plugin.alarms.AlertCondition;
import org.graylog2.plugin.database.ValidationException;
import org.graylog2.plugin.streams.Stream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.Optional;
public class AlertScanner {
private static final Logger LOG = LoggerFactory.getLogger(AlertScanner.class);
private final AlertService alertService;
private final AlertNotificationsSender alertNotificationsSender;
@Inject
public AlertScanner(AlertService alertService, AlertNotificationsSender alertNotificationsSender) {
this.alertService = alertService;
this.alertNotificationsSender = alertNotificationsSender;
}
private Alert handleTriggeredAlert(AlertCondition.CheckResult result, Stream stream, AlertCondition alertCondition) throws ValidationException {
// Persist alert.
final Alert alert = alertService.factory(result);
alertService.save(alert);
alertNotificationsSender.send(result, stream, alert, alertCondition);
return alert;
}
private void handleRepeatedAlert(Stream stream, AlertCondition alertCondition, AlertCondition.CheckResult result, Alert alert2) {
alertNotificationsSender.send(result, stream, alert2, alertCondition);
}
private void handleResolveAlert(Alert alert) {
alertService.resolveAlert(alert);
// TODO: Send resolve notifications
}
public boolean checkAlertCondition(Stream stream, AlertCondition alertCondition) {
if (stream.isPaused() || alertService.inGracePeriod(alertCondition)) {
return false;
}
try {
final AlertCondition.CheckResult result = alertCondition.runCheck();
final Optional<Alert> alert = alertService.getLastTriggeredAlert(stream.getId(), alertCondition.getId());
if (result.isTriggered()) {
if (!alert.isPresent() || alertService.isResolved(alert.get())) {
// Alert is triggered for the first time
LOG.debug("Alert condition [{}] is triggered. Sending alerts.", alertCondition);
handleTriggeredAlert(result, stream, alertCondition);
} else {
final Alert triggeredAlert = alert.get();
// There is already an alert for this condition and is unresolved
if (alertService.shouldRepeatNotifications(alertCondition, triggeredAlert)) {
// Repeat notifications because user wants to do that
LOG.debug("Alert condition [{}] is triggered and configured to repeat alert notifications. Sending alerts.", alertCondition);
handleRepeatedAlert(stream, alertCondition, result, triggeredAlert);
} else {
LOG.debug("Alert condition [{}] is triggered but alerts were already sent. Nothing to do.", alertCondition);
}
}
return true;
} else {
// if stream and condition had already an alert, mark it as resolved
if (alert.isPresent() && !alertService.isResolved(alert.get())) {
LOG.debug("Alert condition [{}] is not triggered anymore. Resolving alert.", alertCondition);
handleResolveAlert(alert.get());
} else {
LOG.debug("Alert condition [{}] is not triggered and is marked as resolved. Nothing to do.", alertCondition);
}
}
} catch (Exception e) {
if (LOG.isDebugEnabled()) {
LOG.error("Skipping alert check <{}/{}>", alertCondition.getTitle(), alertCondition.getId(), e);
} else {
LOG.error("Skipping alert check <{}/{}>: {} ({})", alertCondition.getTitle(),
alertCondition.getId(), e.getMessage(), e.getClass().getSimpleName());
}
}
return false;
}
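    // Editor's sketch (not part of the original class): the periodical that drives this
    // scanner calls it roughly like the loop below; the surrounding service names are
    // assumptions, not taken from this file.
    //
    //     for (Stream stream : streamService.loadAllEnabled()) {
    //         for (AlertCondition condition : streamService.getAlertConditions(stream)) {
    //             alertScanner.checkAlertCondition(stream, condition);
    //         }
    //     }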
}
| gpl-3.0 |
tonybreak/Registered | handlers/index.py | 2512 | # coding: utf-8
from handlers import base
from common import functions
class IndexHandler(base.BaseHandler):
def get(self, *args, **kwargs):
self.render('index.html')
class InfoHandler(base.SocketHandler):
def on_message(self, message):
data = functions.jsonToObject(message)
if not data:
return None
if not data.get('target') or not isinstance(data['target'], basestring):
return self.write_message('done')
        base.SocketHandler.status = True  # Reset the query state
findRes = self.db.targets.find_one({'target': data['target']})
if not findRes:
result = self._insertTarget(data['target'])
if not result:
return self.write_message('done')
findRes = {'plugins': []}
        # If the database already has records for some plugins, output those first, then check the plugins without records
for pluginName in findRes['plugins']:
tempObj = self.getPlugins.get(pluginName)
            # Plugin names can change and drift out of sync with the database records, so remove records for plugins that have changed
if not tempObj:
self._removePlugin(data['target'], pluginName)
continue
self.write_message({
'title': tempObj.__title__,
'url': tempObj.__url__
})
        # Compute the set difference, then run the check with the plugins that have no record in the database
diffList = list(set(self.getPlugins.keys()).difference(set(findRes['plugins'])))
if diffList:
map(lambda x: self.taskQueue.put(self.getPlugins[x]), diffList)
self.start(data['target'])
else:
self.write_message('done')
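    # Editor's illustration (not from the original source): a client is expected to send a
    # JSON text frame shaped like
    #
    #     {"target": "example.com"}
    #
    # where the hostname is a made-up example. Each plugin hit is pushed back as an object
    # with "title" and "url" keys, and the literal string 'done' marks the end of a scan.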
def _insertTarget(self, target):
insertRes = self.db.targets.insert_one({
'target': target,
'plugins': []
})
if insertRes.inserted_id:
return True
else:
return False
def _removePlugin(self, target, name):
updateRes = self.db.targets.update_one({
'target': target
}, {
'$pull': {
'plugins': name
}
})
# 因为mongodb < 2.6的版本没有modified_count,所以通过 raw_result里 n的值来判断是否更新成功
if not updateRes.raw_result.has_key('n'):
return False
if updateRes.raw_result['n']:
return True
else:
return False
| gpl-3.0 |
Terry-Weymouth/transmart-docker | 1.2.4/embedded/start-postgres.sh | 361 | #!/bin/sh
mkdir /etc/ssl/private-copy; mv /etc/ssl/private/* /etc/ssl/private-copy/; rm -r /etc/ssl/private; mv /etc/ssl/private-copy /etc/ssl/private; chmod -R 0700 /etc/ssl/private; chown -R postgres /etc/ssl/private
sudo -u postgres /usr/lib/postgresql/9.4/bin/postgres -D /var/lib/postgresql/9.4/main -c config_file=/etc/postgresql/9.4/main/postgresql.conf
| gpl-3.0 |
ffxinfinity/ffxinfinity | FFXI Server-Development/Build Files/scripts/globals/spells/barblind.lua | 792 | -----------------------------------------
-- Spell: Barblind
-----------------------------------------
require("scripts/globals/status");
-----------------------------------------
-- OnSpellCast
-----------------------------------------
function OnMagicCastingCheck(caster,target,spell)
return 0;
end;
function onSpellCast(caster,target,spell)
    enhanceSkill = caster:getSkillLevel(34);
    duration = 150;
    power = 1 + 0.02 * enhanceSkill;
    if(enhanceSkill >180)then
        duration = 150 + 0.8 * (enhanceSkill - 180);
end
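    -- Editor's worked example (not part of the original script): with an enhancing magic
    -- skill of 300 the formulas above give power = 1 + 0.02 * 300 = 7 and
    -- duration = 150 + 0.8 * (300 - 180) = 246 seconds, before the Composure bonus below.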
if (caster:hasStatusEffect(EFFECT_COMPOSURE) == true and caster:getID() == target:getID()) then
duration = duration * 3;
end
target:addStatusEffect(EFFECT_BARBLIND,power,0,duration,0,1);
return EFFECT_BARBLIND;
end;
| gpl-3.0 |
raoulbhatia/roundcubemail | program/lib/Roundcube/spellchecker/engine.php | 2697 | <?php
/**
+-----------------------------------------------------------------------+
| This file is part of the Roundcube Webmail client |
| |
| Copyright (C) 2011-2013, Kolab Systems AG |
| Copyright (C) 2008-2013, The Roundcube Dev Team |
| |
| Licensed under the GNU General Public License version 3 or |
| any later version with exceptions for skins & plugins. |
| See the README file for a full license statement. |
| |
| PURPOSE: |
| Interface class for a spell-checking backend |
+-----------------------------------------------------------------------+
| Author: Thomas Bruederli <[email protected]> |
+-----------------------------------------------------------------------+
*/
/**
* Interface class for a spell-checking backend
*
* @package Framework
* @subpackage Utils
*/
abstract class rcube_spellchecker_engine
{
const MAX_SUGGESTIONS = 10;
protected $lang;
protected $error;
protected $dictionary;
protected $separator = '/[\s\r\n\t\(\)\/\[\]{}<>\\"]+|[:;?!,\.](?=\W|$)/';
/**
* Default constructor
*/
public function __construct($dict, $lang)
{
$this->dictionary = $dict;
$this->lang = $lang;
}
/**
* Return a list of languages supported by this backend
*
* @return array Indexed list of language codes
*/
abstract function languages();
/**
* Set content and check spelling
*
* @param string $text Text content for spellchecking
*
     * @return bool True when no misspelling found, otherwise false
*/
abstract function check($text);
/**
* Returns suggestions for the specified word
*
* @param string $word The word
*
* @return array Suggestions list
*/
abstract function get_suggestions($word);
/**
* Returns misspelled words
*
* @param string $text The content for spellchecking. If empty content
* used for check() method will be used.
*
* @return array List of misspelled words
*/
abstract function get_words($text = null);
/**
* Returns error message
*
* @return string Error message
*/
public function error()
{
return $this->error;
}
}
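/*
 * Editor's sketch (not shipped with Roundcube): the frontend drives a concrete backend
 * roughly like this; the backend class name and the $dict argument stand in for whatever the
 * spellchecker frontend actually passes and are assumptions, not documented API.
 *
 *   $engine = new rcube_spellchecker_pspell($dict, 'en');
 *   if (!$engine->check($text)) {
 *       $words       = $engine->get_words();
 *       $suggestions = $engine->get_suggestions($words[0]);
 *   }
 */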
| gpl-3.0 |
jooooooon/core | tests/acceptance/xml-c14nize.c | 1068 | #include "platform.h"
#include <libxml/parser.h>
#include <libxml/xpathInternals.h>
#include <libxml/c14n.h>
static bool xmlC14nizeFile(const char *filename)
{
xmlDocPtr doc = xmlParseFile(filename);
if (doc == NULL)
{
fprintf(stderr, "Unable to open %s for canonicalization\n", filename);
return false;
}
xmlOutputBufferPtr out = xmlOutputBufferCreateFile(stdout, NULL);
if (out == NULL)
{
fprintf(stderr, "Unable to set up writer for stdout\n");
return false;
}
if (xmlC14NDocSaveTo(doc, NULL, XML_C14N_1_0, 0, true, out) < 0)
{
fprintf(stderr, "Unable to c14nize XML document\n");
return false;
}
if (xmlOutputBufferClose(out) < 0)
{
fprintf(stderr, "Unable to close writer for stdout\n");
return false;
}
xmlFreeDoc(doc);
return true;
}
int main(int argc, char **argv)
{
if (argc != 2)
{
fprintf(stderr, "Usage: xml-c14nize <XML file>\n");
return 2;
}
return xmlC14nizeFile(argv[1]) ? 0 : 1;
}
| gpl-3.0 |
openeyesarchive/eyedraw | src/ED/Drawing/Range.js | 4875 | /**
* (C) Moorfields Eye Hospital NHS Foundation Trust, 2008-2011
* (C) OpenEyes Foundation, 2011-2014
* This file is part of OpenEyes.
*
* OpenEyes is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* OpenEyes is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenEyes. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* Represents a range of numerical values
*
* @class Range
* @property {Float} min Minimum value
* @property {Float} max Maximum value
* @param {Float} _min
* @param {Float} _max
*/
ED.Range = function(_min, _max) {
// Properties
this.min = _min;
this.max = _max;
}
/**
* Set min and max with one function call
*
* @param {Float} _min
* @param {Float} _max
*/
ED.Range.prototype.setMinAndMax = function(_min, _max) {
// Set properties
this.min = _min;
this.max = _max;
}
/**
* Returns true if the parameter is less than the minimum of the range
*
* @param {Float} _num
* @returns {Bool} True if the parameter is less than the minimum
*/
ED.Range.prototype.isBelow = function(_num) {
if (_num < this.min) {
return true;
} else {
return false;
}
}
/**
* Returns true if the parameter is more than the maximum of the range
*
* @param {Float} _num
* @returns {Bool} True if the parameter is more than the maximum
*/
ED.Range.prototype.isAbove = function(_num) {
if (_num > this.max) {
return true;
} else {
return false;
}
}
/**
* Returns true if the parameter is inclusively within the range
*
* @param {Float} _num
* @returns {Bool} True if the parameter is within the range
*/
ED.Range.prototype.includes = function(_num) {
if (_num < this.min || _num > this.max) {
return false;
} else {
return true;
}
}
/**
* Constrains a value to the limits of the range
*
* @param {Float} _num
* @param {Float} _scaleLevel The drawing scale level.
* @returns {Float} The constrained value
*/
ED.Range.prototype.constrain = function(_num, _scaleLevel) {
_scaleLevel = _scaleLevel === undefined ? 1 : _scaleLevel
var min = this.min * _scaleLevel;
var max = this.max * _scaleLevel;
if (_num < min) {
return min;
} else if (_num > max) {
return max;
} else {
return _num;
}
}
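// Editor's illustration (not part of the original file): typical use of the helpers above,
// with made-up values.
//
//     var range = new ED.Range(-490, 490);
//     range.includes(100);          // true
//     range.constrain(600);         // 490
//     range.constrain(600, 0.5);    // 245, since both limits are scaled by _scaleLevel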
/**
* Returns true if the parameter is within the 'clockface' range represented by the min and max values
*
* @param {Float} _angle Angle to test
* @param {Bool} _isDegrees Flag indicating range is in degrees rather than radians
* @returns {Bool} True if the parameter is within the range
*/
ED.Range.prototype.includesInAngularRange = function(_angle, _isDegrees) {
// Arbitrary radius
var r = 100;
	// Points representing vectors of angles within range
var min = new ED.Point(0, 0);
var max = new ED.Point(0, 0);
var angle = new ED.Point(0, 0);
// Set points using polar coordinates
if (!_isDegrees) {
min.setWithPolars(r, this.min);
max.setWithPolars(r, this.max);
angle.setWithPolars(r, _angle);
} else {
min.setWithPolars(r, this.min * Math.PI / 180);
max.setWithPolars(r, this.max * Math.PI / 180);
angle.setWithPolars(r, _angle * Math.PI / 180);
}
return (min.clockwiseAngleTo(angle) <= min.clockwiseAngleTo(max));
}
/**
* Constrains a value to the limits of the angular range
*
* @param {Float} _angle Angle to test
* @param {Bool} _isDegrees Flag indicating range is in degrees rather than radians
* @returns {Float} The constrained value
*/
ED.Range.prototype.constrainToAngularRange = function(_angle, _isDegrees) {
// No point in constraining unless range is less than 360 degrees!
if ((this.max - this.min) < (_isDegrees ? 360 : (2 * Math.PI))) {
// Arbitrary radius
var r = 100;
// Points representing vectors of angles within range
var min = new ED.Point(0, 0);
var max = new ED.Point(0, 0);
var angle = new ED.Point(0, 0);
// Set points using polar coordinates
if (!_isDegrees) {
min.setWithPolars(r, this.min);
max.setWithPolars(r, this.max);
angle.setWithPolars(r, _angle);
} else {
min.setWithPolars(r, this.min * Math.PI / 180);
max.setWithPolars(r, this.max * Math.PI / 180);
angle.setWithPolars(r, _angle * Math.PI / 180);
}
// Return appropriate value depending on relationship to range
if (min.clockwiseAngleTo(angle) <= min.clockwiseAngleTo(max)) {
return _angle;
} else {
if (angle.clockwiseAngleTo(min) < max.clockwiseAngleTo(angle)) {
return this.min;
} else {
return this.max;
}
}
} else {
return _angle;
}
} | gpl-3.0 |
jam7/chromebrew | packages/openjpeg.rb | 1284 | require 'package'
class Openjpeg < Package
description 'OpenJPEG is an open-source JPEG 2000 codec written in C language.'
homepage 'https://github.com/uclouvain/openjpeg/'
version '2.1.2'
source_url 'https://github.com/uclouvain/openjpeg/archive/v2.1.2.tar.gz'
source_sha256 '4ce77b6ef538ef090d9bde1d5eeff8b3069ab56c4906f083475517c2c023dfa7'
binary_url ({
aarch64: 'https://dl.bintray.com/chromebrew/chromebrew/openjpeg-2.1.2-chromeos-armv7l.tar.xz',
armv7l: 'https://dl.bintray.com/chromebrew/chromebrew/openjpeg-2.1.2-chromeos-armv7l.tar.xz',
i686: 'https://dl.bintray.com/chromebrew/chromebrew/openjpeg-2.1.2-chromeos-i686.tar.xz',
x86_64: 'https://dl.bintray.com/chromebrew/chromebrew/openjpeg-2.1.2-chromeos-x86_64.tar.xz',
})
binary_sha256 ({
aarch64: '5a757e5b3576e636c9b04def1784dab0d54fb2d2b397a8f41f96e973920b5dad',
armv7l: '5a757e5b3576e636c9b04def1784dab0d54fb2d2b397a8f41f96e973920b5dad',
i686: '023b8baa817e114c2fa97a5cc0a0e79728d3587c0fd8d385b13d1d5a0994470f',
x86_64: '218d4224019530780f6b739b4f28e3c3a29d04a0f471f49290961d3956d7d9aa',
})
depends_on 'cmake'
def self.build
system "cmake ."
system "make"
end
def self.install
system "make DESTDIR=#{CREW_DEST_DIR} install"
end
end
| gpl-3.0 |
cs-au-dk/Artemis | WebKit/Source/WebCore/Modules/speech/SpeechRecognitionAlternative.h | 2088 | /*
* Copyright (C) 2012 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef SpeechRecognitionAlternative_h
#define SpeechRecognitionAlternative_h
#if ENABLE(SCRIPTED_SPEECH)
#include "PlatformString.h"
#include <wtf/RefCounted.h>
namespace WebCore {
class ScriptExecutionContext;
class SpeechRecognitionAlternative : public RefCounted<SpeechRecognitionAlternative> {
public:
static PassRefPtr<SpeechRecognitionAlternative> create(const String&, double);
const String& transcript() const { return m_transcript; }
double confidence() const { return m_confidence; }
private:
SpeechRecognitionAlternative(const String&, double);
String m_transcript;
double m_confidence;
};
} // namespace WebCore
#endif // ENABLE(SCRIPTED_SPEECH)
#endif // SpeechRecognitionAlternative_h
| gpl-3.0 |
islanderz/pneumaticcraft | src/pneumaticCraft/client/gui/GuiPlasticMixer.java | 5420 | package pneumaticCraft.client.gui;
import java.awt.Point;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.client.resources.I18n;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.util.MathHelper;
import pneumaticCraft.client.gui.widget.GuiAnimatedStat;
import pneumaticCraft.client.gui.widget.GuiCheckBox;
import pneumaticCraft.client.gui.widget.WidgetTank;
import pneumaticCraft.client.gui.widget.WidgetTemperature;
import pneumaticCraft.common.inventory.ContainerPlasticMixer;
import pneumaticCraft.common.item.Itemss;
import pneumaticCraft.common.tileentity.TileEntityPlasticMixer;
import pneumaticCraft.lib.PneumaticValues;
import pneumaticCraft.lib.Textures;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
@SideOnly(Side.CLIENT)
public class GuiPlasticMixer extends GuiPneumaticContainerBase<TileEntityPlasticMixer>{
private GuiButtonSpecial[] buttons;
private GuiCheckBox lockSelection;
public GuiPlasticMixer(InventoryPlayer player, TileEntityPlasticMixer te){
super(new ContainerPlasticMixer(player, te), te, Textures.GUI_PLASTIC_MIXER);
}
@Override
public void initGui(){
super.initGui();
addWidget(new WidgetTemperature(0, guiLeft + 55, guiTop + 25, 295, 500, te.getLogic(0)));
addWidget(new WidgetTemperature(1, guiLeft + 82, guiTop + 25, 295, 500, te.getLogic(1), PneumaticValues.PLASTIC_MIXER_MELTING_TEMP));
addWidget(new WidgetTank(3, guiLeft + 152, guiTop + 14, te.getFluidTank()));
GuiAnimatedStat stat = addAnimatedStat("gui.tab.plasticMixer.plasticSelection", new ItemStack(Itemss.plastic, 1, 1), 0xFF005500, false);
List<String> text = new ArrayList<String>();
for(int i = 0; i < 12; i++) {
text.add(" ");
}
stat.setTextWithoutCuttingString(text);
buttons = new GuiButtonSpecial[16];
for(int x = 0; x < 4; x++) {
for(int y = 0; y < 4; y++) {
int index = y * 4 + x;
ItemStack plastic = new ItemStack(Itemss.plastic, 1, index);
buttons[index] = new GuiButtonSpecial(index, x * 21 + 4, y * 21 + 30, 20, 20, "").setRenderStacks(plastic).setTooltipText(plastic.getDisplayName());
stat.addWidget(buttons[index]);
}
}
stat.addWidget(lockSelection = new GuiCheckBox(16, 4, 18, 0xFF000000, "gui.plasticMixer.lockSelection").setChecked(te.lockSelection).setTooltip(I18n.format("gui.plasticMixer.lockSelection.tooltip")));
}
@Override
public void updateScreen(){
super.updateScreen();
for(int i = 0; i < buttons.length; i++) {
buttons[i].enabled = te.selectedPlastic != i;
}
lockSelection.checked = te.lockSelection;
}
@Override
protected void drawGuiContainerForegroundLayer(int x, int y){
super.drawGuiContainerForegroundLayer(x, y);
fontRendererObj.drawString("Upgr.", 15, 19, 4210752);
fontRendererObj.drawString("Hull", 56, 16, 4210752);
fontRendererObj.drawString("Item", 88, 16, 4210752);
}
@Override
protected void drawGuiContainerBackgroundLayer(float partialTicks, int x, int y){
super.drawGuiContainerBackgroundLayer(partialTicks, x, y);
        // Draw one vertical fill bar per dye buffer; 0xFF0000 >> (8 * i) yields the
        // red, green and blue channel colours for i = 0, 1, 2 respectively.
        for(int i = 0; i < 3; i++) {
            double percentage = (double)te.dyeBuffers[i] / TileEntityPlasticMixer.DYE_BUFFER_MAX;
            drawVerticalLine(guiLeft + 123, guiTop + 37 + i * 18, guiTop + 37 - MathHelper.clamp_int((int)(percentage * 16), 1, 15) + i * 18, 0xFF000000 | 0xFF0000 >> 8 * i);
        }
}
@Override
protected Point getInvNameOffset(){
return new Point(0, -1);
}
@Override
protected Point getInvTextOffset(){
return null;
}
@Override
protected void addProblems(List<String> curInfo){
super.addProblems(curInfo);
if(te.getFluidTank().getFluidAmount() == 0) {
if(te.getStackInSlot(4) == null) {
curInfo.add("gui.tab.problems.plasticMixer.noPlastic");
} else {
curInfo.add("gui.tab.problems.notEnoughHeat");
}
} else {
if(te.getStackInSlot(4) != null) {
if(te.getLogic(1).getTemperature() >= PneumaticValues.PLASTIC_MIXER_MELTING_TEMP && te.getFluidTank().getCapacity() - te.getFluidTank().getFluidAmount() < 1000) {
curInfo.add("gui.tab.problems.plasticMixer.plasticLiquidOverflow");
}
}
}
if(te.getStackInSlot(TileEntityPlasticMixer.INV_DYE_RED) == null) {
curInfo.add(I18n.format("gui.tab.problems.plasticMixer.noDye", new ItemStack(Items.dye, 1, 1).getDisplayName()));
}
if(te.getStackInSlot(TileEntityPlasticMixer.INV_DYE_GREEN) == null) {
curInfo.add(I18n.format("gui.tab.problems.plasticMixer.noDye", new ItemStack(Items.dye, 1, 2).getDisplayName()));
}
if(te.getStackInSlot(TileEntityPlasticMixer.INV_DYE_BLUE) == null) {
curInfo.add(I18n.format("gui.tab.problems.plasticMixer.noDye", new ItemStack(Items.dye, 1, 4).getDisplayName()));
}
if(curInfo.size() == 0) {
curInfo.add(I18n.format("gui.tab.problems.plasticMixer.noProblems"));
}
}
}
| gpl-3.0 |
cchampet/TuttleOFX | plugins/image/process/color/Normalize/src/NormalizeDefinitions.hpp | 2016 | #ifndef _TUTTLE_PLUGIN_NORMALIZE_DEFINITIONS_HPP_
#define _TUTTLE_PLUGIN_NORMALIZE_DEFINITIONS_HPP_
#include <tuttle/plugin/global.hpp>
namespace tuttle {
namespace plugin {
namespace normalize {
static const std::string kParamHelpButton = "Help";
static const std::string kParamMode = "mode";
static const std::string kParamModeAnalyse = "analyse";
static const std::string kParamModeCustom = "custom";
enum EParamMode
{
eParamModeAnalyse = 0,
eParamModeCustom
};
static const std::string kParamAnalyseNow = "analyseNow";
static const std::string kParamAnalyseMode = "analyseMode";
static const std::string kParamAnalysePerChannel = "perChannel";
static const std::string kParamAnalyseLuminosity = "luminosity";
static const std::string kParamAnalyseR = "r";
static const std::string kParamAnalyseG = "g";
static const std::string kParamAnalyseB = "b";
static const std::string kParamAnalyseA = "a";
enum EParamAnalyseMode
{
eParamAnalyseModePerChannel = 0,
eParamAnalyseModeLuminosity,
eParamAnalyseModeR,
eParamAnalyseModeG,
eParamAnalyseModeB,
eParamAnalyseModeA
};
static const std::string kParamSrcGroup = "srcGroup";
static const std::string kParamSrcCustomColorMin = "srcColorMin";
static const std::string kParamSrcCustomColorMax = "srcColorMax";
static const std::string kParamSrcCustomValueMin = "srcValueMin";
static const std::string kParamSrcCustomValueMax = "srcValueMax";
static const std::string kParamDstGroup = "dstGroup";
static const std::string kParamDstCustomColorMin = "dstColorMin";
static const std::string kParamDstCustomColorMax = "dstColorMax";
static const std::string kParamDstCustomValueMin = "dstValueMin";
static const std::string kParamDstCustomValueMax = "dstValueMax";
static const std::string kParamProcessGroup = "processGroup";
static const std::string kParamProcessR = "processR";
static const std::string kParamProcessG = "processG";
static const std::string kParamProcessB = "processB";
static const std::string kParamProcessA = "processA";
}
}
}
#endif
| gpl-3.0 |
polymec/polyglot-dev | 3rdparty/exodus/exodus/cbind/src/ex_get_sets.c | 3284 | /*
* Copyright (c) 2012 Sandia Corporation. Under the terms of Contract
* DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government
* retains certain rights in this software.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* * Neither the name of Sandia Corporation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include <stddef.h> // for size_t
#include <stdlib.h> // for NULL
#include "exodusII.h" // for ex_set, ex_get_set_param, etc
#include "exodusII_int.h" // for EX_FATAL, EX_NOERR
int ex_get_sets (int exoid,
size_t set_count,
struct ex_set *sets)
{
size_t i;
int status = EX_NOERR;
int stat;
for (i=0; i < set_count; i++) {
if (ex_int64_status(exoid) & EX_BULK_INT64_API) {
stat = ex_get_set_param(exoid, sets[i].type, sets[i].id,
&sets[i].num_entry, &sets[i].num_distribution_factor);
} else {
/* API expecting 32-bit ints; ex_set structure has 64-bit ints. */
int num_entry;
int num_dist;
stat = ex_get_set_param(exoid, sets[i].type, sets[i].id,
&num_entry, &num_dist);
sets[i].num_entry = num_entry;
sets[i].num_distribution_factor = num_dist;
}
if (stat != EX_NOERR) status = (status == EX_FATAL) ? EX_FATAL : stat;
if (stat == EX_NOERR && (sets[i].entry_list != NULL || sets[i].extra_list != NULL)) {
stat = ex_get_set(exoid, sets[i].type, sets[i].id, sets[i].entry_list, sets[i].extra_list);
if (stat != EX_NOERR) status = (status == EX_FATAL) ? EX_FATAL : stat;
}
if (stat == EX_NOERR && sets[i].distribution_factor_list != NULL) {
stat = ex_get_set_dist_fact(exoid, sets[i].type, sets[i].id, sets[i].distribution_factor_list);
if (stat != EX_NOERR) status = (status == EX_FATAL) ? EX_FATAL : stat;
}
}
return status;
}
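/* Usage sketch (illustrative, not part of the Exodus library): the caller
 * fills in the type and id of each ex_set entry and either leaves the list
 * pointers NULL (to fetch sizes only) or points them at buffers to receive
 * the data. The ids, buffer size and int64_t element type below are
 * assumptions; the actual element type depends on the EX_BULK_INT64_API mode.
 *
 *   struct ex_set sets[2];
 *   int64_t      nodes[128];
 *
 *   sets[0].type = EX_NODE_SET;  sets[0].id = 10;
 *   sets[0].entry_list = nodes;  sets[0].extra_list = NULL;
 *   sets[0].distribution_factor_list = NULL;
 *
 *   sets[1].type = EX_SIDE_SET;  sets[1].id = 20;
 *   sets[1].entry_list = NULL;   sets[1].extra_list = NULL;
 *   sets[1].distribution_factor_list = NULL;
 *
 *   int err = ex_get_sets(exoid, 2, sets);
 */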
| mpl-2.0 |
cdr-stats/cdr-stats | cdr_stats/frontend/templates/frontend/master.html | 6513 | {% load i18n crispy_forms_tags %}
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<head xmlns="http://www.w3.org/1999/xhtml" lang="{{ LANGUAGE_CODE }}" xml:lang="{{ LANGUAGE_CODE }}">
<!--
- CDR-Stats License
- http://www.cdr-stats.org
-
- This Source Code Form is subject to the terms of the Mozilla Public
- License, v. 2.0. If a copy of the MPL was not distributed with this file,
- You can obtain one at http://mozilla.org/MPL/2.0/.
-
- Copyright (C) 2011-2015 Star2Billing S.L.
-
- The Initial Developer of the Original Code is
- Arezqui Belaid <[email protected]>
-
-->
<meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
<title>{% block title %}CDR-Stats{% endblock %} | {% trans "Customer Interface" noop %}</title>
<link rel="shortcut icon" type="image/x-icon" href="{{ STATIC_URL }}cdr-stats/images/favicon.ico">
{% block header_files %}
<script src="{{ STATIC_URL }}jquery/jquery.min.js" type="text/javascript"></script>
<!-- CSS -->
<link href="{{ STATIC_URL }}bootstrap/dist/css/bootstrap.css" rel="stylesheet">
<link href="{{ STATIC_URL }}cdr-stats/css/sidebar-offcanvas.css" rel="stylesheet">
<link href="{{ STATIC_URL }}jquery-ui/themes/flick/jquery-ui.min.css" type="text/css" rel="stylesheet" media="all"/>
<link href="{{ STATIC_URL }}components-font-awesome/css/font-awesome.min.css" type="text/css" rel="stylesheet" media="all"/>
<link href="{{ STATIC_URL }}cdr-stats/css/cdr_stats_custom.css" rel="stylesheet" type="text/css" media="screen" />
<!-- nvd3 -->
{% block nvd3_header %}
<link href="{{ STATIC_URL }}nvd3/build/nv.d3.min.css" rel="stylesheet" type="text/css" media="screen" />
<script src="{{ STATIC_URL }}d3/d3.js" type="text/javascript"></script>
<script src="{{ STATIC_URL }}nvd3/build/nv.d3.min.js" type="text/javascript"></script>
{% endblock %}
<!-- javascript -->
<script type="text/javascript" src="/jsi18n/"></script>
<script src="{{ STATIC_URL }}jquery-ui/ui/minified/jquery-ui.min.js" type="text/javascript"></script>
<script src="{{ STATIC_URL }}bootstrap/dist/js/bootstrap.js" type="text/javascript"></script>
<script src="{{ STATIC_URL }}bootbox/bootbox.js" type="text/javascript"></script>
<script src="{{ STATIC_URL }}cdr-stats/js/sidebar-offcanvas.js" type="text/javascript"></script>
<script src="{{ STATIC_URL }}cdr-stats/js/cdr_stats_custom.js" type="text/javascript"></script>
{% block bootstrap_switch_js %}
<link href="{{ STATIC_URL }}bootstrap-switch/build/css/bootstrap3/bootstrap-switch.min.css" rel="stylesheet" />
<script src="{{ STATIC_URL }}bootstrap-switch/build/js/bootstrap-switch.min.js" type="text/javascript"></script>
{% endblock %}
<!-- Include Print CSS -->
<link rel="stylesheet" href="{{ STATIC_URL }}cdr-stats/css/cdr_stats_print.css" type="text/css" media="print" />
{% endblock %}
{% block extra_header %}
{{ extra_header }}
{% endblock %}
</head>
<body>
{% block menu %}
{% include "frontend/bootstrap_menu.html" %}
{% endblock %}
{% block container %}
<div class="container">
<div class="row row-offcanvas row-offcanvas-right">
<div class="col-xs-12 col-sm-12">
<!--offcanvas side menu toggle button-->
<div class="row">
{% if errorlogin %}
<div class="alert alert-danger">
<strong>{% trans "alert"|capfirst %} : </strong> {{ errorlogin|capfirst }}
</div>
{% else %}
{% if notlogged %}
<div class="alert alert-danger">
<strong>{% trans "alert"|capfirst %} : </strong> {% trans "please login by clicking on login button"|capfirst %}
</div>
{% endif %}
{% endif %}
{% block row_fluid %}
<div class="row">
{% if not user.is_authenticated %}
<br/>
<div class="col-6 col-sm-6 col-lg-6">
{% crispy loginform loginform.helper %}
</div>
<div class="col-6 col-sm-6 col-lg-6">
<form class="form-inline well" role="form" action="/i18n/setlang/" method="post">
{% csrf_token %}
{% trans "display language"|capfirst %} :
<div class="checkbox">
<label>
<select name="language" id="language-container" class="form-control" onchange="this.form.submit()">
{% for lang in LANGUAGES %}
<option value="{{ lang.0 }}" {% ifequal lang.0 LANGUAGE_CODE %}selected{% endifequal %}>{{ lang.1 }}</option>
{% endfor %}
</select>
</label>
</div>
</form>
</div>
{% endif %}
</div>
{% endblock %}
{% block header %}
<div class="page-header">
{% block content_header %}
<h1>CDR-Stats <small>{% trans "call traffic analysis and alert solution"|title %}</small></h1>
{% endblock %}
</div>
{% endblock %}
{% block extra_head %}
{{ extra_head }}
{% endblock %}
<!--Success Message of view-->
{% if msg %}
<div class="alert alert-success">
<strong>{% trans "alert"|capfirst %} : </strong> {{ msg|capfirst }}
</div>
{% endif %}
<!--Error Message of view-->
{% if error_msg %}
<div class="alert alert-danger">
<strong>{% trans "error"|capfirst %} : </strong> {{ error_msg|capfirst }}
</div>
{% endif %}
<!--Info Message of view-->
{% if info_msg %}
<div class="alert alert-info">
<strong>{% trans "info"|capfirst %} : </strong> {{ info_msg|capfirst }}
</div>
{% endif %}
{% block content %}
{{ content }}
{% endblock %}
</div>
</div><!--end right side-->
</div><!--div row row-offcanvas row-offcanvas-right-->
{% ifnotequal menu 'off' %}
{% include "frontend/footer.html" %}
{% endifnotequal %}
</div><!-- div container -->
{% endblock %}
</body>
</html>
| mpl-2.0 |
ameihm0912/MozDef | meteor/app/lib/collections.js | 12418 | /*
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
Copyright (c) 2014 Mozilla Corporation
Contributors:
Jeff Bryner [email protected]
Anthony Verez [email protected]
Yash Mehrotra [email protected]
*/
//collections shared by client/server
events = new Meteor.Collection("events");
alerts = new Meteor.Collection("alerts");
investigations = new Meteor.Collection("investigations");
incidents = new Meteor.Collection("incidents");
veris = new Meteor.Collection("veris");
kibanadashboards = new Meteor.Collection("kibanadashboards");
mozdefsettings = new Meteor.Collection("mozdefsettings");
healthfrontend = new Meteor.Collection("healthfrontend");
healthescluster = new Meteor.Collection("healthescluster");
healthesnodes = new Meteor.Collection("healthesnodes");
healtheshotthreads = new Meteor.Collection("healtheshotthreads");
attackers = new Meteor.Collection("attackers");
actions = new Meteor.Collection("actions");
userActivity = new Meteor.Collection("userActivity");
if (Meteor.isServer) {
//Publishing setups
Meteor.publish("mozdefsettings",function(){
return mozdefsettings.find();
});
Meteor.publish("alerts-summary", function (searchregex,timeperiod,recordlimit) {
//tail the last 100 records by default
//default parameters
timeperiod = typeof timeperiod !== 'undefined' ? timeperiod: 'tail';
searchregex = typeof searchregex !== 'undefined' ? searchregex: '';
recordlimit = ['number'].indexOf(typeof(recordlimit)) ? 100:recordlimit;
//sanity check the record limit
if ( recordlimit >10000 || recordlimit < 1){
recordlimit = 100;
}
if ( timeperiod ==='tail' || timeperiod == 'none' ){
return alerts.find(
{summary: {$regex:searchregex}},
{fields:{
_id:1,
esmetadata:1,
utctimestamp:1,
utcepoch:1,
summary:1,
severity:1,
category:1,
acknowledged:1,
acknowledgedby:1,
url:1
},
sort: {utcepoch: -1},
limit:recordlimit}
);
} else {
//determine the utcepoch range
beginningtime=moment().utc();
//expect timeperiod like '1 days'
timevalue=Number(timeperiod.split(" ")[0]);
timeunits=timeperiod.split(" ")[1];
beginningtime.subtract(timevalue,timeunits);
return alerts.find(
{summary: {$regex:searchregex},
utcepoch: {$gte: beginningtime.unix()}},
{fields:{
_id:1,
esmetadata:1,
utctimestamp:1,
utcepoch:1,
summary:1,
severity:1,
category:1,
acknowledged:1
},
sort: {utcepoch: -1},
limit:recordlimit}
);
}
});
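  // Client-side usage sketch (the call below is assumed template code, not part
  // of this file): the three arguments map to searchregex, timeperiod and
  // recordlimit above, so the latest 100 alerts matching "bro" over the last
  // 2 days would be requested as:
  //
  //   Meteor.subscribe("alerts-summary", "bro", "2 days", 100);
  //
  // and read back on the client via alerts.find({}, {sort: {utcepoch: -1}}).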
Meteor.publish("alerts-details",function(alertid,includeEvents){
//return alerts.find({'esmetadata.id': alertid});
//alert ids can be either mongo or elastic search IDs
//look for both to publish to the collection.
//default parameters
includeEvents = typeof includeEvents !== 'undefined' ? includeEvents: true;
if ( includeEvents ){
return alerts.find({
$or:[
{'esmetadata.id': alertid},
{'_id': alertid},
]
});
}else{
return alerts.find({
$or:[
{'esmetadata.id': alertid},
{'_id': alertid},
]
},
{fields:{events:0},
});
}
});
Meteor.publish("alerts-count", function () {
var self = this;
var count = 0;
var initializing = true;
var recordID=Meteor.uuid();
//get a count by watching for only 1 new entry sorted in reverse date order.
//use that hook to return a find().count rather than iterating the entire result set over and over
var handle = alerts.find({}, {sort: {utcepoch: -1},limit:1}).observeChanges({
added: function (newDoc,oldDoc) {
count=alerts.find().count();
if (!initializing) {
self.changed("alerts-count", recordID,{count: count});
//console.log('added alerts count to' + count);
}
},
changed: function (newDoc,oldDoc) {
count=alerts.find().count();
if (!initializing) {
self.changed("alerts-count", recordID,{count: count});
//console.log('changed alerts count to' + count);
}
},
removed: function (newDoc,oldDoc) {
count=alerts.find().count();
if (!initializing) {
self.changed("alerts-count", recordID,{count: count});
//console.log('changed alerts count to' + count);
}
}
});
initializing = false;
self.added("alerts-count", recordID,{count: count});
//console.log('count is ready: ' + count);
self.ready();
// Stop observing the cursor when client unsubs.
// Stopping a subscription automatically takes
// care of sending the client any removed messages.
self.onStop(function () {
//console.log('stopped publishing alerts count.')
handle.stop();
});
});
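  // Consumption sketch (assumed client/template code, not part of this file):
  // the single record published here lands in the "alerts-count" client-side
  // collection declared below as alertsCount, so after
  //
  //   Meteor.subscribe("alerts-count");
  //
  // the current total is (alertsCount.findOne() || {count: 0}).count.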
//publish the last X event/alerts
//using document index instead of date
// Meteor.publish("attacker-details",function(attackerid){
// return attackers.find({'_id': attackerid},
// {fields: {
// events:{$slice: 20,
// $sort: { documentindex: -1 }},
// alerts:{$slice: -10}
// }}
// );
// });
Meteor.publish("attacker-details",function(attackerid){
return attackers.find({'_id': attackerid},
{fields: {
events:{$slice: -20},
alerts:{$slice: -10}
},
sort: { 'events.documentsource.utctimestamp': -1 },
reactive:false
}
);
});
Meteor.publish("attackers-summary", function () {
//limit to the last 100 records by default
//to ease the sync transfer to dc.js/crossfilter
return attackers.find({},
{fields:{
events:0,
alerts:0,
},
sort: {lastseentimestamp: -1},
limit:100});
});
Meteor.publish("attackers-summary-landmass", function () {
//limit to the last 100 records by default
//to ease the sync transfer to dc.js/crossfilter
var inModifier = { $in: ["broxss", "brotunnel", "brosqli"]};
return attackers.find({"events.documentsource.category": inModifier},
{sort: {lastseentimestamp: -1},
limit: 100});
});
Meteor.publish("investigations-summary", function () {
return investigations.find({},
{fields: {
_id:1,
summary:1,
phase:1,
dateOpened:1,
dateClosed:1,
creator:1
},
sort: {dateOpened: -1},
limit:100});
});
Meteor.publish("investigation-details",function(investigationid){
return investigations.find({'_id': investigationid});
});
Meteor.publish("incidents-summary", function () {
return incidents.find({},
{fields: {
_id:1,
summary:1,
phase:1,
dateOpened:1,
dateClosed:1,
creator:1
},
sort: {dateOpened: -1},
limit:100});
});
Meteor.publish("incident-details",function(incidentid){
return incidents.find({'_id': incidentid});
});
Meteor.publish("veris", function () {
return veris.find({}, {limit:0});
});
Meteor.publish("healthfrontend", function () {
return healthfrontend.find({}, {limit:0});
});
Meteor.publish("healthescluster", function () {
return healthescluster.find({}, {limit:0});
});
Meteor.publish("healthesnodes", function () {
return healthesnodes.find({}, {limit:0});
});
Meteor.publish("healtheshotthreads", function () {
return healtheshotthreads.find({}, {limit:0});
});
Meteor.publish("kibanadashboards", function () {
return kibanadashboards.find({},{sort:{name:1}, limit:30});
});
Meteor.publish("userActivity", function () {
return userActivity.find({},{sort:{userID:1}, limit:100});
});
//access rules from clients
//barebones to allow you to specify rules
//currently incidents collection is the only one updated by clients
//for speed of access
//the only rule is that the incident creator is the only one who can delete an incident.
incidents.allow({
insert: function (userId, doc) {
// the user must be logged in
return (userId);
},
update: function (userId, doc, fields, modifier) {
// the user must be logged in
return (userId);
},
remove: function (userId, doc) {
      // can only remove one's own incidents
return doc.creator === Meteor.user().profile.email;
},
fetch: ['creator']
});
attackers.allow({
update: function (userId, doc, fields, modifier) {
// the user must be logged in
return (userId);
}
});
alerts.allow({
update: function (userId, doc, fields, modifier) {
// the user must be logged in
return (userId);
}
});
investigations.allow({
insert: function (userId, doc) {
// the user must be logged in
return (userId);
},
update: function (userId, doc, fields, modifier) {
// the user must be logged in
return (userId);
},
remove: function (userId, doc) {
// can only remove one's own items
return doc.creator === Meteor.user().profile.email;
},
fetch: ['creator']
});
userActivity.allow({
insert: function (userId, doc) {
// the user must be logged in
return (userId);
},
remove: function (userId, doc) {
// can only remove one's own items
return doc.userId === Meteor.user().profile.email;
},
});
};
if (Meteor.isClient) {
//client side collections:
alertsCount = new Meteor.Collection("alerts-count");
//client-side subscriptions
Meteor.subscribe("mozdefsettings");
Meteor.subscribe("veris");
Meteor.subscribe("kibanadashboards");
Meteor.subscribe("userActivity");
};
| mpl-2.0 |
TiWinDeTea/Raoul-the-Game | src/main/java/com/github/tiwindetea/raoulthegame/model/livings/Pet.java | 2638 | //////////////////////////////////////////////////////////////////////////////////
// //
// This Source Code Form is subject to the terms of the Mozilla Public //
// License, v. 2.0. If a copy of the MPL was not distributed with this //
// file, You can obtain one at http://mozilla.org/MPL/2.0/. //
// //
//////////////////////////////////////////////////////////////////////////////////
package com.github.tiwindetea.raoulthegame.model.livings;
import com.github.tiwindetea.raoulthegame.model.space.Vector2i;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.lang.ref.WeakReference;
import java.util.Collection;
import java.util.List;
/**
* Created by organic-code on 7/3/16.
*/
public abstract class Pet extends LivingThing {
protected WeakReference<LivingThing> owner;
protected LivingThing target;
public Pet(LivingThing owner,
String name,
int level,
double maxHitPoints,
double attackPower,
double defensePower,
@Nonnull Vector2i position) {
super();
this.name = name;
this.level = level;
this.maxHitPoints = maxHitPoints;
this.hitPoints = 0;
this.attackPower = attackPower;
this.defensePower = defensePower;
this.position = position.copy();
this.owner = new WeakReference<>(owner);
}
protected LivingThing getOwner() {
return this.owner.get();
}
@Override
public final LivingThingType getType() {
return LivingThingType.PET;
}
/**
     * This function should be called whenever you consider this pet should be upgraded
*/
public abstract void levelUp();
/**
* Handler. This function should be called each time the pet's owner is attacked
*
* @param source the source of the damages
*/
public abstract void ownerDamaged(@Nullable LivingThing source);
/**
     * Handler. This function should be called each time the pet's owner is attacking
*
* @param target the damages' target
*/
public abstract void ownerAttacking(@Nonnull LivingThing target);
@Override
public void live(List<Mob> mobs, Collection<Player> players, Collection<LivingThing> others, boolean[][] los) {
this.live(mobs, players, null);
}
public abstract void live(List<Mob> mobs, Collection<Player> players, Collection<LivingThing> all);
}
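// Illustrative subclass sketch (hypothetical, not shipped with the game): a
// concrete pet forwards its owner and stats to the constructor and implements
// the abstract hooks (plus whatever LivingThing itself leaves abstract). The
// class name and numbers below are assumptions.
//
//   public class WolfPet extends Pet {
//       public WolfPet(LivingThing owner, Vector2i position) {
//           super(owner, "Wolf", 1, 20.0, 3.0, 1.0, position);
//       }
//       @Override public void levelUp() { this.level++; this.maxHitPoints += 5; }
//       @Override public void ownerDamaged(LivingThing source) { this.target = source; }
//       @Override public void ownerAttacking(LivingThing target) { this.target = target; }
//       @Override public void live(List<Mob> mobs, Collection<Player> players,
//                                   Collection<LivingThing> all) { /* move towards target */ }
//   }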
| mpl-2.0 |
mkodekar/Fennece-Browser | base/background/bagheera/BoundedByteArrayEntity.java | 2490 | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.mozilla.gecko.background.bagheera;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import ch.boye.httpclientandroidlib.entity.AbstractHttpEntity;
import ch.boye.httpclientandroidlib.entity.ByteArrayEntity;
/**
* An entity that acts like {@link ByteArrayEntity}, but exposes a window onto
* the byte array that is a subsection of the array. The purpose of this is to
* allow a smaller entity to be created without having to resize the source
* array.
*/
public class BoundedByteArrayEntity extends AbstractHttpEntity implements
Cloneable {
protected final byte[] content;
protected final int start;
protected final int end;
protected final int length;
/**
* Create a new entity that behaves exactly like a {@link ByteArrayEntity}
* created with a copy of <code>b</code> truncated to (
* <code>end - start</code>) bytes, starting at <code>start</code>.
*
* @param b the byte array to use.
* @param start the start index.
* @param end the end index.
*/
public BoundedByteArrayEntity(final byte[] b, final int start, final int end) {
if (b == null) {
throw new IllegalArgumentException("Source byte array may not be null.");
}
if (end < start ||
start < 0 ||
end < 0 ||
start > b.length ||
end > b.length) {
throw new IllegalArgumentException("Bounds out of range.");
}
this.content = b;
this.start = start;
this.end = end;
this.length = end - start;
}
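  // Usage sketch (illustrative, not part of the class): wrap bytes 10..30 of an
  // existing buffer without copying it. "request" stands for any entity-enclosing
  // HTTP request object and is an assumption here.
  //
  //   BoundedByteArrayEntity entity = new BoundedByteArrayEntity(payload, 10, 30);
  //   request.setEntity(entity);   // reports a content length of 20 bytes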
@Override
public boolean isRepeatable() {
return true;
}
@Override
public long getContentLength() {
return this.length;
}
@Override
public InputStream getContent() {
return new ByteArrayInputStream(this.content, this.start, this.length);
}
@Override
public void writeTo(final OutputStream outstream) throws IOException {
if (outstream == null) {
throw new IllegalArgumentException("Output stream may not be null.");
}
    // Write only the bounded window so the output matches getContent() and getContentLength().
    outstream.write(this.content, this.start, this.length);
outstream.flush();
}
@Override
public boolean isStreaming() {
return false;
}
@Override
public Object clone() throws CloneNotSupportedException {
return super.clone();
}
} | mpl-2.0 |
pyecs/servo | components/layout/table_cell.rs | 14810 | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! CSS table formatting contexts.
#![deny(unsafe_code)]
use block::{BlockFlow, ISizeAndMarginsComputer, MarginsMayCollapseFlag};
use context::LayoutContext;
use display_list_builder::{BlockFlowDisplayListBuilding, BorderPaintingMode};
use flow::{Flow, FlowClass, OpaqueFlow};
use fragment::{Fragment, FragmentBorderBoxIterator};
use model::MaybeAuto;
use layout_debug;
use table::InternalTable;
use table_row::{CollapsedBorder, CollapsedBorderProvenance};
use wrapper::ThreadSafeLayoutNode;
use cssparser::Color;
use euclid::{Point2D, Rect, SideOffsets2D, Size2D};
use gfx::display_list::DisplayList;
use std::fmt;
use std::sync::Arc;
use style::computed_values::{border_collapse, border_top_style};
use style::legacy::UnsignedIntegerAttribute;
use style::properties::ComputedValues;
use util::geometry::Au;
use util::logical_geometry::{LogicalMargin, LogicalRect, LogicalSize, WritingMode};
/// A table formatting context.
#[derive(RustcEncodable)]
pub struct TableCellFlow {
/// Data common to all block flows.
pub block_flow: BlockFlow,
/// Border collapse information for the cell.
pub collapsed_borders: CollapsedBordersForCell,
/// The column span of this cell.
pub column_span: u32,
/// Whether this cell is visible. If false, the value of `empty-cells` means that we must not
/// display this cell.
pub visible: bool,
}
impl TableCellFlow {
pub fn from_node_fragment_and_visibility_flag(node: &ThreadSafeLayoutNode,
fragment: Fragment,
visible: bool)
-> TableCellFlow {
TableCellFlow {
block_flow: BlockFlow::from_node_and_fragment(node, fragment, None),
collapsed_borders: CollapsedBordersForCell::new(),
column_span: node.get_unsigned_integer_attribute(UnsignedIntegerAttribute::ColSpan)
.unwrap_or(1),
visible: visible,
}
}
pub fn fragment<'a>(&'a mut self) -> &'a Fragment {
&self.block_flow.fragment
}
pub fn mut_fragment<'a>(&'a mut self) -> &'a mut Fragment {
&mut self.block_flow.fragment
}
/// Assign block-size for table-cell flow.
///
/// inline(always) because this is only ever called by in-order or non-in-order top-level
/// methods.
#[inline(always)]
fn assign_block_size_table_cell_base<'a>(&mut self, layout_context: &'a LayoutContext<'a>) {
self.block_flow.assign_block_size_block_base(
layout_context,
MarginsMayCollapseFlag::MarginsMayNotCollapse)
}
}
impl Flow for TableCellFlow {
fn class(&self) -> FlowClass {
FlowClass::TableCell
}
fn as_table_cell<'a>(&'a mut self) -> &'a mut TableCellFlow {
self
}
fn as_immutable_table_cell<'a>(&'a self) -> &'a TableCellFlow {
self
}
fn as_block<'a>(&'a mut self) -> &'a mut BlockFlow {
&mut self.block_flow
}
fn as_immutable_block(&self) -> &BlockFlow {
&self.block_flow
}
/// Minimum/preferred inline-sizes set by this function are used in automatic table layout
/// calculation.
fn bubble_inline_sizes(&mut self) {
let _scope = layout_debug_scope!("table_cell::bubble_inline_sizes {:x}",
self.block_flow.base.debug_id());
self.block_flow.bubble_inline_sizes();
let specified_inline_size = MaybeAuto::from_style(self.block_flow
.fragment
.style()
.content_inline_size(),
Au(0)).specified_or_zero();
if self.block_flow.base.intrinsic_inline_sizes.minimum_inline_size <
specified_inline_size {
self.block_flow.base.intrinsic_inline_sizes.minimum_inline_size = specified_inline_size
}
if self.block_flow.base.intrinsic_inline_sizes.preferred_inline_size <
self.block_flow.base.intrinsic_inline_sizes.minimum_inline_size {
self.block_flow.base.intrinsic_inline_sizes.preferred_inline_size =
self.block_flow.base.intrinsic_inline_sizes.minimum_inline_size;
}
}
/// Recursively (top-down) determines the actual inline-size of child contexts and fragments.
/// When called on this context, the context has had its inline-size set by the parent table
/// row.
fn assign_inline_sizes(&mut self, layout_context: &LayoutContext) {
let _scope = layout_debug_scope!("table_cell::assign_inline_sizes {:x}",
self.block_flow.base.debug_id());
debug!("assign_inline_sizes({}): assigning inline_size for flow", "table_cell");
// The position was set to the column inline-size by the parent flow, table row flow.
let containing_block_inline_size = self.block_flow.base.block_container_inline_size;
let inline_size_computer = InternalTable {
border_collapse: self.block_flow.fragment.style.get_inheritedtable().border_collapse,
};
inline_size_computer.compute_used_inline_size(&mut self.block_flow,
layout_context,
containing_block_inline_size);
let inline_start_content_edge =
self.block_flow.fragment.border_box.start.i +
self.block_flow.fragment.border_padding.inline_start;
let inline_end_content_edge =
self.block_flow.base.block_container_inline_size -
self.block_flow.fragment.border_padding.inline_start_end() -
self.block_flow.fragment.border_box.size.inline;
let padding_and_borders = self.block_flow.fragment.border_padding.inline_start_end();
let content_inline_size =
self.block_flow.fragment.border_box.size.inline - padding_and_borders;
self.block_flow.propagate_assigned_inline_size_to_children(layout_context,
inline_start_content_edge,
inline_end_content_edge,
content_inline_size,
|_, _, _, _, _, _| {});
}
fn assign_block_size<'a>(&mut self, layout_context: &'a LayoutContext<'a>) {
debug!("assign_block_size: assigning block_size for table_cell");
self.assign_block_size_table_cell_base(layout_context);
}
fn compute_absolute_position(&mut self, layout_context: &LayoutContext) {
self.block_flow.compute_absolute_position(layout_context)
}
fn update_late_computed_inline_position_if_necessary(&mut self, inline_position: Au) {
self.block_flow.update_late_computed_inline_position_if_necessary(inline_position)
}
fn update_late_computed_block_position_if_necessary(&mut self, block_position: Au) {
self.block_flow.update_late_computed_block_position_if_necessary(block_position)
}
fn build_display_list(&mut self, layout_context: &LayoutContext) {
if !self.visible {
return
}
let border_painting_mode = match self.block_flow
.fragment
.style
.get_inheritedtable()
.border_collapse {
border_collapse::T::separate => BorderPaintingMode::Separate,
border_collapse::T::collapse => BorderPaintingMode::Collapse(&self.collapsed_borders),
};
self.block_flow.build_display_list_for_block(box DisplayList::new(),
layout_context,
border_painting_mode)
}
fn repair_style(&mut self, new_style: &Arc<ComputedValues>) {
self.block_flow.repair_style(new_style)
}
fn compute_overflow(&self) -> Rect<Au> {
self.block_flow.compute_overflow()
}
fn generated_containing_block_size(&self, flow: OpaqueFlow) -> LogicalSize<Au> {
self.block_flow.generated_containing_block_size(flow)
}
fn iterate_through_fragment_border_boxes(&self,
iterator: &mut FragmentBorderBoxIterator,
stacking_context_position: &Point2D<Au>) {
self.block_flow.iterate_through_fragment_border_boxes(iterator, stacking_context_position)
}
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) {
self.block_flow.mutate_fragments(mutator)
}
}
impl fmt::Debug for TableCellFlow {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "TableCellFlow: {:?}", self.block_flow)
}
}
#[derive(Copy, Clone, Debug, RustcEncodable)]
pub struct CollapsedBordersForCell {
pub inline_start_border: CollapsedBorder,
pub inline_end_border: CollapsedBorder,
pub block_start_border: CollapsedBorder,
pub block_end_border: CollapsedBorder,
pub inline_start_width: Au,
pub inline_end_width: Au,
pub block_start_width: Au,
pub block_end_width: Au,
}
impl CollapsedBordersForCell {
fn new() -> CollapsedBordersForCell {
CollapsedBordersForCell {
inline_start_border: CollapsedBorder::new(),
inline_end_border: CollapsedBorder::new(),
block_start_border: CollapsedBorder::new(),
block_end_border: CollapsedBorder::new(),
inline_start_width: Au(0),
inline_end_width: Au(0),
block_start_width: Au(0),
block_end_width: Au(0),
}
}
fn should_paint_inline_start_border(&self) -> bool {
self.inline_start_border.provenance != CollapsedBorderProvenance::FromPreviousTableCell
}
fn should_paint_inline_end_border(&self) -> bool {
self.inline_end_border.provenance != CollapsedBorderProvenance::FromNextTableCell
}
fn should_paint_block_start_border(&self) -> bool {
self.block_start_border.provenance != CollapsedBorderProvenance::FromPreviousTableCell
}
fn should_paint_block_end_border(&self) -> bool {
self.block_end_border.provenance != CollapsedBorderProvenance::FromNextTableCell
}
pub fn adjust_border_widths_for_painting(&self, border_widths: &mut LogicalMargin<Au>) {
border_widths.inline_start = if !self.should_paint_inline_start_border() {
Au(0)
} else {
self.inline_start_border.width
};
border_widths.inline_end = if !self.should_paint_inline_end_border() {
Au(0)
} else {
self.inline_end_border.width
};
border_widths.block_start = if !self.should_paint_block_start_border() {
Au(0)
} else {
self.block_start_border.width
};
border_widths.block_end = if !self.should_paint_block_end_border() {
Au(0)
} else {
self.block_end_border.width
}
}
pub fn adjust_border_bounds_for_painting(&self,
border_bounds: &mut Rect<Au>,
writing_mode: WritingMode) {
        // The bounds are pushed out by half the collapsed-border slot width, plus
        // half the border width if this cell paints that border, or minus half if a
        // neighbouring cell paints it instead (hence the negative divisors).
        let inline_start_divisor = if self.should_paint_inline_start_border() {
            2
        } else {
            -2
        };
let inline_start_offset = self.inline_start_width / 2 + self.inline_start_border.width /
inline_start_divisor;
let inline_end_divisor = if self.should_paint_inline_end_border() {
2
} else {
-2
};
let inline_end_offset = self.inline_end_width / 2 + self.inline_end_border.width /
inline_end_divisor;
let block_start_divisor = if self.should_paint_block_start_border() {
2
} else {
-2
};
let block_start_offset = self.block_start_width / 2 + self.block_start_border.width /
block_start_divisor;
let block_end_divisor = if self.should_paint_block_end_border() {
2
} else {
-2
};
let block_end_offset = self.block_end_width / 2 + self.block_end_border.width /
block_end_divisor;
// FIXME(pcwalton): Get the real container size.
let mut logical_bounds =
LogicalRect::from_physical(writing_mode, *border_bounds, Size2D::new(Au(0), Au(0)));
logical_bounds.start.i = logical_bounds.start.i - inline_start_offset;
logical_bounds.start.b = logical_bounds.start.b - block_start_offset;
logical_bounds.size.inline = logical_bounds.size.inline + inline_start_offset +
inline_end_offset;
logical_bounds.size.block = logical_bounds.size.block + block_start_offset +
block_end_offset;
*border_bounds = logical_bounds.to_physical(writing_mode, Size2D::new(Au(0), Au(0)))
}
pub fn adjust_border_colors_and_styles_for_painting(
&self,
border_colors: &mut SideOffsets2D<Color>,
border_styles: &mut SideOffsets2D<border_top_style::T>,
writing_mode: WritingMode) {
let logical_border_colors = LogicalMargin::new(writing_mode,
self.block_start_border.color,
self.inline_end_border.color,
self.block_end_border.color,
self.inline_start_border.color);
*border_colors = logical_border_colors.to_physical(writing_mode);
let logical_border_styles = LogicalMargin::new(writing_mode,
self.block_start_border.style,
self.inline_end_border.style,
self.block_end_border.style,
self.inline_start_border.style);
*border_styles = logical_border_styles.to_physical(writing_mode);
}
}
| mpl-2.0 |
Yukarumya/Yukarum-Redfoxes | media/libsoundtouch/src/FIRFilter.cpp | 9499 | ////////////////////////////////////////////////////////////////////////////////
///
/// General FIR digital filter routines with MMX optimization.
///
/// Note : MMX optimized functions reside in a separate, platform-specific file,
/// e.g. 'mmx_win.cpp' or 'mmx_gcc.cpp'
///
/// Author : Copyright (c) Olli Parviainen
/// Author e-mail : oparviai 'at' iki.fi
/// SoundTouch WWW: http://www.surina.net/soundtouch
///
////////////////////////////////////////////////////////////////////////////////
//
// Last changed : $Date: 2015-02-21 21:24:29 +0000 (Sat, 21 Feb 2015) $
// File revision : $Revision: 4 $
//
// $Id: FIRFilter.cpp 202 2015-02-21 21:24:29Z oparviai $
//
////////////////////////////////////////////////////////////////////////////////
//
// License :
//
// SoundTouch audio processing library
// Copyright (c) Olli Parviainen
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
////////////////////////////////////////////////////////////////////////////////
#include <memory.h>
#include <assert.h>
#include <math.h>
#include <stdlib.h>
#include "FIRFilter.h"
#include "cpu_detect.h"
using namespace soundtouch;
/*****************************************************************************
*
* Implementation of the class 'FIRFilter'
*
*****************************************************************************/
FIRFilter::FIRFilter()
{
resultDivFactor = 0;
resultDivider = 0;
length = 0;
lengthDiv8 = 0;
filterCoeffs = NULL;
}
FIRFilter::~FIRFilter()
{
delete[] filterCoeffs;
}
// Usual C-version of the filter routine for stereo sound
uint FIRFilter::evaluateFilterStereo(SAMPLETYPE *dest, const SAMPLETYPE *src, uint numSamples) const
{
int j, end;
#ifdef SOUNDTOUCH_FLOAT_SAMPLES
// when using floating point samples, use a scaler instead of a divider
// because division is much slower operation than multiplying.
double dScaler = 1.0 / (double)resultDivider;
#endif
assert(length != 0);
assert(src != NULL);
assert(dest != NULL);
assert(filterCoeffs != NULL);
end = 2 * (numSamples - length);
#pragma omp parallel for
for (j = 0; j < end; j += 2)
{
const SAMPLETYPE *ptr;
LONG_SAMPLETYPE suml, sumr;
uint i;
suml = sumr = 0;
ptr = src + j;
for (i = 0; i < length; i += 4)
{
// loop is unrolled by factor of 4 here for efficiency
suml += ptr[2 * i + 0] * filterCoeffs[i + 0] +
ptr[2 * i + 2] * filterCoeffs[i + 1] +
ptr[2 * i + 4] * filterCoeffs[i + 2] +
ptr[2 * i + 6] * filterCoeffs[i + 3];
sumr += ptr[2 * i + 1] * filterCoeffs[i + 0] +
ptr[2 * i + 3] * filterCoeffs[i + 1] +
ptr[2 * i + 5] * filterCoeffs[i + 2] +
ptr[2 * i + 7] * filterCoeffs[i + 3];
}
#ifdef SOUNDTOUCH_INTEGER_SAMPLES
suml >>= resultDivFactor;
sumr >>= resultDivFactor;
// saturate to 16 bit integer limits
suml = (suml < -32768) ? -32768 : (suml > 32767) ? 32767 : suml;
// saturate to 16 bit integer limits
sumr = (sumr < -32768) ? -32768 : (sumr > 32767) ? 32767 : sumr;
#else
suml *= dScaler;
sumr *= dScaler;
#endif // SOUNDTOUCH_INTEGER_SAMPLES
dest[j] = (SAMPLETYPE)suml;
dest[j + 1] = (SAMPLETYPE)sumr;
}
return numSamples - length;
}
// Usual C-version of the filter routine for mono sound
uint FIRFilter::evaluateFilterMono(SAMPLETYPE *dest, const SAMPLETYPE *src, uint numSamples) const
{
int j, end;
#ifdef SOUNDTOUCH_FLOAT_SAMPLES
// when using floating point samples, use a scaler instead of a divider
// because division is much slower operation than multiplying.
double dScaler = 1.0 / (double)resultDivider;
#endif
assert(length != 0);
end = numSamples - length;
#pragma omp parallel for
for (j = 0; j < end; j ++)
{
const SAMPLETYPE *pSrc = src + j;
LONG_SAMPLETYPE sum;
uint i;
sum = 0;
for (i = 0; i < length; i += 4)
{
// loop is unrolled by factor of 4 here for efficiency
sum += pSrc[i + 0] * filterCoeffs[i + 0] +
pSrc[i + 1] * filterCoeffs[i + 1] +
pSrc[i + 2] * filterCoeffs[i + 2] +
pSrc[i + 3] * filterCoeffs[i + 3];
}
#ifdef SOUNDTOUCH_INTEGER_SAMPLES
sum >>= resultDivFactor;
// saturate to 16 bit integer limits
sum = (sum < -32768) ? -32768 : (sum > 32767) ? 32767 : sum;
#else
sum *= dScaler;
#endif // SOUNDTOUCH_INTEGER_SAMPLES
dest[j] = (SAMPLETYPE)sum;
}
return end;
}
uint FIRFilter::evaluateFilterMulti(SAMPLETYPE *dest, const SAMPLETYPE *src, uint numSamples, uint numChannels)
{
int j, end;
#ifdef SOUNDTOUCH_FLOAT_SAMPLES
// when using floating point samples, use a scaler instead of a divider
// because division is much slower operation than multiplying.
double dScaler = 1.0 / (double)resultDivider;
#endif
assert(length != 0);
assert(src != NULL);
assert(dest != NULL);
assert(filterCoeffs != NULL);
assert(numChannels < 16);
end = numChannels * (numSamples - length);
#pragma omp parallel for
for (j = 0; j < end; j += numChannels)
{
const SAMPLETYPE *ptr;
LONG_SAMPLETYPE sums[16];
uint c, i;
for (c = 0; c < numChannels; c ++)
{
sums[c] = 0;
}
ptr = src + j;
for (i = 0; i < length; i ++)
{
SAMPLETYPE coef=filterCoeffs[i];
for (c = 0; c < numChannels; c ++)
{
sums[c] += ptr[0] * coef;
ptr ++;
}
}
for (c = 0; c < numChannels; c ++)
{
#ifdef SOUNDTOUCH_INTEGER_SAMPLES
sums[c] >>= resultDivFactor;
#else
sums[c] *= dScaler;
#endif // SOUNDTOUCH_INTEGER_SAMPLES
dest[j+c] = (SAMPLETYPE)sums[c];
}
}
return numSamples - length;
}
// Set filter coefficients and length.
//
// Throws an exception if filter length isn't divisible by 8
void FIRFilter::setCoefficients(const SAMPLETYPE *coeffs, uint newLength, uint uResultDivFactor)
{
assert(newLength > 0);
if (newLength % 8) ST_THROW_RT_ERROR("FIR filter length not divisible by 8");
lengthDiv8 = newLength / 8;
length = lengthDiv8 * 8;
assert(length == newLength);
resultDivFactor = uResultDivFactor;
resultDivider = (SAMPLETYPE)::pow(2.0, (int)resultDivFactor);
delete[] filterCoeffs;
filterCoeffs = new SAMPLETYPE[length];
memcpy(filterCoeffs, coeffs, length * sizeof(SAMPLETYPE));
}
uint FIRFilter::getLength() const
{
return length;
}
// Applies the filter to the given sequence of samples.
//
// Note : The number of output samples is smaller than the number of input
//        samples by 'filter_length'.
uint FIRFilter::evaluate(SAMPLETYPE *dest, const SAMPLETYPE *src, uint numSamples, uint numChannels)
{
assert(length > 0);
assert(lengthDiv8 * 8 == length);
if (numSamples < length) return 0;
#ifndef USE_MULTICH_ALWAYS
if (numChannels == 1)
{
return evaluateFilterMono(dest, src, numSamples);
}
else if (numChannels == 2)
{
return evaluateFilterStereo(dest, src, numSamples);
}
else
#endif // USE_MULTICH_ALWAYS
{
assert(numChannels > 0);
return evaluateFilterMulti(dest, src, numSamples, numChannels);
}
}
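// Typical call sequence (illustrative sketch; coefficient count and channel
// count are assumptions):
//
//   FIRFilter *filter = FIRFilter::newInstance();   // picks MMX/SSE or plain C
//   filter->setCoefficients(coeffs, 32, divFactor); // length must be divisible by 8
//   uint outSamples = filter->evaluate(dest, src, numSamples, 2); // stereo
//   delete filter;
//
// As noted above, 'evaluate' produces numSamples - length output samples, so
// the destination buffer can be sized accordingly.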
// Operator 'new' is overloaded so that it automatically creates a suitable instance
// depending on if we've a MMX-capable CPU available or not.
void * FIRFilter::operator new(size_t s)
{
// Notice! don't use "new FIRFilter" directly, use "newInstance" to create a new instance instead!
ST_THROW_RT_ERROR("Error in FIRFilter::new: Don't use 'new FIRFilter', use 'newInstance' member instead!");
return newInstance();
}
FIRFilter * FIRFilter::newInstance()
{
#if defined(SOUNDTOUCH_ALLOW_MMX) || defined(SOUNDTOUCH_ALLOW_SSE)
uint uExtensions;
uExtensions = detectCPUextensions();
#endif
// Check if MMX/SSE instruction set extensions supported by CPU
#ifdef SOUNDTOUCH_ALLOW_MMX
// MMX routines available only with integer sample types
if (uExtensions & SUPPORT_MMX)
{
return ::new FIRFilterMMX;
}
else
#endif // SOUNDTOUCH_ALLOW_MMX
#ifdef SOUNDTOUCH_ALLOW_SSE
if (uExtensions & SUPPORT_SSE)
{
// SSE support
return ::new FIRFilterSSE;
}
else
#endif // SOUNDTOUCH_ALLOW_SSE
{
// ISA optimizations not supported, use plain C version
return ::new FIRFilter;
}
}
| mpl-2.0 |
garbas/mozilla-releng-services | src/shipit_pipeline/tests/test_pipeline.py | 4166 | # -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
import pytest
import requests
from unittest.mock import MagicMock
from shipit_pipeline.pipeline import PipelineStep, get_runnable_steps, refresh_pipeline_steps
@pytest.fixture
def pipeline_steps():
pipeline_steps_ = [
{
'api_url': 'http://localhost:5001/signoff1',
'description': 'signoff 1',
'parameters': {
},
'parameters_schema': 'https://null',
'requires': [
],
'uid': 'signoff1'
}, {
'api_url': 'http://localhost:5001/signoff2',
'description': 'signoff 2 - relman gatekeeps all the things',
'parameters': {
},
'parameters_schema': 'https://null',
'requires': [
'signoff1'
],
'uid': 'signoff2'
}, {
'api_url': 'http://localhost:5001/publish1',
'description': 'final publish',
'parameters': {
},
'parameters_schema': 'https://null',
'requires': [
'signoff2'
],
'uid': 'publish1'
}
]
return [PipelineStep.from_dict(step) for step in pipeline_steps_]
def test_get_runnable_steps_when_nothing_has_started(pipeline_steps):
runnables = get_runnable_steps(pipeline_steps)
assert len(runnables) == 1
assert runnables[0].uid == 'signoff1'
def test_get_runnable_steps_state_changed(pipeline_steps):
pipeline_steps[0].state = 'completed'
runnables = get_runnable_steps(pipeline_steps)
assert len(runnables) == 1
assert runnables[0].uid == 'signoff2'
def test_get_runnable_steps_dependency_in_failure(pipeline_steps):
pipeline_steps[0].state = 'exception'
runnables = get_runnable_steps(pipeline_steps)
assert len(runnables) == 0
def test_get_runnable_steps_state_changed2(pipeline_steps):
pipeline_steps[0].state = 'completed'
pipeline_steps[1].state = 'completed'
runnables = get_runnable_steps(pipeline_steps)
assert len(runnables) == 1
assert runnables[0].uid == 'publish1'
def test_get_runnable_steps_many_can_run_at_the_beginning(pipeline_steps):
another_first_step = PipelineStep(uid='parallel_action_to_signoff1', url='http://null', params={}, requires=[])
pipeline_steps.append(another_first_step)
runnables = get_runnable_steps(pipeline_steps)
assert [r.uid for r in runnables] == ['signoff1', 'parallel_action_to_signoff1']
def test_get_runnable_steps_many_upstream_dependencies(pipeline_steps):
upstream_dep = PipelineStep(uid='upstream_dep', url='http://null', params={}, requires=[])
upstream_dep.state = 'completed'
pipeline_steps[1].requires.append(upstream_dep.uid)
pipeline_steps.append(upstream_dep)
runnables = get_runnable_steps(pipeline_steps)
assert [r.uid for r in runnables] == ['signoff1']
pipeline_steps[0].state = 'completed'
runnables = get_runnable_steps(pipeline_steps)
assert [r.uid for r in runnables] == ['signoff2']
def test_get_runnable_steps_many_many_downstream_deps_run(pipeline_steps):
downstream_dep = PipelineStep(uid='another_downstream_dep', url='http://null', params={}, requires=[])
pipeline_steps.append(downstream_dep)
pipeline_steps[0].state = 'completed'
runnables = get_runnable_steps(pipeline_steps)
assert [r.uid for r in runnables] == ['signoff2', 'another_downstream_dep']
def test_refresh_pipeline_steps(pipeline_steps, monkeypatch):
def mock_get_request(url, verify):
get_response = MagicMock()
get_response.json.return_value = {'state': 'completed'} if 'signoff1' in url else {'state': 'busted'}
return get_response
monkeypatch.setattr(requests, 'get', mock_get_request)
pipeline_steps[0].state = 'running'
pipeline_steps = refresh_pipeline_steps(pipeline_steps)
assert pipeline_steps[0].state == 'completed'
assert pipeline_steps[1].state == 'pending'
assert pipeline_steps[2].state == 'pending'
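# Sketch of the scheduling loop these tests exercise (hypothetical driver code,
# not part of this module): a pipeline is advanced by refreshing step states
# and starting whatever has become runnable; start_step is an assumed helper.
#
#     while any(s.state not in ('completed', 'exception') for s in steps):
#         steps = refresh_pipeline_steps(steps)
#         for step in get_runnable_steps(steps):
#             start_step(step)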
| mpl-2.0 |
openmrs-gci/openmrs-module-appframework | omod/src/test/java/org/openmrs/module/appframework/test/Matchers.java | 1461 | package org.openmrs.module.appframework.test;
import org.hamcrest.BaseMatcher;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.SelfDescribing;
import org.openmrs.util.OpenmrsUtil;
import java.util.Map;
public class Matchers {
/**
* @param key
* @param expected a Matcher (or an Object, which will be tested with OpenmrsUtil.nullSafeEquals)
* @return a matcher that matches a Map entry (for key) matching expected
*/
public static Matcher<? super Map<String, ?>> hasEntry(final String key, final Object expected) {
return new BaseMatcher<Map<String, ?>>() {
@Override
public boolean matches(Object o) {
Object actual = ((Map) o).get(key);
if (expected instanceof Matcher) {
return ((Matcher) expected).matches(actual);
}
else {
return OpenmrsUtil.nullSafeEquals(actual, expected);
}
}
@Override
public void describeTo(Description description) {
description.appendText("map entry " + key + " should ");
if (expected instanceof Matcher) {
description.appendDescriptionOf((SelfDescribing) expected);
}
else {
description.appendText("equal " + expected);
}
}
};
}
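    // Example (illustrative): both plain values and nested Hamcrest matchers are
    // accepted, so a Map returned by a controller could be checked with e.g.
    //
    //   assertThat(result, hasEntry("display", "Vitals"));
    //   assertThat(result, hasEntry("order", notNullValue()));
    //
    // where "result", assertThat and notNullValue are assumed to come from the
    // surrounding test code and Hamcrest, not from this class.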
}
| mpl-2.0 |
jvelasques/cfr | cfr-core/resources/cdeComponents/FileBrowserComponent/FileBrowserComponent.js | 2715 |
var FileBrowserComponent = BaseComponent.extend({
update: function(){
var myself = this,
$ph = $("#"+this.htmlObject),
root = this.rootFolder.charAt(this.rootFolder.length - 1) == "/" ? this.rootFolder : this.rootFolder+"/",
$content;
if (!this.fileExtensions)
this.fileExtensions = "";
$ph.addClass('fileBrowserComponent');
if(this.chartDefinition.height != undefined){
$ph.css('height',this.chartDefinition.height+'px');
}
if(this.chartDefinition.width != undefined){
$ph.css('width',this.chartDefinition.width+'px');
}
$ph.css('overflow','auto');
$ph.fileTree(
{
root: root,
script: myself.buildTreeURL(),
expandSpeed: 1,
collapseSpeed: 1,
multiFolder: true,
htmlTreeModifier: function(content){
return myself.modifyTree(content);
}
},
function(){});
},
getValue: function() {
},
buildTreeURL: function(){
return Endpoints.getListFiles() + "?fileExtensions=" + this.fileExtensions;
},
buildGetURL: function(rel){
return Endpoints.getFile() + "?fileName=" + rel;
},
modifyTree: function(content){
var myself = this;
var $content = content;
if(!$content.hasClass('directory'))
$content.find('ul').addClass("treeview filetree");
$content.find('li:last').addClass("last");
$.each($content.find('li.directory'),function(){
//get rel from a
var rel = $(this).find('a').attr('rel');
$("<div/>").addClass("hitarea expandable-hitarea").attr('rel',rel).prependTo($(this));
});
$.each($content.find('li.directory a'), function(){
$(this).addClass('folder');
});
$.each($content.find('li.file'), function(){
$("<div/>").addClass("file").prependTo($(this));
});
$.each($content.find('li.file a'), function(){
var rel = $(this).attr('rel');
//$(this).attr({target: '_blank', href : myself.buildGetURL(rel)});
$(this).click(function(){
window.location.href = myself.buildGetURL(rel);
});
});
return $content;
},
downloadDataURI :function(options) {
if(!options) {
return;
}
$.isPlainObject(options) || (options = {data: options});
if(!$.browser.webkit) {
location.href = options.data;
}
options.filename || (options.filename = "download." + options.data.split(",")[0].split(";")[0].substring(5).split("/")[1]);
$('<form method="post" action="'+options.url+'" style="display:none"><input type="hidden" name="filename" value="'+options.filename+'"/><input type="hidden" name="data" value="'+options.data+'"/></form>').submit().remove();
}
});
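// CDE usage sketch (assumed dashboard-side configuration, not part of this
// file): the component reads rootFolder, fileExtensions and an optional
// chartDefinition {width, height} before rendering into htmlObject. The
// property values and the extension-filter format below are assumptions.
//
//   var comp = new FileBrowserComponent();
//   comp.name = "fileBrowser";
//   comp.htmlObject = "browserDiv";
//   comp.rootFolder = "/";
//   comp.fileExtensions = "csv|txt";
//   comp.chartDefinition = { width: 400, height: 300 };
//   Dashboards.addComponents([comp]);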
| mpl-2.0 |
dfober/libmusicxml | src/lilypond/messagesHandling.cpp | 7291 | /*
MusicXML Library
Copyright (C) Grame 2006-2013
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
Grame Research Laboratory, 11, cours de Verdun Gensoul 69002 Lyon - France
[email protected]
*/
#include "utilities.h"
#include "messagesHandling.h"
#include "musicXMLOah.h"
#include "generalOah.h"
using namespace std;
namespace MusicXML2
{
//______________________________________________________________________________
void msrAssert (
bool condition,
string messageIfFalse)
{
if (! condition) {
gLogOstream <<
"#### msrAssert failure: " << messageIfFalse <<
", aborting." <<
endl;
abort ();
}
}
//______________________________________________________________________________
void msrWarning (
string context,
string inputSourceName,
int inputLineNumber,
string message)
{
if (! gGeneralOah->fQuiet) {
gLogOstream <<
"*** " << context << " warning *** " <<
inputSourceName << ":" << inputLineNumber << ": " <<message <<
endl;
gWarningsInputLineNumbers.insert (inputLineNumber);
}
}
//______________________________________________________________________________
void lpsrMusicXMLWarning (
string inputSourceName,
int inputLineNumber,
string message)
{
msrWarning (
"LPSR",
inputSourceName,
inputLineNumber,
message);
}
//______________________________________________________________________________
void msrMusicXMLWarning (
string inputSourceName,
int inputLineNumber,
string message)
{
msrWarning (
"MusicXML",
inputSourceName,
inputLineNumber,
message);
}
//______________________________________________________________________________
void msrInternalWarning (
string inputSourceName,
int inputLineNumber,
string message)
{
msrWarning (
"INTERNAL",
inputSourceName,
inputLineNumber,
message);
}
//______________________________________________________________________________
void msrError (
string context,
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
if (! gGeneralOah->fQuiet) {
if (gGeneralOah->fDisplaySourceCodePosition) {
gLogOstream <<
baseName (sourceCodeFileName) << ":" << sourceCodeLineNumber <<
" ";
}
if (! gGeneralOah->fDontShowErrors) {
gLogOstream <<
"### " << context << " ERROR ### " <<
inputSourceName << ":" << inputLineNumber << ": " << message <<
endl;
gErrorsInputLineNumbers.insert (inputLineNumber);
}
}
}
//______________________________________________________________________________
void msrMusicXMLError (
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
msrError (
"MusicXML",
inputSourceName,
inputLineNumber,
sourceCodeFileName,
sourceCodeLineNumber,
message);
if (! gGeneralOah->fDontShowErrors) {
if (! gGeneralOah->fDontAbortOnErrors) {
abort ();
}
else {
exit (15);
}
}
}
//______________________________________________________________________________
void lpsrMusicXMLError (
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
msrError (
"LPSR",
inputSourceName,
inputLineNumber,
sourceCodeFileName,
sourceCodeLineNumber,
message);
if (! gGeneralOah->fDontShowErrors) {
exit (16);
}
}
//______________________________________________________________________________
void msrInternalError (
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
msrError (
"MSR INTERNAL",
inputSourceName,
inputLineNumber,
sourceCodeFileName,
sourceCodeLineNumber,
message);
abort ();
}
void msrLimitation (
string inputSourceName,
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
if (! (gGeneralOah->fQuiet && gGeneralOah->fDontShowErrors)) {
if (gGeneralOah->fDisplaySourceCodePosition) {
gLogOstream <<
baseName (sourceCodeFileName) << ":" << sourceCodeLineNumber <<
" ";
}
gLogOstream <<
"### MSR LIMITATION ### " <<
inputSourceName << ":" << inputLineNumber << ": " << message <<
endl;
abort ();
}
}
//______________________________________________________________________________
void msrStreamsError (
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
if (! (gGeneralOah->fQuiet && gGeneralOah->fDontShowErrors)) {
if (gGeneralOah->fDisplaySourceCodePosition) {
gLogOstream <<
baseName (sourceCodeFileName) << ":" << sourceCodeLineNumber <<
" ";
}
gLogOstream <<
"### " << "MSR STREAMS" << " ERROR ### " <<
"fake line number" << ":" << inputLineNumber << ": " << message <<
endl;
}
abort ();
}
void msrStreamsWarning (
int inputLineNumber,
string sourceCodeFileName,
int sourceCodeLineNumber,
string message)
{
if (! (gGeneralOah->fQuiet && gGeneralOah->fDontShowErrors)) {
if (gGeneralOah->fDisplaySourceCodePosition) {
gLogOstream <<
baseName (sourceCodeFileName) << ":" << sourceCodeLineNumber <<
" ";
}
gLogOstream <<
"*** " << "MSR STREAMS" << " warning *** " <<
" ### " << "MSR STREAMS" << " ERROR ### " <<
"fake line number" << ":" << inputLineNumber << ": " << message <<
endl;
}
abort ();
}
//______________________________________________________________________________
std::set<int> gWarningsInputLineNumbers;
std::set<int> gErrorsInputLineNumbers;
void displayWarningsAndErrorsInputLineNumbers ()
{
int warningsInputLineNumbersSize =
gWarningsInputLineNumbers.size ();
if (warningsInputLineNumbersSize && ! gGeneralOah->fQuiet) {
gLogOstream <<
"Warning message(s) were issued for input " <<
singularOrPluralWithoutNumber (
warningsInputLineNumbersSize, "line", "lines") <<
" ";
set<int>::const_iterator
iBegin = gWarningsInputLineNumbers.begin (),
iEnd = gWarningsInputLineNumbers.end (),
i = iBegin;
for ( ; ; ) {
gLogOstream << (*i);
if (++i == iEnd) break;
gLogOstream << ", ";
} // for
gLogOstream << endl;
}
int errorsInputLineNumbersSize =
gErrorsInputLineNumbers.size ();
if (errorsInputLineNumbersSize) {
gLogOstream <<
endl <<
"Error message(s) were issued for input " <<
singularOrPluralWithoutNumber (
errorsInputLineNumbersSize, "line", "lines") <<
" ";
set<int>::const_iterator
iBegin = gErrorsInputLineNumbers.begin (),
iEnd = gErrorsInputLineNumbers.end (),
i = iBegin;
for ( ; ; ) {
gLogOstream << (*i);
if (++i == iEnd) break;
gLogOstream << ", ";
} // for
gLogOstream << endl;
}
}
}
| mpl-2.0 |
pradeepbhadani/terraform | terraform/context_apply_test.go | 275129 | package terraform
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"log"
"reflect"
"runtime"
"sort"
"strings"
"sync"
"sync/atomic"
"testing"
"time"
"github.com/davecgh/go-spew/spew"
"github.com/go-test/deep"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/terraform/addrs"
"github.com/hashicorp/terraform/configs"
"github.com/hashicorp/terraform/configs/configschema"
"github.com/hashicorp/terraform/configs/hcl2shim"
"github.com/hashicorp/terraform/plans"
"github.com/hashicorp/terraform/providers"
"github.com/hashicorp/terraform/provisioners"
"github.com/hashicorp/terraform/states"
"github.com/hashicorp/terraform/states/statefile"
"github.com/hashicorp/terraform/tfdiags"
"github.com/zclconf/go-cty/cty"
)
func TestContext2Apply_basic(t *testing.T) {
m := testModule(t, "apply-good")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_unstable(t *testing.T) {
// This tests behavior when the configuration contains an unstable value,
// such as the result of uuid() or timestamp(), where each call produces
// a different result.
//
// This is an important case to test because we need to ensure that
// we don't re-call the function during the apply phase: the value should
// be fixed during plan
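	//
	// The apply-unstable fixture is assumed to contain something along these
	// lines (illustrative sketch only, not necessarily the literal fixture):
	//
	//   resource "test_resource" "foo" {
	//     random = uuid()
	//   }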
m := testModule(t, "apply-unstable")
p := testProvider("test")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected error during Plan: %s", diags.Err())
}
addr := addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_resource",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance)
schema := p.GetSchemaReturn.ResourceTypes["test_resource"] // automatically available in mock
rds := plan.Changes.ResourceInstance(addr)
rd, err := rds.Decode(schema.ImpliedType())
if err != nil {
t.Fatal(err)
}
if rd.After.GetAttr("random").IsKnown() {
t.Fatalf("Attribute 'random' has known value %#v; should be unknown in plan", rd.After.GetAttr("random"))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("unexpected error during Apply: %s", diags.Err())
}
mod := state.Module(addr.Module)
rss := state.ResourceInstance(addr)
if len(mod.Resources) != 1 {
t.Fatalf("wrong number of resources %d; want 1", len(mod.Resources))
}
	rs, err := rss.Current.Decode(schema.ImpliedType())
	if err != nil {
		t.Fatal(err)
	}
	got := rs.Value.GetAttr("random")
if !got.IsKnown() {
t.Fatalf("random is still unknown after apply")
}
if got, want := len(got.AsString()), 36; got != want {
t.Fatalf("random string has wrong length %d; want %d", got, want)
}
}
func TestContext2Apply_escape(t *testing.T) {
m := testModule(t, "apply-escape")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar:
ID = foo
provider = provider.aws
foo = "bar"
type = aws_instance
`)
}
func TestContext2Apply_resourceCountOneList(t *testing.T) {
m := testModule(t, "apply-resource-count-one-list")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
assertNoDiagnostics(t, diags)
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`null_resource.foo.0:
ID = foo
provider = provider.null
Outputs:
test = [foo]`)
if got != want {
t.Fatalf("got:\n%s\n\nwant:\n%s\n", got, want)
}
}
func TestContext2Apply_resourceCountZeroList(t *testing.T) {
m := testModule(t, "apply-resource-count-zero-list")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`Outputs:
test = []`)
if got != want {
t.Fatalf("wrong state\n\ngot:\n%s\n\nwant:\n%s\n", got, want)
}
}
func TestContext2Apply_resourceDependsOnModule(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module")
p := testProvider("aws")
p.DiffFn = testDiffFn
// verify the apply happens in the correct order
var mu sync.Mutex
var order []string
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if id.Attributes["ami"].New == "child" {
// make the child slower than the parent
time.Sleep(50 * time.Millisecond)
mu.Lock()
order = append(order, "child")
mu.Unlock()
} else {
mu.Lock()
order = append(order, "parent")
mu.Unlock()
}
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !reflect.DeepEqual(order, []string{"child", "parent"}) {
t.Fatal("resources applied out of order")
}
checkStateString(t, state, testTerraformApplyResourceDependsOnModuleStr)
}
// Test that without a config, the Dependencies in the state are enough
// to maintain proper ordering.
func TestContext2Apply_resourceDependsOnModuleStateOnly(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module-empty")
p := testProvider("aws")
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.a": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "parent",
},
Dependencies: []string{"module.child"},
Provider: "provider.aws",
},
},
},
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.child": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "child",
},
Provider: "provider.aws",
},
},
},
},
})
{
// verify the apply happens in the correct order
var mu sync.Mutex
var order []string
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if is.ID == "parent" {
// make the dep slower than the parent
time.Sleep(50 * time.Millisecond)
mu.Lock()
order = append(order, "child")
mu.Unlock()
} else {
mu.Lock()
order = append(order, "parent")
mu.Unlock()
}
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
if !reflect.DeepEqual(order, []string{"child", "parent"}) {
t.Fatal("resources applied out of order")
}
checkStateString(t, state, "<no state>")
}
}
func TestContext2Apply_resourceDependsOnModuleDestroy(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module")
p := testProvider("aws")
p.DiffFn = testDiffFn
var globalState *states.State
{
p.ApplyFn = testApplyFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
globalState = state
}
{
// Wait for the dependency, sleep, and verify the graph never
// called a child.
var called int32
var checked bool
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if is.Attributes["ami"] == "parent" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("module child should not be called")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: globalState,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !checked {
t.Fatal("should check")
}
checkStateString(t, state, `<no state>`)
}
}
func TestContext2Apply_resourceDependsOnModuleGrandchild(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module-deep")
p := testProvider("aws")
p.DiffFn = testDiffFn
{
// Wait for the dependency, sleep, and verify the graph never
// called a child.
var called int32
var checked bool
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if id.Attributes["ami"].New == "grandchild" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("aws_instance.a should not be called")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !checked {
t.Fatal("should check")
}
checkStateString(t, state, testTerraformApplyResourceDependsOnModuleDeepStr)
}
}
func TestContext2Apply_resourceDependsOnModuleInModule(t *testing.T) {
m := testModule(t, "apply-resource-depends-on-module-in-module")
p := testProvider("aws")
p.DiffFn = testDiffFn
{
// Wait for the dependency, sleep, and verify the graph never
// called a child.
var called int32
var checked bool
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
if id.Attributes["ami"].New == "grandchild" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("something else was applied before grandchild; grandchild should be first")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !checked {
t.Fatal("should check")
}
checkStateString(t, state, testTerraformApplyResourceDependsOnModuleInModuleStr)
}
}
func TestContext2Apply_mapVarBetweenModules(t *testing.T) {
m := testModule(t, "apply-map-var-through-module")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`<no state>
Outputs:
amis_from_module = {eu-west-1:ami-789012 eu-west-2:ami-989484 us-west-1:ami-123456 us-west-2:ami-456789 }
module.test:
null_resource.noop:
ID = foo
provider = provider.null
Outputs:
amis_out = {eu-west-1:ami-789012 eu-west-2:ami-989484 us-west-1:ami-123456 us-west-2:ami-456789 }`)
if actual != expected {
t.Fatalf("expected: \n%s\n\ngot: \n%s\n", expected, actual)
}
}
func TestContext2Apply_refCount(t *testing.T) {
m := testModule(t, "apply-ref-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyRefCountStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_providerAlias(t *testing.T) {
m := testModule(t, "apply-provider-alias")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProviderAliasStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// Two providers that are configured should both be configured prior to apply
func TestContext2Apply_providerAliasConfigure(t *testing.T) {
m := testModule(t, "apply-provider-alias-configure")
p2 := testProvider("another")
p2.ApplyFn = testApplyFn
p2.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"another": testProviderFuncFixed(p2),
},
),
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
// Configure to record calls AFTER Plan above
var configCount int32
p2.ConfigureFn = func(c *ResourceConfig) error {
atomic.AddInt32(&configCount, 1)
foo, ok := c.Get("foo")
if !ok {
return fmt.Errorf("foo is not found")
}
if foo != "bar" {
return fmt.Errorf("foo: %#v", foo)
}
return nil
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if configCount != 2 {
t.Fatalf("provider config expected 2 calls, got: %d", configCount)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProviderAliasConfigStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// GH-2870
func TestContext2Apply_providerWarning(t *testing.T) {
m := testModule(t, "apply-provider-warning")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ValidateFn = func(c *ResourceConfig) (ws []string, es []error) {
ws = append(ws, "Just a warning")
return
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
aws_instance.foo:
ID = foo
provider = provider.aws
`)
if actual != expected {
t.Fatalf("got: \n%s\n\nexpected:\n%s", actual, expected)
}
if !p.ConfigureCalled {
t.Fatalf("provider Configure() was never called!")
}
}
func TestContext2Apply_emptyModule(t *testing.T) {
m := testModule(t, "apply-empty-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
actual = strings.Replace(actual, " ", "", -1)
expected := strings.TrimSpace(testTerraformApplyEmptyModuleStr)
if actual != expected {
t.Fatalf("bad: \n%s\nexpect:\n%s", actual, expected)
}
}
func TestContext2Apply_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-good-create-before")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if got, want := len(mod.Resources), 1; got != want {
t.Logf("state:\n%s", state)
t.Fatalf("wrong number of resources %d; want %d", got, want)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCreateBeforeStr)
if actual != expected {
t.Fatalf("expected:\n%s\ngot:\n%s", expected, actual)
}
}
func TestContext2Apply_createBeforeDestroyUpdate(t *testing.T) {
m := testModule(t, "apply-good-create-before-update")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "bar",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("bad: %s", state)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCreateBeforeUpdateStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// This tests that when a CBD resource depends on a non-CBD resource,
// we can still properly apply changes that require new for both.
func TestContext2Apply_createBeforeDestroy_dependsNonCBD(t *testing.T) {
m := testModule(t, "apply-cbd-depends-non-cbd")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar:
ID = foo
provider = provider.aws
require_new = yes
type = aws_instance
value = foo
Dependencies:
aws_instance.foo
aws_instance.foo:
ID = foo
provider = provider.aws
require_new = yes
type = aws_instance
`)
}
func TestContext2Apply_createBeforeDestroy_hook(t *testing.T) {
h := new(MockHook)
m := testModule(t, "apply-good-create-before")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
Provider: "provider.aws",
},
},
},
},
})
var actual []cty.Value
var actualLock sync.Mutex
h.PostApplyFn = func(addr addrs.AbsResourceInstance, gen states.Generation, sv cty.Value, e error) (HookAction, error) {
actualLock.Lock()
defer actualLock.Unlock()
actual = append(actual, sv)
return HookActionContinue, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
expected := []cty.Value{
cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("foo"),
"require_new": cty.StringVal("xyz"),
"type": cty.StringVal("aws_instance"),
}),
cty.NullVal(cty.DynamicPseudoType),
}
cmpOpt := cmp.Transformer("ctyshim", hcl2shim.ConfigValueFromHCL2)
if !cmp.Equal(actual, expected, cmpOpt) {
t.Fatalf("wrong state snapshot sequence\n%s", cmp.Diff(expected, actual, cmpOpt))
}
}
// Test that we can perform an apply with CBD in a count with deposed instances.
func TestContext2Apply_createBeforeDestroy_deposedCount(t *testing.T) {
m := testModule(t, "apply-cbd-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Tainted: true,
},
Deposed: []*InstanceState{
&InstanceState{
ID: "foo",
},
},
},
"aws_instance.bar.1": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Tainted: true,
},
Deposed: []*InstanceState{
&InstanceState{
ID: "bar",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar.0:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
aws_instance.bar.1:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
`)
}
// Test that when we have a deposed instance but a good primary, we still
// destroy the deposed instance.
func TestContext2Apply_createBeforeDestroy_deposedOnly(t *testing.T) {
m := testModule(t, "apply-cbd-deposed-only")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Deposed: []*InstanceState{
&InstanceState{
ID: "foo",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar:
ID = bar
provider = provider.aws
`)
}
func TestContext2Apply_destroyComputed(t *testing.T) {
m := testModule(t, "apply-destroy-computed")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{
"output": "value",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
} else {
t.Logf("plan:\n\n%s", legacyDiffComparisonString(p.Changes))
}
if _, diags := ctx.Apply(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
}
// Test that the destroy operation uses depends_on as a source of ordering.
func TestContext2Apply_destroyDependsOn(t *testing.T) {
// It is possible for this to be racy, so we loop a number of times
// just to check.
for i := 0; i < 10; i++ {
testContext2Apply_destroyDependsOn(t)
}
}
func testContext2Apply_destroyDependsOn(t *testing.T) {
m := testModule(t, "apply-destroy-depends-on")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{},
},
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{},
},
},
},
},
},
})
// Record the order we see Apply
var actual []string
var actualLock sync.Mutex
p.ApplyFn = func(
_ *InstanceInfo, is *InstanceState, _ *InstanceDiff) (*InstanceState, error) {
actualLock.Lock()
defer actualLock.Unlock()
actual = append(actual, is.ID)
return nil, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
Parallelism: 1, // To check ordering
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
expected := []string{"foo", "bar"}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("wrong order\ngot: %#v\nwant: %#v", actual, expected)
}
}
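
// orderRecorder is an illustrative sketch (not used by the tests in this
// file) of the mutex-guarded pattern that the destroy-ordering tests above
// and below repeat inline: the mock provider's ApplyFn records each instance
// ID as it is applied or destroyed, and the test then compares the recorded
// order against the expected one.
type orderRecorder struct {
	mu  sync.Mutex
	ids []string
}

func (r *orderRecorder) record(id string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.ids = append(r.ids, id)
}
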
// Test that destroy ordering is correct with dependencies only
// in the state.
func TestContext2Apply_destroyDependsOnStateOnly(t *testing.T) {
legacyState := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{},
},
Provider: "provider.aws",
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{},
},
Dependencies: []string{"aws_instance.foo"},
Provider: "provider.aws",
},
},
},
},
})
newState := states.NewState()
root := newState.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"foo"}`),
Dependencies: []addrs.AbsResource{},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "bar",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"bar"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
},
Module: root.Addr,
},
},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
// It is possible for this to be racy, so we loop a number of times
// just to check.
for i := 0; i < 10; i++ {
t.Run("legacy", func(t *testing.T) {
testContext2Apply_destroyDependsOnStateOnly(t, legacyState)
})
t.Run("new", func(t *testing.T) {
testContext2Apply_destroyDependsOnStateOnly(t, newState)
})
}
}
func testContext2Apply_destroyDependsOnStateOnly(t *testing.T, state *states.State) {
m := testModule(t, "empty")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// Record the order we see Apply
var actual []string
var actualLock sync.Mutex
p.ApplyFn = func(
_ *InstanceInfo, is *InstanceState, _ *InstanceDiff) (*InstanceState, error) {
actualLock.Lock()
defer actualLock.Unlock()
actual = append(actual, is.ID)
return nil, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
Parallelism: 1, // To check ordering
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
expected := []string{"bar", "foo"}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("wrong order\ngot: %#v\nwant: %#v", actual, expected)
}
}
// Test that destroy ordering is correct with dependencies only
// in the state within a module (GH-11749)
func TestContext2Apply_destroyDependsOnStateOnlyModule(t *testing.T) {
legacyState := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{},
},
Provider: "provider.aws",
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{},
},
Dependencies: []string{"aws_instance.foo"},
Provider: "provider.aws",
},
},
},
},
})
newState := states.NewState()
child := newState.EnsureModule(addrs.RootModuleInstance.Child("child", addrs.NoKey))
child.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"foo"}`),
Dependencies: []addrs.AbsResource{},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
child.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "bar",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"bar"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
},
Module: child.Addr,
},
},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
// It is possible for this to be racy, so we loop a number of times
// just to check.
for i := 0; i < 10; i++ {
t.Run("legacy", func(t *testing.T) {
testContext2Apply_destroyDependsOnStateOnlyModule(t, legacyState)
})
t.Run("new", func(t *testing.T) {
testContext2Apply_destroyDependsOnStateOnlyModule(t, newState)
})
}
}
func testContext2Apply_destroyDependsOnStateOnlyModule(t *testing.T, state *states.State) {
m := testModule(t, "empty")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// Record the order we see Apply
var actual []string
var actualLock sync.Mutex
p.ApplyFn = func(
_ *InstanceInfo, is *InstanceState, _ *InstanceDiff) (*InstanceState, error) {
actualLock.Lock()
defer actualLock.Unlock()
actual = append(actual, is.ID)
return nil, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
Parallelism: 1, // To check ordering
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
expected := []string{"bar", "foo"}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("wrong order\ngot: %#v\nwant: %#v", actual, expected)
}
}
func TestContext2Apply_dataBasic(t *testing.T) {
m := testModule(t, "apply-data-basic")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ReadDataSourceResponse = providers.ReadDataSourceResponse{
State: cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("yo"),
"foo": cty.NullVal(cty.String),
}),
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyDataBasicStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_destroyData(t *testing.T) {
m := testModule(t, "apply-destroy-data-resource")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"data.null_data_source.testing": &ResourceState{
Type: "null_data_source",
Primary: &InstanceState{
ID: "-",
Attributes: map[string]string{
"inputs.#": "1",
"inputs.test": "yes",
},
},
},
},
},
},
})
hook := &testHook{}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
Hooks: []Hook{hook},
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
newState, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if got := len(newState.Modules); got != 1 {
t.Fatalf("state has %d modules after destroy; want 1", got)
}
if got := len(newState.RootModule().Resources); got != 0 {
t.Fatalf("state has %d resources after destroy; want 0", got)
}
wantHookCalls := []*testHookCall{
{"PreDiff", "data.null_data_source.testing"},
{"PostDiff", "data.null_data_source.testing"},
{"PostStateUpdate", ""},
}
if !reflect.DeepEqual(hook.Calls, wantHookCalls) {
t.Errorf("wrong hook calls\ngot: %swant: %s", spew.Sdump(hook.Calls), spew.Sdump(wantHookCalls))
}
}
// https://github.com/hashicorp/terraform/pull/5096
func TestContext2Apply_destroySkipsCBD(t *testing.T) {
// Config contains CBD resource depending on non-CBD resource, which triggers
// a cycle if they are both replaced, but should _not_ trigger a cycle when
// just doing a `terraform destroy`.
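	//
	// Illustrative shape of such a configuration (a sketch only; the real
	// apply-destroy-cbd fixture may differ in detail):
	//
	//   resource "aws_instance" "foo" {}
	//
	//   resource "aws_instance" "bar" {
	//     foo = aws_instance.foo.id
	//
	//     lifecycle {
	//       create_before_destroy = true
	//     }
	//   }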
m := testModule(t, "apply-destroy-cbd")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf(legacyDiffComparisonString(p.Changes))
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
func TestContext2Apply_destroyModuleVarProviderConfig(t *testing.T) {
m := testModule(t, "apply-destroy-mod-var-provider-config")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
_, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
}
// https://github.com/hashicorp/terraform/issues/2892
func TestContext2Apply_destroyCrossProviders(t *testing.T) {
m := testModule(t, "apply-destroy-cross-providers")
p_aws := testProvider("aws")
p_aws.ApplyFn = testApplyFn
p_aws.DiffFn = testDiffFn
p_aws.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"id": {
Type: cty.String,
Computed: true,
},
},
},
"aws_vpc": {
Attributes: map[string]*configschema.Attribute{
"value": {
Type: cty.String,
Optional: true,
},
},
},
},
}
providers := map[string]providers.Factory{
"aws": testProviderFuncFixed(p_aws),
}
	// The bug only appears from time to time, so we run this test multiple
	// times to check for the race condition.
	// FIXME: this test is flaky now, so run it even more times.
for i := 0; i <= 100; i++ {
ctx := getContextForApply_destroyCrossProviders(t, m, providers)
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
if _, diags := ctx.Apply(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
}
}
func getContextForApply_destroyCrossProviders(t *testing.T, m *configs.Config, providerFactories map[string]providers.Factory) *Context {
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.shared": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "remote-2652591293",
Attributes: map[string]string{
"id": "test",
},
},
Provider: "provider.aws",
},
},
},
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_vpc.bar": &ResourceState{
Type: "aws_vpc",
Primary: &InstanceState{
ID: "vpc-aaabbb12",
Attributes: map[string]string{
"value": "test",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(providerFactories),
State: state,
Destroy: true,
})
return ctx
}
func TestContext2Apply_minimal(t *testing.T) {
m := testModule(t, "apply-minimal")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyMinimalStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_badDiff(t *testing.T) {
m := testModule(t, "apply-good")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) {
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"newp": &ResourceAttrDiff{
Old: "",
New: "",
NewComputed: true,
},
},
}, nil
}
	if _, diags := ctx.Apply(); !diags.HasErrors() {
t.Fatal("should error")
}
}
func TestContext2Apply_cancel(t *testing.T) {
stopped := false
m := testModule(t, "apply-cancel")
p := testProvider("aws")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
p.ApplyFn = func(*InstanceInfo, *InstanceState, *InstanceDiff) (*InstanceState, error) {
if !stopped {
stopped = true
go ctx.Stop()
for {
if ctx.sh.Stopped() {
break
}
time.Sleep(10 * time.Millisecond)
}
}
return &InstanceState{
ID: "foo",
Attributes: map[string]string{
"value": "2",
},
}, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// Start the Apply in a goroutine
var applyDiags tfdiags.Diagnostics
stateCh := make(chan *states.State)
go func() {
state, diags := ctx.Apply()
applyDiags = diags
stateCh <- state
}()
state := <-stateCh
if applyDiags.HasErrors() {
t.Fatalf("unexpected errors: %s", applyDiags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCancelStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
if !p.StopCalled {
t.Fatal("stop should be called")
}
}
func TestContext2Apply_cancelBlock(t *testing.T) {
m := testModule(t, "apply-cancel-block")
p := testProvider("aws")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
applyCh := make(chan struct{})
p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) {
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"id": &ResourceAttrDiff{
New: "foo",
},
"num": &ResourceAttrDiff{
New: "2",
},
},
}, nil
}
p.ApplyFn = func(*InstanceInfo, *InstanceState, *InstanceDiff) (*InstanceState, error) {
close(applyCh)
for !ctx.sh.Stopped() {
// Wait for stop to be called. We call Gosched here so that
// the other goroutines can always be scheduled to set Stopped.
runtime.Gosched()
}
// Sleep
time.Sleep(100 * time.Millisecond)
return &InstanceState{
ID: "foo",
Attributes: map[string]string{
"num": "2",
},
}, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// Start the Apply in a goroutine
var applyDiags tfdiags.Diagnostics
stateCh := make(chan *states.State)
go func() {
state, diags := ctx.Apply()
applyDiags = diags
stateCh <- state
}()
stopDone := make(chan struct{})
go func() {
defer close(stopDone)
<-applyCh
ctx.Stop()
}()
// Make sure that stop blocks
select {
case <-stopDone:
t.Fatal("stop should block")
case <-time.After(10 * time.Millisecond):
}
// Wait for stop
select {
case <-stopDone:
case <-time.After(500 * time.Millisecond):
t.Fatal("stop should be done")
}
// Wait for apply to complete
state := <-stateCh
if applyDiags.HasErrors() {
t.Fatalf("unexpected error: %s", applyDiags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
`)
}
// for_each values cannot be used in the provisioner during destroy.
// There may be a way to handle this, but for now make sure we print an error
// rather than crashing with an invalid config.
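//
// The apply-provisioner-each fixture is assumed to look roughly like the
// following (illustrative sketch only):
//
//   resource "aws_instance" "bar" {
//     for_each = toset(["a", "b"])
//
//     provisioner "shell" {
//       when    = destroy
//       command = "echo ${each.value}"
//     }
//   }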
func TestContext2Apply_provisionerDestroyForEach(t *testing.T) {
m := testModule(t, "apply-provisioner-each")
p := testProvider("aws")
pr := testProvisioner()
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
s := &states.State{
Modules: map[string]*states.Module{
"": &states.Module{
Resources: map[string]*states.Resource{
"aws_instance.bar": &states.Resource{
						Addr:     addrs.Resource{Mode: addrs.ManagedResourceMode, Type: "aws_instance", Name: "bar"},
EachMode: states.EachMap,
Instances: map[addrs.InstanceKey]*states.ResourceInstance{
addrs.StringKey("a"): &states.ResourceInstance{
Current: &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"foo":"bar","id":"foo"}`),
},
},
addrs.StringKey("b"): &states.ResourceInstance{
Current: &states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"foo":"bar","id":"foo"}`),
},
},
},
ProviderConfig: addrs.AbsProviderConfig{
Module: addrs.ModuleInstance(nil),
ProviderConfig: addrs.ProviderConfig{Type: "aws", Alias: ""},
},
},
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: s,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
_, diags := ctx.Apply()
	if !diags.HasErrors() {
t.Fatal("should error")
}
if !strings.Contains(diags.Err().Error(), "each.value cannot be used in this context") {
t.Fatal("unexpected error:", diags.Err())
}
}
func TestContext2Apply_cancelProvisioner(t *testing.T) {
m := testModule(t, "apply-cancel-provisioner")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.GetSchemaResponse = provisioners.GetSchemaResponse{
Provisioner: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"foo": {
Type: cty.String,
Optional: true,
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
prStopped := make(chan struct{})
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
// Start the stop process
go ctx.Stop()
<-prStopped
return nil
}
pr.StopFn = func() error {
close(prStopped)
return nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// Start the Apply in a goroutine
var applyDiags tfdiags.Diagnostics
stateCh := make(chan *states.State)
go func() {
state, diags := ctx.Apply()
applyDiags = diags
stateCh <- state
}()
// Wait for completion
state := <-stateCh
assertNoErrors(t, applyDiags)
checkStateString(t, state, `
aws_instance.foo: (tainted)
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
if !pr.StopCalled {
t.Fatal("stop should be called")
}
}
func TestContext2Apply_compute(t *testing.T) {
m := testModule(t, "apply-compute")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"num": {
Type: cty.Number,
Optional: true,
},
"compute": {
Type: cty.String,
Optional: true,
},
"compute_value": {
Type: cty.String,
Optional: true,
},
"foo": {
Type: cty.String,
Optional: true,
},
"id": {
Type: cty.String,
Computed: true,
},
"type": {
Type: cty.String,
Computed: true,
},
"value": { // Populated from compute_value because compute = "value" in the config fixture
Type: cty.String,
Computed: true,
},
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
ctx.variables = InputValues{
"value": &InputValue{
Value: cty.NumberIntVal(1),
SourceType: ValueFromCaller,
},
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyComputeStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_countDecrease(t *testing.T) {
m := testModule(t, "apply-count-dec")
p := testProvider("aws")
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
"aws_instance.foo.1": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
"aws_instance.foo.2": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountDecStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_countDecreaseToOneX(t *testing.T) {
m := testModule(t, "apply-count-dec-one")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
"aws_instance.foo.1": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
"aws_instance.foo.2": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountDecToOneStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// https://github.com/PeoplePerHour/terraform/pull/11
//
// This tests a case where both a "resource" and "resource.0" are in
// the state file, which apparently is a reasonable backwards compatibility
// concern found in the above 3rd party repo.
func TestContext2Apply_countDecreaseToOneCorrupted(t *testing.T) {
m := testModule(t, "apply-count-dec-one")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
},
},
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "baz",
Attributes: map[string]string{
"type": "aws_instance",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
got := strings.TrimSpace(legacyPlanComparisonString(ctx.State(), p.Changes))
want := strings.TrimSpace(testTerraformApplyCountDecToOneCorruptedPlanStr)
if got != want {
t.Fatalf("wrong plan result\ngot:\n%s\nwant:\n%s", got, want)
}
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountDecToOneCorruptedStr)
if actual != expected {
t.Fatalf("wrong final state\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
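// Tests applying a counted resource when the only existing instance is
// tainted: the plan should replace aws_instance.foo[0] and create the
// missing aws_instance.foo[1], and both instances should be healthy after
// apply.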
func TestContext2Apply_countTainted(t *testing.T) {
m := testModule(t, "apply-count-tainted")
p := testProvider("aws")
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "foo",
"type": "aws_instance",
},
Tainted: true,
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
{
plan, diags := ctx.Plan()
assertNoErrors(t, diags)
got := strings.TrimSpace(legacyDiffComparisonString(plan.Changes))
want := strings.TrimSpace(`
DESTROY/CREATE: aws_instance.foo[0]
foo: "foo" => "foo"
id: "bar" => "<computed>"
type: "aws_instance" => "aws_instance"
CREATE: aws_instance.foo[1]
foo: "" => "foo"
id: "" => "<computed>"
type: "" => "aws_instance"
`)
if got != want {
t.Fatalf("wrong plan\n\ngot:\n%s\n\nwant:\n%s", got, want)
}
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`
aws_instance.foo.0:
ID = foo
provider = provider.aws
foo = foo
type = aws_instance
aws_instance.foo.1:
ID = foo
provider = provider.aws
foo = foo
type = aws_instance
`)
if got != want {
t.Fatalf("wrong final state\n\ngot:\n%s\n\nwant:\n%s", got, want)
}
}
func TestContext2Apply_countVariable(t *testing.T) {
m := testModule(t, "apply-count-variable")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountVariableStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_countVariableRef(t *testing.T) {
m := testModule(t, "apply-count-variable-ref")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyCountVariableRefStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_provisionerInterpCount(t *testing.T) {
// This test ensures that a provisioner can interpolate a resource count
// even though the provisioner expression is evaluated during the plan
// walk. https://github.com/hashicorp/terraform/issues/16840
m, snap := testModuleWithSnapshot(t, "apply-provisioner-interp-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
provisioners := map[string]ProvisionerFactory{
"local-exec": testProvisionerFuncFixed(pr),
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
Provisioners: provisioners,
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan failed unexpectedly: %s", diags.Err())
}
state := ctx.State()
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatal(err)
}
ctxOpts.ProviderResolver = providerResolver
ctxOpts.Provisioners = provisioners
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("failed to create context for plan: %s", diags.Err())
}
// Applying the plan should now succeed
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply failed unexpectedly: %s", diags.Err())
}
// Verify the provisioner was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner was not called")
}
}
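// Tests applying a configuration whose for_each is driven by an input
// variable ("foo"), checking the final state against
// testTerraformApplyForEachVariableStr.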
func TestContext2Apply_foreachVariable(t *testing.T) {
m := testModule(t, "plan-for-each-unknown-value")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"foo": &InputValue{
Value: cty.StringVal("hello"),
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyForEachVariableStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_moduleBasic(t *testing.T) {
m := testModule(t, "apply-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleStr)
if actual != expected {
t.Fatalf("bad, expected:\n%s\n\nactual:\n%s", expected, actual)
}
}
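// Tests destroy ordering across modules: a custom apply function records the
// order in which instances are destroyed, and aws_instance.b in the root
// must be destroyed before aws_instance.a in the child module.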
func TestContext2Apply_moduleDestroyOrder(t *testing.T) {
m := testModule(t, "apply-module-destroy-order")
p := testProvider("aws")
p.DiffFn = testDiffFn
// Create a custom apply function to track the order they were destroyed
var order []string
var orderLock sync.Mutex
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
if is.ID == "b" {
// Pause briefly to make any race conditions more visible, since
// missing edges here can cause nondeterministic ordering.
time.Sleep(100 * time.Millisecond)
}
orderLock.Lock()
defer orderLock.Unlock()
order = append(order, is.ID)
return nil, nil
}
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Required: true},
"blah": {Type: cty.String, Optional: true},
"value": {Type: cty.String, Optional: true},
},
},
},
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.b": resourceState("aws_instance", "b"),
},
},
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.a": resourceState("aws_instance", "a"),
},
Outputs: map[string]*OutputState{
"a_output": &OutputState{
Type: "string",
Sensitive: false,
Value: "a",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
expected := []string{"b", "a"}
if !reflect.DeepEqual(order, expected) {
t.Errorf("wrong order\ngot: %#v\nwant: %#v", order, expected)
}
{
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleDestroyOrderStr)
if actual != expected {
t.Errorf("wrong final state\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
}
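// Tests that a child module can inherit an aliased provider configuration
// from the root: the child's resource must end up on provider.aws.eu and the
// inherited configuration must not include the root-only "root" argument.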
func TestContext2Apply_moduleInheritAlias(t *testing.T) {
m := testModule(t, "apply-module-provider-inherit-alias")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ConfigureFn = func(c *ResourceConfig) error {
if _, ok := c.Get("value"); !ok {
return nil
}
if _, ok := c.Get("root"); ok {
return fmt.Errorf("child should not get root")
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
<no state>
module.child:
aws_instance.foo:
ID = foo
provider = provider.aws.eu
`)
}
func TestContext2Apply_orphanResource(t *testing.T) {
// This is a two-step test:
// 1. Apply a configuration with resources that have count set.
// This should place the empty resource object in the state to record
// that each exists, and record any instances.
// 2. Apply an empty configuration against the same state, which should
// then clean up both the instances and the containing resource objects.
p := testProvider("test")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_thing": {},
},
}
// Step 1: create the resources and instances
m := testModule(t, "apply-orphan-resource")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
_, diags := ctx.Plan()
assertNoErrors(t, diags)
state, diags := ctx.Apply()
assertNoErrors(t, diags)
// At this point both resources should be recorded in the state, along
// with the single instance associated with test_thing.one.
want := states.BuildState(func(s *states.SyncState) {
providerAddr := addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance)
zeroAddr := addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_thing",
Name: "zero",
}.Absolute(addrs.RootModuleInstance)
oneAddr := addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_thing",
Name: "one",
}.Absolute(addrs.RootModuleInstance)
s.SetResourceMeta(zeroAddr, states.EachList, providerAddr)
s.SetResourceMeta(oneAddr, states.EachList, providerAddr)
s.SetResourceInstanceCurrent(oneAddr.Instance(addrs.IntKey(0)), &states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{}`),
}, providerAddr)
})
// compare the marshaled form to easily remove empty and nil slices
if !statefile.StatesMarshalEqual(state, want) {
t.Fatalf("wrong state after step 1\n%s", cmp.Diff(want, state))
}
// Step 2: update with an empty config, to destroy everything
m = testModule(t, "empty")
ctx = testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
_, diags = ctx.Plan()
assertNoErrors(t, diags)
state, diags = ctx.Apply()
assertNoErrors(t, diags)
// The state should now be _totally_ empty, with just an empty root module
// (since that always exists) and no resources at all.
want = states.NewState()
if !cmp.Equal(state, want) {
t.Fatalf("wrong state after step 2\ngot: %swant: %s", spew.Sdump(state), spew.Sdump(want))
}
}
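// Tests that an orphaned child module whose resource was created with an
// inherited aliased provider (provider.aws.eu) can still be destroyed; the
// provider must be configured and the final state must be empty.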
func TestContext2Apply_moduleOrphanInheritAlias(t *testing.T) {
m := testModule(t, "apply-module-provider-inherit-alias-orphan")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
called := false
p.ConfigureFn = func(c *ResourceConfig) error {
called = true
if _, ok := c.Get("child"); !ok {
return nil
}
if _, ok := c.Get("root"); ok {
return fmt.Errorf("child should not get root")
}
return nil
}
// Create a state with an orphan module
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws.eu",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !called {
t.Fatal("must call configure")
}
checkStateString(t, state, "<no state>")
}
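// Tests that a resource in an orphaned child module can be destroyed using
// the provider configuration inherited from the root, which must include the
// "value" argument when the provider is configured.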
func TestContext2Apply_moduleOrphanProvider(t *testing.T) {
m := testModule(t, "apply-module-orphan-provider-inherit")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ConfigureFn = func(c *ResourceConfig) error {
if _, ok := c.Get("value"); !ok {
return fmt.Errorf("value is not found")
}
return nil
}
// Create a state with an orphan module
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
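// Like the test above, but the orphaned module is nested two levels deep
// (root -> parent -> child) rather than being a direct child.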
func TestContext2Apply_moduleOrphanGrandchildProvider(t *testing.T) {
m := testModule(t, "apply-module-orphan-provider-inherit")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.ConfigureFn = func(c *ResourceConfig) error {
if _, ok := c.Get("value"); !ok {
return fmt.Errorf("value is not found")
}
return nil
}
// Create a state with an orphan module that is nested (grandchild)
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "parent", "child"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
func TestContext2Apply_moduleGrandchildProvider(t *testing.T) {
m := testModule(t, "apply-module-grandchild-provider-inherit")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var callLock sync.Mutex
called := false
p.ConfigureFn = func(c *ResourceConfig) error {
if _, ok := c.Get("value"); !ok {
return fmt.Errorf("value is not found")
}
callLock.Lock()
called = true
callLock.Unlock()
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
callLock.Lock()
defer callLock.Unlock()
if called != true {
t.Fatalf("err: configure never called")
}
}
// This tests an issue where providers that appeared only in a module and not
// in the root weren't being added to the root properly. In this test case:
// aws is explicitly added to the root, but "test" should be added too. With
// the bug, it wasn't.
func TestContext2Apply_moduleOnlyProvider(t *testing.T) {
m := testModule(t, "apply-module-only-provider")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pTest := testProvider("test")
pTest.ApplyFn = testApplyFn
pTest.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"test": testProviderFuncFixed(pTest),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleOnlyProviderStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_moduleProviderAlias(t *testing.T) {
m := testModule(t, "apply-module-provider-alias")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleProviderAliasStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
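// Tests that targeting a nonexistent resource address in a configuration
// that uses module provider aliases creates nothing: the final state should
// be empty.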
func TestContext2Apply_moduleProviderAliasTargets(t *testing.T) {
m := testModule(t, "apply-module-provider-alias")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.AbsResource{
Module: addrs.RootModuleInstance,
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "nonexistent",
Name: "thing",
},
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
<no state>
`)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
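// Tests destroying a resource that lives in a deeply nested module
// (root -> child -> subchild), which exercises closing providers that are
// used only by nested modules.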
func TestContext2Apply_moduleProviderCloseNested(t *testing.T) {
m := testModule(t, "apply-module-provider-close-nested")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child", "subchild"},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
}),
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
// Tests that module variables referencing data that already exists in the
// state and requires no diff work properly. This covers an issue where
// module variables were pruned because they referenced "non-existent"
// resources (they existed, just not in the graph because they weren't in
// the diff).
func TestContext2Apply_moduleVarRefExisting(t *testing.T) {
m := testModule(t, "apply-ref-existing")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{
"foo": "bar",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleVarRefExistingStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
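// Tests a module variable that drives a resource count: first a destroy run
// with num = 2, then a fresh apply with num = 5, both of which must succeed.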
func TestContext2Apply_moduleVarResourceCount(t *testing.T) {
m := testModule(t, "apply-module-var-resource-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(2),
SourceType: ValueFromCaller,
},
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(5),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
// GH-819
func TestContext2Apply_moduleBool(t *testing.T) {
m := testModule(t, "apply-module-bool")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyModuleBoolStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
// Tests that a module can be targeted and everything in it is properly created.
// This builds on the corresponding plan test by also verifying that apply works.
func TestContext2Apply_moduleTarget(t *testing.T) {
m := testModule(t, "plan-targeted-cross-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("B", addrs.NoKey),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
<no state>
module.A:
aws_instance.foo:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
Outputs:
value = foo
module.B:
aws_instance.bar:
ID = foo
provider = provider.aws
foo = foo
type = aws_instance
Dependencies:
module.A.aws_instance.foo
`)
}
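// Tests applying a configuration that uses two different providers (aws and
// do); both resources must be created and the final state must match
// testTerraformApplyMultiProviderStr.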
func TestContext2Apply_multiProvider(t *testing.T) {
m := testModule(t, "apply-multi-provider")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pDO := testProvider("do")
pDO.ApplyFn = testApplyFn
pDO.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"do": testProviderFuncFixed(pDO),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyMultiProviderStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
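// Tests destroy ordering across two providers: after creating the resources,
// an instrumented apply function verifies that the aws_instance is destroyed
// before anything else, and the final state must be empty.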
func TestContext2Apply_multiProviderDestroy(t *testing.T) {
m := testModule(t, "apply-multi-provider-destroy")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
Provider: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"addr": {Type: cty.String, Optional: true},
},
},
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.String, Optional: true},
},
},
},
}
p2 := testProvider("vault")
p2.ApplyFn = testApplyFn
p2.DiffFn = testDiffFn
p2.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"vault_instance": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Computed: true},
},
},
},
}
var state *states.State
// First, create the instances
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"vault": testProviderFuncFixed(p2),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("errors during create plan: %s", diags.Err())
}
s, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("errors during create apply: %s", diags.Err())
}
state = s
}
// Destroy them
{
// Verify that aws_instance.bar is destroyed first
var checked bool
var called int32
var lock sync.Mutex
applyFn := func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
lock.Lock()
defer lock.Unlock()
if info.Type == "aws_instance" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("nothing else should be called")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
// Set the apply functions
p.ApplyFn = applyFn
p2.ApplyFn = applyFn
ctx := testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"vault": testProviderFuncFixed(p2),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("errors during destroy plan: %s", diags.Err())
}
s, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("errors during destroy apply: %s", diags.Err())
}
if !checked {
t.Fatal("should be checked")
}
state = s
}
checkStateString(t, state, `<no state>`)
}
// This is like the multiProviderDestroy test except it tests that
// dependent resources within a child module that inherit provider
// configuration are still destroyed first.
func TestContext2Apply_multiProviderDestroyChild(t *testing.T) {
m := testModule(t, "apply-multi-provider-destroy-child")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
Provider: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true},
},
},
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.String, Optional: true},
},
},
},
}
p2 := testProvider("vault")
p2.ApplyFn = testApplyFn
p2.DiffFn = testDiffFn
p2.GetSchemaReturn = &ProviderSchema{
Provider: &configschema.Block{},
ResourceTypes: map[string]*configschema.Block{
"vault_instance": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Computed: true},
},
},
},
}
var state *states.State
// First, create the instances
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"vault": testProviderFuncFixed(p2),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
s, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state = s
}
// Destroy them
{
// Verify that aws_instance.bar is destroyed first
var checked bool
var called int32
var lock sync.Mutex
applyFn := func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
lock.Lock()
defer lock.Unlock()
if info.Type == "aws_instance" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 0 (dep not called)
if atomic.LoadInt32(&called) != 0 {
return nil, fmt.Errorf("nothing else should be called")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
// Set the apply functions
p.ApplyFn = applyFn
p2.ApplyFn = applyFn
ctx := testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"vault": testProviderFuncFixed(p2),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
s, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !checked {
t.Fatal("should be checked")
}
state = s
}
checkStateString(t, state, `
<no state>
`)
}
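// Tests splat ("multi-var") outputs across a count change: apply with
// num = 3 and check the joined output, then apply again with num = 1 and
// check that the output shrinks accordingly.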
func TestContext2Apply_multiVar(t *testing.T) {
m := testModule(t, "apply-multi-var")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(3),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := state.RootModule().OutputValues["output"]
expected := cty.StringVal("bar0,bar1,bar2")
if actual == nil {
t.Fatal("missing output")
}
if actual.Value != expected {
t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual.Value, expected)
}
t.Logf("Initial state: %s", state.String())
// Apply again, reduce the count to 1
{
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(1),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("End state: %s", state.String())
actual := state.RootModule().OutputValues["output"]
if actual == nil {
t.Fatal("missing output")
}
expected := cty.StringVal("bar0")
if actual.Value != expected {
t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual.Value, expected)
}
}
}
// This is a holistic test of multi-var (aka "splat variable") handling
// across several different Terraform subsystems. This is here because
// historically there were quirky differences in handling across different
// parts of Terraform and so here we want to assert the expected behavior and
// ensure that it remains consistent in future.
func TestContext2Apply_multiVarComprehensive(t *testing.T) {
m := testModule(t, "apply-multi-var-comprehensive")
p := testProvider("test")
configs := map[string]*ResourceConfig{}
var configsLock sync.Mutex
p.ApplyFn = testApplyFn
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
proposed := req.ProposedNewState
configsLock.Lock()
defer configsLock.Unlock()
key := proposed.GetAttr("key").AsString()
// This test was originally written using the legacy p.DiffFn interface,
// and so the assertions below expect an old-style ResourceConfig, which
// we'll construct via our shim for now to avoid rewriting all of the
// assertions.
configs[key] = NewResourceConfigShimmed(req.Config, p.GetSchemaReturn.ResourceTypes["test_thing"])
retVals := make(map[string]cty.Value)
for it := proposed.ElementIterator(); it.Next(); {
idxVal, val := it.Element()
idx := idxVal.AsString()
switch idx {
case "id":
retVals[idx] = cty.UnknownVal(cty.String)
case "name":
retVals[idx] = cty.StringVal(key)
default:
retVals[idx] = val
}
}
return providers.PlanResourceChangeResponse{
PlannedState: cty.ObjectVal(retVals),
}
}
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_thing": {
Attributes: map[string]*configschema.Attribute{
"key": {Type: cty.String, Required: true},
"source_id": {Type: cty.String, Optional: true},
"source_name": {Type: cty.String, Optional: true},
"first_source_id": {Type: cty.String, Optional: true},
"first_source_name": {Type: cty.String, Optional: true},
"source_ids": {Type: cty.List(cty.String), Optional: true},
"source_names": {Type: cty.List(cty.String), Optional: true},
"source_ids_from_func": {Type: cty.List(cty.String), Optional: true},
"source_names_from_func": {Type: cty.List(cty.String), Optional: true},
"source_ids_wrapped": {Type: cty.List(cty.List(cty.String)), Optional: true},
"source_names_wrapped": {Type: cty.List(cty.List(cty.String)), Optional: true},
"id": {Type: cty.String, Computed: true},
"name": {Type: cty.String, Computed: true},
},
},
},
}
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(3),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatalf("errors during plan")
}
checkConfig := func(key string, want map[string]interface{}) {
configsLock.Lock()
defer configsLock.Unlock()
if _, ok := configs[key]; !ok {
t.Errorf("no config recorded for %s; expected a configuration", key)
return
}
got := configs[key].Config
t.Run("config for "+key, func(t *testing.T) {
want["key"] = key // to avoid doing this for every example
for _, problem := range deep.Equal(got, want) {
t.Errorf("%s", problem)
}
})
}
checkConfig("multi_count_var.0", map[string]interface{}{
"source_id": hcl2shim.UnknownVariableValue,
"source_name": "source.0",
})
checkConfig("multi_count_var.2", map[string]interface{}{
"source_id": hcl2shim.UnknownVariableValue,
"source_name": "source.2",
})
checkConfig("multi_count_derived.0", map[string]interface{}{
"source_id": hcl2shim.UnknownVariableValue,
"source_name": "source.0",
})
checkConfig("multi_count_derived.2", map[string]interface{}{
"source_id": hcl2shim.UnknownVariableValue,
"source_name": "source.2",
})
checkConfig("whole_splat", map[string]interface{}{
"source_ids": []interface{}{
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
},
"source_names": []interface{}{
"source.0",
"source.1",
"source.2",
},
"source_ids_from_func": hcl2shim.UnknownVariableValue,
"source_names_from_func": []interface{}{
"source.0",
"source.1",
"source.2",
},
"source_ids_wrapped": []interface{}{
[]interface{}{
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
},
},
"source_names_wrapped": []interface{}{
[]interface{}{
"source.0",
"source.1",
"source.2",
},
},
"first_source_id": hcl2shim.UnknownVariableValue,
"first_source_name": "source.0",
})
checkConfig("child.whole_splat", map[string]interface{}{
"source_ids": []interface{}{
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
},
"source_names": []interface{}{
"source.0",
"source.1",
"source.2",
},
"source_ids_wrapped": []interface{}{
[]interface{}{
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
hcl2shim.UnknownVariableValue,
},
},
"source_names_wrapped": []interface{}{
[]interface{}{
"source.0",
"source.1",
"source.2",
},
},
})
t.Run("apply", func(t *testing.T) {
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("error during apply: %s", diags.Err())
}
want := map[string]interface{}{
"source_ids": []interface{}{"foo", "foo", "foo"},
"source_names": []interface{}{
"source.0",
"source.1",
"source.2",
},
}
got := map[string]interface{}{}
for k, s := range state.RootModule().OutputValues {
got[k] = hcl2shim.ConfigValueFromHCL2(s.Value)
}
if !reflect.DeepEqual(got, want) {
t.Errorf(
"wrong outputs\ngot: %s\nwant: %s",
spew.Sdump(got), spew.Sdump(want),
)
}
})
}
// Test that multi-var (splat) access is ordered by count, not by
// value.
func TestContext2Apply_multiVarOrder(t *testing.T) {
m := testModule(t, "apply-multi-var-order")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("State: %s", state.String())
actual := state.RootModule().OutputValues["should-be-11"]
expected := cty.StringVal("index-11")
if actual == nil {
t.Fatal("missing output")
}
if actual.Value != expected {
t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual.Value, expected)
}
}
// Test that multi-var (splat) access is ordered by count, not by
// value, through interpolations.
func TestContext2Apply_multiVarOrderInterp(t *testing.T) {
m := testModule(t, "apply-multi-var-order-interp")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("State: %s", state.String())
actual := state.RootModule().OutputValues["should-be-11"]
expected := cty.StringVal("baz-index-11")
if actual == nil {
t.Fatal("missing output")
}
if actual.Value != expected {
t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual.Value, expected)
}
}
// Based on GH-10440 where a graph edge wasn't properly being created
// between a modified resource and a count instance being destroyed.
func TestContext2Apply_multiVarCountDec(t *testing.T) {
var s *states.State
// First create resources. Nothing sneaky here.
{
m := testModule(t, "apply-multi-var-count-dec")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(2),
SourceType: ValueFromCaller,
},
},
})
log.Print("\n========\nStep 1 Plan\n========")
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
log.Print("\n========\nStep 1 Apply\n========")
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("Step 1 state:\n%s", state)
s = state
}
// Decrease the count by 1 and verify that everything happens in the
// right order.
{
m := testModule(t, "apply-multi-var-count-dec")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// Verify that aws_instance.bar is modified first and nothing
// else happens at the same time.
var checked bool
var called int32
var lock sync.Mutex
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
lock.Lock()
defer lock.Unlock()
if id != nil && id.Attributes != nil && id.Attributes["ami"] != nil && id.Attributes["ami"].New == "special" {
checked = true
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Verify that called is 1, i.e. only the dependency has been applied so far
if atomic.LoadInt32(&called) != 1 {
return nil, fmt.Errorf("only the dependency should have been applied by now")
}
}
atomic.AddInt32(&called, 1)
return testApplyFn(info, is, id)
}
ctx := testContext2(t, &ContextOpts{
State: s,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Variables: InputValues{
"num": &InputValue{
Value: cty.NumberIntVal(1),
SourceType: ValueFromCaller,
},
},
})
log.Print("\n========\nStep 2 Plan\n========")
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
t.Logf("Step 2 plan:\n%s", legacyDiffComparisonString(plan.Changes))
log.Print("\n========\nStep 2 Apply\n========")
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !checked {
t.Error("apply never called")
}
t.Logf("Step 2 state:\n%s", state)
s = state
}
}
// Test that we can resolve a multi-var (splat) for the first resource
// created in a non-root module, which happens when the module state doesn't
// exist yet.
// https://github.com/hashicorp/terraform/issues/14438
func TestContext2Apply_multiVarMissingState(t *testing.T) {
m := testModule(t, "apply-multi-var-missing-state")
p := testProvider("test")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_thing": {
Attributes: map[string]*configschema.Attribute{
"a_ids": {Type: cty.String, Optional: true},
"id": {Type: cty.String, Computed: true},
},
},
},
}
// First, apply with a count of 3
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan failed: %s", diags.Err())
}
// Before the relevant bug was fixed, Terraform would panic during apply.
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply failed: %s", diags.Err())
}
// If we get here with no errors or panics then our test was successful.
}
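// Tests that a provider returning a nil diff during apply results in an
// error rather than being silently accepted.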
func TestContext2Apply_nilDiff(t *testing.T) {
m := testModule(t, "apply-good")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) {
return nil, nil
}
if _, diags := ctx.Apply(); diags == nil {
t.Fatal("should error")
}
}
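// Tests an output that depends on a resource: if the resource fails to
// create, the output must not be recorded; once the resource applies
// successfully, the output must be present.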
func TestContext2Apply_outputDependsOn(t *testing.T) {
m := testModule(t, "apply-output-depends-on")
p := testProvider("aws")
p.DiffFn = testDiffFn
{
// Create a custom apply function that sleeps a bit (to allow parallel
// graph execution) and then returns an error to force a partial state
// return. We then verify the output is NOT there.
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
// Sleep to allow parallel execution
time.Sleep(50 * time.Millisecond)
// Return error to force partial state
return nil, fmt.Errorf("abcd")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() || !strings.Contains(diags.Err().Error(), "abcd") {
t.Fatalf("err: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
}
{
// Create the standard apply function and verify we get the output
p.ApplyFn = testApplyFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
Outputs:
value = result
`)
}
}
func TestContext2Apply_outputOrphan(t *testing.T) {
m := testModule(t, "apply-output-orphan")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Outputs: map[string]*OutputState{
"foo": &OutputState{
Type: "string",
Sensitive: false,
Value: "bar",
},
"bar": &OutputState{
Type: "string",
Sensitive: false,
Value: "baz",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputOrphanStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
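// Tests outputs recorded for a module that is no longer in the
// configuration: the first apply is checked against
// testTerraformApplyOutputOrphanModuleStr, and a second apply with an empty
// configuration must leave the state completely empty.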
func TestContext2Apply_outputOrphanModule(t *testing.T) {
m := testModule(t, "apply-output-orphan-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Outputs: map[string]*OutputState{
"foo": &OutputState{
Type: "string",
Value: "bar",
},
"bar": &OutputState{
Type: "string",
Value: "baz",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state.DeepCopy(),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputOrphanModuleStr)
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
// now apply with no module in the config, which should remove the
// remaining output
ctx = testContext2(t, &ContextOpts{
Config: configs.NewEmptyConfig(),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state.DeepCopy(),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if !state.Empty() {
t.Fatalf("wrong final state %s\nwant empty state", spew.Sdump(state))
}
}
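// Tests a provider configuration argument populated from another resource's
// attribute: by the time the provider is configured, "value" must be known
// (not computed) and equal to "yes".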
func TestContext2Apply_providerComputedVar(t *testing.T) {
m := testModule(t, "apply-provider-computed")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pTest := testProvider("test")
pTest.ApplyFn = testApplyFn
pTest.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
"test": testProviderFuncFixed(pTest),
},
),
})
p.ConfigureFn = func(c *ResourceConfig) error {
if c.IsComputed("value") {
return fmt.Errorf("value is computed")
}
v, ok := c.Get("value")
if !ok {
return fmt.Errorf("value is not found")
}
if v != "yes" {
return fmt.Errorf("value is not 'yes': %v", v)
}
return nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
func TestContext2Apply_providerConfigureDisabled(t *testing.T) {
m := testModule(t, "apply-provider-configure-disabled")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
called := false
p.ConfigureFn = func(c *ResourceConfig) error {
called = true
if _, ok := c.Get("value"); !ok {
return fmt.Errorf("value is not found")
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !called {
t.Fatal("configure never called")
}
}
func TestContext2Apply_provisionerModule(t *testing.T) {
m := testModule(t, "apply-provisioner-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.GetSchemaResponse = provisioners.GetSchemaResponse{
Provisioner: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"foo": {Type: cty.String, Optional: true},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerModuleStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify the provisioner was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
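// Tests that a provisioner's "command" argument can reference a computed
// value, which must already be resolved to "computed_value" when the
// provisioner runs.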
func TestContext2Apply_Provisioner_compute(t *testing.T) {
m := testModule(t, "apply-provisioner-compute")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "computed_value" {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
Variables: InputValues{
"value": &InputValue{
Value: cty.NumberIntVal(1),
SourceType: ValueFromCaller,
},
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify the provisioner was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
func TestContext2Apply_provisionerCreateFail(t *testing.T) {
m := testModule(t, "apply-provisioner-fail-create")
p := testProvider("aws")
pr := testProvisioner()
p.DiffFn = testDiffFn
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
is.ID = "foo"
return is, fmt.Errorf("error")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should error")
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(testTerraformApplyProvisionerFailCreateStr)
if got != want {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", got, want)
}
}
func TestContext2Apply_provisionerCreateFailNoId(t *testing.T) {
m := testModule(t, "apply-provisioner-fail-create")
p := testProvider("aws")
pr := testProvisioner()
p.DiffFn = testDiffFn
p.ApplyFn = func(
info *InstanceInfo,
is *InstanceState,
id *InstanceDiff) (*InstanceState, error) {
return nil, fmt.Errorf("error")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerFailCreateNoIdStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
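// Tests the state recorded when a provisioner fails after its resource was
// created successfully; the result is checked against
// testTerraformApplyProvisionerFailStr.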
func TestContext2Apply_provisionerFail(t *testing.T) {
m := testModule(t, "apply-provisioner-fail")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(*InstanceState, *ResourceConfig) error {
return fmt.Errorf("EXPLOSION")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerFailStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_provisionerFail_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-provisioner-fail-create-before")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(*InstanceState, *ResourceConfig) error {
return fmt.Errorf("EXPLOSION")
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerFailCreateBeforeDestroyStr)
if actual != expected {
t.Fatalf("expected:\n%s\n:got\n%s", expected, actual)
}
}
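// TestContext2Apply_error_createBeforeDestroy covers a create_before_destroy
// replacement where the provider fails to create the replacement object; the
// apply must fail and the resulting state must match the expected golden value.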
func TestContext2Apply_error_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-error-create-before")
p := testProvider("aws")
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
return nil, fmt.Errorf("error")
}
p.DiffFn = testDiffFn
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyErrorCreateBeforeDestroyStr)
if actual != expected {
t.Fatalf("wrong final state\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
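// TestContext2Apply_errorDestroy_createBeforeDestroy covers a
// create_before_destroy replacement where the new object is created
// successfully but destroying the old object fails; the apply must fail and
// the resulting state must match the expected golden value.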
func TestContext2Apply_errorDestroy_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-error-create-before")
p := testProvider("aws")
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"require_new": "abc",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
// Fail the destroy!
if id.Destroy {
return is, fmt.Errorf("error")
}
// Create should work
is = &InstanceState{
ID: "foo",
Attributes: map[string]string{
"type": "aws_instance",
"require_new": "xyz",
},
}
return is, nil
}
p.DiffFn = testDiffFn
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyErrorDestroyCreateBeforeDestroyStr)
if actual != expected {
t.Fatalf("bad: actual:\n%s\n\nexpected:\n%s", actual, expected)
}
}
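// TestContext2Apply_multiDepose_createBeforeDestroy walks a
// create_before_destroy resource through several replacement cycles while the
// destroy step is broken to varying degrees, checking that deposed objects
// accumulate while destroys fail and are cleaned up once destroys succeed again.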
func TestContext2Apply_multiDepose_createBeforeDestroy(t *testing.T) {
m := testModule(t, "apply-multi-depose-create-before-destroy")
p := testProvider("aws")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"require_new": {Type: cty.String, Optional: true},
"id": {Type: cty.String, Computed: true},
},
},
},
}
ps := map[string]providers.Factory{"aws": testProviderFuncFixed(p)}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.web": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{ID: "foo"},
},
},
},
},
})
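	// This hand-rolled DiffFn forces replacement (RequiresNew) whenever the
	// require_new argument changes.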
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
if rc == nil {
return &InstanceDiff{
Destroy: true,
}, nil
}
rn, _ := rc.Get("require_new")
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"id": {
New: hcl2shim.UnknownVariableValue,
NewComputed: true,
RequiresNew: true,
},
"require_new": {
Old: s.Attributes["require_new"],
New: rn.(string),
RequiresNew: true,
},
},
}, nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(ps),
State: state,
})
createdInstanceId := "bar"
// Create works
createFunc := func(is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
return &InstanceState{
ID: createdInstanceId,
Attributes: map[string]string{
"require_new": id.Attributes["require_new"].New,
},
}, nil
}
// Destroy starts broken
destroyFunc := func(is *InstanceState) (*InstanceState, error) {
return is, fmt.Errorf("destroy failed")
}
p.ApplyFn = func(info *InstanceInfo, is *InstanceState, id *InstanceDiff) (*InstanceState, error) {
if id.Destroy {
return destroyFunc(is)
} else {
return createFunc(is, id)
}
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// Destroy is broken, so even though CBD successfully replaces the instance,
// we'll have to save the Deposed instance to destroy later
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
checkStateString(t, state, `
aws_instance.web: (1 deposed)
ID = bar
provider = provider.aws
require_new = yes
Deposed ID 1 = foo
`)
createdInstanceId = "baz"
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(ps),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
// We're replacing the primary instance once again. Destroy is _still_
// broken, so the Deposed list gets longer
state, diags = ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
// For this one we can't rely on checkStateString because its result is
// not deterministic when multiple deposed objects are present. Instead,
// we will probe the state object directly.
{
is := state.RootModule().Resources["aws_instance.web"].Instances[addrs.NoKey]
t.Logf("aws_instance.web is %s", spew.Sdump(is))
if is.Current == nil {
t.Fatalf("no current object for aws_instance web; should have one")
}
if !bytes.Contains(is.Current.AttrsJSON, []byte("baz")) {
t.Fatalf("incorrect current object attrs %s; want id=baz", is.Current.AttrsJSON)
}
if got, want := len(is.Deposed), 2; got != want {
t.Fatalf("wrong number of deposed instances %d; want %d", got, want)
}
var foos, bars int
for _, obj := range is.Deposed {
if bytes.Contains(obj.AttrsJSON, []byte("foo")) {
foos++
}
if bytes.Contains(obj.AttrsJSON, []byte("bar")) {
bars++
}
}
if got, want := foos, 1; got != want {
t.Fatalf("wrong number of deposed instances with id=foo %d; want %d", got, want)
}
if got, want := bars, 1; got != want {
t.Fatalf("wrong number of deposed instances with id=bar %d; want %d", got, want)
}
}
// Destroy partially fixed!
destroyFunc = func(is *InstanceState) (*InstanceState, error) {
if is.ID == "foo" || is.ID == "baz" {
return nil, nil
} else {
return is, fmt.Errorf("destroy partially failed")
}
}
createdInstanceId = "qux"
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(ps),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
// Expect error because 1/2 of Deposed destroys failed
if diags == nil {
t.Fatal("should have error")
}
// foo and baz are now gone, bar sticks around
checkStateString(t, state, `
aws_instance.web: (1 deposed)
ID = qux
provider = provider.aws
require_new = yes
Deposed ID 1 = bar
`)
// Destroy working fully!
destroyFunc = func(is *InstanceState) (*InstanceState, error) {
return nil, nil
}
createdInstanceId = "quux"
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(ps),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatal("should not have error:", diags.Err())
}
// And finally the state is clean
checkStateString(t, state, `
aws_instance.web:
ID = quux
provider = provider.aws
require_new = yes
`)
}
// Verify that a normal provisioner with on_failure "continue" set won't
// taint the resource and continues executing.
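// The fixture itself is not shown here; the behavior under test corresponds to
// a provisioner configured roughly like this (illustrative only, not the
// literal fixture contents):
//
//	provisioner "shell" {
//	  command    = "..."
//	  on_failure = "continue"
//	}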
func TestContext2Apply_provisionerFailContinue(t *testing.T) {
m := testModule(t, "apply-provisioner-fail-continue")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return fmt.Errorf("provisioner error")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
// Verify that a normal provisioner with on_failure "continue" records
// the error with the hook.
func TestContext2Apply_provisionerFailContinueHook(t *testing.T) {
h := new(MockHook)
m := testModule(t, "apply-provisioner-fail-continue")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return fmt.Errorf("provisioner error")
}
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !h.PostProvisionInstanceStepCalled {
t.Fatal("PostProvisionInstanceStep not called")
}
if h.PostProvisionInstanceStepErrorArg == nil {
t.Fatal("should have error")
}
}
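// TestContext2Apply_provisionerDestroy verifies that a destroy-time provisioner
// runs when its resource is destroyed and receives its configured command (the
// fake provisioner below asserts command = "destroy"). The fixture is assumed
// to declare something along these lines (illustrative only):
//
//	provisioner "shell" {
//	  when    = "destroy"
//	  command = "destroy"
//	}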
func TestContext2Apply_provisionerDestroy(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "destroy" {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
// Verify that when a destroy-time provisioner fails, the instance is left
// untouched in state.
func TestContext2Apply_provisionerDestroyFail(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return fmt.Errorf("provisioner error")
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should error")
}
checkStateString(t, state, `
aws_instance.foo:
ID = bar
provider = provider.aws
`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
// Verify that when a destroy-time provisioner fails with on_failure "continue",
// we continue to the next provisioner.
func TestContext2Apply_provisionerDestroyFailContinue(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-continue")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var l sync.Mutex
var calls []string
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
l.Lock()
defer l.Unlock()
calls = append(calls, val.(string))
return fmt.Errorf("provisioner error")
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
expected := []string{"one", "two"}
if !reflect.DeepEqual(calls, expected) {
t.Fatalf("wrong commands\ngot: %#v\nwant: %#v", calls, expected)
}
}
// Verify that when a destroy-time provisioner fails with on_failure "continue",
// we continue to the next provisioner. But if that next provisioner is
// configured to fail, the apply fails after running it.
func TestContext2Apply_provisionerDestroyFailContinueFail(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-fail")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var l sync.Mutex
var calls []string
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
l.Lock()
defer l.Unlock()
calls = append(calls, val.(string))
return fmt.Errorf("provisioner error")
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("apply succeeded; wanted error from second provisioner")
}
checkStateString(t, state, `
aws_instance.foo:
ID = bar
provider = provider.aws
`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
expected := []string{"one", "two"}
if !reflect.DeepEqual(calls, expected) {
t.Fatalf("bad: %#v", calls)
}
}
// Verify destroy provisioners are not run for tainted instances.
func TestContext2Apply_provisionerDestroyTainted(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
destroyCalled := false
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
expected := "create"
if rs.ID == "bar" {
destroyCalled = true
return nil
}
val, ok := c.Config["command"]
if !ok || val != expected {
t.Fatalf("bad value for command: %v %#v", val, c)
}
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Tainted: true,
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
foo = bar
type = aws_instance
`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
if destroyCalled {
t.Fatal("destroy should not be called")
}
}
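// TestContext2Apply_provisionerDestroyModule verifies that a destroy-time
// provisioner attached to a resource inside a child module still runs when
// that resource is destroyed.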
func TestContext2Apply_provisionerDestroyModule(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-module")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "value" {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
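// TestContext2Apply_provisionerDestroyRef verifies that a destroy-time
// provisioner can reference another resource's attribute: the provisioner is
// expected to receive command = "hello", which matches the "value" attribute
// recorded for aws_instance.bar in the prior state.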
func TestContext2Apply_provisionerDestroyRef(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-ref")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "hello" {
return fmt.Errorf("bad value for command: %v %#v", val, c)
}
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"value": "hello",
},
},
Provider: "provider.aws",
},
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
// Test that a destroy provisioner referencing an invalid key errors.
func TestContext2Apply_provisionerDestroyRefInvalid(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-ref-invalid")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return nil
}
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Destroy: true,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
// this was an apply test, but this is now caught in Validation
if diags := ctx.Validate(); !diags.HasErrors() {
t.Fatal("expected error")
}
}
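// TestContext2Apply_provisionerResourceRef verifies that a provisioner can
// reference resource attributes in its configuration; the fake provisioner
// asserts it receives command = "2", and the final state is compared against
// the expected golden value.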
func TestContext2Apply_provisionerResourceRef(t *testing.T) {
m := testModule(t, "apply-provisioner-resource-ref")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "2" {
t.Fatalf("bad value for foo: %v %#v", val, c)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerResourceRefStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
func TestContext2Apply_provisionerSelfRef(t *testing.T) {
m := testModule(t, "apply-provisioner-self-ref")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "bar" {
t.Fatalf("bad value for command: %v %#v", val, c)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerSelfRefStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
func TestContext2Apply_provisionerMultiSelfRef(t *testing.T) {
var lock sync.Mutex
commands := make([]string, 0, 5)
m := testModule(t, "apply-provisioner-multi-self-ref")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
lock.Lock()
defer lock.Unlock()
val, ok := c.Config["command"]
if !ok {
t.Fatalf("bad value for command: %v %#v", val, c)
}
commands = append(commands, val.(string))
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerMultiSelfRefStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
// Verify our result
sort.Strings(commands)
expectedCommands := []string{"number 0", "number 1", "number 2"}
if !reflect.DeepEqual(commands, expectedCommands) {
t.Fatalf("bad: %#v", commands)
}
}
func TestContext2Apply_provisionerMultiSelfRefSingle(t *testing.T) {
var lock sync.Mutex
order := make([]string, 0, 5)
m := testModule(t, "apply-provisioner-multi-self-ref-single")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
lock.Lock()
defer lock.Unlock()
val, ok := c.Config["order"]
if !ok {
t.Fatalf("bad value for order: %v %#v", val, c)
}
order = append(order, val.(string))
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerMultiSelfRefSingleStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
// Verify our result
sort.Strings(order)
expectedOrder := []string{"0", "1", "2"}
if !reflect.DeepEqual(order, expectedOrder) {
t.Fatalf("bad: %#v", order)
}
}
func TestContext2Apply_provisionerExplicitSelfRef(t *testing.T) {
m := testModule(t, "apply-provisioner-explicit-self-ref")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok || val != "bar" {
t.Fatalf("bad value for command: %v %#v", val, c)
}
return nil
}
var state *states.State
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
_, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner not invoked")
}
}
{
ctx := testContext2(t, &ContextOpts{
Config: m,
Destroy: true,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
_, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
}
}
func TestContext2Apply_provisionerForEachSelfRef(t *testing.T) {
m := testModule(t, "apply-provisioner-for-each-self")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
val, ok := c.Config["command"]
if !ok {
t.Fatalf("bad value for command: %v %#v", val, c)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
_, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
}
// Provisioner should NOT run on a diff, only create
func TestContext2Apply_Provisioner_Diff(t *testing.T) {
m := testModule(t, "apply-provisioner-diff")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
state, diags := ctx.Apply()
if diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyProvisionerDiffStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was invoked
if !pr.ProvisionResourceCalled {
t.Fatalf("provisioner was not called on first apply")
}
pr.ProvisionResourceCalled = false
// Change the state to force a diff
mod := state.RootModule()
obj := mod.Resources["aws_instance.bar"].Instances[addrs.NoKey].Current
var attrs map[string]interface{}
err := json.Unmarshal(obj.AttrsJSON, &attrs)
if err != nil {
t.Fatal(err)
}
attrs["foo"] = "baz"
obj.AttrsJSON, err = json.Marshal(attrs)
if err != nil {
t.Fatal(err)
}
// Re-create context with state
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
state2, diags := ctx.Apply()
if diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
actual = strings.TrimSpace(state2.String())
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Verify apply was NOT invoked
if pr.ProvisionResourceCalled {
t.Fatalf("provisioner was called on second apply; should not have been")
}
}
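// TestContext2Apply_outputDiffVars applies the configuration against a state
// containing a resource that is no longer in config (aws_instance.baz), using
// hand-rolled Diff and Apply functions; the test only asserts that plan and
// apply complete without errors.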
func TestContext2Apply_outputDiffVars(t *testing.T) {
m := testModule(t, "apply-good")
p := testProvider("aws")
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.baz": &ResourceState{ // This one is not in config, so should be destroyed
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
if d.Destroy {
return nil, nil
}
result := s.MergeDiff(d)
result.ID = "foo"
return result, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
} else if rc.IsComputed("foo") {
d.Attributes["foo"] = &ResourceAttrDiff{
NewComputed: true,
Type: DiffAttrOutput, // This doesn't actually really do anything anymore, but this test originally set it.
}
}
if new, ok := rc.Get("num"); ok {
d.Attributes["num"] = &ResourceAttrDiff{
New: fmt.Sprintf("%#v", new),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
}
if _, diags := ctx.Apply(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
}
func TestContext2Apply_destroyX(t *testing.T) {
m := testModule(t, "apply-destroy")
h := new(HookRecordApplyOrder)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Next, plan and apply a destroy operation
h.Active = true
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyDestroyStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Test that things were destroyed _in the right order_
expected2 := []string{"aws_instance.bar", "aws_instance.foo"}
actual2 := h.IDs
if !reflect.DeepEqual(actual2, expected2) {
t.Fatalf("expected: %#v\n\ngot:%#v", expected2, actual2)
}
}
func TestContext2Apply_destroyOrder(t *testing.T) {
m := testModule(t, "apply-destroy")
h := new(HookRecordApplyOrder)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
t.Logf("State 1: %s", state)
// Next, plan and apply a destroy
h.Active = true
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyDestroyStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
// Test that things were destroyed _in the right order_
expected2 := []string{"aws_instance.bar", "aws_instance.foo"}
actual2 := h.IDs
if !reflect.DeepEqual(actual2, expected2) {
t.Fatalf("expected: %#v\n\ngot:%#v", expected2, actual2)
}
}
// https://github.com/hashicorp/terraform/issues/2767
func TestContext2Apply_destroyModulePrefix(t *testing.T) {
m := testModule(t, "apply-destroy-module-resource-prefix")
h := new(MockHook)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Verify that we got the apply info correct
if v := h.PreApplyAddr.String(); v != "module.child.aws_instance.foo" {
t.Fatalf("bad: %s", v)
}
// Next, plan and apply a destroy operation and reset the hook
h = new(MockHook)
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
if v := h.PreApplyAddr.String(); v != "module.child.aws_instance.foo" {
t.Fatalf("bad: %s", v)
}
}
func TestContext2Apply_destroyNestedModule(t *testing.T) {
m := testModule(t, "apply-destroy-nested-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child", "subchild"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
	// Plan and apply; the nested module resource in the prior state is an
	// orphan and should be destroyed
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
actual := strings.TrimSpace(state.String())
if actual != "<no state>" {
t.Fatalf("expected no state, got: %s", actual)
}
}
func TestContext2Apply_destroyDeeplyNestedModule(t *testing.T) {
m := testModule(t, "apply-destroy-deeply-nested-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child", "subchild", "subsubchild"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
	// Plan and apply; the deeply nested module resource in the prior state is
	// an orphan and should be destroyed
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Test that things were destroyed
if !state.Empty() {
t.Fatalf("wrong final state %s\nwant empty state", spew.Sdump(state))
}
}
// https://github.com/hashicorp/terraform/issues/5440
func TestContext2Apply_destroyModuleWithAttrsReferencingResource(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-module-with-attrs")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan diags: %s", diags.Err())
} else {
t.Logf("Step 1 plan: %s", legacyDiffComparisonString(p.Changes))
}
var diags tfdiags.Diagnostics
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply errs: %s", diags.Err())
}
t.Logf("Step 1 state: %s", state)
}
h := new(HookRecordApplyOrder)
h.Active = true
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
		// Now plan and apply the destroy operation
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("destroy plan err: %s", diags.Err())
}
t.Logf("Step 2 plan: %s", legacyDiffComparisonString(plan.Changes))
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags.Err())
}
t.Logf("Step 2 state: %s", state)
}
	// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`<no state>`)
if actual != expected {
t.Fatalf("expected:\n\n%s\n\nactual:\n\n%s", expected, actual)
}
}
func TestContext2Apply_destroyWithModuleVariableAndCount(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-mod-var-and-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
var diags tfdiags.Diagnostics
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply err: %s", diags.Err())
}
}
h := new(HookRecordApplyOrder)
h.Active = true
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
		// Now plan and apply the destroy operation
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("destroy plan err: %s", diags.Err())
}
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags.Err())
}
}
	// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
<no state>
module.child:
`)
if actual != expected {
t.Fatalf("expected: \n%s\n\nbad: \n%s", expected, actual)
}
}
func TestContext2Apply_destroyTargetWithModuleVariableAndCount(t *testing.T) {
m := testModule(t, "apply-destroy-mod-var-and-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
var diags tfdiags.Diagnostics
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply err: %s", diags.Err())
}
}
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey),
},
})
_, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan err: %s", diags)
}
if len(diags) != 1 {
// Should have one warning that -target is in effect.
t.Fatalf("got %d diagnostics in plan; want 1", len(diags))
}
if got, want := diags[0].Severity(), tfdiags.Warning; got != want {
t.Errorf("wrong diagnostic severity %#v; want %#v", got, want)
}
if got, want := diags[0].Description().Summary, "Resource targeting is in effect"; got != want {
t.Errorf("wrong diagnostic summary %#v; want %#v", got, want)
}
// Destroy, targeting the module explicitly
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags)
}
if len(diags) != 1 {
t.Fatalf("got %d diagnostics; want 1", len(diags))
}
if got, want := diags[0].Severity(), tfdiags.Warning; got != want {
t.Errorf("wrong diagnostic severity %#v; want %#v", got, want)
}
if got, want := diags[0].Description().Summary, "Applied changes may be incomplete"; got != want {
t.Errorf("wrong diagnostic summary %#v; want %#v", got, want)
}
}
	// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`<no state>`)
if actual != expected {
t.Fatalf("expected: \n%s\n\nbad: \n%s", expected, actual)
}
}
func TestContext2Apply_destroyWithModuleVariableAndCountNested(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-mod-var-and-count-nested")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
var diags tfdiags.Diagnostics
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply err: %s", diags.Err())
}
}
h := new(HookRecordApplyOrder)
h.Active = true
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
		// Now plan and apply the destroy operation
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("destroy plan err: %s", diags.Err())
}
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags.Err())
}
}
	// Test that things were destroyed
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
<no state>
module.child.child2:
`)
if actual != expected {
t.Fatalf("expected: \n%s\n\nbad: \n%s", expected, actual)
}
}
func TestContext2Apply_destroyOutputs(t *testing.T) {
m := testModule(t, "apply-destroy-outputs")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// Next, plan and apply a destroy operation
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) > 0 {
t.Fatalf("expected no resources, got: %#v", mod)
}
// destroying again should produce no errors
ctx = testContext2(t, &ContextOpts{
Destroy: true,
State: state,
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatal(diags.Err())
}
}
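// TestContext2Apply_destroyOrphan verifies that a resource which exists in the
// prior state but not in the configuration (aws_instance.baz) is destroyed and
// removed from state during a normal apply.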
func TestContext2Apply_destroyOrphan(t *testing.T) {
m := testModule(t, "apply-error")
p := testProvider("aws")
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.baz": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
if d.Destroy {
return nil, nil
}
result := s.MergeDiff(d)
result.ID = "foo"
return result, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if _, ok := mod.Resources["aws_instance.baz"]; ok {
t.Fatalf("bad: %#v", mod.Resources)
}
}
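// TestContext2Apply_destroyTaintedProvisioner verifies that destroy-time
// provisioners are not run when destroying an instance that is already marked
// as tainted.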
func TestContext2Apply_destroyTaintedProvisioner(t *testing.T) {
m := testModule(t, "apply-destroy-provisioner")
p := testProvider("aws")
pr := testProvisioner()
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
called := false
pr.ApplyFn = func(rs *InstanceState, c *ResourceConfig) error {
called = true
return nil
}
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"id": "bar",
},
Tainted: true,
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: s,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if called {
t.Fatal("provisioner should not be called")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace("<no state>")
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
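// TestContext2Apply_error verifies that when one of the resource applies fails
// (the provider succeeds on its first call and fails on subsequent ones), the
// overall apply returns an error and the resulting state matches the expected
// golden value.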
func TestContext2Apply_error(t *testing.T) {
errored := false
m := testModule(t, "apply-error")
p := testProvider("aws")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
p.ApplyFn = func(*InstanceInfo, *InstanceState, *InstanceDiff) (*InstanceState, error) {
if errored {
state := &InstanceState{
ID: "bar",
}
return state, fmt.Errorf("error")
}
errored = true
return &InstanceState{
ID: "foo",
Attributes: map[string]string{
"value": "2",
},
}, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyErrorStr)
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
}
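// TestContext2Apply_errorDestroy verifies that when a provider fails to destroy
// an object and returns no new state along with its error, the object is
// retained in state rather than being dropped.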
func TestContext2Apply_errorDestroy(t *testing.T) {
m := testModule(t, "empty")
p := testProvider("test")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_thing": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Optional: true},
},
},
},
}
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
		// Should not actually be called for this test, because Terraform Core
		// constructs the plan for a destroy operation itself.
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
// The apply (in this case, a destroy) always fails, so we can verify
// that the object stays in the state after a destroy fails even though
// we aren't returning a new state object here.
return providers.ApplyResourceChangeResponse{
Diagnostics: tfdiags.Diagnostics(nil).Append(fmt.Errorf("failed")),
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
State: states.BuildState(func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_thing",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"baz"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
}),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
test_thing.foo:
ID = baz
provider = provider.test
`) // test_thing.foo is still here, even though provider returned no new state along with its error
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
}
func TestContext2Apply_errorCreateInvalidNew(t *testing.T) {
m := testModule(t, "apply-error")
p := testProvider("aws")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true},
"foo": {Type: cty.String, Optional: true},
},
},
},
}
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
// We're intentionally returning an inconsistent new state here
// because we want to test that Terraform ignores the inconsistency
// when accompanied by another error.
return providers.ApplyResourceChangeResponse{
NewState: cty.ObjectVal(map[string]cty.Value{
"value": cty.StringVal("wrong wrong wrong wrong"),
"foo": cty.StringVal("absolutely brimming over with wrongability"),
}),
Diagnostics: tfdiags.Diagnostics(nil).Append(fmt.Errorf("forced error")),
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags == nil {
t.Fatal("should have error")
}
if got, want := len(diags), 1; got != want {
// There should be no additional diagnostics generated by Terraform's own eval logic,
// because the provider's own error supersedes them.
t.Errorf("wrong number of diagnostics %d; want %d\n%s", got, want, diags.Err())
}
if got, want := diags.Err().Error(), "forced error"; !strings.Contains(got, want) {
t.Errorf("returned error does not contain %q, but it should\n%s", want, diags.Err())
}
if got, want := len(state.RootModule().Resources), 2; got != want {
t.Errorf("%d resources in state before prune; should have %d\n%s", got, want, spew.Sdump(state))
}
state.PruneResourceHusks() // aws_instance.bar with no instances gets left behind when we bail out, but that's okay
if got, want := len(state.RootModule().Resources), 1; got != want {
t.Errorf("%d resources in state after prune; should have only one (aws_instance.foo, tainted)\n%s", got, spew.Sdump(state))
}
}
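// TestContext2Apply_errorUpdateNullNew verifies that when a provider returns
// an error and a null new object during an update, the prior state is
// retained rather than being treated as a deletion.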
func TestContext2Apply_errorUpdateNullNew(t *testing.T) {
m := testModule(t, "apply-error")
p := testProvider("aws")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": {
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true},
"foo": {Type: cty.String, Optional: true},
},
},
},
}
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
// We're intentionally returning no NewState here because we want to
// test that Terraform retains the prior state, rather than treating
// the returned null as "no state" (object deleted).
return providers.ApplyResourceChangeResponse{
Diagnostics: tfdiags.Diagnostics(nil).Append(fmt.Errorf("forced error")),
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
State: states.BuildState(func(ss *states.SyncState) {
ss.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"value":"old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
}),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
	if !diags.HasErrors() {
		t.Fatal("should have error")
	}
if got, want := len(diags), 1; got != want {
// There should be no additional diagnostics generated by Terraform's own eval logic,
// because the provider's own error supersedes them.
t.Errorf("wrong number of diagnostics %d; want %d\n%s", got, want, diags.Err())
}
if got, want := diags.Err().Error(), "forced error"; !strings.Contains(got, want) {
t.Errorf("returned error does not contain %q, but it should\n%s", want, diags.Err())
}
state.PruneResourceHusks()
if got, want := len(state.RootModule().Resources), 1; got != want {
t.Fatalf("%d resources in state; should have only one (aws_instance.foo, unmodified)\n%s", got, spew.Sdump(state))
}
is := state.ResourceInstance(addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance))
if is == nil {
t.Fatalf("aws_instance.foo is not in the state after apply")
}
if got, want := is.Current.AttrsJSON, []byte(`"old"`); !bytes.Contains(got, want) {
t.Fatalf("incorrect attributes for aws_instance.foo\ngot: %s\nwant: JSON containing %s\n\n%s", got, want, spew.Sdump(is))
}
}
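// TestContext2Apply_errorPartial verifies that an apply which fails partway
// through still records the resources that were created successfully before
// the error occurred.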
func TestContext2Apply_errorPartial(t *testing.T) {
errored := false
m := testModule(t, "apply-error")
p := testProvider("aws")
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
if errored {
return s, fmt.Errorf("error")
}
errored = true
return &InstanceState{
ID: "foo",
Attributes: map[string]string{
"value": "2",
},
}, nil
}
p.DiffFn = func(info *InstanceInfo, s *InstanceState, rc *ResourceConfig) (*InstanceDiff, error) {
d := &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{},
}
if new, ok := rc.Get("value"); ok {
d.Attributes["value"] = &ResourceAttrDiff{
New: new.(string),
}
}
if new, ok := rc.Get("foo"); ok {
d.Attributes["foo"] = &ResourceAttrDiff{
New: new.(string),
}
}
return d, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
	if !diags.HasErrors() {
		t.Fatal("should have error")
	}
mod := state.RootModule()
if len(mod.Resources) != 2 {
t.Fatalf("bad: %#v", mod.Resources)
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyErrorPartialStr)
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
}
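// TestContext2Apply_hook verifies that the PreApply, PostApply, and
// PostStateUpdate hooks are invoked during a successful apply.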
func TestContext2Apply_hook(t *testing.T) {
m := testModule(t, "apply-good")
h := new(MockHook)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !h.PreApplyCalled {
t.Fatal("should be called")
}
if !h.PostApplyCalled {
t.Fatal("should be called")
}
if !h.PostStateUpdateCalled {
t.Fatalf("should call post state update")
}
}
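// TestContext2Apply_hookOrphan verifies that apply hooks are also invoked
// when destroying an orphaned resource that exists only in state, not in
// configuration.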
func TestContext2Apply_hookOrphan(t *testing.T) {
m := testModule(t, "apply-blank")
h := new(MockHook)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
State: state,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
if !h.PreApplyCalled {
t.Fatal("should be called")
}
if !h.PostApplyCalled {
t.Fatal("should be called")
}
if !h.PostStateUpdateCalled {
t.Fatalf("should call post state update")
}
}
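// TestContext2Apply_idAttr verifies that the instance ID reported by the
// provider ends up as the "id" attribute in state, taking precedence over a
// conflicting "id" attribute value.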
func TestContext2Apply_idAttr(t *testing.T) {
m := testModule(t, "apply-idattr")
p := testProvider("aws")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
result := s.MergeDiff(d)
result.ID = "foo"
result.Attributes = map[string]string{
"id": "bar",
"num": "42",
}
return result, nil
}
p.DiffFn = func(*InstanceInfo, *InstanceState, *ResourceConfig) (*InstanceDiff, error) {
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"num": &ResourceAttrDiff{
New: "42",
},
},
}, nil
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
mod := state.RootModule()
rs, ok := mod.Resources["aws_instance.foo"]
if !ok {
t.Fatal("not in state")
}
var attrs map[string]interface{}
err := json.Unmarshal(rs.Instances[addrs.NoKey].Current.AttrsJSON, &attrs)
if err != nil {
t.Fatal(err)
}
if got, want := attrs["id"], "foo"; got != want {
t.Fatalf("wrong id\ngot: %#v\nwant: %#v", got, want)
}
}
func TestContext2Apply_outputBasic(t *testing.T) {
m := testModule(t, "apply-output")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_outputAdd(t *testing.T) {
m1 := testModule(t, "apply-output-add-before")
p1 := testProvider("aws")
p1.ApplyFn = testApplyFn
p1.DiffFn = testDiffFn
ctx1 := testContext2(t, &ContextOpts{
Config: m1,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p1),
},
),
})
if _, diags := ctx1.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state1, diags := ctx1.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
m2 := testModule(t, "apply-output-add-after")
p2 := testProvider("aws")
p2.ApplyFn = testApplyFn
p2.DiffFn = testDiffFn
ctx2 := testContext2(t, &ContextOpts{
Config: m2,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p2),
},
),
State: state1,
})
if _, diags := ctx2.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
state2, diags := ctx2.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state2.String())
expected := strings.TrimSpace(testTerraformApplyOutputAddStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_outputList(t *testing.T) {
m := testModule(t, "apply-output-list")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputListStr)
if actual != expected {
t.Fatalf("expected: \n%s\n\nbad: \n%s", expected, actual)
}
}
func TestContext2Apply_outputMulti(t *testing.T) {
m := testModule(t, "apply-output-multi")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputMultiStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_outputMultiIndex(t *testing.T) {
m := testModule(t, "apply-output-multi-index")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyOutputMultiIndexStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
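// TestContext2Apply_taintX verifies that a tainted resource is destroyed
// exactly once and then recreated during apply.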
func TestContext2Apply_taintX(t *testing.T) {
m := testModule(t, "apply-taint")
p := testProvider("aws")
// destroyCount tests against regression of
// https://github.com/hashicorp/terraform/issues/1056
var destroyCount = int32(0)
var once sync.Once
simulateProviderDelay := func() {
time.Sleep(10 * time.Millisecond)
}
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
once.Do(simulateProviderDelay)
if d.Destroy {
atomic.AddInt32(&destroyCount, 1)
}
return testApplyFn(info, s, d)
}
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "baz",
Attributes: map[string]string{
"num": "2",
"type": "aws_instance",
},
Tainted: true,
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf("plan: %s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyTaintStr)
if actual != expected {
t.Fatalf("bad:\n%s", actual)
}
if destroyCount != 1 {
t.Fatalf("Expected 1 destroy, got %d", destroyCount)
}
}
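// TestContext2Apply_taintDep verifies that replacing a tainted resource also
// updates the resources that depend on it.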
func TestContext2Apply_taintDep(t *testing.T) {
m := testModule(t, "apply-taint-dep")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "baz",
Attributes: map[string]string{
"num": "2",
"type": "aws_instance",
},
Tainted: true,
},
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "baz",
"num": "2",
"type": "aws_instance",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf("plan: %s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyTaintDepStr)
if actual != expected {
t.Fatalf("bad:\n%s", actual)
}
}
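// TestContext2Apply_taintDepRequiresNew is like TestContext2Apply_taintDep,
// except the dependent resource must itself be replaced because the changed
// attribute forces a new instance.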
func TestContext2Apply_taintDepRequiresNew(t *testing.T) {
m := testModule(t, "apply-taint-dep-requires-new")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "baz",
Attributes: map[string]string{
"num": "2",
"type": "aws_instance",
},
Tainted: true,
},
},
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
Attributes: map[string]string{
"foo": "baz",
"num": "2",
"type": "aws_instance",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf("plan: %s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyTaintDepRequireNewStr)
if actual != expected {
t.Fatalf("bad:\n%s", actual)
}
}
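// TestContext2Apply_targeted verifies that only the targeted resource is
// created when resource targeting is used.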
func TestContext2Apply_targeted(t *testing.T) {
m := testModule(t, "apply-targeted")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("expected 1 resource, got: %#v", mod.Resources)
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
}
func TestContext2Apply_targetedCount(t *testing.T) {
m := testModule(t, "apply-targeted-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo.0:
ID = foo
provider = provider.aws
aws_instance.foo.1:
ID = foo
provider = provider.aws
aws_instance.foo.2:
ID = foo
provider = provider.aws
`)
}
func TestContext2Apply_targetedCountIndex(t *testing.T) {
m := testModule(t, "apply-targeted-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.ResourceInstance(
addrs.ManagedResourceMode, "aws_instance", "foo", addrs.IntKey(1),
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo.1:
ID = foo
provider = provider.aws
`)
}
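// TestContext2Apply_targetedDestroy verifies that a targeted destroy removes
// only the targeted resource from state, leaving the others untouched.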
func TestContext2Apply_targetedDestroy(t *testing.T) {
m := testModule(t, "apply-targeted")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar": resourceState("aws_instance", "i-abc123"),
},
},
},
}),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("expected 1 resource, got: %#v", mod.Resources)
}
checkStateString(t, state, `
aws_instance.bar:
ID = i-abc123
provider = provider.aws
`)
}
func TestContext2Apply_destroyProvisionerWithLocals(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-locals")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.ApplyFn = func(_ *InstanceState, rc *ResourceConfig) error {
cmd, ok := rc.Get("command")
if !ok || cmd != "local" {
return fmt.Errorf("provisioner got %v:%s", ok, cmd)
}
return nil
}
pr.GetSchemaResponse = provisioners.GetSchemaResponse{
Provisioner: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"command": {
Type: cty.String,
Required: true,
},
"when": {
Type: cty.String,
Optional: true,
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "1234"),
},
},
},
}),
Destroy: true,
// the test works without targeting, but this also tests that the local
// node isn't inadvertently pruned because of the wrong evaluation
// order.
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if !pr.ProvisionResourceCalled {
t.Fatal("provisioner not called")
}
}
// this also tests a local value in the config referencing a resource that
// wasn't in the state during destroy.
func TestContext2Apply_destroyProvisionerWithMultipleLocals(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-multiple-locals")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.GetSchemaResponse = provisioners.GetSchemaResponse{
Provisioner: &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"id": {
Type: cty.String,
Required: true,
},
"command": {
Type: cty.String,
Required: true,
},
"when": {
Type: cty.String,
Optional: true,
},
},
},
}
pr.ApplyFn = func(is *InstanceState, rc *ResourceConfig) error {
cmd, ok := rc.Get("command")
if !ok {
return errors.New("no command in provisioner")
}
id, ok := rc.Get("id")
if !ok {
return errors.New("no id in provisioner")
}
switch id {
case "1234":
if cmd != "local" {
return fmt.Errorf("provisioner %q got:%q", is.ID, cmd)
}
case "3456":
if cmd != "1234" {
return fmt.Errorf("provisioner %q got:%q", is.ID, cmd)
}
default:
t.Fatal("unknown instance")
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "1234"),
"aws_instance.bar": resourceState("aws_instance", "3456"),
},
},
},
}),
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatal(diags.Err())
}
if !pr.ProvisionResourceCalled {
t.Fatal("provisioner not called")
}
}
func TestContext2Apply_destroyProvisionerWithOutput(t *testing.T) {
m := testModule(t, "apply-provisioner-destroy-outputs")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
pr := testProvisioner()
pr.ApplyFn = func(is *InstanceState, rc *ResourceConfig) error {
cmd, ok := rc.Get("command")
if !ok || cmd != "3" {
return fmt.Errorf("provisioner for %s got %v:%s", is.ID, ok, cmd)
}
return nil
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Provisioners: map[string]ProvisionerFactory{
"shell": testProvisionerFuncFixed(pr),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "1"),
},
Outputs: map[string]*OutputState{
"value": {
Type: "string",
Value: "3",
},
},
},
&ModuleState{
Path: []string{"root", "mod"},
Resources: map[string]*ResourceState{
"aws_instance.baz": resourceState("aws_instance", "3"),
},
// state needs to be properly initialized
Outputs: map[string]*OutputState{},
},
&ModuleState{
Path: []string{"root", "mod2"},
Resources: map[string]*ResourceState{
"aws_instance.bar": resourceState("aws_instance", "2"),
},
},
},
}),
Destroy: true,
// targeting the source of the value used by all resources should still
// destroy them all.
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("mod", addrs.NoKey).Resource(
addrs.ManagedResourceMode, "aws_instance", "baz",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatal(diags.Err())
}
if !pr.ProvisionResourceCalled {
t.Fatal("provisioner not called")
}
// confirm all outputs were removed too
for _, mod := range state.Modules {
if len(mod.OutputValues) > 0 {
t.Fatalf("output left in module state: %#v\n", mod)
}
}
}
func TestContext2Apply_targetedDestroyCountDeps(t *testing.T) {
m := testModule(t, "apply-destroy-targeted-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar": resourceState("aws_instance", "i-abc123"),
},
},
},
}),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `<no state>`)
}
// https://github.com/hashicorp/terraform/issues/4462
func TestContext2Apply_targetedDestroyModule(t *testing.T) {
m := testModule(t, "apply-targeted-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar": resourceState("aws_instance", "i-abc123"),
},
},
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.foo": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar": resourceState("aws_instance", "i-abc123"),
},
},
},
}),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey).Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar:
ID = i-abc123
provider = provider.aws
aws_instance.foo:
ID = i-bcd345
provider = provider.aws
module.child:
aws_instance.bar:
ID = i-abc123
provider = provider.aws
`)
}
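// TestContext2Apply_targetedDestroyCountIndex verifies that targeting
// specific count indexes for destroy removes only those instances.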
func TestContext2Apply_targetedDestroyCountIndex(t *testing.T) {
m := testModule(t, "apply-targeted-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": resourceState("aws_instance", "i-bcd345"),
"aws_instance.foo.1": resourceState("aws_instance", "i-bcd345"),
"aws_instance.foo.2": resourceState("aws_instance", "i-bcd345"),
"aws_instance.bar.0": resourceState("aws_instance", "i-abc123"),
"aws_instance.bar.1": resourceState("aws_instance", "i-abc123"),
"aws_instance.bar.2": resourceState("aws_instance", "i-abc123"),
},
},
},
}),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.ResourceInstance(
addrs.ManagedResourceMode, "aws_instance", "foo", addrs.IntKey(2),
),
addrs.RootModuleInstance.ResourceInstance(
addrs.ManagedResourceMode, "aws_instance", "bar", addrs.IntKey(1),
),
},
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.bar.0:
ID = i-abc123
provider = provider.aws
aws_instance.bar.2:
ID = i-abc123
provider = provider.aws
aws_instance.foo.0:
ID = i-bcd345
provider = provider.aws
aws_instance.foo.1:
ID = i-bcd345
provider = provider.aws
`)
}
func TestContext2Apply_targetedModule(t *testing.T) {
m := testModule(t, "apply-targeted-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.Module(addrs.RootModuleInstance.Child("child", addrs.NoKey))
if mod == nil {
t.Fatalf("no child module found in the state!\n\n%#v", state)
}
if len(mod.Resources) != 2 {
t.Fatalf("expected 2 resources, got: %#v", mod.Resources)
}
checkStateString(t, state, `
<no state>
module.child:
aws_instance.bar:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
}
// GH-1858
func TestContext2Apply_targetedModuleDep(t *testing.T) {
m := testModule(t, "apply-targeted-module-dep")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
t.Logf("Diff: %s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, `
aws_instance.foo:
ID = foo
provider = provider.aws
foo = foo
type = aws_instance
Dependencies:
module.child.aws_instance.mod
module.child:
aws_instance.mod:
ID = foo
provider = provider.aws
Outputs:
output = foo
`)
}
// GH-10911 untargeted outputs should not be in the graph, and therefore
// not execute.
func TestContext2Apply_targetedModuleUnrelatedOutputs(t *testing.T) {
m := testModule(t, "apply-targeted-module-unrelated-outputs")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child2", addrs.NoKey),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
{
Path: []string{"root"},
Outputs: map[string]*OutputState{},
Resources: map[string]*ResourceState{},
},
{
Path: []string{"root", "child1"},
Outputs: map[string]*OutputState{
"instance_id": {
Type: "string",
Value: "foo-bar-baz",
},
},
Resources: map[string]*ResourceState{},
},
{
Path: []string{"root", "child2"},
Outputs: map[string]*OutputState{},
Resources: map[string]*ResourceState{},
},
},
}),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
// - module.child1's instance_id output is dropped because we don't preserve
// non-root module outputs between runs (they can be recalculated from config)
// - module.child2's instance_id is updated because its dependency is updated
	// - child2_id is updated because of its transitive dependency via module.child2
checkStateString(t, state, `
<no state>
Outputs:
child2_id = foo
module.child2:
aws_instance.foo:
ID = foo
provider = provider.aws
Outputs:
instance_id = foo
`)
}
func TestContext2Apply_targetedModuleResource(t *testing.T) {
m := testModule(t, "apply-targeted-module-resource")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey).Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.Module(addrs.RootModuleInstance.Child("child", addrs.NoKey))
if mod == nil || len(mod.Resources) != 1 {
t.Fatalf("expected 1 resource, got: %#v", mod)
}
checkStateString(t, state, `
<no state>
module.child:
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
}
func TestContext2Apply_targetedResourceOrphanModule(t *testing.T) {
m := testModule(t, "apply-targeted-resource-orphan-module")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
// Create a state with an orphan module
state := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.bar": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "foo",
),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
if _, diags := ctx.Apply(); diags.HasErrors() {
t.Fatalf("apply errors: %s", diags.Err())
}
}
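// TestContext2Apply_unknownAttribute verifies that apply fails when a
// planned attribute value is still unknown after apply.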
func TestContext2Apply_unknownAttribute(t *testing.T) {
m := testModule(t, "apply-unknown")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Error("should error, because attribute 'unknown' is still unknown after apply")
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyUnknownAttrStr)
if actual != expected {
t.Fatalf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_unknownAttributeInterpolate(t *testing.T) {
m := testModule(t, "apply-unknown-interpolate")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
	if _, diags := ctx.Plan(); !diags.HasErrors() {
		t.Fatal("should error")
	}
}
func TestContext2Apply_vars(t *testing.T) {
fixture := contextFixtureApplyVars(t)
opts := fixture.ContextOpts()
opts.Variables = InputValues{
"foo": &InputValue{
Value: cty.StringVal("us-east-1"),
SourceType: ValueFromCaller,
},
"test_list": &InputValue{
Value: cty.ListVal([]cty.Value{
cty.StringVal("Hello"),
cty.StringVal("World"),
}),
SourceType: ValueFromCaller,
},
"test_map": &InputValue{
Value: cty.MapVal(map[string]cty.Value{
"Hello": cty.StringVal("World"),
"Foo": cty.StringVal("Bar"),
"Baz": cty.StringVal("Foo"),
}),
SourceType: ValueFromCaller,
},
"amis": &InputValue{
Value: cty.MapVal(map[string]cty.Value{
"us-east-1": cty.StringVal("override"),
}),
SourceType: ValueFromCaller,
},
}
ctx := testContext2(t, opts)
diags := ctx.Validate()
if len(diags) != 0 {
t.Fatalf("bad: %s", diags.ErrWithWarnings())
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(testTerraformApplyVarsStr)
if got != want {
t.Errorf("wrong result\n\ngot:\n%s\n\nwant:\n%s", got, want)
}
}
func TestContext2Apply_varsEnv(t *testing.T) {
fixture := contextFixtureApplyVarsEnv(t)
opts := fixture.ContextOpts()
opts.Variables = InputValues{
"string": &InputValue{
Value: cty.StringVal("baz"),
SourceType: ValueFromEnvVar,
},
"list": &InputValue{
Value: cty.ListVal([]cty.Value{
cty.StringVal("Hello"),
cty.StringVal("World"),
}),
SourceType: ValueFromEnvVar,
},
"map": &InputValue{
Value: cty.MapVal(map[string]cty.Value{
"Hello": cty.StringVal("World"),
"Foo": cty.StringVal("Bar"),
"Baz": cty.StringVal("Foo"),
}),
SourceType: ValueFromEnvVar,
},
}
ctx := testContext2(t, opts)
diags := ctx.Validate()
if len(diags) != 0 {
t.Fatalf("bad: %s", diags.ErrWithWarnings())
}
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(testTerraformApplyVarsEnvStr)
if actual != expected {
t.Errorf("wrong result\n\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
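// TestContext2Apply_createBefore_depends verifies the ordering of a
// create_before_destroy replacement: the new instance is created first,
// dependent resources are updated next, and the old instance is destroyed
// last.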
func TestContext2Apply_createBefore_depends(t *testing.T) {
m := testModule(t, "apply-depends-create-before")
h := new(HookRecordApplyOrder)
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "web",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"bar","require_new":"ami-old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "lb",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"baz","instance":"bar"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "web",
},
Module: addrs.RootModuleInstance,
},
},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if p, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
} else {
t.Logf("plan:\n%s", legacyDiffComparisonString(p.Changes))
}
h.Active = true
state, diags := ctx.Apply()
if diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("apply failed")
}
mod := state.RootModule()
if len(mod.Resources) < 2 {
t.Logf("state after apply:\n%s", state.String())
t.Fatalf("only %d resources in root module; want at least 2", len(mod.Resources))
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(testTerraformApplyDependsCreateBeforeStr)
if got != want {
t.Fatalf("wrong final state\ngot:\n%s\n\nwant:\n%s", got, want)
}
// Test that things were managed _in the right order_
order := h.States
diffs := h.Diffs
if !order[0].IsNull() || diffs[0].Action == plans.Delete {
t.Fatalf("should create new instance first: %#v", order)
}
if order[1].GetAttr("id").AsString() != "baz" {
t.Fatalf("update must happen after create: %#v", order[1])
}
if order[2].GetAttr("id").AsString() != "bar" || diffs[2].Action != plans.Delete {
t.Fatalf("destroy must happen after update: %#v", order[2])
}
}
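// TestContext2Apply_singleDestroy verifies that a create_before_destroy
// replacement invokes the provider exactly three times: create the
// replacement, update the dependent resource, then destroy the old instance.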
func TestContext2Apply_singleDestroy(t *testing.T) {
m := testModule(t, "apply-depends-create-before")
h := new(HookRecordApplyOrder)
p := testProvider("aws")
invokeCount := 0
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
invokeCount++
switch invokeCount {
case 1:
if d.Destroy {
t.Fatalf("should not destroy")
}
if s.ID != "" {
t.Fatalf("should not have ID")
}
case 2:
if d.Destroy {
t.Fatalf("should not destroy")
}
if s.ID != "baz" {
t.Fatalf("should have id")
}
case 3:
if !d.Destroy {
t.Fatalf("should destroy")
}
if s.ID == "" {
t.Fatalf("should have ID")
}
default:
t.Fatalf("bad invoke count %d", invokeCount)
}
return testApplyFn(info, s, d)
}
p.DiffFn = testDiffFn
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "web",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"bar","require_new":"ami-old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "lb",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"baz","instance":"bar"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "web",
},
Module: addrs.RootModuleInstance,
},
},
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
ctx := testContext2(t, &ContextOpts{
Config: m,
Hooks: []Hook{h},
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
h.Active = true
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
if invokeCount != 3 {
t.Fatalf("bad: %d", invokeCount)
}
}
// GH-7824
func TestContext2Apply_issue7824(t *testing.T) {
p := testProvider("template")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"template_file": {
Attributes: map[string]*configschema.Attribute{
"template": {Type: cty.String, Optional: true},
"__template_requires_new": {Type: cty.Bool, Optional: true},
},
},
},
}
m, snap := testModuleWithSnapshot(t, "issue-7824")
// Apply cleanly step 0
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Write / Read plan to simulate running it through a Plan file
ctxOpts, err := contextOptsForPlanViaFile(snap, ctx.State(), plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
}
// This deals with the situation where a splat expression is used referring
// to another resource whose count is non-constant.
func TestContext2Apply_issue5254(t *testing.T) {
// Create a provider. We use "template" here just to match the repro
// we got from the issue itself.
p := testProvider("template")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"template_file": {
Attributes: map[string]*configschema.Attribute{
"template": {Type: cty.String, Optional: true},
"__template_requires_new": {Type: cty.Bool, Optional: true},
"id": {Type: cty.String, Computed: true},
"type": {Type: cty.String, Computed: true},
},
},
},
}
// Apply cleanly step 0
ctx := testContext2(t, &ContextOpts{
Config: testModule(t, "issue-5254/step-0"),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
m, snap := testModuleWithSnapshot(t, "issue-5254/step-1")
// Application success. Now make the modification and store a plan
ctx = testContext2(t, &ContextOpts{
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
),
})
plan, diags = ctx.Plan()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Write / Read plan to simulate running it through a Plan file
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"template": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
template_file.child:
ID = foo
provider = provider.template
__template_requires_new = true
template = Hi
type = template_file
Dependencies:
template_file.parent
template_file.parent.0:
ID = foo
provider = provider.template
template = Hi
type = template_file
`)
if actual != expected {
t.Fatalf("wrong final state\ngot:\n%s\n\nwant:\n%s", actual, expected)
}
}
func TestContext2Apply_targetedWithTaintedInState(t *testing.T) {
p := testProvider("aws")
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
m, snap := testModuleWithSnapshot(t, "apply-tainted-targets")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Resource(
addrs.ManagedResourceMode, "aws_instance", "iambeingadded",
),
},
State: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.ifailedprovisioners": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "ifailedprovisioners",
Tainted: true,
},
},
},
},
},
}),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Write / Read plan to simulate running it through a Plan file
ctxOpts, err := contextOptsForPlanViaFile(snap, ctx.State(), plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(`
aws_instance.iambeingadded:
ID = foo
provider = provider.aws
aws_instance.ifailedprovisioners: (tainted)
ID = ifailedprovisioners
provider = provider.aws
`)
if actual != expected {
t.Fatalf("expected state: \n%s\ngot: \n%s", expected, actual)
}
}
// Higher-level test exposing the bug that is covered more directly by
// TestResource_ignoreChangesRequired.
func TestContext2Apply_ignoreChangesCreate(t *testing.T) {
m := testModule(t, "apply-ignore-changes-create")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
instanceSchema := p.GetSchemaReturn.ResourceTypes["aws_instance"]
instanceSchema.Attributes["required_field"] = &configschema.Attribute{
Type: cty.String,
Required: true,
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if p, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
} else {
		t.Logf("%s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("bad: %s", state)
}
actual := strings.TrimSpace(state.String())
// Expect no changes from original state
expected := strings.TrimSpace(`
aws_instance.foo:
ID = foo
provider = provider.aws
required_field = set
type = aws_instance
`)
if actual != expected {
t.Fatalf("expected:\n%s\ngot:\n%s", expected, actual)
}
}
func TestContext2Apply_ignoreChangesWithDep(t *testing.T) {
m := testModule(t, "apply-ignore-changes-dep")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = func(i *InstanceInfo, s *InstanceState, c *ResourceConfig) (*InstanceDiff, error) {
switch i.Type {
case "aws_instance":
newAmi, _ := c.Get("ami")
return &InstanceDiff{
Attributes: map[string]*ResourceAttrDiff{
"ami": &ResourceAttrDiff{
Old: s.Attributes["ami"],
New: newAmi.(string),
RequiresNew: true,
},
},
}, nil
case "aws_eip":
return testDiffFn(i, s, c)
default:
t.Fatalf("Unexpected type: %s", i.Type)
return nil, nil
}
}
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.foo.0": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "i-abc123",
Attributes: map[string]string{
"ami": "ami-abcd1234",
"id": "i-abc123",
},
},
},
"aws_instance.foo.1": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "i-bcd234",
Attributes: map[string]string{
"ami": "ami-abcd1234",
"id": "i-bcd234",
},
},
},
"aws_eip.foo.0": &ResourceState{
Type: "aws_eip",
Primary: &InstanceState{
ID: "eip-abc123",
Attributes: map[string]string{
"id": "eip-abc123",
"instance": "i-abc123",
},
},
},
"aws_eip.foo.1": &ResourceState{
Type: "aws_eip",
Primary: &InstanceState{
ID: "eip-bcd234",
Attributes: map[string]string{
"id": "eip-bcd234",
"instance": "i-bcd234",
},
},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
})
_, diags := ctx.Plan()
assertNoErrors(t, diags)
state, diags := ctx.Apply()
assertNoErrors(t, diags)
actual := strings.TrimSpace(state.String())
expected := strings.TrimSpace(s.String())
if actual != expected {
t.Fatalf("expected:\n%s\n\ngot:\n%s", expected, actual)
}
}
func TestContext2Apply_ignoreChangesWildcard(t *testing.T) {
m := testModule(t, "apply-ignore-changes-wildcard")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
instanceSchema := p.GetSchemaReturn.ResourceTypes["aws_instance"]
instanceSchema.Attributes["required_field"] = &configschema.Attribute{
Type: cty.String,
Required: true,
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if p, diags := ctx.Plan(); diags.HasErrors() {
logDiagnostics(t, diags)
t.Fatal("plan failed")
} else {
		t.Logf("%s", legacyDiffComparisonString(p.Changes))
}
state, diags := ctx.Apply()
assertNoErrors(t, diags)
mod := state.RootModule()
if len(mod.Resources) != 1 {
t.Fatalf("bad: %s", state)
}
actual := strings.TrimSpace(state.String())
// Expect no changes from original state
expected := strings.TrimSpace(`
aws_instance.foo:
ID = foo
provider = provider.aws
required_field = set
type = aws_instance
`)
if actual != expected {
t.Fatalf("expected:\n%s\ngot:\n%s", expected, actual)
}
}
// https://github.com/hashicorp/terraform/issues/7378
func TestContext2Apply_destroyNestedModuleWithAttrsReferencingResource(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-nested-module-with-attrs")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
var state *states.State
var diags tfdiags.Diagnostics
{
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
// First plan and apply a create operation
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply err: %s", diags.Err())
}
}
{
ctx := testContext2(t, &ContextOpts{
Destroy: true,
Config: m,
State: state,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("destroy plan err: %s", diags.Err())
}
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
)
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("destroy apply err: %s", diags.Err())
}
}
if !state.Empty() {
t.Fatalf("state after apply: %s\nwant empty state", spew.Sdump(state))
}
}
// If a data source explicitly depends on another resource, it's because we need
// that resource to be applied first.
func TestContext2Apply_dataDependsOn(t *testing.T) {
p := testProvider("null")
m := testModule(t, "apply-data-depends-on")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
),
})
	// The apply function below writes to this variable to create a side-effect
	// dependency that isn't visible through the graph, so the ordering relies
	// solely on the configuration providing "depends_on".
provisionerOutput := ""
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
// the side effect of the resource being applied
provisionerOutput = "APPLIED"
return testApplyFn(info, s, d)
}
p.DiffFn = testDiffFn
p.ReadDataSourceFn = func(req providers.ReadDataSourceRequest) providers.ReadDataSourceResponse {
return providers.ReadDataSourceResponse{
State: cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("boop"),
"foo": cty.StringVal(provisionerOutput),
}),
}
}
_, diags := ctx.Refresh()
assertNoErrors(t, diags)
_, diags = ctx.Plan()
assertNoErrors(t, diags)
state, diags := ctx.Apply()
assertNoErrors(t, diags)
root := state.Module(addrs.RootModuleInstance)
is := root.ResourceInstance(addrs.Resource{
Mode: addrs.DataResourceMode,
Type: "null_data_source",
Name: "read",
}.Instance(addrs.NoKey))
if is == nil {
t.Fatal("data resource instance is not present in state; should be")
}
var attrs map[string]interface{}
err := json.Unmarshal(is.Current.AttrsJSON, &attrs)
if err != nil {
t.Fatal(err)
}
actual := attrs["foo"]
expected := "APPLIED"
if actual != expected {
t.Fatalf("bad:\n%s", strings.TrimSpace(state.String()))
}
}
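// TestContext2Apply_terraformWorkspace verifies that terraform.workspace
// resolves to the current workspace name during apply.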
func TestContext2Apply_terraformWorkspace(t *testing.T) {
m := testModule(t, "apply-terraform-workspace")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Meta: &ContextMeta{Env: "foo"},
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
actual := state.RootModule().OutputValues["output"]
expected := cty.StringVal("foo")
if actual == nil || actual.Value != expected {
		t.Fatalf("wrong value\ngot: %#v\nwant: %#v", actual, expected)
}
}
// verify that multiple config references only create a single depends_on entry
func TestContext2Apply_multiRef(t *testing.T) {
m := testModule(t, "apply-multi-ref")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
deps := state.Modules[""].Resources["aws_instance.other"].Instances[addrs.NoKey].Current.Dependencies
if len(deps) != 1 || deps[0].String() != "aws_instance.create" {
t.Fatalf("expected 1 depends_on entry for aws_instance.create, got %q", deps)
}
}
func TestContext2Apply_targetedModuleRecursive(t *testing.T) {
m := testModule(t, "apply-targeted-module-recursive")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
Targets: []addrs.Targetable{
addrs.RootModuleInstance.Child("child", addrs.NoKey),
},
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
mod := state.Module(
addrs.RootModuleInstance.Child("child", addrs.NoKey).Child("subchild", addrs.NoKey),
)
if mod == nil {
t.Fatalf("no subchild module found in the state!\n\n%#v", state)
}
if len(mod.Resources) != 1 {
t.Fatalf("expected 1 resources, got: %#v", mod.Resources)
}
checkStateString(t, state, `
<no state>
module.child.subchild:
aws_instance.foo:
ID = foo
provider = provider.aws
num = 2
type = aws_instance
`)
}
func TestContext2Apply_localVal(t *testing.T) {
m := testModule(t, "apply-local-val")
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("error during plan: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("error during apply: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`
<no state>
Outputs:
result_1 = hello
result_3 = hello world
module.child:
<no state>
Outputs:
result = hello
`)
if got != want {
t.Fatalf("wrong final state\ngot:\n%s\nwant:\n%s", got, want)
}
}
func TestContext2Apply_destroyWithLocals(t *testing.T) {
m := testModule(t, "apply-destroy-with-locals")
p := testProvider("aws")
p.ApplyFn = testApplyFn
	p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Outputs: map[string]*OutputState{
"name": &OutputState{
Type: "string",
Value: "test-bar",
},
},
Resources: map[string]*ResourceState{
"aws_instance.foo": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
// FIXME: id should only exist in one place
Attributes: map[string]string{
"id": "foo",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("error during apply: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace(`<no state>`)
if got != want {
t.Fatalf("wrong final state\ngot:\n%s\nwant:\n%s", got, want)
}
}
func TestContext2Apply_providerWithLocals(t *testing.T) {
m := testModule(t, "provider-with-locals")
p := testProvider("aws")
providerRegion := ""
// this should not be overridden during destroy
p.ConfigureFn = func(c *ResourceConfig) error {
if r, ok := c.Get("region"); ok {
providerRegion = r.(string)
}
return nil
}
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
ctx = testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Destroy: true,
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
state, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
if state.HasResources() {
t.Fatal("expected no state, got:", state)
}
if providerRegion != "bar" {
t.Fatalf("expected region %q, got: %q", "bar", providerRegion)
}
}
func TestContext2Apply_destroyWithProviders(t *testing.T) {
m := testModule(t, "destroy-module-with-provider")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
},
&ModuleState{
Path: []string{"root", "child"},
},
&ModuleState{
Path: []string{"root", "mod", "removed"},
Resources: map[string]*ResourceState{
"aws_instance.child": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
// this provider doesn't exist
Provider: "provider.aws.baz",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: s,
Destroy: true,
})
// test that we can't destroy if the provider is missing
	if _, diags := ctx.Plan(); !diags.HasErrors() {
t.Fatal("expected plan error, provider.aws.baz doesn't exist")
}
// correct the state
s.Modules["module.mod.module.removed"].Resources["aws_instance.child"].ProviderConfig = addrs.ProviderConfig{
Type: "aws",
Alias: "bar",
}.Absolute(addrs.RootModuleInstance)
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatal(diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("error during apply: %s", diags.Err())
}
got := strings.TrimSpace(state.String())
want := strings.TrimSpace("<no state>")
if got != want {
t.Fatalf("wrong final state\ngot:\n%s\nwant:\n%s", got, want)
}
}
func TestContext2Apply_providersFromState(t *testing.T) {
m := configs.NewEmptyConfig()
p := testProvider("aws")
p.DiffFn = testDiffFn
for _, tc := range []struct {
name string
state *states.State
output string
err bool
}{
{
name: "add implicit provider",
state: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.a": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws",
},
},
},
},
}),
err: false,
output: "<no state>",
},
// an aliased provider must be in the config to remove a resource
{
name: "add aliased provider",
state: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root"},
Resources: map[string]*ResourceState{
"aws_instance.a": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "provider.aws.bar",
},
},
},
},
}),
err: true,
},
// a provider in a module implies some sort of config, so this isn't
// allowed even without an alias
{
name: "add unaliased module provider",
state: MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: []string{"root", "child"},
Resources: map[string]*ResourceState{
"aws_instance.a": &ResourceState{
Type: "aws_instance",
Primary: &InstanceState{
ID: "bar",
},
Provider: "module.child.provider.aws",
},
},
},
},
}),
err: true,
},
} {
t.Run(tc.name, func(t *testing.T) {
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: tc.state,
})
_, diags := ctx.Plan()
			if tc.err {
				if !diags.HasErrors() {
					t.Fatal("expected error")
				}
				return
			}
if !tc.err && diags.HasErrors() {
t.Fatal(diags.Err())
}
state, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
checkStateString(t, state, "<no state>")
})
}
}
func TestContext2Apply_plannedInterpolatedCount(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-interpolated-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.test": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: s,
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan failed: %s", diags.Err())
}
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, ctx.State(), plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providerResolver
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Applying the plan should now succeed
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply failed: %s", diags.Err())
}
}
func TestContext2Apply_plannedDestroyInterpolatedCount(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "plan-destroy-interpolated-count")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.a.0": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
Provider: "provider.aws",
},
"aws_instance.a.1": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
Provider: "provider.aws",
},
},
Outputs: map[string]*OutputState{
"out": {
Type: "list",
Value: []string{"foo", "foo"},
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: s,
Destroy: true,
})
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("plan failed: %s", diags.Err())
}
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, ctx.State(), plan)
if err != nil {
t.Fatalf("failed to round-trip through planfile: %s", err)
}
ctxOpts.ProviderResolver = providerResolver
ctxOpts.Destroy = true
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("err: %s", diags.Err())
}
// Applying the plan should now succeed
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("apply failed: %s", diags.Err())
}
}
func TestContext2Apply_scaleInMultivarRef(t *testing.T) {
m := testModule(t, "apply-resource-scale-in")
p := testProvider("aws")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
)
s := MustShimLegacyState(&State{
Modules: []*ModuleState{
&ModuleState{
Path: rootModulePath,
Resources: map[string]*ResourceState{
"aws_instance.one": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
},
Provider: "provider.aws",
},
"aws_instance.two": {
Type: "aws_instance",
Primary: &InstanceState{
ID: "foo",
Attributes: map[string]string{
"value": "foo",
},
},
Provider: "provider.aws",
},
},
},
},
})
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: s,
Variables: InputValues{
"instance_count": {
Value: cty.NumberIntVal(0),
SourceType: ValueFromCaller,
},
},
})
_, diags := ctx.Plan()
assertNoErrors(t, diags)
// Applying the plan should now succeed
_, diags = ctx.Apply()
assertNoErrors(t, diags)
}
func TestContext2Apply_inconsistentWithPlan(t *testing.T) {
m := testModule(t, "apply-inconsistent-with-plan")
p := testProvider("test")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test": {
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Computed: true},
},
},
},
}
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return providers.PlanResourceChangeResponse{
PlannedState: cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("before"),
}),
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
return providers.ApplyResourceChangeResponse{
NewState: cty.ObjectVal(map[string]cty.Value{
// This is intentionally incorrect: because id was fixed at "before"
// during plan, it must not change during apply.
"id": cty.StringVal("after"),
}),
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
_, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatalf("apply succeeded; want error")
}
if got, want := diags.Err().Error(), "Provider produced inconsistent result after apply"; !strings.Contains(got, want) {
t.Fatalf("wrong error\ngot: %s\nshould contain: %s", got, want)
}
}
// Issue 19908 was about retaining an existing object in the state when an
// update to it fails and the provider does not return a partially-updated
// value for it. Previously we were incorrectly removing it from the state
// in that case, but instead it should be retained so the update can be
// retried.
func TestContext2Apply_issue19908(t *testing.T) {
m := testModule(t, "apply-issue19908")
p := testProvider("test")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test": {
Attributes: map[string]*configschema.Attribute{
"baz": {Type: cty.String, Required: true},
},
},
},
}
p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse {
return providers.PlanResourceChangeResponse{
PlannedState: req.ProposedNewState,
}
}
p.ApplyResourceChangeFn = func(req providers.ApplyResourceChangeRequest) providers.ApplyResourceChangeResponse {
var diags tfdiags.Diagnostics
diags = diags.Append(fmt.Errorf("update failed"))
return providers.ApplyResourceChangeResponse{
Diagnostics: diags,
}
}
ctx := testContext2(t, &ContextOpts{
Config: m,
State: states.BuildState(func(s *states.SyncState) {
s.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test",
Name: "foo",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance),
&states.ResourceInstanceObjectSrc{
AttrsJSON: []byte(`{"baz":"old"}`),
Status: states.ObjectReady,
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
}),
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
if _, diags := ctx.Plan(); diags.HasErrors() {
t.Fatalf("plan errors: %s", diags.Err())
}
state, diags := ctx.Apply()
if !diags.HasErrors() {
t.Fatalf("apply succeeded; want error")
}
if got, want := diags.Err().Error(), "update failed"; !strings.Contains(got, want) {
t.Fatalf("wrong error\ngot: %s\nshould contain: %s", got, want)
}
mod := state.RootModule()
rs := mod.Resources["test.foo"]
if rs == nil {
t.Fatalf("test.foo not in state after apply, but should be")
}
is := rs.Instances[addrs.NoKey]
if is == nil {
t.Fatalf("test.foo not in state after apply, but should be")
}
obj := is.Current
if obj == nil {
t.Fatalf("test.foo has no current object in state after apply, but should do")
}
if got, want := obj.Status, states.ObjectReady; got != want {
t.Errorf("test.foo has wrong status %s after apply; want %s", got, want)
}
if got, want := obj.AttrsJSON, []byte(`"old"`); !bytes.Contains(got, want) {
t.Errorf("test.foo attributes JSON doesn't contain %s after apply\ngot: %s", want, got)
}
}
func TestContext2Apply_invalidIndexRef(t *testing.T) {
p := testProvider("test")
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"test_instance": {
Attributes: map[string]*configschema.Attribute{
"value": {Type: cty.String, Optional: true, Computed: true},
},
},
},
}
p.DiffFn = testDiffFn
m := testModule(t, "apply-invalid-index")
c := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
),
})
diags := c.Validate()
if diags.HasErrors() {
t.Fatalf("unexpected validation failure: %s", diags.Err())
}
wantErr := `The given key does not identify an element in this collection value`
_, diags = c.Plan()
if !diags.HasErrors() {
t.Fatalf("plan succeeded; want error")
}
gotErr := diags.Err().Error()
if !strings.Contains(gotErr, wantErr) {
t.Fatalf("missing expected error\ngot: %s\n\nwant: error containing %q", gotErr, wantErr)
}
}
func TestContext2Apply_moduleReplaceCycle(t *testing.T) {
for _, mode := range []string{"normal", "cbd"} {
var m *configs.Config
switch mode {
case "normal":
m = testModule(t, "apply-module-replace-cycle")
case "cbd":
m = testModule(t, "apply-module-replace-cycle-cbd")
}
p := testProvider("aws")
p.DiffFn = testDiffFn
p.ApplyFn = testApplyFn
instanceSchema := &configschema.Block{
Attributes: map[string]*configschema.Attribute{
"id": {Type: cty.String, Computed: true},
"require_new": {Type: cty.String, Optional: true},
},
}
p.GetSchemaReturn = &ProviderSchema{
ResourceTypes: map[string]*configschema.Block{
"aws_instance": instanceSchema,
},
}
state := states.NewState()
modA := state.EnsureModule(addrs.RootModuleInstance.Child("a", addrs.NoKey))
modA.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "a",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"a","require_new":"old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
modB := state.EnsureModule(addrs.RootModuleInstance.Child("b", addrs.NoKey))
modB.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "b",
}.Instance(addrs.IntKey(0)),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"b","require_new":"old"}`),
},
addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
)
aBefore, _ := plans.NewDynamicValue(
cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("a"),
"require_new": cty.StringVal("old"),
}), instanceSchema.ImpliedType())
aAfter, _ := plans.NewDynamicValue(
cty.ObjectVal(map[string]cty.Value{
"id": cty.UnknownVal(cty.String),
"require_new": cty.StringVal("new"),
}), instanceSchema.ImpliedType())
bBefore, _ := plans.NewDynamicValue(
cty.ObjectVal(map[string]cty.Value{
"id": cty.StringVal("b"),
"require_new": cty.StringVal("old"),
}), instanceSchema.ImpliedType())
bAfter, _ := plans.NewDynamicValue(
cty.ObjectVal(map[string]cty.Value{
"id": cty.UnknownVal(cty.String),
"require_new": cty.UnknownVal(cty.String),
}), instanceSchema.ImpliedType())
var aAction plans.Action
switch mode {
case "normal":
aAction = plans.DeleteThenCreate
case "cbd":
aAction = plans.CreateThenDelete
}
changes := &plans.Changes{
Resources: []*plans.ResourceInstanceChangeSrc{
{
Addr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "a",
}.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance.Child("a", addrs.NoKey)),
ProviderAddr: addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
ChangeSrc: plans.ChangeSrc{
Action: aAction,
Before: aBefore,
After: aAfter,
},
},
{
Addr: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "aws_instance",
Name: "b",
}.Instance(addrs.IntKey(0)).Absolute(addrs.RootModuleInstance.Child("b", addrs.NoKey)),
ProviderAddr: addrs.ProviderConfig{
Type: "aws",
}.Absolute(addrs.RootModuleInstance),
ChangeSrc: plans.ChangeSrc{
Action: plans.DeleteThenCreate,
Before: bBefore,
After: bAfter,
},
},
},
}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providers.ResolverFixed(
map[string]providers.Factory{
"aws": testProviderFuncFixed(p),
},
),
State: state,
Changes: changes,
})
t.Run(mode, func(t *testing.T) {
_, diags := ctx.Apply()
if diags.HasErrors() {
t.Fatal(diags.Err())
}
})
}
}
func TestContext2Apply_destroyDataCycle(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-destroy-data-cycle")
p := testProvider("null")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "null_resource",
Name: "a",
}.Instance(addrs.IntKey(0)),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"a"}`),
},
addrs.ProviderConfig{
Type: "null",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.DataResourceMode,
Type: "null_data_source",
Name: "d",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"data"}`),
},
addrs.ProviderConfig{
Type: "null",
}.Absolute(addrs.RootModuleInstance),
)
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"null": testProviderFuncFixed(p),
},
)
hook := &testHook{}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: state,
Destroy: true,
Hooks: []Hook{hook},
})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("diags: %s", diags.Err())
	}
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatal(err)
}
ctxOpts.ProviderResolver = providerResolver
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("failed to create context for plan: %s", diags.Err())
}
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
}
func TestContext2Apply_taintedDestroyFailure(t *testing.T) {
m := testModule(t, "apply-destroy-tainted")
p := testProvider("test")
p.DiffFn = testDiffFn
p.ApplyFn = func(info *InstanceInfo, s *InstanceState, d *InstanceDiff) (*InstanceState, error) {
// All destroys fail.
// c will also fail to create, meaning the existing tainted instance
		// becomes deposed, and is then promoted back to current.
// only C has a foo attribute
attr := d.Attributes["foo"]
if d.Destroy || (attr != nil && attr.New == "c") {
return nil, errors.New("failure")
}
return testApplyFn(info, s, d)
}
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "a",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectTainted,
AttrsJSON: []byte(`{"id":"a","foo":"a"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "b",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectTainted,
AttrsJSON: []byte(`{"id":"b","foo":"b"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectTainted,
AttrsJSON: []byte(`{"id":"c","foo":"old"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
)
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: state,
Hooks: []Hook{&testHook{}},
})
	_, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("diags: %s", diags.Err())
	}
state, diags = ctx.Apply()
if !diags.HasErrors() {
t.Fatal("expected error")
}
root = state.Module(addrs.RootModuleInstance)
// the instance that failed to destroy should remain tainted
a := root.ResourceInstance(addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "a",
}.Instance(addrs.NoKey))
if a.Current.Status != states.ObjectTainted {
t.Fatal("test_instance.a should be tainted")
}
// b is create_before_destroy, and the destroy failed, so there should be 1
// deposed instance.
b := root.ResourceInstance(addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "b",
}.Instance(addrs.NoKey))
if b.Current.Status != states.ObjectReady {
t.Fatal("test_instance.b should be Ready")
}
if len(b.Deposed) != 1 {
t.Fatal("test_instance.b failed to keep deposed instance")
}
	// the deposed c instance should be promoted back to Current, and remain
// tainted
c := root.ResourceInstance(addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
}.Instance(addrs.NoKey))
if c.Current.Status != states.ObjectTainted {
t.Fatal("test_instance.c should be tainted")
}
if len(c.Deposed) != 0 {
t.Fatal("test_instance.c should have no deposed instances")
}
if string(c.Current.AttrsJSON) != `{"id":"c","foo":"old"}` {
t.Fatalf("unexpected attrs for c: %q\n", c.Current.AttrsJSON)
}
}
func TestContext2Apply_cbdCycle(t *testing.T) {
m, snap := testModuleWithSnapshot(t, "apply-cbd-cycle")
p := testProvider("test")
p.ApplyFn = testApplyFn
p.DiffFn = testDiffFn
state := states.NewState()
root := state.EnsureModule(addrs.RootModuleInstance)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "a",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"a","require_new":"old","foo":"b"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "b",
},
Module: addrs.RootModuleInstance,
},
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
},
Module: addrs.RootModuleInstance,
},
},
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "b",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"b","require_new":"old","foo":"c"}`),
Dependencies: []addrs.AbsResource{
addrs.AbsResource{
Resource: addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
},
Module: addrs.RootModuleInstance,
},
},
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
root.SetResourceInstanceCurrent(
addrs.Resource{
Mode: addrs.ManagedResourceMode,
Type: "test_instance",
Name: "c",
}.Instance(addrs.NoKey),
&states.ResourceInstanceObjectSrc{
Status: states.ObjectReady,
AttrsJSON: []byte(`{"id":"c","require_new":"old"}`),
},
addrs.ProviderConfig{
Type: "test",
}.Absolute(addrs.RootModuleInstance),
)
providerResolver := providers.ResolverFixed(
map[string]providers.Factory{
"test": testProviderFuncFixed(p),
},
)
hook := &testHook{}
ctx := testContext2(t, &ContextOpts{
Config: m,
ProviderResolver: providerResolver,
State: state,
Hooks: []Hook{hook},
})
	plan, diags := ctx.Plan()
	if diags.HasErrors() {
		t.Fatalf("diags: %s", diags.Err())
	}
// We'll marshal and unmarshal the plan here, to ensure that we have
// a clean new context as would be created if we separately ran
// terraform plan -out=tfplan && terraform apply tfplan
ctxOpts, err := contextOptsForPlanViaFile(snap, state, plan)
if err != nil {
t.Fatal(err)
}
ctxOpts.ProviderResolver = providerResolver
ctx, diags = NewContext(ctxOpts)
if diags.HasErrors() {
t.Fatalf("failed to create context for plan: %s", diags.Err())
}
_, diags = ctx.Apply()
if diags.HasErrors() {
t.Fatalf("diags: %s", diags.Err())
}
}
| mpl-2.0 |
surrsurus/edgequest | doc/libc/unix/notbsd/linux/other/b64/x86_64/F_UNLCK.v.html | 325 | <!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="refresh" content="0;URL=constant.F_UNLCK.html">
</head>
<body>
<p>Redirecting to <a href="constant.F_UNLCK.html">constant.F_UNLCK.html</a>...</p>
<script>location.replace("constant.F_UNLCK.html" + location.search + location.hash);</script>
</body>
</html> | mpl-2.0 |
kim/amazonka | amazonka-ec2/gen/Network/AWS/EC2/CreateInstanceExportTask.hs | 5345 | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.EC2.CreateInstanceExportTask
-- Copyright : (c) 2013-2014 Brendan Hay <[email protected]>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <[email protected]>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Exports a running or stopped instance to an Amazon S3 bucket.
--
-- For information about the supported operating systems, image formats, and
-- known limitations for the types of instances you can export, see <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ExportingEC2Instances.html Exporting EC2 Instances> in the /Amazon Elastic Compute Cloud User Guide for Linux/.
--
-- <http://docs.aws.amazon.com/AWSEC2/latest/APIReference/ApiReference-query-CreateInstanceExportTask.html>
module Network.AWS.EC2.CreateInstanceExportTask
(
-- * Request
CreateInstanceExportTask
-- ** Request constructor
, createInstanceExportTask
-- ** Request lenses
, cietDescription
, cietExportToS3Task
, cietInstanceId
, cietTargetEnvironment
-- * Response
, CreateInstanceExportTaskResponse
-- ** Response constructor
, createInstanceExportTaskResponse
-- ** Response lenses
, cietrExportTask
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.EC2.Types
import qualified GHC.Exts
data CreateInstanceExportTask = CreateInstanceExportTask
{ _cietDescription :: Maybe Text
, _cietExportToS3Task :: Maybe ExportToS3TaskSpecification
, _cietInstanceId :: Text
, _cietTargetEnvironment :: Maybe ExportEnvironment
} deriving (Eq, Read, Show)
-- | 'CreateInstanceExportTask' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cietDescription' @::@ 'Maybe' 'Text'
--
-- * 'cietExportToS3Task' @::@ 'Maybe' 'ExportToS3TaskSpecification'
--
-- * 'cietInstanceId' @::@ 'Text'
--
-- * 'cietTargetEnvironment' @::@ 'Maybe' 'ExportEnvironment'
--
createInstanceExportTask :: Text -- ^ 'cietInstanceId'
-> CreateInstanceExportTask
createInstanceExportTask p1 = CreateInstanceExportTask
{ _cietInstanceId = p1
, _cietDescription = Nothing
, _cietTargetEnvironment = Nothing
, _cietExportToS3Task = Nothing
}
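-- A usage sketch (the instance ID is invented, and the '&' and '?~' lens
-- operators are assumed to be in scope via 'Network.AWS.Prelude' or
-- 'Control.Lens'):
--
-- > createInstanceExportTask "i-1234567890abcdef0"
-- >     & cietDescription ?~ "nightly export"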
-- | A description for the conversion task or the resource being exported. The
-- maximum length is 255 bytes.
cietDescription :: Lens' CreateInstanceExportTask (Maybe Text)
cietDescription = lens _cietDescription (\s a -> s { _cietDescription = a })
cietExportToS3Task :: Lens' CreateInstanceExportTask (Maybe ExportToS3TaskSpecification)
cietExportToS3Task =
lens _cietExportToS3Task (\s a -> s { _cietExportToS3Task = a })
-- | The ID of the instance.
cietInstanceId :: Lens' CreateInstanceExportTask Text
cietInstanceId = lens _cietInstanceId (\s a -> s { _cietInstanceId = a })
-- | The target virtualization environment.
cietTargetEnvironment :: Lens' CreateInstanceExportTask (Maybe ExportEnvironment)
cietTargetEnvironment =
lens _cietTargetEnvironment (\s a -> s { _cietTargetEnvironment = a })
newtype CreateInstanceExportTaskResponse = CreateInstanceExportTaskResponse
{ _cietrExportTask :: Maybe ExportTask
} deriving (Eq, Read, Show)
-- | 'CreateInstanceExportTaskResponse' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'cietrExportTask' @::@ 'Maybe' 'ExportTask'
--
createInstanceExportTaskResponse :: CreateInstanceExportTaskResponse
createInstanceExportTaskResponse = CreateInstanceExportTaskResponse
{ _cietrExportTask = Nothing
}
cietrExportTask :: Lens' CreateInstanceExportTaskResponse (Maybe ExportTask)
cietrExportTask = lens _cietrExportTask (\s a -> s { _cietrExportTask = a })
instance ToPath CreateInstanceExportTask where
toPath = const "/"
instance ToQuery CreateInstanceExportTask where
toQuery CreateInstanceExportTask{..} = mconcat
[ "Description" =? _cietDescription
, "ExportToS3" =? _cietExportToS3Task
, "InstanceId" =? _cietInstanceId
, "TargetEnvironment" =? _cietTargetEnvironment
]
instance ToHeaders CreateInstanceExportTask
instance AWSRequest CreateInstanceExportTask where
type Sv CreateInstanceExportTask = EC2
type Rs CreateInstanceExportTask = CreateInstanceExportTaskResponse
request = post "CreateInstanceExportTask"
response = xmlResponse
instance FromXML CreateInstanceExportTaskResponse where
parseXML x = CreateInstanceExportTaskResponse
<$> x .@? "exportTask"
| mpl-2.0 |
ephemeralsnow/packer | builder/googlecompute/step_create_instance.go | 4045 | package googlecompute
import (
"errors"
"fmt"
"time"
"github.com/mitchellh/multistep"
"github.com/mitchellh/packer/packer"
)
// StepCreateInstance represents a Packer build step that creates GCE instances.
type StepCreateInstance struct {
Debug bool
}
func (config *Config) getImage() Image {
project := config.ProjectId
if config.SourceImageProjectId != "" {
project = config.SourceImageProjectId
}
return Image{Name: config.SourceImage, ProjectId: project}
}
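// getInstanceMetadata merges the user-supplied metadata with the generated SSH
// public key. The key is stored under "sshKeys" as "<ssh_username>:<public key>";
// any sshKeys already present in the config are appended on the following lines.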
func (config *Config) getInstanceMetadata(sshPublicKey string) map[string]string {
instanceMetadata := make(map[string]string)
// Copy metadata from config
for k, v := range config.Metadata {
instanceMetadata[k] = v
}
// Merge any existing ssh keys with our public key
sshMetaKey := "sshKeys"
sshKeys := fmt.Sprintf("%s:%s", config.Comm.SSHUsername, sshPublicKey)
if confSshKeys, exists := instanceMetadata[sshMetaKey]; exists {
sshKeys = fmt.Sprintf("%s\n%s", sshKeys, confSshKeys)
}
instanceMetadata[sshMetaKey] = sshKeys
return instanceMetadata
}
// Run executes the Packer build step that creates a GCE instance.
func (s *StepCreateInstance) Run(state multistep.StateBag) multistep.StepAction {
config := state.Get("config").(*Config)
driver := state.Get("driver").(Driver)
sshPublicKey := state.Get("ssh_public_key").(string)
ui := state.Get("ui").(packer.Ui)
ui.Say("Creating instance...")
name := config.InstanceName
errCh, err := driver.RunInstance(&InstanceConfig{
Description: "New instance created by Packer",
DiskSizeGb: config.DiskSizeGb,
Image: config.getImage(),
MachineType: config.MachineType,
Metadata: config.getInstanceMetadata(sshPublicKey),
Name: name,
Network: config.Network,
Subnetwork: config.Subnetwork,
Address: config.Address,
Preemptible: config.Preemptible,
Tags: config.Tags,
Region: config.Region,
Zone: config.Zone,
})
if err == nil {
ui.Message("Waiting for creation operation to complete...")
select {
case err = <-errCh:
case <-time.After(config.stateTimeout):
err = errors.New("time out while waiting for instance to create")
}
}
if err != nil {
err := fmt.Errorf("Error creating instance: %s", err)
state.Put("error", err)
ui.Error(err.Error())
return multistep.ActionHalt
}
ui.Message("Instance has been created!")
if s.Debug {
if name != "" {
ui.Message(fmt.Sprintf("Instance: %s started in %s", name, config.Zone))
}
}
// Things succeeded, store the name so we can remove it later
state.Put("instance_name", name)
return multistep.ActionContinue
}
// Cleanup destroys the GCE instance created during the image creation process.
func (s *StepCreateInstance) Cleanup(state multistep.StateBag) {
nameRaw, ok := state.GetOk("instance_name")
if !ok {
return
}
name := nameRaw.(string)
if name == "" {
return
}
config := state.Get("config").(*Config)
driver := state.Get("driver").(Driver)
ui := state.Get("ui").(packer.Ui)
ui.Say("Deleting instance...")
errCh, err := driver.DeleteInstance(config.Zone, name)
if err == nil {
select {
case err = <-errCh:
case <-time.After(config.stateTimeout):
err = errors.New("time out while waiting for instance to delete")
}
}
if err != nil {
ui.Error(fmt.Sprintf(
"Error deleting instance. Please delete it manually.\n\n"+
"Name: %s\n"+
"Error: %s", name, err))
}
ui.Message("Instance has been deleted!")
state.Put("instance_name", "")
// Deleting the instance does not remove the boot disk. This cleanup removes
// the disk.
ui.Say("Deleting disk...")
errCh, err = driver.DeleteDisk(config.Zone, config.DiskName)
if err == nil {
select {
case err = <-errCh:
case <-time.After(config.stateTimeout):
err = errors.New("time out while waiting for disk to delete")
}
}
if err != nil {
ui.Error(fmt.Sprintf(
"Error deleting disk. Please delete it manually.\n\n"+
"Name: %s\n"+
"Error: %s", config.InstanceName, err))
}
ui.Message("Disk has been deleted!")
return
}
| mpl-2.0 |
haplo-org/haplo-safe-view-templates | src/main/java/org/haplo/template/html/NodeTag.java | 6747 | /* Haplo Safe View Templates http://haplo.org
* (c) Haplo Services Ltd 2015 - 2016 http://www.haplo-services.com
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
package org.haplo.template.html;
final class NodeTag extends Node {
private String name;
private String start;
private Attribute attributesHead;
private Node attributeDictionaryValue;
public NodeTag(String name) {
this.name = name;
this.start = "<"+name;
}
public boolean allowedInURLContext() {
return false; // caught by Context.TEXT check as well
}
public String getName() {
return this.name;
}
public void addAttribute(String attributeName, Node value, Context valueContext, boolean tagQuoteMinimisationAllowed) {
if(value instanceof NodeLiteral) {
// Value is just a literal string, so can be optimised
// Literal values should not be escaped, because the author is trusted
String attributeValue = ((NodeLiteral)value).getLiteralString();
if(tagQuoteMinimisationAllowed && canOmitQuotesForValue(attributeValue)) {
this.start += " "+attributeName+"="+attributeValue;
} else {
this.start += " "+attributeName+"=\""+attributeValue+'"';
}
return;
}
Attribute attribute = new Attribute();
attribute.name = attributeName;
attribute.preparedNameEquals = " "+attributeName+"=\"";
attribute.value = value;
        // If a URL, and the value is a single NodeValue element, it has to be output as a URL path
if((valueContext == Context.URL) && (value instanceof NodeValue)) {
valueContext = Context.URL_PATH;
}
attribute.valueContext = valueContext;
// Add to list
Attribute tail = this.attributesHead;
while(tail != null) {
if(tail.nextAttribute == null) { break; }
tail = tail.nextAttribute;
}
if(tail == null) {
this.attributesHead = attribute;
} else {
tail.nextAttribute = attribute;
}
}
public void setAttributesDictionary(Parser parser, Node value) throws ParseException {
if(this.attributeDictionaryValue != null) {
parser.error("Tag can only have one attribute dictionary");
}
if(!value.nodeRepresentsValueFromView()) {
parser.error("Attribute dictionary for tag must be a value");
}
this.attributeDictionaryValue = value;
}
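    // Quote omission is only safe for non-empty, purely alphanumeric values:
    // e.g. type=text may be emitted unquoted, but class="btn primary" must
    // keep its quotes.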
private boolean canOmitQuotesForValue(CharSequence value) {
int len = value.length();
if(len == 0) { return false; }
for(int i = 0; i < len; ++i) {
char c = value.charAt(i);
if(!(
((c >= 'a') && (c <= 'z')) ||
((c >= 'A') && (c <= 'Z')) ||
((c >= '0' && (c <= '9')))
)) { return false; }
}
return true;
}
private static class Attribute {
public Attribute nextAttribute;
public String name;
public String preparedNameEquals; // " name=\"" for rendering
public Node value;
public Context valueContext;
}
protected Node orSimplifiedNode() {
if(this.attributesHead == null && this.attributeDictionaryValue == null) {
return new NodeLiteral(this.start+">");
}
return this;
}
public void render(StringBuilder builder, Driver driver, Object view, Context context) throws RenderException {
builder.append(this.start);
Attribute attribute = this.attributesHead;
while(attribute != null) {
int attributeStart = builder.length();
builder.append(attribute.preparedNameEquals);
int valueStart = builder.length();
attribute.value.render(builder, driver, view, attribute.valueContext);
// If nothing was rendered, remove the attribute
if(valueStart == builder.length()) {
builder.setLength(attributeStart);
} else {
builder.append('"');
}
attribute = attribute.nextAttribute;
}
if(this.attributeDictionaryValue != null) {
driver.iterateOverValueAsDictionary(this.attributeDictionaryValue.value(driver, view), (key, value) -> {
// Check the key (attribute name) in the dictionary isn't a special attribute or a value which isn't allowed
if(!(HTML.validTagAttributeNameAndNoSpecialHandlingRequired(this.name, key))) {
throw new RenderException(driver, "Bad attribute name for tag attribute dictionary expansion: '"+key+"'");
}
String valueString = driver.valueToStringRepresentation(value);
if((valueString != null) && (valueString.length() > 0)) {
builder.append(' ').
                    append(key). // safe: attribute name validated above
append("=\"");
Escape.escape(valueString, builder, Context.ATTRIBUTE_VALUE);
builder.append('"');
}
});
}
builder.append('>');
}
public void dumpToBuilder(StringBuilder builder, String linePrefix) {
builder.append(linePrefix).append("TAG ").append(this.start);
if(this.attributesHead == null) {
builder.append(">\n"); // although this case should be simplified to a literal
} else {
int count = 0;
StringBuilder attributesBuilder = new StringBuilder(256);
Attribute attribute = this.attributesHead;
while(attribute != null) {
count++;
attributesBuilder.append(linePrefix+" ").append(attribute.name).append("\n");
attribute.value.dumpToBuilder(attributesBuilder, linePrefix+" ");
attribute = attribute.nextAttribute;
}
builder.append("> with "+count+" attributes:\n").
append(attributesBuilder);
}
}
protected void interateOverAttributes(AttributeIterator i) {
Attribute attribute = this.attributesHead;
while(attribute != null) {
i.attribute(attribute.name, attribute.value, attribute.valueContext);
attribute = attribute.nextAttribute;
}
}
protected interface AttributeIterator {
void attribute(String name, Node value, Context context);
}
}
| mpl-2.0 |
snoweye/EMCluster | src/mb_tool.h | 5741 | /* This file contains several functions to perform model-based initializers.
Created: Wei-Chen Chen on 2009/03/04.
*/
#include <R.h>
#include <Rinternals.h>
#include <Rmath.h>
#include <math.h>
/* These are all required for EMCluster and defined at other files
provided by Dr. Ranjan Maitra and Volodymyr Melnykov.
*/
#include "array.h"
#include "mat_vec.h"
#include "order.h"
#include "quantile.h"
#define Inf 1e+140
int srswor(int n, int k, int *ranordr);
int classify(double *X,int p,int k,double *pi, double **Mu, double **LTSigma);
int assign_closest(double *X, int p, int nclass, double **Mu);
double findzero(double ax, double bx, double (*f)(double x, void *info),
void *info, double *Tol, int *Maxit);
double determinant(double *LTSigma,int n);
void meandispersion(double **x, int n, int p, double *mu, double *ltsigma);
double dlmvnorm(double *x, int p, double *mu, double *LTsigma);
double lnlikelihood(int n,int p,int k,double *pi,double **X,double **Mu,
double **LTSigma);
double mixllhd(int p,int k,double *x,double *pi,double **Mu,double **LTSigma);
int emcluster_org(int n,int p,int k,double *pi,double **X,double **Mu,
double **LTSigma,int maxiter,double eps,double *llhdval);
void estep(int n,int p,int k,double **X,double **Gamma,double *pi,double **Mu,
double **LTSigma);
void mstep(double **X,int n,int p,int k,double *pi,double **Mu,
double **LTSigma,double **Gamma);
void assign(int n, int p,int k,double **X,double *pi,double **Mu,
double **LTSigma,int *class,int *nc);
int shortemcluster_org(int n,int p,int k,double *pi,double **X,double **Mu,
double **LTSigma,int maxiter,double eps,double *llhdval);
int shortems(int n,int p,int nclass,double *pi,double **X,double **Mu,
double **LTSigma,int maxshortiter,double shorteps,
int *conv_iter,double *conv_eps);
int initials(double **x,int n,int p,int nclass,int *nc,
double **Mu,double **LTSigma,int *class);
int randomEMinit(double **x,int n,int p,int nclass,double *pi,
double **Mu,double **LTSigma);
int starts_via_svd(int n,int m,double **Mu,double **x,int nclus,int *ningrp,
double *pi,int *grpids,double **LTSigma,double alpha, int llhdnotW);
/*-----------------------------------------------------------------------------
The following functions are added by Wei-Chen Chen for EMCluster.
*/
/* Function in "M_emculster". */
double lnlikelihood_gamma(int n, int k, double **Gamma, double *pi);
void estep_gamma(int n, int p, int k, double **X, double **Gamma,
double **Mu, double **LTSigma);
void estep_unnorm_dlmvn(int n, int p, int k, double **X, double **Gamma,
double *pi, double **Mu, double **LTSigma);
void norm_gamma(int n, int k, double **Gamma, double *pi);
void emcluster(int n, int p, int k, double *pi, double **X, double **Mu,
double **LTSigma, int maxiter, double eps, double *llhdval,
int *conv_iter, double *conv_eps);
/* Function in "M_init_other.c". */
int shortemcluster(int n, int p, int k, double *pi, double **X,
double **Mu, double **LTSigma, int maxiter, double eps, double *llhdval,
int *conv_iter, double *conv_eps);
/* Functions in "meandispersion.c". */
void meandispersion_MLE(double **x, int n, int p, double *mu, double *ltsigma);
void meandispersion_MME(double **x, int n, int p, double *mu, double *ltsigma);
void est_ltsigma_mle_given_mu(double **x, int n, int p, double *mu,
double *ltsigma);
/* Functions in "rand_EM.c". */
int mod_shortemcluster(int n, int p, int k, double *pi, double **X,
double **Mu, double **LTSigma, int fixed_iter, double *llhdval,
int *conv_iter, double *conv_eps);
void mod_shortems(int n, int p, int nclass, double *pi, double **X,
double **Mu, double **LTSigma, int maxshortiter, int fixed_iter,
int *conv_iter, double *conv_eps);
/* Functions in "Rtool.c". */
double** allocate_double_array(int n);
/* Functions in "M_emgroup.c". */
int M_emgroup(double **x,int n,int p,int nclass,double *pi,double **Mu,
double **LTSigma,double *llhdval,int *nc,int *class,
double alpha, int em_iter, double em_eps,
int *conv_iter, double *conv_eps);
/* Functions in "mb_em_EM.c". */
void shortems_mb(int n, int p, int nclass, double *pi, double **X, double **Mu,
double **LTSigma, int maxshortiter, double shorteps,
int *conv_iter, double *conv_eps);
/* Functions in "mb_init.c". */
void cut_sub(double **X, int n, int p, int G, int min_n, double lambda,
double *prob, double **Mu, double **LTSigma);
void mb_init(double **X, int n, int p, int k, double *pi, double **Mu,
double **LTSigma);
/* Functions in "mb_rand_EM.c". */
void mod_shortems_mb(int n, int p, int nclass, double *pi, double **X,
double **Mu, double **LTSigma, int maxshortiter, int fixed_iter,
int *conv_iter, double *conv_eps);
/* Function in "mb_randomEMinit.c". */
int mb_assign_closest(double **X, int n, int p, int nclass, double **Mu,
double **LTSigma, int* clas);
void mb_randomEMinit(double **x, int n, int p, int nclass, double *pi,
double **Mu, double **LTSigma);
/* Functions in "mb_tool.c". */
typedef struct Param{
double lower_bound, upper_bound, n_nclass, tol;
int maxit;
} PARAM; /* End of Param. */
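/* PARAM bundles the bracketing interval (lower_bound, upper_bound), the
   tolerance and the iteration limit used by the 1-D root search in
   find_lambda()/fcn(); it is passed to those functions through a void
   pointer. */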
double mb_median(int n, double *x);
double mb_quantile(int n, double *x, double p);
double fcn(double lambda, void *pt_param);
double find_lambda(void *pt_param);
/* Functions in "ac_EM.c". */
//int shortems_ac(int n, int p, int nclass, double *pi, double **X, double **Mu,
// double **LTSigma, int maxshortiter, double shorteps, int n_candidate);
//void mod_shortems_ac(int n, int p, int nclass, double *pi, double **X,
// double **Mu, double **LTSigma, int maxshortiter, int fixed_iter,
// int n_candidate);
| mpl-2.0 |
joansmith/RoyalCommands | modules/RoyalCommands/src/main/java/org/royaldev/royalcommands/rcommands/CmdUses.java | 10138 | /*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package org.royaldev.royalcommands.rcommands;
import org.bukkit.Material;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.inventory.InventoryCloseEvent;
import org.bukkit.event.inventory.InventoryType;
import org.bukkit.inventory.FurnaceRecipe;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.InventoryHolder;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.Recipe;
import org.bukkit.inventory.ShapedRecipe;
import org.bukkit.inventory.ShapelessRecipe;
import org.royaldev.royalcommands.MessageColor;
import org.royaldev.royalcommands.RUtils;
import org.royaldev.royalcommands.RoyalCommands;
import org.royaldev.royalcommands.exceptions.InvalidItemNameException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
// TODO: Use inventory gui (3 rows, with forward/back buttons)
@ReflectCommand
public class CmdUses extends TabCommand {
private final Map<String, Integer> tasks = new HashMap<>();
public CmdUses(final RoyalCommands instance, final String name) {
super(instance, name, true, new Short[]{CompletionType.ITEM_ALIAS.getShort()});
this.plugin.getServer().getPluginManager().registerEvents(new WorkbenchCloseListener(), this.plugin);
}
private void cancelTask(final Player p) {
if (!this.tasks.containsKey(p.getName())) return;
final int taskID = this.tasks.get(p.getName());
if (taskID != -1) this.plugin.getServer().getScheduler().cancelTask(taskID);
this.tasks.remove(p.getName());
}
private boolean containsItemStack(final Collection<? extends ItemStack> collection, final ItemStack b) {
for (final ItemStack a : collection) {
if (this.itemStackEquals(a, b)) return true;
}
return false;
}
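    // A durability of -1 or Short.MAX_VALUE on the recipe ingredient is treated
    // as a wildcard, so ingredients defined for "any data value" match the
    // queried item regardless of its durability.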
private boolean itemStackEquals(final ItemStack a, final ItemStack b) {
return !(a == null || b == null) && a.getType() == b.getType() && (a.getDurability() == -1 || a.getDurability() == Short.MAX_VALUE || a.getDurability() == b.getDurability());
}
private void scheduleUsesTask(final Player p, final ItemStack is) {
final List<Inventory> workbenches = new ArrayList<>();
final Iterator<Recipe> recipeIterator = this.plugin.getServer().recipeIterator();
while (recipeIterator.hasNext()) {
final Recipe r = recipeIterator.next();
final Inventory i;
if (r instanceof ShapedRecipe) {
final ShapedRecipe sr = (ShapedRecipe) r;
if (!this.containsItemStack(sr.getIngredientMap().values(), is)) continue;
i = this.plugin.getServer().createInventory(new UsesHolder(), InventoryType.WORKBENCH);
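                // Workbench slot 0 holds the result; slots 1-9 are the 3x3
                // crafting grid laid out row-major, hence slot + (lineNum * 3).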
final Map<Character, ItemStack> im = sr.getIngredientMap();
final String[] lines = sr.getShape();
for (int lineNum = 0; lineNum < lines.length; lineNum++) {
final String line = lines[lineNum];
for (int slot = 1; slot <= 3; slot++) {
if (slot > line.length()) continue;
final ItemStack slotItem = im.get(line.charAt(slot - 1));
if (slotItem == null) continue;
i.setItem(slot + (lineNum * 3), this.syncDurabilities(slotItem, is));
}
}
i.setItem(0, sr.getResult());
} else if (r instanceof ShapelessRecipe) {
final ShapelessRecipe sr = (ShapelessRecipe) r;
if (!this.containsItemStack(sr.getIngredientList(), is)) continue;
i = this.plugin.getServer().createInventory(new UsesHolder(), InventoryType.WORKBENCH);
final List<ItemStack> ingredients = sr.getIngredientList();
for (int slot = 1; slot <= ingredients.size(); slot++) {
if (slot > ingredients.size()) continue;
i.setItem(slot, this.syncDurabilities(ingredients.get(slot - 1), is));
}
i.setItem(0, sr.getResult());
} else if (r instanceof FurnaceRecipe) {
final FurnaceRecipe fr = (FurnaceRecipe) r;
if (!this.itemStackEquals(fr.getInput(), is)) continue;
i = this.plugin.getServer().createInventory(new UsesHolder(), InventoryType.FURNACE);
i.setItem(0, this.syncDurabilities(fr.getInput(), is));
i.setItem(2, fr.getResult());
} else continue;
workbenches.add(i);
}
if (workbenches.size() < 1) {
p.sendMessage(MessageColor.NEGATIVE + "No uses for that item!");
return;
}
final Runnable r = new Runnable() {
private int currentRecipe = 0;
private boolean display = true;
private void setClosing(final boolean closing) {
final InventoryHolder ih = p.getOpenInventory().getTopInventory().getHolder();
if (!(ih instanceof UsesHolder)) return;
final UsesHolder uh = (UsesHolder) ih;
uh.setClosing(closing);
}
@Override
public void run() {
// let's not open new workbenches, as that can cause the items to disappear
if (!this.display) return;
if (!CmdUses.this.tasks.containsKey(p.getName())) return;
if (this.currentRecipe >= workbenches.size()) this.currentRecipe = 0;
this.setClosing(true);
p.openInventory(workbenches.get(this.currentRecipe));
this.setClosing(false);
this.currentRecipe++;
if (workbenches.size() == 1) this.display = false;
}
};
final int taskID = this.plugin.getServer().getScheduler().scheduleSyncRepeatingTask(this.plugin, r, 0L, 30L);
if (taskID == -1) {
p.sendMessage(MessageColor.NEGATIVE + "Could not schedule task!");
return;
}
this.cancelTask(p);
this.tasks.put(p.getName(), taskID);
}
private ItemStack syncDurabilities(final ItemStack base, final ItemStack copyDurability) {
if (base.getType() != copyDurability.getType()) return base;
if (base.getDurability() != -1 && base.getDurability() != Short.MAX_VALUE) return base;
base.setDurability(copyDurability.getDurability());
return base;
}
@Override
protected boolean runCommand(final CommandSender cs, final Command cmd, final String label, final String[] eargs, final CommandArguments ca) {
if (eargs.length < 1) {
cs.sendMessage(cmd.getDescription());
return false;
}
if (!(cs instanceof Player)) {
cs.sendMessage(MessageColor.NEGATIVE + "This command is only available to players!");
return true;
}
final Player p = (Player) cs;
ItemStack is;
if (eargs[0].equalsIgnoreCase("hand")) {
is = p.getItemInHand();
} else {
try {
is = RUtils.getItemFromAlias(eargs[0], 1);
} catch (InvalidItemNameException e) {
is = RUtils.getItem(eargs[0], 1);
} catch (NullPointerException e) {
cs.sendMessage(MessageColor.NEGATIVE + "ItemNameManager was not loaded. Let an administrator know.");
return true;
}
}
if (is == null) {
cs.sendMessage(MessageColor.NEGATIVE + "Invalid item name!");
return true;
}
this.scheduleUsesTask(p, is);
return true;
}
private class WorkbenchCloseListener implements Listener {
@EventHandler(ignoreCancelled = true)
public void workbenchClick(final InventoryClickEvent e) {
if (!(e.getWhoClicked() instanceof Player)) return;
final ItemStack is = e.getCurrentItem();
if (is == null || is.getType() == Material.AIR) return;
final InventoryType it = e.getInventory().getType();
if (it != InventoryType.WORKBENCH && it != InventoryType.FURNACE) return;
if (!(e.getInventory().getHolder() instanceof UsesHolder)) return;
e.setCancelled(true);
if (!(e.getWhoClicked() instanceof Player)) return;
final Player p = (Player) e.getWhoClicked();
CmdUses.this.scheduleUsesTask(p, is);
}
@EventHandler(priority = EventPriority.MONITOR, ignoreCancelled = true)
public void workbenchClose(final InventoryCloseEvent e) {
if (!(e.getPlayer() instanceof Player)) return;
final Player p = (Player) e.getPlayer();
final InventoryType it = e.getInventory().getType();
if (it != InventoryType.WORKBENCH && it != InventoryType.FURNACE) return;
if (!CmdUses.this.tasks.containsKey(p.getName())) return;
if (!(e.getInventory().getHolder() instanceof UsesHolder)) return;
final UsesHolder uh = (UsesHolder) e.getInventory().getHolder();
if (uh.isClosing()) return;
CmdUses.this.cancelTask(p);
}
}
private class UsesHolder implements InventoryHolder {
private boolean closing = false;
private boolean isClosing() {
return this.closing;
}
private void setClosing(final boolean closing) {
this.closing = closing;
}
@Override
public Inventory getInventory() {
return null;
}
}
}
| mpl-2.0 |
Faithfinder/OneScript | src/ScriptEngine.HostedScript/Library/EnvironmentVariablesImpl.cs | 5140 | /*----------------------------------------------------------
This Source Code Form is subject to the terms of the
Mozilla Public License, v.2.0. If a copy of the MPL
was not distributed with this file, You can obtain one
at http://mozilla.org/MPL/2.0/.
----------------------------------------------------------*/
using ScriptEngine.Machine;
using ScriptEngine.Machine.Contexts;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace ScriptEngine.HostedScript.Library
{
[GlobalContext(Category = "Работа с переменными окружения")]
public class EnvironmentVariablesImpl : GlobalContextBase<EnvironmentVariablesImpl>
{
/// <summary>
/// Returns a map of environment variables. The key is the variable name and the value is the variable's value.
/// </summary>
/// <param name="target">Расположение переменной среды</param>
/// <example>
/// Для Каждого Переменная Из ПеременныеСреды() Цикл
/// Сообщить(Переменная.Ключ + " = " + Переменная.Значение);
/// КонецЦикла;
/// </example>
/// <returns>Map</returns>
[ContextMethod("ПеременныеСреды", "EnvironmentVariables")]
public MapImpl EnvironmentVariables(EnvironmentVariableTargetEnum target = EnvironmentVariableTargetEnum.Process)
{
EnvironmentVariableTarget targetParam = GetSystemEnvVariableTarget(target);
var varsMap = new MapImpl();
var allVars = System.Environment.GetEnvironmentVariables(targetParam);
foreach (DictionaryEntry item in allVars)
{
varsMap.Insert(
ValueFactory.Create((string)item.Key),
ValueFactory.Create((string)item.Value));
}
return varsMap;
}
/// <summary>
/// Sets an environment variable.
/// By default the variable is set in the scope of the current process and is cleared when the process exits.
/// </summary>
/// <param name="varName">Имя переменной</param>
/// <param name="value">Значение переменной</param>
/// <param name="target">Расположение переменной среды</param>
[ContextMethod("УстановитьПеременнуюСреды", "SetEnvironmentVariable")]
public void SetEnvironmentVariable(string varName, string value, EnvironmentVariableTargetEnum target = EnvironmentVariableTargetEnum.Process)
{
EnvironmentVariableTarget targetParam = GetSystemEnvVariableTarget(target);
System.Environment.SetEnvironmentVariable(varName, value, targetParam);
}
/// <summary>
/// Gets the value of an environment variable.
/// </summary>
/// <param name="varName">Имя переменной</param>
/// <param name="target">Расположение переменной среды</param>
/// <returns>Строка. Значение переменной</returns>
[ContextMethod("ПолучитьПеременнуюСреды", "GetEnvironmentVariable")]
public IValue GetEnvironmentVariable(string varName, EnvironmentVariableTargetEnum target = EnvironmentVariableTargetEnum.Process)
{
EnvironmentVariableTarget targetParam = GetSystemEnvVariableTarget(target);
string value = System.Environment.GetEnvironmentVariable(varName, targetParam);
if (value == null)
return ValueFactory.Create();
else
return ValueFactory.Create(value);
}
public static IAttachableContext CreateInstance()
{
return new EnvironmentVariablesImpl();
}
private static EnvironmentVariableTarget GetSystemEnvVariableTarget(EnvironmentVariableTargetEnum target)
{
EnvironmentVariableTarget targetParam = EnvironmentVariableTarget.Process;
switch (target)
{
case EnvironmentVariableTargetEnum.Process:
targetParam = EnvironmentVariableTarget.Process;
break;
case EnvironmentVariableTargetEnum.User:
targetParam = EnvironmentVariableTarget.User;
break;
case EnvironmentVariableTargetEnum.Machine:
targetParam = EnvironmentVariableTarget.Machine;
break;
}
return targetParam;
}
}
}
| mpl-2.0 |
hooklift/govsphere | reference/vim.fault.VsanFault.html | 4197 | <html xmlns:soap="http://schemas.xmlsoap.org/wsdl/soap/" xmlns:mime="http://schemas.xmlsoap.org/wsdl/mime/" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:vim2="urn:vim2" xmlns:wsdl="http://schemas.xmlsoap.org/wsdl/">
<head>
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title></title>
<script src="./commonRes.js" language="JavaScript"> type="text/javascript"></script>
<link href="doc-style.css" type="text/css" rel="StyleSheet">
</head>
<script src="dynamic-content.js" type="text/javascript"></script>
<body>
<table cellspacing="0" class="header-footer" id="top">
<tr>
<td>
<br>
</td><td></td><td></td><td></td>
</tr>
<tr>
<td><a href="index-mo_types.html">Managed Object Types</a></td><td><a href="index-do_types.html">Data Object Types</a></td><td><a href="index-properties.html">All Properties</a></td><td><a href="index-methods.html">All Methods</a></td>
</tr>
</table>
<br>
<a id="vim.fault.VsanFault" name="vim.fault.VsanFault"></a>
<h1>Fault - VsanFault</h1>
<dl>
<dt>Extended by</dt>
<dd>
<a href="vim.fault.CannotChangeVsanClusterUuid.html">CannotChangeVsanClusterUuid</a>, <a href="vim.fault.CannotChangeVsanNodeUuid.html">CannotChangeVsanNodeUuid</a>, <a href="vim.fault.CannotMoveVsanEnabledHost.html">CannotMoveVsanEnabledHost</a>, <a href="vim.fault.CannotReconfigureVsanWhenHaEnabled.html">CannotReconfigureVsanWhenHaEnabled</a>, <a href="vim.fault.DuplicateVsanNetworkInterface.html">DuplicateVsanNetworkInterface</a>, <a href="vim.fault.VsanDiskFault.html">VsanDiskFault</a>
</dd>
<dt>Extends</dt>
<dd>
<a href="vim.fault.VimFault.html">VimFault</a>
</dd>
<dt>Since </dt>
<dd>vSphere API 5.5</dd>
<p></p>
</dl>
<H2>Fault
Description</H2>
<p></p>
Base exception class for VSAN-specific faults raised for host
or cluster operations.<br>See <a href="vim.host.VsanSystem.html">HostVsanSystem</a><br>See <a href="vim.ComputeResource.html#reconfigureEx">ReconfigureComputeResource_Task</a><br>
<p></p>
<a id="field_detail" name="field_detail"></a>
<p class="table-title">Properties</p>
<table cellspacing="0">
<tr>
<th>
Name
</th><th>
Type
</th><th>
Description
</th>
</tr>
<tr class="r0">
<td colspan="3">None</td>
</tr>
<tr class="r1">
<td colspan="3">
Properties inherited from <a href="vim.fault.VimFault.html">VimFault</a></td>
</tr>
<tr class="r0">
<td colspan="3">None</td>
</tr>
<tr class="r1">
<td colspan="3">
Properties inherited from <a href="vmodl.MethodFault.html">MethodFault</a></td>
</tr>
<tr class="r0">
<td colspan="3"><a href="vmodl.MethodFault.html#dynamicProperty">dynamicProperty</a>, <a href="vmodl.MethodFault.html#dynamicType">dynamicType</a>, <a href="vmodl.MethodFault.html#faultCause">faultCause</a>, <a href="vmodl.MethodFault.html#faultMessage">faultMessage</a></td>
</tr>
</table>
<br>
<a style="margin-bottom:10px; margin-top:10px; cursor:hand; cursor:pointer" onclick="resize_textarea('wsdl-textarea');expandcontent(this, 'wsdl-div')">Show WSDL type definition</a>
<div class="switchcontent" id="wsdl-div">
<textarea cols="20" rows="10" name="wsdl-textarea" wrap="off" readonly="1" id="wsdl-textarea"> <complexType xmlns="http://www.w3.org/2001/XMLSchema" xmlns:vim25="urn:vim25" name="VsanFault">
<complexContent>
<extension base="vim25:VimFault">
<sequence>
</sequence>
</extension>
</complexContent>
</complexType></textarea>
</div>
<br>
<br>
<table cellspacing="0" class="header-footer" id="bottom">
<tr>
<td><a href="#top">Top of page</a></td><td></td><td></td><td></td>
</tr>
<tr>
<td><a href="index-mo_types.html">Managed Object Types</a></td><td><a href="index-do_types.html">Data Object Types</a></td><td><a href="index-properties.html">All Properties</a></td><td><a href="index-methods.html">All Methods</a></td>
</tr>
</table>
<br>
<script language="javascript">document.write(ID_Copyright);</script>
<br>
<script language="javascript">document.write(ID_VersionInformation);</script>
</body>
</html>
| mpl-2.0 |
fabricedesre/servo | components/plugins/macros.rs | 811 | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Exports macros for use in other Servo crates.
#[macro_export]
macro_rules! bitfield(
($bitfieldname:ident, $getter:ident, $setter:ident, $value:expr) => (
impl $bitfieldname {
#[inline]
pub fn $getter(self) -> bool {
let $bitfieldname(this) = self;
(this & $value) != 0
}
#[inline]
pub fn $setter(&mut self, value: bool) {
let $bitfieldname(this) = *self;
*self = $bitfieldname((this & !$value) | (if value { $value } else { 0 }))
}
}
)
)
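// Editorial usage sketch (not part of the original source): invoking `bitfield!` on a
// hypothetical tuple struct generates a masked getter/setter pair for a single flag bit:
//
//     struct Flags(u8);
//     bitfield!(Flags, is_dirty, set_dirty, 0x01)
//
// `flags.is_dirty()` then tests the 0x01 bit and `flags.set_dirty(true)` sets or clears it.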
| mpl-2.0 |
moquist/raft.js | test/test_up.js | 1370 | // Usage:
// - Start rtc_server.js first:
// ./rtc_server --port 8000
//
// - Then use a docker build of slimerjs to run the test:
// IP_ADDR=$(hostname -I | awk '{print $1}')
// docker run -it -v `pwd`/test:/test slimerjs-0.9.6 slimerjs /test/test_up.js http://${IP_ADDR}:8000/
var common = require('common'),
system = require('system'),
home = '/rtc.html',
channel = Math.round(Math.random()*1000000),
base_address = system.args[1],
server_count = (system.args.length >= 3) ? parseInt(system.args[2]) : 3,
up_timeout = (10 + (server_count*server_count)/4)*1000,
pred_timeout = (1 + (server_count*server_count)/6)*1000;
var pages = [];
// Start checking the states
common.wait_cluster_up(pages, server_count, up_timeout, function(status, nodes, elapsed) {
if (status) {
console.log('Cluster is up after ' + elapsed + 'ms');
//common.show_nodes(nodes);
phantom.exit(0);
} else {
console.log('Cluster failed to come up after ' + elapsed + 'ms');
//common.show_nodes(nodes);
phantom.exit(1);
}
});
// Start each page/cluster node
var opts = {base_address: base_address, home: home, channel: channel};
pages.push(common.new_page(0, true, opts, function() {
for (var idx = 1; idx < server_count; idx++) {
pages.push(common.new_page(idx, false, opts));
}
}));
| mpl-2.0 |
lundjordan/services | src/shipit/frontend/src/components/auth/Auth0LoginMenuItem.js | 925 | import React from 'react';
import { OverlayTrigger, Tooltip, NavItem, Glyphicon } from 'react-bootstrap';
// This authenticates to Auth0 by opening a new Window where Auth0 will do its
// thing, then closing that window when login is complete.
export default class Auth0LoginMenuItem extends React.PureComponent {
static handleSelect() {
const loginView = new URL('/login', window.location);
window.open(loginView, '_blank');
}
render() {
const tooltip = (
<Tooltip id="auth0-signin">
Sign in with the LDAP account you use to push to version control, or
with email if you do not have version control access.
</Tooltip>
);
return (
<OverlayTrigger placement="bottom" delay={600} overlay={tooltip}>
<NavItem onSelect={Auth0LoginMenuItem.handleSelect}>
<Glyphicon glyph="log-in" /> Sign In
</NavItem>
</OverlayTrigger>
);
}
}
| mpl-2.0 |
banterle/piccante | include/filtering/filter_log_2d.hpp | 1225 | /*
PICCANTE
The hottest HDR imaging library!
http://vcg.isti.cnr.it/piccante
Copyright (C) 2014
Visual Computing Laboratory - ISTI CNR
http://vcg.isti.cnr.it
First author: Francesco Banterle
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
#ifndef PIC_FILTERING_FILTER_LOG_2D_HPP
#define PIC_FILTERING_FILTER_LOG_2D_HPP
#include "../filtering/filter_diff_gauss_2d.hpp"
namespace pic {
/**
* @brief The FilterLoG2D class
*/
class FilterLoG2D: public FilterDiffGauss
{
public:
float sigma;
/**
* @brief FilterLoG2D
* @param sigma
*/
FilterLoG2D(float sigma) : FilterDiffGauss(sigma * sqrtf(2.0f), sigma / sqrtf(2.0f))
{
this->sigma = sigma;
}
/**
* @brief execute
* @param imgIn
* @param imgOut
* @param sigma
* @return
*/
static Image *execute(Image *imgIn, Image *imgOut, float sigma)
{
FilterLoG2D filter(sigma);
return filter.Process(Single(imgIn), imgOut);
}
};
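// Editorial usage sketch (not part of the original header): given an Image* loaded elsewhere
// (imgIn is assumed), the static helper runs the LoG approximation in a single call; the
// sigma value below is arbitrary:
//
//     Image *out = FilterLoG2D::execute(imgIn, NULL, 1.6f);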
} // end namespace pic
#endif /* PIC_FILTERING_FILTER_LOG_2D_HPP */
| mpl-2.0 |
sdteffen/libvisio | src/lib/VSDStencils.cpp | 6217 | /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/*
* This file is part of the libvisio project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
#include "VSDStencils.h"
#include "libvisio_utils.h"
libvisio::VSDShape::VSDShape()
: m_geometries(), m_shapeList(), m_fields(), m_foreign(0), m_parent(0), m_masterPage(MINUS_ONE),
m_masterShape(MINUS_ONE), m_shapeId(MINUS_ONE), m_lineStyleId(MINUS_ONE), m_fillStyleId(MINUS_ONE),
m_textStyleId(MINUS_ONE), m_lineStyle(), m_fillStyle(), m_textBlockStyle(), m_charStyle(),
m_themeRef(), m_charList(), m_paraStyle(), m_paraList(), m_text(), m_names(),
m_textFormat(libvisio::VSD_TEXT_UTF16), m_nurbsData(), m_polylineData(), m_xform(), m_txtxform(0),
m_misc()
{
}
libvisio::VSDShape::VSDShape(const libvisio::VSDShape &shape)
: m_geometries(shape.m_geometries), m_shapeList(shape.m_shapeList), m_fields(shape.m_fields),
m_foreign(shape.m_foreign ? new ForeignData(*(shape.m_foreign)) : 0), m_parent(shape.m_parent),
m_masterPage(shape.m_masterPage), m_masterShape(shape.m_masterShape), m_shapeId(shape.m_shapeId),
m_lineStyleId(shape.m_lineStyleId), m_fillStyleId(shape.m_fillStyleId), m_textStyleId(shape.m_textStyleId),
m_lineStyle(shape.m_lineStyle), m_fillStyle(shape.m_fillStyle), m_textBlockStyle(shape.m_textBlockStyle),
m_charStyle(shape.m_charStyle), m_themeRef(shape.m_themeRef), m_charList(shape.m_charList),
m_paraStyle(shape.m_paraStyle), m_paraList(shape.m_paraList), m_text(shape.m_text), m_names(shape.m_names),
m_textFormat(shape.m_textFormat), m_nurbsData(shape.m_nurbsData), m_polylineData(shape.m_polylineData),
m_xform(shape.m_xform), m_txtxform(shape.m_txtxform ? new XForm(*(shape.m_txtxform)) : 0), m_misc(shape.m_misc)
{
}
libvisio::VSDShape::~VSDShape()
{
clear();
}
libvisio::VSDShape &libvisio::VSDShape::operator=(const libvisio::VSDShape &shape)
{
if (this != &shape)
{
m_geometries = shape.m_geometries;
m_shapeList = shape.m_shapeList;
m_fields = shape.m_fields;
if (m_foreign)
delete m_foreign;
m_foreign = shape.m_foreign ? new ForeignData(*(shape.m_foreign)) : 0;
m_parent = shape.m_parent;
m_masterPage = shape.m_masterPage;
m_masterShape = shape.m_masterShape;
m_shapeId = shape.m_shapeId;
m_lineStyleId = shape.m_lineStyleId;
m_fillStyleId = shape.m_fillStyleId;
m_textStyleId = shape.m_textStyleId;
m_lineStyle = shape.m_lineStyle;
m_fillStyle = shape.m_fillStyle;
m_textBlockStyle = shape.m_textBlockStyle;
m_charStyle = shape.m_charStyle;
m_themeRef = shape.m_themeRef;
m_charList = shape.m_charList;
m_paraStyle = shape.m_paraStyle;
m_paraList = shape.m_paraList;
m_text = shape.m_text;
m_names = shape.m_names;
m_textFormat = shape.m_textFormat;
m_nurbsData = shape.m_nurbsData;
m_polylineData = shape.m_polylineData;
m_xform = shape.m_xform;
if (m_txtxform)
delete m_txtxform;
m_txtxform = shape.m_txtxform ? new XForm(*(shape.m_txtxform)) : 0;
m_misc = shape.m_misc;
}
return *this;
}
void libvisio::VSDShape::clear()
{
if (m_foreign)
delete m_foreign;
m_foreign = 0;
if (m_txtxform)
delete m_txtxform;
m_txtxform = 0;
m_geometries.clear();
m_shapeList.clear();
m_fields.clear();
m_lineStyle = VSDOptionalLineStyle();
m_fillStyle = VSDOptionalFillStyle();
m_textBlockStyle = VSDOptionalTextBlockStyle();
m_charStyle = VSDOptionalCharStyle();
m_themeRef = VSDOptionalThemeReference();
m_charList.clear();
m_paraStyle = VSDOptionalParaStyle();
m_paraList.clear();
m_text.clear();
m_names.clear();
m_nurbsData.clear();
m_polylineData.clear();
m_xform = XForm();
m_parent = 0;
m_masterPage = MINUS_ONE;
m_masterShape = MINUS_ONE;
m_shapeId = MINUS_ONE;
m_lineStyleId = MINUS_ONE;
m_fillStyleId = MINUS_ONE;
m_textStyleId = MINUS_ONE;
m_textFormat = libvisio::VSD_TEXT_UTF16;
m_misc = VSDMisc();
}
libvisio::VSDStencil::VSDStencil()
: m_shapes(), m_shadowOffsetX(0.0), m_shadowOffsetY(0.0), m_firstShapeId(MINUS_ONE)
{
}
libvisio::VSDStencil::VSDStencil(const libvisio::VSDStencil &stencil)
: m_shapes(stencil.m_shapes), m_shadowOffsetX(stencil.m_shadowOffsetX),
m_shadowOffsetY(stencil.m_shadowOffsetY), m_firstShapeId(stencil.m_firstShapeId)
{
}
libvisio::VSDStencil::~VSDStencil()
{
}
libvisio::VSDStencil &libvisio::VSDStencil::operator=(const libvisio::VSDStencil &stencil)
{
if (this != &stencil)
{
m_shapes = stencil.m_shapes;
m_shadowOffsetX = stencil.m_shadowOffsetX;
m_shadowOffsetY = stencil.m_shadowOffsetY;
m_firstShapeId = stencil.m_firstShapeId;
}
return *this;
}
void libvisio::VSDStencil::addStencilShape(unsigned id, const VSDShape &shape)
{
m_shapes[id] = shape;
}
void libvisio::VSDStencil::setFirstShape(unsigned id)
{
if (m_firstShapeId == MINUS_ONE)
m_firstShapeId = id;
}
const libvisio::VSDShape *libvisio::VSDStencil::getStencilShape(unsigned id) const
{
std::map<unsigned, VSDShape>::const_iterator iter = m_shapes.find(id);
if (iter != m_shapes.end())
return &(iter->second);
else
return 0;
}
libvisio::VSDStencils::VSDStencils() :
m_stencils()
{
}
libvisio::VSDStencils::~VSDStencils()
{
}
void libvisio::VSDStencils::addStencil(unsigned idx, const libvisio::VSDStencil &stencil)
{
m_stencils[idx] = stencil;
}
const libvisio::VSDStencil *libvisio::VSDStencils::getStencil(unsigned idx) const
{
std::map<unsigned, VSDStencil>::const_iterator iter = m_stencils.find(idx);
if (iter != m_stencils.end())
return &(iter->second);
else
return 0;
}
const libvisio::VSDShape *libvisio::VSDStencils::getStencilShape(unsigned pageId, unsigned shapeId) const
{
if (MINUS_ONE == pageId)
return 0;
const libvisio::VSDStencil *tmpStencil = getStencil(pageId);
if (!tmpStencil)
return 0;
if (MINUS_ONE == shapeId)
shapeId = tmpStencil->m_firstShapeId;
return tmpStencil->getStencilShape(shapeId);
}
/* vim:set shiftwidth=2 softtabstop=2 expandtab: */
| mpl-2.0 |
bifurcation/boulder | Godeps/_workspace/src/github.com/cloudflare/cfssl/api/generator/generator_test.go | 1702 | package generator
import (
"bytes"
"encoding/json"
"net/http"
"net/http/httptest"
"testing"
"github.com/letsencrypt/boulder/Godeps/_workspace/src/github.com/cloudflare/cfssl/csr"
)
func csrData(t *testing.T) *bytes.Reader {
req := &csr.CertificateRequest{
Names: []csr.Name{
{
C: "US",
ST: "California",
L: "San Francisco",
O: "CloudFlare",
OU: "Systems Engineering",
},
},
CN: "cloudflare.com",
Hosts: []string{"cloudflare.com"},
KeyRequest: &csr.KeyRequest{
Algo: "ecdsa",
Size: 256,
},
}
csrBytes, err := json.Marshal(req)
if err != nil {
t.Fatal(err)
}
return bytes.NewReader(csrBytes)
}
func TestGeneratorRESTfulVerbs(t *testing.T) {
handler, _ := NewHandler(CSRValidate)
ts := httptest.NewServer(handler)
data := csrData(t)
// POST should work.
req, _ := http.NewRequest("POST", ts.URL, data)
resp, _ := http.DefaultClient.Do(req)
if resp.StatusCode != http.StatusOK {
t.Fatal(resp.Status)
}
// Test GET, PUT, DELETE and whatever, expect 400 errors.
req, _ = http.NewRequest("GET", ts.URL, data)
resp, _ = http.DefaultClient.Do(req)
if resp.StatusCode != http.StatusMethodNotAllowed {
t.Fatal(resp.Status)
}
req, _ = http.NewRequest("PUT", ts.URL, data)
resp, _ = http.DefaultClient.Do(req)
if resp.StatusCode != http.StatusMethodNotAllowed {
t.Fatal(resp.Status)
}
req, _ = http.NewRequest("DELETE", ts.URL, data)
resp, _ = http.DefaultClient.Do(req)
if resp.StatusCode != http.StatusMethodNotAllowed {
t.Fatal(resp.Status)
}
req, _ = http.NewRequest("WHATEVER", ts.URL, data)
resp, _ = http.DefaultClient.Do(req)
if resp.StatusCode != http.StatusMethodNotAllowed {
t.Fatal(resp.Status)
}
}
| mpl-2.0 |
nnethercote/servo | components/background_hang_monitor/sampler.rs | 2840 | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use backtrace;
use msg::constellation_msg::{HangProfile, HangProfileSymbol};
use std::ptr;
const MAX_NATIVE_FRAMES: usize = 1024;
pub trait Sampler: Send {
fn suspend_and_sample_thread(&self) -> Result<NativeStack, ()>;
}
#[allow(dead_code)]
pub struct DummySampler;
impl DummySampler {
#[allow(dead_code)]
pub fn new() -> Box<dyn Sampler> {
Box::new(DummySampler)
}
}
impl Sampler for DummySampler {
fn suspend_and_sample_thread(&self) -> Result<NativeStack, ()> {
Err(())
}
}
// Several types in this file are currently not used in a Linux or Windows build.
#[allow(dead_code)]
pub type Address = *const libc::uint8_t;
/// The registers used for stack unwinding
#[allow(dead_code)]
pub struct Registers {
/// Instruction pointer.
pub instruction_ptr: Address,
/// Stack pointer.
pub stack_ptr: Address,
/// Frame pointer.
pub frame_ptr: Address,
}
pub struct NativeStack {
instruction_ptrs: [*mut std::ffi::c_void; MAX_NATIVE_FRAMES],
stack_ptrs: [*mut std::ffi::c_void; MAX_NATIVE_FRAMES],
count: usize,
}
impl NativeStack {
pub fn new() -> Self {
NativeStack {
instruction_ptrs: [ptr::null_mut(); MAX_NATIVE_FRAMES],
stack_ptrs: [ptr::null_mut(); MAX_NATIVE_FRAMES],
count: 0,
}
}
pub fn process_register(
&mut self,
instruction_ptr: *mut std::ffi::c_void,
stack_ptr: *mut std::ffi::c_void,
) -> Result<(), ()> {
if !(self.count < MAX_NATIVE_FRAMES) {
return Err(());
}
self.instruction_ptrs[self.count] = instruction_ptr;
self.stack_ptrs[self.count] = stack_ptr;
self.count = self.count + 1;
Ok(())
}
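// Editorial note (not part of the original source): to_hangprofile walks the captured
// instruction pointers in reverse order, skips unused (null) slots, and symbolicates each
// address with the backtrace crate to build the resulting HangProfile.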
pub fn to_hangprofile(&self) -> HangProfile {
let mut profile = HangProfile {
backtrace: Vec::new(),
};
for ip in self.instruction_ptrs.iter().rev() {
if ip.is_null() {
continue;
}
backtrace::resolve(*ip, |symbol| {
// TODO: use the demangled or C++ demangled symbols if available.
let name = symbol
.name()
.map(|n| String::from_utf8_lossy(&n.as_bytes()).to_string());
let filename = symbol.filename().map(|n| n.to_string_lossy().to_string());
let lineno = symbol.lineno();
profile.backtrace.push(HangProfileSymbol {
name,
filename,
lineno,
});
});
}
profile
}
}
| mpl-2.0 |
SoftwareFactoryUPC/ProjectTemplates | Mobile/Windows Phone/Ejemplos Windows Phone 8.1/App tiles and badges sample/Shared/js/scenario9_imageProtocols.js | 5266 | //// THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
//// ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO
//// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
//// PARTICULAR PURPOSE.
////
//// Copyright (c) Microsoft Corporation. All rights reserved
(function () {
"use strict";
var page = WinJS.UI.Pages.define("/html/scenario9_imageProtocols.html", {
ready: function (element, options) {
document.getElementById("imageProtocolSelector").addEventListener("change", imageProtocolSelector, false);
document.getElementById("openPicker").addEventListener("click", openPicker, false);
document.getElementById("sendTileNotification").addEventListener("click", sendTileNotification, false);
document.getElementById("imageProtocolSelector").selectedIndex = 0;
}
});
function imageProtocolSelector() {
var protocol = document.getElementById("imageProtocolSelector").selectedIndex;
if (protocol === 0) {
document.getElementById("appdataURLDiv").style.display = "none";
document.getElementById("httpURLDiv").style.display = "none";
} else if (protocol === 1) {
document.getElementById("appdataURLDiv").style.display = "block";
document.getElementById("httpURLDiv").style.display = "none";
} else if (protocol === 2) {
document.getElementById("appdataURLDiv").style.display = "none";
document.getElementById("httpURLDiv").style.display = "block";
}
}
var imageRelativePath;
function openPicker() {
var picker = new Windows.Storage.Pickers.FileOpenPicker();
picker.fileTypeFilter.replaceAll([".jpg", ".jpeg", ".png", ".gif"]);
picker.commitButtonText = "Copy";
picker.pickSingleFileAsync().then(function (file) {
return file.copyAsync(Windows.Storage.ApplicationData.current.localFolder, file.name, Windows.Storage.NameCollisionOption.generateUniqueName);
}).done(function (newFile) {
var imageAbsolutePath = newFile.path;
//change image to relative path
imageRelativePath = imageAbsolutePath.substring(imageAbsolutePath.lastIndexOf("\\") + 1);
document.getElementById("notificationXmlContent").innerText = "";
WinJS.log && WinJS.log("Image copied to application data local storage: " + newFile.path, "sample", "status");
}, function (e) {
document.getElementById("notificationXmlContent").innerText = "";
WinJS.log && WinJS.log(e, "sample", "error");
});
}
function sendTileNotification() {
var protocol = document.getElementById("imageProtocolSelector").selectedIndex;
var wide310x150TileContent;
if (protocol === 0) { //using the ms-appx: protocol
wide310x150TileContent = NotificationsExtensions.TileContent.TileContentFactory.createTileWide310x150ImageAndText01();
wide310x150TileContent.textCaptionWrap.text = "The image is in the appx package";
wide310x150TileContent.image.src = "ms-appx:///images/redWide310x150.png";
} else if (protocol === 1) { //using the ms-appdata:/// protocol
wide310x150TileContent = NotificationsExtensions.TileContent.TileContentFactory.createTileWide310x150Image();
wide310x150TileContent.image.src = "ms-appdata:///local/" + imageRelativePath; // make sure you are providing a relative path!
} else if (protocol === 2) { //using http:// protocol
// Important - The Internet (Client) capability must be checked in the manifest in the Capabilities tab
wide310x150TileContent = NotificationsExtensions.TileContent.TileContentFactory.createTileWide310x150PeekImageCollection04();
wide310x150TileContent.textBodyWrap.text = "The baseUri is " + document.getElementById("baseUri").value;
wide310x150TileContent.imageMain.src = document.getElementById("image" + 0).value;
wide310x150TileContent.imageSmallColumn1Row1.src = document.getElementById("image" + 1).value;
wide310x150TileContent.imageSmallColumn1Row2.src = document.getElementById("image" + 2).value;
wide310x150TileContent.imageSmallColumn2Row1.src = document.getElementById("image" + 3).value;
wide310x150TileContent.imageSmallColumn2Row2.src = document.getElementById("image" + 4).value;
// set the baseUri
try {
wide310x150TileContent.baseUri = document.getElementById("baseUri").value;
} catch (e) {
document.getElementById("notificationXmlContent").innerText = "";
WinJS.log && WinJS.log(e.message, "sample", "error");
return;
}
}
wide310x150TileContent.requireSquare150x150Content = false;
Windows.UI.Notifications.TileUpdateManager.createTileUpdaterForApplication().update(wide310x150TileContent.createNotification());
document.getElementById("notificationXmlContent").innerText = wide310x150TileContent.getContent();
WinJS.log && WinJS.log("Tile notification sent", "sample", "status");
}
})(); | mpl-2.0 |
wizi4d/TaskManagerFor1C | scripts/decompile.cmd | 89 | oscript "..\utils\precommit1c\v8files-extractor.os" --decompile "..\tests" "..\src\tests" | mpl-2.0 |
blockstack/blockstack-portal | test/account/AccountApp.test.js | 621 | import React from 'react'
import { expect } from 'chai'
import { shallow } from 'enzyme'
import { AccountApp } from '../../app/js/account/AccountApp'
describe('AccountApp', () => {
let props
let wrapper
before(() => {
props = {
children: {},
storageConnected: true,
location: {
pathname: '/not-account'
}
}
wrapper = shallow(<AccountApp {...props} />)
})
it('renders the NavBar', () => {
expect(wrapper.find('Navbar').length).to.equal(1)
})
it('renders the SecondaryNavBar', () => {
expect(wrapper.find('SecondaryNavBar').length).to.equal(1)
})
})
| mpl-2.0 |
loconomics/loconomics | iCalendarLib/CalendarDll/Data/address.cs | 1479 | //------------------------------------------------------------------------------
// <auto-generated>
// Este código se generó a partir de una plantilla.
//
// Los cambios manuales en este archivo pueden causar un comportamiento inesperado de la aplicación.
// Los cambios manuales en este archivo se sobrescribirán si se regenera el código.
// </auto-generated>
//------------------------------------------------------------------------------
namespace CalendarDll.Data
{
using System;
using System.Collections.Generic;
public partial class address
{
public int AddressID { get; set; }
public int UserID { get; set; }
public int AddressTypeID { get; set; }
public string AddressName { get; set; }
public string AddressLine1 { get; set; }
public string AddressLine2 { get; set; }
public string City { get; set; }
public int StateProvinceID { get; set; }
public int PostalCodeID { get; set; }
public int CountryID { get; set; }
public Nullable<double> Latitude { get; set; }
public Nullable<double> Longitude { get; set; }
public string GoogleMapsURL { get; set; }
public string SpecialInstructions { get; set; }
public System.DateTime CreatedDate { get; set; }
public System.DateTime UpdatedDate { get; set; }
public string ModifiedBy { get; set; }
public Nullable<bool> Active { get; set; }
}
}
| mpl-2.0 |
Aloomaio/vault | ui/app/serializers/transit-key.js | 1606 | import DS from 'ember-data';
import Ember from 'ember';
const { decamelize } = Ember.String;
export default DS.RESTSerializer.extend({
primaryKey: 'name',
keyForAttribute: function(attr) {
return decamelize(attr);
},
normalizeSecrets(payload) {
if (payload.data.keys && Array.isArray(payload.data.keys)) {
const secrets = payload.data.keys.map(secret => ({ name: secret }));
return secrets;
}
Ember.assign(payload, payload.data);
delete payload.data;
return [payload];
},
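// Editorial note (not part of the original source): for a list response such as
// { data: { keys: ['key-one', 'key-two'] } } normalizeSecrets yields
// [{ name: 'key-one' }, { name: 'key-two' }]; for a single-key read, the data fields are
// merged onto the payload and it is returned wrapped in a one-element array.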
normalizeResponse(store, primaryModelClass, payload, id, requestType) {
const nullResponses = ['updateRecord', 'createRecord', 'deleteRecord'];
const secrets = nullResponses.includes(requestType) ? { name: id } : this.normalizeSecrets(payload);
const { modelName } = primaryModelClass;
let transformedPayload = { [modelName]: secrets };
// just return the single object because ember is picky
if (requestType === 'queryRecord') {
transformedPayload = { [modelName]: secrets[0] };
}
return this._super(store, primaryModelClass, transformedPayload, id, requestType);
},
serialize(snapshot, requestType) {
if (requestType === 'update') {
const min_decryption_version = snapshot.attr('minDecryptionVersion');
const min_encryption_version = snapshot.attr('minEncryptionVersion');
const deletion_allowed = snapshot.attr('deletionAllowed');
return {
min_decryption_version,
min_encryption_version,
deletion_allowed,
};
} else {
return this._super(...arguments);
}
},
});
| mpl-2.0 |
jefferai/terraform | helper/schema/resource.go | 5452 | package schema
import (
"errors"
"fmt"
"github.com/hashicorp/terraform/terraform"
)
// Resource represents a thing in Terraform that has a set of configurable
// attributes and a lifecycle (create, read, update, delete).
//
// The Resource schema is an abstraction that allows provider writers to
// worry only about CRUD operations while off-loading validation, diff
// generation, etc. to this higher level library.
type Resource struct {
// Schema is the schema for the configuration of this resource.
//
// The keys of this map are the configuration keys, and the values
// describe the schema of the configuration value.
//
// The schema is used to represent both configurable data as well
// as data that might be computed in the process of creating this
// resource.
Schema map[string]*Schema
// The functions below are the CRUD operations for this resource.
//
// The only optional operation is Update. If Update is not implemented,
// then updates will not be supported for this resource.
//
// The ResourceData parameter in the functions below are used to
// query configuration and changes for the resource as well as to set
// the ID, computed data, etc.
//
// The interface{} parameter is the result of the ConfigureFunc in
// the provider for this resource. If the provider does not define
// a ConfigureFunc, this will be nil. This parameter should be used
// to store API clients, configuration structures, etc.
//
// If any errors occur during any of the operations, an error should be
// returned. If a resource was partially updated, be careful to enable
// partial state mode for ResourceData and use it accordingly.
//
// Exists is a function that is called to check if a resource still
// exists. If this returns false, then this will affect the diff
// accordingly. If this function isn't set, it will not be called. It
// is highly recommended to set it. The *ResourceData passed to Exists
// should _not_ be modified.
Create CreateFunc
Read ReadFunc
Update UpdateFunc
Delete DeleteFunc
Exists ExistsFunc
}
// See Resource documentation.
type CreateFunc func(*ResourceData, interface{}) error
// See Resource documentation.
type ReadFunc func(*ResourceData, interface{}) error
// See Resource documentation.
type UpdateFunc func(*ResourceData, interface{}) error
// See Resource documentation.
type DeleteFunc func(*ResourceData, interface{}) error
// See Resource documentation.
type ExistsFunc func(*ResourceData, interface{}) (bool, error)
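// exampleResource is an editorial sketch (not part of the original source) illustrating how
// a provider typically wires up a Resource with this package; the attribute name, the
// hard-coded ID and the ignored meta parameter are all hypothetical.
func exampleResource() *Resource {
	return &Resource{
		Schema: map[string]*Schema{
			"name": &Schema{
				Type:     TypeString,
				Required: true,
			},
		},
		Create: func(d *ResourceData, meta interface{}) error {
			// meta would normally hold the API client built by the provider's ConfigureFunc.
			d.SetId("example-id")
			return nil
		},
		Read: func(d *ResourceData, meta interface{}) error {
			return nil
		},
		Delete: func(d *ResourceData, meta interface{}) error {
			d.SetId("")
			return nil
		},
	}
}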
// Apply creates, updates, and/or deletes a resource.
func (r *Resource) Apply(
s *terraform.InstanceState,
d *terraform.InstanceDiff,
meta interface{}) (*terraform.InstanceState, error) {
data, err := schemaMap(r.Schema).Data(s, d)
if err != nil {
return s, err
}
if s == nil {
// The Terraform API dictates that this should never happen, but
// it doesn't hurt to be safe in this case.
s = new(terraform.InstanceState)
}
if d.Destroy || d.RequiresNew() {
if s.ID != "" {
// Destroy the resource since it is created
if err := r.Delete(data, meta); err != nil {
return data.State(), err
}
// Make sure the ID is gone.
data.SetId("")
}
// If we're only destroying, and not creating, then return
// now since we're done!
if !d.RequiresNew() {
return nil, nil
}
// Reset the data to be stateless since we just destroyed
data, err = schemaMap(r.Schema).Data(nil, d)
if err != nil {
return nil, err
}
}
err = nil
if data.Id() == "" {
// We're creating, it is a new resource.
err = r.Create(data, meta)
} else {
if r.Update == nil {
return s, fmt.Errorf("doesn't support update")
}
err = r.Update(data, meta)
}
return data.State(), err
}
// Diff returns a diff of this resource and is API compatible with the
// ResourceProvider interface.
func (r *Resource) Diff(
s *terraform.InstanceState,
c *terraform.ResourceConfig) (*terraform.InstanceDiff, error) {
return schemaMap(r.Schema).Diff(s, c)
}
// Validate validates the resource configuration against the schema.
func (r *Resource) Validate(c *terraform.ResourceConfig) ([]string, []error) {
return schemaMap(r.Schema).Validate(c)
}
// Refresh refreshes the state of the resource.
func (r *Resource) Refresh(
s *terraform.InstanceState,
meta interface{}) (*terraform.InstanceState, error) {
if r.Exists != nil {
// Make a copy of data so that if it is modified it doesn't
// affect our Read later.
data, err := schemaMap(r.Schema).Data(s, nil)
if err != nil {
return s, err
}
exists, err := r.Exists(data, meta)
if err != nil {
return s, err
}
if !exists {
return nil, nil
}
}
data, err := schemaMap(r.Schema).Data(s, nil)
if err != nil {
return s, err
}
err = r.Read(data, meta)
state := data.State()
if state != nil && state.ID == "" {
state = nil
}
return state, err
}
// InternalValidate should be called to validate the structure
// of the resource.
//
// This should be called in a unit test for any resource to verify
// before release that a resource is properly configured for use with
// this library.
//
// Provider.InternalValidate() will automatically call this for all of
// the resources it manages, so you don't need to call this manually if it
// is part of a Provider.
func (r *Resource) InternalValidate() error {
if r == nil {
return errors.New("resource is nil")
}
return schemaMap(r.Schema).InternalValidate()
}
| mpl-2.0 |
park-manager/park-manager | Makefile | 3502 | QA_DOCKER_IMAGE=parkmanager/phpqa:latest
QA_DOCKER_COMMAND=docker run --init -t --rm --env "COMPOSER_HOME=/composer" --user "$(shell id -u):$(shell id -g)" --volume /tmp/tmp-phpqa-$(shell id -u):/tmp:delegated --volume "$(shell pwd):/project:delegated" --volume "${HOME}/.composer:/composer:delegated" --workdir /project ${QA_DOCKER_IMAGE}
install: composer-install
ci: install check test
check: composer-validate lint-xml lint-yaml lint-twig cs-check phpstan psalm
test: phpunit
test-coverage: infection
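# Editorial note (not part of the original Makefile): "make ci" chains install, the static
# checks and the PHPUnit run; "make test-coverage" additionally runs infection under phpdbg.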
clean:
rm -rf var/
composer-validate: ensure
@echo "Validating local composer files"
@sh -c "${QA_DOCKER_COMMAND} composer validate"
@sh -c "${QA_DOCKER_COMMAND} composer-normalize --dry-run"
encore:
docker-compose run --rm encore make in-docker-encore
lint-xml:
@echo "Validating XML files"
ifeq (, $(shell which xmllint))
@echo "[SKIPPED] No xmllint in $(PATH), consider installing it"
else
@find . \( -name '*.xml' -or -name '*.xliff' -or -name '*.xlf' \) \
-not -path './vendor/*' \
-not -path './.*' \
-not -path './var/*' \
-type f \
-exec xmllint --format --encode UTF-8 --noout '{}' \;
endif
lint-yaml:
@echo "Validating YAML files"
docker-compose run --rm php php bin/console lint:yaml -vv config/
lint-twig:
@echo "Validating Twig files"
docker-compose run --rm php php bin/console lint:twig -vv templates/
sh -c "${QA_DOCKER_COMMAND} twigcs --severity=error templates/"
composer-install: clean
docker-compose run --rm php composer install
cs: ensure
sh -c "${QA_DOCKER_COMMAND} php-cs-fixer fix -vvv --using-cache=false --diff"
cs-check: ensure
sh -c "${QA_DOCKER_COMMAND} php-cs-fixer fix -vvv --diff --dry-run"
sh -c "docker-compose run --rm php vendor/bin/phpcs"
phpstan: ensure
docker-compose run --user "$(shell id -u):$(shell id -g)" php bin/console cache:clear --env=dev
docker-compose run --user "$(shell id -u):$(shell id -g)" --rm php vendor/bin/phpstan analyse
rector: ensure
sh -c "${QA_DOCKER_COMMAND} rector process /project --config /project/rector.yaml --dry-run"
psalm: ensure
sh -c "${QA_DOCKER_COMMAND} vendor/bin/psalm --show-info=false"
phpunit: encore
docker-compose run --rm php make in-docker-phpunit
infection: clean ensure
docker-compose run --rm php make in-docker-infection
##
# Private targets
##
db-fixtures:
bin/console doctrine:database:drop --force || true
bin/console doctrine:database:create
bin/console doctrine:schema:validate || true
bin/console doctrine:schema:update --force
bin/console doctrine:fixtures:load --no-interaction
in-docker-phpunit:
bin/console cache:clear --env=test
APP_ENV=test make db-fixtures
APP_ENV=test vendor/bin/phpunit --verbose --configuration phpunit.xml.dist --exclude-group ""
in-docker-infection:
bin/console cache:clear --env=test
APP_ENV=test make db-fixtures
phpdbg -qrr vendor/bin/phpunit --verbose --configuration phpunit.xml.dist --exclude-group "" --coverage-text --log-junit=var/junit.xml --coverage-xml var/coverage-xml/
phpdbg -qrr /usr/local/bin/infection run --verbose --show-mutations --no-interaction --only-covered --coverage var/ --min-msi=90 --min-covered-msi=90
in-docker-encore:
yarn install
yarn encore dev
fetch:
docker pull "${QA_DOCKER_IMAGE}"
ensure:
mkdir -p ${HOME}/.composer /tmp/tmp-phpqa-$(shell id -u) var/
.PHONY: clean composer-validate lint-xml lint-yaml lint-twig
.PHONY: composer-install cs cs-check phpstan psalm phpunit infection
.PHONY: db-fixtures in-docker-phpunit in-docker-infection fetch ensure
| mpl-2.0 |
ouyangpeter/WinFormFileSystem_Client | Google.Protobuf/Reflection/EnumDescriptor.cs | 4899 | #region Copyright notice and license
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
using System;
using System.Collections.Generic;
namespace Google.Protobuf.Reflection
{
/// <summary>
/// Descriptor for an enum type in a .proto file.
/// </summary>
public sealed class EnumDescriptor : DescriptorBase
{
private readonly EnumDescriptorProto proto;
private readonly MessageDescriptor containingType;
private readonly IList<EnumValueDescriptor> values;
private readonly Type generatedType;
internal EnumDescriptor(EnumDescriptorProto proto, FileDescriptor file, MessageDescriptor parent, int index, Type generatedType)
: base(file, file.ComputeFullName(parent, proto.Name), index)
{
this.proto = proto;
this.generatedType = generatedType;
containingType = parent;
if (proto.Value.Count == 0)
{
// We cannot allow enums with no values because this would mean there
// would be no valid default value for fields of this type.
throw new DescriptorValidationException(this, "Enums must contain at least one value.");
}
values = DescriptorUtil.ConvertAndMakeReadOnly(proto.Value,
(value, i) => new EnumValueDescriptor(value, file, this, i));
File.DescriptorPool.AddSymbol(this);
}
internal EnumDescriptorProto Proto { get { return proto; } }
/// <summary>
/// The brief name of the descriptor's target.
/// </summary>
public override string Name { get { return proto.Name; } }
/// <summary>
/// The generated type for this enum, or <c>null</c> if the descriptor does not represent a generated type.
/// </summary>
public Type GeneratedType { get { return generatedType; } }
/// <value>
/// If this is a nested type, get the outer descriptor, otherwise null.
/// </value>
public MessageDescriptor ContainingType
{
get { return containingType; }
}
/// <value>
/// An unmodifiable list of defined value descriptors for this enum.
/// </value>
public IList<EnumValueDescriptor> Values
{
get { return values; }
}
/// <summary>
/// Finds an enum value by number. If multiple enum values have the
/// same number, this returns the first defined value with that number.
/// If there is no value for the given number, this returns <c>null</c>.
/// </summary>
public EnumValueDescriptor FindValueByNumber(int number)
{
return File.DescriptorPool.FindEnumValueByNumber(this, number);
}
/// <summary>
/// Finds an enum value by name.
/// </summary>
/// <param name="name">The unqualified name of the value (e.g. "FOO").</param>
/// <returns>The value's descriptor, or null if not found.</returns>
public EnumValueDescriptor FindValueByName(string name)
{
return File.DescriptorPool.FindSymbol<EnumValueDescriptor>(FullName + "." + name);
}
}
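// Editorial usage sketch (not part of the original source): typical lookups on a descriptor
// obtained from a generated message type; the message and value names below are hypothetical.
//
//   EnumDescriptor desc = SomeMessage.Descriptor.EnumTypes[0];
//   EnumValueDescriptor byNumber = desc.FindValueByNumber(0);     // null if no value uses number 0
//   EnumValueDescriptor byName = desc.FindValueByName("FIRST");   // null if "FIRST" is not defined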
}
} | mpl-2.0 |
huridocs/OpenEvSys | 3rd/phpgacl/gacl.ini.php | 644 | ;
; *WARNING*
;
; DO NOT PUT THIS FILE IN YOUR WEBROOT DIRECTORY.
;
; *WARNING*
;
; Anyone can view your database password if you do!
;
debug = FALSE
;
;Database
;
db_type = "mysql"
db_host = "localhost"
db_user = "root"
db_password = ""
db_name = "openevsys"
db_table_prefix = "gacl_"
;
;Caching
;
caching = FALSE
force_cache_expire = TRUE
cache_dir = "/tmp/phpgacl_cache"
cache_expire_time = 600
;
;Admin interface
;
items_per_page = 100
max_select_box_items = 100
max_search_return_items = 200
;NO Trailing slashes
smarty_dir = "smarty/libs"
smarty_template_dir = "templates"
smarty_compile_dir = "templates_c"
| agpl-3.0 |
sandrineBeauche/scheduling | rm/rm-infrastructure/rm-infrastructure-ec2/src/main/java/org/ow2/proactive/resourcemanager/nodesource/infrastructure/EC2Deployer.java | 17047 | package org.ow2.proactive.resourcemanager.nodesource.infrastructure;/*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2011 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: [email protected] or [email protected]
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.objectweb.proactive.core.ProActiveException;
import com.xerox.amazonws.ec2.EC2Exception;
import com.xerox.amazonws.ec2.ImageDescription;
import com.xerox.amazonws.ec2.InstanceType;
import com.xerox.amazonws.ec2.Jec2;
import com.xerox.amazonws.ec2.ReservationDescription;
import com.xerox.amazonws.ec2.ReservationDescription.Instance;
import org.apache.log4j.Logger;
/**
*
* Amazon EC2 Node deployer backend
* <p>
* Contains a Java wrapper for EC2 operations ; Requires proper Amazon credentials
*
*
* @author The ProActive Team
* @since ProActive Scheduling 1.0
*
*/
public class EC2Deployer implements java.io.Serializable {
/** logger */
protected static Logger logger = Logger.getLogger(EC2Deployer.class);
/** Access Key */
private String AWS_AKEY;
/** Secret Key */
private String AWS_SKEY;
/** Amazon username */
private String AWS_USER;
/** KeyPair container */
private String AWS_KPINFO;
/** KeyPair name */
private String keyName;
/** Deployed instances */
private List<String> instanceIds;
/** Activity checker */
private boolean active;
/** Minimum instances to deploy */
private int minInstances;
/** Maximum instances to deploy */
private int maxInstances;
/** Current number of deployed instances */
private int currentInstances;
/** instance type: smaller is cheaper; bigger is faster;
* x86_64 AMIs require extra large, or will fail to deploy */
private InstanceType instanceType;
/**
* Once an image descriptor is retrieved, cache it
*/
private Map<String, ImageDescription> cachedImageDescriptors =
Collections.synchronizedMap(new HashMap<String, ImageDescription>());
/**
* EC2 server URL - the EC2 zone used depends on this url
* Leave null for ec2 default behavior
*/
private String ec2RegionHost = null;
public String getEc2RegionHost() {
return ec2RegionHost;
}
public void setEc2RegionHost(String ec2ServerURL) {
this.ec2RegionHost = ec2ServerURL;
}
/**
* Constructs a new node deployer for Amazon EC2
*/
public EC2Deployer() {
this.instanceIds = new ArrayList<>();
this.active = false;
this.minInstances = 1;
this.maxInstances = 1;
this.instanceType = InstanceType.DEFAULT;
}
/**
* Constructs a new node deployer/killer for Amazon EC2
*
* @param aws_accesskey
* Amazon access key
* @param aws_secretkey
* Amazon secret key
* @param aws_user
* Amazon user name
*/
public EC2Deployer(String aws_accesskey, String aws_secretkey, String aws_user) {
this();
this.resetKeys(aws_accesskey, aws_secretkey, aws_user);
}
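// Editorial usage sketch (not part of the original source): typical life cycle of a deployer;
// the credentials, instance type and AMI id below are placeholders.
//
//   EC2Deployer deployer = new EC2Deployer("ACCESS_KEY", "SECRET_KEY", "user");
//   deployer.setNumInstances(1, 2);
//   deployer.setInstanceType("m1.small");
//   List<Instance> started = deployer.runInstances("ami-12345678", userData);
//   ...
//   deployer.terminateAll();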
/**
* Reset amazon deployment
*
* @param aws_accesskey
* Amazon access key
* @param aws_secretkey
* Amazon secret key
* @param aws_user
* Amazon user name
* @return a Java EC2 Wrapper with the new credentials
*/
public Jec2 resetKeys(String aws_accesskey, String aws_secretkey, String aws_user) {
Jec2 EC2Requester;
this.AWS_AKEY = aws_accesskey;
this.AWS_SKEY = aws_secretkey;
this.AWS_USER = aws_user;
EC2Requester = new Jec2(this.AWS_AKEY, this.AWS_SKEY);
keyName = AWS_USER + "-" + AWS_AKEY.charAt(0) + AWS_SKEY.charAt(0);
try {
// if (terminateAllInstances(true)) {
// EC2Requester.deleteKeyPair(keyName);
this.AWS_KPINFO = EC2Requester.createKeyPair(keyName).getKeyName();
// }
} catch (EC2Exception e) {
// this should happen frequently,
// as keys can't be generated more than once without being deleted,
logger.warn("Can't regen keypair ", e);
}
this.active = true;
return EC2Requester;
}
private Jec2 getEC2Wrapper() {
Jec2 jec2 = resetKeys(this.AWS_AKEY, this.AWS_SKEY, this.AWS_USER);
if (ec2RegionHost != null) {
jec2.setRegionUrl(ec2RegionHost);
}
return jec2;
}
/**
* Retrieves all available images on AmazonS3
*
* @param all
* if true Get all AMI, if false, get only user's AMI
* @return User's or All AMI from Amazon S3
*/
public List<ImageDescription> getAvailableImages(boolean all) {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return null;
List<String> params = new ArrayList<>();
if (!all)
params.add(AWS_USER);
List<ImageDescription> images = null;
try {
images = ec2req.describeImagesByOwner(params);
} catch (EC2Exception e) {
logger.error("Unable to get image description", e);
}
return images;
}
/**
* Retrieves all available images on AmazonS3
*
* @param amiId
* an unique AMI id
* @param all
* if true Get all AMI, if false, get only user's AMI
* @return first AMI from Amazon S3 corresponding to pattern
*/
public ImageDescription getAvailableImages(String amiId, boolean all) {
synchronized (cachedImageDescriptors) {
if (cachedImageDescriptors.containsKey(amiId))
return cachedImageDescriptors.get(amiId);
}
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return null;
List<ImageDescription> imgs = this.getAvailableImages(all);
for (ImageDescription img : imgs) {
if (img.getImageId().equals(amiId)) {
//cache it
cachedImageDescriptors.put(amiId, img);
return img;
}
}
logger.error("Could nod find AMI: " + amiId);
return null;
}
/**
* Gets a set of instances
*
* @return a set of instances
*/
public List<Instance> getInstances() {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return null;
List<String> params = new ArrayList<>();
List<ReservationDescription> res = null;
List<Instance> instances = new ArrayList<>();
try {
res = ec2req.describeInstances(params);
} catch (EC2Exception e) {
logger.error("Unable to get instances list", e);
return null;
}
for (ReservationDescription rdesc : res) {
instances.addAll(rdesc.getInstances());
}
return instances;
}
/**
* Returns the hostname of a running instance
* If the instance is not running, will return an empty string
*
* @param id the unique id of the instance
* @return the hostname of the running instance corresponding to the id,
* or an empty string
*/
public String getInstanceHostname(String id) {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return "";
try {
for (ReservationDescription desc : ec2req.describeInstances(new String[] {})) {
for (Instance inst : desc.getInstances()) {
if (id.equals(inst.getInstanceId())) {
return inst.getDnsName();
}
}
}
} catch (EC2Exception e) {
return "";
}
return "";
}
/**
* Attempts to terminate all instances deployed by this EC2Deployer
*
* @return the number of terminated instances
*/
public int terminateAll() {
Jec2 ec2req = getEC2Wrapper();
int t = 0;
for (String id : this.instanceIds) {
try {
ec2req.terminateInstances(new String[] { id });
logger.debug("Successfully terminated orphan EC2 node: " + id);
t++;
} catch (EC2Exception e) {
logger.error("Cannot terminate instance " + id + " with IP " + this.getInstanceHostname(id) +
". Do it manually.");
}
}
return t;
}
/**
* Launch a new instance with the provided AMI id
*
* @param imageId
* an unique AMI id
* @param userData
* the user data to use for this deployment
* @return the list of instances that were started
* @throws ProActiveException
* acquisition failed
*/
public List<Instance> runInstances(String imageId, String userData) throws ProActiveException {
return this.runInstances(this.minInstances, this.maxInstances, imageId, userData);
}
/**
* Launch a new instance with the provided AMI id
*
* @param minNumber
* minimal number of instances to deploy
* @param maxNumber
* maximal number of instances to deploy
* @param imageId
* an unique AMI id
* @param userData
* the user data to use for this deployment
* @return the list of instances that were started
* @throws ProActiveException
* acquisition failed
*/
public List<Instance> runInstances(int minNumber, int maxNumber, String imageId, String userData)
throws ProActiveException {
ImageDescription imgd = getAvailableImages(imageId, true);
if (imgd == null) {
throw new ProActiveException("Could not find AMI : " + imageId);
}
return this.runInstances(minNumber, maxNumber, imgd, userData);
}
/**
* Launch a new instance with provided AMI
*
* @param min
* minimal number of instances to deploy
* @param max
* maximal number of instances to deploy
* @param imgd
* an image description containing AMI id
* @param userData
* the user data to use for this deployment
* @return the list of instances that were started
* @throws ProActiveException
* acquisition failed
*/
public List<Instance> runInstances(int min, int max, ImageDescription imgd, String userData)
throws ProActiveException {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null) {
throw new ProActiveException();
}
if (this.currentInstances + min > this.maxInstances) {
max = this.maxInstances - this.currentInstances;
}
if (min > max) {
min = max;
}
if (imgd == null) {
imgd = this.getAvailableImages(false).get(0);
}
try {
//Do not force large instance, small works fine on windows. Let the user choose.
if (imgd.getArchitecture().equals("x86_64")) {
if (instanceType != InstanceType.XLARGE && instanceType != InstanceType.XLARGE_HCPU &&
instanceType != InstanceType.LARGE) {
logger.warn("AMI " + imgd.getImageId() + " is " + imgd.getPlatform() + " x86_64 Arch," +
" it might not be compatible with the chosen Instance Type " +
instanceType.getTypeId());
//instanceType = InstanceType.LARGE;
}
}
ReservationDescription rdesc = ec2req.runInstances(imgd.getImageId(), min, max,
new ArrayList<String>(), userData, this.AWS_KPINFO, instanceType);
int number = rdesc.getInstances().size();
for (Instance inst : rdesc.getInstances()) {
this.instanceIds.add(inst.getInstanceId());
}
currentInstances += number;
logger.debug("Created " + number + " instance" + ((number != 1) ? "s" : ""));
return rdesc.getInstances();
} catch (EC2Exception e) {
throw new ProActiveException(e);
}
}
/**
* Terminate a running instance
*
* @param inst the instance to terminate
* @return true upon success, or false
*/
public boolean terminateInstance(Instance inst) {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return false;
try {
ec2req.terminateInstances(new String[] { inst.getInstanceId() });
this.currentInstances--;
return true;
} catch (EC2Exception e) {
logger.error("Failed to terminate instance: " + inst, e);
return false;
}
}
/**
* Try to terminate an instance from EC2 with IP/Host addr
*
* @param addr
* the InetAddress (IP/host) of the node to terminate
*
* @return True on success, false otherwise
*/
public boolean terminateInstanceByAddr(InetAddress addr) {
Jec2 ec2req = getEC2Wrapper();
if (ec2req == null)
return false;
List<Instance> instances = this.getInstances();
for (Instance i : instances) {
try {
InetAddress inetAddr = InetAddress.getByName(i.getDnsName());
if (inetAddr.equals(addr)) {
return terminateInstance(i);
}
} catch (UnknownHostException e1) {
logger.error("Unable to resolve instance Inet Address: " + i.getDnsName(), e1);
}
}
return false;
}
/**
*
* @return the number of instances currently running
*/
public int getCurrentInstances() {
return currentInstances;
}
/**
*
* @return the maximum number of instances to attempt to reserve
*/
public int getMaxInstances() {
return maxInstances;
}
/**
* Sets the number of instances to request
*
* @param min
* Minimum number of instance to attempt to reserve
* @param max
* Maximum number of instance to attempt to reserve
*/
public void setNumInstances(int min, int max) {
this.minInstances = Math.max(min, 1);
this.maxInstances = Math.max(max, minInstances);
}
/**
*
* @return <code>true</code> if this infrastructure is allowed to acquire more nodes
*/
public boolean canGetMoreNodes() {
return (currentInstances < maxInstances);
}
/**
* Sets the instance type
*
* the smaller the cheaper;
* the larger the faster;
* 64bit AMI need to be run on xlarge instances
*
* @param it The type of hardware on which nodes will be deployed
* @throws IllegalArgumentException when the provided String does not match any
* existing instance type
*/
public void setInstanceType(String it) {
this.instanceType = InstanceType.getTypeFromString(it);
if (instanceType == null) {
throw new IllegalArgumentException("Invalid instance type: " + it);
}
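        // Illustrative usage (assumption: InstanceType.getTypeFromString accepts EC2 API type
        // ids such as "m1.small"): setInstanceType("m1.small") would select that profile, while
        // an unrecognised string is rejected by the null check above.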
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return "EC2Deployer :: " + "User[" + this.AWS_USER + "] " + "Status[" +
            ((this.active) ? "active" : "inactive") + "] ";
// "Instances[" + this.getInstances(true).size() + "]";
}
}
| agpl-3.0 |
theclimber/Bilboplanet | admin/manage-permissions.php | 2136 | <?php
/******* BEGIN LICENSE BLOCK *****
* BilboPlanet - An Open Source RSS feed aggregator written in PHP
* Copyright (C) 2010 By French Dev Team : Dev BilboPlanet
* Contact : [email protected]
* Website : www.bilboplanet.com
* Tracker : http://chili.kiwais.com/projects/bilboplanet
* Blog : www.bilboplanet.com
*
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
***** END LICENSE BLOCK *****/
?>
<?php
require_once(dirname(__FILE__).'/../inc/admin/prepend.php');
if ($core->auth->sessionExists()):
if (!$core->auth->superUser()){
__error(T_("Permission denied"),
T_('You are not allowed to see this page.')
.' '.T_('You can delete your session if you logout : ').'<a href="?logout">Logout</a>');
exit;
}
include_once(dirname(__FILE__).'/head.php');
include_once(dirname(__FILE__).'/sidebar.php');
?>
<div id="BP_page" class="page">
<div class="inpage">
<div id="flash-log" style="display:none;">
<div id="flash-msg"><!-- spanner --></div>
</div>
<fieldset><legend><?php echo T_('Manage user permissions');?></legend>
<div class="message">
<p><?php echo T_('Check user statuses and configure their permissions');?></p>
</div>
<div id="users-list"></div>
</fieldset>
<script type="text/javascript" src="meta/js/manage-permissions.js"></script>
<script type="text/javascript" src="meta/js/jquery.boxy.js"></script>
<?php
include(dirname(__FILE__).'/footer.php');
else:
$page_url = urlencode(http::getHost().$_SERVER['REQUEST_URI']);
http::redirect('../auth.php?came_from='.$page_url);
endif;
?>
| agpl-3.0 |
miing/mci_migo | identityprovider/static/yui/3.10.0/uploader/uploader.js | 1747 | /*
YUI 3.10.0 (build a03ce0e)
Copyright 2013 Yahoo! Inc. All rights reserved.
Licensed under the BSD License.
http://yuilibrary.com/license/
*/
YUI.add('uploader', function (Y, NAME) {
/**
* Provides UI for selecting multiple files and functionality for
* uploading multiple files to the server with support for either
* html5 or Flash transport mechanisms, automatic queue management,
* upload progress monitoring, and error events.
* @module uploader
* @main uploader
* @since 3.5.0
*/
/**
* `Y.Uploader` serves as an alias for either <a href="UploaderFlash.html">`Y.UploaderFlash`</a>
* or <a href="UploaderHTML5.html">`Y.UploaderHTML5`</a>, depending on the feature set available
* in a specific browser. If neither HTML5 nor Flash transport layers are available, `Y.Uploader.TYPE`
* static property is set to `"none"`.
*
* @class Uploader
*/
/**
* The static property reflecting the type of uploader that `Y.Uploader`
* aliases. The possible values are:
* <ul>
* <li><strong>`"html5"`</strong>: Y.Uploader is an alias for <a href="UploaderHTML5.html">Y.UploaderHTML5</a></li>
* <li><strong>`"flash"`</strong>: Y.Uploader is an alias for <a href="UploaderFlash.html">Y.UploaderFlash</a></li>
 * <li><strong>`"none"`</strong>: Neither Flash nor HTML5 is available, and Y.Uploader does
* not reference an actual implementation.</li>
* </ul>
*
* @property TYPE
* @type {String}
* @static
*/
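/*
 * Illustrative usage sketch (editorial addition, not part of the YUI source): calling code
 * typically checks the resolved transport before instantiating an uploader, e.g.
 *
 *     if (Y.Uploader.TYPE !== "none") {
 *         var uploader = new Y.Uploader({ width: "250px", height: "35px" });
 *     }
 *
 * The constructor options shown here are assumptions for illustration only.
 */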
var Win = Y.config.win;
if (Win && Win.File && Win.FormData && Win.XMLHttpRequest) {
Y.Uploader = Y.UploaderHTML5;
}
else if (Y.SWFDetect.isFlashVersionAtLeast(10,0,45)) {
Y.Uploader = Y.UploaderFlash;
}
else {
Y.namespace("Uploader");
Y.Uploader.TYPE = "none";
}
}, '3.10.0', {"requires": ["uploader-html5", "uploader-flash"]});
| agpl-3.0 |
rup/Tarrax-1 | app/models/course_column.rb | 1171 | class CourseColumn < ActiveRecord::Base
belongs_to :course
attr_accessible :slug, :name, :content, :position
before_save :save_show
def save_show
self.show = self.content.present?
true
end
named_scope :active, { :conditions => ["show = ?", true] }
acts_as_list :scope => :course
COLUMN_INTRO = "intro"
COLUMN_GUIDE = "guide"
COLUMN_SYLLABUS = "syllabus"
COLUMN_CALENDAR = "calendar"
COLUMN_MATERIALS = "materials"
COLUMN_REQUIRMENTS = "requirment"
COLUMN_FACULTY = "faculty"
COLUMN_PRODUCE_TEAM = "prodteam"
def self.columns_for_college
{
COLUMN_INTRO => t("intro" , 'Introduction') ,
COLUMN_GUIDE => t("guide" , 'Guides') ,
COLUMN_SYLLABUS => t("syllabus" , 'Syllabus') ,
COLUMN_CALENDAR => t("calendar" , 'Calendar') ,
COLUMN_MATERIALS => t("materials" , 'Materials') ,
COLUMN_REQUIRMENTS => t("requirments" , 'Requirments') ,
COLUMN_FACULTY => t("faculty" , 'Faculty') ,
COLUMN_PRODUCE_TEAM => t("produce_team" , 'Produce Team') ,
}.freeze
end
end
| agpl-3.0 |
IMS-MAXIMS/openMAXIMS | Source Library/openmaxims_workspace/ValueObjects/src/ims/therapies/treatment/vo/DeepFrictionMassageRefVoCollection.java | 6037 | //#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.therapies.treatment.vo;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import ims.framework.enumerations.SortOrder;
/**
* Linked to therapies.treatment.DeepFrictionMassage business object (ID: 1019100020).
*/
public class DeepFrictionMassageRefVoCollection extends ims.vo.ValueObjectCollection implements ims.domain.IDomainCollectionGetter, ims.vo.ImsCloneable, Iterable<DeepFrictionMassageRefVo>
{
private static final long serialVersionUID = 1L;
private ArrayList<DeepFrictionMassageRefVo> col = new ArrayList<DeepFrictionMassageRefVo>();
public final String getBoClassName()
{
return "ims.therapies.treatment.domain.objects.DeepFrictionMassage";
}
public ims.domain.IDomainGetter[] getIDomainGetterItems()
{
ims.domain.IDomainGetter[] result = new ims.domain.IDomainGetter[col.size()];
col.toArray(result);
return result;
}
public boolean add(DeepFrictionMassageRefVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
return this.col.add(value);
}
return false;
}
public boolean add(int index, DeepFrictionMassageRefVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
this.col.add(index, value);
return true;
}
return false;
}
public void clear()
{
this.col.clear();
}
public void remove(int index)
{
this.col.remove(index);
}
public int size()
{
return this.col.size();
}
public int indexOf(DeepFrictionMassageRefVo instance)
{
return col.indexOf(instance);
}
public DeepFrictionMassageRefVo get(int index)
{
return this.col.get(index);
}
public boolean set(int index, DeepFrictionMassageRefVo value)
{
if(value == null)
return false;
this.col.set(index, value);
return true;
}
public void remove(DeepFrictionMassageRefVo instance)
{
if(instance != null)
{
int index = indexOf(instance);
if(index >= 0)
remove(index);
}
}
public boolean contains(DeepFrictionMassageRefVo instance)
{
return indexOf(instance) >= 0;
}
public Object clone()
{
DeepFrictionMassageRefVoCollection clone = new DeepFrictionMassageRefVoCollection();
for(int x = 0; x < this.col.size(); x++)
{
if(this.col.get(x) != null)
clone.col.add((DeepFrictionMassageRefVo)this.col.get(x).clone());
else
clone.col.add(null);
}
return clone;
}
public boolean isValidated()
{
return true;
}
public String[] validate()
{
return null;
}
public DeepFrictionMassageRefVo[] toArray()
{
DeepFrictionMassageRefVo[] arr = new DeepFrictionMassageRefVo[col.size()];
col.toArray(arr);
return arr;
}
public DeepFrictionMassageRefVoCollection sort()
{
return sort(SortOrder.ASCENDING);
}
public DeepFrictionMassageRefVoCollection sort(SortOrder order)
{
return sort(new DeepFrictionMassageRefVoComparator(order));
}
@SuppressWarnings("unchecked")
public DeepFrictionMassageRefVoCollection sort(Comparator comparator)
{
Collections.sort(this.col, comparator);
return this;
}
public Iterator<DeepFrictionMassageRefVo> iterator()
{
return col.iterator();
}
@Override
protected ArrayList getTypedCollection()
{
return col;
}
private class DeepFrictionMassageRefVoComparator implements Comparator
{
private int direction = 1;
public DeepFrictionMassageRefVoComparator()
{
this(SortOrder.ASCENDING);
}
public DeepFrictionMassageRefVoComparator(SortOrder order)
{
if (order == SortOrder.DESCENDING)
{
this.direction = -1;
}
}
public int compare(Object obj1, Object obj2)
{
DeepFrictionMassageRefVo voObj1 = (DeepFrictionMassageRefVo)obj1;
DeepFrictionMassageRefVo voObj2 = (DeepFrictionMassageRefVo)obj2;
return direction*(voObj1.compareTo(voObj2));
}
}
}
| agpl-3.0 |
owncloud/announcementcenter | appinfo/routes.php | 1097 | <?php
/**
* @author Joas Schilling <[email protected]>
*
* @copyright Copyright (c) 2016, Joas Schilling <[email protected]>
* @license AGPL-3.0
*
* This code is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License, version 3,
* along with this program. If not, see <http://www.gnu.org/licenses/>
*
*/
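// Editorial note (assumption based on the ownCloud AppFramework routing convention, not stated
// in this file): a route name such as 'page#get' resolves to the get() method of PageController,
// so "GET /announcement" would be handled by PageController::get().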
return [
'routes' => [
['name' => 'page#index', 'url' => '/', 'verb' => 'GET'],
['name' => 'page#get', 'url' => '/announcement', 'verb' => 'GET'],
['name' => 'page#add', 'url' => '/announcement', 'verb' => 'POST'],
['name' => 'page#delete', 'url' => '/announcement/{id}', 'verb' => 'DELETE'],
]
];
| agpl-3.0 |
DrXyzzy/cocalc | src/packages/next/components/landing/r-libraries.tsx | 509 | import SoftwareLibraries, { renderName } from "./software-libraries";
const COLUMNS = [
{
width: "60%",
title: "Library",
key: "library",
dataIndex: "name",
render: renderName,
},
{
width: "20%",
title: "R (systemwide)",
key: "r",
dataIndex: "r",
},
{
width: "20%",
title: "SageMath R",
key: "sage_r",
dataIndex: "sage_r",
},
];
export default function RLibraries() {
return <SoftwareLibraries prog="R" maxWidth={15} columns={COLUMNS} />;
}
| agpl-3.0 |
xionghuiCoder/db4o | src/main/java/com/db4o/nativequery/expr/AndExpression.java | 1002 | /* This file is part of the db4o object database http://www.db4o.com
Copyright (C) 2004 - 2011 Versant Corporation http://www.versant.com
db4o is free software; you can redistribute it and/or modify it under
the terms of version 3 of the GNU General Public License as published
by the Free Software Foundation.
db4o is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/. */
package com.db4o.nativequery.expr;
public class AndExpression extends BinaryExpression {
public AndExpression(Expression left, Expression right) {
super(left, right);
}
public String toString() {
return "("+_left+")&&("+_right+")";
}
public void accept(ExpressionVisitor visitor) {
visitor.visit(this);
}
}
| agpl-3.0 |
open-synergy/contract | contract_payment_mode/__init__.py | 201 | # -*- coding: utf-8 -*-
# © 2016 Antiun Ingenieria S.L. - Antonio Espinosa
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import models
from .hooks import post_init_hook
| agpl-3.0 |
DrXyzzy/cocalc | src/packages/next/components/licenses/editable-license.tsx | 2354 | import { Alert, Input } from "antd";
import { useEffect, useState } from "react";
import apiPost from "lib/api/post";
import { capitalize } from "@cocalc/util/misc";
export function EditableTitle({
license_id,
title,
onChange,
}: {
license_id: string;
title: string;
onChange?: () => void;
}) {
return (
<EditableTextField
license_id={license_id}
field="title"
value={title}
onChange={onChange}
/>
);
}
export function EditableDescription({
license_id,
description,
onChange,
}: {
license_id: string;
description: string;
onChange?: () => void;
}) {
return (
<EditableTextField
license_id={license_id}
field="description"
value={description}
rows={3}
onChange={onChange}
/>
);
}
function EditableTextField({
license_id,
field,
value,
rows,
onChange,
}: {
license_id: string;
field: "title" | "description";
value?: string;
rows?: number;
onChange?: () => void;
}) {
const [edit, setEdit] = useState<boolean>(false);
const [value2, setValue] = useState<string>(value ?? "");
const [error, setError] = useState<string>("");
useEffect(() => {
setValue(value ?? "");
setEdit(false);
setError("");
}, [value]);
async function save(value: string): Promise<void> {
setEdit(false);
setError("");
const query = { manager_site_licenses: { id: license_id, [field]: value } };
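    // For example (illustration only): saving a new title for a license with id "abc123" posts
    // { query: { manager_site_licenses: { id: "abc123", title: "New title" } } } to /user-query.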
try {
await apiPost("/user-query", { query });
onChange?.();
} catch (err) {
setError(err.message);
}
}
return (
<div style={{ cursor: "pointer" }} onClick={() => setEdit(true)}>
{error && (
<Alert type="error" message={`Error saving ${field} - ${error}`} />
)}
{capitalize(field)}:{" "}
{edit &&
(rows ? (
<Input.TextArea
autoFocus
value={value2}
onChange={(e) => setValue(e.target.value)}
onBlur={() => save(value2)}
rows={rows}
/>
) : (
<Input
autoFocus
value={value2}
onChange={(e) => setValue(e.target.value)}
onBlur={() => save(value2)}
onPressEnter={() => save(value2)}
/>
))}
{!edit && <>{value2.trim() ? value2 : `(set ${field}...)`}</>}
</div>
);
}
| agpl-3.0 |
topiacloud/topia-online | src/plugins/core/data/option.js | 349 | // An option represents a single configurable game setting, stored as a name/value pair.
define(["data"], function (data) {
var Option = function () {
// The name of the option setting
this.name = "";
// The value of the option setting
this.value = false;
};
return data.define("option", Option);
}); | agpl-3.0 |
shashi792/courtlistener | alert/corpus_importer/resource_org/import_f2.py | 23715 | #!/usr/bin/env python
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'alert.settings'
import sys
# append these to the path to make the dev machines and the server happy (respectively)
execfile('/etc/courtlistener')
sys.path.append(INSTALL_ROOT)
from django import db
from django.core.exceptions import MultipleObjectsReturned
from django.utils.text import slugify
from alert.search.models import Court, Document
from alert.lib.parse_dates import parse_dates
from juriscraper.lib.string_utils import trunc
from alert.lib.scrape_tools import hasDuplicate
from lxml.html import fromstring, tostring
from urlparse import urljoin
import datetime
import re
import subprocess
import time
import urllib2
def load_fix_files():
"""Loads the fix files into memory so they can be accessed efficiently."""
court_fix_file = open('../logs/f2_court_fix_file.txt', 'r')
date_fix_file = open('../logs/f2_date_fix_file.txt', 'r')
case_name_short_fix_file = open('../logs/f2_short_case_name_fix_file.txt', 'r')
court_fix_dict = {}
date_fix_dict = {}
case_name_short_dict = {}
for line in court_fix_file:
key, value = line.split('|')
court_fix_dict[key] = value
for line in date_fix_file:
key, value = line.split('|')
date_fix_dict[key] = value
for line in case_name_short_fix_file:
key, value = line.split('|')
case_name_short_dict[key] = value
court_fix_file.close()
date_fix_file.close()
case_name_short_fix_file.close()
return court_fix_dict, date_fix_dict, case_name_short_dict
def check_fix_list(sha1, fix_dict):
""" Given a sha1, return the correction for a case. Return false if no values.
Corrections are strings that the parser can interpret as needed. Items are
written to this file the first time the cases are imported, and this file
can be used to import F2 into later systems.
"""
try:
return fix_dict[sha1].strip()
except KeyError:
return False
def exceptional_cleaner(caseName):
"""Cleans common Resource.org special cases off of case names, and
sets the precedential_status for a document.
Returns caseName, precedential_status
"""
caseName = caseName.lower()
ca1regex = re.compile('(unpublished disposition )?notice: first circuit local rule 36.2\(b\)6 states unpublished opinions may be cited only in related cases.?')
ca2regex = re.compile('(unpublished disposition )?notice: second circuit local rule 0.23 states unreported opinions shall not be cited or otherwise used in unrelated cases.?')
ca3regex = re.compile('(unpublished disposition )?notice: third circuit rule 21\(i\) states citations to federal decisions which have not been formally reported should identify the court, docket number and date.?')
ca4regex = re.compile('(unpublished disposition )?notice: fourth circuit (local rule 36\(c\)|i.o.p. 36.6) states that citation of unpublished dispositions is disfavored except for establishing res judicata, estoppel, or the law of the case and requires service of copies of cited unpublished dispositions of the fourth circuit.?')
ca5regex = re.compile('(unpublished disposition )?notice: fifth circuit local rule 47.5.3 states that unpublished opinions should normally be cited only when they establish the law of the case, are relied upon as a basis for res judicata or collateral estoppel, or involve related facts. if an unpublished opinion is cited, a copy shall be attached to each copy of the brief.?')
ca6regex = re.compile('(unpublished disposition )?notice: sixth circuit rule 24\(c\) states that citation of unpublished dispositions is disfavored except for establishing res judicata, estoppel, or the law of the case and requires service of copies of cited unpublished dispositions of the sixth circuit.?')
ca7regex = re.compile('(unpublished disposition )?notice: seventh circuit rule 53\(b\)\(2\) states unpublished orders shall not be cited or used as precedent except to support a claim of res judicata, collateral estoppel or law of the case in any federal court within the circuit.?')
ca8regex = re.compile('(unpublished disposition )?notice: eighth circuit rule 28a\(k\) governs citation of unpublished opinions and provides that (no party may cite an opinion not intended for publication unless the cases are related by identity between the parties or the causes of action|they are not precedent and generally should not be cited unless relevant to establishing the doctrines of res judicata, collateral estoppel, the law of the case, or if the opinion has persuasive value on a material issue and no published opinion would serve as well).?')
ca9regex = re.compile('(unpublished disposition )?notice: ninth circuit rule 36-3 provides that dispositions other than opinions or orders designated for publication are not precedential and should not be cited except when relevant under the doctrines of law of the case, res judicata, or collateral estoppel.?')
ca10regex = re.compile('(unpublished disposition )?notice: tenth circuit rule 36.3 states that unpublished opinions and orders and judgments have no precedential value and shall not be cited except for purposes of establishing the doctrines of the law of the case, res judicata, or collateral estoppel.?')
cadcregex = re.compile('(unpublished disposition )?notice: d.c. circuit local rule 11\(c\) states that unpublished orders, judgments, and explanatory memoranda may not be cited as precedents, but counsel may refer to unpublished dispositions when the binding or preclusive effect of the disposition, rather than its quality as precedent, is relevant.?')
cafcregex = re.compile('(unpublished disposition )?notice: federal circuit local rule 47.(6|8)\(b\) states that opinions and orders which are designated as not citable as precedent shall not be employed or cited as precedent. this does not preclude assertion of issues of claim preclusion, issue preclusion, judicial estoppel, law of the case or the like based on a decision of the court rendered in a nonprecedential opinion or order.?')
# Clean off special cases
if 'first circuit' in caseName:
caseName = re.sub(ca1regex, '', caseName)
precedential_status = 'Unpublished'
elif 'second circuit' in caseName:
caseName = re.sub(ca2regex, '', caseName)
precedential_status = 'Unpublished'
elif 'third circuit' in caseName:
caseName = re.sub(ca3regex, '', caseName)
precedential_status = 'Unpublished'
elif 'fourth circuit' in caseName:
caseName = re.sub(ca4regex, '', caseName)
precedential_status = 'Unpublished'
elif 'fifth circuit' in caseName:
caseName = re.sub(ca5regex, '', caseName)
precedential_status = 'Unpublished'
elif 'sixth circuit' in caseName:
caseName = re.sub(ca6regex, '', caseName)
precedential_status = 'Unpublished'
elif 'seventh circuit' in caseName:
caseName = re.sub(ca7regex, '', caseName)
precedential_status = 'Unpublished'
elif 'eighth circuit' in caseName:
caseName = re.sub(ca8regex, '', caseName)
precedential_status = 'Unpublished'
elif 'ninth circuit' in caseName:
caseName = re.sub(ca9regex, '', caseName)
precedential_status = 'Unpublished'
elif 'tenth circuit' in caseName:
caseName = re.sub(ca10regex, '', caseName)
precedential_status = 'Unpublished'
elif 'd.c. circuit' in caseName:
caseName = re.sub(cadcregex, '', caseName)
precedential_status = 'Unpublished'
elif 'federal circuit' in caseName:
caseName = re.sub(cafcregex, '', caseName)
precedential_status = 'Unpublished'
else:
precedential_status = 'Published'
return caseName, precedential_status
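    # Worked example (hypothetical input): a case name carrying the full Fifth Circuit local rule
    # 47.5.3 notice comes back with that boilerplate stripped and a status of 'Unpublished', while
    # a name matching none of the circuit patterns is simply lower-cased and returned with a
    # status of 'Published'.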
def scrape_and_parse():
"""Traverses the bulk data from public.resource.org, and puts them in the
DB.
Probably lots of ways to go about this, but I think the easiest will be the following:
- look at the index page of all volumes, and follow all the links it has.
- for each volume, look at its index page, and follow the link to all cases
- for each case, collect information wisely.
- put it all in the DB
"""
# begin by loading up the fix files into memory
court_fix_dict, date_fix_dict, case_name_short_dict = load_fix_files()
results = []
DEBUG = 4
# Set to False to disable automatic browser usage. Else, set to the
# command you want to run, e.g. 'firefox'
BROWSER = False
court_fix_file = open('../logs/f2_court_fix_file.txt', 'a')
date_fix_file = open('../logs/f2_date_fix_file.txt', 'a')
case_name_short_fix_file = open('../logs/f2_short_case_name_fix_file.txt', 'a')
vol_file = open('../logs/vol_file.txt', 'r+')
case_file = open('../logs/case_file.txt', 'r+')
url = "file://%s/Resource.org/F2/index.html" % INSTALL_ROOT
openedURL = urllib2.urlopen(url)
content = openedURL.read()
openedURL.close()
tree = fromstring(content)
volumeLinks = tree.xpath('//table/tbody/tr/td[1]/a')
try:
i = int(vol_file.readline())
except ValueError:
        # the volume file is empty or otherwise failing.
i = 0
vol_file.close()
if DEBUG >= 1:
print "Number of remaining volumes is: %d" % (len(volumeLinks) - i)
# used later, needs a default value.
saved_caseDate = None
saved_court = None
while i < len(volumeLinks):
# we iterate over every case in the volume
volumeURL = volumeLinks[i].text + "/index.html"
volumeURL = urljoin(url, volumeURL)
if DEBUG >= 1:
print "Current volumeURL is: %s" % volumeURL
openedVolumeURL = urllib2.urlopen(volumeURL)
content = openedVolumeURL.read()
volumeTree = fromstring(content)
openedVolumeURL.close()
caseLinks = volumeTree.xpath('//table/tbody/tr/td[1]/a')
caseDates = volumeTree.xpath('//table/tbody/tr/td[2]')
sha1Hashes = volumeTree.xpath('//table/tbody/tr/td[3]/a')
# The following loads a serialized placeholder from disk.
try:
j = int(case_file.readline())
except ValueError:
j = 0
case_file.close()
while j < len(caseLinks):
# iterate over each case, throwing it in the DB
if DEBUG >= 1:
print ''
# like the scraper, we begin with the caseLink field (relative for
# now, not absolute)
caseLink = caseLinks[j].get('href')
# sha1 is easy
sha1Hash = sha1Hashes[j].text
if DEBUG >= 4:
print "SHA1 is: %s" % sha1Hash
# using the caselink from above, and the volumeURL, we can get the
# html
absCaseLink = urljoin(volumeURL, caseLink)
html = urllib2.urlopen(absCaseLink).read()
htmlTree = fromstring(html)
bodyContents = htmlTree.xpath('//body/*[not(@id="footer")]')
body = ""
bodyText = ""
for element in bodyContents:
body += tostring(element)
try:
bodyText += tostring(element, method='text')
except UnicodeEncodeError:
# Happens with odd characters. Simply pass this iteration.
pass
if DEBUG >= 5:
print body
print bodyText
# need to figure out the court ID
try:
courtPs = htmlTree.xpath('//p[@class = "court"]')
# Often the court ends up in the parties field.
partiesPs = htmlTree.xpath("//p[@class= 'parties']")
court = ""
for courtP in courtPs:
court += tostring(courtP).lower()
for party in partiesPs:
court += tostring(party).lower()
except IndexError:
court = check_fix_list(sha1Hash, court_fix_dict)
if not court:
print absCaseLink
if BROWSER:
subprocess.Popen([BROWSER, absCaseLink], shell=False).communicate()
court = raw_input("Please input court name (e.g. \"First Circuit of Appeals\"): ").lower()
court_fix_file.write("%s|%s\n" % (sha1Hash, court))
if ('first' in court) or ('ca1' == court):
court = 'ca1'
elif ('second' in court) or ('ca2' == court):
court = 'ca2'
elif ('third' in court) or ('ca3' == court):
court = 'ca3'
elif ('fourth' in court) or ('ca4' == court):
court = 'ca4'
elif ('fifth' in court) or ('ca5' == court):
court = 'ca5'
elif ('sixth' in court) or ('ca6' == court):
court = 'ca6'
elif ('seventh' in court) or ('ca7' == court):
court = 'ca7'
elif ('eighth' in court) or ('ca8' == court):
court = 'ca8'
elif ('ninth' in court) or ('ca9' == court):
court = 'ca9'
elif ("tenth" in court) or ('ca10' == court):
court = 'ca10'
elif ("eleventh" in court) or ('ca11' == court):
court = 'ca11'
elif ('columbia' in court) or ('cadc' == court):
court = 'cadc'
elif ('federal' in court) or ('cafc' == court):
court = 'cafc'
elif ('patent' in court) or ('ccpa' == court):
court = 'ccpa'
elif (('emergency' in court) and ('temporary' not in court)) or ('eca' == court):
court = 'eca'
elif ('claims' in court) or ('uscfc' == court):
court = 'uscfc'
else:
# No luck extracting the court name. Try the fix file.
court = check_fix_list(sha1Hash, court_fix_dict)
if not court:
# Not yet in the fix file. Check if it's a crazy ca5 case
court = ''
ca5courtPs = htmlTree.xpath('//p[@class = "center"]')
for ca5courtP in ca5courtPs:
court += tostring(ca5courtP).lower()
if 'fifth circuit' in court:
court = 'ca5'
else:
court = False
if not court:
# Still no luck. Ask for input, then append it to
# the fix file.
print absCaseLink
if BROWSER:
subprocess.Popen([BROWSER, absCaseLink], shell=False).communicate()
court = raw_input("Unknown court. Input the court code to proceed successfully [%s]: " % saved_court)
court = court or saved_court
court_fix_file.write("%s|%s\n" % (sha1Hash, court))
saved_court = court
court = Court.objects.get(pk=court)
if DEBUG >= 4:
print "Court is: %s" % court
# next: west_cite, docket_number and caseName. Full casename is gotten later.
west_cite = caseLinks[j].text
docket_number = absCaseLink.split('.')[-2]
caseName = caseLinks[j].get('title')
caseName, precedential_status = exceptional_cleaner(caseName)
cite, new = hasDuplicate(caseName, west_cite, docket_number)
if cite.caseNameShort == '':
# No luck getting the case name
savedCaseNameShort = check_fix_list(sha1Hash, case_name_short_dict)
if not savedCaseNameShort:
print absCaseLink
if BROWSER:
subprocess.Popen([BROWSER, absCaseLink], shell=False).communicate()
caseName = raw_input("Short casename: ")
cite.caseNameShort = trunc(caseName, 100)
cite.caseNameFull = caseName
case_name_short_fix_file.write("%s|%s\n" % (sha1Hash, caseName))
else:
# We got both the values from the save files. Use 'em.
cite.caseNameShort = trunc(savedCaseNameShort, 100)
cite.caseNameFull = savedCaseNameShort
# The slug needs to be done here, b/c it is only done automatically
# the first time the citation is saved, and this will be
# at least the second.
cite.slug = trunc(slugify(cite.caseNameShort), 50)
cite.save()
if DEBUG >= 4:
print "precedential_status: " + precedential_status
print "west_cite: " + cite.west_cite
print "docket_number: " + cite.docket_number
print "caseName: " + cite.caseNameFull
# date is kinda tricky...details here:
# http://pleac.sourceforge.net/pleac_python/datesandtimes.html
rawDate = caseDates[j].find('a')
try:
if rawDate is not None:
# Special cases
if sha1Hash == 'f0da421f117ef16223d7e61d1e4e5526036776e6':
date_text = 'August 28, 1980'
elif sha1Hash == '8cc192eaacd1c544b5e8ffbd751d9be84c311932':
date_text = 'August 16, 1985'
elif sha1Hash == 'd19bce155f72a9f981a12efabd760a35e1e7dbe7':
date_text = 'October 12, 1979'
elif sha1Hash == '9f7583cf0d46ddc9cad4e7943dd775f9e9ea99ff':
date_text = 'July 30, 1980'
elif sha1Hash == '211ea81a4ab4132483c483698d2a40f4366f5640':
date_text = 'November 3, 1981'
elif sha1Hash == 'eefb344034461e9c6912689677a32cd18381d5c2':
date_text = 'July 28, 1983'
else:
date_text = rawDate.text
try:
caseDate = datetime.datetime(*time.strptime(date_text, "%B, %Y")[0:5])
                    except (ValueError, TypeError):
caseDate = datetime.datetime(*time.strptime(date_text, "%B %d, %Y")[0:5])
else:
# No value was found. Throw an exception.
raise ValueError
except:
# No date provided.
try:
# Try to get it from the saved list
caseDate = datetime.datetime(*time.strptime(check_fix_list(sha1Hash, date_fix_dict), "%B %d, %Y")[0:5])
except:
caseDate = False
if not caseDate:
# Parse out the dates with debug set to false.
try:
dates = parse_dates(bodyText, False)
except OverflowError:
# Happens when we try to make a date from a very large number
dates = []
try:
first_date_found = dates[0]
except IndexError:
# No dates found.
first_date_found = False
if first_date_found == saved_caseDate:
# High likelihood of date being correct. Use it.
caseDate = saved_caseDate
else:
print absCaseLink
if BROWSER:
subprocess.Popen([BROWSER, absCaseLink], shell=False).communicate()
print "Unknown date. Possible options are:"
try:
print " 1) %s" % saved_caseDate.strftime("%B %d, %Y")
except AttributeError:
# Happens on first iteration when saved_caseDate has no strftime attribute.
try:
saved_caseDate = dates[0]
print " 1) %s" % saved_caseDate.strftime(
"%B %d, %Y")
except IndexError:
# Happens when dates has no values.
print " No options available."
for k, date in enumerate(dates[0:4]):
if date.year >= 1900:
# strftime can't handle dates before 1900.
print " %s) %s" % (k + 2,
date.strftime("%B %d, %Y"))
choice = raw_input("Enter the date or an option to proceed [1]: ")
choice = choice or 1
if str(choice) == '1':
# The user chose the default. Use the saved value from the last case
caseDate = saved_caseDate
elif choice in ['2', '3', '4', '5']:
# The user chose an option between 2 and 5. Use it.
caseDate = dates[int(choice) - 2]
else:
# The user typed a new date. Use it.
caseDate = datetime.datetime(*time.strptime(choice, "%B %d, %Y")[0:5])
date_fix_file.write("%s|%s\n" % (sha1Hash, caseDate.strftime("%B %d, %Y")))
# Used during the next iteration as the default value
saved_caseDate = caseDate
if DEBUG >= 3:
print "caseDate is: %s" % caseDate
try:
doc, created = Document.objects.get_or_create(
sha1=sha1Hash, court=court)
except MultipleObjectsReturned:
# this shouldn't happen now that we're using SHA1 as the dup
# check, but the old data is problematic, so we must catch this.
created = False
if created:
# we only do this if it's new
doc.html = body
doc.sha1 = sha1Hash
doc.download_url = "http://bulk.resource.org/courts.gov/c/F2/"\
+ str(i + 178) + "/" + caseLink
doc.date_filed = caseDate
doc.source = "R"
doc.precedential_status = precedential_status
doc.citation = cite
doc.save()
if not created:
# something is afoot. Throw a big error.
print "Duplicate found at volume " + str(i + 1) + \
" and row " + str(j + 1) + "!!!!"
print "Found document %s in the database with doc id of %d!" % (doc, doc.pk)
exit(1)
# save our location within the volume.
j += 1
case_file = open('../logs/case_file.txt', 'w')
case_file.write(str(j))
case_file.close()
# save the last volume completed.
i += 1
vol_file = open('../logs/vol_file.txt', 'w')
vol_file.write(str(i))
vol_file.close()
        # Clear the query cache, as it otherwise causes a memory leak
db.reset_queries()
return 0
def main():
print scrape_and_parse()
print "Completed all volumes successfully. Exiting."
exit(0)
if __name__ == '__main__':
main()
| agpl-3.0 |
admazzola/javatari-deeplearn | javatari/src/org/javatari/general/m6502/instructions/BIT.java | 1049 | // Copyright 2011-2012 Paulo Augusto Peccin. See licence.txt distributed with this file.
package org.javatari.general.m6502.instructions;
import org.javatari.general.m6502.Instruction;
import org.javatari.general.m6502.M6502;
import org.javatari.general.m6502.OperandType;
public final class BIT extends Instruction {
public BIT(M6502 cpu, int type) {
super(cpu);
this.type = type;
}
@Override
public int fetch() {
if (type == OperandType.Z_PAGE) { ea = cpu.fetchZeroPageAddress(); return 3; }
if (type == OperandType.ABS) { ea = cpu.fetchAbsoluteAddress(); return 4; }
throw new IllegalStateException("BIT Invalid Operand Type: " + type);
}
@Override
public void execute() {
final byte val = cpu.bus.readByte(ea);
cpu.ZERO = (val & cpu.A) == 0;
cpu.OVERFLOW = (val & 0x40) != 0; // value of bit 6 from memory
cpu.NEGATIVE = (val & 0x80) != 0; // value of bit 7 from memory
}
private final int type;
private int ea;
public static final long serialVersionUID = 1L;
}
| agpl-3.0 |
IMS-MAXIMS/openMAXIMS | Source Library/openmaxims_workspace/ClinicalAdmin/src/ims/clinicaladmin/forms/dailypatternandshifts/Handlers.java | 6150 | //#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.clinicaladmin.forms.dailypatternandshifts;
import ims.framework.delegates.*;
abstract public class Handlers implements ims.framework.UILogic, IFormUILogicCode, ims.framework.interfaces.IClearInfo
{
abstract protected void onFormModeChanged();
abstract protected void onFormOpen() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onBtnSaveClick() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onBtnCancelClick() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onBtnUpdateClick() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onBtnNewClick() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onCmbNumberOfShiftsValueChanged() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onChkUsePeriodsValueChanged() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onTimStartValueChanged() throws ims.framework.exceptions.PresentationLogicException;
abstract protected void onGrdTypeSelectionChanged() throws ims.framework.exceptions.PresentationLogicException;
public final void setContext(ims.framework.UIEngine engine, GenForm form)
{
this.engine = engine;
this.form = form;
this.form.setFormModeChangedEvent(new FormModeChanged()
{
private static final long serialVersionUID = 1L;
public void handle()
{
onFormModeChanged();
}
});
this.form.setFormOpenEvent(new FormOpen()
{
private static final long serialVersionUID = 1L;
public void handle(Object[] args) throws ims.framework.exceptions.PresentationLogicException
{
onFormOpen();
}
});
this.form.btnSave().setClickEvent(new Click()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onBtnSaveClick();
}
});
this.form.btnCancel().setClickEvent(new Click()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onBtnCancelClick();
}
});
this.form.btnUpdate().setClickEvent(new Click()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onBtnUpdateClick();
}
});
this.form.btnNew().setClickEvent(new Click()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onBtnNewClick();
}
});
this.form.cmbNumberOfShifts().setValueChangedEvent(new ValueChanged()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onCmbNumberOfShiftsValueChanged();
}
});
this.form.chkUsePeriods().setValueChangedEvent(new ValueChanged()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onChkUsePeriodsValueChanged();
}
});
this.form.timStart().setValueChangedEvent(new ValueChanged()
{
private static final long serialVersionUID = 1L;
public void handle() throws ims.framework.exceptions.PresentationLogicException
{
onTimStartValueChanged();
}
});
this.form.grdType().setSelectionChangedEvent(new GridSelectionChanged()
{
private static final long serialVersionUID = 1L;
public void handle(ims.framework.enumerations.MouseButton mouseButton) throws ims.framework.exceptions.PresentationLogicException
{
onGrdTypeSelectionChanged();
}
});
}
public void free()
{
this.engine = null;
this.form = null;
}
public abstract void clearContextInformation();
protected ims.framework.UIEngine engine;
protected GenForm form;
}
| agpl-3.0 |
REGOVAR/Annso | annso/core/annso/filter_manager.py | 34605 | #!env/python3
# coding: utf-8
import ipdb
import os
import json
import datetime
import uuid
import psycopg2
import hashlib
import asyncio
import ped_parser
from config import *
from core.framework.common import *
from core.model import *
# =====================================================================================================================
# FILTER ENGINE
# =====================================================================================================================
class FilterEngine:
op_map = {'AND': ' AND ', 'OR': ' OR ', '==': '=', '!=': '<>', '>': '>', '<': '<', '>=': '>=', '<=': '<=', '~': ' LIKE ', '!~': ' NOT LIKE ',
              # As a left join will be done on chr+pos or chr+pos+ref+alt according to the type of the set operation (by site or by variant),
              # we just need to test whether one of the "joined" fields is set or not
'IN': '{0}.chr is not null',
'NOTIN': '{0}.chr is null'}
sql_type_map = {'int': 'integer', 'string': 'text', 'float': 'real', 'percent': 'real', 'enum': 'integer', 'range': 'int8range', 'bool': 'boolean',
'list_i': 'text', 'list_s': 'text', 'list_f': 'text', 'list_i': 'text', 'list_pb': 'text'}
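    # Illustrative note: op_map translates filter operators into their SQL counterparts (e.g.
    # op_map['!='] renders as '<>'), while sql_type_map gives the SQL column type used in the
    # working table for each annotation field type (e.g. 'enum' fields are stored as integer columns).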
def __init__(self):
run_until_complete(self.load_annotation_metadata())
async def load_annotation_metadata(self):
"""
        Init the Annso filtering engine.
        Init the mapping collections for annotation databases and fields.
"""
refname = 'hg19' # execute("SELECT table_suffix FROM reference WHERE id="+str(reference)).first()["table_suffix"]
self.reference = 2
self.fields_map = {}
self.db_map = {}
self.variant_table = "sample_variant_{0}".format(refname)
query = "SELECT d.uid AS duid, d.name AS dname, d.name_ui AS dname_ui, d.jointure, d.reference_id, d.type AS dtype, d.db_pk_field_uid, a.uid AS fuid, a.name AS fname, a.type, a.wt_default FROM annotation_field a LEFT JOIN annotation_database d ON a.database_uid=d.uid"
result = await execute_aio(query)
for row in result:
if row.duid not in self.db_map:
self.db_map[row.duid] = {"name": row.dname, "join": row.jointure, "fields": {}, "reference_id": row.reference_id, "type": row.dtype, "db_pk_field_uid" : row.db_pk_field_uid}
self.db_map[row.duid]["fields"][row.fuid] = {"name": row.fname, "type": row.type}
self.fields_map[row.fuid] = {"name": row.fname, "type": row.type, "db_uid": row.duid, "db_name_ui": row.dname_ui, "db_name": row.dname, "db_type": row.dtype, "join": row.jointure, "wt_default": row.wt_default}
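        # After this loop, self.db_map groups annotation fields per database and self.fields_map
        # maps each field uid to a dict such as {"name": ..., "type": ..., "db_uid": ...,
        # "db_name": ..., "join": ..., "wt_default": ...}.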
def create_working_table(self, analysis_id, sample_ids, field_uids, dbs_uids, filter_ids=[], attributes={}):
"""
        Create a working SQL table for the analysis to improve the speed of filtering/annotation.
        A working table contains all variants used by the analysis, with all annotations used by filters or displayed.
"""
from core.core import core
if len(sample_ids) == 0: raise RegovarException("No sample... so not able to retrieve data")
db_ref_suffix= "hg19" # execute("SELECT table_suffix FROM reference WHERE id={}".format(reference_id)).first().table_suffix
progress = {"msg": "wt_processing", "start": datetime.datetime.now().ctime(), "analysis_id": analysis_id, "step": 1}
core.notify_all(progress)
# Create schema
w_table = 'wt_{}'.format(analysis_id)
query = "DROP TABLE IF EXISTS {0} CASCADE; CREATE TABLE {0} (\
is_variant boolean DEFAULT False, \
annotated boolean DEFAULT False, \
variant_id bigint, \
bin integer, \
chr bigint, \
pos integer, \
ref text, \
alt text,\
transcript_pk_field_uid character varying(32), \
transcript_pk_value character varying(100), \
is_transition boolean, \
sample_tlist integer[], \
sample_tcount integer, \
sample_alist integer[], \
sample_acount integer, \
depth integer, "
query += ", ".join(["s{}_gt integer".format(i) for i in sample_ids]) + ", "
query += ", ".join(["s{}_dp integer".format(i) for i in sample_ids])
query += ", CONSTRAINT {0}_ukey UNIQUE (variant_id, transcript_pk_field_uid, transcript_pk_value));"
execute(query.format(w_table))
# Insert variant without annotation first
query = "INSERT INTO {0} (variant_id, bin, chr, pos, ref, alt, is_transition, sample_tlist) \
SELECT DISTINCT sample_variant_{1}.variant_id, sample_variant_{1}.bin, sample_variant_{1}.chr, sample_variant_{1}.pos, sample_variant_{1}.ref, sample_variant_{1}.alt, \
variant_{1}.is_transition, \
variant_{1}.sample_list \
FROM sample_variant_{1} INNER JOIN variant_{1} ON sample_variant_{1}.variant_id=variant_{1}.id \
WHERE sample_variant_{1}.sample_id IN ({2}) \
ON CONFLICT (variant_id, transcript_pk_field_uid, transcript_pk_value) DO NOTHING;"
execute(query.format(w_table, db_ref_suffix, ','.join([str(i) for i in sample_ids])))
# Complete sample-variant's associations
for sid in sample_ids:
execute("UPDATE {0} SET s{2}_gt=_sub.genotype, s{2}_dp=_sub.depth FROM (SELECT variant_id, genotype, depth FROM sample_variant_{1} WHERE sample_id={2}) AS _sub WHERE {0}.variant_id=_sub.variant_id".format(w_table, db_ref_suffix, sid))
query = "UPDATE {0} SET \
is_variant=(CASE WHEN ref<>alt THEN True ELSE False END), \
sample_tcount=array_length(sample_tlist,1), \
sample_alist=array_intersect(sample_tlist, array[{1}]), \
sample_acount=array_length(array_intersect(sample_tlist, array[{1}]),1), \
depth=GREATEST({2})"
execute(query.format(w_table, ",".join([str(i) for i in sample_ids]), ", ".join(["s{}_dp".format(i) for i in sample_ids])))
# Create indexes
        # FIXME: do we need to create an index on boolean fields? Is partitioning a better approach for low-cardinality fields: http://www.postgresql.org/docs/9.1/static/ddl-partitioning.html
# query = "CREATE INDEX {0}_idx_ann ON {0} USING btree (annotated);".format(w_table)
query = "CREATE INDEX {0}_idx_vid ON {0} USING btree (variant_id);".format(w_table)
query += "CREATE INDEX {0}_idx_var ON {0} USING btree (bin, chr, pos, transcript_pk_field_uid, transcript_pk_value);".format(w_table)
query += "CREATE INDEX {0}_idx_trx ON {0} USING btree (transcript_pk_field_uid, transcript_pk_value);".format(w_table)
query += "".join(["CREATE INDEX {0}_idx_s{1}_gt ON {0} USING btree (s{1}_gt);".format(w_table, i) for i in sample_ids])
query += "".join(["CREATE INDEX {0}_idx_s{1}_dp ON {0} USING btree (s{1}_dp);".format(w_table, i) for i in sample_ids])
execute(query)
# Update count stat of the analysis
query = "UPDATE analysis SET total_variants=(SELECT COUNT(*) FROM {} WHERE is_variant), status='ANNOTATING' WHERE id={}".format(w_table, analysis_id)
execute(query)
# Update working table by computing annotation
self.update_working_table(analysis_id, sample_ids, field_uids, dbs_uids, filter_ids, attributes)
def update_working_table(self, analysis_id, sample_ids, field_uids, dbs_uids, filter_ids=[], attributes={}):
"""
        Update the annotations of the working table of an analysis. The working table must already exist.
"""
from core.core import core
# Get list of fields to add in the wt
analysis = Analysis.from_id(analysis_id)
total = analysis.total_variants
diff_fields = []
diff_dbs = []
progress = {"msg": "wt_processing", "start": datetime.datetime.now().ctime(), "analysis_id": analysis_id, "step": 2, "progress_total": total, "progress_current": 0}
core.notify_all(progress)
try:
query = "SELECT column_name FROM information_schema.columns WHERE table_name='wt_{}'".format(analysis_id)
current_fields = [row.column_name if row.column_name[0] != '_' else row.column_name[1:] for row in execute(query)]
current_dbs = []
for f_uid in current_fields:
if f_uid in self.fields_map and self.fields_map[f_uid]['db_uid'] not in current_dbs:
current_dbs.append(self.fields_map[f_uid]['db_uid'])
for f_uid in field_uids:
if f_uid not in current_fields and self.fields_map[f_uid]['db_name_ui'] != 'Variant':
diff_fields.append('_{}'.format(f_uid))
if self.fields_map[f_uid]['db_uid'] not in diff_dbs and self.fields_map[f_uid]['db_uid'] not in current_dbs:
diff_dbs.append(self.fields_map[f_uid]['db_uid'])
except:
# working table doesn't exist
return False
# Alter working table to add new fields
pattern = "ALTER TABLE wt_{0} ADD COLUMN {1}{2} {3};"
query = ""
update_queries = []
for f_uid in diff_fields:
if f_uid[0] == '_':
f_uid = f_uid[1:]
query += pattern.format(analysis_id, '_', f_uid, self.sql_type_map[self.fields_map[f_uid]['type']])
for a_name in attributes.keys():
att_checked = []
for sid, att in attributes[a_name].items():
if 'attr_{}_{}'.format(a_name.lower(), att.lower()) in current_fields:
                    # We consider that if the first key/value pair for the attribute is defined, the whole attribute's columns are defined,
                    # so break and switch to the next attribute.
                    # That's why, before updating an attribute value, all of its former columns must first be dropped from the wt.
break;
else:
if att not in att_checked:
att_checked.append(att)
query += pattern.format(analysis_id, 'attr_', "{}_{}".format(a_name.lower(), att.lower()), 'boolean DEFAULT False')
update_queries.append("UPDATE wt_{} SET attr_{}_{}=True WHERE s{}_gt IS NOT NULL; ".format(analysis_id, a_name.lower(), att.lower(), sid))
for f_id in filter_ids:
if 'filter_{}'.format(f_id) not in current_fields:
query += pattern.format(analysis_id, 'filter_', f_id, 'boolean DEFAULT False')
f_filter = json.loads(execute("SELECT filter FROM filter WHERE id={}".format(f_id)).first().filter)
q = self.build_query(analysis_id, analysis.reference_id, 'table', f_filter, [], None, None)
queries = q[0]
if len(queries) > 0:
# add all query to create temps tables needed by the filter if they do not yet exists
for q in queries[:-1]:
query += q
# add the query to update wt with the filter
# Note : As transcript_pk_field_uid and transcript_pk_field_value may be null, we cannot use '=' operator and must use 'IS NOT DISTINCT FROM'
# as two expressions that return 'null' are not considered as equal in SQL.
update_queries.append("UPDATE wt_{0} SET filter_{1}=True FROM ({2}) AS _sub WHERE wt_{0}.variant_id=_sub.variant_id AND wt_{0}.transcript_pk_field_uid IS NOT DISTINCT FROM _sub.transcript_pk_field_uid AND wt_{0}.transcript_pk_value IS NOT DISTINCT FROM _sub.transcript_pk_value ; ".format(analysis_id, f_id, queries[-1].strip()[:-1]))
if query != "":
# Add new annotation columns to the working table
execute(query)
progress.update({"step": 3})
core.notify_all(progress)
        # Loop over the new annotation databases; transcript-type databases require adding their transcripts to the working table
fields_to_copy_from_variant = ["variant_id","bin","chr","pos","ref","alt","is_transition","sample_tlist","sample_tcount","sample_alist","sample_acount","depth"]
fields_to_copy_from_variant.extend(['s{}_gt'.format(s) for s in sample_ids])
fields_to_copy_from_variant.extend(['s{}_dp'.format(s) for s in sample_ids])
fields_to_copy_from_variant.extend(['attr_{}'.format(a.lower()) for a in attributes.keys()])
fields_to_copy_from_variant.extend(['filter_{}'.format(f) for f in filter_ids])
pattern = "INSERT INTO wt_{0} (annotated, transcript_pk_field_uid, transcript_pk_value, {1}) \
SELECT False, '{2}', {4}.transcript_id, {3} \
FROM (SELECT {1} FROM wt_{0} WHERE transcript_pk_field_uid IS NULL) AS _var \
INNER JOIN {4} ON _var.variant_id={4}.variant_id" # TODO : check if more optim to select with JOIN ON bin/chr/pos/ref/alt
for uid in diff_dbs:
if self.db_map[uid]["type"] == "transcript":
query = pattern.format(analysis_id,
', '.join(fields_to_copy_from_variant),
self.db_map[uid]["db_pk_field_uid"],
', '.join(["_var.{}".format(f) for f in fields_to_copy_from_variant]),
self.db_map[uid]["name"])
execute(query)
progress.update({"step": 4})
core.notify_all(progress)
# Create update query to retrieve annotation
UPDATE_LOOP_RANGE = 1000
to_update = {}
for f_uid in diff_fields:
if self.fields_map[f_uid[1:]]['db_uid'] not in to_update.keys():
to_update[self.fields_map[f_uid[1:]]['db_uid']] = []
to_update[self.fields_map[f_uid[1:]]['db_uid']].append({
"name": self.fields_map[f_uid[1:]]['name'],
"uid":f_uid[1:],
"db_name": self.fields_map[f_uid[1:]]['db_name']})
        # Loop to update the working table annotations (fields are "packed" per annotation database so each database is queried once)
for db_uid in to_update.keys():
if self.db_map[db_uid]["type"] == "transcript":
qset_ann = ', '.join(['_{0}=_ann._{0}'.format(f["uid"]) for f in to_update[db_uid]])
qslt_ann = ','.join(['{0}.{1} AS _{2}'.format(f['db_name'], f["name"], f["uid"]) for f in to_update[db_uid]])
qslt_var = "SELECT variant_id, bin, chr, pos, ref, alt, transcript_pk_value FROM wt_{0} WHERE annotated=False AND transcript_pk_field_uid='{1}' LIMIT {2}".format(analysis_id, self.db_map[self.fields_map[f_uid[1:]]['db_uid']]['db_pk_field_uid'], UPDATE_LOOP_RANGE)
qjoin = 'LEFT JOIN {0} '.format(self.db_map[db_uid]['join'].format('_var'))
query = "UPDATE wt_{0} SET annotated=True, {1} FROM (SELECT _var.variant_id, _var.transcript_pk_value, {2} FROM ({3}) AS _var {4}) AS _ann \
WHERE wt_{0}.variant_id=_ann.variant_id AND wt_{0}.transcript_pk_field_uid='{5}' AND wt_{0}.transcript_pk_value=_ann.transcript_pk_value".format(
analysis_id,
qset_ann,
qslt_ann,
qslt_var,
qjoin,
self.db_map[self.fields_map[f_uid[1:]]['db_uid']]['db_pk_field_uid'])
else:
qset_ann = ', '.join(['{0}=_ann._{0}'.format(f_uid) for f_uid in diff_fields])
qslt_ann = ','.join(['{0}.{1} AS _{2}'.format(self.fields_map[f_uid[1:]]['db_name'], self.fields_map[f_uid[1:]]['name'], f_uid) for f_uid in diff_fields])
qslt_var = 'SELECT variant_id, bin, chr, pos, ref, alt FROM wt_{0} WHERE annotated=False AND transcript_pk_field_uid IS NULL LIMIT {1}'.format(analysis_id, UPDATE_LOOP_RANGE)
qjoin = ' '.join(['LEFT JOIN {0} '.format(self.db_map[db_uid]['join'].format('_var'), self.db_map[db_uid]) for db_uid in diff_dbs])
query = "UPDATE wt_{0} SET annotated=True, {1} FROM (SELECT _var.variant_id, {2} FROM ({3}) AS _var {4}) AS _ann WHERE wt_{0}.variant_id=_ann.variant_id".format(analysis_id, qset_ann, qslt_ann, qslt_var, qjoin)
if qset_ann != "":
# Mark all variant as not annotated (to be able to do a "resumable update")
execute("UPDATE wt_{} SET annotated=False".format(analysis_id))
for page in range(0, total, UPDATE_LOOP_RANGE):
execute(query)
progress.update({"progress_current": page})
core.notify_all(progress)
progress.update({"step": 5, "progress_current": total})
core.notify_all(progress)
# Apply queries to update attributes and filters columns in the wt
if len(update_queries) > 0:
execute("".join(update_queries))
progress.update({"step": 6})
core.notify_all(progress)
# Update count stat of the analysis
query = "UPDATE analysis SET status='READY' WHERE id={}".format(analysis_id)
execute(query)
def request(self, analysis_id, mode, filter_json, fields=None, order=None, limit=100, offset=0, count=False):
"""
"""
        # Check parameters: if no field, select by default the first field available to avoid an error
if fields is None:
fields = [next(iter(self.fields_map.keys()))]
if type(analysis_id) != int or analysis_id <= 0:
analysis_id = None
if mode not in ["table", "list"]:
mode = "table"
# Get analysis data and check status if ok to do filtering
analysis = Analysis.from_id(analysis_id)
if analysis is None:
raise RegovarException("Not able to retrieve analysis with provided id: {}".format(analysis_id))
# Parse data to generate sql query and retrieve list of needed annotations databases/fields
query, field_uids, dbs_uids, sample_ids, filter_ids, attributes = self.build_query(analysis_id, analysis.reference_id, mode, filter_json, fields, order, limit, offset, count)
# Prepare database working table
if analysis.status is None or analysis.status == '':
self.create_working_table(analysis_id, sample_ids, field_uids, dbs_uids, filter_ids, attributes)
else:
self.update_working_table(analysis_id, sample_ids, field_uids, dbs_uids, filter_ids, attributes)
# Execute query
sql_result = None
with Timer() as t:
sql_result = execute(' '.join(query))
log("---\nFields:\n{0}\nFilter:\n{1}\nQuery:\n{2}\nRequest query: {3}".format(fields, filter_json, '\n'.join(query), t))
# Save filter in analysis settings
if not count and analysis_id > 0:
settings = {}
try:
settings = json.loads(execute("SELECT settings FROM analysis WHERE id={}".format(analysis_id)).first().settings)
settings["filter"] = filter_json
settings["fields"] = fields
settings["order"] = [] if order is None else order
execute("UPDATE analysis SET {0}update_date=CURRENT_TIMESTAMP WHERE id={1}".format("settings='{0}', ".format(json.dumps(settings)), analysis_id))
except:
# TODO: log error
err("Not able to save current filter")
# Get result
if count:
result = sql_result.first()[0]
else:
result = []
with Timer() as t:
if sql_result is not None:
for row in sql_result:
entry = {"id" : "{}_{}_{}".format(row.variant_id, row.transcript_pk_field_uid, row.transcript_pk_value )}
for f_uid in fields:
# Manage special case for fields split by sample
if self.fields_map[f_uid]['name'].startswith('s{}_'):
pattern = "row." + self.fields_map[f_uid]['name']
r = {}
for sid in sample_ids:
r[sid] = FilterEngine.parse_result(eval(pattern.format(sid)))
entry[f_uid] = r
else:
if self.fields_map[f_uid]['db_name_ui'] == 'Variant':
entry[f_uid] = FilterEngine.parse_result(eval("row.{}".format(self.fields_map[f_uid]['name'])))
else:
entry[f_uid] = FilterEngine.parse_result(eval("row._{}".format(f_uid)))
result.append(entry)
log("Result processing: {0}\nTotal result: {1}".format(t, "-"))
return result
def build_query(self, analysis_id, reference_id, mode, filter, fields, order=None, limit=100, offset=0, count=False):
"""
This method builds the sql query according to the provided parameters, and also builds several lists with the ids of
fields, databases, samples, etc.: all the information that the analysis needs to work.
"""
# Data that will be computed and returned by this method !
query = [] # sql queries that correspond to the provided parameters (we will have several queries if we need to create temp tables)
field_uids = [] # list of annotation field's uids that need to be present in the analysis working table
db_uids = [] # list of annotation databases uids used for the analysis
sample_ids = [] # list of sample's ids used for the analysis
filter_ids = [] # list of saved filter's ids for this analysis
attributes = {} # list of attributes (and their values by sample) defined for this analysis
# Retrieve sample ids of the analysis
for row in execute("select sample_id from analysis_sample where analysis_id={0}".format(analysis_id)):
sample_ids.append(str(row.sample_id))
# Retrieve attributes of the analysis
for row in execute("select sample_id, value, name from attribute where analysis_id={0}".format(analysis_id)):
if row.name not in attributes.keys():
attributes[row.name] = {row.sample_id: row.value}
else:
attributes[row.name].update({row.sample_id: row.value})
# Init field uids and db uids with the default annotation fields according to the reference (hg19 for example)
# for row in execute("SELECT d.uid AS duid, f.uid FROM annotation_database d INNER JOIN annotation_field f ON d.uid=f.database_uid WHERE d.reference_id={} AND d.type='variant' AND f.wt_default=True".format(reference_id)):
# if row.duid not in db_uids:
# db_uids.append(row.duid)
# field_uids.append(row.uid)
# Retrieve saved filter's ids of the analysis - and parse their filter to get list of dbs/fields used by filters
for row in execute("select id, filter from filter where analysis_id={0} ORDER BY id ASC".format(analysis_id)): # ORDER BY is important as a filter can "called" an oldest filter to be build.
filter_ids.append(row.id)
q, f, d = self.parse_filter(analysis_id, mode, sample_ids, row.filter, fields, None, None)
field_uids = array_merge(field_uids, f)
db_uids = array_merge(db_uids, d)
# Parse the current filter
query, f, d = self.parse_filter(analysis_id, mode, sample_ids, filter, fields, order, limit, offset, count)
field_uids = array_merge(field_uids, f)
db_uids = array_merge(db_uids, d)
# return query and all useful data about annotations needed to execute the query
return query, field_uids, db_uids, sample_ids, filter_ids, attributes
def parse_filter(self, analysis_id, mode, sample_ids, filters, fields=[], order=None, limit=100, offset=0, count=False):
"""
This method parses the json filter and returns the corresponding postgreSQL query, as well as the lists of field and database uids used by the query
(those databases/fields must be present in the working table for the query to run successfully)
"""
# Init some global variables
wt = 'wt_{}'.format(analysis_id)
query = ""
field_uids = []
db_uids = []
with_trx = False
# Build SELECT
fields_names = []
for f_uid in fields:
if self.fields_map[f_uid]["db_uid"] not in db_uids:
db_uids.append(self.fields_map[f_uid]["db_uid"])
field_uids.append(f_uid)
if self.fields_map[f_uid]['db_name_ui'] == 'Variant':
# Manage special case for fields split by sample
if self.fields_map[f_uid]['name'].startswith('s{}_'):
fields_names.extend(['{}.'.format(wt) + self.fields_map[f_uid]['name'].format(s) for s in sample_ids])
else:
fields_names.append('{}.{}'.format(wt, self.fields_map[f_uid]["name"]))
else:
with_trx = with_trx or self.fields_map[f_uid]["db_type"] == "transcript"
fields_names.append('{}._{}'.format(wt, f_uid))
q_select = 'variant_id, transcript_pk_field_uid, transcript_pk_value{} {}'.format(',' if len(fields_names) > 0 else '', ', '.join(fields_names))
# Build FROM/JOIN
q_from = wt
# Build WHERE
temporary_to_import = {}
def check_field_uid(data):
if data[0] == 'field':
if self.fields_map[data[1]]["db_uid"] not in db_uids:
db_uids.append(self.fields_map[data[1]]["db_uid"])
field_uids.append(data[1])
def build_filter(data):
"""
Recursive method that builds the query from the filter json data at operator level
"""
operator = data[0]
if operator in ['AND', 'OR']:
if len(data[1]) == 0:
return ''
return ' (' + FilterEngine.op_map[operator].join([build_filter(f) for f in data[1]]) + ') '
elif operator in ['==', '!=', '>', '<', '>=', '<=']:
# If comparing with a field, the field MUST BE the first operand
if data[1][0] == 'field':
metadata = self.fields_map[data[1][1]]
else:
metadata = {"type": "string", "name":""}
check_field_uid(data[1])
check_field_uid(data[2])
# Manage special case for fields split by sample
if metadata['name'].startswith('s{}_'):
# With these special fields, we don't allow field to field comparison.
# The first operand shall always be the special field, and the second shall be anything except another special field
return ' (' + ' OR '.join(['{0}{1}{2}'.format(metadata['name'].format(s), FilterEngine.op_map[operator], parse_value(metadata["type"], data[2])) for s in sample_ids]) + ') '
else:
return '{0}{1}{2}'.format(parse_value(metadata["type"], data[1]), FilterEngine.op_map[operator], parse_value(metadata["type"], data[2]))
elif operator in ['~', '!~']:
check_field_uid(data[1])
check_field_uid(data[2])
return '{0}{1}{2}'.format(parse_value('string', data[1]), FilterEngine.op_map[operator], parse_value('string%', data[2]))
elif operator in ['IN', 'NOTIN']:
tmp_table = get_tmp_table(data[1], data[2])
temporary_to_import[tmp_table]['where'] = FilterEngine.op_map[operator].format(tmp_table, wt)
if data[1] == 'site':
temporary_to_import[tmp_table]['from'] = " LEFT JOIN {1} ON {0}.bin={1}.bin AND {0}.chr={1}.chr AND {0}.pos={1}.pos".format(wt, tmp_table)
else: # if data[1] == 'variant':
temporary_to_import[tmp_table]['from'] = " LEFT JOIN {1} ON {0}.bin={1}.bin AND {0}.chr={1}.chr AND {0}.pos={1}.pos AND {0}.ref={1}.ref AND {0}.alt={1}.alt".format(wt, tmp_table)
return temporary_to_import[tmp_table]['where']
def get_tmp_table(mode, data):
"""
Parse json data to build a temp table for the set operations IN/NOTIN
mode: site or variant
data: json data about the temp table to create
"""
ttable_quer_map = "CREATE TABLE IF NOT EXISTS {0} AS {1}; "
if data[0] == 'sample':
tmp_table_name = "tmp_sample_{0}_{1}".format(data[1], mode)
if mode == 'site':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos FROM {0} WHERE {0}.s{1}_gt IS NOT NULL".format(wt, data[1]))
else: # if mode = 'variant':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos, {0}.ref, {0}.alt FROM {0} WHERE {0}.s{1}_gt IS NOT NULL".format(wt, data[1]))
elif data[0] == 'filter':
tmp_table_name = "tmp_filter_{0}".format(data[1])
if mode == 'site':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos FROM {0} WHERE {0}.filter_{1}=True".format(wt, data[1]))
else: # if mode = 'variant':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos, {0}.ref, {0}.alt FROM {0} WHERE {0}.filter_{1}=True".format(wt, data[1]))
elif data[0] == 'attribute':
key, value = data[1].split(':')
tmp_table_name = "tmp_attribute_{0}_{1}_{2}_{3}".format(analysis_id, key, value, mode)
if mode == 'site':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos FROM {0} WHERE {0}.attr_{1}='{2}'".format(wt, key, value))
else: # if mode = 'variant':
tmp_table_query = ttable_quer_map.format(tmp_table_name, "SELECT DISTINCT {0}.bin, {0}.chr, {0}.pos, {0}.ref, {0}.alt FROM {0} WHERE {0}.attr_{1}='{2}'".format(wt, key, value))
temporary_to_import[tmp_table_name] = {'query': tmp_table_query + "CREATE INDEX IF NOT EXISTS {0}_idx_var ON {0} USING btree (bin, chr, pos);".format(tmp_table_name)}
return tmp_table_name
def parse_value(ftype, data):
if data[0] == 'field':
if self.fields_map[data[1]]["type"] == ftype:
if self.fields_map[data[1]]['db_name_ui'] == 'Variant':
return "{0}".format(self.fields_map[data[1]]["name"])
else:
return "_{0}".format(data[1])
if data[0] == 'value':
if ftype in ['int', 'float', 'enum', 'percent']:
return str(data[1])
elif ftype == 'string':
return "'{0}'".format(data[1])
elif ftype == 'string%':
return "'%%{0}%%'".format(data[1])
elif ftype == 'range' and len(data) == 3:
return 'int8range({0}, {1})'.format(data[1], data[2])
raise RegovarException("FilterEngine.request.parse_value - Unknow type: {0} ({1})".format(ftype, data))
# q_where = ""
# if len(sample_ids) == 1:
# q_where = "{0}.sample_id={1}".format(wt, sample_ids[0])
# elif len(sample_ids) > 1:
# q_where = "{0}.sample_id IN ({1})".format(wt, ','.join(sample_ids))
q_where = build_filter(filters)
if q_where is not None and len(q_where.strip()) > 0:
q_where = "WHERE " + q_where
# Build FROM/JOIN according to the list of used annotations databases
q_from += " ".join([t['from'] for t in temporary_to_import.values()])
# Build ORDER BY
# TODO: actually, it's not possible to do "order by" on special fields (GT and DP) because they are split by sample
q_order = ""
if order is not None and len(order) > 0:
orders = []
for f_uid in order:
asc = 'ASC'
if f_uid[0] == '-':
f_uid = f_uid[1:]
asc = 'DESC'
if self.fields_map[f_uid]['db_name_ui'] == 'Variant':
# Manage special case for fields split by sample
if self.fields_map[f_uid]['name'].startswith('s{}_'):
pass
else:
orders.append('{} {}'.format(self.fields_map[f_uid]["name"], asc))
else:
orders.append('_{} {}'.format(f_uid, asc))
q_order = 'ORDER BY {}'.format(', '.join(orders))
# build final query
query_tpm = [t['query'] for t in temporary_to_import.values()]
if count:
query_req = "SELECT DISTINCT {0} FROM {1} {2}".format(q_select, q_from, q_where)
query = query_tpm + ['SELECT COUNT(*) FROM ({0}) AS sub;'.format(query_req)]
else:
query_req = "SELECT DISTINCT {0} FROM {1} {2} {3} {4} {5};".format(q_select, q_from, q_where, q_order, 'LIMIT {}'.format(limit) if limit is not None else '', 'OFFSET {}'.format(offset) if offset is not None else '')
query = query_tpm + [query_req]
return query, field_uids, db_uids
@staticmethod
def get_hasname(analysis_id, mode, fields, filter_json):
# clean and sort fields list
clean_fields = fields
clean_fields.sort()
clean_fields = list(set(clean_fields))
string_id = "{0}{1}{2}{3}".format(analysis_id, mode, clean_fields, json.dumps(filter_json))
return hashlib.md5(string_id.encode()).hexdigest()
@staticmethod
def parse_result(value):
"""
Parse value returned by sqlAlchemy and cast it, if needed, into "simple" python types
"""
# if value is None:
# return ""
if type(value) == psycopg2._range.NumericRange:
return (value.lower, value.upper)
return value
| agpl-3.0 |
xavoctechnocratspvtltd/property | atk4-addons/hierarchy/lib/Controller/Table.php | 1421 | <?php
namespace hierarchy;
class Controller_Table extends \AbstractController {
public $class_name;
public $child_ref;
public $parent_ref;
function init(){
parent::init();
$this->owner->hierarchy_controller=$this;
}
function useField($field){
if(!$this->class_name)$this->class_name=preg_replace('/^Model_/', '', get_class($this->owner)); // remove "Model_" from class
if(!$this->child_ref)$this->child_ref=$this->class_name;
$this->parent_ref=$field;
if(!$this->owner->hasElement($this->parent_ref))$this->owner->hasOne($this->class_name,$field)
->display(array('form'=>'hierarchy/drilldown'));
if(!$this->owner->hasElement($this->child_ref))$this->owner->hasMany($this->child_ref,$field);
$this->addCountColumn(strtolower($this->child_ref).'_cnt');
}
function addCountColumn($f){
$self=$this;
$this->owner->addExpression($f)->set(function($m)use($self,$f){
$m=$self->owner->newInstance();
//$m->table_alias=$f; // Imants: This still doesn't work as expected and creates Exception_DB when we use Models where model name is not the same as table name or table alias.
$ref=$self->owner->getElement($self->child_ref);
$m->addCondition($ref->their_field,$self->owner->getElement($ref->our_field));
return $m->count();
});
}
}
| agpl-3.0 |
HBEE/odoo-addons | report_extended_stock/__openerp__.py | 1603 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Report Configurator - Stock',
'version': '8.0.1.0.0',
'category': 'Reporting Subsystem',
'sequence': 14,
'summary': '',
'description': """
Report Configurator - Stock
=============================
""",
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'images': [
],
'depends': [
'report_extended',
'stock_voucher',
],
'data': [
'views/report_view.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': True,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
cfelipesouza/qgroundcontrol | src/FirmwarePlugin/FirmwarePlugin.h | 5604 | /*=====================================================================
QGroundControl Open Source Ground Control Station
(c) 2009 - 2014 QGROUNDCONTROL PROJECT <http://www.qgroundcontrol.org>
This file is part of the QGROUNDCONTROL project
QGROUNDCONTROL is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
QGROUNDCONTROL is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with QGROUNDCONTROL. If not, see <http://www.gnu.org/licenses/>.
======================================================================*/
/// @file
/// @author Don Gagne <[email protected]>
#ifndef FirmwarePlugin_H
#define FirmwarePlugin_H
#include "QGCMAVLink.h"
#include "VehicleComponent.h"
#include "AutoPilotPlugin.h"
#include <QList>
#include <QString>
class Vehicle;
/// This is the base class for Firmware specific plugins
///
/// The FirmwarePlugin class is the abstract base class which represents the methods and objects
/// which are specific to a certain Firmware flight stack. This is the only place where
/// flight stack specific code should reside in QGroundControl. The remainder of the
/// QGroundControl source is generic to a common mavlink implementation. The implementation
/// in the base class supports mavlink generic firmware. Override the base class virtuals
/// to create your firmware-specific plugin.
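/// A minimal illustrative sketch (hypothetical class name and values, not part of this file):
/// a concrete plugin implements every pure virtual declared below, for example:
///
///     class ExampleFirmwarePlugin : public FirmwarePlugin
///     {
///         bool isCapable(FirmwareCapabilities capabilities) { return capabilities == SetFlightModeCapability; }
///         QStringList flightModes(void) { return QStringList() << "Manual" << "Auto"; }
///         // ... remaining pure virtual methods omitted for brevity
///     };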
class FirmwarePlugin : public QObject
{
Q_OBJECT
public:
/// Set of optional capabilites which firmware may support
typedef enum {
SetFlightModeCapability, ///< FirmwarePlugin::setFlightMode method is supported
MavCmdPreflightStorageCapability, ///< MAV_CMD_PREFLIGHT_STORAGE is supported
} FirmwareCapabilities;
/// @return true: Firmware supports all specified capabilities
virtual bool isCapable(FirmwareCapabilities capabilities) = 0;
/// Returns VehicleComponents for specified Vehicle
/// @param vehicle Vehicle to associate with components
/// @return List of VehicleComponents for the specified vehicle. Caller owns returned objects and must
/// free when no longer needed.
virtual QList<VehicleComponent*> componentsForVehicle(AutoPilotPlugin* vehicle) = 0;
/// Returns the list of available flight modes
virtual QStringList flightModes(void) = 0;
/// Returns the name for this flight mode. Flight mode names must be human readable as well as audio speakable.
/// @param base_mode Base mode from mavlink HEARTBEAT message
/// @param custom_mode Custom mode from mavlink HEARTBEAT message
virtual QString flightMode(uint8_t base_mode, uint32_t custom_mode) = 0;
/// Sets base_mode and custom_mode to specified flight mode.
/// @param[out] base_mode Base mode for SET_MODE mavlink message
/// @param[out] custom_mode Custom mode for SET_MODE mavlink message
virtual bool setFlightMode(const QString& flightMode, uint8_t* base_mode, uint32_t* custom_mode) = 0;
/// FIXME: This isn't quite correct being here. All code for Joystick support is currently firmware specific
/// not just this. I'm going to try to change that. If not, this will need to be removed.
/// Returns the number of buttons which are reserved for firmware use in the MANUAL_CONTROL mavlink
/// message. For example PX4 Flight Stack reserves the first 8 buttons to simulate rc switches.
/// The remainder can be assigned to Vehicle actions.
/// @return -1: reserve all buttons, >0: number of buttons to reserve
virtual int manualControlReservedButtonCount(void) = 0;
/// Called before any mavlink message is processed by Vehicle such that the firmware plugin
/// can adjust any message characteristics. This is handy to adjust for differences in mavlink
/// spec implementations such that the base code can remain mavlink generic.
/// @param vehicle Vehicle message came from
/// @param message[in,out] Mavlink message to adjust if needed.
virtual void adjustMavlinkMessage(Vehicle* vehicle, mavlink_message_t* message) = 0;
/// Called when Vehicle is first created to send any necessary mavlink messages to the firmware.
virtual void initializeVehicle(Vehicle* vehicle) = 0;
/// Determines how to handle the first item of the mission item list. Internally to QGC the first item
/// is always the home position.
/// @return
/// true: Send first mission item as home position to vehicle. When vehicle has no mission items on
/// it, it may or may not return a home position back in position 0.
/// false: Do not send first item to vehicle, sequence numbers must be adjusted
virtual bool sendHomePositionToVehicle(void) = 0;
/// Returns the parameter that is used to identify the default component
virtual QString getDefaultComponentIdParam(void) const = 0;
/// Adds the parameter meta data to the Fact
virtual void addMetaDataToFact(Fact* fact, MAV_TYPE vehicleType) = 0;
/// List of supported mission commands. Empty list for all commands supported.
virtual QList<MAV_CMD> supportedMissionCommands(void) = 0;
};
#endif
| agpl-3.0 |
FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/Clinical/src/ims/clinical/forms/outpatientstreatmentplan/BaseAccessLogic.java | 5236 | //#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.clinical.forms.outpatientstreatmentplan;
import java.io.Serializable;
import ims.framework.Context;
import ims.framework.FormName;
import ims.framework.FormAccessLogic;
public class BaseAccessLogic extends FormAccessLogic implements Serializable
{
private static final long serialVersionUID = 1L;
public final void setContext(Context context, FormName formName)
{
form = new CurrentForm(new GlobalContext(context), new CurrentForms());
engine = new CurrentEngine(formName);
}
public boolean isAccessible()
{
if(!form.getGlobalContext().Core.getPatientShortIsNotNull())
return false;
if(!form.getGlobalContext().Core.getCurrentCareContextIsNotNull())
return false;
return true;
}
public boolean isReadOnly()
{
return false;
}
public CurrentEngine engine;
public CurrentForm form;
public final static class CurrentForm implements Serializable
{
private static final long serialVersionUID = 1L;
CurrentForm(GlobalContext globalcontext, CurrentForms forms)
{
this.globalcontext = globalcontext;
this.forms = forms;
}
public final GlobalContext getGlobalContext()
{
return globalcontext;
}
public final CurrentForms getForms()
{
return forms;
}
private GlobalContext globalcontext;
private CurrentForms forms;
}
public final static class CurrentEngine implements Serializable
{
private static final long serialVersionUID = 1L;
CurrentEngine(FormName formName)
{
this.formName = formName;
}
public final FormName getFormName()
{
return formName;
}
private FormName formName;
}
public static final class CurrentForms implements Serializable
{
private static final long serialVersionUID = 1L;
protected final class LocalFormName extends FormName
{
private static final long serialVersionUID = 1L;
protected LocalFormName(int value)
{
super(value);
}
}
private CurrentForms()
{
Clinical = new ClinicalForms();
OCRR = new OCRRForms();
}
public final class ClinicalForms implements Serializable
{
private static final long serialVersionUID = 1L;
private ClinicalForms()
{
OutPatientsTreatmentPlan = new LocalFormName(122120);
MedicationOnAdmission = new LocalFormName(122126);
DocumentGeneration = new LocalFormName(122140);
DiagnosisComplications = new LocalFormName(123117);
PatientProblems = new LocalFormName(123120);
Procedures = new LocalFormName(123119);
}
public final FormName OutPatientsTreatmentPlan;
public final FormName MedicationOnAdmission;
public final FormName DocumentGeneration;
public final FormName DiagnosisComplications;
public final FormName PatientProblems;
public final FormName Procedures;
}
public final class OCRRForms implements Serializable
{
private static final long serialVersionUID = 1L;
private OCRRForms()
{
MyOrder = new LocalFormName(116111);
}
public final FormName MyOrder;
}
public ClinicalForms Clinical;
public OCRRForms OCRR;
}
}
| agpl-3.0 |
Fiware/data.Orion | test/unittests/ngsi10/SubscribeContextRequest_test.cpp | 12342 | /*
*
* Copyright 2013 Telefonica Investigacion y Desarrollo, S.A.U
*
* This file is part of Orion Context Broker.
*
* Orion Context Broker is free software: you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Orion Context Broker is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
* General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Orion Context Broker. If not, see http://www.gnu.org/licenses/.
*
* For those usages not covered by this license please contact with
* iot_support at tid dot es
*
* Author: Ken Zangelin
*/
#include "logMsg/logMsg.h"
#include "logMsg/traceLevels.h"
#include "common/globals.h"
#include "ngsi/ParseData.h"
#include "jsonParse/jsonRequest.h"
#include "unittest.h"
/* ****************************************************************************
*
* ok_json -
*/
TEST(SubscribeContextRequest, ok_json)
{
ParseData parseData;
ConnectionInfo ci("", "POST", "1.1");
const char* infile = "ngsi10.subscribeContextRequest.ok.valid.json";
utInit();
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), infile)) << "Error getting test data from '" << infile << "'";
ci.inMimeType = JSON;
ci.outMimeType = JSON;
lmTraceLevelSet(LmtDump, true);
std::string result = jsonTreat(testBuf, &ci, &parseData, SubscribeContext, NULL);
EXPECT_EQ("OK", result);
lmTraceLevelSet(LmtDump, false);
//
// With the data obtained present and release methods are exercised
//
SubscribeContextRequest* scrP = &parseData.scr.res;
scrP->release();
utExit();
}
/* ****************************************************************************
*
* badIsPattern_json -
*/
TEST(SubscribeContextRequest, badIsPattern_json)
{
ParseData parseData;
ConnectionInfo ci("", "POST", "1.1");
const char* infile = "ngsi10.subscribeContextRequest.badIsPattern.invalid.json";
const char* outfile = "ngsi10.subscribeContextResponse.badIsPattern.valid.json";
utInit();
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), infile)) << "Error getting test data from '" << infile << "'";
EXPECT_EQ("OK", testDataFromFile(expectedBuf, sizeof(expectedBuf), outfile)) << "Error getting test data from '" << outfile << "'";
ci.inMimeType = JSON;
ci.outMimeType = JSON;
std::string out = jsonTreat(testBuf, &ci, &parseData, SubscribeContext, NULL);
EXPECT_STREQ(expectedBuf, out.c_str());
utExit();
}
/* ****************************************************************************
*
* invalidDuration_json -
*/
TEST(SubscribeContextRequest, invalidDuration_json)
{
ParseData parseData;
ConnectionInfo ci("", "POST", "1.1");
const char* infile = "ngsi10.subscribeContextRequest.duration.invalid.json";
const char* outfile = "ngsi10.subscribeContextResponse.durationInvalid.valid.json";
utInit();
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), infile)) << "Error getting test data from '" << infile << "'";
EXPECT_EQ("OK", testDataFromFile(expectedBuf, sizeof(expectedBuf), outfile)) << "Error getting test data from '" << outfile << "'";
ci.inMimeType = JSON;
ci.outMimeType = JSON;
std::string out = jsonTreat(testBuf, &ci, &parseData, SubscribeContext, NULL);
EXPECT_STREQ(expectedBuf, out.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationCircleOkJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationCircleOkJson)
{
ParseData reqData;
const char* inFile = "ngsi10.subscribeContextRequest.circleOk.postponed.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
result = jsonTreat(testBuf, &ci, &reqData, SubscribeContext, NULL);
EXPECT_STREQ("OK", result.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationCircleInvertedJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationCircleInvertedJson)
{
ParseData reqData;
const char* inFile = "ngsi10.subscribeContextRequest.circleInverted.postponed.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
result = jsonTreat(testBuf, &ci, &reqData, SubscribeContext, NULL);
EXPECT_STREQ("OK", result.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationCircleInvertedBadValueJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationCircleInvertedBadValueJson)
{
ParseData reqData;
const char* inFile = "ngsi10.subscribeContextRequest.circleInvertedBadValue.invalid.json";
const char* outFile = "ngsi10.subscribeContextResponse.circleInvertedBadValue.ok.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
EXPECT_EQ("OK", testDataFromFile(expectedBuf, sizeof(expectedBuf), outFile)) << "Error getting test data from '" << outFile << "'";
result = jsonTreat(testBuf, &ci, &reqData, SubscribeContext, NULL);
EXPECT_STREQ(expectedBuf, result.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationCircleZeroRadiusJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationCircleZeroRadiusJson)
{
ParseData reqData;
const char* inFile = "ngsi10.subscribeContextRequest.circleZeroRadius.postponed.json";
const char* outFile = "ngsi10.subscribeContextResponse.circleZeroRadius.valid.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
EXPECT_EQ("OK", testDataFromFile(expectedBuf, sizeof(expectedBuf), outFile)) << "Error getting test data from '" << outFile << "'";
result = jsonTreat(testBuf, &ci, &reqData, SubscribeContext, NULL);
EXPECT_STREQ(expectedBuf, result.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationPolygonOkJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationPolygonOkJson)
{
ParseData parseData;
const char* inFile = "ngsi10.subscribeContextRequest.polygonOk.postponed.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
result = jsonTreat(testBuf, &ci, &parseData, SubscribeContext, NULL);
EXPECT_STREQ("OK", result.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationPolygonInvertedJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationPolygonInvertedJson)
{
ParseData parseData;
const char* inFile = "ngsi10.subscribeContextRequest.polygonInverted.postponed.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
result = jsonTreat(testBuf, &ci, &parseData, SubscribeContext, NULL);
EXPECT_STREQ("OK", result.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationPolygonInvertedBadValueJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationPolygonInvertedBadValueJson)
{
ParseData parseData;
const char* inFile = "ngsi10.subscribeContextRequest.polygonInvertedBadValue.invalid.json";
const char* outFile = "ngsi10.subscribeContextResponse.polygonInvertedBadValue.valid.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
EXPECT_EQ("OK", testDataFromFile(expectedBuf, sizeof(expectedBuf), outFile)) << "Error getting test data from '" << outFile << "'";
result = jsonTreat(testBuf, &ci, &parseData, SubscribeContext, NULL);
EXPECT_STREQ(expectedBuf, result.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationPolygonNoVerticesJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationPolygonNoVerticesJson)
{
ParseData parseData;
const char* inFile = "ngsi10.subscribeContextRequest.polygonInvertedNoVertices.postponed.json";
const char* outFile = "ngsi10.subscribeContextResponse.polygonInvertedNoVertices.valid.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
EXPECT_EQ("OK", testDataFromFile(expectedBuf, sizeof(expectedBuf), outFile)) << "Error getting test data from '" << outFile << "'";
result = jsonTreat(testBuf, &ci, &parseData, SubscribeContext, NULL);
EXPECT_STREQ(expectedBuf, result.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationPolygonOneVertexJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationPolygonOneVertexJson)
{
ParseData parseData;
const char* inFile = "ngsi10.subscribeContextRequest.polygonInvertedOneVertex.postponed.json";
const char* outFile = "ngsi10.subscribeContextResponse.polygonInvertedOneVertex.valid.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
EXPECT_EQ("OK", testDataFromFile(expectedBuf, sizeof(expectedBuf), outFile)) << "Error getting test data from '" << outFile << "'";
result = jsonTreat(testBuf, &ci, &parseData, SubscribeContext, NULL);
EXPECT_STREQ(expectedBuf, result.c_str());
utExit();
}
/* ****************************************************************************
*
* scopeGeolocationPolygonTwoVerticesJson -
*/
TEST(SubscribeContextRequest, scopeGeolocationPolygonTwoVerticesJson)
{
ParseData parseData;
const char* inFile = "ngsi10.subscribeContextRequest.polygonTwoVertices.postponed.json";
const char* outFile = "ngsi10.subscribeContextResponse.polygonTwoVertices.valid.json";
ConnectionInfo ci("/ngsi10/subscribeContext", "POST", "1.1");
std::string result;
utInit();
ci.inMimeType = JSON;
ci.outMimeType = JSON;
EXPECT_EQ("OK", testDataFromFile(testBuf, sizeof(testBuf), inFile)) << "Error getting test data from '" << inFile << "'";
EXPECT_EQ("OK", testDataFromFile(expectedBuf, sizeof(expectedBuf), outFile)) << "Error getting test data from '" << outFile << "'";
result = jsonTreat(testBuf, &ci, &parseData, SubscribeContext, NULL);
EXPECT_STREQ(expectedBuf, result.c_str());
utExit();
}
| agpl-3.0 |
telefonicaid/fiware-orion | test/unittests/orionTypes/EntityTypeResponse_test.cpp | 1452 | /*
*
* Copyright 2015 Telefonica Investigacion y Desarrollo, S.A.U
*
* This file is part of Orion Context Broker.
*
* Orion Context Broker is free software: you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Orion Context Broker is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
* General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Orion Context Broker. If not, see http://www.gnu.org/licenses/.
*
* For those usages not covered by this license please contact with
* iot_support at tid dot es
*
* Author: Fermin Galan
*/
#include "orionTypes/EntityTypeResponse.h"
#include "unittests/unittest.h"
/* ****************************************************************************
*
* check
*/
TEST(EntityTypeResponse, check)
{
utInit();
EntityTypeResponse etR1;
EntityTypeResponse etR2;
etR1.entityType.type = "myType";
etR2.entityType.type = "";
EXPECT_EQ("OK", etR1.check(V1, false, false, false, ""));
EXPECT_NE("OK", etR2.check(V1, false, false, false, ""));
EXPECT_NE("OK", etR1.check(V1, false, false, false, "foo"));
utExit();
}
| agpl-3.0 |
aborg0/RapidMiner-Unuk | src/com/rapidminer/operator/preprocessing/transformation/aggregation/LogProductAggregationFunction.java | 2633 | /*
* RapidMiner
*
* Copyright (C) 2001-2013 by Rapid-I and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapid-i.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.operator.preprocessing.transformation.aggregation;
import com.rapidminer.example.Attribute;
import com.rapidminer.example.table.DoubleArrayDataRow;
import com.rapidminer.tools.Ontology;
/**
* This class implements the Log Product Aggregation function. It calculates the
* logarithm of the product of a source attribute for each group. This can help in situations where
* the normal product would exceed the numerical range; the log product can then be used as an intermediate result.
*
* This obviously only works when all numbers are greater than 0.
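* <p>
* For example (illustrative): since log(a*b) = log(a) + log(b), the product of many large
* values can be accumulated as a sum of logarithms without overflowing the double range,
* whereas the direct product might not be representable.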
*
* @author Sebastian Land
*/
public class LogProductAggregationFunction extends NumericalAggregationFunction {
public static final String FUNCTION_LOG_PRODUCT = "logProduct";
public LogProductAggregationFunction(Attribute sourceAttribute, boolean ignoreMissings, boolean countOnlyDisctinct) {
super(sourceAttribute, ignoreMissings, countOnlyDisctinct, FUNCTION_LOG_PRODUCT, FUNCTION_SEPARATOR_OPEN, FUNCTION_SEPARATOR_CLOSE);
}
public LogProductAggregationFunction(Attribute sourceAttribute, boolean ignoreMissings, boolean countOnlyDisctinct, String functionName, String separatorOpen, String separatorClose) {
super(sourceAttribute, ignoreMissings, countOnlyDisctinct, functionName, separatorOpen, separatorClose);
}
@Override
public Aggregator createAggregator() {
return new LogProductAggregator(this);
}
@Override
public void setDefault(Attribute attribute, DoubleArrayDataRow row) {
row.set(attribute, 0);
}
@Override
protected int getTargetValueType(int sourceValueType) {
return Ontology.REAL;
}
@Override
public boolean isCompatible() {
return getSourceAttribute().isNumerical();
}
}
| agpl-3.0 |
KmolYuan/python-solvespace | src/platform/platform.cpp | 17403 | //-----------------------------------------------------------------------------
// Platform-dependent functionality.
//
// Copyright 2017 whitequark
//-----------------------------------------------------------------------------
#if defined(__APPLE__)
// Include Apple headers before solvespace.h to avoid identifier clashes.
# include <CoreFoundation/CFString.h>
# include <CoreFoundation/CFURL.h>
# include <CoreFoundation/CFBundle.h>
#endif
#include "solvespace.h"
#include "config.h"
#if defined(WIN32)
// Conversely, include Microsoft headers after solvespace.h to avoid clashes.
# include <windows.h>
#else
# include <unistd.h>
# include <sys/stat.h>
#endif
namespace SolveSpace {
namespace Platform {
//-----------------------------------------------------------------------------
// UTF-8 ⟷ UTF-16 conversion, on Windows.
//-----------------------------------------------------------------------------
#if defined(WIN32)
std::string Narrow(const wchar_t *in)
{
std::string out;
DWORD len = WideCharToMultiByte(CP_UTF8, 0, in, -1, NULL, 0, NULL, NULL);
out.resize(len - 1);
ssassert(WideCharToMultiByte(CP_UTF8, 0, in, -1, &out[0], len, NULL, NULL),
"Invalid UTF-16");
return out;
}
std::string Narrow(const std::wstring &in)
{
if(in == L"") return "";
std::string out;
out.resize(WideCharToMultiByte(CP_UTF8, 0, &in[0], (int)in.length(),
NULL, 0, NULL, NULL));
ssassert(WideCharToMultiByte(CP_UTF8, 0, &in[0], (int)in.length(),
&out[0], (int)out.length(), NULL, NULL),
"Invalid UTF-16");
return out;
}
std::wstring Widen(const char *in)
{
std::wstring out;
DWORD len = MultiByteToWideChar(CP_UTF8, 0, in, -1, NULL, 0);
out.resize(len - 1);
ssassert(MultiByteToWideChar(CP_UTF8, 0, in, -1, &out[0], len),
"Invalid UTF-8");
return out;
}
std::wstring Widen(const std::string &in)
{
if(in == "") return L"";
std::wstring out;
out.resize(MultiByteToWideChar(CP_UTF8, 0, &in[0], (int)in.length(), NULL, 0));
ssassert(MultiByteToWideChar(CP_UTF8, 0, &in[0], (int)in.length(),
&out[0], (int)out.length()),
"Invalid UTF-8");
return out;
}
#endif
//-----------------------------------------------------------------------------
// Path utility functions.
//-----------------------------------------------------------------------------
static std::vector<std::string> Split(const std::string &joined, char separator) {
std::vector<std::string> parts;
size_t oldpos = 0, pos = 0;
while(true) {
oldpos = pos;
pos = joined.find(separator, pos);
if(pos == std::string::npos) break;
parts.push_back(joined.substr(oldpos, pos - oldpos));
pos += 1;
}
if(oldpos != joined.length() - 1) {
parts.push_back(joined.substr(oldpos));
}
return parts;
}
static std::string Concat(const std::vector<std::string> &parts, char separator) {
std::string joined;
bool first = true;
for(auto &part : parts) {
if(!first) joined += separator;
joined += part;
first = false;
}
return joined;
}
//-----------------------------------------------------------------------------
// Path manipulation.
//-----------------------------------------------------------------------------
#if defined(WIN32)
const char SEPARATOR = '\\';
#else
const char SEPARATOR = '/';
#endif
Path Path::From(std::string raw) {
Path path = { raw };
return path;
}
Path Path::CurrentDirectory() {
#if defined(WIN32)
// On Windows, OpenFile needs an absolute UNC path proper, so get that.
std::wstring rawW;
rawW.resize(GetCurrentDirectoryW(0, NULL));
DWORD length = GetCurrentDirectoryW((int)rawW.length(), &rawW[0]);
ssassert(length > 0 && length == rawW.length() - 1, "Cannot get current directory");
rawW.resize(length);
return From(Narrow(rawW));
#else
char *raw = getcwd(NULL, 0);
ssassert(raw != NULL, "Cannot get current directory");
Path path = From(raw);
free(raw);
return path;
#endif
}
std::string Path::FileName() const {
std::string fileName = raw;
size_t slash = fileName.rfind(SEPARATOR);
if(slash != std::string::npos) {
fileName = fileName.substr(slash + 1);
}
return fileName;
}
std::string Path::FileStem() const {
std::string baseName = FileName();
size_t dot = baseName.rfind('.');
if(dot != std::string::npos) {
baseName = baseName.substr(0, dot);
}
return baseName;
}
std::string Path::Extension() const {
size_t dot = raw.rfind('.');
if(dot != std::string::npos) {
return raw.substr(dot + 1);
}
return "";
}
bool Path::HasExtension(std::string theirExt) const {
std::string ourExt = Extension();
std::transform(ourExt.begin(), ourExt.end(), ourExt.begin(), ::tolower);
std::transform(theirExt.begin(), theirExt.end(), theirExt.begin(), ::tolower);
return ourExt == theirExt;
}
Path Path::WithExtension(std::string ext) const {
Path withExt = *this;
size_t dot = withExt.raw.rfind('.');
if(dot != std::string::npos) {
withExt.raw.erase(dot);
}
withExt.raw += ".";
withExt.raw += ext;
return withExt;
}
static void FindPrefix(const std::string &raw, size_t *pos) {
*pos = std::string::npos;
#if defined(WIN32)
if(raw.size() >= 7 && raw[2] == '?' && raw[3] == '\\' &&
isalpha(raw[4]) && raw[5] == ':' && raw[6] == '\\') {
*pos = 7;
} else if(raw.size() >= 3 && isalpha(raw[0]) && raw[1] == ':' && raw[2] == '\\') {
*pos = 3;
} else if(raw.size() >= 2 && raw[0] == '\\' && raw[1] == '\\') {
size_t slashAt = raw.find('\\', 2);
if(slashAt != std::string::npos) {
*pos = raw.find('\\', slashAt + 1);
}
}
#else
if(raw.size() >= 1 && raw[0] == '/') {
*pos = 1;
}
#endif
}
bool Path::IsAbsolute() const {
size_t pos;
FindPrefix(raw, &pos);
return pos != std::string::npos;
}
// Removes one component from the end of the path.
// Returns an empty path if the path consists only of a root.
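// (Illustrative, on a POSIX build: Path::From("/usr/local/").Parent() yields "/usr/",
// and Path::From("/").Parent() yields an empty path.)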
Path Path::Parent() const {
Path parent = { raw };
if(!parent.raw.empty() && parent.raw.back() == SEPARATOR) {
parent.raw.pop_back();
}
size_t slash = parent.raw.rfind(SEPARATOR);
if(slash != std::string::npos) {
parent.raw = parent.raw.substr(0, slash + 1);
} else {
parent.raw.clear();
}
if(IsAbsolute() && !parent.IsAbsolute()) {
return From("");
}
return parent;
}
// Concatenates a component to this path.
// Returns an empty path if this path or the component is empty.
Path Path::Join(const std::string &component) const {
ssassert(component.find(SEPARATOR) == std::string::npos,
"Use the Path::Join(const Path &) overload to append an entire path");
return Join(Path::From(component));
}
// Concatenates a relative path to this path.
// Returns an empty path if either path is empty, or the other path is absolute.
Path Path::Join(const Path &other) const {
if(IsEmpty() || other.IsEmpty() || other.IsAbsolute()) {
return From("");
}
Path joined = { raw };
if(joined.raw.back() != SEPARATOR) {
joined.raw += SEPARATOR;
}
joined.raw += other.raw;
return joined;
}
// Expands the "." and ".." components in this path.
// On Windows, additionally prepends the UNC prefix to absolute paths without one.
// Returns an empty path if a ".." component would escape from the root.
Path Path::Expand(bool fromCurrentDirectory) const {
Path source;
Path expanded;
if(fromCurrentDirectory && !IsAbsolute()) {
source = CurrentDirectory().Join(*this);
} else {
source = *this;
}
size_t splitAt;
FindPrefix(source.raw, &splitAt);
if(splitAt != std::string::npos) {
expanded.raw = source.raw.substr(0, splitAt);
} else {
splitAt = 0;
}
std::vector<std::string> expandedComponents;
for(std::string component : Split(source.raw.substr(splitAt), SEPARATOR)) {
if(component == ".") {
// skip
} else if(component == "..") {
if(!expandedComponents.empty()) {
expandedComponents.pop_back();
} else {
return From("");
}
} else if(!component.empty()) {
expandedComponents.push_back(component);
}
}
if(expanded.IsEmpty()) {
if(expandedComponents.empty()) {
expandedComponents.push_back(".");
}
expanded = From(Concat(expandedComponents, SEPARATOR));
} else if(!expandedComponents.empty()) {
expanded = expanded.Join(From(Concat(expandedComponents, SEPARATOR)));
}
#if defined(WIN32)
if(expanded.IsAbsolute() && expanded.raw.substr(0, 2) != "\\\\") {
expanded.raw = "\\\\?\\" + expanded.raw;
}
#endif
return expanded;
}
static std::string FilesystemNormalize(const std::string &str) {
#if defined(WIN32)
std::wstring strW = Widen(str);
std::transform(strW.begin(), strW.end(), strW.begin(), towlower);
return Narrow(strW);
#elif defined(__APPLE__)
CFMutableStringRef cfStr =
CFStringCreateMutableCopy(NULL, 0,
CFStringCreateWithBytesNoCopy(NULL, (const UInt8*)str.data(), str.size(),
kCFStringEncodingUTF8, /*isExternalRepresentation=*/false, kCFAllocatorNull));
CFStringLowercase(cfStr, NULL);
std::string normalizedStr;
normalizedStr.resize(CFStringGetMaximumSizeOfFileSystemRepresentation(cfStr));
CFStringGetFileSystemRepresentation(cfStr, &normalizedStr[0], normalizedStr.size());
normalizedStr.erase(normalizedStr.find('\0'));
return normalizedStr;
#else
return str;
#endif
}
bool Path::Equals(const Path &other) const {
return FilesystemNormalize(raw) == FilesystemNormalize(other.raw);
}
// Returns a relative path from a given base path.
// Returns an empty path if any of the paths is not absolute, or
// if they belong to different roots, or
// if they cannot be expanded.
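// (Illustrative, on a POSIX build:
// Path::From("/usr/share/app").RelativeTo(Path::From("/usr/lib")) yields "../share/app".)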
Path Path::RelativeTo(const Path &base) const {
Path expanded = Expand();
Path baseExpanded = base.Expand();
if(!(expanded.IsAbsolute() && baseExpanded.IsAbsolute())){
return From("");
}
size_t splitAt;
FindPrefix(expanded.raw, &splitAt);
size_t baseSplitAt;
FindPrefix(baseExpanded.raw, &baseSplitAt);
if(FilesystemNormalize(expanded.raw.substr(0, splitAt)) !=
FilesystemNormalize(baseExpanded.raw.substr(0, splitAt))) {
return From("");
}
std::vector<std::string> components =
Split(expanded.raw.substr(splitAt), SEPARATOR);
std::vector<std::string> baseComponents =
Split(baseExpanded.raw.substr(baseSplitAt), SEPARATOR);
size_t common;
for(common = 0; common < baseComponents.size() &&
common < components.size(); common++) {
if(FilesystemNormalize(baseComponents[common]) !=
FilesystemNormalize(components[common])) {
break;
}
}
std::vector<std::string> resultComponents;
for(size_t i = common; i < baseComponents.size(); i++) {
resultComponents.push_back("..");
}
resultComponents.insert(resultComponents.end(),
components.begin() + common, components.end());
if(resultComponents.empty()) {
resultComponents.push_back(".");
}
return From(Concat(resultComponents, SEPARATOR));
}
Path Path::FromPortable(const std::string &repr) {
return From(Concat(Split(repr, '/'), SEPARATOR));
}
std::string Path::ToPortable() const {
ssassert(!IsAbsolute(), "absolute paths cannot be made portable");
return Concat(Split(raw, SEPARATOR), '/');
}
//-----------------------------------------------------------------------------
// File manipulation.
//-----------------------------------------------------------------------------
FILE *OpenFile(const Platform::Path &filename, const char *mode) {
ssassert(filename.raw.length() == strlen(filename.raw.c_str()),
"Unexpected null byte in middle of a path");
#if defined(WIN32)
return _wfopen(Widen(filename.Expand().raw).c_str(), Widen(mode).c_str());
#else
return fopen(filename.raw.c_str(), mode);
#endif
}
void RemoveFile(const Platform::Path &filename) {
ssassert(filename.raw.length() == strlen(filename.raw.c_str()),
"Unexpected null byte in middle of a path");
#if defined(WIN32)
_wremove(Widen(filename.Expand().raw).c_str());
#else
remove(filename.raw.c_str());
#endif
}
bool ReadFile(const Platform::Path &filename, std::string *data) {
FILE *f = OpenFile(filename, "rb");
if(f == NULL) return false;
fseek(f, 0, SEEK_END);
data->resize(ftell(f));
fseek(f, 0, SEEK_SET);
fread(&(*data)[0], 1, data->size(), f);
fclose(f);
return true;
}
bool WriteFile(const Platform::Path &filename, const std::string &data) {
FILE *f = OpenFile(filename, "wb");
if(f == NULL) return false;
fwrite(&data[0], 1, data.size(), f);
fclose(f);
return true;
}
//-----------------------------------------------------------------------------
// Loading resources, on Windows.
//-----------------------------------------------------------------------------
#if defined(WIN32) && !defined(LIBRARY)
const void *LoadResource(const std::string &name, size_t *size) {
HRSRC hres = FindResourceW(NULL, Widen(name).c_str(), RT_RCDATA);
ssassert(hres != NULL, "Cannot find resource");
HGLOBAL res = ::LoadResource(NULL, hres);
ssassert(res != NULL, "Cannot load resource");
*size = SizeofResource(NULL, hres);
return LockResource(res);
}
#endif
//-----------------------------------------------------------------------------
// Loading resources, on *nix.
//-----------------------------------------------------------------------------
#if defined(__APPLE__)
static Platform::Path PathFromCFURL(CFURLRef cfUrl) {
Path path;
CFStringRef cfPath = CFURLCopyFileSystemPath(cfUrl, kCFURLPOSIXPathStyle);
path.raw.resize(CFStringGetMaximumSizeOfFileSystemRepresentation(cfPath));
CFStringGetFileSystemRepresentation(cfPath, &path.raw[0], path.raw.size());
path.raw.erase(path.raw.find('\0'));
CFRelease(cfPath);
return path;
}
static Platform::Path ResourcePath(const std::string &name) {
Path path;
// First, try to get the URL from the bundle.
CFStringRef cfName = CFStringCreateWithCString(kCFAllocatorDefault, name.c_str(),
kCFStringEncodingUTF8);
CFURLRef cfUrl = CFBundleCopyResourceURL(CFBundleGetMainBundle(), cfName, NULL, NULL);
if(cfUrl != NULL) {
path = PathFromCFURL(cfUrl);
CFRelease(cfUrl);
}
CFRelease(cfName);
if(!path.IsEmpty()) return path;
// If that failed, it means we aren't running from the bundle.
// Reference off the executable path, then.
cfUrl = CFBundleCopyExecutableURL(CFBundleGetMainBundle());
if(cfUrl != NULL) {
path = PathFromCFURL(cfUrl).Parent().Parent().Join("res");
path = path.Join(Path::FromPortable(name));
CFRelease(cfUrl);
}
return path;
}
#elif !defined(WIN32)
# if defined(__linux__)
static const char *selfSymlink = "/proc/self/exe";
# elif defined(__NetBSD__)
static const char *selfSymlink = "/proc/curproc/exe";
# elif defined(__OpenBSD__) || defined(__FreeBSD__)
static const char *selfSymlink = "/proc/curproc/file";
# else
static const char *selfSymlink = "";
# endif
static Platform::Path FindLocalResourceDir() {
// Find out the path to the running binary.
Platform::Path selfPath;
char *expandedSelfPath = realpath(selfSymlink, NULL);
if(expandedSelfPath != NULL) {
selfPath = Path::From(expandedSelfPath);
}
free(expandedSelfPath);
Platform::Path resourceDir;
if(selfPath.IsEmpty()) {
// We don't know how to find the local resource directory on this platform,
// so use the global one from the compile-time prefix.
return Path::From(UNIX_DATADIR);
} else {
resourceDir = selfPath.Parent().Parent().Join("res");
}
struct stat st;
if(stat(resourceDir.raw.c_str(), &st) != -1) {
// An executable-adjacent resource directory exists, good.
return resourceDir;
}
// No executable-adjacent resource directory; use the one from compile-time prefix.
return Path::From(UNIX_DATADIR);
}
static Platform::Path ResourcePath(const std::string &name) {
static Platform::Path resourceDir;
if(resourceDir.IsEmpty()) {
resourceDir = FindLocalResourceDir();
}
return resourceDir.Join(Path::FromPortable(name));
}
#endif
#if !defined(WIN32)
const void *LoadResource(const std::string &name, size_t *size) {
static std::map<std::string, std::string> cache;
auto it = cache.find(name);
if(it == cache.end()) {
ssassert(ReadFile(ResourcePath(name), &cache[name]), "Cannot read resource");
it = cache.find(name);
}
const std::string &content = (*it).second;
*size = content.size();
return (const void*)content.data();
}
#endif
}
}
| agpl-3.0 |
PeaceWorksTechnologySolutions/atom | plugins/sfIsdfPlugin/modules/sfIsdfPlugin/templates/editSuccess.php | 8993 | <?php decorate_with('layout_1col.php') ?>
<?php slot('title') ?>
<h1 class="multiline">
<?php echo __('Edit %1% - ISDF', array('%1%' => sfConfig::get('app_ui_label_function'))) ?>
<span class="sub"><?php echo render_title($resource->getLabel()) ?></span>
</h1>
<?php end_slot() ?>
<?php slot('content') ?>
<?php echo $form->renderGlobalErrors() ?>
<?php if (isset($sf_request->getAttribute('sf_route')->resource)): ?>
<?php echo $form->renderFormTag(url_for(array($resource, 'module' => 'function', 'action' => 'edit')), array('id' => 'editForm')) ?>
<?php else: ?>
<?php echo $form->renderFormTag(url_for(array('module' => 'function', 'action' => 'add')), array('id' => 'editForm')) ?>
<?php endif; ?>
<?php echo $form->renderHiddenFields() ?>
<section id="content">
<fieldset class="collapsible collapsed" id="identityArea">
<legend><?php echo __('Identity area') ?></legend>
<?php echo $form->type
->help(__('"Specify whether the description is a function or one of its subdivisions." (ISDF 5.1.1) Select the type from the drop-down menu; these values are drawn from the ISDF Function Types taxonomy.'))
->label(__('Type').' <span class="form-required" title="'.__('This is a mandatory element.').'">*</span>')
->renderRow() ?>
<?php echo render_field($form->authorizedFormOfName
->help(__('"Record the authorised name of the function being described. In cases where the name is not enough, add qualifiers to make it unique such as the territorial or administrative scope, or the name of the institution which performs it. This element is to be used in conjunction with the Function description identifier element (5.4.1)." (ISDF 5.1.2)'))
->label(__('Authorized form of name').' <span class="form-required" title="'.__('This is a mandatory element.').'">*</span>'), $resource) ?>
<?php echo $form->parallelName
->help(__('"Purpose: To indicate the various forms in which the authorized form(s) of name occurs in other languages or script forms. Rule: Record the parallel form(s) of name in accordance with any relevant national or international conventions or rules applied by the agency that created the description, including any necessary sub elements and/or qualifiers required by those conventions or rules. Specify in the Rules and/or conventions element (5.4.3.) which rules have been applied." (ISDF 5.1.3)'))
->label(__('Parallel form(s) of name'))
->renderRow() ?>
<?php echo $form->otherName
->help(__('"Record any other names for the function being described." (ISDF 5.1.4)'))
->label(__('Other form(s) of name'))
->renderRow() ?>
<?php echo render_field($form->classification
->help(__('"Record any term and/or code from a classification scheme of functions. Record the classification scheme used in the element Rules and/or conventions used (5.4.3)." (ISDF 5.1.5)')), $resource) ?>
</fieldset>
<fieldset class="collapsible collapsed" id="descriptionArea">
<legend><?php echo __('Context area') ?></legend>
<?php echo render_field($form->dates
->help(__('"Provide a date or date span which covers the dates when the function was started and when it finished. If a function is ongoing, no end date is needed." (ISDF 5.2.1)')), $resource) ?>
<?php echo render_field($form->description
->help(__('"Record a narrative description of the purpose of the function." (ISDF 5.2.2)')), $resource, array('class' => 'resizable')) ?>
<?php echo render_field($form->history
->help(__('"Record in narrative form or as a chronology the main events relating to the function." (ISDF 5.2.3)')), $resource, array('class' => 'resizable')) ?>
<?php echo render_field($form->legislation
->help(__('"Record any law, directive or charter which creates, changes or ends the function." (ISDF 5.2.4)')), $resource, array('class' => 'resizable')) ?>
</fieldset>
<fieldset class="collapsible collapsed" id="relationshipsArea">
<legend><?php echo __('Relationships area') ?></legend>
<?php echo get_partial('relatedFunction', $relatedFunctionComponent->getVarHolder()->getAll()) ?>
<?php echo get_partial('relatedAuthorityRecord', $relatedAuthorityRecordComponent->getVarHolder()->getAll()) ?>
<?php echo get_partial('relatedResource', $relatedResourceComponent->getVarHolder()->getAll()) ?>
</fieldset>
<fieldset class="collapsible collapsed" id="controlArea">
<legend><?php echo __('Control area') ?></legend>
<?php echo render_field($form->descriptionIdentifier
->help(__('"Record a unique description identifier in accordance with local and/or national conventions. If the description is to be used internationally, record the code of the country in which the description was created in accordance with the latest version of ISO 3166 Codes for the representation of names of countries. Where the creator of the description is an international organisation, give the organisational identifier in place of the country code." (ISDF 5.4.1)'))
->label(__('Description identifier').' <span class="form-required" title="'.__('This is a mandatory element.').'">*</span>'), $resource) ?>
<?php echo render_field($form->institutionIdentifier
->help(__('"Record the full authorised form of name(s) of agency(ies) responsible for creating, modifying or disseminating the description or, alternatively, record a recognized code for the agency." (ISDF 5.4.2)'))
->label(__('Institution identifier')), $resource) ?>
<?php echo render_field($form->rules
->help(__('"Purpose: To identify the national or international conventions or rules applied in creating the archival description. Rule: Record the names and where useful the editions or publication dates of the conventions or rules applied." (ISDF 5.4.3)'))
->label(__('Rules and/or conventions used')), $resource, array('class' => 'resizable')) ?>
<?php echo $form->descriptionStatus
->help(__('The purpose of this field is "[t]o indicate the drafting status of the description so that users can understand the current status of the description." (ISDF 5.4.4). Select Final, Revised or Draft from the drop-down menu.'))
->label(__('Status'))
->renderRow() ?>
<?php echo $form->descriptionDetail
->help(__('Select Full, Partial or Minimal from the drop-down menu. "In the absence of national guidelines or rules, minimum records are those that consist only of the three essential elements of an ISDF compliant record (see 4.7), while full records are those that convey information for all relevant ISDF elements of description." (ISDF 5.4.5)'))
->label(__('Level of detail'))
->renderRow() ?>
<?php echo render_field($form->revisionHistory
->help(__('"Record the date the description was created and the dates of any revisions to the description." (ISDF 5.4.6)'))
->label(__('Dates of creation, revision or deletion')), $resource, array('class' => 'resizable')) ?>
<?php echo $form->language
->help(__('Select the language(s) of this record from the drop-down menu; enter the first few letters to narrow the choices. (ISDF 5.4.7)'))
->label(__('Language(s)'))
->renderRow(array('class' => 'form-autocomplete')) ?>
<?php echo $form->script
->help(__('Select the script(s) of this record from the drop-down menu; enter the first few letters to narrow the choices. (ISDF 5.4.7)'))
->label(__('Script(s)'))
->renderRow(array('class' => 'form-autocomplete')) ?>
<?php echo render_field($form->sources
->help(__('"Record the sources consulted in establishing the function description." (ISDF 5.4.8)')), $resource, array('class' => 'resizable')) ?>
<?php echo render_field($form->maintenanceNotes
->help(__('"Record notes pertinent to the creation and maintenance of the description." (ISDF 5.4.9)')), $isdf, array('class' => 'resizable')) ?>
</fieldset>
</section>
<section class="actions">
<ul>
<?php if (isset($sf_request->getAttribute('sf_route')->resource)): ?>
<li><?php echo link_to(__('Cancel'), array($resource, 'module' => 'function'), array('class' => 'c-btn')) ?></li>
<li><input class="c-btn c-btn-submit" type="submit" value="<?php echo __('Save') ?>"/></li>
<?php else: ?>
<li><?php echo link_to(__('Cancel'), array('module' => 'function', 'action' => 'list'), array('class' => 'c-btn')) ?></li>
<li><input class="c-btn c-btn-submit" type="submit" value="<?php echo __('Create') ?>"/></li>
<?php endif; ?>
</ul>
</section>
</form>
<?php end_slot() ?>
| agpl-3.0 |
FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/ValueObjects/src/ims/ocrr/vo/OrdersRequiringAuthorisationSearchCriteriaVo.java | 12173 | //#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.ocrr.vo;
public class OrdersRequiringAuthorisationSearchCriteriaVo extends ims.vo.ValueObject implements ims.vo.ImsCloneable, Comparable
{
private static final long serialVersionUID = 1L;
public OrdersRequiringAuthorisationSearchCriteriaVo()
{
}
public OrdersRequiringAuthorisationSearchCriteriaVo(ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean bean)
{
this.fromdate = bean.getFromDate() == null ? null : bean.getFromDate().buildDate();
this.todate = bean.getToDate() == null ? null : bean.getToDate().buildDate();
this.respclinician = bean.getRespClinician() == null ? null : bean.getRespClinician().buildVo();
this.hospital = bean.getHospital() == null ? null : new ims.core.resource.place.vo.LocationRefVo(new Integer(bean.getHospital().getId()), bean.getHospital().getVersion());
this.loctype = bean.getLoctype();
this.patientlocation = bean.getPatientLocation() == null ? null : bean.getPatientLocation().buildVo();
this.patientclinic = bean.getPatientClinic() == null ? null : bean.getPatientClinic().buildVo();
}
public void populate(ims.vo.ValueObjectBeanMap map, ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean bean)
{
this.fromdate = bean.getFromDate() == null ? null : bean.getFromDate().buildDate();
this.todate = bean.getToDate() == null ? null : bean.getToDate().buildDate();
this.respclinician = bean.getRespClinician() == null ? null : bean.getRespClinician().buildVo(map);
this.hospital = bean.getHospital() == null ? null : new ims.core.resource.place.vo.LocationRefVo(new Integer(bean.getHospital().getId()), bean.getHospital().getVersion());
this.loctype = bean.getLoctype();
this.patientlocation = bean.getPatientLocation() == null ? null : bean.getPatientLocation().buildVo(map);
this.patientclinic = bean.getPatientClinic() == null ? null : bean.getPatientClinic().buildVo(map);
}
public ims.vo.ValueObjectBean getBean()
{
return this.getBean(new ims.vo.ValueObjectBeanMap());
}
public ims.vo.ValueObjectBean getBean(ims.vo.ValueObjectBeanMap map)
{
ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean bean = null;
if(map != null)
bean = (ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean)map.getValueObjectBean(this);
if (bean == null)
{
bean = new ims.ocrr.vo.beans.OrdersRequiringAuthorisationSearchCriteriaVoBean();
map.addValueObjectBean(this, bean);
bean.populate(map, this);
}
return bean;
}
public boolean getFromDateIsNotNull()
{
return this.fromdate != null;
}
public ims.framework.utils.Date getFromDate()
{
return this.fromdate;
}
public void setFromDate(ims.framework.utils.Date value)
{
this.isValidated = false;
this.fromdate = value;
}
public boolean getToDateIsNotNull()
{
return this.todate != null;
}
public ims.framework.utils.Date getToDate()
{
return this.todate;
}
public void setToDate(ims.framework.utils.Date value)
{
this.isValidated = false;
this.todate = value;
}
public boolean getRespClinicianIsNotNull()
{
return this.respclinician != null;
}
public ims.core.vo.HcpLiteVo getRespClinician()
{
return this.respclinician;
}
public void setRespClinician(ims.core.vo.HcpLiteVo value)
{
this.isValidated = false;
this.respclinician = value;
}
public boolean getHospitalIsNotNull()
{
return this.hospital != null;
}
public ims.core.resource.place.vo.LocationRefVo getHospital()
{
return this.hospital;
}
public void setHospital(ims.core.resource.place.vo.LocationRefVo value)
{
this.isValidated = false;
this.hospital = value;
}
public boolean getLoctypeIsNotNull()
{
return this.loctype != null;
}
public Integer getLoctype()
{
return this.loctype;
}
public void setLoctype(Integer value)
{
this.isValidated = false;
this.loctype = value;
}
public boolean getPatientLocationIsNotNull()
{
return this.patientlocation != null;
}
public ims.core.vo.LocationLiteVo getPatientLocation()
{
return this.patientlocation;
}
public void setPatientLocation(ims.core.vo.LocationLiteVo value)
{
this.isValidated = false;
this.patientlocation = value;
}
public boolean getPatientClinicIsNotNull()
{
return this.patientclinic != null;
}
public ims.core.vo.ClinicLiteVo getPatientClinic()
{
return this.patientclinic;
}
public void setPatientClinic(ims.core.vo.ClinicLiteVo value)
{
this.isValidated = false;
this.patientclinic = value;
}
public final String getIItemText()
{
return toString();
}
public final Integer getBoId()
{
return null;
}
public final String getBoClassName()
{
return null;
}
public boolean isValidated()
{
if(this.isBusy)
return true;
this.isBusy = true;
if(!this.isValidated)
{
this.isBusy = false;
return false;
}
if(this.respclinician != null)
{
if(!this.respclinician.isValidated())
{
this.isBusy = false;
return false;
}
}
if(this.patientlocation != null)
{
if(!this.patientlocation.isValidated())
{
this.isBusy = false;
return false;
}
}
if(this.patientclinic != null)
{
if(!this.patientclinic.isValidated())
{
this.isBusy = false;
return false;
}
}
this.isBusy = false;
return true;
}
public String[] validate()
{
return validate(null);
}
public String[] validate(String[] existingErrors)
{
if(this.isBusy)
return null;
this.isBusy = true;
java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
if(existingErrors != null)
{
for(int x = 0; x < existingErrors.length; x++)
{
listOfErrors.add(existingErrors[x]);
}
}
if(this.respclinician != null)
{
String[] listOfOtherErrors = this.respclinician.validate();
if(listOfOtherErrors != null)
{
for(int x = 0; x < listOfOtherErrors.length; x++)
{
listOfErrors.add(listOfOtherErrors[x]);
}
}
}
if(this.patientlocation != null)
{
String[] listOfOtherErrors = this.patientlocation.validate();
if(listOfOtherErrors != null)
{
for(int x = 0; x < listOfOtherErrors.length; x++)
{
listOfErrors.add(listOfOtherErrors[x]);
}
}
}
if(this.patientclinic != null)
{
String[] listOfOtherErrors = this.patientclinic.validate();
if(listOfOtherErrors != null)
{
for(int x = 0; x < listOfOtherErrors.length; x++)
{
listOfErrors.add(listOfOtherErrors[x]);
}
}
}
int errorCount = listOfErrors.size();
if(errorCount == 0)
{
this.isBusy = false;
this.isValidated = true;
return null;
}
String[] result = new String[errorCount];
for(int x = 0; x < errorCount; x++)
result[x] = (String)listOfErrors.get(x);
this.isBusy = false;
this.isValidated = false;
return result;
}
public Object clone()
{
if(this.isBusy)
return this;
this.isBusy = true;
OrdersRequiringAuthorisationSearchCriteriaVo clone = new OrdersRequiringAuthorisationSearchCriteriaVo();
if(this.fromdate == null)
clone.fromdate = null;
else
clone.fromdate = (ims.framework.utils.Date)this.fromdate.clone();
if(this.todate == null)
clone.todate = null;
else
clone.todate = (ims.framework.utils.Date)this.todate.clone();
if(this.respclinician == null)
clone.respclinician = null;
else
clone.respclinician = (ims.core.vo.HcpLiteVo)this.respclinician.clone();
clone.hospital = this.hospital;
clone.loctype = this.loctype;
if(this.patientlocation == null)
clone.patientlocation = null;
else
clone.patientlocation = (ims.core.vo.LocationLiteVo)this.patientlocation.clone();
if(this.patientclinic == null)
clone.patientclinic = null;
else
clone.patientclinic = (ims.core.vo.ClinicLiteVo)this.patientclinic.clone();
clone.isValidated = this.isValidated;
this.isBusy = false;
return clone;
}
public int compareTo(Object obj)
{
return compareTo(obj, true);
}
public int compareTo(Object obj, boolean caseInsensitive)
{
if (obj == null)
{
return -1;
}
if(caseInsensitive); // this is to avoid eclipse warning only.
if (!(OrdersRequiringAuthorisationSearchCriteriaVo.class.isAssignableFrom(obj.getClass())))
{
throw new ClassCastException("A OrdersRequiringAuthorisationSearchCriteriaVo object cannot be compared an Object of type " + obj.getClass().getName());
}
OrdersRequiringAuthorisationSearchCriteriaVo compareObj = (OrdersRequiringAuthorisationSearchCriteriaVo)obj;
int retVal = 0;
if (retVal == 0)
{
if(this.getHospital() == null && compareObj.getHospital() != null)
return -1;
if(this.getHospital() != null && compareObj.getHospital() == null)
return 1;
if(this.getHospital() != null && compareObj.getHospital() != null)
retVal = this.getHospital().compareTo(compareObj.getHospital());
}
return retVal;
}
public synchronized static int generateValueObjectUniqueID()
{
return ims.vo.ValueObject.generateUniqueID();
}
public int countFieldsWithValue()
{
int count = 0;
if(this.fromdate != null)
count++;
if(this.todate != null)
count++;
if(this.respclinician != null)
count++;
if(this.hospital != null)
count++;
if(this.loctype != null)
count++;
if(this.patientlocation != null)
count++;
if(this.patientclinic != null)
count++;
return count;
}
public int countValueObjectFields()
{
return 7;
}
protected ims.framework.utils.Date fromdate;
protected ims.framework.utils.Date todate;
protected ims.core.vo.HcpLiteVo respclinician;
protected ims.core.resource.place.vo.LocationRefVo hospital;
protected Integer loctype;
protected ims.core.vo.LocationLiteVo patientlocation;
protected ims.core.vo.ClinicLiteVo patientclinic;
private boolean isValidated = false;
private boolean isBusy = false;
}
| agpl-3.0 |
DBezemer/server | plugins/content/caption/base/lib/model/kCopyCaptionsJobData.php | 1140 | <?php
/**
* @package plugins.caption
* @subpackage model.data
*/
class kCopyCaptionsJobData extends kJobData
{
/** entry Id
* @var string
*/
private $entryId;
/**
* the sources start time and duration
* @var array
*/
private $clipsDescriptionArray;
/**
* @var bool
*/
private $fullCopy;
/**
* @return string
*/
public function getEntryId()
{
return $this->entryId;
}
/**
* @param string $entryId
*/
public function setEntryId($entryId)
{
$this->entryId = $entryId;
}
/**
* @return array
*/
public function getClipsDescriptionArray()
{
return $this->clipsDescriptionArray;
}
/**
* @param array $clipsDescriptionArray
*/
public function setClipsDescriptionArray($clipsDescriptionArray)
{
$this->clipsDescriptionArray = $clipsDescriptionArray;
}
/**
* @return bool
*/
public function getFullCopy()
{
return $this->fullCopy;
}
/**
* @param bool $fullCopy
*/
public function setFullCopy($fullCopy)
{
$this->fullCopy = $fullCopy;
}
}
| agpl-3.0 |
0612800232/sns_shop | test/unit/helpers/place_shares_helper_test.rb | 78 | require 'test_helper'
class PlaceSharesHelperTest < ActionView::TestCase
end
| agpl-3.0 |
medsob/Tanaguru | rules/accessiweb2.1/src/test/java/org/tanaguru/rules/accessiweb21/Aw21Rule08012Test.java | 9718 | /*
* Tanaguru - Automated webpage assessment
* Copyright (C) 2008-2015 Tanaguru.org
*
* This file is part of Tanaguru.
*
* Tanaguru is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contact us by mail: tanaguru AT tanaguru DOT org
*/
package org.tanaguru.rules.accessiweb21;
import org.tanaguru.entity.audit.TestSolution;
import org.tanaguru.rules.accessiweb21.test.Aw21RuleImplementationTestCase;
/**
*
* @author jkowalczyk
*/
public class Aw21Rule08012Test extends Aw21RuleImplementationTestCase {
public Aw21Rule08012Test(String testName) {
super(testName);
}
@Override
protected void setUpRuleImplementationClassName() {
setRuleImplementationClassName(
"org.tanaguru.rules.accessiweb21.Aw21Rule08012");
}
@Override
protected void setUpWebResourceMap() {
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-01",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-01.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-02",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-02.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-03",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-03.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-04",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-04.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-05",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-05.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-06",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-06.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-07",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-07.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-08",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-08.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-09",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-09.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-10",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-10.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-11",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-11.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-12",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-12.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-13",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-13.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-14",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-14.html"));
getWebResourceMap().put("AW21.Test.08.01.02-1Passed-15",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-1Passed-15.html"));
getWebResourceMap().put("AW21.Test.08.01.02-2Failed-01",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-2Failed-01.html"));
getWebResourceMap().put("AW21.Test.08.01.02-2Failed-02",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-2Failed-02.html"));
getWebResourceMap().put("AW21.Test.08.01.02-4NA-01",
getWebResourceFactory().createPage(
getTestcasesFilePath() + "AW21/Aw21Rule08012/AW21.Test.08.01.02-4NA-01.html"));
}
@Override
protected void setProcess() {
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-01").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-02").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-03").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-04").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-05").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-06").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-07").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-08").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-09").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-10").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-11").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-12").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-13").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-14").getValue());
assertEquals(TestSolution.PASSED,
processPageTest("AW21.Test.08.01.02-1Passed-15").getValue());
assertEquals(TestSolution.FAILED,
processPageTest("AW21.Test.08.01.02-2Failed-01").getValue());
assertEquals(TestSolution.FAILED,
processPageTest("AW21.Test.08.01.02-2Failed-02").getValue());
assertEquals(TestSolution.NOT_APPLICABLE,
processPageTest("AW21.Test.08.01.02-4NA-01").getValue());
}
@Override
protected void setConsolidate() {
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-01").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-02").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-03").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-04").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-05").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-06").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-07").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-08").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-09").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-10").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-11").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-12").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-13").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-14").getValue());
assertEquals(TestSolution.PASSED,
consolidate("AW21.Test.08.01.02-1Passed-15").getValue());
assertEquals(TestSolution.FAILED,
consolidate("AW21.Test.08.01.02-2Failed-01").getValue());
assertEquals(TestSolution.FAILED,
consolidate("AW21.Test.08.01.02-2Failed-02").getValue());
assertEquals(TestSolution.NOT_APPLICABLE,
consolidate("AW21.Test.08.01.02-4NA-01").getValue());
}
}
| agpl-3.0 |
gcoop-libre/Advanced-Workflows-For-Sugar-CRM | install_dir/modules/gcoop_notificaciones/metadata/popupdefs.php | 2445 | <?php
if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');
/*********************************************************************************
* SugarCRM is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004 - 2010 SugarCRM Inc.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address [email protected].
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU General Public License version 3.
*
* In accordance with Section 7(b) of the GNU General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo. If the display of the logo is not reasonably feasible for
* technical reasons, the Appropriate Legal Notices must display the words
* "Powered by SugarCRM".
********************************************************************************/
$module_name = 'gcoop_notificaciones';
$_module_name = 'gcoop_notificaciones';
$popupMeta = array('moduleMain' => $module_name,
'varName' => $module_name,
'orderBy' => $_module_name.'.name',
'whereClauses' =>
array('name' => $_module_name . '.name',
),
'searchInputs'=> array($_module_name. '_number', 'name', 'priority','status'),
);
?>
| agpl-3.0 |
gsnbng/erpnext | erpnext/hr/doctype/salary_structure/salary_structure.py | 8133 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt, cint, cstr
from frappe import _
from frappe.model.mapper import get_mapped_doc
from frappe.model.document import Document
from six import iteritems
class SalaryStructure(Document):
def validate(self):
self.set_missing_values()
self.validate_amount()
self.strip_condition_and_formula_fields()
self.validate_max_benefits_with_flexi()
self.validate_component_based_on_tax_slab()
def set_missing_values(self):
overwritten_fields = ["depends_on_payment_days", "variable_based_on_taxable_salary", "is_tax_applicable", "is_flexible_benefit"]
overwritten_fields_if_missing = ["amount_based_on_formula", "formula", "amount"]
for table in ["earnings", "deductions"]:
for d in self.get(table):
component_default_value = frappe.db.get_value("Salary Component", cstr(d.salary_component),
overwritten_fields + overwritten_fields_if_missing, as_dict=1)
if component_default_value:
for fieldname in overwritten_fields:
value = component_default_value.get(fieldname)
if d.get(fieldname) != value:
d.set(fieldname, value)
if not (d.get("amount") or d.get("formula")):
for fieldname in overwritten_fields_if_missing:
d.set(fieldname, component_default_value.get(fieldname))
def validate_component_based_on_tax_slab(self):
for row in self.deductions:
if row.variable_based_on_taxable_salary and (row.amount or row.formula):
frappe.throw(_("Row #{0}: Cannot set amount or formula for Salary Component {1} with Variable Based On Taxable Salary")
.format(row.idx, row.salary_component))
def validate_amount(self):
if flt(self.net_pay) < 0 and self.salary_slip_based_on_timesheet:
frappe.throw(_("Net pay cannot be negative"))
def strip_condition_and_formula_fields(self):
# remove whitespaces from condition and formula fields
for row in self.earnings:
row.condition = row.condition.strip() if row.condition else ""
row.formula = row.formula.strip() if row.formula else ""
for row in self.deductions:
row.condition = row.condition.strip() if row.condition else ""
row.formula = row.formula.strip() if row.formula else ""
def validate_max_benefits_with_flexi(self):
have_a_flexi = False
if self.earnings:
flexi_amount = 0
for earning_component in self.earnings:
if earning_component.is_flexible_benefit == 1:
have_a_flexi = True
max_of_component = frappe.db.get_value("Salary Component", earning_component.salary_component, "max_benefit_amount")
flexi_amount += max_of_component
if have_a_flexi and flt(self.max_benefits) == 0:
frappe.throw(_("Max benefits should be greater than zero to dispense benefits"))
if have_a_flexi and flexi_amount and flt(self.max_benefits) > flexi_amount:
frappe.throw(_("Total flexible benefit component amount {0} should not be less than max benefits {1}")
.format(flexi_amount, self.max_benefits))
if not have_a_flexi and flt(self.max_benefits) > 0:
frappe.throw(_("Salary Structure should have flexible benefit component(s) to dispense benefit amount"))
def get_employees(self, **kwargs):
conditions, values = [], []
for field, value in kwargs.items():
if value:
conditions.append("{0}=%s".format(field))
values.append(value)
condition_str = " and " + " and ".join(conditions) if conditions else ""
employees = frappe.db.sql_list("select name from tabEmployee where status='Active' {condition}"
.format(condition=condition_str), tuple(values))
return employees
@frappe.whitelist()
def assign_salary_structure(self, company=None, grade=None, department=None, designation=None,employee=None,
from_date=None, base=None, variable=None, income_tax_slab=None):
employees = self.get_employees(company= company, grade= grade,department= department,designation= designation,name=employee)
if employees:
if len(employees) > 20:
frappe.enqueue(assign_salary_structure_for_employees, timeout=600,
employees=employees, salary_structure=self,from_date=from_date,
base=base, variable=variable, income_tax_slab=income_tax_slab)
else:
assign_salary_structure_for_employees(employees, self, from_date=from_date,
base=base, variable=variable, income_tax_slab=income_tax_slab)
else:
frappe.msgprint(_("No Employee Found"))
def assign_salary_structure_for_employees(employees, salary_structure, from_date=None, base=None, variable=None, income_tax_slab=None):
salary_structures_assignments = []
existing_assignments_for = get_existing_assignments(employees, salary_structure, from_date)
count=0
for employee in employees:
if employee in existing_assignments_for:
continue
count +=1
salary_structures_assignment = create_salary_structures_assignment(employee,
salary_structure, from_date, base, variable, income_tax_slab)
salary_structures_assignments.append(salary_structures_assignment)
frappe.publish_progress(count*100/len(set(employees) - set(existing_assignments_for)), title = _("Assigning Structures..."))
if salary_structures_assignments:
frappe.msgprint(_("Structures have been assigned successfully"))
def create_salary_structures_assignment(employee, salary_structure, from_date, base, variable, income_tax_slab=None):
assignment = frappe.new_doc("Salary Structure Assignment")
assignment.employee = employee
assignment.salary_structure = salary_structure.name
assignment.company = salary_structure.company
assignment.from_date = from_date
assignment.base = base
assignment.variable = variable
assignment.income_tax_slab = income_tax_slab
assignment.save(ignore_permissions = True)
assignment.submit()
return assignment.name
def get_existing_assignments(employees, salary_structure, from_date):
salary_structures_assignments = frappe.db.sql_list("""
select distinct employee from `tabSalary Structure Assignment`
where salary_structure=%s and employee in (%s)
and from_date=%s and company= %s and docstatus=1
""" % ('%s', ', '.join(['%s']*len(employees)),'%s', '%s'), [salary_structure.name] + employees+[from_date]+[salary_structure.company])
if salary_structures_assignments:
frappe.msgprint(_("Skipping Salary Structure Assignment for the following employees, as Salary Structure Assignment records already exists against them. {0}")
.format("\n".join(salary_structures_assignments)))
return salary_structures_assignments
@frappe.whitelist()
def make_salary_slip(source_name, target_doc = None, employee = None, as_print = False, print_format = None, for_preview=0, ignore_permissions=False):
def postprocess(source, target):
if employee:
employee_details = frappe.db.get_value("Employee", employee,
["employee_name", "branch", "designation", "department"], as_dict=1)
target.employee = employee
target.employee_name = employee_details.employee_name
target.branch = employee_details.branch
target.designation = employee_details.designation
target.department = employee_details.department
target.run_method('process_salary_structure', for_preview=for_preview)
doc = get_mapped_doc("Salary Structure", source_name, {
"Salary Structure": {
"doctype": "Salary Slip",
"field_map": {
"total_earning": "gross_pay",
"name": "salary_structure"
}
}
}, target_doc, postprocess, ignore_child_tables=True, ignore_permissions=ignore_permissions)
if cint(as_print):
doc.name = 'Preview for {0}'.format(employee)
return frappe.get_print(doc.doctype, doc.name, doc = doc, print_format = print_format)
else:
return doc
@frappe.whitelist()
def get_employees(salary_structure):
employees = frappe.get_list('Salary Structure Assignment',
filters={'salary_structure': salary_structure, 'docstatus': 1}, fields=['employee'])
if not employees:
frappe.throw(_("There's no Employee with Salary Structure: {0}. \
Assign {1} to an Employee to preview Salary Slip").format(salary_structure, salary_structure))
return list(set([d.employee for d in employees]))
| agpl-3.0 |
ronancpl/MapleSolaxiaV2 | scripts/npc/2042009.js | 511 | var status = 0;
var request;
function start() {
status = -1;
action(1, 0, 0);
}
function action(mode, type, selection) {
if (mode == -1) {
cm.dispose();
} else {
if (mode == 0 && status == 0) {
cm.dispose();
return;
}
if (mode == 1)
status++;
else
status--;
if (status == 0) {
cm.warpParty(980030000, 4);
cm.cancelCPQLobby();
cm.dispose();
}
}
}
| agpl-3.0 |
FreudianNM/openMAXIMS | Source Library/openmaxims_workspace/ValueObjects/src/ims/emergency/vo/AttendanceHistoryDialogParamsVoCollection.java | 8947 | //#############################################################################
//# #
//# Copyright (C) <2015> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//# IMS MAXIMS provides absolutely NO GUARANTEE OF THE CLINICAL SAFTEY of #
//# this program. Users of this software do so entirely at their own risk. #
//# IMS MAXIMS only ensures the Clinical Safety of unaltered run-time #
//# software that it builds, deploys and maintains. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5589.25814)
// Copyright (C) 1995-2015 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.emergency.vo;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import ims.framework.enumerations.SortOrder;
public class AttendanceHistoryDialogParamsVoCollection extends ims.vo.ValueObjectCollection implements ims.vo.ImsCloneable, Iterable<AttendanceHistoryDialogParamsVo>
{
private static final long serialVersionUID = 1L;
private ArrayList<AttendanceHistoryDialogParamsVo> col = new ArrayList<AttendanceHistoryDialogParamsVo>();
public String getBoClassName()
{
return null;
}
public boolean add(AttendanceHistoryDialogParamsVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
return this.col.add(value);
}
return false;
}
public boolean add(int index, AttendanceHistoryDialogParamsVo value)
{
if(value == null)
return false;
if(this.col.indexOf(value) < 0)
{
this.col.add(index, value);
return true;
}
return false;
}
public void clear()
{
this.col.clear();
}
public void remove(int index)
{
this.col.remove(index);
}
public int size()
{
return this.col.size();
}
public int indexOf(AttendanceHistoryDialogParamsVo instance)
{
return col.indexOf(instance);
}
public AttendanceHistoryDialogParamsVo get(int index)
{
return this.col.get(index);
}
public boolean set(int index, AttendanceHistoryDialogParamsVo value)
{
if(value == null)
return false;
this.col.set(index, value);
return true;
}
public void remove(AttendanceHistoryDialogParamsVo instance)
{
if(instance != null)
{
int index = indexOf(instance);
if(index >= 0)
remove(index);
}
}
public boolean contains(AttendanceHistoryDialogParamsVo instance)
{
return indexOf(instance) >= 0;
}
public Object clone()
{
AttendanceHistoryDialogParamsVoCollection clone = new AttendanceHistoryDialogParamsVoCollection();
for(int x = 0; x < this.col.size(); x++)
{
if(this.col.get(x) != null)
clone.col.add((AttendanceHistoryDialogParamsVo)this.col.get(x).clone());
else
clone.col.add(null);
}
return clone;
}
public boolean isValidated()
{
for(int x = 0; x < col.size(); x++)
if(!this.col.get(x).isValidated())
return false;
return true;
}
public String[] validate()
{
return validate(null);
}
public String[] validate(String[] existingErrors)
{
if(col.size() == 0)
return null;
java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
if(existingErrors != null)
{
for(int x = 0; x < existingErrors.length; x++)
{
listOfErrors.add(existingErrors[x]);
}
}
for(int x = 0; x < col.size(); x++)
{
String[] listOfOtherErrors = this.col.get(x).validate();
if(listOfOtherErrors != null)
{
for(int y = 0; y < listOfOtherErrors.length; y++)
{
listOfErrors.add(listOfOtherErrors[y]);
}
}
}
int errorCount = listOfErrors.size();
if(errorCount == 0)
return null;
String[] result = new String[errorCount];
for(int x = 0; x < errorCount; x++)
result[x] = (String)listOfErrors.get(x);
return result;
}
public AttendanceHistoryDialogParamsVoCollection sort()
{
return sort(SortOrder.ASCENDING);
}
public AttendanceHistoryDialogParamsVoCollection sort(boolean caseInsensitive)
{
return sort(SortOrder.ASCENDING, caseInsensitive);
}
public AttendanceHistoryDialogParamsVoCollection sort(SortOrder order)
{
return sort(new AttendanceHistoryDialogParamsVoComparator(order));
}
public AttendanceHistoryDialogParamsVoCollection sort(SortOrder order, boolean caseInsensitive)
{
return sort(new AttendanceHistoryDialogParamsVoComparator(order, caseInsensitive));
}
@SuppressWarnings("unchecked")
public AttendanceHistoryDialogParamsVoCollection sort(Comparator comparator)
{
Collections.sort(col, comparator);
return this;
}
public AttendanceHistoryDialogParamsVo[] toArray()
{
AttendanceHistoryDialogParamsVo[] arr = new AttendanceHistoryDialogParamsVo[col.size()];
col.toArray(arr);
return arr;
}
public Iterator<AttendanceHistoryDialogParamsVo> iterator()
{
return col.iterator();
}
@Override
protected ArrayList getTypedCollection()
{
return col;
}
private class AttendanceHistoryDialogParamsVoComparator implements Comparator
{
private int direction = 1;
private boolean caseInsensitive = true;
public AttendanceHistoryDialogParamsVoComparator()
{
this(SortOrder.ASCENDING);
}
public AttendanceHistoryDialogParamsVoComparator(SortOrder order)
{
if (order == SortOrder.DESCENDING)
{
direction = -1;
}
}
public AttendanceHistoryDialogParamsVoComparator(SortOrder order, boolean caseInsensitive)
{
if (order == SortOrder.DESCENDING)
{
direction = -1;
}
this.caseInsensitive = caseInsensitive;
}
public int compare(Object obj1, Object obj2)
{
AttendanceHistoryDialogParamsVo voObj1 = (AttendanceHistoryDialogParamsVo)obj1;
AttendanceHistoryDialogParamsVo voObj2 = (AttendanceHistoryDialogParamsVo)obj2;
return direction*(voObj1.compareTo(voObj2, this.caseInsensitive));
}
public boolean equals(Object obj)
{
return false;
}
}
public ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[] getBeanCollection()
{
return getBeanCollectionArray();
}
public ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[] getBeanCollectionArray()
{
ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[] result = new ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[col.size()];
for(int i = 0; i < col.size(); i++)
{
AttendanceHistoryDialogParamsVo vo = ((AttendanceHistoryDialogParamsVo)col.get(i));
result[i] = (ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean)vo.getBean();
}
return result;
}
public static AttendanceHistoryDialogParamsVoCollection buildFromBeanCollection(java.util.Collection beans)
{
AttendanceHistoryDialogParamsVoCollection coll = new AttendanceHistoryDialogParamsVoCollection();
if(beans == null)
return coll;
java.util.Iterator iter = beans.iterator();
while (iter.hasNext())
{
coll.add(((ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean)iter.next()).buildVo());
}
return coll;
}
public static AttendanceHistoryDialogParamsVoCollection buildFromBeanCollection(ims.emergency.vo.beans.AttendanceHistoryDialogParamsVoBean[] beans)
{
AttendanceHistoryDialogParamsVoCollection coll = new AttendanceHistoryDialogParamsVoCollection();
if(beans == null)
return coll;
for(int x = 0; x < beans.length; x++)
{
coll.add(beans[x].buildVo());
}
return coll;
}
}
| agpl-3.0 |