Column      Type      Values
code        string    length 3 – 1.05M
repo_name   string    length 4 – 116
path        string    length 3 – 942
language    string    30 distinct values
license     string    15 distinct values
size        int32     3 – 1.05M
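Each row below pairs a source file (code) with its repo_name, path, language, license, and size in bytes. The following is a minimal sketch of how a corpus with this schema could be consumed, assuming it is published as a Hugging Face dataset; the identifier your-org/code-corpus is a placeholder rather than the real dataset name. Rows can be streamed and filtered on the metadata columns:

# Minimal sketch, not the canonical loader for this corpus.
# "your-org/code-corpus" is a hypothetical dataset identifier; substitute the real one.
from datasets import load_dataset

ds = load_dataset("your-org/code-corpus", split="train", streaming=True)

# Keep only Apache-2.0-licensed Python files, using the columns listed above.
python_rows = (
    row for row in ds
    if row["language"] == "Python" and row["license"] == "apache-2.0"
)

for row in python_rows:
    # Each row carries: code, repo_name, path, language, license, size (bytes).
    print(row["repo_name"], row["path"], row["size"])
    break  # inspect a single example

Streaming avoids materializing the full corpus locally; the filter relies only on the columns described in the schema.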
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.psi.impl.compiled; import com.intellij.openapi.util.NotNullLazyValue; import com.intellij.psi.*; import com.intellij.psi.impl.java.stubs.JavaStubElementTypes; import com.intellij.psi.impl.java.stubs.PsiRecordComponentStub; import com.intellij.psi.impl.source.SourceTreeToPsiMap; import com.intellij.psi.impl.source.tree.TreeElement; import com.intellij.psi.search.LocalSearchScope; import com.intellij.psi.search.SearchScope; import com.intellij.psi.stubs.StubElement; import com.intellij.util.ArrayUtil; import com.intellij.util.IncorrectOperationException; import com.intellij.util.ObjectUtils; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public final class ClsRecordComponentImpl extends ClsRepositoryPsiElement<PsiRecordComponentStub> implements PsiRecordComponent { private final NotNullLazyValue<PsiTypeElement> myType; public ClsRecordComponentImpl(@NotNull PsiRecordComponentStub stub) { super(stub); myType = NotNullLazyValue.atomicLazy(() -> new ClsTypeElementImpl(this, getStub().getType())); } @Override public PsiIdentifier getNameIdentifier() { return null; } @NotNull @Override public String getName() { return getStub().getName(); } @Override public PsiElement setName(@NotNull String name) throws IncorrectOperationException { throw cannotModifyException(this); } @Override @NotNull public PsiTypeElement getTypeElement() { return myType.getValue(); } @Override @NotNull public PsiType getType() { return getTypeElement().getType(); } @Override @NotNull public PsiModifierList getModifierList() { final StubElement<PsiModifierList> child = getStub().findChildStubByType(JavaStubElementTypes.MODIFIER_LIST); assert child != null; return child.getPsi(); } @Override public boolean hasModifierProperty(@NotNull String name) { return getModifierList().hasModifierProperty(name); } @Override public PsiExpression getInitializer() { return null; } @Override public boolean hasInitializer() { return false; } @Override public Object computeConstantValue() { return null; } @Override public void normalizeDeclaration() throws IncorrectOperationException { } @Override public void appendMirrorText(int indentLevel, @NotNull StringBuilder buffer) { PsiAnnotation[] annotations = getModifierList().getAnnotations(); for (PsiAnnotation annotation : annotations) { appendText(annotation, indentLevel, buffer); buffer.append(' '); } appendText(getTypeElement(), indentLevel, buffer, " "); buffer.append(getName()); } @Override public void setMirror(@NotNull TreeElement element) throws InvalidMirrorException { setMirrorCheckingType(element, null); PsiParameter mirror = SourceTreeToPsiMap.treeToPsiNotNull(element); setMirror(getModifierList(), mirror.getModifierList()); setMirror(getTypeElement(), mirror.getTypeElement()); } @Override public void accept(@NotNull PsiElementVisitor visitor) { if (visitor instanceof JavaElementVisitor) { ((JavaElementVisitor)visitor).visitRecordComponent(this); } else { visitor.visitElement(this); } } @Override public boolean isVarArgs() { return getStub().isVararg(); } @Override @NotNull public SearchScope getUseScope() { return new LocalSearchScope(getParent()); } @NotNull @Override public PsiElement getNavigationElement() { PsiClass clsClass = getContainingClass(); if (clsClass != null) { PsiClass psiClass = ObjectUtils.tryCast(clsClass.getNavigationElement(), PsiClass.class); if (psiClass != null 
&& psiClass != clsClass) { PsiRecordComponent[] clsComponents = clsClass.getRecordComponents(); int index = ArrayUtil.indexOf(clsComponents, this); if (index >= 0) { PsiRecordComponent[] psiComponents = psiClass.getRecordComponents(); if (psiComponents.length == clsComponents.length) { return psiComponents[index]; } } } } return this; } @Override public String toString() { return "PsiRecordComponent:" + getName(); } @Override public @Nullable PsiClass getContainingClass() { PsiElement parent = getParent(); return parent instanceof PsiRecordHeader ? ((PsiRecordHeader)parent).getContainingClass() : null; } }
siosio/intellij-community
java/java-psi-impl/src/com/intellij/psi/impl/compiled/ClsRecordComponentImpl.java
Java
apache-2.0
4,582
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import sys, os, pwd
import unittest2 as unittest

try:
    gphome = os.environ.get('GPHOME')
    if not gphome:
        raise Exception("GPHOME not set")
    location = "%s/bin/lib" % gphome
    sys.path.append(location)
    from gppylib.util.ssh_utils import HostList, Session
except Exception as e:
    print "PYTHON PATH: %s" % ":".join(sys.path)
    print str(e)
    raise


class SshUtilsTestCase(unittest.TestCase):

    def test00_test_filterMultiHomedHosts(self):
        """
        filterMultiHomedHosts should deduplicate hostnames
        """
        hostlist = HostList()
        hostlist.add('localhost')
        hostlist.add('localhost')
        hostlist.add('localhost')
        hostlist.filterMultiHomedHosts()
        self.assertEqual(len(hostlist.get()), 1,
                         "There should be only 1 host in the hostlist after calling filterMultiHomedHosts")

    def test01_test_SessionLogin(self):
        """
        Session.login test, one success and one failure
        """
        uname = pwd.getpwuid(os.getuid()).pw_name
        s = Session()
        s.login(['localhost', 'fakehost'], uname)


if __name__ == "__main__":
    unittest.main()
lavjain/incubator-hawq
tools/bin/gppylib/util/test/unit/test_unit_ssh_utils.py
Python
apache-2.0
2,002
/* Copyright 2007-2015 QReal Research Group
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License. */

#include "removeFileBlock.h"

using namespace trik::blocks::details;

RemoveFileBlock::RemoveFileBlock(kitBase::robotModel::RobotModelInterface &robotModel)
	: kitBase::blocksBase::common::DeviceBlock<robotModel::parts::TrikShell>(robotModel)
{
}

void RemoveFileBlock::doJob(robotModel::parts::TrikShell &shell)
{
	shell.removeFile(stringProperty(id(), "File"));
	emit done(mNextBlockId);
}
RomanBelkov/qreal
plugins/robots/common/trikKit/src/blocks/details/removeFileBlock.cpp
C++
apache-2.0
998
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.streaming import java.util.concurrent.TimeUnit.NANOSECONDS import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.encoders.ExpressionEncoder import org.apache.spark.sql.catalyst.expressions.{Ascending, Attribute, Expression, SortOrder, UnsafeRow} import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.catalyst.plans.physical.{ClusteredDistribution, Distribution} import org.apache.spark.sql.execution._ import org.apache.spark.sql.execution.streaming.StreamingSymmetricHashJoinHelper._ import org.apache.spark.sql.execution.streaming.state._ import org.apache.spark.sql.streaming.{GroupStateTimeout, OutputMode} import org.apache.spark.sql.streaming.GroupStateTimeout.NoTimeout import org.apache.spark.util.{CompletionIterator, SerializableConfiguration} /** * Physical operator for executing `FlatMapGroupsWithState` * * @param func function called on each group * @param keyDeserializer used to extract the key object for each group. * @param valueDeserializer used to extract the items in the iterator from an input row. * @param initialStateDeserializer used to extract the state object from the initialState dataset * @param groupingAttributes used to group the data * @param dataAttributes used to read the data * @param outputObjAttr Defines the output object * @param stateEncoder used to serialize/deserialize state before calling `func` * @param outputMode the output mode of `func` * @param timeoutConf used to timeout groups that have not received data in a while * @param batchTimestampMs processing timestamp of the current batch. 
* @param eventTimeWatermark event time watermark for the current batch * @param initialState the user specified initial state * @param hasInitialState indicates whether the initial state is provided or not * @param child the physical plan for the underlying data */ case class FlatMapGroupsWithStateExec( func: (Any, Iterator[Any], LogicalGroupState[Any]) => Iterator[Any], keyDeserializer: Expression, valueDeserializer: Expression, initialStateDeserializer: Expression, groupingAttributes: Seq[Attribute], initialStateGroupAttrs: Seq[Attribute], dataAttributes: Seq[Attribute], initialStateDataAttrs: Seq[Attribute], outputObjAttr: Attribute, stateInfo: Option[StatefulOperatorStateInfo], stateEncoder: ExpressionEncoder[Any], stateFormatVersion: Int, outputMode: OutputMode, timeoutConf: GroupStateTimeout, batchTimestampMs: Option[Long], eventTimeWatermark: Option[Long], initialState: SparkPlan, hasInitialState: Boolean, child: SparkPlan ) extends BinaryExecNode with ObjectProducerExec with StateStoreWriter with WatermarkSupport { import FlatMapGroupsWithStateExecHelper._ import GroupStateImpl._ override def left: SparkPlan = child override def right: SparkPlan = initialState private val isTimeoutEnabled = timeoutConf != NoTimeout private val watermarkPresent = child.output.exists { case a: Attribute if a.metadata.contains(EventTimeWatermark.delayKey) => true case _ => false } private[sql] val stateManager = createStateManager(stateEncoder, isTimeoutEnabled, stateFormatVersion) /** * Distribute by grouping attributes - We need the underlying data and the initial state data * to have the same grouping so that the data are co-lacated on the same task. */ override def requiredChildDistribution: Seq[Distribution] = { // NOTE: Please read through the NOTE on the classdoc of StatefulOpClusteredDistribution // before making any changes. // TODO(SPARK-38204) ClusteredDistribution( groupingAttributes, requiredNumPartitions = stateInfo.map(_.numPartitions)) :: ClusteredDistribution( initialStateGroupAttrs, requiredNumPartitions = stateInfo.map(_.numPartitions)) :: Nil } /** * Ordering needed for using GroupingIterator. * We need the initial state to also use the ordering as the data so that we can co-locate the * keys from the underlying data and the initial state. */ override def requiredChildOrdering: Seq[Seq[SortOrder]] = Seq( groupingAttributes.map(SortOrder(_, Ascending)), initialStateGroupAttrs.map(SortOrder(_, Ascending))) override def keyExpressions: Seq[Attribute] = groupingAttributes override def shortName: String = "flatMapGroupsWithState" override def shouldRunAnotherBatch(newMetadata: OffsetSeqMetadata): Boolean = { timeoutConf match { case ProcessingTimeTimeout => true // Always run batches to process timeouts case EventTimeTimeout => // Process another non-data batch only if the watermark has changed in this executed plan eventTimeWatermark.isDefined && newMetadata.batchWatermarkMs > eventTimeWatermark.get case _ => false } } /** * Process data by applying the user defined function on a per partition basis. * * @param iter - Iterator of the data rows * @param store - associated state store for this partition * @param processor - handle to the input processor object. 
* @param initialStateIterOption - optional initial state iterator */ def processDataWithPartition( iter: Iterator[InternalRow], store: StateStore, processor: InputProcessor, initialStateIterOption: Option[Iterator[InternalRow]] = None ): CompletionIterator[InternalRow, Iterator[InternalRow]] = { val allUpdatesTimeMs = longMetric("allUpdatesTimeMs") val commitTimeMs = longMetric("commitTimeMs") val timeoutLatencyMs = longMetric("allRemovalsTimeMs") val currentTimeNs = System.nanoTime val updatesStartTimeNs = currentTimeNs var timeoutProcessingStartTimeNs = currentTimeNs // If timeout is based on event time, then filter late data based on watermark val filteredIter = watermarkPredicateForData match { case Some(predicate) if timeoutConf == EventTimeTimeout => applyRemovingRowsOlderThanWatermark(iter, predicate) case _ => iter } val processedOutputIterator = initialStateIterOption match { case Some(initStateIter) if initStateIter.hasNext => processor.processNewDataWithInitialState(filteredIter, initStateIter) case _ => processor.processNewData(filteredIter) } val newDataProcessorIter = CompletionIterator[InternalRow, Iterator[InternalRow]]( processedOutputIterator, { // Once the input is processed, mark the start time for timeout processing to measure // it separately from the overall processing time. timeoutProcessingStartTimeNs = System.nanoTime }) val timeoutProcessorIter = CompletionIterator[InternalRow, Iterator[InternalRow]](processor.processTimedOutState(), { // Note: `timeoutLatencyMs` also includes the time the parent operator took for // processing output returned through iterator. timeoutLatencyMs += NANOSECONDS.toMillis(System.nanoTime - timeoutProcessingStartTimeNs) }) // Generate a iterator that returns the rows grouped by the grouping function // Note that this code ensures that the filtering for timeout occurs only after // all the data has been processed. This is to ensure that the timeout information of all // the keys with data is updated before they are processed for timeouts. val outputIterator = newDataProcessorIter ++ timeoutProcessorIter // Return an iterator of all the rows generated by all the keys, such that when fully // consumed, all the state updates will be committed by the state store CompletionIterator[InternalRow, Iterator[InternalRow]](outputIterator, { // Note: Due to the iterator lazy execution, this metric also captures the time taken // by the upstream (consumer) operators in addition to the processing in this operator. allUpdatesTimeMs += NANOSECONDS.toMillis(System.nanoTime - updatesStartTimeNs) commitTimeMs += timeTakenMs { store.commit() } setStoreMetrics(store) setOperatorMetrics() }) } override protected def doExecute(): RDD[InternalRow] = { metrics // force lazy init at driver // Throw errors early if parameters are not as expected timeoutConf match { case ProcessingTimeTimeout => require(batchTimestampMs.nonEmpty) case EventTimeTimeout => require(eventTimeWatermark.nonEmpty) // watermark value has been populated require(watermarkExpression.nonEmpty) // input schema has watermark attribute case _ => } if (hasInitialState) { // If the user provided initial state we need to have the initial state and the // data in the same partition so that we can still have just one commit at the end. 
val storeConf = new StateStoreConf(session.sqlContext.sessionState.conf) val hadoopConfBroadcast = sparkContext.broadcast( new SerializableConfiguration(session.sqlContext.sessionState.newHadoopConf())) child.execute().stateStoreAwareZipPartitions( initialState.execute(), getStateInfo, storeNames = Seq(), session.sqlContext.streams.stateStoreCoordinator) { // The state store aware zip partitions will provide us with two iterators, // child data iterator and the initial state iterator per partition. case (partitionId, childDataIterator, initStateIterator) => val stateStoreId = StateStoreId( stateInfo.get.checkpointLocation, stateInfo.get.operatorId, partitionId) val storeProviderId = StateStoreProviderId(stateStoreId, stateInfo.get.queryRunId) val store = StateStore.get( storeProviderId, groupingAttributes.toStructType, stateManager.stateSchema, numColsPrefixKey = 0, stateInfo.get.storeVersion, storeConf, hadoopConfBroadcast.value.value) val processor = new InputProcessor(store) processDataWithPartition(childDataIterator, store, processor, Some(initStateIterator)) } } else { child.execute().mapPartitionsWithStateStore[InternalRow]( getStateInfo, groupingAttributes.toStructType, stateManager.stateSchema, numColsPrefixKey = 0, session.sqlContext.sessionState, Some(session.sqlContext.streams.stateStoreCoordinator) ) { case (store: StateStore, singleIterator: Iterator[InternalRow]) => val processor = new InputProcessor(store) processDataWithPartition(singleIterator, store, processor) } } } /** Helper class to update the state store */ class InputProcessor(store: StateStore) { // Converters for translating input keys, values, output data between rows and Java objects private val getKeyObj = ObjectOperator.deserializeRowToObject(keyDeserializer, groupingAttributes) private val getValueObj = ObjectOperator.deserializeRowToObject(valueDeserializer, dataAttributes) private val getOutputRow = ObjectOperator.wrapObjectToRow(outputObjectType) private val getStateObj = if (hasInitialState) { Some(ObjectOperator.deserializeRowToObject(initialStateDeserializer, initialStateDataAttrs)) } else { None } // Metrics private val numUpdatedStateRows = longMetric("numUpdatedStateRows") private val numOutputRows = longMetric("numOutputRows") private val numRemovedStateRows = longMetric("numRemovedStateRows") /** * For every group, get the key, values and corresponding state and call the function, * and return an iterator of rows */ def processNewData(dataIter: Iterator[InternalRow]): Iterator[InternalRow] = { val groupedIter = GroupedIterator(dataIter, groupingAttributes, child.output) groupedIter.flatMap { case (keyRow, valueRowIter) => val keyUnsafeRow = keyRow.asInstanceOf[UnsafeRow] callFunctionAndUpdateState( stateManager.getState(store, keyUnsafeRow), valueRowIter, hasTimedOut = false) } } /** * Process the new data iterator along with the initial state. The initial state is applied * before processing the new data for every key. The user defined function is called only * once for every key that has either initial state or data or both. */ def processNewDataWithInitialState( childDataIter: Iterator[InternalRow], initStateIter: Iterator[InternalRow] ): Iterator[InternalRow] = { if (!childDataIter.hasNext && !initStateIter.hasNext) return Iterator.empty // Create iterators for the child data and the initial state grouped by their grouping // attributes. 
val groupedChildDataIter = GroupedIterator(childDataIter, groupingAttributes, child.output) val groupedInitialStateIter = GroupedIterator(initStateIter, initialStateGroupAttrs, initialState.output) // Create a CoGroupedIterator that will group the two iterators together for every key group. new CoGroupedIterator( groupedChildDataIter, groupedInitialStateIter, groupingAttributes).flatMap { case (keyRow, valueRowIter, initialStateRowIter) => val keyUnsafeRow = keyRow.asInstanceOf[UnsafeRow] var foundInitialStateForKey = false initialStateRowIter.foreach { initialStateRow => if (foundInitialStateForKey) { FlatMapGroupsWithStateExec.foundDuplicateInitialKeyException() } foundInitialStateForKey = true val initStateObj = getStateObj.get(initialStateRow) stateManager.putState(store, keyUnsafeRow, initStateObj, NO_TIMESTAMP) } // We apply the values for the key after applying the initial state. callFunctionAndUpdateState( stateManager.getState(store, keyUnsafeRow), valueRowIter, hasTimedOut = false ) } } /** Find the groups that have timeout set and are timing out right now, and call the function */ def processTimedOutState(): Iterator[InternalRow] = { if (isTimeoutEnabled) { val timeoutThreshold = timeoutConf match { case ProcessingTimeTimeout => batchTimestampMs.get case EventTimeTimeout => eventTimeWatermark.get case _ => throw new IllegalStateException( s"Cannot filter timed out keys for $timeoutConf") } val timingOutPairs = stateManager.getAllState(store).filter { state => state.timeoutTimestamp != NO_TIMESTAMP && state.timeoutTimestamp < timeoutThreshold } timingOutPairs.flatMap { stateData => callFunctionAndUpdateState(stateData, Iterator.empty, hasTimedOut = true) } } else Iterator.empty } /** * Call the user function on a key's data, update the state store, and return the return data * iterator. Note that the store updating is lazy, that is, the store will be updated only * after the returned iterator is fully consumed. 
* * @param stateData All the data related to the state to be updated * @param valueRowIter Iterator of values as rows, cannot be null, but can be empty * @param hasTimedOut Whether this function is being called for a key timeout */ private def callFunctionAndUpdateState( stateData: StateData, valueRowIter: Iterator[InternalRow], hasTimedOut: Boolean): Iterator[InternalRow] = { val keyObj = getKeyObj(stateData.keyRow) // convert key to objects val valueObjIter = valueRowIter.map(getValueObj.apply) // convert value rows to objects val groupState = GroupStateImpl.createForStreaming( Option(stateData.stateObj), batchTimestampMs.getOrElse(NO_TIMESTAMP), eventTimeWatermark.getOrElse(NO_TIMESTAMP), timeoutConf, hasTimedOut, watermarkPresent) // Call function, get the returned objects and convert them to rows val mappedIterator = func(keyObj, valueObjIter, groupState).map { obj => numOutputRows += 1 getOutputRow(obj) } // When the iterator is consumed, then write changes to state def onIteratorCompletion: Unit = { if (groupState.isRemoved && !groupState.getTimeoutTimestampMs.isPresent()) { stateManager.removeState(store, stateData.keyRow) numRemovedStateRows += 1 } else { val currentTimeoutTimestamp = groupState.getTimeoutTimestampMs.orElse(NO_TIMESTAMP) val hasTimeoutChanged = currentTimeoutTimestamp != stateData.timeoutTimestamp val shouldWriteState = groupState.isUpdated || groupState.isRemoved || hasTimeoutChanged if (shouldWriteState) { val updatedStateObj = if (groupState.exists) groupState.get else null stateManager.putState(store, stateData.keyRow, updatedStateObj, currentTimeoutTimestamp) numUpdatedStateRows += 1 } } } // Return an iterator of rows such that fully consumed, the updated state value will be saved CompletionIterator[InternalRow, Iterator[InternalRow]](mappedIterator, onIteratorCompletion) } } override protected def withNewChildrenInternal( newLeft: SparkPlan, newRight: SparkPlan): FlatMapGroupsWithStateExec = copy(child = newLeft, initialState = newRight) } object FlatMapGroupsWithStateExec { def foundDuplicateInitialKeyException(): Exception = { throw new IllegalArgumentException("The initial state provided contained " + "multiple rows(state) with the same key. Make sure to de-duplicate the " + "initial state before passing it.") } /** * Plan logical flatmapGroupsWIthState for batch queries * If the initial state is provided, we create an instance of the CoGroupExec, if the initial * state is not provided we create an instance of the MapGroupsExec */ // scalastyle:off argcount def generateSparkPlanForBatchQueries( userFunc: (Any, Iterator[Any], LogicalGroupState[Any]) => Iterator[Any], keyDeserializer: Expression, valueDeserializer: Expression, initialStateDeserializer: Expression, groupingAttributes: Seq[Attribute], initialStateGroupAttrs: Seq[Attribute], dataAttributes: Seq[Attribute], initialStateDataAttrs: Seq[Attribute], outputObjAttr: Attribute, timeoutConf: GroupStateTimeout, hasInitialState: Boolean, initialState: SparkPlan, child: SparkPlan): SparkPlan = { if (hasInitialState) { val watermarkPresent = child.output.exists { case a: Attribute if a.metadata.contains(EventTimeWatermark.delayKey) => true case _ => false } val func = (keyRow: Any, values: Iterator[Any], states: Iterator[Any]) => { // Check if there is only one state for every key. 
var foundInitialStateForKey = false val optionalStates = states.map { stateValue => if (foundInitialStateForKey) { foundDuplicateInitialKeyException() } foundInitialStateForKey = true stateValue }.toArray // Create group state object val groupState = GroupStateImpl.createForStreaming( optionalStates.headOption, System.currentTimeMillis, GroupStateImpl.NO_TIMESTAMP, timeoutConf, hasTimedOut = false, watermarkPresent) // Call user function with the state and values for this key userFunc(keyRow, values, groupState) } CoGroupExec( func, keyDeserializer, valueDeserializer, initialStateDeserializer, groupingAttributes, initialStateGroupAttrs, dataAttributes, initialStateDataAttrs, outputObjAttr, child, initialState) } else { MapGroupsExec( userFunc, keyDeserializer, valueDeserializer, groupingAttributes, dataAttributes, outputObjAttr, timeoutConf, child) } } }
ueshin/apache-spark
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/FlatMapGroupsWithStateExec.scala
Scala
apache-2.0
20,929
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.client.watcher; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.XContentTestUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.function.Predicate; public class WatchStatusTests extends ESTestCase { public void testBasicParsing() throws IOException { int expectedVersion = randomIntBetween(0, 100); ExecutionState expectedExecutionState = randomFrom(ExecutionState.values()); boolean expectedActive = randomBoolean(); ActionStatus.AckStatus.State expectedAckState = randomFrom(ActionStatus.AckStatus.State.values()); XContentBuilder builder = createTestXContent(expectedVersion, expectedExecutionState, expectedActive, expectedAckState); BytesReference bytes = BytesReference.bytes(builder); WatchStatus watchStatus = parse(builder.contentType(), bytes); assertEquals(expectedVersion, watchStatus.version()); assertEquals(expectedExecutionState, watchStatus.getExecutionState()); assertEquals(new DateTime(1432663467763L, DateTimeZone.UTC), watchStatus.lastChecked()); assertEquals(DateTime.parse("2015-05-26T18:04:27.763Z"), watchStatus.lastMetCondition()); WatchStatus.State watchState = watchStatus.state(); assertEquals(expectedActive, watchState.isActive()); assertEquals(DateTime.parse("2015-05-26T18:04:27.723Z"), watchState.getTimestamp()); ActionStatus actionStatus = watchStatus.actionStatus("test_index"); assertNotNull(actionStatus); ActionStatus.AckStatus ackStatus = actionStatus.ackStatus(); assertEquals(DateTime.parse("2015-05-26T18:04:27.763Z"), ackStatus.timestamp()); assertEquals(expectedAckState, ackStatus.state()); ActionStatus.Execution lastExecution = actionStatus.lastExecution(); assertEquals(DateTime.parse("2015-05-25T18:04:27.733Z"), lastExecution.timestamp()); assertFalse(lastExecution.successful()); assertEquals("failed to send email", lastExecution.reason()); ActionStatus.Execution lastSuccessfulExecution = actionStatus.lastSuccessfulExecution(); assertEquals(DateTime.parse("2015-05-25T18:04:27.773Z"), lastSuccessfulExecution.timestamp()); assertTrue(lastSuccessfulExecution.successful()); assertNull(lastSuccessfulExecution.reason()); ActionStatus.Throttle lastThrottle = actionStatus.lastThrottle(); assertEquals(DateTime.parse("2015-04-25T18:05:23.445Z"), lastThrottle.timestamp()); assertEquals("throttling interval is set to [5 seconds] ...", lastThrottle.reason()); } public 
void testParsingWithUnknownKeys() throws IOException { int expectedVersion = randomIntBetween(0, 100); ExecutionState expectedExecutionState = randomFrom(ExecutionState.values()); boolean expectedActive = randomBoolean(); ActionStatus.AckStatus.State expectedAckState = randomFrom(ActionStatus.AckStatus.State.values()); XContentBuilder builder = createTestXContent(expectedVersion, expectedExecutionState, expectedActive, expectedAckState); BytesReference bytes = BytesReference.bytes(builder); Predicate<String> excludeFilter = field -> field.equals("actions"); BytesReference bytesWithRandomFields = XContentTestUtils.insertRandomFields( builder.contentType(), bytes, excludeFilter, random()); WatchStatus watchStatus = parse(builder.contentType(), bytesWithRandomFields); assertEquals(expectedVersion, watchStatus.version()); assertEquals(expectedExecutionState, watchStatus.getExecutionState()); } public void testOptionalFieldsParsing() throws IOException { XContentType contentType = randomFrom(XContentType.values()); XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject() .field("version", 42) .startObject("actions") .startObject("test_index") .startObject("ack") .field("timestamp", "2015-05-26T18:04:27.763Z") .field("state", "ackable") .endObject() .startObject("last_execution") .field("timestamp", "2015-05-25T18:04:27.733Z") .field("successful", false) .field("reason", "failed to send email") .endObject() .endObject() .endObject() .endObject(); BytesReference bytes = BytesReference.bytes(builder); WatchStatus watchStatus = parse(builder.contentType(), bytes); assertEquals(42, watchStatus.version()); assertNull(watchStatus.getExecutionState()); assertFalse(watchStatus.checked()); } private XContentBuilder createTestXContent(int version, ExecutionState executionState, boolean active, ActionStatus.AckStatus.State ackState) throws IOException { XContentType contentType = randomFrom(XContentType.values()); return XContentFactory.contentBuilder(contentType).startObject() .field("version", version) .field("execution_state", executionState) .field("last_checked", 1432663467763L) .field("last_met_condition", "2015-05-26T18:04:27.763Z") .startObject("state") .field("active", active) .field("timestamp", "2015-05-26T18:04:27.723Z") .endObject() .startObject("actions") .startObject("test_index") .startObject("ack") .field("timestamp", "2015-05-26T18:04:27.763Z") .field("state", ackState) .endObject() .startObject("last_execution") .field("timestamp", "2015-05-25T18:04:27.733Z") .field("successful", false) .field("reason", "failed to send email") .endObject() .startObject("last_successful_execution") .field("timestamp", "2015-05-25T18:04:27.773Z") .field("successful", true) .endObject() .startObject("last_throttle") .field("timestamp", "2015-04-25T18:05:23.445Z") .field("reason", "throttling interval is set to [5 seconds] ...") .endObject() .endObject() .endObject() .endObject(); } private WatchStatus parse(XContentType contentType, BytesReference bytes) throws IOException { XContentParser parser = XContentFactory.xContent(contentType) .createParser(NamedXContentRegistry.EMPTY, null, bytes.streamInput()); parser.nextToken(); return WatchStatus.parse(parser); } }
gfyoung/elasticsearch
client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/WatchStatusTests.java
Java
apache-2.0
8,301
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
import six
import sys
import unittest

from io import StringIO
from itertools import dropwhile
from mock import patch, call

from airflow import configuration, models
from airflow.utils import db
from airflow.contrib.hooks.spark_sql_hook import SparkSqlHook


def get_after(sentinel, iterable):
    """Get the value after `sentinel` in an `iterable`"""
    truncated = dropwhile(lambda el: el != sentinel, iterable)
    next(truncated)
    return next(truncated)


class TestSparkSqlHook(unittest.TestCase):
    _config = {
        'conn_id': 'spark_default',
        'executor_cores': 4,
        'executor_memory': '22g',
        'keytab': 'privileged_user.keytab',
        'name': 'spark-job',
        'num_executors': 10,
        'verbose': True,
        'sql': ' /path/to/sql/file.sql ',
        'conf': 'key=value,PROP=VALUE'
    }

    def setUp(self):
        configuration.load_test_config()
        db.merge_conn(
            models.Connection(
                conn_id='spark_default', conn_type='spark',
                host='yarn://yarn-master')
        )

    def test_build_command(self):
        hook = SparkSqlHook(**self._config)

        # The subprocess requires an array but we build the cmd by joining on a space
        cmd = ' '.join(hook._prepare_command(""))

        # Check all the parameters
        assert "--executor-cores {}".format(self._config['executor_cores']) in cmd
        assert "--executor-memory {}".format(self._config['executor_memory']) in cmd
        assert "--keytab {}".format(self._config['keytab']) in cmd
        assert "--name {}".format(self._config['name']) in cmd
        assert "--num-executors {}".format(self._config['num_executors']) in cmd
        sql_path = get_after('-f', hook._prepare_command(""))
        assert self._config['sql'].strip() == sql_path

        # Check if all config settings are there
        for kv in self._config['conf'].split(","):
            k, v = kv.split('=')
            assert "--conf {0}={1}".format(k, v) in cmd

        if self._config['verbose']:
            assert "--verbose" in cmd

    @patch('airflow.contrib.hooks.spark_sql_hook.subprocess.Popen')
    def test_spark_process_runcmd(self, mock_popen):
        # Given
        mock_popen.return_value.stdout = six.StringIO('Spark-sql communicates using stdout')
        mock_popen.return_value.stderr = six.StringIO('stderr')
        mock_popen.return_value.wait.return_value = 0

        # When
        hook = SparkSqlHook(
            conn_id='spark_default',
            sql='SELECT 1'
        )
        with patch.object(hook.log, 'debug') as mock_debug:
            with patch.object(hook.log, 'info') as mock_info:
                hook.run_query()
                mock_debug.assert_called_with(
                    'Spark-Sql cmd: %s',
                    ['spark-sql', '-e', 'SELECT 1', '--master', 'yarn', '--name', 'default-name', '--verbose', '--queue', 'default']
                )
                mock_info.assert_called_with(
                    'Spark-sql communicates using stdout'
                )

        # Then
        self.assertEqual(
            mock_popen.mock_calls[0],
            call(['spark-sql', '-e', 'SELECT 1', '--master', 'yarn', '--name',
                  'default-name', '--verbose', '--queue', 'default'],
                 stderr=-2, stdout=-1)
        )


if __name__ == '__main__':
    unittest.main()
danielvdende/incubator-airflow
tests/contrib/hooks/test_spark_sql_hook.py
Python
apache-2.0
4,171
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the generic Grappler optimizations used within tf.data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from absl.testing import parameterized

from tensorflow.core.example import example_pb2
from tensorflow.core.example import feature_pb2
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import combinations
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.platform import test


class GrapplerTest(test_base.DatasetTestBase, parameterized.TestCase):

  @combinations.generate(test_base.default_test_combinations())
  def testConstantFoldingVarLenFeature(self):
    example = example_pb2.Example(features=feature_pb2.Features(feature={}))
    dataset = dataset_ops.Dataset.from_tensors(example.SerializeToString())

    def parse_fn(serialized):
      features = {"x": parsing_ops.VarLenFeature(dtypes.int64)}
      parsed = parsing_ops.parse_single_example(serialized, features)
      parsed = parsed["x"].values

      size = array_ops.size(parsed)
      value = math_ops.cast(parsed, dtypes.bool)
      return control_flow_ops.cond(size > 0,
                                   lambda: array_ops.reshape(value, []),
                                   lambda: array_ops.zeros([], dtypes.bool))

    dataset = dataset.map(parse_fn)
    self.assertDatasetProduces(dataset, expected_output=[0])

  @combinations.generate(test_base.default_test_combinations())
  def testLayoutOptimizationConv2D(self):
    if not test_util.is_gpu_available():
      self.skipTest("No GPU available")

    # Compute convolution with input and filter of [1, 1, 1, 1] shape.
    # Verify that Grappler doesn't transpose Conv2D data format to NCHW.
    dataset = dataset_ops.Dataset.from_tensors((1, 1))

    def map_function(x, y):
      i = math_ops.cast(x, dtypes.float32)
      i = array_ops.reshape(i, [1, 1, 1, 1])
      f = math_ops.cast(y, dtypes.float32)
      f = array_ops.reshape(f, [1, 1, 1, 1])
      c = nn_ops.conv2d(i, f, strides=[1, 1, 1, 1], padding="VALID")
      return array_ops.reshape(c, ())

    dataset = dataset.map(map_function)
    self.assertDatasetProduces(dataset, expected_output=[1])


if __name__ == "__main__":
  test.main()
karllessard/tensorflow
tensorflow/python/data/experimental/kernel_tests/optimization/grappler_test.py
Python
apache-2.0
3,296
/* * Copyright 2005-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.ldap.config; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.parsing.BeanComponentDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser; import org.springframework.beans.factory.xml.BeanDefinitionParser; import org.springframework.beans.factory.xml.ParserContext; import org.springframework.ldap.transaction.compensating.manager.ContextSourceAndDataSourceTransactionManager; import org.springframework.ldap.transaction.compensating.manager.ContextSourceAndHibernateTransactionManager; import org.springframework.ldap.transaction.compensating.manager.ContextSourceTransactionManager; import org.springframework.ldap.transaction.compensating.support.DefaultTempEntryRenamingStrategy; import org.springframework.ldap.transaction.compensating.support.DifferentSubtreeTempEntryRenamingStrategy; import org.springframework.util.Assert; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; import static org.springframework.ldap.config.ParserUtils.getString; /** * @author Mattias Hellborg Arthursson */ public class TransactionManagerParser implements BeanDefinitionParser { private static final String ATT_CONTEXT_SOURCE_REF = "context-source-ref"; private static final String ATT_DATA_SOURCE_REF = "data-source-ref"; private static final String ATT_SESSION_FACTORY_REF = "session-factory-ref"; private static final String ATT_TEMP_SUFFIX = "temp-suffix"; private static final String ATT_SUBTREE_NODE = "subtree-node"; private static final String DEFAULT_ID = "transactionManager"; @Override public BeanDefinition parse(Element element, ParserContext parserContext) { String contextSourceRef = getString(element, ATT_CONTEXT_SOURCE_REF, ContextSourceParser.DEFAULT_ID); String dataSourceRef = element.getAttribute(ATT_DATA_SOURCE_REF); String sessionFactoryRef = element.getAttribute(ATT_SESSION_FACTORY_REF); if(StringUtils.hasText(dataSourceRef) && StringUtils.hasText(sessionFactoryRef)) { throw new IllegalArgumentException( String.format("Only one of %s and %s can be specified", ATT_DATA_SOURCE_REF, ATT_SESSION_FACTORY_REF)); } BeanDefinitionBuilder builder; if(StringUtils.hasText(dataSourceRef)) { builder = BeanDefinitionBuilder.rootBeanDefinition(ContextSourceAndDataSourceTransactionManager.class); builder.addPropertyReference("dataSource", dataSourceRef); } else if(StringUtils.hasText(sessionFactoryRef)) { builder = BeanDefinitionBuilder.rootBeanDefinition(ContextSourceAndHibernateTransactionManager.class); builder.addPropertyReference("sessionFactory", sessionFactoryRef); } else { // Standard transaction manager builder = BeanDefinitionBuilder.rootBeanDefinition(ContextSourceTransactionManager.class); } builder.addPropertyReference("contextSource", contextSourceRef); Element 
defaultStrategyChild = DomUtils.getChildElementByTagName(element, Elements.DEFAULT_RENAMING_STRATEGY); Element differentSubtreeChild = DomUtils.getChildElementByTagName(element, Elements.DIFFERENT_SUBTREE_RENAMING_STRATEGY); if(defaultStrategyChild != null) { builder.addPropertyValue("renamingStrategy", parseDefaultRenamingStrategy(defaultStrategyChild)); } if(differentSubtreeChild != null) { builder.addPropertyValue("renamingStrategy", parseDifferentSubtreeRenamingStrategy(differentSubtreeChild)); } String id = getString(element, AbstractBeanDefinitionParser.ID_ATTRIBUTE, DEFAULT_ID); BeanDefinition beanDefinition = builder.getBeanDefinition(); parserContext.registerBeanComponent(new BeanComponentDefinition(beanDefinition, id)); return beanDefinition; } private BeanDefinition parseDifferentSubtreeRenamingStrategy(Element element) { BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(DifferentSubtreeTempEntryRenamingStrategy.class); String subtreeNode = element.getAttribute(ATT_SUBTREE_NODE); Assert.hasText(subtreeNode, ATT_SUBTREE_NODE + " must be specified"); builder.addConstructorArgValue(subtreeNode); return builder.getBeanDefinition(); } public BeanDefinition parseDefaultRenamingStrategy(Element element) { BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(DefaultTempEntryRenamingStrategy.class); builder.addPropertyValue("tempSuffix", getString(element, ATT_TEMP_SUFFIX, DefaultTempEntryRenamingStrategy.DEFAULT_TEMP_SUFFIX)); return builder.getBeanDefinition(); } }
n8rogers/spring-ldap
core/src/main/java/org/springframework/ldap/config/TransactionManagerParser.java
Java
apache-2.0
5,614
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.spi.checkpoint.s3; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.BasicAWSCredentials; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.internal.managers.checkpoint.GridCheckpointManagerAbstractSelfTest; import org.apache.ignite.testsuites.IgniteS3TestSuite; import org.junit.Ignore; import org.junit.Test; /** * Checkpoint manager test using {@link S3CheckpointSpi}. */ public class S3CheckpointManagerSelfTest extends GridCheckpointManagerAbstractSelfTest { /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { assertTrue("Unexpected Ignite instance name: " + igniteInstanceName, igniteInstanceName.contains("s3")); IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); S3CheckpointSpi spi = new S3CheckpointSpi(); AWSCredentials cred = new BasicAWSCredentials(IgniteS3TestSuite.getAccessKey(), IgniteS3TestSuite.getSecretKey()); spi.setAwsCredentials(cred); spi.setBucketNameSuffix(S3CheckpointSpiSelfTest.getBucketNameSuffix()); cfg.setCheckpointSpi(spi); return cfg; } /** * @throws Exception Thrown if any exception occurs. */ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420") @Test public void testS3Based() throws Exception { retries = 6; doTest("s3"); } /** * @throws Exception Thrown if any exception occurs. */ @Ignore("https://issues.apache.org/jira/browse/IGNITE-2420") @Test public void testMultiNodeS3Based() throws Exception { retries = 6; doMultiNodeTest("s3"); } }
samaitra/ignite
modules/aws/src/test/java/org/apache/ignite/spi/checkpoint/s3/S3CheckpointManagerSelfTest.java
Java
apache-2.0
2,563
// compile-flags: -Z mir-opt-level=4
// EMIT_MIR multiple_return_terminators.test.MultipleReturnTerminators.diff

fn test(x: bool) {
    if x {
        // test
    } else {
        // test
    }
}

fn main() {
    test(true)
}
graydon/rust
src/test/mir-opt/multiple_return_terminators.rs
Rust
apache-2.0
227
// Copyright 2012 Cloudera Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.cloudera.impala.analysis; import java.util.ArrayList; import java.util.List; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.cloudera.impala.catalog.Db; import com.cloudera.impala.catalog.Function.CompareMode; import com.cloudera.impala.catalog.ScalarFunction; import com.cloudera.impala.catalog.Type; import com.cloudera.impala.common.AnalysisException; import com.cloudera.impala.common.Pair; import com.cloudera.impala.common.Reference; import com.cloudera.impala.extdatasource.thrift.TComparisonOp; import com.cloudera.impala.thrift.TExprNode; import com.cloudera.impala.thrift.TExprNodeType; import com.google.common.base.Objects; import com.google.common.base.Preconditions; import com.google.common.base.Predicates; import com.google.common.collect.Lists; /** * Most predicates with two operands. * */ public class BinaryPredicate extends Predicate { private final static Logger LOG = LoggerFactory.getLogger(BinaryPredicate.class); public enum Operator { EQ("=", "eq", TComparisonOp.EQ), NE("!=", "ne", TComparisonOp.NE), LE("<=", "le", TComparisonOp.LE), GE(">=", "ge", TComparisonOp.GE), LT("<", "lt", TComparisonOp.LT), GT(">", "gt", TComparisonOp.GT), // Same as EQ, except it returns True if the rhs is NULL. There is no backend // function for this. The functionality is embedded in the hash-join // implementation. NULL_MATCHING_EQ("=", "null_matching_eq", TComparisonOp.EQ); private final String description_; private final String name_; private final TComparisonOp thriftOp_; private Operator(String description, String name, TComparisonOp thriftOp) { this.description_ = description; this.name_ = name; this.thriftOp_ = thriftOp; } @Override public String toString() { return description_; } public String getName() { return name_; } public TComparisonOp getThriftOp() { return thriftOp_; } public Operator converse() { switch (this) { case EQ: return EQ; case NE: return NE; case LE: return GE; case GE: return LE; case LT: return GT; case GT: return LT; case NULL_MATCHING_EQ: throw new IllegalStateException("Not implemented"); default: throw new IllegalStateException("Invalid operator"); } } } public static void initBuiltins(Db db) { for (Type t: Type.getSupportedTypes()) { if (t.isNull()) continue; // NULL is handled through type promotion. 
db.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.EQ.getName(), Lists.newArrayList(t, t), Type.BOOLEAN)); db.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.NE.getName(), Lists.newArrayList(t, t), Type.BOOLEAN)); db.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.LE.getName(), Lists.newArrayList(t, t), Type.BOOLEAN)); db.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.GE.getName(), Lists.newArrayList(t, t), Type.BOOLEAN)); db.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.LT.getName(), Lists.newArrayList(t, t), Type.BOOLEAN)); db.addBuiltin(ScalarFunction.createBuiltinOperator( Operator.GT.getName(), Lists.newArrayList(t, t), Type.BOOLEAN)); } } private Operator op_; public Operator getOp() { return op_; } public void setOp(Operator op) { op_ = op; } public BinaryPredicate(Operator op, Expr e1, Expr e2) { super(); this.op_ = op; Preconditions.checkNotNull(e1); children_.add(e1); Preconditions.checkNotNull(e2); children_.add(e2); } protected BinaryPredicate(BinaryPredicate other) { super(other); op_ = other.op_; } public boolean isNullMatchingEq() { return op_ == Operator.NULL_MATCHING_EQ; } @Override public String toSqlImpl() { return getChild(0).toSql() + " " + op_.toString() + " " + getChild(1).toSql(); } @Override protected void toThrift(TExprNode msg) { Preconditions.checkState(children_.size() == 2); // Cannot serialize a nested predicate. Preconditions.checkState(!contains(Subquery.class)); // This check is important because we often clone and/or evaluate predicates, // and it's easy to get the casting logic wrong, e.g., cloned predicates // with expr substitutions need to be re-analyzed with reanalyze(). Preconditions.checkState(getChild(0).getType().getPrimitiveType() == getChild(1).getType().getPrimitiveType(), "child 0 type: " + getChild(0).getType() + " child 1 type: " + getChild(1).getType()); msg.node_type = TExprNodeType.FUNCTION_CALL; } @Override public String debugString() { return Objects.toStringHelper(this) .add("op", op_) .addValue(super.debugString()) .toString(); } @Override public void analyze(Analyzer analyzer) throws AnalysisException { if (isAnalyzed_) return; super.analyze(analyzer); convertNumericLiteralsFromDecimal(analyzer); String opName = op_.getName().equals("null_matching_eq") ? "eq" : op_.getName(); fn_ = getBuiltinFunction(analyzer, opName, collectChildReturnTypes(), CompareMode.IS_SUPERTYPE_OF); if (fn_ == null) { // Construct an appropriate error message and throw an AnalysisException. String errMsg = "operands of type " + getChild(0).getType().toSql() + " and " + getChild(1).getType().toSql() + " are not comparable: " + toSql(); // Check if any of the children is a Subquery that does not return a // scalar. for (Expr expr: children_) { if (expr instanceof Subquery && !expr.getType().isScalarType()) { errMsg = "Subquery must return a single row: " + expr.toSql(); break; } } throw new AnalysisException(errMsg); } Preconditions.checkState(fn_.getReturnType().isBoolean()); ArrayList<Expr> subqueries = Lists.newArrayList(); collectAll(Predicates.instanceOf(Subquery.class), subqueries); if (subqueries.size() > 1) { // TODO Remove that restriction when we add support for independent subquery // evaluation. 
throw new AnalysisException("Multiple subqueries are not supported in binary " + "predicates: " + toSql()); } if (contains(ExistsPredicate.class)) { throw new AnalysisException("EXISTS subquery predicates are not " + "supported in binary predicates: " + toSql()); } List<InPredicate> inPredicates = Lists.newArrayList(); collect(InPredicate.class, inPredicates); for (InPredicate inPredicate: inPredicates) { if (inPredicate.contains(Subquery.class)) { throw new AnalysisException("IN subquery predicates are not supported in " + "binary predicates: " + toSql()); } } // Don't perform any casting for predicates with subqueries here. Any casting // required will be performed when the subquery is unnested. if (!contains(Subquery.class)) castForFunctionCall(true); // determine selectivity // TODO: Compute selectivity for nested predicates Reference<SlotRef> slotRefRef = new Reference<SlotRef>(); if (op_ == Operator.EQ && isSingleColumnPredicate(slotRefRef, null) && slotRefRef.getRef().getNumDistinctValues() > 0) { Preconditions.checkState(slotRefRef.getRef() != null); selectivity_ = 1.0 / slotRefRef.getRef().getNumDistinctValues(); selectivity_ = Math.max(0, Math.min(1, selectivity_)); } else { // TODO: improve using histograms, once they show up selectivity_ = Expr.DEFAULT_SELECTIVITY; } } /** * If predicate is of the form "<slotref> <op> <expr>", returns expr, * otherwise returns null. Slotref may be wrapped in a CastExpr. * TODO: revisit CAST handling at the caller */ public Expr getSlotBinding(SlotId id) { // check left operand SlotRef slotRef = getChild(0).unwrapSlotRef(false); if (slotRef != null && slotRef.getSlotId() == id) return getChild(1); // check right operand slotRef = getChild(1).unwrapSlotRef(false); if (slotRef != null && slotRef.getSlotId() == id) return getChild(0); return null; } /** * If e is an equality predicate between two slots that only require implicit * casts, returns those two slots; otherwise returns null. */ public static Pair<SlotId, SlotId> getEqSlots(Expr e) { if (!(e instanceof BinaryPredicate)) return null; return ((BinaryPredicate) e).getEqSlots(); } /** * If this is an equality predicate between two slots that only require implicit * casts, returns those two slots; otherwise returns null. */ @Override public Pair<SlotId, SlotId> getEqSlots() { if (op_ != Operator.EQ) return null; SlotRef lhs = getChild(0).unwrapSlotRef(true); if (lhs == null) return null; SlotRef rhs = getChild(1).unwrapSlotRef(true); if (rhs == null) return null; return new Pair<SlotId, SlotId>(lhs.getSlotId(), rhs.getSlotId()); } /** * If predicate is of the form "<SlotRef> op <Expr>" or "<Expr> op <SlotRef>", * returns the SlotRef, otherwise returns null. */ @Override public SlotRef getBoundSlot() { SlotRef slotRef = getChild(0).unwrapSlotRef(true); if (slotRef != null) return slotRef; return getChild(1).unwrapSlotRef(true); } /** * Negates a BinaryPredicate. 
*/ @Override public Expr negate() { Operator newOp = null; switch (op_) { case EQ: newOp = Operator.NE; break; case NE: newOp = Operator.EQ; break; case LT: newOp = Operator.GE; break; case LE: newOp = Operator.GT; break; case GE: newOp = Operator.LT; break; case GT: newOp = Operator.LE; break; case NULL_MATCHING_EQ: throw new IllegalStateException("Not implemented"); } return new BinaryPredicate(newOp, getChild(0), getChild(1)); } @Override public boolean equals(Object obj) { if (!super.equals(obj)) return false; BinaryPredicate other = (BinaryPredicate) obj; return op_.equals(other.op_); } @Override public Expr clone() { return new BinaryPredicate(this); } }
brightchen/Impala
fe/src/main/java/com/cloudera/impala/analysis/BinaryPredicate.java
Java
apache-2.0
10,905
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from .....testing import assert_equal from ..featuredetection import ErodeImage def test_ErodeImage_inputs(): input_map = dict(args=dict(argstr='%s', ), environ=dict(nohash=True, usedefault=True, ), ignore_exception=dict(nohash=True, usedefault=True, ), inputMaskVolume=dict(argstr='--inputMaskVolume %s', ), inputRadius=dict(argstr='--inputRadius %d', ), inputVolume=dict(argstr='--inputVolume %s', ), outputVolume=dict(argstr='--outputVolume %s', hash_files=False, ), terminal_output=dict(nohash=True, ), ) inputs = ErodeImage.input_spec() for key, metadata in list(input_map.items()): for metakey, value in list(metadata.items()): yield assert_equal, getattr(inputs.traits()[key], metakey), value def test_ErodeImage_outputs(): output_map = dict(outputVolume=dict(), ) outputs = ErodeImage.output_spec() for key, metadata in list(output_map.items()): for metakey, value in list(metadata.items()): yield assert_equal, getattr(outputs.traits()[key], metakey), value
FCP-INDI/nipype
nipype/interfaces/semtools/filtering/tests/test_auto_ErodeImage.py
Python
bsd-3-clause
1,171
// ========================================================================== // SeqAn - The Library for Sequence Analysis // ========================================================================== // Copyright (c) 2006-2015, Knut Reinert, FU Berlin // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of Knut Reinert or the FU Berlin nor the names of // its contributors may be used to endorse or promote products derived // from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL KNUT REINERT OR THE FU BERLIN BE LIABLE // FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL // DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY // OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH // DAMAGE. // // ========================================================================== // Author: Manuel Holtgrewe <[email protected]> // ========================================================================== #include <sstream> #include <seqan/basic.h> #include <seqan/seq_io.h> #include <seqan/roi_io.h> SEQAN_DEFINE_TEST(test_roi_read_roi_record) { seqan::String<char> inString = "I\t1\t3\tregion0\t3\t+\t4\t0.55\t33\t1,2,4\n"; seqan::DirectionIterator<seqan::String<char>, seqan::Input>::Type iter = begin(inString); seqan::RoiRecord record; seqan::RoiIOContext context; readRecord(record, context, iter, seqan::Roi()); SEQAN_ASSERT_EQ(record.ref, "I"); SEQAN_ASSERT_EQ(record.beginPos, 0); SEQAN_ASSERT_EQ(record.endPos, 3); SEQAN_ASSERT_EQ(record.strand, '+'); SEQAN_ASSERT_EQ(record.len, 3u); SEQAN_ASSERT_EQ(record.name, "region0"); SEQAN_ASSERT_EQ(record.countMax, 4u); SEQAN_ASSERT_EQ(length(record.data), 2u); SEQAN_ASSERT_EQ(record.data[0], "0.55"); SEQAN_ASSERT_EQ(record.data[1], "33"); SEQAN_ASSERT_EQ(length(record.count), 3u); SEQAN_ASSERT_EQ(record.count[0], 1u); SEQAN_ASSERT_EQ(record.count[1], 2u); SEQAN_ASSERT_EQ(record.count[2], 4u); } SEQAN_DEFINE_TEST(test_roi_write_roi_record) { seqan::RoiRecord record; record.ref = "I"; record.beginPos = 0; record.endPos = 3; record.strand = '+'; record.len = 3; record.name = "region0"; appendValue(record.data, "0.55"); appendValue(record.data, "33"); record.countMax = 4; appendValue(record.count, 1); appendValue(record.count, 2); appendValue(record.count, 4); seqan::String<char> outString; writeRecord(outString, record, seqan::Roi()); seqan::String<char> expected = "I\t1\t3\tregion0\t3\t+\t4\t0.55\t33\t1,2,4\n"; SEQAN_ASSERT_EQ(expected, outString); } SEQAN_DEFINE_TEST(test_roi_roi_file_read) { 
seqan::CharString inPath = SEQAN_PATH_TO_ROOT(); append(inPath, "/tests/roi_io/example.roi"); seqan::RoiFileIn roiFileIn(toCString(inPath)); seqan::RoiRecord record1; readRecord(record1, roiFileIn); SEQAN_ASSERT_EQ(record1.ref, "I"); SEQAN_ASSERT_EQ(record1.beginPos, 0); SEQAN_ASSERT_EQ(record1.endPos, 3); SEQAN_ASSERT_EQ(record1.strand, '+'); SEQAN_ASSERT_EQ(record1.len, 3u); SEQAN_ASSERT_EQ(record1.name, "region0"); SEQAN_ASSERT_EQ(record1.countMax, 4u); SEQAN_ASSERT_EQ(length(record1.count), 3u); SEQAN_ASSERT_EQ(record1.count[0], 1u); SEQAN_ASSERT_EQ(record1.count[1], 2u); SEQAN_ASSERT_EQ(record1.count[2], 4u); seqan::RoiRecord record2; readRecord(record2, roiFileIn); SEQAN_ASSERT(atEnd(roiFileIn)); SEQAN_ASSERT_EQ(record2.ref, "II"); SEQAN_ASSERT_EQ(record2.beginPos, 1); SEQAN_ASSERT_EQ(record2.endPos, 4); SEQAN_ASSERT_EQ(record2.strand, '+'); SEQAN_ASSERT_EQ(record2.len, 3u); SEQAN_ASSERT_EQ(record2.name, "region1"); SEQAN_ASSERT_EQ(record2.countMax, 10u); SEQAN_ASSERT_EQ(length(record2.count), 3u); SEQAN_ASSERT_EQ(record2.count[0], 8u); SEQAN_ASSERT_EQ(record2.count[1], 9u); SEQAN_ASSERT_EQ(record2.count[2], 10u); } SEQAN_DEFINE_TEST(test_roi_roi_file_write) { seqan::CharString tmpPath = SEQAN_PATH_TO_ROOT(); append(tmpPath, ".roi"); seqan::RoiFileOut roiFileOut(toCString(tmpPath)); seqan::RoiRecord record1; record1.ref = "I"; record1.beginPos = 0; record1.endPos = 3; record1.strand = '+'; record1.len = 3; record1.name = "region0"; record1.countMax = 4; appendValue(record1.count, 1); appendValue(record1.count, 2); appendValue(record1.count, 4); writeRecord(roiFileOut, record1); seqan::RoiRecord record2; record2.ref = "II"; record2.beginPos = 1; record2.endPos = 4; record2.strand = '+'; record2.len = 3; record2.name = "region1"; record2.countMax = 10; appendValue(record2.count, 8); appendValue(record2.count, 9); appendValue(record2.count, 10); writeRecord(roiFileOut, record2); close(roiFileOut); seqan::CharString goldPath(SEQAN_PATH_TO_ROOT()); append(goldPath, "/tests/roi_io/example.roi"); SEQAN_ASSERT(seqan::_compareTextFiles(toCString(tmpPath), toCString(goldPath))); } SEQAN_BEGIN_TESTSUITE(test_roi_io) { // Reading of ROI records. SEQAN_CALL_TEST(test_roi_read_roi_record); // Writing of ROI records. SEQAN_CALL_TEST(test_roi_write_roi_record); // RoiFile SEQAN_CALL_TEST(test_roi_roi_file_read); SEQAN_CALL_TEST(test_roi_roi_file_write); } SEQAN_END_TESTSUITE
catkira/seqan
tests/roi_io/test_roi_io.cpp
C++
bsd-3-clause
6,457
////////////////////////////////////////////////////////////////////////// // // Copyright (c) 2008-2010, Image Engine Design Inc. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // * Neither the name of Image Engine Design nor the names of any // other contributors to this software may be used to endorse or // promote products derived from this software without specific prior // written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS // IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // ////////////////////////////////////////////////////////////////////////// #include "boost/python.hpp" #include "IECorePython/BezierAlgoBinding.h" #include "IECore/BezierAlgo.h" using namespace boost::python; using namespace IECore; namespace IECorePython { template<typename V> struct BezierCallback { object c; void operator()( const V &v ) { c( v ); } }; template<typename Vec> void bezierSubdivideBinding( const Vec &v0, const Vec &v1, const Vec &v2, const Vec &v3, typename Vec::BaseType tolerance, object f ) { BezierCallback<Vec> c; c.c = f; bezierSubdivide( v0, v1, v2, v3, tolerance, c ); } template<typename Vec> void bezierSubdivideBindingQuadratic( const Vec &v0, const Vec &v1, const Vec &v2, typename Vec::BaseType tolerance, object f ) { BezierCallback<Vec> c; c.c = f; bezierSubdivide( v0, v1, v2, tolerance, c ); } void bindBezierAlgo() { def( "bezierSubdivide", &bezierSubdivideBinding<Imath::V2f> ); def( "bezierSubdivide", &bezierSubdivideBinding<Imath::V2d> ); def( "bezierSubdivide", &bezierSubdivideBinding<Imath::V3f> ); def( "bezierSubdivide", &bezierSubdivideBinding<Imath::V3d> ); def( "bezierSubdivide", &bezierSubdivideBindingQuadratic<Imath::V2f> ); def( "bezierSubdivide", &bezierSubdivideBindingQuadratic<Imath::V2d> ); def( "bezierSubdivide", &bezierSubdivideBindingQuadratic<Imath::V3f> ); def( "bezierSubdivide", &bezierSubdivideBindingQuadratic<Imath::V3d> ); } } // namespace IECorePython
lento/cortex
src/IECorePython/BezierAlgoBinding.cpp
C++
bsd-3-clause
3,199
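Editor's note: the binding record above only exercises bezierSubdivide indirectly, through the Python wrapper. Below is a minimal, hedged C++ sketch of driving the same callback-based API directly, mirroring the functor pattern the binding uses; the header paths, the exact template signature of IECore::bezierSubdivide, and the tolerance value are assumptions inferred solely from how the binding invokes it.

// Hedged sketch: mirrors the BezierCallback functor pattern from the binding above.
// Header locations are assumptions; adjust to the local Cortex/Imath installation.
#include "IECore/BezierAlgo.h"
#include "OpenEXR/ImathVec.h"
#include <iostream>

struct PrintPoint
{
	// Called once per point emitted by the subdivision, as in the Python binding.
	void operator()( const Imath::V2f &p )
	{
		std::cout << p.x << " " << p.y << "\n";
	}
};

int main()
{
	PrintPoint printer;
	// Subdivide a cubic bezier until the (assumed) flatness tolerance is met.
	IECore::bezierSubdivide(
		Imath::V2f( 0.0f, 0.0f ), Imath::V2f( 1.0f, 2.0f ),
		Imath::V2f( 3.0f, 2.0f ), Imath::V2f( 4.0f, 0.0f ),
		0.01f, printer
	);
	return 0;
}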
// Copyright 2016 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package runtime_test import ( "internal/testenv" "io/ioutil" "os" "os/exec" "path/filepath" "runtime" "strings" "testing" ) var lldbPath string func checkLldbPython(t *testing.T) { cmd := exec.Command("lldb", "-P") out, err := cmd.CombinedOutput() if err != nil { t.Skipf("skipping due to issue running lldb: %v\n%s", err, out) } lldbPath = strings.TrimSpace(string(out)) cmd = exec.Command("/usr/bin/python2.7", "-c", "import sys;sys.path.append(sys.argv[1]);import lldb; print('go lldb python support')", lldbPath) out, err = cmd.CombinedOutput() if err != nil { t.Skipf("skipping due to issue running python: %v\n%s", err, out) } if string(out) != "go lldb python support\n" { t.Skipf("skipping due to lack of python lldb support: %s", out) } if runtime.GOOS == "darwin" { // Try to see if we have debugging permissions. cmd = exec.Command("/usr/sbin/DevToolsSecurity", "-status") out, err = cmd.CombinedOutput() if err != nil { t.Skipf("DevToolsSecurity failed: %v", err) } else if !strings.Contains(string(out), "enabled") { t.Skip(string(out)) } cmd = exec.Command("/usr/bin/groups") out, err = cmd.CombinedOutput() if err != nil { t.Skipf("groups failed: %v", err) } else if !strings.Contains(string(out), "_developer") { t.Skip("Not in _developer group") } } } const lldbHelloSource = ` package main import "fmt" func main() { mapvar := make(map[string]string,5) mapvar["abc"] = "def" mapvar["ghi"] = "jkl" intvar := 42 ptrvar := &intvar fmt.Println("hi") // line 10 _ = ptrvar } ` const lldbScriptSource = ` import sys sys.path.append(sys.argv[1]) import lldb import os TIMEOUT_SECS = 5 debugger = lldb.SBDebugger.Create() debugger.SetAsync(True) target = debugger.CreateTargetWithFileAndArch("a.exe", None) if target: print "Created target" main_bp = target.BreakpointCreateByLocation("main.go", 10) if main_bp: print "Created breakpoint" process = target.LaunchSimple(None, None, os.getcwd()) if process: print "Process launched" listener = debugger.GetListener() process.broadcaster.AddListener(listener, lldb.SBProcess.eBroadcastBitStateChanged) while True: event = lldb.SBEvent() if listener.WaitForEvent(TIMEOUT_SECS, event): if lldb.SBProcess.GetRestartedFromEvent(event): continue state = process.GetState() if state in [lldb.eStateUnloaded, lldb.eStateLaunching, lldb.eStateRunning]: continue else: print "Timeout launching" break if state == lldb.eStateStopped: for t in process.threads: if t.GetStopReason() == lldb.eStopReasonBreakpoint: print "Hit breakpoint" frame = t.GetFrameAtIndex(0) if frame: if frame.line_entry: print "Stopped at %s:%d" % (frame.line_entry.file.basename, frame.line_entry.line) if frame.function: print "Stopped in %s" % (frame.function.name,) var = frame.FindVariable('intvar') if var: print "intvar = %s" % (var.GetValue(),) else: print "no intvar" else: print "Process state", state process.Destroy() else: print "Failed to create target a.exe" lldb.SBDebugger.Destroy(debugger) sys.exit() ` const expectedLldbOutput = `Created target Created breakpoint Process launched Hit breakpoint Stopped at main.go:10 Stopped in main.main intvar = 42 ` func TestLldbPython(t *testing.T) { testenv.MustHaveGoBuild(t) if final := os.Getenv("GOROOT_FINAL"); final != "" && runtime.GOROOT() != final { t.Skip("gdb test can fail with GOROOT_FINAL pending") } testenv.SkipFlaky(t, 31188) checkLldbPython(t) dir, err := ioutil.TempDir("", "go-build") if err != 
nil { t.Fatalf("failed to create temp directory: %v", err) } defer os.RemoveAll(dir) src := filepath.Join(dir, "main.go") err = ioutil.WriteFile(src, []byte(lldbHelloSource), 0644) if err != nil { t.Fatalf("failed to create src file: %v", err) } mod := filepath.Join(dir, "go.mod") err = ioutil.WriteFile(mod, []byte("module lldbtest"), 0644) if err != nil { t.Fatalf("failed to create mod file: %v", err) } // As of 2018-07-17, lldb doesn't support compressed DWARF, so // disable it for this test. cmd := exec.Command(testenv.GoToolPath(t), "build", "-gcflags=all=-N -l", "-ldflags=-compressdwarf=false", "-o", "a.exe") cmd.Dir = dir cmd.Env = append(os.Environ(), "GOPATH=") // issue 31100 out, err := cmd.CombinedOutput() if err != nil { t.Fatalf("building source %v\n%s", err, out) } src = filepath.Join(dir, "script.py") err = ioutil.WriteFile(src, []byte(lldbScriptSource), 0755) if err != nil { t.Fatalf("failed to create script: %v", err) } cmd = exec.Command("/usr/bin/python2.7", "script.py", lldbPath) cmd.Dir = dir got, _ := cmd.CombinedOutput() if string(got) != expectedLldbOutput { if strings.Contains(string(got), "Timeout launching") { t.Skip("Timeout launching") } t.Fatalf("Unexpected lldb output:\n%s", got) } }
akutz/go
src/runtime/runtime-lldb_test.go
Go
bsd-3-clause
5,217
#!/bin/bash export PHP_HOME=${IROOT}/php-5.5.17 export PHP_FPM=$PHP_HOME/sbin/php-fpm export NGINX_HOME=${IROOT}/nginx sed -i 's|localhost|'"${DBHOST}"'|g' index.php sed -i 's|SourceRoot = .*/FrameworkBenchmarks/php-slim|SourceRoot = '"${TROOT}"'|g' deploy/config.hdf sed -i 's|Path = .*/.hhvm.hhbc|Path = '"${TROOT}"'/.hhvm.hhbc|g' deploy/config.hdf sed -i 's|PidFile = .*/hhvm.pid|PidFile = '"${TROOT}"'/hhvm.pid|g' deploy/config.hdf sed -i 's|File = .*/error.log|File = '"${TROOT}"'/error.log|g' deploy/config.hdf sed -i 's|root .*/FrameworkBenchmarks/php-slim| root '"${TROOT}"'|g' deploy/nginx.conf sed -i 's|/usr/local/nginx/|'"${IROOT}"'/nginx/|g' deploy/nginx.conf export PATH="$PHP_HOME/bin:$PHP_HOME/sbin:$PATH" hhvm -m daemon --config $TROOT/deploy/config.hdf --user $(whoami) $NGINX_HOME/sbin/nginx -c $TROOT/deploy/nginx.conf
joshk/FrameworkBenchmarks
frameworks/PHP/php-slim/setup_hhvm.sh
Shell
bsd-3-clause
842
#!/bin/bash fw_depends luna CC=gcc-4.9 CXX=g++-4.9 conan install --build=missing -s compiler="gcc" -s compiler.version="4.9" . cmake . -DCMAKE_CXX_COMPILER=g++-4.9 -DCMAKE_CC_COMPILER=gcc-4.9 cmake --build . MAX_THREADS=$((2 * $CPU_COUNT)) $TROOT/bin/lunabench_epoll 8080 $MAX_THREADS
steveklabnik/FrameworkBenchmarks
frameworks/C++/luna/setup_epoll.sh
Shell
bsd-3-clause
289
package org.scalaide.core package quickassist import org.eclipse.jdt.core.compiler.IProblem import org.eclipse.jdt.ui.JavaUI import org.eclipse.jface.text.quickassist.IQuickAssistInvocationContext import org.junit.Assert._ import org.scalaide.core.internal.jdt.model.ScalaCompilationUnit import org.scalaide.core.internal.quickassist.QuickAssistProcessor import testsetup.TestProjectSetup /** * Provides test behavior that relies on a working UI environment. */ object UiQuickAssistTests extends TestProjectSetup("quickassist") { def assertNumberOfProblems(nProblems: Int, problems: Array[IProblem]): Unit = { if (problems.length != nProblems) { val buf = new StringBuffer("Wrong number of problems, is: ") buf.append(problems.length).append(", expected: ").append(nProblems).append('\n') for (problem <- problems) { buf.append(problem).append(" at ") buf.append('[').append(problem.getSourceStart()).append(" ,").append(problem.getSourceEnd()).append(']') buf.append('\n') } assertEquals(buf.toString, nProblems, problems.length) } } def withQuickFixes(pathToSource: String)(expectedQuickFixes: String*): Unit = { withManyQuickFixesPerLine(pathToSource)(expectedQuickFixes.map(List(_)).toList) } def withManyQuickFixesPerLine(pathToSource: String)(expectedQuickFixesList: List[List[String]]): Unit = { val unit = compilationUnit(pathToSource).asInstanceOf[ScalaCompilationUnit] unit.withSourceFile { (src, compiler) => compiler.askReload(List(unit)).get val problems = compiler.problemsOf(unit) assertTrue("No problems found.", problems.nonEmpty) assertNumberOfProblems(expectedQuickFixesList.size, problems.toArray) val part = JavaUI.openInEditor(unit.getCompilationUnit) for ((problem, expectedQuickFixes) <- problems zip expectedQuickFixesList) { val offset = problem.getSourceStart val length = problem.getSourceEnd + 1 - offset val processor = new QuickAssistProcessor(part.getEditorInput, QuickAssistProcessor.DefaultId) val proposals = processor.computeQuickAssistProposals(new IQuickAssistInvocationContext { override def getOffset = offset override def getLength = length override def getSourceViewer = null }) val corrections = proposals.map(_.getDisplayString) for (quickFix <- expectedQuickFixes) { assertTrue("Quick fix " + quickFix + " was not offered. Offered were: " + corrections.mkString(", "), corrections contains quickFix) } } } } }
stephenh/scala-ide
org.scala-ide.sdt.core.tests/src/org/scalaide/core/quickassist/UiQuickAssistTests.scala
Scala
bsd-3-clause
2,617
//==- llvm/Analysis/MemoryBuiltins.h - Calls to memory builtins --*- C++ -*-==// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// // // This family of functions identifies calls to builtin functions that allocate // or free memory. // //===----------------------------------------------------------------------===// #ifndef LLVM_ANALYSIS_MEMORYBUILTINS_H #define LLVM_ANALYSIS_MEMORYBUILTINS_H #include "llvm/ADT/APInt.h" #include "llvm/ADT/DenseMap.h" #include "llvm/ADT/SmallPtrSet.h" #include "llvm/Analysis/TargetFolder.h" #include "llvm/Analysis/TargetLibraryInfo.h" #include "llvm/IR/CallSite.h" #include "llvm/IR/IRBuilder.h" #include "llvm/IR/InstVisitor.h" #include "llvm/IR/ValueHandle.h" #include <cstdint> #include <utility> namespace llvm { class AllocaInst; class Argument; class CallInst; class ConstantInt; class ConstantPointerNull; class DataLayout; class ExtractElementInst; class ExtractValueInst; class GEPOperator; class GlobalAlias; class GlobalVariable; class Instruction; class IntegerType; class IntrinsicInst; class IntToPtrInst; class LLVMContext; class LoadInst; class PHINode; class PointerType; class SelectInst; class TargetLibraryInfo; class Type; class UndefValue; class Value; /// Tests if a value is a call or invoke to a library function that /// allocates or reallocates memory (either malloc, calloc, realloc, or strdup /// like). bool isAllocationFn(const Value *V, const TargetLibraryInfo *TLI, bool LookThroughBitCast = false); bool isAllocationFn(const Value *V, function_ref<const TargetLibraryInfo &(Function &)> GetTLI, bool LookThroughBitCast = false); /// Tests if a value is a call or invoke to a function that returns a /// NoAlias pointer (including malloc/calloc/realloc/strdup-like functions). bool isNoAliasFn(const Value *V, const TargetLibraryInfo *TLI, bool LookThroughBitCast = false); /// Tests if a value is a call or invoke to a library function that /// allocates uninitialized memory (such as malloc). bool isMallocLikeFn(const Value *V, const TargetLibraryInfo *TLI, bool LookThroughBitCast = false); bool isMallocLikeFn(const Value *V, function_ref<const TargetLibraryInfo &(Function &)> GetTLI, bool LookThroughBitCast = false); /// Tests if a value is a call or invoke to a library function that /// allocates zero-filled memory (such as calloc). bool isCallocLikeFn(const Value *V, const TargetLibraryInfo *TLI, bool LookThroughBitCast = false); /// Tests if a value is a call or invoke to a library function that /// allocates memory similar to malloc or calloc. bool isMallocOrCallocLikeFn(const Value *V, const TargetLibraryInfo *TLI, bool LookThroughBitCast = false); /// Tests if a value is a call or invoke to a library function that /// allocates memory (either malloc, calloc, or strdup like). bool isAllocLikeFn(const Value *V, const TargetLibraryInfo *TLI, bool LookThroughBitCast = false); /// Tests if a value is a call or invoke to a library function that /// reallocates memory (e.g., realloc). bool isReallocLikeFn(const Value *V, const TargetLibraryInfo *TLI, bool LookThroughBitCast = false); /// Tests if a function is a call or invoke to a library function that /// reallocates memory (e.g., realloc). 
bool isReallocLikeFn(const Function *F, const TargetLibraryInfo *TLI); /// Tests if a value is a call or invoke to a library function that /// allocates memory and throws if an allocation failed (e.g., new). bool isOpNewLikeFn(const Value *V, const TargetLibraryInfo *TLI, bool LookThroughBitCast = false); /// Tests if a value is a call or invoke to a library function that /// allocates memory (strdup, strndup). bool isStrdupLikeFn(const Value *V, const TargetLibraryInfo *TLI, bool LookThroughBitCast = false); //===----------------------------------------------------------------------===// // malloc Call Utility Functions. // /// extractMallocCall - Returns the corresponding CallInst if the instruction /// is a malloc call. Since CallInst::CreateMalloc() only creates calls, we /// ignore InvokeInst here. const CallInst * extractMallocCall(const Value *I, function_ref<const TargetLibraryInfo &(Function &)> GetTLI); inline CallInst * extractMallocCall(Value *I, function_ref<const TargetLibraryInfo &(Function &)> GetTLI) { return const_cast<CallInst *>(extractMallocCall((const Value *)I, GetTLI)); } /// getMallocType - Returns the PointerType resulting from the malloc call. /// The PointerType depends on the number of bitcast uses of the malloc call: /// 0: PointerType is the malloc calls' return type. /// 1: PointerType is the bitcast's result type. /// >1: Unique PointerType cannot be determined, return NULL. PointerType *getMallocType(const CallInst *CI, const TargetLibraryInfo *TLI); /// getMallocAllocatedType - Returns the Type allocated by malloc call. /// The Type depends on the number of bitcast uses of the malloc call: /// 0: PointerType is the malloc calls' return type. /// 1: PointerType is the bitcast's result type. /// >1: Unique PointerType cannot be determined, return NULL. Type *getMallocAllocatedType(const CallInst *CI, const TargetLibraryInfo *TLI); /// getMallocArraySize - Returns the array size of a malloc call. If the /// argument passed to malloc is a multiple of the size of the malloced type, /// then return that multiple. For non-array mallocs, the multiple is /// constant 1. Otherwise, return NULL for mallocs whose array size cannot be /// determined. Value *getMallocArraySize(CallInst *CI, const DataLayout &DL, const TargetLibraryInfo *TLI, bool LookThroughSExt = false); //===----------------------------------------------------------------------===// // calloc Call Utility Functions. // /// extractCallocCall - Returns the corresponding CallInst if the instruction /// is a calloc call. const CallInst *extractCallocCall(const Value *I, const TargetLibraryInfo *TLI); inline CallInst *extractCallocCall(Value *I, const TargetLibraryInfo *TLI) { return const_cast<CallInst*>(extractCallocCall((const Value*)I, TLI)); } //===----------------------------------------------------------------------===// // free Call Utility Functions. // /// isLibFreeFunction - Returns true if the function is a builtin free() bool isLibFreeFunction(const Function *F, const LibFunc TLIFn); /// isFreeCall - Returns non-null if the value is a call to the builtin free() const CallInst *isFreeCall(const Value *I, const TargetLibraryInfo *TLI); inline CallInst *isFreeCall(Value *I, const TargetLibraryInfo *TLI) { return const_cast<CallInst*>(isFreeCall((const Value*)I, TLI)); } //===----------------------------------------------------------------------===// // Utility functions to compute size of objects. // /// Various options to control the behavior of getObjectSize. 
struct ObjectSizeOpts { /// Controls how we handle conditional statements with unknown conditions. enum class Mode : uint8_t { /// Fail to evaluate an unknown condition. Exact, /// Evaluate all branches of an unknown condition. If all evaluations /// succeed, pick the minimum size. Min, /// Same as Min, except we pick the maximum size of all of the branches. Max }; /// How we want to evaluate this object's size. Mode EvalMode = Mode::Exact; /// Whether to round the result up to the alignment of allocas, byval /// arguments, and global variables. bool RoundToAlign = false; /// If this is true, null pointers in address space 0 will be treated as /// though they can't be evaluated. Otherwise, null is always considered to /// point to a 0 byte region of memory. bool NullIsUnknownSize = false; }; /// Compute the size of the object pointed by Ptr. Returns true and the /// object size in Size if successful, and false otherwise. In this context, by /// object we mean the region of memory starting at Ptr to the end of the /// underlying object pointed to by Ptr. bool getObjectSize(const Value *Ptr, uint64_t &Size, const DataLayout &DL, const TargetLibraryInfo *TLI, ObjectSizeOpts Opts = {}); /// Try to turn a call to \@llvm.objectsize into an integer value of the given /// Type. Returns null on failure. If MustSucceed is true, this function will /// not return null, and may return conservative values governed by the second /// argument of the call to objectsize. Value *lowerObjectSizeCall(IntrinsicInst *ObjectSize, const DataLayout &DL, const TargetLibraryInfo *TLI, bool MustSucceed); using SizeOffsetType = std::pair<APInt, APInt>; /// Evaluate the size and offset of an object pointed to by a Value* /// statically. Fails if size or offset are not known at compile time. class ObjectSizeOffsetVisitor : public InstVisitor<ObjectSizeOffsetVisitor, SizeOffsetType> { const DataLayout &DL; const TargetLibraryInfo *TLI; ObjectSizeOpts Options; unsigned IntTyBits; APInt Zero; SmallPtrSet<Instruction *, 8> SeenInsts; APInt align(APInt Size, uint64_t Align); SizeOffsetType unknown() { return std::make_pair(APInt(), APInt()); } public: ObjectSizeOffsetVisitor(const DataLayout &DL, const TargetLibraryInfo *TLI, LLVMContext &Context, ObjectSizeOpts Options = {}); SizeOffsetType compute(Value *V); static bool knownSize(const SizeOffsetType &SizeOffset) { return SizeOffset.first.getBitWidth() > 1; } static bool knownOffset(const SizeOffsetType &SizeOffset) { return SizeOffset.second.getBitWidth() > 1; } static bool bothKnown(const SizeOffsetType &SizeOffset) { return knownSize(SizeOffset) && knownOffset(SizeOffset); } // These are "private", except they can't actually be made private. Only // compute() should be used by external users. 
SizeOffsetType visitAllocaInst(AllocaInst &I); SizeOffsetType visitArgument(Argument &A); SizeOffsetType visitCallSite(CallSite CS); SizeOffsetType visitConstantPointerNull(ConstantPointerNull&); SizeOffsetType visitExtractElementInst(ExtractElementInst &I); SizeOffsetType visitExtractValueInst(ExtractValueInst &I); SizeOffsetType visitGEPOperator(GEPOperator &GEP); SizeOffsetType visitGlobalAlias(GlobalAlias &GA); SizeOffsetType visitGlobalVariable(GlobalVariable &GV); SizeOffsetType visitIntToPtrInst(IntToPtrInst&); SizeOffsetType visitLoadInst(LoadInst &I); SizeOffsetType visitPHINode(PHINode&); SizeOffsetType visitSelectInst(SelectInst &I); SizeOffsetType visitUndefValue(UndefValue&); SizeOffsetType visitInstruction(Instruction &I); private: bool CheckedZextOrTrunc(APInt &I); }; using SizeOffsetEvalType = std::pair<Value *, Value *>; /// Evaluate the size and offset of an object pointed to by a Value*. /// May create code to compute the result at run-time. class ObjectSizeOffsetEvaluator : public InstVisitor<ObjectSizeOffsetEvaluator, SizeOffsetEvalType> { using BuilderTy = IRBuilder<TargetFolder, IRBuilderCallbackInserter>; using WeakEvalType = std::pair<WeakTrackingVH, WeakTrackingVH>; using CacheMapTy = DenseMap<const Value *, WeakEvalType>; using PtrSetTy = SmallPtrSet<const Value *, 8>; const DataLayout &DL; const TargetLibraryInfo *TLI; LLVMContext &Context; BuilderTy Builder; IntegerType *IntTy; Value *Zero; CacheMapTy CacheMap; PtrSetTy SeenVals; ObjectSizeOpts EvalOpts; SmallPtrSet<Instruction *, 8> InsertedInstructions; SizeOffsetEvalType compute_(Value *V); public: static SizeOffsetEvalType unknown() { return std::make_pair(nullptr, nullptr); } ObjectSizeOffsetEvaluator(const DataLayout &DL, const TargetLibraryInfo *TLI, LLVMContext &Context, ObjectSizeOpts EvalOpts = {}); SizeOffsetEvalType compute(Value *V); bool knownSize(SizeOffsetEvalType SizeOffset) { return SizeOffset.first; } bool knownOffset(SizeOffsetEvalType SizeOffset) { return SizeOffset.second; } bool anyKnown(SizeOffsetEvalType SizeOffset) { return knownSize(SizeOffset) || knownOffset(SizeOffset); } bool bothKnown(SizeOffsetEvalType SizeOffset) { return knownSize(SizeOffset) && knownOffset(SizeOffset); } // The individual instruction visitors should be treated as private. SizeOffsetEvalType visitAllocaInst(AllocaInst &I); SizeOffsetEvalType visitCallSite(CallSite CS); SizeOffsetEvalType visitExtractElementInst(ExtractElementInst &I); SizeOffsetEvalType visitExtractValueInst(ExtractValueInst &I); SizeOffsetEvalType visitGEPOperator(GEPOperator &GEP); SizeOffsetEvalType visitIntToPtrInst(IntToPtrInst&); SizeOffsetEvalType visitLoadInst(LoadInst &I); SizeOffsetEvalType visitPHINode(PHINode &PHI); SizeOffsetEvalType visitSelectInst(SelectInst &I); SizeOffsetEvalType visitInstruction(Instruction &I); }; } // end namespace llvm #endif // LLVM_ANALYSIS_MEMORYBUILTINS_H
endlessm/chromium-browser
third_party/swiftshader/third_party/llvm-10.0/llvm/include/llvm/Analysis/MemoryBuiltins.h
C++
bsd-3-clause
13,378
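Editor's note: a hedged sketch of how the declarations in MemoryBuiltins.h are typically consumed follows. The helper name and surrounding setup are illustrative assumptions, but the calls themselves follow the signatures declared in the header above.

// Hypothetical helper (not LLVM code): returns the statically known size of the
// object V points to, or -1 if getObjectSize cannot determine it.
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Value.h"
#include <cstdint>
using namespace llvm;

static int64_t knownObjectSize(const Value *V, const DataLayout &DL,
                               const TargetLibraryInfo *TLI) {
  // Allocation calls (malloc/calloc/strdup-like) can be recognised up front.
  bool IsAlloc = isAllocLikeFn(V, TLI, /*LookThroughBitCast=*/true);
  (void)IsAlloc; // a real caller might branch on this

  ObjectSizeOpts Opts;
  Opts.RoundToAlign = true;      // round up to alloca/global alignment
  Opts.NullIsUnknownSize = true; // treat null in address space 0 as unknown

  uint64_t Size = 0;
  if (getObjectSize(V, Size, DL, TLI, Opts))
    return static_cast<int64_t>(Size);
  return -1;
}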
package openstack import ( "fmt" "log" "time" "github.com/hashicorp/terraform/helper/resource" "github.com/hashicorp/terraform/helper/schema" "github.com/rackspace/gophercloud" "github.com/rackspace/gophercloud/openstack/networking/v2/extensions/layer3/routers" ) func resourceNetworkingRouterV2() *schema.Resource { return &schema.Resource{ Create: resourceNetworkingRouterV2Create, Read: resourceNetworkingRouterV2Read, Update: resourceNetworkingRouterV2Update, Delete: resourceNetworkingRouterV2Delete, Schema: map[string]*schema.Schema{ "region": &schema.Schema{ Type: schema.TypeString, Required: true, ForceNew: true, DefaultFunc: schema.EnvDefaultFunc("OS_REGION_NAME", ""), }, "name": &schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: false, }, "admin_state_up": &schema.Schema{ Type: schema.TypeBool, Optional: true, ForceNew: false, Computed: true, }, "distributed": &schema.Schema{ Type: schema.TypeBool, Optional: true, ForceNew: true, Computed: true, }, "external_gateway": &schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: false, }, "tenant_id": &schema.Schema{ Type: schema.TypeString, Optional: true, ForceNew: true, Computed: true, }, "value_specs": &schema.Schema{ Type: schema.TypeMap, Optional: true, ForceNew: true, }, }, } } // routerCreateOpts contains all the values needed to create a new router. There are // no required values. type RouterCreateOpts struct { Name string AdminStateUp *bool Distributed *bool TenantID string GatewayInfo *routers.GatewayInfo ValueSpecs map[string]string } // ToRouterCreateMap casts a routerCreateOpts struct to a map. func (opts RouterCreateOpts) ToRouterCreateMap() (map[string]interface{}, error) { r := make(map[string]interface{}) if gophercloud.MaybeString(opts.Name) != nil { r["name"] = opts.Name } if opts.AdminStateUp != nil { r["admin_state_up"] = opts.AdminStateUp } if opts.Distributed != nil { r["distributed"] = opts.Distributed } if gophercloud.MaybeString(opts.TenantID) != nil { r["tenant_id"] = opts.TenantID } if opts.GatewayInfo != nil { r["external_gateway_info"] = opts.GatewayInfo } if opts.ValueSpecs != nil { for k, v := range opts.ValueSpecs { r[k] = v } } return map[string]interface{}{"router": r}, nil } func resourceNetworkingRouterV2Create(d *schema.ResourceData, meta interface{}) error { config := meta.(*Config) networkingClient, err := config.networkingV2Client(d.Get("region").(string)) if err != nil { return fmt.Errorf("Error creating OpenStack networking client: %s", err) } createOpts := RouterCreateOpts{ Name: d.Get("name").(string), TenantID: d.Get("tenant_id").(string), ValueSpecs: routerValueSpecs(d), } if asuRaw, ok := d.GetOk("admin_state_up"); ok { asu := asuRaw.(bool) createOpts.AdminStateUp = &asu } if dRaw, ok := d.GetOk("distributed"); ok { d := dRaw.(bool) createOpts.Distributed = &d } externalGateway := d.Get("external_gateway").(string) if externalGateway != "" { gatewayInfo := routers.GatewayInfo{ NetworkID: externalGateway, } createOpts.GatewayInfo = &gatewayInfo } log.Printf("[DEBUG] Create Options: %#v", createOpts) n, err := routers.Create(networkingClient, createOpts).Extract() if err != nil { return fmt.Errorf("Error creating OpenStack Neutron router: %s", err) } log.Printf("[INFO] Router ID: %s", n.ID) log.Printf("[DEBUG] Waiting for OpenStack Neutron Router (%s) to become available", n.ID) stateConf := &resource.StateChangeConf{ Pending: []string{"BUILD", "PENDING_CREATE", "PENDING_UPDATE"}, Target: []string{"ACTIVE"}, Refresh: waitForRouterActive(networkingClient, n.ID), 
Timeout: 2 * time.Minute, Delay: 5 * time.Second, MinTimeout: 3 * time.Second, } _, err = stateConf.WaitForState() d.SetId(n.ID) return resourceNetworkingRouterV2Read(d, meta) } func resourceNetworkingRouterV2Read(d *schema.ResourceData, meta interface{}) error { config := meta.(*Config) networkingClient, err := config.networkingV2Client(d.Get("region").(string)) if err != nil { return fmt.Errorf("Error creating OpenStack networking client: %s", err) } n, err := routers.Get(networkingClient, d.Id()).Extract() if err != nil { httpError, ok := err.(*gophercloud.UnexpectedResponseCodeError) if !ok { return fmt.Errorf("Error retrieving OpenStack Neutron Router: %s", err) } if httpError.Actual == 404 { d.SetId("") return nil } return fmt.Errorf("Error retrieving OpenStack Neutron Router: %s", err) } log.Printf("[DEBUG] Retreived Router %s: %+v", d.Id(), n) d.Set("name", n.Name) d.Set("admin_state_up", n.AdminStateUp) d.Set("distributed", n.Distributed) d.Set("tenant_id", n.TenantID) d.Set("external_gateway", n.GatewayInfo.NetworkID) return nil } func resourceNetworkingRouterV2Update(d *schema.ResourceData, meta interface{}) error { routerId := d.Id() osMutexKV.Lock(routerId) defer osMutexKV.Unlock(routerId) config := meta.(*Config) networkingClient, err := config.networkingV2Client(d.Get("region").(string)) if err != nil { return fmt.Errorf("Error creating OpenStack networking client: %s", err) } var updateOpts routers.UpdateOpts if d.HasChange("name") { updateOpts.Name = d.Get("name").(string) } if d.HasChange("admin_state_up") { asu := d.Get("admin_state_up").(bool) updateOpts.AdminStateUp = &asu } log.Printf("[DEBUG] Updating Router %s with options: %+v", d.Id(), updateOpts) _, err = routers.Update(networkingClient, d.Id(), updateOpts).Extract() if err != nil { return fmt.Errorf("Error updating OpenStack Neutron Router: %s", err) } return resourceNetworkingRouterV2Read(d, meta) } func resourceNetworkingRouterV2Delete(d *schema.ResourceData, meta interface{}) error { config := meta.(*Config) networkingClient, err := config.networkingV2Client(d.Get("region").(string)) if err != nil { return fmt.Errorf("Error creating OpenStack networking client: %s", err) } stateConf := &resource.StateChangeConf{ Pending: []string{"ACTIVE"}, Target: []string{"DELETED"}, Refresh: waitForRouterDelete(networkingClient, d.Id()), Timeout: 2 * time.Minute, Delay: 5 * time.Second, MinTimeout: 3 * time.Second, } _, err = stateConf.WaitForState() if err != nil { return fmt.Errorf("Error deleting OpenStack Neutron Router: %s", err) } d.SetId("") return nil } func waitForRouterActive(networkingClient *gophercloud.ServiceClient, routerId string) resource.StateRefreshFunc { return func() (interface{}, string, error) { r, err := routers.Get(networkingClient, routerId).Extract() if err != nil { return nil, r.Status, err } log.Printf("[DEBUG] OpenStack Neutron Router: %+v", r) return r, r.Status, nil } } func waitForRouterDelete(networkingClient *gophercloud.ServiceClient, routerId string) resource.StateRefreshFunc { return func() (interface{}, string, error) { log.Printf("[DEBUG] Attempting to delete OpenStack Router %s.\n", routerId) r, err := routers.Get(networkingClient, routerId).Extract() if err != nil { errCode, ok := err.(*gophercloud.UnexpectedResponseCodeError) if !ok { return r, "ACTIVE", err } if errCode.Actual == 404 { log.Printf("[DEBUG] Successfully deleted OpenStack Router %s", routerId) return r, "DELETED", nil } } err = routers.Delete(networkingClient, routerId).ExtractErr() if err != nil { errCode, ok := 
err.(*gophercloud.UnexpectedResponseCodeError) if !ok { return r, "ACTIVE", err } if errCode.Actual == 404 { log.Printf("[DEBUG] Successfully deleted OpenStack Router %s", routerId) return r, "DELETED", nil } } log.Printf("[DEBUG] OpenStack Router %s still active.\n", routerId) return r, "ACTIVE", nil } } func routerValueSpecs(d *schema.ResourceData) map[string]string { m := make(map[string]string) for key, val := range d.Get("value_specs").(map[string]interface{}) { m[key] = val.(string) } return m }
voith/terraform-provider-etcd
vendor/github.com/hashicorp/terraform/builtin/providers/openstack/resource_openstack_networking_router_v2.go
Go
isc
8,203
<?php namespace Illuminate\Validation\Concerns; use Egulias\EmailValidator\EmailLexer; use Egulias\EmailValidator\Validation\EmailValidation; class FilterEmailValidation implements EmailValidation { /** * The flags to pass to the filter_var function. * * @var int|null */ protected $flags; /** * Create a new validation instance. * * @param int $flags * @return void */ public function __construct($flags = null) { $this->flags = $flags; } /** * Create a new instance which allows any unicode characters in local-part. * * @return static */ public static function unicode() { return new static(FILTER_FLAG_EMAIL_UNICODE); } /** * Returns true if the given email is valid. * * @param string $email * @param \Egulias\EmailValidator\EmailLexer $emailLexer * @return bool */ public function isValid($email, EmailLexer $emailLexer) { return is_null($this->flags) ? filter_var($email, FILTER_VALIDATE_EMAIL) !== false : filter_var($email, FILTER_VALIDATE_EMAIL, $this->flags) !== false; } /** * Returns the validation error. * * @return \Egulias\EmailValidator\Exception\InvalidEmail|null */ public function getError() { // } /** * Returns the validation warnings. * * @return \Egulias\EmailValidator\Warning\Warning[] */ public function getWarnings() { return []; } }
drakakisgeo/mailtester
vendor/laravel/framework/src/Illuminate/Validation/Concerns/FilterEmailValidation.php
PHP
mit
1,580
define([ "../create-error" ], function( createError ) { return function( name, value ) { return createError( "E_INVALID_PAR_VALUE", "Invalid `{name}` value ({value}).", { name: name, value: value }); }; });
adeebaboud/dastasks
vendor/plugins/globalize/src/common/create-error/invalid-parameter-value.js
JavaScript
mit
216
// Copyright (c) 2006, 2007 Julio M. Merino Vidal // Copyright (c) 2008 Ilya Sokolov, Boris Schaeling // Copyright (c) 2009 Boris Schaeling // Copyright (c) 2010 Felipe Tanus, Boris Schaeling // Copyright (c) 2011, 2012 Jeff Flinn, Boris Schaeling // Copyright (c) 2016 Klemens D. Morgenstern // // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #ifndef BOOST_PROCESS_POSIX_PIPE_OUT_HPP #define BOOST_PROCESS_POSIX_PIPE_OUT_HPP #include <boost/process/detail/posix/handler.hpp> #include <boost/process/detail/posix/file_descriptor.hpp> #include <boost/process/detail/used_handles.hpp> #include <unistd.h> #include <array> namespace boost { namespace process { namespace detail { namespace posix { template<int p1, int p2> struct null_out : handler_base_ext, ::boost::process::detail::uses_handles { file_descriptor sink{"/dev/null", file_descriptor::write}; template <typename Executor> void on_exec_setup(Executor &e) const; std::array<int, 3> get_used_handles() { const auto pp1 = p1 != -1 ? p1 : p2; const auto pp2 = p2 != -1 ? p2 : p1; return {sink.handle(), pp1, pp2}; } }; template<> template<typename Executor> void null_out<1,-1>::on_exec_setup(Executor &e) const { if (::dup2(sink.handle(), STDOUT_FILENO) == -1) e.set_error(::boost::process::detail::get_last_error(), "dup2() failed"); } template<> template<typename Executor> void null_out<2,-1>::on_exec_setup(Executor &e) const { if (::dup2(sink.handle(), STDERR_FILENO) == -1) e.set_error(::boost::process::detail::get_last_error(), "dup2() failed"); } template<> template<typename Executor> void null_out<1,2>::on_exec_setup(Executor &e) const { if (::dup2(sink.handle(), STDOUT_FILENO) == -1) e.set_error(::boost::process::detail::get_last_error(), "dup2() failed"); if (::dup2(sink.handle(), STDERR_FILENO) == -1) e.set_error(::boost::process::detail::get_last_error(), "dup2() failed"); } }}}} #endif
kumakoko/KumaGL
third_lib/boost/1.75.0/boost/process/detail/posix/null_out.hpp
C++
mit
2,089
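Editor's note: null_out.hpp is an internal detail header; below is a hedged sketch of the public Boost.Process syntax it implements, namely discarding a child process's output. The command used is a placeholder.

// Hedged usage sketch of the public API backed by null_out<> (POSIX shown above).
#include <boost/process.hpp>
namespace bp = boost::process;

int main()
{
    // Discard stdout only (handled by null_out<1,-1> on POSIX).
    int rc1 = bp::system("ls", bp::std_out > bp::null);

    // Discard stdout and stderr together (null_out<1,2>).
    int rc2 = bp::system("ls", (bp::std_out & bp::std_err) > bp::null);

    return (rc1 == 0 && rc2 == 0) ? 0 : 1;
}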
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for details. namespace OpenLiveWriter.BrowserControl { /// <summary> /// Enumeration of available BrowserCommands. /// </summary> public enum BrowserCommand { // standard file menu commands NewWindow, SaveAs, PageSetup, Print, PrintPreview, Properties, // standard edit menu commands Cut, Copy, Paste, SelectAll, Find, // standard view menu commands GoBack, GoForward, Stop, Refresh, GoHome, GoSearch, ViewSource, Languages, // standard favorites menu commands AddFavorite, OrganizeFavorites, // standard tools menu commands InternetOptions } /// <summary> /// Interface for accessing browser commands /// </summary> public interface IBrowserCommand { /// <summary> /// Determine whether the current command is enabled. /// </summary> bool Enabled { get; } /// <summary> /// Execute the command /// </summary> void Execute(); } }
willduff/OpenLiveWriter-1
src/managed/OpenLiveWriter.BrowserControl/IBrowserCommand.cs
C#
mit
1,293
import Cat from '../models/cat'; import BaseCtrl from './base'; export default class CatCtrl extends BaseCtrl { model = Cat; }
MOstafaMRady/organizer
server/controllers/cat.ts
TypeScript
mit
130
module Github # Determines the links in the current response link header to be used # to find the links to other pages of request responses. These will # only be present if the result set size exceeds the per page limit. # # @api private class PageLinks include Github::Constants DELIM_LINKS = ','.freeze # :nodoc: # Hold the extracted values for URI from the Link header # for the first, last, next and previous page. attr_accessor :first, :last, :next, :prev LINK_REGEX = /<([^>]+)>; rel=\"([^\"]+)\"/ # Parses links from executed request # # @param [Hash] response_headers # # @api private def initialize(response_headers) link_header = response_headers[HEADER_LINK] if link_header && link_header =~ /(next|first|last|prev)/ extract_links(link_header) else # When on the first page self.next = response_headers[HEADER_NEXT] self.last = response_headers[HEADER_LAST] end end private def extract_links(link_header) link_header.split(DELIM_LINKS).each do |link| LINK_REGEX.match(link.strip) do |match| url_part, meta_part = match[1], match[2] next if !url_part || !meta_part assign_url_part(meta_part, url_part) end end end def assign_url_part(meta_part, url_part) case meta_part when META_FIRST self.first = url_part when META_LAST self.last = url_part when META_NEXT self.next = url_part when META_PREV self.prev = url_part end end end # PageLinks end # Github
firstval/github
lib/github_api/page_links.rb
Ruby
mit
1,638
using System; namespace EmptyProject.Windows8 { /// <summary> /// The main class. /// </summary> public static class Program { /// <summary> /// The main entry point for the application. /// </summary> static void Main() { var factory = new MonoGame.Framework.GameFrameworkViewSource<Game1>(); Windows.ApplicationModel.Core.CoreApplication.Run(factory); } } }
MSylvia/CocosSharp
ProjectTemplates/Templates/StarterTemplates/Windows81/Source/EmptyProject.Windows8/EmptyProject.Windows81/Program.cs
C#
mit
460
require "spec_helper" describe Mongoid::Relations::Referenced::One do describe "#=" do context "when the relationship is an illegal embedded reference" do let(:game) do Game.new end let(:video) do Video.new end it "raises a mixed relation error" do expect { game.video = video }.to raise_error(Mongoid::Errors::MixedRelations) end end context "when the relation is cyclic" do let(:user) do User.new end let(:role) do Role.new end before do user.role = role end it "does not raise an error" do expect(user.role).to eq(role) end end context "when the relation is not polymorphic" do context "when the parent is a new record" do let(:person) do Person.new end let(:game) do Game.new end let(:metadata) do Game.relations["person"] end before do metadata.should_receive(:criteria).never person.game = game end it "sets the target of the relation" do expect(person.game.target).to eq(game) end it "sets the foreign key on the relation" do expect(game.person_id).to eq(person.id) end it "sets the base on the inverse relation" do expect(game.person).to eq(person) end it "sets the same instance on the inverse relation" do expect(game.person).to eql(person) end it "does not save the target" do expect(game).to_not be_persisted end end context "when the parent is not a new record" do let(:person) do Person.create end let(:game) do Game.new end before do person.game = game end it "sets the target of the relation" do expect(person.game.target).to eq(game) end it "sets the foreign key of the relation" do expect(game.person_id).to eq(person.id) end it "sets the base on the inverse relation" do expect(game.person).to eq(person) end it "sets the same instance on the inverse relation" do expect(game.person).to eql(person) end it "saves the target" do expect(game).to be_persisted end context "when reloading the parent" do before do person.reload end context "when setting a new document on the relation" do before do person.game = Game.new end it "detaches the previous relation" do expect { game.reload }.to raise_error(Mongoid::Errors::DocumentNotFound) end end end end context "when relation have a different primary_key" do let(:person) do Person.create end let(:cat) do Cat.new end before do person.cat = cat end it "sets the target of the relation" do expect(person.cat.target).to eq(cat) end it "sets the foreign key of the relation" do expect(cat.person_id).to eq(person.username) end it "sets the base on the inverse relation" do expect(cat.person).to eq(person) end it "sets the same instance on the inverse relation" do expect(cat.person).to eql(person) end it "saves the target" do expect(cat).to be_persisted end end end context "when the relation is polymorphic" do context "when the parent is a new record" do let(:bar) do Bar.new end let(:rating) do Rating.new end before do bar.rating = rating end it "sets the target of the relation" do expect(bar.rating.target).to eq(rating) end it "sets the foreign key on the relation" do expect(rating.ratable_id).to eq(bar.id) end it "sets the base on the inverse relation" do expect(rating.ratable).to eq(bar) end it "sets the same instance on the inverse relation" do expect(rating.ratable).to eql(bar) end it "does not save the target" do expect(rating).to_not be_persisted end end context "when the parent is not a new record" do let(:bar) do Bar.create end let(:rating) do Rating.new end before do bar.rating = rating end it "sets the target of the relation" do expect(bar.rating.target).to eq(rating) end it "sets the foreign key of the relation" do expect(rating.ratable_id).to eq(bar.id) end 
it "sets the base on the inverse relation" do expect(rating.ratable).to eq(bar) end it "sets the same instance on the inverse relation" do expect(rating.ratable).to eql(bar) end it "saves the target" do expect(rating).to be_persisted end end context "when replacing an existing persisted (dependent: :destroy) relation" do let!(:person) do Person.create end let!(:game) do person.create_game(name: "Starcraft") end context "with a new one created via the parent" do let!(:new_game) do person.create_game(name: "Starcraft 2") end it "sets the new relation on the parent" do expect(person.game).to eq(new_game) end it "removes the old foreign key reference" do expect(game.person_id).to be_nil end it "removes the reference to the parent" do expect(game.person).to be_nil end it "destroys the old child" do expect(game).to be_destroyed end it "leaves the old child unpersisted" do expect(game.persisted?).to be_false end it "leaves the new child persisted" do expect(new_game.persisted?).to be_true end end context "with a new one built via the parent" do let!(:new_game) do person.build_game(name: "Starcraft 2") end it "sets the new relation on the parent" do expect(person.game).to eq(new_game) end it "removes the old foreign key reference" do expect(game.person_id).to be_nil end it "removes the reference to the parent" do expect(game.person).to be_nil end it "does not destroy the old child" do expect(game).to_not be_destroyed end it "leaves the old child persisted" do expect(game.persisted?).to be_true end it "leaves the new child unpersisted" do expect(new_game.persisted?).to be_false end end end context "when replacing an existing unpersisted (dependent: :destroy) relation" do let!(:person) do Person.create end let!(:game) do person.build_game(name: "Starcraft") end context "with a new one created via the parent" do let!(:new_game) do person.create_game(name: "Starcraft 2") end it "sets the new relation on the parent" do expect(person.game).to eq(new_game) end it "removes the old foreign key reference" do expect(game.person_id).to be_nil end it "removes the reference to the parent" do expect(game.person).to be_nil end it "destroys the old child" do expect(game).to be_destroyed end it "leaves the old child unpersisted" do expect(game.persisted?).to be_false end it "leaves the new child persisted" do expect(new_game.persisted?).to be_true end end context "with a new one built via the parent" do let!(:new_game) do person.build_game(name: "Starcraft 2") end it "sets the new relation on the parent" do expect(person.game).to eq(new_game) end it "removes the old foreign key reference" do expect(game.person_id).to be_nil end it "removes the reference to the parent" do expect(game.person).to be_nil end it "does not destroy the old child" do expect(game).to_not be_destroyed end it "leaves the old child unpersisted" do expect(game.persisted?).to be_false end it "leaves the new child unpersisted" do expect(new_game.persisted?).to be_false end end end context "when replacing an existing persisted (dependent: :nullify) relation" do let!(:person) do Person.create end let!(:cat) do person.create_cat(name: "Cuddles") end context "with a new one created via the parent" do let!(:new_cat) do person.create_cat(name: "Brutus") end it "sets the new relation on the parent" do expect(person.cat).to eq(new_cat) end it "removes the old foreign key reference" do expect(cat.person_id).to be_nil end it "removes the reference to the parent" do expect(cat.person).to be_nil end it "does not destroy the old child" do expect(cat).to_not 
be_destroyed end it "leaves the old child persisted" do expect(cat.persisted?).to be_true end it "leaves the new child persisted" do expect(new_cat.persisted?).to be_true end end context "with a new one built via the parent" do let!(:new_cat) do person.build_cat(name: "Brutus") end it "sets the new relation on the parent" do expect(person.cat).to eq(new_cat) end it "removes the old foreign key reference" do expect(cat.person_id).to be_nil end it "removes the reference to the parent" do expect(cat.person).to be_nil end it "does not destroy the old child" do expect(cat).to_not be_destroyed end it "leaves the old child persisted" do expect(cat.persisted?).to be_true end it "leaves the new child unpersisted" do expect(new_cat.persisted?).to be_false end end end context "when replacing an existing unpersisted (dependent: :nullify) relation" do let!(:person) do Person.create end let!(:cat) do person.build_cat(name: "Cuddles") end context "with a new one created via the parent" do let!(:new_cat) do person.create_cat(name: "Brutus") end it "sets the new relation on the parent" do expect(person.cat).to eq(new_cat) end it "removes the old foreign key reference" do expect(cat.person_id).to be_nil end it "removes the reference to the parent" do expect(cat.person).to be_nil end it "does not destroy the old child" do expect(cat).to_not be_destroyed end it "leaves the old child unpersisted" do expect(cat.persisted?).to be_false end it "leaves the new child persisted" do expect(new_cat.persisted?).to be_true end end context "with a new one built via the parent" do let!(:new_cat) do person.build_cat(name: "Brutus") end it "sets the new relation on the parent" do expect(person.cat).to eq(new_cat) end it "removes the old foreign key reference" do expect(cat.person_id).to be_nil end it "removes the reference to the parent" do expect(cat.person).to be_nil end it "does not destroy the old child" do expect(cat).to_not be_destroyed end it "leaves the old child unpersisted" do expect(cat.persisted?).to be_false end it "leaves the new child unpersisted" do expect(new_cat.persisted?).to be_false end end end context "when replacing an existing relation with a new one" do let!(:person) do Person.create end context "when dependent is destroy" do let!(:game) do person.create_game(name: "Starcraft") end let!(:new_game) do Game.create(name: "Starcraft 2") end before do person.game = new_game end it "sets the new relation on the parent" do expect(person.game).to eq(new_game) end it "removes the old foreign key reference" do expect(game.person_id).to be_nil end it "removes the reference to the parent" do expect(game.person).to be_nil end it "destroys the old child" do expect(game).to be_destroyed end end context "when dependent is not set" do let!(:account) do person.create_account(name: "savings") end let!(:new_account) do Account.create(name: "checking") end before do person.account = new_account end it "sets the new relation on the parent" do expect(person.account).to eq(new_account) end it "removes the old foreign key reference" do expect(account.person_id).to be_nil end it "removes the reference to the parent" do expect(account.person).to be_nil end it "nullifies the old child" do expect(account).to_not be_destroyed end end end end end describe "#= nil" do context "when the relation is not polymorphic" do context "when the parent is a new record" do let(:person) do Person.new end let(:game) do Game.new end before do person.game = game person.game = nil end it "sets the relation to nil" do expect(person.game).to be_nil 
end it "removed the inverse relation" do expect(game.person).to be_nil end it "removes the foreign key value" do expect(game.person_id).to be_nil end end context "when the parent is not a new record" do let(:person) do Person.create end let(:game) do Game.new end before do person.game = game person.game = nil end it "sets the relation to nil" do expect(person.game).to be_nil end it "removed the inverse relation" do expect(game.person).to be_nil end it "removes the foreign key value" do expect(game.person_id).to be_nil end it "deletes the target from the database" do expect(game).to be_destroyed end end end context "when the relation is polymorphic" do context "when the parent is a new record" do let(:bar) do Bar.new end let(:rating) do Rating.new end before do bar.rating = rating bar.rating = nil end it "sets the relation to nil" do expect(bar.rating).to be_nil end it "removed the inverse relation" do expect(rating.ratable).to be_nil end it "removes the foreign key value" do expect(rating.ratable_id).to be_nil end end context "when the parent is not a new record" do let(:bar) do Bar.create end let(:rating) do Rating.new end before do bar.rating = rating bar.rating = nil end it "sets the relation to nil" do expect(bar.rating).to be_nil end it "removed the inverse relation" do expect(rating.ratable).to be_nil end it "removes the foreign key value" do expect(rating.ratable_id).to be_nil end it "applies the appropriate dependent option" do expect(rating).to_not be_destroyed end end end end describe "#build_#\{name}" do context "when the relationship is an illegal embedded reference" do let(:game) do Game.new end it "raises a mixed relation error" do expect { game.build_video(title: "Tron") }.to raise_error(Mongoid::Errors::MixedRelations) end end context "when the relation is not polymorphic" do context "when using object ids" do let(:person) do Person.create end let(:game) do person.build_game(score: 50) end it "returns a new document" do expect(game.score).to eq(50) end it "sets the foreign key on the document" do expect(game.person_id).to eq(person.id) end it "sets the inverse relation" do expect(game.person).to eq(person) end it "does not save the built document" do expect(game).to_not be_persisted end end context "when providing no attributes" do let(:person) do Person.create end let(:game) do person.build_game end it "sets the foreign key on the document" do expect(game.person_id).to eq(person.id) end it "sets the inverse relation" do expect(game.person).to eq(person) end it "does not save the built document" do expect(game).to_not be_persisted end end context "when providing nil attributes" do let(:person) do Person.create end let(:game) do person.build_game(nil) end it "sets the foreign key on the document" do expect(game.person_id).to eq(person.id) end it "sets the inverse relation" do expect(game.person).to eq(person) end it "does not save the built document" do expect(game).to_not be_persisted end end end context "when the relation is polymorphic" do context "when using object ids" do let(:bar) do Bar.create end let(:rating) do bar.build_rating(value: 5) end it "returns a new document" do expect(rating.value).to eq(5) end it "sets the foreign key on the document" do expect(rating.ratable_id).to eq(bar.id) end it "sets the inverse relation" do expect(rating.ratable).to eq(bar) end it "does not save the built document" do expect(rating).to_not be_persisted end end end end describe ".builder" do let(:builder_klass) do Mongoid::Relations::Builders::Referenced::One end let(:document) do 
double end let(:metadata) do double(extension?: false) end it "returns the embedded in builder" do expect( described_class.builder(nil, metadata, document) ).to be_a_kind_of(builder_klass) end end describe "#create_#\{name}" do context "when the relationship is an illegal embedded reference" do let(:game) do Game.new end it "raises a mixed relation error" do expect { game.create_video(title: "Tron") }.to raise_error(Mongoid::Errors::MixedRelations) end end context "when the relation is not polymorphic" do let(:person) do Person.create end let(:game) do person.create_game(score: 50) end it "returns a new document" do expect(game.score).to eq(50) end it "sets the foreign key on the document" do expect(game.person_id).to eq(person.id) end it "sets the inverse relation" do expect(game.person).to eq(person) end it "saves the document" do expect(game).to be_persisted end end context "when providing no attributes" do let(:person) do Person.create end let(:game) do person.create_game end it "sets the foreign key on the document" do expect(game.person_id).to eq(person.id) end it "sets the inverse relation" do expect(game.person).to eq(person) end it "saves the document" do expect(game).to be_persisted end end context "when providing nil attributes" do let(:person) do Person.create end let(:game) do person.create_game(nil) end it "sets the foreign key on the document" do expect(game.person_id).to eq(person.id) end it "sets the inverse relation" do expect(game.person).to eq(person) end it "saves the document" do expect(game).to be_persisted end end context "when the relation is polymorphic" do let(:bar) do Bar.create end let(:rating) do bar.create_rating(value: 5) end it "returns a new document" do expect(rating.value).to eq(5) end it "sets the foreign key on the document" do expect(rating.ratable_id).to eq(bar.id) end it "sets the inverse relation" do expect(rating.ratable).to eq(bar) end it "saves the document" do expect(rating).to be_persisted end end end describe ".criteria" do let(:id) do BSON::ObjectId.new end context "when the relation is polymorphic" do let(:metadata) do Book.relations["rating"] end let(:criteria) do described_class.criteria(metadata, id, Book) end it "includes the type in the criteria" do expect(criteria.selector).to eq( { "ratable_id" => id, "ratable_type" => "Book" } ) end end context "when the relation is not polymorphic" do let(:metadata) do Person.relations["game"] end let(:criteria) do described_class.criteria(metadata, id, Person) end it "does not include the type in the criteria" do expect(criteria.selector).to eq({ "person_id" => id }) end end end describe ".embedded?" 
do it "returns false" do expect(described_class).to_not be_embedded end end describe ".foreign_key_suffix" do it "returns _id" do expect(described_class.foreign_key_suffix).to eq("_id") end end describe ".macro" do it "returns has_one" do expect(described_class.macro).to eq(:has_one) end end describe "#nullify" do let(:person) do Person.create end let!(:game) do person.create_game(name: "Starcraft II") end context "when the instance has been set" do before do person.game.nullify end it "removes the foreign key from the target" do expect(game.person_id).to be_nil end it "removes the reference from the target" do expect(game.person).to be_nil end end context "when the instance has been reloaded" do let(:from_db) do Person.find(person.id) end let(:game_reloaded) do Game.find(game.id) end before do from_db.game.nullify end it "removes the foreign key from the target" do expect(game_reloaded.person_id).to be_nil end it "removes the reference from the target" do expect(game_reloaded.person).to be_nil end end end describe "#respond_to?" do let(:person) do Person.new end let!(:game) do person.build_game(name: "Tron") end let(:document) do person.game end Mongoid::Document.public_instance_methods(true).each do |method| context "when checking #{method}" do it "returns true" do expect(document.respond_to?(method)).to be_true end end end end describe ".stores_foreign_key?" do it "returns false" do expect(described_class.stores_foreign_key?).to be_false end end describe ".valid_options" do it "returns the valid options" do expect(described_class.valid_options).to eq( [ :as, :autobuild, :autosave, :dependent, :foreign_key, :primary_key ] ) end end describe ".validation_default" do it "returns true" do expect(described_class.validation_default).to be_true end end context "when reloading the relation" do let!(:person) do Person.create end let!(:game_one) do Game.create(name: "Warcraft 3") end let!(:game_two) do Game.create(name: "Starcraft 2") end before do person.game = game_one end context "when the relation references the same document" do before do Game.collection.find({ _id: game_one.id }). update({ "$set" => { name: "Diablo 2" }}) end let(:reloaded) do person.game(true) end it "reloads the document from the database" do expect(reloaded.name).to eq("Diablo 2") end it "sets a new document instance" do expect(reloaded).to_not equal(game_one) end end context "when the relation references a different document" do before do person.game = game_two end let(:reloaded) do person.game(true) end it "reloads the new document from the database" do expect(reloaded.name).to eq("Starcraft 2") end it "sets a new document instance" do expect(reloaded).to_not equal(game_one) end end end end
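
# Editor's sketch (not part of the original spec): a minimal, hypothetical set
# of models matching the has_one behaviour exercised above. The field lists and
# dependent options are inferred from the expectations in this file, not copied
# from the repository's actual support models.
class Person
  include Mongoid::Document
  has_one :game, dependent: :destroy   # `person.game = new_game` destroys the old game
  has_one :cat,  dependent: :nullify   # replacing a cat only clears its person_id
  has_one :account                     # no dependent option: the old account is nullified
end

class Game
  include Mongoid::Document
  field :name, type: String
  belongs_to :person
end

class Bar
  include Mongoid::Document
  has_one :rating, as: :ratable        # polymorphic parent side
end

class Rating
  include Mongoid::Document
  field :value, type: Integer
  belongs_to :ratable, polymorphic: true
end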
kevinmarvin/mongoid
spec/mongoid/relations/referenced/one_spec.rb
Ruby
mit
27,062
package com.insightfullogic.java8.examples.chapter4;

import com.insightfullogic.java8.examples.chapter1.Album;
import com.insightfullogic.java8.examples.chapter1.SampleData;
import com.insightfullogic.java8.examples.chapter1.Track;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.util.Collection;
import java.util.List;
import java.util.function.Function;

import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
public class TestOrder {

    private final OrderFactory factory;
    private Order order;

    @Parameters
    public static Collection<Object[]> data() {
        Object[][] data = new Object[][] {
            of(OrderImperative::new),
            of(OrderStreams::new),
            of(OrderDomain::new)
        };
        return asList(data);
    }

    private static interface OrderFactory extends Function<List<Album>, Order> {}

    private static Object[] of(OrderFactory factory) {
        return new Object[] { factory };
    }

    public TestOrder(OrderFactory factory) {
        this.factory = factory;
    }

    @Before
    public void initOrder() {
        List<Track> tracks = asList(new Track("Acknowledgement", 467),
                                    new Track("Resolution", 442));
        Album aLoveSupreme = new Album("A Love Supreme", tracks, asList(SampleData.johnColtrane));
        order = factory.apply(asList(aLoveSupreme));
    }

    @Test
    public void countsRunningTime() {
        assertEquals(909, order.countRunningTime());
    }

    @Test
    public void countsArtists() {
        assertEquals(1, order.countMusicians());
    }

    @Test
    public void countsTracks() {
        assertEquals(2, order.countTracks());
    }

}
sonia1985/java-8-lambdas-exercises
src/test/java/com/insightfullogic/java8/examples/chapter4/TestOrder.java
Java
mit
1,805
#--
# Copyright (c) 2005-2010 Tobias Luetke
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++

require 'active_support'
require 'active_support/core_ext/string/inflections'
require 'active_support/core_ext/hash/indifferent_access'
require 'active_support/core_ext/hash/conversions'
require 'active_support/core_ext/object/conversions'
require 'active_support/core_ext/class/attribute'
require 'active_support/core_ext/enumerable.rb'

if(!defined?(ActiveSupport::VERSION) || (ActiveSupport::VERSION::STRING < "4.1"))
  require 'active_support/core_ext/class/attribute_accessors'
end

require 'active_support/core_ext/class/delegating_attributes'
require 'active_support/core_ext/module/attribute_accessors'

begin
  require 'active_support/base64'

  unless defined?(Base64)
    Base64 = ActiveSupport::Base64
  end

  unless Base64.respond_to?(:strict_encode64)
    def Base64.strict_encode64(v)
      ActiveSupport::Base64.encode64s(v)
    end
  end
rescue LoadError
  require 'base64'
end

require 'securerandom'
require 'builder'
require 'cgi'
require 'rexml/document'

require 'active_utils'
require 'active_merchant/billing'
require 'active_merchant/version'

module ActiveMerchant #:nodoc:
  module Billing #:nodoc:
    autoload :Integrations, 'active_merchant/billing/integrations'
  end
end
michaelherold/active_merchant
lib/active_merchant.rb
Ruby
mit
2,312
/* j/6/ap.c ** */ #include "all.h" /** forward declares **/ u3_noun u3wfp_rake(u3_noun); u3_noun u3wfp_open(u3_noun); u3_noun u3wfp_hack(u3_noun); static u3_noun _ap_open_l(u3_noun, u3_noun); // make sure these match the array below! // # define _ap_jet_open 0 # define _ap_jet_rake 1 # define _ap_jet_hack 2 static u3_noun _open_in(u3_noun ter, u3_noun gen); /* ~(. al gen) */ static u3_noun _al_core(u3_noun ter, u3_noun gen) { u3_noun gat = u3j_hook(u3k(ter), "al"); return u3i_molt(gat, u3x_sam, u3k(gen), 0); } /* van is transferred, gen is retained */ static u3_noun _ap_bunt(u3_noun van, u3_noun gen) { u3_noun pro = u3qfl_bunt(van, gen); u3z(van); return pro; } /** open cases **/ #define _open_do_p(stem) \ static u3_noun _open_in_##stem \ ( u3_noun ter, u3_noun p_gen) #define _open_do_pq(stem) \ static u3_noun _open_in_##stem \ ( u3_noun ter, u3_noun p_gen, u3_noun q_gen) #define _open_do_pqr(stem) \ static u3_noun _open_in_##stem \ ( u3_noun ter, u3_noun p_gen, u3_noun q_gen, u3_noun r_gen) #define _open_do_pqrs(stem) \ static u3_noun _open_in_##stem \ ( u3_noun ter, u3_noun p_gen, u3_noun q_gen, u3_noun r_gen, \ u3_noun s_gen) /*** **** ***/ _open_do_pq(tsbr) // =: { return u3nt(c3__tsls, _ap_bunt(_al_core(ter, p_gen), p_gen), u3k(q_gen)); } _open_do_pq(tscl) // =: { return u3nt(c3__tsgr, u3nt(c3__cncb, u3nc(u3nc(u3_nul, 1), u3_nul), u3k(p_gen)), u3k(q_gen)); } _open_do_pqr(tsdt) // =. { return u3nt(c3__tsgr, u3nt(c3__cncb, u3nc(u3nc(u3_nul, 1), u3_nul), u3nc(u3nc(u3k(p_gen), u3k(q_gen)), u3_nul)), u3k(r_gen)); } _open_do_pq(tsgl) // =< { return u3nt(c3__tsgr, u3k(q_gen), u3k(p_gen)); } _open_do_pq(tshp) // =- { return u3nt(c3__tsls, u3k(q_gen), u3k(p_gen)); } _open_do_pq(tsls) // =+ { return u3nt(c3__tsgr, u3nc(u3k(p_gen), u3nc(u3_nul, 1)), u3k(q_gen)); } _open_do_p(tssg) // =~ { if ( !_(u3du(p_gen)) ) { return u3nc(0, 1); } else { u3_noun tp_gen = u3t(p_gen); u3_noun ip_gen = u3h(p_gen); if ( (u3_nul == p_gen) ) { return u3nc(u3_blip, 1); } else if ( (u3_nul == tp_gen) ) { return u3k(ip_gen); } else { return u3nt(c3__tsgr, u3k(ip_gen), _open_in_tssg(ter, tp_gen)); } } } /*** **** ***/ _open_do_p(bccb) // $_ { return _ap_bunt(_al_core(ter, p_gen), p_gen); } _open_do_p(bctr) // $* { return u3nc(c3__ktsg, _ap_bunt(_al_core(ter, p_gen), p_gen)); } _open_do_p(bczp) // $! { return u3nt(c3__bccb, c3__axil, u3k(p_gen)); } /*** **** ***/ _open_do_p(brhp) // |- { return u3nt(c3__tsgl, u3nc(c3__cnzy, u3_blip), u3nc(c3__brdt, u3k(p_gen))); } _open_do_p(brdt) // |. { return u3nc(c3__brcn, u3nt(u3nt(u3_blip, c3__ash, u3k(p_gen)), u3_nul, u3_nul)); } /*** **** ***/ _open_do_p(wtbr) // ?| { if ( (u3_nul == p_gen) ) { return u3nt(c3__dtzz, 'f', c3n); } else { u3_noun ip_gen = u3h(p_gen); u3_noun tp_gen = u3t(p_gen); return u3nq(c3__wtcl, u3k(ip_gen), u3nt(c3__dtzz, 'f', c3y), _open_in_wtbr(ter, tp_gen)); } } _open_do_pqr(wtkt) // ?^ { return u3nq(c3__wtcl, u3nt(c3__wtts, u3nt(c3__axil, c3__atom, u3_blip), u3k(p_gen)), u3k(r_gen), u3k(q_gen)); } _open_do_pq(wtgl) // ?< { return u3nq(c3__wtcl, u3k(p_gen), u3nc(c3__zpzp, u3_nul), u3k(q_gen)); } _open_do_pqr(wtdt) // ?. 
{ return u3nq(c3__wtcl, u3k(p_gen), u3k(r_gen), u3k(q_gen)); } _open_do_pq(wtgr) // ?> { return u3nq(c3__wtcl, u3k(p_gen), u3k(q_gen), u3nc(c3__zpzp, u3_nul)); } _open_do_pq(wthp) // ?- { if ( (u3_nul == q_gen) ) { return u3nc(c3__zpfs, u3nc(c3__cnzz, u3k(p_gen))); } else { u3_noun iq_gen = u3h(q_gen); u3_noun tq_gen = u3t(q_gen); u3_noun piq_gen = u3h(iq_gen); u3_noun qiq_gen = u3t(iq_gen); return u3nq(c3__wtcl, u3nt(c3__wtts, u3k(piq_gen), u3k(p_gen)), u3k(qiq_gen), _open_in_wthp(ter, p_gen, tq_gen)); } } _open_do_p(wtpm) // ?& { if ( (u3_nul == p_gen) ) { return u3nt(c3__dtzz, 'f', c3y); } else { u3_noun ip_gen = u3h(p_gen); u3_noun tp_gen = u3t(p_gen); return u3nq(c3__wtcl, u3k(ip_gen), _open_in_wtpm(ter, tp_gen), u3nt(c3__dtzz, 'f', c3n)); } } _open_do_pqr(wtls) // ?+ { u3_noun tul = u3nc(u3nc(u3nc(c3__axil, c3__noun), u3k(q_gen)), u3_nul); u3_noun zal = u3qb_weld(r_gen, tul); u3_noun ret = u3nt(c3__wthp, u3k(p_gen), zal); u3z(tul); return ret; } _open_do_pqr(wtpt) // ?@ { return u3nq(c3__wtcl, u3nt(c3__wtts, u3nt(c3__axil, c3__atom, u3_blip), u3k(p_gen)), u3k(q_gen), u3k(r_gen)); } _open_do_pqr(wtsg) // ?~ { return u3nq(c3__wtcl, u3nt(c3__wtts, u3nc(c3__axil, c3__null), u3k(p_gen)), u3k(q_gen), u3k(r_gen)); } _open_do_p(wtzp) // ?! { return u3nq(c3__wtcl, u3k(p_gen), u3nt(c3__dtzz, 'f', c3n), u3nt(c3__dtzz, 'f', c3y)); } /*** **** ***/ _open_do_pq(zpcb) // !_ { return u3k(q_gen); } _open_do_p(zpgr) // !> { return u3nq(c3__cnhp, u3nc(c3__cnzy, c3__onan), u3nt(c3__zpsm, u3nc(c3__bctr, u3nc(c3__herb, u3nc(c3__cnzy, c3__abel))), u3k(p_gen)), u3_nul); } /*** **** ***/ _open_do_pq(clhp) // :- { return u3nc(u3k(p_gen), u3k(q_gen)); } _open_do_pq(clcb) // :_ { return u3nc(u3k(q_gen), u3k(p_gen)); } _open_do_p(clcn) // :% { return u3nc(u3nc(c3__clsg, u3k(p_gen)), u3nc(c3__bczp, c3__null)); } _open_do_pqrs(clkt) // :^ { return u3nq(u3k(p_gen), u3k(q_gen), u3k(r_gen), u3k(s_gen)); } _open_do_pqr(clls) // :+ { return u3nt(u3k(p_gen), u3k(q_gen), u3k(r_gen)); } _open_do_p(clsg) // :~ { if ( (u3_nul == p_gen) ) { return u3nt(c3__dtzz, 'n', u3_nul); } else { u3_noun ip_gen = u3h(p_gen); u3_noun tp_gen = u3t(p_gen); return u3nc(u3k(ip_gen), _open_in_clsg(ter, tp_gen)); } } _open_do_p(cltr) // :* { if ( (u3_nul == p_gen) ) { return u3nc(c3__zpzp, u3_nul); } else { u3_noun ip_gen = u3h(p_gen); u3_noun tp_gen = u3t(p_gen); if ( (u3_nul == tp_gen) ) { return u3k(ip_gen); } else { return u3nc(u3k(ip_gen), _open_in_cltr(ter, tp_gen)); } } } /*** **** ***/ _open_do_pq(cncb) // %_ { return u3nc(c3__ktls, u3nq(u3nc(c3__cnzz, u3k(p_gen)), c3__cnts, u3k(p_gen), u3k(q_gen))); } #if 0 _open_do_pq(cncl) // %: { return u3nq (c3__cnsg, u3nc(u3_blip, u3_nul), u3k(p_gen), u3k(q_gen)); } #endif _open_do_pq(cndt) // %. 
{ return u3nt(c3__cnhp, u3k(q_gen), u3nc(u3k(p_gen), u3_nul)); } _open_do_pqrs(cnkt) // %^ { return u3nq(c3__cnhp, u3k(p_gen), u3k(q_gen), u3nt(u3k(r_gen), u3k(s_gen), u3_nul)); } _open_do_pq(cnhp) // %- { if ( (u3_nul == q_gen) ) { return u3nt(c3__tsgr, u3k(p_gen), u3nc(c3__cnzy, u3_blip)); } else { return u3nq(c3__cncl, u3k(p_gen), c3__cltr, u3k(q_gen)); } } _open_do_pqr(cnls) // %+ { return u3nc(c3__cnhp, u3nq(u3k(p_gen), u3k(q_gen), u3k(r_gen), u3_nul)); } _open_do_pqr(cnsg) // %~ { return u3nq(c3__cntr, u3k(p_gen), u3k(q_gen), u3nc(u3nc(u3nc(u3nc(u3_nul, 6), 0), u3k(r_gen)), 0)); } _open_do_p(cnzy) // %cnzy { return u3nt(c3__cnts, u3nc(u3k(p_gen), u3_nul), u3_nul); } _open_do_p(cnzz) // %cnzz { return u3nt(c3__cnts, u3k(p_gen), u3_nul); } /*** **** ***/ _open_do_p(hxgl) // #< { return u3nq(c3__cnhp, u3nc(c3__cnzy, c3__noah), u3nc(c3__zpgr, u3nc(c3__cltr, u3k(p_gen))), u3_nul); } _open_do_p(hxgr) // #> { return u3nq(c3__cnhp, u3nc(c3__cnzy, c3__cain), u3nc(c3__zpgr, u3nc(c3__cltr, u3k(p_gen))), u3_nul); } /*** **** ***/ _open_do_pq(ktdt) // ^. { return u3nt(c3__ktls, u3nq(c3__cnhp, u3k(p_gen), u3k(q_gen), u3_nul), u3k(q_gen)); } _open_do_pq(kthp) // ^- { return u3nt(c3__ktls, _ap_bunt(_al_core(ter, p_gen), p_gen), u3k(q_gen)); } /*** **** ***/ _open_do_pq(brcb) // |_ { return u3nt(c3__tsls, u3nc(c3__bctr, u3k(p_gen)), u3nc(c3__brcn, u3k(q_gen))); } _open_do_pq(brkt) // |^ { u3_noun diz = u3nc(c3__ash, u3k(p_gen)); u3_noun ret = u3nt(c3__tsgr, u3nc(c3__brcn, u3qdb_put(q_gen, u3_blip, diz)), u3nc(c3__cnzy, u3_blip)); u3z(diz); return ret; } _open_do_pq(brls) // |+ { return u3nc(c3__ktbr, u3nt(c3__brts, u3k(p_gen), u3k(q_gen))); } _open_do_p(brwt) // |? { return u3nt(c3__ktwt, c3__brdt, u3k(p_gen)); } /*** **** ***/ _open_do_pq(sgts) // ~= { return u3nt(c3__sggr, u3nc(c3__germ, u3k(p_gen)), u3k(q_gen)); } #if 0 _open_do_pq(sgbr) // ~| { return u3nt (c3__sggr, u3nc(c3__mean, u3k(p_gen)), u3k(q_gen)); } #endif _open_do_pq(sggl) // ~> { return u3nt(c3__tsgl, u3nq(c3__sggr, u3k(p_gen), u3_nul, 1), u3k(q_gen)); } _open_do_pq(sgbc) // ~$ { return u3nt(c3__sggr, u3nq(c3__live, c3__dtzz, u3_blip, u3k(p_gen)), u3k(q_gen)); } _open_do_pq(sgcb) // ~_ { return u3nt(c3__sggr, u3nc(c3__mean, u3nc(c3__brdt, u3k(p_gen))), u3k(q_gen)); } static u3_noun _sgcn_a(u3_noun r_gen, u3_noun nob) { if ( c3n == u3du(r_gen) ) { return u3k(nob); } else { u3_noun ir_gen = u3h(r_gen); u3_noun tr_gen = u3t(r_gen); u3_noun pir_gen, qir_gen; u3x_cell(ir_gen, &pir_gen, &qir_gen); return u3nc(u3nc(u3nt(c3__dtzz, u3_blip, u3k(pir_gen)), u3nc(c3__zpts, u3k(qir_gen))), _sgcn_a(tr_gen, nob)); } } _open_do_pqrs(sgcn) // ~% { return u3nt(c3__sggl, u3nq(c3__fast, c3__clls, u3nt(c3__dtzz, u3_blip, u3k(p_gen)), u3nt(u3nc(c3__zpts, u3k(q_gen)), c3__clsg, _sgcn_a(r_gen, u3_nul))), u3k(s_gen)); } _open_do_pq(sgfs) // ~/ { return u3nc(c3__sgcn, u3nq(u3k(p_gen), u3nc(u3_nul, 7), u3_nul, u3k(q_gen))); } _open_do_pq(sgls) // ~+ { return u3nt(c3__sggr, u3nq(c3__memo, c3__dtzz, u3_blip, u3k(p_gen)), u3k(q_gen)); } _open_do_pqr(sgpm) // ~& { return u3nt(c3__sggr, u3nt(c3__slog, u3nt(c3__dtzy, u3_blip, u3k(p_gen)), u3nq(c3__cnhp, u3nc(c3__cnzy, c3__cain), u3nc(c3__zpgr, u3k(q_gen)), u3_nul)), u3k(r_gen)); } _open_do_pqrs(sgwt) // ~? 
{ return u3nt(c3__tsls, u3nq(c3__wtdt, u3k(q_gen), u3nc(c3__bczp, c3__null), u3nc(u3nc(c3__bczp, c3__null), u3k(r_gen))), u3nq(c3__wtsg, u3nc(u3nc(u3_nul, 2),u3_nul), u3nt(c3__tsgr, u3nc(u3_nul, 3), u3k(s_gen)), u3nq(c3__sgpm, u3k(p_gen), u3nc(u3_nul, 5), u3nt(c3__tsgr, u3nc(u3_nul, 3), u3k(s_gen))))); } /*** **** ***/ static u3_noun _smcl_in(u3_noun q_gen) { u3_noun hq_gen = u3h(q_gen); u3_noun tq_gen = u3t(q_gen); if ( c3n == u3du(tq_gen) ) { return u3nt(c3__tsgr, u3nc(u3_nul, 3), u3k(hq_gen)); } else { return u3nc(c3__cnhp, u3nq(u3nc(u3_nul, 2), u3nt(c3__tsgr, u3nc(u3_nul, 3), u3k(hq_gen)), _smcl_in(tq_gen), u3_nul)); } } _open_do_pq(smcl) { if ( c3n == u3du(q_gen) ) { return u3nc(c3__zpzp, u3_nul); } else if ( u3_nul == u3t(q_gen) ) { return u3k(u3h(q_gen)); } else { return u3nt(c3__tsls, u3k(p_gen), _smcl_in(q_gen)); } } #if 0 _open_do_pq(smsm) { return u3nt(c3__tsgr, u3nq(c3__ktts, c3__v, u3_nul, 1), u3nt(c3__tsls, u3nt(c3__ktts, c3__a, u3nt(c3__tsgr, u3nc(c3__cnzy, c3__v), u3k(p_gen))), u3nt(c3__tsls, u3nt(c3__ktts, c3__b, u3nt(c3__tsgr, u3nc(c3__cnzy, c3__v), u3k(q_gen))), u3nt(c3__tsls, u3nt(c3__ktts, c3__c, u3nq(c3__cnhp, u3nc(c3__cnzy, c3__a), u3nc(c3__cnzy, c3__b), u3_nul)), u3nt(c3__wtgr, u3nt(c3__dtts, u3nc(c3__cnzy, c3__c), u3nc(c3__cnzy, c3__b)), u3nc(c3__cnzy, c3__c)))))); } #endif /* functions */ /** open **/ static u3_noun _open_in(u3_noun ter, u3_noun gen) { u3_noun p_gen, q_gen, r_gen, s_gen; if ( c3y == u3ud(gen) ) { // printf("studly\n"); // u3_err("stud m", gen); return u3m_bail(c3__exit); return u3nt(c3__cnts, u3nc(u3k(gen), u3_nul), u3_nul); } else switch ( u3h(gen) ) { default: return u3_none; case u3_nul: { return u3nt(c3__cnts, u3nc(u3k(gen), u3_nul), u3_nul); } # define _open_p(stem) \ case c3__##stem: \ return _open_in_##stem(ter, u3t(gen)); \ # define _open_pq(stem) \ case c3__##stem: \ if ( c3n == u3r_cell(u3t(gen), &p_gen, &q_gen) ) { \ return u3m_bail(c3__fail); \ } else return _open_in_##stem(ter, p_gen, q_gen); # define _open_pqr(stem) \ case c3__##stem: \ if ( c3n == u3r_trel(u3t(gen), &p_gen, &q_gen, &r_gen) ) { \ return u3m_bail(c3__fail); \ } else return _open_in_##stem(ter, p_gen, q_gen, r_gen); # define _open_pqrs(stem) \ case c3__##stem: \ if ( c3n == u3r_qual\ (u3t(gen), &p_gen, &q_gen, &r_gen, &s_gen) )\ { \ return u3m_bail(c3__fail); \ } else return _open_in_##stem(ter, p_gen, q_gen, r_gen, s_gen); _open_p (bccb); _open_p (bctr); _open_p (bczp); _open_p (brdt); _open_pq (brcb); _open_p (brhp); _open_pq (brkt); _open_pq (brls); _open_p (brwt); _open_pq (clcb); _open_p (clcn); _open_pq (clhp); _open_pqrs(clkt); _open_pqr (clls); _open_p (cltr); _open_p (clsg); _open_pq (cncb); // _open_pq (cncl); _open_pq (cndt); _open_pqrs(cnkt); _open_pq (cnhp); _open_pqr (cnls); _open_pqr (cnsg); _open_p (cnzy); _open_p (cnzz); _open_p (hxgl); _open_p (hxgr); _open_pq (ktdt); _open_pq (kthp); _open_pq (sgts); // _open_pq (sgbr); _open_pq (sggl); _open_pq (sgbc); _open_pq (sgcb); _open_pqrs(sgcn); _open_pq (sgfs); _open_pq (sgls); _open_pqr (sgpm); _open_pqrs(sgwt); _open_pq (smcl); // _open_pq (smsm); _open_pq (tsbr); _open_pq (tscl); _open_pqr (tsdt); _open_pq (tsgl); _open_pq (tshp); _open_pq (tsls); _open_p (tssg); _open_pqr (wtdt); _open_pq (wtgl); _open_pqr (wtpt); _open_pqr (wtsg); _open_p (wtzp); _open_p (wtbr); _open_pq (wthp); _open_pq (wtgr); _open_pqr (wtls); _open_pqr (wtkt); _open_p (wtpm); _open_pq (zpcb); _open_p (zpgr); } } /** rake **/ u3_noun u3qfp_rake(u3_noun gen) { u3_noun p_gen, q_gen; if ( c3y == u3ud(gen) ) { return u3nc(u3k(gen), 
u3_nul); } else switch ( u3h(gen) ) { default: return u3m_error("rake-twig"); case u3_nul: return u3nc(u3k(gen), u3_nul); case c3__cnzy: { return u3nc(u3k(u3t(gen)), u3_nul); } case c3__cnzz: { return u3k(u3t(gen)); } case c3__cnts: { if ( c3n == u3r_cell(u3t(gen), &p_gen, &q_gen) ) { return u3m_bail(c3__fail); } else { if ( u3_nul != q_gen ) { return u3m_bail(c3__fail); } else { return u3k(p_gen); } } } case c3__zpcb: { if ( c3n == u3r_cell(u3t(gen), &p_gen, &q_gen) ) { return u3m_bail(c3__fail); } else return u3qfp_rake(q_gen); } } } u3_noun u3wfp_rake(u3_noun cor) { u3_noun gen; if ( u3_none == (gen = u3r_at(u3x_sam, cor)) ) { return u3m_bail(c3__fail); } else { return u3qfp_rake(gen); } } /** hack **/ u3_noun u3qfp_hack(u3_noun ter, u3_noun gen) { u3_noun p_gen, q_gen; u3_noun ret; if ( c3y == u3du(u3h(gen)) ) { return u3nt(c3y, u3k(u3h(gen)), u3k(u3t(gen))); } else switch ( u3h(gen) ) { case c3__tsgr: u3x_cell(u3t(gen), &p_gen, &q_gen); { if ( (c3n == u3du(p_gen)) || (u3_nul != u3h(p_gen)) ) { return u3nc(c3n, u3k(gen)); } else { u3_noun pyr = u3qfp_hack(ter, q_gen); if ( c3y == u3h(pyr) ) { ret = u3nt(c3y, u3nt(c3__tsgr, u3k(p_gen), u3k(u3h(u3t(pyr)))), u3nt(c3__tsgr, u3k(p_gen), u3k(u3t(u3t(pyr))))); } else { ret = u3nc(c3n, u3nt(c3__tsgr, u3k(p_gen), u3k(u3t(pyr)))); } u3z(pyr); return ret; } } case c3__zpcb: u3x_cell(u3t(gen), &p_gen, &q_gen); { u3_noun pyr = u3qfp_hack(ter, q_gen); if ( c3y == u3h(pyr) ) { ret = u3nt(c3y, u3nt(c3__zpcb, u3k(p_gen), u3k(u3h(u3t(pyr)))), u3nt(c3__zpcb, u3k(p_gen), u3k(u3t(u3t(pyr))))); } else { ret = u3nc(c3n, u3nt(c3__zpcb, u3k(p_gen), u3k(u3t(pyr)))); } u3z(pyr); return ret; } default: break; } { u3_noun voq = _ap_open_l(ter, gen); if ( u3_none == voq ) { return u3nc(c3n, u3k(gen)); } else if ( c3y == u3r_sing(voq, gen) ) { return u3nc(c3n, voq); } else { ret = u3qfp_hack(ter, voq); u3z(voq); return ret; } } } u3_noun u3wfp_hack(u3_noun cor) { u3_noun gen; if ( u3_none == (gen = u3r_at(u3x_sam, cor)) ) { return u3m_bail(c3__fail); } else { u3_noun ter = u3r_at(u3x_con, cor); return u3qfp_hack(ter, gen); } } /* boilerplate */ u3_noun _ap_core(u3_noun ter, u3_noun gen) { u3_noun gat = u3j_hook(u3k(ter), "ap"); return u3i_molt(gat, u3x_sam, u3k(gen), 0); } /* open */ static u3_noun _ap_open_n(u3_noun ter, u3_noun gen) { u3_noun cor = _ap_core(ter, gen); return u3j_soft(cor, "open"); } static u3_noun _ap_open_l(u3_noun ter, u3_noun gen) { u3_noun pro = _open_in(ter, gen); if ( u3_none != pro ) { return pro; } else { return _ap_open_n(ter, gen); } } u3_noun u3qfp_open(u3_noun ter, u3_noun gen) { return _ap_open_l(ter, gen); } u3_noun u3wfp_open(u3_noun cor) { u3_noun gen; if ( u3_none == (gen = u3r_at(u3x_sam, cor)) ) { return u3m_bail(c3__fail); } else { u3_noun ter = u3r_at(u3x_con, cor); return u3qfp_open(ter, gen); } }
jpt4/urbit
jets/f/ap.c
C
mit
23,361
cordova-plugin-media
--------------------

To install this plugin, follow the [Command-line Interface Guide](http://cordova.apache.org/docs/en/edge/guide_cli_index.md.html#The%20Command-line%20Interface).

If you are not using the Cordova Command-line Interface, follow [Using Plugman to Manage Plugins](http://cordova.apache.org/docs/en/edge/plugin_ref_plugman.md.html).
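
For example, with the Cordova CLI the install would typically be a single command such as `cordova plugin add org.apache.cordova.media`, run from the project root (the plugin id here is inferred from this plugin's directory name; see the guide linked above for the exact id and syntax your Cordova version expects).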
Centaur/DrGuoDemo
plugins/org.apache.cordova.media/README.md
Markdown
mit
371
SecurityBundle
==============

SecurityBundle provides a tight integration of the Security component into
the Symfony full-stack framework.

Resources
---------

  * [Contributing](https://symfony.com/doc/current/contributing/index.html)
  * [Report issues](https://github.com/symfony/symfony/issues) and
    [send Pull Requests](https://github.com/symfony/symfony/pulls)
    in the [main Symfony repository](https://github.com/symfony/symfony)
MatTheCat/symfony
src/Symfony/Bundle/SecurityBundle/README.md
Markdown
mit
441
var fieldTests = require('./commonFieldTestUtils.js'); var ModelTestConfig = require('../../../modelTestConfig/LocationModelTestConfig'); module.exports = { before: function (browser) { fieldTests.before(browser); browser.adminUIInitialFormScreen.setDefaultModelTestConfig(ModelTestConfig); browser.adminUIItemScreen.setDefaultModelTestConfig(ModelTestConfig); browser.adminUIListScreen.setDefaultModelTestConfig(ModelTestConfig); }, after: fieldTests.after, 'Location field should show correctly in the initial modal': function (browser) { browser.adminUIApp.openList({ section: 'fields', list: 'Location' }); browser.adminUIListScreen.clickCreateItemButton(); browser.adminUIApp.waitForInitialFormScreen(); browser.adminUIInitialFormScreen.assertFieldUIVisible({ fields: [ { name: 'name', }, { name: 'fieldA', options: { showMore: false }, }, ], }); browser.adminUIInitialFormScreen.clickFieldUI({ fields: [ { name: 'fieldA', click: 'showMore', } ], }); browser.adminUIInitialFormScreen.assertFieldUIVisible({ fields: [ { name: 'name', }, { name: 'fieldA', options: { showMore: true }, }, ], }); browser.adminUIInitialFormScreen.cancel(); browser.adminUIApp.waitForListScreen(); }, 'Location field can be filled via the initial modal': function (browser) { browser.adminUIApp.openList({ section: 'fields', list: 'Location' }); browser.adminUIListScreen.clickCreateItemButton(); browser.adminUIApp.waitForInitialFormScreen(); browser.adminUIInitialFormScreen.clickFieldUI({ fields: [ { name: 'fieldA', click: 'showMore', } ], }); browser.adminUIInitialFormScreen.fillFieldInputs({ fields: [ { name: 'name', input: { value: 'Location Field Test 1' }, }, { name: 'fieldA', input: { 'number': 'Field A', 'name': 'Building A', 'street1': 'Street A', 'street2': 'Town A', 'suburb': 'Suburb A', 'state': 'State A', 'postcode': 'AAA AAA', 'country': 'AAA', 'geoLat': '90', 'geoLng': '100' }, }, ], }); browser.adminUIInitialFormScreen.assertFieldInputs({ fields: [ { name: 'name', input: { value: 'Location Field Test 1' }, }, { name: 'fieldA', input: { 'number': 'Field A', 'name': 'Building A', 'street1': 'Street A', 'street2': 'Town A', 'suburb': 'Suburb A', 'state': 'State A', 'postcode': 'AAA AAA', 'country': 'AAA', 'geoLat': '90', 'geoLng': '100' }, }, ], }); browser.adminUIInitialFormScreen.save(); browser.adminUIApp.waitForItemScreen(); }, 'Location field should show correctly in the edit form': function (browser) { browser.adminUIItemScreen.assertFieldUIVisible({ fields: [ { name: 'name', }, { name: 'fieldA', options: { showMore: true }, }, { name: 'fieldB', options: { showMore: false }, }, ], }); browser.adminUIItemScreen.assertFieldInputs({ fields: [ { name: 'name', input: { value: 'Location Field Test 1' }, }, { name: 'fieldA', input: { 'number': 'Field A', 'name': 'Building A', 'street1': 'Street A', 'street2': 'Town A', 'suburb': 'Suburb A', 'state': 'State A', 'postcode': 'AAA AAA', 'country': 'AAA', 'geoLat': '90', 'geoLng': '100' }, }, ], }); browser.adminUIItemScreen.clickFieldUI({ fields: [ { name: 'fieldB', click: 'showMore', } ], }); browser.adminUIItemScreen.assertFieldUIVisible({ fields: [ { name: 'name', }, { name: 'fieldA', options: { showMore: true }, }, { name: 'fieldB', options: { showMore: true }, }, ], }); }, 'Location field can be filled via the edit form': function (browser) { browser.adminUIItemScreen.fillFieldInputs({ fields: [ { name: 'name', input: { value: 'Location Field Test 1' }, }, { name: 'fieldB', input: { 'number': 'Field B', 'name': 'Building B', 'street1': 'Street B', 'street2': 'Town 
B', 'suburb': 'Suburb B', 'state': 'State B', 'postcode': 'BBB BBB', 'country': 'BBB', 'geoLat': '90', 'geoLng': '100' }, }, ], }); browser.adminUIItemScreen.save(); browser.adminUIApp.waitForItemScreen(); browser.adminUIItemScreen.assertElementTextEquals({ element: '@flashMessage', text: 'Your changes have been saved successfully' }); browser.adminUIItemScreen.assertFieldInputs({ fields: [ { name: 'name', input: { value: 'Location Field Test 1' }, }, { name: 'fieldA', input: { 'number': 'Field A', 'name': 'Building A', 'street1': 'Street A', 'street2': 'Town A', 'suburb': 'Suburb A', 'state': 'State A', 'postcode': 'AAA AAA', 'country': 'AAA', 'geoLat': '90', 'geoLng': '100' }, }, { name: 'fieldB', input: { 'number': 'Field B', 'name': 'Building B', 'street1': 'Street B', 'street2': 'Town B', 'suburb': 'Suburb B', 'state': 'State B', 'postcode': 'BBB BBB', 'country': 'BBB', 'geoLat': '90', 'geoLng': '100' }, }, ], }); }, };
ONode/keystone
test/e2e/adminUI/tests/group006Fields/testLocationField.js
JavaScript
mit
5,214
require 'rails_helper' require 'import_orphan_settings' describe ImportOrphanSettings do let(:settings) { double("settings") } describe '.settings' do it 'should return rails settings' do expect(ImportOrphanSettings.settings).to eq Settings.import end end context '::DataColumn and its subclasses' do specify '#class_name should return the class name' do expect(ImportOrphanSettings::StringColumn.new(nil, nil).class_name).to\ eq 'StringColumn' end specify '#data_type should return the class name' do expect(ImportOrphanSettings::StringColumn.new(nil, nil).data_type).to\ eq 'string' end specify '#options should return the options for the data type' do expect(ImportOrphanSettings::StringColumn.new(nil, nil).options).to\ eq Settings.import['string'] end specify '#has_options? is true if options for the type exist' do expect(ImportOrphanSettings::BooleanColumn.new(nil, nil).has_options?).to\ be true end specify '#has_options? is false if options for the type do not exist' do expect(ImportOrphanSettings::StringColumn.new(nil, nil).has_options?).to\ be false end specify '#mandatory? is false if options for the type are not mandatory' do expect(settings).to receive(:mandatory).and_return(false) expect(ImportOrphanSettings::StringColumn.new(nil, settings).mandatory?).to\ be false end specify '#mandatory? is true if options for the type are mandatory' do expect(settings).to receive(:mandatory).and_return(true) expect(ImportOrphanSettings::StringColumn.new(nil, settings).mandatory?).to\ be true end specify '#permitted_options gives allowable options for the excel cell' do expect(ImportOrphanSettings::BooleanColumn.new(nil, settings). permitted_options).to eq ["Y", "N"] end context "#valid option?" do specify 'accepts all valid options' do expect(settings).to receive(:mandatory).at_least(:once).and_return(true) ["Y", "N"].each do |val| expect(ImportOrphanSettings::BooleanColumn.new(val, settings). valid_option?).to eq true end end specify 'accepts nil if not mandatory' do expect(settings).to receive(:mandatory).and_return(false) expect(ImportOrphanSettings::BooleanColumn.new(nil, settings). valid_option?).to eq true end specify 'does not accept invalid options' do expect(settings).to receive(:mandatory).at_least(:once).and_return(true) ["string", "true"].each do |val| expect(ImportOrphanSettings::BooleanColumn.new(val, settings). valid_option?).to eq false end end end context "#to_val" do specify "ordinarily just returns a string representation of the value" do expect(ImportOrphanSettings::BooleanColumn.new("Y", settings). to_val).to eq "Y" end specify "DateColumn returns a date representation of the value" do date = Date.parse ('2014-08-08') expect(settings).to receive(:mandatory).and_return(true) expect(ImportOrphanSettings::DateColumn.new("2014-08-08", settings). to_val).to eq date end specify "DateColumn raises an error with an invalid date" do expect{ImportOrphanSettings::DateColumn.new("not a date", settings). to_val}.to raise_error end specify "IntegerColumn returns an int representation of the value" do expect(settings).to receive(:mandatory).and_return(true) expect(ImportOrphanSettings::IntegerColumn.new("7", settings). to_val).to eq 7 end end specify ".convert_option_value will translate a cell input to a db output" do expect(ImportOrphanSettings::BooleanColumn.new("Y", settings). convert_option_value).to be true end context ".parse_value" do specify "it will return its value if no options are present" do expect(ImportOrphanSettings::StringColumn.new("Fred", nil). 
parse_value).to eq "Fred" end specify "it will return a converted value if options are present" do expect(settings).to receive(:mandatory).and_return(true) expect(ImportOrphanSettings::BooleanColumn.new("Y", settings). parse_value).to be true end specify "it will raise an error if options are present but not valid" do expect(settings).to receive(:mandatory).and_return(false) expect{ImportOrphanSettings::BooleanColumn.new("no", settings). parse_value}.to raise_error end end end end
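
# Editor's sketch (not part of the original spec): a hypothetical illustration
# of how the column API exercised above is typically driven. The
# `column_settings` object is a stand-in for whatever the importer passes in;
# in these specs it is simply a double that responds to #mandatory.
require 'ostruct'

column_settings = OpenStruct.new(mandatory: true)
boolean_column  = ImportOrphanSettings::BooleanColumn.new('Y', column_settings)

boolean_column.permitted_options  # => ["Y", "N"]
boolean_column.valid_option?      # => true
boolean_column.parse_value        # => true ("Y" converted to the DB value)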
evo2013/osra
spec/lib/import_orphan_settings_spec.rb
Ruby
mit
4,694
<!-- Including InstantSearch.js library and styling -->
<script src="https://cdn.jsdelivr.net/npm/[email protected]/dist/instantsearch.min.js"></script>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/[email protected]/dist/instantsearch.min.css">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/[email protected]/dist/instantsearch-theme-algolia.min.css">

<script>
  // Instantiating InstantSearch.js with Algolia credentials
  const search = instantsearch({
    appId: '{{ site.algolia.application_id }}',
    apiKey: '{{ site.algolia.search_only_api_key }}',
    indexName: '{{ site.algolia.index_name }}',
    searchParameters: {
      restrictSearchableAttributes: [
        'title',
        'content'
      ]
    }
  });

  const hitTemplate = function(hit) {
    const url = hit.url;
    const title = hit._highlightResult.title.value;
    const content = hit._highlightResult.html.value;

    return `
      <div class="list__item">
        <article class="archive__item" itemscope itemtype="http://schema.org/CreativeWork">
          <h2 class="archive__item-title" itemprop="headline"><a href="{{ site.baseurl }}${url}">${title}</a></h2>
          <div class="archive__item-excerpt" itemprop="description">${content}</div>
        </article>
      </div>
    `;
  }

  // Adding searchbar and results widgets
  search.addWidget(
    instantsearch.widgets.searchBox({
      container: '.search-searchbar',
      {% unless site.algolia.powered_by == false %}poweredBy: true,{% endunless %}
      placeholder: '{{ site.data.ui-text[site.locale].search_placeholder_text | default: "Enter your search term..." }}'
    })
  );

  search.addWidget(
    instantsearch.widgets.hits({
      container: '.search-hits',
      templates: {
        item: hitTemplate
      }
    })
  );

  // Starting the search
  search.start();
</script>
AZURECITADEL/azurecitadel.github.io
_includes/search/algolia-search-scripts.html
HTML
mit
1,786
import inspect


def check_unexpected_kwargs(kwargs, **unexpected):
    for key, message in unexpected.items():
        if key in kwargs:
            raise ValueError(message)


def parse_kwargs(kwargs, *name_and_values, **unexpected):
    values = [kwargs.pop(name, default_value)
              for name, default_value in name_and_values]
    if kwargs:
        check_unexpected_kwargs(kwargs, **unexpected)
        caller = inspect.stack()[1]
        args = ', '.join(repr(arg) for arg in sorted(kwargs.keys()))
        message = caller[3] + \
            '() got unexpected keyword argument(s) {}'.format(args)
        raise TypeError(message)
    return tuple(values)


def assert_kwargs_empty(kwargs):
    # It only checks if kwargs is empty.
    parse_kwargs(kwargs)
ronekko/chainer
chainer/utils/argument.py
Python
mit
773
########################################################################### # # This file is auto-generated by the Perl DateTime Suite locale # generator (0.05). This code generator comes with the # DateTime::Locale distribution in the tools/ directory, and is called # generate-from-cldr. # # This file as generated from the CLDR XML locale data. See the # LICENSE.cldr file included in this distribution for license details. # # This file was generated from the source file rw.xml # The source file version number was 1.28, generated on # 2009/06/15 17:12:26. # # Do not edit this file directly. # ########################################################################### package DateTime::Locale::rw; use strict; use warnings; use utf8; use base 'DateTime::Locale::root'; sub cldr_version { return "1\.7\.1" } { my $am_pm_abbreviated = [ "AM", "PM" ]; sub am_pm_abbreviated { return $am_pm_abbreviated } } { my $date_format_full = "EEEE\,\ y\ MMMM\ dd"; sub date_format_full { return $date_format_full } } { my $date_format_long = "y\ MMMM\ d"; sub date_format_long { return $date_format_long } } { my $date_format_medium = "y\ MMM\ d"; sub date_format_medium { return $date_format_medium } } { my $date_format_short = "yy\/MM\/dd"; sub date_format_short { return $date_format_short } } { my $day_format_abbreviated = [ "mbe\.", "kab\.", "gtu\.", "kan\.", "gnu\.", "gnd\.", "cyu\." ]; sub day_format_abbreviated { return $day_format_abbreviated } } sub day_format_narrow { $_[0]->day_stand_alone_narrow() } { my $day_format_wide = [ "Kuwa\ mbere", "Kuwa\ kabiri", "Kuwa\ gatatu", "Kuwa\ kane", "Kuwa\ gatanu", "Kuwa\ gatandatu", "Ku\ cyumweru" ]; sub day_format_wide { return $day_format_wide } } sub day_stand_alone_abbreviated { $_[0]->day_format_abbreviated() } { my $day_stand_alone_narrow = [ "2", "3", "4", "5", "6", "7", "1" ]; sub day_stand_alone_narrow { return $day_stand_alone_narrow } } sub day_stand_alone_wide { $_[0]->day_format_wide() } { my $era_abbreviated = [ "BCE", "CE" ]; sub era_abbreviated { return $era_abbreviated } } sub era_narrow { $_[0]->era_abbreviated() } { my $first_day_of_week = "1"; sub first_day_of_week { return $first_day_of_week } } { my $month_format_abbreviated = [ "mut\.", "gas\.", "wer\.", "mat\.", "gic\.", "kam\.", "nya\.", "kan\.", "nze\.", "ukw\.", "ugu\.", "uku\." 
]; sub month_format_abbreviated { return $month_format_abbreviated } } sub month_format_narrow { $_[0]->month_stand_alone_narrow() } { my $month_format_wide = [ "Mutarama", "Gashyantare", "Werurwe", "Mata", "Gicuransi", "Kamena", "Nyakanga", "Kanama", "Nzeli", "Ukwakira", "Ugushyingo", "Ukuboza" ]; sub month_format_wide { return $month_format_wide } } sub month_stand_alone_abbreviated { $_[0]->month_format_abbreviated() } { my $month_stand_alone_narrow = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12" ]; sub month_stand_alone_narrow { return $month_stand_alone_narrow } } sub month_stand_alone_wide { $_[0]->month_format_wide() } { my $quarter_format_abbreviated = [ "I1", "I2", "I3", "I4" ]; sub quarter_format_abbreviated { return $quarter_format_abbreviated } } { my $quarter_format_wide = [ "igihembwe\ cya\ mbere", "igihembwe\ cya\ kabiri", "igihembwe\ cya\ gatatu", "igihembwe\ cya\ kane" ]; sub quarter_format_wide { return $quarter_format_wide } } sub quarter_stand_alone_abbreviated { $_[0]->quarter_format_abbreviated() } sub quarter_stand_alone_wide { $_[0]->quarter_format_wide() } { my $time_format_full = "HH\:mm\:ss\ zzzz"; sub time_format_full { return $time_format_full } } { my $time_format_long = "HH\:mm\:ss\ z"; sub time_format_long { return $time_format_long } } { my $time_format_medium = "HH\:mm\:ss"; sub time_format_medium { return $time_format_medium } } { my $time_format_short = "HH\:mm"; sub time_format_short { return $time_format_short } } { my $_format_for_yyQ = "Q\ yy"; sub _format_for_yyQ { return $_format_for_yyQ } } { my $_available_formats = { "yyQ" => "Q\ yy" }; sub _available_formats { return $_available_formats } } 1; __END__ =pod =encoding utf8 =head1 NAME DateTime::Locale::rw =head1 SYNOPSIS use DateTime; my $dt = DateTime->now( locale => 'rw' ); print $dt->month_name(); =head1 DESCRIPTION This is the DateTime locale package for Kinyarwanda. =head1 DATA This locale inherits from the L<DateTime::Locale::root> locale. It contains the following data. =head2 Days =head3 Wide (format) Kuwa mbere Kuwa kabiri Kuwa gatatu Kuwa kane Kuwa gatanu Kuwa gatandatu Ku cyumweru =head3 Abbreviated (format) mbe. kab. gtu. kan. gnu. gnd. cyu. =head3 Narrow (format) 2 3 4 5 6 7 1 =head3 Wide (stand-alone) Kuwa mbere Kuwa kabiri Kuwa gatatu Kuwa kane Kuwa gatanu Kuwa gatandatu Ku cyumweru =head3 Abbreviated (stand-alone) mbe. kab. gtu. kan. gnu. gnd. cyu. =head3 Narrow (stand-alone) 2 3 4 5 6 7 1 =head2 Months =head3 Wide (format) Mutarama Gashyantare Werurwe Mata Gicuransi Kamena Nyakanga Kanama Nzeli Ukwakira Ugushyingo Ukuboza =head3 Abbreviated (format) mut. gas. wer. mat. gic. kam. nya. kan. nze. ukw. ugu. uku. =head3 Narrow (format) 1 2 3 4 5 6 7 8 9 10 11 12 =head3 Wide (stand-alone) Mutarama Gashyantare Werurwe Mata Gicuransi Kamena Nyakanga Kanama Nzeli Ukwakira Ugushyingo Ukuboza =head3 Abbreviated (stand-alone) mut. gas. wer. mat. gic. kam. nya. kan. nze. ukw. ugu. uku. 
=head3 Narrow (stand-alone) 1 2 3 4 5 6 7 8 9 10 11 12 =head2 Quarters =head3 Wide (format) igihembwe cya mbere igihembwe cya kabiri igihembwe cya gatatu igihembwe cya kane =head3 Abbreviated (format) I1 I2 I3 I4 =head3 Narrow (format) 1 2 3 4 =head3 Wide (stand-alone) igihembwe cya mbere igihembwe cya kabiri igihembwe cya gatatu igihembwe cya kane =head3 Abbreviated (stand-alone) I1 I2 I3 I4 =head3 Narrow (stand-alone) 1 2 3 4 =head2 Eras =head3 Wide BCE CE =head3 Abbreviated BCE CE =head3 Narrow BCE CE =head2 Date Formats =head3 Full 2008-02-05T18:30:30 = Kuwa kabiri, 2008 Gashyantare 05 1995-12-22T09:05:02 = Kuwa gatanu, 1995 Ukuboza 22 -0010-09-15T04:44:23 = Kuwa gatandatu, -10 Nzeli 15 =head3 Long 2008-02-05T18:30:30 = 2008 Gashyantare 5 1995-12-22T09:05:02 = 1995 Ukuboza 22 -0010-09-15T04:44:23 = -10 Nzeli 15 =head3 Medium 2008-02-05T18:30:30 = 2008 gas. 5 1995-12-22T09:05:02 = 1995 uku. 22 -0010-09-15T04:44:23 = -10 nze. 15 =head3 Short 2008-02-05T18:30:30 = 08/02/05 1995-12-22T09:05:02 = 95/12/22 -0010-09-15T04:44:23 = -10/09/15 =head3 Default 2008-02-05T18:30:30 = 2008 gas. 5 1995-12-22T09:05:02 = 1995 uku. 22 -0010-09-15T04:44:23 = -10 nze. 15 =head2 Time Formats =head3 Full 2008-02-05T18:30:30 = 18:30:30 UTC 1995-12-22T09:05:02 = 09:05:02 UTC -0010-09-15T04:44:23 = 04:44:23 UTC =head3 Long 2008-02-05T18:30:30 = 18:30:30 UTC 1995-12-22T09:05:02 = 09:05:02 UTC -0010-09-15T04:44:23 = 04:44:23 UTC =head3 Medium 2008-02-05T18:30:30 = 18:30:30 1995-12-22T09:05:02 = 09:05:02 -0010-09-15T04:44:23 = 04:44:23 =head3 Short 2008-02-05T18:30:30 = 18:30 1995-12-22T09:05:02 = 09:05 -0010-09-15T04:44:23 = 04:44 =head3 Default 2008-02-05T18:30:30 = 18:30:30 1995-12-22T09:05:02 = 09:05:02 -0010-09-15T04:44:23 = 04:44:23 =head2 Datetime Formats =head3 Full 2008-02-05T18:30:30 = Kuwa kabiri, 2008 Gashyantare 05 18:30:30 UTC 1995-12-22T09:05:02 = Kuwa gatanu, 1995 Ukuboza 22 09:05:02 UTC -0010-09-15T04:44:23 = Kuwa gatandatu, -10 Nzeli 15 04:44:23 UTC =head3 Long 2008-02-05T18:30:30 = 2008 Gashyantare 5 18:30:30 UTC 1995-12-22T09:05:02 = 1995 Ukuboza 22 09:05:02 UTC -0010-09-15T04:44:23 = -10 Nzeli 15 04:44:23 UTC =head3 Medium 2008-02-05T18:30:30 = 2008 gas. 5 18:30:30 1995-12-22T09:05:02 = 1995 uku. 22 09:05:02 -0010-09-15T04:44:23 = -10 nze. 15 04:44:23 =head3 Short 2008-02-05T18:30:30 = 08/02/05 18:30 1995-12-22T09:05:02 = 95/12/22 09:05 -0010-09-15T04:44:23 = -10/09/15 04:44 =head3 Default 2008-02-05T18:30:30 = 2008 gas. 5 18:30:30 1995-12-22T09:05:02 = 1995 uku. 22 09:05:02 -0010-09-15T04:44:23 = -10 nze. 15 04:44:23 =head2 Available Formats =head3 d (d) 2008-02-05T18:30:30 = 5 1995-12-22T09:05:02 = 22 -0010-09-15T04:44:23 = 15 =head3 EEEd (d EEE) 2008-02-05T18:30:30 = 5 kab. 1995-12-22T09:05:02 = 22 gnu. -0010-09-15T04:44:23 = 15 gnd. 
=head3 Hm (H:mm) 2008-02-05T18:30:30 = 18:30 1995-12-22T09:05:02 = 9:05 -0010-09-15T04:44:23 = 4:44 =head3 hm (h:mm a) 2008-02-05T18:30:30 = 6:30 PM 1995-12-22T09:05:02 = 9:05 AM -0010-09-15T04:44:23 = 4:44 AM =head3 Hms (H:mm:ss) 2008-02-05T18:30:30 = 18:30:30 1995-12-22T09:05:02 = 9:05:02 -0010-09-15T04:44:23 = 4:44:23 =head3 hms (h:mm:ss a) 2008-02-05T18:30:30 = 6:30:30 PM 1995-12-22T09:05:02 = 9:05:02 AM -0010-09-15T04:44:23 = 4:44:23 AM =head3 M (L) 2008-02-05T18:30:30 = 2 1995-12-22T09:05:02 = 12 -0010-09-15T04:44:23 = 9 =head3 Md (M-d) 2008-02-05T18:30:30 = 2-5 1995-12-22T09:05:02 = 12-22 -0010-09-15T04:44:23 = 9-15 =head3 MEd (E, M-d) 2008-02-05T18:30:30 = kab., 2-5 1995-12-22T09:05:02 = gnu., 12-22 -0010-09-15T04:44:23 = gnd., 9-15 =head3 MMM (LLL) 2008-02-05T18:30:30 = gas. 1995-12-22T09:05:02 = uku. -0010-09-15T04:44:23 = nze. =head3 MMMd (MMM d) 2008-02-05T18:30:30 = gas. 5 1995-12-22T09:05:02 = uku. 22 -0010-09-15T04:44:23 = nze. 15 =head3 MMMEd (E MMM d) 2008-02-05T18:30:30 = kab. gas. 5 1995-12-22T09:05:02 = gnu. uku. 22 -0010-09-15T04:44:23 = gnd. nze. 15 =head3 MMMMd (MMMM d) 2008-02-05T18:30:30 = Gashyantare 5 1995-12-22T09:05:02 = Ukuboza 22 -0010-09-15T04:44:23 = Nzeli 15 =head3 MMMMEd (E MMMM d) 2008-02-05T18:30:30 = kab. Gashyantare 5 1995-12-22T09:05:02 = gnu. Ukuboza 22 -0010-09-15T04:44:23 = gnd. Nzeli 15 =head3 ms (mm:ss) 2008-02-05T18:30:30 = 30:30 1995-12-22T09:05:02 = 05:02 -0010-09-15T04:44:23 = 44:23 =head3 y (y) 2008-02-05T18:30:30 = 2008 1995-12-22T09:05:02 = 1995 -0010-09-15T04:44:23 = -10 =head3 yM (y-M) 2008-02-05T18:30:30 = 2008-2 1995-12-22T09:05:02 = 1995-12 -0010-09-15T04:44:23 = -10-9 =head3 yMEd (EEE, y-M-d) 2008-02-05T18:30:30 = kab., 2008-2-5 1995-12-22T09:05:02 = gnu., 1995-12-22 -0010-09-15T04:44:23 = gnd., -10-9-15 =head3 yMMM (y MMM) 2008-02-05T18:30:30 = 2008 gas. 1995-12-22T09:05:02 = 1995 uku. -0010-09-15T04:44:23 = -10 nze. =head3 yMMMEd (EEE, y MMM d) 2008-02-05T18:30:30 = kab., 2008 gas. 5 1995-12-22T09:05:02 = gnu., 1995 uku. 22 -0010-09-15T04:44:23 = gnd., -10 nze. 15 =head3 yMMMM (y MMMM) 2008-02-05T18:30:30 = 2008 Gashyantare 1995-12-22T09:05:02 = 1995 Ukuboza -0010-09-15T04:44:23 = -10 Nzeli =head3 yQ (y Q) 2008-02-05T18:30:30 = 2008 1 1995-12-22T09:05:02 = 1995 4 -0010-09-15T04:44:23 = -10 3 =head3 yQQQ (y QQQ) 2008-02-05T18:30:30 = 2008 I1 1995-12-22T09:05:02 = 1995 I4 -0010-09-15T04:44:23 = -10 I3 =head3 yyQ (Q yy) 2008-02-05T18:30:30 = 1 08 1995-12-22T09:05:02 = 4 95 -0010-09-15T04:44:23 = 3 -10 =head2 Miscellaneous =head3 Prefers 24 hour time? Yes =head3 Local first day of the week Kuwa mbere =head1 SUPPORT See L<DateTime::Locale>. =head1 AUTHOR Dave Rolsky <[email protected]> =head1 COPYRIGHT Copyright (c) 2008 David Rolsky. All rights reserved. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself. This module was generated from data provided by the CLDR project, see the LICENSE.cldr in this distribution for details on the CLDR data's license. =cut
Dokaponteam/ITF_Project
xampp/perl/vendor/lib/DateTime/Locale/rw.pm
Perl
mit
12,068
// RUN: %clang_cc1 %s -std=c++11 -triple x86_64-linux-gnu -emit-llvm -o - | FileCheck %s

// PR10531.

int make_a();

static union {
  int a = make_a();
  char *b;
};

int f() {
  return a;
}

// CHECK-LABEL: define internal void @__cxx_global_var_init
// CHECK-NOT: }
// CHECK: call {{.*}}@"[[CONSTRUCT_GLOBAL:.*]]C1Ev"

int g() {
  union {
    int a;
    int b = 81;
  };
  // CHECK-LABEL: define {{.*}}_Z1gv
  // CHECK-NOT: }
  // CHECK: call {{.*}}@"[[CONSTRUCT_LOCAL:.*]]C1Ev"
  return b;
}

struct A { A(); };
union B {
  int k;
  struct {
    A x;
    int y = 123;
  };
  B() {}
  B(int n) : k(n) {}
};
B b1;
B b2(0);

// CHECK-LABEL: define {{.*}} @_ZN1BC2Ei(
// CHECK-NOT: call void @_ZN1AC1Ev(
// CHECK-NOT: store i32 123,
// CHECK: store i32 %
// CHECK-NOT: call void @_ZN1AC1Ev(
// CHECK-NOT: store i32 123,
// CHECK: }

// CHECK-LABEL: define {{.*}} @_ZN1BC2Ev(
// CHECK: call void @_ZN1AC1Ev(
// CHECK: store i32 123,
// CHECK: }

// CHECK: define {{.*}}@"[[CONSTRUCT_LOCAL]]C2Ev"
// CHECK-NOT: }
// CHECK: store i32 81

// CHECK: define {{.*}}@"[[CONSTRUCT_GLOBAL]]C2Ev"
// CHECK-NOT: }
// CHECK: call {{.*}}@_Z6make_a
Rapier-Foundation/rapier-script
src/rapierlang/test/CodeGenCXX/member-init-anon-union.cpp
C++
mit
1,138
# Use this file to set configuration options for Jasminerice, all of these are initialized to their respective defaults,
# but you can change them here.
if defined?(Jasminerice) == 'constant'
  Jasminerice.setup do |config|
    # Tell Jasminerice to automatically mount itself in your application. If set to false, you must manually mount the
    # engine in order to use Jasminerice.
    #config.mount = true

    # If automatically mounting Jasminerice, specify the location that it should be mounted at. Defaults to /jasmine, so
    # you could access your tests at http://YOUR_SERVER_URL/jasmine
    #config.mount_at = '/jasmine'

    # Specify a path where your specs can be found. Defaults to 'spec'
    #config.spec_path = 'spec'

    # Specify a path where your fixtures can be found. Defaults to 'spec/javascripts/fixtures'
    #config.fixture_path = 'spec/javascripts/fixtures'
  end
end
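
# Editor's note (not part of the original template): the comments above say the
# engine must be mounted manually when config.mount is false. Assuming the gem
# exposes the usual Rails engine constant Jasminerice::Engine (an assumption,
# not verified here), a host application's config/routes.rb would then contain
# something along the lines of:
#
#   Rails.application.routes.draw do
#     mount Jasminerice::Engine => '/jasmine'
#   end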
heroku/jasminerice
lib/generators/jasminerice/templates/jasminerice.rb
Ruby
mit
900
//
//  ____ _ __ _ _____
//  / ___\ /_\ /\/\ /\ /\ /__\ /_\ \_ \
//  \ \ //_\\ / \ / / \ \ / \// //_\\ / /\/
//  /\_\ \ / _ \ / /\/\ \ \ \_/ / / _ \ / _ \ /\/ /_
//  \____/ \_/ \_/ \/ \/ \___/ \/ \_/ \_/ \_/ \____/
//
//  Copyright Samurai development team and other contributors
//
//  http://www.samurai-framework.com
//
//  Permission is hereby granted, free of charge, to any person obtaining a copy
//  of this software and associated documentation files (the "Software"), to deal
//  in the Software without restriction, including without limitation the rights
//  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
//  copies of the Software, and to permit persons to whom the Software is
//  furnished to do so, subject to the following conditions:
//
//  The above copyright notice and this permission notice shall be included in
//  all copies or substantial portions of the Software.
//
//  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
//  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
//  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
//  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
//  THE SOFTWARE.
//

#import "Samurai_Core.h"
#import "Samurai_Event.h"

#import "Samurai_ViewConfig.h"
#import "Samurai_ViewCore.h"
#import "Samurai_ViewEvent.h"

#if (TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR)

#import "Samurai_UIView.h"

#pragma mark -

@interface SamuraiUISwitchAgent : NSObject

@prop_unsafe( UISwitch *, switchh );

- (void)enableEvents;
- (void)disableEvents;

@end

#pragma mark -

@interface UISwitch(Samurai)

@signal( eventValueChanged )

- (SamuraiUISwitchAgent *)switchAgent;

@end

#endif // #if (TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR)
Alan007/samurai-native
samurai-framework/samurai-view/modules/view-component/Samurai_UISwitch.h
C
mit
2,039
/** * vee-validate v2.0.0-rc.21 * (c) 2017 Abdelrahman Awad * @license MIT */ (function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : typeof define === 'function' && define.amd ? define(factory) : (global.VeeValidate = factory()); }(this, (function () { 'use strict'; var MILLISECONDS_IN_HOUR = 3600000; var MILLISECONDS_IN_MINUTE = 60000; var DEFAULT_ADDITIONAL_DIGITS = 2; var patterns = { dateTimeDelimeter: /[T ]/, plainTime: /:/, // year tokens YY: /^(\d{2})$/, YYY: [ /^([+-]\d{2})$/, // 0 additional digits /^([+-]\d{3})$/, // 1 additional digit /^([+-]\d{4})$/ // 2 additional digits ], YYYY: /^(\d{4})/, YYYYY: [ /^([+-]\d{4})/, // 0 additional digits /^([+-]\d{5})/, // 1 additional digit /^([+-]\d{6})/ // 2 additional digits ], // date tokens MM: /^-(\d{2})$/, DDD: /^-?(\d{3})$/, MMDD: /^-?(\d{2})-?(\d{2})$/, Www: /^-?W(\d{2})$/, WwwD: /^-?W(\d{2})-?(\d{1})$/, HH: /^(\d{2}([.,]\d*)?)$/, HHMM: /^(\d{2}):?(\d{2}([.,]\d*)?)$/, HHMMSS: /^(\d{2}):?(\d{2}):?(\d{2}([.,]\d*)?)$/, // timezone tokens timezone: /([Z+-].*)$/, timezoneZ: /^(Z)$/, timezoneHH: /^([+-])(\d{2})$/, timezoneHHMM: /^([+-])(\d{2}):?(\d{2})$/ }; /** * @name toDate * @category Common Helpers * @summary Convert the given argument to an instance of Date. * * @description * Convert the given argument to an instance of Date. * * If the argument is an instance of Date, the function returns its clone. * * If the argument is a number, it is treated as a timestamp. * * If an argument is a string, the function tries to parse it. * Function accepts complete ISO 8601 formats as well as partial implementations. * ISO 8601: http://en.wikipedia.org/wiki/ISO_8601 * * If the argument is null, it is treated as an invalid date. * * If all above fails, the function passes the given argument to Date constructor. * * **Note**: *all* Date arguments passed to any *date-fns* function is processed by `toDate`. * All *date-fns* functions will throw `RangeError` if `options.additionalDigits` is not 0, 1, 2 or undefined. * * @param {*} argument - the value to convert * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - the additional number of digits in the extended year format * @returns {Date} the parsed date in the local time zone * @throws {TypeError} 1 argument required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * * @example * // Convert string '2014-02-11T11:30:30' to date: * var result = toDate('2014-02-11T11:30:30') * //=> Tue Feb 11 2014 11:30:30 * * @example * // Convert string '+02014101' to date, * // if the additional number of digits in the extended year format is 1: * var result = toDate('+02014101', {additionalDigits: 1}) * //=> Fri Apr 11 2014 00:00:00 */ function toDate (argument, dirtyOptions) { if (arguments.length < 1) { throw new TypeError('1 argument required, but only ' + arguments.length + ' present') } if (argument === null) { return new Date(NaN) } var options = dirtyOptions || {}; var additionalDigits = options.additionalDigits === undefined ? 
DEFAULT_ADDITIONAL_DIGITS : Number(options.additionalDigits); if (additionalDigits !== 2 && additionalDigits !== 1 && additionalDigits !== 0) { throw new RangeError('additionalDigits must be 0, 1 or 2') } // Clone the date if (argument instanceof Date) { // Prevent the date to lose the milliseconds when passed to new Date() in IE10 return new Date(argument.getTime()) } else if (typeof argument !== 'string') { return new Date(argument) } var dateStrings = splitDateString(argument); var parseYearResult = parseYear(dateStrings.date, additionalDigits); var year = parseYearResult.year; var restDateString = parseYearResult.restDateString; var date = parseDate(restDateString, year); if (date) { var timestamp = date.getTime(); var time = 0; var offset; if (dateStrings.time) { time = parseTime(dateStrings.time); } if (dateStrings.timezone) { offset = parseTimezone(dateStrings.timezone); } else { // get offset accurate to hour in timezones that change offset offset = new Date(timestamp + time).getTimezoneOffset(); offset = new Date(timestamp + time + offset * MILLISECONDS_IN_MINUTE).getTimezoneOffset(); } return new Date(timestamp + time + offset * MILLISECONDS_IN_MINUTE) } else { return new Date(argument) } } function splitDateString (dateString) { var dateStrings = {}; var array = dateString.split(patterns.dateTimeDelimeter); var timeString; if (patterns.plainTime.test(array[0])) { dateStrings.date = null; timeString = array[0]; } else { dateStrings.date = array[0]; timeString = array[1]; } if (timeString) { var token = patterns.timezone.exec(timeString); if (token) { dateStrings.time = timeString.replace(token[1], ''); dateStrings.timezone = token[1]; } else { dateStrings.time = timeString; } } return dateStrings } function parseYear (dateString, additionalDigits) { var patternYYY = patterns.YYY[additionalDigits]; var patternYYYYY = patterns.YYYYY[additionalDigits]; var token; // YYYY or ±YYYYY token = patterns.YYYY.exec(dateString) || patternYYYYY.exec(dateString); if (token) { var yearString = token[1]; return { year: parseInt(yearString, 10), restDateString: dateString.slice(yearString.length) } } // YY or ±YYY token = patterns.YY.exec(dateString) || patternYYY.exec(dateString); if (token) { var centuryString = token[1]; return { year: parseInt(centuryString, 10) * 100, restDateString: dateString.slice(centuryString.length) } } // Invalid ISO-formatted year return { year: null } } function parseDate (dateString, year) { // Invalid ISO-formatted year if (year === null) { return null } var token; var date; var month; var week; // YYYY if (dateString.length === 0) { date = new Date(0); date.setUTCFullYear(year); return date } // YYYY-MM token = patterns.MM.exec(dateString); if (token) { date = new Date(0); month = parseInt(token[1], 10) - 1; date.setUTCFullYear(year, month); return date } // YYYY-DDD or YYYYDDD token = patterns.DDD.exec(dateString); if (token) { date = new Date(0); var dayOfYear = parseInt(token[1], 10); date.setUTCFullYear(year, 0, dayOfYear); return date } // YYYY-MM-DD or YYYYMMDD token = patterns.MMDD.exec(dateString); if (token) { date = new Date(0); month = parseInt(token[1], 10) - 1; var day = parseInt(token[2], 10); date.setUTCFullYear(year, month, day); return date } // YYYY-Www or YYYYWww token = patterns.Www.exec(dateString); if (token) { week = parseInt(token[1], 10) - 1; return dayOfISOYear(year, week) } // YYYY-Www-D or YYYYWwwD token = patterns.WwwD.exec(dateString); if (token) { week = parseInt(token[1], 10) - 1; var dayOfWeek = parseInt(token[2], 10) - 1; return 
dayOfISOYear(year, week, dayOfWeek) } // Invalid ISO-formatted date return null } function parseTime (timeString) { var token; var hours; var minutes; // hh token = patterns.HH.exec(timeString); if (token) { hours = parseFloat(token[1].replace(',', '.')); return (hours % 24) * MILLISECONDS_IN_HOUR } // hh:mm or hhmm token = patterns.HHMM.exec(timeString); if (token) { hours = parseInt(token[1], 10); minutes = parseFloat(token[2].replace(',', '.')); return (hours % 24) * MILLISECONDS_IN_HOUR + minutes * MILLISECONDS_IN_MINUTE } // hh:mm:ss or hhmmss token = patterns.HHMMSS.exec(timeString); if (token) { hours = parseInt(token[1], 10); minutes = parseInt(token[2], 10); var seconds = parseFloat(token[3].replace(',', '.')); return (hours % 24) * MILLISECONDS_IN_HOUR + minutes * MILLISECONDS_IN_MINUTE + seconds * 1000 } // Invalid ISO-formatted time return null } function parseTimezone (timezoneString) { var token; var absoluteOffset; // Z token = patterns.timezoneZ.exec(timezoneString); if (token) { return 0 } // ±hh token = patterns.timezoneHH.exec(timezoneString); if (token) { absoluteOffset = parseInt(token[2], 10) * 60; return (token[1] === '+') ? -absoluteOffset : absoluteOffset } // ±hh:mm or ±hhmm token = patterns.timezoneHHMM.exec(timezoneString); if (token) { absoluteOffset = parseInt(token[2], 10) * 60 + parseInt(token[3], 10); return (token[1] === '+') ? -absoluteOffset : absoluteOffset } return 0 } function dayOfISOYear (isoYear, week, day) { week = week || 0; day = day || 0; var date = new Date(0); date.setUTCFullYear(isoYear, 0, 4); var fourthOfJanuaryDay = date.getUTCDay() || 7; var diff = week * 7 + day + 1 - fourthOfJanuaryDay; date.setUTCDate(date.getUTCDate() + diff); return date } /** * @name addMilliseconds * @category Millisecond Helpers * @summary Add the specified number of milliseconds to the given date. * * @description * Add the specified number of milliseconds to the given date. * * @param {Date|String|Number} date - the date to be changed * @param {Number} amount - the amount of milliseconds to be added * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - passed to `toDate`. See [toDate]{@link https://date-fns.org/docs/toDate} * @returns {Date} the new date with the milliseconds added * @throws {TypeError} 2 arguments required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * * @example * // Add 750 milliseconds to 10 July 2014 12:45:30.000: * var result = addMilliseconds(new Date(2014, 6, 10, 12, 45, 30, 0), 750) * //=> Thu Jul 10 2014 12:45:30.750 */ function addMilliseconds (dirtyDate, dirtyAmount, dirtyOptions) { if (arguments.length < 2) { throw new TypeError('2 arguments required, but only ' + arguments.length + ' present') } var timestamp = toDate(dirtyDate, dirtyOptions).getTime(); var amount = Number(dirtyAmount); return new Date(timestamp + amount) } function cloneObject (dirtyObject) { dirtyObject = dirtyObject || {}; var object = {}; for (var property in dirtyObject) { if (dirtyObject.hasOwnProperty(property)) { object[property] = dirtyObject[property]; } } return object } var MILLISECONDS_IN_MINUTE$2 = 60000; /** * @name addMinutes * @category Minute Helpers * @summary Add the specified number of minutes to the given date. * * @description * Add the specified number of minutes to the given date. 
* * @param {Date|String|Number} date - the date to be changed * @param {Number} amount - the amount of minutes to be added * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - passed to `toDate`. See [toDate]{@link https://date-fns.org/docs/toDate} * @returns {Date} the new date with the minutes added * @throws {TypeError} 2 arguments required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * * @example * // Add 30 minutes to 10 July 2014 12:00:00: * var result = addMinutes(new Date(2014, 6, 10, 12, 0), 30) * //=> Thu Jul 10 2014 12:30:00 */ function addMinutes (dirtyDate, dirtyAmount, dirtyOptions) { if (arguments.length < 2) { throw new TypeError('2 arguments required, but only ' + arguments.length + ' present') } var amount = Number(dirtyAmount); return addMilliseconds(dirtyDate, amount * MILLISECONDS_IN_MINUTE$2, dirtyOptions) } /** * @name isValid * @category Common Helpers * @summary Is the given date valid? * * @description * Returns false if argument is Invalid Date and true otherwise. * Argument is converted to Date using `toDate`. See [toDate]{@link https://date-fns.org/docs/toDate} * Invalid Date is a Date, whose time value is NaN. * * Time value of Date: http://es5.github.io/#x15.9.1.1 * * @param {*} date - the date to check * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - passed to `toDate`. See [toDate]{@link https://date-fns.org/docs/toDate} * @returns {Boolean} the date is valid * @throws {TypeError} 1 argument required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * * @example * // For the valid date: * var result = isValid(new Date(2014, 1, 31)) * //=> true * * @example * // For the value, convertable into a date: * var result = isValid('2014-02-31') * //=> true * * @example * // For the invalid date: * var result = isValid(new Date('')) * //=> false */ function isValid (dirtyDate, dirtyOptions) { if (arguments.length < 1) { throw new TypeError('1 argument required, but only ' + arguments.length + ' present') } var date = toDate(dirtyDate, dirtyOptions); return !isNaN(date) } var formatDistanceLocale = { lessThanXSeconds: { one: 'less than a second', other: 'less than {{count}} seconds' }, xSeconds: { one: '1 second', other: '{{count}} seconds' }, halfAMinute: 'half a minute', lessThanXMinutes: { one: 'less than a minute', other: 'less than {{count}} minutes' }, xMinutes: { one: '1 minute', other: '{{count}} minutes' }, aboutXHours: { one: 'about 1 hour', other: 'about {{count}} hours' }, xHours: { one: '1 hour', other: '{{count}} hours' }, xDays: { one: '1 day', other: '{{count}} days' }, aboutXMonths: { one: 'about 1 month', other: 'about {{count}} months' }, xMonths: { one: '1 month', other: '{{count}} months' }, aboutXYears: { one: 'about 1 year', other: 'about {{count}} years' }, xYears: { one: '1 year', other: '{{count}} years' }, overXYears: { one: 'over 1 year', other: 'over {{count}} years' }, almostXYears: { one: 'almost 1 year', other: 'almost {{count}} years' } }; function formatDistance (token, count, options) { options = options || {}; var result; if (typeof formatDistanceLocale[token] === 'string') { result = formatDistanceLocale[token]; } else if (count === 1) { result = formatDistanceLocale[token].one; } else { result = formatDistanceLocale[token].other.replace('{{count}}', count); } if 
(options.addSuffix) { if (options.comparison > 0) { return 'in ' + result } else { return result + ' ago' } } return result } var tokensToBeShortedPattern = /MMMM|MM|DD|dddd/g; function buildShortLongFormat (format) { return format.replace(tokensToBeShortedPattern, function (token) { return token.slice(1) }) } /** * @name buildFormatLongFn * @category Locale Helpers * @summary Build `formatLong` property for locale used by `format`, `formatRelative` and `parse` functions. * * @description * Build `formatLong` property for locale used by `format`, `formatRelative` and `parse` functions. * Returns a function which takes one of the following tokens as the argument: * `'LTS'`, `'LT'`, `'L'`, `'LL'`, `'LLL'`, `'l'`, `'ll'`, `'lll'`, `'llll'` * and returns a long format string written as `format` token strings. * See [format]{@link https://date-fns.org/docs/format} * * `'l'`, `'ll'`, `'lll'` and `'llll'` formats are built automatically * by shortening some of the tokens from corresponding unshortened formats * (e.g., if `LL` is `'MMMM DD YYYY'` then `ll` will be `MMM D YYYY`) * * @param {Object} obj - the object with long formats written as `format` token strings * @param {String} obj.LT - time format: hours and minutes * @param {String} obj.LTS - time format: hours, minutes and seconds * @param {String} obj.L - short date format: numeric day, month and year * @param {String} [obj.l] - short date format: numeric day, month and year (shortened) * @param {String} obj.LL - long date format: day, month in words, and year * @param {String} [obj.ll] - long date format: day, month in words, and year (shortened) * @param {String} obj.LLL - long date and time format * @param {String} [obj.lll] - long date and time format (shortened) * @param {String} obj.LLLL - long date, time and weekday format * @param {String} [obj.llll] - long date, time and weekday format (shortened) * @returns {Function} `formatLong` property of the locale * * @example * // For `en-US` locale: * locale.formatLong = buildFormatLongFn({ * LT: 'h:mm aa', * LTS: 'h:mm:ss aa', * L: 'MM/DD/YYYY', * LL: 'MMMM D YYYY', * LLL: 'MMMM D YYYY h:mm aa', * LLLL: 'dddd, MMMM D YYYY h:mm aa' * }) */ function buildFormatLongFn (obj) { var formatLongLocale = { LTS: obj.LTS, LT: obj.LT, L: obj.L, LL: obj.LL, LLL: obj.LLL, LLLL: obj.LLLL, l: obj.l || buildShortLongFormat(obj.L), ll: obj.ll || buildShortLongFormat(obj.LL), lll: obj.lll || buildShortLongFormat(obj.LLL), llll: obj.llll || buildShortLongFormat(obj.LLLL) }; return function (token) { return formatLongLocale[token] } } var formatLong = buildFormatLongFn({ LT: 'h:mm aa', LTS: 'h:mm:ss aa', L: 'MM/DD/YYYY', LL: 'MMMM D YYYY', LLL: 'MMMM D YYYY h:mm aa', LLLL: 'dddd, MMMM D YYYY h:mm aa' }); var formatRelativeLocale = { lastWeek: '[last] dddd [at] LT', yesterday: '[yesterday at] LT', today: '[today at] LT', tomorrow: '[tomorrow at] LT', nextWeek: 'dddd [at] LT', other: 'L' }; function formatRelative (token, date, baseDate, options) { return formatRelativeLocale[token] } /** * @name buildLocalizeFn * @category Locale Helpers * @summary Build `localize.weekday`, `localize.month` and `localize.timeOfDay` properties for the locale. * * @description * Build `localize.weekday`, `localize.month` and `localize.timeOfDay` properties for the locale * used by `format` function. * If no `type` is supplied to the options of the resulting function, `defaultType` will be used (see example). * * `localize.weekday` function takes the weekday index as argument (0 - Sunday). 
* `localize.month` takes the month index (0 - January). * `localize.timeOfDay` takes the hours. Use `indexCallback` to convert them to an array index (see example). * * @param {Object} values - the object with arrays of values * @param {String} defaultType - the default type for the localize function * @param {Function} [indexCallback] - the callback which takes the resulting function argument * and converts it into value array index * @returns {Function} the resulting function * * @example * var timeOfDayValues = { * uppercase: ['AM', 'PM'], * lowercase: ['am', 'pm'], * long: ['a.m.', 'p.m.'] * } * locale.localize.timeOfDay = buildLocalizeFn(timeOfDayValues, 'long', function (hours) { * // 0 is a.m. array index, 1 is p.m. array index * return (hours / 12) >= 1 ? 1 : 0 * }) * locale.localize.timeOfDay(16, {type: 'uppercase'}) //=> 'PM' * locale.localize.timeOfDay(5) //=> 'a.m.' */ function buildLocalizeFn (values, defaultType, indexCallback) { return function (dirtyIndex, dirtyOptions) { var options = dirtyOptions || {}; var type = options.type ? String(options.type) : defaultType; var valuesArray = values[type] || values[defaultType]; var index = indexCallback ? indexCallback(Number(dirtyIndex)) : Number(dirtyIndex); return valuesArray[index] } } /** * @name buildLocalizeArrayFn * @category Locale Helpers * @summary Build `localize.weekdays`, `localize.months` and `localize.timesOfDay` properties for the locale. * * @description * Build `localize.weekdays`, `localize.months` and `localize.timesOfDay` properties for the locale. * If no `type` is supplied to the options of the resulting function, `defaultType` will be used (see example). * * @param {Object} values - the object with arrays of values * @param {String} defaultType - the default type for the localize function * @returns {Function} the resulting function * * @example * var weekdayValues = { * narrow: ['Su', 'Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa'], * short: ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'], * long: ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'] * } * locale.localize.weekdays = buildLocalizeArrayFn(weekdayValues, 'long') * locale.localize.weekdays({type: 'narrow'}) //=> ['Su', 'Mo', ...] * locale.localize.weekdays() //=> ['Sunday', 'Monday', ...] */ function buildLocalizeArrayFn (values, defaultType) { return function (dirtyOptions) { var options = dirtyOptions || {}; var type = options.type ? String(options.type) : defaultType; return values[type] || values[defaultType] } } // Note: in English, the names of days of the week and months are capitalized. // If you are making a new locale based on this one, check if the same is true for the language you're working on. // Generally, formatted dates should look like they are in the middle of a sentence, // e.g. in Spanish language the weekdays and months should be in the lowercase. var weekdayValues = { narrow: ['Su', 'Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa'], short: ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'], long: ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'] }; var monthValues = { short: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'], long: ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December'] }; // `timeOfDay` is used to designate which part of the day it is, when used with 12-hour clock. // Use the system which is used the most commonly in the locale. 
// For example, if the country doesn't use a.m./p.m., you can use `night`/`morning`/`afternoon`/`evening`: // // var timeOfDayValues = { // any: ['in the night', 'in the morning', 'in the afternoon', 'in the evening'] // } // // And later: // // var localize = { // // The callback takes the hours as the argument and returns the array index // timeOfDay: buildLocalizeFn(timeOfDayValues, 'any', function (hours) { // if (hours >= 17) { // return 3 // } else if (hours >= 12) { // return 2 // } else if (hours >= 4) { // return 1 // } else { // return 0 // } // }), // timesOfDay: buildLocalizeArrayFn(timeOfDayValues, 'any') // } var timeOfDayValues = { uppercase: ['AM', 'PM'], lowercase: ['am', 'pm'], long: ['a.m.', 'p.m.'] }; function ordinalNumber (dirtyNumber, dirtyOptions) { var number = Number(dirtyNumber); // If ordinal numbers depend on context, for example, // if they are different for different grammatical genders, // use `options.unit`: // // var options = dirtyOptions || {} // var unit = String(options.unit) // // where `unit` can be 'month', 'quarter', 'week', 'isoWeek', 'dayOfYear', // 'dayOfMonth' or 'dayOfWeek' var rem100 = number % 100; if (rem100 > 20 || rem100 < 10) { switch (rem100 % 10) { case 1: return number + 'st' case 2: return number + 'nd' case 3: return number + 'rd' } } return number + 'th' } var localize = { ordinalNumber: ordinalNumber, weekday: buildLocalizeFn(weekdayValues, 'long'), weekdays: buildLocalizeArrayFn(weekdayValues, 'long'), month: buildLocalizeFn(monthValues, 'long'), months: buildLocalizeArrayFn(monthValues, 'long'), timeOfDay: buildLocalizeFn(timeOfDayValues, 'long', function (hours) { return (hours / 12) >= 1 ? 1 : 0 }), timesOfDay: buildLocalizeArrayFn(timeOfDayValues, 'long') }; /** * @name buildMatchFn * @category Locale Helpers * @summary Build `match.weekdays`, `match.months` and `match.timesOfDay` properties for the locale. * * @description * Build `match.weekdays`, `match.months` and `match.timesOfDay` properties for the locale used by `parse` function. * If no `type` is supplied to the options of the resulting function, `defaultType` will be used (see example). * The result of the match function will be passed into corresponding parser function * (`match.weekday`, `match.month` or `match.timeOfDay` respectively. See `buildParseFn`). * * @param {Object} values - the object with RegExps * @param {String} defaultType - the default type for the match function * @returns {Function} the resulting function * * @example * var matchWeekdaysPatterns = { * narrow: /^(su|mo|tu|we|th|fr|sa)/i, * short: /^(sun|mon|tue|wed|thu|fri|sat)/i, * long: /^(sunday|monday|tuesday|wednesday|thursday|friday|saturday)/i * } * locale.match.weekdays = buildMatchFn(matchWeekdaysPatterns, 'long') * locale.match.weekdays('Sunday', {type: 'narrow'}) //=> ['Su', 'Su', ...] * locale.match.weekdays('Sunday') //=> ['Sunday', 'Sunday', ...] */ function buildMatchFn (patterns, defaultType) { return function (dirtyString, dirtyOptions) { var options = dirtyOptions || {}; var type = options.type ? String(options.type) : defaultType; var pattern = patterns[type] || patterns[defaultType]; var string = String(dirtyString); return string.match(pattern) } } /** * @name buildParseFn * @category Locale Helpers * @summary Build `match.weekday`, `match.month` and `match.timeOfDay` properties for the locale. * * @description * Build `match.weekday`, `match.month` and `match.timeOfDay` properties for the locale used by `parse` function. 
* The argument of the resulting function is the result of the corresponding match function * (`match.weekdays`, `match.months` or `match.timesOfDay` respectively. See `buildMatchFn`). * * @param {Object} values - the object with arrays of RegExps * @param {String} defaultType - the default type for the parser function * @returns {Function} the resulting function * * @example * var parseWeekdayPatterns = { * any: [/^su/i, /^m/i, /^tu/i, /^w/i, /^th/i, /^f/i, /^sa/i] * } * locale.match.weekday = buildParseFn(matchWeekdaysPatterns, 'long') * var matchResult = locale.match.weekdays('Friday') * locale.match.weekday(matchResult) //=> 5 */ function buildParseFn (patterns, defaultType) { return function (matchResult, dirtyOptions) { var options = dirtyOptions || {}; var type = options.type ? String(options.type) : defaultType; var patternsArray = patterns[type] || patterns[defaultType]; var string = matchResult[1]; return patternsArray.findIndex(function (pattern) { return pattern.test(string) }) } } /** * @name buildMatchPatternFn * @category Locale Helpers * @summary Build match function from a single RegExp. * * @description * Build match function from a single RegExp. * Usually used for building `match.ordinalNumbers` property of the locale. * * @param {Object} pattern - the RegExp * @returns {Function} the resulting function * * @example * locale.match.ordinalNumbers = buildMatchPatternFn(/^(\d+)(th|st|nd|rd)?/i) * locale.match.ordinalNumbers('3rd') //=> ['3rd', '3', 'rd', ...] */ function buildMatchPatternFn (pattern) { return function (dirtyString) { var string = String(dirtyString); return string.match(pattern) } } /** * @name parseDecimal * @category Locale Helpers * @summary Parses the match result into decimal number. * * @description * Parses the match result into decimal number. * Uses the string matched with the first set of parentheses of match RegExp. * * @param {Array} matchResult - the object returned by matching function * @returns {Number} the parsed value * * @example * locale.match = { * ordinalNumbers: (dirtyString) { * return String(dirtyString).match(/^(\d+)(th|st|nd|rd)?/i) * }, * ordinalNumber: parseDecimal * } */ function parseDecimal (matchResult) { return parseInt(matchResult[1], 10) } var matchOrdinalNumbersPattern = /^(\d+)(th|st|nd|rd)?/i; var matchWeekdaysPatterns = { narrow: /^(su|mo|tu|we|th|fr|sa)/i, short: /^(sun|mon|tue|wed|thu|fri|sat)/i, long: /^(sunday|monday|tuesday|wednesday|thursday|friday|saturday)/i }; var parseWeekdayPatterns = { any: [/^su/i, /^m/i, /^tu/i, /^w/i, /^th/i, /^f/i, /^sa/i] }; var matchMonthsPatterns = { short: /^(jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)/i, long: /^(january|february|march|april|may|june|july|august|september|october|november|december)/i }; var parseMonthPatterns = { any: [/^ja/i, /^f/i, /^mar/i, /^ap/i, /^may/i, /^jun/i, /^jul/i, /^au/i, /^s/i, /^o/i, /^n/i, /^d/i] }; // `timeOfDay` is used to designate which part of the day it is, when used with 12-hour clock. // Use the system which is used the most commonly in the locale. // For example, if the country doesn't use a.m./p.m., you can use `night`/`morning`/`afternoon`/`evening`: // // var matchTimesOfDayPatterns = { // long: /^((in the)? 
(night|morning|afternoon|evening?))/i // } // // var parseTimeOfDayPatterns = { // any: [/(night|morning)/i, /(afternoon|evening)/i] // } var matchTimesOfDayPatterns = { short: /^(am|pm)/i, long: /^([ap]\.?\s?m\.?)/i }; var parseTimeOfDayPatterns = { any: [/^a/i, /^p/i] }; var match = { ordinalNumbers: buildMatchPatternFn(matchOrdinalNumbersPattern), ordinalNumber: parseDecimal, weekdays: buildMatchFn(matchWeekdaysPatterns, 'long'), weekday: buildParseFn(parseWeekdayPatterns, 'any'), months: buildMatchFn(matchMonthsPatterns, 'long'), month: buildParseFn(parseMonthPatterns, 'any'), timesOfDay: buildMatchFn(matchTimesOfDayPatterns, 'long'), timeOfDay: buildParseFn(parseTimeOfDayPatterns, 'any') }; /** * @type {Locale} * @category Locales * @summary English locale (United States). * @language English * @iso-639-2 eng */ var locale = { formatDistance: formatDistance, formatLong: formatLong, formatRelative: formatRelative, localize: localize, match: match, options: { weekStartsOn: 0 /* Sunday */, firstWeekContainsDate: 1 } }; var MILLISECONDS_IN_DAY$1 = 86400000; // This function will be a part of public API when UTC function will be implemented. // See issue: https://github.com/date-fns/date-fns/issues/376 function getUTCDayOfYear (dirtyDate, dirtyOptions) { var date = toDate(dirtyDate, dirtyOptions); var timestamp = date.getTime(); date.setUTCMonth(0, 1); date.setUTCHours(0, 0, 0, 0); var startOfYearTimestamp = date.getTime(); var difference = timestamp - startOfYearTimestamp; return Math.floor(difference / MILLISECONDS_IN_DAY$1) + 1 } // This function will be a part of public API when UTC function will be implemented. // See issue: https://github.com/date-fns/date-fns/issues/376 function startOfUTCISOWeek (dirtyDate, dirtyOptions) { var weekStartsOn = 1; var date = toDate(dirtyDate, dirtyOptions); var day = date.getUTCDay(); var diff = (day < weekStartsOn ? 7 : 0) + day - weekStartsOn; date.setUTCDate(date.getUTCDate() - diff); date.setUTCHours(0, 0, 0, 0); return date } // This function will be a part of public API when UTC function will be implemented. // See issue: https://github.com/date-fns/date-fns/issues/376 function getUTCISOWeekYear (dirtyDate, dirtyOptions) { var date = toDate(dirtyDate, dirtyOptions); var year = date.getUTCFullYear(); var fourthOfJanuaryOfNextYear = new Date(0); fourthOfJanuaryOfNextYear.setUTCFullYear(year + 1, 0, 4); fourthOfJanuaryOfNextYear.setUTCHours(0, 0, 0, 0); var startOfNextYear = startOfUTCISOWeek(fourthOfJanuaryOfNextYear, dirtyOptions); var fourthOfJanuaryOfThisYear = new Date(0); fourthOfJanuaryOfThisYear.setUTCFullYear(year, 0, 4); fourthOfJanuaryOfThisYear.setUTCHours(0, 0, 0, 0); var startOfThisYear = startOfUTCISOWeek(fourthOfJanuaryOfThisYear, dirtyOptions); if (date.getTime() >= startOfNextYear.getTime()) { return year + 1 } else if (date.getTime() >= startOfThisYear.getTime()) { return year } else { return year - 1 } } // This function will be a part of public API when UTC function will be implemented. // See issue: https://github.com/date-fns/date-fns/issues/376 function startOfUTCISOWeekYear (dirtyDate, dirtyOptions) { var year = getUTCISOWeekYear(dirtyDate, dirtyOptions); var fourthOfJanuary = new Date(0); fourthOfJanuary.setUTCFullYear(year, 0, 4); fourthOfJanuary.setUTCHours(0, 0, 0, 0); var date = startOfUTCISOWeek(fourthOfJanuary, dirtyOptions); return date } var MILLISECONDS_IN_WEEK$2 = 604800000; // This function will be a part of public API when UTC function will be implemented. 
// See issue: https://github.com/date-fns/date-fns/issues/376 function getUTCISOWeek (dirtyDate, dirtyOptions) { var date = toDate(dirtyDate, dirtyOptions); var diff = startOfUTCISOWeek(date, dirtyOptions).getTime() - startOfUTCISOWeekYear(date, dirtyOptions).getTime(); // Round the number of days to the nearest integer // because the number of milliseconds in a week is not constant // (e.g. it's different in the week of the daylight saving time clock shift) return Math.round(diff / MILLISECONDS_IN_WEEK$2) + 1 } var formatters = { // Month: 1, 2, ..., 12 'M': function (date) { return date.getUTCMonth() + 1 }, // Month: 1st, 2nd, ..., 12th 'Mo': function (date, options) { var month = date.getUTCMonth() + 1; return options.locale.localize.ordinalNumber(month, {unit: 'month'}) }, // Month: 01, 02, ..., 12 'MM': function (date) { return addLeadingZeros(date.getUTCMonth() + 1, 2) }, // Month: Jan, Feb, ..., Dec 'MMM': function (date, options) { return options.locale.localize.month(date.getUTCMonth(), {type: 'short'}) }, // Month: January, February, ..., December 'MMMM': function (date, options) { return options.locale.localize.month(date.getUTCMonth(), {type: 'long'}) }, // Quarter: 1, 2, 3, 4 'Q': function (date) { return Math.ceil((date.getUTCMonth() + 1) / 3) }, // Quarter: 1st, 2nd, 3rd, 4th 'Qo': function (date, options) { var quarter = Math.ceil((date.getUTCMonth() + 1) / 3); return options.locale.localize.ordinalNumber(quarter, {unit: 'quarter'}) }, // Day of month: 1, 2, ..., 31 'D': function (date) { return date.getUTCDate() }, // Day of month: 1st, 2nd, ..., 31st 'Do': function (date, options) { return options.locale.localize.ordinalNumber(date.getUTCDate(), {unit: 'dayOfMonth'}) }, // Day of month: 01, 02, ..., 31 'DD': function (date) { return addLeadingZeros(date.getUTCDate(), 2) }, // Day of year: 1, 2, ..., 366 'DDD': function (date) { return getUTCDayOfYear(date) }, // Day of year: 1st, 2nd, ..., 366th 'DDDo': function (date, options) { return options.locale.localize.ordinalNumber(getUTCDayOfYear(date), {unit: 'dayOfYear'}) }, // Day of year: 001, 002, ..., 366 'DDDD': function (date) { return addLeadingZeros(getUTCDayOfYear(date), 3) }, // Day of week: Su, Mo, ..., Sa 'dd': function (date, options) { return options.locale.localize.weekday(date.getUTCDay(), {type: 'narrow'}) }, // Day of week: Sun, Mon, ..., Sat 'ddd': function (date, options) { return options.locale.localize.weekday(date.getUTCDay(), {type: 'short'}) }, // Day of week: Sunday, Monday, ..., Saturday 'dddd': function (date, options) { return options.locale.localize.weekday(date.getUTCDay(), {type: 'long'}) }, // Day of week: 0, 1, ..., 6 'd': function (date) { return date.getUTCDay() }, // Day of week: 0th, 1st, 2nd, ..., 6th 'do': function (date, options) { return options.locale.localize.ordinalNumber(date.getUTCDay(), {unit: 'dayOfWeek'}) }, // Day of ISO week: 1, 2, ..., 7 'E': function (date) { return date.getUTCDay() || 7 }, // ISO week: 1, 2, ..., 53 'W': function (date) { return getUTCISOWeek(date) }, // ISO week: 1st, 2nd, ..., 53th 'Wo': function (date, options) { return options.locale.localize.ordinalNumber(getUTCISOWeek(date), {unit: 'isoWeek'}) }, // ISO week: 01, 02, ..., 53 'WW': function (date) { return addLeadingZeros(getUTCISOWeek(date), 2) }, // Year: 00, 01, ..., 99 'YY': function (date) { return addLeadingZeros(date.getUTCFullYear(), 4).substr(2) }, // Year: 1900, 1901, ..., 2099 'YYYY': function (date) { return addLeadingZeros(date.getUTCFullYear(), 4) }, // ISO week-numbering year: 00, 01, 
..., 99 'GG': function (date) { return String(getUTCISOWeekYear(date)).substr(2) }, // ISO week-numbering year: 1900, 1901, ..., 2099 'GGGG': function (date) { return getUTCISOWeekYear(date) }, // Hour: 0, 1, ... 23 'H': function (date) { return date.getUTCHours() }, // Hour: 00, 01, ..., 23 'HH': function (date) { return addLeadingZeros(date.getUTCHours(), 2) }, // Hour: 1, 2, ..., 12 'h': function (date) { var hours = date.getUTCHours(); if (hours === 0) { return 12 } else if (hours > 12) { return hours % 12 } else { return hours } }, // Hour: 01, 02, ..., 12 'hh': function (date) { return addLeadingZeros(formatters['h'](date), 2) }, // Minute: 0, 1, ..., 59 'm': function (date) { return date.getUTCMinutes() }, // Minute: 00, 01, ..., 59 'mm': function (date) { return addLeadingZeros(date.getUTCMinutes(), 2) }, // Second: 0, 1, ..., 59 's': function (date) { return date.getUTCSeconds() }, // Second: 00, 01, ..., 59 'ss': function (date) { return addLeadingZeros(date.getUTCSeconds(), 2) }, // 1/10 of second: 0, 1, ..., 9 'S': function (date) { return Math.floor(date.getUTCMilliseconds() / 100) }, // 1/100 of second: 00, 01, ..., 99 'SS': function (date) { return addLeadingZeros(Math.floor(date.getUTCMilliseconds() / 10), 2) }, // Millisecond: 000, 001, ..., 999 'SSS': function (date) { return addLeadingZeros(date.getUTCMilliseconds(), 3) }, // Timezone: -01:00, +00:00, ... +12:00 'Z': function (date, options) { var originalDate = options._originalDate || date; return formatTimezone(originalDate.getTimezoneOffset(), ':') }, // Timezone: -0100, +0000, ... +1200 'ZZ': function (date, options) { var originalDate = options._originalDate || date; return formatTimezone(originalDate.getTimezoneOffset()) }, // Seconds timestamp: 512969520 'X': function (date, options) { var originalDate = options._originalDate || date; return Math.floor(originalDate.getTime() / 1000) }, // Milliseconds timestamp: 512969520900 'x': function (date, options) { var originalDate = options._originalDate || date; return originalDate.getTime() }, // AM, PM 'A': function (date, options) { return options.locale.localize.timeOfDay(date.getUTCHours(), {type: 'uppercase'}) }, // am, pm 'a': function (date, options) { return options.locale.localize.timeOfDay(date.getUTCHours(), {type: 'lowercase'}) }, // a.m., p.m. 'aa': function (date, options) { return options.locale.localize.timeOfDay(date.getUTCHours(), {type: 'long'}) } }; function formatTimezone (offset, delimeter) { delimeter = delimeter || ''; var sign = offset > 0 ? '-' : '+'; var absOffset = Math.abs(offset); var hours = Math.floor(absOffset / 60); var minutes = absOffset % 60; return sign + addLeadingZeros(hours, 2) + delimeter + addLeadingZeros(minutes, 2) } function addLeadingZeros (number, targetLength) { var output = Math.abs(number).toString(); while (output.length < targetLength) { output = '0' + output; } return output } // This function will be a part of public API when UTC function will be implemented. 
// See issue: https://github.com/date-fns/date-fns/issues/376 function addUTCMinutes (dirtyDate, dirtyAmount, dirtyOptions) { var date = toDate(dirtyDate, dirtyOptions); var amount = Number(dirtyAmount); date.setUTCMinutes(date.getUTCMinutes() + amount); return date } var longFormattingTokensRegExp = /(\[[^[]*])|(\\)?(LTS|LT|LLLL|LLL|LL|L|llll|lll|ll|l)/g; var defaultFormattingTokensRegExp = /(\[[^[]*])|(\\)?(x|ss|s|mm|m|hh|h|do|dddd|ddd|dd|d|aa|a|ZZ|Z|YYYY|YY|X|Wo|WW|W|SSS|SS|S|Qo|Q|Mo|MMMM|MMM|MM|M|HH|H|GGGG|GG|E|Do|DDDo|DDDD|DDD|DD|D|A|.)/g; /** * @name format * @category Common Helpers * @summary Format the date. * * @description * Return the formatted date string in the given format. * * Accepted tokens: * | Unit | Token | Result examples | * |-------------------------|-------|----------------------------------| * | Month | M | 1, 2, ..., 12 | * | | Mo | 1st, 2nd, ..., 12th | * | | MM | 01, 02, ..., 12 | * | | MMM | Jan, Feb, ..., Dec | * | | MMMM | January, February, ..., December | * | Quarter | Q | 1, 2, 3, 4 | * | | Qo | 1st, 2nd, 3rd, 4th | * | Day of month | D | 1, 2, ..., 31 | * | | Do | 1st, 2nd, ..., 31st | * | | DD | 01, 02, ..., 31 | * | Day of year | DDD | 1, 2, ..., 366 | * | | DDDo | 1st, 2nd, ..., 366th | * | | DDDD | 001, 002, ..., 366 | * | Day of week | d | 0, 1, ..., 6 | * | | do | 0th, 1st, ..., 6th | * | | dd | Su, Mo, ..., Sa | * | | ddd | Sun, Mon, ..., Sat | * | | dddd | Sunday, Monday, ..., Saturday | * | Day of ISO week | E | 1, 2, ..., 7 | * | ISO week | W | 1, 2, ..., 53 | * | | Wo | 1st, 2nd, ..., 53rd | * | | WW | 01, 02, ..., 53 | * | Year | YY | 00, 01, ..., 99 | * | | YYYY | 1900, 1901, ..., 2099 | * | ISO week-numbering year | GG | 00, 01, ..., 99 | * | | GGGG | 1900, 1901, ..., 2099 | * | AM/PM | A | AM, PM | * | | a | am, pm | * | | aa | a.m., p.m. | * | Hour | H | 0, 1, ... 23 | * | | HH | 00, 01, ... 23 | * | | h | 1, 2, ..., 12 | * | | hh | 01, 02, ..., 12 | * | Minute | m | 0, 1, ..., 59 | * | | mm | 00, 01, ..., 59 | * | Second | s | 0, 1, ..., 59 | * | | ss | 00, 01, ..., 59 | * | 1/10 of second | S | 0, 1, ..., 9 | * | 1/100 of second | SS | 00, 01, ..., 99 | * | Millisecond | SSS | 000, 001, ..., 999 | * | Timezone | Z | -01:00, +00:00, ... +12:00 | * | | ZZ | -0100, +0000, ..., +1200 | * | Seconds timestamp | X | 512969520 | * | Milliseconds timestamp | x | 512969520900 | * | Long format | LT | 05:30 a.m. | * | | LTS | 05:30:15 a.m. | * | | L | 07/02/1995 | * | | l | 7/2/1995 | * | | LL | July 2 1995 | * | | ll | Jul 2 1995 | * | | LLL | July 2 1995 05:30 a.m. | * | | lll | Jul 2 1995 05:30 a.m. | * | | LLLL | Sunday, July 2 1995 05:30 a.m. | * | | llll | Sun, Jul 2 1995 05:30 a.m. | * * The characters wrapped in square brackets are escaped. * * The result may vary by locale. * * @param {Date|String|Number} date - the original date * @param {String} format - the string of tokens * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - passed to `toDate`. See [toDate]{@link https://date-fns.org/docs/toDate} * @param {Locale} [options.locale=defaultLocale] - the locale object. 
See [Locale]{@link https://date-fns.org/docs/Locale} * @returns {String} the formatted date string * @throws {TypeError} 2 arguments required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * @throws {RangeError} `options.locale` must contain `localize` property * @throws {RangeError} `options.locale` must contain `formatLong` property * * @example * // Represent 11 February 2014 in middle-endian format: * var result = format( * new Date(2014, 1, 11), * 'MM/DD/YYYY' * ) * //=> '02/11/2014' * * @example * // Represent 2 July 2014 in Esperanto: * import { eoLocale } from 'date-fns/locale/eo' * var result = format( * new Date(2014, 6, 2), * 'Do [de] MMMM YYYY', * {locale: eoLocale} * ) * //=> '2-a de julio 2014' */ function format (dirtyDate, dirtyFormatStr, dirtyOptions) { if (arguments.length < 2) { throw new TypeError('2 arguments required, but only ' + arguments.length + ' present') } var formatStr = String(dirtyFormatStr); var options = dirtyOptions || {}; var locale$$1 = options.locale || locale; if (!locale$$1.localize) { throw new RangeError('locale must contain localize property') } if (!locale$$1.formatLong) { throw new RangeError('locale must contain formatLong property') } var localeFormatters = locale$$1.formatters || {}; var formattingTokensRegExp = locale$$1.formattingTokensRegExp || defaultFormattingTokensRegExp; var formatLong = locale$$1.formatLong; var originalDate = toDate(dirtyDate, options); if (!isValid(originalDate, options)) { return 'Invalid Date' } // Convert the date in system timezone to the same date in UTC+00:00 timezone. // This ensures that when UTC functions will be implemented, locales will be compatible with them. // See an issue about UTC functions: https://github.com/date-fns/date-fns/issues/376 var timezoneOffset = originalDate.getTimezoneOffset(); var utcDate = addUTCMinutes(originalDate, -timezoneOffset, options); var formatterOptions = cloneObject(options); formatterOptions.locale = locale$$1; formatterOptions.formatters = formatters; // When UTC functions will be implemented, options._originalDate will likely be a part of public API. // Right now, please don't use it in locales. If you have to use an original date, // please restore it from `date`, adding a timezone offset to it. formatterOptions._originalDate = originalDate; var result = formatStr .replace(longFormattingTokensRegExp, function (substring) { if (substring[0] === '[') { return substring } if (substring[0] === '\\') { return cleanEscapedString(substring) } return formatLong(substring) }) .replace(formattingTokensRegExp, function (substring) { var formatter = localeFormatters[substring] || formatters[substring]; if (formatter) { return formatter(utcDate, formatterOptions) } else { return cleanEscapedString(substring) } }); return result } function cleanEscapedString (input) { if (input.match(/\[[\s\S]/)) { return input.replace(/^\[|]$/g, '') } return input.replace(/\\/g, '') } /** * @name subMinutes * @category Minute Helpers * @summary Subtract the specified number of minutes from the given date. * * @description * Subtract the specified number of minutes from the given date. * * @param {Date|String|Number} date - the date to be changed * @param {Number} amount - the amount of minutes to be subtracted * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - passed to `toDate`. 
See [toDate]{@link https://date-fns.org/docs/toDate} * @returns {Date} the new date with the mintues subtracted * @throws {TypeError} 2 arguments required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * * @example * // Subtract 30 minutes from 10 July 2014 12:00:00: * var result = subMinutes(new Date(2014, 6, 10, 12, 0), 30) * //=> Thu Jul 10 2014 11:30:00 */ function subMinutes (dirtyDate, dirtyAmount, dirtyOptions) { if (arguments.length < 2) { throw new TypeError('2 arguments required, but only ' + arguments.length + ' present') } var amount = Number(dirtyAmount); return addMinutes(dirtyDate, -amount, dirtyOptions) } /** * @name isAfter * @category Common Helpers * @summary Is the first date after the second one? * * @description * Is the first date after the second one? * * @param {Date|String|Number} date - the date that should be after the other one to return true * @param {Date|String|Number} dateToCompare - the date to compare with * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - passed to `toDate`. See [toDate]{@link https://date-fns.org/docs/toDate} * @returns {Boolean} the first date is after the second date * @throws {TypeError} 2 arguments required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * * @example * // Is 10 July 1989 after 11 February 1987? * var result = isAfter(new Date(1989, 6, 10), new Date(1987, 1, 11)) * //=> true */ function isAfter (dirtyDate, dirtyDateToCompare, dirtyOptions) { if (arguments.length < 2) { throw new TypeError('2 arguments required, but only ' + arguments.length + ' present') } var date = toDate(dirtyDate, dirtyOptions); var dateToCompare = toDate(dirtyDateToCompare, dirtyOptions); return date.getTime() > dateToCompare.getTime() } /** * @name isBefore * @category Common Helpers * @summary Is the first date before the second one? * * @description * Is the first date before the second one? * * @param {Date|String|Number} date - the date that should be before the other one to return true * @param {Date|String|Number} dateToCompare - the date to compare with * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - passed to `toDate`. See [toDate]{@link https://date-fns.org/docs/toDate} * @returns {Boolean} the first date is before the second date * @throws {TypeError} 2 arguments required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * * @example * // Is 10 July 1989 before 11 February 1987? * var result = isBefore(new Date(1989, 6, 10), new Date(1987, 1, 11)) * //=> false */ function isBefore (dirtyDate, dirtyDateToCompare, dirtyOptions) { if (arguments.length < 2) { throw new TypeError('2 arguments required, but only ' + arguments.length + ' present') } var date = toDate(dirtyDate, dirtyOptions); var dateToCompare = toDate(dirtyDateToCompare, dirtyOptions); return date.getTime() < dateToCompare.getTime() } /** * @name isEqual * @category Common Helpers * @summary Are the given dates equal? * * @description * Are the given dates equal? * * @param {Date|String|Number} dateLeft - the first date to compare * @param {Date|String|Number} dateRight - the second date to compare * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - passed to `toDate`. 
See [toDate]{@link https://date-fns.org/docs/toDate} * @returns {Boolean} the dates are equal * @throws {TypeError} 2 arguments required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * * @example * // Are 2 July 2014 06:30:45.000 and 2 July 2014 06:30:45.500 equal? * var result = isEqual( * new Date(2014, 6, 2, 6, 30, 45, 0) * new Date(2014, 6, 2, 6, 30, 45, 500) * ) * //=> false */ function isEqual (dirtyLeftDate, dirtyRightDate, dirtyOptions) { if (arguments.length < 2) { throw new TypeError('2 arguments required, but only ' + arguments.length + ' present') } var dateLeft = toDate(dirtyLeftDate, dirtyOptions); var dateRight = toDate(dirtyRightDate, dirtyOptions); return dateLeft.getTime() === dateRight.getTime() } var patterns$1 = { 'M': /^(1[0-2]|0?\d)/, // 0 to 12 'D': /^(3[0-1]|[0-2]?\d)/, // 0 to 31 'DDD': /^(36[0-6]|3[0-5]\d|[0-2]?\d?\d)/, // 0 to 366 'W': /^(5[0-3]|[0-4]?\d)/, // 0 to 53 'YYYY': /^(\d{1,4})/, // 0 to 9999 'H': /^(2[0-3]|[0-1]?\d)/, // 0 to 23 'm': /^([0-5]?\d)/, // 0 to 59 'Z': /^([+-])(\d{2}):(\d{2})/, 'ZZ': /^([+-])(\d{2})(\d{2})/, singleDigit: /^(\d)/, twoDigits: /^(\d{2})/, threeDigits: /^(\d{3})/, fourDigits: /^(\d{4})/, anyDigits: /^(\d+)/ }; function parseDecimal$1 (matchResult) { return parseInt(matchResult[1], 10) } var parsers = { // Year: 00, 01, ..., 99 'YY': { unit: 'twoDigitYear', match: patterns$1.twoDigits, parse: function (matchResult) { return parseDecimal$1(matchResult) } }, // Year: 1900, 1901, ..., 2099 'YYYY': { unit: 'year', match: patterns$1.YYYY, parse: parseDecimal$1 }, // ISO week-numbering year: 00, 01, ..., 99 'GG': { unit: 'isoYear', match: patterns$1.twoDigits, parse: function (matchResult) { return parseDecimal$1(matchResult) + 1900 } }, // ISO week-numbering year: 1900, 1901, ..., 2099 'GGGG': { unit: 'isoYear', match: patterns$1.YYYY, parse: parseDecimal$1 }, // Quarter: 1, 2, 3, 4 'Q': { unit: 'quarter', match: patterns$1.singleDigit, parse: parseDecimal$1 }, // Ordinal quarter 'Qo': { unit: 'quarter', match: function (string, options) { return options.locale.match.ordinalNumbers(string, {unit: 'quarter'}) }, parse: function (matchResult, options) { return options.locale.match.ordinalNumber(matchResult, {unit: 'quarter'}) } }, // Month: 1, 2, ..., 12 'M': { unit: 'month', match: patterns$1.M, parse: function (matchResult) { return parseDecimal$1(matchResult) - 1 } }, // Ordinal month 'Mo': { unit: 'month', match: function (string, options) { return options.locale.match.ordinalNumbers(string, {unit: 'month'}) }, parse: function (matchResult, options) { return options.locale.match.ordinalNumber(matchResult, {unit: 'month'}) - 1 } }, // Month: 01, 02, ..., 12 'MM': { unit: 'month', match: patterns$1.twoDigits, parse: function (matchResult) { return parseDecimal$1(matchResult) - 1 } }, // Month: Jan, Feb, ..., Dec 'MMM': { unit: 'month', match: function (string, options) { return options.locale.match.months(string, {type: 'short'}) }, parse: function (matchResult, options) { return options.locale.match.month(matchResult, {type: 'short'}) } }, // Month: January, February, ..., December 'MMMM': { unit: 'month', match: function (string, options) { return options.locale.match.months(string, {type: 'long'}) || options.locale.match.months(string, {type: 'short'}) }, parse: function (matchResult, options) { var parseResult = options.locale.match.month(matchResult, {type: 'long'}); if (parseResult == null) { parseResult = options.locale.match.month(matchResult, {type: 'short'}); } return parseResult } }, // ISO week: 1, 
2, ..., 53 'W': { unit: 'isoWeek', match: patterns$1.W, parse: parseDecimal$1 }, // Ordinal ISO week 'Wo': { unit: 'isoWeek', match: function (string, options) { return options.locale.match.ordinalNumbers(string, {unit: 'isoWeek'}) }, parse: function (matchResult, options) { return options.locale.match.ordinalNumber(matchResult, {unit: 'isoWeek'}) } }, // ISO week: 01, 02, ..., 53 'WW': { unit: 'isoWeek', match: patterns$1.twoDigits, parse: parseDecimal$1 }, // Day of week: 0, 1, ..., 6 'd': { unit: 'dayOfWeek', match: patterns$1.singleDigit, parse: parseDecimal$1 }, // Ordinal day of week 'do': { unit: 'dayOfWeek', match: function (string, options) { return options.locale.match.ordinalNumbers(string, {unit: 'dayOfWeek'}) }, parse: function (matchResult, options) { return options.locale.match.ordinalNumber(matchResult, {unit: 'dayOfWeek'}) } }, // Day of week: Su, Mo, ..., Sa 'dd': { unit: 'dayOfWeek', match: function (string, options) { return options.locale.match.weekdays(string, {type: 'narrow'}) }, parse: function (matchResult, options) { return options.locale.match.weekday(matchResult, {type: 'narrow'}) } }, // Day of week: Sun, Mon, ..., Sat 'ddd': { unit: 'dayOfWeek', match: function (string, options) { return options.locale.match.weekdays(string, {type: 'short'}) || options.locale.match.weekdays(string, {type: 'narrow'}) }, parse: function (matchResult, options) { var parseResult = options.locale.match.weekday(matchResult, {type: 'short'}); if (parseResult == null) { parseResult = options.locale.match.weekday(matchResult, {type: 'narrow'}); } return parseResult } }, // Day of week: Sunday, Monday, ..., Saturday 'dddd': { unit: 'dayOfWeek', match: function (string, options) { return options.locale.match.weekdays(string, {type: 'long'}) || options.locale.match.weekdays(string, {type: 'short'}) || options.locale.match.weekdays(string, {type: 'narrow'}) }, parse: function (matchResult, options) { var parseResult = options.locale.match.weekday(matchResult, {type: 'long'}); if (parseResult == null) { parseResult = options.locale.match.weekday(matchResult, {type: 'short'}); if (parseResult == null) { parseResult = options.locale.match.weekday(matchResult, {type: 'narrow'}); } } return parseResult } }, // Day of ISO week: 1, 2, ..., 7 'E': { unit: 'dayOfISOWeek', match: patterns$1.singleDigit, parse: function (matchResult) { return parseDecimal$1(matchResult) } }, // Day of month: 1, 2, ..., 31 'D': { unit: 'dayOfMonth', match: patterns$1.D, parse: parseDecimal$1 }, // Ordinal day of month 'Do': { unit: 'dayOfMonth', match: function (string, options) { return options.locale.match.ordinalNumbers(string, {unit: 'dayOfMonth'}) }, parse: function (matchResult, options) { return options.locale.match.ordinalNumber(matchResult, {unit: 'dayOfMonth'}) } }, // Day of month: 01, 02, ..., 31 'DD': { unit: 'dayOfMonth', match: patterns$1.twoDigits, parse: parseDecimal$1 }, // Day of year: 1, 2, ..., 366 'DDD': { unit: 'dayOfYear', match: patterns$1.DDD, parse: parseDecimal$1 }, // Ordinal day of year 'DDDo': { unit: 'dayOfYear', match: function (string, options) { return options.locale.match.ordinalNumbers(string, {unit: 'dayOfYear'}) }, parse: function (matchResult, options) { return options.locale.match.ordinalNumber(matchResult, {unit: 'dayOfYear'}) } }, // Day of year: 001, 002, ..., 366 'DDDD': { unit: 'dayOfYear', match: patterns$1.threeDigits, parse: parseDecimal$1 }, // AM, PM 'A': { unit: 'timeOfDay', match: function (string, options) { return options.locale.match.timesOfDay(string, {type: 
'short'}) }, parse: function (matchResult, options) { return options.locale.match.timeOfDay(matchResult, {type: 'short'}) } }, // a.m., p.m. 'aa': { unit: 'timeOfDay', match: function (string, options) { return options.locale.match.timesOfDay(string, {type: 'long'}) || options.locale.match.timesOfDay(string, {type: 'short'}) }, parse: function (matchResult, options) { var parseResult = options.locale.match.timeOfDay(matchResult, {type: 'long'}); if (parseResult == null) { parseResult = options.locale.match.timeOfDay(matchResult, {type: 'short'}); } return parseResult } }, // Hour: 0, 1, ... 23 'H': { unit: 'hours', match: patterns$1.H, parse: parseDecimal$1 }, // Hour: 00, 01, ..., 23 'HH': { unit: 'hours', match: patterns$1.twoDigits, parse: parseDecimal$1 }, // Hour: 1, 2, ..., 12 'h': { unit: 'timeOfDayHours', match: patterns$1.M, parse: parseDecimal$1 }, // Hour: 01, 02, ..., 12 'hh': { unit: 'timeOfDayHours', match: patterns$1.twoDigits, parse: parseDecimal$1 }, // Minute: 0, 1, ..., 59 'm': { unit: 'minutes', match: patterns$1.m, parse: parseDecimal$1 }, // Minute: 00, 01, ..., 59 'mm': { unit: 'minutes', match: patterns$1.twoDigits, parse: parseDecimal$1 }, // Second: 0, 1, ..., 59 's': { unit: 'seconds', match: patterns$1.m, parse: parseDecimal$1 }, // Second: 00, 01, ..., 59 'ss': { unit: 'seconds', match: patterns$1.twoDigits, parse: parseDecimal$1 }, // 1/10 of second: 0, 1, ..., 9 'S': { unit: 'milliseconds', match: patterns$1.singleDigit, parse: function (matchResult) { return parseDecimal$1(matchResult) * 100 } }, // 1/100 of second: 00, 01, ..., 99 'SS': { unit: 'milliseconds', match: patterns$1.twoDigits, parse: function (matchResult) { return parseDecimal$1(matchResult) * 10 } }, // Millisecond: 000, 001, ..., 999 'SSS': { unit: 'milliseconds', match: patterns$1.threeDigits, parse: parseDecimal$1 }, // Timezone: -01:00, +00:00, ... +12:00 'Z': { unit: 'timezone', match: patterns$1.Z, parse: function (matchResult) { var sign = matchResult[1]; var hours = parseInt(matchResult[2], 10); var minutes = parseInt(matchResult[3], 10); var absoluteOffset = hours * 60 + minutes; return (sign === '+') ? absoluteOffset : -absoluteOffset } }, // Timezone: -0100, +0000, ... +1200 'ZZ': { unit: 'timezone', match: patterns$1.ZZ, parse: function (matchResult) { var sign = matchResult[1]; var hours = parseInt(matchResult[2], 10); var minutes = parseInt(matchResult[3], 10); var absoluteOffset = hours * 60 + minutes; return (sign === '+') ? absoluteOffset : -absoluteOffset } }, // Seconds timestamp: 512969520 'X': { unit: 'timestamp', match: patterns$1.anyDigits, parse: function (matchResult) { return parseDecimal$1(matchResult) * 1000 } }, // Milliseconds timestamp: 512969520900 'x': { unit: 'timestamp', match: patterns$1.anyDigits, parse: parseDecimal$1 } }; parsers['a'] = parsers['A']; // This function will be a part of public API when UTC function will be implemented. // See issue: https://github.com/date-fns/date-fns/issues/376 function setUTCDay (dirtyDate, dirtyDay, dirtyOptions) { var options = dirtyOptions || {}; var locale = options.locale; var localeWeekStartsOn = locale && locale.options && locale.options.weekStartsOn; var defaultWeekStartsOn = localeWeekStartsOn === undefined ? 0 : Number(localeWeekStartsOn); var weekStartsOn = options.weekStartsOn === undefined ? 
defaultWeekStartsOn : Number(options.weekStartsOn); // Test if weekStartsOn is between 0 and 6 _and_ is not NaN if (!(weekStartsOn >= 0 && weekStartsOn <= 6)) { throw new RangeError('weekStartsOn must be between 0 and 6 inclusively') } var date = toDate(dirtyDate, dirtyOptions); var day = Number(dirtyDay); var currentDay = date.getUTCDay(); var remainder = day % 7; var dayIndex = (remainder + 7) % 7; var diff = (dayIndex < weekStartsOn ? 7 : 0) + day - currentDay; date.setUTCDate(date.getUTCDate() + diff); return date } // This function will be a part of public API when UTC function will be implemented. // See issue: https://github.com/date-fns/date-fns/issues/376 function setUTCISODay (dirtyDate, dirtyDay, dirtyOptions) { var day = Number(dirtyDay); if (day % 7 === 0) { day = day - 7; } var weekStartsOn = 1; var date = toDate(dirtyDate, dirtyOptions); var currentDay = date.getUTCDay(); var remainder = day % 7; var dayIndex = (remainder + 7) % 7; var diff = (dayIndex < weekStartsOn ? 7 : 0) + day - currentDay; date.setUTCDate(date.getUTCDate() + diff); return date } // This function will be a part of public API when UTC function will be implemented. // See issue: https://github.com/date-fns/date-fns/issues/376 function setUTCISOWeek (dirtyDate, dirtyISOWeek, dirtyOptions) { var date = toDate(dirtyDate, dirtyOptions); var isoWeek = Number(dirtyISOWeek); var diff = getUTCISOWeek(date, dirtyOptions) - isoWeek; date.setUTCDate(date.getUTCDate() - diff * 7); return date } var MILLISECONDS_IN_DAY$3 = 86400000; // This function will be a part of public API when UTC function will be implemented. // See issue: https://github.com/date-fns/date-fns/issues/376 function setUTCISOWeekYear (dirtyDate, dirtyISOYear, dirtyOptions) { var date = toDate(dirtyDate, dirtyOptions); var isoYear = Number(dirtyISOYear); var dateStartOfYear = startOfUTCISOWeekYear(date, dirtyOptions); var diff = Math.floor((date.getTime() - dateStartOfYear.getTime()) / MILLISECONDS_IN_DAY$3); var fourthOfJanuary = new Date(0); fourthOfJanuary.setUTCFullYear(isoYear, 0, 4); fourthOfJanuary.setUTCHours(0, 0, 0, 0); date = startOfUTCISOWeekYear(fourthOfJanuary, dirtyOptions); date.setUTCDate(date.getUTCDate() + diff); return date } var MILLISECONDS_IN_MINUTE$7 = 60000; function setTimeOfDay (hours, timeOfDay) { var isAM = timeOfDay === 0; if (isAM) { if (hours === 12) { return 0 } } else { if (hours !== 12) { return 12 + hours } } return hours } var units = { twoDigitYear: { priority: 10, set: function (dateValues, value) { var century = Math.floor(dateValues.date.getUTCFullYear() / 100); var year = century * 100 + value; dateValues.date.setUTCFullYear(year, 0, 1); dateValues.date.setUTCHours(0, 0, 0, 0); return dateValues } }, year: { priority: 10, set: function (dateValues, value) { dateValues.date.setUTCFullYear(value, 0, 1); dateValues.date.setUTCHours(0, 0, 0, 0); return dateValues } }, isoYear: { priority: 10, set: function (dateValues, value, options) { dateValues.date = startOfUTCISOWeekYear(setUTCISOWeekYear(dateValues.date, value, options), options); return dateValues } }, quarter: { priority: 20, set: function (dateValues, value) { dateValues.date.setUTCMonth((value - 1) * 3, 1); dateValues.date.setUTCHours(0, 0, 0, 0); return dateValues } }, month: { priority: 30, set: function (dateValues, value) { dateValues.date.setUTCMonth(value, 1); dateValues.date.setUTCHours(0, 0, 0, 0); return dateValues } }, isoWeek: { priority: 40, set: function (dateValues, value, options) { dateValues.date = 
startOfUTCISOWeek(setUTCISOWeek(dateValues.date, value, options), options); return dateValues } }, dayOfWeek: { priority: 50, set: function (dateValues, value, options) { dateValues.date = setUTCDay(dateValues.date, value, options); dateValues.date.setUTCHours(0, 0, 0, 0); return dateValues } }, dayOfISOWeek: { priority: 50, set: function (dateValues, value, options) { dateValues.date = setUTCISODay(dateValues.date, value, options); dateValues.date.setUTCHours(0, 0, 0, 0); return dateValues } }, dayOfMonth: { priority: 50, set: function (dateValues, value) { dateValues.date.setUTCDate(value); dateValues.date.setUTCHours(0, 0, 0, 0); return dateValues } }, dayOfYear: { priority: 50, set: function (dateValues, value) { dateValues.date.setUTCMonth(0, value); dateValues.date.setUTCHours(0, 0, 0, 0); return dateValues } }, timeOfDay: { priority: 60, set: function (dateValues, value, options) { dateValues.timeOfDay = value; return dateValues } }, hours: { priority: 70, set: function (dateValues, value, options) { dateValues.date.setUTCHours(value, 0, 0, 0); return dateValues } }, timeOfDayHours: { priority: 70, set: function (dateValues, value, options) { var timeOfDay = dateValues.timeOfDay; if (timeOfDay != null) { value = setTimeOfDay(value, timeOfDay); } dateValues.date.setUTCHours(value, 0, 0, 0); return dateValues } }, minutes: { priority: 80, set: function (dateValues, value) { dateValues.date.setUTCMinutes(value, 0, 0); return dateValues } }, seconds: { priority: 90, set: function (dateValues, value) { dateValues.date.setUTCSeconds(value, 0); return dateValues } }, milliseconds: { priority: 100, set: function (dateValues, value) { dateValues.date.setUTCMilliseconds(value); return dateValues } }, timezone: { priority: 110, set: function (dateValues, value) { dateValues.date = new Date(dateValues.date.getTime() - value * MILLISECONDS_IN_MINUTE$7); return dateValues } }, timestamp: { priority: 120, set: function (dateValues, value) { dateValues.date = new Date(value); return dateValues } } }; var TIMEZONE_UNIT_PRIORITY = 110; var MILLISECONDS_IN_MINUTE$6 = 60000; var longFormattingTokensRegExp$1 = /(\[[^[]*])|(\\)?(LTS|LT|LLLL|LLL|LL|L|llll|lll|ll|l)/g; var defaultParsingTokensRegExp = /(\[[^[]*])|(\\)?(x|ss|s|mm|m|hh|h|do|dddd|ddd|dd|d|aa|a|ZZ|Z|YYYY|YY|X|Wo|WW|W|SSS|SS|S|Qo|Q|Mo|MMMM|MMM|MM|M|HH|H|GGGG|GG|E|Do|DDDo|DDDD|DDD|DD|D|A|.)/g; /** * @name parse * @category Common Helpers * @summary Parse the date. * * @description * Return the date parsed from string using the given format. 
* * Accepted format tokens: * | Unit | Priority | Token | Input examples | * |-------------------------|----------|-------|----------------------------------| * | Year | 10 | YY | 00, 01, ..., 99 | * | | | YYYY | 1900, 1901, ..., 2099 | * | ISO week-numbering year | 10 | GG | 00, 01, ..., 99 | * | | | GGGG | 1900, 1901, ..., 2099 | * | Quarter | 20 | Q | 1, 2, 3, 4 | * | | | Qo | 1st, 2nd, 3rd, 4th | * | Month | 30 | M | 1, 2, ..., 12 | * | | | Mo | 1st, 2nd, ..., 12th | * | | | MM | 01, 02, ..., 12 | * | | | MMM | Jan, Feb, ..., Dec | * | | | MMMM | January, February, ..., December | * | ISO week | 40 | W | 1, 2, ..., 53 | * | | | Wo | 1st, 2nd, ..., 53rd | * | | | WW | 01, 02, ..., 53 | * | Day of week | 50 | d | 0, 1, ..., 6 | * | | | do | 0th, 1st, ..., 6th | * | | | dd | Su, Mo, ..., Sa | * | | | ddd | Sun, Mon, ..., Sat | * | | | dddd | Sunday, Monday, ..., Saturday | * | Day of ISO week | 50 | E | 1, 2, ..., 7 | * | Day of month | 50 | D | 1, 2, ..., 31 | * | | | Do | 1st, 2nd, ..., 31st | * | | | DD | 01, 02, ..., 31 | * | Day of year | 50 | DDD | 1, 2, ..., 366 | * | | | DDDo | 1st, 2nd, ..., 366th | * | | | DDDD | 001, 002, ..., 366 | * | Time of day | 60 | A | AM, PM | * | | | a | am, pm | * | | | aa | a.m., p.m. | * | Hour | 70 | H | 0, 1, ... 23 | * | | | HH | 00, 01, ... 23 | * | Time of day hour | 70 | h | 1, 2, ..., 12 | * | | | hh | 01, 02, ..., 12 | * | Minute | 80 | m | 0, 1, ..., 59 | * | | | mm | 00, 01, ..., 59 | * | Second | 90 | s | 0, 1, ..., 59 | * | | | ss | 00, 01, ..., 59 | * | 1/10 of second | 100 | S | 0, 1, ..., 9 | * | 1/100 of second | 100 | SS | 00, 01, ..., 99 | * | Millisecond | 100 | SSS | 000, 001, ..., 999 | * | Timezone | 110 | Z | -01:00, +00:00, ... +12:00 | * | | | ZZ | -0100, +0000, ..., +1200 | * | Seconds timestamp | 120 | X | 512969520 | * | Milliseconds timestamp | 120 | x | 512969520900 | * * Values will be assigned to the date in the ascending order of its unit's priority. * Units of an equal priority overwrite each other in the order of appearance. * * If no values of higher priority are parsed (e.g. when parsing string 'January 1st' without a year), * the values will be taken from 3rd argument `baseDate` which works as a context of parsing. * * `baseDate` must be passed for correct work of the function. * If you're not sure which `baseDate` to supply, create a new instance of Date: * `parse('02/11/2014', 'MM/DD/YYYY', new Date())` * In this case parsing will be done in the context of the current date. * If `baseDate` is `Invalid Date` or a value not convertible to valid `Date`, * then `Invalid Date` will be returned. * * Also, `parse` unfolds long formats like those in [format]{@link https://date-fns.org/docs/format}: * | Token | Input examples | * |-------|--------------------------------| * | LT | 05:30 a.m. | * | LTS | 05:30:15 a.m. | * | L | 07/02/1995 | * | l | 7/2/1995 | * | LL | July 2 1995 | * | ll | Jul 2 1995 | * | LLL | July 2 1995 05:30 a.m. | * | lll | Jul 2 1995 05:30 a.m. | * | LLLL | Sunday, July 2 1995 05:30 a.m. | * | llll | Sun, Jul 2 1995 05:30 a.m. | * * The characters wrapped in square brackets in the format string are escaped. * * The result may vary by locale. * * If `formatString` matches with `dateString` but does not provides tokens, `baseDate` will be returned. * * If parsing failed, `Invalid Date` will be returned. * Invalid Date is a Date, whose time value is NaN. 
* Time value of Date: http://es5.github.io/#x15.9.1.1 * * @param {String} dateString - the string to parse * @param {String} formatString - the string of tokens * @param {Date|String|Number} baseDate - the date to take the missing higher priority values from * @param {Options} [options] - the object with options. See [Options]{@link https://date-fns.org/docs/Options} * @param {0|1|2} [options.additionalDigits=2] - passed to `toDate`. See [toDate]{@link https://date-fns.org/docs/toDate} * @param {Locale} [options.locale=defaultLocale] - the locale object. See [Locale]{@link https://date-fns.org/docs/Locale} * @param {0|1|2|3|4|5|6} [options.weekStartsOn=0] - the index of the first day of the week (0 - Sunday) * @returns {Date} the parsed date * @throws {TypeError} 3 arguments required * @throws {RangeError} `options.additionalDigits` must be 0, 1 or 2 * @throws {RangeError} `options.weekStartsOn` must be between 0 and 6 * @throws {RangeError} `options.locale` must contain `match` property * @throws {RangeError} `options.locale` must contain `formatLong` property * * @example * // Parse 11 February 2014 from middle-endian format: * var result = parse( * '02/11/2014', * 'MM/DD/YYYY', * new Date() * ) * //=> Tue Feb 11 2014 00:00:00 * * @example * // Parse 28th of February in Esperanto locale in the context of 2010 year: * import eoLocale from 'date-fns/locale/eo' * var result = parse( * '28-a de februaro', * 'Do [de] MMMM', * new Date(2010, 0, 1), * {locale: eoLocale} * ) * //=> Sun Feb 28 2010 00:00:00 */ function parse (dirtyDateString, dirtyFormatString, dirtyBaseDate, dirtyOptions) { if (arguments.length < 3) { throw new TypeError('3 arguments required, but only ' + arguments.length + ' present') } var dateString = String(dirtyDateString); var options = dirtyOptions || {}; var weekStartsOn = options.weekStartsOn === undefined ?
0 : Number(options.weekStartsOn); // Test if weekStartsOn is between 0 and 6 _and_ is not NaN if (!(weekStartsOn >= 0 && weekStartsOn <= 6)) { throw new RangeError('weekStartsOn must be between 0 and 6 inclusively') } var locale$$1 = options.locale || locale; var localeParsers = locale$$1.parsers || {}; var localeUnits = locale$$1.units || {}; if (!locale$$1.match) { throw new RangeError('locale must contain match property') } if (!locale$$1.formatLong) { throw new RangeError('locale must contain formatLong property') } var formatString = String(dirtyFormatString) .replace(longFormattingTokensRegExp$1, function (substring) { if (substring[0] === '[') { return substring } if (substring[0] === '\\') { return cleanEscapedString$1(substring) } return locale$$1.formatLong(substring) }); if (formatString === '') { if (dateString === '') { return toDate(dirtyBaseDate, options) } else { return new Date(NaN) } } var subFnOptions = cloneObject(options); subFnOptions.locale = locale$$1; var tokens = formatString.match(locale$$1.parsingTokensRegExp || defaultParsingTokensRegExp); var tokensLength = tokens.length; // If timezone isn't specified, it will be set to the system timezone var setters = [{ priority: TIMEZONE_UNIT_PRIORITY, set: dateToSystemTimezone, index: 0 }]; var i; for (i = 0; i < tokensLength; i++) { var token = tokens[i]; var parser = localeParsers[token] || parsers[token]; if (parser) { var matchResult; if (parser.match instanceof RegExp) { matchResult = parser.match.exec(dateString); } else { matchResult = parser.match(dateString, subFnOptions); } if (!matchResult) { return new Date(NaN) } var unitName = parser.unit; var unit = localeUnits[unitName] || units[unitName]; setters.push({ priority: unit.priority, set: unit.set, value: parser.parse(matchResult, subFnOptions), index: setters.length }); var substring = matchResult[0]; dateString = dateString.slice(substring.length); } else { var head = tokens[i].match(/^\[.*]$/) ? tokens[i].replace(/^\[|]$/g, '') : tokens[i]; if (dateString.indexOf(head) === 0) { dateString = dateString.slice(head.length); } else { return new Date(NaN) } } } var uniquePrioritySetters = setters .map(function (setter) { return setter.priority }) .sort(function (a, b) { return a - b }) .filter(function (priority, index, array) { return array.indexOf(priority) === index }) .map(function (priority) { return setters .filter(function (setter) { return setter.priority === priority }) .reverse() }) .map(function (setterArray) { return setterArray[0] }); var date = toDate(dirtyBaseDate, options); if (isNaN(date)) { return new Date(NaN) } // Convert the date in system timezone to the same date in UTC+00:00 timezone. // This ensures that when UTC functions will be implemented, locales will be compatible with them. 
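// A rough illustration of this round-trip (assuming a fixed UTC-05:00 system offset,
// i.e. getTimezoneOffset() === 300): `subMinutes(date, date.getTimezoneOffset())` shifts the base
// date so that its UTC fields hold the local wall-clock values, the unit setters below then work
// through setUTC* methods, and `dateToSystemTimezone` (registered above as the first setter at
// TIMEZONE_UNIT_PRIORITY) shifts the result back; a parsed Z/ZZ token shares that priority and
// takes its place.
//   var base = new Date(2014, 1, 11, 8, 0)                // Feb 11 2014, 08:00 local time
//   var utc = subMinutes(base, base.getTimezoneOffset())  // utc.getUTCHours() === 8
//   utc.setUTCHours(17, 30, 0, 0)                         // e.g. what the 'HH' / 'mm' setters do
//   // dateToSystemTimezone then adds the offset back, yielding 17:30 local time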
// See an issue about UTC functions: https://github.com/date-fns/date-fns/issues/37 var utcDate = subMinutes(date, date.getTimezoneOffset()); var dateValues = {date: utcDate}; var settersLength = uniquePrioritySetters.length; for (i = 0; i < settersLength; i++) { var setter = uniquePrioritySetters[i]; dateValues = setter.set(dateValues, setter.value, subFnOptions); } return dateValues.date } function dateToSystemTimezone (dateValues) { var date = dateValues.date; var time = date.getTime(); // Get the system timezone offset at (moment of time - offset) var offset = date.getTimezoneOffset(); // Get the system timezone offset at the exact moment of time offset = new Date(time + offset * MILLISECONDS_IN_MINUTE$6).getTimezoneOffset(); // Convert date in timezone "UTC+00:00" to the system timezone dateValues.date = new Date(time + offset * MILLISECONDS_IN_MINUTE$6); return dateValues } function cleanEscapedString$1 (input) { if (input.match(/\[[\s\S]/)) { return input.replace(/^\[|]$/g, '') } return input.replace(/\\/g, '') } // This file is generated automatically by `scripts/build/indices.js`. Please, don't change it. // /** * Custom parse behavior on top of date-fns parse function. */ function parseDate$1 (date, format$$1) { if (typeof date !== 'string') { return isValid(date) ? date : null; } var parsed = parse(date, format$$1, new Date()); // if date is not valid or the formatted output after parsing does not match // the string value passed in (avoids overflows) if (!isValid(parsed) || format(parsed, format$$1) !== date) { return null; } return parsed; } var after = function (value, ref) { var otherValue = ref[0]; var inclusion = ref[1]; var format = ref[2]; if (typeof format === 'undefined') { format = inclusion; inclusion = false; } value = parseDate$1(value, format); otherValue = parseDate$1(otherValue, format); // if either is not valid. if (!value || !otherValue) { return false; } return isAfter(value, otherValue) || (inclusion && isEqual(value, otherValue)); }; /** * Some Alpha Regex helpers. 
* https://github.com/chriso/validator.js/blob/master/src/lib/alpha.js */ var alpha$1 = { en: /^[A-Z]*$/i, cs: /^[A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]*$/i, da: /^[A-ZÆØÅ]*$/i, de: /^[A-ZÄÖÜß]*$/i, es: /^[A-ZÁÉÍÑÓÚÜ]*$/i, fr: /^[A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]*$/i, lt: /^[A-ZĄČĘĖĮŠŲŪŽ]*$/i, nl: /^[A-ZÉËÏÓÖÜ]*$/i, hu: /^[A-ZÁÉÍÓÖŐÚÜŰ]*$/i, pl: /^[A-ZĄĆĘŚŁŃÓŻŹ]*$/i, pt: /^[A-ZÃÁÀÂÇÉÊÍÕÓÔÚÜ]*$/i, ru: /^[А-ЯЁ]*$/i, sk: /^[A-ZÁÄČĎÉÍĹĽŇÓŔŠŤÚÝŽ]*$/i, sr: /^[A-ZČĆŽŠĐ]*$/i, tr: /^[A-ZÇĞİıÖŞÜ]*$/i, uk: /^[А-ЩЬЮЯЄІЇҐ]*$/i, ar: /^[ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]*$/ }; var alphaSpaces = { en: /^[A-Z\s]*$/i, cs: /^[A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ\s]*$/i, da: /^[A-ZÆØÅ\s]*$/i, de: /^[A-ZÄÖÜß\s]*$/i, es: /^[A-ZÁÉÍÑÓÚÜ\s]*$/i, fr: /^[A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ\s]*$/i, lt: /^[A-ZĄČĘĖĮŠŲŪŽ\s]*$/i, nl: /^[A-ZÉËÏÓÖÜ\s]*$/i, hu: /^[A-ZÁÉÍÓÖŐÚÜŰ\s]*$/i, pl: /^[A-ZĄĆĘŚŁŃÓŻŹ\s]*$/i, pt: /^[A-ZÃÁÀÂÇÉÊÍÕÓÔÚÜ\s]*$/i, ru: /^[А-ЯЁ\s]*$/i, sk: /^[A-ZÁÄČĎÉÍĹĽŇÓŔŠŤÚÝŽ\s]*$/i, sr: /^[A-ZČĆŽŠĐ\s]*$/i, tr: /^[A-ZÇĞİıÖŞÜ\s]*$/i, uk: /^[А-ЩЬЮЯЄІЇҐ\s]*$/i, ar: /^[ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ\s]*$/ }; var alphanumeric = { en: /^[0-9A-Z]*$/i, cs: /^[0-9A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ]*$/i, da: /^[0-9A-ZÆØÅ]$/i, de: /^[0-9A-ZÄÖÜß]*$/i, es: /^[0-9A-ZÁÉÍÑÓÚÜ]*$/i, fr: /^[0-9A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ]*$/i, lt: /^[0-9A-ZĄČĘĖĮŠŲŪŽ]*$/i, hu: /^[0-9A-ZÁÉÍÓÖŐÚÜŰ]*$/i, nl: /^[0-9A-ZÉËÏÓÖÜ]*$/i, pl: /^[0-9A-ZĄĆĘŚŁŃÓŻŹ]*$/i, pt: /^[0-9A-ZÃÁÀÂÇÉÊÍÕÓÔÚÜ]*$/i, ru: /^[0-9А-ЯЁ]*$/i, sk: /^[0-9A-ZÁÄČĎÉÍĹĽŇÓŔŠŤÚÝŽ]*$/i, sr: /^[0-9A-ZČĆŽŠĐ]*$/i, tr: /^[0-9A-ZÇĞİıÖŞÜ]*$/i, uk: /^[0-9А-ЩЬЮЯЄІЇҐ]*$/i, ar: /^[٠١٢٣٤٥٦٧٨٩0-9ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ]*$/ }; var alphaDash = { en: /^[0-9A-Z_-]*$/i, cs: /^[0-9A-ZÁČĎÉĚÍŇÓŘŠŤÚŮÝŽ_-]*$/i, da: /^[0-9A-ZÆØÅ_-]*$/i, de: /^[0-9A-ZÄÖÜß_-]*$/i, es: /^[0-9A-ZÁÉÍÑÓÚÜ_-]*$/i, fr: /^[0-9A-ZÀÂÆÇÉÈÊËÏÎÔŒÙÛÜŸ_-]*$/i, lt: /^[0-9A-ZĄČĘĖĮŠŲŪŽ_-]*$/i, nl: /^[0-9A-ZÉËÏÓÖÜ_-]*$/i, hu: /^[0-9A-ZÁÉÍÓÖŐÚÜŰ_-]*$/i, pl: /^[0-9A-ZĄĆĘŚŁŃÓŻŹ_-]*$/i, pt: /^[0-9A-ZÃÁÀÂÇÉÊÍÕÓÔÚÜ_-]*$/i, ru: /^[0-9А-ЯЁ_-]*$/i, sk: /^[0-9A-ZÁÄČĎÉÍĹĽŇÓŔŠŤÚÝŽ_-]*$/i, sr: /^[0-9A-ZČĆŽŠĐ_-]*$/i, tr: /^[0-9A-ZÇĞİıÖŞÜ_-]*$/i, uk: /^[0-9А-ЩЬЮЯЄІЇҐ_-]*$/i, ar: /^[٠١٢٣٤٥٦٧٨٩0-9ءآأؤإئابةتثجحخدذرزسشصضطظعغفقكلمنهوىيًٌٍَُِّْٰ_-]*$/ }; var validate = function (value, ref) { if ( ref === void 0 ) ref = []; var locale = ref[0]; if ( locale === void 0 ) locale = null; if (Array.isArray(value)) { return value.every(function (val) { return validate(val, [locale]); }); } // Match at least one locale. if (! locale) { return Object.keys(alpha$1).some(function (loc) { return alpha$1[loc].test(value); }); } return (alpha$1[locale] || alpha$1.en).test(value); }; var validate$1 = function (value, ref) { if ( ref === void 0 ) ref = []; var locale = ref[0]; if ( locale === void 0 ) locale = null; if (Array.isArray(value)) { return value.every(function (val) { return validate$1(val, [locale]); }); } // Match at least one locale. if (! locale) { return Object.keys(alphaDash).some(function (loc) { return alphaDash[loc].test(value); }); } return (alphaDash[locale] || alphaDash.en).test(value); }; var validate$2 = function (value, ref) { if ( ref === void 0 ) ref = []; var locale = ref[0]; if ( locale === void 0 ) locale = null; if (Array.isArray(value)) { return value.every(function (val) { return validate$2(val, [locale]); }); } // Match at least one locale. if (! 
locale) { return Object.keys(alphanumeric).some(function (loc) { return alphanumeric[loc].test(value); }); } return (alphanumeric[locale] || alphanumeric.en).test(value); }; var validate$3 = function (value, ref) { if ( ref === void 0 ) ref = []; var locale = ref[0]; if ( locale === void 0 ) locale = null; if (Array.isArray(value)) { return value.every(function (val) { return validate$3(val, [locale]); }); } // Match at least one locale. if (! locale) { return Object.keys(alphaSpaces).some(function (loc) { return alphaSpaces[loc].test(value); }); } return (alphaSpaces[locale] || alphaSpaces.en).test(value); }; var before = function (value, ref) { var otherValue = ref[0]; var inclusion = ref[1]; var format = ref[2]; if (typeof format === 'undefined') { format = inclusion; inclusion = false; } value = parseDate$1(value, format); otherValue = parseDate$1(otherValue, format); // if either is not valid. if (!value || !otherValue) { return false; } return isBefore(value, otherValue) || (inclusion && isEqual(value, otherValue)); }; var validate$4 = function (value, ref) { var min = ref[0]; var max = ref[1]; if (Array.isArray(value)) { return value.every(function (val) { return validate$4(val, [min, max]); }); } return Number(min) <= value && Number(max) >= value; }; var confirmed = function (value, other) { return String(value) === String(other); }; function unwrapExports (x) { return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x; } function createCommonjsModule(fn, module) { return module = { exports: {} }, fn(module, module.exports), module.exports; } var assertString_1 = createCommonjsModule(function (module, exports) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = assertString; function assertString(input) { var isString = typeof input === 'string' || input instanceof String; if (!isString) { throw new TypeError('This library (validator.js) validates strings only'); } } module.exports = exports['default']; }); unwrapExports(assertString_1); var isCreditCard_1 = createCommonjsModule(function (module, exports) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = isCreditCard; var _assertString2 = _interopRequireDefault(assertString_1); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /* eslint-disable max-len */ var creditCard = /^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\d{3})\d{11}|62[0-9]{14})$/; /* eslint-enable max-len */ function isCreditCard(str) { (0, _assertString2.default)(str); var sanitized = str.replace(/[- ]+/g, ''); if (!creditCard.test(sanitized)) { return false; } var sum = 0; var digit = void 0; var tmpNum = void 0; var shouldDouble = void 0; for (var i = sanitized.length - 1; i >= 0; i--) { digit = sanitized.substring(i, i + 1); tmpNum = parseInt(digit, 10); if (shouldDouble) { tmpNum *= 2; if (tmpNum >= 10) { sum += tmpNum % 10 + 1; } else { sum += tmpNum; } } else { sum += tmpNum; } shouldDouble = !shouldDouble; } return !!(sum % 10 === 0 ? 
sanitized : false); } module.exports = exports['default']; }); var isCreditCard = unwrapExports(isCreditCard_1); var credit_card = function (value) { return isCreditCard(String(value)); }; var validate$5 = function (value, ref) { if ( ref === void 0 ) ref = []; var decimals = ref[0]; if ( decimals === void 0 ) decimals = '*'; var separator = ref[1]; if ( separator === void 0 ) separator = '.'; if (Array.isArray(value)) { return value.every(function (val) { return validate$5(val, [decimals, separator]); }); } if (value === null || value === undefined || value === '') { return true; } // if is 0. if (Number(decimals) === 0) { return /^-?\d*$/.test(value); } var regexPart = decimals === '*' ? '+' : ("{1," + decimals + "}"); var regex = new RegExp(("^-?\\d*(\\" + separator + "\\d" + regexPart + ")?$")); if (! regex.test(value)) { return false; } var parsedValue = parseFloat(value); // eslint-disable-next-line return parsedValue === parsedValue; }; var date_between = function (value, params) { var min; var max; var format; var inclusivity = '()'; if (params.length > 3) { var assign; (assign = params, min = assign[0], max = assign[1], inclusivity = assign[2], format = assign[3]); } else { var assign$1; (assign$1 = params, min = assign$1[0], max = assign$1[1], format = assign$1[2]); } var minDate = parseDate$1(min, format); var maxDate = parseDate$1(max, format); var dateVal = parseDate$1(value, format); if (!minDate || !maxDate || !dateVal) { return false; } if (inclusivity === '()') { return isAfter(dateVal, minDate) && isBefore(dateVal, maxDate); } if (inclusivity === '(]') { return isAfter(dateVal, minDate) && (isEqual(dateVal, maxDate) || isBefore(dateVal, maxDate)); } if (inclusivity === '[)') { return isBefore(dateVal, maxDate) && (isEqual(dateVal, minDate) || isAfter(dateVal, minDate)); } return isEqual(dateVal, maxDate) || isEqual(dateVal, minDate) || (isBefore(dateVal, maxDate) && isAfter(dateVal, minDate)); }; var date_format = function (value, ref) { var format = ref[0]; return !!parseDate$1(value, format); }; var validate$6 = function (value, ref) { var length = ref[0]; if (Array.isArray(value)) { return value.every(function (val) { return validate$6(val, [length]); }); } var strVal = String(value); return /^[0-9]*$/.test(strVal) && strVal.length === Number(length); }; var validateImage = function (file, width, height) { var URL = window.URL || window.webkitURL; return new Promise(function (resolve) { var image = new Image(); image.onerror = function () { return resolve({ valid: false }); }; image.onload = function () { return resolve({ valid: image.width === Number(width) && image.height === Number(height) }); }; image.src = URL.createObjectURL(file); }); }; var dimensions = function (files, ref) { var width = ref[0]; var height = ref[1]; var list = []; for (var i = 0; i < files.length; i++) { // if file is not an image, reject. if (! /\.(jpg|svg|jpeg|png|bmp|gif)$/i.test(files[i].name)) { return false; } list.push(files[i]); } return Promise.all(list.map(function (file) { return validateImage(file, width, height); })); }; var merge_1 = createCommonjsModule(function (module, exports) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = merge; function merge() { var obj = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {}; var defaults = arguments[1]; for (var key in defaults) { if (typeof obj[key] === 'undefined') { obj[key] = defaults[key]; } } return obj; } module.exports = exports['default']; }); unwrapExports(merge_1); var isByteLength_1 = createCommonjsModule(function (module, exports) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; exports.default = isByteLength; var _assertString2 = _interopRequireDefault(assertString_1); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /* eslint-disable prefer-rest-params */ function isByteLength(str, options) { (0, _assertString2.default)(str); var min = void 0; var max = void 0; if ((typeof options === 'undefined' ? 'undefined' : _typeof(options)) === 'object') { min = options.min || 0; max = options.max; } else { // backwards compatibility: isByteLength(str, min [, max]) min = arguments[1]; max = arguments[2]; } var len = encodeURI(str).split(/%..|./).length - 1; return len >= min && (typeof max === 'undefined' || len <= max); } module.exports = exports['default']; }); unwrapExports(isByteLength_1); var isFQDN = createCommonjsModule(function (module, exports) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = isFDQN; var _assertString2 = _interopRequireDefault(assertString_1); var _merge2 = _interopRequireDefault(merge_1); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var default_fqdn_options = { require_tld: true, allow_underscores: false, allow_trailing_dot: false }; function isFDQN(str, options) { (0, _assertString2.default)(str); options = (0, _merge2.default)(options, default_fqdn_options); /* Remove the optional trailing dot before checking validity */ if (options.allow_trailing_dot && str[str.length - 1] === '.') { str = str.substring(0, str.length - 1); } var parts = str.split('.'); if (options.require_tld) { var tld = parts.pop(); if (!parts.length || !/^([a-z\u00a1-\uffff]{2,}|xn[a-z0-9-]{2,})$/i.test(tld)) { return false; } // disallow spaces if (/[\s\u2002-\u200B\u202F\u205F\u3000\uFEFF\uDB40\uDC20]/.test(tld)) { return false; } } for (var part, i = 0; i < parts.length; i++) { part = parts[i]; if (options.allow_underscores) { part = part.replace(/_/g, ''); } if (!/^[a-z\u00a1-\uffff0-9-]+$/i.test(part)) { return false; } // disallow full-width chars if (/[\uff01-\uff5e]/.test(part)) { return false; } if (part[0] === '-' || part[part.length - 1] === '-') { return false; } } return true; } module.exports = exports['default']; }); unwrapExports(isFQDN); var isEmail_1 = createCommonjsModule(function (module, exports) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = isEmail; var _assertString2 = _interopRequireDefault(assertString_1); var _merge2 = _interopRequireDefault(merge_1); var _isByteLength2 = _interopRequireDefault(isByteLength_1); var _isFQDN2 = _interopRequireDefault(isFQDN); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } var default_email_options = { allow_display_name: false, require_display_name: false, allow_utf8_local_part: true, require_tld: true }; /* eslint-disable max-len */ /* eslint-disable no-control-regex */ var displayName = /^[a-z\d!#\$%&'\*\+\-\/=\?\^_`{\|}~\.\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+[a-z\d!#\$%&'\*\+\-\/=\?\^_`{\|}~\,\.\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF\s]*<(.+)>$/i; var emailUserPart = /^[a-z\d!#\$%&'\*\+\-\/=\?\^_`{\|}~]+$/i; var quotedEmailUser = /^([\s\x01-\x08\x0b\x0c\x0e-\x1f\x7f\x21\x23-\x5b\x5d-\x7e]|(\\[\x01-\x09\x0b\x0c\x0d-\x7f]))*$/i; var emailUserUtf8Part = /^[a-z\d!#\$%&'\*\+\-\/=\?\^_`{\|}~\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+$/i; var quotedEmailUserUtf8 = /^([\s\x01-\x08\x0b\x0c\x0e-\x1f\x7f\x21\x23-\x5b\x5d-\x7e\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]|(\\[\x01-\x09\x0b\x0c\x0d-\x7f\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]))*$/i; /* eslint-enable max-len */ /* eslint-enable no-control-regex */ function isEmail(str, options) { (0, _assertString2.default)(str); options = (0, _merge2.default)(options, default_email_options); if (options.require_display_name || options.allow_display_name) { var display_email = str.match(displayName); if (display_email) { str = display_email[1]; } else if (options.require_display_name) { return false; } } var parts = str.split('@'); var domain = parts.pop(); var user = parts.join('@'); var lower_domain = domain.toLowerCase(); if (lower_domain === 'gmail.com' || lower_domain === 'googlemail.com') { user = user.replace(/\./g, '').toLowerCase(); } if (!(0, _isByteLength2.default)(user, { max: 64 }) || !(0, _isByteLength2.default)(domain, { max: 254 })) { return false; } if (!(0, _isFQDN2.default)(domain, { require_tld: options.require_tld })) { return false; } if (user[0] === '"') { user = user.slice(1, user.length - 1); return options.allow_utf8_local_part ? quotedEmailUserUtf8.test(user) : quotedEmailUser.test(user); } var pattern = options.allow_utf8_local_part ? emailUserUtf8Part : emailUserPart; var user_parts = user.split('.'); for (var i = 0; i < user_parts.length; i++) { if (!pattern.test(user_parts[i])) { return false; } } return true; } module.exports = exports['default']; }); var isEmail = unwrapExports(isEmail_1); var validate$7 = function (value) { if (Array.isArray(value)) { return value.every(function (val) { return isEmail(String(val)); }); } return isEmail(String(value)); }; var ext = function (files, extensions) { var regex = new RegExp((".(" + (extensions.join('|')) + ")$"), 'i'); return files.every(function (file) { return regex.test(file.name); }); }; var image = function (files) { return files.every(function (file) { return /\.(jpg|svg|jpeg|png|bmp|gif)$/i.test(file.name); } ); }; var validate$8 = function (value, options) { if (Array.isArray(value)) { return value.every(function (val) { return validate$8(val, options); }); } // eslint-disable-next-line return !! options.filter(function (option) { return option == value; }).length; }; var isIP_1 = createCommonjsModule(function (module, exports) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = isIP; var _assertString2 = _interopRequireDefault(assertString_1); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var ipv4Maybe = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/; var ipv6Block = /^[0-9A-F]{1,4}$/i; function isIP(str) { var version = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : ''; (0, _assertString2.default)(str); version = String(version); if (!version) { return isIP(str, 4) || isIP(str, 6); } else if (version === '4') { if (!ipv4Maybe.test(str)) { return false; } var parts = str.split('.').sort(function (a, b) { return a - b; }); return parts[3] <= 255; } else if (version === '6') { var blocks = str.split(':'); var foundOmissionBlock = false; // marker to indicate :: // At least some OS accept the last 32 bits of an IPv6 address // (i.e. 2 of the blocks) in IPv4 notation, and RFC 3493 says // that '::ffff:a.b.c.d' is valid for IPv4-mapped IPv6 addresses, // and '::a.b.c.d' is deprecated, but also valid. var foundIPv4TransitionBlock = isIP(blocks[blocks.length - 1], 4); var expectedNumberOfBlocks = foundIPv4TransitionBlock ? 7 : 8; if (blocks.length > expectedNumberOfBlocks) { return false; } // initial or final :: if (str === '::') { return true; } else if (str.substr(0, 2) === '::') { blocks.shift(); blocks.shift(); foundOmissionBlock = true; } else if (str.substr(str.length - 2) === '::') { blocks.pop(); blocks.pop(); foundOmissionBlock = true; } for (var i = 0; i < blocks.length; ++i) { // test for a :: which can not be at the string start/end // since those cases have been handled above if (blocks[i] === '' && i > 0 && i < blocks.length - 1) { if (foundOmissionBlock) { return false; // multiple :: in address } foundOmissionBlock = true; } else if (foundIPv4TransitionBlock && i === blocks.length - 1) { // it has been checked before that the last // block is a valid IPv4 address } else if (!ipv6Block.test(blocks[i])) { return false; } } if (foundOmissionBlock) { return blocks.length >= 1; } return blocks.length === expectedNumberOfBlocks; } return false; } module.exports = exports['default']; }); var isIP = unwrapExports(isIP_1); var ip = function (value, ref) { if ( ref === void 0 ) ref = []; var version = ref[0]; if ( version === void 0 ) version = 4; if (Array.isArray(value)) { return value.every(function (val) { return isIP(val, version); }); } return isIP(value, version); }; // /** * Gets the data attribute. the name must be kebab-case. */ var getDataAttribute = function (el, name) { return el.getAttribute(("data-vv-" + name)); }; /** * Checks if the value is either null or undefined. */ var isNullOrUndefined = function (value) { return value === null || value === undefined; }; /** * Sets the data attribute. */ var setDataAttribute = function (el, name, value) { return el.setAttribute(("data-vv-" + name), value); }; /** * Creates a proxy object if available in the environment. */ var createProxy = function (target, handler) { if (typeof Proxy === 'undefined') { return target; } return new Proxy(target, handler); }; /** * Creates the default flags object. */ var createFlags = function () { return ({ untouched: true, touched: false, dirty: false, pristine: true, valid: null, invalid: null, validated: false, pending: false, required: false }); }; /** * Shallow object comparison. */ var isEqual$1 = function (lhs, rhs) { if (lhs instanceof RegExp && rhs instanceof RegExp) { return isEqual$1(lhs.source, rhs.source) && isEqual$1(lhs.flags, rhs.flags); } if (Array.isArray(lhs) && Array.isArray(rhs)) { if (lhs.length !== rhs.length) { return false; } for (var i = 0; i < lhs.length; i++) { if (!isEqual$1(lhs[i], rhs[i])) { return false; } } return true; } // if both are objects, compare each key recursively. 
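// An illustrative sketch of the resulting behaviour (not exhaustive): since the keys of *both*
// sides are walked below, an extra property on either object makes the comparison fail, while
// regexes were already compared by source and flags above.
//   isEqual$1({ a: 1 }, { a: 1 })         //=> true
//   isEqual$1({ a: 1 }, { a: 1, b: 2 })   //=> false (rhs has an extra key)
//   isEqual$1([1, [2, 3]], [1, [2, 3]])   //=> true  (arrays are compared element by element)
//   isEqual$1(/ab/g, /ab/g)               //=> true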
if (isObject(lhs) && isObject(rhs)) { return Object.keys(lhs).every(function (key) { return isEqual$1(lhs[key], rhs[key]); }) && Object.keys(rhs).every(function (key) { return isEqual$1(lhs[key], rhs[key]); }); } return lhs === rhs; }; /** * Determines the input field scope. */ var getScope = function (el) { var scope = getDataAttribute(el, 'scope'); if (isNullOrUndefined(scope) && el.form) { scope = getDataAttribute(el.form, 'scope'); } return !isNullOrUndefined(scope) ? scope : null; }; /** * Gets the value in an object safely. */ var getPath = function (path, target, def) { if ( def === void 0 ) def = undefined; if (!path || !target) { return def; } var value = target; path.split('.').every(function (prop) { if (! Object.prototype.hasOwnProperty.call(value, prop) && value[prop] === undefined) { value = def; return false; } value = value[prop]; return true; }); return value; }; /** * Checks if path exists within an object. */ var hasPath = function (path, target) { var obj = target; return path.split('.').every(function (prop) { if (! Object.prototype.hasOwnProperty.call(obj, prop)) { return false; } obj = obj[prop]; return true; }); }; /** * Parses a rule string expression. */ var parseRule = function (rule) { var params = []; var name = rule.split(':')[0]; if (~rule.indexOf(':')) { params = rule.split(':').slice(1).join(':').split(','); } return { name: name, params: params }; }; /** * Debounces a function. */ var debounce = function (fn, wait, immediate) { if ( wait === void 0 ) wait = 0; if ( immediate === void 0 ) immediate = false; if (wait === 0) { return fn; } var timeout; return function () { var args = [], len = arguments.length; while ( len-- ) args[ len ] = arguments[ len ]; var later = function () { timeout = null; if (!immediate) { fn.apply(void 0, args); } }; /* istanbul ignore next */ var callNow = immediate && !timeout; clearTimeout(timeout); timeout = setTimeout(later, wait); /* istanbul ignore next */ if (callNow) { fn.apply(void 0, args); } }; }; /** * Normalizes the given rules expression. */ var normalizeRules = function (rules) { // if falsy value return an empty object. if (!rules) { return {}; } if (isObject(rules)) { // $FlowFixMe return Object.keys(rules).reduce(function (prev, curr) { var params = []; // $FlowFixMe if (rules[curr] === true) { params = []; } else if (Array.isArray(rules[curr])) { params = rules[curr]; } else { params = [rules[curr]]; } // $FlowFixMe if (rules[curr] !== false) { prev[curr] = params; } return prev; }, {}); } if (typeof rules !== 'string') { warn('rules must be either a string or an object.'); return {}; } return rules.split('|').reduce(function (prev, rule) { var parsedRule = parseRule(rule); if (!parsedRule.name) { return prev; } prev[parsedRule.name] = parsedRule.params; return prev; }, {}); }; /** * Emits a warning to the console. */ var warn = function (message) { console.warn(("[vee-validate] " + message)); // eslint-disable-line }; /** * Creates a branded error object. */ var createError = function (message) { return new Error(("[vee-validate] " + message)); }; /** * Checks if the value is an object. */ var isObject = function (obj) { return obj !== null && obj && typeof obj === 'object' && ! Array.isArray(obj); }; /** * Checks if a function is callable. */ var isCallable = function (func) { return typeof func === 'function'; }; /** * Check if element has the css class on it. 
*/ var hasClass = function (el, className) { if (el.classList) { return el.classList.contains(className); } return !!el.className.match(new RegExp(("(\\s|^)" + className + "(\\s|$)"))); }; /** * Adds the provided css className to the element. */ var addClass = function (el, className) { if (el.classList) { el.classList.add(className); return; } if (!hasClass(el, className)) { el.className += " " + className; } }; /** * Remove the provided css className from the element. */ var removeClass = function (el, className) { if (el.classList) { el.classList.remove(className); return; } if (hasClass(el, className)) { var reg = new RegExp(("(\\s|^)" + className + "(\\s|$)")); el.className = el.className.replace(reg, ' '); } }; /** * Adds or removes a class name on the input depending on the status flag. */ var toggleClass = function (el, className, status) { if (!el || !className) { return; } if (status) { return addClass(el, className); } removeClass(el, className); }; /** * Converts an array-like object to array, provides a simple polyfill for Array.from */ var toArray = function (arrayLike) { if (isCallable(Array.from)) { return Array.from(arrayLike); } var array = []; var length = arrayLike.length; for (var i = 0; i < length; i++) { array.push(arrayLike[i]); } return array; }; /** * Assign polyfill from the mdn. */ var assign = function (target) { var others = [], len = arguments.length - 1; while ( len-- > 0 ) others[ len ] = arguments[ len + 1 ]; /* istanbul ignore else */ if (isCallable(Object.assign)) { return Object.assign.apply(Object, [ target ].concat( others )); } /* istanbul ignore next */ if (target == null) { throw new TypeError('Cannot convert undefined or null to object'); } /* istanbul ignore next */ var to = Object(target); /* istanbul ignore next */ others.forEach(function (arg) { // Skip over if undefined or null if (arg != null) { Object.keys(arg).forEach(function (key) { to[key] = arg[key]; }); } }); /* istanbul ignore next */ return to; }; /** * Generates a unique id. */ var uniqId = function () { return ("_" + (Math.random().toString(36).substr(2, 9))); }; /** * finds the first element that satisfies the predicate callback, polyfills array.find */ var find = function (arrayLike, predicate) { var array = toArray(arrayLike); if (isCallable(array.find)) { return array.find(predicate); } var result; array.some(function (item) { if (predicate(item)) { result = item; return true; } return false; }); return result; }; /** * Returns a suitable event name for the input element. */ var getInputEventName = function (el) { if (el && (el.tagName === 'SELECT' || ~['radio', 'checkbox', 'file'].indexOf(el.type))) { return 'change'; } return 'input'; }; var isBuiltInComponent = function (vnode) { if (!vnode) { return false; } var tag = vnode.componentOptions.tag; return /keep-alive|transition|transition-group/.test(tag); }; /** * @param {Array|String} value * @param {Number} length * @param {Number} max */ var compare = function (value, length, max) { if (max === undefined) { return value.length === length; } // cast to number. 
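// An illustrative sketch of how `compare` behaves (it backs the `length` rule defined further
// below): with two arguments it checks an exact length, with three it checks an inclusive range.
//   compare('hello', 5)        //=> true   (exact length, `max` undefined)
//   compare([1, 2, 3], 2, 5)   //=> true   (3 is within [2, 5])
//   compare('hi', 3, 5)        //=> false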
max = Number(max); return value.length >= length && value.length <= max; }; var length = function (value, ref) { var length = ref[0]; var max = ref[1]; if ( max === void 0 ) max = undefined; length = Number(length); if (value === undefined || value === null) { return false; } if (typeof value === 'number') { value = String(value); } if (!value.length) { value = toArray(value); } return compare(value, length, max); }; var integer = function (value) { if (Array.isArray(value)) { return value.every(function (val) { return /^-?[0-9]+$/.test(String(val)); }); } return /^-?[0-9]+$/.test(String(value)); }; var max$1 = function (value, ref) { var length = ref[0]; if (value === undefined || value === null) { return length >= 0; } return String(value).length <= length; }; var max_value = function (value, ref) { var max = ref[0]; if (Array.isArray(value) || value === null || value === undefined || value === '') { return false; } return Number(value) <= max; }; var mimes = function (files, mimes) { var regex = new RegExp(((mimes.join('|').replace('*', '.+')) + "$"), 'i'); return files.every(function (file) { return regex.test(file.type); }); }; var min$1 = function (value, ref) { var length = ref[0]; if (value === undefined || value === null) { return false; } return String(value).length >= length; }; var min_value = function (value, ref) { var min = ref[0]; if (Array.isArray(value) || value === null || value === undefined || value === '') { return false; } return Number(value) >= min; }; var validate$9 = function (value, options) { if (Array.isArray(value)) { return value.every(function (val) { return validate$9(val, options); }); } // eslint-disable-next-line return ! options.filter(function (option) { return option == value; }).length; }; var numeric = function (value) { if (Array.isArray(value)) { return value.every(function (val) { return /^[0-9]+$/.test(String(val)); }); } return /^[0-9]+$/.test(String(value)); }; var regex = function (value, ref) { var regex = ref[0]; var flags = ref.slice(1); if (regex instanceof RegExp) { return regex.test(value); } return new RegExp(regex, flags).test(String(value)); }; var required = function (value, ref) { if ( ref === void 0 ) ref = []; var invalidateFalse = ref[0]; if ( invalidateFalse === void 0 ) invalidateFalse = false; if (Array.isArray(value)) { return !! value.length; } // incase a field considers `false` as an empty value like checkboxes. if (value === false && invalidateFalse) { return false; } if (value === undefined || value === null) { return false; } return !! String(value).trim().length; }; var size = function (files, ref) { var size = ref[0]; if (isNaN(size)) { return false; } var nSize = Number(size) * 1024; for (var i = 0; i < files.length; i++) { if (files[i].size > nSize) { return false; } } return true; }; var isURL_1 = createCommonjsModule(function (module, exports) { 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = isURL; var _assertString2 = _interopRequireDefault(assertString_1); var _isFQDN2 = _interopRequireDefault(isFQDN); var _isIP2 = _interopRequireDefault(isIP_1); var _merge2 = _interopRequireDefault(merge_1); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } var default_url_options = { protocols: ['http', 'https', 'ftp'], require_tld: true, require_protocol: false, require_host: true, require_valid_protocol: true, allow_underscores: false, allow_trailing_dot: false, allow_protocol_relative_urls: false }; var wrapped_ipv6 = /^\[([^\]]+)\](?::([0-9]+))?$/; function isRegExp(obj) { return Object.prototype.toString.call(obj) === '[object RegExp]'; } function checkHost(host, matches) { for (var i = 0; i < matches.length; i++) { var match = matches[i]; if (host === match || isRegExp(match) && match.test(host)) { return true; } } return false; } function isURL(url, options) { (0, _assertString2.default)(url); if (!url || url.length >= 2083 || /[\s<>]/.test(url)) { return false; } if (url.indexOf('mailto:') === 0) { return false; } options = (0, _merge2.default)(options, default_url_options); var protocol = void 0, auth = void 0, host = void 0, hostname = void 0, port = void 0, port_str = void 0, split = void 0, ipv6 = void 0; split = url.split('#'); url = split.shift(); split = url.split('?'); url = split.shift(); split = url.split('://'); if (split.length > 1) { protocol = split.shift(); if (options.require_valid_protocol && options.protocols.indexOf(protocol) === -1) { return false; } } else if (options.require_protocol) { return false; } else if (options.allow_protocol_relative_urls && url.substr(0, 2) === '//') { split[0] = url.substr(2); } url = split.join('://'); if (url === '') { return false; } split = url.split('/'); url = split.shift(); if (url === '' && !options.require_host) { return true; } split = url.split('@'); if (split.length > 1) { auth = split.shift(); if (auth.indexOf(':') >= 0 && auth.split(':').length > 2) { return false; } } hostname = split.join('@'); port_str = null; ipv6 = null; var ipv6_match = hostname.match(wrapped_ipv6); if (ipv6_match) { host = ''; ipv6 = ipv6_match[1]; port_str = ipv6_match[2] || null; } else { split = hostname.split(':'); host = split.shift(); if (split.length) { port_str = split.join(':'); } } if (port_str !== null) { port = parseInt(port_str, 10); if (!/^[0-9]+$/.test(port_str) || port <= 0 || port > 65535) { return false; } } if (!(0, _isIP2.default)(host) && !(0, _isFQDN2.default)(host, options) && (!ipv6 || !(0, _isIP2.default)(ipv6, 6))) { return false; } host = host || ipv6; if (options.host_whitelist && !checkHost(host, options.host_whitelist)) { return false; } if (options.host_blacklist && checkHost(host, options.host_blacklist)) { return false; } return true; } module.exports = exports['default']; }); var isURL = unwrapExports(isURL_1); var url = function (value, ref) { if ( ref === void 0 ) ref = []; var requireProtocol = ref[0]; if ( requireProtocol === void 0 ) requireProtocol = false; var options = { require_protocol: !!requireProtocol, allow_underscores: true }; if (Array.isArray(value)) { return value.every(function (val) { return isURL(val, options); }); } return isURL(value, options); }; /* eslint-disable camelcase */ var Rules = { after: after, alpha_dash: validate$1, alpha_num: validate$2, alpha_spaces: validate$3, alpha: validate, before: before, between: validate$4, confirmed: confirmed, credit_card: credit_card, date_between: date_between, date_format: date_format, decimal: validate$5, digits: validate$6, dimensions: dimensions, email: validate$7, ext: ext, image: image, in: validate$8, integer: integer, length: length, ip: ip, max: max$1, max_value: max_value, mimes: mimes, min: min$1, min_value: min_value, not_in: validate$9, numeric: numeric, 
regex: regex, required: required, size: size, url: url }; /** * Formates file size. * * @param {Number|String} size */ var formatFileSize = function (size) { var units = ['Byte', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']; var threshold = 1024; size = Number(size) * threshold; var i = size === 0 ? 0 : Math.floor(Math.log(size) / Math.log(threshold)); return (((size / Math.pow(threshold, i)).toFixed(2) * 1) + " " + (units[i])); }; /** * Checks if vee-validate is defined globally. */ var isDefinedGlobally = function () { return typeof VeeValidate !== 'undefined'; }; var messages = { _default: function (field) { return ("The " + field + " value is not valid."); }, after: function (field, ref) { var target = ref[0]; var inclusion = ref[1]; return ("The " + field + " must be after " + (inclusion ? 'or equal to ' : '') + target + "."); }, alpha_dash: function (field) { return ("The " + field + " field may contain alpha-numeric characters as well as dashes and underscores."); }, alpha_num: function (field) { return ("The " + field + " field may only contain alpha-numeric characters."); }, alpha_spaces: function (field) { return ("The " + field + " field may only contain alphabetic characters as well as spaces."); }, alpha: function (field) { return ("The " + field + " field may only contain alphabetic characters."); }, before: function (field, ref) { var target = ref[0]; var inclusion = ref[1]; return ("The " + field + " must be before " + (inclusion ? 'or equal to ' : '') + target + "."); }, between: function (field, ref) { var min = ref[0]; var max = ref[1]; return ("The " + field + " field must be between " + min + " and " + max + "."); }, confirmed: function (field) { return ("The " + field + " confirmation does not match."); }, credit_card: function (field) { return ("The " + field + " field is invalid."); }, date_between: function (field, ref) { var min = ref[0]; var max = ref[1]; return ("The " + field + " must be between " + min + " and " + max + "."); }, date_format: function (field, ref) { var format = ref[0]; return ("The " + field + " must be in the format " + format + "."); }, decimal: function (field, ref) { if ( ref === void 0 ) ref = []; var decimals = ref[0]; if ( decimals === void 0 ) decimals = '*'; return ("The " + field + " field must be numeric and may contain " + (!decimals || decimals === '*' ? 
'' : decimals) + " decimal points."); }, digits: function (field, ref) { var length = ref[0]; return ("The " + field + " field must be numeric and contain exactly " + length + " digits."); }, dimensions: function (field, ref) { var width = ref[0]; var height = ref[1]; return ("The " + field + " field must be " + width + " pixels by " + height + " pixels."); }, email: function (field) { return ("The " + field + " field must be a valid email."); }, ext: function (field) { return ("The " + field + " field must be a valid file."); }, image: function (field) { return ("The " + field + " field must be an image."); }, in: function (field) { return ("The " + field + " field must be a valid value."); }, integer: function (field) { return ("The " + field + " field must be an integer."); }, ip: function (field) { return ("The " + field + " field must be a valid ip address."); }, length: function (field, ref) { var length = ref[0]; var max = ref[1]; if (max) { return ("The " + field + " length must be between " + length + " and " + max + "."); } return ("The " + field + " length must be " + length + "."); }, max: function (field, ref) { var length = ref[0]; return ("The " + field + " field may not be greater than " + length + " characters."); }, max_value: function (field, ref) { var max = ref[0]; return ("The " + field + " field must be " + max + " or less."); }, mimes: function (field) { return ("The " + field + " field must have a valid file type."); }, min: function (field, ref) { var length = ref[0]; return ("The " + field + " field must be at least " + length + " characters."); }, min_value: function (field, ref) { var min = ref[0]; return ("The " + field + " field must be " + min + " or more."); }, not_in: function (field) { return ("The " + field + " field must be a valid value."); }, numeric: function (field) { return ("The " + field + " field may only contain numeric characters."); }, regex: function (field) { return ("The " + field + " field format is invalid."); }, required: function (field) { return ("The " + field + " field is required."); }, size: function (field, ref) { var size = ref[0]; return ("The " + field + " size must be less than " + (formatFileSize(size)) + "."); }, url: function (field) { return ("The " + field + " field is not a valid URL."); } }; var locale$1 = { name: 'en', messages: messages, attributes: {} }; if (isDefinedGlobally()) { // eslint-disable-next-line VeeValidate.Validator.addLocale(locale$1); } // var ErrorBag = function ErrorBag () { this.items = []; }; /** * Adds an error to the internal array. */ ErrorBag.prototype.add = function add (error) { // handle old signature. if (arguments.length > 1) { error = { field: arguments[0], msg: arguments[1], rule: arguments[2], scope: !isNullOrUndefined(arguments[3]) ? arguments[3] : null }; } error.scope = !isNullOrUndefined(error.scope) ? error.scope : null; this.items.push(error); }; /** * Updates a field error with the new field scope. */ ErrorBag.prototype.update = function update (id, error) { var item = find(this.items, function (i) { return i.id === id; }); if (!item) { return; } var idx = this.items.indexOf(item); this.items.splice(idx, 1); item.scope = error.scope; this.items.push(item); }; /** * Gets all error messages from the internal array.
*/ ErrorBag.prototype.all = function all (scope) { if (isNullOrUndefined(scope)) { return this.items.map(function (e) { return e.msg; }); } return this.items.filter(function (e) { return e.scope === scope; }).map(function (e) { return e.msg; }); }; /** * Checks if there are any errors in the internal array. */ ErrorBag.prototype.any = function any (scope) { if (isNullOrUndefined(scope)) { return !!this.items.length; } return !!this.items.filter(function (e) { return e.scope === scope; }).length; }; /** * Removes all items from the internal array. */ ErrorBag.prototype.clear = function clear (scope) { var this$1 = this; if (isNullOrUndefined(scope)) { scope = null; } for (var i = 0; i < this.items.length; ++i) { if (this$1.items[i].scope === scope) { this$1.items.splice(i, 1); --i; } } }; /** * Collects errors into groups or for a specific field. */ ErrorBag.prototype.collect = function collect (field, scope, map) { if ( map === void 0 ) map = true; if (!field) { var collection = {}; this.items.forEach(function (e) { if (! collection[e.field]) { collection[e.field] = []; } collection[e.field].push(map ? e.msg : e); }); return collection; } field = !isNullOrUndefined(field) ? String(field) : field; if (isNullOrUndefined(scope)) { return this.items.filter(function (e) { return e.field === field; }).map(function (e) { return (map ? e.msg : e); }); } return this.items.filter(function (e) { return e.field === field && e.scope === scope; }) .map(function (e) { return (map ? e.msg : e); }); }; /** * Gets the internal array length. */ ErrorBag.prototype.count = function count () { return this.items.length; }; /** * Finds and fetches the first error message for the specified field id. */ ErrorBag.prototype.firstById = function firstById (id) { var error = find(this.items, function (i) { return i.id === id; }); return error ? error.msg : null; }; /** * Gets the first error message for a specific field. */ ErrorBag.prototype.first = function first (field, scope) { var this$1 = this; if ( scope === void 0 ) scope = null; field = !isNullOrUndefined(field) ? String(field) : field; var selector = this._selector(field); var scoped = this._scope(field); if (scoped) { var result = this.first(scoped.name, scoped.scope); // if such result exist, return it. otherwise it could be a field. // with dot in its name. if (result) { return result; } } if (selector) { return this.firstByRule(selector.name, selector.rule, scope); } for (var i = 0; i < this.items.length; ++i) { if (this$1.items[i].field === field && (this$1.items[i].scope === scope)) { return this$1.items[i].msg; } } return null; }; /** * Returns the first error rule for the specified field */ ErrorBag.prototype.firstRule = function firstRule (field, scope) { var errors = this.collect(field, scope, false); return (errors.length && errors[0].rule) || null; }; /** * Checks if the internal array has at least one error for the specified field. */ ErrorBag.prototype.has = function has (field, scope) { if ( scope === void 0 ) scope = null; return !!this.first(field, scope); }; /** * Gets the first error message for a specific field and a rule. */ ErrorBag.prototype.firstByRule = function firstByRule (name, rule, scope) { if ( scope === void 0 ) scope = null; var error = this.collect(name, scope, false).filter(function (e) { return e.rule === rule; })[0]; return (error && error.msg) || null; }; /** * Gets the first error message for a specific field that not match the rule. 
*/ ErrorBag.prototype.firstNot = function firstNot (name, rule, scope) { if ( rule === void 0 ) rule = 'required'; if ( scope === void 0 ) scope = null; var error = this.collect(name, scope, false).filter(function (e) { return e.rule !== rule; })[0]; return (error && error.msg) || null; }; /** * Removes errors by matching against the id. */ ErrorBag.prototype.removeById = function removeById (id) { var this$1 = this; for (var i = 0; i < this.items.length; ++i) { if (this$1.items[i].id === id) { this$1.items.splice(i, 1); --i; } } }; /** * Removes all error messages associated with a specific field. */ ErrorBag.prototype.remove = function remove (field, scope, id) { var this$1 = this; field = !isNullOrUndefined(field) ? String(field) : field; var removeCondition = function (e) { if (e.id && id) { return e.id === id; } if (!isNullOrUndefined(scope)) { return e.field === field && e.scope === scope; } return e.field === field && e.scope === null; }; for (var i = 0; i < this.items.length; ++i) { if (removeCondition(this$1.items[i])) { this$1.items.splice(i, 1); --i; } } }; /** * Get the field attributes if there's a rule selector. */ ErrorBag.prototype._selector = function _selector (field) { if (field.indexOf(':') > -1) { var ref = field.split(':'); var name = ref[0]; var rule = ref[1]; return { name: name, rule: rule }; } return null; }; /** * Get the field scope if specified using dot notation. */ ErrorBag.prototype._scope = function _scope (field) { if (field.indexOf('.') > -1) { var ref = field.split('.'); var scope = ref[0]; var name = ref.slice(1); return { name: name.join('.'), scope: scope }; } return null; }; // var Dictionary = function Dictionary (dictionary) { if ( dictionary === void 0 ) dictionary = {}; this.container = {}; this.merge(dictionary); }; Dictionary.prototype.hasLocale = function hasLocale (locale) { return !!this.container[locale]; }; Dictionary.prototype.setDateFormat = function setDateFormat (locale, format) { if (!this.container[locale]) { this.container[locale] = {}; } this.container[locale].dateFormat = format; }; Dictionary.prototype.getDateFormat = function getDateFormat (locale) { if (!this.container[locale]) { return undefined; } return this.container[locale].dateFormat; }; Dictionary.prototype.getMessage = function getMessage (locale, key, fallback) { if (!this.hasMessage(locale, key)) { return fallback || this._getDefaultMessage(locale); } return this.container[locale].messages[key]; }; /** * Gets a specific message for field. falls back to the rule message. */ Dictionary.prototype.getFieldMessage = function getFieldMessage (locale, field, key) { if (!this.hasLocale(locale)) { return this.getMessage(locale, key); } var dict = this.container[locale].custom && this.container[locale].custom[field]; if (!dict || !dict[key]) { return this.getMessage(locale, key); } return dict[key]; }; Dictionary.prototype._getDefaultMessage = function _getDefaultMessage (locale) { if (this.hasMessage(locale, '_default')) { return this.container[locale].messages._default; } return this.container.en.messages._default; }; Dictionary.prototype.getAttribute = function getAttribute (locale, key, fallback) { if ( fallback === void 0 ) fallback = ''; if (!this.hasAttribute(locale, key)) { return fallback; } return this.container[locale].attributes[key]; }; Dictionary.prototype.hasMessage = function hasMessage (locale, key) { return !! 
( this.hasLocale(locale) && this.container[locale].messages && this.container[locale].messages[key] ); }; Dictionary.prototype.hasAttribute = function hasAttribute (locale, key) { return !! ( this.hasLocale(locale) && this.container[locale].attributes && this.container[locale].attributes[key] ); }; Dictionary.prototype.merge = function merge (dictionary) { this._merge(this.container, dictionary); }; Dictionary.prototype.setMessage = function setMessage (locale, key, message) { if (! this.hasLocale(locale)) { this.container[locale] = { messages: {}, attributes: {} }; } this.container[locale].messages[key] = message; }; Dictionary.prototype.setAttribute = function setAttribute (locale, key, attribute) { if (! this.hasLocale(locale)) { this.container[locale] = { messages: {}, attributes: {} }; } this.container[locale].attributes[key] = attribute; }; Dictionary.prototype._merge = function _merge (target, source) { var this$1 = this; if (! (isObject(target) && isObject(source))) { return target; } Object.keys(source).forEach(function (key) { if (isObject(source[key])) { if (! target[key]) { assign(target, ( obj = {}, obj[key] = {}, obj )); var obj; } this$1._merge(target[key], source[key]); return; } assign(target, ( obj$1 = {}, obj$1[key] = source[key], obj$1 )); var obj$1; }); return target; }; // var defaultConfig = { locale: 'en', delay: 0, errorBagName: 'errors', dictionary: null, strict: true, fieldsBagName: 'fields', classes: false, classNames: null, events: 'input|blur', inject: true, fastExit: true, aria: true, validity: false }; var currentConfig = assign({}, defaultConfig); var Config = function Config () {}; var staticAccessors$1 = { default: {},current: {} }; staticAccessors$1.default.get = function () { return defaultConfig; }; staticAccessors$1.current.get = function () { return currentConfig; }; /** * Merges the config with a new one. */ Config.merge = function merge (config) { currentConfig = assign({}, currentConfig, config); }; /** * Resolves the working config from a Vue instance. */ Config.resolve = function resolve (context) { var selfConfig = getPath('$options.$_veeValidate', context, {}); return assign({}, Config.current, selfConfig); }; Object.defineProperties( Config, staticAccessors$1 ); /** * Generates the options required to construct a field. 
*/ var Generator = function Generator () {}; Generator.generate = function generate (el, binding, vnode) { var model = Generator.resolveModel(binding, vnode); var options = Config.resolve(vnode.context); return { name: Generator.resolveName(el, vnode), el: el, listen: !binding.modifiers.disable, scope: Generator.resolveScope(el, binding, vnode), vm: Generator.makeVM(vnode.context), expression: binding.value, component: vnode.child, classes: options.classes, classNames: options.classNames, getter: Generator.resolveGetter(el, vnode, model), events: Generator.resolveEvents(el, vnode) || options.events, model: model, delay: Generator.resolveDelay(el, vnode, options), rules: Generator.resolveRules(el, binding), initial: !!binding.modifiers.initial, alias: Generator.resolveAlias(el, vnode), validity: options.validity, aria: options.aria, initialValue: Generator.resolveInitialValue(vnode) }; }; Generator.getCtorConfig = function getCtorConfig (vnode) { if (!vnode.child) { return null; } var config = getPath('child.$options.$_veeValidate', vnode); return config; }; /** * * @param {*} el * @param {*} binding */ Generator.resolveRules = function resolveRules (el, binding) { if (!binding || !binding.expression) { return getDataAttribute(el, 'rules'); } if (typeof binding.value === 'string') { return binding.value; } if (~['string', 'object'].indexOf(typeof binding.value.rules)) { return binding.value.rules; } return binding.value; }; /** * @param {*} vnode */ Generator.resolveInitialValue = function resolveInitialValue (vnode) { var model = vnode.data.model || find(vnode.data.directives, function (d) { return d.name === 'model'; }); return model && model.value; }; /** * Creates a non-circular partial VM instance from a Vue instance. * @param {*} vm */ Generator.makeVM = function makeVM (vm) { return { get $el () { return vm.$el; }, get $refs () { return vm.$refs; }, $watch: vm.$watch ? vm.$watch.bind(vm) : function () {}, $validator: vm.$validator ? { errors: vm.$validator.errors, validate: vm.$validator.validate.bind(vm.$validator), update: vm.$validator.update.bind(vm.$validator) } : null }; }; /** * Resolves the delay value. * @param {*} el * @param {*} vnode * @param {Object} options */ Generator.resolveDelay = function resolveDelay (el, vnode, options) { if ( options === void 0 ) options = {}; return getDataAttribute(el, 'delay') || (vnode.child && vnode.child.$attrs && vnode.child.$attrs['data-vv-delay']) || options.delay; }; /** * Resolves the alias for the field. * @param {*} el * @param {*} vnode * @return {Function} alias getter */ Generator.resolveAlias = function resolveAlias (el, vnode) { return function () { return getDataAttribute(el, 'as') || (vnode.child && vnode.child.$attrs && vnode.child.$attrs['data-vv-as']) || el.title || null; }; }; /** * Resolves the events to validate in response to. * @param {*} el * @param {*} vnode */ Generator.resolveEvents = function resolveEvents (el, vnode) { var events = getDataAttribute(el, 'validate-on'); if (!events && vnode.child && vnode.child.$attrs) { events = vnode.child.$attrs['data-vv-validate-on']; } if (!events && vnode.child) { var config = Generator.getCtorConfig(vnode); events = config && config.events; } return events; }; /** * Resolves the scope for the field. 
* @param {*} el * @param {*} binding */ Generator.resolveScope = function resolveScope (el, binding, vnode) { if ( vnode === void 0 ) vnode = {}; var scope = null; if (isObject(binding.value)) { scope = binding.value.scope; } if (vnode.child && isNullOrUndefined(scope)) { scope = vnode.child.$attrs && vnode.child.$attrs['data-vv-scope']; } return !isNullOrUndefined(scope) ? scope : getScope(el); }; /** * Checks if the node directives contains a v-model or a specified arg. * Args take priority over models. * * @return {Object} */ Generator.resolveModel = function resolveModel (binding, vnode) { if (binding.arg) { return binding.arg; } if (isObject(binding.value) && binding.value.arg) { return binding.value.arg; } var model = vnode.data.model || find(vnode.data.directives, function (d) { return d.name === 'model'; }); if (!model) { return null; } var watchable = /^[a-z_]+[0-9]*(\w*\.[a-z_]\w*)*$/i.test(model.expression) && hasPath(model.expression, vnode.context); if (!watchable) { return null; } return model.expression; }; /** * Resolves the field name to trigger validations. * @return {String} The field name. */ Generator.resolveName = function resolveName (el, vnode) { var name = getDataAttribute(el, 'name'); if (!name && !vnode.child) { return el.name; } if (!name && vnode.child && vnode.child.$attrs) { name = vnode.child.$attrs['data-vv-name'] || vnode.child.$attrs['name']; } if (!name && vnode.child) { var config = Generator.getCtorConfig(vnode); if (config && isCallable(config.name)) { var boundGetter = config.name.bind(vnode.child); return boundGetter(); } return vnode.child.name; } return name; }; /** * Returns a value getter input type. */ Generator.resolveGetter = function resolveGetter (el, vnode, model) { if (model) { return function () { return getPath(model, vnode.context); }; } if (vnode.child) { var path = getDataAttribute(el, 'value-path') || (vnode.child.$attrs && vnode.child.$attrs['data-vv-value-path']); if (path) { return function () { return getPath(path, vnode.child); }; } var config = Generator.getCtorConfig(vnode); if (config && isCallable(config.value)) { var boundGetter = config.value.bind(vnode.child); return function () { return boundGetter(); }; } return function () { return vnode.child.value; }; } switch (el.type) { case 'checkbox': return function () { var els = document.querySelectorAll(("input[name=\"" + (el.name) + "\"]")); els = toArray(els).filter(function (el) { return el.checked; }); if (!els.length) { return undefined; } return els.map(function (checkbox) { return checkbox.value; }); }; case 'radio': return function () { var els = document.querySelectorAll(("input[name=\"" + (el.name) + "\"]")); var elm = find(els, function (el) { return el.checked; }); return elm && elm.value; }; case 'file': return function (context) { return toArray(el.files); }; case 'select-multiple': return function () { return toArray(el.options).filter(function (opt) { return opt.selected; }).map(function (opt) { return opt.value; }); }; default: return function () { return el && el.value; }; } }; // var DEFAULT_OPTIONS = { targetOf: null, initial: false, scope: null, listen: true, name: null, active: true, required: false, rules: {}, vm: null, classes: false, validity: true, aria: true, events: 'input|blur', delay: 0, classNames: { touched: 'touched', // the control has been blurred untouched: 'untouched', // the control hasn't been blurred valid: 'valid', // model is valid invalid: 'invalid', // model is invalid pristine: 'pristine', // control has not been interacted with 
dirty: 'dirty' // control has been interacted with } }; var Field = function Field (el, options) { if ( options === void 0 ) options = {}; this.id = uniqId(); this.el = el; this.updated = false; this.dependencies = []; this.watchers = []; this.events = []; this.rules = {}; if (!this.isHeadless && !options.targetOf) { setDataAttribute(this.el, 'id', this.id); // cache field id if it is independent and has a root element. } options = assign({}, DEFAULT_OPTIONS, options); this.validity = options.validity; this.aria = options.aria; this.flags = createFlags(); this.vm = options.vm; this.component = options.component; this.ctorConfig = this.component ? getPath('$options.$_veeValidate', this.component) : undefined; this.update(options); this.updated = false; }; var prototypeAccessors$1 = { isVue: {},validator: {},isRequired: {},isDisabled: {},isHeadless: {},displayName: {},value: {},rejectsFalse: {} }; prototypeAccessors$1.isVue.get = function () { return !!this.component; }; prototypeAccessors$1.validator.get = function () { if (!this.vm || !this.vm.$validator) { warn('No validator instance detected.'); return { validate: function () {} }; } return this.vm.$validator; }; prototypeAccessors$1.isRequired.get = function () { return !!this.rules.required; }; prototypeAccessors$1.isDisabled.get = function () { return !!(this.component && this.component.disabled) || !!(this.el && this.el.disabled); }; prototypeAccessors$1.isHeadless.get = function () { return !this.el; }; /** * Gets the display name (user-friendly name). */ prototypeAccessors$1.displayName.get = function () { return isCallable(this.alias) ? this.alias() : this.alias; }; /** * Gets the input value. */ prototypeAccessors$1.value.get = function () { if (!isCallable(this.getter)) { return undefined; } return this.getter(); }; /** * If the field rejects false as a valid value for the required rule. */ prototypeAccessors$1.rejectsFalse.get = function () { if (this.isVue && this.ctorConfig) { return !!this.ctorConfig.rejectsFalse; } if (this.isHeadless) { return false; } return this.el.type === 'checkbox'; }; /** * Determines if the instance matches the options provided. */ Field.prototype.matches = function matches (options) { if (options.id) { return this.id === options.id; } if (options.name === undefined && options.scope === undefined) { return true; } if (options.scope === undefined) { return this.name === options.name; } if (options.name === undefined) { return this.scope === options.scope; } return options.name === this.name && options.scope === this.scope; }; /** * Updates the field with changed data. */ Field.prototype.update = function update (options) { this.targetOf = options.targetOf || null; this.initial = options.initial || this.initial || false; // update errors scope if the field scope was changed. if (this.updated && !isNullOrUndefined(options.scope) && options.scope !== this.scope && isCallable(this.validator.update)) { this.validator.update(this.id, { scope: options.scope }); } this.scope = !isNullOrUndefined(options.scope) ? options.scope : !isNullOrUndefined(this.scope) ? this.scope : null; this.name = (!isNullOrUndefined(options.name) ? String(options.name) : options.name) || this.name || null; this.rules = options.rules !== undefined ? normalizeRules(options.rules) : this.rules; this.model = options.model || this.model; this.listen = options.listen !== undefined ? 
options.listen : this.listen; this.classes = options.classes || this.classes || false; this.classNames = options.classNames || this.classNames || DEFAULT_OPTIONS.classNames; this.alias = options.alias || this.alias; this.getter = isCallable(options.getter) ? options.getter : this.getter; this.delay = options.delay || this.delay || 0; this.events = typeof options.events === 'string' && options.events.length ? options.events.split('|') : this.events; this.updateDependencies(); this.addActionListeners(); // update required flag flags if (options.rules !== undefined) { this.flags.required = this.isRequired; } // validate if it was validated before and field was updated and there was a rules mutation. if (this.flags.validated && options.rules !== undefined && this.updated) { this.validator.validate(("#" + (this.id))); } this.updated = true; // no need to continue. if (this.isHeadless) { return; } this.updateClasses(); this.addValueListeners(); this.updateAriaAttrs(); }; /** * Resets field flags and errors. */ Field.prototype.reset = function reset () { var this$1 = this; var def = createFlags(); Object.keys(this.flags).forEach(function (flag) { this$1.flags[flag] = def[flag]; }); this.addActionListeners(); this.updateClasses(); this.updateAriaAttrs(); this.updateCustomValidity(); }; /** * Sets the flags and their negated counterparts, and updates the classes and re-adds action listeners. */ Field.prototype.setFlags = function setFlags (flags) { var this$1 = this; var negated = { pristine: 'dirty', dirty: 'pristine', valid: 'invalid', invalid: 'valid', touched: 'untouched', untouched: 'touched' }; Object.keys(flags).forEach(function (flag) { this$1.flags[flag] = flags[flag]; // if it has a negation and was not specified, set it as well. if (negated[flag] && flags[negated[flag]] === undefined) { this$1.flags[negated[flag]] = !flags[flag]; } }); if ( flags.untouched !== undefined || flags.touched !== undefined || flags.dirty !== undefined || flags.pristine !== undefined ) { this.addActionListeners(); } this.updateClasses(); this.updateAriaAttrs(); this.updateCustomValidity(); }; /** * Determines if the field requires references to target fields. */ Field.prototype.updateDependencies = function updateDependencies () { var this$1 = this; // reset dependencies. this.dependencies.forEach(function (d) { return d.field.destroy(); }); this.dependencies = []; // we get the selectors for each field. var fields = Object.keys(this.rules).reduce(function (prev, r) { if (r === 'confirmed') { prev.push({ selector: this$1.rules[r][0] || ((this$1.name) + "_confirmation"), name: r }); } else if (/after|before/.test(r)) { prev.push({ selector: this$1.rules[r][0], name: r }); } return prev; }, []); if (!fields.length || !this.vm || !this.vm.$el) { return; } // must be contained within the same component, so we use the vm root element constrain our dom search. fields.forEach(function (ref) { var selector = ref.selector; var name = ref.name; var el = null; // vue ref selector. 
if (selector[0] === '$') { el = this$1.vm.$refs[selector.slice(1)]; } else { try { // try query selector el = this$1.vm.$el.querySelector(selector); } catch (err) { el = null; } } if (!el) { try { el = this$1.vm.$el.querySelector(("input[name=\"" + selector + "\"]")); } catch (err) { el = null; } } if (!el) { return; } var options = { vm: this$1.vm, classes: this$1.classes, classNames: this$1.classNames, delay: this$1.delay, scope: this$1.scope, events: this$1.events.join('|'), initial: this$1.initial, targetOf: this$1.id }; // probably a component. if (isCallable(el.$watch)) { options.component = el; options.el = el.$el; options.alias = Generator.resolveAlias(el.$el, { child: el }); options.getter = Generator.resolveGetter(el.$el, { child: el }); } else { options.el = el; options.alias = Generator.resolveAlias(el, {}); options.getter = Generator.resolveGetter(el, {}); } this$1.dependencies.push({ name: name, field: new Field(options.el, options) }); }); }; /** * Removes listeners. */ Field.prototype.unwatch = function unwatch (tag) { if ( tag === void 0 ) tag = null; if (!tag) { this.watchers.forEach(function (w) { return w.unwatch(); }); this.watchers = []; return; } this.watchers.filter(function (w) { return tag.test(w.tag); }).forEach(function (w) { return w.unwatch(); }); this.watchers = this.watchers.filter(function (w) { return !tag.test(w.tag); }); }; /** * Updates the element classes depending on each field flag status. */ Field.prototype.updateClasses = function updateClasses () { if (!this.classes) { return; } toggleClass(this.el, this.classNames.dirty, this.flags.dirty); toggleClass(this.el, this.classNames.pristine, this.flags.pristine); toggleClass(this.el, this.classNames.valid, !!this.flags.valid); toggleClass(this.el, this.classNames.invalid, !!this.flags.invalid); toggleClass(this.el, this.classNames.touched, this.flags.touched); toggleClass(this.el, this.classNames.untouched, this.flags.untouched); }; /** * Adds the listeners required for automatic classes and some flags. */ Field.prototype.addActionListeners = function addActionListeners () { var this$1 = this; // remove previous listeners. this.unwatch(/class/); var onBlur = function () { this$1.flags.touched = true; this$1.flags.untouched = false; if (this$1.classes) { toggleClass(this$1.el, this$1.classNames.touched, true); toggleClass(this$1.el, this$1.classNames.untouched, false); } // only needed once. this$1.unwatch(/^class_blur$/); }; var inputEvent = getInputEventName(this.el); var onInput = function () { this$1.flags.dirty = true; this$1.flags.pristine = false; if (this$1.classes) { toggleClass(this$1.el, this$1.classNames.pristine, false); toggleClass(this$1.el, this$1.classNames.dirty, true); } // only needed once. this$1.unwatch(/^class_input$/); }; if (this.isVue && isCallable(this.component.$once)) { this.component.$once('input', onInput); this.component.$once('blur', onBlur); this.watchers.push({ tag: 'class_input', unwatch: function () { this$1.component.$off('input', onInput); } }); this.watchers.push({ tag: 'class_blur', unwatch: function () { this$1.component.$off('blur', onBlur); } }); return; } if (this.isHeadless) { return; } this.el.addEventListener(inputEvent, onInput); // Checkboxes and radio buttons on Mac don't emit blur naturally, so we listen on click instead. var blurEvent = ['radio', 'checkbox'].indexOf(this.el.type) === -1 ? 
'blur' : 'click'; this.el.addEventListener(blurEvent, onBlur); this.watchers.push({ tag: 'class_input', unwatch: function () { this$1.el.removeEventListener(inputEvent, onInput); } }); this.watchers.push({ tag: 'class_blur', unwatch: function () { this$1.el.removeEventListener(blurEvent, onBlur); } }); }; /** * Adds the listeners required for validation. */ Field.prototype.addValueListeners = function addValueListeners () { var this$1 = this; this.unwatch(/^input_.+/); if (!this.listen) { return; } var fn = this.targetOf ? function () { this$1.validator.validate(("#" + (this$1.targetOf))); } : function () { var args = [], len = arguments.length; while ( len-- ) args[ len ] = arguments[ len ]; // if its a DOM event, resolve the value, otherwise use the first parameter as the value. if (args.length === 0 || (isCallable(Event) && args[0] instanceof Event) || (args[0] && args[0].srcElement)) { args[0] = this$1.value; } this$1.validator.validate(("#" + (this$1.id)), args[0]); }; var validate = debounce(fn, this.delay); var inputEvent = getInputEventName(this.el); // replace input event with suitable one. var events = this.events.map(function (e) { return e === 'input' ? inputEvent : e; }); // if there is a watchable model and an on input validation is requested. if (this.model && events.indexOf(inputEvent) !== -1) { var unwatch = this.vm.$watch(this.model, validate); this.watchers.push({ tag: 'input_model', unwatch: unwatch }); // filter out input event as it is already handled by the watcher API. events = events.filter(function (e) { return e !== inputEvent; }); } // Add events. events.forEach(function (e) { if (this$1.isVue) { this$1.component.$on(e, validate); this$1.watchers.push({ tag: 'input_vue', unwatch: function () { this$1.component.$off(e, validate); } }); return; } if (~['radio', 'checkbox'].indexOf(this$1.el.type)) { var els = document.querySelectorAll(("input[name=\"" + (this$1.el.name) + "\"]")); toArray(els).forEach(function (el) { el.addEventListener(e, validate); this$1.watchers.push({ tag: 'input_native', unwatch: function () { el.removeEventListener(e, validate); } }); }); return; } this$1.el.addEventListener(e, validate); this$1.watchers.push({ tag: 'input_native', unwatch: function () { this$1.el.removeEventListener(e, validate); } }); }); }; /** * Updates aria attributes on the element. */ Field.prototype.updateAriaAttrs = function updateAriaAttrs () { if (!this.aria || this.isHeadless || !isCallable(this.el.setAttribute)) { return; } this.el.setAttribute('aria-required', this.isRequired ? 'true' : 'false'); this.el.setAttribute('aria-invalid', this.flags.invalid ? 'true' : 'false'); }; /** * Updates the custom validity for the field. */ Field.prototype.updateCustomValidity = function updateCustomValidity () { if (!this.validity || this.isHeadless || !isCallable(this.el.setCustomValidity)) { return; } this.el.setCustomValidity(this.flags.valid ? '' : (this.validator.errors.firstById(this.id) || '')); }; /** * Removes all listeners. */ Field.prototype.destroy = function destroy () { this.watchers.forEach(function (w) { return w.unwatch(); }); this.watchers = []; this.dependencies.forEach(function (d) { return d.field.destroy(); }); this.dependencies = []; }; Object.defineProperties( Field.prototype, prototypeAccessors$1 ); // var FieldBag = function FieldBag () { this.items = []; }; var prototypeAccessors$2 = { length: {} }; /** * Gets the current items length. 
*/ prototypeAccessors$2.length.get = function () { return this.items.length; }; /** * Finds the first field that matches the provided matcher object. */ FieldBag.prototype.find = function find$1 (matcher) { return find(this.items, function (item) { return item.matches(matcher); }); }; /** * Filters the items down to the matched fields. */ FieldBag.prototype.filter = function filter (matcher) { // multiple matchers to be tried. if (Array.isArray(matcher)) { return this.items.filter(function (item) { return matcher.some(function (m) { return item.matches(m); }); }); } return this.items.filter(function (item) { return item.matches(matcher); }); }; /** * Maps the field items using the mapping function. */ FieldBag.prototype.map = function map (mapper) { return this.items.map(mapper); }; /** * Finds and removes the first field that matches the provided matcher object, returns the removed item. */ FieldBag.prototype.remove = function remove (matcher) { var item = null; if (matcher instanceof Field) { item = matcher; } else { item = this.find(matcher); } if (!item) { return null; } var index = this.items.indexOf(item); this.items.splice(index, 1); return item; }; /** * Adds a field item to the list. */ FieldBag.prototype.push = function push (item) { if (! (item instanceof Field)) { throw createError('FieldBag only accepts instances of Field that has an id defined.'); } if (!item.id) { throw createError('Field id must be defined.'); } if (this.find({ id: item.id })) { throw createError(("Field with id " + (item.id) + " is already added.")); } this.items.push(item); }; Object.defineProperties( FieldBag.prototype, prototypeAccessors$2 ); // var RULES = {}; var LOCALE = 'en'; var STRICT_MODE = true; var DICTIONARY = new Dictionary({ en: { messages: {}, attributes: {}, custom: {} } }); var Validator = function Validator (validations, options) { var this$1 = this; if ( options === void 0 ) options = { vm: null, fastExit: true }; this.strict = STRICT_MODE; this.errors = new ErrorBag(); this.fields = new FieldBag(); this.flags = {}; this._createFields(validations); this.paused = false; this.fastExit = options.fastExit || false; this.ownerId = options.vm && options.vm._uid; // create it statically since we don't need constant access to the vm. this.reset = options.vm && isCallable(options.vm.$nextTick) ? function () { return new Promise(function (resolve, reject) { options.vm.$nextTick(function () { this$1.fields.items.forEach(function (i) { return i.reset(); }); this$1.errors.clear(); resolve(); }); }); } : function () { return new Promise(function (resolve, reject) { this$1.fields.items.forEach(function (i) { return i.reset(); }); this$1.errors.clear(); resolve(); }); }; /* istanbul ignore next */ this.clean = function () { warn('validator.clean is marked for deprecation, please use validator.reset instead.'); this$1.reset(); }; }; var prototypeAccessors = { dictionary: {},locale: {},rules: {} }; var staticAccessors = { dictionary: {},locale: {},rules: {} }; /** * Getter for the dictionary. */ prototypeAccessors.dictionary.get = function () { return DICTIONARY; }; /** * Static Getter for the dictionary. */ staticAccessors.dictionary.get = function () { return DICTIONARY; }; /** * Getter for the current locale. */ prototypeAccessors.locale.get = function () { return LOCALE; }; /** * Setter for the validator locale. */ prototypeAccessors.locale.set = function (value) { Validator.locale = value; }; /** * Static getter for the validator locale. 
*/ staticAccessors.locale.get = function () { return LOCALE; }; /** * Static setter for the validator locale. */ staticAccessors.locale.set = function (value) { /* istanbul ignore if */ if (!DICTIONARY.hasLocale(value)) { // eslint-disable-next-line warn('You are setting the validator locale to a locale that is not defined in the dictionary. English messages may still be generated.'); } LOCALE = value; }; /** * Getter for the rules object. */ prototypeAccessors.rules.get = function () { return RULES; }; /** * Static Getter for the rules object. */ staticAccessors.rules.get = function () { return RULES; }; /** * Static constructor. */ Validator.create = function create (validations, options) { return new Validator(validations, options); }; /** * Adds a custom validator to the list of validation rules. */ Validator.extend = function extend (name, validator) { Validator._guardExtend(name, validator); Validator._merge(name, validator); }; /** * Removes a rule from the list of validators. */ Validator.remove = function remove (name) { delete RULES[name]; }; /** * Sets the default locale for all validators. * @deprecated */ Validator.setLocale = function setLocale (language) { if ( language === void 0 ) language = 'en'; Validator.locale = language; }; /** * @deprecated */ Validator.installDateTimeValidators = function installDateTimeValidators () { /* istanbul ignore next */ warn('Date validations are now installed by default, you no longer need to install it.'); }; /** * @deprecated */ Validator.prototype.installDateTimeValidators = function installDateTimeValidators () { /* istanbul ignore next */ warn('Date validations are now installed by default, you no longer need to install it.'); }; /** * Sets the operating mode for all newly created validators. * strictMode = true: Values without a rule are invalid and cause failure. * strictMode = false: Values without a rule are valid and are skipped. */ Validator.setStrictMode = function setStrictMode (strictMode) { if ( strictMode === void 0 ) strictMode = true; STRICT_MODE = strictMode; }; /** * Updates the dictionary, overwriting existing values and adding new ones. * @deprecated */ Validator.updateDictionary = function updateDictionary (data) { DICTIONARY.merge(data); }; /** * Adds a locale object to the dictionary. * @deprecated */ Validator.addLocale = function addLocale (locale) { if (! locale.name) { warn('Your locale must have a name property'); return; } this.updateDictionary(( obj = {}, obj[locale.name] = locale, obj )); var obj; }; /** * Adds a locale object to the dictionary. * @deprecated * @param {Object} locale */ Validator.prototype.addLocale = function addLocale (locale) { Validator.addLocale(locale); }; /** * Adds and sets the current locale for the validator. */ Validator.prototype.localize = function localize (lang, dictionary) { Validator.localize(lang, dictionary); }; /** * Adds and sets the current locale for the validator. */ Validator.localize = function localize (lang, dictionary) { // merge the dictionary. if (dictionary) { dictionary = assign({}, dictionary, { name: lang }); Validator.addLocale(dictionary); } // set the locale. Validator.locale = lang; }; /** * Registers a field to be validated. */ Validator.prototype.attach = function attach (field) { // deprecate: handle old signature. if (arguments.length > 1) { field = assign({}, { name: arguments[0], rules: arguments[1] }, arguments[2] || { vm: { $validator: this } }); } // fixes initial value detection with v-model and select elements. 
var value = field.initialValue; if (!(field instanceof Field)) { field = new Field(field.el || null, field); } this.fields.push(field); // validate the field initially if (field.initial) { this.validate(("#" + (field.id)), value || field.value); } else { this._validate(field, value || field.value, true).then(function (valid) { field.flags.valid = valid; field.flags.invalid = !valid; }); } this._addFlag(field, field.scope); return field; }; /** * Sets the flags on a field. */ Validator.prototype.flag = function flag (name, flags) { var field = this._resolveField(name); if (! field || !flags) { return; } field.setFlags(flags); }; /** * Removes a field from the validator. */ Validator.prototype.detach = function detach (name, scope) { var field = name instanceof Field ? name : this._resolveField(name, scope); if (!field) { return; } field.destroy(); this.errors.remove(field.name, field.scope, field.id); this.fields.remove(field); var flags = this.flags; if (!isNullOrUndefined(field.scope) && flags[("$" + (field.scope))]) { delete flags[("$" + (field.scope))][field.name]; } else if (isNullOrUndefined(field.scope)) { delete flags[field.name]; } this.flags = assign({}, flags); }; /** * Adds a custom validator to the list of validation rules. */ Validator.prototype.extend = function extend (name, validator) { Validator.extend(name, validator); }; /** * Updates a field, updating both errors and flags. */ Validator.prototype.update = function update (id, ref) { var scope = ref.scope; var field = this._resolveField(("#" + id)); this.errors.update(id, { scope: scope }); // remove old scope. if (!isNullOrUndefined(field.scope) && this.flags[("$" + (field.scope))]) { delete this.flags[("$" + (field.scope))][field.name]; } else if (isNullOrUndefined(field.scope)) { delete this.flags[field.name]; } this._addFlag(field, scope); }; /** * Removes a rule from the list of validators. */ Validator.prototype.remove = function remove (name) { Validator.remove(name); }; /** * Sets the validator current language. * @deprecated */ Validator.prototype.setLocale = function setLocale (language) { this.locale = language; }; /** * Updates the messages dictionary, overwriting existing values and adding new ones. * @deprecated */ Validator.prototype.updateDictionary = function updateDictionary (data) { Validator.updateDictionary(data); }; /** * Validates a value against a registered field validations. */ Validator.prototype.validate = function validate (name, value, scope) { if ( scope === void 0 ) scope = null; if (this.paused) { return Promise.resolve(true); } // overload to validate all. if (arguments.length === 0) { return this.validateScopes(); } // overload to validate scope-less fields. if (arguments.length === 1 && arguments[0] === '*') { return this.validateAll(); } // overload to validate a scope. if (arguments.length === 1 && typeof arguments[0] === 'string' && /^(.+)\.\*$/.test(arguments[0])) { var matched = arguments[0].match(/^(.+)\.\*$/)[1]; return this.validateAll(matched); } var field = this._resolveField(name, scope); if (!field) { return this._handleFieldNotFound(name, scope); } this.errors.remove(field.name, field.scope, field.id); field.flags.pending = true; if (arguments.length === 1) { value = field.value; } var silentRun = field.isDisabled; return this._validate(field, value, silentRun).then(function (result) { field.setFlags({ pending: false, valid: result, validated: true }); if (silentRun) { return Promise.resolve(true); } return result; }); }; /** * Pauses the validator. 
*/ Validator.prototype.pause = function pause () { this.paused = true; return this; }; /** * Resumes the validator. */ Validator.prototype.resume = function resume () { this.paused = false; return this; }; /** * Validates each value against the corresponding field validations. */ Validator.prototype.validateAll = function validateAll (values) { var arguments$1 = arguments; var this$1 = this; if (this.paused) { return Promise.resolve(true); } var matcher = null; var providedValues = false; if (typeof values === 'string') { matcher = { scope: values }; } else if (isObject(values)) { matcher = Object.keys(values).map(function (key) { return { name: key, scope: arguments$1[1] || null }; }); providedValues = true; } else if (arguments.length === 0) { matcher = { scope: null }; // global scope. } else if (Array.isArray(values)) { matcher = values.map(function (key) { return { name: key, scope: arguments$1[1] || null }; }); } var promises = this.fields.filter(matcher).map(function (field) { return this$1.validate( ("#" + (field.id)), providedValues ? values[field.name] : field.value ); }); return Promise.all(promises).then(function (results) { return results.every(function (t) { return t; }); }); }; /** * Validates all scopes. */ Validator.prototype.validateScopes = function validateScopes () { var this$1 = this; if (this.paused) { return Promise.resolve(true); } var promises = this.fields.map(function (field) { return this$1.validate( ("#" + (field.id)), field.value ); }); return Promise.all(promises).then(function (results) { return results.every(function (t) { return t; }); }); }; /** * Creates the fields to be validated. */ Validator.prototype._createFields = function _createFields (validations) { var this$1 = this; if (!validations) { return; } Object.keys(validations).forEach(function (field) { var options = assign({}, { name: field, rules: validations[field] }); this$1.attach(options); }); }; /** * Date rules need the existence of a format, so date_format must be supplied. */ Validator.prototype._getDateFormat = function _getDateFormat (validations) { var format = null; if (validations.date_format && Array.isArray(validations.date_format)) { format = validations.date_format[0]; } return format || this.dictionary.getDateFormat(this.locale); }; /** * Checks if the passed rule is a date rule. */ Validator.prototype._isADateRule = function _isADateRule (rule) { return !! ~['after', 'before', 'date_between', 'date_format'].indexOf(rule); }; /** * Formats an error message for field and a rule. */ Validator.prototype._formatErrorMessage = function _formatErrorMessage (field, rule, data, targetName) { if ( data === void 0 ) data = {}; if ( targetName === void 0 ) targetName = null; var name = this._getFieldDisplayName(field); var params = this._getLocalizedParams(rule, targetName); // Defaults to english message. if (!this.dictionary.hasLocale(LOCALE)) { var msg$1 = this.dictionary.getFieldMessage('en', field.name, rule.name); return isCallable(msg$1) ? msg$1(name, params, data) : msg$1; } var msg = this.dictionary.getFieldMessage(LOCALE, field.name, rule.name); return isCallable(msg) ? msg(name, params, data) : msg; }; /** * Translates the parameters passed to the rule (mainly for target fields). 
*/ Validator.prototype._getLocalizedParams = function _getLocalizedParams (rule, targetName) { if ( targetName === void 0 ) targetName = null; if (~['after', 'before', 'confirmed'].indexOf(rule.name) && rule.params && rule.params[0]) { var localizedName = targetName || this.dictionary.getAttribute(LOCALE, rule.params[0], rule.params[0]); return [localizedName].concat(rule.params.slice(1)); } return rule.params; }; /** * Resolves an appropriate display name, first checking 'data-as' or the registered 'prettyName' */ Validator.prototype._getFieldDisplayName = function _getFieldDisplayName (field) { return field.displayName || this.dictionary.getAttribute(LOCALE, field.name, field.name); }; /** * Adds a field flags to the flags collection. */ Validator.prototype._addFlag = function _addFlag (field, scope) { if ( scope === void 0 ) scope = null; if (isNullOrUndefined(scope)) { this.flags = assign({}, this.flags, ( obj = {}, obj[("" + (field.name))] = field.flags, obj )); var obj; return; } var scopeObj = assign({}, this.flags[("$" + scope)] || {}, ( obj$1 = {}, obj$1[("" + (field.name))] = field.flags, obj$1 )); var obj$1; this.flags = assign({}, this.flags, ( obj$2 = {}, obj$2[("$" + scope)] = scopeObj, obj$2 )); var obj$2; }; /** * Tests a single input value against a rule. */ Validator.prototype._test = function _test (field, value, rule, silent) { var this$1 = this; var validator = RULES[rule.name]; var params = Array.isArray(rule.params) ? toArray(rule.params) : []; var targetName = null; if (!validator || typeof validator !== 'function') { throw createError(("No such validator '" + (rule.name) + "' exists.")); } // has field dependencies. if (/(confirmed|after|before)/.test(rule.name)) { var target = find(field.dependencies, function (d) { return d.name === rule.name; }); if (target) { targetName = target.field.displayName; params = [target.field.value].concat(params.slice(1)); } } else if (rule.name === 'required' && field.rejectsFalse) { // invalidate false if no args were specified and the field rejects false by default. params = params.length ? params : [true]; } if (this._isADateRule(rule.name)) { var dateFormat = this._getDateFormat(field.rules); if (rule.name !== 'date_format') { params.push(dateFormat); } } var result = validator(value, params); // If it is a promise. if (isCallable(result.then)) { return result.then(function (values) { var allValid = true; var data = {}; if (Array.isArray(values)) { allValid = values.every(function (t) { return (isObject(t) ? t.valid : t); }); } else { // Is a single object/boolean. allValid = isObject(values) ? values.valid : values; data = values.data; } if (!allValid && !silent) { this$1.errors.add({ id: field.id, field: field.name, msg: this$1._formatErrorMessage(field, rule, data, targetName), rule: rule.name, scope: field.scope }); } return allValid; }); } if (!isObject(result)) { result = { valid: result, data: {} }; } if (!result.valid && !silent) { this.errors.add({ id: field.id, field: field.name, msg: this._formatErrorMessage(field, rule, result.data, targetName), rule: rule.name, scope: field.scope }); } return result.valid; }; /** * Merges a validator object into the RULES and Messages. 
*/ Validator._merge = function _merge (name, validator) { if (isCallable(validator)) { RULES[name] = validator; return; } RULES[name] = validator.validate; if (isCallable(validator.getMessage)) { DICTIONARY.setMessage(LOCALE, name, validator.getMessage); } if (validator.messages) { DICTIONARY.merge( Object.keys(validator.messages).reduce(function (prev, curr) { var dict = prev; dict[curr] = { messages: ( obj = {}, obj[name] = validator.messages[curr], obj ) }; var obj; return dict; }, {}) ); } }; /** * Guards from extension violations. */ Validator._guardExtend = function _guardExtend (name, validator) { if (isCallable(validator)) { return; } if (!isCallable(validator.validate)) { throw createError( // eslint-disable-next-line ("Extension Error: The validator '" + name + "' must be a function or have a 'validate' method.") ); } if (!isCallable(validator.getMessage) && !isObject(validator.messages)) { throw createError( // eslint-disable-next-line ("Extension Error: The validator '" + name + "' must have a 'getMessage' method or have a 'messages' object.") ); } }; /** * Tries different strategies to find a field. */ Validator.prototype._resolveField = function _resolveField (name, scope) { if (!isNullOrUndefined(scope)) { return this.fields.find({ name: name, scope: scope }); } if (name[0] === '#') { return this.fields.find({ id: name.slice(1) }); } if (name.indexOf('.') > -1) { var ref = name.split('.'); var fieldScope = ref[0]; var fieldName = ref.slice(1); var field = this.fields.find({ name: fieldName.join('.'), scope: fieldScope }); if (field) { return field; } } return this.fields.find({ name: name, scope: null }); }; /** * Handles when a field is not found depending on the strict flag. */ Validator.prototype._handleFieldNotFound = function _handleFieldNotFound (name, scope) { if (!this.strict) { return Promise.resolve(true); } var fullName = isNullOrUndefined(scope) ? name : ("" + (!isNullOrUndefined(scope) ? scope + '.' : '') + name); throw createError( ("Validating a non-existent field: \"" + fullName + "\". Use \"attach()\" first.") ); }; /** * Starts the validation process. */ Validator.prototype._validate = function _validate (field, value, silent) { var this$1 = this; if ( silent === void 0 ) silent = false; if (!field.isRequired && (isNullOrUndefined(value) || value === '')) { return Promise.resolve(true); } var promises = []; var isExitEarly = false; // use of '.some()' is to break iteration in middle by returning true Object.keys(field.rules).some(function (rule) { var result = this$1._test(field, value, { name: rule, params: field.rules[rule] }, silent); if (isCallable(result.then)) { promises.push(result); } else if (this$1.fastExit && !result) { isExitEarly = true; } else { var resultAsPromise = new Promise(function (resolve) { resolve(result); }); promises.push(resultAsPromise); } return isExitEarly; }); if (isExitEarly) { return Promise.resolve(false); } return Promise.all(promises).then(function (values) { return values.every(function (t) { return t; }); }); }; Object.defineProperties( Validator.prototype, prototypeAccessors ); Object.defineProperties( Validator, staticAccessors ); // /* istanbul ignore next */ var fakeFlags = createProxy({}, { get: function get (target, key) { // is a scope if (String(key).indexOf('$') === 0) { return fakeFlags; } return createFlags(); } }); /** * Checks if a parent validator instance was requested. */ var requestsValidator = function (injections) { if (! 
injections) { return false; } /* istanbul ignore next */ if (Array.isArray(injections) && ~injections.indexOf('$validator')) { return true; } if (isObject(injections) && injections.$validator) { return true; } return false; }; /** * Creates a validator instance. */ var createValidator = function (vm, options) { return new Validator(null, { vm: vm, fastExit: options.fastExit }); }; var mixin = { provide: function provide () { if (this.$validator && !isBuiltInComponent(this.$vnode)) { return { $validator: this.$validator }; } return {}; }, beforeCreate: function beforeCreate () { // if built in do nothing. if (isBuiltInComponent(this.$vnode)) { return; } // if its a root instance set the config if it exists. if (!this.$parent) { Config.merge(this.$options.$_veeValidate || {}); } var options = Config.resolve(this); var Vue = this.$options._base; // the vue constructor. // TODO: Deprecate /* istanbul ignore next */ if (this.$options.$validates) { warn('The ctor $validates option has been deprecated please set the $_veeValidate.validator option to "new" instead'); this.$validator = createValidator(this, options); } // if its a root instance, inject anyways, or if it requested a new instance. if (!this.$parent || (this.$options.$_veeValidate && /new/.test(this.$options.$_veeValidate.validator))) { this.$validator = createValidator(this, options); } var requested = requestsValidator(this.$options.inject); // if automatic injection is enabled and no instance was requested. if (! this.$validator && options.inject && !requested) { this.$validator = createValidator(this, options); } // don't inject errors or fieldBag as no validator was resolved. if (! requested && ! this.$validator) { return; } // There is a validator but it isn't injected, mark as reactive. if (! requested && this.$validator) { Vue.util.defineReactive(this.$validator, 'errors', this.$validator.errors); Vue.util.defineReactive(this.$validator, 'flags', this.$validator.flags); } if (! this.$options.computed) { this.$options.computed = {}; } this.$options.computed[options.errorBagName || 'errors'] = function errorBagGetter () { return this.$validator.errors; }; this.$options.computed[options.fieldsBagName || 'fields'] = function fieldBagGetter () { if (!Object.keys(this.$validator.flags).length) { return fakeFlags; } return this.$validator.flags; }; }, beforeDestroy: function beforeDestroy () { if (isBuiltInComponent(this.$vnode)) { return; } // mark the validator paused to prevent delayed validation. if (this.$validator && this.$validator.ownerId === this._uid && isCallable(this.$validator.pause)) { this.$validator.pause(); } } }; // /** * Finds the requested field by id from the context object. */ var findField = function (el, context) { if (!context || !context.$validator) { return null; } return context.$validator.fields.find({ id: getDataAttribute(el, 'id') }); }; var directive = { bind: function bind (el, binding, vnode) { var validator = vnode.context.$validator; if (! validator) { warn("No validator instance is present on vm, did you forget to inject '$validator'?"); return; } var fieldOptions = Generator.generate(el, binding, vnode); validator.attach(fieldOptions); }, inserted: function (el, binding, vnode) { var field = findField(el, vnode.context); var scope = Generator.resolveScope(el, binding, vnode); // skip if scope hasn't changed. if (!field || scope === field.scope) { return; } // only update scope. field.update({ scope: scope }); // allows the field to re-evaluated once more in the update hook. 
field.updated = false; }, update: function (el, binding, vnode) { var field = findField(el, vnode.context); // make sure we don't do unneccasary work if no important change was done. if (!field || (field.updated && isEqual$1(binding.value, binding.oldValue))) { return; } var scope = Generator.resolveScope(el, binding, vnode); var rules = Generator.resolveRules(el, binding); field.update({ scope: scope, rules: rules }); }, unbind: function unbind (el, binding, ref) { var context = ref.context; var field = findField(el, context); if (!field) { return; } context.$validator.detach(field); } }; var Vue; function install (_Vue, options) { if ( options === void 0 ) options = {}; if (Vue) { warn('already installed, Vue.use(VeeValidate) should only be called once.'); return; } Vue = _Vue; Config.merge(options); if (Config.current.dictionary) { Validator.updateDictionary(Config.current.dictionary); } if (options) { if (options.locale) { Validator.locale = options.locale; } if (options.strict) { Validator.setStrictMode(Config.current.strict); } } Vue.mixin(mixin); Vue.directive('validate', directive); } // function use (plugin, options) { if ( options === void 0 ) options = {}; if (!isCallable(plugin)) { return warn('The plugin must be a callable function'); } plugin({ Validator: Validator, ErrorBag: ErrorBag, Rules: Validator.rules }, options); } // var normalize = function (fields) { if (Array.isArray(fields)) { return fields.reduce(function (prev, curr) { if (~curr.indexOf('.')) { prev[curr.split('.')[1]] = curr; } else { prev[curr] = curr; } return prev; }, {}); } return fields; }; /** * Maps fields to computed functions. */ var mapFields = function (fields) { var normalized = normalize(fields); return Object.keys(normalized).reduce(function (prev, curr) { var field = normalized[curr]; prev[curr] = function mappedField () { // if field exists if (this.$validator.flags[field]) { return this.$validator.flags[field]; } // if it has a scope defined var index = field.indexOf('.'); if (index <= 0) { return {}; } var ref = field.split('.'); var scope = ref[0]; var name = ref.slice(1); scope = this.$validator.flags[("$" + scope)]; name = name.join('.'); if (scope && scope[name]) { return scope[name]; } return {}; }; return prev; }, {}); }; var minimal$1 = { install: install, use: use, directive: directive, mixin: mixin, mapFields: mapFields, Validator: Validator, ErrorBag: ErrorBag, version: '2.0.0-rc.21' }; // rules plugin definition. var rulesPlugin = function (ref) { var Validator = ref.Validator; Object.keys(Rules).forEach(function (rule) { Validator.extend(rule, Rules[rule]); }); // Merge the english messages. Validator.localize('en', locale$1); }; // install the rules via the plugin API. minimal$1.use(rulesPlugin); minimal$1.Rules = Rules; return minimal$1; })));
joeyparrish/cdnjs
ajax/libs/vee-validate/2.0.0-rc.21/vee-validate.js
JavaScript
mit
188,808
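The bundle above defines a stand-alone Validator/ErrorBag API (Validator.extend, attach, validate, errors.first, errors.count) in addition to the Vue plugin pieces. Below is a minimal usage sketch of that API, assuming the UMD build has been loaded and is reachable through a global such as `VeeValidate`; the global name, the `odd` rule, and the `age` field are illustrative assumptions, not part of the file above.

// Minimal sketch of driving the Validator/ErrorBag API defined above, outside of Vue.
// Assumes the bundle is exposed as a `VeeValidate` global (assumption) and that the
// built-in rules (e.g. `required`) were registered by the bundle's rules plugin.
var Validator = VeeValidate.Validator;

// Register a custom rule via Validator.extend (validate + getMessage, as _guardExtend expects).
Validator.extend('odd', {
  getMessage: function (field) { return 'The ' + field + ' field must be an odd number.'; },
  validate: function (value) { return Number(value) % 2 === 1; }
});

// Create a validator and attach a headless field by name and rule string.
var v = new Validator();
v.attach({ name: 'age', rules: 'required|odd' });

// validate(name, value) resolves to a boolean; failures are collected in the ErrorBag.
v.validate('age', 4).then(function (valid) {
  if (!valid) {
    console.log(v.errors.first('age')); // first message for the field
    console.log(v.errors.count());      // number of collected errors
  }
});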
// $Id$

Drupal.behaviors.messaging_shoutbox = function (context) {
  $('div.shoutform:not(.processed)').each(function() {
    $(this).addClass('processed').parents('form').each(function() {
      $(this).submit(function() {
        var form = $(this);
        $("input.form-submit", form).mousedown();
        $("input.form-text", form).val('');
        return false;
      });
      $("input.form-submit", this).mousedown(function() {
        var form = $(this).parents('form');
        $("input.form-text, textarea", form).val('');
        return false;
      });
    });
  });
};
stooni/Atrium
sites/all/modules/custom/messaging_shoutbox/messaging_shoutbox.js
JavaScript
gpl-2.0
584
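The snippet above is a Drupal 6-style behavior: a plain function stored on Drupal.behaviors that core re-runs whenever new markup arrives, which is why it guards each form with the '.processed' class. The sketch below shows roughly how that dispatch loop works; it is paraphrased from memory of the Drupal 6 API, so the helper name and details are assumptions rather than the verbatim core code.

// Rough sketch of the dispatch loop Drupal 6 core runs for behaviors like the one above
// (paraphrased from memory of drupal.js; names and details are assumptions, not verbatim core code).
function attachBehaviorsSketch(context) {
  context = context || document;
  // Every entry in Drupal.behaviors is a plain function that receives the DOM context
  // that was just added: the whole document on page load, or new markup after an AJAX call.
  jQuery.each(Drupal.behaviors, function () {
    this(context);
  });
}

// Core triggers this on page load and after AJAX responses; because the same behavior can
// run repeatedly over overlapping markup, messaging_shoutbox guards with ':not(.processed)'
// and addClass('processed') so its handlers are bound to each form only once.
attachBehaviorsSketch(document);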
// SPDX-License-Identifier: GPL-2.0-or-later /* * Copyright (C) 2016 Namjae Jeon <[email protected]> * Copyright (C) 2018 Samsung Electronics Co., Ltd. */ #include <linux/slab.h> #include "glob.h" #include "smb2pdu.h" #include "auth.h" #include "connection.h" #include "smb_common.h" #include "server.h" static struct smb_version_values smb21_server_values = { .version_string = SMB21_VERSION_STRING, .protocol_id = SMB21_PROT_ID, .capabilities = SMB2_GLOBAL_CAP_LARGE_MTU, .max_read_size = SMB21_DEFAULT_IOSIZE, .max_write_size = SMB21_DEFAULT_IOSIZE, .max_trans_size = SMB21_DEFAULT_IOSIZE, .large_lock_type = 0, .exclusive_lock_type = SMB2_LOCKFLAG_EXCLUSIVE, .shared_lock_type = SMB2_LOCKFLAG_SHARED, .unlock_lock_type = SMB2_LOCKFLAG_UNLOCK, .header_size = sizeof(struct smb2_hdr), .max_header_size = MAX_SMB2_HDR_SIZE, .read_rsp_size = sizeof(struct smb2_read_rsp) - 1, .lock_cmd = SMB2_LOCK, .cap_unix = 0, .cap_nt_find = SMB2_NT_FIND, .cap_large_files = SMB2_LARGE_FILES, .create_lease_size = sizeof(struct create_lease), .create_durable_size = sizeof(struct create_durable_rsp), .create_mxac_size = sizeof(struct create_mxac_rsp), .create_disk_id_size = sizeof(struct create_disk_id_rsp), .create_posix_size = sizeof(struct create_posix_rsp), }; static struct smb_version_values smb30_server_values = { .version_string = SMB30_VERSION_STRING, .protocol_id = SMB30_PROT_ID, .capabilities = SMB2_GLOBAL_CAP_LARGE_MTU, .max_read_size = SMB3_DEFAULT_IOSIZE, .max_write_size = SMB3_DEFAULT_IOSIZE, .max_trans_size = SMB3_DEFAULT_TRANS_SIZE, .large_lock_type = 0, .exclusive_lock_type = SMB2_LOCKFLAG_EXCLUSIVE, .shared_lock_type = SMB2_LOCKFLAG_SHARED, .unlock_lock_type = SMB2_LOCKFLAG_UNLOCK, .header_size = sizeof(struct smb2_hdr), .max_header_size = MAX_SMB2_HDR_SIZE, .read_rsp_size = sizeof(struct smb2_read_rsp) - 1, .lock_cmd = SMB2_LOCK, .cap_unix = 0, .cap_nt_find = SMB2_NT_FIND, .cap_large_files = SMB2_LARGE_FILES, .create_lease_size = sizeof(struct create_lease_v2), .create_durable_size = sizeof(struct create_durable_rsp), .create_durable_v2_size = sizeof(struct create_durable_v2_rsp), .create_mxac_size = sizeof(struct create_mxac_rsp), .create_disk_id_size = sizeof(struct create_disk_id_rsp), .create_posix_size = sizeof(struct create_posix_rsp), }; static struct smb_version_values smb302_server_values = { .version_string = SMB302_VERSION_STRING, .protocol_id = SMB302_PROT_ID, .capabilities = SMB2_GLOBAL_CAP_LARGE_MTU, .max_read_size = SMB3_DEFAULT_IOSIZE, .max_write_size = SMB3_DEFAULT_IOSIZE, .max_trans_size = SMB3_DEFAULT_TRANS_SIZE, .large_lock_type = 0, .exclusive_lock_type = SMB2_LOCKFLAG_EXCLUSIVE, .shared_lock_type = SMB2_LOCKFLAG_SHARED, .unlock_lock_type = SMB2_LOCKFLAG_UNLOCK, .header_size = sizeof(struct smb2_hdr), .max_header_size = MAX_SMB2_HDR_SIZE, .read_rsp_size = sizeof(struct smb2_read_rsp) - 1, .lock_cmd = SMB2_LOCK, .cap_unix = 0, .cap_nt_find = SMB2_NT_FIND, .cap_large_files = SMB2_LARGE_FILES, .create_lease_size = sizeof(struct create_lease_v2), .create_durable_size = sizeof(struct create_durable_rsp), .create_durable_v2_size = sizeof(struct create_durable_v2_rsp), .create_mxac_size = sizeof(struct create_mxac_rsp), .create_disk_id_size = sizeof(struct create_disk_id_rsp), .create_posix_size = sizeof(struct create_posix_rsp), }; static struct smb_version_values smb311_server_values = { .version_string = SMB311_VERSION_STRING, .protocol_id = SMB311_PROT_ID, .capabilities = SMB2_GLOBAL_CAP_LARGE_MTU, .max_read_size = SMB3_DEFAULT_IOSIZE, .max_write_size = SMB3_DEFAULT_IOSIZE, 
.max_trans_size = SMB3_DEFAULT_TRANS_SIZE, .large_lock_type = 0, .exclusive_lock_type = SMB2_LOCKFLAG_EXCLUSIVE, .shared_lock_type = SMB2_LOCKFLAG_SHARED, .unlock_lock_type = SMB2_LOCKFLAG_UNLOCK, .header_size = sizeof(struct smb2_hdr), .max_header_size = MAX_SMB2_HDR_SIZE, .read_rsp_size = sizeof(struct smb2_read_rsp) - 1, .lock_cmd = SMB2_LOCK, .cap_unix = 0, .cap_nt_find = SMB2_NT_FIND, .cap_large_files = SMB2_LARGE_FILES, .create_lease_size = sizeof(struct create_lease_v2), .create_durable_size = sizeof(struct create_durable_rsp), .create_durable_v2_size = sizeof(struct create_durable_v2_rsp), .create_mxac_size = sizeof(struct create_mxac_rsp), .create_disk_id_size = sizeof(struct create_disk_id_rsp), .create_posix_size = sizeof(struct create_posix_rsp), }; static struct smb_version_ops smb2_0_server_ops = { .get_cmd_val = get_smb2_cmd_val, .init_rsp_hdr = init_smb2_rsp_hdr, .set_rsp_status = set_smb2_rsp_status, .allocate_rsp_buf = smb2_allocate_rsp_buf, .set_rsp_credits = smb2_set_rsp_credits, .check_user_session = smb2_check_user_session, .get_ksmbd_tcon = smb2_get_ksmbd_tcon, .is_sign_req = smb2_is_sign_req, .check_sign_req = smb2_check_sign_req, .set_sign_rsp = smb2_set_sign_rsp }; static struct smb_version_ops smb3_0_server_ops = { .get_cmd_val = get_smb2_cmd_val, .init_rsp_hdr = init_smb2_rsp_hdr, .set_rsp_status = set_smb2_rsp_status, .allocate_rsp_buf = smb2_allocate_rsp_buf, .set_rsp_credits = smb2_set_rsp_credits, .check_user_session = smb2_check_user_session, .get_ksmbd_tcon = smb2_get_ksmbd_tcon, .is_sign_req = smb2_is_sign_req, .check_sign_req = smb3_check_sign_req, .set_sign_rsp = smb3_set_sign_rsp, .generate_signingkey = ksmbd_gen_smb30_signingkey, .generate_encryptionkey = ksmbd_gen_smb30_encryptionkey, .is_transform_hdr = smb3_is_transform_hdr, .decrypt_req = smb3_decrypt_req, .encrypt_resp = smb3_encrypt_resp }; static struct smb_version_ops smb3_11_server_ops = { .get_cmd_val = get_smb2_cmd_val, .init_rsp_hdr = init_smb2_rsp_hdr, .set_rsp_status = set_smb2_rsp_status, .allocate_rsp_buf = smb2_allocate_rsp_buf, .set_rsp_credits = smb2_set_rsp_credits, .check_user_session = smb2_check_user_session, .get_ksmbd_tcon = smb2_get_ksmbd_tcon, .is_sign_req = smb2_is_sign_req, .check_sign_req = smb3_check_sign_req, .set_sign_rsp = smb3_set_sign_rsp, .generate_signingkey = ksmbd_gen_smb311_signingkey, .generate_encryptionkey = ksmbd_gen_smb311_encryptionkey, .is_transform_hdr = smb3_is_transform_hdr, .decrypt_req = smb3_decrypt_req, .encrypt_resp = smb3_encrypt_resp }; static struct smb_version_cmds smb2_0_server_cmds[NUMBER_OF_SMB2_COMMANDS] = { [SMB2_NEGOTIATE_HE] = { .proc = smb2_negotiate_request, }, [SMB2_SESSION_SETUP_HE] = { .proc = smb2_sess_setup, }, [SMB2_TREE_CONNECT_HE] = { .proc = smb2_tree_connect,}, [SMB2_TREE_DISCONNECT_HE] = { .proc = smb2_tree_disconnect,}, [SMB2_LOGOFF_HE] = { .proc = smb2_session_logoff,}, [SMB2_CREATE_HE] = { .proc = smb2_open}, [SMB2_QUERY_INFO_HE] = { .proc = smb2_query_info}, [SMB2_QUERY_DIRECTORY_HE] = { .proc = smb2_query_dir}, [SMB2_CLOSE_HE] = { .proc = smb2_close}, [SMB2_ECHO_HE] = { .proc = smb2_echo}, [SMB2_SET_INFO_HE] = { .proc = smb2_set_info}, [SMB2_READ_HE] = { .proc = smb2_read}, [SMB2_WRITE_HE] = { .proc = smb2_write}, [SMB2_FLUSH_HE] = { .proc = smb2_flush}, [SMB2_CANCEL_HE] = { .proc = smb2_cancel}, [SMB2_LOCK_HE] = { .proc = smb2_lock}, [SMB2_IOCTL_HE] = { .proc = smb2_ioctl}, [SMB2_OPLOCK_BREAK_HE] = { .proc = smb2_oplock_break}, [SMB2_CHANGE_NOTIFY_HE] = { .proc = smb2_notify}, }; /** * init_smb2_1_server() - 
initialize a smb server connection with smb2.1 * command dispatcher * @conn: connection instance */ void init_smb2_1_server(struct ksmbd_conn *conn) { conn->vals = &smb21_server_values; conn->ops = &smb2_0_server_ops; conn->cmds = smb2_0_server_cmds; conn->max_cmds = ARRAY_SIZE(smb2_0_server_cmds); conn->max_credits = SMB2_MAX_CREDITS; conn->signing_algorithm = SIGNING_ALG_HMAC_SHA256; if (server_conf.flags & KSMBD_GLOBAL_FLAG_SMB2_LEASES) conn->vals->capabilities |= SMB2_GLOBAL_CAP_LEASING; } /** * init_smb3_0_server() - initialize a smb server connection with smb3.0 * command dispatcher * @conn: connection instance */ void init_smb3_0_server(struct ksmbd_conn *conn) { conn->vals = &smb30_server_values; conn->ops = &smb3_0_server_ops; conn->cmds = smb2_0_server_cmds; conn->max_cmds = ARRAY_SIZE(smb2_0_server_cmds); conn->max_credits = SMB2_MAX_CREDITS; conn->signing_algorithm = SIGNING_ALG_AES_CMAC; if (server_conf.flags & KSMBD_GLOBAL_FLAG_SMB2_LEASES) conn->vals->capabilities |= SMB2_GLOBAL_CAP_LEASING; if (server_conf.flags & KSMBD_GLOBAL_FLAG_SMB2_ENCRYPTION && conn->cli_cap & SMB2_GLOBAL_CAP_ENCRYPTION) conn->vals->capabilities |= SMB2_GLOBAL_CAP_ENCRYPTION; if (server_conf.flags & KSMBD_GLOBAL_FLAG_SMB3_MULTICHANNEL) conn->vals->capabilities |= SMB2_GLOBAL_CAP_MULTI_CHANNEL; } /** * init_smb3_02_server() - initialize a smb server connection with smb3.02 * command dispatcher * @conn: connection instance */ void init_smb3_02_server(struct ksmbd_conn *conn) { conn->vals = &smb302_server_values; conn->ops = &smb3_0_server_ops; conn->cmds = smb2_0_server_cmds; conn->max_cmds = ARRAY_SIZE(smb2_0_server_cmds); conn->max_credits = SMB2_MAX_CREDITS; conn->signing_algorithm = SIGNING_ALG_AES_CMAC; if (server_conf.flags & KSMBD_GLOBAL_FLAG_SMB2_LEASES) conn->vals->capabilities |= SMB2_GLOBAL_CAP_LEASING; if (server_conf.flags & KSMBD_GLOBAL_FLAG_SMB2_ENCRYPTION && conn->cli_cap & SMB2_GLOBAL_CAP_ENCRYPTION) conn->vals->capabilities |= SMB2_GLOBAL_CAP_ENCRYPTION; if (server_conf.flags & KSMBD_GLOBAL_FLAG_SMB3_MULTICHANNEL) conn->vals->capabilities |= SMB2_GLOBAL_CAP_MULTI_CHANNEL; } /** * init_smb3_11_server() - initialize a smb server connection with smb3.11 * command dispatcher * @conn: connection instance */ int init_smb3_11_server(struct ksmbd_conn *conn) { conn->vals = &smb311_server_values; conn->ops = &smb3_11_server_ops; conn->cmds = smb2_0_server_cmds; conn->max_cmds = ARRAY_SIZE(smb2_0_server_cmds); conn->max_credits = SMB2_MAX_CREDITS; conn->signing_algorithm = SIGNING_ALG_AES_CMAC; if (server_conf.flags & KSMBD_GLOBAL_FLAG_SMB2_LEASES) conn->vals->capabilities |= SMB2_GLOBAL_CAP_LEASING; if (conn->cipher_type) conn->vals->capabilities |= SMB2_GLOBAL_CAP_ENCRYPTION; if (server_conf.flags & KSMBD_GLOBAL_FLAG_SMB3_MULTICHANNEL) conn->vals->capabilities |= SMB2_GLOBAL_CAP_MULTI_CHANNEL; INIT_LIST_HEAD(&conn->preauth_sess_table); return 0; } void init_smb2_max_read_size(unsigned int sz) { sz = clamp_val(sz, SMB3_MIN_IOSIZE, SMB3_MAX_IOSIZE); smb21_server_values.max_read_size = sz; smb30_server_values.max_read_size = sz; smb302_server_values.max_read_size = sz; smb311_server_values.max_read_size = sz; } void init_smb2_max_write_size(unsigned int sz) { sz = clamp_val(sz, SMB3_MIN_IOSIZE, SMB3_MAX_IOSIZE); smb21_server_values.max_write_size = sz; smb30_server_values.max_write_size = sz; smb302_server_values.max_write_size = sz; smb311_server_values.max_write_size = sz; } void init_smb2_max_trans_size(unsigned int sz) { sz = clamp_val(sz, SMB3_MIN_IOSIZE, SMB3_MAX_IOSIZE); 
smb21_server_values.max_trans_size = sz; smb30_server_values.max_trans_size = sz; smb302_server_values.max_trans_size = sz; smb311_server_values.max_trans_size = sz; }
mpe/powerpc
fs/ksmbd/smb2ops.c
C
gpl-2.0
11,171
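The fs/ksmbd/smb2ops.c record above routes every SMB2 opcode through smb2_0_server_cmds, an array of handler slots indexed by command code that the connection's dispatcher consults. Below is a minimal C++ sketch of that dispatch-table pattern; the slot count, opcode values, type names, and handler names are illustrative stand-ins, not the kernel's.

#include <array>
#include <cstdio>

struct conn;                                  // stand-in for struct ksmbd_conn
using cmd_proc = int (*)(conn *);             // stand-in for the .proc callback type

struct version_cmd {
    cmd_proc proc = nullptr;                  // an empty slot means "command not handled"
};

constexpr int kNumCmds = 19;                  // illustrative size only

int negotiate(conn *)    { std::puts("negotiate");    return 0; }
int tree_connect(conn *) { std::puts("tree connect"); return 0; }

int main()
{
    std::array<version_cmd, kNumCmds> cmds{}; // all slots start out null
    cmds[0] = {negotiate};                    // e.g. the NEGOTIATE slot
    cmds[3] = {tree_connect};                 // e.g. the TREE_CONNECT slot

    int opcode = 3;                           // in the server this comes from the request header
    if (cmds[opcode].proc)                    // skip opcodes with no registered handler
        cmds[opcode].proc(nullptr);           // dispatch to the matching handler
    return 0;
}

The same table is shared by all dialects in the record above; only the vals/ops structures swap per negotiated version, which keeps per-dialect differences out of the dispatch path.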
/**************************************************************************** Copyright (c) 2008-2010 Ricardo Quesada Copyright (c) 2010-2012 cocos2d-x.org Copyright (c) 2011 Zynga Inc. Copyright (c) 2013-2017 Chukong Technologies Inc. http://www.cocos2d-x.org Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ****************************************************************************/ #include "renderer/CCTextureCache.h" #include <errno.h> #include <stack> #include <cctype> #include <list> #include "renderer/CCTexture2D.h" #include "base/ccMacros.h" #include "base/ccUTF8.h" #include "base/CCDirector.h" #include "base/CCScheduler.h" #include "platform/CCFileUtils.h" #include "base/ccUtils.h" #include "base/CCNinePatchImageParser.h" using namespace std; NS_CC_BEGIN std::string TextureCache::s_etc1AlphaFileSuffix = "@alpha"; // implementation TextureCache void TextureCache::setETC1AlphaFileSuffix(const std::string& suffix) { s_etc1AlphaFileSuffix = suffix; } std::string TextureCache::getETC1AlphaFileSuffix() { return s_etc1AlphaFileSuffix; } TextureCache * TextureCache::getInstance() { return Director::getInstance()->getTextureCache(); } TextureCache::TextureCache() : _loadingThread(nullptr) , _needQuit(false) , _asyncRefCount(0) { } TextureCache::~TextureCache() { CCLOGINFO("deallocing TextureCache: %p", this); for (auto& texture : _textures) texture.second->release(); CC_SAFE_DELETE(_loadingThread); } void TextureCache::destroyInstance() { } TextureCache * TextureCache::sharedTextureCache() { return Director::getInstance()->getTextureCache(); } void TextureCache::purgeSharedTextureCache() { } std::string TextureCache::getDescription() const { return StringUtils::format("<TextureCache | Number of textures = %d>", static_cast<int>(_textures.size())); } struct TextureCache::AsyncStruct { public: AsyncStruct ( const std::string& fn,const std::function<void(Texture2D*)>& f, const std::string& key ) : filename(fn), callback(f),callbackKey( key ), pixelFormat(Texture2D::getDefaultAlphaPixelFormat()), loadSuccess(false) {} std::string filename; std::function<void(Texture2D*)> callback; std::string callbackKey; Image image; Image imageAlpha; Texture2D::PixelFormat pixelFormat; bool loadSuccess; }; /** The addImageAsync logic follow the steps: - find the image has been add or not, if not add an AsyncStruct to _requestQueue (GL thread) - get AsyncStruct from _requestQueue, load res and fill image data to AsyncStruct.image, then add AsyncStruct to _responseQueue (Load thread) - on schedule callback, get AsyncStruct from _responseQueue, convert image 
to texture, then delete AsyncStruct (GL thread) the Critical Area include these members: - _requestQueue: locked by _requestMutex - _responseQueue: locked by _responseMutex the object's life time: - AsyncStruct: construct and destruct in GL thread - image data: new in Load thread, delete in GL thread(by Image instance) Note: - all AsyncStruct referenced in _asyncStructQueue, for unbind function use. How to deal add image many times? - At first, this situation is abnormal, we only ensure the logic is correct. - If the image has been loaded, the after load image call will return immediately. - If the image request is in queue already, there will be more than one request in queue, - In addImageAsyncCallback, will deduplicate the request to ensure only create one texture. Does process all response in addImageAsyncCallback consume more time? - Convert image to texture faster than load image from disk, so this isn't a problem. Call unbindImageAsync(path) to prevent the call to the callback when the texture is loaded. */ void TextureCache::addImageAsync(const std::string &path, const std::function<void(Texture2D*)>& callback) { addImageAsync( path, callback, path ); } /** The addImageAsync logic follow the steps: - find the image has been add or not, if not add an AsyncStruct to _requestQueue (GL thread) - get AsyncStruct from _requestQueue, load res and fill image data to AsyncStruct.image, then add AsyncStruct to _responseQueue (Load thread) - on schedule callback, get AsyncStruct from _responseQueue, convert image to texture, then delete AsyncStruct (GL thread) the Critical Area include these members: - _requestQueue: locked by _requestMutex - _responseQueue: locked by _responseMutex the object's life time: - AsyncStruct: construct and destruct in GL thread - image data: new in Load thread, delete in GL thread(by Image instance) Note: - all AsyncStruct referenced in _asyncStructQueue, for unbind function use. How to deal add image many times? - At first, this situation is abnormal, we only ensure the logic is correct. - If the image has been loaded, the after load image call will return immediately. - If the image request is in queue already, there will be more than one request in queue, - In addImageAsyncCallback, will deduplicate the request to ensure only create one texture. Does process all response in addImageAsyncCallback consume more time? - Convert image to texture faster than load image from disk, so this isn't a problem. The callbackKey allows to unbind the callback in cases where the loading of path is requested by several sources simultaneously. Each source can then unbind the callback independently as needed whilst a call to unbindImageAsync(path) would be ambiguous. 
*/ void TextureCache::addImageAsync(const std::string &path, const std::function<void(Texture2D*)>& callback, const std::string& callbackKey) { Texture2D *texture = nullptr; std::string fullpath = FileUtils::getInstance()->fullPathForFilename(path); auto it = _textures.find(fullpath); if (it != _textures.end()) texture = it->second; if (texture != nullptr) { if (callback) callback(texture); return; } // check if file exists if (fullpath.empty() || !FileUtils::getInstance()->isFileExist(fullpath)) { if (callback) callback(nullptr); return; } // lazy init if (_loadingThread == nullptr) { // create a new thread to load images _loadingThread = new (std::nothrow) std::thread(&TextureCache::loadImage, this); _needQuit = false; } if (0 == _asyncRefCount) { Director::getInstance()->getScheduler()->schedule(CC_SCHEDULE_SELECTOR(TextureCache::addImageAsyncCallBack), this, 0, false); } ++_asyncRefCount; // generate async struct AsyncStruct *data = new (std::nothrow) AsyncStruct(fullpath, callback, callbackKey); // add async struct into queue _asyncStructQueue.push_back(data); _requestMutex.lock(); _requestQueue.push_back(data); _requestMutex.unlock(); _sleepCondition.notify_one(); } void TextureCache::unbindImageAsync(const std::string& callbackKey) { if (_asyncStructQueue.empty()) { return; } for (auto& asyncStruct : _asyncStructQueue) { if (asyncStruct->callbackKey == callbackKey) { asyncStruct->callback = nullptr; } } } void TextureCache::unbindAllImageAsync() { if (_asyncStructQueue.empty()) { return; } for (auto& asyncStruct : _asyncStructQueue) { asyncStruct->callback = nullptr; } } void TextureCache::loadImage() { AsyncStruct *asyncStruct = nullptr; std::mutex signalMutex; std::unique_lock<std::mutex> signal(signalMutex); while (!_needQuit) { // pop an AsyncStruct from request queue _requestMutex.lock(); if (_requestQueue.empty()) { asyncStruct = nullptr; } else { asyncStruct = _requestQueue.front(); _requestQueue.pop_front(); } _requestMutex.unlock(); if (nullptr == asyncStruct) { _sleepCondition.wait(signal); continue; } // load image asyncStruct->loadSuccess = asyncStruct->image.initWithImageFileThreadSafe(asyncStruct->filename); // ETC1 ALPHA supports. 
if (asyncStruct->loadSuccess && asyncStruct->image.getFileType() == Image::Format::ETC && !s_etc1AlphaFileSuffix.empty()) { // check whether alpha texture exists & load it auto alphaFile = asyncStruct->filename + s_etc1AlphaFileSuffix; if (FileUtils::getInstance()->isFileExist(alphaFile)) asyncStruct->imageAlpha.initWithImageFileThreadSafe(alphaFile); } // push the asyncStruct to response queue _responseMutex.lock(); _responseQueue.push_back(asyncStruct); _responseMutex.unlock(); } } void TextureCache::addImageAsyncCallBack(float /*dt*/) { Texture2D *texture = nullptr; AsyncStruct *asyncStruct = nullptr; while (true) { // pop an AsyncStruct from response queue _responseMutex.lock(); if (_responseQueue.empty()) { asyncStruct = nullptr; } else { asyncStruct = _responseQueue.front(); _responseQueue.pop_front(); // the asyncStruct's sequence order in _asyncStructQueue must equal to the order in _responseQueue CC_ASSERT(asyncStruct == _asyncStructQueue.front()); _asyncStructQueue.pop_front(); } _responseMutex.unlock(); if (nullptr == asyncStruct) { break; } // check the image has been convert to texture or not auto it = _textures.find(asyncStruct->filename); if (it != _textures.end()) { texture = it->second; } else { // convert image to texture if (asyncStruct->loadSuccess) { Image* image = &(asyncStruct->image); // generate texture in render thread texture = new (std::nothrow) Texture2D(); texture->initWithImage(image, asyncStruct->pixelFormat); //parse 9-patch info this->parseNinePatchImage(image, texture, asyncStruct->filename); #if CC_ENABLE_CACHE_TEXTURE_DATA // cache the texture file name VolatileTextureMgr::addImageTexture(texture, asyncStruct->filename); #endif // cache the texture. retain it, since it is added in the map _textures.emplace(asyncStruct->filename, texture); texture->retain(); texture->autorelease(); // ETC1 ALPHA supports. 
if (asyncStruct->imageAlpha.getFileType() == Image::Format::ETC) { auto alphaTexture = new(std::nothrow) Texture2D(); if(alphaTexture != nullptr && alphaTexture->initWithImage(&asyncStruct->imageAlpha, asyncStruct->pixelFormat)) { texture->setAlphaTexture(alphaTexture); } CC_SAFE_RELEASE(alphaTexture); } } else { texture = nullptr; CCLOG("cocos2d: failed to call TextureCache::addImageAsync(%s)", asyncStruct->filename.c_str()); } } // call callback function if (asyncStruct->callback) { (asyncStruct->callback)(texture); } // release the asyncStruct delete asyncStruct; --_asyncRefCount; } if (0 == _asyncRefCount) { Director::getInstance()->getScheduler()->unschedule(CC_SCHEDULE_SELECTOR(TextureCache::addImageAsyncCallBack), this); } } Texture2D * TextureCache::addImage(const std::string &path) { Texture2D * texture = nullptr; Image* image = nullptr; // Split up directory and filename // MUTEX: // Needed since addImageAsync calls this method from a different thread std::string fullpath = FileUtils::getInstance()->fullPathForFilename(path); if (fullpath.size() == 0) { return nullptr; } auto it = _textures.find(fullpath); if (it != _textures.end()) texture = it->second; if (!texture) { // all images are handled by UIImage except PVR extension that is handled by our own handler do { image = new (std::nothrow) Image(); CC_BREAK_IF(nullptr == image); bool bRet = image->initWithImageFile(fullpath); CC_BREAK_IF(!bRet); texture = new (std::nothrow) Texture2D(); if (texture && texture->initWithImage(image)) { #if CC_ENABLE_CACHE_TEXTURE_DATA // cache the texture file name VolatileTextureMgr::addImageTexture(texture, fullpath); #endif // texture already retained, no need to re-retain it _textures.emplace(fullpath, texture); //-- ANDROID ETC1 ALPHA SUPPORTS. std::string alphaFullPath = path + s_etc1AlphaFileSuffix; if (image->getFileType() == Image::Format::ETC && !s_etc1AlphaFileSuffix.empty() && FileUtils::getInstance()->isFileExist(alphaFullPath)) { Image alphaImage; if (alphaImage.initWithImageFile(alphaFullPath)) { Texture2D *pAlphaTexture = new(std::nothrow) Texture2D; if(pAlphaTexture != nullptr && pAlphaTexture->initWithImage(&alphaImage)) { texture->setAlphaTexture(pAlphaTexture); } CC_SAFE_RELEASE(pAlphaTexture); } } //parse 9-patch info this->parseNinePatchImage(image, texture, path); } else { CCLOG("cocos2d: Couldn't create texture for file:%s in TextureCache", path.c_str()); CC_SAFE_RELEASE(texture); texture = nullptr; } } while (0); } CC_SAFE_RELEASE(image); return texture; } void TextureCache::parseNinePatchImage(cocos2d::Image *image, cocos2d::Texture2D *texture, const std::string& path) { if (NinePatchImageParser::isNinePatchImage(path)) { Rect frameRect = Rect(0, 0, image->getWidth(), image->getHeight()); NinePatchImageParser parser(image, frameRect, false); texture->addSpriteFrameCapInset(nullptr, parser.parseCapInset()); } } Texture2D* TextureCache::addImage(Image *image, const std::string &key) { CCASSERT(image != nullptr, "TextureCache: image MUST not be nil"); CCASSERT(image->getData() != nullptr, "TextureCache: image MUST not be nil"); Texture2D * texture = nullptr; do { auto it = _textures.find(key); if (it != _textures.end()) { texture = it->second; break; } texture = new (std::nothrow) Texture2D(); if (texture) { if (texture->initWithImage(image)) { _textures.emplace(key, texture); } else { CC_SAFE_RELEASE(texture); texture = nullptr; CCLOG("cocos2d: initWithImage failed!"); } } else { CCLOG("cocos2d: Allocating memory for Texture2D failed!"); } } while (0); #if 
CC_ENABLE_CACHE_TEXTURE_DATA VolatileTextureMgr::addImage(texture, image); #endif return texture; } bool TextureCache::reloadTexture(const std::string& fileName) { Texture2D * texture = nullptr; Image * image = nullptr; std::string fullpath = FileUtils::getInstance()->fullPathForFilename(fileName); if (fullpath.size() == 0) { return false; } auto it = _textures.find(fullpath); if (it != _textures.end()) { texture = it->second; } bool ret = false; if (!texture) { texture = this->addImage(fullpath); ret = (texture != nullptr); } else { do { image = new (std::nothrow) Image(); CC_BREAK_IF(nullptr == image); bool bRet = image->initWithImageFile(fullpath); CC_BREAK_IF(!bRet); ret = texture->initWithImage(image); } while (0); } CC_SAFE_RELEASE(image); return ret; } // TextureCache - Remove void TextureCache::removeAllTextures() { for (auto& texture : _textures) { texture.second->release(); } _textures.clear(); } void TextureCache::removeUnusedTextures() { for (auto it = _textures.cbegin(); it != _textures.cend(); /* nothing */) { Texture2D *tex = it->second; if (tex->getReferenceCount() == 1) { CCLOG("cocos2d: TextureCache: removing unused texture: %s", it->first.c_str()); tex->release(); it = _textures.erase(it); } else { ++it; } } } void TextureCache::removeTexture(Texture2D* texture) { if (!texture) { return; } for (auto it = _textures.cbegin(); it != _textures.cend(); /* nothing */) { if (it->second == texture) { it->second->release(); it = _textures.erase(it); break; } else ++it; } } void TextureCache::removeTextureForKey(const std::string &textureKeyName) { std::string key = textureKeyName; auto it = _textures.find(key); if (it == _textures.end()) { key = FileUtils::getInstance()->fullPathForFilename(textureKeyName); it = _textures.find(key); } if (it != _textures.end()) { it->second->release(); _textures.erase(it); } } Texture2D* TextureCache::getTextureForKey(const std::string &textureKeyName) const { std::string key = textureKeyName; auto it = _textures.find(key); if (it == _textures.end()) { key = FileUtils::getInstance()->fullPathForFilename(textureKeyName); it = _textures.find(key); } if (it != _textures.end()) return it->second; return nullptr; } void TextureCache::reloadAllTextures() { //will do nothing // #if CC_ENABLE_CACHE_TEXTURE_DATA // VolatileTextureMgr::reloadAllTextures(); // #endif } std::string TextureCache::getTextureFilePath(cocos2d::Texture2D* texture) const { for (auto& item : _textures) { if (item.second == texture) { return item.first; break; } } return ""; } void TextureCache::waitForQuit() { // notify sub thread to quick _needQuit = true; _sleepCondition.notify_one(); if (_loadingThread) _loadingThread->join(); } std::string TextureCache::getCachedTextureInfo() const { std::string buffer; char buftmp[4096]; unsigned int count = 0; unsigned int totalBytes = 0; for (auto& texture : _textures) { memset(buftmp, 0, sizeof(buftmp)); Texture2D* tex = texture.second; unsigned int bpp = tex->getBitsPerPixelForFormat(); // Each texture takes up width * height * bytesPerPixel bytes. 
auto bytes = tex->getPixelsWide() * tex->getPixelsHigh() * bpp / 8; totalBytes += bytes; count++; snprintf(buftmp, sizeof(buftmp) - 1, "\"%s\" rc=%lu id=%lu %lu x %lu @ %ld bpp => %lu KB\n", texture.first.c_str(), (long)tex->getReferenceCount(), (long)tex->getName(), (long)tex->getPixelsWide(), (long)tex->getPixelsHigh(), (long)bpp, (long)bytes / 1024); buffer += buftmp; } snprintf(buftmp, sizeof(buftmp) - 1, "TextureCache dumpDebugInfo: %ld textures, for %lu KB (%.2f MB)\n", (long)count, (long)totalBytes / 1024, totalBytes / (1024.0f*1024.0f)); buffer += buftmp; return buffer; } void TextureCache::renameTextureWithKey(const std::string& srcName, const std::string& dstName) { std::string key = srcName; auto it = _textures.find(key); if (it == _textures.end()) { key = FileUtils::getInstance()->fullPathForFilename(srcName); it = _textures.find(key); } if (it != _textures.end()) { std::string fullpath = FileUtils::getInstance()->fullPathForFilename(dstName); Texture2D* tex = it->second; Image* image = new (std::nothrow) Image(); if (image) { bool ret = image->initWithImageFile(dstName); if (ret) { tex->initWithImage(image); _textures.emplace(fullpath, tex); _textures.erase(it); } CC_SAFE_DELETE(image); } } } #if CC_ENABLE_CACHE_TEXTURE_DATA std::list<VolatileTexture*> VolatileTextureMgr::_textures; bool VolatileTextureMgr::_isReloading = false; VolatileTexture::VolatileTexture(Texture2D *t) : _texture(t) , _uiImage(nullptr) , _cashedImageType(kInvalid) , _textureData(nullptr) , _pixelFormat(Texture2D::PixelFormat::RGBA8888) , _fileName("") , _hasMipmaps(false) , _text("") { _texParams.minFilter = GL_LINEAR; _texParams.magFilter = GL_LINEAR; _texParams.wrapS = GL_CLAMP_TO_EDGE; _texParams.wrapT = GL_CLAMP_TO_EDGE; } VolatileTexture::~VolatileTexture() { CC_SAFE_RELEASE(_uiImage); } void VolatileTextureMgr::addImageTexture(Texture2D *tt, const std::string& imageFileName) { if (_isReloading) { return; } VolatileTexture *vt = findVolotileTexture(tt); vt->_cashedImageType = VolatileTexture::kImageFile; vt->_fileName = imageFileName; vt->_pixelFormat = tt->getPixelFormat(); } void VolatileTextureMgr::addImage(Texture2D *tt, Image *image) { if (tt == nullptr || image == nullptr) return; VolatileTexture *vt = findVolotileTexture(tt); image->retain(); vt->_uiImage = image; vt->_cashedImageType = VolatileTexture::kImage; } VolatileTexture* VolatileTextureMgr::findVolotileTexture(Texture2D *tt) { VolatileTexture *vt = nullptr; for (const auto& texture : _textures) { VolatileTexture *v = texture; if (v->_texture == tt) { vt = v; break; } } if (!vt) { vt = new (std::nothrow) VolatileTexture(tt); _textures.push_back(vt); } return vt; } void VolatileTextureMgr::addDataTexture(Texture2D *tt, void* data, int dataLen, Texture2D::PixelFormat pixelFormat, const Size& contentSize) { if (_isReloading) { return; } VolatileTexture *vt = findVolotileTexture(tt); vt->_cashedImageType = VolatileTexture::kImageData; vt->_textureData = data; vt->_dataLen = dataLen; vt->_pixelFormat = pixelFormat; vt->_textureSize = contentSize; } void VolatileTextureMgr::addStringTexture(Texture2D *tt, const char* text, const FontDefinition& fontDefinition) { if (_isReloading) { return; } VolatileTexture *vt = findVolotileTexture(tt); vt->_cashedImageType = VolatileTexture::kString; vt->_text = text; vt->_fontDefinition = fontDefinition; } void VolatileTextureMgr::setHasMipmaps(Texture2D *t, bool hasMipmaps) { VolatileTexture *vt = findVolotileTexture(t); vt->_hasMipmaps = hasMipmaps; } void VolatileTextureMgr::setTexParameters(Texture2D 
*t, const Texture2D::TexParams &texParams) { VolatileTexture *vt = findVolotileTexture(t); if (texParams.minFilter != GL_NONE) vt->_texParams.minFilter = texParams.minFilter; if (texParams.magFilter != GL_NONE) vt->_texParams.magFilter = texParams.magFilter; if (texParams.wrapS != GL_NONE) vt->_texParams.wrapS = texParams.wrapS; if (texParams.wrapT != GL_NONE) vt->_texParams.wrapT = texParams.wrapT; } void VolatileTextureMgr::removeTexture(Texture2D *t) { for (auto& item : _textures) { VolatileTexture *vt = item; if (vt->_texture == t) { _textures.remove(vt); delete vt; break; } } } void VolatileTextureMgr::reloadAllTextures() { _isReloading = true; // we need to release all of the glTextures to avoid collisions of texture id's when reloading the textures onto the GPU for (auto& item : _textures) { item->_texture->releaseGLTexture(); } CCLOG("reload all texture"); for (auto& texture : _textures) { VolatileTexture *vt = texture; switch (vt->_cashedImageType) { case VolatileTexture::kImageFile: { reloadTexture(vt->_texture, vt->_fileName, vt->_pixelFormat); // etc1 support check whether alpha texture exists & load it auto alphaFile = vt->_fileName + TextureCache::getETC1AlphaFileSuffix(); reloadTexture(vt->_texture->getAlphaTexture(), alphaFile, vt->_pixelFormat); } break; case VolatileTexture::kImageData: { vt->_texture->initWithData(vt->_textureData, vt->_dataLen, vt->_pixelFormat, vt->_textureSize.width, vt->_textureSize.height, vt->_textureSize); } break; case VolatileTexture::kString: { vt->_texture->initWithString(vt->_text.c_str(), vt->_fontDefinition); } break; case VolatileTexture::kImage: { vt->_texture->initWithImage(vt->_uiImage); } break; default: break; } if (vt->_hasMipmaps) { vt->_texture->generateMipmap(); } vt->_texture->setTexParameters(vt->_texParams); } _isReloading = false; } void VolatileTextureMgr::reloadTexture(Texture2D* texture, const std::string& filename, Texture2D::PixelFormat pixelFormat) { if (!texture) return; Image* image = new (std::nothrow) Image(); Data data = FileUtils::getInstance()->getDataFromFile(filename); if (image && image->initWithImageData(data.getBytes(), data.getSize())) texture->initWithImage(image, pixelFormat); CC_SAFE_RELEASE(image); } #endif // CC_ENABLE_CACHE_TEXTURE_DATA NS_CC_END
soniyj/basement
src/Cocos2d-x/Colby/cocos2d/cocos/renderer/CCTextureCache.cpp
C++
gpl-2.0
27,340
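The CCTextureCache.cpp record above documents the request/response queues behind TextureCache::addImageAsync and the callbackKey-based unbind. The following is a minimal usage sketch of that public API, assuming a cocos2d-x project; the file path and the "hero-loader" key are made-up examples, not values from the source.

#include "cocos2d.h"

using namespace cocos2d;

static void loadHeroTextureAsync()
{
    TextureCache *cache = Director::getInstance()->getTextureCache();

    // Queue the load; a worker thread decodes the image, then the scheduled
    // callback on the GL thread converts it to a texture before invoking us.
    cache->addImageAsync("sprites/hero.png", [](Texture2D *texture) {
        if (texture)
            CCLOG("hero texture ready: %d x %d",
                  (int)texture->getPixelsWide(), (int)texture->getPixelsHigh());
        else
            CCLOG("hero texture failed to load");
    }, /*callbackKey*/ "hero-loader");

    // If the requesting object goes away before the load completes, unbind by
    // the same key so the stale callback is never invoked (the texture still loads
    // and is cached).
    // cache->unbindImageAsync("hero-loader");
}

Keying the unbind on callbackKey rather than on the path is what lets several callers request the same file and cancel their own callbacks independently, as the comment block in the record notes.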
/* ScummVM - Graphic Adventure Engine * * ScummVM is the legal property of its developers, whose names * are too numerous to list here. Please refer to the COPYRIGHT * file distributed with this source distribution. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * */ #include "common/error.h" #include "common/events.h" #include "common/system.h" #include "queen/journal.h" #include "queen/bankman.h" #include "queen/display.h" #include "queen/graphics.h" #include "queen/grid.h" #include "queen/logic.h" #include "queen/music.h" #include "queen/queen.h" #include "queen/resource.h" #include "queen/sound.h" namespace Queen { Journal::Journal(QueenEngine *vm) : _vm(vm) { _currentSavePage = 0; _currentSaveSlot = 0; } void Journal::use() { BobSlot *joe = _vm->graphics()->bob(0); _prevJoeX = joe->x; _prevJoeY = joe->y; _panelMode = PM_NORMAL; _system = g_system; _panelTextCount = 0; memset(_panelTextY, 0, sizeof(_panelTextY)); memset(&_textField, 0, sizeof(_textField)); memset(_saveDescriptions, 0, sizeof(_saveDescriptions)); _vm->findGameStateDescriptions(_saveDescriptions); setup(); redraw(); update(); _vm->display()->palFadeIn(ROOM_JOURNAL); _quitMode = QM_LOOP; while (_quitMode == QM_LOOP) { Common::Event event; Common::EventManager *eventMan = _system->getEventManager(); while (eventMan->pollEvent(event)) { switch (event.type) { case Common::EVENT_KEYDOWN: handleKeyDown(event.kbd.ascii, event.kbd.keycode); break; case Common::EVENT_LBUTTONDOWN: handleMouseDown(event.mouse.x, event.mouse.y); break; case Common::EVENT_WHEELUP: handleMouseWheel(-1); break; case Common::EVENT_WHEELDOWN: handleMouseWheel(1); break; case Common::EVENT_RETURN_TO_LAUNCHER: case Common::EVENT_QUIT: return; default: break; } } _system->delayMillis(20); _system->updateScreen(); } _vm->writeOptionSettings(); _vm->display()->clearTexts(0, GAME_SCREEN_HEIGHT - 1); _vm->graphics()->putCameraOnBob(0); if (_quitMode == QM_CONTINUE) { continueGame(); } } void Journal::continueGame() { _vm->display()->fullscreen(false); _vm->display()->forceFullRefresh(); _vm->logic()->joePos(_prevJoeX, _prevJoeY); _vm->logic()->joeCutFacing(_vm->logic()->joeFacing()); _vm->logic()->oldRoom(_vm->logic()->currentRoom()); _vm->logic()->displayRoom(_vm->logic()->currentRoom(), RDM_FADE_JOE, 0, 0, false); } void Journal::setup() { _vm->display()->palFadeOut(_vm->logic()->currentRoom()); _vm->display()->horizontalScroll(0); _vm->display()->fullscreen(true); _vm->graphics()->clearBobs(); _vm->display()->clearTexts(0, GAME_SCREEN_HEIGHT - 1); _vm->bankMan()->eraseFrames(false); _vm->display()->textCurrentColor(_vm->display()->getInkColor(INK_JOURNAL)); _vm->grid()->clear(GS_ROOM); for (int i = 0; i < MAX_ZONES; ++i) { const Zone *zn = &_zones[i]; _vm->grid()->setZone(GS_ROOM, zn->num, zn->x1, zn->y1, zn->x2, zn->y2); } _vm->display()->setupNewRoom("journal", ROOM_JOURNAL); 
_vm->bankMan()->load("journal.BBK", JOURNAL_BANK); for (int f = 1; f <= 20; ++f) { int frameNum = JOURNAL_FRAMES + f; _vm->bankMan()->unpack(f, frameNum, JOURNAL_BANK); BobFrame *bf = _vm->bankMan()->fetchFrame(frameNum); bf->xhotspot = 0; bf->yhotspot = 0; if (f == FRAME_INFO_BOX) { // adjust info box hot spot to put it always on top bf->yhotspot = 200; } } _vm->bankMan()->close(JOURNAL_BANK); _textField.x = 136; _textField.y = 9; _textField.w = 146; _textField.h = 13; } void Journal::redraw() { drawNormalPanel(); drawConfigPanel(); drawSaveDescriptions(); drawSaveSlot(); } void Journal::update() { _vm->graphics()->sortBobs(); _vm->display()->prepareUpdate(); _vm->graphics()->drawBobs(); if (_textField.enabled) { int16 x = _textField.x + _textField.posCursor; int16 y = _textField.y + _currentSaveSlot * _textField.h + 8; _vm->display()->drawBox(x, y, x + 6, y, _vm->display()->getInkColor(INK_JOURNAL)); } _vm->display()->forceFullRefresh(); _vm->display()->update(); _system->updateScreen(); } void Journal::showBob(int bobNum, int16 x, int16 y, int frameNum) { BobSlot *bob = _vm->graphics()->bob(bobNum); bob->curPos(x, y); bob->frameNum = JOURNAL_FRAMES + frameNum; } void Journal::hideBob(int bobNum) { _vm->graphics()->bob(bobNum)->active = false; } void Journal::drawSaveDescriptions() { for (int i = 0; i < NUM_SAVES_PER_PAGE; ++i) { int n = _currentSavePage * 10 + i; char nb[4]; sprintf(nb, "%d", n + 1); int y = _textField.y + i * _textField.h; _vm->display()->setText(_textField.x, y, _saveDescriptions[n], false); _vm->display()->setText(_textField.x - 27, y + 1, nb, false); } // highlight current page showBob(BOB_SAVE_PAGE, 300, 3 + _currentSavePage * 15, 6 + _currentSavePage); } void Journal::drawSaveSlot() { showBob(BOB_SAVE_DESC, 130, 6 + _currentSaveSlot * 13, 17); } void Journal::enterYesNoPanelMode(int16 prevZoneNum, int titleNum) { _panelMode = PM_YES_NO; _prevZoneNum = prevZoneNum; drawYesNoPanel(titleNum); } void Journal::exitYesNoPanelMode() { _panelMode = PM_NORMAL; if (_prevZoneNum == ZN_MAKE_ENTRY) { closeTextField(); } redraw(); } void Journal::enterInfoPanelMode() { _panelMode = PM_INFO_BOX; _vm->display()->clearTexts(0, GAME_SCREEN_HEIGHT - 1); drawInfoPanel(); } void Journal::exitInfoPanelMode() { _vm->display()->clearTexts(0, GAME_SCREEN_HEIGHT - 1); hideBob(BOB_INFO_BOX); redraw(); _panelMode = PM_NORMAL; } void Journal::handleKeyDown(uint16 ascii, int keycode) { switch (_panelMode) { case PM_INFO_BOX: break; case PM_YES_NO: if (keycode == Common::KEYCODE_ESCAPE) { exitYesNoPanelMode(); } else if (_textField.enabled) { updateTextField(ascii, keycode); } break; case PM_NORMAL: if (keycode == Common::KEYCODE_ESCAPE) { _quitMode = QM_CONTINUE; } break; default: break; } } void Journal::handleMouseWheel(int inc) { if (_panelMode == PM_NORMAL) { int curSave = _currentSavePage * NUM_SAVES_PER_PAGE + _currentSaveSlot + inc; if (curSave >= 0 && curSave < NUM_SAVES_PER_PAGE * 10) { _currentSavePage = curSave / NUM_SAVES_PER_PAGE; _currentSaveSlot = curSave % NUM_SAVES_PER_PAGE; drawSaveDescriptions(); drawSaveSlot(); update(); } } } void Journal::handleMouseDown(int x, int y) { int val; int16 zoneNum = _vm->grid()->findZoneForPos(GS_ROOM, x, y); switch (_panelMode) { case PM_INFO_BOX: exitInfoPanelMode(); break; case PM_YES_NO: if (zoneNum == ZN_YES) { _panelMode = PM_NORMAL; int currentSlot = _currentSavePage * 10 + _currentSaveSlot; switch (_prevZoneNum) { case ZN_REVIEW_ENTRY: if (_saveDescriptions[currentSlot][0]) { _vm->graphics()->clearBobs(); 
_vm->display()->palFadeOut(ROOM_JOURNAL); _vm->sound()->stopSong(); _vm->loadGameState(currentSlot); _vm->display()->clearTexts(0, GAME_SCREEN_HEIGHT - 1); _quitMode = QM_RESTORE; } else { exitYesNoPanelMode(); } break; case ZN_MAKE_ENTRY: if (_textField.text[0]) { closeTextField(); _vm->saveGameState(currentSlot, _textField.text); _quitMode = QM_CONTINUE; } else { exitYesNoPanelMode(); } break; case ZN_GIVEUP: _quitMode = QM_CONTINUE; _vm->quitGame(); break; default: break; } } else if (zoneNum == ZN_NO) { exitYesNoPanelMode(); } break; case PM_NORMAL: switch (zoneNum) { case ZN_REVIEW_ENTRY: enterYesNoPanelMode(zoneNum, TXT_REVIEW_ENTRY); break; case ZN_MAKE_ENTRY: initTextField(_saveDescriptions[_currentSavePage * 10 + _currentSaveSlot]); enterYesNoPanelMode(zoneNum, TXT_MAKE_ENTRY); break; case ZN_CLOSE: _quitMode = QM_CONTINUE; break; case ZN_GIVEUP: enterYesNoPanelMode(zoneNum, TXT_GIVE_UP); break; case ZN_TEXT_SPEED: val = (x - 136) * QueenEngine::MAX_TEXT_SPEED / (266 - 136); _vm->talkSpeed(val); drawConfigPanel(); break; case ZN_SFX_TOGGLE: _vm->sound()->toggleSfx(); drawConfigPanel(); break; case ZN_MUSIC_VOLUME: val = (x - 136) * Audio::Mixer::kMaxMixerVolume / (266 - 136); _vm->sound()->setVolume(val); drawConfigPanel(); break; case ZN_DESC_1: case ZN_DESC_2: case ZN_DESC_3: case ZN_DESC_4: case ZN_DESC_5: case ZN_DESC_6: case ZN_DESC_7: case ZN_DESC_8: case ZN_DESC_9: case ZN_DESC_10: _currentSaveSlot = zoneNum - ZN_DESC_1; drawSaveSlot(); break; case ZN_PAGE_A: case ZN_PAGE_B: case ZN_PAGE_C: case ZN_PAGE_D: case ZN_PAGE_E: case ZN_PAGE_F: case ZN_PAGE_G: case ZN_PAGE_H: case ZN_PAGE_I: case ZN_PAGE_J: _currentSavePage = zoneNum - ZN_PAGE_A; drawSaveDescriptions(); break; case ZN_INFO_BOX: enterInfoPanelMode(); break; case ZN_MUSIC_TOGGLE: _vm->sound()->toggleMusic(); if (_vm->sound()->musicOn()) { _vm->sound()->playLastSong(); } else { _vm->sound()->stopSong(); } drawConfigPanel(); break; case ZN_VOICE_TOGGLE: _vm->sound()->toggleSpeech(); drawConfigPanel(); break; case ZN_TEXT_TOGGLE: _vm->subtitles(!_vm->subtitles()); drawConfigPanel(); break; default: break; } break; default: break; } update(); } static void removeLeadingAndTrailingSpaces(char *dst, size_t dstSize, const char* src) { assert(dstSize > 0); size_t srcLen = strlen(src); if (0 == srcLen) { dst[0] = '\0'; return; } size_t firstNonSpaceIndex; for (firstNonSpaceIndex = 0; firstNonSpaceIndex < srcLen; ++firstNonSpaceIndex) { if (src[firstNonSpaceIndex] != ' ') break; } if (firstNonSpaceIndex == srcLen) { dst[0] = '\0'; return; } size_t lastNonSpaceIndex = srcLen - 1; while (src[lastNonSpaceIndex] == ' ') --lastNonSpaceIndex; uint newLen = lastNonSpaceIndex - firstNonSpaceIndex + 1; assert(newLen < dstSize); for (size_t i = 0; i < newLen; ++i) { dst[i] = src[firstNonSpaceIndex + i]; } dst[newLen] = '\0'; } void Journal::drawPanelText(int y, const char *text) { debug(7, "Journal::drawPanelText(%d, '%s')", y, text); char s[128]; removeLeadingAndTrailingSpaces(s, 128, text); // necessary for spanish version // draw the substrings char *p = strchr(s, ' '); if (!p) { int x = (128 - _vm->display()->textWidth(s)) / 2; _vm->display()->setText(x, y, s, false); assert(_panelTextCount < MAX_PANEL_TEXTS); _panelTextY[_panelTextCount++] = y; } else { *p++ = '\0'; if (_vm->resource()->getLanguage() == Common::HE_ISR) { drawPanelText(y - 5, p); drawPanelText(y + 5, s); } else { drawPanelText(y - 5, s); drawPanelText(y + 5, p); } } } void Journal::drawCheckBox(bool active, int bobNum, int16 x, int16 y, int frameNum) { if (active) 
{ showBob(bobNum, x, y, frameNum); } else { hideBob(bobNum); } } void Journal::drawSlideBar(int value, int maxValue, int bobNum, int16 y, int frameNum) { showBob(bobNum, 136 + value * (266 - 136) / maxValue, y, frameNum); } void Journal::drawPanel(const int *frames, const int *titles, int n) { for (int i = 0; i < _panelTextCount; ++i) { _vm->display()->clearTexts(_panelTextY[i], _panelTextY[i]); } _panelTextCount = 0; int bobNum = 1; int y = 8; while (n--) { showBob(bobNum++, 32, y, *frames++); drawPanelText(y + 12, _vm->logic()->joeResponse(*titles++)); y += 48; } } void Journal::drawNormalPanel() { static const int frames[] = { FRAME_BLUE_1, FRAME_BLUE_2, FRAME_BLUE_1, FRAME_ORANGE }; static const int titles[] = { TXT_REVIEW_ENTRY, TXT_MAKE_ENTRY, TXT_CLOSE, TXT_GIVE_UP }; drawPanel(frames, titles, 4); } void Journal::drawYesNoPanel(int titleNum) { static const int frames[] = { FRAME_GREY, FRAME_BLUE_1, FRAME_BLUE_2 }; const int titles[] = { titleNum, TXT_YES, TXT_NO }; drawPanel(frames, titles, 3); hideBob(BOB_LEFT_RECT_4); hideBob(BOB_TALK_SPEED); hideBob(BOB_SFX_TOGGLE); hideBob(BOB_MUSIC_VOLUME); hideBob(BOB_SPEECH_TOGGLE); hideBob(BOB_TEXT_TOGGLE); hideBob(BOB_MUSIC_TOGGLE); } void Journal::drawConfigPanel() { _vm->checkOptionSettings(); drawSlideBar(_vm->talkSpeed(), QueenEngine::MAX_TEXT_SPEED, BOB_TALK_SPEED, 164, FRAME_BLUE_PIN); drawSlideBar(_vm->sound()->getVolume(), Audio::Mixer::kMaxMixerVolume, BOB_MUSIC_VOLUME, 177, FRAME_GREEN_PIN); drawCheckBox(_vm->sound()->sfxOn(), BOB_SFX_TOGGLE, 221, 155, FRAME_CHECK_BOX); drawCheckBox(_vm->sound()->speechOn(), BOB_SPEECH_TOGGLE, 158, 155, FRAME_CHECK_BOX); drawCheckBox(_vm->subtitles(), BOB_TEXT_TOGGLE, 125, 167, FRAME_CHECK_BOX); drawCheckBox(_vm->sound()->musicOn(), BOB_MUSIC_TOGGLE, 125, 181, FRAME_CHECK_BOX); } void Journal::drawInfoPanel() { showBob(BOB_INFO_BOX, 72, 221, FRAME_INFO_BOX); const char *ver = _vm->resource()->getJASVersion(); switch (ver[0]) { case 'P': _vm->display()->setTextCentered(132, "PC Hard Drive", false); break; case 'C': _vm->display()->setTextCentered(132, "PC CD-ROM", false); break; case 'a': _vm->display()->setTextCentered(132, "Amiga A500/600", false); break; default: break; } switch (ver[1]) { case 'E': _vm->display()->setTextCentered(144, "English", false); break; case 'F' : _vm->display()->setTextCentered(144, "Fran\x87""ais", false); break; case 'G': _vm->display()->setTextCentered(144, "Deutsch", false); break; case 'H': _vm->display()->setTextCentered(144, "Hebrew", false); break; case 'I': _vm->display()->setTextCentered(144, "Italiano", false); break; case 'S': _vm->display()->setTextCentered(144, "Espa\xA4""ol", false); break; default: break; } char versionId[13]; sprintf(versionId, "Version %c.%c%c", ver[2], ver[3], ver[4]); _vm->display()->setTextCentered(156, versionId, false); } void Journal::initTextField(const char *desc) { _system->setFeatureState(OSystem::kFeatureVirtualKeyboard, true); _textField.enabled = true; _textField.posCursor = _vm->display()->textWidth(desc); _textField.textCharsCount = strlen(desc); memset(_textField.text, 0, sizeof(_textField.text)); strcpy(_textField.text, desc); } void Journal::updateTextField(uint16 ascii, int keycode) { bool dirty = false; switch (keycode) { case Common::KEYCODE_BACKSPACE: if (_textField.textCharsCount > 0) { --_textField.textCharsCount; _textField.text[_textField.textCharsCount] = '\0'; dirty = true; } break; case Common::KEYCODE_RETURN: case Common::KEYCODE_KP_ENTER: if (_textField.text[0]) { closeTextField(); int currentSlot = 
_currentSavePage * 10 + _currentSaveSlot; _vm->saveGameState(currentSlot, _textField.text); _quitMode = QM_CONTINUE; } break; default: if (Common::isPrint((char)ascii) && _textField.textCharsCount < (sizeof(_textField.text) - 1) && _vm->display()->textWidth(_textField.text) < _textField.w) { _textField.text[_textField.textCharsCount] = (char)ascii; ++_textField.textCharsCount; dirty = true; } break; } if (dirty) { _vm->display()->setText(_textField.x, _textField.y + _currentSaveSlot * _textField.h, _textField.text, false); _textField.posCursor = _vm->display()->textWidth(_textField.text); update(); } } void Journal::closeTextField() { _system->setFeatureState(OSystem::kFeatureVirtualKeyboard, false); _textField.enabled = false; } const Journal::Zone Journal::_zones[] = { { ZN_REVIEW_ENTRY, 32, 8, 96, 40 }, { ZN_MAKE_ENTRY, 32, 56, 96, 88 }, // == ZN_YES { ZN_CLOSE, 32, 104, 96, 136 }, // == ZN_NO { ZN_GIVEUP, 32, 152, 96, 184 }, { ZN_TEXT_SPEED, 136, 169, 265, 176 }, { ZN_SFX_TOGGLE, 197, 155, 231, 164 }, { ZN_MUSIC_VOLUME, 136, 182, 265, 189 }, { ZN_DESC_1, 131, 7, 290, 18 }, { ZN_DESC_2, 131, 20, 290, 31 }, { ZN_DESC_3, 131, 33, 290, 44 }, { ZN_DESC_4, 131, 46, 290, 57 }, { ZN_DESC_5, 131, 59, 290, 70 }, { ZN_DESC_6, 131, 72, 290, 83 }, { ZN_DESC_7, 131, 85, 290, 96 }, { ZN_DESC_8, 131, 98, 290, 109 }, { ZN_DESC_9, 131, 111, 290, 122 }, { ZN_DESC_10, 131, 124, 290, 135 }, { ZN_PAGE_A, 300, 4, 319, 17 }, { ZN_PAGE_B, 300, 19, 319, 32 }, { ZN_PAGE_C, 300, 34, 319, 47 }, { ZN_PAGE_D, 300, 49, 319, 62 }, { ZN_PAGE_E, 300, 64, 319, 77 }, { ZN_PAGE_F, 300, 79, 319, 92 }, { ZN_PAGE_G, 300, 94, 319, 107 }, { ZN_PAGE_H, 300, 109, 319, 122 }, { ZN_PAGE_I, 300, 124, 319, 137 }, { ZN_PAGE_J, 300, 139, 319, 152 }, { ZN_INFO_BOX, 273, 146, 295, 189 }, { ZN_MUSIC_TOGGLE, 109, 181, 135, 190 }, { ZN_VOICE_TOGGLE, 134, 155, 168, 164 }, { ZN_TEXT_TOGGLE, 109, 168, 135, 177 } }; } // End of namespace Queen
vanfanel/scummvm
engines/queen/journal.cpp
C++
gpl-2.0
17,228
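The engines/queen/journal.cpp record above drives all of its mouse handling off Journal::_zones, a table of rectangles resolved by Grid::findZoneForPos. The standalone C++ sketch below re-implements that rectangle hit-test for illustration only: the three rectangles are taken from the _zones table, but the zone numbers are placeholders and the lookup is not the engine's actual Grid code.

#include <cstdint>
#include <cstdio>

struct Zone { int16_t num, x1, y1, x2, y2; };

// Trimmed, illustrative subset of Journal::_zones (coordinates from the record above).
static const Zone kZones[] = {
    { 1, 32,   8, 96,  40 },   // review entry
    { 2, 32,  56, 96,  88 },   // make entry
    { 3, 32, 104, 96, 136 },   // close
};

// Return the number of the first zone containing (x, y), or -1 for a miss.
static int16_t findZoneForPos(int16_t x, int16_t y)
{
    for (const Zone &zn : kZones)
        if (x >= zn.x1 && x <= zn.x2 && y >= zn.y1 && y <= zn.y2)
            return zn.num;
    return -1;
}

int main()
{
    std::printf("click (50, 60)  -> zone %d\n", findZoneForPos(50, 60));   // 2 (make entry)
    std::printf("click (200, 10) -> zone %d\n", findZoneForPos(200, 10));  // -1 (no zone)
    return 0;
}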
class="entry"><b>Single</b>(string sql, params object[] args) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr bgcolor="#f0f0f0"><td class="entry"><b>Single</b>(Sql sql) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>SingleOrDefault</b>(object primaryKey) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr bgcolor="#f0f0f0"><td class="entry"><b>SingleOrDefault</b>(string sql, params object[] args) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>SingleOrDefault</b>(Sql sql) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr bgcolor="#f0f0f0"><td class="entry"><b>SkipTake</b>(long skip, long take, string sql, params object[] args) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>SkipTake</b>(long skip, long take, Sql sql) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr bgcolor="#f0f0f0"><td class="entry"><b>StatementReference</b> (defined in <a class="el" 
href="class_mix_e_r_p_1_1_net_1_1_entities_1_1_transactions_1_1_db_get_retained_earnings_statement_result.html">MixERP.Net.Entities.Transactions.DbGetRetainedEarningsStatementResult</a>)</td><td class="entry"><a class="el" href="class_mix_e_r_p_1_1_net_1_1_entities_1_1_transactions_1_1_db_get_retained_earnings_statement_result.html">MixERP.Net.Entities.Transactions.DbGetRetainedEarningsStatementResult</a></td><td class="entry"></td></tr> <tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>TranCode</b> (defined in <a class="el" href="class_mix_e_r_p_1_1_net_1_1_entities_1_1_transactions_1_1_db_get_retained_earnings_statement_result.html">MixERP.Net.Entities.Transactions.DbGetRetainedEarningsStatementResult</a>)</td><td class="entry"><a class="el" href="class_mix_e_r_p_1_1_net_1_1_entities_1_1_transactions_1_1_db_get_retained_earnings_statement_result.html">MixERP.Net.Entities.Transactions.DbGetRetainedEarningsStatementResult</a></td><td class="entry"></td></tr> <tr bgcolor="#f0f0f0"><td class="entry"><b>Update</b>() (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"></td></tr> <tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>Update</b>(IEnumerable&lt; string &gt; columns) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"></td></tr> <tr bgcolor="#f0f0f0"><td class="entry"><b>Update</b>(string sql, params object[] args) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>Update</b>(Sql sql) (defined in <a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a>)</td><td class="entry"><a class="el" href="class_peta_poco_1_1_peta_poco_d_b_1_1_record.html">PetaPoco.PetaPocoDB.Record&lt; DbGetRetainedEarningsStatementResult &gt;</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr bgcolor="#f0f0f0"><td class="entry"><b>ValueDate</b> (defined in <a class="el" href="class_mix_e_r_p_1_1_net_1_1_entities_1_1_transactions_1_1_db_get_retained_earnings_statement_result.html">MixERP.Net.Entities.Transactions.DbGetRetainedEarningsStatementResult</a>)</td><td class="entry"><a class="el" href="class_mix_e_r_p_1_1_net_1_1_entities_1_1_transactions_1_1_db_get_retained_earnings_statement_result.html">MixERP.Net.Entities.Transactions.DbGetRetainedEarningsStatementResult</a></td><td class="entry"></td></tr> <tr bgcolor="#f0f0f0" class="even"><td class="entry"><b>VerificationStatus</b> (defined in <a class="el" 
href="class_mix_e_r_p_1_1_net_1_1_entities_1_1_transactions_1_1_db_get_retained_earnings_statement_result.html">MixERP.Net.Entities.Transactions.DbGetRetainedEarningsStatementResult</a>)</td><td class="entry"><a class="el" href="class_mix_e_r_p_1_1_net_1_1_entities_1_1_transactions_1_1_db_get_retained_earnings_statement_result.html">MixERP.Net.Entities.Transactions.DbGetRetainedEarningsStatementResult</a></td><td class="entry"></td></tr> </table></div><!-- contents --> </div><!-- doc-content --> <!-- start footer part --> <div id="nav-path" class="navpath"><!-- id is needed for treeview function! --> <ul> <li class="footer">Generated by <a href="http://www.doxygen.org/index.html"> <img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.10 </li> </ul> </div> </body> </html>
mixerp6/mixerp
docs/api/class_mix_e_r_p_1_1_net_1_1_entities_1_1_transactions_1_1_db_get_retained_earnings_statement_result-members.html
HTML
gpl-2.0
29,377
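For readers who want the shape of that result row rather than the generated index, here is a minimal C sketch of it as a plain struct. The field types, array sizes and the net-movement calculation are assumptions; the member index above lists names only.

#include <stdio.h>
#include <time.h>

/* Hypothetical C view of one DbGetRetainedEarningsStatementResult row.
 * Field types are guesses; the Doxygen index gives only the names. */
struct RetainedEarningsRow {
    long long Id;                      /* row identifier */
    time_t    ValueDate;               /* transaction value date */
    time_t    PostedOn;                /* posting timestamp */
    double    Debit;                   /* money amounts (decimal in the source system) */
    double    Credit;
    char      TranCode[32];
    char      StatementReference[128];
    char      PostedBy[64];
    char      Office[64];
    char      VerificationStatus[32];
};

int main(void)
{
    struct RetainedEarningsRow r = {0};
    r.Debit  = 150.00;
    r.Credit = 400.00;
    /* Net movement for one statement line, as a consumer of these rows might compute it. */
    printf("net movement = %.2f\n", r.Credit - r.Debit);
    return 0;
}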
/* Copyright (C) 2006 - 2010 ScriptDev2 <https://scriptdev2.svn.sourceforge.net/> * This program is free software licensed under GPL version 2 * Please see the included DOCS/LICENSE.TXT for more information */ #ifndef SC_HYJALAI_H #define SC_HYJALAI_H #include "hyjal.h" enum eBaseArea { BASE_ALLY = 0, BASE_HORDE = 1 }; enum eMisc { MAX_SPELL = 3, MAX_WAVES = 9, MAX_WAVE_MOB = 18, ITEM_TEAR_OF_GODDESS = 24494 }; enum eSpell { SPELL_MASS_TELEPORT = 16807, // Spells for Jaina SPELL_BRILLIANCE_AURA = 31260, SPELL_BLIZZARD = 31266, SPELL_PYROBLAST = 31263, SPELL_SUMMON_ELEMENTALS = 31264, // Thrall spells SPELL_CHAIN_LIGHTNING = 31330, SPELL_FERAL_SPIRIT = 31331 }; struct sHyjalWave { uint32 m_auiMobEntry[MAX_WAVE_MOB]; // Stores Creature Entries to be summoned in Waves uint32 m_uiWaveTimer; // The timer before the next wave is summoned bool m_bIsBoss; // Simply used to inform the wave summoner that the next wave contains a boss to halt all waves after that }; // Waves that will be summoned in the Alliance Base static sHyjalWave m_aHyjalWavesAlliance[]= { // Rage Winterchill Wave 1-8 {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, 0, 0, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_ABOMI, NPC_ABOMI, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 180000, false}, // All 8 Waves are summoned, summon Rage Winterchill, next few waves are for Anetheron {NPC_WINTERCHILL, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, true}, // Anetheron Wave 1-8 {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, 0, 0, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_BANSH, NPC_BANSH, NPC_BANSH, NPC_BANSH, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_NECRO, NPC_NECRO, NPC_BANSH, NPC_BANSH, NPC_BANSH, NPC_BANSH, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_ABOMI, NPC_ABOMI, NPC_NECRO, 
NPC_NECRO, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_BANSH, NPC_BANSH, NPC_BANSH, NPC_BANSH, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_BANSH, NPC_BANSH, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 180000, false}, // All 8 Waves are summoned, summon Anatheron {NPC_ANETHERON, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, true} }; // Waves that are summoned in the Horde base static sHyjalWave m_aHyjalWavesHorde[]= { // Kaz'Rogal Wave 1-8 {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_BANSH, NPC_BANSH, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 120000, false}, {NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_FROST, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_FROST, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_NECRO, NPC_NECRO, NPC_BANSH, NPC_BANSH, 180000, false}, // All 8 Waves are summoned, summon Kaz'Rogal, next few waves are for Azgalor {NPC_KAZROGAL, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, true}, // Azgalor Wave 1-8 {NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_FROST, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, NPC_GARGO, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GHOUL, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, 0, 0, 0, 0, 120000, false}, {NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_STALK, NPC_STALK, NPC_STALK, NPC_STALK, NPC_STALK, NPC_STALK, 0, 0, 0, 0, 120000, false}, {NPC_STALK, NPC_STALK, NPC_STALK, NPC_STALK, NPC_STALK, NPC_STALK, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, 0, 0, 0, 0, 120000, false}, {NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_NECRO, NPC_BANSH, NPC_BANSH, NPC_BANSH, NPC_BANSH, NPC_BANSH, NPC_BANSH, 0, 0, 0, 0, 120000, false}, {NPC_GHOUL, NPC_GHOUL, NPC_CRYPT, NPC_CRYPT, NPC_STALK, NPC_STALK, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, NPC_GIANT, 0, 0, 0, 0, 0, 0, 
120000, false}, {NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_CRYPT, NPC_STALK, NPC_STALK, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_ABOMI, NPC_BANSH, NPC_BANSH, NPC_BANSH, NPC_BANSH, NPC_NECRO, NPC_NECRO, 0, 0, 180000, false}, // All 8 Waves are summoned, summon Azgalor {NPC_AZGALOR, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, true} }; enum TargetType // Used in the spell cast system for the AI { TARGETTYPE_SELF = 0, TARGETTYPE_RANDOM = 1, TARGETTYPE_VICTIM = 2, }; enum YellType { ATTACKED = 0, // Used when attacked and set in combat BEGIN = 1, // Used when the event is begun INCOMING = 2, // Used to warn the raid that another wave phase is coming RALLY = 3, // Used to rally the raid and warn that the next wave has been summoned FAILURE = 4, // Used when raid has failed (unsure where to place) SUCCESS = 5, // Used when the raid has sucessfully defeated a wave phase DEATH = 6, // Used on death }; struct MANGOS_DLL_DECL hyjalAI : public ScriptedAI { hyjalAI(Creature* pCreature) : ScriptedAI(pCreature) { memset(m_aSpells, 0, sizeof(m_aSpells)); m_pInstance = (ScriptedInstance*)pCreature->GetInstanceData(); Reset(); } // Generically used to reset our variables. Do *not* call in EnterEvadeMode as this may make problems if the raid is still in combat void Reset(); // Send creature back to spawn location and evade. void EnterEvadeMode(); // Called when creature reached home location after evade. void JustReachedHome(); // Used to reset cooldowns for our spells and to inform the raid that we're under attack void Aggro(Unit* pWho); // Called to summon waves, check for boss deaths and to cast our spells. void UpdateAI(const uint32 uiDiff); // Called on death, informs the raid that they have failed. void JustDied(Unit* pKiller); // "Teleport" all friendly creatures away from the base. void Retreat(); // Summons a creature for that wave in that base void SpawnCreatureForWave(uint32 uiMobEntry); void JustSummoned(Creature*); // Summons the next wave, calls SummonCreature void SummonNextWave(); // Begins the event by gossip click void StartEvent(); // Searches for the appropriate yell and sound and uses it to inform the raid of various things void DoTalk(YellType pYellType); // Used to filter who to despawn after mass teleport void SpellHitTarget(Unit*, const SpellEntry*); public: ScriptedInstance* m_pInstance; uint64 m_uiBossGUID[2]; uint32 m_uiNextWaveTimer; uint32 m_uiWaveCount; uint32 m_uiWaveMoveTimer; uint32 m_uiCheckTimer; uint32 m_uiEnemyCount; uint32 m_uiRetreatTimer; uint32 m_uiBase; bool m_bIsEventInProgress; bool m_bIsFirstBossDead; bool m_bIsSecondBossDead; bool m_bIsSummoningWaves; bool m_bIsRetreating; bool m_bDebugMode; struct sSpells { uint32 m_uiSpellId; uint32 m_uiCooldown; TargetType m_pType; } m_aSpells[MAX_SPELL]; private: uint32 m_uiSpellTimer[MAX_SPELL]; std::list<uint64> lWaveMobGUIDList; }; #endif
Apple15/AppleCore
src/bindings/scriptdev2/scripts/kalimdor/caverns_of_time/hyjal/hyjalAI.h
C
gpl-2.0
11,159
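To make the wave tables above easier to read, here is a small, self-contained C sketch of how a summoner loop in the spirit of SummonNextWave might walk an sHyjalWave-style array: spawn every non-zero entry, schedule the next wave after the stored timer, and stop automatic waves when the boss flag is set. The WaveDef type, the numeric entry IDs and the summonCreature stub are placeholders, not the real NPC_* constants or MaNGOS calls.

#include <stdio.h>

#define MAX_WAVE_MOB 18

/* Stand-in for sHyjalWave: creature entries, inter-wave delay, boss marker. */
typedef struct {
    unsigned int mobEntry[MAX_WAVE_MOB];  /* 0 marks an unused slot */
    unsigned int waveTimerMs;             /* delay before the next wave */
    int          isBossWave;              /* halt automatic waves after this one */
} WaveDef;

/* Two hypothetical waves: eight trash creatures, then a lone boss. */
static const WaveDef waves[] = {
    {{101, 101, 101, 101, 102, 102, 103, 103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, 120000, 0},
    {{999, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},                    0, 1},
};

/* Stand-in for SpawnCreatureForWave(). */
static void summonCreature(unsigned int entry)
{
    printf("  summon creature entry %u\n", entry);
}

int main(void)
{
    for (unsigned int w = 0; w < sizeof(waves) / sizeof(waves[0]); ++w) {
        printf("wave %u:\n", w + 1);
        for (int i = 0; i < MAX_WAVE_MOB; ++i)
            if (waves[w].mobEntry[i] != 0)
                summonCreature(waves[w].mobEntry[i]);

        if (waves[w].isBossWave) {
            printf("  boss wave - pause automatic summoning until the boss is handled\n");
            break;
        }
        printf("  next wave due in %u ms\n", waves[w].waveTimerMs);
    }
    return 0;
}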
<?php /** * The template for displaying Archive pages. * * Learn more: http://codex.wordpress.org/Template_Hierarchy * * @package Pictorico */ get_header(); ?> <section id="primary" class="content-area"> <main id="main" class="site-main" role="main"> <?php if ( have_posts() ) : ?> <header class="page-header"> <h1 class="page-title"> <?php if ( is_category() ) : single_cat_title(); elseif ( is_tag() ) : single_tag_title(); elseif ( is_author() ) : printf( __( 'Author: %s', 'pictorico' ), '<span class="vcard">' . get_the_author() . '</span>' ); elseif ( is_day() ) : printf( __( 'Day: %s', 'pictorico' ), '<span>' . get_the_date() . '</span>' ); elseif ( is_month() ) : printf( __( 'Month: %s', 'pictorico' ), '<span>' . get_the_date( _x( 'F Y', 'monthly archives date format', 'pictorico' ) ) . '</span>' ); elseif ( is_year() ) : printf( __( 'Year: %s', 'pictorico' ), '<span>' . get_the_date( _x( 'Y', 'yearly archives date format', 'pictorico' ) ) . '</span>' ); elseif ( is_tax( 'post_format', 'post-format-aside' ) ) : _e( 'Asides', 'pictorico' ); elseif ( is_tax( 'post_format', 'post-format-gallery' ) ) : _e( 'Galleries', 'pictorico'); elseif ( is_tax( 'post_format', 'post-format-image' ) ) : _e( 'Images', 'pictorico'); elseif ( is_tax( 'post_format', 'post-format-video' ) ) : _e( 'Videos', 'pictorico' ); elseif ( is_tax( 'post_format', 'post-format-quote' ) ) : _e( 'Quotes', 'pictorico' ); elseif ( is_tax( 'post_format', 'post-format-link' ) ) : _e( 'Links', 'pictorico' ); elseif ( is_tax( 'post_format', 'post-format-status' ) ) : _e( 'Statuses', 'pictorico' ); elseif ( is_tax( 'post_format', 'post-format-audio' ) ) : _e( 'Audios', 'pictorico' ); elseif ( is_tax( 'post_format', 'post-format-chat' ) ) : _e( 'Chats', 'pictorico' ); else : _e( 'Archives', 'pictorico' ); endif; ?> </h1> <?php // Show an optional term description. $term_description = term_description(); if ( ! empty( $term_description ) ) : printf( '<div class="taxonomy-description">%s</div>', $term_description ); endif; ?> </header><!-- .page-header --> <?php /* Start the Loop */ ?> <?php while ( have_posts() ) : the_post(); ?> <?php /* Include the Post-Format-specific template for the content. * If you want to override this in a child theme, then include a file * called content-___.php (where ___ is the Post Format name) and that will be used instead. */ get_template_part( 'content', 'home' ); ?> <?php endwhile; ?> <?php pictorico_paging_nav(); ?> <?php else : ?> <?php get_template_part( 'content', 'none' ); ?> <?php endif; ?> </main><!-- #main --> </section><!-- #primary --> <?php get_footer(); ?>
developmentDM2/CZND
wp-content/themes/smg/archive.php
PHP
gpl-2.0
2,975
/* Misc. support for CPU family m32rbf. THIS FILE IS MACHINE GENERATED WITH CGEN. Copyright 1996-2016 Free Software Foundation, Inc. This file is part of the GNU simulators. This file is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version. It is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, see <http://www.gnu.org/licenses/>. */ #define WANT_CPU m32rbf #define WANT_CPU_M32RBF #include "sim-main.h" #include "cgen-ops.h" /* Get the value of h-pc. */ USI m32rbf_h_pc_get (SIM_CPU *current_cpu) { return CPU (h_pc); } /* Set a value for h-pc. */ void m32rbf_h_pc_set (SIM_CPU *current_cpu, USI newval) { CPU (h_pc) = newval; } /* Get the value of h-gr. */ SI m32rbf_h_gr_get (SIM_CPU *current_cpu, UINT regno) { return CPU (h_gr[regno]); } /* Set a value for h-gr. */ void m32rbf_h_gr_set (SIM_CPU *current_cpu, UINT regno, SI newval) { CPU (h_gr[regno]) = newval; } /* Get the value of h-cr. */ USI m32rbf_h_cr_get (SIM_CPU *current_cpu, UINT regno) { return GET_H_CR (regno); } /* Set a value for h-cr. */ void m32rbf_h_cr_set (SIM_CPU *current_cpu, UINT regno, USI newval) { SET_H_CR (regno, newval); } /* Get the value of h-accum. */ DI m32rbf_h_accum_get (SIM_CPU *current_cpu) { return GET_H_ACCUM (); } /* Set a value for h-accum. */ void m32rbf_h_accum_set (SIM_CPU *current_cpu, DI newval) { SET_H_ACCUM (newval); } /* Get the value of h-cond. */ BI m32rbf_h_cond_get (SIM_CPU *current_cpu) { return CPU (h_cond); } /* Set a value for h-cond. */ void m32rbf_h_cond_set (SIM_CPU *current_cpu, BI newval) { CPU (h_cond) = newval; } /* Get the value of h-psw. */ UQI m32rbf_h_psw_get (SIM_CPU *current_cpu) { return GET_H_PSW (); } /* Set a value for h-psw. */ void m32rbf_h_psw_set (SIM_CPU *current_cpu, UQI newval) { SET_H_PSW (newval); } /* Get the value of h-bpsw. */ UQI m32rbf_h_bpsw_get (SIM_CPU *current_cpu) { return CPU (h_bpsw); } /* Set a value for h-bpsw. */ void m32rbf_h_bpsw_set (SIM_CPU *current_cpu, UQI newval) { CPU (h_bpsw) = newval; } /* Get the value of h-bbpsw. */ UQI m32rbf_h_bbpsw_get (SIM_CPU *current_cpu) { return CPU (h_bbpsw); } /* Set a value for h-bbpsw. */ void m32rbf_h_bbpsw_set (SIM_CPU *current_cpu, UQI newval) { CPU (h_bbpsw) = newval; } /* Get the value of h-lock. */ BI m32rbf_h_lock_get (SIM_CPU *current_cpu) { return CPU (h_lock); } /* Set a value for h-lock. */ void m32rbf_h_lock_set (SIM_CPU *current_cpu, BI newval) { CPU (h_lock) = newval; } /* Record trace results for INSN. */ void m32rbf_record_trace_results (SIM_CPU *current_cpu, CGEN_INSN *insn, int *indices, TRACE_RECORD *tr) { }
swigger/gdb-ios
sim/m32r/cpu.c
C
gpl-2.0
3,112
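As a reading aid for the generated accessors above, here is a toy C version of the same get/set pattern over a miniature CPU state. ToyCpu and its fields stand in for the real SIM_CPU structure and the CPU() macro, which live elsewhere in the simulator and are not reproduced here.

#include <stdio.h>

typedef unsigned int USI;
typedef int SI;

/* Stand-in for the generated CPU state. */
typedef struct {
    USI h_pc;       /* program counter */
    SI  h_gr[16];   /* general registers */
} ToyCpu;

static USI  toy_h_pc_get(const ToyCpu *cpu)                { return cpu->h_pc; }
static void toy_h_pc_set(ToyCpu *cpu, USI newval)          { cpu->h_pc = newval; }
static SI   toy_h_gr_get(const ToyCpu *cpu, unsigned r)    { return cpu->h_gr[r]; }
static void toy_h_gr_set(ToyCpu *cpu, unsigned r, SI v)    { cpu->h_gr[r] = v; }

int main(void)
{
    ToyCpu cpu = {0};

    toy_h_pc_set(&cpu, 0x1000);                       /* pretend we fetched from 0x1000 */
    toy_h_gr_set(&cpu, 3, 42);                        /* r3 = 42 */
    toy_h_gr_set(&cpu, 4, toy_h_gr_get(&cpu, 3) + 1); /* r4 = r3 + 1 */

    printf("pc=0x%x r3=%d r4=%d\n",
           toy_h_pc_get(&cpu), toy_h_gr_get(&cpu, 3), toy_h_gr_get(&cpu, 4));
    return 0;
}

The generated file differs mainly in that some registers (h-cr, h-psw, h-accum) route through GET_/SET_ macros instead of touching a field directly, presumably because their values are derived from, or propagate to, other CPU state.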
#include <linux/module.h> #include <linux/errno.h> #include <linux/socket.h> #include <linux/skbuff.h> #include <linux/ip.h> #include <linux/udp.h> #include <linux/types.h> #include <linux/kernel.h> #include <net/genetlink.h> #include <net/gue.h> #include <net/fou.h> #include <net/ip.h> #include <net/protocol.h> #include <net/udp.h> #include <net/udp_tunnel.h> #include <net/xfrm.h> #include <uapi/linux/fou.h> #include <uapi/linux/genetlink.h> struct fou { struct socket *sock; u8 protocol; u8 flags; __be16 port; u8 family; u16 type; struct list_head list; struct rcu_head rcu; }; #define FOU_F_REMCSUM_NOPARTIAL BIT(0) struct fou_cfg { u16 type; u8 protocol; u8 flags; struct udp_port_cfg udp_config; }; static unsigned int fou_net_id; struct fou_net { struct list_head fou_list; struct mutex fou_lock; }; static inline struct fou *fou_from_sock(struct sock *sk) { return sk->sk_user_data; } static int fou_recv_pull(struct sk_buff *skb, struct fou *fou, size_t len) { /* Remove 'len' bytes from the packet (UDP header and * FOU header if present). */ if (fou->family == AF_INET) ip_hdr(skb)->tot_len = htons(ntohs(ip_hdr(skb)->tot_len) - len); else ipv6_hdr(skb)->payload_len = htons(ntohs(ipv6_hdr(skb)->payload_len) - len); __skb_pull(skb, len); skb_postpull_rcsum(skb, udp_hdr(skb), len); skb_reset_transport_header(skb); return iptunnel_pull_offloads(skb); } static int fou_udp_recv(struct sock *sk, struct sk_buff *skb) { struct fou *fou = fou_from_sock(sk); if (!fou) return 1; if (fou_recv_pull(skb, fou, sizeof(struct udphdr))) goto drop; return -fou->protocol; drop: kfree_skb(skb); return 0; } static struct guehdr *gue_remcsum(struct sk_buff *skb, struct guehdr *guehdr, void *data, size_t hdrlen, u8 ipproto, bool nopartial) { __be16 *pd = data; size_t start = ntohs(pd[0]); size_t offset = ntohs(pd[1]); size_t plen = sizeof(struct udphdr) + hdrlen + max_t(size_t, offset + sizeof(u16), start); if (skb->remcsum_offload) return guehdr; if (!pskb_may_pull(skb, plen)) return NULL; guehdr = (struct guehdr *)&udp_hdr(skb)[1]; skb_remcsum_process(skb, (void *)guehdr + hdrlen, start, offset, nopartial); return guehdr; } static int gue_control_message(struct sk_buff *skb, struct guehdr *guehdr) { /* No support yet */ kfree_skb(skb); return 0; } static int gue_udp_recv(struct sock *sk, struct sk_buff *skb) { struct fou *fou = fou_from_sock(sk); size_t len, optlen, hdrlen; struct guehdr *guehdr; void *data; u16 doffset = 0; u8 proto_ctype; if (!fou) return 1; len = sizeof(struct udphdr) + sizeof(struct guehdr); if (!pskb_may_pull(skb, len)) goto drop; guehdr = (struct guehdr *)&udp_hdr(skb)[1]; switch (guehdr->version) { case 0: /* Full GUE header present */ break; case 1: { /* Direct encasulation of IPv4 or IPv6 */ int prot; switch (((struct iphdr *)guehdr)->version) { case 4: prot = IPPROTO_IPIP; break; case 6: prot = IPPROTO_IPV6; break; default: goto drop; } if (fou_recv_pull(skb, fou, sizeof(struct udphdr))) goto drop; return -prot; } default: /* Undefined version */ goto drop; } optlen = guehdr->hlen << 2; len += optlen; if (!pskb_may_pull(skb, len)) goto drop; /* guehdr may change after pull */ guehdr = (struct guehdr *)&udp_hdr(skb)[1]; hdrlen = sizeof(struct guehdr) + optlen; if (guehdr->version != 0 || validate_gue_flags(guehdr, optlen)) goto drop; hdrlen = sizeof(struct guehdr) + optlen; if (fou->family == AF_INET) ip_hdr(skb)->tot_len = htons(ntohs(ip_hdr(skb)->tot_len) - len); else ipv6_hdr(skb)->payload_len = htons(ntohs(ipv6_hdr(skb)->payload_len) - len); /* Pull csum through the guehdr now . 
This can be used if * there is a remote checksum offload. */ skb_postpull_rcsum(skb, udp_hdr(skb), len); data = &guehdr[1]; if (guehdr->flags & GUE_FLAG_PRIV) { __be32 flags = *(__be32 *)(data + doffset); doffset += GUE_LEN_PRIV; if (flags & GUE_PFLAG_REMCSUM) { guehdr = gue_remcsum(skb, guehdr, data + doffset, hdrlen, guehdr->proto_ctype, !!(fou->flags & FOU_F_REMCSUM_NOPARTIAL)); if (!guehdr) goto drop; data = &guehdr[1]; doffset += GUE_PLEN_REMCSUM; } } if (unlikely(guehdr->control)) return gue_control_message(skb, guehdr); proto_ctype = guehdr->proto_ctype; __skb_pull(skb, sizeof(struct udphdr) + hdrlen); skb_reset_transport_header(skb); if (iptunnel_pull_offloads(skb)) goto drop; return -proto_ctype; drop: kfree_skb(skb); return 0; } static struct sk_buff *fou_gro_receive(struct sock *sk, struct list_head *head, struct sk_buff *skb) { u8 proto = fou_from_sock(sk)->protocol; const struct net_offload **offloads; const struct net_offload *ops; struct sk_buff *pp = NULL; /* We can clear the encap_mark for FOU as we are essentially doing * one of two possible things. We are either adding an L4 tunnel * header to the outer L3 tunnel header, or we are are simply * treating the GRE tunnel header as though it is a UDP protocol * specific header such as VXLAN or GENEVE. */ NAPI_GRO_CB(skb)->encap_mark = 0; /* Flag this frame as already having an outer encap header */ NAPI_GRO_CB(skb)->is_fou = 1; rcu_read_lock(); offloads = NAPI_GRO_CB(skb)->is_ipv6 ? inet6_offloads : inet_offloads; ops = rcu_dereference(offloads[proto]); if (!ops || !ops->callbacks.gro_receive) goto out_unlock; pp = call_gro_receive(ops->callbacks.gro_receive, head, skb); out_unlock: rcu_read_unlock(); return pp; } static int fou_gro_complete(struct sock *sk, struct sk_buff *skb, int nhoff) { const struct net_offload *ops; u8 proto = fou_from_sock(sk)->protocol; int err = -ENOSYS; const struct net_offload **offloads; rcu_read_lock(); offloads = NAPI_GRO_CB(skb)->is_ipv6 ? 
inet6_offloads : inet_offloads; ops = rcu_dereference(offloads[proto]); if (WARN_ON(!ops || !ops->callbacks.gro_complete)) goto out_unlock; err = ops->callbacks.gro_complete(skb, nhoff); skb_set_inner_mac_header(skb, nhoff); out_unlock: rcu_read_unlock(); return err; } static struct guehdr *gue_gro_remcsum(struct sk_buff *skb, unsigned int off, struct guehdr *guehdr, void *data, size_t hdrlen, struct gro_remcsum *grc, bool nopartial) { __be16 *pd = data; size_t start = ntohs(pd[0]); size_t offset = ntohs(pd[1]); if (skb->remcsum_offload) return guehdr; if (!NAPI_GRO_CB(skb)->csum_valid) return NULL; guehdr = skb_gro_remcsum_process(skb, (void *)guehdr, off, hdrlen, start, offset, grc, nopartial); skb->remcsum_offload = 1; return guehdr; } static struct sk_buff *gue_gro_receive(struct sock *sk, struct list_head *head, struct sk_buff *skb) { const struct net_offload **offloads; const struct net_offload *ops; struct sk_buff *pp = NULL; struct sk_buff *p; struct guehdr *guehdr; size_t len, optlen, hdrlen, off; void *data; u16 doffset = 0; int flush = 1; struct fou *fou = fou_from_sock(sk); struct gro_remcsum grc; u8 proto; skb_gro_remcsum_init(&grc); off = skb_gro_offset(skb); len = off + sizeof(*guehdr); guehdr = skb_gro_header_fast(skb, off); if (skb_gro_header_hard(skb, len)) { guehdr = skb_gro_header_slow(skb, len, off); if (unlikely(!guehdr)) goto out; } switch (guehdr->version) { case 0: break; case 1: switch (((struct iphdr *)guehdr)->version) { case 4: proto = IPPROTO_IPIP; break; case 6: proto = IPPROTO_IPV6; break; default: goto out; } goto next_proto; default: goto out; } optlen = guehdr->hlen << 2; len += optlen; if (skb_gro_header_hard(skb, len)) { guehdr = skb_gro_header_slow(skb, len, off); if (unlikely(!guehdr)) goto out; } if (unlikely(guehdr->control) || guehdr->version != 0 || validate_gue_flags(guehdr, optlen)) goto out; hdrlen = sizeof(*guehdr) + optlen; /* Adjust NAPI_GRO_CB(skb)->csum to account for guehdr, * this is needed if there is a remote checkcsum offload. */ skb_gro_postpull_rcsum(skb, guehdr, hdrlen); data = &guehdr[1]; if (guehdr->flags & GUE_FLAG_PRIV) { __be32 flags = *(__be32 *)(data + doffset); doffset += GUE_LEN_PRIV; if (flags & GUE_PFLAG_REMCSUM) { guehdr = gue_gro_remcsum(skb, off, guehdr, data + doffset, hdrlen, &grc, !!(fou->flags & FOU_F_REMCSUM_NOPARTIAL)); if (!guehdr) goto out; data = &guehdr[1]; doffset += GUE_PLEN_REMCSUM; } } skb_gro_pull(skb, hdrlen); list_for_each_entry(p, head, list) { const struct guehdr *guehdr2; if (!NAPI_GRO_CB(p)->same_flow) continue; guehdr2 = (struct guehdr *)(p->data + off); /* Compare base GUE header to be equal (covers * hlen, version, proto_ctype, and flags. */ if (guehdr->word != guehdr2->word) { NAPI_GRO_CB(p)->same_flow = 0; continue; } /* Compare optional fields are the same. */ if (guehdr->hlen && memcmp(&guehdr[1], &guehdr2[1], guehdr->hlen << 2)) { NAPI_GRO_CB(p)->same_flow = 0; continue; } } proto = guehdr->proto_ctype; next_proto: /* We can clear the encap_mark for GUE as we are essentially doing * one of two possible things. We are either adding an L4 tunnel * header to the outer L3 tunnel header, or we are are simply * treating the GRE tunnel header as though it is a UDP protocol * specific header such as VXLAN or GENEVE. */ NAPI_GRO_CB(skb)->encap_mark = 0; /* Flag this frame as already having an outer encap header */ NAPI_GRO_CB(skb)->is_fou = 1; rcu_read_lock(); offloads = NAPI_GRO_CB(skb)->is_ipv6 ? 
inet6_offloads : inet_offloads; ops = rcu_dereference(offloads[proto]); if (WARN_ON_ONCE(!ops || !ops->callbacks.gro_receive)) goto out_unlock; pp = call_gro_receive(ops->callbacks.gro_receive, head, skb); flush = 0; out_unlock: rcu_read_unlock(); out: skb_gro_flush_final_remcsum(skb, pp, flush, &grc); return pp; } static int gue_gro_complete(struct sock *sk, struct sk_buff *skb, int nhoff) { const struct net_offload **offloads; struct guehdr *guehdr = (struct guehdr *)(skb->data + nhoff); const struct net_offload *ops; unsigned int guehlen = 0; u8 proto; int err = -ENOENT; switch (guehdr->version) { case 0: proto = guehdr->proto_ctype; guehlen = sizeof(*guehdr) + (guehdr->hlen << 2); break; case 1: switch (((struct iphdr *)guehdr)->version) { case 4: proto = IPPROTO_IPIP; break; case 6: proto = IPPROTO_IPV6; break; default: return err; } break; default: return err; } rcu_read_lock(); offloads = NAPI_GRO_CB(skb)->is_ipv6 ? inet6_offloads : inet_offloads; ops = rcu_dereference(offloads[proto]); if (WARN_ON(!ops || !ops->callbacks.gro_complete)) goto out_unlock; err = ops->callbacks.gro_complete(skb, nhoff + guehlen); skb_set_inner_mac_header(skb, nhoff + guehlen); out_unlock: rcu_read_unlock(); return err; } static int fou_add_to_port_list(struct net *net, struct fou *fou) { struct fou_net *fn = net_generic(net, fou_net_id); struct fou *fout; mutex_lock(&fn->fou_lock); list_for_each_entry(fout, &fn->fou_list, list) { if (fou->port == fout->port && fou->family == fout->family) { mutex_unlock(&fn->fou_lock); return -EALREADY; } } list_add(&fou->list, &fn->fou_list); mutex_unlock(&fn->fou_lock); return 0; } static void fou_release(struct fou *fou) { struct socket *sock = fou->sock; list_del(&fou->list); udp_tunnel_sock_release(sock); kfree_rcu(fou, rcu); } static int fou_create(struct net *net, struct fou_cfg *cfg, struct socket **sockp) { struct socket *sock = NULL; struct fou *fou = NULL; struct sock *sk; struct udp_tunnel_sock_cfg tunnel_cfg; int err; /* Open UDP socket */ err = udp_sock_create(net, &cfg->udp_config, &sock); if (err < 0) goto error; /* Allocate FOU port structure */ fou = kzalloc(sizeof(*fou), GFP_KERNEL); if (!fou) { err = -ENOMEM; goto error; } sk = sock->sk; fou->port = cfg->udp_config.local_udp_port; fou->family = cfg->udp_config.family; fou->flags = cfg->flags; fou->type = cfg->type; fou->sock = sock; memset(&tunnel_cfg, 0, sizeof(tunnel_cfg)); tunnel_cfg.encap_type = 1; tunnel_cfg.sk_user_data = fou; tunnel_cfg.encap_destroy = NULL; /* Initial for fou type */ switch (cfg->type) { case FOU_ENCAP_DIRECT: tunnel_cfg.encap_rcv = fou_udp_recv; tunnel_cfg.gro_receive = fou_gro_receive; tunnel_cfg.gro_complete = fou_gro_complete; fou->protocol = cfg->protocol; break; case FOU_ENCAP_GUE: tunnel_cfg.encap_rcv = gue_udp_recv; tunnel_cfg.gro_receive = gue_gro_receive; tunnel_cfg.gro_complete = gue_gro_complete; break; default: err = -EINVAL; goto error; } setup_udp_tunnel_sock(net, sock, &tunnel_cfg); sk->sk_allocation = GFP_ATOMIC; err = fou_add_to_port_list(net, fou); if (err) goto error; if (sockp) *sockp = sock; return 0; error: kfree(fou); if (sock) udp_tunnel_sock_release(sock); return err; } static int fou_destroy(struct net *net, struct fou_cfg *cfg) { struct fou_net *fn = net_generic(net, fou_net_id); __be16 port = cfg->udp_config.local_udp_port; u8 family = cfg->udp_config.family; int err = -EINVAL; struct fou *fou; mutex_lock(&fn->fou_lock); list_for_each_entry(fou, &fn->fou_list, list) { if (fou->port == port && fou->family == family) { fou_release(fou); err = 0; 
break; } } mutex_unlock(&fn->fou_lock); return err; } static struct genl_family fou_nl_family; static const struct nla_policy fou_nl_policy[FOU_ATTR_MAX + 1] = { [FOU_ATTR_PORT] = { .type = NLA_U16, }, [FOU_ATTR_AF] = { .type = NLA_U8, }, [FOU_ATTR_IPPROTO] = { .type = NLA_U8, }, [FOU_ATTR_TYPE] = { .type = NLA_U8, }, [FOU_ATTR_REMCSUM_NOPARTIAL] = { .type = NLA_FLAG, }, }; static int parse_nl_config(struct genl_info *info, struct fou_cfg *cfg) { memset(cfg, 0, sizeof(*cfg)); cfg->udp_config.family = AF_INET; if (info->attrs[FOU_ATTR_AF]) { u8 family = nla_get_u8(info->attrs[FOU_ATTR_AF]); switch (family) { case AF_INET: break; case AF_INET6: cfg->udp_config.ipv6_v6only = 1; break; default: return -EAFNOSUPPORT; } cfg->udp_config.family = family; } if (info->attrs[FOU_ATTR_PORT]) { __be16 port = nla_get_be16(info->attrs[FOU_ATTR_PORT]); cfg->udp_config.local_udp_port = port; } if (info->attrs[FOU_ATTR_IPPROTO]) cfg->protocol = nla_get_u8(info->attrs[FOU_ATTR_IPPROTO]); if (info->attrs[FOU_ATTR_TYPE]) cfg->type = nla_get_u8(info->attrs[FOU_ATTR_TYPE]); if (info->attrs[FOU_ATTR_REMCSUM_NOPARTIAL]) cfg->flags |= FOU_F_REMCSUM_NOPARTIAL; return 0; } static int fou_nl_cmd_add_port(struct sk_buff *skb, struct genl_info *info) { struct net *net = genl_info_net(info); struct fou_cfg cfg; int err; err = parse_nl_config(info, &cfg); if (err) return err; return fou_create(net, &cfg, NULL); } static int fou_nl_cmd_rm_port(struct sk_buff *skb, struct genl_info *info) { struct net *net = genl_info_net(info); struct fou_cfg cfg; int err; err = parse_nl_config(info, &cfg); if (err) return err; return fou_destroy(net, &cfg); } static int fou_fill_info(struct fou *fou, struct sk_buff *msg) { if (nla_put_u8(msg, FOU_ATTR_AF, fou->sock->sk->sk_family) || nla_put_be16(msg, FOU_ATTR_PORT, fou->port) || nla_put_u8(msg, FOU_ATTR_IPPROTO, fou->protocol) || nla_put_u8(msg, FOU_ATTR_TYPE, fou->type)) return -1; if (fou->flags & FOU_F_REMCSUM_NOPARTIAL) if (nla_put_flag(msg, FOU_ATTR_REMCSUM_NOPARTIAL)) return -1; return 0; } static int fou_dump_info(struct fou *fou, u32 portid, u32 seq, u32 flags, struct sk_buff *skb, u8 cmd) { void *hdr; hdr = genlmsg_put(skb, portid, seq, &fou_nl_family, flags, cmd); if (!hdr) return -ENOMEM; if (fou_fill_info(fou, skb) < 0) goto nla_put_failure; genlmsg_end(skb, hdr); return 0; nla_put_failure: genlmsg_cancel(skb, hdr); return -EMSGSIZE; } static int fou_nl_cmd_get_port(struct sk_buff *skb, struct genl_info *info) { struct net *net = genl_info_net(info); struct fou_net *fn = net_generic(net, fou_net_id); struct sk_buff *msg; struct fou_cfg cfg; struct fou *fout; __be16 port; u8 family; int ret; ret = parse_nl_config(info, &cfg); if (ret) return ret; port = cfg.udp_config.local_udp_port; if (port == 0) return -EINVAL; family = cfg.udp_config.family; if (family != AF_INET && family != AF_INET6) return -EINVAL; msg = nlmsg_new(NLMSG_DEFAULT_SIZE, GFP_KERNEL); if (!msg) return -ENOMEM; ret = -ESRCH; mutex_lock(&fn->fou_lock); list_for_each_entry(fout, &fn->fou_list, list) { if (port == fout->port && family == fout->family) { ret = fou_dump_info(fout, info->snd_portid, info->snd_seq, 0, msg, info->genlhdr->cmd); break; } } mutex_unlock(&fn->fou_lock); if (ret < 0) goto out_free; return genlmsg_reply(msg, info); out_free: nlmsg_free(msg); return ret; } static int fou_nl_dump(struct sk_buff *skb, struct netlink_callback *cb) { struct net *net = sock_net(skb->sk); struct fou_net *fn = net_generic(net, fou_net_id); struct fou *fout; int idx = 0, ret; mutex_lock(&fn->fou_lock); 
list_for_each_entry(fout, &fn->fou_list, list) { if (idx++ < cb->args[0]) continue; ret = fou_dump_info(fout, NETLINK_CB(cb->skb).portid, cb->nlh->nlmsg_seq, NLM_F_MULTI, skb, FOU_CMD_GET); if (ret) break; } mutex_unlock(&fn->fou_lock); cb->args[0] = idx; return skb->len; } static const struct genl_ops fou_nl_ops[] = { { .cmd = FOU_CMD_ADD, .doit = fou_nl_cmd_add_port, .policy = fou_nl_policy, .flags = GENL_ADMIN_PERM, }, { .cmd = FOU_CMD_DEL, .doit = fou_nl_cmd_rm_port, .policy = fou_nl_policy, .flags = GENL_ADMIN_PERM, }, { .cmd = FOU_CMD_GET, .doit = fou_nl_cmd_get_port, .dumpit = fou_nl_dump, .policy = fou_nl_policy, }, }; static struct genl_family fou_nl_family __ro_after_init = { .hdrsize = 0, .name = FOU_GENL_NAME, .version = FOU_GENL_VERSION, .maxattr = FOU_ATTR_MAX, .netnsok = true, .module = THIS_MODULE, .ops = fou_nl_ops, .n_ops = ARRAY_SIZE(fou_nl_ops), }; size_t fou_encap_hlen(struct ip_tunnel_encap *e) { return sizeof(struct udphdr); } EXPORT_SYMBOL(fou_encap_hlen); size_t gue_encap_hlen(struct ip_tunnel_encap *e) { size_t len; bool need_priv = false; len = sizeof(struct udphdr) + sizeof(struct guehdr); if (e->flags & TUNNEL_ENCAP_FLAG_REMCSUM) { len += GUE_PLEN_REMCSUM; need_priv = true; } len += need_priv ? GUE_LEN_PRIV : 0; return len; } EXPORT_SYMBOL(gue_encap_hlen); int __fou_build_header(struct sk_buff *skb, struct ip_tunnel_encap *e, u8 *protocol, __be16 *sport, int type) { int err; err = iptunnel_handle_offloads(skb, type); if (err) return err; *sport = e->sport ? : udp_flow_src_port(dev_net(skb->dev), skb, 0, 0, false); return 0; } EXPORT_SYMBOL(__fou_build_header); int __gue_build_header(struct sk_buff *skb, struct ip_tunnel_encap *e, u8 *protocol, __be16 *sport, int type) { struct guehdr *guehdr; size_t hdrlen, optlen = 0; void *data; bool need_priv = false; int err; if ((e->flags & TUNNEL_ENCAP_FLAG_REMCSUM) && skb->ip_summed == CHECKSUM_PARTIAL) { optlen += GUE_PLEN_REMCSUM; type |= SKB_GSO_TUNNEL_REMCSUM; need_priv = true; } optlen += need_priv ? GUE_LEN_PRIV : 0; err = iptunnel_handle_offloads(skb, type); if (err) return err; /* Get source port (based on flow hash) before skb_push */ *sport = e->sport ? 
: udp_flow_src_port(dev_net(skb->dev), skb, 0, 0, false); hdrlen = sizeof(struct guehdr) + optlen; skb_push(skb, hdrlen); guehdr = (struct guehdr *)skb->data; guehdr->control = 0; guehdr->version = 0; guehdr->hlen = optlen >> 2; guehdr->flags = 0; guehdr->proto_ctype = *protocol; data = &guehdr[1]; if (need_priv) { __be32 *flags = data; guehdr->flags |= GUE_FLAG_PRIV; *flags = 0; data += GUE_LEN_PRIV; if (type & SKB_GSO_TUNNEL_REMCSUM) { u16 csum_start = skb_checksum_start_offset(skb); __be16 *pd = data; if (csum_start < hdrlen) return -EINVAL; csum_start -= hdrlen; pd[0] = htons(csum_start); pd[1] = htons(csum_start + skb->csum_offset); if (!skb_is_gso(skb)) { skb->ip_summed = CHECKSUM_NONE; skb->encapsulation = 0; } *flags |= GUE_PFLAG_REMCSUM; data += GUE_PLEN_REMCSUM; } } return 0; } EXPORT_SYMBOL(__gue_build_header); #ifdef CONFIG_NET_FOU_IP_TUNNELS static void fou_build_udp(struct sk_buff *skb, struct ip_tunnel_encap *e, struct flowi4 *fl4, u8 *protocol, __be16 sport) { struct udphdr *uh; skb_push(skb, sizeof(struct udphdr)); skb_reset_transport_header(skb); uh = udp_hdr(skb); uh->dest = e->dport; uh->source = sport; uh->len = htons(skb->len); udp_set_csum(!(e->flags & TUNNEL_ENCAP_FLAG_CSUM), skb, fl4->saddr, fl4->daddr, skb->len); *protocol = IPPROTO_UDP; } static int fou_build_header(struct sk_buff *skb, struct ip_tunnel_encap *e, u8 *protocol, struct flowi4 *fl4) { int type = e->flags & TUNNEL_ENCAP_FLAG_CSUM ? SKB_GSO_UDP_TUNNEL_CSUM : SKB_GSO_UDP_TUNNEL; __be16 sport; int err; err = __fou_build_header(skb, e, protocol, &sport, type); if (err) return err; fou_build_udp(skb, e, fl4, protocol, sport); return 0; } static int gue_build_header(struct sk_buff *skb, struct ip_tunnel_encap *e, u8 *protocol, struct flowi4 *fl4) { int type = e->flags & TUNNEL_ENCAP_FLAG_CSUM ? 
SKB_GSO_UDP_TUNNEL_CSUM : SKB_GSO_UDP_TUNNEL; __be16 sport; int err; err = __gue_build_header(skb, e, protocol, &sport, type); if (err) return err; fou_build_udp(skb, e, fl4, protocol, sport); return 0; } static const struct ip_tunnel_encap_ops fou_iptun_ops = { .encap_hlen = fou_encap_hlen, .build_header = fou_build_header, }; static const struct ip_tunnel_encap_ops gue_iptun_ops = { .encap_hlen = gue_encap_hlen, .build_header = gue_build_header, }; static int ip_tunnel_encap_add_fou_ops(void) { int ret; ret = ip_tunnel_encap_add_ops(&fou_iptun_ops, TUNNEL_ENCAP_FOU); if (ret < 0) { pr_err("can't add fou ops\n"); return ret; } ret = ip_tunnel_encap_add_ops(&gue_iptun_ops, TUNNEL_ENCAP_GUE); if (ret < 0) { pr_err("can't add gue ops\n"); ip_tunnel_encap_del_ops(&fou_iptun_ops, TUNNEL_ENCAP_FOU); return ret; } return 0; } static void ip_tunnel_encap_del_fou_ops(void) { ip_tunnel_encap_del_ops(&fou_iptun_ops, TUNNEL_ENCAP_FOU); ip_tunnel_encap_del_ops(&gue_iptun_ops, TUNNEL_ENCAP_GUE); } #else static int ip_tunnel_encap_add_fou_ops(void) { return 0; } static void ip_tunnel_encap_del_fou_ops(void) { } #endif static __net_init int fou_init_net(struct net *net) { struct fou_net *fn = net_generic(net, fou_net_id); INIT_LIST_HEAD(&fn->fou_list); mutex_init(&fn->fou_lock); return 0; } static __net_exit void fou_exit_net(struct net *net) { struct fou_net *fn = net_generic(net, fou_net_id); struct fou *fou, *next; /* Close all the FOU sockets */ mutex_lock(&fn->fou_lock); list_for_each_entry_safe(fou, next, &fn->fou_list, list) fou_release(fou); mutex_unlock(&fn->fou_lock); } static struct pernet_operations fou_net_ops = { .init = fou_init_net, .exit = fou_exit_net, .id = &fou_net_id, .size = sizeof(struct fou_net), }; static int __init fou_init(void) { int ret; ret = register_pernet_device(&fou_net_ops); if (ret) goto exit; ret = genl_register_family(&fou_nl_family); if (ret < 0) goto unregister; ret = ip_tunnel_encap_add_fou_ops(); if (ret == 0) return 0; genl_unregister_family(&fou_nl_family); unregister: unregister_pernet_device(&fou_net_ops); exit: return ret; } static void __exit fou_fini(void) { ip_tunnel_encap_del_fou_ops(); genl_unregister_family(&fou_nl_family); unregister_pernet_device(&fou_net_ops); } module_init(fou_init); module_exit(fou_fini); MODULE_AUTHOR("Tom Herbert <[email protected]>"); MODULE_LICENSE("GPL");
Fe-Pi/linux
net/ipv4/fou.c
C
gpl-2.0
23,962
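Since the encapsulation overhead in fou.c is split between gue_encap_hlen() and __gue_build_header(), here is a small standalone C toy that mirrors that arithmetic. The byte counts are my reading of the kernel structures (8-byte UDP header, 4-byte base GUE header, 4-byte private-flags word, 4-byte remote-checksum option), not values quoted from this file.

#include <stdio.h>
#include <stdbool.h>

#define UDP_HDR_LEN      8  /* sizeof(struct udphdr) */
#define GUE_BASE_HDR_LEN 4  /* sizeof(struct guehdr), version 0 */
#define GUE_PRIV_LEN     4  /* GUE_LEN_PRIV: one __be32 of private flags */
#define GUE_REMCSUM_LEN  4  /* GUE_PLEN_REMCSUM: two __be16 checksum offsets */

/* Total bytes prepended in front of the inner packet for GUE encapsulation. */
static unsigned int gue_overhead(bool remote_checksum)
{
    unsigned int len = UDP_HDR_LEN + GUE_BASE_HDR_LEN;

    if (remote_checksum)
        len += GUE_PRIV_LEN + GUE_REMCSUM_LEN;  /* private block + REMCSUM option */
    return len;
}

int main(void)
{
    printf("plain GUE overhead: %u bytes\n", gue_overhead(false));
    printf("GUE + REMCSUM:      %u bytes\n", gue_overhead(true));
    /* The receive path reverses the same arithmetic: optlen = guehdr->hlen << 2,
     * so the REMCSUM case above corresponds to hlen = (4 + 4) >> 2 = 2 words. */
    return 0;
}

For plain FOU the overhead is only the UDP header (fou_encap_hlen), which is why the direct-encapsulation receive path pulls just sizeof(struct udphdr) before handing the inner protocol back to the stack.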
<?php /** * Primo Permission Handler. * * PHP version 5 * * Copyright (C) Villanova University 2013. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2, * as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * @category VuFind * @package Search * @author Oliver Goldschmidt <[email protected]> * @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License * @link https://vufind.org Main Site */ namespace VuFind\Search\Primo; use ZfcRbac\Service\AuthorizationServiceAwareInterface, ZfcRbac\Service\AuthorizationServiceAwareTrait; /** * Primo Permission Handler. * * @category VuFind * @package Search * @author Oliver Goldschmidt <[email protected]> * @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License * @link https://vufind.org Main Site */ class PrimoPermissionHandler { use AuthorizationServiceAwareTrait; /** * Primo-Config for Institutions. * * @var array */ protected $primoConfig; /** * Institution code applicable for the user * * @var string */ protected $instCode = null; /** * Constructor. * * @param Zend\Config\Config|array $primoPermConfig Primo-Config for * Institutions * * @return void */ public function __construct($primoPermConfig) { if ($primoPermConfig instanceof \Zend\Config\Config) { $primoPermConfig = $primoPermConfig->toArray(); } $this->primoConfig = is_array($primoPermConfig) ? $primoPermConfig : []; $this->checkLegacySettings(); $this->checkConfig(); } /** * Set the institution code (no autodetection) * * @param string $code Institutioncode * * @return void */ public function setInstCode($code) { // If the code is valid, we'll set it; otherwise, we'll use "false" to // clear instCode's null status and indicate that the setter has been used. $this->instCode = ($this->instCodeExists($code) === true) ? $code : false; } /** * Determine if a institution code is set in config file * * @param string $code Code to approve against config file * * @return bool */ public function instCodeExists($code) { return (in_array($code, $this->getInstCodes()) === true); } /** * Determine the institution code * Returns false, if no institution can get set * * @return string|boolean */ public function getInstCode() { if ($this->instCode === null) { $this->autodetectCode(); } return $this->instCode; } /** * Check if the user has permission * * @return bool */ public function hasPermission() { $code = $this->getInstCode(); return (false !== $code && $this->checkPermission($code) === true); } /** * Checks the config file section for validity * * @return void */ protected function checkConfig() { if (isset($this->primoConfig['institutionCode']) || isset($this->primoConfig['onCampusRule']) || ($this->getDefaultCode() !== false) ) { return; } // If we reach this point, no institution code is set in config. // Primo will not work without an institution code! throw new \Exception( 'No institutionCode found. Please be sure that at least a ' . 'defaultCode is configured in section [Institutions] ' . 'in Primo.ini.' 
); } /** * Legacy settings support * * @return void */ protected function checkLegacySettings() { // if we already have settings, ignore the legacy ones if (isset($this->primoConfig['defaultCode']) || isset($this->primoConfig['onCampusRule']) ) { return; } // Handle legacy options $codes = isset($this->primoConfig['code']) ? $this->primoConfig['code'] : []; $regex = isset($this->primoConfig['regex']) ? $this->primoConfig['regex'] : []; if (!empty($codes) && !empty($regex)) { throw new \Exception( 'Legacy [Institutions] settings detected.' . ' Please run upgrade process or correct settings manually' . ' in Primo.ini and permissions.ini.' ); } } /** * Gets all possible institution codes from config file * * @return array Array with valid Primo institution codes */ protected function getInstCodes() { // Start with default code (if any): $defaultCode = $this->getDefaultCode(); $codes = ($defaultCode !== false) ? [$defaultCode] : []; // Add additional keys from relevant config sections: foreach (['institutionCode', 'onCampusRule'] as $section) { if (isset($this->primoConfig[$section]) && is_array($this->primoConfig[$section]) ) { $codes = array_merge( $codes, array_keys($this->primoConfig[$section]) ); } } return $codes; } /** * Autodetects the permissions by configuration file * * @return void */ protected function autodetectCode() { $authService = $this->getAuthorizationService(); // if no authorization service is available, don't do anything if (!$authService) { $this->instCode = false; return; } // walk through the relevant config sections and check if one is granted foreach (['institutionCode', 'onCampusRule'] as $section) { if (isset($this->primoConfig[$section]) && is_array($this->primoConfig[$section]) ) { foreach ($this->primoConfig[$section] as $code => $permRule) { if ($authService->isGranted($permRule)) { $this->instCode = $code; return; } } } } // if no rule has matched until here, assume the user gets the default code if ($this->getDefaultCode() !== false) { $this->instCode = $this->getDefaultCode(); return; } // Autodetection failed, set instCode to false // Primo will not work without an institution code! if ($this->instCode === null) { $this->instCode = false; } } /** * Determine the default institution code * Returns false, if no default code has been set * * @return string|boolean */ protected function getDefaultCode() { return (isset($this->primoConfig['defaultCode'])) ? $this->primoConfig['defaultCode'] : false; } /** * Determine the default onCampus Rule * * @return string */ protected function getDefaultOnCampusRule() { $defaultCode = $this->getDefaultCode(); return ($defaultCode !== false) ? $this->getOnCampusRule($defaultCode) : null; } /** * Determine a onCampus Rule for a certain code * * @param string $code Code to determine the rule name for * * @return string */ protected function getOnCampusRule($code) { if ($code === null) { return null; } $onCampusRule = isset($this->primoConfig['onCampusRule'][$code]) ? $this->primoConfig['onCampusRule'][$code] : false; if (false !== $onCampusRule) { return $onCampusRule; } // If primoConfig->onCampusRule[] is not set // no rule can get applied. // So return null to indicate that nothing can get matched. 
return null; } /** * Checks, if a rule is granted * * @param string $code Code to check the rule name for * * @return bool */ protected function checkPermission($code) { $onCampusRule = $this->getOnCampusRule($code); $authService = $this->getAuthorizationService(); // if no authorization service is available, the user can't get permission return ($authService && $authService->isGranted($onCampusRule)); } }
ubtue/KrimDok
module/VuFind/src/VuFind/Search/Primo/PrimoPermissionHandler.php
PHP
gpl-2.0
9,014
/* * Copyright (c) 2003, 2007-8 Matteo Frigo * Copyright (c) 2003, 2007-8 Massachusetts Institute of Technology * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ /* This file was automatically generated --- DO NOT EDIT */ /* Generated on Sun Jul 12 06:43:43 EDT 2009 */ #include "codelet-rdft.h" #ifdef HAVE_FMA /* Generated by: ../../../genfft/gen_r2cf -fma -reorder-insns -schedule-for-pipeline -compact -variables 4 -pipeline-latency 4 -n 64 -name r2cf_64 -include r2cf.h */ /* * This function contains 394 FP additions, 196 FP multiplications, * (or, 198 additions, 0 multiplications, 196 fused multiply/add), * 133 stack variables, 15 constants, and 128 memory accesses */ #include "r2cf.h" static void r2cf_64(R *R0, R *R1, R *Cr, R *Ci, stride rs, stride csr, stride csi, INT v, INT ivs, INT ovs) { DK(KP773010453, +0.773010453362736960810906609758469800971041293); DK(KP995184726, +0.995184726672196886244836953109479921575474869); DK(KP098491403, +0.098491403357164253077197521291327432293052451); DK(KP820678790, +0.820678790828660330972281985331011598767386482); DK(KP956940335, +0.956940335732208864935797886980269969482849206); DK(KP881921264, +0.881921264348355029712756863660388349508442621); DK(KP534511135, +0.534511135950791641089685961295362908582039528); DK(KP303346683, +0.303346683607342391675883946941299872384187453); DK(KP980785280, +0.980785280403230449126182236134239036973933731); DK(KP198912367, +0.198912367379658006911597622644676228597850501); DK(KP831469612, +0.831469612302545237078788377617905756738560812); DK(KP668178637, +0.668178637919298919997757686523080761552472251); DK(KP923879532, +0.923879532511286756128183189396788286822416626); DK(KP414213562, +0.414213562373095048801688724209698078569671875); DK(KP707106781, +0.707106781186547524400844362104849039284835938); INT i; for (i = v; i > 0; i = i - 1, R0 = R0 + ivs, R1 = R1 + ivs, Cr = Cr + ovs, Ci = Ci + ovs, MAKE_VOLATILE_STRIDE(rs), MAKE_VOLATILE_STRIDE(csr), MAKE_VOLATILE_STRIDE(csi)) { E T5n, T5o; { E T11, T2j, T4P, T5P, T3D, T5p, T3d, Tf, T1k, T1H, T5D, T4l, T5A, T4a, T3i; E T2U, T1R, T2e, T5K, T4G, T5H, T4v, T3l, T31, T5s, T42, T5t, T3Z, T2n, T1b; E T3f, TZ, T5v, T3T, T5w, T3Q, T2m, T18, T3e, TK, T3K, T5Q, T4S, T5q, T14; E T2k, T3p, Tu, T4w, T1U, T5E, T4h, T5B, T4o, T3j, T2X, T1I, T1z, T1Z, T4A; E T24, T4x, T1X, T20; { E TN, T3V, TS, TX, T3X, TQ, T40, TT; { E T1g, T46, T1B, T1G, T47, T1j, T4j, T1C; { E T4, T3z, T3, T3B, Td, T5, T8, T9; { E T1, T2, Tb, Tc; T1 = R0[0]; T2 = R0[WS(rs, 16)]; Tb = R0[WS(rs, 28)]; Tc = R0[WS(rs, 12)]; T4 = R0[WS(rs, 8)]; T3z = T1 - T2; T3 = T1 + T2; T3B = Tb - Tc; Td = Tb + Tc; T5 = R0[WS(rs, 24)]; T8 = R0[WS(rs, 4)]; T9 = R0[WS(rs, 20)]; } { E T1E, T1F, T1h, T1i; { E T1e, T4N, T6, T3A, Ta, T1f; T1e = R1[0]; T4N = T4 - T5; T6 = T4 + T5; T3A = T8 - T9; Ta = T8 + T9; T1f = R1[WS(rs, 16)]; { E T7, T3C, T4O, Te; T11 = T3 - T6; T7 = T3 + T6; T3C = T3A + 
T3B; T4O = T3B - T3A; T2j = Td - Ta; Te = Ta + Td; T4P = FNMS(KP707106781, T4O, T4N); T5P = FMA(KP707106781, T4O, T4N); T3D = FMA(KP707106781, T3C, T3z); T5p = FNMS(KP707106781, T3C, T3z); T3d = T7 - Te; Tf = T7 + Te; T1g = T1e + T1f; T46 = T1e - T1f; } } T1E = R1[WS(rs, 4)]; T1F = R1[WS(rs, 20)]; T1h = R1[WS(rs, 8)]; T1i = R1[WS(rs, 24)]; T1B = R1[WS(rs, 28)]; T1G = T1E + T1F; T47 = T1E - T1F; T1j = T1h + T1i; T4j = T1h - T1i; T1C = R1[WS(rs, 12)]; } } { E T1N, T4r, T28, T2d, T4s, T1Q, T4E, T29; { E T2b, T2c, T1O, T1P; { E T2S, T48, T1D, T1L, T1M, T4k, T49, T2T; T1L = R1[WS(rs, 31)]; T1M = R1[WS(rs, 15)]; T2S = T1g + T1j; T1k = T1g - T1j; T48 = T1B - T1C; T1D = T1B + T1C; T1N = T1L + T1M; T4r = T1L - T1M; T4k = T47 - T48; T49 = T47 + T48; T2T = T1G + T1D; T1H = T1D - T1G; T5D = FNMS(KP707106781, T4k, T4j); T4l = FMA(KP707106781, T4k, T4j); T5A = FNMS(KP707106781, T49, T46); T4a = FMA(KP707106781, T49, T46); T3i = T2S - T2T; T2U = T2S + T2T; T2b = R1[WS(rs, 3)]; T2c = R1[WS(rs, 19)]; } T1O = R1[WS(rs, 7)]; T1P = R1[WS(rs, 23)]; T28 = R1[WS(rs, 27)]; T2d = T2b + T2c; T4s = T2b - T2c; T1Q = T1O + T1P; T4E = T1P - T1O; T29 = R1[WS(rs, 11)]; } { E TV, TW, TO, TP; { E T2Z, T4t, T2a, TL, TM, T4F, T4u, T30; TL = R0[WS(rs, 31)]; TM = R0[WS(rs, 15)]; T2Z = T1N + T1Q; T1R = T1N - T1Q; T4t = T28 - T29; T2a = T28 + T29; TN = TL + TM; T3V = TL - TM; T4F = T4t - T4s; T4u = T4s + T4t; T30 = T2d + T2a; T2e = T2a - T2d; T5K = FNMS(KP707106781, T4F, T4E); T4G = FMA(KP707106781, T4F, T4E); T5H = FNMS(KP707106781, T4u, T4r); T4v = FMA(KP707106781, T4u, T4r); T3l = T2Z - T30; T31 = T2Z + T30; TV = R0[WS(rs, 27)]; TW = R0[WS(rs, 11)]; } TO = R0[WS(rs, 7)]; TP = R0[WS(rs, 23)]; TS = R0[WS(rs, 3)]; TX = TV + TW; T3X = TV - TW; TQ = TO + TP; T40 = TO - TP; TT = R0[WS(rs, 19)]; } } } { E Ti, T3E, Tn, Ts, T3I, Tl, T3F, To; { E Ty, T3M, TD, TI, T3O, TB, T3R, TE; { E TG, TH, Tz, TA; { E T19, TR, T3W, TU, Tw, Tx; Tw = R0[WS(rs, 1)]; Tx = R0[WS(rs, 17)]; T19 = TN - TQ; TR = TN + TQ; T3W = TS - TT; TU = TS + TT; Ty = Tw + Tx; T3M = Tw - Tx; { E T41, T3Y, T1a, TY; T41 = T3W - T3X; T3Y = T3W + T3X; T1a = TX - TU; TY = TU + TX; T5s = FNMS(KP707106781, T41, T40); T42 = FMA(KP707106781, T41, T40); T5t = FNMS(KP707106781, T3Y, T3V); T3Z = FMA(KP707106781, T3Y, T3V); T2n = FMA(KP414213562, T19, T1a); T1b = FNMS(KP414213562, T1a, T19); T3f = TR - TY; TZ = TR + TY; TG = R0[WS(rs, 29)]; TH = R0[WS(rs, 13)]; } } Tz = R0[WS(rs, 9)]; TA = R0[WS(rs, 25)]; TD = R0[WS(rs, 5)]; TI = TG + TH; T3O = TG - TH; TB = Tz + TA; T3R = Tz - TA; TE = R0[WS(rs, 21)]; } { E Tq, Tr, Tj, Tk; { E T16, TC, T3N, TF, Tg, Th; Tg = R0[WS(rs, 2)]; Th = R0[WS(rs, 18)]; T16 = Ty - TB; TC = Ty + TB; T3N = TD - TE; TF = TD + TE; Ti = Tg + Th; T3E = Tg - Th; { E T3S, T3P, T17, TJ; T3S = T3N - T3O; T3P = T3N + T3O; T17 = TI - TF; TJ = TF + TI; T5v = FNMS(KP707106781, T3S, T3R); T3T = FMA(KP707106781, T3S, T3R); T5w = FNMS(KP707106781, T3P, T3M); T3Q = FMA(KP707106781, T3P, T3M); T2m = FNMS(KP414213562, T16, T17); T18 = FMA(KP414213562, T17, T16); T3e = TC - TJ; TK = TC + TJ; Tq = R0[WS(rs, 6)]; Tr = R0[WS(rs, 22)]; } } Tj = R0[WS(rs, 10)]; Tk = R0[WS(rs, 26)]; Tn = R0[WS(rs, 30)]; Ts = Tq + Tr; T3I = Tq - Tr; Tl = Tj + Tk; T3F = Tj - Tk; To = R0[WS(rs, 14)]; } } { E T1n, T4b, T1s, T4f, T1x, T4c, T1q, T1t; { E T1v, T1w, T1o, T1p; { E T1l, T4Q, T3G, Tm, T12, Tp, T3H, T1m; T1l = R1[WS(rs, 2)]; T4Q = FMA(KP414213562, T3E, T3F); T3G = FNMS(KP414213562, T3F, T3E); Tm = Ti + Tl; T12 = Ti - Tl; Tp = Tn + To; T3H = Tn - To; T1m = R1[WS(rs, 18)]; T1v = R1[WS(rs, 6)]; { E 
T4R, T3J, Tt, T13; T4R = FNMS(KP414213562, T3H, T3I); T3J = FMA(KP414213562, T3I, T3H); Tt = Tp + Ts; T13 = Tp - Ts; T1n = T1l + T1m; T4b = T1l - T1m; T3K = T3G + T3J; T5Q = T3J - T3G; T4S = T4Q + T4R; T5q = T4Q - T4R; T14 = T12 + T13; T2k = T13 - T12; T3p = Tt - Tm; Tu = Tm + Tt; T1w = R1[WS(rs, 22)]; } } T1o = R1[WS(rs, 10)]; T1p = R1[WS(rs, 26)]; T1s = R1[WS(rs, 30)]; T4f = T1v - T1w; T1x = T1v + T1w; T4c = T1o - T1p; T1q = T1o + T1p; T1t = R1[WS(rs, 14)]; } { E T22, T23, T1V, T1W; { E T1S, T4d, T4m, T2V, T1r, T4e, T1u, T1T; T1S = R1[WS(rs, 1)]; T4d = FNMS(KP414213562, T4c, T4b); T4m = FMA(KP414213562, T4b, T4c); T2V = T1n + T1q; T1r = T1n - T1q; T4e = T1s - T1t; T1u = T1s + T1t; T1T = R1[WS(rs, 17)]; T22 = R1[WS(rs, 5)]; { E T4g, T4n, T2W, T1y; T4g = FMA(KP414213562, T4f, T4e); T4n = FNMS(KP414213562, T4e, T4f); T2W = T1u + T1x; T1y = T1u - T1x; T4w = T1S - T1T; T1U = T1S + T1T; T5E = T4g - T4d; T4h = T4d + T4g; T5B = T4m - T4n; T4o = T4m + T4n; T3j = T2W - T2V; T2X = T2V + T2W; T1I = T1y - T1r; T1z = T1r + T1y; T23 = R1[WS(rs, 21)]; } } T1V = R1[WS(rs, 9)]; T1W = R1[WS(rs, 25)]; T1Z = R1[WS(rs, 29)]; T4A = T23 - T22; T24 = T22 + T23; T4x = T1W - T1V; T1X = T1V + T1W; T20 = R1[WS(rs, 13)]; } } } } { E T4C, T5L, T4J, T5I, T26, T2f, T3q, T3h, T3w, T3s, T3o, T3r, T3t; { E T2R, T37, T2Y, T3a, T39, T3m, T3b, T35, Tv, T10, T34, T3c, T3x, T3y; { E T4y, T4H, T32, T1Y, T4z, T21; T2R = Tf - Tu; Tv = Tf + Tu; T4y = FMA(KP414213562, T4x, T4w); T4H = FNMS(KP414213562, T4w, T4x); T32 = T1U + T1X; T1Y = T1U - T1X; T4z = T1Z - T20; T21 = T1Z + T20; T10 = TK + TZ; T37 = TZ - TK; T2Y = T2U - T2X; T3a = T2U + T2X; { E T4B, T4I, T33, T25; T4B = FNMS(KP414213562, T4A, T4z); T4I = FMA(KP414213562, T4z, T4A); T33 = T21 + T24; T25 = T21 - T24; T39 = Tv + T10; T4C = T4y + T4B; T5L = T4B - T4y; T4J = T4H + T4I; T5I = T4I - T4H; T34 = T32 + T33; T3m = T33 - T32; T26 = T1Y + T25; T2f = T25 - T1Y; } } Cr[WS(csr, 16)] = Tv - T10; T3b = T31 + T34; T35 = T31 - T34; Ci[WS(csi, 16)] = T3b - T3a; T3c = T3a + T3b; { E T3k, T3u, T3v, T3n, T36, T38, T3g; T3g = T3e + T3f; T3q = T3f - T3e; Cr[0] = T39 + T3c; Cr[WS(csr, 32)] = T39 - T3c; T36 = T2Y + T35; T38 = T35 - T2Y; T3x = FNMS(KP707106781, T3g, T3d); T3h = FMA(KP707106781, T3g, T3d); Ci[WS(csi, 8)] = FMA(KP707106781, T38, T37); Ci[WS(csi, 24)] = FMS(KP707106781, T38, T37); Cr[WS(csr, 8)] = FMA(KP707106781, T36, T2R); Cr[WS(csr, 24)] = FNMS(KP707106781, T36, T2R); T3k = FMA(KP414213562, T3j, T3i); T3u = FNMS(KP414213562, T3i, T3j); T3v = FMA(KP414213562, T3l, T3m); T3n = FNMS(KP414213562, T3m, T3l); T3y = T3v - T3u; T3w = T3u + T3v; T3s = T3n - T3k; T3o = T3k + T3n; } Cr[WS(csr, 12)] = FMA(KP923879532, T3y, T3x); Cr[WS(csr, 20)] = FNMS(KP923879532, T3y, T3x); } Cr[WS(csr, 4)] = FMA(KP923879532, T3o, T3h); Cr[WS(csr, 28)] = FNMS(KP923879532, T3o, T3h); T3r = FNMS(KP707106781, T3q, T3p); T3t = FMA(KP707106781, T3q, T3p); { E T27, T2g, T2v, T1d, T2r, T2p, T2s, T1K, T6l, T6m; { E T15, T2o, T2P, T2z, T2l, T1c, T1A, T1J, T2D, T2L, T2J, T2M, T2C, T2E, T2N; E T2F; { E T2H, T2I, T2x, T2y, T2A, T2B; T15 = FMA(KP707106781, T14, T11); T2x = FNMS(KP707106781, T14, T11); T2y = T2n - T2m; T2o = T2m + T2n; Ci[WS(csi, 4)] = FMA(KP923879532, T3w, T3t); Ci[WS(csi, 28)] = FMS(KP923879532, T3w, T3t); Ci[WS(csi, 20)] = FMA(KP923879532, T3s, T3r); Ci[WS(csi, 12)] = FMS(KP923879532, T3s, T3r); T2P = FNMS(KP923879532, T2y, T2x); T2z = FMA(KP923879532, T2y, T2x); T2l = FMA(KP707106781, T2k, T2j); T2H = FNMS(KP707106781, T2k, T2j); T2I = T1b - T18; T1c = T18 + T1b; T1A = FMA(KP707106781, T1z, T1k); 
T2A = FNMS(KP707106781, T1z, T1k); T2B = FNMS(KP707106781, T1I, T1H); T1J = FMA(KP707106781, T1I, T1H); T27 = FMA(KP707106781, T26, T1R); T2D = FNMS(KP707106781, T26, T1R); T2L = FNMS(KP923879532, T2I, T2H); T2J = FMA(KP923879532, T2I, T2H); T2M = FMA(KP668178637, T2A, T2B); T2C = FNMS(KP668178637, T2B, T2A); T2E = FNMS(KP707106781, T2f, T2e); T2g = FMA(KP707106781, T2f, T2e); } T2N = FNMS(KP668178637, T2D, T2E); T2F = FMA(KP668178637, T2E, T2D); T2v = FNMS(KP923879532, T1c, T15); T1d = FMA(KP923879532, T1c, T15); { E T2Q, T2O, T2K, T2G; T2Q = T2M - T2N; T2O = T2M + T2N; T2K = T2F - T2C; T2G = T2C + T2F; Cr[WS(csr, 10)] = FMA(KP831469612, T2Q, T2P); Cr[WS(csr, 22)] = FNMS(KP831469612, T2Q, T2P); Ci[WS(csi, 26)] = FNMS(KP831469612, T2O, T2L); Ci[WS(csi, 6)] = -(FMA(KP831469612, T2O, T2L)); Ci[WS(csi, 22)] = FMS(KP831469612, T2K, T2J); Ci[WS(csi, 10)] = FMA(KP831469612, T2K, T2J); Cr[WS(csr, 6)] = FMA(KP831469612, T2G, T2z); Cr[WS(csr, 26)] = FNMS(KP831469612, T2G, T2z); } T2r = FMA(KP923879532, T2o, T2l); T2p = FNMS(KP923879532, T2o, T2l); T2s = FNMS(KP198912367, T1A, T1J); T1K = FMA(KP198912367, T1J, T1A); } { E T63, T5r, T5R, T6d, T5J, T5M, T6e, T5y, T6j, T6b, T66, T67, T64, T5U, T5Z; E T5G; { E T5S, T5u, T5x, T5T, T2t, T2h; T63 = FMA(KP923879532, T5q, T5p); T5r = FNMS(KP923879532, T5q, T5p); T5R = FNMS(KP923879532, T5Q, T5P); T6d = FMA(KP923879532, T5Q, T5P); T2t = FMA(KP198912367, T27, T2g); T2h = FNMS(KP198912367, T2g, T27); T5S = FNMS(KP668178637, T5s, T5t); T5u = FMA(KP668178637, T5t, T5s); { E T2w, T2u, T2q, T2i; T2w = T2t - T2s; T2u = T2s + T2t; T2q = T2h - T1K; T2i = T1K + T2h; Cr[WS(csr, 14)] = FMA(KP980785280, T2w, T2v); Cr[WS(csr, 18)] = FNMS(KP980785280, T2w, T2v); Ci[WS(csi, 30)] = FMS(KP980785280, T2u, T2r); Ci[WS(csi, 2)] = FMA(KP980785280, T2u, T2r); Ci[WS(csi, 18)] = FMA(KP980785280, T2q, T2p); Ci[WS(csi, 14)] = FMS(KP980785280, T2q, T2p); Cr[WS(csr, 2)] = FMA(KP980785280, T2i, T1d); Cr[WS(csr, 30)] = FNMS(KP980785280, T2i, T1d); T5x = FNMS(KP668178637, T5w, T5v); T5T = FMA(KP668178637, T5v, T5w); } { E T69, T6a, T5C, T5F; T5J = FNMS(KP923879532, T5I, T5H); T69 = FMA(KP923879532, T5I, T5H); T6a = FNMS(KP923879532, T5L, T5K); T5M = FMA(KP923879532, T5L, T5K); T6e = T5x + T5u; T5y = T5u - T5x; T6j = FNMS(KP303346683, T69, T6a); T6b = FMA(KP303346683, T6a, T69); T66 = FMA(KP923879532, T5B, T5A); T5C = FNMS(KP923879532, T5B, T5A); T5F = FNMS(KP923879532, T5E, T5D); T67 = FMA(KP923879532, T5E, T5D); T64 = T5T + T5S; T5U = T5S - T5T; T5Z = FMA(KP534511135, T5C, T5F); T5G = FNMS(KP534511135, T5F, T5C); } } { E T61, T6i, T68, T62; { E T5z, T5Y, T5N, T5X, T5V, T60, T5W, T5O; T61 = FNMS(KP831469612, T5y, T5r); T5z = FMA(KP831469612, T5y, T5r); T6i = FNMS(KP303346683, T66, T67); T68 = FMA(KP303346683, T67, T66); T5Y = FMA(KP534511135, T5J, T5M); T5N = FNMS(KP534511135, T5M, T5J); T5X = FNMS(KP831469612, T5U, T5R); T5V = FMA(KP831469612, T5U, T5R); T60 = T5Y - T5Z; T62 = T5Z + T5Y; T5W = T5N - T5G; T5O = T5G + T5N; Ci[WS(csi, 27)] = FMA(KP881921264, T60, T5X); Ci[WS(csi, 5)] = FMS(KP881921264, T60, T5X); Cr[WS(csr, 5)] = FMA(KP881921264, T5O, T5z); Cr[WS(csr, 27)] = FNMS(KP881921264, T5O, T5z); Ci[WS(csi, 21)] = FMS(KP881921264, T5W, T5V); Ci[WS(csi, 11)] = FMA(KP881921264, T5W, T5V); } { E T6g, T6f, T6h, T6k, T65, T6c; T6l = FNMS(KP831469612, T64, T63); T65 = FMA(KP831469612, T64, T63); T6c = T68 + T6b; T6g = T6b - T68; T6f = FNMS(KP831469612, T6e, T6d); T6h = FMA(KP831469612, T6e, T6d); Cr[WS(csr, 11)] = FMA(KP881921264, T62, T61); Cr[WS(csr, 21)] = FNMS(KP881921264, T62, T61); 
Cr[WS(csr, 3)] = FMA(KP956940335, T6c, T65); Cr[WS(csr, 29)] = FNMS(KP956940335, T6c, T65); T6k = T6i - T6j; T6m = T6i + T6j; Ci[WS(csi, 29)] = FMS(KP956940335, T6k, T6h); Ci[WS(csi, 3)] = FMA(KP956940335, T6k, T6h); Ci[WS(csi, 19)] = FMA(KP956940335, T6g, T6f); Ci[WS(csi, 13)] = FMS(KP956940335, T6g, T6f); } } } { E T55, T3L, T4T, T5f, T4D, T4K, T5g, T44, T5l, T5d, T58, T59, T56, T4W, T51; E T4q; { E T4U, T3U, T43, T4V; T55 = FNMS(KP923879532, T3K, T3D); T3L = FMA(KP923879532, T3K, T3D); T4T = FMA(KP923879532, T4S, T4P); T5f = FNMS(KP923879532, T4S, T4P); Cr[WS(csr, 13)] = FNMS(KP956940335, T6m, T6l); Cr[WS(csr, 19)] = FMA(KP956940335, T6m, T6l); T4U = FMA(KP198912367, T3Q, T3T); T3U = FNMS(KP198912367, T3T, T3Q); T43 = FMA(KP198912367, T42, T3Z); T4V = FNMS(KP198912367, T3Z, T42); { E T5b, T5c, T4i, T4p; T4D = FMA(KP923879532, T4C, T4v); T5b = FNMS(KP923879532, T4C, T4v); T5c = FNMS(KP923879532, T4J, T4G); T4K = FMA(KP923879532, T4J, T4G); T5g = T43 - T3U; T44 = T3U + T43; T5l = FNMS(KP820678790, T5b, T5c); T5d = FMA(KP820678790, T5c, T5b); T58 = FNMS(KP923879532, T4h, T4a); T4i = FMA(KP923879532, T4h, T4a); T4p = FMA(KP923879532, T4o, T4l); T59 = FNMS(KP923879532, T4o, T4l); T56 = T4U - T4V; T4W = T4U + T4V; T51 = FMA(KP098491403, T4i, T4p); T4q = FNMS(KP098491403, T4p, T4i); } } { E T53, T5k, T5a, T54; { E T45, T50, T4L, T4Z, T4X, T52, T4Y, T4M; T53 = FNMS(KP980785280, T44, T3L); T45 = FMA(KP980785280, T44, T3L); T5k = FNMS(KP820678790, T58, T59); T5a = FMA(KP820678790, T59, T58); T50 = FMA(KP098491403, T4D, T4K); T4L = FNMS(KP098491403, T4K, T4D); T4Z = FMA(KP980785280, T4W, T4T); T4X = FNMS(KP980785280, T4W, T4T); T52 = T50 - T51; T54 = T51 + T50; T4Y = T4L - T4q; T4M = T4q + T4L; Ci[WS(csi, 31)] = FMA(KP995184726, T52, T4Z); Ci[WS(csi, 1)] = FMS(KP995184726, T52, T4Z); Cr[WS(csr, 1)] = FMA(KP995184726, T4M, T45); Cr[WS(csr, 31)] = FNMS(KP995184726, T4M, T45); Ci[WS(csi, 17)] = FMS(KP995184726, T4Y, T4X); Ci[WS(csi, 15)] = FMA(KP995184726, T4Y, T4X); } { E T5i, T5h, T5j, T5m, T57, T5e; T5n = FNMS(KP980785280, T56, T55); T57 = FMA(KP980785280, T56, T55); T5e = T5a + T5d; T5i = T5d - T5a; T5h = FNMS(KP980785280, T5g, T5f); T5j = FMA(KP980785280, T5g, T5f); Cr[WS(csr, 15)] = FMA(KP995184726, T54, T53); Cr[WS(csr, 17)] = FNMS(KP995184726, T54, T53); Cr[WS(csr, 7)] = FMA(KP773010453, T5e, T57); Cr[WS(csr, 25)] = FNMS(KP773010453, T5e, T57); T5m = T5k - T5l; T5o = T5k + T5l; Ci[WS(csi, 25)] = FMS(KP773010453, T5m, T5j); Ci[WS(csi, 7)] = FMA(KP773010453, T5m, T5j); Ci[WS(csi, 23)] = FMA(KP773010453, T5i, T5h); Ci[WS(csi, 9)] = FMS(KP773010453, T5i, T5h); } } } } } } Cr[WS(csr, 9)] = FNMS(KP773010453, T5o, T5n); Cr[WS(csr, 23)] = FMA(KP773010453, T5o, T5n); } } static const kr2c_desc desc = { 64, "r2cf_64", {198, 0, 196, 0}, &GENUS }; void X(codelet_r2cf_64) (planner *p) { X(kr2c_register) (p, r2cf_64, &desc); } #else /* HAVE_FMA */ /* Generated by: ../../../genfft/gen_r2cf -compact -variables 4 -pipeline-latency 4 -n 64 -name r2cf_64 -include r2cf.h */ /* * This function contains 394 FP additions, 124 FP multiplications, * (or, 342 additions, 72 multiplications, 52 fused multiply/add), * 106 stack variables, 15 constants, and 128 memory accesses */ #include "r2cf.h" static void r2cf_64(R *R0, R *R1, R *Cr, R *Ci, stride rs, stride csr, stride csi, INT v, INT ivs, INT ovs) { DK(KP773010453, +0.773010453362736960810906609758469800971041293); DK(KP634393284, +0.634393284163645498215171613225493370675687095); DK(KP098017140, +0.098017140329560601994195563888641845861136673); DK(KP995184726, 
+0.995184726672196886244836953109479921575474869); DK(KP290284677, +0.290284677254462367636192375817395274691476278); DK(KP956940335, +0.956940335732208864935797886980269969482849206); DK(KP471396736, +0.471396736825997648556387625905254377657460319); DK(KP881921264, +0.881921264348355029712756863660388349508442621); DK(KP195090322, +0.195090322016128267848284868477022240927691618); DK(KP980785280, +0.980785280403230449126182236134239036973933731); DK(KP555570233, +0.555570233019602224742830813948532874374937191); DK(KP831469612, +0.831469612302545237078788377617905756738560812); DK(KP382683432, +0.382683432365089771728459984030398866761344562); DK(KP923879532, +0.923879532511286756128183189396788286822416626); DK(KP707106781, +0.707106781186547524400844362104849039284835938); INT i; for (i = v; i > 0; i = i - 1, R0 = R0 + ivs, R1 = R1 + ivs, Cr = Cr + ovs, Ci = Ci + ovs, MAKE_VOLATILE_STRIDE(rs), MAKE_VOLATILE_STRIDE(csr), MAKE_VOLATILE_STRIDE(csi)) { E T4l, T5a, T15, T3n, T2T, T3Q, T7, Te, Tf, T4A, T4L, T1X, T3B, T23, T3y; E T5I, T66, T4R, T52, T2j, T3F, T2H, T3I, T5P, T69, T1i, T3t, T1l, T3u, TZ; E T63, T4v, T58, T1r, T3r, T1u, T3q, TK, T62, T4s, T57, Tm, Tt, Tu, T4o; E T5b, T1c, T3R, T2Q, T3o, T1M, T3z, T5L, T67, T26, T3C, T4H, T4M, T2y, T3J; E T5S, T6a, T2C, T3G, T4Y, T53; { E T3, T11, Td, T13, T6, T2S, Ta, T12, T14, T2R; { E T1, T2, Tb, Tc; T1 = R0[0]; T2 = R0[WS(rs, 16)]; T3 = T1 + T2; T11 = T1 - T2; Tb = R0[WS(rs, 28)]; Tc = R0[WS(rs, 12)]; Td = Tb + Tc; T13 = Tb - Tc; } { E T4, T5, T8, T9; T4 = R0[WS(rs, 8)]; T5 = R0[WS(rs, 24)]; T6 = T4 + T5; T2S = T4 - T5; T8 = R0[WS(rs, 4)]; T9 = R0[WS(rs, 20)]; Ta = T8 + T9; T12 = T8 - T9; } T4l = T3 - T6; T5a = Td - Ta; T14 = KP707106781 * (T12 + T13); T15 = T11 + T14; T3n = T11 - T14; T2R = KP707106781 * (T13 - T12); T2T = T2R - T2S; T3Q = T2S + T2R; T7 = T3 + T6; Te = Ta + Td; Tf = T7 + Te; } { E T1P, T4J, T21, T4y, T1S, T4K, T1W, T4z; { E T1N, T1O, T1Z, T20; T1N = R1[WS(rs, 28)]; T1O = R1[WS(rs, 12)]; T1P = T1N - T1O; T4J = T1N + T1O; T1Z = R1[0]; T20 = R1[WS(rs, 16)]; T21 = T1Z - T20; T4y = T1Z + T20; } { E T1Q, T1R, T1U, T1V; T1Q = R1[WS(rs, 4)]; T1R = R1[WS(rs, 20)]; T1S = T1Q - T1R; T4K = T1Q + T1R; T1U = R1[WS(rs, 8)]; T1V = R1[WS(rs, 24)]; T1W = T1U - T1V; T4z = T1U + T1V; } T4A = T4y - T4z; T4L = T4J - T4K; { E T1T, T22, T5G, T5H; T1T = KP707106781 * (T1P - T1S); T1X = T1T - T1W; T3B = T1W + T1T; T22 = KP707106781 * (T1S + T1P); T23 = T21 + T22; T3y = T21 - T22; T5G = T4y + T4z; T5H = T4K + T4J; T5I = T5G + T5H; T66 = T5G - T5H; } } { E T2b, T4P, T2G, T4Q, T2e, T51, T2h, T50; { E T29, T2a, T2E, T2F; T29 = R1[WS(rs, 31)]; T2a = R1[WS(rs, 15)]; T2b = T29 - T2a; T4P = T29 + T2a; T2E = R1[WS(rs, 7)]; T2F = R1[WS(rs, 23)]; T2G = T2E - T2F; T4Q = T2E + T2F; } { E T2c, T2d, T2f, T2g; T2c = R1[WS(rs, 3)]; T2d = R1[WS(rs, 19)]; T2e = T2c - T2d; T51 = T2c + T2d; T2f = R1[WS(rs, 27)]; T2g = R1[WS(rs, 11)]; T2h = T2f - T2g; T50 = T2f + T2g; } T4R = T4P - T4Q; T52 = T50 - T51; { E T2i, T2D, T5N, T5O; T2i = KP707106781 * (T2e + T2h); T2j = T2b + T2i; T3F = T2b - T2i; T2D = KP707106781 * (T2h - T2e); T2H = T2D - T2G; T3I = T2G + T2D; T5N = T4P + T4Q; T5O = T51 + T50; T5P = T5N + T5O; T69 = T5N - T5O; } } { E TN, T1e, TX, T1g, TQ, T1k, TU, T1f, T1h, T1j; { E TL, TM, TV, TW; TL = R0[WS(rs, 31)]; TM = R0[WS(rs, 15)]; TN = TL + TM; T1e = TL - TM; TV = R0[WS(rs, 27)]; TW = R0[WS(rs, 11)]; TX = TV + TW; T1g = TV - TW; } { E TO, TP, TS, TT; TO = R0[WS(rs, 7)]; TP = R0[WS(rs, 23)]; TQ = TO + TP; T1k = TO - TP; TS = R0[WS(rs, 3)]; TT = R0[WS(rs, 19)]; TU = 
TS + TT; T1f = TS - TT; } T1h = KP707106781 * (T1f + T1g); T1i = T1e + T1h; T3t = T1e - T1h; T1j = KP707106781 * (T1g - T1f); T1l = T1j - T1k; T3u = T1k + T1j; { E TR, TY, T4t, T4u; TR = TN + TQ; TY = TU + TX; TZ = TR + TY; T63 = TR - TY; T4t = TN - TQ; T4u = TX - TU; T4v = FNMS(KP382683432, T4u, KP923879532 * T4t); T58 = FMA(KP382683432, T4t, KP923879532 * T4u); } } { E Ty, T1s, TI, T1n, TB, T1q, TF, T1o, T1p, T1t; { E Tw, Tx, TG, TH; Tw = R0[WS(rs, 1)]; Tx = R0[WS(rs, 17)]; Ty = Tw + Tx; T1s = Tw - Tx; TG = R0[WS(rs, 29)]; TH = R0[WS(rs, 13)]; TI = TG + TH; T1n = TG - TH; } { E Tz, TA, TD, TE; Tz = R0[WS(rs, 9)]; TA = R0[WS(rs, 25)]; TB = Tz + TA; T1q = Tz - TA; TD = R0[WS(rs, 5)]; TE = R0[WS(rs, 21)]; TF = TD + TE; T1o = TD - TE; } T1p = KP707106781 * (T1n - T1o); T1r = T1p - T1q; T3r = T1q + T1p; T1t = KP707106781 * (T1o + T1n); T1u = T1s + T1t; T3q = T1s - T1t; { E TC, TJ, T4q, T4r; TC = Ty + TB; TJ = TF + TI; TK = TC + TJ; T62 = TC - TJ; T4q = Ty - TB; T4r = TI - TF; T4s = FMA(KP923879532, T4q, KP382683432 * T4r); T57 = FNMS(KP382683432, T4q, KP923879532 * T4r); } } { E Ti, T16, Ts, T1a, Tl, T17, Tp, T19, T4m, T4n; { E Tg, Th, Tq, Tr; Tg = R0[WS(rs, 2)]; Th = R0[WS(rs, 18)]; Ti = Tg + Th; T16 = Tg - Th; Tq = R0[WS(rs, 6)]; Tr = R0[WS(rs, 22)]; Ts = Tq + Tr; T1a = Tq - Tr; } { E Tj, Tk, Tn, To; Tj = R0[WS(rs, 10)]; Tk = R0[WS(rs, 26)]; Tl = Tj + Tk; T17 = Tj - Tk; Tn = R0[WS(rs, 30)]; To = R0[WS(rs, 14)]; Tp = Tn + To; T19 = Tn - To; } Tm = Ti + Tl; Tt = Tp + Ts; Tu = Tm + Tt; T4m = Ti - Tl; T4n = Tp - Ts; T4o = KP707106781 * (T4m + T4n); T5b = KP707106781 * (T4n - T4m); { E T18, T1b, T2O, T2P; T18 = FNMS(KP382683432, T17, KP923879532 * T16); T1b = FMA(KP923879532, T19, KP382683432 * T1a); T1c = T18 + T1b; T3R = T1b - T18; T2O = FNMS(KP923879532, T1a, KP382683432 * T19); T2P = FMA(KP382683432, T16, KP923879532 * T17); T2Q = T2O - T2P; T3o = T2P + T2O; } } { E T1A, T4E, T1K, T4C, T1D, T4F, T1H, T4B; { E T1y, T1z, T1I, T1J; T1y = R1[WS(rs, 30)]; T1z = R1[WS(rs, 14)]; T1A = T1y - T1z; T4E = T1y + T1z; T1I = R1[WS(rs, 10)]; T1J = R1[WS(rs, 26)]; T1K = T1I - T1J; T4C = T1I + T1J; } { E T1B, T1C, T1F, T1G; T1B = R1[WS(rs, 6)]; T1C = R1[WS(rs, 22)]; T1D = T1B - T1C; T4F = T1B + T1C; T1F = R1[WS(rs, 2)]; T1G = R1[WS(rs, 18)]; T1H = T1F - T1G; T4B = T1F + T1G; } { E T1E, T1L, T5J, T5K; T1E = FNMS(KP923879532, T1D, KP382683432 * T1A); T1L = FMA(KP382683432, T1H, KP923879532 * T1K); T1M = T1E - T1L; T3z = T1L + T1E; T5J = T4B + T4C; T5K = T4E + T4F; T5L = T5J + T5K; T67 = T5K - T5J; } { E T24, T25, T4D, T4G; T24 = FNMS(KP382683432, T1K, KP923879532 * T1H); T25 = FMA(KP923879532, T1A, KP382683432 * T1D); T26 = T24 + T25; T3C = T25 - T24; T4D = T4B - T4C; T4G = T4E - T4F; T4H = KP707106781 * (T4D + T4G); T4M = KP707106781 * (T4G - T4D); } } { E T2m, T4S, T2w, T4W, T2p, T4T, T2t, T4V; { E T2k, T2l, T2u, T2v; T2k = R1[WS(rs, 1)]; T2l = R1[WS(rs, 17)]; T2m = T2k - T2l; T4S = T2k + T2l; T2u = R1[WS(rs, 5)]; T2v = R1[WS(rs, 21)]; T2w = T2u - T2v; T4W = T2u + T2v; } { E T2n, T2o, T2r, T2s; T2n = R1[WS(rs, 9)]; T2o = R1[WS(rs, 25)]; T2p = T2n - T2o; T4T = T2n + T2o; T2r = R1[WS(rs, 29)]; T2s = R1[WS(rs, 13)]; T2t = T2r - T2s; T4V = T2r + T2s; } { E T2q, T2x, T5Q, T5R; T2q = FNMS(KP382683432, T2p, KP923879532 * T2m); T2x = FMA(KP923879532, T2t, KP382683432 * T2w); T2y = T2q + T2x; T3J = T2x - T2q; T5Q = T4S + T4T; T5R = T4V + T4W; T5S = T5Q + T5R; T6a = T5R - T5Q; } { E T2A, T2B, T4U, T4X; T2A = FNMS(KP923879532, T2w, KP382683432 * T2t); T2B = FMA(KP382683432, T2m, KP923879532 * T2p); T2C = T2A - T2B; T3G 
= T2B + T2A; T4U = T4S - T4T; T4X = T4V - T4W; T4Y = KP707106781 * (T4U + T4X); T53 = KP707106781 * (T4X - T4U); } } { E Tv, T10, T5X, T5Y, T5Z, T60; Tv = Tf + Tu; T10 = TK + TZ; T5X = Tv + T10; T5Y = T5I + T5L; T5Z = T5P + T5S; T60 = T5Y + T5Z; Cr[WS(csr, 16)] = Tv - T10; Ci[WS(csi, 16)] = T5Z - T5Y; Cr[WS(csr, 32)] = T5X - T60; Cr[0] = T5X + T60; } { E T5F, T5V, T5U, T5W, T5M, T5T; T5F = Tf - Tu; T5V = TZ - TK; T5M = T5I - T5L; T5T = T5P - T5S; T5U = KP707106781 * (T5M + T5T); T5W = KP707106781 * (T5T - T5M); Cr[WS(csr, 24)] = T5F - T5U; Ci[WS(csi, 24)] = T5W - T5V; Cr[WS(csr, 8)] = T5F + T5U; Ci[WS(csi, 8)] = T5V + T5W; } { E T65, T6l, T6k, T6m, T6c, T6g, T6f, T6h; { E T61, T64, T6i, T6j; T61 = T7 - Te; T64 = KP707106781 * (T62 + T63); T65 = T61 + T64; T6l = T61 - T64; T6i = FNMS(KP382683432, T66, KP923879532 * T67); T6j = FMA(KP382683432, T69, KP923879532 * T6a); T6k = T6i + T6j; T6m = T6j - T6i; } { E T68, T6b, T6d, T6e; T68 = FMA(KP923879532, T66, KP382683432 * T67); T6b = FNMS(KP382683432, T6a, KP923879532 * T69); T6c = T68 + T6b; T6g = T6b - T68; T6d = KP707106781 * (T63 - T62); T6e = Tt - Tm; T6f = T6d - T6e; T6h = T6e + T6d; } Cr[WS(csr, 28)] = T65 - T6c; Ci[WS(csi, 28)] = T6k - T6h; Cr[WS(csr, 4)] = T65 + T6c; Ci[WS(csi, 4)] = T6h + T6k; Ci[WS(csi, 12)] = T6f + T6g; Cr[WS(csr, 12)] = T6l + T6m; Ci[WS(csi, 20)] = T6g - T6f; Cr[WS(csr, 20)] = T6l - T6m; } { E T5n, T5D, T5x, T5z, T5q, T5A, T5t, T5B; { E T5l, T5m, T5v, T5w; T5l = T4l - T4o; T5m = T58 - T57; T5n = T5l + T5m; T5D = T5l - T5m; T5v = T4v - T4s; T5w = T5b - T5a; T5x = T5v - T5w; T5z = T5w + T5v; } { E T5o, T5p, T5r, T5s; T5o = T4A - T4H; T5p = T4M - T4L; T5q = FMA(KP831469612, T5o, KP555570233 * T5p); T5A = FNMS(KP555570233, T5o, KP831469612 * T5p); T5r = T4R - T4Y; T5s = T53 - T52; T5t = FNMS(KP555570233, T5s, KP831469612 * T5r); T5B = FMA(KP555570233, T5r, KP831469612 * T5s); } { E T5u, T5C, T5y, T5E; T5u = T5q + T5t; Cr[WS(csr, 26)] = T5n - T5u; Cr[WS(csr, 6)] = T5n + T5u; T5C = T5A + T5B; Ci[WS(csi, 6)] = T5z + T5C; Ci[WS(csi, 26)] = T5C - T5z; T5y = T5t - T5q; Ci[WS(csi, 10)] = T5x + T5y; Ci[WS(csi, 22)] = T5y - T5x; T5E = T5B - T5A; Cr[WS(csr, 22)] = T5D - T5E; Cr[WS(csr, 10)] = T5D + T5E; } } { E T4x, T5j, T5d, T5f, T4O, T5g, T55, T5h; { E T4p, T4w, T59, T5c; T4p = T4l + T4o; T4w = T4s + T4v; T4x = T4p + T4w; T5j = T4p - T4w; T59 = T57 + T58; T5c = T5a + T5b; T5d = T59 - T5c; T5f = T5c + T59; } { E T4I, T4N, T4Z, T54; T4I = T4A + T4H; T4N = T4L + T4M; T4O = FMA(KP980785280, T4I, KP195090322 * T4N); T5g = FNMS(KP195090322, T4I, KP980785280 * T4N); T4Z = T4R + T4Y; T54 = T52 + T53; T55 = FNMS(KP195090322, T54, KP980785280 * T4Z); T5h = FMA(KP195090322, T4Z, KP980785280 * T54); } { E T56, T5i, T5e, T5k; T56 = T4O + T55; Cr[WS(csr, 30)] = T4x - T56; Cr[WS(csr, 2)] = T4x + T56; T5i = T5g + T5h; Ci[WS(csi, 2)] = T5f + T5i; Ci[WS(csi, 30)] = T5i - T5f; T5e = T55 - T4O; Ci[WS(csi, 14)] = T5d + T5e; Ci[WS(csi, 18)] = T5e - T5d; T5k = T5h - T5g; Cr[WS(csr, 18)] = T5j - T5k; Cr[WS(csr, 14)] = T5j + T5k; } } { E T3p, T41, T4c, T3S, T3w, T4b, T49, T4h, T3P, T42, T3E, T3W, T46, T4g, T3L; E T3X; { E T3s, T3v, T3A, T3D; T3p = T3n + T3o; T41 = T3n - T3o; T4c = T3R - T3Q; T3S = T3Q + T3R; T3s = FMA(KP831469612, T3q, KP555570233 * T3r); T3v = FNMS(KP555570233, T3u, KP831469612 * T3t); T3w = T3s + T3v; T4b = T3v - T3s; { E T47, T48, T3N, T3O; T47 = T3F - T3G; T48 = T3J - T3I; T49 = FNMS(KP471396736, T48, KP881921264 * T47); T4h = FMA(KP471396736, T47, KP881921264 * T48); T3N = FNMS(KP555570233, T3q, KP831469612 * T3r); T3O = 
FMA(KP555570233, T3t, KP831469612 * T3u); T3P = T3N + T3O; T42 = T3O - T3N; } T3A = T3y + T3z; T3D = T3B + T3C; T3E = FMA(KP956940335, T3A, KP290284677 * T3D); T3W = FNMS(KP290284677, T3A, KP956940335 * T3D); { E T44, T45, T3H, T3K; T44 = T3y - T3z; T45 = T3C - T3B; T46 = FMA(KP881921264, T44, KP471396736 * T45); T4g = FNMS(KP471396736, T44, KP881921264 * T45); T3H = T3F + T3G; T3K = T3I + T3J; T3L = FNMS(KP290284677, T3K, KP956940335 * T3H); T3X = FMA(KP290284677, T3H, KP956940335 * T3K); } } { E T3x, T3M, T3V, T3Y; T3x = T3p + T3w; T3M = T3E + T3L; Cr[WS(csr, 29)] = T3x - T3M; Cr[WS(csr, 3)] = T3x + T3M; T3V = T3S + T3P; T3Y = T3W + T3X; Ci[WS(csi, 3)] = T3V + T3Y; Ci[WS(csi, 29)] = T3Y - T3V; } { E T3T, T3U, T3Z, T40; T3T = T3P - T3S; T3U = T3L - T3E; Ci[WS(csi, 13)] = T3T + T3U; Ci[WS(csi, 19)] = T3U - T3T; T3Z = T3p - T3w; T40 = T3X - T3W; Cr[WS(csr, 19)] = T3Z - T40; Cr[WS(csr, 13)] = T3Z + T40; } { E T43, T4a, T4f, T4i; T43 = T41 + T42; T4a = T46 + T49; Cr[WS(csr, 27)] = T43 - T4a; Cr[WS(csr, 5)] = T43 + T4a; T4f = T4c + T4b; T4i = T4g + T4h; Ci[WS(csi, 5)] = T4f + T4i; Ci[WS(csi, 27)] = T4i - T4f; } { E T4d, T4e, T4j, T4k; T4d = T4b - T4c; T4e = T49 - T46; Ci[WS(csi, 11)] = T4d + T4e; Ci[WS(csi, 21)] = T4e - T4d; T4j = T41 - T42; T4k = T4h - T4g; Cr[WS(csr, 21)] = T4j - T4k; Cr[WS(csr, 11)] = T4j + T4k; } } { E T1d, T33, T3e, T2U, T1w, T3d, T3b, T3j, T2N, T34, T28, T2Y, T38, T3i, T2J; E T2Z; { E T1m, T1v, T1Y, T27; T1d = T15 - T1c; T33 = T15 + T1c; T3e = T2T + T2Q; T2U = T2Q - T2T; T1m = FMA(KP195090322, T1i, KP980785280 * T1l); T1v = FNMS(KP195090322, T1u, KP980785280 * T1r); T1w = T1m - T1v; T3d = T1v + T1m; { E T39, T3a, T2L, T2M; T39 = T2j + T2y; T3a = T2H + T2C; T3b = FNMS(KP098017140, T3a, KP995184726 * T39); T3j = FMA(KP995184726, T3a, KP098017140 * T39); T2L = FNMS(KP195090322, T1l, KP980785280 * T1i); T2M = FMA(KP980785280, T1u, KP195090322 * T1r); T2N = T2L - T2M; T34 = T2M + T2L; } T1Y = T1M - T1X; T27 = T23 - T26; T28 = FMA(KP634393284, T1Y, KP773010453 * T27); T2Y = FNMS(KP634393284, T27, KP773010453 * T1Y); { E T36, T37, T2z, T2I; T36 = T1X + T1M; T37 = T23 + T26; T38 = FMA(KP098017140, T36, KP995184726 * T37); T3i = FNMS(KP098017140, T37, KP995184726 * T36); T2z = T2j - T2y; T2I = T2C - T2H; T2J = FNMS(KP634393284, T2I, KP773010453 * T2z); T2Z = FMA(KP773010453, T2I, KP634393284 * T2z); } } { E T1x, T2K, T2X, T30; T1x = T1d + T1w; T2K = T28 + T2J; Cr[WS(csr, 25)] = T1x - T2K; Cr[WS(csr, 7)] = T1x + T2K; T2X = T2U + T2N; T30 = T2Y + T2Z; Ci[WS(csi, 7)] = T2X + T30; Ci[WS(csi, 25)] = T30 - T2X; } { E T2V, T2W, T31, T32; T2V = T2N - T2U; T2W = T2J - T28; Ci[WS(csi, 9)] = T2V + T2W; Ci[WS(csi, 23)] = T2W - T2V; T31 = T1d - T1w; T32 = T2Z - T2Y; Cr[WS(csr, 23)] = T31 - T32; Cr[WS(csr, 9)] = T31 + T32; } { E T35, T3c, T3h, T3k; T35 = T33 + T34; T3c = T38 + T3b; Cr[WS(csr, 31)] = T35 - T3c; Cr[WS(csr, 1)] = T35 + T3c; T3h = T3e + T3d; T3k = T3i + T3j; Ci[WS(csi, 1)] = T3h + T3k; Ci[WS(csi, 31)] = T3k - T3h; } { E T3f, T3g, T3l, T3m; T3f = T3d - T3e; T3g = T3b - T38; Ci[WS(csi, 15)] = T3f + T3g; Ci[WS(csi, 17)] = T3g - T3f; T3l = T33 - T34; T3m = T3j - T3i; Cr[WS(csr, 17)] = T3l - T3m; Cr[WS(csr, 15)] = T3l + T3m; } } } } static const kr2c_desc desc = { 64, "r2cf_64", {342, 72, 52, 0}, &GENUS }; void X(codelet_r2cf_64) (planner *p) { X(kr2c_register) (p, r2cf_64, &desc); } #endif /* HAVE_FMA */
tectronics/houdini-ocean-toolkit
src/3rdparty/src/fftw-3.2.2/rdft/scalar/r2cf/r2cf_64.c
C
gpl-2.0
40,247
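/*
 * A minimal illustration, not part of the generated file above: it shows the
 * size-64 real-to-complex transform that r2cf_64 implements, reached the way
 * application code normally reaches it — through FFTW's public planner API,
 * which may select this codelet (or a composition of smaller kernels)
 * internally. Assumes a standard FFTW3 installation (<fftw3.h>, link with
 * -lfftw3 -lm); the test signal and output formatting are arbitrary.
 */
#include <fftw3.h>
#include <math.h>
#include <stdio.h>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

int main(void)
{
    const int n = 64;
    double *in = fftw_alloc_real(n);                    /* n real samples        */
    fftw_complex *out = fftw_alloc_complex(n / 2 + 1);  /* n/2+1 = 33 outputs    */

    for (int i = 0; i < n; i++)                         /* arbitrary test tone   */
        in[i] = cos(2.0 * M_PI * 3.0 * (double)i / n);

    /* FFTW_ESTIMATE keeps planning cheap and does not clobber `in`. */
    fftw_plan p = fftw_plan_dft_r2c_1d(n, in, out, FFTW_ESTIMATE);
    fftw_execute(p);                                    /* runs an r2c kernel    */

    for (int k = 0; k <= n / 2; k++)
        printf("X[%2d] = %+10.4f %+10.4fi\n", k, out[k][0], out[k][1]);

    fftw_destroy_plan(p);
    fftw_free(out);
    fftw_free(in);
    return 0;
}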
/* * rcutorture.c: simple user-level performance/stress test of RCU. * * Usage: * ./rcu <nreaders> rperf [ <seconds> ] * Run a read-side performance test with the specified * number of readers for <seconds> seconds. * ./rcu <nupdaters> uperf [ <seconds> ] * Run an update-side performance test with the specified * number of updaters and specified duration. * ./rcu <nreaders> perf [ <seconds> ] * Run a combined read/update performance test with the specified * number of readers and one updater and specified duration. * * The above tests produce output as follows: * * n_reads: 46008000 n_updates: 146026 nreaders: 2 nupdaters: 1 duration: 1 * ns/read: 43.4707 ns/update: 6848.1 * * The first line lists the total number of RCU reads and updates executed * during the test, the number of reader threads, the number of updater * threads, and the duration of the test in seconds. The second line * lists the average duration of each type of operation in nanoseconds, * or "nan" if the corresponding type of operation was not performed. * * ./rcu <nreaders> stress [ <seconds> ] * Run a stress test with the specified number of readers and * one updater. * * This test produces output as follows: * * n_reads: 114633217 n_updates: 3903415 n_mberror: 0 * rcu_stress_count: 114618391 14826 0 0 0 0 0 0 0 0 0 * * The first line lists the number of RCU read and update operations * executed, followed by the number of memory-ordering violations * (which will be zero in a correct RCU implementation). The second * line lists the number of readers observing progressively more stale * data. A correct RCU implementation will have all but the first two * numbers non-zero. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * * Copyright (c) 2008 Paul E. McKenney, IBM Corporation. */ /* * Test variables. */ #include "qemu/osdep.h" #include "qemu/atomic.h" #include "qemu/rcu.h" #include "qemu/thread.h" long long n_reads = 0LL; long n_updates = 0L; int nthreadsrunning; #define GOFLAG_INIT 0 #define GOFLAG_RUN 1 #define GOFLAG_STOP 2 static volatile int goflag = GOFLAG_INIT; #define RCU_READ_RUN 1000 #define NR_THREADS 100 static QemuMutex counts_mutex; static QemuThread threads[NR_THREADS]; static struct rcu_reader_data *data[NR_THREADS]; static int n_threads; static void create_thread(void *(*func)(void *)) { if (n_threads >= NR_THREADS) { fprintf(stderr, "Thread limit of %d exceeded!\n", NR_THREADS); exit(-1); } qemu_thread_create(&threads[n_threads], "test", func, &data[n_threads], QEMU_THREAD_JOINABLE); n_threads++; } static void wait_all_threads(void) { int i; for (i = 0; i < n_threads; i++) { qemu_thread_join(&threads[i]); } n_threads = 0; } /* * Performance test. 
*/ static void *rcu_read_perf_test(void *arg) { int i; long long n_reads_local = 0; rcu_register_thread(); *(struct rcu_reader_data **)arg = &rcu_reader; atomic_inc(&nthreadsrunning); while (goflag == GOFLAG_INIT) { g_usleep(1000); } while (goflag == GOFLAG_RUN) { for (i = 0; i < RCU_READ_RUN; i++) { rcu_read_lock(); rcu_read_unlock(); } n_reads_local += RCU_READ_RUN; } qemu_mutex_lock(&counts_mutex); n_reads += n_reads_local; qemu_mutex_unlock(&counts_mutex); rcu_unregister_thread(); return NULL; } static void *rcu_update_perf_test(void *arg) { long long n_updates_local = 0; rcu_register_thread(); *(struct rcu_reader_data **)arg = &rcu_reader; atomic_inc(&nthreadsrunning); while (goflag == GOFLAG_INIT) { g_usleep(1000); } while (goflag == GOFLAG_RUN) { synchronize_rcu(); n_updates_local++; } qemu_mutex_lock(&counts_mutex); n_updates += n_updates_local; qemu_mutex_unlock(&counts_mutex); rcu_unregister_thread(); return NULL; } static void perftestinit(void) { nthreadsrunning = 0; } static void perftestrun(int nthreads, int duration, int nreaders, int nupdaters) { while (atomic_read(&nthreadsrunning) < nthreads) { g_usleep(1000); } goflag = GOFLAG_RUN; g_usleep(duration * G_USEC_PER_SEC); goflag = GOFLAG_STOP; wait_all_threads(); printf("n_reads: %lld n_updates: %ld nreaders: %d nupdaters: %d duration: %d\n", n_reads, n_updates, nreaders, nupdaters, duration); printf("ns/read: %g ns/update: %g\n", ((duration * 1000*1000*1000.*(double)nreaders) / (double)n_reads), ((duration * 1000*1000*1000.*(double)nupdaters) / (double)n_updates)); exit(0); } static void perftest(int nreaders, int duration) { int i; perftestinit(); for (i = 0; i < nreaders; i++) { create_thread(rcu_read_perf_test); } create_thread(rcu_update_perf_test); perftestrun(i + 1, duration, nreaders, 1); } static void rperftest(int nreaders, int duration) { int i; perftestinit(); for (i = 0; i < nreaders; i++) { create_thread(rcu_read_perf_test); } perftestrun(i, duration, nreaders, 0); } static void uperftest(int nupdaters, int duration) { int i; perftestinit(); for (i = 0; i < nupdaters; i++) { create_thread(rcu_update_perf_test); } perftestrun(i, duration, 0, nupdaters); } /* * Stress test. 
*/ #define RCU_STRESS_PIPE_LEN 10 struct rcu_stress { int pipe_count; int mbtest; }; struct rcu_stress rcu_stress_array[RCU_STRESS_PIPE_LEN] = { { 0 } }; struct rcu_stress *rcu_stress_current; int rcu_stress_idx; int n_mberror; long long rcu_stress_count[RCU_STRESS_PIPE_LEN + 1]; static void *rcu_read_stress_test(void *arg) { int i; struct rcu_stress *p; int pc; long long n_reads_local = 0; long long rcu_stress_local[RCU_STRESS_PIPE_LEN + 1] = { 0 }; volatile int garbage = 0; rcu_register_thread(); *(struct rcu_reader_data **)arg = &rcu_reader; while (goflag == GOFLAG_INIT) { g_usleep(1000); } while (goflag == GOFLAG_RUN) { rcu_read_lock(); p = atomic_rcu_read(&rcu_stress_current); if (p->mbtest == 0) { n_mberror++; } rcu_read_lock(); for (i = 0; i < 100; i++) { garbage++; } rcu_read_unlock(); pc = p->pipe_count; rcu_read_unlock(); if ((pc > RCU_STRESS_PIPE_LEN) || (pc < 0)) { pc = RCU_STRESS_PIPE_LEN; } rcu_stress_local[pc]++; n_reads_local++; } qemu_mutex_lock(&counts_mutex); n_reads += n_reads_local; for (i = 0; i <= RCU_STRESS_PIPE_LEN; i++) { rcu_stress_count[i] += rcu_stress_local[i]; } qemu_mutex_unlock(&counts_mutex); rcu_unregister_thread(); return NULL; } static void *rcu_update_stress_test(void *arg) { int i; struct rcu_stress *p; rcu_register_thread(); *(struct rcu_reader_data **)arg = &rcu_reader; while (goflag == GOFLAG_INIT) { g_usleep(1000); } while (goflag == GOFLAG_RUN) { i = rcu_stress_idx + 1; if (i >= RCU_STRESS_PIPE_LEN) { i = 0; } p = &rcu_stress_array[i]; p->mbtest = 0; smp_mb(); p->pipe_count = 0; p->mbtest = 1; atomic_rcu_set(&rcu_stress_current, p); rcu_stress_idx = i; for (i = 0; i < RCU_STRESS_PIPE_LEN; i++) { if (i != rcu_stress_idx) { rcu_stress_array[i].pipe_count++; } } synchronize_rcu(); n_updates++; } rcu_unregister_thread(); return NULL; } static void *rcu_fake_update_stress_test(void *arg) { rcu_register_thread(); *(struct rcu_reader_data **)arg = &rcu_reader; while (goflag == GOFLAG_INIT) { g_usleep(1000); } while (goflag == GOFLAG_RUN) { synchronize_rcu(); g_usleep(1000); } rcu_unregister_thread(); return NULL; } static void stresstest(int nreaders, int duration) { int i; rcu_stress_current = &rcu_stress_array[0]; rcu_stress_current->pipe_count = 0; rcu_stress_current->mbtest = 1; for (i = 0; i < nreaders; i++) { create_thread(rcu_read_stress_test); } create_thread(rcu_update_stress_test); for (i = 0; i < 5; i++) { create_thread(rcu_fake_update_stress_test); } goflag = GOFLAG_RUN; g_usleep(duration * G_USEC_PER_SEC); goflag = GOFLAG_STOP; wait_all_threads(); printf("n_reads: %lld n_updates: %ld n_mberror: %d\n", n_reads, n_updates, n_mberror); printf("rcu_stress_count:"); for (i = 0; i <= RCU_STRESS_PIPE_LEN; i++) { printf(" %lld", rcu_stress_count[i]); } printf("\n"); exit(0); } /* GTest interface */ static void gtest_stress(int nreaders, int duration) { int i; rcu_stress_current = &rcu_stress_array[0]; rcu_stress_current->pipe_count = 0; rcu_stress_current->mbtest = 1; for (i = 0; i < nreaders; i++) { create_thread(rcu_read_stress_test); } create_thread(rcu_update_stress_test); for (i = 0; i < 5; i++) { create_thread(rcu_fake_update_stress_test); } goflag = GOFLAG_RUN; g_usleep(duration * G_USEC_PER_SEC); goflag = GOFLAG_STOP; wait_all_threads(); g_assert_cmpint(n_mberror, ==, 0); for (i = 2; i <= RCU_STRESS_PIPE_LEN; i++) { g_assert_cmpint(rcu_stress_count[i], ==, 0); } } static void gtest_stress_1_1(void) { gtest_stress(1, 1); } static void gtest_stress_10_1(void) { gtest_stress(10, 1); } static void gtest_stress_1_5(void) { gtest_stress(1, 5); } 
static void gtest_stress_10_5(void) { gtest_stress(10, 5); } /* * Main program. */ static void usage(int argc, char *argv[]) { fprintf(stderr, "Usage: %s [nreaders [ perf | stress ] ]\n", argv[0]); exit(-1); } int main(int argc, char *argv[]) { int nreaders = 1; int duration = 1; qemu_mutex_init(&counts_mutex); if (argc >= 2 && argv[1][0] == '-') { g_test_init(&argc, &argv, NULL); if (g_test_quick()) { g_test_add_func("/rcu/torture/1reader", gtest_stress_1_1); g_test_add_func("/rcu/torture/10readers", gtest_stress_10_1); } else { g_test_add_func("/rcu/torture/1reader", gtest_stress_1_5); g_test_add_func("/rcu/torture/10readers", gtest_stress_10_5); } return g_test_run(); } if (argc >= 2) { nreaders = strtoul(argv[1], NULL, 0); } if (argc > 3) { duration = strtoul(argv[3], NULL, 0); } if (argc < 3 || strcmp(argv[2], "stress") == 0) { stresstest(nreaders, duration); } else if (strcmp(argv[2], "rperf") == 0) { rperftest(nreaders, duration); } else if (strcmp(argv[2], "uperf") == 0) { uperftest(nreaders, duration); } else if (strcmp(argv[2], "perf") == 0) { perftest(nreaders, duration); } usage(argc, argv); return 0; }
marioli/qemu
tests/rcutorture.c
C
gpl-2.0
11,817
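/*
 * A minimal illustration, not part of rcutorture.c above: the publish/read
 * pattern that the stress test exercises, written with the same QEMU RCU
 * primitives the test calls (rcu_read_lock/rcu_read_unlock, atomic_rcu_read,
 * atomic_rcu_set, synchronize_rcu). The `config` structure and both helper
 * functions are hypothetical; reader threads are assumed to have called
 * rcu_register_thread(), as the test threads above do.
 */
#include "qemu/osdep.h"
#include "qemu/atomic.h"
#include "qemu/rcu.h"

struct config {
    int value;
};

static struct config *current_config;         /* RCU-protected shared pointer  */

/* Reader side: cheap, never blocks the updater. */
static int read_config_value(void)
{
    struct config *c;
    int v = 0;

    rcu_read_lock();
    c = atomic_rcu_read(&current_config);     /* paired with atomic_rcu_set()  */
    if (c) {
        v = c->value;
    }
    rcu_read_unlock();
    return v;
}

/* Update side (single updater assumed): publish, wait, then reclaim. */
static void update_config_value(int new_value)
{
    struct config *new_cfg = g_new0(struct config, 1);
    struct config *old_cfg = current_config;

    new_cfg->value = new_value;
    atomic_rcu_set(&current_config, new_cfg); /* new readers see new_cfg       */
    synchronize_rcu();                        /* wait out pre-existing readers */
    g_free(old_cfg);                          /* safe: no reader holds old_cfg */
}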
/* * ARC Cache Management * * Copyright (C) 2014-15 Synopsys, Inc. (www.synopsys.com) * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com) * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 as * published by the Free Software Foundation. */ #include <linux/module.h> #include <linux/mm.h> #include <linux/sched.h> #include <linux/cache.h> #include <linux/mmu_context.h> #include <linux/syscalls.h> #include <linux/uaccess.h> #include <linux/pagemap.h> #include <asm/cacheflush.h> #include <asm/cachectl.h> #include <asm/setup.h> #ifdef CONFIG_ISA_ARCV2 #define USE_RGN_FLSH 1 #endif static int l2_line_sz; static int ioc_exists; int slc_enable = 1, ioc_enable = 1; unsigned long perip_base = ARC_UNCACHED_ADDR_SPACE; /* legacy value for boot */ unsigned long perip_end = 0xFFFFFFFF; /* legacy value */ void (*_cache_line_loop_ic_fn)(phys_addr_t paddr, unsigned long vaddr, unsigned long sz, const int op, const int full_page); void (*__dma_cache_wback_inv)(phys_addr_t start, unsigned long sz); void (*__dma_cache_inv)(phys_addr_t start, unsigned long sz); void (*__dma_cache_wback)(phys_addr_t start, unsigned long sz); char *arc_cache_mumbojumbo(int c, char *buf, int len) { int n = 0; struct cpuinfo_arc_cache *p; #define PR_CACHE(p, cfg, str) \ if (!(p)->line_len) \ n += scnprintf(buf + n, len - n, str"\t\t: N/A\n"); \ else \ n += scnprintf(buf + n, len - n, \ str"\t\t: %uK, %dway/set, %uB Line, %s%s%s\n", \ (p)->sz_k, (p)->assoc, (p)->line_len, \ (p)->vipt ? "VIPT" : "PIPT", \ (p)->alias ? " aliasing" : "", \ IS_USED_CFG(cfg)); PR_CACHE(&cpuinfo_arc700[c].icache, CONFIG_ARC_HAS_ICACHE, "I-Cache"); PR_CACHE(&cpuinfo_arc700[c].dcache, CONFIG_ARC_HAS_DCACHE, "D-Cache"); p = &cpuinfo_arc700[c].slc; if (p->line_len) n += scnprintf(buf + n, len - n, "SLC\t\t: %uK, %uB Line%s\n", p->sz_k, p->line_len, IS_USED_RUN(slc_enable)); n += scnprintf(buf + n, len - n, "Peripherals\t: %#lx%s%s\n", perip_base, IS_AVAIL3(ioc_exists, ioc_enable, ", IO-Coherency ")); return buf; } /* * Read the Cache Build Confuration Registers, Decode them and save into * the cpuinfo structure for later use. * No Validation done here, simply read/convert the BCRs */ static void read_decode_cache_bcr_arcv2(int cpu) { struct cpuinfo_arc_cache *p_slc = &cpuinfo_arc700[cpu].slc; struct bcr_generic sbcr; struct bcr_slc_cfg { #ifdef CONFIG_CPU_BIG_ENDIAN unsigned int pad:24, way:2, lsz:2, sz:4; #else unsigned int sz:4, lsz:2, way:2, pad:24; #endif } slc_cfg; struct bcr_clust_cfg { #ifdef CONFIG_CPU_BIG_ENDIAN unsigned int pad:7, c:1, num_entries:8, num_cores:8, ver:8; #else unsigned int ver:8, num_cores:8, num_entries:8, c:1, pad:7; #endif } cbcr; struct bcr_volatile { #ifdef CONFIG_CPU_BIG_ENDIAN unsigned int start:4, limit:4, pad:22, order:1, disable:1; #else unsigned int disable:1, order:1, pad:22, limit:4, start:4; #endif } vol; READ_BCR(ARC_REG_SLC_BCR, sbcr); if (sbcr.ver) { READ_BCR(ARC_REG_SLC_CFG, slc_cfg); p_slc->sz_k = 128 << slc_cfg.sz; l2_line_sz = p_slc->line_len = (slc_cfg.lsz == 0) ? 
128 : 64; } READ_BCR(ARC_REG_CLUSTER_BCR, cbcr); if (cbcr.c) ioc_exists = 1; else ioc_enable = 0; /* HS 2.0 didn't have AUX_VOL */ if (cpuinfo_arc700[cpu].core.family > 0x51) { READ_BCR(AUX_VOL, vol); perip_base = vol.start << 28; /* HS 3.0 has limit and strict-ordering fields */ if (cpuinfo_arc700[cpu].core.family > 0x52) perip_end = (vol.limit << 28) - 1; } } void read_decode_cache_bcr(void) { struct cpuinfo_arc_cache *p_ic, *p_dc; unsigned int cpu = smp_processor_id(); struct bcr_cache { #ifdef CONFIG_CPU_BIG_ENDIAN unsigned int pad:12, line_len:4, sz:4, config:4, ver:8; #else unsigned int ver:8, config:4, sz:4, line_len:4, pad:12; #endif } ibcr, dbcr; p_ic = &cpuinfo_arc700[cpu].icache; READ_BCR(ARC_REG_IC_BCR, ibcr); if (!ibcr.ver) goto dc_chk; if (ibcr.ver <= 3) { BUG_ON(ibcr.config != 3); p_ic->assoc = 2; /* Fixed to 2w set assoc */ } else if (ibcr.ver >= 4) { p_ic->assoc = 1 << ibcr.config; /* 1,2,4,8 */ } p_ic->line_len = 8 << ibcr.line_len; p_ic->sz_k = 1 << (ibcr.sz - 1); p_ic->vipt = 1; p_ic->alias = p_ic->sz_k/p_ic->assoc/TO_KB(PAGE_SIZE) > 1; dc_chk: p_dc = &cpuinfo_arc700[cpu].dcache; READ_BCR(ARC_REG_DC_BCR, dbcr); if (!dbcr.ver) goto slc_chk; if (dbcr.ver <= 3) { BUG_ON(dbcr.config != 2); p_dc->assoc = 4; /* Fixed to 4w set assoc */ p_dc->vipt = 1; p_dc->alias = p_dc->sz_k/p_dc->assoc/TO_KB(PAGE_SIZE) > 1; } else if (dbcr.ver >= 4) { p_dc->assoc = 1 << dbcr.config; /* 1,2,4,8 */ p_dc->vipt = 0; p_dc->alias = 0; /* PIPT so can't VIPT alias */ } p_dc->line_len = 16 << dbcr.line_len; p_dc->sz_k = 1 << (dbcr.sz - 1); slc_chk: if (is_isa_arcv2()) read_decode_cache_bcr_arcv2(cpu); } /* * Line Operation on {I,D}-Cache */ #define OP_INV 0x1 #define OP_FLUSH 0x2 #define OP_FLUSH_N_INV 0x3 #define OP_INV_IC 0x4 /* * I-Cache Aliasing in ARC700 VIPT caches (MMU v1-v3) * * ARC VIPT I-cache uses vaddr to index into cache and paddr to match the tag. * The orig Cache Management Module "CDU" only required paddr to invalidate a * certain line since it sufficed as index in Non-Aliasing VIPT cache-geometry. * Infact for distinct V1,V2,P: all of {V1-P},{V2-P},{P-P} would end up fetching * the exact same line. * * However for larger Caches (way-size > page-size) - i.e. in Aliasing config, * paddr alone could not be used to correctly index the cache. * * ------------------ * MMU v1/v2 (Fixed Page Size 8k) * ------------------ * The solution was to provide CDU with these additonal vaddr bits. These * would be bits [x:13], x would depend on cache-geometry, 13 comes from * standard page size of 8k. * H/w folks chose [17:13] to be a future safe range, and moreso these 5 bits * of vaddr could easily be "stuffed" in the paddr as bits [4:0] since the * orig 5 bits of paddr were anyways ignored by CDU line ops, as they * represent the offset within cache-line. The adv of using this "clumsy" * interface for additional info was no new reg was needed in CDU programming * model. * * 17:13 represented the max num of bits passable, actual bits needed were * fewer, based on the num-of-aliases possible. * -for 2 alias possibility, only bit 13 needed (32K cache) * -for 4 alias possibility, bits 14:13 needed (64K cache) * * ------------------ * MMU v3 * ------------------ * This ver of MMU supports variable page sizes (1k-16k): although Linux will * only support 8k (default), 16k and 4k. * However from hardware perspective, smaller page sizes aggravate aliasing * meaning more vaddr bits needed to disambiguate the cache-line-op ; * the existing scheme of piggybacking won't work for certain configurations. 
* Two new registers IC_PTAG and DC_PTAG inttoduced. * "tag" bits are provided in PTAG, index bits in existing IVIL/IVDL/FLDL regs */ static inline void __cache_line_loop_v2(phys_addr_t paddr, unsigned long vaddr, unsigned long sz, const int op, const int full_page) { unsigned int aux_cmd; int num_lines; if (op == OP_INV_IC) { aux_cmd = ARC_REG_IC_IVIL; } else { /* d$ cmd: INV (discard or wback-n-discard) OR FLUSH (wback) */ aux_cmd = op & OP_INV ? ARC_REG_DC_IVDL : ARC_REG_DC_FLDL; } /* Ensure we properly floor/ceil the non-line aligned/sized requests * and have @paddr - aligned to cache line and integral @num_lines. * This however can be avoided for page sized since: * -@paddr will be cache-line aligned already (being page aligned) * -@sz will be integral multiple of line size (being page sized). */ if (!full_page) { sz += paddr & ~CACHE_LINE_MASK; paddr &= CACHE_LINE_MASK; vaddr &= CACHE_LINE_MASK; } num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES); /* MMUv2 and before: paddr contains stuffed vaddrs bits */ paddr |= (vaddr >> PAGE_SHIFT) & 0x1F; while (num_lines-- > 0) { write_aux_reg(aux_cmd, paddr); paddr += L1_CACHE_BYTES; } } /* * For ARC700 MMUv3 I-cache and D-cache flushes * - ARC700 programming model requires paddr and vaddr be passed in seperate * AUX registers (*_IV*L and *_PTAG respectively) irrespective of whether the * caches actually alias or not. * - For HS38, only the aliasing I-cache configuration uses the PTAG reg * (non aliasing I-cache version doesn't; while D-cache can't possibly alias) */ static inline void __cache_line_loop_v3(phys_addr_t paddr, unsigned long vaddr, unsigned long sz, const int op, const int full_page) { unsigned int aux_cmd, aux_tag; int num_lines; if (op == OP_INV_IC) { aux_cmd = ARC_REG_IC_IVIL; aux_tag = ARC_REG_IC_PTAG; } else { aux_cmd = op & OP_INV ? ARC_REG_DC_IVDL : ARC_REG_DC_FLDL; aux_tag = ARC_REG_DC_PTAG; } /* Ensure we properly floor/ceil the non-line aligned/sized requests * and have @paddr - aligned to cache line and integral @num_lines. * This however can be avoided for page sized since: * -@paddr will be cache-line aligned already (being page aligned) * -@sz will be integral multiple of line size (being page sized). 
*/ if (!full_page) { sz += paddr & ~CACHE_LINE_MASK; paddr &= CACHE_LINE_MASK; vaddr &= CACHE_LINE_MASK; } num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES); /* * MMUv3, cache ops require paddr in PTAG reg * if V-P const for loop, PTAG can be written once outside loop */ if (full_page) write_aux_reg(aux_tag, paddr); /* * This is technically for MMU v4, using the MMU v3 programming model * Special work for HS38 aliasing I-cache configuration with PAE40 * - upper 8 bits of paddr need to be written into PTAG_HI * - (and needs to be written before the lower 32 bits) * Note that PTAG_HI is hoisted outside the line loop */ if (is_pae40_enabled() && op == OP_INV_IC) write_aux_reg(ARC_REG_IC_PTAG_HI, (u64)paddr >> 32); while (num_lines-- > 0) { if (!full_page) { write_aux_reg(aux_tag, paddr); paddr += L1_CACHE_BYTES; } write_aux_reg(aux_cmd, vaddr); vaddr += L1_CACHE_BYTES; } } #ifndef USE_RGN_FLSH /* * In HS38x (MMU v4), I-cache is VIPT (can alias), D-cache is PIPT * Here's how cache ops are implemented * * - D-cache: only paddr needed (in DC_IVDL/DC_FLDL) * - I-cache Non Aliasing: Despite VIPT, only paddr needed (in IC_IVIL) * - I-cache Aliasing: Both vaddr and paddr needed (in IC_IVIL, IC_PTAG * respectively, similar to MMU v3 programming model, hence * __cache_line_loop_v3() is used) * * If PAE40 is enabled, independent of aliasing considerations, the higher bits * needs to be written into PTAG_HI */ static inline void __cache_line_loop_v4(phys_addr_t paddr, unsigned long vaddr, unsigned long sz, const int op, const int full_page) { unsigned int aux_cmd; int num_lines; if (op == OP_INV_IC) { aux_cmd = ARC_REG_IC_IVIL; } else { /* d$ cmd: INV (discard or wback-n-discard) OR FLUSH (wback) */ aux_cmd = op & OP_INV ? ARC_REG_DC_IVDL : ARC_REG_DC_FLDL; } /* Ensure we properly floor/ceil the non-line aligned/sized requests * and have @paddr - aligned to cache line and integral @num_lines. * This however can be avoided for page sized since: * -@paddr will be cache-line aligned already (being page aligned) * -@sz will be integral multiple of line size (being page sized). 
*/ if (!full_page) { sz += paddr & ~CACHE_LINE_MASK; paddr &= CACHE_LINE_MASK; } num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES); /* * For HS38 PAE40 configuration * - upper 8 bits of paddr need to be written into PTAG_HI * - (and needs to be written before the lower 32 bits) */ if (is_pae40_enabled()) { if (op == OP_INV_IC) /* * Non aliasing I-cache in HS38, * aliasing I-cache handled in __cache_line_loop_v3() */ write_aux_reg(ARC_REG_IC_PTAG_HI, (u64)paddr >> 32); else write_aux_reg(ARC_REG_DC_PTAG_HI, (u64)paddr >> 32); } while (num_lines-- > 0) { write_aux_reg(aux_cmd, paddr); paddr += L1_CACHE_BYTES; } } #else /* * optimized flush operation which takes a region as opposed to iterating per line */ static inline void __cache_line_loop_v4(phys_addr_t paddr, unsigned long vaddr, unsigned long sz, const int op, const int full_page) { unsigned int s, e; /* Only for Non aliasing I-cache in HS38 */ if (op == OP_INV_IC) { s = ARC_REG_IC_IVIR; e = ARC_REG_IC_ENDR; } else { s = ARC_REG_DC_STARTR; e = ARC_REG_DC_ENDR; } if (!full_page) { /* for any leading gap between @paddr and start of cache line */ sz += paddr & ~CACHE_LINE_MASK; paddr &= CACHE_LINE_MASK; /* * account for any trailing gap to end of cache line * this is equivalent to DIV_ROUND_UP() in line ops above */ sz += L1_CACHE_BYTES - 1; } if (is_pae40_enabled()) { /* TBD: check if crossing 4TB boundary */ if (op == OP_INV_IC) write_aux_reg(ARC_REG_IC_PTAG_HI, (u64)paddr >> 32); else write_aux_reg(ARC_REG_DC_PTAG_HI, (u64)paddr >> 32); } /* ENDR needs to be set ahead of START */ write_aux_reg(e, paddr + sz); /* ENDR is exclusive */ write_aux_reg(s, paddr); /* caller waits on DC_CTRL.FS */ } #endif #if (CONFIG_ARC_MMU_VER < 3) #define __cache_line_loop __cache_line_loop_v2 #elif (CONFIG_ARC_MMU_VER == 3) #define __cache_line_loop __cache_line_loop_v3 #elif (CONFIG_ARC_MMU_VER > 3) #define __cache_line_loop __cache_line_loop_v4 #endif #ifdef CONFIG_ARC_HAS_DCACHE /*************************************************************** * Machine specific helpers for Entire D-Cache or Per Line ops */ #ifndef USE_RGN_FLSH /* * this version avoids extra read/write of DC_CTRL for flush or invalid ops * in the non region flush regime (such as for ARCompact) */ static inline void __before_dc_op(const int op) { if (op == OP_FLUSH_N_INV) { /* Dcache provides 2 cmd: FLUSH or INV * INV inturn has sub-modes: DISCARD or FLUSH-BEFORE * flush-n-inv is achieved by INV cmd but with IM=1 * So toggle INV sub-mode depending on op request and default */ const unsigned int ctl = ARC_REG_DC_CTRL; write_aux_reg(ctl, read_aux_reg(ctl) | DC_CTRL_INV_MODE_FLUSH); } } #else static inline void __before_dc_op(const int op) { const unsigned int ctl = ARC_REG_DC_CTRL; unsigned int val = read_aux_reg(ctl); if (op == OP_FLUSH_N_INV) { val |= DC_CTRL_INV_MODE_FLUSH; } if (op != OP_INV_IC) { /* * Flush / Invalidate is provided by DC_CTRL.RNG_OP 0 or 1 * combined Flush-n-invalidate uses DC_CTRL.IM = 1 set above */ val &= ~DC_CTRL_RGN_OP_MSK; if (op & OP_INV) val |= DC_CTRL_RGN_OP_INV; } write_aux_reg(ctl, val); } #endif static inline void __after_dc_op(const int op) { if (op & OP_FLUSH) { const unsigned int ctl = ARC_REG_DC_CTRL; unsigned int reg; /* flush / flush-n-inv both wait */ while ((reg = read_aux_reg(ctl)) & DC_CTRL_FLUSH_STATUS) ; /* Switch back to default Invalidate mode */ if (op == OP_FLUSH_N_INV) write_aux_reg(ctl, reg & ~DC_CTRL_INV_MODE_FLUSH); } } /* * Operation on Entire D-Cache * @op = {OP_INV, OP_FLUSH, OP_FLUSH_N_INV} * Note that constant propagation ensures all the 
checks are gone * in generated code */ static inline void __dc_entire_op(const int op) { int aux; __before_dc_op(op); if (op & OP_INV) /* Inv or flush-n-inv use same cmd reg */ aux = ARC_REG_DC_IVDC; else aux = ARC_REG_DC_FLSH; write_aux_reg(aux, 0x1); __after_dc_op(op); } static inline void __dc_disable(void) { const int r = ARC_REG_DC_CTRL; __dc_entire_op(OP_FLUSH_N_INV); write_aux_reg(r, read_aux_reg(r) | DC_CTRL_DIS); } static void __dc_enable(void) { const int r = ARC_REG_DC_CTRL; write_aux_reg(r, read_aux_reg(r) & ~DC_CTRL_DIS); } /* For kernel mappings cache operation: index is same as paddr */ #define __dc_line_op_k(p, sz, op) __dc_line_op(p, p, sz, op) /* * D-Cache Line ops: Per Line INV (discard or wback+discard) or FLUSH (wback) */ static inline void __dc_line_op(phys_addr_t paddr, unsigned long vaddr, unsigned long sz, const int op) { const int full_page = __builtin_constant_p(sz) && sz == PAGE_SIZE; unsigned long flags; local_irq_save(flags); __before_dc_op(op); __cache_line_loop(paddr, vaddr, sz, op, full_page); __after_dc_op(op); local_irq_restore(flags); } #else #define __dc_entire_op(op) #define __dc_disable() #define __dc_enable() #define __dc_line_op(paddr, vaddr, sz, op) #define __dc_line_op_k(paddr, sz, op) #endif /* CONFIG_ARC_HAS_DCACHE */ #ifdef CONFIG_ARC_HAS_ICACHE static inline void __ic_entire_inv(void) { write_aux_reg(ARC_REG_IC_IVIC, 1); read_aux_reg(ARC_REG_IC_CTRL); /* blocks */ } static inline void __ic_line_inv_vaddr_local(phys_addr_t paddr, unsigned long vaddr, unsigned long sz) { const int full_page = __builtin_constant_p(sz) && sz == PAGE_SIZE; unsigned long flags; local_irq_save(flags); (*_cache_line_loop_ic_fn)(paddr, vaddr, sz, OP_INV_IC, full_page); local_irq_restore(flags); } #ifndef CONFIG_SMP #define __ic_line_inv_vaddr(p, v, s) __ic_line_inv_vaddr_local(p, v, s) #else struct ic_inv_args { phys_addr_t paddr, vaddr; int sz; }; static void __ic_line_inv_vaddr_helper(void *info) { struct ic_inv_args *ic_inv = info; __ic_line_inv_vaddr_local(ic_inv->paddr, ic_inv->vaddr, ic_inv->sz); } static void __ic_line_inv_vaddr(phys_addr_t paddr, unsigned long vaddr, unsigned long sz) { struct ic_inv_args ic_inv = { .paddr = paddr, .vaddr = vaddr, .sz = sz }; on_each_cpu(__ic_line_inv_vaddr_helper, &ic_inv, 1); } #endif /* CONFIG_SMP */ #else /* !CONFIG_ARC_HAS_ICACHE */ #define __ic_entire_inv() #define __ic_line_inv_vaddr(pstart, vstart, sz) #endif /* CONFIG_ARC_HAS_ICACHE */ noinline void slc_op_rgn(phys_addr_t paddr, unsigned long sz, const int op) { #ifdef CONFIG_ISA_ARCV2 /* * SLC is shared between all cores and concurrent aux operations from * multiple cores need to be serialized using a spinlock * A concurrent operation can be silently ignored and/or the old/new * operation can remain incomplete forever (lockup in SLC_CTRL_BUSY loop * below) */ static DEFINE_SPINLOCK(lock); unsigned long flags; unsigned int ctrl; phys_addr_t end; spin_lock_irqsave(&lock, flags); /* * The Region Flush operation is specified by CTRL.RGN_OP[11..9] * - b'000 (default) is Flush, * - b'001 is Invalidate if CTRL.IM == 0 * - b'001 is Flush-n-Invalidate if CTRL.IM == 1 */ ctrl = read_aux_reg(ARC_REG_SLC_CTRL); /* Don't rely on default value of IM bit */ if (!(op & OP_FLUSH)) /* i.e. 
OP_INV */ ctrl &= ~SLC_CTRL_IM; /* clear IM: Disable flush before Inv */ else ctrl |= SLC_CTRL_IM; if (op & OP_INV) ctrl |= SLC_CTRL_RGN_OP_INV; /* Inv or flush-n-inv */ else ctrl &= ~SLC_CTRL_RGN_OP_INV; write_aux_reg(ARC_REG_SLC_CTRL, ctrl); /* * Lower bits are ignored, no need to clip * END needs to be setup before START (latter triggers the operation) * END can't be same as START, so add (l2_line_sz - 1) to sz */ end = paddr + sz + l2_line_sz - 1; if (is_pae40_enabled()) write_aux_reg(ARC_REG_SLC_RGN_END1, upper_32_bits(end)); write_aux_reg(ARC_REG_SLC_RGN_END, lower_32_bits(end)); if (is_pae40_enabled()) write_aux_reg(ARC_REG_SLC_RGN_START1, upper_32_bits(paddr)); write_aux_reg(ARC_REG_SLC_RGN_START, lower_32_bits(paddr)); /* Make sure "busy" bit reports correct stataus, see STAR 9001165532 */ read_aux_reg(ARC_REG_SLC_CTRL); while (read_aux_reg(ARC_REG_SLC_CTRL) & SLC_CTRL_BUSY); spin_unlock_irqrestore(&lock, flags); #endif } noinline void slc_op_line(phys_addr_t paddr, unsigned long sz, const int op) { #ifdef CONFIG_ISA_ARCV2 /* * SLC is shared between all cores and concurrent aux operations from * multiple cores need to be serialized using a spinlock * A concurrent operation can be silently ignored and/or the old/new * operation can remain incomplete forever (lockup in SLC_CTRL_BUSY loop * below) */ static DEFINE_SPINLOCK(lock); const unsigned long SLC_LINE_MASK = ~(l2_line_sz - 1); unsigned int ctrl, cmd; unsigned long flags; int num_lines; spin_lock_irqsave(&lock, flags); ctrl = read_aux_reg(ARC_REG_SLC_CTRL); /* Don't rely on default value of IM bit */ if (!(op & OP_FLUSH)) /* i.e. OP_INV */ ctrl &= ~SLC_CTRL_IM; /* clear IM: Disable flush before Inv */ else ctrl |= SLC_CTRL_IM; write_aux_reg(ARC_REG_SLC_CTRL, ctrl); cmd = op & OP_INV ? ARC_AUX_SLC_IVDL : ARC_AUX_SLC_FLDL; sz += paddr & ~SLC_LINE_MASK; paddr &= SLC_LINE_MASK; num_lines = DIV_ROUND_UP(sz, l2_line_sz); while (num_lines-- > 0) { write_aux_reg(cmd, paddr); paddr += l2_line_sz; } /* Make sure "busy" bit reports correct stataus, see STAR 9001165532 */ read_aux_reg(ARC_REG_SLC_CTRL); while (read_aux_reg(ARC_REG_SLC_CTRL) & SLC_CTRL_BUSY); spin_unlock_irqrestore(&lock, flags); #endif } #define slc_op(paddr, sz, op) slc_op_rgn(paddr, sz, op) noinline static void slc_entire_op(const int op) { unsigned int ctrl, r = ARC_REG_SLC_CTRL; ctrl = read_aux_reg(r); if (!(op & OP_FLUSH)) /* i.e. 
OP_INV */ ctrl &= ~SLC_CTRL_IM; /* clear IM: Disable flush before Inv */ else ctrl |= SLC_CTRL_IM; write_aux_reg(r, ctrl); if (op & OP_INV) /* Inv or flush-n-inv use same cmd reg */ write_aux_reg(ARC_REG_SLC_INVALIDATE, 0x1); else write_aux_reg(ARC_REG_SLC_FLUSH, 0x1); /* Make sure "busy" bit reports correct stataus, see STAR 9001165532 */ read_aux_reg(r); /* Important to wait for flush to complete */ while (read_aux_reg(r) & SLC_CTRL_BUSY); } static inline void arc_slc_disable(void) { const int r = ARC_REG_SLC_CTRL; slc_entire_op(OP_FLUSH_N_INV); write_aux_reg(r, read_aux_reg(r) | SLC_CTRL_DIS); } static inline void arc_slc_enable(void) { const int r = ARC_REG_SLC_CTRL; write_aux_reg(r, read_aux_reg(r) & ~SLC_CTRL_DIS); } /*********************************************************** * Exported APIs */ /* * Handle cache congruency of kernel and userspace mappings of page when kernel * writes-to/reads-from * * The idea is to defer flushing of kernel mapping after a WRITE, possible if: * -dcache is NOT aliasing, hence any U/K-mappings of page are congruent * -U-mapping doesn't exist yet for page (finalised in update_mmu_cache) * -In SMP, if hardware caches are coherent * * There's a corollary case, where kernel READs from a userspace mapped page. * If the U-mapping is not congruent to to K-mapping, former needs flushing. */ void flush_dcache_page(struct page *page) { struct address_space *mapping; if (!cache_is_vipt_aliasing()) { clear_bit(PG_dc_clean, &page->flags); return; } /* don't handle anon pages here */ mapping = page_mapping(page); if (!mapping) return; /* * pagecache page, file not yet mapped to userspace * Make a note that K-mapping is dirty */ if (!mapping_mapped(mapping)) { clear_bit(PG_dc_clean, &page->flags); } else if (page_mapcount(page)) { /* kernel reading from page with U-mapping */ phys_addr_t paddr = (unsigned long)page_address(page); unsigned long vaddr = page->index << PAGE_SHIFT; if (addr_not_cache_congruent(paddr, vaddr)) __flush_dcache_page(paddr, vaddr); } } EXPORT_SYMBOL(flush_dcache_page); /* * DMA ops for systems with L1 cache only * Make memory coherent with L1 cache by flushing/invalidating L1 lines */ static void __dma_cache_wback_inv_l1(phys_addr_t start, unsigned long sz) { __dc_line_op_k(start, sz, OP_FLUSH_N_INV); } static void __dma_cache_inv_l1(phys_addr_t start, unsigned long sz) { __dc_line_op_k(start, sz, OP_INV); } static void __dma_cache_wback_l1(phys_addr_t start, unsigned long sz) { __dc_line_op_k(start, sz, OP_FLUSH); } /* * DMA ops for systems with both L1 and L2 caches, but without IOC * Both L1 and L2 lines need to be explicitly flushed/invalidated */ static void __dma_cache_wback_inv_slc(phys_addr_t start, unsigned long sz) { __dc_line_op_k(start, sz, OP_FLUSH_N_INV); slc_op(start, sz, OP_FLUSH_N_INV); } static void __dma_cache_inv_slc(phys_addr_t start, unsigned long sz) { __dc_line_op_k(start, sz, OP_INV); slc_op(start, sz, OP_INV); } static void __dma_cache_wback_slc(phys_addr_t start, unsigned long sz) { __dc_line_op_k(start, sz, OP_FLUSH); slc_op(start, sz, OP_FLUSH); } /* * DMA ops for systems with IOC * IOC hardware snoops all DMA traffic keeping the caches consistent with * memory - eliding need for any explicit cache maintenance of DMA buffers */ static void __dma_cache_wback_inv_ioc(phys_addr_t start, unsigned long sz) {} static void __dma_cache_inv_ioc(phys_addr_t start, unsigned long sz) {} static void __dma_cache_wback_ioc(phys_addr_t start, unsigned long sz) {} /* * Exported DMA API */ void dma_cache_wback_inv(phys_addr_t 
start, unsigned long sz) { __dma_cache_wback_inv(start, sz); } EXPORT_SYMBOL(dma_cache_wback_inv); void dma_cache_inv(phys_addr_t start, unsigned long sz) { __dma_cache_inv(start, sz); } EXPORT_SYMBOL(dma_cache_inv); void dma_cache_wback(phys_addr_t start, unsigned long sz) { __dma_cache_wback(start, sz); } EXPORT_SYMBOL(dma_cache_wback); /* * This is API for making I/D Caches consistent when modifying * kernel code (loadable modules, kprobes, kgdb...) * This is called on insmod, with kernel virtual address for CODE of * the module. ARC cache maintenance ops require PHY address thus we * need to convert vmalloc addr to PHY addr */ void flush_icache_range(unsigned long kstart, unsigned long kend) { unsigned int tot_sz; WARN(kstart < TASK_SIZE, "%s() can't handle user vaddr", __func__); /* Shortcut for bigger flush ranges. * Here we don't care if this was kernel virtual or phy addr */ tot_sz = kend - kstart; if (tot_sz > PAGE_SIZE) { flush_cache_all(); return; } /* Case: Kernel Phy addr (0x8000_0000 onwards) */ if (likely(kstart > PAGE_OFFSET)) { /* * The 2nd arg despite being paddr will be used to index icache * This is OK since no alternate virtual mappings will exist * given the callers for this case: kprobe/kgdb in built-in * kernel code only. */ __sync_icache_dcache(kstart, kstart, kend - kstart); return; } /* * Case: Kernel Vaddr (0x7000_0000 to 0x7fff_ffff) * (1) ARC Cache Maintenance ops only take Phy addr, hence special * handling of kernel vaddr. * * (2) Despite @tot_sz being < PAGE_SIZE (bigger cases handled already), * it still needs to handle a 2 page scenario, where the range * straddles across 2 virtual pages and hence need for loop */ while (tot_sz > 0) { unsigned int off, sz; unsigned long phy, pfn; off = kstart % PAGE_SIZE; pfn = vmalloc_to_pfn((void *)kstart); phy = (pfn << PAGE_SHIFT) + off; sz = min_t(unsigned int, tot_sz, PAGE_SIZE - off); __sync_icache_dcache(phy, kstart, sz); kstart += sz; tot_sz -= sz; } } EXPORT_SYMBOL(flush_icache_range); /* * General purpose helper to make I and D cache lines consistent. * @paddr is phy addr of region * @vaddr is typically user vaddr (breakpoint) or kernel vaddr (vmalloc) * However in one instance, when called by kprobe (for a breakpt in * builtin kernel code) @vaddr will be paddr only, meaning CDU operation will * use a paddr to index the cache (despite VIPT). This is fine since since a * builtin kernel page will not have any virtual mappings. * kprobe on loadable module will be kernel vaddr. 
*/ void __sync_icache_dcache(phys_addr_t paddr, unsigned long vaddr, int len) { __dc_line_op(paddr, vaddr, len, OP_FLUSH_N_INV); __ic_line_inv_vaddr(paddr, vaddr, len); } /* wrapper to compile time eliminate alignment checks in flush loop */ void __inv_icache_page(phys_addr_t paddr, unsigned long vaddr) { __ic_line_inv_vaddr(paddr, vaddr, PAGE_SIZE); } /* * wrapper to clearout kernel or userspace mappings of a page * For kernel mappings @vaddr == @paddr */ void __flush_dcache_page(phys_addr_t paddr, unsigned long vaddr) { __dc_line_op(paddr, vaddr & PAGE_MASK, PAGE_SIZE, OP_FLUSH_N_INV); } noinline void flush_cache_all(void) { unsigned long flags; local_irq_save(flags); __ic_entire_inv(); __dc_entire_op(OP_FLUSH_N_INV); local_irq_restore(flags); } #ifdef CONFIG_ARC_CACHE_VIPT_ALIASING void flush_cache_mm(struct mm_struct *mm) { flush_cache_all(); } void flush_cache_page(struct vm_area_struct *vma, unsigned long u_vaddr, unsigned long pfn) { unsigned int paddr = pfn << PAGE_SHIFT; u_vaddr &= PAGE_MASK; __flush_dcache_page(paddr, u_vaddr); if (vma->vm_flags & VM_EXEC) __inv_icache_page(paddr, u_vaddr); } void flush_cache_range(struct vm_area_struct *vma, unsigned long start, unsigned long end) { flush_cache_all(); } void flush_anon_page(struct vm_area_struct *vma, struct page *page, unsigned long u_vaddr) { /* TBD: do we really need to clear the kernel mapping */ __flush_dcache_page(page_address(page), u_vaddr); __flush_dcache_page(page_address(page), page_address(page)); } #endif void copy_user_highpage(struct page *to, struct page *from, unsigned long u_vaddr, struct vm_area_struct *vma) { void *kfrom = kmap_atomic(from); void *kto = kmap_atomic(to); int clean_src_k_mappings = 0; /* * If SRC page was already mapped in userspace AND it's U-mapping is * not congruent with K-mapping, sync former to physical page so that * K-mapping in memcpy below, sees the right data * * Note that while @u_vaddr refers to DST page's userspace vaddr, it is * equally valid for SRC page as well * * For !VIPT cache, all of this gets compiled out as * addr_not_cache_congruent() is 0 */ if (page_mapcount(from) && addr_not_cache_congruent(kfrom, u_vaddr)) { __flush_dcache_page((unsigned long)kfrom, u_vaddr); clean_src_k_mappings = 1; } copy_page(kto, kfrom); /* * Mark DST page K-mapping as dirty for a later finalization by * update_mmu_cache(). Although the finalization could have been done * here as well (given that both vaddr/paddr are available). * But update_mmu_cache() already has code to do that for other * non copied user pages (e.g. read faults which wire in pagecache page * directly). */ clear_bit(PG_dc_clean, &to->flags); /* * if SRC was already usermapped and non-congruent to kernel mapping * sync the kernel mapping back to physical page */ if (clean_src_k_mappings) { __flush_dcache_page((unsigned long)kfrom, (unsigned long)kfrom); set_bit(PG_dc_clean, &from->flags); } else { clear_bit(PG_dc_clean, &from->flags); } kunmap_atomic(kto); kunmap_atomic(kfrom); } void clear_user_page(void *to, unsigned long u_vaddr, struct page *page) { clear_page(to); clear_bit(PG_dc_clean, &page->flags); } /********************************************************************** * Explicit Cache flush request from user space via syscall * Needed for JITs which generate code on the fly */ SYSCALL_DEFINE3(cacheflush, uint32_t, start, uint32_t, sz, uint32_t, flags) { /* TBD: optimize this */ flush_cache_all(); return 0; } /* * IO-Coherency (IOC) setup rules: * * 1. 
Needs to be at system level, so only once by Master core * Non-Masters need not be accessing caches at that time * - They are either HALT_ON_RESET and kick started much later or * - if run on reset, need to ensure that arc_platform_smp_wait_to_boot() * doesn't perturb caches or coherency unit * * 2. caches (L1 and SLC) need to be purged (flush+inv) before setting up IOC, * otherwise any straggler data might behave strangely post IOC enabling * * 3. All Caches need to be disabled when setting up IOC to elide any in-flight * Coherency transactions */ noinline void __init arc_ioc_setup(void) { unsigned int ioc_base, mem_sz; /* Flush + invalidate + disable L1 dcache */ __dc_disable(); /* Flush + invalidate SLC */ if (read_aux_reg(ARC_REG_SLC_BCR)) slc_entire_op(OP_FLUSH_N_INV); /* * currently IOC Aperture covers entire DDR * TBD: fix for PGU + 1GB of low mem * TBD: fix for PAE */ mem_sz = arc_get_mem_sz(); if (!is_power_of_2(mem_sz) || mem_sz < 4096) panic("IOC Aperture size must be power of 2 larger than 4KB"); /* * IOC Aperture size decoded as 2 ^ (SIZE + 2) KB, * so setting 0x11 implies 512MB, 0x12 implies 1GB... */ write_aux_reg(ARC_REG_IO_COH_AP0_SIZE, order_base_2(mem_sz >> 10) - 2); /* for now assume kernel base is start of IOC aperture */ ioc_base = CONFIG_LINUX_RAM_BASE; if (ioc_base % mem_sz != 0) panic("IOC Aperture start must be aligned to the size of the aperture"); write_aux_reg(ARC_REG_IO_COH_AP0_BASE, ioc_base >> 12); write_aux_reg(ARC_REG_IO_COH_PARTIAL, 1); write_aux_reg(ARC_REG_IO_COH_ENABLE, 1); /* Re-enable L1 dcache */ __dc_enable(); } /* * Cache related boot time checks/setups only needed on master CPU: * - Geometry checks (kernel build and hardware agree: e.g. L1_CACHE_BYTES) * Assume SMP only, so all cores will have same cache config. 
A check on * one core suffices for all * - IOC setup / dma callbacks only need to be done once */ void __init arc_cache_init_master(void) { unsigned int __maybe_unused cpu = smp_processor_id(); if (IS_ENABLED(CONFIG_ARC_HAS_ICACHE)) { struct cpuinfo_arc_cache *ic = &cpuinfo_arc700[cpu].icache; if (!ic->line_len) panic("cache support enabled but non-existent cache\n"); if (ic->line_len != L1_CACHE_BYTES) panic("ICache line [%d] != kernel Config [%d]", ic->line_len, L1_CACHE_BYTES); /* * In MMU v4 (HS38x) the aliasing icache config uses IVIL/PTAG * pair to provide vaddr/paddr respectively, just as in MMU v3 */ if (is_isa_arcv2() && ic->alias) _cache_line_loop_ic_fn = __cache_line_loop_v3; else _cache_line_loop_ic_fn = __cache_line_loop; } if (IS_ENABLED(CONFIG_ARC_HAS_DCACHE)) { struct cpuinfo_arc_cache *dc = &cpuinfo_arc700[cpu].dcache; if (!dc->line_len) panic("cache support enabled but non-existent cache\n"); if (dc->line_len != L1_CACHE_BYTES) panic("DCache line [%d] != kernel Config [%d]", dc->line_len, L1_CACHE_BYTES); /* check for D-Cache aliasing on ARCompact: ARCv2 has PIPT */ if (is_isa_arcompact()) { int handled = IS_ENABLED(CONFIG_ARC_CACHE_VIPT_ALIASING); int num_colors = dc->sz_k/dc->assoc/TO_KB(PAGE_SIZE); if (dc->alias) { if (!handled) panic("Enable CONFIG_ARC_CACHE_VIPT_ALIASING\n"); if (CACHE_COLORS_NUM != num_colors) panic("CACHE_COLORS_NUM not optimized for config\n"); } else if (!dc->alias && handled) { panic("Disable CONFIG_ARC_CACHE_VIPT_ALIASING\n"); } } } /* Note that SLC disable not formally supported till HS 3.0 */ if (is_isa_arcv2() && l2_line_sz && !slc_enable) arc_slc_disable(); if (is_isa_arcv2() && ioc_enable) arc_ioc_setup(); if (is_isa_arcv2() && ioc_enable) { __dma_cache_wback_inv = __dma_cache_wback_inv_ioc; __dma_cache_inv = __dma_cache_inv_ioc; __dma_cache_wback = __dma_cache_wback_ioc; } else if (is_isa_arcv2() && l2_line_sz && slc_enable) { __dma_cache_wback_inv = __dma_cache_wback_inv_slc; __dma_cache_inv = __dma_cache_inv_slc; __dma_cache_wback = __dma_cache_wback_slc; } else { __dma_cache_wback_inv = __dma_cache_wback_inv_l1; __dma_cache_inv = __dma_cache_inv_l1; __dma_cache_wback = __dma_cache_wback_l1; } } void __ref arc_cache_init(void) { unsigned int __maybe_unused cpu = smp_processor_id(); char str[256]; pr_info("%s", arc_cache_mumbojumbo(0, str, sizeof(str))); if (!cpu) arc_cache_init_master(); /* * In PAE regime, TLB and cache maintenance ops take wider addresses * And even if PAE is not enabled in kernel, the upper 32-bits still need * to be zeroed to keep the ops sane. * As an optimization for more common !PAE enabled case, zero them out * once at init, rather than checking/setting to 0 for every runtime op */ if (is_isa_arcv2() && pae40_exist_but_not_enab()) { if (IS_ENABLED(CONFIG_ARC_HAS_ICACHE)) write_aux_reg(ARC_REG_IC_PTAG_HI, 0); if (IS_ENABLED(CONFIG_ARC_HAS_DCACHE)) write_aux_reg(ARC_REG_DC_PTAG_HI, 0); if (l2_line_sz) { write_aux_reg(ARC_REG_SLC_RGN_END1, 0); write_aux_reg(ARC_REG_SLC_RGN_START1, 0); } } }
HarveyHunt/linux
arch/arc/mm/cache.c
C
gpl-2.0
36,189
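A note on the line-count arithmetic in arch/arc/mm/cache.c above: the per-line loops fold the unaligned head of the region into sz and then round up with DIV_ROUND_UP, while the HS38 region-flush path instead adds (L1_CACHE_BYTES - 1) before programming the exclusive END register; the in-source comment states the two are equivalent. Below is a minimal user-space sketch of that equivalence, assuming a 64-byte line size purely for illustration (the real value is a kernel build-time constant), with macro names mirroring the kernel ones.

/*
 * Stand-alone sketch (not kernel code) of the cache-line rounding used above.
 * L1_CACHE_BYTES is assumed to be 64 here for illustration only.
 */
#include <stdio.h>

#define L1_CACHE_BYTES     64UL
#define CACHE_LINE_MASK    (~(L1_CACHE_BYTES - 1))
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* Per-line flavour: fold the leading gap into sz, align paddr, round up. */
static unsigned long lines_per_line_loop(unsigned long paddr, unsigned long sz)
{
    sz += paddr & ~CACHE_LINE_MASK;   /* leading gap before the line start */
    paddr &= CACHE_LINE_MASK;
    return DIV_ROUND_UP(sz, L1_CACHE_BYTES);
}

/* Region flavour: same gap handling, then (line - 1) slack for the tail. */
static unsigned long lines_region_op(unsigned long paddr, unsigned long sz)
{
    unsigned long start, end;

    sz += paddr & ~CACHE_LINE_MASK;
    paddr &= CACHE_LINE_MASK;
    sz += L1_CACHE_BYTES - 1;

    start = paddr;       /* value the START aux register would be given       */
    end = paddr + sz;    /* value the (exclusive) END register would be given */
    return (end - start) / L1_CACHE_BYTES;
}

int main(void)
{
    static const unsigned long cases[][2] = {
        { 0x1000, 64 },   /* exactly one line           */
        { 0x1003, 1 },    /* one byte, unaligned start  */
        { 0x103f, 2 },    /* straddles a line boundary  */
        { 0x2000, 200 },  /* several lines              */
    };
    unsigned int i;

    for (i = 0; i < sizeof(cases) / sizeof(cases[0]); i++)
        printf("paddr=%#lx sz=%3lu -> per-line=%lu region=%lu\n",
               cases[i][0], cases[i][1],
               lines_per_line_loop(cases[i][0], cases[i][1]),
               lines_region_op(cases[i][0], cases[i][1]));
    return 0;
}

Both helpers report the same count for aligned, unaligned and boundary-straddling regions, which is why the region form only needs the (line - 1) slack rather than an explicit round-up.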
/* * Copyright (c) 2015, 2018, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.security.ssl; import java.io.IOException; import java.nio.ByteBuffer; import sun.security.ssl.SSLHandshake.HandshakeMessage; /** * Pack of the HelloRequest handshake message. */ final class HelloRequest { static final SSLProducer kickstartProducer = new HelloRequestKickstartProducer(); static final SSLConsumer handshakeConsumer = new HelloRequestConsumer(); static final HandshakeProducer handshakeProducer = new HelloRequestProducer(); /** * The HelloRequest handshake message. * * [RFC 5246] The HelloRequest message MAY be sent by the server at any * time. HelloRequest is a simple notification that the client should * begin the negotiation process anew. * * struct { } HelloRequest; */ static final class HelloRequestMessage extends HandshakeMessage { HelloRequestMessage(HandshakeContext handshakeContext) { super(handshakeContext); } HelloRequestMessage(HandshakeContext handshakeContext, ByteBuffer m) throws IOException { super(handshakeContext); if (m.hasRemaining()) { throw handshakeContext.conContext.fatal(Alert.ILLEGAL_PARAMETER, "Error parsing HelloRequest message: not empty"); } } @Override public SSLHandshake handshakeType() { return SSLHandshake.HELLO_REQUEST; } @Override public int messageLength() { return 0; } @Override public void send(HandshakeOutStream s) throws IOException { // empty, nothing to send } @Override public String toString() { return "<empty>"; } } /** * The "HelloRequest" handshake message kick start producer. */ private static final class HelloRequestKickstartProducer implements SSLProducer { // Prevent instantiation of this class. private HelloRequestKickstartProducer() { // blank } @Override public byte[] produce(ConnectionContext context) throws IOException { // The producing happens in server side only. ServerHandshakeContext shc = (ServerHandshakeContext)context; HelloRequestMessage hrm = new HelloRequestMessage(shc); if (SSLLogger.isOn && SSLLogger.isOn("ssl,handshake")) { SSLLogger.fine("Produced HelloRequest handshake message", hrm); } // Output the handshake message. hrm.write(shc.handshakeOutput); shc.handshakeOutput.flush(); // update the context // What's the expected response? shc.handshakeConsumers.put( SSLHandshake.CLIENT_HELLO.id, SSLHandshake.CLIENT_HELLO); // The handshake message has been delivered. return null; } } /** * The "HelloRequest" handshake message producer. 
*/ private static final class HelloRequestProducer implements HandshakeProducer { // Prevent instantiation of this class. private HelloRequestProducer() { // blank } @Override public byte[] produce(ConnectionContext context, HandshakeMessage message) throws IOException { // The producing happens in server side only. ServerHandshakeContext shc = (ServerHandshakeContext)context; HelloRequestMessage hrm = new HelloRequestMessage(shc); if (SSLLogger.isOn && SSLLogger.isOn("ssl,handshake")) { SSLLogger.fine("Produced HelloRequest handshake message", hrm); } // Output the handshake message. hrm.write(shc.handshakeOutput); shc.handshakeOutput.flush(); // update the context // What's the expected response? shc.handshakeConsumers.put( SSLHandshake.CLIENT_HELLO.id, SSLHandshake.CLIENT_HELLO); // The handshake message has been delivered. return null; } } /** * The "HelloRequest" handshake message consumer. */ private static final class HelloRequestConsumer implements SSLConsumer { // Prevent instantiation of this class. private HelloRequestConsumer() { // blank } @Override public void consume(ConnectionContext context, ByteBuffer message) throws IOException { // The consuming happens in client side only. ClientHandshakeContext chc = (ClientHandshakeContext)context; // For TLS 1.2 and prior versions, the HelloRequest message MAY // be sent by the server at any time. Please don't clean up this // handshake consumer. HelloRequestMessage hrm = new HelloRequestMessage(chc, message); if (SSLLogger.isOn && SSLLogger.isOn("ssl,handshake")) { SSLLogger.fine( "Consuming HelloRequest handshake message", hrm); } if (!chc.kickstartMessageDelivered) { if (!chc.conContext.secureRenegotiation && !HandshakeContext.allowUnsafeRenegotiation) { throw chc.conContext.fatal(Alert.HANDSHAKE_FAILURE, "Unsafe renegotiation is not allowed"); } if (!chc.conContext.secureRenegotiation) { if (SSLLogger.isOn && SSLLogger.isOn("ssl,handshake")) { SSLLogger.warning( "Continue with insecure renegotiation"); } } // update the responders chc.handshakeProducers.put( SSLHandshake.CLIENT_HELLO.id, SSLHandshake.CLIENT_HELLO); // // produce response handshake message // SSLHandshake.CLIENT_HELLO.produce(context, hrm); } else { if (SSLLogger.isOn && SSLLogger.isOn("ssl,handshake")) { SSLLogger.fine( "Ingore HelloRequest, handshaking is in progress"); } } } } }
md-5/jdk10
src/java.base/share/classes/sun/security/ssl/HelloRequest.java
Java
gpl-2.0
7,660
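For reference, the HelloRequest message handled above is empty on the wire: per RFC 5246 it is just a handshake header with type hello_request(0) and a 24-bit length of zero, with the record-layer framing handled separately. A small C sketch of those four bytes:

/* The empty HelloRequest handshake message as it appears on the wire:
 * a one-byte HandshakeType of hello_request(0) followed by a uint24
 * length of zero. TLS record-layer framing is not shown. */
#include <stdio.h>

int main(void)
{
    const unsigned char hello_request[4] = {
        0x00,               /* HandshakeType hello_request(0) */
        0x00, 0x00, 0x00,   /* uint24 body length: empty      */
    };
    unsigned int i;

    for (i = 0; i < sizeof(hello_request); i++)
        printf("%02x ", hello_request[i]);
    printf("\n");
    return 0;
}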
SOURCES=\
	baseband.c \
	main.c

# set this to upload the built bbtool to /usr/local/bin on your iPhone for testing
IPHONE_IP=

CC=/usr/local/iphonedev/bin/arm-apple-darwin-gcc
CFLAGS=-isysroot /Developer/SDKs/iPhone.sdk

ifeq ($(CONFIGURATION),Debug)
CFLAGS+=-g -O0 -Wall -DDEBUG
else
CFLAGS+=-O7 -Wall -Werror
endif

LD=$(CC)
LDFLAGS=-isysroot /Developer/SDKs/iPhone.sdk -mmacosx-version-min=10.1
LIBS=-framework IOKit

EXECUTABLE_NAME=$(PRODUCT_NAME)

SOURCES_ABS=$(addprefix $(SRCROOT)/,$(SOURCES))
OBJECTS=\
	$(patsubst %.c,%.o,$(filter %.c,$(SOURCES)))
OBJECTS_ABS=$(addprefix $(CONFIGURATION_TEMP_DIR)/,$(OBJECTS))

PRODUCT_ABS=$(CONFIGURATION_TEMP_DIR)/bbtool

ifdef IPHONE_IP
all: install
else
all: $(PRODUCT_ABS)
endif

install: $(PRODUCT_ABS)
	scp -r $(PRODUCT_ABS) root@$(IPHONE_IP):/usr/local/bin/

$(PRODUCT_ABS): $(APP_ABS) $(OBJECTS_ABS)
	$(LD) $(LDFLAGS) $(LIBS) -o $(PRODUCT_ABS) $(OBJECTS_ABS)

$(CONFIGURATION_TEMP_DIR):
	mkdir -p $(CONFIGURATION_TEMP_DIR)

$(CONFIGURATION_TEMP_DIR)/%.o: $(SRCROOT)/%.m $(CONFIGURATION_TEMP_DIR)
	$(CC) $(CFLAGS) $(CPPFLAGS) -c $< -o $@

$(CONFIGURATION_TEMP_DIR)/%.o: $(SRCROOT)/%.c $(CONFIGURATION_TEMP_DIR)
	$(CC) $(CFLAGS) $(CPPFLAGS) -c $< -o $@

clean:
	rm -f $(OBJECTS_ABS)
	rm -f $(PRODUCT_ABS)
erichamisi/iphone-elite
bbtool/Makefile
Makefile
gpl-2.0
1,255
/* Simple DirectMedia Layer Copyright (C) 1997-2014 Sam Lantinga <[email protected]> This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. */ #include "../SDL_internal.h" #include "SDL_video.h" #include "SDL_blit.h" #ifdef __SSE__ /* *INDENT-OFF* */ #ifdef _MSC_VER #define SSE_BEGIN \ __m128 c128; \ c128.m128_u32[0] = color; \ c128.m128_u32[1] = color; \ c128.m128_u32[2] = color; \ c128.m128_u32[3] = color; #else #define SSE_BEGIN \ __m128 c128; \ DECLARE_ALIGNED(Uint32, cccc[4], 16); \ cccc[0] = color; \ cccc[1] = color; \ cccc[2] = color; \ cccc[3] = color; \ c128 = *(__m128 *)cccc; #endif #define SSE_WORK \ for (i = n / 64; i--;) { \ _mm_stream_ps((float *)(p+0), c128); \ _mm_stream_ps((float *)(p+16), c128); \ _mm_stream_ps((float *)(p+32), c128); \ _mm_stream_ps((float *)(p+48), c128); \ p += 64; \ } #define SSE_END #define DEFINE_SSE_FILLRECT(bpp, type) \ static void \ SDL_FillRect##bpp##SSE(Uint8 *pixels, int pitch, Uint32 color, int w, int h) \ { \ int i, n; \ Uint8 *p = NULL; \ \ SSE_BEGIN; \ \ while (h--) { \ n = w * bpp; \ p = pixels; \ \ if (n > 63) { \ int adjust = 16 - ((uintptr_t)p & 15); \ if (adjust < 16) { \ n -= adjust; \ adjust /= bpp; \ while (adjust--) { \ *((type *)p) = (type)color; \ p += bpp; \ } \ } \ SSE_WORK; \ } \ if (n & 63) { \ int remainder = (n & 63); \ remainder /= bpp; \ while (remainder--) { \ *((type *)p) = (type)color; \ p += bpp; \ } \ } \ pixels += pitch; \ } \ \ SSE_END; \ } static void SDL_FillRect1SSE(Uint8 *pixels, int pitch, Uint32 color, int w, int h) { int i, n; SSE_BEGIN; while (h--) { Uint8 *p = pixels; n = w; if (n > 63) { int adjust = 16 - ((uintptr_t)p & 15); if (adjust) { n -= adjust; SDL_memset(p, color, adjust); p += adjust; } SSE_WORK; } if (n & 63) { int remainder = (n & 63); SDL_memset(p, color, remainder); } pixels += pitch; } SSE_END; } /* DEFINE_SSE_FILLRECT(1, Uint8) */ DEFINE_SSE_FILLRECT(2, Uint16) DEFINE_SSE_FILLRECT(4, Uint32) /* *INDENT-ON* */ #endif /* __SSE__ */ static void SDL_FillRect1(Uint8 * pixels, int pitch, Uint32 color, int w, int h) { int n; Uint8 *p = NULL; while (h--) { n = w; p = pixels; if (n > 3) { switch ((uintptr_t) p & 3) { case 1: *p++ = (Uint8) color; --n; case 2: *p++ = (Uint8) color; --n; case 3: *p++ = (Uint8) color; --n; } SDL_memset4(p, color, (n >> 2)); } if (n & 3) { p += (n & ~3); switch (n & 3) { case 3: *p++ = (Uint8) color; case 2: *p++ = (Uint8) color; case 1: *p++ = (Uint8) color; } } pixels += pitch; } } static void SDL_FillRect2(Uint8 * pixels, int pitch, Uint32 color, int w, int h) { int n; Uint16 *p = NULL; while (h--) { n = w; p = (Uint16 *) pixels; if (n > 1) { if ((uintptr_t) p & 2) { *p++ = (Uint16) color; --n; } SDL_memset4(p, color, (n >> 1)); } if (n & 1) { p[n - 1] = (Uint16) color; } pixels += pitch; } } static void SDL_FillRect3(Uint8 * 
pixels, int pitch, Uint32 color, int w, int h) { Uint8 r = (Uint8) ((color >> 16) & 0xFF); Uint8 g = (Uint8) ((color >> 8) & 0xFF); Uint8 b = (Uint8) (color & 0xFF); int n; Uint8 *p = NULL; while (h--) { n = w; p = pixels; while (n--) { *p++ = r; *p++ = g; *p++ = b; } pixels += pitch; } } static void SDL_FillRect4(Uint8 * pixels, int pitch, Uint32 color, int w, int h) { while (h--) { SDL_memset4(pixels, color, w); pixels += pitch; } } /* * This function performs a fast fill of the given rectangle with 'color' */ int SDL_FillRect(SDL_Surface * dst, const SDL_Rect * rect, Uint32 color) { SDL_Rect clipped; Uint8 *pixels; if (!dst) { return SDL_SetError("Passed NULL destination surface"); } /* This function doesn't work on surfaces < 8 bpp */ if (dst->format->BitsPerPixel < 8) { return SDL_SetError("SDL_FillRect(): Unsupported surface format"); } /* If 'rect' == NULL, then fill the whole surface */ if (rect) { /* Perform clipping */ if (!SDL_IntersectRect(rect, &dst->clip_rect, &clipped)) { return 0; } rect = &clipped; } else { rect = &dst->clip_rect; } /* Perform software fill */ if (!dst->pixels) { return SDL_SetError("SDL_FillRect(): You must lock the surface"); } pixels = (Uint8 *) dst->pixels + rect->y * dst->pitch + rect->x * dst->format->BytesPerPixel; switch (dst->format->BytesPerPixel) { case 1: { color |= (color << 8); color |= (color << 16); #ifdef __SSE__ if (SDL_HasSSE()) { SDL_FillRect1SSE(pixels, dst->pitch, color, rect->w, rect->h); break; } #endif SDL_FillRect1(pixels, dst->pitch, color, rect->w, rect->h); break; } case 2: { color |= (color << 16); #ifdef __SSE__ if (SDL_HasSSE()) { SDL_FillRect2SSE(pixels, dst->pitch, color, rect->w, rect->h); break; } #endif SDL_FillRect2(pixels, dst->pitch, color, rect->w, rect->h); break; } case 3: /* 24-bit RGB is a slow path, at least for now. */ { SDL_FillRect3(pixels, dst->pitch, color, rect->w, rect->h); break; } case 4: { #ifdef __SSE__ if (SDL_HasSSE()) { SDL_FillRect4SSE(pixels, dst->pitch, color, rect->w, rect->h); break; } #endif SDL_FillRect4(pixels, dst->pitch, color, rect->w, rect->h); break; } } /* We're done! */ return 0; } int SDL_FillRects(SDL_Surface * dst, const SDL_Rect * rects, int count, Uint32 color) { int i; int status = 0; if (!rects) { return SDL_SetError("SDL_FillRects() passed NULL rects"); } for (i = 0; i < count; ++i) { status += SDL_FillRect(dst, &rects[i], color); } return status; } /* vi: set ts=4 sw=4 expandtab: */
mgerhardy/ufoai
src/libs/SDL/src/video/SDL_fillrect.c
C
gpl-2.0
7,970
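The SSE path in SDL_fillrect.c above peels off an unaligned head (the "adjust" count), streams 16-byte stores over the aligned middle, and finishes the tail per pixel. Below is a hedged sketch of that same split for one row of 32-bit pixels, using SSE2 integer streaming stores rather than SDL's float-typed _mm_stream_ps wrappers; it is x86-only and fill_row32 is a hypothetical helper, not part of SDL.

/* Sketch of the head / aligned-bulk / tail split used by the SSE fill above. */
#include <stdint.h>
#include <emmintrin.h>   /* SSE2: _mm_set1_epi32, _mm_stream_si128, _mm_sfence */

static void fill_row32(uint32_t *p, int w, uint32_t color)
{
    const __m128i c = _mm_set1_epi32((int)color);

    /* head: write single pixels until p reaches a 16-byte boundary */
    while (w > 0 && ((uintptr_t)p & 15) != 0) {
        *p++ = color;
        w--;
    }

    /* bulk: 4 pixels (16 bytes) per non-temporal store */
    for (; w >= 4; w -= 4, p += 4)
        _mm_stream_si128((__m128i *)p, c);

    /* tail: whatever is left after the last full 16-byte block */
    while (w-- > 0)
        *p++ = color;

    _mm_sfence();   /* order the streaming stores before later accesses */
}

int main(void)
{
    static uint32_t row[640];
    fill_row32(row, 640, 0xff00ff00u);
    return (int)(row[0] != 0xff00ff00u);
}

The non-temporal stores bypass the cache, which is the point of the SSE path for large fills; the per-pixel head and tail keep the bulk stores on aligned addresses, matching SDL's "adjust" and "remainder" handling.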
// // ZoneMinder Communicatoions Class Interface, $Date$, $Revision$ // Copyright (C) 2001-2008 Philip Coombes // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // #ifndef ZM_COMMS_H #define ZM_COMMS_H #include "zm_exception.h" #include <string.h> #include <unistd.h> #include <netdb.h> #include <errno.h> #include <sys/un.h> #include <set> #include <vector> #if defined(BSD) #include <sys/uio.h> #include <sys/socket.h> #include <netinet/in.h> #endif class CommsException : public Exception { public: CommsException( const std::string &message ) : Exception( message ) { } }; class CommsBase { protected: const int &mRd; const int &mWd; protected: CommsBase( int &rd, int &wd ) : mRd( rd ), mWd( wd ) { } virtual ~CommsBase() { } public: virtual bool close()=0; virtual bool isOpen() const=0; virtual bool isClosed() const=0; virtual bool setBlocking( bool blocking )=0; public: int getReadDesc() const { return( mRd ); } int getWriteDesc() const { return( mWd ); } int getMaxDesc() const { return( mRd>mWd?mRd:mWd ); } virtual int read( void *msg, int len ) { ssize_t nBytes = ::read( mRd, msg, len ); if ( nBytes < 0 ) Debug( 1, "Read of %d bytes max on rd %d failed: %s", len, mRd, strerror(errno) ); return( nBytes ); } virtual int write( const void *msg, int len ) { ssize_t nBytes = ::write( mWd, msg, len ); if ( nBytes < 0 ) Debug( 1, "Write of %d bytes on wd %d failed: %s", len, mWd, strerror(errno) ); return( nBytes ); } virtual int readV( const struct iovec *iov, int iovcnt ) { int nBytes = ::readv( mRd, iov, iovcnt ); if ( nBytes < 0 ) Debug( 1, "Readv of %d buffers max on rd %d failed: %s", iovcnt, mRd, strerror(errno) ); return( nBytes ); } virtual int writeV( const struct iovec *iov, int iovcnt ) { ssize_t nBytes = ::writev( mWd, iov, iovcnt ); if ( nBytes < 0 ) Debug( 1, "Writev of %d buffers on wd %d failed: %s", iovcnt, mWd, strerror(errno) ); return( nBytes ); } virtual int readV( int iovcnt, /* const void *msg1, int len1, */ ... ); virtual int writeV( int iovcnt, /* const void *msg1, int len1, */ ... 
); }; class Pipe : public CommsBase { protected: int mFd[2]; public: Pipe() : CommsBase( mFd[0], mFd[1] ) { mFd[0] = -1; mFd[1] = -1; } ~Pipe() { close(); } public: bool open(); bool close(); bool isOpen() const { return( mFd[0] != -1 && mFd[1] != -1 ); } int getReadDesc() const { return( mFd[0] ); } int getWriteDesc() const { return( mFd[1] ); } bool setBlocking( bool blocking ); }; class SockAddr { private: const struct sockaddr *mAddr; public: SockAddr( const struct sockaddr *addr ); virtual ~SockAddr() { } static SockAddr *newSockAddr( const struct sockaddr &addr, socklen_t len ); static SockAddr *newSockAddr( const SockAddr *addr ); int getDomain() const { return( mAddr?mAddr->sa_family:AF_UNSPEC ); } const struct sockaddr *getAddr() const { return( mAddr ); } virtual socklen_t getAddrSize() const=0; virtual struct sockaddr *getTempAddr() const=0; }; class SockAddrInet : public SockAddr { private: struct sockaddr_in mAddrIn; struct sockaddr_in mTempAddrIn; public: SockAddrInet(); SockAddrInet( const SockAddrInet &addr ) : SockAddr( (const struct sockaddr *)&mAddrIn ), mAddrIn( addr.mAddrIn ) { } SockAddrInet( const struct sockaddr_in *addr ) : SockAddr( (const struct sockaddr *)&mAddrIn ), mAddrIn( *addr ) { } bool resolve( const char *host, const char *serv, const char *proto ); bool resolve( const char *host, int port, const char *proto ); bool resolve( const char *serv, const char *proto ); bool resolve( int port, const char *proto ); socklen_t getAddrSize() const { return( sizeof(mAddrIn) ); } struct sockaddr *getTempAddr() const { return( (sockaddr *)&mTempAddrIn ); } public: static socklen_t addrSize() { return( sizeof(sockaddr_in) ); } }; class SockAddrUnix : public SockAddr { private: struct sockaddr_un mAddrUn; struct sockaddr_un mTempAddrUn; public: SockAddrUnix(); SockAddrUnix( const SockAddrUnix &addr ) : SockAddr( (const struct sockaddr *)&mAddrUn ), mAddrUn( addr.mAddrUn ) { } SockAddrUnix( const struct sockaddr_un *addr ) : SockAddr( (const struct sockaddr *)&mAddrUn ), mAddrUn( *addr ) { } bool resolve( const char *path, const char *proto ); socklen_t getAddrSize() const { return( sizeof(mAddrUn) ); } struct sockaddr *getTempAddr() const { return( (sockaddr *)&mTempAddrUn ); } public: static socklen_t addrSize() { return( sizeof(sockaddr_un) ); } }; class Socket : public CommsBase { protected: typedef enum { CLOSED, DISCONNECTED, LISTENING, CONNECTED } State; protected: int mSd; State mState; SockAddr *mLocalAddr; SockAddr *mRemoteAddr; protected: Socket() : CommsBase( mSd, mSd ), mSd( -1 ), mState( CLOSED ), mLocalAddr( 0 ), mRemoteAddr( 0 ) { } Socket( const Socket &socket, int newSd ) : CommsBase( mSd, mSd ), mSd( newSd ), mState( CONNECTED ), mLocalAddr( 0 ), mRemoteAddr( 0 ) { if ( socket.mLocalAddr ) mLocalAddr = SockAddr::newSockAddr( mLocalAddr ); if ( socket.mRemoteAddr ) mRemoteAddr = SockAddr::newSockAddr( mRemoteAddr ); } virtual ~Socket() { close(); delete mLocalAddr; delete mRemoteAddr; } public: bool isOpen() const { return( !isClosed() ); } bool isClosed() const { return( mState == CLOSED ); } bool isDisconnected() const { return( mState == DISCONNECTED ); } bool isConnected() const { return( mState == CONNECTED ); } virtual bool close(); protected: bool isListening() const { return( mState == LISTENING ); } protected: virtual bool socket(); virtual bool bind(); protected: virtual bool connect(); virtual bool listen(); virtual bool accept(); virtual bool accept( int & ); public: virtual int send( const void *msg, int len ) const { ssize_t nBytes = 
::send( mSd, msg, len, 0 ); if ( nBytes < 0 ) Debug( 1, "Send of %d bytes on sd %d failed: %s", len, mSd, strerror(errno) ); return( nBytes ); } virtual int recv( void *msg, int len ) const { ssize_t nBytes = ::recv( mSd, msg, len, 0 ); if ( nBytes < 0 ) Debug( 1, "Recv of %d bytes max on sd %d failed: %s", len, mSd, strerror(errno) ); return( nBytes ); } virtual int send( const std::string &msg ) const { ssize_t nBytes = ::send( mSd, msg.data(), msg.size(), 0 ); if ( nBytes < 0 ) Debug( 1, "Send of string '%s' (%zd bytes) on sd %d failed: %s", msg.c_str(), msg.size(), mSd, strerror(errno) ); return( nBytes ); } virtual int recv( std::string &msg ) const { char buffer[msg.capacity()]; int nBytes = 0; if ( (nBytes = ::recv( mSd, buffer, sizeof(buffer), 0 )) < 0 ) { Debug( 1, "Recv of %zd bytes max to string on sd %d failed: %s", sizeof(buffer), mSd, strerror(errno) ); return( nBytes ); } buffer[nBytes] = '\0'; msg = buffer; return( nBytes ); } virtual int recv( std::string &msg, size_t maxLen ) const { char buffer[maxLen]; int nBytes = 0; if ( (nBytes = ::recv( mSd, buffer, sizeof(buffer), 0 )) < 0 ) { Debug( 1, "Recv of %zd bytes max to string on sd %d failed: %s", maxLen, mSd, strerror(errno) ); return( nBytes ); } buffer[nBytes] = '\0'; msg = buffer; return( nBytes ); } virtual int bytesToRead() const; int getDesc() const { return( mSd ); } //virtual bool isOpen() const //{ //return( mSd != -1 ); //} virtual int getDomain() const=0; virtual int getType() const=0; virtual const char *getProtocol() const=0; const SockAddr *getLocalAddr() const { return( mLocalAddr ); } const SockAddr *getRemoteAddr() const { return( mRemoteAddr ); } virtual socklen_t getAddrSize() const=0; bool getBlocking( bool &blocking ); bool setBlocking( bool blocking ); bool getSendBufferSize( int & ) const; bool getRecvBufferSize( int & ) const; bool setSendBufferSize( int ); bool setRecvBufferSize( int ); bool getRouting( bool & ) const; bool setRouting( bool ); bool getNoDelay( bool & ) const; bool setNoDelay( bool ); }; class InetSocket : virtual public Socket { public: int getDomain() const { return( AF_INET ); } virtual socklen_t getAddrSize() const { return( SockAddrInet::addrSize() ); } protected: bool resolveLocal( const char *host, const char *serv, const char *proto ) { SockAddrInet *addr = new SockAddrInet; mLocalAddr = addr; return( addr->resolve( host, serv, proto ) ); } bool resolveLocal( const char *host, int port, const char *proto ) { SockAddrInet *addr = new SockAddrInet; mLocalAddr = addr; return( addr->resolve( host, port, proto ) ); } bool resolveLocal( const char *serv, const char *proto ) { SockAddrInet *addr = new SockAddrInet; mLocalAddr = addr; return( addr->resolve( serv, proto ) ); } bool resolveLocal( int port, const char *proto ) { SockAddrInet *addr = new SockAddrInet; mLocalAddr = addr; return( addr->resolve( port, proto ) ); } bool resolveRemote( const char *host, const char *serv, const char *proto ) { SockAddrInet *addr = new SockAddrInet; mRemoteAddr = addr; return( addr->resolve( host, serv, proto ) ); } bool resolveRemote( const char *host, int port, const char *proto ) { SockAddrInet *addr = new SockAddrInet; mRemoteAddr = addr; return( addr->resolve( host, port, proto ) ); } protected: bool bind( const SockAddrInet &addr ) { mLocalAddr = new SockAddrInet( addr ); return( Socket::bind() ); } bool bind( const char *host, const char *serv ) { if ( !resolveLocal( host, serv, getProtocol() ) ) return( false ); return( Socket::bind() ); } bool bind( const char *host, int port ) { if 
( !resolveLocal( host, port, getProtocol() ) ) return( false ); return( Socket::bind() ); } bool bind( const char *serv ) { if ( !resolveLocal( serv, getProtocol() ) ) return( false ); return( Socket::bind() ); } bool bind( int port ) { if ( !resolveLocal( port, getProtocol() ) ) return( false ); return( Socket::bind() ); } bool connect( const SockAddrInet &addr ) { mRemoteAddr = new SockAddrInet( addr ); return( Socket::connect() ); } bool connect( const char *host, const char *serv ) { if ( !resolveRemote( host, serv, getProtocol() ) ) return( false ); return( Socket::connect() ); } bool connect( const char *host, int port ) { if ( !resolveRemote( host, port, getProtocol() ) ) return( false ); return( Socket::connect() ); } }; class UnixSocket : virtual public Socket { public: int getDomain() const { return( AF_UNIX ); } virtual socklen_t getAddrSize() const { return( SockAddrUnix::addrSize() ); } protected: bool resolveLocal( const char *serv, const char *proto ) { SockAddrUnix *addr = new SockAddrUnix; mLocalAddr = addr; return( addr->resolve( serv, proto ) ); } bool resolveRemote( const char *path, const char *proto ) { SockAddrUnix *addr = new SockAddrUnix; mRemoteAddr = addr; return( addr->resolve( path, proto ) ); } protected: bool bind( const char *path ) { if ( !UnixSocket::resolveLocal( path, getProtocol() ) ) return( false ); return( Socket::bind() ); } bool connect( const char *path ) { if ( !UnixSocket::resolveRemote( path, getProtocol() ) ) return( false ); return( Socket::connect() ); } }; class UdpSocket : virtual public Socket { public: int getType() const { return( SOCK_DGRAM ); } const char *getProtocol() const { return( "udp" ); } public: virtual int sendto( const void *msg, int len, const SockAddr *addr=0 ) const { ssize_t nBytes = ::sendto( mSd, msg, len, 0, addr?addr->getAddr():NULL, addr?addr->getAddrSize():0 ); if ( nBytes < 0 ) Debug( 1, "Sendto of %d bytes on sd %d failed: %s", len, mSd, strerror(errno) ); return( nBytes ); } virtual int recvfrom( void *msg, int len, SockAddr *addr=0 ) const { ssize_t nBytes = 0; if ( addr ) { struct sockaddr sockAddr; socklen_t sockLen; nBytes = ::recvfrom( mSd, msg, len, 0, &sockAddr, &sockLen ); if ( nBytes < 0 ) { Debug( 1, "Recvfrom of %d bytes max on sd %d (with address) failed: %s", len, mSd, strerror(errno) ); } else if ( sockLen ) { addr = SockAddr::newSockAddr( sockAddr, sockLen ); } } else { nBytes = ::recvfrom( mSd, msg, len, 0, NULL, 0 ); if ( nBytes < 0 ) Debug( 1, "Recvfrom of %d bytes max on sd %d (no address) failed: %s", len, mSd, strerror(errno) ); } return( nBytes ); } }; class UdpInetSocket : virtual public UdpSocket, virtual public InetSocket { public: bool bind( const SockAddrInet &addr ) { return( InetSocket::bind( addr ) ); } bool bind( const char *host, const char *serv ) { return( InetSocket::bind( host, serv ) ); } bool bind( const char *host, int port ) { return( InetSocket::bind( host, port ) ); } bool bind( const char *serv ) { return( InetSocket::bind( serv ) ); } bool bind( int port ) { return( InetSocket::bind( port ) ); } bool connect( const SockAddrInet &addr ) { return( InetSocket::connect( addr ) ); } bool connect( const char *host, const char *serv ) { return( InetSocket::connect( host, serv ) ); } bool connect( const char *host, int port ) { return( InetSocket::connect( host, port ) ); } }; class UdpUnixSocket : virtual public UdpSocket, virtual public UnixSocket { public: bool bind( const char *path ) { return( UnixSocket::bind( path ) ); } bool connect( const char *path ) { return( 
UnixSocket::connect( path ) ); } }; class UdpInetClient : public UdpInetSocket { protected: bool bind( const SockAddrInet &addr ) { return( UdpInetSocket::bind( addr ) ); } bool bind( const char *host, const char *serv ) { return( UdpInetSocket::bind( host, serv ) ); } bool bind( const char *host, int port ) { return( UdpInetSocket::bind( host, port ) ); } bool bind( const char *serv ) { return( UdpInetSocket::bind( serv ) ); } bool bind( int port ) { return( UdpInetSocket::bind( port ) ); } public: bool connect( const SockAddrInet &addr ) { return( UdpInetSocket::connect( addr ) ); } bool connect( const char *host, const char *serv ) { return( UdpInetSocket::connect( host, serv ) ); } bool connect( const char *host, int port ) { return( UdpInetSocket::connect( host, port ) ); } }; class UdpUnixClient : public UdpUnixSocket { public: bool bind( const char *path ) { return( UdpUnixSocket::bind( path ) ); } public: bool connect( const char *path ) { return( UdpUnixSocket::connect( path) ); } }; class UdpInetServer : public UdpInetSocket { public: bool bind( const SockAddrInet &addr ) { return( UdpInetSocket::bind( addr ) ); } bool bind( const char *host, const char *serv ) { return( UdpInetSocket::bind( host, serv ) ); } bool bind( const char *host, int port ) { return( UdpInetSocket::bind( host, port ) ); } bool bind( const char *serv ) { return( UdpInetSocket::bind( serv ) ); } bool bind( int port ) { return( UdpInetSocket::bind( port ) ); } protected: bool connect( const char *host, const char *serv ) { return( UdpInetSocket::connect( host, serv ) ); } bool connect( const char *host, int port ) { return( UdpInetSocket::connect( host, port ) ); } }; class UdpUnixServer : public UdpUnixSocket { public: bool bind( const char *path ) { return( UdpUnixSocket::bind( path ) ); } protected: bool connect( const char *path ) { return( UdpUnixSocket::connect( path ) ); } }; class TcpSocket : virtual public Socket { public: TcpSocket() { } TcpSocket( const TcpSocket &socket, int newSd ) : Socket( socket, newSd ) { } public: int getType() const { return( SOCK_STREAM ); } const char *getProtocol() const { return( "tcp" ); } }; class TcpInetSocket : virtual public TcpSocket, virtual public InetSocket { public: TcpInetSocket() { } TcpInetSocket( const TcpInetSocket &socket, int newSd ) : TcpSocket( socket, newSd ) { } }; class TcpUnixSocket : virtual public TcpSocket, virtual public UnixSocket { public: TcpUnixSocket() { } TcpUnixSocket( const TcpUnixSocket &socket, int newSd ) : TcpSocket( socket, newSd ) { } }; class TcpInetClient : public TcpInetSocket { public: bool connect( const char *host, const char *serv ) { return( TcpInetSocket::connect( host, serv ) ); } bool connect( const char *host, int port ) { return( TcpInetSocket::connect( host, port ) ); } }; class TcpUnixClient : public TcpUnixSocket { public: bool connect( const char *path ) { return( TcpUnixSocket::connect( path) ); } }; class TcpInetServer : public TcpInetSocket { public: bool bind( const char *host, const char *serv ) { return( TcpInetSocket::bind( host, serv ) ); } bool bind( const char *host, int port ) { return( TcpInetSocket::bind( host, port ) ); } bool bind( const char *serv ) { return( TcpInetSocket::bind( serv ) ); } bool bind( int port ) { return( TcpInetSocket::bind( port ) ); } public: bool isListening() const { return( Socket::isListening() ); } bool listen(); bool accept(); bool accept( TcpInetSocket *&newSocket ); }; class TcpUnixServer : public TcpUnixSocket { public: bool bind( const char *path ) { return( 
TcpUnixSocket::bind( path ) ); } public: bool isListening() const { return( Socket::isListening() ); } bool listen(); bool accept(); bool accept( TcpUnixSocket *&newSocket ); }; class Select { public: typedef std::set<CommsBase *> CommsSet; typedef std::vector<CommsBase *> CommsList; protected: CommsSet mReaders; CommsSet mWriters; CommsList mReadable; CommsList mWriteable; bool mHasTimeout; struct timeval mTimeout; int mMaxFd; public: Select(); Select( struct timeval timeout ); Select( int timeout ); Select( double timeout ); void setTimeout( int timeout ); void setTimeout( double timeout ); void setTimeout( struct timeval timeout ); void clearTimeout(); void calcMaxFd(); bool addReader( CommsBase *comms ); bool deleteReader( CommsBase *comms ); void clearReaders(); bool addWriter( CommsBase *comms ); bool deleteWriter( CommsBase *comms ); void clearWriters(); int wait(); const CommsList &getReadable() const; const CommsList &getWriteable() const; }; #endif // ZM_COMMS_H
seebaer1976/ZoneMinder
src/zm_comms.h
C
gpl-2.0
21,080
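The Select helper at the end of zm_comms.h keeps sets of readers and writers, an optional timeval timeout and a cached maximum descriptor, which is the calling convention of POSIX select(2). Its implementation lives in zm_comms.cpp and is not shown here, so the following is only a sketch of the underlying pattern it presumably wraps; fd1 and fd2 are placeholder descriptors.

/* Plain select(2) usage the Select wrapper is modelled on: build an fd_set,
 * track the highest descriptor, and wait with an optional timeout. */
#include <stdio.h>
#include <sys/select.h>

static int wait_readable(int fd1, int fd2, int timeout_sec)
{
    fd_set rfds;
    struct timeval tv = { .tv_sec = timeout_sec, .tv_usec = 0 };
    int maxfd = fd1 > fd2 ? fd1 : fd2;
    int n;

    FD_ZERO(&rfds);
    FD_SET(fd1, &rfds);
    FD_SET(fd2, &rfds);

    /* >0: number of ready descriptors, 0: timeout, -1: error (errno set) */
    n = select(maxfd + 1, &rfds, NULL, NULL, timeout_sec >= 0 ? &tv : NULL);
    if (n > 0) {
        if (FD_ISSET(fd1, &rfds))
            printf("fd %d readable\n", fd1);
        if (FD_ISSET(fd2, &rfds))
            printf("fd %d readable\n", fd2);
    }
    return n;
}

int main(void)
{
    /* poll stdin and stdout once with a zero-second timeout */
    return wait_readable(0, 1, 0) < 0;
}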
/***************************************************************************
 *                               Movement.cs
 *                            -------------------
 *   begin                : May 1, 2002
 *   copyright            : (C) The RunUO Software Team
 *   email                : [email protected]
 *
 *   $Id$
 *
 ***************************************************************************/

/***************************************************************************
 *
 *   This program is free software; you can redistribute it and/or modify
 *   it under the terms of the GNU General Public License as published by
 *   the Free Software Foundation; either version 2 of the License, or
 *   (at your option) any later version.
 *
 ***************************************************************************/

using System;
using System.Collections;

namespace Server.Movement
{
	public static class Movement
	{
		private static IMovementImpl m_Impl;

		public static IMovementImpl Impl
		{
			get{ return m_Impl; }
			set{ m_Impl = value; }
		}

		public static bool CheckMovement( Mobile m, Direction d, out int newZ )
		{
			if ( m_Impl != null )
				return m_Impl.CheckMovement( m, d, out newZ );

			newZ = m.Z;
			return false;
		}

		public static bool CheckMovement( Mobile m, Map map, Point3D loc, Direction d, out int newZ )
		{
			if ( m_Impl != null )
				return m_Impl.CheckMovement( m, map, loc, d, out newZ );

			newZ = m.Z;
			return false;
		}

		public static void Offset( Direction d, ref int x, ref int y )
		{
			switch ( d & Direction.Mask )
			{
				case Direction.North: --y; break;
				case Direction.South: ++y; break;
				case Direction.West:  --x; break;
				case Direction.East:  ++x; break;
				case Direction.Right: ++x; --y; break;
				case Direction.Left:  --x; ++y; break;
				case Direction.Down:  ++x; ++y; break;
				case Direction.Up:    --x; --y; break;
			}
		}
	}

	public interface IMovementImpl
	{
		bool CheckMovement( Mobile m, Direction d, out int newZ );
		bool CheckMovement( Mobile m, Map map, Point3D loc, Direction d, out int newZ );
	}
}
Gr3enP0ison/runuo-ec
Server/Movement.cs
C#
gpl-2.0
2,183
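Movement.Offset above maps the eight compass directions to x/y deltas with a switch. The same mapping can be expressed as a lookup table; here is a small C sketch of that alternative, where the enum values are illustrative and make no claim to match RunUO's Direction encoding.

/* Table-driven equivalent of the Movement.Offset switch above. */
#include <stdio.h>

enum dir { NORTH, RIGHT, EAST, DOWN, SOUTH, LEFT, WEST, UP };

static const int offs[8][2] = {
    /*          dx   dy */
    [NORTH] = {  0, -1 },
    [RIGHT] = { +1, -1 },
    [EAST]  = { +1,  0 },
    [DOWN]  = { +1, +1 },
    [SOUTH] = {  0, +1 },
    [LEFT]  = { -1, +1 },
    [WEST]  = { -1,  0 },
    [UP]    = { -1, -1 },
};

static void offset(enum dir d, int *x, int *y)
{
    *x += offs[d][0];
    *y += offs[d][1];
}

int main(void)
{
    int x = 10, y = 10;
    offset(NORTH, &x, &y);   /* 10,10 -> 10,9 */
    offset(RIGHT, &x, &y);   /* 10,9  -> 11,8 */
    printf("%d,%d\n", x, y);
    return 0;
}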
#include <stdio.h>

class c{
public:
  long f;
};

static class sss: public c{
public:
  double m;
} sss;

#define _offsetof(st,f) ((char *)&((st *) 16)->f - (char *) 16)

int main (void)
{
  printf ("++Class with double inheriting class with long:\n");
  printf ("size=%d,align=%d\n", sizeof (sss), __alignof__ (sss));
  printf ("offset-long=%d,offset-double=%d,\nalign-long=%d,align-double=%d\n",
          _offsetof (class sss, f), _offsetof (class sss, m),
          __alignof__ (sss.f), __alignof__ (sss.m));
  return 0;
}
unofficial-opensource-apple/gcc_40
gcc/testsuite/consistency.vlad/layout/c-long-1-c-double.cpp
C++
gpl-2.0
529
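The layout test above measures offsets with a hand-rolled _offsetof macro (the "(st *) 16" trick) and GCC's __alignof__ extension. For comparison, here is a sketch of the same measurements in portable C11 using offsetof and alignof, on a plain struct standing in for the base/derived pair; for a non-virtual, single-inheritance hierarchy like this one the layout is typically identical, though that is an ABI detail rather than a guarantee.

/* Same measurements with standard facilities; struct sss here only mimics
 * the class hierarchy from the test above. */
#include <stdio.h>
#include <stddef.h>     /* offsetof */
#include <stdalign.h>   /* alignof  */

struct sss {
    long   f;   /* member of the base class  */
    double m;   /* member of the derived one */
};

int main(void)
{
    printf("size=%zu, align=%zu\n", sizeof(struct sss), alignof(struct sss));
    printf("offset-long=%zu, offset-double=%zu\n",
           offsetof(struct sss, f), offsetof(struct sss, m));
    printf("align-long=%zu, align-double=%zu\n",
           alignof(long), alignof(double));
    return 0;
}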
/* *------------------------------------------------------------------------------ * Copyright (C) 2006 University of Dundee. All rights reserved. * * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * *------------------------------------------------------------------------------ */ package org.openmicroscopy.shoola.agents.treeviewer.cmd; import java.sql.Timestamp; import java.util.Collections; import java.util.Comparator; import java.util.List; import org.openmicroscopy.shoola.agents.treeviewer.browser.Browser; import org.openmicroscopy.shoola.agents.util.browser.TreeImageDisplay; import org.openmicroscopy.shoola.agents.util.browser.TreeImageDisplayVisitor; import org.openmicroscopy.shoola.util.ui.UIUtilities; import omero.gateway.model.ImageData; /** * Command to sort items in tree. * * @author Jean-Marie Burel &nbsp;&nbsp;&nbsp;&nbsp; * <a href="mailto:[email protected]">[email protected]</a> * @version 2.2 * @since OME2.2 */ public class SortCmd implements ActionCmd { /** Reference to the model. */ private Browser model; /** The sorting type. One of the constants defined by this class. */ private int sortType; /** The list of sorted nodes. */ private List sortedNodes; /** The node whose children needed to be ordered. */ private TreeImageDisplay node; /** * Checks if the specified type is one of the constants defined by this * class. * * @param type The type to control. */ private void checkSortType(int type) { switch (type) { case Browser.SORT_NODES_BY_DATE: case Browser.SORT_NODES_BY_NAME: return; default: throw new IllegalArgumentException("Sort type not supported"); } } /** * Sorts the specified collection in the specified order. * * @param nodes The collection to sort. * @param ascending The order. * @return The sorted collection. */ private List sort(List nodes, final boolean ascending) { Comparator c; switch (sortType) { case Browser.SORT_NODES_BY_DATE: c = new Comparator() { public int compare(Object o1, Object o2) { ImageData i1 = (ImageData) (((TreeImageDisplay) o1).getUserObject()); ImageData i2 = (ImageData) (((TreeImageDisplay) o2).getUserObject()); Timestamp t1, t2; try { t1 = i1.getInserted(); } catch (Exception e) { t1 = null; } try { t2 = i2.getInserted(); } catch (Exception e) { t2 = null; } if (t1 == null) t1 = UIUtilities.getDefaultTimestamp(); if (t2 == null) t2 = UIUtilities.getDefaultTimestamp(); int r = t1.compareTo(t2); int v = 0; if (r < 0) v = -1; else if (r > 0) v = 1; if (ascending) return v; return -v; } }; break; case Browser.SORT_NODES_BY_NAME: default: c = new Comparator() { public int compare(Object o1, Object o2) { String s1 = o1.toString().toLowerCase(); String s2 = o2.toString().toLowerCase(); int result = s1.compareTo(s2); int v = 0; if (result < 0) v = -1; else if (result > 0) v = 1; if (ascending) return v; return -v; } }; } Collections.sort(nodes, c); return nodes; } /** * Creates a new instance. 
 * * @param model Reference to the model. Mustn't be <code>null</code>. * @param sortType One of the constants defined by this class. * @param node The node to sort. If <code>null</code>, * we visit the tree. */ public SortCmd(Browser model, int sortType, TreeImageDisplay node) { if (model == null) throw new IllegalArgumentException("No model."); this.model = model; checkSortType(sortType); this.sortType = sortType; this.node = node; } /** * Returns the list of sorted nodes or <code>null</code> if the command * has not been executed yet. * * @return See above. */ public List getSortedNodes() { return sortedNodes; } /** Implemented as specified by {@link ActionCmd}. */ public void execute() { SortVisitor visitor = new SortVisitor(model); if (node == null) model.accept(visitor, TreeImageDisplayVisitor.TREEIMAGE_NODE_ONLY); else node.accept(visitor, TreeImageDisplayVisitor.TREEIMAGE_NODE_ONLY); sortedNodes = sort(visitor.getNodes(), true); } }
simleo/openmicroscopy
components/insight/SRC/org/openmicroscopy/shoola/agents/treeviewer/cmd/SortCmd.java
Java
gpl-2.0
6,194
Open Atrium Styles
==================

This contains information on how to use some of the reusable styles and components within Open Atrium.

Pane Styles
===========

Well
----

This creates a style for the Bootstrap Well that can be used on any Panels Pane. The Well style can be seen here: http://twitter.github.com/bootstrap/components.html#misc

The Pane provides configuration options for a small, normal, and large Well. (This refers to the padding of the Well.)

Bootstrap pane style menus
--------------------------

* Tabs, Pills, Buttons, Split Button Dropdowns. Controls how each item in the menu is rendered. Split Button Dropdowns work with menus that have subitems.
* Stacked. Controls whether menu elements are shown horizontally or vertically.
* Inverse. Uses inverse Bootstrap styles when available, such as for Buttons.
* Wrapper. Specify another Pane style to wrap around the menu.

Components
==========

Modal
-----

This component allows for easier use of a Bootstrap Modal: http://twitter.github.com/bootstrap/javascript.html#modals

Bootstrap Modals allow for streamlined use of modals with a defined Header, Body, and Actions. The use case here lets you use a theme function to quickly put the skeleton of the Modal in place and define a link/button that will dynamically populate the Modal Body.

First, define the Modal with an optional ID and an optional Title (although you likely want to specify both). To define the modal (put the skeleton content on the page):

    theme('oa_styles_modal', array('modal_id' => 'oa-modal', 'title' => t('Modal Title')));

To use the Modal:

    l(t('Show Modal'), 'body/content.html', array('attributes' => array('data-toggle' => 'modal', 'data-target' => '#oa-modal')));

The link will pop up the Modal and replace the Modal body with the content of the href, body/content.html.
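The two snippets above work together; below is a minimal sketch of how they might be combined in a custom Drupal 7 block. The module name (`oa_styles_example`), the block delta `modal_demo`, and the `node/1` path used as remote content are hypothetical and only for illustration.

    /**
     * A minimal sketch, assuming Drupal 7: combines the modal skeleton and the
     * trigger link in one block. Hypothetical names: oa_styles_example (module),
     * block delta 'modal_demo', and 'node/1' as the remote content path.
     */
    function oa_styles_example_block_view($delta = '') {
      if ($delta != 'modal_demo') {
        return array();
      }

      // Put the empty Modal skeleton on the page, with an ID we can target.
      $output = theme('oa_styles_modal', array(
        'modal_id' => 'oa-modal',
        'title' => t('Modal Title'),
      ));

      // Render the trigger link; Bootstrap's data attributes load the href into
      // the Modal body instead of navigating to it.
      $output .= l(t('Show Modal'), 'node/1', array(
        'attributes' => array(
          'data-toggle' => 'modal',
          'data-target' => '#oa-modal',
        ),
      ));

      return array('subject' => t('Modal demo'), 'content' => $output);
    }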
SoloGabbo/demo
profiles/openatrium/modules/apps/oa_styles/README.md
Markdown
gpl-2.0
1,881
<?php class gdsrFront { var $g; var $gsr; var $loader_article_thumb = ""; var $loader_comment_thumb = ""; var $loader_article = ""; var $loader_comment = ""; var $loader_multis = ""; function gdsrFront($gdsr_main) { $this->g = $gdsr_main; } function taxonomy_multi_ratings($settings) { global $gdsr; $results = gdsrBlgDB::taxonomy_multi_ratings($settings["taxonomy"], $settings["terms"], $settings["multi_id"], $settings["term_property"]); $set = wp_gdget_multi_set($settings["multi_id"]); $new_results = $final = $ids = array(); $style = $gdsr->is_ie6 ? $settings["style_ie6"] : $settings["style"]; $size = $settings["size"]; $avg_style = $gdsr->is_ie6 ? $settings["average_style_ie6"] : $settings["average_style"]; $avg_size = $settings["average_size"]; foreach ($results as $row) { $ids[] = $row->mdid; $row->votes = $row->user_votes + $row->visitor_votes; $row->voters = $row->user_voters + $row->visitor_voters; $row->rating = $row->voters == 0 ? 0 : @number_format($row->votes / $row->voters, 1); $row->review = @number_format($row->review, 1); $row->bayesian = $gdsr->bayesian_estimate($row->voters, $row->rating, $set->stars); $row->rating_stars = GDSRRender::render_static_stars($style, $size, $set->stars, $row->rating); $row->bayesian_stars = GDSRRender::render_static_stars($style, $size, $set->stars, $row->bayesian); $row->review_stars = GDSRRender::render_static_stars($style, $size, $set->stars, $row->review); $new_results[] = $row; } $v_review = $v_rating = array(); $data = gdsrBlgDB::taxonomy_multi_ratings_data($settings["taxonomy"], $settings["terms"], $settings["multi_id"], $settings["term_property"]); foreach ($data as $row) { if ($row->source == "dta") { $single_vote = array(); $single_vote["votes"] = $row->user_voters + $row->visitor_voters; $single_vote["score"] = $row->user_votes + $row->visitor_votes; $single_vote["rating"] = $single_vote["votes"] > 0 ? $single_vote["score"] / $single_vote["votes"] : 0; $single_vote["rating"] = @number_format($single_vote["rating"], 1); $v_rating[$row->mdid][] = $single_vote; } else if ($row->source == "rvw") { $single_vote["votes"] = $row->user_voters; $single_vote["score"] = $row->user_votes; $single_vote["rating"] = $single_vote["votes"] > 0 ? 
$single_vote["score"] / $single_vote["votes"] : 0; $single_vote["rating"] = @number_format($single_vote["rating"], 1); $v_review[$row->mdid][] = $single_vote; } } foreach ($new_results as $row) { $row->rating_block = GDSRRenderT2::render_mrb( $settings["tpl_rating"], array("style" => $style, "allow_vote" => false, "votes" => $v_rating[$row->mdid], "post_id" => $row->term_id, "set" => $set, "height" => $size, "header_text" => "", "tags_css" => array("MUR_CSS_BLOCK" => "", "MUR_CSS_BUTTON" => ""), "avg_style" => $avg_style, "avg_size" => $avg_size, "star_factor" => $settings["star_factor"])); $row->review_block = GDSRRenderT2::render_rmb( $settings["tpl_review"], array("votes" => $v_review[$row->mdid], "post_id" => $row->term_id, "set" => $set, "avg_rating" => $row->review, "style" => $style, "size" => $size, "avg_style" => $avg_style, "avg_size" => $avg_size)); } if (count($new_results) == 1) return $new_results[0]; else return $new_results; } function get_taxonomy_multi_ratings($taxonomy = "category", $term = "", $multi_id = 0, $size = 20, $style = "oxygen") { global $gdsr; $results = gdsrBlgDB::taxonomy_multi_ratings($taxonomy, array($term), $multi_id); $set = wp_gdget_multi_set($multi_id); $new_results = array(); foreach ($results as $row) { $row->votes = $row->user_votes + $row->visitor_votes; $row->voters = $row->user_voters + $row->visitor_voters; $row->rating = $row->voters == 0 ? 0 : @number_format($row->votes / $row->voters, 1); $row->review = @number_format($row->review, 1); $row->bayesian = $gdsr->bayesian_estimate($row->voters, $row->rating, $set->stars); $row->rating_stars = GDSRRender::render_static_stars($style, $size, $set->stars, $row->rating); $row->bayesian_stars = GDSRRender::render_static_stars($style, $size, $set->stars, $row->bayesian); $row->review_stars = GDSRRender::render_static_stars($style, $size, $set->stars, $row->review); $new_results[] = $row; } if (count($new_results) == 1) return $new_results[0]; else return $new_results; } function init_google_rich_snippet() { $active = $this->g->o["google_rich_snippets_active"] == 1; if ($active && !is_admin() && (is_single() || is_page()) && !is_feed()) { global $post; $this->gsr = $this->render_google_rich_snippet($post); } } function insert_google_rich_snippet() { echo $this->gsr; } function render_google_rich_snippet($post, $settings = array()) { $hidden = false; $datasource = isset($settings["source"]) ? 
$settings["source"] : $this->g->o["google_rich_snippets_datasource"]; if (isset($settings["format"]) && is_object($this->g->rSnippets)) $this->g->rSnippets->snippet_type = $settings["format"]; switch ($datasource) { case "standard_rating": return $this->render_gsr_standard_rating($post, $hidden); break; case "standard_review": return $this->render_gsr_standard_review($post, $hidden); break; case "multis_rating": return $this->render_gsr_multis_rating($post, $hidden); break; case "multis_review": return $this->render_gsr_multis_review($post, $hidden); break; case "thumbs": return $this->render_gsr_thumbs($post, $hidden); break; } } function render_gsr_thumbs($post, $hidden = true) { $post_data = wp_gdget_post($post->ID); $votes = $post_data->user_recc_plus + $post_data->user_recc_minus + $post_data->visitor_recc_plus + $post_data->visitor_recc_minus; if (!is_object($this->g->rSnippets) || $votes == 0) return ""; $rating = $post_data->user_recc_plus - $post_data->user_recc_minus + $post_data->visitor_recc_plus - $post_data->visitor_recc_minus; $rating = number_format(100 * ($rating / $votes), 0); return $this->g->rSnippets->snippet_stars_percentage(array( "title" => $post->post_title, "rating" => $rating, "votes" => $votes, "hidden" => $hidden )); } function render_gsr_multis_rating($post, $hidden = true) { $data = gdsrBlgDB::get_rss_multi_data($post->ID); $votes = $data->total_votes_visitors + $data->total_votes_users; if (!is_object($this->g->rSnippets) || $votes == 0) return ""; $sum = $data->average_rating_users * $data->total_votes_users + $data->average_rating_visitors * $data->total_votes_visitors; $rating = number_format($sum / $votes, 1); $set = wp_gdget_multi_set($data->multi_id); return $this->g->rSnippets->snippet_stars_rating(array( "title" => $post->post_title, "rating" => $rating, "max_rating" => $set->stars, "votes" => $votes, "hidden" => $hidden )); } function render_gsr_multis_review($post, $hidden = true) { $data = gdsrBlgDB::get_rss_multi_data_review($post->ID); $review = is_object($data) ? $data->average_review : 0; if (!is_object($this->g->rSnippets) || $review <= 0) return ""; $set = wp_gdget_multi_set($data->multi_id); $author = get_userdata($post->post_author); return $this->g->rSnippets->snippet_stars_review(array( "title" => $post->post_title, "rating" => $review, "max_rating" => $set->stars, "review_date" => mysql2date("c", $post->post_date), "reviewer" => $author->display_name, "hidden" => $hidden )); } function render_gsr_standard_rating($post, $hidden = true) { $post_data = wp_gdget_post($post->ID); if (is_object($post_data)) { $voters = $post_data->visitor_voters + $post_data->user_voters; if (!is_object($this->g->rSnippets) || $voters == 0) return ""; $votes = $post_data->visitor_votes + $post_data->user_votes; $rating = number_format($votes / $voters, 1); return $this->g->rSnippets->snippet_stars_rating(array( "title" => $post->post_title, "rating" => $rating, "max_rating" => $this->g->o["stars"], "votes" => $voters, "hidden" => $hidden )); } } function render_gsr_standard_review($post, $hidden = true) { $post_data = wp_gdget_post($post->ID); $review = is_object($post_data) ? 
$post_data->review : 0; if (!is_object($this->g->rSnippets) || $review <= 0) return ""; $author = get_userdata($post->post_author); return $this->g->rSnippets->snippet_stars_review(array( "title" => $post->post_title, "rating" => $review, "max_rating" => $this->g->o["review_stars"], "review_date" => mysql2date("c", $post->post_date), "reviewer" => $author->display_name, "hidden" => $hidden )); } function render_article_rss() { global $post; $rd_post_id = intval($post->ID); $post_data = GDSRDatabase::get_post_data($rd_post_id); $template_id = $this->g->o["default_ssb_template"]; $votes = $score = 0; $stars = 10; if ($this->g->o["rss_datasource"] == "thumbs") { if ($rules_articles == "A" || $rules_articles == "N") { $votes = $post_data->user_recc_plus + $post_data->user_recc_minus + $post_data->visitor_recc_plus + $post_data->visitor_recc_minus; $score = $post_data->user_recc_plus - $post_data->user_recc_minus + $post_data->visitor_recc_plus - $post_data->visitor_recc_minus; } else if ($rules_articles == "V") { $votes = $post_data->visitor_recc_plus + $post_data->visitor_recc_minus; $score = $post_data->visitor_recc_plus - $post_data->visitor_recc_minus; } else { $votes = $post_data->user_recc_plus + $post_data->user_recc_minus; $score = $post_data->user_recc_plus - $post_data->user_recc_minus; } } else if ($this->g->o["rss_datasource"] == "standard") { $stars = $this->g->o["stars"]; if ($post_data->rules_articles == "A" || $post_data->rules_articles == "N") { $votes = $post_data->user_voters + $post_data->visitor_voters; $score = $post_data->user_votes + $post_data->visitor_votes; } else if ($post_data->rules_articles == "V") { $votes = $post_data->visitor_voters; $score = $post_data->visitor_votes; } else { $votes = $post_data->user_voters; $score = $post_data->user_votes; } } else { $data = gdsrBlgDB::get_rss_multi_data($post_id); if (count($row) > 0) { $set = wp_gdget_multi_set($data->multi_id); $stars = $set->stars; if ($post_data->rules_articles == "A" || $post_data->rules_articles == "N") { $sum = $data->average_rating_users * $data->total_votes_users + $data->average_rating_visitors * $data->total_votes_visitors; $votes = $data->total_votes_visitors + $data->total_votes_users; $score = number_format($votes == 0 ? 0 : $sum / $votes, 1); } else if ($post_data->rules_articles == "V") { $votes = $data->total_votes_visitors; $score = $data->average_rating_visitors; } else { $votes = $data->total_votes_users; $score = $data->average_rating_users; } } } $rating_block = GDSRRenderT2::render_ssb($template_id, array("post_id" => $rd_post_id, "votes" => $votes, "score" => $score, "unit_count" => $stars, "header_text" => $this->g->o["rss_header_text"], "type" => $this->g->o["rss_datasource"])); return $rating_block; } // rendering waiting animations function render_wait_article_thumb() { if ($this->g->o["wait_loader_artthumb"] != "") { $cls = 'loader '.$this->g->o["wait_loader_artthumb"].' thumb'; $div = '<div class="'.$cls.'" style="%s"></div>'; $this->loader_article_thumb = $div; } } function render_wait_comment_thumb() { if ($this->g->o["wait_loader_cmmthumb"] != "") { $cls = 'loader thumb '.$this->g->o["wait_loader_cmmthumb"]; $div = '<div class="'.$cls.'" style="%s"></div>'; $this->loader_comment_thumb = $div; } } function render_wait_article() { $cls = "loader ".$this->g->o["wait_loader_article"]." 
"; if ($this->g->o["wait_show_article"] == 1) $cls.= "width "; $cls.= $this->g->o["wait_class_article"]; $div = '<div class="'.$cls.'" style="height: '.$this->g->o["size"].'px">'; if ($this->g->o["wait_show_article"] == 0) { $padding = ""; if ($this->g->o["size"] > 20) $padding = ' style="padding-top: '.(($this->g->o["size"] / 2) - 10).'px"'; $div.= '<div class="loaderinner"'.$padding.'>'.__($this->g->o["wait_text_article"]).'</div>'; } $div.= '</div>'; $this->loader_article = $div; } function render_wait_multis() { $cls = "loader ".$this->g->o["wait_loader_multis"]." "; if ($this->g->o["wait_show_multis"] == 1) $cls.= "width "; $cls.= $this->g->o["wait_class_multis"]; $div = '<div class="'.$cls.'" style="height: '.$this->g->o["mur_size"].'px">'; if ($this->g->o["wait_show_multis"] == 0) { $padding = ""; if ($this->g->o["size"] > 20) $padding = ' style="padding-top: '.(($this->g->o["mur_size"] / 2) - 10).'px"'; $div.= '<div class="loaderinner"'.$padding.'>'.__($this->g->o["wait_text_multis"]).'</div>'; } $div.= '</div>'; $this->loader_multis = $div; } function render_wait_comment() { $cls = "loader ".$this->g->o["wait_loader_comment"]." "; if ($this->g->o["wait_show_comment"] == 1) $cls.= "width "; $cls.= $this->g->o["wait_class_comment"]; $div = '<div class="'.$cls.'" style="height: '.$this->g->o["cmm_size"].'px">'; if ($this->g->o["wait_show_comment"] == 0) { $padding = ""; if ($this->g->o["cmm_size"] > 20) $padding = ' style="padding-top: '.(($this->g->o["cmm_size"] / 2) - 10).'px"'; $div.= '<div class="loaderinner"'.$padding.'>'.__($this->g->o["wait_text_comment"]).'</div>'; } $div.= '</div>'; $this->loader_comment = $div; } // rendering waiting animations // comment integration rating /** * Renders comment review stars * * @param int $value initial rating value * @param bool $allow_vote render stars to support rendering or not to */ function comment_review($value = 0, $allow_vote = true, $override = array()) { $stars = $this->g->o["cmm_review_stars"]; $style = $override["style"] == "" ? $this->g->o["cmm_review_style"] : $override["style"]; $size = $override["size"] == 0 ? $this->g->o["cmm_review_size"] : $override["size"]; return GDSRRender::rating_stars_local($style, $size, $stars, $allow_vote, $value * $size); } function get_comment_integrate_standard_result($comment_id, $post_id) { if (!$this->g->is_cached_integration_std) { global $gdsr_cache_integation_std; $data = GDSRDBCache::get_integration($post_id); foreach ($data as $row) { $id = $row->comment_id; $gdsr_cache_integation_std->set($id, $row); } $this->g->is_cached_integration_std = true; } return intval(wp_gdget_integration_std($comment_id)); } /** * Renders result of comment integration of standard rating for specific comment * * @param int $comment_id initial rating value * @param string $stars_set set to use for rendering * @param int $stars_size set size to use for rendering * @param string $stars_set_ie6 set to use for rendering in ie6 */ function comment_integrate_standard_result($comment_id, $post_id, $stars_set = "oxygen", $stars_size = 20, $stars_set_ie6 = "oxygen_gif") { $value = $this->get_comment_integrate_standard_result($comment_id, $post_id); if ($value > 0 || $this->g->o["int_comment_std_zero"] == 1) { $style = $stars_set == "" ? $this->g->o["style"] : $stars_set; $style = $this->g->is_ie6 ? ($stars_set_ie6 == "" ? $this->g->o["style_ie6"] : $stars_set_ie6) : $style; return GDSRRender::render_static_stars($style, $stars_size == 0 ? 
$this->g->o["size"] : $stars_size, $this->g->o["stars"], $value); } else return ""; } /** * Renders comment integration of standard rating * * @param int $value initial rating value * @param string $stars_set set to use for rendering * @param int $stars_size set size to use for rendering * @param string $stars_set_ie6 set to use for rendering in ie6 */ function comment_integrate_standard_rating($value = 0, $stars_set = "oxygen", $stars_size = 20, $stars_set_ie6 = "oxygen_gif") { $style = $stars_set == "" ? $this->g->o["style"] : $stars_set; $style = $this->g->is_ie6 ? ($stars_set_ie6 == "" ? $this->g->o["style_ie6"] : $stars_set_ie6) : $style; $size = $stars_size == 0 ? $this->g->o["size"] : $stars_size; return GDSRRender::rating_stars_local($style, $size, $this->g->o["stars"], true, $value * $size, "gdsr_int", "rcmmpost"); } /** * Renders result of comment integration of multi rating for specific comment * * @param int $comment_id initial rating value * @param object $post_id post id * @param int $multi_set_id id of the multi rating set to use * @param int $template_id id of the template to use * @param string $stars_set set to use for rendering * @param int $stars_size set size to use for rendering * @param string $stars_set_ie6 set to use for rendering in ie6 * @param string $avg_stars_set set to use for rendering of average value * @param int $avg_stars_size set size to use for rendering of average value * @param string $avg_stars_set_ie6 set to use for rendering of average value in ie6 */ function comment_integrate_multi_result($comment_id, $post_id, $multi_set_id, $template_id, $stars_set = 'oxygen', $stars_size = 20, $stars_set_ie6 = 'oxygen_gif', $avg_stars_set = 'oxygen', $avg_stars_size = 20, $avg_stars_set_ie6 = 'oxygen_gif') { if (!$this->g->is_cached_integration_mur) { global $gdsr_cache_integation_mur; $data = GDSRDBCache::get_integration($post_id, 'multis'); foreach ($data as $row) { $id = $row->multi_id."_".$row->comment_id; $gdsr_cache_integation_mur->set($id, $row); } $this->g->is_cached_integration_mur = true; } $value = wp_gdget_integration_mur($comment_id, $multi_set_id); if (is_serialized($value) && !is_null($value)) { $value = unserialize($value); $set = gd_get_multi_set($multi_set_id); $weight_norm = array_sum($set->weight); $avg_rating = $i = 0; $votes = array(); foreach ($value as $md) { $single_vote = array(); $single_vote['votes'] = 1; $single_vote['score'] = $md; $single_vote['rating'] = $md; $avg_rating += ($md * $set->weight[$i]) / $weight_norm; $votes[] = $single_vote; $i++; } $avg_rating = @number_format($avg_rating, 1); if ($avg_rating > 0) { $style = $stars_set == '' ? $this->g->o['mur_style'] : $stars_set; $style = $this->g->is_ie6 ? ($stars_set_ie6 == "" ? $this->g->o['mur_style_ie6'] : $stars_set_ie6) : $style; return GDSRRenderT2::render_rmb($template_id, array('votes' => $votes, 'post_id' => $post_id, 'set' => $set, 'avg_rating' => $avg_rating, 'style' => $style, 'size' => $stars_size, 'avg_style' => $this->g->is_ie6 ? 
$avg_stars_set_ie6 : $avg_stars_set, 'avg_size' => $avg_stars_size)); } else { return ''; } } else return ''; } /** * Renders average result of comment integration of multi rating for specific comment * * @param int $comment_id initial rating value * @param object $post_id post id * @param int $multi_set_id id of the multi rating set to use * @param int $template_id id of the template to use * @param string $avg_stars_set set to use for rendering of average value * @param int $avg_stars_size set size to use for rendering of average value * @param string $avg_stars_set_ie6 set to use for rendering of average value in ie6 */ function comment_integrate_multi_result_average($comment_id, $post_id, $multi_set_id, $template_id, $avg_stars_set = "oxygen", $avg_stars_size = 20, $avg_stars_set_ie6 = "oxygen_gif") { $value = GDSRDBMulti::rating_from_comment($comment_id, $multi_set_id); if (is_serialized($value)) { $value = unserialize($value); $set = gd_get_multi_set($multi_set_id); $weight_norm = array_sum($set->weight); $avg_rating = $i = 0; foreach ($value as $md) { $avg_rating += ($md * $set->weight[$i]) / $weight_norm; $i++; } $avg_rating = @number_format($avg_rating, 1); if ($avg_rating > 0) { return GDSRRenderT2::render_mcr($template_id, array("post_id" => $post_id, "set" => $set, "avg_rating" => $avg_rating, "avg_style" => $this->g->is_ie6 ? $avg_stars_set_ie6 : $avg_stars_set, "avg_size" => $avg_stars_size)); } else return ""; } else return ""; } /** * Renders comment integration of multi rating * * @param int $value initial rating value * @param object $post_id post id * @param int $multi_set_id id of the multi rating set to use * @param int $template_id id of the template to use * @param string $stars_set set to use for rendering * @param int $stars_size set size to use for rendering * @param string $stars_set_ie6 set to use for rendering in ie6 */ function comment_integrate_multi_rating($value, $post_id, $multi_set_id, $template_id, $stars_set = "oxygen", $stars_size = 20, $stars_set_ie6 = "oxygen_gif") { if ($multi_set_id == 0) return ""; $set = gd_get_multi_set($multi_set_id); $votes = array(); for ($i = 0; $i < count($set->object); $i++) { $single_vote = array(); $single_vote["votes"] = 0; $single_vote["score"] = 0; $single_vote["rating"] = 0; $votes[] = $single_vote; } $style = $stars_set == "" ? $this->g->o["mur_style"] : $stars_set; $style = $this->g->is_ie6 ? ($stars_set_ie6 == "" ? $this->g->o["mur_style_ie6"] : $stars_set_ie6) : $style; return GDSRRenderT2::render_mri($template_id, array("post_id" => $post_id, "style" => $style, "set" => $set, "height" => $stars_size)); } // comment integration rating // comment rendering function rating_loader_elements_comment($post, $comment, $user, $override, $type) { $user_id = is_object($user) ? $user->ID : 0; switch ($type) { case "csr": return array( $type, $post->ID, $comment->comment_ID, $comment->user_id, $post->post_type == "page" ? "1" : "0", $post->post_author, $user_id, $override["tpl"], $override["read_only"], $override["size"], $this->g->g->find_stars_id($override["style"]), $this->g->g->find_stars_id($override["style_ie6"]) ); break; case "ctr": return array( $type, $post->ID, $comment->comment_ID, $comment->user_id, $post->post_type == "page" ? 
"1" : "0", $post->post_author, $user_id, $override["tpl"], $override["read_only"], $override["size"], $this->g->g->find_thumb_id($override["style"]), $this->g->g->find_thumb_id($override["style_ie6"]) ); break; } } function render_thumb_comment_actual($settings) { if ($this->g->o["comments_active"] != 1) return ""; $post_id = intval($settings[1]); $comment_id = intval($settings[2]); $comment_author = intval($settings[3]); $rd_is_page = intval($settings[4]); $post_author = intval($settings[5]); $user_id = intval($settings[6]); $override["tpl"] = intval($settings[7]); $override["read_only"] = intval($settings[8]); $override["size"] = intval($settings[9]); $override["style"] = $this->g->g->thumbs[$settings[10]]->folder; $override["style_ie6"] = $this->g->g->thumbs[$settings[11]]->folder; $dbg_allow = "F"; $already_voted = false; $allow_vote = $override["read_only"] == 0; $allow_vote = apply_filters("gdsr_allow_vote_thumb_comment", $allow_vote, $post_id); if ($this->g->is_ban && $this->g->o["ip_filtering"] == 1) { if ($this->g->o["ip_filtering_restrictive"] == 1) return ""; else $allow_vote = false; $dbg_allow = "B"; } $rd_unit_width = $override["size"]; $rd_unit_style = $this->g->is_ie6 ? $override["style_ie6"] : $override["style"]; $rd_post_id = intval($post_id); $rd_user_id = intval($user_id); $rd_comment_id = intval($comment_id); $post_data = wp_gdget_post($rd_post_id); if (!is_object($post_data)) { GDSRDatabase::add_default_vote($rd_post_id, $rd_is_page); $post_data = wp_gdget_post($rd_post_id); $this->g->c[$rd_post_id] = 1; } $rules_comments = $post_data->recc_rules_comments != "I" ? $post_data->recc_rules_comments : $this->g->get_post_rule_value($rd_post_id, "recc_rules_comments", "recc_default_voterules_comments"); if ($rules_comments == "H") return ""; $comment_data = wp_gdget_comment($rd_comment_id); if (count($comment_data) == 0) { GDSRDatabase::add_empty_comment($rd_comment_id, $rd_post_id); $comment_data = wp_gdget_comment($rd_comment_id); } if ($allow_vote) { if ($this->g->o["cmm_author_vote"] == 1 && $rd_user_id == $comment_author && $rd_user_id > 0) { $allow_vote = false; $dbg_allow = "A"; } } if ($allow_vote) { if (($rules_comments == "") || ($rules_comments == "A") || ($rules_comments == "U" && $rd_user_id > 0) || ($rules_comments == "V" && $rd_user_id == 0) ) $allow_vote = true; else { $allow_vote = false; $dbg_allow = "R_".$rules_comments; } } $already_voted = !wp_gdget_thumb_commentlog($rd_comment_id); if ($allow_vote) { $allow_vote = !$already_voted; if (!$allow_vote) $dbg_allow = "D"; } if ($allow_vote) { $allow_vote = gdsrFrontHelp::check_cookie($rd_comment_id, "cmmthumb"); if (!$allow_vote) $dbg_allow = "C"; } $votes = $score = $votes_plus = $votes_minus = 0; if ($rules_comments == "A" || $rules_comments == "N") { $votes = $comment_data->user_recc_plus + $comment_data->user_recc_minus + $comment_data->visitor_recc_plus + $comment_data->visitor_recc_minus; $score = $comment_data->user_recc_plus - $comment_data->user_recc_minus + $comment_data->visitor_recc_plus - $comment_data->visitor_recc_minus; $votes_plus = $comment_data->user_recc_plus + $comment_data->visitor_recc_plus; $votes_minus = $comment_data->user_recc_minus + $comment_data->visitor_recc_minus; } else if ($rules_comments == "V") { $votes = $comment_data->visitor_recc_plus + $comment_data->visitor_recc_minus; $score = $comment_data->visitor_recc_plus - $comment_data->visitor_recc_minus; $votes_plus = $comment_data->visitor_recc_plus; $votes_minus = $comment_data->visitor_recc_minus; } else { $votes = 
$comment_data->user_recc_plus + $comment_data->user_recc_minus; $score = $comment_data->user_recc_plus - $comment_data->user_recc_minus; $votes_plus = $comment_data->user_recc_plus; $votes_minus = $comment_data->user_recc_minus; } $debug = $rd_user_id == 0 ? "V" : "U"; $debug.= $rd_user_id == $comment_author ? "A" : "N"; $debug.= ":".$dbg_allow." [".STARRATING_VERSION."]"; $tags_css = array(); $tags_css["CMM_CSS_BLOCK"] = $this->g->o["cmm_class_block"]; $tags_css["CMM_CSS_HEADER"] = $this->g->o["srb_class_header"]; $tags_css["CMM_CSS_STARS"] = $this->g->o["cmm_class_stars"]; $tags_css["CMM_CSS_TEXT"] = $this->g->o["cmm_class_text"]; $template_id = $override["tpl"]; $rating_block = GDSRRenderT2::render_tcb($template_id, array("already_voted" => $already_voted, "comment_id" => $rd_comment_id, "votes" => $votes, "score" => $score, "votes_plus" => $votes_plus, "votes_minus" => $votes_minus, "style" => $rd_unit_style, "unit_width" => $rd_unit_width, "allow_vote" => $allow_vote, "user_id" => $rd_user_id, "tags_css" => $tags_css, "header_text" => $this->g->o["header_text"], "debug" => $debug, "wait_msg" => $this->loader_comment_thumb)); return $rating_block; } function render_thumb_comment($post, $comment, $user, $override = array()) { if ($this->g->is_bot && $this->g->o["cached_loading"] == 0 && $this->g->o["bot_message"] != "normal") return GDSRRender::render_locked_response($this->g->o["bot_message"]); $default_settings = array("style" => $this->g->o["thumb_cmm_style"], "style_ie6" => $this->g->o["thumb_cmm_style_ie6"], "size" => $this->g->o["thumb_cmm_size"], "tpl" => 0, "read_only" => 0); $override = shortcode_atts($default_settings, $override); if ($override["style"] == "") $override["style"] = $this->g->o["thumb_cmm_style"]; if ($override["style_ie6"] == "") $override["style_ie6"] = $this->g->o["thumb_cmm_style_ie6"]; if ($override["size"] == "") $override["size"] = $this->g->o["thumb_cmm_size"]; if ($override["tpl"] == 0) $override["tpl"] = $this->g->o["default_tcb_template"]; $elements = $this->rating_loader_elements_comment($post, $comment, $user, $override, "ctr"); if ($this->g->o["cached_loading"] == 1) return GDSRRender::rating_loader(join(".", $elements), $this->g->is_bot, "small"); else return $this->render_thumb_comment_actual($elements); } function render_comment_actual($settings) { if ($this->g->o["comments_active"] != 1) return ""; $post_id = intval($settings[1]); $comment_id = intval($settings[2]); $comment_author = intval($settings[3]); $rd_is_page = intval($settings[4]); $post_author = intval($settings[5]); $user_id = intval($settings[6]); $override["tpl"] = intval($settings[7]); $override["read_only"] = intval($settings[8]); $override["size"] = intval($settings[9]); $override["style"] = $this->g->g->stars[$settings[10]]->folder; $override["style_ie6"] = $this->g->g->stars[$settings[11]]->folder; $dbg_allow = "F"; $already_voted = false; $allow_vote = $override["read_only"] == 0; $allow_vote = apply_filters("gdsr_allow_vote_stars_comment", $allow_vote, $post_id); if ($this->g->is_ban && $this->g->o["ip_filtering"] == 1) { if ($this->g->o["ip_filtering_restrictive"] == 1) return ""; else $allow_vote = false; $dbg_allow = "B"; } $rd_unit_count = $this->g->o["cmm_stars"]; $rd_unit_width = $override["size"]; $rd_unit_style = $this->g->is_ie6 ? 
$override["style_ie6"] : $override["style"]; $rd_post_id = intval($post_id); $rd_user_id = intval($user_id); $rd_comment_id = intval($comment_id); $post_data = wp_gdget_post($rd_post_id); if (!is_object($post_data)) { GDSRDatabase::add_default_vote($rd_post_id, $rd_is_page); $post_data = wp_gdget_post($rd_post_id); $this->g->c[$rd_post_id] = 1; } $rules_comments = $post_data->rules_comments != "I" ? $post_data->rules_comments : $this->g->get_post_rule_value($rd_post_id, "rules_comments", "default_voterules_comments"); if ($rules_comments == "H") return ""; $comment_data = wp_gdget_comment($rd_comment_id); if (count($comment_data) == 0) { GDSRDatabase::add_empty_comment($rd_comment_id, $rd_post_id); $comment_data = wp_gdget_comment($rd_comment_id); } if ($allow_vote) { if ($this->g->o["cmm_author_vote"] == 1 && $rd_user_id == $comment_author && $rd_user_id > 0) { $allow_vote = false; $dbg_allow = "A"; } } if ($allow_vote) { if (($rules_comments == "") || ($rules_comments == "A") || ($rules_comments == "U" && $rd_user_id > 0) || ($rules_comments == "V" && $rd_user_id == 0) ) $allow_vote = true; else { $allow_vote = false; $dbg_allow = "R_".$rules_comments; } } $already_voted = !wp_gdget_commentlog($rd_comment_id); if ($allow_vote) { $allow_vote = !$already_voted; if (!$allow_vote) $dbg_allow = "D"; } if ($allow_vote) { $allow_vote = gdsrFrontHelp::check_cookie($rd_comment_id, "comment"); if (!$allow_vote) $dbg_allow = "C"; } $votes = 0; $score = 0; if ($rules_comments == "A" || $rules_comments == "N") { $votes = $comment_data->user_voters + $comment_data->visitor_voters; $score = $comment_data->user_votes + $comment_data->visitor_votes; } else if ($rules_comments == "V") { $votes = $comment_data->visitor_voters; $score = $comment_data->visitor_votes; } else { $votes = $comment_data->user_voters; $score = $comment_data->user_votes; } $debug = $rd_user_id == 0 ? "V" : "U"; $debug.= $rd_user_id == $comment_author ? "A" : "N"; $debug.= ":".$dbg_allow." 
[".STARRATING_VERSION."]"; $tags_css = array( "CMM_CSS_BLOCK" => $this->g->o["cmm_class_block"], "CMM_CSS_HEADER" => $this->g->o["srb_class_header"], "CMM_CSS_STARS" => $this->g->o["cmm_class_stars"], "CMM_CSS_TEXT" => $this->g->o["cmm_class_text"] ); $template_id = $override["tpl"]; $rating_block = GDSRRenderT2::render_crb($template_id, array("already_voted" => $already_voted, "cmm_id" => $rd_comment_id, "class" => "ratecmm", "type" => "c", "votes" => $votes, "score" => $score, "style" => $rd_unit_style, "unit_width" => $rd_unit_width, "unit_count" => $rd_unit_count, "allow_vote" => $allow_vote, "user_id" => $rd_user_id, "typecls" => "comment", "tags_css" => $tags_css, "header_text" => $this->g->o["cmm_header_text"], "debug" => $debug, "wait_msg" => $this->loader_comment)); return $rating_block; } function render_comment($post, $comment, $user, $override = array()) { if ($this->g->is_bot && $this->g->o["cached_loading"] == 0 && $this->g->o["bot_message"] != "normal") return GDSRRender::render_locked_response($this->g->o["bot_message"]); $default_settings = array("style" => $this->g->o["cmm_style"], "style_ie6" => $this->g->o["cmm_style_ie6"], "size" => $this->g->o["cmm_size"], "tpl" => 0, "read_only" => 0); $override = shortcode_atts($default_settings, $override); if ($override["style"] == "") $override["style"] = $this->g->o["cmm_style"]; if ($override["style_ie6"] == "") $override["style_ie6"] = $this->g->o["cmm_style_ie6"]; if ($override["size"] == "") $override["size"] = $this->g->o["cmm_size"]; if ($override["tpl"] == 0) $override["tpl"] = $this->g->o["default_crb_template"]; $elements = $this->rating_loader_elements_comment($post, $comment, $user, $override, "csr"); if ($this->g->o["cached_loading"] == 1) return GDSRRender::rating_loader(join(".", $elements), $this->g->is_bot, "small"); else return $this->render_comment_actual($elements); } // comment rendering // article rendering function rating_loader_elements_post($post, $user, $override, $type) { $user_id = is_object($user) ? $user->ID : 0; switch ($type) { case "amr": return array( $type, $post->ID, $post->post_type == "page" ? "1" : "0", $post->post_author, strtotime($post->post_date), $override["tpl"], $override["read_only"], $override["size"], $this->g->g->find_stars_id($override["style"]), $this->g->g->find_stars_id($override["style_ie6"]), $user_id, $override["id"], $override["average_size"], $this->g->g->find_stars_id($override["average_stars"]), $this->g->g->find_stars_id($override["average_stars_ie6"]) ); break; case "asr": return array( $type, $post->ID, $post->post_type == "page" ? "1" : "0", $post->post_author, strtotime($post->post_date), $override["tpl"], $override["read_only"], $override["size"], $this->g->g->find_stars_id($override["style"]), $this->g->g->find_stars_id($override["style_ie6"]), $user_id ); break; case "atr": return array( $type, $post->ID, $post->post_type == "page" ? 
"1" : "0", $post->post_author, strtotime($post->post_date), $override["tpl"], $override["read_only"], $override["size"], $this->g->g->find_thumb_id($override["style"]), $this->g->g->find_thumb_id($override["style_ie6"]), $user_id ); break; } } function render_thumb_article_actual($settings) { $rd_post_id = intval($settings[1]); $rd_is_page = intval($settings[2]); $post_author = intval($settings[3]); $post_date = intval($settings[4]); $rd_unit_width = $settings[7]; $override["tpl"] = intval($settings[5]); $override["read_only"] = intval($settings[6]); $override["style"] = $this->g->g->thumbs[$settings[8]]->folder; $override["style_ie6"] = $this->g->g->thumbs[$settings[9]]->folder; $rd_unit_style = $this->g->is_ie6 ? $override["style_ie6"] : $override["style"]; $rd_user_id = intval($settings[10]); $dbg_allow = "F"; $already_voted = false; $allow_vote = $override["read_only"] == 0; $allow_vote = apply_filters("gdsr_allow_vote_thumb_article", $allow_vote, $rd_post_id); if ($this->g->is_ban && $this->g->o["ip_filtering"] == 1) { if ($this->g->o["ip_filtering_restrictive"] == 1) return ""; else $allow_vote = false; $dbg_allow = "B"; } if ($override["read_only"] == 1) $dbg_allow = "RO"; $post_data = wp_gdget_post($rd_post_id); if (is_null($post_data) || !is_object($post_data)) { GDSRDatabase::add_default_vote($rd_post_id, $rd_is_page); $post_data = wp_gdget_post($rd_post_id); $this->g->c[$rd_post_id] = 1; } $rules_articles = $post_data->recc_rules_articles != "I" ? $post_data->recc_rules_articles : $this->g->get_post_rule_value($rd_post_id, "recc_rules_articles", "recc_default_voterules_articles"); if ($rules_articles == "H") return ""; if ($allow_vote) { if (($rules_articles == "") || ($rules_articles == "A") || ($rules_articles == "U" && $rd_user_id > 0) || ($rules_articles == "V" && $rd_user_id == 0) ) $allow_vote = true; else { $allow_vote = false; $dbg_allow = "R_".$rules_articles; } } if ($allow_vote) { if ($this->g->o["author_vote"] == 1 && $rd_user_id == $post_author) { $allow_vote = false; $dbg_allow = "A"; } } $remaining = 0; $deadline = ''; $expiry_type = 'N'; if ($allow_vote && ($post_data->expiry_type == 'D' || $post_data->expiry_type == 'T' || $post_data->expiry_type == 'I')) { $expiry_type = $post_data->expiry_type != 'I' ? $post_data->expiry_type : $this->g->get_post_rule_value($rd_post_id, "expiry_type", "default_timer_type"); $expiry_value = $post_data->expiry_type != 'I' ? 
$post_data->expiry_value : $this->g->get_post_rule_value($rd_post_id, "expiry_value", "default_timer_value"); switch($expiry_type) { case "D": $remaining = gdsrFrontHelp::expiration_date($expiry_value); $deadline = $expiry_value; break; case "T": $remaining = gdsrFrontHelp::expiration_countdown($post_date, $expiry_value); $deadline = gdsrFrontHelp::calculate_deadline($remaining); break; } if ($remaining < 1) { gdsrBlgDB::lock_post($rd_post_id); $allow_vote = false; $dbg_allow = "T"; } } $already_voted = !wp_gdget_thumb_postlog($rd_post_id); if ($allow_vote) { $allow_vote = !$already_voted; if (!$allow_vote) $dbg_allow = "D"; } if ($allow_vote) { $allow_vote = gdsrFrontHelp::check_cookie($rd_post_id, "artthumb"); if (!$allow_vote) $dbg_allow = "C"; } $votes = $score = $votes_plus = $votes_minus = 0; if ($rules_articles == "A" || $rules_articles == "N") { $votes = $post_data->user_recc_plus + $post_data->user_recc_minus + $post_data->visitor_recc_plus + $post_data->visitor_recc_minus; $score = $post_data->user_recc_plus - $post_data->user_recc_minus + $post_data->visitor_recc_plus - $post_data->visitor_recc_minus; $votes_plus = $post_data->user_recc_plus + $post_data->visitor_recc_plus; $votes_minus = $post_data->user_recc_minus + $post_data->visitor_recc_minus; } else if ($rules_articles == "V") { $votes = $post_data->visitor_recc_plus + $post_data->visitor_recc_minus; $score = $post_data->visitor_recc_plus - $post_data->visitor_recc_minus; $votes_plus = $post_data->visitor_recc_plus; $votes_minus = $post_data->visitor_recc_minus; } else { $votes = $post_data->user_recc_plus + $post_data->user_recc_minus; $score = $post_data->user_recc_plus - $post_data->user_recc_minus; $votes_plus = $post_data->user_recc_plus; $votes_minus = $post_data->user_recc_minus; } $debug = $rd_user_id == 0 ? "V" : "U"; $debug.= $rd_user_id == $post_author ? "A" : "N"; $debug.= ":".$dbg_allow." 
[".STARRATING_VERSION."]"; $tags_css = array( "CSS_BLOCK" => $this->g->o["srb_class_block"], "CSS_HEADER" => $this->g->o["srb_class_header"], "CSS_STARS" => $this->g->o["srb_class_stars"], "CSS_TEXT" => $this->g->o["srb_class_text"] ); $template_id = $override["tpl"]; $rating_block = GDSRRenderT2::render_tab($template_id, array("already_voted" => $already_voted, "post_id" => $rd_post_id, "votes" => $votes, "score" => $score, "votes_plus" => $votes_plus, "votes_minus" => $votes_minus, "style" => $rd_unit_style, "unit_width" => $rd_unit_width, "allow_vote" => $allow_vote, "user_id" => $rd_user_id, "tags_css" => $tags_css, "header_text" => $this->g->o["thumb_header_text"], "debug" => $debug, "wait_msg" => $this->loader_article_thumb, "time_restirctions" => $expiry_type, "time_remaining" => $remaining, "time_date" => $deadline)); return $rating_block; } function render_thumb_article($post, $user, $override = array()) { if (is_feed()) return ""; if ($this->g->is_bot && $this->g->o["cached_loading"] == 0 && $this->g->o["bot_message"] != "normal") return GDSRRender::render_locked_response($this->g->o["bot_message"]); $default_settings = array("style" => $this->g->o["thumb_style"], "style_ie6" => $this->g->o["thumb_style_ie6"], "size" => $this->g->o["thumb_size"], "tpl" => 0, "read_only" => 0); $override = shortcode_atts($default_settings, $override); if ($override["style"] == "") $override["style"] = $this->g->o["thumb_style"]; if ($override["style_ie6"] == "") $override["style_ie6"] = $this->g->o["thumb_style_ie6"]; if ($override["size"] == "") $override["size"] = $this->g->o["thumb_size"]; if ($override["tpl"] == 0) $override["tpl"] = $this->g->o["default_tab_template"]; $elements = $this->rating_loader_elements_post($post, $user, $override, "atr"); if ($this->g->o["cached_loading"] == 1) return GDSRRender::rating_loader(join(".", $elements), $this->g->is_bot, "small"); else return $this->render_thumb_article_actual($elements); } function render_article_actual($settings) { $rd_post_id = intval($settings[1]); $rd_is_page = intval($settings[2]); $post_author = intval($settings[3]); $post_date = intval($settings[4]); $override["tpl"] = intval($settings[5]); $override["read_only"] = intval($settings[6]); $override["size"] = intval($settings[7]); $override["style"] = $this->g->g->stars[$settings[8]]->folder; $override["style_ie6"] = $this->g->g->stars[$settings[9]]->folder; $rd_user_id = intval($settings[10]); $dbg_allow = "F"; $already_voted = false; $allow_vote = $override["read_only"] == 0; $allow_vote = apply_filters("gdsr_allow_vote_stars_article", $allow_vote, $rd_post_id); if ($this->g->override_readonly_standard) { $allow_vote = false; $dbg_allow = "RTO"; } if ($this->g->is_ban && $this->g->o["ip_filtering"] == 1) { if ($this->g->o["ip_filtering_restrictive"] == 1) return ""; else $allow_vote = false; $dbg_allow = "B"; } if ($override["read_only"] == 1) $dbg_allow = "RO"; $rd_unit_count = $this->g->o["stars"]; $rd_unit_width = $override["size"]; $rd_unit_style = $this->g->is_ie6 ? $override["style_ie6"] : $override["style"]; $post_data = wp_gdget_post($rd_post_id); if (!is_object($post_data)) { GDSRDatabase::add_default_vote($rd_post_id, $rd_is_page); $post_data = wp_gdget_post($rd_post_id); $this->g->c[$rd_post_id] = 1; } $rules_articles = $post_data->rules_articles != "I" ? 
$post_data->rules_articles : $this->g->get_post_rule_value($rd_post_id, "rules_articles", "default_voterules_articles"); if ($rules_articles == "H") return ""; if ($allow_vote) { if (($rules_articles == "") || ($rules_articles == "A") || ($rules_articles == "U" && $rd_user_id > 0) || ($rules_articles == "V" && $rd_user_id == 0) ) $allow_vote = true; else { $allow_vote = false; $dbg_allow = "R_".$rules_articles; } } if ($allow_vote) { if ($this->g->o["author_vote"] == 1 && $rd_user_id == $post_author) { $allow_vote = false; $dbg_allow = "A"; } } $remaining = 0; $deadline = ''; $expiry_type = 'N'; if ($allow_vote && ($post_data->expiry_type == 'D' || $post_data->expiry_type == 'T' || $post_data->expiry_type == 'I')) { $expiry_type = $post_data->expiry_type != 'I' ? $post_data->expiry_type : $this->g->get_post_rule_value($rd_post_id, "expiry_type", "default_timer_type"); $expiry_value = $post_data->expiry_type != 'I' ? $post_data->expiry_value : $this->g->get_post_rule_value($rd_post_id, "expiry_value", "default_timer_value"); switch($expiry_type) { case "D": $remaining = gdsrFrontHelp::expiration_date($expiry_value); $deadline = $expiry_value; break; case "T": $remaining = gdsrFrontHelp::expiration_countdown($post_date, $expiry_value); $deadline = gdsrFrontHelp::calculate_deadline($remaining); break; } if ($remaining < 1) { gdsrBlgDB::lock_post($rd_post_id); $allow_vote = false; $dbg_allow = "T"; } } $already_voted = !wp_gdget_postlog($rd_post_id); if ($allow_vote) { $allow_vote = !$already_voted; if (!$allow_vote) $dbg_allow = "D"; } if ($allow_vote) { $allow_vote = gdsrFrontHelp::check_cookie($rd_post_id); if (!$allow_vote) $dbg_allow = "C"; } $votes = $score = 0; if ($rules_articles == "A" || $rules_articles == "N") { $votes = $post_data->user_voters + $post_data->visitor_voters; $score = $post_data->user_votes + $post_data->visitor_votes; } else if ($rules_articles == "V") { $votes = $post_data->visitor_voters; $score = $post_data->visitor_votes; } else { $votes = $post_data->user_voters; $score = $post_data->user_votes; } $debug = $rd_user_id == 0 ? "V" : "U"; $debug.= $rd_user_id == $post_author ? "A" : "N"; $debug.= ":".$dbg_allow." 
[".STARRATING_VERSION."]"; $tags_css = array( "CSS_BLOCK" => $this->g->o["srb_class_block"], "CSS_HEADER" => $this->g->o["srb_class_header"], "CSS_STARS" => $this->g->o["srb_class_stars"], "CSS_TEXT" => $this->g->o["srb_class_text"] ); $template_id = $override["tpl"]; $rating_block = GDSRRenderT2::render_srb($template_id, array("already_voted" => $already_voted, "post_id" => $rd_post_id, "class" => "ratepost", "type" => "a", "votes" => $votes, "score" => $score, "style" => $rd_unit_style, "unit_width" => $rd_unit_width, "unit_count" => $rd_unit_count, "allow_vote" => $allow_vote, "user_id" => $rd_user_id, "typecls" => "article", "tags_css" => $tags_css, "header_text" => $this->g->o["header_text"], "debug" => $debug, "wait_msg" => $this->loader_article, "time_restirctions" => $expiry_type, "time_remaining" => $remaining, "time_date" => $deadline)); return $rating_block; } function render_article($post, $user, $override = array()) { if (is_feed()) return ""; if ($this->g->is_bot && $this->g->o["cached_loading"] == 0 && $this->g->o["bot_message"] != "normal") return GDSRRender::render_locked_response($this->g->o["bot_message"]); $default_settings = array("style" => $this->g->o["style"], "style_ie6" => $this->g->o["style_ie6"], "size" => $this->g->o["size"], "tpl" => 0, "read_only" => 0); $override = shortcode_atts($default_settings, $override); if ($override["style"] == "") $override["style"] = $this->g->o["style"]; if ($override["style_ie6"] == "") $override["style_ie6"] = $this->g->o["style_ie6"]; if ($override["size"] == "") $override["size"] = $this->g->o["size"]; if ($override["tpl"] == 0) $override["tpl"] = $this->g->o["default_srb_template"]; $elements = $this->rating_loader_elements_post($post, $user, $override, "asr"); if ($this->g->o["cached_loading"] == 1) return GDSRRender::rating_loader(join(".", $elements), $this->g->is_bot, "small"); else return $this->render_article_actual($elements); } function render_multi_rating_actual($settings) { if ($this->g->is_bot && $this->g->o["bot_message"] != "normal") return GDSRRender::render_locked_response($this->g->o["bot_message"]); $rd_post_id = intval($settings[1]); $rd_is_page = intval($settings[2]); $post_author = intval($settings[3]); $post_date = intval($settings[4]); $override["id"] = intval($settings[11]); $override["tpl"] = intval($settings[5]); $override["read_only"] = intval($settings[6]); $override["size"] = intval($settings[7]); $override["style"] = $this->g->g->stars[$settings[8]]->folder; $override["style_ie6"] = $this->g->g->stars[$settings[9]]->folder; $rd_user_id = intval($settings[10]); $override["average_size"] = intval($settings[12]); $override["average_stars"] = $this->g->g->stars[$settings[13]]->folder; $override["average_stars_ie6"] = $this->g->g->stars[$settings[14]]->folder; $set = gd_get_multi_set($override["id"]); if ($set == null) return ""; $rd_unit_width = $override["size"]; $rd_unit_style = $this->g->is_ie6 ? $override["style_ie6"] : $override["style"]; $rd_unit_width_avg = $override["average_size"]; $rd_unit_style_avg = $this->g->is_ie6 ? 
$override["average_stars_ie6"] : $override["average_stars"]; $dbg_allow = "F"; $already_voted = false; $allow_vote = $override["read_only"] == 0; $allow_vote = apply_filters("gdsr_allow_vote_stars_article", $allow_vote, $rd_post_id, $override["id"]); if ($this->g->override_readonly_multis) { $allow_vote = false; $dbg_allow = "RTO"; } if ($this->g->is_ban && $this->g->o["ip_filtering"] == 1) { if ($this->g->o["ip_filtering_restrictive"] == 1) return ""; else $allow_vote = false; $dbg_allow = "B"; } if ($override["read_only"] == 1) $dbg_allow = "RO"; $remaining = 0; $deadline = ""; $post_data = wp_gdget_post($rd_post_id); if (!is_object($post_data)) { GDSRDatabase::add_default_vote($rd_post_id, $rd_is_page); $post_data = wp_gdget_post($rd_post_id); $this->g->c[$rd_post_id] = 1; } $rules_articles = $post_data->rules_articles != "I" ? $post_data->rules_articles : $this->g->get_post_rule_value($rd_post_id, "rules_articles", "default_voterules_articles"); if ($rules_articles == "H") return ""; if ($allow_vote) { if ($this->g->o["author_vote"] == 1 && $rd_user_id == $post_author) { $allow_vote = false; $dbg_allow = "A"; } } if ($allow_vote) { if (($rules_articles == "") || ($rules_articles == "A") || ($rules_articles == "U" && $rd_user_id > 0) || ($rules_articles == "V" && $rd_user_id == 0) ) $allow_vote = true; else { $allow_vote = false; $dbg_allow = "R_".$rules_articles; } } $remaining = 0; $deadline = ''; $expiry_type = 'N'; if ($allow_vote && ($post_data->expiry_type == 'D' || $post_data->expiry_type == 'T' || $post_data->expiry_type == 'I')) { $expiry_type = $post_data->expiry_type != 'I' ? $post_data->expiry_type : $this->g->get_post_rule_value($rd_post_id, "expiry_type", "default_timer_type"); $expiry_value = $post_data->expiry_type != 'I' ? $post_data->expiry_value : $this->g->get_post_rule_value($rd_post_id, "expiry_value", "default_timer_value"); switch($expiry_type) { case "D": $remaining = gdsrFrontHelp::expiration_date($expiry_value); $deadline = $expiry_value; break; case "T": $remaining = gdsrFrontHelp::expiration_countdown($post_date, $expiry_value); $deadline = gdsrFrontHelp::calculate_deadline($remaining); break; } if ($remaining < 1) { gdsrBlgDB::lock_post($rd_post_id); $allow_vote = false; $dbg_allow = "T"; } } $already_voted = !GDSRDBMulti::check_vote($rd_post_id, $rd_user_id, $set->multi_id, 'multis', $_SERVER["REMOTE_ADDR"], $this->g->o["logged"] != 1, $this->g->o["mur_allow_mixed_ip_votes"] == 1); if ($allow_vote) { $allow_vote = !$already_voted; if (!$allow_vote) $dbg_allow = "D"; } if ($allow_vote) { $allow_vote = gdsrFrontHelp::check_cookie($rd_post_id."#".$set->multi_id, "multis"); if (!$allow_vote) $dbg_allow = "C"; } $multi_record_id = GDSRDBMulti::get_vote($rd_post_id, $set->multi_id, count($set->object)); $multi_data = GDSRDBMulti::get_values($multi_record_id); $votes = array(); foreach ($multi_data as $md) { $single_vote = array(); $single_vote["votes"] = 0; $single_vote["score"] = 0; if ($rules_articles == "A" || $rules_articles == "N") { $single_vote["votes"] = $md->user_voters + $md->visitor_voters; $single_vote["score"] = $md->user_votes + $md->visitor_votes; } else if ($rules_articles == "V") { $single_vote["votes"] = $md->visitor_voters; $single_vote["score"] = $md->visitor_votes; } else { $single_vote["votes"] = $md->user_voters; $single_vote["score"] = $md->user_votes; } $rating = $single_vote["votes"] > 0 ? 
$single_vote["score"] / $single_vote["votes"] : 0; if ($rating > $set->stars) $rating = $set->stars; $single_vote["rating"] = @number_format($rating, 1); $votes[] = $single_vote; } $debug = $rd_user_id == 0 ? "V" : "U"; $debug.= $rd_user_id == $post_author ? "A" : "N"; $debug.= ":".$dbg_allow." [".STARRATING_VERSION."]"; $tags_css = array( "MUR_CSS_BUTTON" => $this->g->o["mur_class_button"], "MUR_CSS_BLOCK" => $this->g->o["mur_class_block"], "MUR_CSS_HEADER" => $this->g->o["mur_class_header"], "MUR_CSS_STARS" => $this->g->o["mur_class_stars"], "MUR_CSS_TEXT" => $this->g->o["mur_class_text"] ); $mur_button = $this->g->o["mur_button_active"] == 1; if (!$allow_vote) $mur_button = false; $template_id = $override["tpl"]; return GDSRRenderT2::render_mrb($template_id, array("already_voted" => $already_voted, "style" => $rd_unit_style, "allow_vote" => $allow_vote, "votes" => $votes, "post_id" => $rd_post_id, "set" => $set, "height" => $rd_unit_width, "header_text" => $this->g->o["mur_header_text"], "tags_css" => $tags_css, "avg_style" => $rd_unit_style_avg, "avg_size" => $rd_unit_width_avg, "star_factor" => 1, "time_restirctions" => $expiry_type, "time_remaining" => $remaining, "time_date" => $deadline, "button_active" => $mur_button, "button_text" => $this->g->o["mur_button_text"], "debug" => $debug, "wait_msg" => $this->loader_multis)); } function render_multi_rating($post, $user, $override = array()) { if (is_feed()) return ""; if ($this->g->is_bot && $this->g->o["cached_loading"] == 0 && $this->g->o["bot_message"] != "normal") return GDSRRender::render_locked_response($this->g->o["bot_message"]); $default_settings = array("id" => 0, "style" => $this->g->o["mur_style"], "style_ie6" => $this->g->o["mur_style_ie6"], "size" => $this->g->o["mur_size"], "average_stars" => "oxygen", "average_stars_ie6" => "oxygen_gif", "average_size" => 30, "tpl" => 0, "read_only" => 0); $override = shortcode_atts($default_settings, $override); if ($override["style"] == "") $override["style"] = $this->g->o["mur_style"]; if ($override["style_ie6"] == "") $override["style_ie6"] = $this->g->o["mur_style_ie6"]; if ($override["size"] == "") $override["size"] = $this->g->o["mur_size"]; if ($override["tpl"] == 0) $override["tpl"] = $this->g->o["default_srb_template"]; $elements = $this->rating_loader_elements_post($post, $user, $override, "amr"); if ($this->g->o["cached_loading"] == 1) return GDSRRender::rating_loader(join(".", $elements), $this->g->is_bot, "small"); else return $this->render_multi_rating_actual($elements); } function render_stars_custom_value($settings = array()) { $style = $this->g->is_ie6 ? $settings["style_ie6"] : $settings["style"]; $value = isset($settings["vote"]) ? floatval($settings["vote"]) : 0; $star_factor = $settings["star_factor"]; $stars = $settings["max_value"]; $size = $settings["size"]; return GDSRRender::render_static_stars($style, $size, $stars, $value, "", "", $star_factor); } function render_multi_custom_values($template_id, $multi_set_id, $custom_id, $votes, $header_text = '', $override = array(), $tags_css = array(), $star_factor = 1) { $set = gd_get_multi_set($multi_set_id); $rd_unit_width = $override["size"]; $rd_unit_style = $this->g->is_ie6 ? $override["style_ie6"] : $override["style"]; $rd_unit_width_avg = isset($override["average_size"]) ? $override["average_size"] : $override["style"]; $rd_unit_style_avg = isset($override["average_stars"]) ? ($this->g->is_ie6 ? 
$override["average_stars_ie6"] : $override["average_stars"]) : $override["style"]; return GDSRRenderT2::render_mrb($template_id, array("style" => $rd_unit_style, "allow_vote" => false, "votes" => $votes, "post_id" => $custom_id, "set" => $set, "height" => $rd_unit_width, "header_text" => $header_text, "tags_css" => array("MUR_CSS_BLOCK" => ""), "avg_style" => $rd_unit_style_avg, "avg_size" => $rd_unit_width_avg, "star_factor" => $star_factor)); } // article rendering } ?>
imshashank/The-Perfect-Self
wp-content/plugins/gd-star-rating/code/blg/frontend.php
PHP
gpl-2.0
64,615
#!powershell # This file is part of Ansible # Copyright: (c) 2017, Red Hat, Inc. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) #Requires -Module Ansible.ModuleUtils.Legacy Set-StrictMode -Version 2 $ErrorActionPreference = "Stop" $ConfirmPreference = "None" $log_path = $null Function Write-DebugLog { Param( [string]$msg ) $DebugPreference = "Continue" $ErrorActionPreference = "Continue" $date_str = Get-Date -Format u $msg = "$date_str $msg" Write-Debug $msg if($log_path) { Add-Content $log_path $msg } } $required_features = @("AD-Domain-Services","RSAT-ADDS") Function Get-MissingFeatures { Write-DebugLog "Checking for missing Windows features..." $features = @(Get-WindowsFeature $required_features) If($features.Count -ne $required_features.Count) { Throw "One or more Windows features required for a domain controller are unavailable" } $missing_features = @($features | Where-Object InstallState -ne Installed) return ,$missing_features # no, the comma's not a typo- allows us to return an empty array } Function Ensure-FeatureInstallation { # ensure RSAT-ADDS and AD-Domain-Services features are installed Write-DebugLog "Ensuring required Windows features are installed..." $feature_result = Install-WindowsFeature $required_features If(-not $feature_result.Success) { Exit-Json -message ("Error installing AD-Domain-Services and RSAT-ADDS features: {0}" -f ($feature_result | Out-String)) } } # return the domain we're a DC for, or null if not a DC Function Get-DomainControllerDomain { Write-DebugLog "Checking for domain controller role and domain name" $sys_cim = Get-WmiObject Win32_ComputerSystem $is_dc = $sys_cim.DomainRole -in (4,5) # backup/primary DC # this will be our workgroup or joined-domain if we're not a DC $domain = $sys_cim.Domain Switch($is_dc) { $true { return $domain } Default { return $null } } } Function Create-Credential { Param( [string] $cred_user, [string] $cred_password ) $cred = New-Object System.Management.Automation.PSCredential($cred_user, $($cred_password | ConvertTo-SecureString -AsPlainText -Force)) Return $cred } Function Get-OperationMasterRoles { $assigned_roles = @((Get-ADDomainController -Server localhost).OperationMasterRoles) Return ,$assigned_roles # no, the comma's not a typo- allows us to return an empty array } $result = @{ changed = $false reboot_required = $false } $param = Parse-Args -arguments $args -supports_check_mode $true $dns_domain_name = Get-AnsibleParam $param "dns_domain_name" $safe_mode_password= Get-AnsibleParam $param "safe_mode_password" $domain_admin_user = Get-AnsibleParam $param "domain_admin_user" -failifempty $result $domain_admin_password= Get-AnsibleParam $param "domain_admin_password" -failifempty $result $local_admin_password= Get-AnsibleParam $param "local_admin_password" $database_path = Get-AnsibleParam $param "database_path" -type "path" $sysvol_path = Get-AnsibleParam $param "sysvol_path" -type "path" $read_only = Get-AnsibleParam $param "read_only" -type "bool" -default $false $site_name = Get-AnsibleParam $param "site_name" -type "str" -failifempty $read_only $state = Get-AnsibleParam $param "state" -validateset ("domain_controller", "member_server") -failifempty $result $log_path = Get-AnsibleParam $param "log_path" $_ansible_check_mode = Get-AnsibleParam $param "_ansible_check_mode" -default $false $global:log_path = $log_path Try { # ensure target OS support; < 2012 doesn't have cmdlet support for DC promotion If(-not (Get-Command Install-WindowsFeature -ErrorAction 
SilentlyContinue)) { Fail-Json -message "win_domain_controller requires at least Windows Server 2012" } # validate args If($state -eq "domain_controller") { If(-not $dns_domain_name) { Fail-Json -message "dns_domain_name is required when desired state is 'domain_controller'" } If(-not $safe_mode_password) { Fail-Json -message "safe_mode_password is required when desired state is 'domain_controller'" } # ensure that domain admin user is in UPN or down-level domain format (prevent hang from https://support.microsoft.com/en-us/kb/2737935) If(-not $domain_admin_user.Contains("\") -and -not $domain_admin_user.Contains("@")) { Fail-Json -message "domain_admin_user must be in domain\user or [email protected] format" } } Else { # member_server If(-not $local_admin_password) { Fail-Json -message "local_admin_password is required when desired state is 'member_server'" } } # short-circuit "member server" check, since we don't need feature checks for this... $current_dc_domain = Get-DomainControllerDomain If($state -eq "member_server" -and -not $current_dc_domain) { Exit-Json $result } # all other operations will require the AD-DS and RSAT-ADDS features... $missing_features = Get-MissingFeatures If($missing_features.Count -gt 0) { Write-DebugLog ("Missing Windows features ({0}), need to install" -f ($missing_features -join ", ")) $result.changed = $true # we need to install features If($_ansible_check_mode) { # bail out here- we can't proceed without knowing the features are installed Write-DebugLog "check-mode, exiting early" Exit-Json $result } Ensure-FeatureInstallation | Out-Null } $domain_admin_cred = Create-Credential -cred_user $domain_admin_user -cred_password $domain_admin_password switch($state) { domain_controller { If(-not $safe_mode_password) { Fail-Json -message "safe_mode_password is required for state=domain_controller" } If($current_dc_domain) { # FUTURE: implement managed Remove/Add to change domains? If($current_dc_domain -ne $dns_domain_name) { Fail-Json "$(hostname) is a domain controller for domain $current_dc_domain; changing DC domains is not implemented" } } # need to promote to DC If(-not $current_dc_domain) { Write-DebugLog "Not currently a domain controller; needs promotion" $result.changed = $true If($_ansible_check_mode) { Write-DebugLog "check-mode, exiting early" Fail-Json -message $result } $result.reboot_required = $true $safe_mode_secure = $safe_mode_password | ConvertTo-SecureString -AsPlainText -Force Write-DebugLog "Installing domain controller..." $install_params = @{ DomainName = $dns_domain_name Credential = $domain_admin_cred SafeModeAdministratorPassword = $safe_mode_secure } if ($database_path) { $install_params.DatabasePath = $database_path } if ($sysvol_path) { $install_params.SysvolPath = $sysvol_path } if ($site_name) { $install_params.SiteName = $site_name } $install_result = Install-ADDSDomainController -NoRebootOnCompletion -ReadOnlyReplica:$read_only -Force @install_params Write-DebugLog "Installation completed, needs reboot..." } } member_server { If(-not $local_admin_password) { Fail-Json -message "local_admin_password is required for state=domain_controller" } # at this point we already know we're a DC and shouldn't be... Write-DebugLog "Need to uninstall domain controller..." $result.changed = $true Write-DebugLog "Checking for operation master roles assigned to this DC..." $assigned_roles = Get-OperationMasterRoles # FUTURE: figure out a sane way to hand off roles automatically (designated recipient server, randomly look one up?) 
If($assigned_roles.Count -gt 0) { Fail-Json -message ("This domain controller has operation master role(s) ({0}) assigned; they must be moved to other DCs before demotion (see Move-ADDirectoryServerOperationMasterRole)" -f ($assigned_roles -join ", ")) } If($_ansible_check_mode) { Write-DebugLog "check-mode, exiting early" Exit-Json $result } $result.reboot_required = $true $local_admin_secure = $local_admin_password | ConvertTo-SecureString -AsPlainText -Force Write-DebugLog "Uninstalling domain controller..." $uninstall_result = Uninstall-ADDSDomainController -NoRebootOnCompletion -LocalAdministratorPassword $local_admin_secure -Credential $domain_admin_cred Write-DebugLog "Uninstallation complete, needs reboot..." } default { throw ("invalid state {0}" -f $state) } } Exit-Json $result } Catch { $excep = $_ Write-DebugLog "Exception: $($excep | out-string)" Throw }
noroutine/ansible
lib/ansible/modules/windows/win_domain_controller.ps1
PowerShell
gpl-3.0
9,444
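Before promoting a host, the module above rejects a domain_admin_user that is in neither down-level (DOMAIN\user) nor UPN (user@domain) form, because an ambiguous account name can hang the promotion cmdlet (see the KB article referenced in the script). A minimal Python sketch of that validation rule; the function name, error text and sample values are illustrative and not part of the module:

def validate_domain_admin_user(domain_admin_user):
    """Reject names that are neither DOMAIN\\user nor user@domain form.

    Mirrors the pre-promotion check in win_domain_controller.ps1;
    names and message wording here are illustrative only.
    """
    if "\\" not in domain_admin_user and "@" not in domain_admin_user:
        raise ValueError("domain_admin_user must be in domain\\user or user@domain form")

# These two pass; the bare account name raises ValueError.
validate_domain_admin_user("CONTOSO\\Administrator")
validate_domain_admin_user("admin@contoso.example")
try:
    validate_domain_admin_user("administrator")
except ValueError as err:
    print(err)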
#ifndef R2_HEAP_GLIBC_H #define R2_HEAP_GLIBC_H #ifdef __cplusplus extern "C" { #endif R_LIB_VERSION_HEADER(r_heap_glibc); #define PRINTF_A(color, fmt , ...) r_cons_printf (color fmt Color_RESET, __VA_ARGS__) #define PRINTF_YA(fmt, ...) PRINTF_A (Color_YELLOW, fmt, __VA_ARGS__) #define PRINTF_GA(fmt, ...) PRINTF_A (Color_GREEN, fmt, __VA_ARGS__) #define PRINTF_BA(fmt, ...) PRINTF_A (Color_BLUE, fmt, __VA_ARGS__) #define PRINTF_RA(fmt, ...) PRINTF_A (Color_RED, fmt, __VA_ARGS__) #define PRINT_A(color, msg) r_cons_print (color msg Color_RESET) #define PRINT_YA(msg) PRINT_A (Color_YELLOW, msg) #define PRINT_GA(msg) PRINT_A (Color_GREEN, msg) #define PRINT_BA(msg) PRINT_A (Color_BLUE, msg) #define PRINT_RA(msg) PRINT_A (Color_RED, msg) #define PREV_INUSE 0x1 #define IS_MMAPPED 0x2 #define NON_MAIN_ARENA 0x4 #define NBINS 128 #define NSMALLBINS 64 #define NFASTBINS 10 #define BINMAPSHIFT 5 #define SZ core->dbg->bits #define FASTBIN_IDX_TO_SIZE(i) ((SZ * 4) + (SZ * 2) * (i - 1)) #define BITSPERMAP (1U << BINMAPSHIFT) #define BINMAPSIZE (NBINS / BITSPERMAP) #define MAX(a,b) (((a)>(b))?(a):(b)) #define MALLOC_ALIGNMENT MAX (2 * SZ, __alignof__ (long double)) #define MALLOC_ALIGN_MASK (MALLOC_ALIGNMENT - 1) #define NPAD -6 #define largebin_index_32(size) \ (((((ut32)(size)) >> 6) <= 38)? 56 + (((ut32)(size)) >> 6): \ ((((ut32)(size)) >> 9) <= 20)? 91 + (((ut32)(size)) >> 9): \ ((((ut32)(size)) >> 12) <= 10)? 110 + (((ut32)(size)) >> 12): \ ((((ut32)(size)) >> 15) <= 4)? 119 + (((ut32)(size)) >> 15): \ ((((ut32)(size)) >> 18) <= 2)? 124 + (((ut32)(size)) >> 18): \ 126) #define largebin_index_32_big(size) \ (((((ut32)(size)) >> 6) <= 45)? 49 + (((ut32)(size)) >> 6): \ ((((ut32)(size)) >> 9) <= 20)? 91 + (((ut32)(size)) >> 9): \ ((((ut32)(size)) >> 12) <= 10)? 110 + (((ut32)(size)) >> 12): \ ((((ut32)(size)) >> 15) <= 4)? 119 + (((ut32)(size)) >> 15): \ ((((ut32)(size)) >> 18) <= 2)? 124 + (((ut32)(size)) >> 18): \ 126) #define largebin_index_64(size) \ (((((ut32)(size)) >> 6) <= 48)? 48 + (((ut32)(size)) >> 6): \ ((((ut32)(size)) >> 9) <= 20)? 91 + (((ut32)(size)) >> 9): \ ((((ut32)(size)) >> 12) <= 10)? 110 + (((ut32)(size)) >> 12): \ ((((ut32)(size)) >> 15) <= 4)? 119 + (((ut32)(size)) >> 15): \ ((((ut32)(size)) >> 18) <= 2)? 124 + (((ut32)(size)) >> 18): \ 126) #define largebin_index(size) \ (SZ == 8 ? largebin_index_64 (size) : largebin_index_32 (size)) /* Not works 32 bit on 64 emulation #define largebin_index(size) \ (SZ == 8 ? largebin_index_64 (size) \ : MALLOC_ALIGNMENT == 16 ? largebin_index_32_big (size) \ : largebin_index_32 (size)) */ typedef struct r_malloc_chunk_64 { ut64 prev_size; /* Size of previous chunk (if free). */ ut64 size; /* Size in bytes, including overhead. */ ut64 fd; /* double links -- used only if free. */ ut64 bk; /* Only used for large blocks: pointer to next larger size. */ ut64 fd_nextsize; /* double links -- used only if free. */ ut64 bk_nextsize; } RHeapChunk_64; typedef struct r_malloc_chunk_32 { ut32 prev_size; /* Size of previous chunk (if free). */ ut32 size; /* Size in bytes, including overhead. */ ut32 fd; /* double links -- used only if free. */ ut32 bk; /* Only used for large blocks: pointer to next larger size. */ ut32 fd_nextsize; /* double links -- used only if free. 
*/ ut32 bk_nextsize; } RHeapChunk_32; /* typedef RHeapChunk64 *mfastbinptr64; typedef RHeapChunk64 *mchunkptr64; typedef RHeapChunk32 *mfastbinptr32; typedef RHeapChunk32 *mchunkptr32; */ typedef struct r_malloc_state_32 { int mutex; /* serialized access */ int flags; /* flags */ ut32 fastbinsY[NFASTBINS]; /* array of fastchunks */ ut32 top; /* top chunk's base addr */ ut32 last_remainder; /* remainder top chunk's addr */ ut32 bins[NBINS * 2 - 2]; /* array of remainder free chunks */ unsigned int binmap[BINMAPSIZE]; /* bitmap of bins */ ut32 next; /* double linked list of chunks */ ut32 next_free; /* double linked list of free chunks */ ut32 system_mem; /* current allocated memory of current arena */ ut32 max_system_mem; /* maximum system memory */ } RHeap_MallocState_32; typedef struct r_malloc_state_64 { int mutex; /* serialized access */ int flags; /* flags */ ut64 fastbinsY[NFASTBINS]; /* array of fastchunks */ ut64 top; /* top chunk's base addr */ ut64 last_remainder; /* remainder top chunk's addr */ ut64 bins[NBINS * 2 - 2]; /* array of remainder free chunks */ unsigned int binmap[BINMAPSIZE]; /* bitmap of bins */ ut64 next; /* double linked list of chunks */ ut64 next_free; /* double linked list of free chunks */ ut64 system_mem; /* current allocated memory of current arena */ ut64 max_system_mem; /* maximum system memory */ } RHeap_MallocState_64; typedef struct r_heap_info_32 { ut32 ar_ptr; /* Arena for this heap. */ ut32 prev; /* Previous heap. */ ut32 size; /* Current size in bytes. */ ut32 mprotect_size; /* Size in bytes that has been mprotected PROT_READ|PROT_WRITE. */ /* Make sure the following data is properly aligned, particularly that sizeof (heap_info) + 2 * SZ is a multiple of MALLOC_ALIGNMENT. */ /* char pad[NPAD * SZ & MALLOC_ALIGN_MASK]; */ } RHeapInfo_32; typedef struct r_heap_info_64 { ut64 ar_ptr; /* Arena for this heap. */ ut64 prev; /* Previous heap. */ ut64 size; /* Current size in bytes. */ ut64 mprotect_size; /* Size in bytes that has been mprotected PROT_READ|PROT_WRITE. */ /* Make sure the following data is properly aligned, particularly that sizeof (heap_info) + 2 * SZ is a multiple of MALLOC_ALIGNMENT. */ /* char pad[NPAD * SZ & MALLOC_ALIGN_MASK]; */ } RHeapInfo_64; #ifdef __cplusplus } #endif #endif
8tab/radare2
libr/include/r_heap_glibc.h
C
gpl-3.0
5,952
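The largebin_index_64 macro in the header above buckets a chunk size into its large-bin slot using progressively coarser shifts (64-byte spacing first, then 512, 4096, 32768 and 262144 bytes). A direct Python transcription of that macro, included only to make the bucketing easy to probe interactively; it is not part of the radare2 header:

def largebin_index_64(size):
    """Python transcription of the largebin_index_64 macro (64-bit chunks)."""
    if (size >> 6) <= 48:
        return 48 + (size >> 6)
    if (size >> 9) <= 20:
        return 91 + (size >> 9)
    if (size >> 12) <= 10:
        return 110 + (size >> 12)
    if (size >> 15) <= 4:
        return 119 + (size >> 15)
    if (size >> 18) <= 2:
        return 124 + (size >> 18)
    return 126

print(largebin_index_64(0x500))    # 68  -> finest range, 64-byte spacing
print(largebin_index_64(0x10000))  # 121 -> coarser 32 KiB spacing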
<?php
/**
 * JComments plugin for Joomla com_weblinks component
 *
 * @version 1.4
 * @package JComments
 * @author Tommy Nilsson ([email protected])
 * @copyright (C) 2011 Tommy Nilsson (http://www.architechtsoftomorrow.com)
 * @copyright (C) 2011-2013 by Sergey M. Litvinov (http://www.joomlatune.ru)
 * @license GNU/GPL: http://www.gnu.org/copyleft/gpl.html
 */

defined('_JEXEC') or die;

class jc_com_weblinks extends JCommentsPlugin
{
	function getObjectTitle($id)
	{
		$db = JFactory::getDbo();
		$db->setQuery('SELECT title, id FROM #__categories WHERE section = "com_weblinks" and id = ' . $id);
		return $db->loadResult();
	}

	function getObjectLink($id)
	{
		$db = JFactory::getDBO();
		$db->setQuery('SELECT alias FROM #__categories WHERE section = "com_weblinks" and id = ' . $id);
		$alias = $db->loadResult();

		$link = 'index.php?option=com_weblinks&view=category&id=' . $id . ':' . $alias;

		require_once(JPATH_SITE.DS.'includes'.DS.'application.php');

		$component = JComponentHelper::getComponent('com_weblinks');
		$menus = JApplication::getMenu('site');
		$items = $menus->getItems('componentid', $component->id);

		if (count($items)) {
			$link .= "&Itemid=" . $items[0]->id;
		}

		$link = JRoute::_($link);

		return $link;
	}
}
studio42-fork/jcomments
site/plugins/com_weblinks.plugin.php
PHP
gpl-3.0
1,272
import math from math import sqrt, acos, cos, pi, sin, atan2 import os, sys, time, random from rotmat import Vector3, Matrix3 from subprocess import call, check_call,Popen, PIPE def m2ft(x): '''meters to feet''' return float(x) / 0.3048 def ft2m(x): '''feet to meters''' return float(x) * 0.3048 def kt2mps(x): return x * 0.514444444 def mps2kt(x): return x / 0.514444444 def topdir(): '''return top of git tree where autotest is running from''' d = os.path.dirname(os.path.realpath(__file__)) assert(os.path.basename(d)=='pysim') d = os.path.dirname(d) assert(os.path.basename(d)=='autotest') d = os.path.dirname(d) assert(os.path.basename(d)=='Tools') d = os.path.dirname(d) return d def reltopdir(path): '''return a path relative to topdir()''' return os.path.normpath(os.path.join(topdir(), path)) def run_cmd(cmd, dir=".", show=False, output=False, checkfail=True): '''run a shell command''' if show: print("Running: '%s' in '%s'" % (cmd, dir)) if output: return Popen([cmd], shell=True, stdout=PIPE, cwd=dir).communicate()[0] elif checkfail: return check_call(cmd, shell=True, cwd=dir) else: return call(cmd, shell=True, cwd=dir) def rmfile(path): '''remove a file if it exists''' try: os.unlink(path) except Exception: pass def deltree(path): '''delete a tree of files''' run_cmd('rm -rf %s' % path) def build_SIL(atype, target='sitl'): '''build desktop SIL''' run_cmd("make clean %s" % target, dir=reltopdir(atype), checkfail=True) return True def build_AVR(atype, board='mega2560'): '''build AVR binaries''' config = open(reltopdir('config.mk'), mode='w') config.write(''' HAL_BOARD=HAL_BOARD_APM1 BOARD=%s PORT=/dev/null ''' % board) config.close() run_cmd("make clean", dir=reltopdir(atype), checkfail=True) run_cmd("make", dir=reltopdir(atype), checkfail=True) return True # list of pexpect children to close on exit close_list = [] def pexpect_autoclose(p): '''mark for autoclosing''' global close_list close_list.append(p) def pexpect_close(p): '''close a pexpect child''' global close_list try: p.close() except Exception: pass try: p.close(force=True) except Exception: pass if p in close_list: close_list.remove(p) def pexpect_close_all(): '''close all pexpect children''' global close_list for p in close_list[:]: pexpect_close(p) def pexpect_drain(p): '''drain any pending input''' import pexpect try: p.read_nonblocking(1000, timeout=0) except pexpect.TIMEOUT: pass def start_SIL(atype, valgrind=False, wipe=False, height=None): '''launch a SIL instance''' import pexpect cmd="" if valgrind and os.path.exists('/usr/bin/valgrind'): cmd += 'valgrind -q --log-file=%s-valgrind.log ' % atype executable = reltopdir('tmp/%s.build/%s.elf' % (atype, atype)) if not os.path.exists(executable): executable = '/tmp/%s.build/%s.elf' % (atype, atype) cmd += executable if wipe: cmd += ' -w' if height is not None: cmd += ' -H %u' % height ret = pexpect.spawn(cmd, logfile=sys.stdout, timeout=5) ret.delaybeforesend = 0 pexpect_autoclose(ret) ret.expect('Waiting for connection') return ret def start_MAVProxy_SIL(atype, aircraft=None, setup=False, master='tcp:127.0.0.1:5760', options=None, logfile=sys.stdout): '''launch mavproxy connected to a SIL instance''' import pexpect global close_list MAVPROXY = os.getenv('MAVPROXY_CMD', reltopdir('../MAVProxy/mavproxy.py')) cmd = MAVPROXY + ' --master=%s --out=127.0.0.1:14550' % master if setup: cmd += ' --setup' if aircraft is None: aircraft = 'test.%s' % atype cmd += ' --aircraft=%s' % aircraft if options is not None: cmd += ' ' + options ret = pexpect.spawn(cmd, logfile=logfile, timeout=60) 
ret.delaybeforesend = 0 pexpect_autoclose(ret) return ret def expect_setup_callback(e, callback): '''setup a callback that is called once a second while waiting for patterns''' import pexpect def _expect_callback(pattern, timeout=e.timeout): tstart = time.time() while time.time() < tstart + timeout: try: ret = e.expect_saved(pattern, timeout=1) return ret except pexpect.TIMEOUT: e.expect_user_callback(e) pass print("Timed out looking for %s" % pattern) raise pexpect.TIMEOUT(timeout) e.expect_user_callback = callback e.expect_saved = e.expect e.expect = _expect_callback def mkdir_p(dir): '''like mkdir -p''' if not dir: return if dir.endswith("/"): mkdir_p(dir[:-1]) return if os.path.isdir(dir): return mkdir_p(os.path.dirname(dir)) os.mkdir(dir) def loadfile(fname): '''load a file as a string''' f = open(fname, mode='r') r = f.read() f.close() return r def lock_file(fname): '''lock a file''' import fcntl f = open(fname, mode='w') try: fcntl.lockf(f, fcntl.LOCK_EX | fcntl.LOCK_NB) except Exception: return None return f def check_parent(parent_pid=None): '''check our parent process is still alive''' if parent_pid is None: try: parent_pid = os.getppid() except Exception: pass if parent_pid is None: return try: os.kill(parent_pid, 0) except Exception: print("Parent had finished - exiting") sys.exit(1) def EarthRatesToBodyRates(dcm, earth_rates): '''convert the angular velocities from earth frame to body frame. Thanks to James Goppert for the formula all inputs and outputs are in radians returns a gyro vector in body frame, in rad/s ''' from math import sin, cos (phi, theta, psi) = dcm.to_euler() phiDot = earth_rates.x thetaDot = earth_rates.y psiDot = earth_rates.z p = phiDot - psiDot*sin(theta) q = cos(phi)*thetaDot + sin(phi)*psiDot*cos(theta) r = cos(phi)*psiDot*cos(theta) - sin(phi)*thetaDot return Vector3(p, q, r) def BodyRatesToEarthRates(dcm, gyro): '''convert the angular velocities from body frame to earth frame. all inputs and outputs are in radians/s returns a earth rate vector ''' from math import sin, cos, tan, fabs p = gyro.x q = gyro.y r = gyro.z (phi, theta, psi) = dcm.to_euler() phiDot = p + tan(theta)*(q*sin(phi) + r*cos(phi)) thetaDot = q*cos(phi) - r*sin(phi) if fabs(cos(theta)) < 1.0e-20: theta += 1.0e-10 psiDot = (q*sin(phi) + r*cos(phi))/cos(theta) return Vector3(phiDot, thetaDot, psiDot) def gps_newpos(lat, lon, bearing, distance): '''extrapolate latitude/longitude given a heading and distance thanks to http://www.movable-type.co.uk/scripts/latlong.html ''' from math import sin, asin, cos, atan2, radians, degrees radius_of_earth = 6378100.0 # in meters lat1 = radians(lat) lon1 = radians(lon) brng = radians(bearing) dr = distance/radius_of_earth lat2 = asin(sin(lat1)*cos(dr) + cos(lat1)*sin(dr)*cos(brng)) lon2 = lon1 + atan2(sin(brng)*sin(dr)*cos(lat1), cos(dr)-sin(lat1)*sin(lat2)) return (degrees(lat2), degrees(lon2)) class Wind(object): '''a wind generation object''' def __init__(self, windstring, cross_section=0.1): a = windstring.split(',') if len(a) != 3: raise RuntimeError("Expected wind in speed,direction,turbulance form, not %s" % windstring) self.speed = float(a[0]) # m/s self.direction = float(a[1]) # direction the wind is going in self.turbulance= float(a[2]) # turbulance factor (standard deviation) # the cross-section of the aircraft to wind. 
This is multiplied by the # difference in the wind and the velocity of the aircraft to give the acceleration self.cross_section = cross_section # the time constant for the turbulance - the average period of the # changes over time self.turbulance_time_constant = 5.0 # wind time record self.tlast = time.time() # initial turbulance multiplier self.turbulance_mul = 1.0 def current(self, deltat=None): '''return current wind speed and direction as a tuple speed is in m/s, direction in degrees ''' if deltat is None: tnow = time.time() deltat = tnow - self.tlast self.tlast = tnow # update turbulance random walk w_delta = math.sqrt(deltat)*(1.0-random.gauss(1.0, self.turbulance)) w_delta -= (self.turbulance_mul-1.0)*(deltat/self.turbulance_time_constant) self.turbulance_mul += w_delta speed = self.speed * math.fabs(self.turbulance_mul) return (speed, self.direction) # Calculate drag. def drag(self, velocity, deltat=None, testing=None): '''return current wind force in Earth frame. The velocity parameter is a Vector3 of the current velocity of the aircraft in earth frame, m/s''' from math import radians # (m/s, degrees) : wind vector as a magnitude and angle. (speed, direction) = self.current(deltat=deltat) # speed = self.speed # direction = self.direction # Get the wind vector. w = toVec(speed, radians(direction)) obj_speed = velocity.length() # Compute the angle between the object vector and wind vector by taking # the dot product and dividing by the magnitudes. d = w.length() * obj_speed if d == 0: alpha = 0 else: alpha = acos((w * velocity) / d) # Get the relative wind speed and angle from the object. Note that the # relative wind speed includes the velocity of the object; i.e., there # is a headwind equivalent to the object's speed even if there is no # absolute wind. (rel_speed, beta) = apparent_wind(speed, obj_speed, alpha) # Return the vector of the relative wind, relative to the coordinate # system. relWindVec = toVec(rel_speed, beta + atan2(velocity.y, velocity.x)) # Combine them to get the acceleration vector. return Vector3( acc(relWindVec.x, drag_force(self, relWindVec.x)) , acc(relWindVec.y, drag_force(self, relWindVec.y)) , 0 ) # http://en.wikipedia.org/wiki/Apparent_wind # # Returns apparent wind speed and angle of apparent wind. Alpha is the angle # between the object and the true wind. alpha of 0 rads is a headwind; pi a # tailwind. Speeds should always be positive. def apparent_wind(wind_sp, obj_speed, alpha): delta = wind_sp * cos(alpha) x = wind_sp**2 + obj_speed**2 + 2 * obj_speed * delta rel_speed = sqrt(x) if rel_speed == 0: beta = pi else: beta = acos((delta + obj_speed) / rel_speed) return (rel_speed, beta) # See http://en.wikipedia.org/wiki/Drag_equation # # Drag equation is F(a) = cl * p/2 * v^2 * a, where cl : drag coefficient # (let's assume it's low, .e.g., 0.2), p : density of air (assume about 1 # kg/m^3, the density just over 1500m elevation), v : relative speed of wind # (to the body), a : area acted on (this is captured by the cross_section # paramter). # # So then we have # F(a) = 0.2 * 1/2 * v^2 * cross_section = 0.1 * v^2 * cross_section def drag_force(wind, sp): return (sp**2.0) * 0.1 * wind.cross_section # Function to make the force vector. relWindVec is the direction the apparent # wind comes *from*. We want to compute the accleration vector in the direction # the wind blows to. def acc(val, mag): if val == 0: return mag else: return (val / abs(val)) * (0 - mag) # Converts a magnitude and angle (radians) to a vector in the xy plane. 
def toVec(magnitude, angle): v = Vector3(magnitude, 0, 0) m = Matrix3() m.from_euler(0, 0, angle) return m.transposed() * v if __name__ == "__main__": import doctest doctest.testmod()
ptsneves/QuadSim_OpenGL
util.py
Python
gpl-3.0
12,459
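The Wind class above documents its simplification of the drag equation (cl ~= 0.2 and air density ~= 1 kg/m^3 collapse F = cl * p/2 * v^2 * a to 0.1 * v^2 * a) and defines apparent_wind for the relative wind seen by the moving aircraft. The standalone sketch below re-derives the shortcut constant and checks apparent_wind in the headwind and tailwind limits; it only restates the formulas in the comments, and the numbers are illustrative:

from math import acos, cos, pi, sqrt

def drag_force_full(speed, cross_section, cl=0.2, rho=1.0):
    """Full drag equation F = cl * rho/2 * v^2 * a from the comments above."""
    return cl * rho / 2.0 * speed ** 2 * cross_section

# The module's shortcut 0.1 * v^2 * a equals the full equation for cl=0.2, rho=1.
assert abs(drag_force_full(10.0, 0.1) - 0.1 * 10.0 ** 2 * 0.1) < 1e-9

def apparent_wind(wind_speed, obj_speed, alpha):
    """Apparent wind speed/angle as in util.py (alpha = 0 rad is a headwind)."""
    delta = wind_speed * cos(alpha)
    rel_speed = sqrt(wind_speed ** 2 + obj_speed ** 2 + 2 * obj_speed * delta)
    beta = pi if rel_speed == 0 else acos((delta + obj_speed) / rel_speed)
    return rel_speed, beta

print(apparent_wind(5.0, 10.0, 0.0))  # (15.0, 0.0): headwind speeds add
print(apparent_wind(5.0, 10.0, pi))   # (5.0, 0.0): tailwind slower than the aircraft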
/*
 * Copyright (C) 1997-2015 JDE Developers Team
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see http://www.gnu.org/licenses/.
 *
 * Authors :
 *     Victor Arribas Raigadas <[email protected]>
 */

#ifndef QUADROTORSENSORS_H
#define QUADROTORSENSORS_H

#include <iostream>

#include <boost/bind.hpp>
#include <boost/format.hpp>

#include <gazebo/gazebo.hh>
#include <gazebo/physics/Model.hh>
#include <gazebo/sensors/sensors.hh>
#include <gazebo/sensors/SensorManager.hh>
#include <gazebo/common/Events.hh>

#include <opencv2/core/core.hpp>

#include <quadrotor/debugtools.h>

#define BROKEN_SonarSensor

namespace quadrotor{

class QuadRotorSensors
{
public:
    enum{
        CAM_VENTRAL = 0,
        CAM_FRONTAL,
        NUM_CAMS
    };

public:
    QuadRotorSensors();
    virtual ~QuadRotorSensors();

    void Load(gazebo::physics::ModelPtr model);
    void Init();
    void debugInfo();

    std::string _log_prefix;

public:
    gazebo::sensors::CameraSensorPtr cam[NUM_CAMS];
#ifdef BROKEN_SonarSensor
    gazebo::sensors::RaySensorPtr sonar;
#else
    gazebo::sensors::SonarSensorPtr sonar;
#endif
    gazebo::sensors::ImuSensorPtr imu;

private:
    gazebo::physics::ModelPtr model;
    uint32_t base_link_id;

private:
    gazebo::event::ConnectionPtr sub_cam[NUM_CAMS];
    gazebo::event::ConnectionPtr sub_sonar;
    gazebo::event::ConnectionPtr sub_imu;

    void _on_cam(int id);
    void _on_sonar();
    void _on_imu();

public:
    cv::Mat img[NUM_CAMS];
    gazebo::math::Pose pose;
    double altitude;
};

}//NS

#endif // QUADROTORSENSORS_H
jderobot-varribas/JdeRobot
src/stable/components/gazeboserver/plugins/quadrotor2/include/quadrotor/quadrotorsensors.hh
C++
gpl-3.0
2,155
package mirror.android.content;

import android.os.IBinder;
import android.os.IInterface;

import mirror.RefClass;
import mirror.MethodParams;
import mirror.RefStaticMethod;

public class IRestrictionsManager {
    public static Class<?> TYPE = RefClass.load(IRestrictionsManager.class, "android.content.IRestrictionsManager");

    public static class Stub {
        public static Class<?> TYPE = RefClass.load(Stub.class, "android.content.IRestrictionsManager$Stub");

        @MethodParams({IBinder.class})
        public static RefStaticMethod<IInterface> asInterface;
    }
}
codehz/container
lib/src/main/java/mirror/android/content/IRestrictionsManager.java
Java
gpl-3.0
579
# (c) 2012-2014, Michael DeHaan <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ############################################# from __future__ import (absolute_import, division, print_function) __metaclass__ = type import fnmatch import os import sys import re import stat import subprocess from ansible import constants as C from ansible import errors from ansible.inventory.ini import InventoryParser from ansible.inventory.script import InventoryScript from ansible.inventory.dir import InventoryDirectory from ansible.inventory.group import Group from ansible.inventory.host import Host from ansible.plugins import vars_loader from ansible.utils.path import is_executable from ansible.utils.vars import combine_vars class Inventory(object): """ Host inventory for ansible. """ #__slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset', # 'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list', # '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir'] def __init__(self, loader, variable_manager, host_list=C.DEFAULT_HOST_LIST): # the host file file, or script path, or list of hosts # if a list, inventory data will NOT be loaded self.host_list = host_list self._loader = loader self._variable_manager = variable_manager # caching to avoid repeated calculations, particularly with # external inventory scripts. 
self._vars_per_host = {} self._vars_per_group = {} self._hosts_cache = {} self._groups_list = {} self._pattern_cache = {} # to be set by calling set_playbook_basedir by playbook code self._playbook_basedir = None # the inventory object holds a list of groups self.groups = [] # a list of host(names) to contain current inquiries to self._restriction = None self._also_restriction = None self._subset = None if isinstance(host_list, basestring): if "," in host_list: host_list = host_list.split(",") host_list = [ h for h in host_list if h and h.strip() ] if host_list is None: self.parser = None elif isinstance(host_list, list): self.parser = None all = Group('all') self.groups = [ all ] ipv6_re = re.compile('\[([a-f:A-F0-9]*[%[0-z]+]?)\](?::(\d+))?') for x in host_list: m = ipv6_re.match(x) if m: all.add_host(Host(m.groups()[0], m.groups()[1])) else: if ":" in x: tokens = x.rsplit(":", 1) # if there is ':' in the address, then this is an ipv6 if ':' in tokens[0]: all.add_host(Host(x)) else: all.add_host(Host(tokens[0], tokens[1])) else: all.add_host(Host(x)) elif os.path.exists(host_list): if os.path.isdir(host_list): # Ensure basedir is inside the directory self.host_list = os.path.join(self.host_list, "") self.parser = InventoryDirectory(loader=self._loader, filename=host_list) self.groups = self.parser.groups.values() else: # check to see if the specified file starts with a # shebang (#!/), so if an error is raised by the parser # class we can show a more apropos error shebang_present = False try: inv_file = open(host_list) first_line = inv_file.readlines()[0] inv_file.close() if first_line.startswith('#!'): shebang_present = True except: pass if is_executable(host_list): try: self.parser = InventoryScript(loader=self._loader, filename=host_list) self.groups = self.parser.groups.values() except: if not shebang_present: raise errors.AnsibleError("The file %s is marked as executable, but failed to execute correctly. " % host_list + \ "If this is not supposed to be an executable script, correct this with `chmod -x %s`." % host_list) else: raise else: try: self.parser = InventoryParser(filename=host_list) self.groups = self.parser.groups.values() except: if shebang_present: raise errors.AnsibleError("The file %s looks like it should be an executable inventory script, but is not marked executable. " % host_list + \ "Perhaps you want to correct this with `chmod +x %s`?" % host_list) else: raise vars_loader.add_directory(self.basedir(), with_subdir=True) else: raise errors.AnsibleError("Unable to find an inventory file (%s), " "specify one with -i ?" 
% host_list) self._vars_plugins = [ x for x in vars_loader.all(self) ] # FIXME: shouldn't be required, since the group/host vars file # management will be done in VariableManager # get group vars from group_vars/ files and vars plugins for group in self.groups: # FIXME: combine_vars group.vars = combine_vars(group.vars, self.get_group_variables(group.name)) # get host vars from host_vars/ files and vars plugins for host in self.get_hosts(): # FIXME: combine_vars host.vars = combine_vars(host.vars, self.get_host_variables(host.name)) def _match(self, str, pattern_str): try: if pattern_str.startswith('~'): return re.search(pattern_str[1:], str) else: return fnmatch.fnmatch(str, pattern_str) except Exception, e: raise errors.AnsibleError('invalid host pattern: %s' % pattern_str) def _match_list(self, items, item_attr, pattern_str): results = [] try: if not pattern_str.startswith('~'): pattern = re.compile(fnmatch.translate(pattern_str)) else: pattern = re.compile(pattern_str[1:]) except Exception, e: raise errors.AnsibleError('invalid host pattern: %s' % pattern_str) for item in items: if pattern.match(getattr(item, item_attr)): results.append(item) return results def get_hosts(self, pattern="all"): """ find all host names matching a pattern string, taking into account any inventory restrictions or applied subsets. """ # process patterns if isinstance(pattern, list): pattern = ';'.join(pattern) patterns = pattern.replace(";",":").split(":") hosts = self._get_hosts(patterns) # exclude hosts not in a subset, if defined if self._subset: subset = self._get_hosts(self._subset) hosts = [ h for h in hosts if h in subset ] # exclude hosts mentioned in any restriction (ex: failed hosts) if self._restriction is not None: hosts = [ h for h in hosts if h in self._restriction ] if self._also_restriction is not None: hosts = [ h for h in hosts if h in self._also_restriction ] return hosts def _get_hosts(self, patterns): """ finds hosts that match a list of patterns. Handles negative matches as well as intersection matches. """ # Host specifiers should be sorted to ensure consistent behavior pattern_regular = [] pattern_intersection = [] pattern_exclude = [] for p in patterns: if p.startswith("!"): pattern_exclude.append(p) elif p.startswith("&"): pattern_intersection.append(p) elif p: pattern_regular.append(p) # if no regular pattern was given, hence only exclude and/or intersection # make that magically work if pattern_regular == []: pattern_regular = ['all'] # when applying the host selectors, run those without the "&" or "!" # first, then the &s, then the !s. patterns = pattern_regular + pattern_intersection + pattern_exclude hosts = [] for p in patterns: # avoid resolving a pattern that is a plain host if p in self._hosts_cache: hosts.append(self.get_host(p)) else: that = self.__get_hosts(p) if p.startswith("!"): hosts = [ h for h in hosts if h not in that ] elif p.startswith("&"): hosts = [ h for h in hosts if h in that ] else: to_append = [ h for h in that if h.name not in [ y.name for y in hosts ] ] hosts.extend(to_append) return hosts def __get_hosts(self, pattern): """ finds hosts that positively match a particular pattern. Does not take into account negative matches. 
""" if pattern in self._pattern_cache: return self._pattern_cache[pattern] (name, enumeration_details) = self._enumeration_info(pattern) hpat = self._hosts_in_unenumerated_pattern(name) result = self._apply_ranges(pattern, hpat) self._pattern_cache[pattern] = result return result def _enumeration_info(self, pattern): """ returns (pattern, limits) taking a regular pattern and finding out which parts of it correspond to start/stop offsets. limits is a tuple of (start, stop) or None """ # Do not parse regexes for enumeration info if pattern.startswith('~'): return (pattern, None) # The regex used to match on the range, which can be [x] or [x-y]. pattern_re = re.compile("^(.*)\[([-]?[0-9]+)(?:(?:-)([0-9]+))?\](.*)$") m = pattern_re.match(pattern) if m: (target, first, last, rest) = m.groups() first = int(first) if last: if first < 0: raise errors.AnsibleError("invalid range: negative indices cannot be used as the first item in a range") last = int(last) else: last = first return (target, (first, last)) else: return (pattern, None) def _apply_ranges(self, pat, hosts): """ given a pattern like foo, that matches hosts, return all of hosts given a pattern like foo[0:5], where foo matches hosts, return the first 6 hosts """ # If there are no hosts to select from, just return the # empty set. This prevents trying to do selections on an empty set. # issue#6258 if not hosts: return hosts (loose_pattern, limits) = self._enumeration_info(pat) if not limits: return hosts (left, right) = limits if left == '': left = 0 if right == '': right = 0 left=int(left) right=int(right) try: if left != right: return hosts[left:right] else: return [ hosts[left] ] except IndexError: raise errors.AnsibleError("no hosts matching the pattern '%s' were found" % pat) def _create_implicit_localhost(self, pattern): new_host = Host(pattern) new_host.set_variable("ansible_python_interpreter", sys.executable) new_host.set_variable("ansible_connection", "local") new_host.ipv4_address = '127.0.0.1' ungrouped = self.get_group("ungrouped") if ungrouped is None: self.add_group(Group('ungrouped')) ungrouped = self.get_group('ungrouped') self.get_group('all').add_child_group(ungrouped) ungrouped.add_host(new_host) return new_host def _hosts_in_unenumerated_pattern(self, pattern): """ Get all host names matching the pattern """ results = [] hosts = [] hostnames = set() # ignore any negative checks here, this is handled elsewhere pattern = pattern.replace("!","").replace("&", "") def __append_host_to_results(host): if host not in results and host.name not in hostnames: hostnames.add(host.name) results.append(host) groups = self.get_groups() for group in groups: if pattern == 'all': for host in group.get_hosts(): __append_host_to_results(host) else: if self._match(group.name, pattern) and group.name not in ('all', 'ungrouped'): for host in group.get_hosts(): __append_host_to_results(host) else: matching_hosts = self._match_list(group.get_hosts(), 'name', pattern) for host in matching_hosts: __append_host_to_results(host) if pattern in ["localhost", "127.0.0.1", "::1"] and len(results) == 0: new_host = self._create_implicit_localhost(pattern) results.append(new_host) return results def clear_pattern_cache(self): ''' called exclusively by the add_host plugin to allow patterns to be recalculated ''' self._pattern_cache = {} def groups_for_host(self, host): if host in self._hosts_cache: return self._hosts_cache[host].get_groups() else: return [] def groups_list(self): if not self._groups_list: groups = {} for g in self.groups: groups[g.name] 
= [h.name for h in g.get_hosts()] ancestors = g.get_ancestors() for a in ancestors: if a.name not in groups: groups[a.name] = [h.name for h in a.get_hosts()] self._groups_list = groups return self._groups_list def get_groups(self): return self.groups def get_host(self, hostname): if hostname not in self._hosts_cache: self._hosts_cache[hostname] = self._get_host(hostname) return self._hosts_cache[hostname] def _get_host(self, hostname): if hostname in ['localhost', '127.0.0.1', '::1']: for host in self.get_group('all').get_hosts(): if host.name in ['localhost', '127.0.0.1', '::1']: return host return self._create_implicit_localhost(hostname) else: for group in self.groups: for host in group.get_hosts(): if hostname == host.name: return host return None def get_group(self, groupname): for group in self.groups: if group.name == groupname: return group return None def get_group_variables(self, groupname, update_cached=False, vault_password=None): if groupname not in self._vars_per_group or update_cached: self._vars_per_group[groupname] = self._get_group_variables(groupname, vault_password=vault_password) return self._vars_per_group[groupname] def _get_group_variables(self, groupname, vault_password=None): group = self.get_group(groupname) if group is None: raise Exception("group not found: %s" % groupname) vars = {} # plugin.get_group_vars retrieves just vars for specific group vars_results = [ plugin.get_group_vars(group, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_group_vars')] for updated in vars_results: if updated is not None: # FIXME: combine_vars vars = combine_vars(vars, updated) # Read group_vars/ files # FIXME: combine_vars vars = combine_vars(vars, self.get_group_vars(group)) return vars def get_vars(self, hostname, update_cached=False, vault_password=None): host = self.get_host(hostname) if not host: raise Exception("host not found: %s" % hostname) return host.get_vars() def get_host_variables(self, hostname, update_cached=False, vault_password=None): if hostname not in self._vars_per_host or update_cached: self._vars_per_host[hostname] = self._get_host_variables(hostname, vault_password=vault_password) return self._vars_per_host[hostname] def _get_host_variables(self, hostname, vault_password=None): host = self.get_host(hostname) if host is None: raise errors.AnsibleError("host not found: %s" % hostname) vars = {} # plugin.run retrieves all vars (also from groups) for host vars_results = [ plugin.run(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'run')] for updated in vars_results: if updated is not None: # FIXME: combine_vars vars = combine_vars(vars, updated) # plugin.get_host_vars retrieves just vars for specific host vars_results = [ plugin.get_host_vars(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_host_vars')] for updated in vars_results: if updated is not None: # FIXME: combine_vars vars = combine_vars(vars, updated) # still need to check InventoryParser per host vars # which actually means InventoryScript per host, # which is not performant if self.parser is not None: # FIXME: combine_vars vars = combine_vars(vars, self.parser.get_host_variables(host)) # Read host_vars/ files # FIXME: combine_vars vars = combine_vars(vars, self.get_host_vars(host)) return vars def add_group(self, group): if group.name not in self.groups_list(): self.groups.append(group) self._groups_list = None # invalidate internal cache else: raise 
errors.AnsibleError("group already in inventory: %s" % group.name) def list_hosts(self, pattern="all"): """ return a list of hostnames for a pattern """ result = [ h for h in self.get_hosts(pattern) ] if len(result) == 0 and pattern in ["localhost", "127.0.0.1", "::1"]: result = [pattern] return result def list_groups(self): return sorted([ g.name for g in self.groups ], key=lambda x: x) def restrict_to_hosts(self, restriction): """ Restrict list operations to the hosts given in restriction. This is used to exclude failed hosts in main playbook code, don't use this for other reasons. """ if not isinstance(restriction, list): restriction = [ restriction ] self._restriction = restriction def also_restrict_to(self, restriction): """ Works like restict_to but offers an additional restriction. Playbooks use this to implement serial behavior. """ if not isinstance(restriction, list): restriction = [ restriction ] self._also_restriction = restriction def subset(self, subset_pattern): """ Limits inventory results to a subset of inventory that matches a given pattern, such as to select a given geographic of numeric slice amongst a previous 'hosts' selection that only select roles, or vice versa. Corresponds to --limit parameter to ansible-playbook """ if subset_pattern is None: self._subset = None else: subset_pattern = subset_pattern.replace(',',':') subset_pattern = subset_pattern.replace(";",":").split(":") results = [] # allow Unix style @filename data for x in subset_pattern: if x.startswith("@"): fd = open(x[1:]) results.extend(fd.read().split("\n")) fd.close() else: results.append(x) self._subset = results def remove_restriction(self): """ Do not restrict list operations """ self._restriction = None def lift_also_restriction(self): """ Clears the also restriction """ self._also_restriction = None def is_file(self): """ did inventory come from a file? """ if not isinstance(self.host_list, basestring): return False return os.path.exists(self.host_list) def basedir(self): """ if inventory came from a file, what's the directory? """ if not self.is_file(): return None dname = os.path.dirname(self.host_list) if dname is None or dname == '' or dname == '.': cwd = os.getcwd() return os.path.abspath(cwd) return os.path.abspath(dname) def src(self): """ if inventory came from a file, what's the directory and file name? """ if not self.is_file(): return None return self.host_list def playbook_basedir(self): """ returns the directory of the current playbook """ return self._playbook_basedir def set_playbook_basedir(self, dir): """ sets the base directory of the playbook so inventory can use it as a basedir for host_ and group_vars, and other things. 
""" # Only update things if dir is a different playbook basedir if dir != self._playbook_basedir: self._playbook_basedir = dir # get group vars from group_vars/ files for group in self.groups: # FIXME: combine_vars group.vars = combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True)) # get host vars from host_vars/ files for host in self.get_hosts(): # FIXME: combine_vars host.vars = combine_vars(host.vars, self.get_host_vars(host, new_pb_basedir=True)) # invalidate cache self._vars_per_host = {} self._vars_per_group = {} def get_host_vars(self, host, new_pb_basedir=False): """ Read host_vars/ files """ return self._get_hostgroup_vars(host=host, group=None, new_pb_basedir=new_pb_basedir) def get_group_vars(self, group, new_pb_basedir=False): """ Read group_vars/ files """ return self._get_hostgroup_vars(host=None, group=group, new_pb_basedir=new_pb_basedir) def _get_hostgroup_vars(self, host=None, group=None, new_pb_basedir=False): """ Loads variables from group_vars/<groupname> and host_vars/<hostname> in directories parallel to the inventory base directory or in the same directory as the playbook. Variables in the playbook dir will win over the inventory dir if files are in both. """ results = {} scan_pass = 0 _basedir = self.basedir() # look in both the inventory base directory and the playbook base directory # unless we do an update for a new playbook base dir if not new_pb_basedir: basedirs = [_basedir, self._playbook_basedir] else: basedirs = [self._playbook_basedir] for basedir in basedirs: # this can happen from particular API usages, particularly if not run # from /usr/bin/ansible-playbook if basedir is None: continue scan_pass = scan_pass + 1 # it's not an eror if the directory does not exist, keep moving if not os.path.exists(basedir): continue # save work of second scan if the directories are the same if _basedir == self._playbook_basedir and scan_pass != 1: continue # FIXME: these should go to VariableManager if group and host is None: # load vars in dir/group_vars/name_of_group base_path = os.path.join(basedir, "group_vars/%s" % group.name) results = self._variable_manager.add_group_vars_file(base_path, self._loader) elif host and group is None: # same for hostvars in dir/host_vars/name_of_host base_path = os.path.join(basedir, "host_vars/%s" % host.name) results = self._variable_manager.add_host_vars_file(base_path, self._loader) # all done, results is a dictionary of variables for this particular host. return results
Ensighten/ansible
lib/ansible/inventory/__init__.py
Python
gpl-3.0
26,188
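Inventory.get_hosts above splits a pattern string on ':' / ';' and applies plain patterns first, then '&' intersections, then '!' exclusions, defaulting to 'all' when only modifiers are given. The following self-contained sketch reproduces that ordering over plain host-name strings; it uses sets rather than Ansible's order-preserving host objects, and the host names are made up:

import fnmatch

def select_hosts(all_hosts, pattern):
    """Apply regular, '&' (intersection) and '!' (exclusion) patterns in order."""
    patterns = pattern.replace(";", ":").split(":")
    regular = [p for p in patterns if p and not p.startswith(("!", "&"))]
    intersect = [p for p in patterns if p.startswith("&")]
    exclude = [p for p in patterns if p.startswith("!")]
    if not regular:                # '!db*' alone behaves like 'all:!db*'
        regular = ["all"]

    def matches(p):
        if p == "all":
            return set(all_hosts)
        return {h for h in all_hosts if fnmatch.fnmatch(h, p)}

    hosts = set()
    for p in regular:
        hosts |= matches(p)
    for p in intersect:
        hosts &= matches(p[1:])
    for p in exclude:
        hosts -= matches(p[1:])
    return sorted(hosts)

hosts = ["web1", "web2", "db1", "db2"]
print(select_hosts(hosts, "web*:&web1"))  # ['web1']
print(select_hosts(hosts, "!db*"))        # ['web1', 'web2']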
<?php

use yii\helpers\Html;

/**
 * @var yii\web\View $this
 * @var yii\data\ActiveDataProvider $dataProvider
 * @var \app\modules\seo\models\Meta $searchModel
 */

$this->title = Yii::t('app', 'Meta tags');
$this->params['breadcrumbs'][] = ['label' => Yii::t('app', 'SEO'), 'url' => ['index']];
$this->params['breadcrumbs'][] = $this->title;
?>
<div class="meta-tags-index">
    <h1><?= Html::encode($this->title) ?></h1>
    <p>
        <?= Html::a(Yii::t('app', 'Create Meta tag'), ['create-meta', 'returnUrl' => \app\backend\components\Helper::getReturnUrl()], ['class' => 'btn btn-success']) ?>
        <?= Html::button(Yii::t('app', 'Delete selected'), ['class'=> 'btn btn-danger', 'id' => 'deleteTasks']); ?>
        <?= Yii::$app->user->can('cache manage') ? Html::button(Yii::t('app', 'Delete Meta Cache') . ' <span class="fa"></span>', ['class'=> 'btn btn-warning pull-right', 'id' => 'flushCache']) : ''; ?>
    </p>
    <?= $this->render('_metaGrid', ['dataProvider' => $dataProvider, 'searchModel' => $searchModel, 'id' => 'meta-tags']); ?>
</div>
<script type="text/javascript">
    $(function() {
        $('#deleteTasks').on('click', function() {
            $.ajax({
                'url' : '/seo/manage/delete-metas',
                'type': 'post',
                'data': {
                    'metas' : $('.grid-view').yiiGridView('getSelectedRows')
                },
                success: function(data) {
                    if(data) location.reload();
                }
            });
        });
        $('#flushCache').on('click', function() {
            $.ajax({
                'url' : '/seo/manage/flush-meta-cache',
                'type': 'post',
                success: function(data) {
                    $('#flushCache').removeClass('btn-warning').removeClass('btn-danger').addClass('btn-success').find('span.fa').removeClass('fa-times').addClass('fa-check');
                },
                error: function() {
                    $('#flushCache').removeClass('btn-warning').removeClass('btn-success').addClass('btn-danger').find('span.fa').removeClass('fa-check').addClass('fa-times');
                }
            });
        });
    });
</script>
rinodung/yii2-shop-cms
modules/seo/views/manage/meta.php
PHP
gpl-3.0
2,021
require 'puppet/util/windows' module Puppet::Util::Windows module SID require 'ffi' extend FFI::Library # missing from Windows::Error ERROR_NONE_MAPPED = 1332 ERROR_INVALID_SID_STRUCTURE = 1337 # Convert an account name, e.g. 'Administrators' into a SID string, # e.g. 'S-1-5-32-544'. The name can be specified as 'Administrators', # 'BUILTIN\Administrators', or 'S-1-5-32-544', and will return the # SID. Returns nil if the account doesn't exist. def name_to_sid(name) sid = name_to_sid_object(name) sid ? sid.to_s : nil end module_function :name_to_sid # Convert an account name, e.g. 'Administrators' into a SID object, # e.g. 'S-1-5-32-544'. The name can be specified as 'Administrators', # 'BUILTIN\Administrators', or 'S-1-5-32-544', and will return the # SID object. Returns nil if the account doesn't exist. def name_to_sid_object(name) # Apparently, we accept a symbol.. name = name.to_s.strip if name # if it's in SID string form, convert to user parsed_sid = Win32::Security::SID.string_to_sid(name) rescue nil parsed_sid ? Win32::Security::SID.new(parsed_sid) : Win32::Security::SID.new(name) rescue nil end module_function :name_to_sid_object # Converts an octet string array of bytes to a SID object, # e.g. [1, 1, 0, 0, 0, 0, 0, 5, 18, 0, 0, 0] is the representation for # S-1-5-18, the local 'SYSTEM' account. # Raises an Error for nil or non-array input. def octet_string_to_sid_object(bytes) if !bytes || !bytes.respond_to?('pack') || bytes.empty? raise Puppet::Util::Windows::Error.new("Octet string must be an array of bytes") end Win32::Security::SID.new(bytes.pack('C*')) end module_function :octet_string_to_sid_object # Convert a SID string, e.g. "S-1-5-32-544" to a name, # e.g. 'BUILTIN\Administrators'. Returns nil if an account # for that SID does not exist. def sid_to_name(value) sid = Win32::Security::SID.new(Win32::Security::SID.string_to_sid(value)) if sid.domain and sid.domain.length > 0 "#{sid.domain}\\#{sid.account}" else sid.account end rescue nil end module_function :sid_to_name # http://stackoverflow.com/a/1792930 - 68 bytes, 184 characters in a string MAXIMUM_SID_STRING_LENGTH = 184 # Convert a SID pointer to a SID string, e.g. "S-1-5-32-544". def sid_ptr_to_string(psid) if ! psid.instance_of?(FFI::Pointer) || IsValidSid(psid) == FFI::WIN32_FALSE raise Puppet::Util::Windows::Error.new("Invalid SID") end sid_string = nil FFI::MemoryPointer.new(:pointer, 1) do |buffer_ptr| if ConvertSidToStringSidW(psid, buffer_ptr) == FFI::WIN32_FALSE raise Puppet::Util::Windows::Error.new("Failed to convert binary SID") end buffer_ptr.read_win32_local_pointer do |wide_string_ptr| if wide_string_ptr.null? raise Puppet::Error.new("ConvertSidToStringSidW failed to allocate buffer for sid") end sid_string = wide_string_ptr.read_arbitrary_wide_string_up_to(MAXIMUM_SID_STRING_LENGTH) end end sid_string end module_function :sid_ptr_to_string # Convert a SID string, e.g. "S-1-5-32-544" to a pointer (containing the # address of the binary SID structure). The returned value can be used in # Win32 APIs that expect a PSID, e.g. IsValidSid. The account for this # SID may or may not exist. 
def string_to_sid_ptr(string_sid, &block) FFI::MemoryPointer.from_string_to_wide_string(string_sid) do |lpcwstr| FFI::MemoryPointer.new(:pointer, 1) do |sid_ptr_ptr| if ConvertStringSidToSidW(lpcwstr, sid_ptr_ptr) == FFI::WIN32_FALSE raise Puppet::Util::Windows::Error.new("Failed to convert string SID: #{string_sid}") end sid_ptr_ptr.read_win32_local_pointer do |sid_ptr| yield sid_ptr end end end # yielded sid_ptr has already had LocalFree called, nothing to return nil end module_function :string_to_sid_ptr # Return true if the string is a valid SID, e.g. "S-1-5-32-544", false otherwise. def valid_sid?(string_sid) valid = false begin string_to_sid_ptr(string_sid) { |ptr| valid = ! ptr.nil? && ! ptr.null? } rescue Puppet::Util::Windows::Error => e raise if e.code != ERROR_INVALID_SID_STRUCTURE end valid end module_function :valid_sid? ffi_convention :stdcall # http://msdn.microsoft.com/en-us/library/windows/desktop/aa379151(v=vs.85).aspx # BOOL WINAPI IsValidSid( # _In_ PSID pSid # ); ffi_lib :advapi32 attach_function_private :IsValidSid, [:pointer], :win32_bool # http://msdn.microsoft.com/en-us/library/windows/desktop/aa376399(v=vs.85).aspx # BOOL ConvertSidToStringSid( # _In_ PSID Sid, # _Out_ LPTSTR *StringSid # ); ffi_lib :advapi32 attach_function_private :ConvertSidToStringSidW, [:pointer, :pointer], :win32_bool # http://msdn.microsoft.com/en-us/library/windows/desktop/aa376402(v=vs.85).aspx # BOOL WINAPI ConvertStringSidToSid( # _In_ LPCTSTR StringSid, # _Out_ PSID *Sid # ); ffi_lib :advapi32 attach_function_private :ConvertStringSidToSidW, [:lpcwstr, :pointer], :win32_bool end end
dylanratcliffe/puppet-retrospec
vendor/gems/puppet-3.7.3/lib/puppet/util/windows/sid.rb
Ruby
agpl-3.0
5,502
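octet_string_to_sid_object above hands the raw byte array straight to the Win32 SID APIs, e.g. [1, 1, 0, 0, 0, 0, 0, 5, 18, 0, 0, 0] for the local SYSTEM account. For illustration, the same binary layout (revision byte, sub-authority count, 48-bit big-endian identifier authority, then little-endian 32-bit sub-authorities) can be decoded by hand; this sketch is standalone and not part of the Puppet code:

import struct

def octet_string_to_sid_string(octets):
    """Decode a binary SID, given as a list of byte values, into 'S-R-I-S...' form."""
    data = bytes(octets)
    revision = data[0]
    sub_count = data[1]
    authority = int.from_bytes(data[2:8], "big")                   # 48-bit, big-endian
    subs = struct.unpack("<%dI" % sub_count, data[8:8 + 4 * sub_count])
    return "S-%d-%d" % (revision, authority) + "".join("-%d" % s for s in subs)

# The example from the Ruby comments: local 'SYSTEM' account.
print(octet_string_to_sid_string([1, 1, 0, 0, 0, 0, 0, 5, 18, 0, 0, 0]))  # S-1-5-18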
/* * ProActive Parallel Suite(TM): * The Open Source library for parallel and distributed * Workflows & Scheduling, Orchestration, Cloud Automation * and Big Data Analysis on Enterprise Grids & Clouds. * * Copyright (c) 2007 - 2017 ActiveEon * Contact: [email protected] * * This library is free software: you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation: version 3 of * the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * If needed, contact us to obtain a release under GPL Version 2 or 3 * or a different license than the AGPL. */ package functionaltests.service; import static com.google.common.truth.Truth.assertWithMessage; import java.util.List; import org.hibernate.Session; import org.junit.BeforeClass; import org.junit.Test; import org.ow2.proactive.db.SessionWork; import org.ow2.proactive.resourcemanager.db.RMDBManager; import org.ow2.proactive.scheduler.core.db.SchedulerDBManager; /** * The purpose of this class is to test the database schema that is generated * by Hibernate based on the annotations and values that are used. * <p> * For instance, Oracle database does not support table, column or index name * whose length is greater than 30 characters. */ public class DatabaseSchemaTest { private static RMDBManager rmDbManager; private static SchedulerDBManager schedulerDbManager; private static final int NAME_LENGTH_LIMIT = 30; // characters @BeforeClass public static void setUp() { rmDbManager = RMDBManager.createInMemoryRMDBManager(); schedulerDbManager = SchedulerDBManager.createInMemorySchedulerDBManager(); } @Test public void testColumnNamesShouldNotExceedLengthLimit() { runAndCheck("SELECT TABLE_NAME, COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = 'PUBLIC'", new Constraint<Object[]>() { @Override public void check(List<Object[]> databaseObjects) { for (Object[] columns : databaseObjects) { String tableName = (String) columns[0]; String columnName = (String) columns[1]; assertWithMessage("Table " + tableName + ", Column " + columnName) .that(tableName.length()) .isLessThan(NAME_LENGTH_LIMIT + 1); } } }); } @Test public void testIndexNamesShouldNotExceedLengthLimit() { runAndCheck("SELECT INDEX_NAME FROM INFORMATION_SCHEMA.SYSTEM_INDEXINFO " + "WHERE TABLE_SCHEM = 'PUBLIC' AND INDEX_NAME NOT LIKE 'SYS%'", new Constraint<String>() { @Override public void check(List<String> databaseObjects) { for (String indexName : databaseObjects) { assertWithMessage("Index " + indexName).that(indexName.length()) .isLessThan(NAME_LENGTH_LIMIT + 1); } } }); } @Test public void testTableNamesShouldNotExceedLengthLimit() { runAndCheck("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.SYSTEM_TABLES WHERE TABLE_TYPE = 'TABLE'", new Constraint<String>() { @Override public void check(List<String> databaseObjects) { for (String tableName : databaseObjects) { assertWithMessage("Table " + tableName).that(tableName.length()) .isLessThan(NAME_LENGTH_LIMIT + 1); } } }); } private void runAndCheck(final String sqlQuery, Constraint constraint) { SessionWork<List<String>> sessionWork = new SessionWork<List<String>>() { @Override public List<String> 
doInTransaction(Session session) { return session.createSQLQuery(sqlQuery).list(); } }; List<String> result = schedulerDbManager.executeReadOnlyTransaction(sessionWork); constraint.check(result); result = rmDbManager.executeReadTransaction(sessionWork); constraint.check(result); } private interface Constraint<T> { void check(List<T> databaseObjects); } }
laurianed/scheduling
scheduler/scheduler-server/src/test/java/functionaltests/service/DatabaseSchemaTest.java
Java
agpl-3.0
5,298
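The test above exists because Oracle rejects table, column and index names longer than 30 characters, so the Hibernate-generated schema is queried and every identifier is length-checked. A trivial standalone version of the same rule over a list of identifiers; the names below are invented:

NAME_LENGTH_LIMIT = 30  # Oracle's classic identifier limit

def over_limit(identifiers, limit=NAME_LENGTH_LIMIT):
    """Return the identifiers that would be rejected by Oracle."""
    return [name for name in identifiers if len(name) > limit]

print(over_limit(["TASK_DATA", "A_VERY_LONG_GENERATED_INDEX_NAME_FOR_TASK_DATA"]))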
ActiveAdmin.register Group do controller do def permitted_params params.permit! end def find_resource Group.friendly.find(params[:id]) end def collection super.includes(:group_request) end end actions :index, :show, :edit, :update filter :name filter :description filter :payment_plan, as: :select, collection: Group::PAYMENT_PLANS filter :memberships_count filter :created_at filter :is_commercial filter :subdomain scope :parents_only scope :engaged scope :engaged_but_stopped scope :has_members_but_never_engaged scope :visible_on_explore_front_page scope :is_subscription scope :is_donation batch_action :delete_spam do |group_ids| group_ids.each do |group_id| if Group.exists?(group_id) group = Group.find(group_id) user = group.creator || group.admins.first if user UserService.delete_spam(user) end end if Group.exists?(group_id) Group.find(group_id).destroy end end redirect_to admin_groups_path, notice: "#{group_ids.size} spammy groups deleted" end index :download_links => false do selectable_column column :id column :name do |g| simple_format(g.full_name.sub(' - ', "\n \n> ")) end column :contact do |g| admin_name = ERB::Util.h(g.requestor_name) admin_email = ERB::Util.h(g.requestor_email) simple_format "#{admin_name} \n &lt;#{admin_email}&gt;" end column "Size", :memberships_count column "Discussions", :discussions_count column "Motions", :motions_count column :created_at column :description, :sortable => :description do |group| group.description end column :is_commercial column :archived_at actions end show do |group| attributes_table do row :group_request group.attributes.each do |k,v| row k, v.inspect end end panel("Group Admins") do table_for group.admins.each do |admin| column :name column :email do |user| if user.email == group.admin_email simple_format "#{mail_to(user.email,user.email)}" else mail_to(user.email,user.email) end end end end panel("Group members") do table_for group.members.each do |member| column :user_id do |user| link_to user.id, admin_user_path(user) end column :name column :email column :deactivated_at end end panel("Subgroups") do table_for group.subgroups.each do |subgroup| column :name do |g| link_to g.name, admin_group_path(g) end column :id end end panel("Pending invitations") do table_for group.pending_invitations.each do |invitation| column :recipient_email column :link do |i| invitation_url(i) end end end if group.archived_at.nil? panel('Archive') do link_to 'Archive this group', archive_admin_group_path(group), method: :post, data: {confirm: "Are you sure you wanna archive #{group.name}, pal?"} end else panel('Unarchive') do link_to 'Unarchive this group', unarchive_admin_group_path(group), method: :post, data: {confirm: "Are you sure you wanna unarchive #{group.name}, pal?"} end end active_admin_comments end form do |f| f.inputs "Details" do f.input :id, :input_html => { :disabled => true } f.input :name, :input_html => { :disabled => true } f.input :description f.input :subdomain f.input :theme, as: :select, collection: Theme.all f.input :max_size f.input :is_commercial f.input :category_id, as: :select, collection: Category.all end f.actions end member_action :archive, :method => :post do group = Group.friendly.find(params[:id]) group.archive! flash[:notice] = "Archived #{group.name}" redirect_to [:admin, :groups] end member_action :unarchive, :method => :post do group = Group.friendly.find(params[:id]) group.unarchive! 
flash[:notice] = "Unarchived #{group.name}" redirect_to [:admin, :groups] end #controller do #def set_pagination #if params[:pagination].blank? #@per_page = 40 #elsif params[:pagination] == 'false' #@per_page = 999999999 #else #@per_page = params[:pagination] #end #end #end end
juliagra/loomio
app/admin/groups.rb
Ruby
agpl-3.0
4,514
/* * ProActive Parallel Suite(TM): * The Open Source library for parallel and distributed * Workflows & Scheduling, Orchestration, Cloud Automation * and Big Data Analysis on Enterprise Grids & Clouds. * * Copyright (c) 2007 - 2017 ActiveEon * Contact: [email protected] * * This library is free software: you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation: version 3 of * the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * If needed, contact us to obtain a release under GPL Version 2 or 3 * or a different license than the AGPL. */ package org.ow2.proactive.scheduler.common.exception; import org.objectweb.proactive.annotation.PublicAPI; /** * Exception generated if a ProActive task is sent to a Java or native task dependence list. * * @author The ProActive Team * @since ProActive Scheduling 0.9 */ @PublicAPI public class DependenceFailedException extends RuntimeException { /** * Create a new instance of DependenceFailedException with the given message. * * @param msg the message to attach. */ public DependenceFailedException(String msg) { super(msg); } }
laurianed/scheduling
scheduler/scheduler-api/src/main/java/org/ow2/proactive/scheduler/common/exception/DependenceFailedException.java
Java
agpl-3.0
1,612
<?php if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point'); /********************************************************************************* * SugarCRM Community Edition is a customer relationship management program developed by * SugarCRM, Inc. Copyright (C) 2004-2012 SugarCRM Inc. * * This program is free software; you can redistribute it and/or modify it under * the terms of the GNU Affero General Public License version 3 as published by the * Free Software Foundation with the addition of the following permission added * to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK * IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY * OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more * details. * * You should have received a copy of the GNU Affero General Public License along with * this program; if not, see http://www.gnu.org/licenses or write to the Free * Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA * 02110-1301 USA. * * You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road, * SW2-130, Cupertino, CA 95014, USA. or at email address [email protected]. * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License version 3. * * In accordance with Section 7(b) of the GNU Affero General Public License version 3, * these Appropriate Legal Notices must retain the display of the "Powered by * SugarCRM" logo. If the display of the logo is not reasonably feasible for * technical reasons, the Appropriate Legal Notices must display the words * "Powered by SugarCRM". ********************************************************************************/ $searchFields['Schedulers'] = array ( 'name' => array( 'query_type'=>'default'), ); ?>
harish-patel/rrd
modules/Schedulers/metadata/SearchFields.php
PHP
agpl-3.0
2,188
// --------------------------------------------------------------------- // // Copyright (C) 2009 - 2015 by the deal.II authors // // This file is part of the deal.II library. // // The deal.II library is free software; you can use it, redistribute // it, and/or modify it under the terms of the GNU Lesser General // Public License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. // The full text of the license can be found in the file LICENSE at // the top level of the deal.II distribution. // // --------------------------------------------------------------------- // test indexing in IndexSet variables for a contiguous range #include "../tests.h" #include <iomanip> #include <iomanip> #include <fstream> #include <cmath> #include <deal.II/base/index_set.h> void test () { IndexSet index_set (20); index_set.add_index (2); index_set.add_index (3); index_set.add_index (4); index_set.add_index (6); index_set.add_index (7); index_set.compress (); index_set.add_index (5); for (unsigned int i=0; i<index_set.n_elements(); ++i) { deallog << index_set.nth_index_in_set(i) << std::endl; AssertThrow (index_set.index_within_set(index_set.nth_index_in_set(i)) == i, ExcInternalError()); } deallog << "OK" << std::endl; for (unsigned int i=0; i<index_set.size(); ++i) if (index_set.is_element (i)) deallog << i << ' ' << index_set.index_within_set(i) << std::endl; } int main() { std::ofstream logfile("output"); deallog.attach(logfile); deallog.depth_console(0); deallog.threshold_double(1.e-10); test (); }
mtezzele/dealii
tests/base/index_set_10.cc
C++
lgpl-2.1
1,709
/* Special .init and .fini section support for 64 bit S/390. Copyright (C) 2001 Free Software Foundation, Inc. Contributed by Martin Schwidefsky ([email protected]). This file is part of the GNU C Library. The GNU C Library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. In addition to the permissions in the GNU Lesser General Public License, the Free Software Foundation gives you unlimited permission to link the compiled version of this file with other programs, and to distribute those programs without any restriction coming from the use of this file. (The Lesser General Public License restrictions do apply in other respects; for example, they cover modification of the file, and distribution when not linked into another program.) The GNU C Library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with the GNU C Library; see the file COPYING.LIB. If not, see <http://www.gnu.org/licenses/>. */ /* This file is compiled into assembly code which is then munged by a sed script into two files: crti.s and crtn.s. * crti.s puts a function prologue at the beginning of the .init and .fini sections and defines global symbols for those addresses, so they can be called as functions. * crtn.s puts the corresponding function epilogues in the .init and .fini sections. */ __asm__ ("\ \n\ #include \"defs.h\"\n\ \n\ /*@HEADER_ENDS*/\n\ \n\ /*@TESTS_BEGIN*/\n\ \n\ /*@TESTS_END*/\n\ \n\ /*@_init_PROLOG_BEGINS*/\n\ \n\ .section .init\n\ #NO_APP\n\ .align 4\n\ .globl _init\n\ .type _init,@function\n\ _init:\n\ # leaf function 0\n\ # automatics 0\n\ # outgoing args 0\n\ # need frame pointer 0\n\ # call alloca 0\n\ # has varargs 0\n\ # incoming args (stack) 0\n\ # function length 36\n\ STMG 6,15,48(15)\n\ LGR 1,15\n\ AGHI 15,-160\n\ STG 1,0(15)\n\ LARL 12,_GLOBAL_OFFSET_TABLE_\n\ BRASL 14,__pthread_initialize_minimal@PLT\n\ LARL 1,__gmon_start__@GOTENT\n\ LG 1,0(1)\n\ LTGR 1,1\n\ JE .L22\n\ BASR 14,1\n\ .L22:\n\ #APP\n\ .align 4,0x07\n\ END_INIT\n\ \n\ /*@_init_PROLOG_ENDS*/\n\ \n\ /*@_init_EPILOG_BEGINS*/\n\ .align 4\n\ .section .init\n\ #NO_APP\n\ .align 4\n\ LG 4,272(15)\n\ LMG 6,15,208(15)\n\ BR 4\n\ #APP\n\ END_INIT\n\ \n\ /*@_init_EPILOG_ENDS*/\n\ \n\ /*@_fini_PROLOG_BEGINS*/\n\ .section .fini\n\ #NO_APP\n\ .align 4\n\ .globl _fini\n\ .type _fini,@function\n\ _fini:\n\ # leaf function 0\n\ # automatics 0\n\ # outgoing args 0\n\ # need frame pointer 0\n\ # call alloca 0\n\ # has varargs 0\n\ # incoming args (stack) 0\n\ # function length 30\n\ STMG 6,15,48(15)\n\ LGR 1,15\n\ AGHI 15,-160\n\ STG 1,0(15)\n\ LARL 12,_GLOBAL_OFFSET_TABLE_\n\ #APP\n\ .align 4,0x07\n\ END_FINI\n\ \n\ /*@_fini_PROLOG_ENDS*/\n\ \n\ /*@_fini_EPILOG_BEGINS*/\n\ .align 4\n\ .section .fini\n\ #NO_APP\n\ .align 4\n\ LG 4,272(15)\n\ LMG 6,15,208(15)\n\ BR 4\n\ #APP\n\ END_FINI\n\ \n\ /*@_fini_EPILOG_ENDS*/\n\ \n\ /*@TRAILER_BEGINS*/\n\ ");
hjl-tools/uClibc
libpthread/linuxthreads/sysdeps/unix/sysv/linux/s390/s390-64/pt-initfini.c
C
lgpl-2.1
3,573
/*************************************************************************** * Copyright (c) 2020 FreeCAD Developers * * Author: Uwe Stöhr <[email protected]> * * Based on src/Mod/FEM/Gui/DlgSettingsFEMImp.cpp * * * * This file is part of the FreeCAD CAx development system. * * * * This library is free software; you can redistribute it and/or * * modify it under the terms of the GNU Library General Public * * License as published by the Free Software Foundation; either * * version 2 of the License, or (at your option) any later version. * * * * This library is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU Library General Public License for more details. * * * * You should have received a copy of the GNU Library General Public * * License along with this library; see the file COPYING.LIB. If not, * * write to the Free Software Foundation, Inc., 59 Temple Place, * * Suite 330, Boston, MA 02111-1307, USA * * * ***************************************************************************/ #include "PreCompiled.h" #include <App/Application.h> #include <Base/Parameter.h> #include <Base/Console.h> #include "DrawGuiUtil.h" #include "PreferencesGui.h" #include "DlgPrefsTechDrawAnnotationImp.h" #include "ui_DlgPrefsTechDrawAnnotation.h" #include <Mod/TechDraw/App/LineGroup.h> using namespace TechDrawGui; using namespace TechDraw; DlgPrefsTechDrawAnnotationImp::DlgPrefsTechDrawAnnotationImp( QWidget* parent ) : PreferencePage( parent ) , ui(new Ui_DlgPrefsTechDrawAnnotationImp) { ui->setupUi(this); ui->pdsbBalloonKink->setUnit(Base::Unit::Length); ui->pdsbBalloonKink->setMinimum(0); // connect the LineGroup the update the tooltip if index changed connect(ui->pcbLineGroup, SIGNAL(currentIndexChanged(int)), this, SLOT(onLineGroupChanged(int))); } DlgPrefsTechDrawAnnotationImp::~DlgPrefsTechDrawAnnotationImp() { // no need to delete child widgets, Qt does it all for us } void DlgPrefsTechDrawAnnotationImp::saveSettings() { ui->cbAutoHoriz->onSave(); ui->cbPrintCenterMarks->onSave(); ui->cbPyramidOrtho->onSave(); ui->cbSectionLineStd->onSave(); ui->cbShowCenterMarks->onSave(); ui->pcbLineGroup->onSave(); ui->pcbBalloonArrow->onSave(); ui->pcbBalloonShape->onSave(); ui->pcbCenterStyle->onSave(); ui->pcbMatting->onSave(); ui->pcbSectionStyle->onSave(); ui->pdsbBalloonKink->onSave(); ui->cbCutSurface->onSave(); ui->pcbHighlightStyle->onSave(); } void DlgPrefsTechDrawAnnotationImp::loadSettings() { //set defaults for Quantity widgets if property not found //Quantity widgets do not use preset value since they are based on //QAbstractSpinBox double kinkDefault = 5.0; ui->pdsbBalloonKink->setValue(kinkDefault); // re-read the available LineGroup files ui->pcbLineGroup->clear(); std::string lgFileName = Preferences::lineGroupFile(); std::string lgRecord = LineGroup::getGroupNamesFromFile(lgFileName); // split collected groups std::stringstream ss(lgRecord); std::vector<std::string> lgNames; while (std::getline(ss, lgRecord, ',')) { lgNames.push_back(lgRecord); } // fill the combobox with the found names for (auto it = lgNames.begin(); it < lgNames.end(); ++it) { ui->pcbLineGroup->addItem(tr((*it).c_str())); } ui->cbAutoHoriz->onRestore(); ui->cbPrintCenterMarks->onRestore(); ui->cbPyramidOrtho->onRestore(); ui->cbSectionLineStd->onRestore(); ui->cbShowCenterMarks->onRestore(); ui->pcbLineGroup->onRestore(); ui->pcbBalloonArrow->onRestore(); ui->pcbBalloonShape->onRestore(); 
ui->pcbCenterStyle->onRestore(); ui->pcbMatting->onRestore(); ui->pcbSectionStyle->onRestore(); ui->pdsbBalloonKink->onRestore(); ui->cbCutSurface->onRestore(); ui->pcbHighlightStyle->onRestore(); DrawGuiUtil::loadArrowBox(ui->pcbBalloonArrow); ui->pcbBalloonArrow->setCurrentIndex(prefBalloonArrow()); } /** * Sets the strings of the subwidgets using the current language. */ void DlgPrefsTechDrawAnnotationImp::changeEvent(QEvent *e) { if (e->type() == QEvent::LanguageChange) { saveSettings(); ui->retranslateUi(this); loadSettings(); } else { QWidget::changeEvent(e); } } int DlgPrefsTechDrawAnnotationImp::prefBalloonArrow(void) const { return Preferences::balloonArrow(); } /** * Updates the tooltip of the LineGroup combobox */ void DlgPrefsTechDrawAnnotationImp::onLineGroupChanged(int index) { if (index == -1) { // there is no valid index yet ui->pcbLineGroup->setToolTip(QString::fromStdString("Please select a Line Group")); return; } // get the definition of the selected LineGroup (includes the name) std::string lgRecord = LineGroup::getRecordFromFile(Preferences::lineGroupFile(), index); std::stringstream ss(lgRecord); std::vector<std::string> lgNames; while (std::getline(ss, lgRecord, ',')) { lgNames.push_back(lgRecord); } // format the tooltip std::stringstream TooltipText; TooltipText << lgNames.at(0).substr(1) << " defines these line widths:\n" << "thin: " << lgNames.at(1) << "\n" << "graphic: " << lgNames.at(2) << "\n" << "thick: " << lgNames.at(3); ui->pcbLineGroup->setToolTip(QString::fromStdString(TooltipText.str())); } #include <Mod/TechDraw/Gui/moc_DlgPrefsTechDrawAnnotationImp.cpp>
sanguinariojoe/FreeCAD
src/Mod/TechDraw/Gui/DlgPrefsTechDrawAnnotationImp.cpp
C++
lgpl-2.1
6,224
using System; using Microsoft.VisualStudio.TestTools.UnitTesting; using System.Collections; using FluentAssertions; using Kentor.AuthServices.Internal; namespace Kentor.AuthServices.Tests.Internal { [TestClass] public class EnumeratorTests { [TestMethod] public void Enumerator_AsGeneric() { IEnumerable src = new int[] { 1, 2 }; var subject = src.GetEnumerator().AsGeneric<int>(); subject.MoveNext().Should().BeTrue(); subject.Current.Should().Be(1); subject.MoveNext().Should().BeTrue(); subject.Current.Should().Be(2); subject.MoveNext().Should().BeFalse(); subject.Reset(); subject.MoveNext().Should().BeTrue(); subject.Current.Should().Be(1); ((IEnumerator)subject).Current.Should().Be(1); } } }
nate-impartner/authservices
Kentor.AuthServices.Tests/Internal/EnumeratorTests.cs
C#
lgpl-3.0
889
/* PCSX2 - PS2 Emulator for PCs * Copyright (C) 2002-2010 PCSX2 Dev Team * * PCSX2 is free software: you can redistribute it and/or modify it under the terms * of the GNU Lesser General Public License as published by the Free Software Found- * ation, either version 3 of the License, or (at your option) any later version. * * PCSX2 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR * PURPOSE. See the GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along with PCSX2. * If not, see <http://www.gnu.org/licenses/>. */ #include "PrecompiledHeader.h" #include "wxAppWithHelpers.h" #include "ThreadingInternal.h" #include "PersistentThread.h" DEFINE_EVENT_TYPE( pxEvt_DeleteObject ); DEFINE_EVENT_TYPE( pxEvt_DeleteThread ); DEFINE_EVENT_TYPE( pxEvt_StartIdleEventTimer ); DEFINE_EVENT_TYPE( pxEvt_InvokeAction ); DEFINE_EVENT_TYPE( pxEvt_SynchronousCommand ); IMPLEMENT_DYNAMIC_CLASS( pxSimpleEvent, wxEvent ) ConsoleLogSource_App::ConsoleLogSource_App() { static const TraceLogDescriptor myDesc = { L"AppEvents", L"App Events", pxLt("Includes idle event processing and some other uncommon event usages.") }; m_Descriptor = &myDesc; } ConsoleLogSource_App pxConLog_App; void BaseDeletableObject::DoDeletion() { wxAppWithHelpers* app = wxDynamicCast( wxApp::GetInstance(), wxAppWithHelpers ); pxAssert( app != NULL ); app->DeleteObject( *this ); } // -------------------------------------------------------------------------------------- // SynchronousActionState Implementations // -------------------------------------------------------------------------------------- void SynchronousActionState::SetException( const BaseException& ex ) { m_exception = ex.Clone(); } void SynchronousActionState::SetException( BaseException* ex ) { if( !m_posted ) { m_exception = ex; } else if( wxTheApp ) { // transport the exception to the main thread, since the message is fully // asynchronous, or has already entered an asynchronous state. Message is sent // as a non-blocking action since proper handling of user errors on async messages // is *usually* to log/ignore it (hah), or to suspend emulation and issue a dialog // box to the user. 
pxExceptionEvent ev( ex ); wxTheApp->AddPendingEvent( ev ); } } void SynchronousActionState::RethrowException() const { if( m_exception ) m_exception->Rethrow(); } int SynchronousActionState::WaitForResult() { m_sema.WaitNoCancel(); RethrowException(); return return_value; } int SynchronousActionState::WaitForResult_NoExceptions() { m_sema.WaitNoCancel(); return return_value; } void SynchronousActionState::PostResult( int res ) { return_value = res; PostResult(); } void SynchronousActionState::ClearResult() { m_posted = false; m_exception = NULL; } void SynchronousActionState::PostResult() { if( m_posted ) return; m_posted = true; m_sema.Post(); } // -------------------------------------------------------------------------------------- // pxActionEvent Implementations // -------------------------------------------------------------------------------------- IMPLEMENT_DYNAMIC_CLASS( pxActionEvent, wxEvent ) pxActionEvent::pxActionEvent( SynchronousActionState* sema, int msgtype ) : wxEvent( 0, msgtype ) { m_state = sema; } pxActionEvent::pxActionEvent( SynchronousActionState& sema, int msgtype ) : wxEvent( 0, msgtype ) { m_state = &sema; } pxActionEvent::pxActionEvent( const pxActionEvent& src ) : wxEvent( src ) { m_state = src.m_state; } void pxActionEvent::SetException( const BaseException& ex ) { SetException( ex.Clone() ); } void pxActionEvent::SetException( BaseException* ex ) { const wxString& prefix( pxsFmt(L"(%s) ", GetClassInfo()->GetClassName()) ); ex->DiagMsg() = prefix + ex->DiagMsg(); if( !m_state ) { ScopedExcept exptr( ex ); // auto-delete it after handling. ex->Rethrow(); } m_state->SetException( ex ); } // -------------------------------------------------------------------------------------- // pxSynchronousCommandEvent // -------------------------------------------------------------------------------------- IMPLEMENT_DYNAMIC_CLASS( pxSynchronousCommandEvent, wxCommandEvent ) pxSynchronousCommandEvent::pxSynchronousCommandEvent(SynchronousActionState* sema, wxEventType commandType, int winid) : wxCommandEvent( pxEvt_SynchronousCommand, winid ) { m_sync = sema; m_realEvent = commandType; } pxSynchronousCommandEvent::pxSynchronousCommandEvent(SynchronousActionState& sema, wxEventType commandType, int winid) : wxCommandEvent( pxEvt_SynchronousCommand ) { m_sync = &sema; m_realEvent = commandType; } pxSynchronousCommandEvent::pxSynchronousCommandEvent(SynchronousActionState* sema, const wxCommandEvent& evt ) : wxCommandEvent( evt ) { m_sync = sema; m_realEvent = evt.GetEventType(); SetEventType( pxEvt_SynchronousCommand ); } pxSynchronousCommandEvent::pxSynchronousCommandEvent(SynchronousActionState& sema, const wxCommandEvent& evt ) : wxCommandEvent( evt ) { m_sync = &sema; m_realEvent = evt.GetEventType(); SetEventType( pxEvt_SynchronousCommand ); } pxSynchronousCommandEvent::pxSynchronousCommandEvent( const pxSynchronousCommandEvent& src ) : wxCommandEvent( src ) { m_sync = src.m_sync; m_realEvent = src.m_realEvent; } void pxSynchronousCommandEvent::SetException( const BaseException& ex ) { if( !m_sync ) ex.Rethrow(); m_sync->SetException( ex ); } void pxSynchronousCommandEvent::SetException( BaseException* ex ) { if( !m_sync ) { ScopedExcept exptr( ex ); // auto-delete it after handling. 
ex->Rethrow(); } m_sync->SetException( ex ); } // -------------------------------------------------------------------------------------- // pxRpcEvent // -------------------------------------------------------------------------------------- // Unlike pxPingEvent, the Semaphore belonging to this event is typically posted when the // invoked method is completed. If the method can be executed in non-blocking fashion then // it should leave the semaphore postback NULL. // class pxRpcEvent : public pxActionEvent { DECLARE_DYNAMIC_CLASS_NO_ASSIGN(pxRpcEvent) typedef pxActionEvent _parent; protected: void (*m_Method)(); public: virtual ~pxRpcEvent() throw() { } virtual pxRpcEvent *Clone() const { return new pxRpcEvent(*this); } explicit pxRpcEvent( void (*method)()=NULL, SynchronousActionState* sema=NULL ) : pxActionEvent( sema ) { m_Method = method; } explicit pxRpcEvent( void (*method)(), SynchronousActionState& sema ) : pxActionEvent( sema ) { m_Method = method; } pxRpcEvent( const pxRpcEvent& src ) : pxActionEvent( src ) { m_Method = src.m_Method; } void SetMethod( void (*method)() ) { m_Method = method; } protected: void InvokeEvent() { if( m_Method ) m_Method(); } }; IMPLEMENT_DYNAMIC_CLASS( pxRpcEvent, pxActionEvent ) // -------------------------------------------------------------------------------------- // pxExceptionEvent implementations // -------------------------------------------------------------------------------------- pxExceptionEvent::pxExceptionEvent( const BaseException& ex ) { m_except = ex.Clone(); } void pxExceptionEvent::InvokeEvent() { ScopedExcept deleteMe( m_except ); if( deleteMe ) deleteMe->Rethrow(); } // -------------------------------------------------------------------------------------- // wxAppWithHelpers Implementation // -------------------------------------------------------------------------------------- // // TODO : Ping dispatch and IdleEvent dispatch can be unified into a single dispatch, which // would mean checking only one list of events per idle event, instead of two. (ie, ping // events can be appended to the idle event list, instead of into their own custom list). // IMPLEMENT_DYNAMIC_CLASS( wxAppWithHelpers, wxApp ) // Posts a method to the main thread; non-blocking. Post occurs even when called from the // main thread. void wxAppWithHelpers::PostMethod( FnType_Void* method ) { PostEvent( pxRpcEvent( method ) ); } // Posts a method to the main thread; non-blocking. Post occurs even when called from the // main thread. void wxAppWithHelpers::PostIdleMethod( FnType_Void* method ) { pxRpcEvent evt( method ); AddIdleEvent( evt ); } // Invokes the specified void method, or posts the method to the main thread if the calling // thread is not Main. Action is blocking. For non-blocking method execution, use // AppRpc_TryInvokeAsync. // // This function works something like setjmp/longjmp, in that the return value indicates if the // function actually executed the specified method or not. // // Returns: // FALSE if the method was not invoked (meaning this IS the main thread!) // TRUE if the method was invoked. // bool wxAppWithHelpers::Rpc_TryInvoke( FnType_Void* method ) { if( wxThread::IsMain() ) return false; SynchronousActionState sync; PostEvent( pxRpcEvent( method, sync ) ); sync.WaitForResult(); return true; } // Invokes the specified void method, or posts the method to the main thread if the calling // thread is not Main. Action is non-blocking (asynchronous). For blocking method execution, // use AppRpc_TryInvoke. 
// // This function works something like setjmp/longjmp, in that the return value indicates if the // function actually executed the specified method or not. // // Returns: // FALSE if the method was not posted to the main thread (meaning this IS the main thread!) // TRUE if the method was posted. // bool wxAppWithHelpers::Rpc_TryInvokeAsync( FnType_Void* method ) { if( wxThread::IsMain() ) return false; PostEvent( pxRpcEvent( method ) ); return true; } void wxAppWithHelpers::ProcessMethod( FnType_Void* method ) { if( wxThread::IsMain() ) { method(); return; } SynchronousActionState sync; PostEvent( pxRpcEvent( method, sync ) ); sync.WaitForResult(); } void wxAppWithHelpers::PostEvent( const wxEvent& evt ) { // Const Cast is OK! // Truth is, AddPendingEvent should be a const-qualified parameter, as // it makes an immediate clone copy of the event -- but wxWidgets // fails again in structured C/C++ design design. So I'm forcing it as such // here. -- air _parent::AddPendingEvent( const_cast<wxEvent&>(evt) ); } bool wxAppWithHelpers::ProcessEvent( wxEvent& evt ) { // Note: We can't do an automatic blocking post of the message here, because wxWidgets // isn't really designed for it (some events return data to the caller via the event // struct, and posting the event would require a temporary clone, where changes would // be lost). AffinityAssert_AllowFrom_MainUI(); return _parent::ProcessEvent( evt ); } bool wxAppWithHelpers::ProcessEvent( wxEvent* evt ) { AffinityAssert_AllowFrom_MainUI(); ScopedPtr<wxEvent> deleteMe( evt ); return _parent::ProcessEvent( *deleteMe ); } bool wxAppWithHelpers::ProcessEvent( pxActionEvent& evt ) { if( wxThread::IsMain() ) return _parent::ProcessEvent( evt ); else { SynchronousActionState sync; evt.SetSyncState( sync ); AddPendingEvent( evt ); sync.WaitForResult(); return true; } } bool wxAppWithHelpers::ProcessEvent( pxActionEvent* evt ) { if( wxThread::IsMain() ) { ScopedPtr<wxEvent> deleteMe( evt ); return _parent::ProcessEvent( *deleteMe ); } else { SynchronousActionState sync; evt->SetSyncState( sync ); AddPendingEvent( *evt ); sync.WaitForResult(); return true; } } void wxAppWithHelpers::CleanUp() { // I'm pretty sure the message pump is dead by now, which means we need to run through // idle event list by hand and process the pending Deletion messages (all others can be // ignored -- it's only deletions we want handled, and really we *could* ignore those too // but I like to be tidy. -- air //IdleEventDispatcher( "CleanUp" ); //DeletionDispatcher(); _parent::CleanUp(); } // Executes the event with exception handling. If the event throws an exception, the exception // will be neatly packaged and transported back to the thread that posted the event. // This function is virtual, however overloading it is not recommended. Derrived classes // should overload InvokeEvent() instead. 
void pxActionEvent::_DoInvokeEvent() { AffinityAssert_AllowFrom_MainUI(); try { InvokeEvent(); } catch( BaseException& ex ) { SetException( ex ); } catch( std::runtime_error& ex ) { SetException( new Exception::RuntimeError( ex ) ); } if( m_state ) m_state->PostResult(); } void wxAppWithHelpers::OnSynchronousCommand( pxSynchronousCommandEvent& evt ) { AffinityAssert_AllowFrom_MainUI(); pxAppLog.Write(L"(App) Executing command event synchronously..."); evt.SetEventType( evt.GetRealEventType() ); try { ProcessEvent( evt ); } catch( BaseException& ex ) { evt.SetException( ex ); } catch( std::runtime_error& ex ) { evt.SetException( new Exception::RuntimeError( ex, evt.GetClassInfo()->GetClassName() ) ); } if( Semaphore* sema = evt.GetSemaphore() ) sema->Post(); } void wxAppWithHelpers::AddIdleEvent( const wxEvent& evt ) { ScopedLock lock( m_IdleEventMutex ); if( m_IdleEventQueue.size() == 0 ) PostEvent( pxSimpleEvent( pxEvt_StartIdleEventTimer ) ); m_IdleEventQueue.push_back( evt.Clone() ); } void wxAppWithHelpers::OnStartIdleEventTimer( wxEvent& evt ) { ScopedLock lock( m_IdleEventMutex ); if( m_IdleEventQueue.size() != 0 ) m_IdleEventTimer.Start( 100, true ); } void wxAppWithHelpers::IdleEventDispatcher( const wxChar* action ) { // Recursion is possible thanks to modal dialogs being issued from the idle event handler. // (recursion shouldn't hurt anything anyway, since the node system re-creates the iterator // on each pass) //static int __guard=0; //RecursionGuard guard(__guard); //if( !pxAssertDev(!guard.IsReentrant(), "Re-entrant call to IdleEventdispatcher caught on camera!") ) return; wxEventList postponed; wxEventList::iterator node; ScopedLock lock( m_IdleEventMutex ); while( node = m_IdleEventQueue.begin(), node != m_IdleEventQueue.end() ) { ScopedPtr<wxEvent> deleteMe(*node); m_IdleEventQueue.erase( node ); lock.Release(); if( !Threading::AllowDeletions() && (deleteMe->GetEventType() == pxEvt_DeleteThread) ) { // Threads that have active semaphores or mutexes (other threads are waiting on them) cannot // be deleted because those mutex/sema objects will become invalid and cause the pending // thread to crash. So we disallow deletions when those waits are in action, and continue // to postpone the deletion of the thread until such time that it is safe. pxThreadLog.Write( ((pxThread*)((wxCommandEvent*)deleteMe.GetPtr())->GetClientData())->GetName(), L"Deletion postponed due to mutex or semaphore dependency." ); postponed.push_back(deleteMe.DetachPtr()); } else { pxAppLog.Write( L"(AppIdleQueue%s) Dispatching event '%s'", action, deleteMe->GetClassInfo()->GetClassName() ); ProcessEvent( *deleteMe ); // dereference to prevent auto-deletion by ProcessEvent } lock.Acquire(); } m_IdleEventQueue = postponed; if( m_IdleEventQueue.size() > 0 ) pxAppLog.Write( L"(AppIdleQueue%s) %d events postponed due to dependencies.", action, m_IdleEventQueue.size() ); } void wxAppWithHelpers::OnIdleEvent( wxIdleEvent& evt ) { m_IdleEventTimer.Stop(); IdleEventDispatcher(); } void wxAppWithHelpers::OnIdleEventTimeout( wxTimerEvent& evt ) { IdleEventDispatcher( L"[Timeout]" ); } void wxAppWithHelpers::Ping() { pxThreadLog.Write( pxGetCurrentThreadName().c_str(), L"App Event Ping Requested." 
); SynchronousActionState sync; pxActionEvent evt( sync ); AddIdleEvent( evt ); sync.WaitForResult(); } void wxAppWithHelpers::PostCommand( void* clientData, int evtType, int intParam, long longParam, const wxString& stringParam ) { wxCommandEvent evt( evtType ); evt.SetClientData( clientData ); evt.SetInt( intParam ); evt.SetExtraLong( longParam ); evt.SetString( stringParam ); AddPendingEvent( evt ); } void wxAppWithHelpers::PostCommand( int evtType, int intParam, long longParam, const wxString& stringParam ) { PostCommand( NULL, evtType, intParam, longParam, stringParam ); } sptr wxAppWithHelpers::ProcessCommand( void* clientData, int evtType, int intParam, long longParam, const wxString& stringParam ) { SynchronousActionState sync; pxSynchronousCommandEvent evt( sync, evtType ); evt.SetClientData( clientData ); evt.SetInt( intParam ); evt.SetExtraLong( longParam ); evt.SetString( stringParam ); AddPendingEvent( evt ); sync.WaitForResult(); return sync.return_value; } sptr wxAppWithHelpers::ProcessCommand( int evtType, int intParam, long longParam, const wxString& stringParam ) { return ProcessCommand( NULL, evtType, intParam, longParam, stringParam ); } void wxAppWithHelpers::PostAction( const pxActionEvent& evt ) { PostEvent( evt ); } void wxAppWithHelpers::ProcessAction( pxActionEvent& evt ) { if( !wxThread::IsMain() ) { SynchronousActionState sync; evt.SetSyncState( sync ); AddPendingEvent( evt ); sync.WaitForResult(); } else evt._DoInvokeEvent(); } void wxAppWithHelpers::DeleteObject( BaseDeletableObject& obj ) { pxAssert( !obj.IsBeingDeleted() ); wxCommandEvent evt( pxEvt_DeleteObject ); evt.SetClientData( (void*)&obj ); AddIdleEvent( evt ); } void wxAppWithHelpers::DeleteThread( pxThread& obj ) { pxThreadLog.Write(obj.GetName(), L"Scheduling for deletion..."); wxCommandEvent evt( pxEvt_DeleteThread ); evt.SetClientData( (void*)&obj ); AddIdleEvent( evt ); } typedef void (wxEvtHandler::*pxInvokeActionEventFunction)(pxActionEvent&); bool wxAppWithHelpers::OnInit() { #define pxActionEventHandler(func) \ (wxObjectEventFunction)(wxEventFunction)wxStaticCastEvent(pxInvokeActionEventFunction, &func ) Connect( pxEvt_SynchronousCommand, pxSynchronousEventHandler (wxAppWithHelpers::OnSynchronousCommand) ); Connect( pxEvt_InvokeAction, pxActionEventHandler (wxAppWithHelpers::OnInvokeAction) ); Connect( pxEvt_StartIdleEventTimer, wxEventHandler (wxAppWithHelpers::OnStartIdleEventTimer) ); Connect( pxEvt_DeleteObject, wxCommandEventHandler (wxAppWithHelpers::OnDeleteObject) ); Connect( pxEvt_DeleteThread, wxCommandEventHandler (wxAppWithHelpers::OnDeleteThread) ); Connect( wxEVT_IDLE, wxIdleEventHandler (wxAppWithHelpers::OnIdleEvent) ); Connect( m_IdleEventTimer.GetId(), wxEVT_TIMER, wxTimerEventHandler(wxAppWithHelpers::OnIdleEventTimeout) ); return _parent::OnInit(); } void wxAppWithHelpers::OnInvokeAction( pxActionEvent& evt ) { evt._DoInvokeEvent(); // wow this is easy! } void wxAppWithHelpers::OnDeleteObject( wxCommandEvent& evt ) { if( evt.GetClientData() == NULL ) return; delete (BaseDeletableObject*)evt.GetClientData(); } // In theory we create a Pcsx2App object which inherit from wxAppWithHelpers, // so Pcsx2App::CreateTraits must be used instead. // // However it doesn't work this way because wxAppWithHelpers constructor will // be called first. This constructor will build some wx objects (here wxTimer) // that require a trait. 
In other words, wxAppWithHelpers::CreateTraits will be // called instead wxAppTraits* wxAppWithHelpers::CreateTraits() { return new Pcsx2AppTraits; } // Threads have their own deletion handler that propagates exceptions thrown by the thread to the UI. // (thus we have a fairly automatic threaded exception system!) void wxAppWithHelpers::OnDeleteThread( wxCommandEvent& evt ) { ScopedPtr<pxThread> thr( (pxThread*)evt.GetClientData() ); if( !thr ) { pxThreadLog.Write( L"null", L"OnDeleteThread: NULL thread object received (and ignored)." ); return; } pxThreadLog.Write(thr->GetName(), wxString(wxString(L"Thread object deleted successfully") + (thr->HasPendingException() ? L" [exception pending!]" : L"")).wc_str() ); thr->RethrowException(); } wxAppWithHelpers::wxAppWithHelpers() : m_IdleEventTimer( this ) { #ifdef __WXMSW__ // This variable assignment ensures that MSVC links in the TLS setup stubs even in // full optimization builds. Without it, DLLs that use TLS won't work because the // FS segment register won't have been initialized by the main exe, due to tls_insurance // being optimized away >_< --air static __threadlocal int tls_insurance = 0; tls_insurance = 1; #endif }
Pistachioman/pcsx2
common/src/Utilities/wxAppWithHelpers.cpp
C++
lgpl-3.0
20,362
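The wxAppWithHelpers code above revolves around one contract: Rpc_TryInvoke returns false when the caller already is the main/UI thread (so it should simply run the method itself), and otherwise posts the method to the main thread and blocks on a semaphore until it has run. A rough Java/Swing analogue of that contract, shown only to restate the idea and not taken from PCSX2, looks like this:

import java.lang.reflect.InvocationTargetException;
import javax.swing.SwingUtilities;

// Sketch of the Rpc_TryInvoke contract on top of Swing's event dispatch thread:
// returns false when the caller is already the UI thread, true when the work
// was marshalled to the UI thread and has completed.
final class UiRpcSketch {
    static boolean tryInvoke(Runnable work) throws InterruptedException, InvocationTargetException {
        if (SwingUtilities.isEventDispatchThread()) {
            return false; // caller is the UI thread; it should run the work directly
        }
        SwingUtilities.invokeAndWait(work); // blocks until the EDT has executed it
        return true;
    }
}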
using System; using System.Collections.Generic; using System.Linq; using System.Xml.Linq; using SolrNet.Commands.Cores; namespace SolrNet.Impl { /// <summary> /// Solr core administration commands. /// </summary> /// <seealso href="http://wiki.apache.org/solr/CoreAdmin"/> public class SolrCoreAdmin : ISolrCoreAdmin { private readonly ISolrConnection connection; private readonly ISolrHeaderResponseParser headerParser; private readonly ISolrStatusResponseParser resultParser; /// <summary> /// Initializes a new instance of the <see cref="SolrCoreAdmin"/> class. /// </summary> public SolrCoreAdmin(ISolrConnection connection, ISolrHeaderResponseParser headerParser, ISolrStatusResponseParser resultParser) { this.connection = connection; this.headerParser = headerParser; this.resultParser = resultParser; } /// <summary> /// The ALIAS action establishes an additional name by which a core may be referenced. /// Subsequent actions may use the core's original name or any of its aliases. /// </summary> /// <remarks> /// This action is still considered experimental. /// </remarks> /// <param name="coreName">The name or alias of an existing core.</param> /// <param name="otherName">The additional name by which this core should be known.</param> public ResponseHeader Alias(string coreName, string otherName) { return SendAndParseHeader(new AliasCommand(coreName, otherName)); } /// <summary> /// The CREATE action creates a new core and registers it. If persistence is enabled /// (persistent="true" on the &lt;solr&gt; element), the updated configuration for this new core will be /// saved in solr.xml. If a core with the given name already exists, it will continue to handle requests /// while the new core is initializing. When the new core is ready, it will take new requests and the old core /// will be unloaded. /// </summary> /// <param name="coreName">The name of the new core. Same as "name" on the &lt;core&gt; element.</param> /// <param name="instanceDir">The directory where files for this core should be stored. Same as "instanceDir" on the &lt;core&gt; element.</param> public ResponseHeader Create(string coreName, string instanceDir) { return SendAndParseHeader(new CreateCommand(coreName, instanceDir)); } /// <summary> /// The CREATE action creates a new core and registers it. If persistence is enabled /// (persistent="true" on the &lt;solr&gt; element), the updated configuration for this new core will be /// saved in solr.xml. If a core with the given name already exists, it will continue to handle requests /// while the new core is initializing. When the new core is ready, it will take new requests and the old core /// will be unloaded. /// </summary> /// <param name="coreName">The name of the new core. Same as "name" on the &lt;core&gt; element.</param> /// <param name="instanceDir">The directory where files for this SolrCore should be stored. 
Same as "instanceDir" on the &lt;core&gt; element.</param> /// <param name="configFile">(Optional) Name of the config file (solrconfig.xml) relative to "instanceDir".</param> /// <param name="schemaFile">(Optional) Name of the schema file (schema.xml) relative to "instanceDir".</param> /// <param name="dataDir">(Optional) Name of the data directory relative to "instanceDir".</param> public ResponseHeader Create(string coreName, string instanceDir, string configFile, string schemaFile, string dataDir) { return SendAndParseHeader(new CreateCommand(coreName, instanceDir, configFile, schemaFile, dataDir)); } /// <summary> /// The RELOAD action loads a new core from the configuration of an existing, registered core. /// While the new core is initializing, the existing one will continue to handle requests. /// When the new core is ready, it takes over and the old core is unloaded. /// This is useful when you've made changes to a core's configuration on disk, such as adding /// new field definitions. Calling the RELOAD action lets you apply the new configuration without /// having to restart the Web container. /// </summary> /// <param name="coreName">The name of the core to be reloaded.</param> public ResponseHeader Reload(string coreName) { return SendAndParseHeader(new ReloadCommand(coreName)); } /// <summary> /// The RENAME action changes the name of a core. /// </summary> /// <param name="coreName">The name of the core to be renamed.</param> /// <param name="otherName">The new name for the core. If the persistent attribute of &lt;solr&gt; is /// "true", the new name will be written to solr.xml as the "name" attribute /// of the &lt;core&gt; attribute.</param> public ResponseHeader Rename(string coreName, string otherName) { return SendAndParseHeader(new RenameCommand(coreName, otherName)); } /// <summary> /// The STATUS action returns the status of all running cores. /// </summary> public List<CoreResult> Status() { return ParseStatusResponse(Send(new StatusCommand())); } /// <summary> /// The STATUS action returns the status of the named core. /// </summary> /// <param name="coreName">The name of a core, as listed in the "name" attribute of a &lt;core&gt; element in solr.xml.</param> public CoreResult Status(string coreName) { return ParseStatusResponse(Send(new StatusCommand(coreName))).FirstOrDefault(); } /// <summary> /// SWAP atomically swaps the names used to access two existing cores. /// This can be used to swap new content into production. The prior core /// remains available and can be swapped back, if necessary. Each core will /// be known by the name of the other, after the swap. /// </summary> /// <param name="coreName">The name of one of the cores to be swapped.</param> /// <param name="otherName">The name of one of the cores to be swapped.</param> public ResponseHeader Swap(string coreName, string otherName) { return SendAndParseHeader(new SwapCommand(coreName, otherName)); } /// <summary> /// The UNLOAD action removes a core from Solr. Active requests will /// continue to be processed, but no new requests will be sent to the named core. /// If a core is registered under more than one name, only the given name is removed. /// </summary> /// <param name="coreName">The name of the core to be to be removed. 
If the persistent /// attribute of &lt;solr&gt; is set to "true", the &lt;core&gt; element /// with this "name" attribute will be removed from solr.xml.</param> public ResponseHeader Unload(string coreName) { return SendAndParseHeader(new UnloadCommand(coreName, null)); } /// <summary> /// The UNLOAD action removes a core from Solr. Active requests will /// continue to be processed, but no new requests will be sent to the named core. /// If a core is registered under more than one name, only the given name is removed. /// </summary> /// <param name="coreName">The name of the core to be to be removed. If the persistent /// attribute of &lt;solr&gt; is set to "true", the &lt;core&gt; element /// with this "name" attribute will be removed from solr.xml.</param> /// <param name="deleteIndex">If set to <c>true</c> deletes the index once the core is unloaded. (Only available in 3.3 and above).</param> [Obsolete("Use Unload(string coreName, UnloadCommand.Delete delete) instead")] public ResponseHeader Unload(string coreName, bool deleteIndex) { return Unload(coreName, UnloadCommand.Delete.Index); } /// <summary> /// The UNLOAD action removes a core from Solr. Active requests will /// continue to be processed, but no new requests will be sent to the named core. /// If a core is registered under more than one name, only the given name is removed. /// </summary> /// <param name="coreName">The name of the core to be to be removed. If the persistent /// attribute of &lt;solr&gt; is set to "true", the &lt;core&gt; element /// with this "name" attribute will be removed from solr.xml.</param> /// <param name="delete">If not null, deletes the index once the core is unloaded. (Only available in 3.3 and above).</param> public ResponseHeader Unload(string coreName, UnloadCommand.Delete delete) { return SendAndParseHeader(new UnloadCommand(coreName, delete)); } /// <summary> /// Merge indexes using their core names to identify them. /// Requires Solr 3.3+ /// </summary> /// <param name="destinationCore"></param> /// <param name="srcCore"></param> /// <param name="srcCores"></param> public ResponseHeader Merge(string destinationCore, MergeCommand.SrcCore srcCore, params MergeCommand.SrcCore[] srcCores) { return SendAndParseHeader(new MergeCommand(destinationCore, srcCore, srcCores)); } /// <summary> /// Merge indexes using their path to identify them. /// Requires Solr 1.4+ /// </summary> /// <param name="destinationCore"></param> /// <param name="indexDir"></param> /// <param name="indexDirs"></param> public ResponseHeader Merge(string destinationCore, MergeCommand.IndexDir indexDir, params MergeCommand.IndexDir[] indexDirs) { return SendAndParseHeader(new MergeCommand(destinationCore, indexDir, indexDirs)); } /// <summary> /// Sends a command and parses the ResponseHeader. /// </summary> /// <param name="cmd">The CMD.</param> /// <returns></returns> public ResponseHeader SendAndParseHeader(ISolrCommand cmd) { var r = Send(cmd); var xml = XDocument.Parse(r); return headerParser.Parse(xml); } /// <summary> /// Sends the specified Command to Solr. /// </summary> /// <param name="command">The Command to send.</param> /// <returns></returns> public string Send(ISolrCommand command) { return command.Execute(connection); } /// <summary> /// Parses the status response. /// </summary> /// <param name="responseXml">The response XML.</param> /// <returns></returns> protected List<CoreResult> ParseStatusResponse(string responseXml) { var xml = XDocument.Parse( responseXml ); return resultParser.Parse( xml ); } } }
ManpowerNordic/SolrNet
SolrNet/Impl/SolrCoreAdmin.cs
C#
apache-2.0
11,278
//// [tests/cases/compiler/importHelpersSystem.ts] //// //// [a.ts] export class A { } //// [b.ts] import { A } from "./a"; export * from "./a"; export class B extends A { } //// [tslib.d.ts] export declare function __extends(d: Function, b: Function): void; export declare function __assign(t: any, ...sources: any[]): any; export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; export declare function __param(paramIndex: number, decorator: Function): Function; export declare function __metadata(metadataKey: any, metadataValue: any): Function; export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; //// [a.js] System.register([], function (exports_1, context_1) { "use strict"; var A; var __moduleName = context_1 && context_1.id; return { setters: [], execute: function () { A = /** @class */ (function () { function A() { } return A; }()); exports_1("A", A); } }; }); //// [b.js] System.register(["tslib", "./a"], function (exports_1, context_1) { "use strict"; var tslib_1, a_1, B; var __moduleName = context_1 && context_1.id; var exportedNames_1 = { "B": true }; function exportStar_1(m) { var exports = {}; for (var n in m) { if (n !== "default" && !exportedNames_1.hasOwnProperty(n)) exports[n] = m[n]; } exports_1(exports); } return { setters: [ function (tslib_1_1) { tslib_1 = tslib_1_1; }, function (a_1_1) { a_1 = a_1_1; exportStar_1(a_1_1); } ], execute: function () { B = /** @class */ (function (_super) { tslib_1.__extends(B, _super); function B() { return _super !== null && _super.apply(this, arguments) || this; } return B; }(a_1.A)); exports_1("B", B); } }; });
weswigham/TypeScript
tests/baselines/reference/importHelpersSystem.js
JavaScript
apache-2.0
2,227
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.rocketmq.example.simple; import org.apache.rocketmq.client.QueryResult; import org.apache.rocketmq.client.exception.MQClientException; import org.apache.rocketmq.client.producer.DefaultMQProducer; import org.apache.rocketmq.client.producer.SendResult; import org.apache.rocketmq.common.message.Message; import org.apache.rocketmq.common.message.MessageExt; import org.apache.rocketmq.remoting.common.RemotingHelper; public class TestProducer { public static void main(String[] args) throws MQClientException, InterruptedException { DefaultMQProducer producer = new DefaultMQProducer("ProducerGroupName"); producer.start(); for (int i = 0; i < 1; i++) try { { Message msg = new Message("TopicTest1", "TagA", "key113", "Hello world".getBytes(RemotingHelper.DEFAULT_CHARSET)); SendResult sendResult = producer.send(msg); System.out.printf("%s%n", sendResult); QueryResult queryMessage = producer.queryMessage("TopicTest1", "key113", 10, 0, System.currentTimeMillis()); for (MessageExt m : queryMessage.getMessageList()) { System.out.printf("%s%n", m); } } } catch (Exception e) { e.printStackTrace(); } producer.shutdown(); } }
Todd-start/RocketMQ
example/src/main/java/org/apache/rocketmq/example/simple/TestProducer.java
Java
apache-2.0
2,315
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.client.keyvalue; import java.io.IOException; /** * Interface to write to the payload buffer. */ public interface PayloadWriter { /** * Inserts a key and a value into the payload buffer, returns an offset indicating where the key * and value data is stored in payload buffer. * * @param key bytes of key * @param value bytes of value * @return the offset of this key-value pair in payload buffer */ int insert(byte[] key, byte[] value) throws IOException; }
riversand963/alluxio
keyvalue/client/src/main/java/alluxio/client/keyvalue/PayloadWriter.java
Java
apache-2.0
1,017
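The PayloadWriter interface above only fixes the shape of insert(): append the key/value bytes somewhere and report the offset at which the pair starts. A minimal in-memory sketch of an implementation, with a record layout that is an assumption of this example rather than Alluxio's actual format, could be:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import alluxio.client.keyvalue.PayloadWriter;

// Hypothetical in-memory PayloadWriter: each pair is appended as
// [keyLength][valueLength][keyBytes][valueBytes], and insert() returns the
// offset at which the pair begins in the buffer.
public final class InMemoryPayloadWriter implements PayloadWriter {
  private final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  private final DataOutputStream out = new DataOutputStream(buffer);

  @Override
  public int insert(byte[] key, byte[] value) throws IOException {
    int offset = out.size(); // number of bytes written so far
    out.writeInt(key.length);
    out.writeInt(value.length);
    out.write(key);
    out.write(value);
    return offset;
  }

  public byte[] toByteArray() {
    return buffer.toByteArray();
  }
}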
using System; namespace Web.Areas.HelpPage { /// <summary> /// This represents an image sample on the help page. There's a display template named ImageSample associated with this class. /// </summary> public class ImageSample { /// <summary> /// Initializes a new instance of the <see cref="ImageSample"/> class. /// </summary> /// <param name="src">The URL of an image.</param> public ImageSample(string src) { if (src == null) { throw new ArgumentNullException("src"); } Src = src; } public string Src { get; private set; } public override bool Equals(object obj) { ImageSample other = obj as ImageSample; return other != null && Src == other.Src; } public override int GetHashCode() { return Src.GetHashCode(); } public override string ToString() { return Src; } } }
wadewegner/Feedbackful
src/Feedbackful/Web/Areas/HelpPage/SampleGeneration/ImageSample.cs
C#
apache-2.0
1,046
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.datatorrent.contrib.zmq; import java.util.List; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.datatorrent.contrib.helper.CollectorModule; import com.datatorrent.contrib.helper.MessageQueueTestHelper; import com.datatorrent.api.DAG; import com.datatorrent.api.DAG.Locality; import com.datatorrent.api.LocalMode; import com.datatorrent.netlet.util.DTThrowable; /** * */ public class ZeroMQInputOperatorTest { protected static Logger logger = LoggerFactory.getLogger(ZeroMQInputOperatorTest.class); @Test public void testDag() throws InterruptedException, Exception { final int testNum = 3; testHelper(testNum); } protected void testHelper(final int testNum) { LocalMode lma = LocalMode.newInstance(); DAG dag = lma.getDAG(); final ZeroMQMessageGenerator publisher = new ZeroMQMessageGenerator(); publisher.setup(); ZeroMQInputOperator generator = dag.addOperator("Generator", ZeroMQInputOperator.class); final CollectorModule<byte[]> collector = dag.addOperator("Collector", new CollectorModule<byte[]>()); generator.setFilter(""); generator.setUrl("tcp://localhost:5556"); generator.setSyncUrl("tcp://localhost:5557"); dag.addStream("Stream", generator.outputPort, collector.inputPort).setLocality(Locality.CONTAINER_LOCAL); new Thread() { @Override public void run() { try { publisher.generateMessages(testNum); } catch (InterruptedException ex) { logger.debug(ex.toString()); } } }.start(); final LocalMode.Controller lc = lma.getController(); lc.setHeartbeatMonitoringEnabled(false); new Thread("LocalClusterController") { @Override public void run() { long startTms = System.currentTimeMillis(); long timeout = 10000L; try { while (!collector.inputPort.collections.containsKey("collector") && System.currentTimeMillis() - startTms < timeout) { Thread.sleep(500); } Thread.sleep(1000); startTms = System.currentTimeMillis(); while (System.currentTimeMillis() - startTms < timeout) { List<?> list = collector.inputPort.collections.get("collector"); if (list.size() < testNum * 3) { Thread.sleep(10); } else { break; } } } catch (InterruptedException ex) { DTThrowable.rethrow(ex); } finally { logger.debug("Shutting down.."); lc.shutdown(); try { Thread.sleep(1000); } catch (InterruptedException e) { DTThrowable.rethrow(e); } finally { publisher.teardown(); } } } }.start(); lc.run(); // logger.debug("collection size:"+collector.inputPort.collections.size()+" "+collector.inputPort.collections.toString()); MessageQueueTestHelper.validateResults(testNum, collector.inputPort.collections); logger.debug("end of test"); } }
siyuanh/apex-malhar
contrib/src/test/java/com/datatorrent/contrib/zmq/ZeroMQInputOperatorTest.java
Java
apache-2.0
3,923
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.cassandra.utils.btree; import java.util.Comparator; import static org.apache.cassandra.utils.btree.BTree.EMPTY_LEAF; import static org.apache.cassandra.utils.btree.BTree.FAN_SHIFT; import static org.apache.cassandra.utils.btree.BTree.POSITIVE_INFINITY; /** * A class for constructing a new BTree, either from an existing one and some set of modifications * or a new tree from a sorted collection of items. * <p/> * This is a fairly heavy-weight object, so a ThreadLocal instance is created for making modifications to a tree */ final class Builder { private final NodeBuilder rootBuilder = new NodeBuilder(); /** * At the highest level, we adhere to the classic b-tree insertion algorithm: * * 1. Add to the appropriate leaf * 2. Split the leaf if necessary, add the median to the parent * 3. Split the parent if necessary, etc. * * There is one important difference: we don't actually modify the original tree, but copy each node that we * modify. Note that every node on the path to the key being inserted or updated will be modified; this * implies that at a minimum, the root node will be modified for every update, so every root is a "snapshot" * of a tree that can be iterated or sliced without fear of concurrent modifications. * * The NodeBuilder class handles the details of buffering the copied contents of the original tree and * adding in our changes. Since NodeBuilder maintains parent/child references, it also handles parent-splitting * (easy enough, since any node affected by the split will already be copied into a NodeBuilder). * * One other difference from the simple algorithm is that we perform modifications in bulk; * we assume @param source has been sorted, e.g. by BTree.update, so the update of each key resumes where * the previous left off. 
*/ public <V> Object[] update(Object[] btree, Comparator<V> comparator, Iterable<V> source, UpdateFunction<V> updateF) { assert updateF != null; NodeBuilder current = rootBuilder; current.reset(btree, POSITIVE_INFINITY, updateF, comparator); for (V key : source) { while (true) { if (updateF.abortEarly()) { rootBuilder.clear(); return null; } NodeBuilder next = current.update(key); if (next == null) break; // we were in a subtree from a previous key that didn't contain this new key; // retry against the correct subtree current = next; } } // finish copying any remaining keys from the original btree while (true) { NodeBuilder next = current.finish(); if (next == null) break; current = next; } // updating with POSITIVE_INFINITY means that current should be back to the root assert current.isRoot(); Object[] r = current.toNode(); current.clear(); return r; } public <V> Object[] build(Iterable<V> source, UpdateFunction<V> updateF, int size) { assert updateF != null; NodeBuilder current = rootBuilder; // we descend only to avoid wasting memory; in update() we will often descend into existing trees // so here we want to descend also, so we don't have lg max(N) depth in both directions while ((size >>= FAN_SHIFT) > 0) current = current.ensureChild(); current.reset(EMPTY_LEAF, POSITIVE_INFINITY, updateF, null); for (V key : source) current.addNewKey(key); current = current.ascendToRoot(); Object[] r = current.toNode(); current.clear(); return r; } }
lynchlee/play-jmx
src/main/java/org/apache/cassandra/utils/btree/Builder.java
Java
apache-2.0
4,717
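The class comment in the Builder record above describes copy-on-write updates: every node on the path from the root to the modified leaf is copied, so the old root remains a consistent snapshot that can still be read safely. Below is a minimal, hypothetical sketch of that path-copying idea; it is not Cassandra's NodeBuilder, and the class and method names are invented for illustration only (a tiny persistent binary tree stands in for the B-Tree).

// Hypothetical illustration of path copying in a persistent tree: an insert
// copies only the nodes on the root-to-leaf path, so the previous root keeps
// describing a consistent, fully readable snapshot. Not Cassandra's NodeBuilder.
final class PathCopyExample {
    static final class Node {
        final int key;
        final Node left;
        final Node right;

        Node(int key, Node left, Node right) {
            this.key = key;
            this.left = left;
            this.right = right;
        }
    }

    // Returns a new root; shares every subtree that the insert does not touch.
    static Node insert(Node n, int key) {
        if (n == null) {
            return new Node(key, null, null);
        }
        if (key < n.key) {
            return new Node(n.key, insert(n.left, key), n.right);
        }
        if (key > n.key) {
            return new Node(n.key, n.left, insert(n.right, key));
        }
        return n; // key already present: reuse the node unchanged
    }

    public static void main(String[] args) {
        Node v1 = insert(insert(insert(null, 5), 2), 8);
        Node v2 = insert(v1, 3);
        // v1 is untouched and still readable; v2 shares v1's right subtree.
        System.out.println(v1 != v2);             // true: new root per update
        System.out.println(v1.right == v2.right); // true: structural sharing
    }
}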
/*
 * Copyright 2016-present Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.rest.resources;

import com.eclipsesource.json.Json;
import com.eclipsesource.json.JsonArray;
import com.eclipsesource.json.JsonObject;
import com.google.common.collect.ImmutableSet;
import org.glassfish.jersey.client.ClientProperties;
import org.hamcrest.Description;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Before;
import org.junit.Test;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.osgi.TestServiceDirectory;
import org.onlab.packet.IpAddress;
import org.onlab.rest.BaseResource;
import org.onosproject.codec.CodecService;
import org.onosproject.codec.impl.CodecManager;
import org.onosproject.net.mcast.McastRoute;
import org.onosproject.net.mcast.MulticastRouteService;

import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.util.Set;

import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;

/**
 * Unit tests for multicast route REST APIs.
 */
public class MulticastRouteResourceTest extends ResourceTest {

    final MulticastRouteService mockMulticastRouteService =
            createMock(MulticastRouteService.class);
    private McastRoute route1;
    private McastRoute route2;
    private McastRoute route3;

    private void initMcastRouteMocks() {
        IpAddress source1 = IpAddress.valueOf("1.1.1.1");
        IpAddress source2 = IpAddress.valueOf("2.2.2.2");
        IpAddress source3 = IpAddress.valueOf("3.3.3.3");
        IpAddress group = IpAddress.valueOf("224.0.0.1");

        route1 = new McastRoute(source1, group, McastRoute.Type.PIM);
        route2 = new McastRoute(source2, group, McastRoute.Type.IGMP);
        route3 = new McastRoute(source3, group, McastRoute.Type.STATIC);
    }

    @Before
    public void setupTest() {
        final CodecManager codecService = new CodecManager();
        codecService.activate();
        ServiceDirectory testDirectory = new TestServiceDirectory()
                .add(MulticastRouteService.class, mockMulticastRouteService)
                .add(CodecService.class, codecService);
        BaseResource.setServiceDirectory(testDirectory);
    }

    /**
     * Hamcrest matcher to check that a mcast route representation in JSON matches
     * the actual mcast route.
     */
    public static class McastRouteJsonMatcher extends TypeSafeMatcher<JsonObject> {
        private final McastRoute route;
        private String reason = "";

        public McastRouteJsonMatcher(McastRoute mcastRoute) {
            this.route = mcastRoute;
        }

        @Override
        protected boolean matchesSafely(JsonObject jsonMcastRoute) {
            // check source
            String jsonSource = jsonMcastRoute.get("source").asString();
            String source = route.source().toString();
            if (!jsonSource.equals(source)) {
                reason = "Mcast route source was " + jsonSource;
                return false;
            }

            // check group
            String jsonGroup = jsonMcastRoute.get("group").asString();
            String group = route.group().toString();
            if (!jsonGroup.equals(group)) {
                reason = "Mcast route group was " + jsonGroup;
                return false;
            }

            // check type
            String jsonType = jsonMcastRoute.get("type").asString();
            String type = route.type().toString();
            if (!jsonType.equals(type)) {
                reason = "Mcast route type was " + jsonType;
                return false;
            }
            return true;
        }

        @Override
        public void describeTo(Description description) {
            description.appendText(reason);
        }
    }

    private static McastRouteJsonMatcher matchesMcastRoute(McastRoute route) {
        return new McastRouteJsonMatcher(route);
    }

    /**
     * Hamcrest matcher to check that a Mcast route is represented properly in
     * a JSON array of Mcastroutes.
     */
    public static class McastRouteJsonArrayMatcher extends TypeSafeMatcher<JsonArray> {
        private final McastRoute route;
        private String reason = "";

        public McastRouteJsonArrayMatcher(McastRoute mcastRoute) {
            this.route = mcastRoute;
        }

        @Override
        protected boolean matchesSafely(JsonArray json) {
            boolean found = false;
            for (int index = 0; index < json.size(); index++) {
                final JsonObject jsonMcastRoute = json.get(index).asObject();

                final String source = route.source().toString();
                final String group = route.group().toString();
                final String type = route.type().toString();

                final String jsonSource = jsonMcastRoute.get("source").asString();
                final String jsonGroup = jsonMcastRoute.get("group").asString();
                final String jsonType = jsonMcastRoute.get("type").asString();

                if (jsonSource.equals(source) && jsonGroup.equals(group)
                        && jsonType.equals(type)) {
                    found = true;
                    assertThat(jsonMcastRoute, matchesMcastRoute(route));
                }
            }
            return found;
        }

        @Override
        public void describeTo(Description description) {
            description.appendText(reason);
        }
    }

    private static McastRouteJsonArrayMatcher hasMcastRoute(McastRoute route) {
        return new McastRouteJsonArrayMatcher(route);
    }

    /**
     * Tests the results of the REST API GET when there are active mcastroutes.
     */
    @Test
    public void testMcastRoutePopulatedArray() {
        initMcastRouteMocks();
        final Set<McastRoute> mcastRoutes = ImmutableSet.of(route1, route2, route3);
        expect(mockMulticastRouteService.getRoutes()).andReturn(mcastRoutes).anyTimes();
        replay(mockMulticastRouteService);

        final WebTarget wt = target();
        final String response = wt.path("mcast").request().get(String.class);
        final JsonObject result = Json.parse(response).asObject();
        assertThat(result, notNullValue());

        assertThat(result.names(), hasSize(1));
        assertThat(result.names().get(0), is("routes"));
        final JsonArray jsonMcastRoutes = result.get("routes").asArray();
        assertThat(jsonMcastRoutes, notNullValue());
        assertThat(jsonMcastRoutes, hasMcastRoute(route1));
        assertThat(jsonMcastRoutes, hasMcastRoute(route2));
        assertThat(jsonMcastRoutes, hasMcastRoute(route3));
    }

    /**
     * Tests creating a Mcast route with POST.
     */
    @Test
    public void testMcastRoutePost() {
        mockMulticastRouteService.add(anyObject());
        expectLastCall();
        replay(mockMulticastRouteService);

        WebTarget wt = target();
        InputStream jsonStream = MulticastRouteResourceTest.class
                .getResourceAsStream("mcastroute.json");

        Response response = wt.path("mcast/")
                .request(MediaType.APPLICATION_JSON_TYPE)
                .post(Entity.json(jsonStream));
        assertThat(response.getStatus(), is(HttpURLConnection.HTTP_CREATED));
        verify(mockMulticastRouteService);
    }

    /**
     * Tests deletion of a Mcast route with DELETE.
     */
    @Test
    public void testMcastRouteDelete() {
        mockMulticastRouteService.remove(anyObject());
        expectLastCall();
        replay(mockMulticastRouteService);

        WebTarget wt = target().property(
                ClientProperties.SUPPRESS_HTTP_COMPLIANCE_VALIDATION, true);
        InputStream jsonStream = MulticastRouteResourceTest.class
                .getResourceAsStream("mcastroute.json");
        wt.request().method("DELETE", Entity.json(jsonStream));
    }
}
donNewtonAlpha/onos
web/api/src/test/java/org/onosproject/rest/resources/MulticastRouteResourceTest.java
Java
apache-2.0
8,951
// This file is generated automatically by `scripts/build/typings.js`. Please, don't change it.

import { getWeekOfMonthWithOptions } from 'date-fns/fp'
export default getWeekOfMonthWithOptions
BigBoss424/portfolio
v8/development/node_modules/date-fns/fp/getWeekOfMonthWithOptions/index.d.ts
TypeScript
apache-2.0
194
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.tests.extras.byteman; import org.apache.activemq.artemis.api.core.ActiveMQBuffer; import org.apache.activemq.artemis.api.core.client.ServerLocator; import org.apache.activemq.artemis.core.protocol.core.impl.PacketImpl; import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory; import org.apache.activemq.artemis.tests.util.JMSTestBase; import org.jboss.byteman.contrib.bmunit.BMRule; import org.jboss.byteman.contrib.bmunit.BMRules; import org.jboss.byteman.contrib.bmunit.BMUnitRunner; import org.junit.After; import org.junit.Test; import org.junit.runner.RunWith; import javax.jms.Connection; import javax.jms.ExceptionListener; import javax.jms.JMSException; import javax.jms.Message; import javax.jms.MessageConsumer; import javax.jms.MessageProducer; import javax.jms.Queue; import javax.jms.Session; import javax.jms.TextMessage; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @RunWith(BMUnitRunner.class) public class DisconnectOnCriticalFailureTest extends JMSTestBase { private static AtomicBoolean corruptPacket = new AtomicBoolean(false); @After @Override public void tearDown() throws Exception { corruptPacket.set(false); super.tearDown(); } @Test @BMRules( rules = {@BMRule( name = "Corrupt Decoding", targetClass = "org.apache.activemq.artemis.core.protocol.core.impl.PacketDecoder", targetMethod = "decode(byte)", targetLocation = "ENTRY", action = "org.apache.activemq.artemis.tests.extras.byteman.DisconnectOnCriticalFailureTest.doThrow();")}) public void testSendDisconnect() throws Exception { createQueue("queue1"); final Connection producerConnection = nettyCf.createConnection(); final CountDownLatch latch = new CountDownLatch(1); try { producerConnection.setExceptionListener(new ExceptionListener() { @Override public void onException(JMSException e) { latch.countDown(); } }); corruptPacket.set(true); producerConnection.createSession(false, Session.AUTO_ACKNOWLEDGE); assertTrue(latch.await(5, TimeUnit.SECONDS)); } finally { corruptPacket.set(false); if (producerConnection != null) { producerConnection.close(); } } } @Test @BMRules( rules = {@BMRule( name = "Corrupt Decoding", targetClass = "org.apache.activemq.artemis.core.protocol.ClientPacketDecoder", targetMethod = "decode(org.apache.activemq.artemis.api.core.ActiveMQBuffer)", targetLocation = "ENTRY", action = "org.apache.activemq.artemis.tests.extras.byteman.DisconnectOnCriticalFailureTest.doThrow($1);")}) public void testClientDisconnect() throws Exception { Queue q1 = createQueue("queue1"); final Connection connection = nettyCf.createConnection(); final CountDownLatch latch = new CountDownLatch(1); try { 
connection.setExceptionListener(new ExceptionListener() { @Override public void onException(JMSException e) { latch.countDown(); } }); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); MessageProducer producer = session.createProducer(q1); TextMessage m = session.createTextMessage("hello"); producer.send(m); connection.start(); corruptPacket.set(true); MessageConsumer consumer = session.createConsumer(q1); consumer.receive(2000); assertTrue(latch.await(5, TimeUnit.SECONDS)); } finally { corruptPacket.set(false); if (connection != null) { connection.close(); } } } @Test(timeout = 60000) @BMRules( rules = {@BMRule( name = "Corrupt Decoding", targetClass = "org.apache.activemq.artemis.core.protocol.ClientPacketDecoder", targetMethod = "decode(org.apache.activemq.artemis.api.core.ActiveMQBuffer)", targetLocation = "ENTRY", action = "org.apache.activemq.artemis.tests.extras.byteman.DisconnectOnCriticalFailureTest.doThrow($1);")}) public void testClientDisconnectLarge() throws Exception { Queue q1 = createQueue("queue1"); final Connection connection = nettyCf.createConnection(); final CountDownLatch latch = new CountDownLatch(1); ServerLocator locator = ((ActiveMQConnectionFactory)nettyCf).getServerLocator(); int minSize = locator.getMinLargeMessageSize(); StringBuilder builder = new StringBuilder(); for (int i = 0; i < minSize; i++) { builder.append("a"); } try { connection.setExceptionListener(new ExceptionListener() { @Override public void onException(JMSException e) { latch.countDown(); } }); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); MessageProducer producer = session.createProducer(q1); TextMessage m = session.createTextMessage(builder.toString()); producer.send(m); connection.start(); corruptPacket.set(true); MessageConsumer consumer = session.createConsumer(q1); Message lm = consumer.receive(2000); //first receive won't crash because the packet //is SESS_RECEIVE_LARGE_MSG assertNotNull(lm); //second receive will force server to send a //"forced delivery" message, and will cause //the exception to be thrown. lm = consumer.receive(5000); assertNull(lm); assertTrue(latch.await(5, TimeUnit.SECONDS)); } finally { corruptPacket.set(false); if (connection != null) { connection.close(); } } } public static void doThrow(ActiveMQBuffer buff) { byte type = buff.getByte(buff.readerIndex()); if (corruptPacket.get() && type == PacketImpl.SESS_RECEIVE_MSG) { corruptPacket.set(false); throw new IllegalArgumentException("Invalid type: -84"); } } public static void doThrow() { if (corruptPacket.get()) { corruptPacket.set(false); throw new IllegalArgumentException("Invalid type: -84"); } } }
gtully/activemq-artemis
tests/extra-tests/src/test/java/org/apache/activemq/artemis/tests/extras/byteman/DisconnectOnCriticalFailureTest.java
Java
apache-2.0
7,286
// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Flags: --allow-natives-syntax --es-staging

"use strict";

function bar() {
  try {
    unref;
  } catch (e) {
    return (1 instanceof TypeError) && unref();  // Call in tail position!
  }
}

function foo() {
  return bar();  // Call in tail position!
}

%OptimizeFunctionOnNextCall(foo);
foo();
weolar/miniblink49
v8_7_5/test/mjsunit/regress/regress-crbug-624747.js
JavaScript
apache-2.0
468
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.core.compress.hadoopsnappy;

import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.Method;

import org.pentaho.di.core.compress.CompressionOutputStream;
import org.pentaho.di.core.compress.CompressionProvider;

public class HadoopSnappyCompressionOutputStream extends CompressionOutputStream {

  public HadoopSnappyCompressionOutputStream( OutputStream out, CompressionProvider provider ) throws IOException {
    super( getDelegate( out ), provider );
  }

  private static OutputStream getDelegate( OutputStream out ) throws IOException {
    try {
      return getSnappyOutputStream( out );
    } catch ( Exception e ) {
      throw new IOException( e );
    }
  }

  /**
   * Gets an OutputStream that uses the snappy codec and wraps the supplied base output stream, using the default
   * codec buffer size.
   *
   * @param out
   *          the base output stream to wrap around
   * @return an OutputStream that uses the Snappy codec
   *
   * @throws Exception
   *           if snappy is not available or an error occurs during reflection
   */
  public static OutputStream getSnappyOutputStream( OutputStream out ) throws Exception {
    return getSnappyOutputStream( HadoopSnappyCompressionProvider.IO_COMPRESSION_CODEC_SNAPPY_DEFAULT_BUFFERSIZE,
        out );
  }

  /**
   * Gets an OutputStream that uses the snappy codec and wraps the supplied base output stream.
   *
   * @param bufferSize
   *          the buffer size for the codec to use (in bytes)
   * @param out
   *          the base output stream to wrap around
   * @return an OutputStream that uses the Snappy codec
   *
   * @throws Exception
   *           if snappy is not available or an error occurs during reflection
   */
  public static OutputStream getSnappyOutputStream( int bufferSize, OutputStream out ) throws Exception {
    if ( !HadoopSnappyCompressionProvider.isHadoopSnappyAvailable() ) {
      throw new Exception( "Hadoop-snappy does not seem to be available" );
    }

    Object snappyShim = HadoopSnappyCompressionProvider.getActiveSnappyShim();
    Method getSnappyOutputStream =
        snappyShim.getClass().getMethod( "getSnappyOutputStream", int.class, OutputStream.class );
    return (OutputStream) getSnappyOutputStream.invoke( snappyShim, bufferSize, out );
  }
}
IvanNikolaychuk/pentaho-kettle
engine/src/org/pentaho/di/core/compress/hadoopsnappy/HadoopSnappyCompressionOutputStream.java
Java
apache-2.0
3,249
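The Pentaho class above wires in the Snappy codec through reflection so the Hadoop shim is not a compile-time dependency. The sketch below illustrates that reflective-wrapping pattern in isolation; the "shim" class here is a stand-in invented for the example, and GZIPOutputStream is used only so the code runs without Hadoop or Snappy on the classpath.

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.util.zip.GZIPOutputStream;

// Hypothetical sketch: wrap an OutputStream via a method looked up by name at
// runtime, mirroring how the provider calls getSnappyOutputStream on its shim.
final class ReflectiveWrapExample {
    // Stand-in shim; in the real code this would come from the Hadoop provider.
    static final class GzipShim {
        public OutputStream getCompressedOutputStream(int bufferSize, OutputStream out) throws Exception {
            return new GZIPOutputStream(out, bufferSize);
        }
    }

    public static void main(String[] args) throws Exception {
        Object shim = new GzipShim();
        // Look up the wrapping method reflectively, as the provider class does.
        Method wrap = shim.getClass().getMethod("getCompressedOutputStream", int.class, OutputStream.class);
        try (OutputStream wrapped = (OutputStream) wrap.invoke(shim, 4096, new ByteArrayOutputStream())) {
            wrapped.write("hello".getBytes());
        }
    }
}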
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.iec60870.client;

import org.apache.camel.Category;
import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.iec60870.AbstractIecEndpoint;
import org.apache.camel.component.iec60870.ObjectAddress;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.support.DefaultComponent;

import static java.util.Objects.requireNonNull;

/**
 * IEC 60870 supervisory control and data acquisition (SCADA) client using NeoSCADA implementation.
 */
@UriEndpoint(firstVersion = "2.20.0", scheme = "iec60870-client", syntax = "iec60870-client:uriPath",
             title = "IEC 60870 Client", category = { Category.IOT })
public class ClientEndpoint extends AbstractIecEndpoint<ClientConnectionMultiplexor> {

    public ClientEndpoint(final String uri, final DefaultComponent component,
                          final ClientConnectionMultiplexor connection, final ObjectAddress address) {
        super(uri, component, requireNonNull(connection), address);
    }

    @Override
    public Producer createProducer() throws Exception {
        return new ClientProducer(this, getConnection().getConnection());
    }

    @Override
    public Consumer createConsumer(final Processor processor) throws Exception {
        return new ClientConsumer(this, processor, getConnection().getConnection());
    }
}
gnodet/camel
components/camel-iec60870/src/main/java/org/apache/camel/component/iec60870/client/ClientEndpoint.java
Java
apache-2.0
2,220
/**
 * Copyright (c) 2015-present, Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 *
 * @providesModule LayoutAnimation
 * @flow
 */
'use strict';

var PropTypes = require('ReactPropTypes');
var RCTUIManager = require('NativeModules').UIManager;

var createStrictShapeTypeChecker = require('createStrictShapeTypeChecker');
var keyMirror = require('keyMirror');

var Types = keyMirror({
  spring: true,
  linear: true,
  easeInEaseOut: true,
  easeIn: true,
  easeOut: true,
});

var Properties = keyMirror({
  opacity: true,
  scaleXY: true,
});

var animChecker = createStrictShapeTypeChecker({
  duration: PropTypes.number,
  delay: PropTypes.number,
  springDamping: PropTypes.number,
  initialVelocity: PropTypes.number,
  type: PropTypes.oneOf(
    Object.keys(Types)
  ),
  property: PropTypes.oneOf( // Only applies to create/delete
    Object.keys(Properties)
  ),
});

type Anim = {
  duration?: number;
  delay?: number;
  springDamping?: number;
  initialVelocity?: number;
  type?: $Enum<typeof Types>;
  property?: $Enum<typeof Properties>;
}

var configChecker = createStrictShapeTypeChecker({
  duration: PropTypes.number.isRequired,
  create: animChecker,
  update: animChecker,
  delete: animChecker,
});

type Config = {
  duration: number;
  create?: Anim;
  update?: Anim;
  delete?: Anim;
}

function configureNext(config: Config, onAnimationDidEnd?: Function, onError?: Function) {
  configChecker({config}, 'config', 'LayoutAnimation.configureNext');
  RCTUIManager.configureNextLayoutAnimation(config, onAnimationDidEnd, onError);
}

function create(duration: number, type, creationProp): Config {
  return {
    duration,
    create: {
      type,
      property: creationProp,
    },
    update: {
      type,
    },
  };
}

var LayoutAnimation = {
  configureNext,
  create,
  Types,
  Properties,
  configChecker: configChecker,
  Presets: {
    easeInEaseOut: create(
      300, Types.easeInEaseOut, Properties.opacity
    ),
    linear: create(
      500, Types.linear, Properties.opacity
    ),
    spring: {
      duration: 700,
      create: {
        type: Types.linear,
        property: Properties.opacity,
      },
      update: {
        type: Types.spring,
        springDamping: 0.4,
      },
    },
  }
};

module.exports = LayoutAnimation;
dancurtiss/MLBSchedules
node_modules/react-native/Libraries/Animation/LayoutAnimation.js
JavaScript
apache-2.0
2,505
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.navercorp.pinpoint.bootstrap.util;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;

public final class InterceptorUtils {

    private InterceptorUtils() {
    }

    public static boolean isThrowable(Object result) {
        return result instanceof Throwable;
    }

    public static boolean isSuccess(Throwable throwable) {
        return throwable == null;
    }

    public static String exceptionToString(Throwable ex) {
        if (ex != null) {
            StringBuilder sb = new StringBuilder(128);
            sb.append(ex.toString()).append('\n');
            Writer writer = new StringWriter();
            PrintWriter printWriter = new PrintWriter(writer);
            ex.printStackTrace(printWriter);
            sb.append(writer.toString());
            return sb.toString();
        }
        return null;
    }

    public static String getHttpUrl(final String uriString, final boolean param) {
        if (com.navercorp.pinpoint.common.util.StringUtils.isEmpty(uriString)) {
            return "";
        }
        if (param) {
            return uriString;
        }
        int queryStart = uriString.indexOf('?');
        if (queryStart != -1) {
            return uriString.substring(0, queryStart);
        }
        return uriString;
    }
}
emeroad/pinpoint
bootstraps/bootstrap-core/src/main/java/com/navercorp/pinpoint/bootstrap/util/InterceptorUtils.java
Java
apache-2.0
1,914
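A small usage sketch of the query-stripping behaviour of getHttpUrl in the record above. The behaviour is read from the code itself, not from Pinpoint documentation; the helper below re-implements the truncation logic standalone, with the Pinpoint StringUtils dependency replaced by a plain null/empty check, and the class name is invented for the example.

// Hypothetical, self-contained sketch: when `param` is false the query string
// is dropped, otherwise the URI is returned unchanged.
final class GetHttpUrlExample {
    static String getHttpUrl(String uriString, boolean param) {
        if (uriString == null || uriString.isEmpty()) {
            return "";
        }
        if (param) {
            return uriString;
        }
        int queryStart = uriString.indexOf('?');
        return queryStart != -1 ? uriString.substring(0, queryStart) : uriString;
    }

    public static void main(String[] args) {
        System.out.println(getHttpUrl("/users/42?verbose=true", false)); // /users/42
        System.out.println(getHttpUrl("/users/42?verbose=true", true));  // /users/42?verbose=true
    }
}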
/***************************************************************************//** * @file i2c_api.c ******************************************************************************* * @section License * <b>(C) Copyright 2015 Silicon Labs, http://www.silabs.com</b> ******************************************************************************* * * SPDX-License-Identifier: Apache-2.0 * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ #include "device.h" #include "clocking.h" #include <stdio.h> #if DEVICE_I2C #include "mbed_assert.h" #include "mbed_power_mgmt.h" #include "i2c_api.h" #include "PeripheralPins.h" #include "pinmap_function.h" #include "em_i2c.h" #include "em_cmu.h" /** Error flags indicating I2C transfer has failed somehow. */ /* Notice that I2C_IF_TXOF (transmit overflow) is not really possible with */ /* this SW supporting master mode. Likewise for I2C_IF_RXUF (receive underflow) */ /* RXUF is only likely to occur with this SW if using a debugger peeking into */ /* RXDATA register. Thus, we ignore those types of fault. */ #define I2C_IF_ERRORS (I2C_IF_BUSERR | I2C_IF_ARBLOST) #define I2C_TIMEOUT 100000 /* Prototypes */ int block_and_wait_for_ack(I2C_TypeDef *i2c); void i2c_enable(i2c_t *obj, uint8_t enable); void i2c_enable_pins(i2c_t *obj, uint8_t enable); void i2c_enable_interrupt(i2c_t *obj, uint32_t address, uint8_t enable); static uint8_t i2c_get_index(i2c_t *obj) { uint8_t index = 0; switch ((int)obj->i2c.i2c) { #ifdef I2C0 case I2C_0: index = 0; break; #endif #ifdef I2C1 case I2C_1: index = 1; break; #endif default: printf("I2C module not available.. Out of bound access."); break; } return index; } static CMU_Clock_TypeDef i2c_get_clock(i2c_t *obj) { CMU_Clock_TypeDef clock; switch ((int)obj->i2c.i2c) { #ifdef I2C0 case I2C_0: clock = cmuClock_I2C0; break; #endif #ifdef I2C1 case I2C_1: clock = cmuClock_I2C1; break; #endif default: printf("I2C module not available.. Out of bound access. 
(clock)"); clock = cmuClock_HFPER; break; } return clock; } void i2c_init(i2c_t *obj, PinName sda, PinName scl) { /* Find out which I2C peripheral we're asked to use */ I2CName i2c_sda = (I2CName) pinmap_peripheral(sda, PinMap_I2C_SDA); I2CName i2c_scl = (I2CName) pinmap_peripheral(scl, PinMap_I2C_SCL); obj->i2c.i2c = (I2C_TypeDef*) pinmap_merge(i2c_sda, i2c_scl); MBED_ASSERT(((unsigned int) obj->i2c.i2c) != NC); /* You need both SDA and SCL for I2C, so configuring one of them to NC is illegal */ MBED_ASSERT((unsigned int)sda != NC); MBED_ASSERT((unsigned int)scl != NC); /* Enable clock for the peripheral */ CMU_ClockEnable(i2c_get_clock(obj), true); /* Initializing the I2C */ /* Using default settings */ I2C_Init_TypeDef i2cInit = I2C_INIT_DEFAULT; I2C_Init(obj->i2c.i2c, &i2cInit); /* Enable pins at correct location */ #ifdef I2C_ROUTE_SDAPEN /* Find common location in pinmap */ unsigned int loc_sda = pin_location(sda, PinMap_I2C_SDA); unsigned int loc_scl = pin_location(scl, PinMap_I2C_SCL); unsigned int loc = pinmap_merge(loc_sda, loc_scl); MBED_ASSERT(loc != NC); /* Set location */ obj->i2c.location = I2C_ROUTE_SDAPEN | I2C_ROUTE_SCLPEN | (loc << _I2C_ROUTE_LOCATION_SHIFT); obj->i2c.i2c->ROUTE = I2C_ROUTE_SDAPEN | I2C_ROUTE_SCLPEN | (loc << _I2C_ROUTE_LOCATION_SHIFT); #else obj->i2c.i2c->ROUTEPEN = I2C_ROUTEPEN_SDAPEN | I2C_ROUTEPEN_SCLPEN; obj->i2c.location = (pin_location(sda, PinMap_I2C_SDA) << _I2C_ROUTELOC0_SDALOC_SHIFT) | (pin_location(scl, PinMap_I2C_SCL) << _I2C_ROUTELOC0_SCLLOC_SHIFT); obj->i2c.i2c->ROUTELOC0 = obj->i2c.location; #endif /* Set up the pins for I2C use */ /* Note: Set up pins in higher drive strength to reduce slew rate */ /* Though this requires user knowledge, since drive strength is controlled per port, not pin */ pin_mode(scl, WiredAndPullUp); pin_mode(sda, WiredAndPullUp); /* Enable General Call Address Mode. That is; we respond to the general address (0x0) */ obj->i2c.i2c->CTRL |= _I2C_CTRL_GCAMEN_MASK; /* We are assuming that there is only one master. So disable automatic arbitration */ obj->i2c.i2c->CTRL |= _I2C_CTRL_ARBDIS_MASK; /* Set to master (needed if this I2C block was used previously as slave) */ i2c_slave_mode(obj, false); /* Enable i2c */ i2c_enable(obj, true); } void i2c_enable(i2c_t *obj, uint8_t enable) { I2C_Enable(obj->i2c.i2c, enable); if (!enable) { /* After a reset BUSY is usually set. We assume that we are the only master and call abort, * which sends nothing on the bus, it just allows us to assume that the bus is idle */ if (obj->i2c.i2c->STATE & I2C_STATE_BUSY) { obj->i2c.i2c->CMD = I2C_CMD_ABORT; } } } void i2c_enable_interrupt(i2c_t *obj, uint32_t address, uint8_t enable) { IRQn_Type irq_number; switch (i2c_get_index(obj)) { #ifdef I2C0 case 0: irq_number = I2C0_IRQn; break; #endif #ifdef I2C1 case 1: irq_number = I2C1_IRQn; break; #endif } NVIC_SetVector(irq_number, address); /* Lower IRQ priority to avoid messing with asynch RX on UART */ NVIC_SetPriority(irq_number, 1); if (enable) { NVIC_EnableIRQ(irq_number); } else { NVIC_DisableIRQ(irq_number); } } /* Set the frequency of the I2C interface */ void i2c_frequency(i2c_t *obj, int hz) { /* Set frequency. 
As the second argument is 0, * HFPER clock frequency is used as reference freq */ if (hz <= 0) return; /* In I2C Normal mode (50% duty), we can go up to 100kHz */ if (hz <= 100000) { I2C_BusFreqSet(obj->i2c.i2c, REFERENCE_FREQUENCY, hz, i2cClockHLRStandard); } /* In I2C Fast mode (6:3 ratio), we can go up to 400kHz */ else if (hz <= 400000) { I2C_BusFreqSet(obj->i2c.i2c, REFERENCE_FREQUENCY, hz, i2cClockHLRAsymetric); } /* In I2C Fast+ mode (11:6 ratio), we can go up to 1 MHz */ else if (hz <= 1000000) { I2C_BusFreqSet(obj->i2c.i2c, REFERENCE_FREQUENCY, hz, i2cClockHLRFast); } /* Cap requested frequency at 1MHz */ else { I2C_BusFreqSet(obj->i2c.i2c, REFERENCE_FREQUENCY, 1000000, i2cClockHLRFast); } } /* Creates a start condition on the I2C bus */ int i2c_start(i2c_t *obj) { I2C_TypeDef *i2c = obj->i2c.i2c; /* Restore pin configuration in case we changed I2C object */ #ifdef I2C_ROUTE_SDAPEN obj->i2c.i2c->ROUTE = obj->i2c.location; #else obj->i2c.i2c->ROUTELOC0 = obj->i2c.location; #endif /* Ensure buffers are empty */ i2c->CMD = I2C_CMD_CLEARPC | I2C_CMD_CLEARTX; if (i2c->IF & I2C_IF_RXDATAV) { (void) i2c->RXDATA; } /* Clear all pending interrupts prior to starting transfer. */ i2c->IFC = _I2C_IFC_MASK; /* Send start */ obj->i2c.i2c->CMD = I2C_CMD_START; return 0; } /* Creates a stop condition on the I2C bus */ int i2c_stop(i2c_t *obj) { obj->i2c.i2c->CMD = I2C_CMD_STOP; /* Wait for the stop to be sent */ int timeout = I2C_TIMEOUT; while (!(obj->i2c.i2c->IF & I2C_IF_MSTOP) && !timeout--); return 0; } /* Returns number of bytes read */ int i2c_read(i2c_t *obj, int address, char *data, int length, int stop) { int retval; i2c_start(obj); retval = i2c_byte_write(obj, (address | 1)); if ((!retval) || (length == 0)) { //Write address with W flag (last bit 1) obj->i2c.i2c->CMD = I2C_CMD_STOP | I2C_CMD_ABORT; while(obj->i2c.i2c->STATE & I2C_STATE_BUSY); // Wait until the bus is done return (retval == 0 ? I2C_ERROR_NO_SLAVE : 0); //NACK or error when writing adress. Return 0 as 0 bytes were read } int i = 0; while (i < length) { uint8_t last = (i == length - 1); data[i++] = i2c_byte_read(obj, last); } if (stop) { i2c_stop(obj); } return length; } int i2c_write(i2c_t *obj, int address, const char *data, int length, int stop) { i2c_start(obj); if (!i2c_byte_write(obj, (address & 0xFE))) { i2c_stop(obj); return I2C_ERROR_NO_SLAVE; //NACK or error when writing adress. Return 0 as 0 bytes were written } int i; for (i = 0; i < length; i++) { if (!i2c_byte_write(obj, data[i])) { i2c_stop(obj); return i; } } if (stop) { i2c_stop(obj); } return length; } void i2c_reset(i2c_t *obj) { /* EMLib function */ I2C_Reset(obj->i2c.i2c); } int i2c_byte_read(i2c_t *obj, int last) { int timeout = I2C_TIMEOUT; /* Wait for data */ while (!(obj->i2c.i2c->STATUS & I2C_STATUS_RXDATAV) && timeout--); if (timeout <= 0) { return 0; //TODO Is this the correct way to handle this? } char data = obj->i2c.i2c->RXDATA; if (last) { obj->i2c.i2c->CMD = I2C_CMD_NACK; } else { obj->i2c.i2c->CMD = I2C_CMD_ACK; } return data; } int i2c_byte_write(i2c_t *obj, int data) { obj->i2c.i2c->TXDATA = data; return block_and_wait_for_ack(obj->i2c.i2c); } /* * Returns 1 for ACK. 0 for NACK, timeout or error. */ int block_and_wait_for_ack(I2C_TypeDef *i2c) { uint32_t pending; uint32_t timeout = I2C_TIMEOUT; while (timeout > 0) { timeout -= 1; pending = i2c->IF; /* If some sort of fault, abort transfer. 
*/ if (pending & I2C_IF_ERRORS) { if (pending & I2C_IF_ARBLOST) { /* If arbitration fault, it indicates either a slave device */ /* not responding as expected, or other master which is not */ /* supported by this SW. */ return 0; } else if (pending & I2C_IF_BUSERR) { /* A bus error indicates a misplaced start or stop, which should */ /* not occur in master mode controlled by this SW. */ return 0; } } if (pending & I2C_IF_NACK) { i2c->IFC = I2C_IFC_NACK; return 0; //Received NACK } else if (pending & I2C_IF_ACK) { i2c->IFC = I2C_IFC_ACK; return 1; //Got ACK } } return 0; //Timeout } #if DEVICE_I2CSLAVE #define NoData 0 #define ReadAddressed 1 #define WriteGeneral 2 #define WriteAddressed 3 void i2c_slave_mode(i2c_t *obj, int enable_slave) { if(enable_slave) { /* Reference manual note: DIV must be set to 1 during slave operation */ obj->i2c.i2c->CLKDIV = 1; obj->i2c.i2c->CTRL |= _I2C_CTRL_SLAVE_MASK; obj->i2c.i2c->CTRL |= _I2C_CTRL_AUTOACK_MASK; //Slave implementation assumes auto acking } else { obj->i2c.i2c->CTRL &= ~_I2C_CTRL_SLAVE_MASK; obj->i2c.i2c->CTRL &= ~_I2C_CTRL_AUTOACK_MASK; //Master implementation ACKs manually /* function is only called with enable_slave = false through i2c_init(..), so frequency is already guaranteed to be set */ } } int i2c_slave_receive(i2c_t *obj) { if(obj->i2c.i2c->IF & I2C_IF_ADDR) { obj->i2c.i2c->IFC = I2C_IF_ADDR; //Clear interrupt /*0x00 is the address for general write. The address the master wrote is in RXDATA now and reading it also frees the buffer for the next write which can then be acked. */ if(obj->i2c.i2c->RXDATA == 0x00) { return WriteGeneral; //Read the address; } if(obj->i2c.i2c->STATE & I2C_STATE_TRANSMITTER) { return ReadAddressed; } else { return WriteAddressed; } } return NoData; } int i2c_slave_read(i2c_t *obj, char *data, int length) { int count; for (count = 0; count < length; count++) { data[count] = i2c_byte_read(obj, 0); } return count; } int i2c_slave_write(i2c_t *obj, const char *data, int length) { int count; for (count = 0; count < length; count++) { i2c_byte_write(obj, data[count]); } return count; } void i2c_slave_address(i2c_t *obj, int idx, uint32_t address, uint32_t mask) { obj->i2c.i2c->SADDR = address; obj->i2c.i2c->SADDRMASK = 0xFE;//mask; } #endif //DEVICE_I2CSLAVE #ifdef DEVICE_I2C_ASYNCH #include "em_dma.h" #include "dma_api_HAL.h" #include "dma_api.h" #include "sleep_api.h" #include "buffer.h" /** Start i2c asynchronous transfer. * @param obj The I2C object * @param tx The buffer to send * @param tx_length The number of words to transmit * @param rx The buffer to receive * @param rx_length The number of words to receive * @param address The address to be set - 7bit or 9 bit * @param stop If true, stop will be generated after the transfer is done * @param handler The I2C IRQ handler to be set * @param hint DMA hint usage */ void i2c_transfer_asynch(i2c_t *obj, const void *tx, size_t tx_length, void *rx, size_t rx_length, uint32_t address, uint32_t stop, uint32_t handler, uint32_t event, DMAUsage hint) { I2C_TransferReturn_TypeDef retval; if(i2c_active(obj)) return; if((tx_length == 0) && (rx_length == 0)) return; // For now, we are assuming a solely interrupt-driven implementation. 
#ifdef I2C_ROUTE_SDAPEN obj->i2c.i2c->ROUTE = obj->i2c.location; #else obj->i2c.i2c->ROUTELOC0 = obj->i2c.location; #endif // Store transfer config obj->i2c.xfer.addr = address; // Some combination of tx_length and rx_length will tell us what to do if((tx_length > 0) && (rx_length == 0)) { obj->i2c.xfer.flags = I2C_FLAG_WRITE; //Store buffer info obj->i2c.xfer.buf[0].data = (void *)tx; obj->i2c.xfer.buf[0].len = (uint16_t) tx_length; } else if ((tx_length == 0) && (rx_length > 0)) { obj->i2c.xfer.flags = I2C_FLAG_READ; //Store buffer info obj->i2c.xfer.buf[0].data = rx; obj->i2c.xfer.buf[0].len = (uint16_t) rx_length; } else if ((tx_length > 0) && (rx_length > 0)) { obj->i2c.xfer.flags = I2C_FLAG_WRITE_READ; //Store buffer info obj->i2c.xfer.buf[0].data = (void *)tx; obj->i2c.xfer.buf[0].len = (uint16_t) tx_length; obj->i2c.xfer.buf[1].data = rx; obj->i2c.xfer.buf[1].len = (uint16_t) rx_length; } if(address > 255) obj->i2c.xfer.flags |= I2C_FLAG_10BIT_ADDR; // Store event flags obj->i2c.events = event; // Enable interrupt i2c_enable_interrupt(obj, handler, true); // Kick off the transfer retval = I2C_TransferInit(obj->i2c.i2c, &(obj->i2c.xfer)); if(retval == i2cTransferInProgress) { sleep_manager_lock_deep_sleep(); } else { // something happened, and the transfer did not go through // So, we need to clean up // Disable interrupt i2c_enable_interrupt(obj, 0, false); // Block until free while(i2c_active(obj)); } } /** The asynchronous IRQ handler * @param obj The I2C object which holds the transfer information * @return Returns event flags if a transfer termination condition was met or 0 otherwise. */ uint32_t i2c_irq_handler_asynch(i2c_t *obj) { // For now, we are assuming a solely interrupt-driven implementation. I2C_TransferReturn_TypeDef status = I2C_Transfer(obj->i2c.i2c); switch(status) { case i2cTransferInProgress: // Still busy transferring, so let it. return 0; case i2cTransferDone: // Transfer has completed // Disable interrupt i2c_enable_interrupt(obj, 0, false); sleep_manager_unlock_deep_sleep(); return I2C_EVENT_TRANSFER_COMPLETE & obj->i2c.events; case i2cTransferNack: // A NACK has been received while an ACK was expected. This is usually because the slave did not respond to the address. // Disable interrupt i2c_enable_interrupt(obj, 0, false); sleep_manager_unlock_deep_sleep(); return I2C_EVENT_ERROR_NO_SLAVE & obj->i2c.events; default: // An error situation has arisen. // Disable interrupt i2c_enable_interrupt(obj, 0, false); sleep_manager_unlock_deep_sleep(); // return error return I2C_EVENT_ERROR & obj->i2c.events; } } /** Attempts to determine if I2C peripheral is already in use. * @param obj The I2C object * @return non-zero if the I2C module is active or zero if it is not */ uint8_t i2c_active(i2c_t *obj) { return (obj->i2c.i2c->STATE & I2C_STATE_BUSY); } /** Abort ongoing asynchronous transaction. * @param obj The I2C object */ void i2c_abort_asynch(i2c_t *obj) { // Disable interrupt i2c_enable_interrupt(obj, 0, false); // Do not deactivate I2C twice if (!i2c_active(obj)) return; // Abort obj->i2c.i2c->CMD = I2C_CMD_STOP | I2C_CMD_ABORT; // Block until free while(i2c_active(obj)); sleep_manager_unlock_deep_sleep(); } #endif //DEVICE_I2C ASYNCH #endif //DEVICE_I2C
c1728p9/mbed-os
targets/TARGET_Silicon_Labs/TARGET_EFM32/i2c_api.c
C
apache-2.0
17,884
/*
Copyright 2013 The Camlistore Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Package camerrors defines specific errors that are used to
// decide on how to deal with some failure cases.
package camerrors

import (
	"errors"
)

// ErrMissingKeyBlob is returned by the jsonsign handler when a
// verification fails because the public key for a signed blob is
// missing.
var ErrMissingKeyBlob = errors.New("key blob not found")
ginabythebay/camlistore
pkg/camerrors/errors.go
GO
apache-2.0
923
/* * Copyright 2000-2016 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.containers; import com.intellij.util.ArrayUtil; import org.jetbrains.annotations.NotNull; import java.util.Arrays; public class UnsignedShortArrayList implements Cloneable { private char[] myData; // use char as an unsigned short private int mySize; public UnsignedShortArrayList(int initialCapacity) { myData = new char[initialCapacity]; } public UnsignedShortArrayList() { this(10); } public void trimToSize() { if (mySize < myData.length){ myData = ArrayUtil.realloc(myData, mySize); } } public void ensureCapacity(int minCapacity) { int oldCapacity = myData.length; if (minCapacity > oldCapacity){ char[] oldData = myData; int newCapacity = oldCapacity * 3 / 2 + 1; if (newCapacity < minCapacity){ newCapacity = minCapacity; } myData = new char[newCapacity]; System.arraycopy(oldData, 0, myData, 0, mySize); } } public void fill(int fromIndex, int toIndex, int value) { assertShort(value); if (toIndex > mySize) { ensureCapacity(toIndex); mySize = toIndex; } Arrays.fill(myData, fromIndex, toIndex, (char)value); } public int size() { return mySize; } public boolean isEmpty() { return mySize == 0; } public boolean contains(int element) { assertShort(element); return indexOf(element) >= 0; } public int indexOf(int element) { assertShort(element); return indexOf(element, 0, mySize); } public int indexOf(int element, int startIndex, int endIndex) { assertShort(element); if (startIndex < 0 || endIndex < startIndex || endIndex > mySize) { throw new IndexOutOfBoundsException("startIndex: "+startIndex+"; endIndex: "+endIndex+"; mySize: "+mySize); } for(int i = startIndex; i < endIndex; i++){ if (element == myData[i]) return i; } return -1; } public int lastIndexOf(int element) { assertShort(element); for(int i = mySize - 1; i >= 0; i--){ if (element == myData[i]) return i; } return -1; } @Override public Object clone() { try{ UnsignedShortArrayList v = (UnsignedShortArrayList)super.clone(); v.myData = myData.clone(); return v; } catch(CloneNotSupportedException e){ // this shouldn't happen, since we are Cloneable throw new InternalError(); } } public int @NotNull [] toArray() { return toArray(0,mySize); } public int @NotNull [] toArray(int @NotNull [] a) { if (a.length < mySize){ a = new int[mySize]; } for (int i = 0; i < mySize; i++) { char c = myData[i]; a[i] = c; } return a; } public int @NotNull [] toArray(int startIndex, int length) { int[] result = new int[length]; for (int i = startIndex; i < length; i++) { char c = myData[i]; result[i-startIndex] = c; } return result; } public int get(int index) { checkRange(index); return myData[index]; } public int getQuick(int index) { return myData[index]; } public int set(int index, int element) { checkRange(index); int oldValue = myData[index]; setQuick(index, element); return oldValue; } public void setQuick(int index, int element) { assertShort(element); myData[index] = (char)element; } private static void assertShort(int element) { assert element >= 0 && 
element < 1<<16 : element; } public void add(int element) { ensureCapacity(mySize + 1); setQuick(mySize++, element); } public void add(int index, int element) { if (index > mySize || index < 0){ throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + mySize); } ensureCapacity(mySize + 1); System.arraycopy(myData, index, myData, index + 1, mySize - index); setQuick(index, element); mySize++; } public int remove(int index) { checkRange(index); int oldValue = myData[index]; int numMoved = mySize - index - 1; if (numMoved > 0){ System.arraycopy(myData, index + 1, myData, index,numMoved); } mySize--; return oldValue; } public void clear() { mySize = 0; } public void removeRange(int fromIndex, int toIndex) { int numMoved = mySize - toIndex; System.arraycopy(myData, toIndex, myData, fromIndex, numMoved); mySize -= toIndex - fromIndex; } public void copyRange(int fromIndex, int length, int toIndex) { if (length < 0 || fromIndex < 0 || fromIndex + length > mySize || toIndex < 0 || toIndex + length > mySize) { throw new IndexOutOfBoundsException("fromIndex: "+fromIndex+"; length: "+length+"; toIndex: "+toIndex+"; mySize: "+mySize); } System.arraycopy(myData, fromIndex, myData, toIndex, length); } private void checkRange(int index) { if (index >= mySize || index < 0){ throw new IndexOutOfBoundsException("Index: " + index + ", Size: " + mySize); } } @Override public String toString() { return Arrays.toString(toArray()); } }
allotria/intellij-community
platform/util/src/com/intellij/util/containers/UnsignedShortArrayList.java
Java
apache-2.0
5,641
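The IntelliJ list in the record above stores unsigned 16-bit values in a char[] because char is Java's only unsigned 16-bit primitive. Below is a minimal, separate illustration (class name invented for the example) of why that representation round-trips values that a short[] would sign-extend.

// Hypothetical sketch: char cells hold 0..65535 losslessly; widening a char to
// int zero-extends, while the same value stored in a short would come back negative.
final class UnsignedShortCellExample {
    public static void main(String[] args) {
        char[] cells = new char[4];
        int value = 40_000;                 // fits in 16 bits but not in a signed short
        cells[0] = (char) value;            // store as an unsigned 16-bit cell
        int restored = cells[0];            // char -> int widening is zero-extended
        System.out.println(restored);       // 40000
        System.out.println((short) value);  // -25536: what a short[] would give back
    }
}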