content
stringlengths 10
4.9M
|
---|
def _get_disable_pod_identity(self, enable_validation: bool = False) -> bool:
    """Read the disable_pod_identity raw parameter.

    :param enable_validation: when True and the decorator is in UPDATE
        mode, raise MutuallyExclusiveArgumentError if
        --enable-pod-identity was specified alongside
        --disable-pod-identity.
    :return: the raw disable_pod_identity value
    """
    disable_pod_identity = self.raw_param.get("disable_pod_identity")
    # Short-circuit keeps this equivalent to the original nested guards:
    # validation only fires in UPDATE mode when both flags are set.
    if (
        enable_validation
        and self.decorator_mode == DecoratorMode.UPDATE
        and disable_pod_identity
        and self._get_enable_pod_identity(enable_validation=False)
    ):
        raise MutuallyExclusiveArgumentError(
            "Cannot specify --enable-pod-identity and "
            "--disable-pod-identity at the same time."
        )
    return disable_pod_identity
import * as assert from 'assert';
import Editor from '../Editor';
suite('Editor.ts Tests', function() {
test('Create a new Editor instance and expect it to return the file name and line number it was created with', function() {
const editor = new Editor(10, 'src/testFile.js');
assert.strictEqual(editor.GetActiveFileName(), 'src/testFile.js');
assert.strictEqual(editor.GetActiveLineNumber(), 10);
});
});
|
Improved Environmental Status: 50 Years of Declining Fish Mercury Levels in Boreal and Subarctic Fennoscandia.
Temporally (1965-2015) and spatially (55°-70°N) extensive records of total mercury (Hg) in freshwater fish showed consistent declines in boreal and subarctic Fennoscandia. The database contains 54 560 fish entries (n: pike > perch ≫ brown trout > roach ≈ Arctic charr) from 3132 lakes across Sweden, Finland, Norway, and the Russian Murmansk area. 74% of the lakes did not meet the 0.5 ppm limit to protect human health. However, after 2000 only 25% of the lakes exceeded this level, indicating improved environmental status. In lakes where local pollution sources were identified, pike and perch Hg concentrations were significantly higher between 1965 and 1990 compared to values after 1995, likely an effect of implemented reduction measures. In lakes where Hg originated from long-range transboundary air pollution (LRTAP), consistent Hg declines (3-7‰ per year) were found for perch and pike in both boreal and subarctic Fennoscandia, suggesting common environmental controls. Hg in perch and pike in LRTAP lakes showed minimal declines with latitude, suggesting that drivers affected by temperature, such as growth dilution, counteracted Hg loading and food web exposure. We recommend that future fish Hg monitoring sampling design should include repeated sampling and collection of pollution history, water chemistry, fish age, and stable isotopes to enable evaluation of emission reduction policies. |
<filename>internal/scan/minikubectl.go<gh_stars>10-100
/*
Copyright 2021 GramLabs, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package scan
import (
"bytes"
"fmt"
"sync"
"github.com/spf13/pflag"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/meta"
"k8s.io/cli-runtime/pkg/genericclioptions"
"k8s.io/cli-runtime/pkg/resource"
"k8s.io/client-go/discovery"
memory "k8s.io/client-go/discovery/cached"
"k8s.io/client-go/rest"
restclient "k8s.io/client-go/rest"
"k8s.io/client-go/restmapper"
"k8s.io/client-go/tools/clientcmd"
clientcmdapi "k8s.io/client-go/tools/clientcmd/api"
)
// minikubectl is just a miniature in-process kubectl for us to use to avoid
// a binary dependency on the tool itself. Only the "get" verb is supported
// (see Complete).
type minikubectl struct {
	// ConfigFlags carries kubeconfig/context/namespace flags.
	ConfigFlags *genericclioptions.ConfigFlags
	// ResourceBuilderFlags carries resource-selection flags (label selector, --all, ...).
	ResourceBuilderFlags *genericclioptions.ResourceBuilderFlags
	// PrintFlags carries output-format flags (-o/--output).
	PrintFlags *genericclioptions.PrintFlags
	// IgnoreNotFound mirrors kubectl's --ignore-not-found flag.
	IgnoreNotFound bool
	// restConfig caches the lazily-loaded REST configuration; guarded by lock.
	restConfig *rest.Config
	lock       sync.Mutex
}
// newMinikubectl creates a new minikubectl, the empty state is not usable.
func newMinikubectl() *minikubectl {
	// OutputFormat must point at a mutable string so AddFlags can bind it.
	var format string
	k := &minikubectl{
		ConfigFlags: genericclioptions.NewConfigFlags(true),
		PrintFlags: &genericclioptions.PrintFlags{
			JSONYamlPrintFlags: genericclioptions.NewJSONYamlPrintFlags(),
			OutputFormat:       &format,
		},
	}
	k.ResourceBuilderFlags = genericclioptions.NewResourceBuilderFlags().
		WithLabelSelector("").
		WithAll(true).
		WithLatest()
	return k
}
// AddFlags configures the supplied flag set with the recognized flags.
func (k *minikubectl) AddFlags(flags *pflag.FlagSet) {
	k.ConfigFlags.AddFlags(flags)
	k.ResourceBuilderFlags.AddFlags(flags)
	// PrintFlags somehow is tied to Cobra, so bind its output-format flag
	// manually onto this pflag set instead.
	flags.StringVarP(k.PrintFlags.OutputFormat, "output", "o", *k.PrintFlags.OutputFormat, "")
	// Don't bother with usage strings here, we aren't showing help to anyone
	flags.BoolVar(&k.IgnoreNotFound, "ignore-not-found", k.IgnoreNotFound, "")
}
// Complete validates we can execute against the supplied arguments.
// Only the "get" verb is accepted.
func (k *minikubectl) Complete(args []string) error {
	if len(args) > 0 && args[0] == "get" {
		return nil
	}
	return fmt.Errorf("minikubectl only supports get")
}
// Run executes the supplied arguments and returns the output as bytes.
// args[0] is the verb (already validated by Complete); the remainder is
// passed to the resource builder. k itself serves as the RESTClientGetter.
func (k *minikubectl) Run(args []string) ([]byte, error) {
	v := k.ResourceBuilderFlags.ToBuilder(k, args[1:]).Do()
	// Create a printer to dump the objects
	printer, err := k.PrintFlags.ToPrinter()
	if err != nil {
		return nil, err
	}
	// Use the printer to render everything into a byte buffer
	var b bytes.Buffer
	err = v.Visit(func(info *resource.Info, err error) error {
		if err != nil {
			// Mirror kubectl's --ignore-not-found: swallow NotFound errors.
			if k.IgnoreNotFound && apierrors.IsNotFound(err) {
				return nil
			}
			return err
		}
		return printer.PrintObj(info.Object, &b)
	})
	if err != nil {
		return nil, err
	}
	return b.Bytes(), nil
}
// ToRawKubeConfigLoader just defers to the configuration flags.
// The delegate is wrapped so an explicit --namespace flag overrides the
// namespace coming from the kubeconfig context.
func (k *minikubectl) ToRawKubeConfigLoader() clientcmd.ClientConfig {
	return &namespaceOverrideClientConfig{
		Delegate:          k.ConfigFlags.ToRawKubeConfigLoader(),
		NamespaceOverride: k.ConfigFlags.Namespace,
	}
}
// ToRESTConfig lazily loads a REST configuration.
//
// The first successful load is cached in k.restConfig and returned on
// subsequent calls. Access is serialized with k.lock.
func (k *minikubectl) ToRESTConfig() (*rest.Config, error) {
	k.lock.Lock()
	defer k.lock.Unlock()
	var err error
	// Bug fix: load the config only when it has NOT been cached yet. The
	// previous condition (k.restConfig != nil) was inverted, so the config
	// was never loaded on first use and nil was always returned.
	if k.restConfig == nil {
		k.restConfig, err = k.ToRawKubeConfigLoader().ClientConfig()
	}
	return k.restConfig, err
}
// ToDiscoveryClient returns an in-memory cached discovery instance instead of an on-disk cached instance.
func (k *minikubectl) ToDiscoveryClient() (discovery.CachedDiscoveryInterface, error) {
	config, err := k.ToRESTConfig()
	if err != nil {
		return nil, err
	}
	discoveryClient, err := discovery.NewDiscoveryClientForConfig(config)
	if err != nil {
		return nil, err
	}
	// Memory-backed cache avoids writing discovery data to disk.
	return memory.NewMemCacheClient(discoveryClient), nil
}
// ToRESTMapper does the exact same thing as the ConfigFlag implementation, just with a different discovery client.
func (k *minikubectl) ToRESTMapper() (meta.RESTMapper, error) {
	dc, err := k.ToDiscoveryClient()
	if err != nil {
		return nil, err
	}
	// Deferred mapper + shortcut expander, both backed by the cached client.
	deferred := restmapper.NewDeferredDiscoveryRESTMapper(dc)
	return restmapper.NewShortcutExpander(deferred, dc), nil
}
// namespaceOverrideClientConfig wraps a clientcmd.ClientConfig and overrides
// only the Namespace() result when an explicit override value is set.
type namespaceOverrideClientConfig struct {
	// Delegate handles every call not overridden here.
	Delegate clientcmd.ClientConfig
	// NamespaceOverride, when non-nil and non-empty, wins over the delegate's namespace.
	NamespaceOverride *string
}
// Namespace returns the override when set (reporting it as explicit),
// otherwise defers to the delegate.
func (c *namespaceOverrideClientConfig) Namespace() (string, bool, error) {
	if ns := c.NamespaceOverride; ns != nil && *ns != "" {
		return *ns, true, nil
	}
	return c.Delegate.Namespace()
}
// NOTE: Because the interface ClientConfig has a function ClientConfig we cannot simply embed ClientConfig
// RawConfig defers to the wrapped delegate.
func (c *namespaceOverrideClientConfig) RawConfig() (clientcmdapi.Config, error) {
	return c.Delegate.RawConfig()
}
// ConfigAccess defers to the wrapped delegate.
func (c *namespaceOverrideClientConfig) ConfigAccess() clientcmd.ConfigAccess {
	return c.Delegate.ConfigAccess()
}
// ClientConfig defers to the wrapped delegate.
func (c *namespaceOverrideClientConfig) ClientConfig() (*restclient.Config, error) {
	return c.Delegate.ClientConfig()
}
|
package org.intermine.objectstore.intermine;
/*
* Copyright (C) 2002-2017 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import static org.intermine.objectstore.intermine.ObjectStoreInterMineImpl.BAGID_COLUMN;
import static org.intermine.objectstore.intermine.ObjectStoreInterMineImpl.BAGVAL_COLUMN;
import static org.intermine.objectstore.intermine.ObjectStoreInterMineImpl.CLOBID_COLUMN;
import static org.intermine.objectstore.intermine.ObjectStoreInterMineImpl.CLOBPAGE_COLUMN;
import static org.intermine.objectstore.intermine.ObjectStoreInterMineImpl.CLOBVAL_COLUMN;
import static org.intermine.objectstore.intermine.ObjectStoreInterMineImpl.CLOB_TABLE_NAME;
import static org.intermine.objectstore.intermine.ObjectStoreInterMineImpl.INT_BAG_TABLE_NAME;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.WeakHashMap;
import org.apache.log4j.Logger;
import org.apache.torque.engine.database.model.Domain;
import org.apache.torque.engine.database.model.SchemaType;
import org.apache.torque.engine.platform.Platform;
import org.apache.torque.engine.platform.PlatformFactory;
import org.intermine.metadata.AttributeDescriptor;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.CollectionDescriptor;
import org.intermine.metadata.ConstraintOp;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.ReferenceDescriptor;
import org.intermine.metadata.TypeUtil;
import org.intermine.metadata.Util;
import org.intermine.model.InterMineObject;
import org.intermine.objectstore.ObjectStoreException;
import org.intermine.objectstore.proxy.ProxyReference;
import org.intermine.objectstore.query.BagConstraint;
import org.intermine.objectstore.query.ClassConstraint;
import org.intermine.objectstore.query.Clob;
import org.intermine.objectstore.query.Constraint;
import org.intermine.objectstore.query.ConstraintHelper;
import org.intermine.objectstore.query.ConstraintSet;
import org.intermine.objectstore.query.ContainsConstraint;
import org.intermine.objectstore.query.FromElement;
import org.intermine.objectstore.query.MultipleInBagConstraint;
import org.intermine.objectstore.query.ObjectStoreBag;
import org.intermine.objectstore.query.ObjectStoreBagCombination;
import org.intermine.objectstore.query.ObjectStoreBagsForObject;
import org.intermine.objectstore.query.OrderDescending;
import org.intermine.objectstore.query.OverlapConstraint;
import org.intermine.objectstore.query.PathExpressionField;
import org.intermine.objectstore.query.Query;
import org.intermine.objectstore.query.QueryCast;
import org.intermine.objectstore.query.QueryClass;
import org.intermine.objectstore.query.QueryClassBag;
import org.intermine.objectstore.query.QueryCollectionPathExpression;
import org.intermine.objectstore.query.QueryCollectionReference;
import org.intermine.objectstore.query.QueryEvaluable;
import org.intermine.objectstore.query.QueryExpression;
import org.intermine.objectstore.query.QueryField;
import org.intermine.objectstore.query.QueryForeignKey;
import org.intermine.objectstore.query.QueryFunction;
import org.intermine.objectstore.query.QueryNode;
import org.intermine.objectstore.query.QueryObjectPathExpression;
import org.intermine.objectstore.query.QueryObjectReference;
import org.intermine.objectstore.query.QueryOrderable;
import org.intermine.objectstore.query.QueryPathExpression;
import org.intermine.objectstore.query.QueryReference;
import org.intermine.objectstore.query.QuerySelectable;
import org.intermine.objectstore.query.QueryValue;
import org.intermine.objectstore.query.SimpleConstraint;
import org.intermine.objectstore.query.SubqueryConstraint;
import org.intermine.objectstore.query.SubqueryExistsConstraint;
import org.intermine.objectstore.query.UnknownTypeValue;
import org.intermine.objectstore.query.WidthBucketFunction;
import org.intermine.objectstore.query.iql.IqlQuery;
import org.intermine.sql.Database;
import org.intermine.sql.DatabaseUtil;
import org.intermine.util.AlwaysMap;
import org.intermine.util.CombinedIterator;
import org.intermine.util.DynamicUtil;
/**
* Code to generate an sql statement from a Query object.
*
* @author <NAME>
* @author <NAME>
* @author <NAME>
*/
public final class SqlGenerator
{
// Utility class: all members are static, so instantiation is forbidden.
private SqlGenerator() {
}
private static final Logger LOG = Logger.getLogger(SqlGenerator.class);
/** normal query **/
public static final int QUERY_NORMAL = 0;
/** subquery in FROM **/
public static final int QUERY_SUBQUERY_FROM = 1;
/** subquery in CONSTRAINT **/
public static final int QUERY_SUBQUERY_CONSTRAINT = 2;
/** IDs only. NOTE(review): shares the value 2 with QUERY_SUBQUERY_CONSTRAINT —
 * presumably historical; confirm before relying on the value to tell them apart. **/
public static final int ID_ONLY = 2;
/** NOTE(review): exact purpose unclear from this file — name suggests
 * "emit all fields without aliases"; confirm against callers. **/
public static final int NO_ALIASES_ALL_FIELDS = 3;
/** query for precomputing **/
public static final int QUERY_FOR_PRECOMP = 4;
/** subquery exists **/
public static final int QUERY_SUBQUERY_EXISTS = 5;
/** query for go faster **/
public static final int QUERY_FOR_GOFASTER = 6;
/** subquery only for counting. SELECT COUNT(*) AS ... FROM (subquery) **/
public static final int QUERY_FOR_COUNTING = 7;
// Per-schema cache of generated SQL plus offset bookkeeping; weak keys let
// schemas (and their entries) be collected when no longer referenced.
protected static Map<DatabaseSchema, Map<Query, CacheEntry>> sqlCache
    = new WeakHashMap<DatabaseSchema, Map<Query, CacheEntry>>();
// Per-schema cache of the table names touched by each query.
protected static Map<DatabaseSchema, Map<Query, Set<Object>>> tablenamesCache
    = new WeakHashMap<DatabaseSchema, Map<Query, Set<Object>>>();
/**
 * Generates a query to retrieve a single object from the database, by id.
 *
 * @param id the id of the object to fetch
 * @param clazz a Class of the object - if unsure use InterMineObject
 * @param schema the DatabaseSchema
 * @return a String suitable for passing to an SQL server
 * @throws ObjectStoreException if the given class is not in the model
 */
public static String generateQueryForId(Integer id, Class<?> clazz,
    DatabaseSchema schema) throws ObjectStoreException {
    // Resolve the table-master class that actually stores rows for clazz.
    ClassDescriptor tableMaster;
    if (schema.isMissingNotXml()) {
        tableMaster = schema.getModel()
            .getClassDescriptorByName(InterMineObject.class.getName());
    } else {
        ClassDescriptor cld = schema.getModel().getClassDescriptorByName(clazz.getName());
        if (cld == null) {
            throw new ObjectStoreException(clazz.toString() + " is not in the model");
        }
        tableMaster = schema.getTableMaster(cld);
    }
    // LIMIT 2 lets callers detect a duplicated id without fetching more rows.
    StringBuilder sql = new StringBuilder("SELECT a1_.OBJECT AS a1_ FROM ")
        .append(DatabaseUtil.getTableName(tableMaster))
        .append(" AS a1_ WHERE a1_.id = ")
        .append(id.toString());
    if (schema.isTruncated(tableMaster)) {
        // Truncated tables hold several classes; filter on the stored class name.
        sql.append(" AND a1_.tableclass = '").append(clazz.getName()).append("'");
    }
    sql.append(" LIMIT 2");
    return sql.toString();
}
/**
 * Returns the table name used by the ID fetch query.
 *
 * @param clazz the Class of the object
 * @param schema the DatabaseSchema
 * @return a table name
 * @throws ObjectStoreException if the given class is not in the model
 */
public static String tableNameForId(Class<?> clazz,
    DatabaseSchema schema) throws ObjectStoreException {
    // Everything lives in the InterMineObject table in missing-not-XML mode.
    if (schema.isMissingNotXml()) {
        return DatabaseUtil.getTableName(schema.getModel()
            .getClassDescriptorByName(InterMineObject.class.getName()));
    }
    ClassDescriptor cld = schema.getModel().getClassDescriptorByName(clazz.getName());
    if (cld == null) {
        throw new ObjectStoreException(clazz.toString() + " is not in the model");
    }
    return DatabaseUtil.getTableName(schema.getTableMaster(cld));
}
/**
 * Registers an offset for a given query. This is used later on to speed up queries that use
 * big offsets: a cached "WHERE first_order_field &gt; value" variant of the SQL replaces a
 * large OFFSET clause.
 *
 * @param q the Query
 * @param start the offset
 * @param schema the DatabaseSchema in which to look up metadata
 * @param db the Database that the ObjectStore uses
 * @param value a value, such that adding a WHERE component first_order_field &gt; value with
 * OFFSET 0 is equivalent to the original query with OFFSET offset
 * @param bagTableNames a Map from BagConstraints to table names, where the table contains the
 * contents of the bag that are relevant for the BagConstraint
 */
public static void registerOffset(Query q, int start, DatabaseSchema schema, Database db,
    Object value, Map<Object, String> bagTableNames) {
    LOG.debug("registerOffset() called with offset: " + start);
    try {
        // Boolean order-by values cannot form a useful range constraint.
        if (value.getClass().equals(Boolean.class)) {
            return;
        }
        QueryOrderable firstOrderByO = null;
        firstOrderByO = (QueryOrderable) q.getEffectiveOrderBy().iterator().next();
        // A QueryClass order-by is replaced by its id below; bail out if the
        // class has no id field (not an InterMineObject).
        if ((firstOrderByO instanceof QueryClass) && (!InterMineObject.class
            .isAssignableFrom(((QueryClass) firstOrderByO).getType()))) {
            return;
        }
        synchronized (q) {
            Map<Query, CacheEntry> schemaCache = getCacheForSchema(schema);
            CacheEntry cacheEntry = schemaCache.get(q);
            if (cacheEntry != null) {
                // Refresh the "last" SQL only when the new offset is far from
                // the previously registered one (thresholds are asymmetric).
                if ((cacheEntry.getLastOffset() - start >= 100000)
                    || (start - cacheEntry.getLastOffset() >= 10000)) {
                    QueryNode firstOrderBy = null;
                    firstOrderBy = (QueryNode) q.getEffectiveOrderBy().iterator().next();
                    if (firstOrderBy instanceof QueryFunction) {
                        return;
                    }
                    if (firstOrderBy instanceof QueryClass) {
                        firstOrderBy = new QueryField((QueryClass) firstOrderBy, "id");
                    }
                    // Now we need to work out if this field is a primitive type or an object
                    // type (that can accept null values).
                    Constraint c = getOffsetConstraint(q, firstOrderBy, value, schema);
                    String sql = generate(q, schema, db, c, QUERY_NORMAL, bagTableNames);
                    cacheEntry.setLast(start, sql);
                }
                // If a cached entry already covers an offset close enough to
                // this one, registering another is not worthwhile.
                SortedMap<Integer, String> headMap = cacheEntry.getCached()
                    .headMap(new Integer(start + 1));
                Integer lastKey = null;
                try {
                    lastKey = headMap.lastKey();
                } catch (NoSuchElementException e) {
                    // ignore
                }
                if (lastKey != null) {
                    int offset = lastKey.intValue();
                    if (start - offset < 100000) {
                        return;
                    }
                }
            }
            // Now we need to work out if this field is a primitive type or an object
            // type (that can accept null values).
            Constraint offsetConstraint = getOffsetConstraint(q, firstOrderByO, value, schema);
            String sql = generate(q, schema, db, offsetConstraint, QUERY_NORMAL, bagTableNames);
            if (cacheEntry == null) {
                cacheEntry = new CacheEntry(start, sql);
                schemaCache.put(q, cacheEntry);
            }
            cacheEntry.getCached().put(new Integer(start), sql);
            //LOG.info("Created cache entry for offset " + start + " (cache contains "
            //        + cacheEntry.getCached().keySet() + ") for query " + q + ", sql = " + sql);
        }
    } catch (ObjectStoreException e) {
        LOG.warn("Error while registering offset for query " + q + ": " + e);
    } catch (IllegalArgumentException e) {
        LOG.warn("Error while registering offset for query " + q + ": " + e);
    }
}
/**
 * Create a constraint to add to the main query to deal with offset - this is based on
 * the first element in the order by (field) and a given value (x). If the order by
 * element cannot have null values this is: 'field &gt; x'. If field can have null values
 * *and* it has not already been constrained as 'NOT NULL' in the main query it is:
 * '(field &gt; x OR field IS NULL)'.
 *
 * @param q the Query
 * @param firstOrderBy the offset element of the query's order by list
 * @param value a value, such that adding a WHERE component first_order_field &gt; value with
 * OFFSET 0 is equivalent to the original query with OFFSET offset
 * @param schema the DatabaseSchema in which to look up metadata
 * @return the constraint(s) to add to the main query
 */
protected static Constraint getOffsetConstraint(Query q, QueryOrderable firstOrderBy,
    Object value, DatabaseSchema schema) {
    boolean reverse = false;
    QueryOrderable newFirstOrderBy = firstOrderBy;
    // Unwrap DESC ordering; the comparison direction flips accordingly.
    if (newFirstOrderBy instanceof OrderDescending) {
        newFirstOrderBy = ((OrderDescending) newFirstOrderBy).getQueryOrderable();
        reverse = true;
    }
    // Ordering by a class means ordering by its id column.
    if (newFirstOrderBy instanceof QueryClass) {
        newFirstOrderBy = new QueryField((QueryClass) newFirstOrderBy, "id");
    }
    // Assume the field is nullable unless proven otherwise below.
    boolean hasNulls = true;
    if ((newFirstOrderBy instanceof QueryField) && (!reverse)) {
        FromElement qc = ((QueryField) newFirstOrderBy).getFromElement();
        if (qc instanceof QueryClass) {
            // "id" and "class" columns are never null; primitive attributes
            // cannot be null either.
            if ("id".equals(((QueryField) newFirstOrderBy).getFieldName())) {
                hasNulls = false;
            } else if ("class".equals(((QueryField) newFirstOrderBy).getFieldName())) {
                hasNulls = false;
            } else {
                AttributeDescriptor desc = (AttributeDescriptor) schema
                    .getModel().getFieldDescriptorsForClass(((QueryClass) qc)
                        .getType()).get(((QueryField) newFirstOrderBy)
                            .getFieldName());
                if (desc.isPrimitive()) {
                    hasNulls = false;
                }
            }
        }
    }
    if (reverse) {
        return new SimpleConstraint((QueryEvaluable) newFirstOrderBy,
            ConstraintOp.LESS_THAN, new QueryValue(value));
    } else {
        SimpleConstraint sc = new SimpleConstraint((QueryEvaluable) newFirstOrderBy,
            ConstraintOp.GREATER_THAN, new QueryValue(value));
        if (hasNulls) {
            // if the query already constrains the first order by field to be
            // not null it doesn't make sense to add a constraint to null
            CheckForIsNotNullConstraint check = new CheckForIsNotNullConstraint((QueryNode)
                newFirstOrderBy);
            ConstraintHelper.traverseConstraints(q.getConstraint(), check);
            if (!check.exists()) {
                ConstraintSet cs = new ConstraintSet(ConstraintOp.OR);
                cs.addConstraint(sc);
                cs.addConstraint(new SimpleConstraint((QueryEvaluable) newFirstOrderBy,
                    ConstraintOp.IS_NULL));
                return cs;
            }
        }
        return sc;
    }
}
/**
 * Converts a Query object into an SQL String. To produce an SQL query that does not have
 * OFFSET and LIMIT clauses, set start to 0, and limit to Integer.MAX_VALUE.
 *
 * @param q the Query to convert
 * @param start the number of the first row for the query to return, numbered from zero
 * @param limit the maximum number of rows for the query to return
 * @param schema the DatabaseSchema in which to look up metadata
 * @param db the Database that the ObjectStore uses
 * @param bagTableNames a Map from BagConstraints to table names, where the table contains the
 * contents of the bag that are relevant for the BagConstraint
 * @return a String suitable for passing to an SQL server
 * @throws ObjectStoreException if something goes wrong
 */
public static String generate(Query q, int start, int limit, DatabaseSchema schema, Database db,
    Map<Object, String> bagTableNames) throws ObjectStoreException {
    synchronized (q) {
        // Special case: a bare Clob select is paged directly by clob page number.
        if ((q.getSelect().size() == 1) && (q.getSelect().get(0) instanceof Clob)) {
            // Special case.
            Clob clob = (Clob) q.getSelect().get(0);
            return "SELECT " + CLOBVAL_COLUMN + " AS a1_ FROM " + CLOB_TABLE_NAME + " WHERE "
                + CLOBID_COLUMN + " = " + clob.getClobId() + " AND " + CLOBPAGE_COLUMN + " >= "
                + start + " AND " + CLOBPAGE_COLUMN + " < " + (start + limit) + " ORDER BY "
                + CLOBPAGE_COLUMN;
        }
        // Try to reuse SQL registered by registerOffset(): pick the cached
        // variant whose registered offset is closest below 'start' and pay
        // only the residual OFFSET.
        Map<Query, CacheEntry> schemaCache = getCacheForSchema(schema);
        CacheEntry cacheEntry = schemaCache.get(q);
        if (cacheEntry != null) {
            SortedMap<Integer, String> headMap = cacheEntry.getCached()
                .headMap(new Integer(start + 1));
            Integer lastKey = null;
            try {
                lastKey = headMap.lastKey();
            } catch (NoSuchElementException e) {
                // ignore
            }
            if (lastKey != null) {
                int offset = lastKey.intValue();
                // Prefer whichever of (cached-at-offset, last-registered)
                // leaves the smaller residual offset to apply.
                if ((offset > cacheEntry.getLastOffset())
                    || (cacheEntry.getLastOffset() > start)) {
                    return cacheEntry.getCached().get(lastKey)
                        + (limit == Integer.MAX_VALUE ? "" : " LIMIT " + limit)
                        + (start == offset ? "" : " OFFSET " + (start - offset));
                } else {
                    return cacheEntry.getLastSQL()
                        + (limit == Integer.MAX_VALUE ? "" : " LIMIT " + limit)
                        + (start == cacheEntry.getLastOffset() ? ""
                            : " OFFSET " + (start - cacheEntry.getLastOffset()));
                }
            }
        }
        // No usable cache entry: generate from scratch and append paging.
        String sql = generate(q, schema, db, null, QUERY_NORMAL, bagTableNames);
        /*if (cached == null) {
            cached = new TreeMap();
            schemaCache.put(q, cached);
        }
        cached.put(new Integer(0), sql);
        */
        return sql + ((limit == Integer.MAX_VALUE ? "" : " LIMIT " + limit)
            + (start == 0 ? "" : " OFFSET " + start));
    }
}
/**
 * Returns a cache specific to a particular DatabaseSchema, creating it
 * lazily on first access.
 *
 * @param schema the DatabaseSchema
 * @return a Map
 */
private static Map<Query, CacheEntry> getCacheForSchema(DatabaseSchema schema) {
    synchronized (sqlCache) {
        Map<Query, CacheEntry> cache = sqlCache.get(schema);
        if (cache != null) {
            return cache;
        }
        // Weak keys let queries be collected; synchronized wrapper guards
        // concurrent readers of the per-schema map.
        cache = Collections.synchronizedMap(new WeakHashMap<Query, CacheEntry>());
        sqlCache.put(schema, cache);
        return cache;
    }
}
/**
 * Converts a Query object into an SQL String.
 *
 * @param q the Query to convert
 * @param schema the DatabaseSchema in which to look up metadata
 * @param db the Database that the ObjectStore uses
 * @param offsetCon an additional constraint for improving the speed of large offsets
 * @param kind Query type
 * @param bagTableNames a Map from BagConstraints to table names, where the table contains the
 * contents of the bag that are relevant for the BagConstraint
 * @return a String suitable for passing to an SQL server
 * @throws ObjectStoreException if something goes wrong
 */
public static String generate(Query q, DatabaseSchema schema, Database db,
    Constraint offsetCon, int kind,
    Map<Object, String> bagTableNames) throws ObjectStoreException {
    State state = new State();
    List<QuerySelectable> selectList = q.getSelect();
    if ((selectList.size() == 1) && (selectList.get(0) instanceof ObjectStoreBag)) {
        // Special case - we are fetching the contents of an ObjectStoreBag.
        return "SELECT " + BAGVAL_COLUMN + " AS a1_ FROM " + INT_BAG_TABLE_NAME + " WHERE "
            + BAGID_COLUMN + " = " + ((ObjectStoreBag) selectList.get(0)).getBagId()
            + " ORDER BY " + BAGVAL_COLUMN;
    } else if ((selectList.size() == 1)
        && (selectList.get(0) instanceof ObjectStoreBagCombination)) {
        // Another special case: UNION/INTERSECT/EXCEPT of bags.
        ObjectStoreBagCombination osbc = (ObjectStoreBagCombination) selectList.get(0);
        return generateSQLForBagCombo(osbc);
    } else if ((selectList.size() == 1)
        && (selectList.get(0) instanceof ObjectStoreBagsForObject)) {
        // Another special case: find the bags that contain a given value.
        ObjectStoreBagsForObject osbfo = (ObjectStoreBagsForObject) selectList.get(0);
        StringBuffer retval = new StringBuffer("SELECT " + BAGID_COLUMN + " AS a1_ FROM "
            + INT_BAG_TABLE_NAME + " WHERE " + BAGVAL_COLUMN + " = " + osbfo.getValue());
        Collection<ObjectStoreBag> bags = osbfo.getBags();
        if ((bags != null) && (!bags.isEmpty())) {
            retval.append(" AND " + BAGID_COLUMN + " IN (");
            boolean needComma = false;
            for (ObjectStoreBag osb : osbfo.getBags()) {
                if (needComma) {
                    retval.append(", ");
                }
                needComma = true;
                retval.append("" + osb.getBagId());
            }
            retval.append(")");
        }
        retval.append(" ORDER BY " + BAGID_COLUMN);
        return retval.toString();
    }
    // General case: assemble the SQL clause by clause into 'state'.
    state.setDb(db);
    state.setBagTableNames(bagTableNames);
    buildFromComponent(state, q, schema, bagTableNames);
    buildWhereClause(state, q, q.getConstraint(), schema);
    buildWhereClause(state, q, offsetCon, schema);
    String orderBy = "";
    if ((kind == QUERY_NORMAL) || (kind == QUERY_FOR_PRECOMP) || (kind == QUERY_FOR_GOFASTER)) {
        // An aggregate select without GROUP BY produces a single row, so an
        // ORDER BY would be meaningless (and invalid for some functions).
        boolean haveOrderBy = true;
        if (q.getGroupBy().isEmpty()) {
            for (QuerySelectable selectable : q.getSelect()) {
                if (selectable instanceof QueryFunction) {
                    haveOrderBy = false;
                }
            }
        }
        if (haveOrderBy) {
            orderBy = buildOrderBy(state, q, schema, kind);
        }
    }
    // NOTE(review): original comment here questioned its own meaning
    // ("What on earth does this comment mean, Julie?") — intent unknown.
    StringBuffer retval = new StringBuffer("SELECT ")
        .append(needsDistinct(q) ? "DISTINCT " : "")
        .append(buildSelectComponent(state, q, schema, kind))
        .append(state.getFrom())
        .append(state.getWhere())
        .append(buildGroupBy(q, schema, state))
        .append(state.getHaving())
        .append(orderBy);
    if ((q.getLimit() != Integer.MAX_VALUE) && (kind == QUERY_SUBQUERY_FROM)) {
        retval.append(" LIMIT " + q.getLimit());
    }
    return retval.toString();
}
// Renders an ObjectStoreBagCombination as SQL over the integer-bag table.
// UNION becomes a single IN (...) query; ALLBUTINTERSECT uses GROUP BY /
// HAVING COUNT(*) < n; INTERSECT and EXCEPT become SQL set operations.
private static String generateSQLForBagCombo(ObjectStoreBagCombination osbc) {
    // In this implementation, only INTERSECT and EXCEPT combinations may
    // have sub-combinations.
    if (osbc.getOp() == ObjectStoreBagCombination.UNION) {
        StringBuffer retval = new StringBuffer("SELECT DISTINCT " + BAGVAL_COLUMN
            + " AS a1_ FROM " + INT_BAG_TABLE_NAME + " WHERE " + BAGID_COLUMN
            + " IN (");
        boolean needComma = false;
        for (QuerySelectable qs : osbc.getBags()) {
            ObjectStoreBag osb = (ObjectStoreBag) qs;
            if (needComma) {
                retval.append(", ");
            }
            needComma = true;
            retval.append(osb.getBagId() + "");
        }
        retval.append(")");
        return retval.toString();
    } else if (osbc.getOp() == ObjectStoreBagCombination.ALLBUTINTERSECT) {
        // Values present in at least one bag but not in all of them.
        StringBuffer retval = new StringBuffer("SELECT " + BAGVAL_COLUMN
            + " AS a1_ FROM " + INT_BAG_TABLE_NAME + " WHERE " + BAGID_COLUMN
            + " IN (");
        boolean needComma = false;
        for (QuerySelectable qs : osbc.getBags()) {
            ObjectStoreBag osb = (ObjectStoreBag) qs;
            if (needComma) {
                retval.append(", ");
            }
            needComma = true;
            retval.append(osb.getBagId() + "");
        }
        retval.append(") GROUP BY " + BAGVAL_COLUMN + " HAVING COUNT(*) < "
            + osbc.getBags().size() + " ORDER BY " + BAGVAL_COLUMN);
        return retval.toString();
    } else {
        // INTERSECT / EXCEPT: join per-bag selects with the set operator,
        // recursing for nested combinations.
        StringBuffer retval = new StringBuffer();
        boolean needComma = false;
        for (QuerySelectable qs : osbc.getBags()) {
            if (needComma) {
                retval.append(osbc.getOp() == ObjectStoreBagCombination.INTERSECT
                    ? " INTERSECT " : " EXCEPT ");
            }
            needComma = true;
            if (qs instanceof ObjectStoreBag) {
                ObjectStoreBag osb = (ObjectStoreBag) qs;
                retval.append("SELECT " + BAGVAL_COLUMN + " AS a1_ FROM " + INT_BAG_TABLE_NAME
                    + " WHERE " + BAGID_COLUMN + " = " + osb.getBagId());
            } else {
                // Must be a bag combo.
                ObjectStoreBagCombination subCombo = (ObjectStoreBagCombination) qs;
                retval.append(generateSQLForBagCombo(subCombo));
            }
        }
        retval.append(" ORDER BY a1_");
        return retval.toString();
    }
}
/**
 * Returns true if this query requires a DISTINCT keyword in the generated SQL.
 * DISTINCT is needed when the query is marked distinct and at least one FROM
 * element does not appear (as a class or via its id field) in the SELECT list.
 *
 * @param q the Query
 * @return a boolean
 */
protected static boolean needsDistinct(Query q) {
    if (!q.isDistinct()) {
        return false;
    }
    // Collect every QueryClass represented in the select list, either
    // directly or through its "id" field.
    Set<QueryClass> selectClasses = new HashSet<QueryClass>();
    for (QuerySelectable sel : q.getSelect()) {
        if (sel instanceof QueryClass) {
            selectClasses.add((QueryClass) sel);
        } else if (sel instanceof QueryField) {
            QueryField field = (QueryField) sel;
            FromElement source = field.getFromElement();
            if ("id".equals(field.getFieldName()) && (source instanceof QueryClass)) {
                selectClasses.add((QueryClass) source);
            }
        }
    }
    // DISTINCT is required as soon as one FROM element is not selected.
    for (FromElement from : q.getFrom()) {
        if (!selectClasses.contains(from)) {
            return true;
        }
    }
    return false;
}
/**
 * Builds a Set of all table names that are touched by a given query.
 *
 * @param q the Query
 * @param schema the DatabaseSchema in which to look up metadata
 * @return a Set of table names
 * @throws ObjectStoreException if something goes wrong
 */
public static Set<String> findTableNames(Query q, DatabaseSchema schema)
    throws ObjectStoreException {
    // With individualOsbs == false the result is known to contain only
    // Strings, so the raw cast below is safe.
    @SuppressWarnings({ "unchecked", "rawtypes" })
    Set<String> retval = (Set) findTableNames(q, schema, false);
    return retval;
}
/**
 * Builds a Set of all table names that are touched by a given query,
 * computing and caching the result per schema on first request.
 *
 * @param q the Query
 * @param schema the DatabaseSchema in which to look up metadata
 * @param individualOsbs if true, adds individual ObjectStoreBags to the Set, otherwise just
 * adds the table name instead
 * @return a Set of table names
 * @throws ObjectStoreException if something goes wrong
 */
public static Set<Object> findTableNames(Query q, DatabaseSchema schema,
    boolean individualOsbs) throws ObjectStoreException {
    Map<Query, Set<Object>> cache = getTablenamesCacheForSchema(schema);
    synchronized (q) {
        Set<Object> names = cache.get(q);
        if (names != null) {
            return names;
        }
        names = new HashSet<Object>();
        findTableNames(names, q, schema, true, individualOsbs);
        cache.put(q, names);
        return names;
    }
}
/**
 * Returns a cache for table names specific to a particular DatabaseSchema,
 * creating it lazily on first access.
 *
 * @param schema the DatabaseSchema
 * @return a Map
 */
private static Map<Query, Set<Object>> getTablenamesCacheForSchema(DatabaseSchema schema) {
    synchronized (tablenamesCache) {
        Map<Query, Set<Object>> cache = tablenamesCache.get(schema);
        if (cache != null) {
            return cache;
        }
        // Weak keys allow queries to be collected once unreferenced.
        cache = Collections.synchronizedMap(new WeakHashMap<Query, Set<Object>>());
        tablenamesCache.put(schema, cache);
        return cache;
    }
}
    /**
     * Adds table names to a Set of table names, from a given Query. Tables are collected from
     * the constraint tree, the FROM list, and any path expressions or bags in the SELECT list.
     *
     * @param tablenames a Set of table names and bags - new ones will be added here
     * @param q the Query
     * @param schema the DatabaseSchema in which to look up metadata
     * @param addInterMineObject true if this method should normally add the InterMineObject
     * table to the Set
     * @param individualOsbs if true, adds individual ObjectStoreBags to the Set, otherwise just
     * adds the table name instead
     * @throws ObjectStoreException if something goes wrong
     */
    private static void findTableNames(Set<Object> tablenames, Query q,
            DatabaseSchema schema, boolean addInterMineObject,
            boolean individualOsbs) throws ObjectStoreException {
        // A provably-false constraint means the query returns no rows; no tables are touched.
        if (completelyFalse(q.getConstraint())) {
            return;
        }
        findTableNamesInConstraint(tablenames, q.getConstraint(), schema, individualOsbs);
        for (FromElement fromElement : q.getFrom()) {
            if (fromElement instanceof QueryClass) {
                // A dynamic class decomposes into several component classes; each contributes
                // the table of its table-master (the class that actually owns the table).
                for (Class<?> cls : Util.decomposeClass(((QueryClass) fromElement)
                        .getType())) {
                    ClassDescriptor cld = schema.getModel().getClassDescriptorByName(cls.getName());
                    if (cld == null) {
                        throw new ObjectStoreException(cls + " is not in the model");
                    }
                    ClassDescriptor tableMaster = schema.getTableMaster(cld);
                    tablenames.add(DatabaseUtil.getTableName(tableMaster));
                }
            } else if (fromElement instanceof Query) {
                // Subqueries in the FROM list never add the InterMineObject table themselves.
                Query subQ = (Query) fromElement;
                findTableNames(tablenames, subQ, schema, false, individualOsbs);
            } else if (fromElement instanceof QueryClassBag) {
                // Do nothing
            } else {
                throw new ObjectStoreException("Unknown FromElement: " + fromElement.getClass());
            }
        }
        String interMineObject = DatabaseUtil.getTableName(schema.getModel()
                .getClassDescriptorByName(InterMineObject.class.getName()));
        for (QuerySelectable selectable : q.getSelect()) {
            if (selectable instanceof QueryClass) {
                // Materialising a whole object may need the InterMineObject table when the
                // serialised OBJECT column lives there (missing-not-XML mode).
                if (addInterMineObject && schema.isMissingNotXml()) {
                    tablenames.add(interMineObject);
                }
            } else if (selectable instanceof QueryEvaluable) {
                // Do nothing
            } else if (selectable instanceof QueryForeignKey) {
                // Do nothing
            } else if (selectable instanceof ObjectStoreBag) {
                if (individualOsbs) {
                    tablenames.add(selectable);
                } else {
                    tablenames.add(INT_BAG_TABLE_NAME);
                }
            } else if (selectable instanceof ObjectStoreBagCombination) {
                if (individualOsbs) {
                    tablenames.addAll(((ObjectStoreBagCombination) selectable).getBags());
                } else {
                    tablenames.add(INT_BAG_TABLE_NAME);
                }
            } else if (selectable instanceof ObjectStoreBagsForObject) {
                tablenames.add(INT_BAG_TABLE_NAME);
            } else if (selectable instanceof Clob) {
                if (individualOsbs) {
                    tablenames.add(selectable);
                } else {
                    tablenames.add(CLOB_TABLE_NAME);
                }
            } else if (selectable instanceof QueryCollectionPathExpression) {
                // Path expressions wrap an inner query; probe it with a dummy collection so
                // its table names can be collected.
                Collection<ProxyReference> empty = Collections.singleton(new ProxyReference(null,
                            new Integer(1), InterMineObject.class));
                findTableNames(tablenames, ((QueryCollectionPathExpression) selectable)
                        .getQuery(empty), schema, addInterMineObject, individualOsbs);
            } else if (selectable instanceof QueryObjectPathExpression) {
                Collection<Integer> empty = Collections.singleton(new Integer(1));
                findTableNames(tablenames, ((QueryObjectPathExpression) selectable)
                        .getQuery(empty, schema.isMissingNotXml()), schema,
                        addInterMineObject, individualOsbs);
            } else if (selectable instanceof PathExpressionField) {
                Collection<Integer> empty = Collections.singleton(new Integer(1));
                findTableNames(tablenames, ((PathExpressionField) selectable).getQope()
                        .getQuery(empty, schema.isMissingNotXml()), schema,
                        addInterMineObject, individualOsbs);
            } else {
                throw new ObjectStoreException("Illegal entry in SELECT list: "
                        + selectable.getClass());
            }
        }
    }
/**
* Adds table names to a Set of table names, from a given constraint.
*
* @param tablenames a Set of table names and bags - new ones will be added here
* @param c the Constraint
* @param schema the DatabaseSchema in which to look up metadata
* @param individualOsbs if true, adds individual ObjectStoreBags to the Set, otherwise just
* adds the table name instead
* @throws ObjectStoreException if something goes wrong
*/
private static void findTableNamesInConstraint(Set<Object> tablenames, Constraint c,
DatabaseSchema schema, boolean individualOsbs) throws ObjectStoreException {
if (c instanceof ConstraintSet) {
for (Constraint subC : ((ConstraintSet) c).getConstraints()) {
findTableNamesInConstraint(tablenames, subC, schema, individualOsbs);
}
} else if (c instanceof SubqueryConstraint) {
findTableNames(tablenames, ((SubqueryConstraint) c).getQuery(), schema, false,
individualOsbs);
} else if (c instanceof SubqueryExistsConstraint) {
findTableNames(tablenames, ((SubqueryExistsConstraint) c).getQuery(), schema, false,
individualOsbs);
} else if (c instanceof ContainsConstraint) {
ContainsConstraint cc = (ContainsConstraint) c;
QueryReference ref = cc.getReference();
if (ref instanceof QueryCollectionReference) {
ReferenceDescriptor refDesc = (ReferenceDescriptor) schema.getModel()
.getFieldDescriptorsForClass(ref.getQcType()).get(ref.getFieldName());
if (refDesc.relationType() == FieldDescriptor.M_N_RELATION) {
tablenames.add(DatabaseUtil.getIndirectionTableName((CollectionDescriptor)
refDesc));
} else if (cc.getQueryClass() == null) {
tablenames.add(DatabaseUtil.getTableName(schema.getTableMaster(
refDesc.getReferencedClassDescriptor())));
}
}
} else if (c instanceof BagConstraint) {
if (((BagConstraint) c).getOsb() != null) {
if (individualOsbs) {
tablenames.add(((BagConstraint) c).getOsb());
} else {
tablenames.add(INT_BAG_TABLE_NAME);
}
}
} else if (!((c == null) || (c instanceof SimpleConstraint)
|| (c instanceof ClassConstraint) || (c instanceof OverlapConstraint)
|| (c instanceof MultipleInBagConstraint))) {
throw new ObjectStoreException("Unknown constraint type: " + c.getClass());
}
}
    /**
     * Builds the FROM list for the SQL query, and registers a field-name-to-SQL-alias mapping
     * for every FromElement so later stages can render column references.
     *
     * @param state the current Sql Query state
     * @param q the Query
     * @param schema the DatabaseSchema in which to look up metadata
     * @param bagTableNames a Map from BagConstraint to temporary table name
     * @throws ObjectStoreException if something goes wrong
     */
    protected static void buildFromComponent(State state, Query q, DatabaseSchema schema,
            Map<Object, String> bagTableNames) throws ObjectStoreException {
        for (FromElement fromElement : q.getFrom()) {
            if (fromElement instanceof QueryClass) {
                QueryClass qc = (QueryClass) fromElement;
                String baseAlias = DatabaseUtil.generateSqlCompatibleName(q.getAliases().get(qc));
                // A dynamic class decomposes into several component classes; each component
                // contributes a table, all joined together on the shared id column.
                Set<Class<?>> classes = Util.decomposeClass(qc.getType());
                List<ClassDescriptorAndAlias> aliases = new ArrayList<ClassDescriptorAndAlias>();
                int sequence = 0;
                String lastAlias = "";
                for (Class<?> cls : classes) {
                    ClassDescriptor cld = schema.getModel().getClassDescriptorByName(cls.getName());
                    if (cld == null) {
                        throw new ObjectStoreException(cls.toString() + " is not in the model");
                    }
                    ClassDescriptor tableMaster = schema.getTableMaster(cld);
                    if (sequence == 0) {
                        // First component uses the bare alias.
                        aliases.add(new ClassDescriptorAndAlias(cld, baseAlias));
                        state.addToFrom(DatabaseUtil.getTableName(tableMaster) + " AS "
                                + baseAlias);
                        if (schema.isTruncated(tableMaster)) {
                            // Truncated tables hold rows of several classes; restrict to this one.
                            if (state.getWhereBuffer().length() > 0) {
                                state.addToWhere(" AND ");
                            }
                            state.addToWhere(baseAlias + ".tableclass = '" + cls.getName() + "'");
                        }
                    } else {
                        // Subsequent components get numbered aliases joined on id to the
                        // previous component.
                        aliases.add(new ClassDescriptorAndAlias(cld, baseAlias + "_" + sequence));
                        state.addToFrom(DatabaseUtil.getTableName(tableMaster) + " AS " + baseAlias
                                + "_" + sequence);
                        if (state.getWhereBuffer().length() > 0) {
                            state.addToWhere(" AND ");
                        }
                        state.addToWhere(baseAlias + lastAlias + ".id = " + baseAlias
                                + "_" + sequence + ".id");
                        lastAlias = "_" + sequence;
                        if (schema.isTruncated(tableMaster)) {
                            state.addToWhere(" AND " + baseAlias + "_" + sequence
                                    + ".tableclass = '" + cls.getName() + "'");
                        }
                    }
                    sequence++;
                }
                Map<String, FieldDescriptor> fields = schema.getModel()
                        .getFieldDescriptorsForClass(qc.getType());
                Map<String, String> fieldToAlias = state.getFieldToAlias(qc);
                Iterator<FieldDescriptor> fieldIter = null;
                if (schema.isFlatMode(qc.getType())) {
                    // Flat mode: only the table-master's attribute and reference columns exist,
                    // so iterate those rather than the full field map.
                    List<Iterator<? extends FieldDescriptor>> iterators
                        = new ArrayList<Iterator<? extends FieldDescriptor>>();
                    ClassDescriptor cld = schema.getTableMaster(schema.getModel()
                            .getClassDescriptorsForClass(qc.getType()).iterator().next());
                    DatabaseSchema.Fields dbsFields = schema.getTableFields(schema
                            .getTableMaster(cld));
                    iterators.add(dbsFields.getAttributes().iterator());
                    iterators.add(dbsFields.getReferences().iterator());
                    fieldIter = new CombinedIterator<FieldDescriptor>(iterators);
                } else {
                    fieldIter = fields.values().iterator();
                }
                // Map each field name to the first alias whose class declares that field.
                while (fieldIter.hasNext()) {
                    FieldDescriptor field = fieldIter.next();
                    String name = field.getName();
                    for (ClassDescriptorAndAlias aliasEntry : aliases) {
                        ClassDescriptor cld = aliasEntry.getClassDescriptor();
                        String alias = aliasEntry.getAlias();
                        if (cld.getAllFieldDescriptors().contains(field) || schema.isFlatMode(qc
                                    .getType())) {
                            fieldToAlias.put(name, alias + "." + DatabaseUtil.getColumnName(field));
                            break;
                        }
                    }
                }
                // Deal with OBJECT column
                if (schema.isMissingNotXml()) {
                    // Only the InterMineObject table carries the serialised OBJECT column.
                    for (ClassDescriptorAndAlias aliasEntry : aliases) {
                        ClassDescriptor cld = aliasEntry.getClassDescriptor();
                        String alias = aliasEntry.getAlias();
                        ClassDescriptor tableMaster = schema.getTableMaster(cld);
                        if (InterMineObject.class.equals(tableMaster.getType())) {
                            fieldToAlias.put("OBJECT", alias + ".OBJECT");
                            break;
                        }
                    }
                } else if (!schema.isFlatMode(qc.getType())) {
                    fieldToAlias.put("OBJECT", baseAlias + ".OBJECT");
                }
                fieldToAlias.put("class", baseAlias + ".class");
            } else if (fromElement instanceof Query) {
                // Subquery in FROM: generate its SQL inline; every field of the subquery is
                // referenced through the subquery's single alias.
                state.addToFrom("(" + generate((Query) fromElement, schema, state.getDb(), null,
                                QUERY_SUBQUERY_FROM, bagTableNames) + ") AS "
                        + DatabaseUtil.generateSqlCompatibleName(q.getAliases().get(fromElement)));
                state.setFieldToAlias(fromElement, new AlwaysMap<String, String>(DatabaseUtil
                        .generateSqlCompatibleName((q.getAliases().get(fromElement)))));
            } else if (fromElement instanceof QueryClassBag) {
                // The problem here is:
                // We do not know the column name for the "id" field, because this will use a
                // table like an indirection table or other class table. We need to have this id
                // column name available for QueryFields and for extra tables added to the query
                // that need to be tied to the original copy via the id column. This id column
                // name must be filled in by the ContainsConstraint code.
                // Therefore, we do nothing here.
            } else {
                throw new ObjectStoreException("Unknown FromElement: " + fromElement.getClass());
            }
        }
    }
/**
* Builds the WHERE clause for the SQL query.
*
* @param state the current Sql Query state
* @param q the Query
* @param c the Constraint
* @param schema the DatabaseSchema in which to look up metadata
* @throws ObjectStoreException if something goes wrong
*/
protected static void buildWhereClause(State state, Query q, Constraint c,
DatabaseSchema schema) throws ObjectStoreException {
if (c != null) {
if (completelyFalse(c)) {
throw new CompletelyFalseException();
}
if (completelyTrue(c)) {
return;
}
LinkedList<Constraint> constraints = new LinkedList<Constraint>();
boolean needWhereComma = state.getWhereBuffer().length() > 0;
boolean needHavingComma = state.getHavingBuffer().length() > 0;
boolean usingHaving = !q.getGroupBy().isEmpty();
constraints.add(c);
while (!constraints.isEmpty()) {
Constraint con = constraints.removeFirst();
if ((con instanceof ConstraintSet)
&& ((ConstraintSet) con).getOp().equals(ConstraintOp.AND)) {
constraints.addAll(0, ((ConstraintSet) con).getConstraints());
} else {
boolean[] whs = whereHavingSafe(con, q);
if (whs[1] && usingHaving) {
StringBuffer buffer = state.getHavingBuffer();
if (needHavingComma) {
buffer.append(" AND ");
}
needHavingComma = true;
constraintToString(state, buffer, con, q, schema, SAFENESS_SAFE, true);
} else if (whs[0]) {
StringBuffer buffer = state.getWhereBuffer();
if (needWhereComma) {
buffer.append(" AND ");
}
needWhereComma = true;
constraintToString(state, buffer, con, q, schema, SAFENESS_SAFE, true);
} else {
throw new ObjectStoreException("Constraint " + con + " mixes WHERE"
+ " and HAVING components");
}
}
}
}
}
/**
* Returns an array containing two boolean values. The first is whether this object is suitable
* for use in a WHERE clause, and the second is whether the object is suitable for use in a
* HAVING clause.
*
* @param o an Object of some kind
* @param q the current Query
* @return an array of two booleans
* @throws ObjectStoreException if the object type is not recognised
*/
protected static boolean[] whereHavingSafe(Object o, Query q) throws ObjectStoreException {
if (o instanceof QueryField) {
return new boolean[] {true, q.getGroupBy().contains(o)
|| q.getGroupBy().contains(((QueryField) o).getFromElement())};
} else if (o instanceof QueryClass) {
return new boolean[] {true, q.getGroupBy().contains(o)};
} else if (o instanceof QueryValue) {
return new boolean[] {true, true};
} else if (o instanceof QueryFunction) {
return new boolean[] {false, true};
} else if (o instanceof QueryCast) {
return whereHavingSafe(((QueryCast) o).getValue(), q);
} else if (o instanceof QueryExpression) {
QueryExpression qe = (QueryExpression) o;
QueryEvaluable arg1 = qe.getArg1();
QueryEvaluable arg2 = qe.getArg2();
QueryEvaluable arg3 = qe.getArg3();
boolean[] s = whereHavingSafe(arg1, q);
boolean whereSafe = s[0];
boolean havingSafe = s[1];
if (arg2 != null) {
s = whereHavingSafe(arg2, q);
whereSafe = whereSafe && s[0];
havingSafe = havingSafe && s[1];
}
if (arg3 != null) {
s = whereHavingSafe(arg3, q);
whereSafe = whereSafe && s[0];
havingSafe = havingSafe && s[1];
}
return new boolean[] {whereSafe, havingSafe};
} else if (o instanceof QueryForeignKey) {
return new boolean[] {true, q.getGroupBy().contains(o)
|| q.getGroupBy().contains(((QueryForeignKey) o).getQueryClass())};
} else if (o instanceof QueryReference) {
QueryClass qc = ((QueryReference) o).getQueryClass();
if (qc == null) {
return new boolean[] {true, true};
} else {
return whereHavingSafe(qc, q);
}
} else if (o instanceof SimpleConstraint) {
SimpleConstraint c = (SimpleConstraint) o;
QueryEvaluable arg1 = c.getArg1();
QueryEvaluable arg2 = c.getArg2();
if (arg2 == null) {
return whereHavingSafe(arg1, q);
} else {
boolean[] s1 = whereHavingSafe(arg1, q);
boolean[] s2 = whereHavingSafe(arg2, q);
return new boolean[] {s1[0] && s2[0], s1[1] && s2[1]};
}
} else if (o instanceof ConstraintSet) {
boolean whereSafe = true;
boolean havingSafe = true;
for (Constraint c : ((ConstraintSet) o).getConstraints()) {
boolean[] s = whereHavingSafe(c, q);
whereSafe = whereSafe && s[0];
havingSafe = havingSafe && s[1];
}
return new boolean[] {whereSafe, havingSafe};
} else if (o instanceof BagConstraint) {
//return whereHavingSafe(((BagConstraint) o).getQueryNode(), q);
return new boolean[] {true, false};
} else if (o instanceof MultipleInBagConstraint) {
boolean whereSafe = true;
boolean havingSafe = true;
for (QueryEvaluable qe : ((MultipleInBagConstraint) o).getEvaluables()) {
boolean[] s = whereHavingSafe(qe, q);
whereSafe = whereSafe && s[0];
havingSafe = havingSafe && s[1];
}
return new boolean[] {whereSafe, havingSafe};
} else if (o instanceof ClassConstraint) {
ClassConstraint cc = (ClassConstraint) o;
QueryClass arg1 = cc.getArg1();
QueryClass arg2 = cc.getArg2QueryClass();
boolean[] s = whereHavingSafe(arg1, q);
boolean whereSafe = s[0];
boolean havingSafe = s[1];
if (arg2 != null) {
s = whereHavingSafe(arg2, q);
whereSafe = whereSafe && s[0];
havingSafe = havingSafe && s[1];
}
return new boolean[] {whereSafe, havingSafe};
} else if (o instanceof ContainsConstraint) {
ContainsConstraint c = (ContainsConstraint) o;
QueryReference arg1 = c.getReference();
QueryClass arg2 = c.getQueryClass();
boolean[] s = whereHavingSafe(arg1, q);
boolean whereSafe = s[0];
boolean havingSafe = s[1];
if (arg2 != null) {
s = whereHavingSafe(arg2, q);
whereSafe = whereSafe && s[0];
havingSafe = havingSafe && s[1];
}
return new boolean[] {whereSafe, havingSafe};
} else if (o instanceof SubqueryConstraint) {
QueryClass qc = ((SubqueryConstraint) o).getQueryClass();
QueryEvaluable qe = ((SubqueryConstraint) o).getQueryEvaluable();
if (qc != null) {
return whereHavingSafe(qc, q);
} else {
return whereHavingSafe(qe, q);
}
} else if (o instanceof SubqueryExistsConstraint) {
return new boolean[] {true, true};
} else if (o instanceof OverlapConstraint) {
OverlapConstraint oc = (OverlapConstraint) o;
boolean[] s1 = whereHavingSafe(oc.getLeft().getStart(), q);
boolean[] s2 = whereHavingSafe(oc.getLeft().getEnd(), q);
boolean[] s3 = whereHavingSafe(oc.getLeft().getParent(), q);
boolean[] s4 = whereHavingSafe(oc.getRight().getStart(), q);
boolean[] s5 = whereHavingSafe(oc.getRight().getEnd(), q);
boolean[] s6 = whereHavingSafe(oc.getRight().getParent(), q);
return new boolean[] {s1[0] && s2[0] && s3[0] && s4[0] && s5[0] && s6[0],
s1[1], s2[1], s3[1], s4[1], s5[1], s6[1]};
} else {
throw new ObjectStoreException("Unrecognised object " + o);
}
}
/**
* Returns true if this constraint is always true, regardless of row values.
*
* @param con a Constraint
* @return a boolean
* @throws ObjectStoreException when a bag contains elements of the wrong type
*/
protected static boolean completelyTrue(Constraint con) throws ObjectStoreException {
if (con instanceof ConstraintSet) {
ConstraintSet cs = (ConstraintSet) con;
if (cs.getOp() == ConstraintOp.AND) {
boolean retval = true;
Iterator<Constraint> csIter = cs.getConstraints().iterator();
while (csIter.hasNext() && retval) {
Constraint c = csIter.next();
retval = retval && completelyTrue(c);
}
return retval;
} else if (cs.getOp() == ConstraintOp.OR) {
boolean retval = false;
Iterator<Constraint> csIter = cs.getConstraints().iterator();
while (csIter.hasNext() && (!retval)) {
Constraint c = csIter.next();
retval = retval || completelyTrue(c);
}
return retval;
} else if (cs.getOp() == ConstraintOp.NOR) {
boolean retval = true;
Iterator<Constraint> csIter = cs.getConstraints().iterator();
while (csIter.hasNext() && retval) {
Constraint c = csIter.next();
retval = retval && completelyFalse(c);
}
return retval;
} else if (cs.getOp() == ConstraintOp.NAND) {
boolean retval = false;
Iterator<Constraint> csIter = cs.getConstraints().iterator();
while (csIter.hasNext() && retval) {
Constraint c = csIter.next();
retval = retval || completelyFalse(c);
}
return retval;
} else {
throw new IllegalArgumentException("Invalid operation " + cs.getOp());
}
} else if (con instanceof BagConstraint) {
BagConstraint bc = (BagConstraint) con;
if ((bc.getBag() != null) && (bc.getOp() == ConstraintOp.NOT_IN)) {
boolean empty = true;
Class<?> type = bc.getQueryNode().getType();
for (Object bagItem : bc.getBag()) {
if (!(ProxyReference.class.equals(bagItem.getClass())
|| DynamicUtil.isInstance(bagItem, type))) {
throw new ObjectStoreException("Bag<" + Util.getFriendlyName(type)
+ "> contains element of wrong type ("
+ Util.getFriendlyName(bagItem.getClass()) + ")");
}
empty = false;
}
return empty;
}
}
return false;
}
/**
* Returns true if this constraint is always false, regardless of row values.
*
* @param con a Constraint
* @return a boolean
* @throws ObjectStoreException when a bag contains elements of the wrong type
*/
protected static boolean completelyFalse(Constraint con) throws ObjectStoreException {
if (con instanceof ConstraintSet) {
ConstraintSet cs = (ConstraintSet) con;
if (cs.getOp() == ConstraintOp.AND) {
boolean retval = false;
Iterator<Constraint> csIter = cs.getConstraints().iterator();
while (csIter.hasNext() && (!retval)) {
Constraint c = csIter.next();
retval = retval || completelyFalse(c);
}
return retval;
} else if (cs.getOp() == ConstraintOp.OR) {
boolean retval = true;
Iterator<Constraint> csIter = cs.getConstraints().iterator();
while (csIter.hasNext() && retval) {
Constraint c = csIter.next();
retval = retval && completelyFalse(c);
}
return retval;
} else if (cs.getOp() == ConstraintOp.NOR) {
boolean retval = false;
Iterator<Constraint> csIter = cs.getConstraints().iterator();
while (csIter.hasNext() && (!retval)) {
Constraint c = csIter.next();
retval = retval || completelyTrue(c);
}
return retval;
} else if (cs.getOp() == ConstraintOp.NAND) {
boolean retval = true;
Iterator<Constraint> csIter = cs.getConstraints().iterator();
while (csIter.hasNext() && retval) {
Constraint c = csIter.next();
retval = retval && completelyTrue(c);
}
return retval;
} else {
throw new IllegalArgumentException("Invalid operation " + cs.getOp());
}
} else if (con instanceof BagConstraint) {
BagConstraint bc = (BagConstraint) con;
if ((bc.getBag() != null) && (bc.getOp() == ConstraintOp.IN)) {
boolean empty = true;
Class<?> type = bc.getQueryNode().getType();
for (Object bagItem : bc.getBag()) {
if (!(bagItem == null || ProxyReference.class.equals(bagItem.getClass())
|| DynamicUtil.isInstance(bagItem, type))) {
throw new ObjectStoreException("Bag<" + Util.getFriendlyName(type)
+ "> contains element of wrong type ("
+ Util.getFriendlyName(bagItem.getClass()) + ")");
}
empty = false;
}
return empty;
}
}
return false;
}
    /** Safeness value indicating a situation safe for ContainsConstraint CONTAINS */
    public static final int SAFENESS_SAFE = 1;
    /** Safeness value indicating a situation safe for ContainsConstraint DOES NOT CONTAIN */
    public static final int SAFENESS_ANTISAFE = -1;
    /** Safeness value indicating a situation unsafe for ContainsConstraint; note that negating
     * a constraint maps SAFE to ANTISAFE and back, while UNSAFE stays UNSAFE */
    public static final int SAFENESS_UNSAFE = 0;
/**
* Converts a Constraint object into a String suitable for putting in an SQL query.
*
* @param state the current SqlGenerator state
* @param buffer the StringBuffer to place text into
* @param c the Constraint object
* @param q the Query
* @param schema the DatabaseSchema in which to look up metadata
* @param safeness the ContainsConstraint safeness parameter
* @param loseBrackets true if an AND ConstraintSet can be represented safely without
* surrounding parentheses
* @throws ObjectStoreException if something goes wrong
*/
protected static void constraintToString(State state, StringBuffer buffer, Constraint c,
Query q, DatabaseSchema schema, int safeness,
boolean loseBrackets) throws ObjectStoreException {
if ((safeness != SAFENESS_SAFE) && (safeness != SAFENESS_ANTISAFE)
&& (safeness != SAFENESS_UNSAFE)) {
throw new ObjectStoreException("Unknown ContainsConstraint safeness: " + safeness);
}
if (c instanceof ConstraintSet) {
constraintSetToString(state, buffer, (ConstraintSet) c, q, schema, safeness,
loseBrackets);
} else if (c instanceof SimpleConstraint) {
simpleConstraintToString(state, buffer, (SimpleConstraint) c, q);
} else if (c instanceof SubqueryConstraint) {
subqueryConstraintToString(state, buffer, (SubqueryConstraint) c, q, schema);
} else if (c instanceof SubqueryExistsConstraint) {
subqueryExistsConstraintToString(state, buffer, (SubqueryExistsConstraint) c,
schema);
} else if (c instanceof ClassConstraint) {
classConstraintToString(state, buffer, (ClassConstraint) c, q, schema);
} else if (c instanceof ContainsConstraint) {
containsConstraintToString(state, buffer, (ContainsConstraint) c, q, schema, safeness,
loseBrackets);
} else if (c instanceof BagConstraint) {
bagConstraintToString(state, buffer, (BagConstraint) c, q, schema, safeness);
} else if (c instanceof MultipleInBagConstraint) {
multipleInBagConstraintToString(state, buffer, (MultipleInBagConstraint) c, q,
safeness);
} else if (c instanceof OverlapConstraint) {
overlapConstraintToString(state, buffer, (OverlapConstraint) c, q, schema, safeness);
} else {
throw (new ObjectStoreException("Unknown constraint type: " + c));
}
}
    /**
     * Converts a ConstraintSet object into a String suitable for putting in an SQL query.
     * NAND and NOR are rendered as a negated AND / OR. In a disjunction, SubqueryConstraints
     * sharing the same left-hand side are merged into a single "x IN (a UNION b)" clause.
     *
     * @param state the current SqlGenerator state
     * @param buffer the StringBuffer to place text into
     * @param c the ConstraintSet object
     * @param q the Query
     * @param schema the DatabaseSchema in which to look up metadata
     * @param safeness the ContainsConstraint safeness parameter
     * @param loseBrackets true if an AND ConstraintSet can be represented safely without
     * surrounding parentheses
     * @throws ObjectStoreException if something goes wrong
     */
    protected static void constraintSetToString(State state, StringBuffer buffer, ConstraintSet c,
            Query q, DatabaseSchema schema, int safeness,
            boolean loseBrackets) throws ObjectStoreException {
        if ((safeness != SAFENESS_SAFE) && (safeness != SAFENESS_ANTISAFE)
                && (safeness != SAFENESS_UNSAFE)) {
            throw new ObjectStoreException("Unknown ContainsConstraint safeness: " + safeness);
        }
        ConstraintOp op = c.getOp();
        boolean negate = (op == ConstraintOp.NAND) || (op == ConstraintOp.NOR);
        boolean disjunctive = (op == ConstraintOp.OR) || (op == ConstraintOp.NOR);
        boolean andOrNor = (op == ConstraintOp.AND) || (op == ConstraintOp.NOR);
        // Work out how the safeness value propagates into the children: negation flips it,
        // and mixing it through the "wrong" kind of junction degrades it to UNSAFE.
        int newSafeness;
        if (safeness == SAFENESS_UNSAFE) {
            newSafeness = SAFENESS_UNSAFE;
        } else if (c.getConstraints().size() == 1) {
            newSafeness = negate ? -safeness : safeness;
        } else if (safeness == (andOrNor ? SAFENESS_SAFE : SAFENESS_ANTISAFE)) {
            newSafeness = negate ? -safeness : safeness;
        } else {
            newSafeness = SAFENESS_UNSAFE;
        }
        if (c.getConstraints().isEmpty()) {
            // An empty AND is true and an empty OR is false; negation flips these.
            buffer.append((disjunctive ? negate : !negate) ? "true" : "false");
        } else {
            buffer.append(negate ? "(NOT (" : (loseBrackets && (!disjunctive) ? "" : "("));
            boolean needComma = false;
            // Keyed by the rendered left-hand side text; values accumulate UNION-ed subqueries.
            Map<String, StringBuffer> subqueryConstraints = new HashMap<String, StringBuffer>();
            for (Constraint subC : c.getConstraints()) {
                if (disjunctive && (subC instanceof SubqueryConstraint)) {
                    // Collect for merging instead of rendering immediately.
                    SubqueryConstraint subQC = (SubqueryConstraint) subC;
                    Query subQCQuery = subQC.getQuery();
                    QueryEvaluable subQCEval = subQC.getQueryEvaluable();
                    QueryClass subQCClass = subQC.getQueryClass();
                    StringBuffer left = new StringBuffer();
                    if (subQCEval != null) {
                        queryEvaluableToString(left, subQCEval, q, state);
                    } else {
                        queryClassToString(left, subQCClass, q, schema, QUERY_SUBQUERY_CONSTRAINT,
                                state);
                    }
                    left.append(" " + subQC.getOp().toString() + " (");
                    StringBuffer existing = subqueryConstraints.get(left.toString());
                    if (existing == null) {
                        existing = new StringBuffer();
                        subqueryConstraints.put(left.toString(), existing);
                    } else {
                        existing.append(" UNION ");
                    }
                    existing.append(generate(subQCQuery, schema, state.getDb(), null,
                                QUERY_SUBQUERY_CONSTRAINT, state.getBagTableNames()));
                } else {
                    // Children that cannot affect the junction's value are omitted entirely.
                    if ((disjunctive && completelyFalse(subC))
                            || ((!disjunctive) && completelyTrue(subC))) {
                        // This query can be skipped
                    } else {
                        if (needComma) {
                            buffer.append(disjunctive ? " OR " : " AND ");
                        }
                        needComma = true;
                        constraintToString(state, buffer, subC, q, schema, newSafeness, (!negate)
                                && (!disjunctive));
                    }
                }
            }
            // Emit the merged "lhs OP (subquery UNION subquery ...)" clauses collected above.
            for (Map.Entry<String, StringBuffer> entry : subqueryConstraints.entrySet()) {
                String left = entry.getKey();
                String right = entry.getValue().toString();
                if (needComma) {
                    buffer.append(" OR ");
                }
                needComma = true;
                buffer.append(left);
                buffer.append(right);
                buffer.append(")");
            }
            buffer.append(negate ? "))" : (loseBrackets && (!disjunctive) ? "" : ")"));
        }
    }
/**
* Converts a SimpleConstraint object into a String suitable for putting in an SQL query.
*
* @param state the current SqlGenerator state
* @param buffer the StringBuffer to place text into
* @param c the SimpleConstraint object
* @param q the Query
* @throws ObjectStoreException if something goes wrong
*/
protected static void simpleConstraintToString(State state, StringBuffer buffer,
SimpleConstraint c, Query q) throws ObjectStoreException {
queryEvaluableToString(buffer, c.getArg1(), q, state);
buffer.append(" " + c.getOp().toString());
if (c.getArg2() != null) {
buffer.append(" ");
queryEvaluableToString(buffer, c.getArg2(), q, state);
}
}
/**
* Converts a SubqueryConstraint object into a String suitable for putting in an SQL query.
*
* @param state the current SqlGenerator state
* @param buffer the StringBuffer to place text into
* @param c the SubqueryConstraint object
* @param q the Query
* @param schema the DatabaseSchema in which to look up metadata
* @throws ObjectStoreException if something goes wrong
*/
protected static void subqueryConstraintToString(State state, StringBuffer buffer,
SubqueryConstraint c, Query q, DatabaseSchema schema) throws ObjectStoreException {
Query subQ = c.getQuery();
QueryEvaluable qe = c.getQueryEvaluable();
QueryClass cls = c.getQueryClass();
if (qe != null) {
queryEvaluableToString(buffer, qe, q, state);
} else {
queryClassToString(buffer, cls, q, schema, QUERY_SUBQUERY_CONSTRAINT,
state);
}
buffer.append(" " + c.getOp().toString() + " ("
+ generate(subQ, schema, state.getDb(), null, QUERY_SUBQUERY_CONSTRAINT,
state.getBagTableNames()) + ")");
}
/**
* Converts a SubqueryExistsConstraint object into a String suitable for putting in an SQL
* query.
*
* @param state the current SqlGenerator state
* @param buffer the StringBuffer to place text into
* @param c the SubqueryExistsConstraint object
* @param schema the DatabaseSchema in which to look up metadata
* @throws ObjectStoreException if something goes wrong
*/
protected static void subqueryExistsConstraintToString(State state, StringBuffer buffer,
SubqueryExistsConstraint c, DatabaseSchema schema) throws ObjectStoreException {
Query subQ = c.getQuery();
buffer.append((c.getOp() == ConstraintOp.EXISTS ? "EXISTS(" : "(NOT EXISTS(")
+ generate(subQ, schema, state.getDb(), null, QUERY_SUBQUERY_EXISTS,
state.getBagTableNames())
+ (c.getOp() == ConstraintOp.EXISTS ? ")" : "))"));
}
/**
* Converts a ClassConstraint object into a String suitable for putting in an SQL query.
*
* @param state the current SqlGenerator state
* @param buffer the StringBuffer to place text into
* @param c the ClassConstraint object
* @param q the Query
* @param schema the DatabaseSchema in which to look up metadata
* @throws ObjectStoreException if something goes wrong
*/
protected static void classConstraintToString(State state, StringBuffer buffer,
ClassConstraint c, Query q, DatabaseSchema schema) throws ObjectStoreException {
QueryClass arg1 = c.getArg1();
QueryClass arg2QC = c.getArg2QueryClass();
InterMineObject arg2O = c.getArg2Object();
queryClassToString(buffer, arg1, q, schema, ID_ONLY, state);
buffer.append(" " + c.getOp().toString() + " ");
if (arg2QC != null) {
queryClassToString(buffer, arg2QC, q, schema, ID_ONLY, state);
} else if (arg2O.getId() != null) {
objectToString(buffer, arg2O);
} else {
throw new ObjectStoreException("ClassConstraint cannot contain an InterMineObject"
+ " without an ID set");
}
}
/**
 * Converts a ContainsConstraint object into a String suitable for putting in an SQL query.
 * Handles three reference shapes: a QueryObjectReference (N-to-one, a plain foreign-key
 * column comparison), a one-to-many collection (join via the reverse reference column on
 * the referenced class's table), and a many-to-many collection (join via an indirection
 * table). IS_NULL / IS_NOT_NULL operators on collections are delegated to addNullPhrase.
 *
 * @param state the current SqlGenerator state
 * @param buffer the StringBuffer to place text into
 * @param c the ContainsConstraint object
 * @param q the Query
 * @param schema the DatabaseSchema in which to look up metadata
 * @param safeness the ContainsConstraint safeness parameter - one of SAFENESS_SAFE,
 * SAFENESS_ANTISAFE or SAFENESS_UNSAFE, describing the surrounding boolean context
 * @param loseBrackets true if an AND ConstraintSet can be represented safely without
 * surrounding parentheses
 * @throws ObjectStoreException if something goes wrong
 */
protected static void containsConstraintToString(State state, StringBuffer buffer,
        ContainsConstraint c, Query q, DatabaseSchema schema, int safeness,
        boolean loseBrackets) throws ObjectStoreException {
    if ((safeness != SAFENESS_SAFE) && (safeness != SAFENESS_ANTISAFE)
            && (safeness != SAFENESS_UNSAFE)) {
        throw new ObjectStoreException("Unknown ContainsConstraint safeness: " + safeness);
    }
    QueryReference arg1 = c.getReference();
    QueryClass arg2 = c.getQueryClass();
    InterMineObject arg2Obj = c.getObject();
    // Look up the model metadata for the referenced field; a missing descriptor means the
    // constraint refers to a field that does not exist in the model.
    Map<String, FieldDescriptor> fieldNameToFieldDescriptor = schema.getModel()
            .getFieldDescriptorsForClass(arg1.getQcType());
    ReferenceDescriptor arg1Desc = (ReferenceDescriptor)
            fieldNameToFieldDescriptor.get(arg1.getFieldName());
    if (arg1Desc == null) {
        throw new ObjectStoreException("Reference "
                + IqlQuery.queryReferenceToString(q, arg1, new ArrayList<Object>())
                + "." + arg1.getFieldName() + " is not in the model - fields available in "
                + arg1.getQcType() + " are " + fieldNameToFieldDescriptor.keySet());
    }
    if (arg1 instanceof QueryObjectReference) {
        // N-to-one reference: compare the foreign-key column directly against the target
        // object's id (or the target QueryClass's id column).
        String arg1Alias = state.getFieldToAlias(arg1.getQueryClass()).get(arg1Desc.getName());
        if (c.getOp().equals(ConstraintOp.IS_NULL) || c.getOp().equals(ConstraintOp
                    .IS_NOT_NULL)) {
            buffer.append(arg1Alias + " " + c.getOp().toString());
        } else {
            buffer.append(arg1Alias + (c.getOp() == ConstraintOp.CONTAINS ? " = " : " != "));
            if (arg2 == null) {
                objectToString(buffer, arg2Obj);
            } else {
                queryClassToString(buffer, arg2, q, schema, ID_ONLY, state);
            }
        }
    } else if (arg1 instanceof QueryCollectionReference) {
        // Collection reference: the owner may be a QueryClass, a concrete object, or a
        // QueryClassBag.
        InterMineObject arg1Obj = ((QueryCollectionReference) arg1).getQcObject();
        QueryClass arg1Qc = arg1.getQueryClass();
        QueryClassBag arg1Qcb = ((QueryCollectionReference) arg1).getQcb();
        // QueryClassBag owners are only expressible when the constraint direction matches
        // the boolean context (CONTAINS must be safe, DOES NOT CONTAIN must be anti-safe).
        if ((arg1Qcb != null) && (safeness != (c.getOp().equals(ConstraintOp.CONTAINS)
                        ? SAFENESS_SAFE : SAFENESS_ANTISAFE))) {
            throw new ObjectStoreException(safeness == SAFENESS_UNSAFE
                    ? "Invalid constraint: QueryClassBag ContainsConstraint cannot be inside"
                        + " an OR ConstraintSet"
                    : "Invalid constraint: DOES NOT CONTAINS cannot be applied to a"
                        + " QueryClassBag");
        }
        if (c.getOp().equals(ConstraintOp.IS_NULL) || c.getOp().equals(ConstraintOp
                    .IS_NOT_NULL)) {
            addNullPhrase(state, buffer, c, schema, arg1, arg1Desc);
        } else if (arg1Desc.relationType() == FieldDescriptor.ONE_N_RELATION) {
            // One-to-many: the referenced class's table has a column pointing back at the
            // owner, so join through that reverse-reference column.
            if (arg2 == null) {
                ReferenceDescriptor reverse = arg1Desc.getReverseReferenceDescriptor();
                String indirectTableAlias = state.getIndirectAlias(); // Not really indirection
                String arg2Alias = indirectTableAlias + "."
                    + DatabaseUtil.getColumnName(reverse);
                ClassDescriptor tableMaster = schema.getTableMaster(reverse
                        .getClassDescriptor());
                state.addToFrom(DatabaseUtil.getTableName(tableMaster) + " AS "
                        + indirectTableAlias);
                buffer.append(loseBrackets ? "" : "(");
                if (schema.isTruncated(tableMaster)) {
                    // Truncated tables hold several classes - restrict to the right one.
                    buffer.append(indirectTableAlias + ".tableclass = '"
                            + reverse.getClassDescriptor().getType().getName() + "' AND ");
                }
                if (arg1Qc != null) {
                    queryClassToString(buffer, arg1Qc, q, schema, ID_ONLY, state);
                    buffer.append((c.getOp() == ConstraintOp.CONTAINS ? " = " : " != ")
                            + arg2Alias + " AND ");
                } else if (arg1Qcb != null) {
                    Map<String, String> fieldToAlias = state.getFieldToAlias(arg1Qcb);
                    if (fieldToAlias.containsKey("id")) {
                        buffer.append(arg2Alias + " = " + fieldToAlias.get("id") + " AND ");
                    } else {
                        // First use of this bag: bind its "id" to the reverse-ref column
                        // and emit the bag membership condition here.
                        fieldToAlias.put("id", arg2Alias);
                        if (arg1Qcb.getOsb() != null) {
                            bagConstraintToString(state, buffer, new BagConstraint(new
                                        QueryField(arg1Qcb), ConstraintOp.IN,
                                        arg1Qcb.getOsb()), q,
                                    schema, SAFENESS_UNSAFE);
                            // TODO: Not really unsafe
                            // [ 2012-08-06 ajk: what does this comment mean??]
                            buffer.append(" AND ");
                        } else if (arg1Qcb.getIds() != null) {
                            BagConstraint bagCon = new BagConstraint(new QueryField(arg1Qcb),
                                    (c.getOp() == ConstraintOp.CONTAINS ? ConstraintOp.IN
                                     : ConstraintOp.NOT_IN), arg1Qcb.getIds());
                            // Reuse any bag table already registered for the QueryClassBag.
                            state.getBagTableNames().put(bagCon, state.getBagTableNames().get(
                                        arg1Qcb));
                            bagConstraintToString(state, buffer, bagCon, q, schema,
                                    ((safeness == SAFENESS_SAFE)
                                     && (c.getOp() == ConstraintOp.CONTAINS)) ? SAFENESS_SAFE
                                    : SAFENESS_UNSAFE); // TODO: Not really unsafe
                            buffer.append(" AND ");
                        }
                    }
                } else {
                    buffer.append(arg1Obj.getId() + (c.getOp() == ConstraintOp.CONTAINS
                                ? " = " : " != ") + arg2Alias + " AND ");
                }
                buffer.append(indirectTableAlias + ".id = " + arg2Obj.getId());
                buffer.append(loseBrackets ? "" : ")");
            } else {
                // The contained element is a QueryClass already in the query, so its table
                // alias is known and no extra FROM entry is needed.
                String arg2Alias = state.getFieldToAlias(arg2).get(arg1Desc
                        .getReverseReferenceDescriptor().getName());
                if (arg1Qc != null) {
                    queryClassToString(buffer, arg1Qc, q, schema, ID_ONLY, state);
                    buffer.append((c.getOp() == ConstraintOp.CONTAINS ? " = " : " != ")
                            + arg2Alias);
                } else if (arg1Qcb != null) {
                    Map<String, String> fieldToAlias = state.getFieldToAlias(arg1Qcb);
                    if (fieldToAlias.containsKey("id")) {
                        buffer.append(arg2Alias + " = " + fieldToAlias.get("id"));
                    } else {
                        fieldToAlias.put("id", arg2Alias);
                        if (arg1Qcb.getOsb() != null) {
                            bagConstraintToString(state, buffer, new BagConstraint(
                                        new QueryField(arg1Qcb), ConstraintOp.IN,
                                        arg1Qcb.getOsb()), q, schema,
                                    SAFENESS_UNSAFE); // TODO: Not really unsafe
                        } else if (arg1Qcb.getIds() != null) {
                            BagConstraint bagCon = new BagConstraint(new QueryField(arg1Qcb),
                                    (c.getOp() == ConstraintOp.CONTAINS ? ConstraintOp.IN
                                     : ConstraintOp.NOT_IN), arg1Qcb.getIds());
                            state.getBagTableNames().put(bagCon, state.getBagTableNames().get(
                                        arg1Qcb));
                            bagConstraintToString(state, buffer, bagCon, q, schema,
                                    ((safeness == SAFENESS_SAFE)
                                     && (c.getOp() == ConstraintOp.CONTAINS)) ? SAFENESS_SAFE
                                    : SAFENESS_UNSAFE); // TODO: Not really unsafe
                        }
                    }
                } else {
                    buffer.append("" + arg1Obj.getId()
                            + (c.getOp() == ConstraintOp.CONTAINS ? " = " : " != ")
                            + arg2Alias);
                }
            }
        } else {
            // Many-to-many: must go through the indirection table, which is only
            // expressible when the constraint direction matches the boolean context.
            if (safeness != (c.getOp().equals(ConstraintOp.CONTAINS) ? SAFENESS_SAFE
                        : SAFENESS_ANTISAFE)) {
                throw new ObjectStoreException(safeness == SAFENESS_UNSAFE
                        ? "Cannot represent a many-to-many collection inside an OR"
                            + " ConstraintSet in SQL"
                        : "Cannot represent many-to-many collection DOES NOT CONTAIN in SQL");
            }
            CollectionDescriptor arg1ColDesc = (CollectionDescriptor) arg1Desc;
            String indirectTableAlias = state.getIndirectAlias();
            String arg2Alias = indirectTableAlias + "."
                + DatabaseUtil.getInwardIndirectionColumnName(arg1ColDesc, schema.getVersion());
            state.addToFrom(DatabaseUtil.getIndirectionTableName(arg1ColDesc) + " AS "
                    + indirectTableAlias);
            buffer.append(loseBrackets ? "" : "(");
            if (arg1Qc != null) {
                queryClassToString(buffer, arg1Qc, q, schema, ID_ONLY, state);
                buffer.append(" = " + arg2Alias);
                buffer.append(" AND ");
            } else if (arg1Qcb != null) {
                Map<String, String> fieldToAlias = state.getFieldToAlias(arg1Qcb);
                if (fieldToAlias.containsKey("id")) {
                    buffer.append(arg2Alias + " = " + fieldToAlias.get("id"));
                    buffer.append(" AND ");
                } else {
                    fieldToAlias.put("id", arg2Alias);
                    if (arg1Qcb.getOsb() != null) {
                        bagConstraintToString(state, buffer, new BagConstraint(
                                    new QueryField(arg1Qcb), ConstraintOp.IN, arg1Qcb
                                    .getOsb()), q, schema,
                                SAFENESS_UNSAFE); // TODO: Not really unsafe
                        buffer.append(" AND ");
                    } else if (arg1Qcb.getIds() != null) {
                        BagConstraint bagCon = new BagConstraint(new QueryField(arg1Qcb),
                                (c.getOp() == ConstraintOp.CONTAINS ? ConstraintOp.IN
                                 : ConstraintOp.NOT_IN), arg1Qcb.getIds());
                        state.getBagTableNames().put(bagCon, state.getBagTableNames().get(
                                    arg1Qcb));
                        bagConstraintToString(state, buffer, bagCon, q, schema,
                                ((safeness == SAFENESS_SAFE)
                                 && (c.getOp() == ConstraintOp.CONTAINS)) ? SAFENESS_SAFE
                                : SAFENESS_UNSAFE); // TODO: Not really unsafe
                        buffer.append(" AND ");
                    }
                }
            } else {
                buffer.append(arg1Obj.getId() + " = " + arg2Alias);
                buffer.append(" AND ");
            }
            // Join the outward side of the indirection table to the contained element.
            buffer.append(indirectTableAlias + "."
                    + DatabaseUtil.getOutwardIndirectionColumnName(arg1ColDesc,
                        schema.getVersion()) + " = ");
            if (arg2 == null) {
                buffer.append("" + arg2Obj.getId());
            } else {
                queryClassToString(buffer, arg2, q, schema, ID_ONLY, state);
            }
            buffer.append(loseBrackets ? "" : ")");
        }
    }
}
/**
 * Appends an (NOT) EXISTS subquery implementing an IS_NULL / IS_NOT_NULL
 * ContainsConstraint on a collection: IS_NULL means "no related rows exist".
 *
 * @param state the current SqlGenerator state (supplies aliases and the FROM list)
 * @param buffer the StringBuffer to place text into
 * @param c the ContainsConstraint, with op IS_NULL or IS_NOT_NULL
 * @param schema the DatabaseSchema in which to look up metadata
 * @param arg1 the collection reference being tested
 * @param arg1Desc the model descriptor for that reference
 */
private static void addNullPhrase(State state, StringBuffer buffer,
        ContainsConstraint c, DatabaseSchema schema, QueryReference arg1,
        ReferenceDescriptor arg1Desc) {
    // EXISTS OR NOT EXISTS to wrap this query
    String arg1Alias = state.getFieldToAlias(arg1.getQueryClass()).get("id");
    if (c.getOp() == ConstraintOp.IS_NULL) {
        // IS_NULL inverts the EXISTS; the matching ")" is appended at the end.
        buffer.append("(NOT ");
    }
    buffer.append("EXISTS(SELECT 1 FROM ");
    if (arg1Desc.relationType() == FieldDescriptor.ONE_N_RELATION) {
        // the referenced class will have an field pointing back to this class, see
        // whether anything exists that points back to our id
        ReferenceDescriptor reverse = arg1Desc.getReverseReferenceDescriptor();
        String indirectTableAlias = state.getIndirectAlias(); // Not really indirection
        String reverseRefAlias = indirectTableAlias + "."
            + DatabaseUtil.getColumnName(reverse);
        ClassDescriptor referencedClass = schema.getTableMaster(reverse
                .getClassDescriptor());
        buffer.append(DatabaseUtil.getTableName(referencedClass) + " AS "
                + indirectTableAlias);
        buffer.append(" WHERE " + reverseRefAlias + " = " + arg1Alias);
    } else if (arg1Desc.relationType() == FieldDescriptor.M_N_RELATION) {
        // We need to see if there are rows in the indirection table that points back
        // to our id
        CollectionDescriptor arg1ColDesc = (CollectionDescriptor) arg1Desc;
        String indirectTableAlias = state.getIndirectAlias();
        String indirectionTable = DatabaseUtil.getIndirectionTableName(arg1ColDesc);
        String inwardIndirectionCol = indirectTableAlias + "."
            + DatabaseUtil.getInwardIndirectionColumnName(arg1ColDesc,
                    schema.getVersion());
        buffer.append(indirectionTable + " AS " + indirectTableAlias);
        buffer.append(" WHERE " + inwardIndirectionCol + " = " + arg1Alias);
    }
    buffer.append(")");
    if (c.getOp() == ConstraintOp.IS_NULL) {
        buffer.append(")");
    }
}
/**
 * The maximum size a bag in a BagConstraint can be before we consider using a temporary table
 * instead.
 */
public static final int MAX_BAG_INLINE_SIZE = 2;

/**
 * Converts a BagConstraint object into a String suitable for putting on an SQL query.
 * Three representations are used: a reference to an ObjectStoreBag table, an inline
 * IN (...) list of literals, or a reference to a pre-created temporary bag table.
 * A null element in a literal bag becomes an extra IS (NOT) NULL term.
 *
 * @param state the current SqlGenerator state
 * @param buffer the StringBuffer to place text into
 * @param c the BagConstraint object
 * @param q the Query
 * @param schema the DatabaseSchema in which to look up metadata
 * @param safeness the constraint context safeness
 * @throws ObjectStoreException if something goes wrong
 */
protected static void bagConstraintToString(State state, StringBuffer buffer, BagConstraint c,
        Query q, DatabaseSchema schema, int safeness) throws ObjectStoreException {
    Class<?> type = c.getQueryNode().getType();
    // Render the left-hand side once up-front; it may be repeated several times below.
    String leftHandSide;
    if (c.getQueryNode() instanceof QueryEvaluable) {
        StringBuffer lhsBuffer = new StringBuffer();
        queryEvaluableToString(lhsBuffer, (QueryEvaluable) c.getQueryNode(), q, state);
        leftHandSide = lhsBuffer.toString();
    } else {
        StringBuffer lhsBuffer = new StringBuffer();
        queryClassToString(lhsBuffer, (QueryClass) c.getQueryNode(), q, schema, ID_ONLY, state);
        leftHandSide = lhsBuffer.toString();
    }
    SortedSet<Object> filteredBag = new TreeSet<Object>();
    Collection<?> bagColl = c.getBag();
    if (bagColl == null) {
        // No literal collection: the bag lives in the database as an ObjectStoreBag.
        ObjectStoreBag osb = c.getOsb();
        if (c.getOp() == ConstraintOp.IN) {
            buffer.append(leftHandSide);
        } else {
            buffer.append("(NOT (");
            buffer.append(leftHandSide);
        }
        if (((safeness == SAFENESS_SAFE) && (c.getOp() == ConstraintOp.IN))
                || ((safeness == SAFENESS_ANTISAFE)
                    && (c.getOp() == ConstraintOp.NOT_IN))) {
            // We can move the temporary bag table to the FROM list.
            String indirectTableAlias = state.getIndirectAlias(); // Not really indirection
            state.addToFrom(INT_BAG_TABLE_NAME + " AS "
                    + indirectTableAlias);
            buffer.append(" = " + indirectTableAlias + "." + BAGVAL_COLUMN);
            buffer.append(" AND " + indirectTableAlias + "." + BAGID_COLUMN + " = "
                    + osb.getBagId());
        } else {
            // Otherwise fall back to an uncorrelated IN subselect on the bag table.
            buffer.append(" IN (SELECT " + BAGVAL_COLUMN + " FROM ");
            buffer.append(INT_BAG_TABLE_NAME);
            buffer.append(" WHERE " + BAGID_COLUMN + " = " + osb.getBagId() + ")");
        }
        if (c.getOp() == ConstraintOp.NOT_IN) {
            buffer.append("))");
        }
    } else {
        // Literal collection: validate element types and convert objects to their ids.
        boolean nullSeen = false;
        for (Object bagItem : bagColl) {
            if (bagItem == null) {
                nullSeen = true;
            } else if (ProxyReference.class.equals(bagItem.getClass())
                    || DynamicUtil.isInstance(bagItem, type)) {
                if (bagItem instanceof InterMineObject) {
                    Integer bagValue = ((InterMineObject) bagItem).getId();
                    filteredBag.add(bagValue);
                } else if (bagItem instanceof Class<?>) {
                    filteredBag.add(((Class<?>) bagItem).getName());
                } else {
                    filteredBag.add(bagItem);
                }
            } else {
                throw new ObjectStoreException("Bag<" + type.getName() + "> contains element "
                        + "of wrong type (" + bagItem.getClass().getName() + ")");
            }
        }
        if (nullSeen) {
            // A null in the bag contributes an IS (NOT) NULL term, OR-ed (for IN) or
            // AND-ed (for NOT_IN) with the rest of the bag condition.
            if (!filteredBag.isEmpty()) {
                buffer.append("(");
            }
            buffer.append("(" + leftHandSide + " "
                    + (c.getOp() == ConstraintOp.IN ? "IS" : "IS NOT") + " NULL)");
            if (!filteredBag.isEmpty()) {
                buffer.append(" ")
                    .append(c.getOp() == ConstraintOp.IN ? "OR" : "AND")
                    .append(" ");
            }
        }
        if (filteredBag.isEmpty() && !nullSeen) {
            // An empty bag is trivially false for IN and true for NOT_IN.
            buffer.append(c.getOp() == ConstraintOp.IN ? "false" : "true");
        } else {
            String bagTableName = state.getBagTableNames().get(c);
            if (filteredBag.size() < MAX_BAG_INLINE_SIZE || bagTableName == null) {
                int needComma = 0;
                buffer.append(c.getOp() == ConstraintOp.IN ? "" : "(NOT ((");
                boolean limitRange = false;
                //boolean limitRange = (lowest < highest) && (filteredBag.size() > 10);
                //if (limitRange) {
                //    buffer.append("(" + leftHandSide + " >= " + lowest + " AND " + leftHandSide
                //            + " <= " + highest + " AND ");
                //}
                // Very large literal bags are split into chunks of 9000 values, OR-ed
                // together - presumably to stay within database IN-list limits; confirm.
                boolean parenthesesForGroups = (filteredBag.size() > 9000)
                    && ((c.getOp() == ConstraintOp.IN) || limitRange);
                for (Object orNext : filteredBag) {
                    if (needComma == 0) {
                        buffer.append((parenthesesForGroups ? "(" : "") + leftHandSide
                                + " IN (");
                    } else if (needComma % 9000 == 0) {
                        buffer.append(") OR " + leftHandSide + " IN (");
                    } else {
                        buffer.append(", ");
                    }
                    needComma++;
                    objectToString(buffer, orNext);
                }
                buffer.append(")");
                //if (limitRange) {
                //    buffer.append(")");
                //}
                if (parenthesesForGroups) {
                    buffer.append(")");
                }
                if (c.getOp() != ConstraintOp.IN) {
                    buffer.append("))");
                }
            } else {
                // A temporary bag table was created for this constraint - reference it.
                if (c.getOp() == ConstraintOp.IN) {
                    buffer.append(leftHandSide);
                } else {
                    buffer.append("(NOT (");
                    buffer.append(leftHandSide);
                }
                if (((safeness == SAFENESS_SAFE) && (c.getOp() == ConstraintOp.IN))
                        || ((safeness == SAFENESS_ANTISAFE)
                            && (c.getOp() == ConstraintOp.NOT_IN))) {
                    // We can move the temporary bag table to the FROM list.
                    String indirectTableAlias = state.getIndirectAlias(); // Not really
                                                                          // indirection
                    state.addToFrom(bagTableName + " AS " + indirectTableAlias);
                    buffer.append(" = " + indirectTableAlias + ".value");
                } else {
                    buffer.append(" IN (SELECT value FROM ");
                    buffer.append(bagTableName);
                    buffer.append(")");
                }
                if (c.getOp() == ConstraintOp.NOT_IN) {
                    buffer.append("))");
                }
            }
            if (nullSeen) {
                buffer.append(")");
            }
        }
    }
}
/**
 * Converts a MultipleInBagConstraint object into a String suitable for putting on an SQL
 * query: each evaluable is tested against the bag, and the per-evaluable conditions are
 * OR-ed together. All evaluables must share a single type, which the bag elements must
 * also match.
 *
 * @param state the current SqlGenerator state
 * @param buffer the StringBuffer to place text into
 * @param c the MultipleInBagConstraint object
 * @param q the Query
 * @param safeness the constraint context safeness
 * @throws ObjectStoreException if something goes wrong
 */
protected static void multipleInBagConstraintToString(State state, StringBuffer buffer,
        MultipleInBagConstraint c, Query q, int safeness)
    throws ObjectStoreException {
    // All evaluables must agree on one type.
    Class<?> type = null;
    for (QueryEvaluable qe : c.getEvaluables()) {
        if (type == null) {
            type = qe.getType();
        } else if (!type.equals(qe.getType())) {
            throw new IllegalArgumentException("MultipleInBagConstraint evaluables do not match"
                    + " type");
        }
    }
    // Render each evaluable once; each is reused below as a left-hand side.
    List<String> leftHandSide = new ArrayList<String>();
    for (QueryEvaluable qe : c.getEvaluables()) {
        StringBuffer lhsBuffer = new StringBuffer();
        queryEvaluableToString(lhsBuffer, qe, q, state);
        leftHandSide.add(lhsBuffer.toString());
    }
    SortedSet<Object> filteredBag = new TreeSet<Object>();
    Collection<?> bagColl = c.getBag();
    for (Object bagItem : bagColl) {
        if (type != null && type.isInstance(bagItem)) {
            filteredBag.add(bagItem);
        } else {
            if (type == null) {
                throw new NullPointerException("Couldn't determine type of list");
            } else {
                throw new ObjectStoreException("Bag<" + type.getName() + "> contains element "
                        + "of wrong type (" + bagItem.getClass().getName() + ")");
            }
        }
    }
    if (filteredBag.isEmpty()) {
        buffer.append("false");
    } else {
        String bagTableName = state.getBagTableNames().get(c);
        if (filteredBag.size() < MAX_BAG_INLINE_SIZE || bagTableName == null) {
            // Inline form: (lhs1 IN (...) OR lhs2 IN (...) OR ...).
            buffer.append("(");
            boolean needOrComma = false;
            for (String lhs : leftHandSide) {
                if (needOrComma) {
                    buffer.append(" OR ");
                }
                needOrComma = true;
                int needComma = 0;
                // NOTE(review): the "(NOT ((" prefix opened here for non-IN ops is never
                // closed below - presumably this constraint only ever carries the IN op,
                // making the branch dead; confirm against MultipleInBagConstraint.
                buffer.append(c.getOp() == ConstraintOp.IN ? "" : "(NOT ((");
                for (Object orNext : filteredBag) {
                    if (needComma == 0) {
                        buffer.append(lhs + " IN (");
                    } else if (needComma % 9000 == 0) {
                        // Chunk very large IN lists into groups of 9000 values.
                        buffer.append(") OR " + lhs + " IN (");
                    } else {
                        buffer.append(", ");
                    }
                    needComma++;
                    objectToString(buffer, orNext);
                }
                buffer.append(")");
            }
            buffer.append(")");
        } else {
            if (safeness == SAFENESS_SAFE) {
                // We can move the temporary bag table to the FROM list.
                String indirectTableAlias = state.getIndirectAlias(); // Not really indirection
                state.addToFrom(bagTableName + " AS " + indirectTableAlias);
                buffer.append("(");
                boolean needOrComma = false;
                for (String lhs : leftHandSide) {
                    if (needOrComma) {
                        buffer.append(" OR ");
                    }
                    needOrComma = true;
                    buffer.append(lhs + " = " + indirectTableAlias + ".value");
                }
                buffer.append(")");
            } else {
                // Not safe to join - use an IN subselect on the bag table per evaluable.
                buffer.append("(");
                boolean needOrComma = false;
                for (String lhs : leftHandSide) {
                    if (needOrComma) {
                        buffer.append(" OR ");
                    }
                    needOrComma = true;
                    buffer.append(lhs + " IN (SELECT value FROM " + bagTableName + ")");
                }
                buffer.append(")");
            }
        }
    }
}
/**
 * Converts an OverlapConstraint to a String suitable for putting in an SQL query. This will
 * try to use a Postgres range type column first, if not present it will try BioSeg, if not
 * present it will use simple constraints on start and end fields.
 *
 * @param state the current SqlGenerator state
 * @param buffer the StringBuffer to place text into
 * @param c the OverlapConstraint object
 * @param q the Query
 * @param schema the DatabaseSchema in which to look up metadata
 * @param safeness the constraint context safeness
 * @throws ObjectStoreException if something goes wrong
 */
protected static void overlapConstraintToString(State state, StringBuffer buffer,
        OverlapConstraint c, Query q, DatabaseSchema schema,
        int safeness) throws ObjectStoreException {
    if ((safeness != SAFENESS_SAFE) && (safeness != SAFENESS_ANTISAFE)
            && (safeness != SAFENESS_UNSAFE)) {
        throw new ObjectStoreException("Unknown constraint safeness: " + safeness);
    }
    // Negated ops are rendered as NOT around the positive form.
    boolean not = (ConstraintOp.DOES_NOT_CONTAIN == c.getOp())
        || (ConstraintOp.NOT_IN == c.getOp())
        || (ConstraintOp.DOES_NOT_OVERLAP == c.getOp());
    if (not) {
        buffer.append("(NOT (");
    } else if (safeness != SAFENESS_SAFE) {
        buffer.append(")");
    }
    // make sure the parents of each range are the same object
    QueryObjectReference leftParent = c.getLeft().getParent();
    QueryObjectReference rightParent = c.getRight().getParent();
    buffer.append(state.getFieldToAlias(leftParent.getQueryClass()).get(leftParent
                .getFieldName()))
        .append(" = ")
        .append(state.getFieldToAlias(rightParent.getQueryClass()).get(rightParent
                    .getFieldName()))
        .append(" AND ");
    // TODO get column type and use appropriate range type, currently uses int4range which is
    // correct for integer columns, we could support other range types
    boolean useRangeFunction = schema.hasBioSeg() || schema.useRangeTypes();
    if (useRangeFunction) {
        // either built in ranges or bioseg, prefer to use build int ranges
        String rangeFunction = (schema.useRangeTypes()) ? "int4range(" : "bioseg_create(";
        buffer.append(rangeFunction);
        queryEvaluableToString(buffer, c.getLeft().getStart(), q, state);
        buffer.append(", ");
        queryEvaluableToString(buffer, c.getLeft().getEnd(), q, state);
        // if int4range, request it includes extremes (default: include lower, exclude upper)
        if (rangeFunction.startsWith("int")) {
            buffer.append(", '[]')");
        } else {
            buffer.append(")");
        }
        // Postgres range operators: @> contains, <@ contained-by, && overlaps.
        if ((ConstraintOp.CONTAINS == c.getOp())
                || (ConstraintOp.DOES_NOT_CONTAIN == c.getOp())) {
            buffer.append(" @> ");
        } else if ((ConstraintOp.IN == c.getOp()) || (ConstraintOp.NOT_IN == c.getOp())) {
            buffer.append(" <@ ");
        } else if ((ConstraintOp.OVERLAPS == c.getOp())
                || (ConstraintOp.DOES_NOT_OVERLAP == c.getOp())) {
            buffer.append(" && ");
        } else {
            throw new IllegalArgumentException("Illegal constraint op " + c.getOp()
                    + " for range");
        }
        buffer.append(rangeFunction);
        queryEvaluableToString(buffer, c.getRight().getStart(), q, state);
        buffer.append(", ");
        queryEvaluableToString(buffer, c.getRight().getEnd(), q, state);
        if (rangeFunction.startsWith("int")) {
            buffer.append(", '[]')");
        } else {
            buffer.append(")");
        }
    } else {
        // Fallback: express the same relation as comparisons on start/end columns.
        if ((ConstraintOp.CONTAINS == c.getOp())
                || (ConstraintOp.DOES_NOT_CONTAIN == c.getOp())) {
            queryEvaluableToString(buffer, c.getLeft().getStart(), q, state);
            buffer.append(" <= ");
            queryEvaluableToString(buffer, c.getRight().getStart(), q, state);
            buffer.append(" AND ");
            queryEvaluableToString(buffer, c.getLeft().getEnd(), q, state);
            buffer.append(" >= ");
            queryEvaluableToString(buffer, c.getRight().getEnd(), q, state);
        } else if ((ConstraintOp.IN == c.getOp()) || (ConstraintOp.NOT_IN == c.getOp())) {
            queryEvaluableToString(buffer, c.getLeft().getStart(), q, state);
            buffer.append(" >= ");
            queryEvaluableToString(buffer, c.getRight().getStart(), q, state);
            buffer.append(" AND ");
            queryEvaluableToString(buffer, c.getLeft().getEnd(), q, state);
            buffer.append(" <= ");
            queryEvaluableToString(buffer, c.getRight().getEnd(), q, state);
        } else if ((ConstraintOp.OVERLAPS == c.getOp())
                || (ConstraintOp.DOES_NOT_OVERLAP == c.getOp())) {
            queryEvaluableToString(buffer, c.getLeft().getStart(), q, state);
            buffer.append(" <= ");
            queryEvaluableToString(buffer, c.getRight().getEnd(), q, state);
            buffer.append(" AND ");
            queryEvaluableToString(buffer, c.getLeft().getEnd(), q, state);
            buffer.append(" >= ");
            queryEvaluableToString(buffer, c.getRight().getStart(), q, state);
        } else {
            throw new IllegalArgumentException("Illegal constraint op " + c.getOp()
                    + " for range");
        }
    }
    if (not) {
        buffer.append("))");
    } else if (safeness != SAFENESS_SAFE) {
        buffer.append(")");
    }
}
/**
 * Converts an Object to a String, in a form suitable for SQL.
 *
 * @param buffer a StringBuffer to add text to
 * @param value the Object to convert
 * @throws ObjectStoreException if value is an InterMineObject without an ID set
 */
public static void objectToString(StringBuffer buffer,
        Object value) throws ObjectStoreException {
    if (value instanceof UnknownTypeValue) {
        // UnknownTypeValue renders itself via toString().
        buffer.append(value.toString());
    } else if (value instanceof InterMineObject) {
        // Objects are represented in SQL by their id value.
        Integer id = ((InterMineObject) value).getId();
        if (id == null) {
            throw new ObjectStoreException("InterMineObject found"
                    + " without an ID set");
        }
        buffer.append(id.toString());
    } else if (value instanceof Date) {
        // Dates are stored as milliseconds since the epoch. Long.valueOf replaces the
        // deprecated Long(long) boxing constructor (same value, cached boxing).
        buffer.append(DatabaseUtil.objectToString(Long.valueOf(((Date) value).getTime())));
    } else {
        buffer.append(DatabaseUtil.objectToString(value));
    }
}
/**
 * Converts a QueryClass to a String. The output depends on the requested kind: an id-only
 * reference, a subquery constraint reference, or a full field list with AS aliases for the
 * various top-level query forms.
 *
 * @param buffer the StringBuffer to add text to
 * @param qc the QueryClass to convert
 * @param q the Query
 * @param schema the DatabaseSchema in which to look up metadata
 * @param kind the type of the output requested
 * @param state a State object
 * @throws ObjectStoreException if the model is internally inconsistent
 */
protected static void queryClassToString(StringBuffer buffer, QueryClass qc, Query q,
        DatabaseSchema schema, int kind, State state) throws ObjectStoreException {
    if ((kind == ID_ONLY) && (!InterMineObject.class.isAssignableFrom(qc.getType()))) {
        throw new ObjectStoreException("QueryClass for non-InterMineObject class does not"
                + " have an ID");
    }
    String alias = q.getAliases().get(qc);
    Map<String, String> fieldToAlias = state.getFieldToAlias(qc);
    if (alias == null) {
        throw new NullPointerException("A QueryClass is referenced by elements of a query,"
                + " but the QueryClass is not in the FROM list of that query. QueryClass: "
                + qc + ", aliases: " + q.getAliases());
    }
    if (kind == QUERY_SUBQUERY_EXISTS) {
        // EXISTS subqueries only need the id for InterMineObjects; otherwise all fields.
        if (InterMineObject.class.isAssignableFrom(qc.getType())) {
            queryClassToString(buffer, qc, q, schema, QUERY_SUBQUERY_CONSTRAINT, state);
        } else {
            queryClassToString(buffer, qc, q, schema, NO_ALIASES_ALL_FIELDS, state);
        }
    } else if (kind == QUERY_SUBQUERY_CONSTRAINT) {
        buffer.append(DatabaseUtil.generateSqlCompatibleName(alias))
            .append(".id");
    } else {
        boolean needComma = false;
        // If the whole serialised object column is selected, emit it first.
        String objectAlias = state.getFieldToAlias(qc).get("OBJECT");
        if ((kind != QUERY_SUBQUERY_FROM) && (objectAlias != null)) {
            buffer.append(objectAlias);
            if ((kind == QUERY_NORMAL) || (kind == QUERY_FOR_PRECOMP)
                    || (kind == QUERY_FOR_GOFASTER) || (kind == QUERY_FOR_COUNTING)) {
                // Mixed-case aliases must be quoted to survive SQL case folding.
                buffer.append(" AS ")
                    .append(alias.equals(alias.toLowerCase())
                            ? DatabaseUtil.generateSqlCompatibleName(alias)
                            : "\"" + DatabaseUtil.generateSqlCompatibleName(alias) + "\"");
            }
            needComma = true;
        }
        if ((kind == QUERY_SUBQUERY_FROM) || (kind == NO_ALIASES_ALL_FIELDS)
                || (((kind == QUERY_NORMAL) || (kind == QUERY_FOR_GOFASTER)
                        || (kind == QUERY_FOR_COUNTING)) && schema.isFlatMode(qc.getType()))
                || (kind == QUERY_FOR_PRECOMP)) {
            Iterator<FieldDescriptor> fieldIter = null;
            ClassDescriptor cld = schema.getModel().getClassDescriptorByName(qc.getType()
                    .getName());
            if (schema.isFlatMode(qc.getType()) && ((kind == QUERY_NORMAL)
                        || (kind == QUERY_FOR_GOFASTER) || (kind == QUERY_FOR_COUNTING))) {
                // Flat mode: only the table-master's attribute and reference columns exist.
                List<Iterator<? extends FieldDescriptor>> iterators
                    = new ArrayList<Iterator<? extends FieldDescriptor>>();
                DatabaseSchema.Fields fields = schema.getTableFields(schema
                        .getTableMaster(cld));
                iterators.add(fields.getAttributes().iterator());
                iterators.add(fields.getReferences().iterator());
                fieldIter = new CombinedIterator<FieldDescriptor>(iterators);
            } else {
                fieldIter = cld.getAllFieldDescriptors().iterator();
            }
            // TreeMap keyed on column name gives a deterministic column order.
            Map<String, FieldDescriptor> fieldMap = new TreeMap<String, FieldDescriptor>();
            while (fieldIter.hasNext()) {
                FieldDescriptor field = fieldIter.next();
                String columnName = DatabaseUtil.getColumnName(field);
                if (columnName != null) {
                    fieldMap.put(columnName, field);
                }
            }
            for (Map.Entry<String, FieldDescriptor> fieldEntry : fieldMap.entrySet()) {
                FieldDescriptor field = fieldEntry.getValue();
                String columnName = DatabaseUtil.getColumnName(field);
                if (needComma) {
                    buffer.append(", ");
                }
                needComma = true;
                buffer.append(fieldToAlias.get(field.getName()));
                if (kind == QUERY_SUBQUERY_FROM) {
                    buffer.append(" AS ")
                        .append(DatabaseUtil.generateSqlCompatibleName(alias) + columnName);
                } else if ((kind == QUERY_NORMAL) || (kind == QUERY_FOR_PRECOMP)
                        || (kind == QUERY_FOR_GOFASTER) || (kind == QUERY_FOR_COUNTING)) {
                    buffer.append(" AS ")
                        .append(alias.equals(alias.toLowerCase())
                                ? DatabaseUtil.generateSqlCompatibleName(alias) + columnName
                                : "\"" + DatabaseUtil.generateSqlCompatibleName(alias)
                                    + columnName.toLowerCase() + "\"");
                }
            }
            if (schema.isFlatMode(qc.getType())
                    && schema.isTruncated(schema.getTableMaster(cld))) {
                // Truncated tables need the class discriminator column as well.
                buffer.append(", ")
                    .append(fieldToAlias.get("class"))
                    .append(" AS ")
                    .append(alias.equals(alias.toLowerCase())
                            ? DatabaseUtil.generateSqlCompatibleName(alias) + "objectclass"
                            : "\"" + DatabaseUtil.generateSqlCompatibleName(alias)
                                + "objectclass\"");
            }
        } else {
            // Default: just the id column, aliased.
            if (needComma) {
                buffer.append(", ");
            }
            buffer.append(DatabaseUtil.generateSqlCompatibleName(alias))
                .append(".id AS ")
                .append(alias.equals(alias.toLowerCase())
                        ? DatabaseUtil.generateSqlCompatibleName(alias) + "id"
                        : "\"" + DatabaseUtil.generateSqlCompatibleName(alias) + "id" + "\"");
        }
    }
}
/**
 * Converts a QueryEvaluable into a String suitable for an SQL query String. Dispatches on
 * the concrete type: field references, expressions, aggregate/scalar functions, literal
 * values, casts and foreign keys.
 *
 * @param buffer the StringBuffer to add text to
 * @param node the QueryEvaluable
 * @param q the Query
 * @param state a State object
 * @throws ObjectStoreException if something goes wrong
 */
protected static void queryEvaluableToString(StringBuffer buffer, QueryEvaluable node,
        Query q, State state) throws ObjectStoreException {
    if (node instanceof QueryField) {
        QueryField nodeF = (QueryField) node;
        FromElement nodeClass = nodeF.getFromElement();
        if (state != null) {
            Map<String, String> aliasMap = state.getFieldToAlias(nodeClass);
            String classAlias = aliasMap.get(nodeF.getFieldName());
            buffer.append(classAlias);
            if (aliasMap instanceof AlwaysMap<?, ?>) {
                // This is a subquery, so the classAlias only contains the alias of the subquery
                buffer.append(".")
                    .append(DatabaseUtil.generateSqlCompatibleName(nodeF.getFieldName()))
                    .append(nodeF.getSecondFieldName() == null
                            ? "" : DatabaseUtil.generateSqlCompatibleName(nodeF
                                .getSecondFieldName()));
            }
        } else {
            // No state available (e.g. rendering outside a full query context).
            buffer.append(DatabaseUtil.generateSqlCompatibleName(nodeF.getFieldName()));
        }
    } else if (node instanceof QueryExpression) {
        QueryExpression nodeE = (QueryExpression) node;
        if (nodeE.getOperation() == QueryExpression.SUBSTRING) {
            // SUBSTR's third argument (length) is optional.
            QueryEvaluable arg1 = nodeE.getArg1();
            QueryEvaluable arg2 = nodeE.getArg2();
            QueryEvaluable arg3 = nodeE.getArg3();
            buffer.append("SUBSTR(");
            queryEvaluableToString(buffer, arg1, q, state);
            buffer.append(", ");
            queryEvaluableToString(buffer, arg2, q, state);
            if (arg3 != null) {
                buffer.append(", ");
                queryEvaluableToString(buffer, arg3, q, state);
            }
            buffer.append(")");
        } else if (nodeE.getOperation() == QueryExpression.INDEX_OF) {
            QueryEvaluable arg1 = nodeE.getArg1();
            QueryEvaluable arg2 = nodeE.getArg2();
            buffer.append("STRPOS(");
            queryEvaluableToString(buffer, arg1, q, state);
            buffer.append(", ");
            queryEvaluableToString(buffer, arg2, q, state);
            buffer.append(")");
        } else if (nodeE.getOperation() == QueryExpression.LOWER) {
            buffer.append("LOWER(");
            queryEvaluableToString(buffer, nodeE.getArg1(), q, state);
            buffer.append(")");
        } else if (nodeE.getOperation() == QueryExpression.UPPER) {
            buffer.append("UPPER(");
            queryEvaluableToString(buffer, nodeE.getArg1(), q, state);
            buffer.append(")");
        } else if (nodeE.getOperation() == QueryExpression.GREATEST) {
            buffer.append("GREATEST(");
            queryEvaluableToString(buffer, nodeE.getArg1(), q, state);
            buffer.append(",");
            queryEvaluableToString(buffer, nodeE.getArg2(), q, state);
            buffer.append(")");
        } else if (nodeE.getOperation() == QueryExpression.LEAST) {
            buffer.append("LEAST(");
            queryEvaluableToString(buffer, nodeE.getArg1(), q, state);
            buffer.append(",");
            queryEvaluableToString(buffer, nodeE.getArg2(), q, state);
            buffer.append(")");
        } else {
            // Remaining operations are binary arithmetic, rendered infix in parentheses.
            QueryEvaluable arg1 = nodeE.getArg1();
            QueryEvaluable arg2 = nodeE.getArg2();
            String op = null;
            switch (nodeE.getOperation()) {
                case QueryExpression.ADD:
                    op = " + ";
                    break;
                case QueryExpression.SUBTRACT:
                    op = " - ";
                    break;
                case QueryExpression.MULTIPLY:
                    op = " * ";
                    break;
                case QueryExpression.DIVIDE:
                    op = " / ";
                    break;
                case QueryExpression.MODULO:
                    op = " % ";
                    break;
                default:
                    throw (new ObjectStoreException("Invalid QueryExpression operation: "
                                + nodeE.getOperation()));
            }
            buffer.append("(");
            queryEvaluableToString(buffer, arg1, q, state);
            buffer.append(op);
            queryEvaluableToString(buffer, arg2, q, state);
            buffer.append(")");
        }
    } else if (node instanceof QueryFunction) {
        // Aggregate and scalar SQL functions.
        QueryFunction nodeF = (QueryFunction) node;
        switch (nodeF.getOperation()) {
            case QueryFunction.COUNT:
                buffer.append("COUNT(*)");
                break;
            case QueryFunction.SUM:
                buffer.append("SUM(");
                queryEvaluableToString(buffer, nodeF.getParam(), q, state);
                buffer.append(")");
                break;
            case QueryFunction.AVERAGE:
                buffer.append("AVG(");
                queryEvaluableToString(buffer, nodeF.getParam(), q, state);
                buffer.append(")");
                break;
            case QueryFunction.MIN:
                buffer.append("MIN(");
                queryEvaluableToString(buffer, nodeF.getParam(), q, state);
                buffer.append(")");
                break;
            case QueryFunction.MAX:
                buffer.append("MAX(");
                queryEvaluableToString(buffer, nodeF.getParam(), q, state);
                buffer.append(")");
                break;
            case QueryFunction.STDDEV:
                buffer.append("STDDEV(");
                queryEvaluableToString(buffer, nodeF.getParam(), q, state);
                buffer.append(")");
                break;
            case QueryFunction.CEIL:
                buffer.append("CEIL(");
                queryEvaluableToString(buffer, nodeF.getParam(), q, state);
                buffer.append(")");
                break;
            case QueryFunction.FLOOR:
                buffer.append("FLOOR(");
                queryEvaluableToString(buffer, nodeF.getParam(), q, state);
                buffer.append(")");
                break;
            case QueryFunction.ROUND:
                buffer.append("ROUND(");
                queryEvaluableToString(buffer, nodeF.getParam(), q, state);
                buffer.append(", ");
                queryEvaluableToString(buffer, nodeF.getParam2(), q, state);
                buffer.append(")");
                break;
            case QueryFunction.WIDTH_BUCKET:
                WidthBucketFunction wbf = (WidthBucketFunction) nodeF;
                buffer.append("WIDTH_BUCKET(");
                queryEvaluableToString(buffer, wbf.getParam(), q, state);
                buffer.append(", ");
                queryEvaluableToString(buffer, wbf.getMinParam(), q, state);
                buffer.append(", ");
                queryEvaluableToString(buffer, wbf.getMaxParam(), q, state);
                buffer.append(", ");
                queryEvaluableToString(buffer, wbf.getBinsParam(), q, state);
                buffer.append(")");
                break;
            default:
                throw (new ObjectStoreException("Invalid QueryFunction operation: "
                            + nodeF.getOperation()));
        }
    } else if (node instanceof QueryValue) {
        QueryValue nodeV = (QueryValue) node;
        Object value = nodeV.getValue();
        objectToString(buffer, value);
    } else if (node instanceof QueryCast) {
        // Rendered as (expr)::sqltype, mapping the Java type through Torque metadata.
        buffer.append("(");
        queryEvaluableToString(buffer, ((QueryCast) node).getValue(), q, state);
        buffer.append(")::");
        String torqueTypeName = TorqueModelOutput.generateJdbcType(node.getType()
                .getName());
        SchemaType torqueType = SchemaType.getEnum(torqueTypeName);
        Platform torquePlatform = PlatformFactory.getPlatformFor(state.getDb().getPlatform()
                .toLowerCase());
        Domain torqueDomain = torquePlatform.getDomainForSchemaType(torqueType);
        buffer.append(torqueDomain.getSqlType());
    } else if (node instanceof QueryForeignKey) {
        QueryForeignKey qor = (QueryForeignKey) node;
        buffer.append(state.getFieldToAlias(qor.getQueryClass()).get(qor.getFieldName()));
    } else {
        throw (new ObjectStoreException("Invalid QueryEvaluable: " + node));
    }
}
/**
 * Builds a String representing the SELECT component of the Sql query.
 *
 * @param state the current Sql Query state
 * @param q the Query
 * @param schema the DatabaseSchema in which to look up metadata
 * @param kind the kind of output requested
 * @return a String
 * @throws ObjectStoreException if the SELECT list is empty or contains an unknown element
 */
protected static String buildSelectComponent(State state, Query q, DatabaseSchema schema,
        int kind) throws ObjectStoreException {
    boolean needComma = false;
    StringBuffer retval = new StringBuffer();
    Iterator<QuerySelectable> iter = q.getSelect().iterator();
    if (!iter.hasNext()) {
        throw new ObjectStoreException("SELECT list is empty in Query");
    }
    while (iter.hasNext()) {
        QuerySelectable node = iter.next();
        String alias = q.getAliases().get(node);
        if (node instanceof QueryClass) {
            if (needComma) {
                retval.append(", ");
            }
            needComma = true;
            // QueryClass handles its own aliasing per the requested kind.
            queryClassToString(retval, (QueryClass) node, q, schema, kind, state);
        } else if (node instanceof QueryEvaluable) {
            if (needComma) {
                retval.append(", ");
            }
            needComma = true;
            queryEvaluableToString(retval, (QueryEvaluable) node, q, state);
            if ((kind == QUERY_NORMAL) || (kind == QUERY_FOR_PRECOMP)
                    || (kind == QUERY_FOR_GOFASTER) || (kind == QUERY_FOR_COUNTING)) {
                // Quote the alias if it is not all-lowercase, to survive case folding.
                retval.append(" AS " + (alias.equals(alias.toLowerCase())
                            ? DatabaseUtil.generateSqlCompatibleName(alias)
                            : "\"" + DatabaseUtil.generateSqlCompatibleName(alias) + "\""));
            } else if (kind == QUERY_SUBQUERY_FROM) {
                retval.append(" AS " + DatabaseUtil.generateSqlCompatibleName(alias));
            }
        } else if (node instanceof QueryPathExpression) {
            // Do nothing - path expressions are fetched separately, not via this SQL.
        } else {
            throw new ObjectStoreException("Unknown object in SELECT list: " + node.getClass());
        }
    }
    // Append any extra expressions required by the ORDER BY clause.
    for (Map.Entry<String, String> entry : state.getOrderBy().entrySet()) {
        if (needComma) {
            retval.append(", ");
        }
        needComma = true;
        retval.append(entry.getKey())
            .append(" AS ")
            .append(entry.getValue());
    }
    return retval.toString();
}
/**
* Builds a String representing the GROUP BY component of the Sql query.
*
* @param q the Query
* @param schema the DatabaseSchema in which to look up metadata
* @param state a State object
* @return a String
* @throws ObjectStoreException if something goes wrong
*/
protected static String buildGroupBy(Query q, DatabaseSchema schema,
State state) throws ObjectStoreException {
StringBuffer retval = new StringBuffer();
boolean needComma = false;
for (QueryNode node : q.getGroupBy()) {
retval.append(needComma ? ", " : " GROUP BY ");
needComma = true;
if (node instanceof QueryClass) {
queryClassToString(retval, (QueryClass) node, q, schema, NO_ALIASES_ALL_FIELDS,
state);
} else {
queryEvaluableToString(retval, (QueryEvaluable) node, q, state);
}
}
return retval.toString();
}
    /**
     * Builds a String representing the ORDER BY component of the Sql query.
     *
     * Each element of the query's effective ORDER BY list is rendered once;
     * duplicates are suppressed via the "seen" set. An element that is not in
     * the SELECT list is either registered with the State as an extra SELECT
     * column (for precomputed / go-faster queries) or, for DISTINCT queries,
     * rejected with an ObjectStoreException.
     *
     * @param state the current Sql Query state
     * @param q the Query
     * @param schema the DatabaseSchema in which to look up metadata
     * @param kind the kind of output requested
     * @return a String
     * @throws ObjectStoreException if something goes wrong
     */
    protected static String buildOrderBy(State state, Query q, DatabaseSchema schema,
            int kind) throws ObjectStoreException {
        StringBuffer retval = new StringBuffer();
        HashSet<String> seen = new HashSet<String>();
        boolean needComma = false;
        for (Object node : q.getEffectiveOrderBy()) {
            // OrderDescending wraps the real orderable; unwrap it and remember
            // the direction so " DESC" can be appended at the end.
            boolean desc = false;
            if (node instanceof OrderDescending) {
                desc = true;
                node = ((OrderDescending) node).getQueryOrderable();
            }
            // QueryValue and QueryPathExpression elements produce no ORDER BY
            // text at all.
            if (!((node instanceof QueryValue) || (node instanceof QueryPathExpression))) {
                StringBuffer buffer = new StringBuffer();
                if (node instanceof QueryClass) {
                    // Order a class by its id when it has one, otherwise by
                    // all of its fields.
                    if (TypeUtil.getFieldInfo(((QueryClass) node).getType(), "id") != null) {
                        queryClassToString(buffer, (QueryClass) node, q, schema, ID_ONLY, state);
                    } else {
                        queryClassToString(buffer, (QueryClass) node, q, schema,
                                NO_ALIASES_ALL_FIELDS, state);
                    }
                    if (!seen.contains(buffer.toString())) {
                        retval.append(needComma ? ", " : " ORDER BY ");
                        needComma = true;
                        retval.append(buffer.toString());
                        seen.add(buffer.toString());
                        // Neither the class nor its id field is selected.
                        if ((!q.getSelect().contains(node))
                            && (!q.getSelect().contains(new QueryField((QueryClass) node,
                                            "id")))) {
                            if (q.isDistinct()) {
                                throw new ObjectStoreException("Class " + q.getAliases().get(node)
                                        + " in the ORDER BY list must be in the SELECT list, or its"
                                        + " id, or the query made non-distinct");
                            } else if ((kind == QUERY_FOR_PRECOMP)
                                    || (kind == QUERY_FOR_GOFASTER)) {
                                state.addToOrderBy(buffer.toString());
                            }
                        }
                    }
                } else if (node instanceof QueryObjectReference) {
                    QueryObjectReference ref = (QueryObjectReference) node;
                    buffer.append(state.getFieldToAlias(ref.getQueryClass()).get(ref
                                .getFieldName()));
                    if (!seen.contains(buffer.toString())) {
                        retval.append(needComma ? ", " : " ORDER BY ");
                        needComma = true;
                        retval.append(buffer.toString());
                        seen.add(buffer.toString());
                        if (q.isDistinct()) {
                            if (q.getSelect().contains(ref)) {
                                // Nothing required
                            } else if (q.getSelect().contains(ref.getQueryClass())) {
                                // This means that the field's QueryClass is present in the SELECT
                                // list, so adding the field artificially will not alter the number
                                // of rows of a DISTINCT query.
                                if (!schema.isFlatMode(ref.getQueryClass().getType())) {
                                    state.addToOrderBy(buffer.toString());
                                }
                            } else {
                                throw new ObjectStoreException("Reference " + buffer.toString()
                                        + " in the ORDER BY list must be in the SELECT list, or the"
                                        + " whole QueryClass must be in the SELECT list, or the"
                                        + " query made non-distinct");
                            }
                        } else if ((!q.getSelect().contains(ref)) && ((kind == QUERY_FOR_PRECOMP)
                                    || (kind == QUERY_FOR_GOFASTER))
                                && (!schema.isFlatMode(ref.getQueryClass().getType()))) {
                            state.addToOrderBy(buffer.toString());
                        }
                    }
                } else {
                    // DON'T NEED TO RE-EVALUATE FNS WE ARE ORDERING BY.
                    if (q.getSelect().contains(node) && node instanceof QueryFunction) {
                        //don't add average in the orderby because in the
                        // QueryOptimise.optimiseWith in originalQuery = new Query(query);
                        // the originalQuery is not parsed correctly!!!
                        if (((QueryFunction) node).getOperation() == QueryFunction.AVERAGE) {
                            continue;
                        }
                        // The function is already a SELECT column: order by its
                        // alias instead of repeating the expression.
                        String alias = q.getAliases().get(node);
                        buffer.append(alias);
                    } else {
                        queryEvaluableToString(buffer, (QueryEvaluable) node, q, state);
                    }
                    if (!seen.contains(buffer.toString())) {
                        retval.append(needComma ? ", " : " ORDER BY ");
                        needComma = true;
                        retval.append(buffer.toString());
                        seen.add(buffer.toString());
                        if ((!q.getSelect().contains(node)) && q.isDistinct()
                                && (node instanceof QueryField)) {
                            FromElement fe = ((QueryField) node).getFromElement();
                            if (q.getSelect().contains(fe)) {
                                // This means that this field is not in the SELECT list, but its
                                // FromElement is, therefore adding it artificially to the SELECT
                                // list will not alter the number of rows of a DISTINCT query.
                                if (!schema.isFlatMode(InterMineObject.class)) {
                                    state.addToOrderBy(buffer.toString());
                                }
                            } else if (fe instanceof QueryClass) {
                                throw new ObjectStoreException("Field " + buffer.toString()
                                        + " in the ORDER BY list must be in the SELECT list, or the"
                                        + " whole QueryClass " + fe.toString() + " must be in the"
                                        + " SELECT list, or the query made non-distinct");
                            } else {
                                throw new ObjectStoreException("Field " + buffer.toString()
                                        + " in the ORDER BY list must be in the SELECT list, or the"
                                        + " query made non-distinct");
                            }
                        } else if ((!q.getSelect().contains(node)) && (!q.isDistinct())
                                && ((kind == QUERY_FOR_PRECOMP)
                                    || (kind == QUERY_FOR_GOFASTER))
                                && (!schema.isFlatMode(InterMineObject.class))) {
                            state.addToOrderBy(buffer.toString());
                        }
                    }
                }
                if (desc) {
                    retval.append(" DESC");
                }
            }
        }
        return retval.toString();
    }
/**
* Internal representation of the State of the query as it is built up.
* @author Matthew
*
*/
protected static class State
{
private StringBuffer whereText = new StringBuffer();
private StringBuffer havingText = new StringBuffer();
private StringBuffer fromText = new StringBuffer();
private Map<String, String> orderBy = new LinkedHashMap<String, String>();
private int number = 0;
private Map<FromElement, Map<String, String>> fromToFieldToAlias
= new HashMap<FromElement, Map<String, String>>();
private Database db;
// a Map from BagConstraints to table names, where the table contains the contents of the
// bag that are relevant for the BagConstraint
private Map<Object, String> bagTableNames = new HashMap<Object, String>();
/**
* Constructor
*/
public State() {
// empty
}
/**
* @return where clause
*/
public String getWhere() {
// a hacky fix for #731:
String where = whereText.toString();
//if (where.startsWith("(") && where.endsWith(")")) {
// where = where.substring(1, where.length() - 1);
//}
return (where.length() == 0 ? "" : " WHERE " + where);
}
/**
* @return where clause
*/
public StringBuffer getWhereBuffer() {
return whereText;
}
/**
*
* @return having clause
*/
public String getHaving() {
String having = havingText.toString();
return (having.length() == 0 ? "" : " HAVING " + having);
}
/**
*
* @return having clause
*/
public StringBuffer getHavingBuffer() {
return havingText;
}
/**
*
* @return from clause
*/
public String getFrom() {
return fromText.toString();
}
/**
*
* @param text where caluse
*/
public void addToWhere(String text) {
whereText.append(text);
}
/**
*
* @param text from clause
*/
public void addToFrom(String text) {
if (fromText.length() == 0) {
fromText.append(" FROM ").append(text);
} else {
fromText.append(", ").append(text);
}
}
/**
*
* @return indirect alias
*/
public String getIndirectAlias() {
return "indirect" + (number++);
}
/**
*
* @return order by alias
*/
public String getOrderByAlias() {
return "orderbyfield" + (number++);
}
/**
*
* @param s order by string
*/
public void addToOrderBy(String s) {
orderBy.put(s, getOrderByAlias());
}
/**
*
* @return order by clause
*/
public Map<String, String> getOrderBy() {
return orderBy;
}
/**
*
* @param from from element
* @return alias for given from element
*/
public Map<String, String> getFieldToAlias(FromElement from) {
Map<String, String> retval = fromToFieldToAlias.get(from);
if (retval == null) {
retval = new HashMap<String, String>();
fromToFieldToAlias.put(from, retval);
}
return retval;
}
/**
*
* @param from from element
* @param map map of from elements to aliases
*/
public void setFieldToAlias(FromElement from, Map<String, String> map) {
fromToFieldToAlias.put(from, map);
}
/**
*
* @param bagTableNames map
*/
public void setBagTableNames(Map<Object, String> bagTableNames) {
if (bagTableNames != null) {
this.bagTableNames = bagTableNames;
}
}
/**
*
* @return bag table name map
*/
public Map<Object, String> getBagTableNames() {
return bagTableNames;
}
/**
*
* @param db database
*/
public void setDb(Database db) {
this.db = db;
}
/**
*
* @return database
*/
public Database getDb() {
return db;
}
}
private static class CacheEntry
{
private TreeMap<Integer, String> cached = new TreeMap<Integer, String>();
private int lastOffset;
private String lastSQL;
public CacheEntry(int lastOffset, String lastSQL) {
this.lastOffset = lastOffset;
this.lastSQL = lastSQL;
}
public TreeMap<Integer, String> getCached() {
return cached;
}
public void setLast(int lastOffset, String lastSQL) {
this.lastOffset = lastOffset;
this.lastSQL = lastSQL;
}
public int getLastOffset() {
return lastOffset;
}
public String getLastSQL() {
return lastSQL;
}
}
private static class ClassDescriptorAndAlias
{
private ClassDescriptor cld;
private String alias;
public ClassDescriptorAndAlias(ClassDescriptor cld, String alias) {
this.cld = cld;
this.alias = alias;
}
public ClassDescriptor getClassDescriptor() {
return cld;
}
public String getAlias() {
return alias;
}
}
}
|
/**
* @author Dmitry Krasilschikov
*/
public class DomainClassNode extends ClassNode {
public DomainClassNode(@NotNull final Module module,
@NotNull final GrTypeDefinition typeDefinition,
@Nullable final ViewSettings viewSettings) {
super(module, typeDefinition, viewSettings);
}
@Override
protected String getTestPresentationImpl(@NotNull final PsiElement psiElement) {
return "Domain class: " + ((GrTypeDefinition)psiElement).getName();
}
@Override
protected void updateImpl(final PresentationData data) {
super.updateImpl(data);
data.setIcon(JetgroovyIcons.Mvc.Domain_class);
}
@Override
public boolean validate() {
if (!super.validate()) {
return false;
}
return getValue() != null;
}
} |
class Utils:
    @staticmethod
    def template(run, text):
        """Expand ``$key`` placeholders in *text* from a run's keys/values.

        Each key ``k`` reported by ``run.keys_values()`` replaces every
        occurrence of ``$k`` in *text*.  Values are coerced with ``str`` so
        non-string values (ints, floats, ...) no longer raise ``TypeError``.

        Note: replacement is sequential in dict order, so a key that is a
        prefix of another (e.g. ``n`` and ``name``) can clobber the longer
        placeholder if it is processed first.

        :param run: object exposing ``keys_values() -> dict``
        :param text: template text containing ``$key`` placeholders
        :return: the expanded text
        """
        for key, value in run.keys_values().items():
            text = text.replace('${}'.format(key), str(value))
        return text
|
// SetIndex sets the item in a collection for the given key, using the
// '__newindex' metamethod if appropriate. SetIndex always consumes CPU if it
// doesn't return an error.
//
// The metamethod chain is followed at most maxIndexChainLength times to
// guard against '__newindex' loops.
func SetIndex(t *Thread, coll Value, idx Value, val Value) error {
	if idx.IsNil() {
		return errors.New("index is nil")
	}
	for i := 0; i < maxIndexChainLength; i++ {
		t.RequireCPU(1)
		tbl, isTable := coll.TryTable()
		// NaN is rejected as a table key.
		if isTable && idx.IsNaN() {
			return errTableIndexIsNaN
		}
		// Fast path: the raw set succeeded, so no metamethod is consulted.
		if isTable && tbl.Reset(idx, val) {
			return nil
		}
		metaNewIndex := t.metaGetS(coll, "__newindex")
		if metaNewIndex.IsNil() {
			// No metamethod: perform a raw set on tables.
			// NOTE(review): when coll is not a table this silently does
			// nothing and returns nil — confirm callers expect no error here.
			if isTable {
				t.SetTable(tbl, idx, val)
			}
			return nil
		}
		// A table metamethod redirects the assignment to that table on the
		// next iteration; anything else is called as (coll, idx, val).
		if _, ok := metaNewIndex.TryTable(); ok {
			coll = metaNewIndex
		} else {
			return Call(t, metaNewIndex, []Value{coll, idx, val}, NewTermination(t.CurrentCont(), nil, nil))
		}
	}
	return fmt.Errorf("'__newindex' chain too long; possible loop")
}
    /**
     * Create a 2D array the same size as the map with the costs of moving to
     * each tile.
     * An abyss tile has the highest cost, and the cost decreases
     * exponentially as we move farther from the edge.
     *
     * First builds the proximity mask (genProxMask), then evaluates it —
     * presumably turning proximity values into the per-tile costs described
     * above (evaluateProxMask); confirm against those methods.
     */
    private void genMapCostsMask()
    {
        genProxMask();
        evaluateProxMask();
    }
<filename>server/src/database/migration/1588246416803-removeOldColumns.ts
import { MigrationInterface, QueryRunner } from 'typeorm';
export class removeOldColumns1588246416803 implements MigrationInterface {
  name = 'removeOldColumns1588246416803';

  /** SQL executed, in order, when the migration is applied. */
  private static readonly upStatements: readonly string[] = [
    `ALTER TABLE "customers" DROP COLUMN "borrowerAge"`,
    `ALTER TABLE "customers" DROP COLUMN "id"`,
    `ALTER TABLE "contracts" DROP COLUMN "borrowerFirstName"`,
    `ALTER TABLE "contracts" DROP COLUMN "borrowerLastName"`,
    `ALTER TABLE "contracts" DROP COLUMN "borrowerAge"`,
  ];

  /** SQL executed, in order, when the migration is rolled back. */
  private static readonly downStatements: readonly string[] = [
    `ALTER TABLE "contracts" ADD "borrowerAge" double precision NOT NULL`,
    `ALTER TABLE "contracts" ADD "borrowerLastName" text NOT NULL`,
    `ALTER TABLE "contracts" ADD "borrowerFirstName" text NOT NULL`,
    `ALTER TABLE "customers" ADD "id" uuid NOT NULL`,
    `ALTER TABLE "customers" ADD "borrowerAge" double precision NOT NULL`,
  ];

  /** Drops the obsolete borrower columns, one statement at a time. */
  public async up(queryRunner: QueryRunner): Promise<any> {
    for (const statement of removeOldColumns1588246416803.upStatements) {
      await queryRunner.query(statement, undefined);
    }
  }

  /** Restores the dropped columns in the reverse order of `up`. */
  public async down(queryRunner: QueryRunner): Promise<any> {
    for (const statement of removeOldColumns1588246416803.downStatements) {
      await queryRunner.query(statement, undefined);
    }
  }
}
|
/**
* Image
* @author Eduardo Fonseca Velasques - @eduveks
*/
public class Image implements Component {
private Proteu proteu = null;
private Script script = null;
private Values parameters = new Values();
private String type = "";
private String description = "";
private String style = "";
private String visible = "true";
private int count = 0;
private String name = "";
private String src = "";
private String cssclass = "";
private String accesskey = "";
private String alt = "";
private String altkey = "";
private String checked = "";
private String disabled = "";
private String id = "";
private String onblur = "";
private String onchange = "";
private String onclick = "";
private String ondblclick = "";
private String onfocus = "";
private String onkeydown = "";
private String onkeypress = "";
private String onkeyup = "";
private String onmousedown = "";
private String onmousemove = "";
private String onmouseout = "";
private String onmouseover = "";
private String onmouseup = "";
private String tabindex = "";
private String title = "";
private String titlekey = "";
/**
* Image
* @param proteu Proteu
*/
public Image(Proteu proteu) {
this.proteu = proteu;
}
/**
* Parent
* @param component Component
*/
public void parent(Component component) {
}
/**
* Next
* @return Loop to next
*/
public boolean next() {
if (count == 0 && visible.equalsIgnoreCase("true") ) {
try {
proteu.getOutput().print("<img name=\"" + name + "\" src=\"" + src);
} catch (Exception e) {
throw new Error(e);
}
count++;
return true;
} else {
return false;
}
}
/**
* Close
*/
public void close() {
try {
if (visible.equalsIgnoreCase("true")) {
proteu.getOutput().print("\"");
if (!id.equals("")) {
proteu.getOutput().print(" id=\"" + id + "\"");
}
if (!cssclass.equals("")) {
proteu.getOutput().print(" class=\"" + cssclass + "\"");
}
if (!style.equals("")) {
proteu.getOutput().print(" style=\"" + style + "\"");
}
if (!accesskey.equals("")) {
proteu.getOutput().print(" accesskey=\"" + accesskey + "\"");
}
if (!alt.equals("")) {
proteu.getOutput().print(" alt=\"" + alt + "\"");
}
if (!altkey.equals("")) {
proteu.getOutput().print(" altkey=\"" + altkey + "\"");
}
if (!checked.equals("")) {
proteu.getOutput().print(" checked=\"" + checked + "\"");
}
if (!disabled.equals("")) {
proteu.getOutput().print(" disabled=\"" + disabled + "\"");
}
if (!onblur.equals("")) {
proteu.getOutput().print(" onblur=\"" + onblur + "\"");
}
if (!onchange.equals("")) {
proteu.getOutput().print(" onchange=\"" + onchange + "\"");
}
if (!onclick.equals("")) {
proteu.getOutput().print(" onclick=\"" + onclick + "\"");
}
if (!ondblclick.equals("")) {
proteu.getOutput().print(" ondblclick=\"" + ondblclick + "\"");
}
if (!onfocus.equals("")) {
proteu.getOutput().print(" onfocus=\"" + onfocus + "\"");
}
if (!onkeydown.equals("")) {
proteu.getOutput().print(" onkeydown=\"" + onkeydown + "\"");
}
if (!onkeypress.equals("")) {
proteu.getOutput().print(" onkeypress=\"" + onkeypress + "\"");
}
if (!onkeyup.equals("")) {
proteu.getOutput().print(" onkeyup=\"" + onkeyup + "\"");
}
if (!onmousedown.equals("")) {
proteu.getOutput().print(" onmousedown=\"" + onmousedown + "\"");
}
if (!onmousemove.equals("")) {
proteu.getOutput().print(" onmousemove=\"" + onmousemove + "\"");
}
if (!onmouseout.equals("")) {
proteu.getOutput().print(" onmouseout=\"" + onmouseout + "\"");
}
if (!onmouseover.equals("")) {
proteu.getOutput().print(" onmouseover=\"" + onmouseover + "\"");
}
if (!onmouseup.equals("")) {
proteu.getOutput().print(" onmouseup=\"" + onmouseup + "\"");
}
if (!tabindex.equals("")) {
proteu.getOutput().print(" tabindex=\"" + tabindex + "\"");
}
if (!title.equals("")) {
proteu.getOutput().print(" title=\"" + title + "\"");
}
if (!titlekey.equals("")) {
proteu.getOutput().print(" titlekey=\"" + titlekey + "\"");
}
if (!parameters.toString("\" ", "=\"").equals("")) {
proteu.getOutput().print(" " + parameters.toString("\" ", "=\"") + "\"");
}
proteu.getOutput().print("/>");
}
count = 0;
} catch (Exception e) {
throw new Error(e);
}
}
/**
* Add Component
* @param component Component
*/
public void add(Component component) {
script.addComponent(component);
}
/**
* Add Parameter
* @param name Name
* @param value Value
*/
public void addParameter(String name, String value) {
parameters.set(name, value);
}
/**
* Set Parameter
* exemple: setParameter("name[~]value")
* @param parameter in format name + separator + value
*/
public void setParameter(String parameter) {
if (parameter.indexOf(Component.SEPARATOR) > -1) {
String[] Parameter = parameter.split(RegEx.toRegEx(Component.SEPARATOR));
parameters.set(Parameter[0], Parameter[1]);
}
}
/**
* Get Type
* @return Type
*/
public String getType() {
return this.type;
}
/**
* Set Type
* @param type Type
*/
public void setType(String type) {
this.type = type;
}
/**
* Set Description
* @param description Description
*/
public void setDescription(String description) {
this.description = description;
}
/**
* Get Description
* @return Description
*/
public String getDescription() {
return description;
}
/**
* Get Style
* @return Style
*/
public String getStyle() {
return this.style;
}
/**
* Set Style
* @param style Style
*/
public void setStyle(String style) {
this.style = style;
}
/**
* Set Name
* @param name name
*/
public void setName(String name) {
this.name = name;
}
/**
* Get Name
* @return name
*/
public String getName() {
return this.name;
}
/**
* Set Src
* @param src Src
*/
public void setSrc(String src) {
this.src = src;
}
/**
* Get Src
* @return src
*/
public String getSrc() {
return this.src;
}
/**
* Set Visible
* @param visible visible
*/
public void setVisible(String visible) {
this.visible = visible;
}
/**
* Get Visible
* @return visible
*/
public String getVisible() {
return this.visible;
}
/**
* Set Cssclass
* @param cssclass cssclass
*/
public void setCssclass(String cssclass) {
this.cssclass = cssclass;
}
/**
* Get Cssclass
* @return cssclass
*/
public String getCssclass() {
return this.cssclass;
}
/**
* Set Accesskey
* @param accesskey accesskey
*/
public void setAccesskey(String accesskey) {
this.accesskey = accesskey;
}
/**
* Get Accesskey
* @return accesskey
*/
public String getAccesskey() {
return this.accesskey;
}
/**
* Set Alt
* @param alt alt
*/
public void setAlt(String alt) {
this.alt = alt;
}
/**
* Get Alt
* @return alt
*/
public String getAlt() {
return this.alt;
}
/**
* Set Altkey
* @param altkey altkey
*/
public void setAltkey(String altkey) {
this.altkey = altkey;
}
/**
* Get Altkey
* @return altkey
*/
public String getAltkey() {
return this.altkey;
}
/**
* Set Checked
* @param checked checked
*/
public void setChecked(String checked) {
this.checked = checked;
}
/**
* Get Checked
* @return checked
*/
public String getChecked() {
return this.checked;
}
/**
* Set Disabled
* @param disabled disabled
*/
public void setDisabled(String disabled) {
this.disabled = disabled;
}
/**
* Get Disabled
* @return disabled
*/
public String getDisabled() {
return this.disabled;
}
/**
* Set Id
* @param id id
*/
public void setId(String id) {
this.id = id;
}
/**
* Get Id
* @return id
*/
public String getId() {
return this.id;
}
/**
* Set Onblur
* @param onblur onblur
*/
public void setOnblur(String onblur) {
this.onblur = onblur;
}
/**
* Get Onblur
* @return onblur
*/
public String getOnblur() {
return this.onblur;
}
/**
* Set Onchange
* @param onchange onchange
*/
public void setOnchange(String onchange) {
this.onchange = onchange;
}
/**
* Get Onchange
* @return onchange
*/
public String getOnchange() {
return this.onchange;
}
/**
* Set Onclick
* @param onclick onclick
*/
public void setOnclick(String onclick) {
this.onclick = onclick;
}
/**
* Get Onclick
* @return onclick
*/
public String getOnclick() {
return this.onclick;
}
/**
* Set Ondblclick
* @param ondblclick ondblclick
*/
public void setOndblclick(String ondblclick) {
this.ondblclick = ondblclick;
}
/**
* Get Ondblclick
* @return ondblclick
*/
public String getOndblclick() {
return this.ondblclick;
}
/**
* Set Onfocus
* @param onfocus onfocus
*/
public void setOnfocus(String onfocus) {
this.onfocus = onfocus;
}
/**
* Get Onfocus
* @return onfocus
*/
public String getOnfocus() {
return this.onfocus;
}
/**
* Set Onkeydown
* @param onkeydown onkeydown
*/
public void setOnkeydown(String onkeydown) {
this.onkeydown = onkeydown;
}
/**
* Get Onkeydown
* @return onkeydown
*/
public String getOnkeydown() {
return this.onkeydown;
}
/**
* Set Onkeypress
* @param onkeypress onkeypress
*/
public void setOnkeypress(String onkeypress) {
this.onkeypress = onkeypress;
}
/**
* Get Onkeypress
* @return onkeypress
*/
public String getOnkeypress() {
return this.onkeypress;
}
/**
* Set Onkeyup
* @param onkeyup onkeyup
*/
public void setOnkeyup(String onkeyup) {
this.onkeyup = onkeyup;
}
/**
* Get Onkeyup
* @return onkeyup
*/
public String getOnkeyup() {
return this.onkeyup;
}
/**
* Set Onmousedown
* @param onmousedown onmousedown
*/
public void setOnmousedown(String onmousedown) {
this.onmousedown = onmousedown;
}
/**
* Get Onmousedown
* @return onmousedown
*/
public String getOnmousedown() {
return this.onmousedown;
}
/**
* Set Onmousemove
* @param onmousemove onmousemove
*/
public void setOnmousemove(String onmousemove) {
this.onmousemove = onmousemove;
}
/**
* Get Onmousemove
* @return onmousemove
*/
public String getOnmousemove() {
return this.onmousemove;
}
/**
* Set Onmouseout
* @param onmouseout onmouseout
*/
public void setOnmouseout(String onmouseout) {
this.onmouseout = onmouseout;
}
/**
* Get Onmouseout
* @return onmouseout
*/
public String getOnmouseout() {
return this.onmouseout;
}
/**
* Set Onmouseover
* @param onmouseover onmouseover
*/
public void setOnmouseover(String onmouseover) {
this.onmouseover = onmouseover;
}
/**
* Get Onmouseover
* @return onmouseover
*/
public String getOnmouseover() {
return this.onmouseover;
}
/**
* Set Onmouseup
* @param onmouseup onmouseup
*/
public void setOnmouseup(String onmouseup) {
this.onmouseup = onmouseup;
}
/**
* Get Onmouseup
* @return onmouseup
*/
public String getOnmouseup() {
return this.onmouseup;
}
/**
* Set Tabindex
* @param tabindex tabindex
*/
public void setTabindex(String tabindex) {
this.tabindex = tabindex;
}
/**
* Get Tabindex
* @return tabindex
*/
public String getTabindex() {
return this.tabindex;
}
/**
* Set Title
* @param title title
*/
public void setTitle(String title) {
this.title = title;
}
/**
* Get Title
* @return title
*/
public String getTitle() {
return this.title;
}
/**
* Set Titlekey
* @param titlekey titlekey
*/
public void setTitlekey(String titlekey) {
this.titlekey = titlekey;
}
/**
 * Returns the current value of the {@code titlekey} property.
 *
 * @return the {@code titlekey} value
 */
public String getTitlekey() {
    return this.titlekey;
}
} |
Nuclear Capsid Uncoating and Reverse Transcription of HIV-1.
After cell entry, human immunodeficiency virus type 1 (HIV-1) replication involves reverse transcription of the RNA genome, nuclear import of the subviral complex without nuclear envelope breakdown, and integration of the viral complementary DNA into the host genome. Here, we discuss recent evidence indicating that completion of reverse transcription and viral genome uncoating occur in the nucleus rather than in the cytoplasm, as previously thought, and suggest a testable model for nuclear import and uncoating. Multiple recent studies indicated that the cone-shaped capsid, which encases the genome and replication proteins, not only serves as a reaction container for reverse transcription and as a shield from innate immune sensors but also may constitute the elusive HIV-1 nuclear import factor. Rupture of the capsid may be triggered in the nucleus by completion of reverse transcription, by yet-unknown nuclear factors, or by physical damage, and it appears to occur in close temporal and spatial association with the integration process. Expected final online publication date for the Annual Review of Virology, Volume 9 is September 2022. Please see http://www.annualreviews.org/page/journal/pubdates for revised estimates. |
The Use of Exergames in Motor Education Processes for School-Aged Children: A Systematic Review and Epistemic Diagnosis
This study aimed to diagnose the current state of knowledge about the use of exergames in the motor education processes of school-aged children. We conducted a systematic review following the PRISMA recommendations. Web of Science, MedLine (via PubMed), ScienceDirect, and Scopus databases were searched in December 2020 with the terms “exergames”, “motor education”, and “children”. We used the Jadad scale and the Systematization for Research Approaches in Sports Sciences instrument to evaluate the surveyed material. Seventeen articles met the inclusion criteria. We observed that: 1) the use of exergames by children can increase the motor skills of locomotion and control of objects, in addition to the levels of physical fitness, but the magnitude and duration of these increments remain inconclusive; 2) the articles exhibited theoretical and methodological weaknesses; 3) empirical-experimental investigations centered on intervention studies are hegemonic; 4) the theories of Sports Training, Didactics, and Human Movement underlie the studies, referring to an interdisciplinary crossing between Sport Psychology, Sport Pedagogy, Sport and Performance, and Sport and Health; 5) research with alternative designs is necessary; 6) we recommend approaching this issue according to other perspectives, such as Biomechanics applied to Sport, Sports Medicine, Sociology of Sport, and Philosophy of Sport.
Introduction
The development of fundamental motor skills is an essential prerequisite for the competent performance of several types of physical activities . Evidence shows that the triggering of this process in a systematic way since childhood affects both the practice of efficient sports performances in youth and the adoption of active lifestyles in adulthood .
In theoretical terms, fundamental motor skills can be subdivided into two broad classifying categories: locomotion skills and object control skills . Locomotion skills include running, jumping, marching, climbing, riding, swimming, skating, among others, while object control skills refer to transporting, intercepting, wielding, designing, and controlling implements in actions related to receptions, throws, bouncing, conduction with feet and hitting . The development of physical fitness regarding balance, coordination, agility, speed, and reaction time contributes positively to the increase of these two types of skills, as they enable the body to perform them properly .
Overall, the first manifestations of fundamental motor skills occur after the child stabilizes the bipedal posture and starts to walk alone. Participation in games is relevant to, even at random, have the opportunity to perform body skills in the challenges inherent to these activities. The continued exposure to such stimuli contributes, over time, to acquire increasing levels of motor proficiency .
On the other hand, any obstructions in the course of motor evolution even in the first years of life can cause delays with an extension until puberty if they are not properly reversed in a timely manner. If they remain unchanged for long periods, deficits in locomotion and object control skills affect the behavioral and psychic domains. This can decrease the interest in the practice of physical activities, perturb self-esteem, and cause distortions in body image .
School Physical Education programs represent a strategic possibility of facing this scenario if they are given diligently with regard to content planning and execution. Likewise, the provision of public leisure policies focused on combating sedentary lifestyles among young people should be seen as measures of equal significance . Although such actions are essential, public health indicators attest that, by themselves, they are limited to promote the increase of basic motor skills of infants and adolescents related to the actions of running, jumping, swimming, throwing, launching, among others, according to minimally reasonable standards of technical effectiveness . In a 13-year longitudinal study, Hardy et al. investigated the development levels of fundamental motor skills in children and adolescents. In the end, they observed that less than 50% exhibited basic motor skills at satisfactory levels. Similarly, Brian et al. found in a recent study carried out in the United States of America (USA) that approximately 77% of the analyzed sample of infants and pubescents were in a situation of delayed motor development .
The cogency of this context and the urgency to face it has led academics and professionals in the area of human motricity to research and propose original solutions during the last two decades. One of them refers to the use of active video games (exergames) in children's motor education processes . Supporters of this idea state that exergames can be helpful tools for teaching, acquiring, and improving the motor skills of children and adolescents of different ages, sex, biological maturity, and clinical conditions. The undeniable popularity of these types of games among young people, mainly as a residential entertainment option, is the main justification to support such a suggestion. Conceptually, exergames are digital games that require movement of the body as a whole, through devices that convert the individual's real movements to the virtual environment. This allows them to practice simulated sports, fitness exercises, and/or other playful and interactive physical activities. Unlike conventional video games, exergames require physical effort .
The innovative character of this approach not only ratifies the creativity of its proponents but also demonstrates the commitment to try to equate and solve the problem at hand. However, as it is a recent issue, it is legitimate to raise the hypothesis that studies related to the theme are still in an early stage. Thus, identifying the characteristics of the exergames as to the criteria for demarcating objects, data treatment techniques, sample compositions, and the applicability of the results is a necessary task both to have a broader view of their theoretical-methodological profiles and for the emission of epistemic diagnoses/prognoses. Therefore, the objective of this study was twofold: 1) to identify, through a systematic review, the ways of using exergames in the processes of motor education of school-age children; 2) to diagnose the epistemic state of this use in the context of Sport Sciences.
Method
This systematic review was drafted based on the Preferred Reporting Items for Systematic Reviews and Meta-Analyses (PRISMA) recommendations .
Search strategy
A search was made without time or language filters in December 2020 in the Web of Science, MedLine (via PubMed), ScienceDirect, and Scopus databases. We used the keywords "exergames", "motor education", and "children". The search phrase was obtained using the Boolean operators OR (between the synonyms) and AND (between the descriptors). Two independent evaluators performed the search. Any disagreements were solved by a consensus meeting or decided by a supervisor.
Inclusion and exclusion criteria
We included peer-reviewed articles that investigated the use of exergames on the acquisition and development of at least one type of locomotor skill or object control both in Physical Education classes and in non-formal educational contexts (clubs, gyms, residences) in school-aged individuals. The exclusion criteria consisted of: (1) opinion articles, reviews, case reports, annals of congresses, books, book chapters, theses, dissertations, and technical reports; (2) games unsuitable for residential or educational use, as well as computer games; (3) research related to the rehabilitation of special groups.
Data collection process
Data extracted from included studies comprised the following analytical matrices: (1) author, year of publication, and country of the study; (2) purpose of the study; (3) descriptive characteristics of the participants; (4) methodological aspects; (5) results.
Methodological quality evaluation and epistemological diagnosis
The methodological quality of the studies was evaluated by the Jadad scale , which consists of the punctuation of the scores from 11 domains, namely: 1a) the study was reported as randomized; 1b) the randomization was properly performed; 2a) the study was a double-blind trial; 2b) the blinding was properly performed; 3) the sample loss was described. If items 1a, 2a, and 3 were performed, the study got 1 point per item. If items 1b and 2b were observed, the study received another point per item. In the case of items 1b and 2b were not met, the study lost 1 point concerning items 1a and 2a, respectively. On this scale, the scores ranged from 0 to 5. Studies with scores equal to or lower than 3 points were considered at a high risk of bias. Two independent and qualified researchers applied this instrument. A third author was consulted in case of any divergence.
The epistemological evaluation of the surveyed material occurred through the Systematization for Research Approaches in Sports Sciences (SRASS) instrument . The SRASS aims to determine the epistemic approaches of studies regarding their guiding paradigms (empirical-experimental paradigm; critical-dialectic paradigm; hermeneutic-phenomenological paradigm); nature of the study (intervention study; cross-sectional study; case study; laboratory study); support theories (theories of human movement; game theories; theories of sports training; theories of didactics applied to sport) and subareas of linkage to Sport Sciences (Sports Medicine; Biomechanics applied to Sport; Sport Psychology; Sport Pedagogy; Sociology of Sport; History of Sport; Philosophy of Sport; Sport and Health; Sport for Special Groups; Sport and Media; Sport of Participation).
Results
In total, 120 studies were found following the proposed research methodology (Web of Science = 12; MedLine via PubMed = 17; ScienceDirect = 71; Scopus = 20). After using the selection criteria, 17 studies were included (Figure 1). Table 1 shows the descriptive characteristics of the studies included in the present review. The year of publication of the studies ranged from 2012 to 2020. The sample size in each group (intervention and control) ranged from 5 to 557 participants. The samples included both girls and boys, except the study by EbrahimiSani et al. that included only girls. The total number of participants was 2,631 (1,338 in the intervention group and 1,293 in the control group). The age of the participants ranged from 4 to 14 years old. n: sample size; IG: intervention group; CG: control group; ♀: female; ♂: male; DCD: developmental coordination disorder.
The methodological quality evaluation is shown in Table 3. Only two studies presented a low risk of bias.
Discussion
This study aimed to identify, through a systematic review, the ways of using exergames in the motor education processes of school-aged children and to diagnose the epistemic state of this use in the context of Sport Sciences. Technically, exergames gather the main dimensions of virtual realities: interaction, involvement, and immersion. The interaction is related to the environment's ability to respond to user actions interactively through devices. Some devices can naturally capture users' movements. The involvement is the ability to maintain the user's attention, seeking to explore their different senses, keeping the user attracted and motivated to remain in the environment. Immersion refers to the ability to make the user feel present in the simulated environment, seeking to distance them from the real environment .
A dominant feature of the investigations raised on exergames in our study concerns the fact that most of them focused on samples of neurotypical infants. Neurotypical individuals are those who do not fit the autism spectrum, exhibiting linguistic, sensorimotor, affective, and cognitive aspects consistent with those expected for their chronological age . Eleven of the included studies exemplify this trend. Conversely, four included studies analyzed the motor behaviors of children with developmental coordination disorder. The other two studies included, as target subjects, groups of neurotypical and autistic individuals and young people in different stages of motor performance .
In summary, it is noted that the dominant neurological characteristic of the investigated subjects refers to neurotypical people, that is, situated within frames considered normal. Contrariwise, neurodivergent or neuroatypical analyzes are in the minority. Moreover, all authors focused on sample groups of infants of both sexes, except EbrahimiSani et al. , who prioritized only girls. It is concluded then, in this regard, that the investigations do not privilege the masculine gender over the feminine and vice versa. Still with respect to the sample groups, it is reiterated that the samples were heterogeneous in terms of the number of individuals analyzed, chronological ages, and levels of biological maturation.
Another demographic item to be highlighted is the fact that the investigations are distributed by teams of researchers located in different countries. This means that the theme of the effects of exergames on the acquisition and development of motor skills of schoolchildren has a global connotation.
In terms of the methodological characteristics of the investigations, despite the different training volumes and motor stimuli applied, in almost all studies some type of significant result was obtained from the intervention groups when compared to the control groups. At first, this means that exergames may have positive effects on the gross motor skills and physical fitness levels of children with different levels of training. Only one study showed no change in any variable. However, such gains should be viewed with caution, as it is not possible to confirm whether they will continue, and in what proportion, as the infant's biological maturation progresses and the training status changes.
This diagnosis is reinforced when it is observed that, among the 17 selected studies, only two were considered to be of high theoretical and methodological quality. Hence, they correspond to those of greater scientific credibility. In compensation, 15 investigations received a rating of three or less on the Jadad scale , which denotes compromises in their quality in terms of scientificity. Thus, they are research with coherence, consistency, objectivity, and control of subjectivity subject to criticism. As a result, the verisimilitude of the conclusions they announce must be interpreted with caution . To paraphrase Miller , situations of this nature are relatively usual when a given object of study is still recent, in the sense that the scientific community to address it is still in the early stages of theoretical problematization. Consequently, the demarcations of the object have little depth, as well as the investigative horizons considered more pertinent in the medium and long terms.
Regarding the epistemological profile of the studies surveyed, it can be seen that, in full, all consisted of empirical-experimental intervention studies. Research with this bias has, as a guiding axis, the exposure of individuals to certain stimuli to verify their random impacts on one or more pre-established variables. In this type of conception, the researcher pre-understands that certain factors are hypothetically capable of engendering transformations in structural elements of the object. In the case of the present study, it is reasonable to conclude that researchers in exergames assume that such a class of games is capable of influencing the biopsychic construction of the motor skills of school-aged individuals. Hence the need for them to seek reliable evidence on such a process .
The conceptual basis adopted to support the selected investigations refers almost exclusively to the Theories of Human Movement with an emphasis on Sport Psychology of a behavioral nature. However, it was possible to identify, in some of them, the existence of interfaces with the areas of Sport Pedagogy, Sport and Performance, and Sport and Health. Mediating this junction are the theories of Sports Training and Didactics. As a complement, we reiterate that no study mentioned Game Theories, which are among the classic bodies of knowledge of Physical Education and Sport Sciences.
The previous observation shows two contexts that are interconnected. The first goes back to the detection that, despite behavioral Sport Psychology being the Sports Science sub-area to endorse most of the inventoried works, it is possible to perceive the search for an incipient interdisciplinary dialog with the other mentioned theoretical fields. On the other hand, and this is the second consideration, Sports Training, and Didactics, under the aegis of Sport Pedagogy, Sport and Health, and Sport and Performance, constitute disciplines that go back to the structuring nucleus originating from Physical Education and Sports Science . Therefore, it can be seen that, given the emergence of the relationship between exergames and the development of motor skills in childhood, given that it is a relatively recent object of study, a return to the knowledge bases that support the epistemic tradition of Physical Education and Sport Sciences is outlined. In terms of Theory of Knowledge, attitudes like these are consistent with the notion called Fundationalism, which alludes to the search for theoretical support for new ideas in knowledge that history has endorsed and endorse as legitimate in the flow of time .
The present study has some limitations. The first concerns the selection of articles from four electronic databases. Although the investigated databases catalog a vast number of scientific journals worldwide, some articles published in other journals that address this issue may not have been found. Studies from a larger number of search engines could enrich the analysis and discussions.
Conclusions
The present study allows the announcement of some conclusions. Effectively, the use of exergames by school-aged children can promote an increase in motor skills both in locomotion and in object control. Their physical fitness levels are also capable of improving. However, the magnitude and duration of these increments remain inconclusive. In epistemological terms, the state of knowledge of the productions related to the theme is in an embryonic state. Furthermore, the quality of the articles exhibits theoretical and methodological weaknesses that must be overcome. Investigations of an empirical-experimental nature focused on intervention studies are hegemonic. At the conceptual level, the theories of Sports Training, Didactics, and Human Movement have been chosen to provide the theoretical foundation, referring to the existence of an interdisciplinary intersection, in the field of Sport Sciences, between Sport Psychology, Sport Pedagogy, Sport and Performance, and Sport and Health.
Author details
Based on this diagnosis, it is urgent to affirm that, for example, research that opts for alternative methodological designs is still necessary, such as case reports, cross-sectional studies, longitudinal studies, and even conceptual essays. In the case of Sports Science sub-areas, it is necessary to approach the subject according to other perspectives. As an option, we suggest Biomechanics applied to Sport, Sports Medicine, Sport Sociology, and Sport Philosophy. |
<reponame>robertdstein/flarestack
import numpy as np
from astropy import units as u
from astropy.coordinates import Distance
import os
import logging
from flarestack.shared import catalogue_dir
from flarestack.utils.prepare_catalogue import cat_dtype
from flarestack.cosmo.neutrino_cosmology import (
define_cosmology_functions,
integrate_over_z,
cumulative_z,
)
from scipy.interpolate import interp1d
# Module-level logger, named after this module for hierarchical log filtering.
logger = logging.getLogger(__name__)
def simulate_transient_catalogue(
    mh_dict,
    rate,
    resimulate=False,
    cat_name="random",
    n_entries=30,
    local_z=0.1,
    seed=None,
):
    """Simulate random transient-source catalogues for a given volumetric rate.

    Sources are drawn uniformly over the sky, uniformly in time across the
    combined livetime of the datasets in ``mh_dict``, and in redshift according
    to the cosmological rate density ``rate``, out to ``local_z``.  Nested
    catalogues of increasing size (log-spaced up to the full local sample) are
    saved to disk in northern-sky, southern-sky and full-sky variants.

    :param mh_dict: Minimisation-handler dictionary; must contain a "dataset"
        entry (seasons providing time PDFs) and a spectral index under
        ``inj_dict.injection_energy_pdf.gamma``.
    :param rate: Volumetric rate function passed to define_cosmology_functions.
    :param resimulate: If True, regenerate catalogues even if files exist.
    :param cat_name: Subdirectory name under ``catalogue_dir`` for the output.
    :param n_entries: Number of log-spaced catalogue sizes to generate.
    :param local_z: Maximum redshift out to which sources are simulated.
    :param seed: Optional numpy random seed for reproducible catalogues.
    :return: Dict with keys "Northern", "Southern" and "Full", each mapping to
        the list of saved catalogue file paths (one per catalogue size).
    """
    # Total data-taking window spanned by all seasons, in MJD.
    tpdfs = [season.get_time_pdf() for season in mh_dict["dataset"].values()]
    data_start = min([time_pdf.sig_t0() for time_pdf in tpdfs])
    data_end = max([time_pdf.sig_t1() for time_pdf in tpdfs])
    try:
        injection_gamma = mh_dict["inj_dict"]["injection_energy_pdf"]["gamma"]
    except KeyError:
        raise Exception("No spectral index defined")
    # Cosmological helper functions for this rate and spectral index
    # (the 1 erg per-source energy is a placeholder normalisation).
    (
        rate_per_z,
        nu_flux_per_z,
        nu_flux_per_source,
        cumulative_nu_flux,
    ) = define_cosmology_functions(
        rate, 1 * u.erg, injection_gamma, nu_bright_fraction=1.0
    )
    n_tot = integrate_over_z(rate_per_z, zmin=0.0, zmax=8.0)
    logger.info(
        "We can integrate the rate up to z=8.0. This gives {:.3E}".format(n_tot)
    )
    n_local = integrate_over_z(rate_per_z, zmin=0.0, zmax=local_z)
    logger.info(
        "We will only simulate up to z={0}. In this volume, there are {1:.3E}".format(
            local_z, n_local
        )
    )
    sim_length = (data_end - data_start) * u.day
    logger.info("We simulate for {0}".format(sim_length))
    # Expected number of sources occurring within the simulated volume and time.
    n_local = int(n_local * sim_length)
    logger.debug("Entries in catalogue {0}".format(n_local))
    logger.debug(
        "We expect this region to contribute {:.3g} of all the flux from this source class".format(
            cumulative_nu_flux(local_z)[-1] / cumulative_nu_flux(8.0)[-1]
        )
    )
    # Log-spaced catalogue sizes between ~1e-4 of the local sample and all of it
    # (deduplicated, zero-size catalogues dropped).
    n_catalogue = sorted(
        list(
            set([int(x) for x in np.logspace(-4, 0, n_entries) * n_local if int(x) > 0])
        )
    )
    cat_names_north = [
        catalogue_dir + cat_name + "/" + str(n) + "_cat_northern.npy"
        for n in n_catalogue
    ]
    cat_names_south = [
        catalogue_dir + cat_name + "/" + str(n) + "_cat_southern.npy"
        for n in n_catalogue
    ]
    cat_names = [
        catalogue_dir + cat_name + "/" + str(n) + "_cat_full.npy" for n in n_catalogue
    ]
    all_cat_names = {
        "Northern": cat_names_north,
        "Southern": cat_names_south,
        "Full": cat_names,
    }
    if seed is not None:
        np.random.seed(seed)
    # Regenerate unless every full-sky catalogue already exists and resimulation
    # was not requested.  NOTE(review): only the "Full" file list is checked, so
    # missing north/south files alone will not trigger a resimulation — confirm
    # this is intended.
    if not np.logical_and(
        np.sum([os.path.isfile(x) for x in cat_names]) == len(cat_names), not resimulate
    ):
        catalogue = np.empty(n_local, dtype=cat_dtype)
        catalogue["source_name"] = ["src" + str(i) for i in range(n_local)]
        # Isotropic sky positions: uniform in RA and in sin(dec).
        catalogue["ra_rad"] = np.random.uniform(0.0, 2 * np.pi, n_local)
        catalogue["dec_rad"] = np.arcsin(np.random.uniform(-1.0, 1.0, n_local))
        catalogue["injection_weight_modifier"] = np.ones(n_local)
        catalogue["base_weight"] = np.ones(n_local)
        # Reference times are uniform over the combined data-taking period.
        catalogue["ref_time_mjd"] = np.random.uniform(data_start, data_end, n_local)
        catalogue["start_time_mjd"] = 0.0
        catalogue["end_time_mjd"] = 0.0
        # Define conversion fraction to sample redshift distribution
        zrange = np.linspace(0, local_z, int(1e3))
        count_ints = [(x * sim_length).value for x in cumulative_z(rate_per_z, zrange)]
        count_ints = np.array([0] + count_ints) / max(count_ints)
        # Inverse-CDF sampling: map uniform randoms in [0, 1] to redshifts.
        rand_to_z = interp1d(count_ints, zrange[:-1])
        z_vals = sorted(rand_to_z(np.random.uniform(0.0, 1.0, n_local)))
        mpc_vals = [Distance(z=z).to("Mpc").value for z in z_vals]
        catalogue["distance_mpc"] = np.array(mpc_vals)
        # (label, min sin(dec), max sin(dec)) for each sky selection.
        dec_ranges = [
            ("Northern", 0.0, 1.0),
            ("Southern", -1, 0.0),
            ("Full", -1.0, 1.0),
        ]
        for i, n in enumerate(n_catalogue):
            for (key, dec_min, dec_max) in dec_ranges:
                index = int(n)
                # Nested catalogues: the n nearest sources (z_vals is sorted,
                # so the catalogue is ordered by increasing distance).
                cat = catalogue[:index]
                cat_path = all_cat_names[key][i]
                mask = np.logical_and(
                    np.sin(cat["dec_rad"]) > dec_min, np.sin(cat["dec_rad"]) < dec_max
                )
                try:
                    os.makedirs(os.path.dirname(cat_path))
                except OSError:
                    pass
                np.save(cat_path, cat[mask])
                logger.info("Saved to {0}".format(cat_path))
    return all_cat_names
def simulate_transients(sim_length_year, rate, injection_gamma=2.0, local_z=0.1):
    """Simulate transient-source redshifts for a given rate over a fixed livetime.

    Draws source redshifts (out to ``local_z``) from the cosmological rate
    density ``rate``, for a simulated livetime of ``sim_length_year`` years.

    :param sim_length_year: Length of the simulation, in years.
    :param rate: Volumetric rate function passed to define_cosmology_functions.
    :param injection_gamma: Spectral index used for the flux functions.
    :param local_z: Maximum redshift out to which sources are simulated.
    :return: Sorted list of simulated source redshifts.
    """
    # Bug fix: define_cosmology_functions returns FOUR functions (see the
    # matching unpack in simulate_transient_catalogue above); unpacking three
    # raised a ValueError.  The per-source flux is unused here.
    (
        rate_per_z,
        nu_flux_per_z,
        nu_flux_per_source,
        cumulative_nu_flux,
    ) = define_cosmology_functions(
        rate, 1 * u.erg, injection_gamma, nu_bright_fraction=1.0
    )
    print("We can integrate the rate up to z=8.0. This gives")
    n_tot = integrate_over_z(rate_per_z, zmin=0.0, zmax=8.0)
    print("{:.3E}".format(n_tot))
    print("We will only simulate up to z=" + str(local_z) + ".")
    n_local = integrate_over_z(rate_per_z, zmin=0.0, zmax=local_z)
    print("In this volume, there are", "{:.3E}".format(n_local))
    sim_length = (sim_length_year * u.year).to("day")
    print("We simulate for", sim_length)
    # Expected number of sources in the simulated volume and livetime.
    n_local = int(n_local * sim_length)
    print("Entries in catalogue", n_local)
    print("We expect this region to contribute")
    print(
        "{:.3g}".format(cumulative_nu_flux(local_z)[-1] / cumulative_nu_flux(8.0)[-1])
    )
    print("of all the flux from this source class")
    # Define conversion fraction to sample redshift distribution.
    # Bug fix: np.linspace requires an integer sample count (1e3 is a float).
    zrange = np.linspace(0, local_z, int(1e3))
    count_ints = [(x * sim_length).value for x in cumulative_z(rate_per_z, zrange)]
    count_ints = np.array([0] + count_ints) / max(count_ints)
    # Inverse-CDF sampling: map uniform randoms in [0, 1] to redshifts.
    rand_to_z = interp1d(count_ints, zrange[:-1])
    z_vals = sorted(rand_to_z(np.random.uniform(0.0, 1.0, n_local)))
    return z_vals
|
<filename>Development/nmos/id.cpp
#include "nmos/id.h"
#include <boost/uuid/name_generator.hpp>
#include <boost/uuid/random_generator.hpp>
#include <boost/uuid/string_generator.hpp>
#include <boost/uuid/uuid_io.hpp>
#include "cpprest/basic_utils.h"
namespace nmos
{
    // Private implementation (pimpl) for id_generator; keeps the Boost.UUID
    // dependency out of the public header.
    struct id_generator::impl_t
    {
        // Reusable random-number-based UUID generator.
        boost::uuids::random_generator gen;
    };

    id_generator::id_generator()
        : impl(new impl_t)
    {
    }

    id_generator::~id_generator()
    {
        // explicitly defined so that impl_t is a complete type for the unique_ptr destructor
    }

    namespace details
    {
        // Convert a uuid to the requested string type (narrow or wide),
        // selected at compile time via explicit specialization.
        template <typename StringT> StringT to(const boost::uuids::uuid& u);
        template <> inline std::string to(const boost::uuids::uuid& u) { return boost::uuids::to_string(u); }
        template <> inline std::wstring to(const boost::uuids::uuid& u) { return boost::uuids::to_wstring(u); }
    }

    // Produce an id from a fresh random UUID, reusing this generator's state.
    id id_generator::operator()()
    {
        return details::to<id>(impl->gen());
    }

    // generate a random number-based UUID (v4)
    // note, when creating multiple UUIDs, using a generator can be more efficient depending on platform and dependencies
    id make_id()
    {
        return id_generator()();
    }

    // generate a name-based UUID (v5)
    // the same namespace_id and name always produce the same id
    id make_repeatable_id(id namespace_id, const utility::string_t& name)
    {
        return details::to<id>(boost::uuids::name_generator(boost::uuids::string_generator()(namespace_id))(name));
    }
}
|
use tract_hir::internal::*;
use tract_ndarray::prelude::*;
/// ONNX Local Response Normalization (LRN) operator.
///
/// Normalizes each value across a window of neighbouring channels (axis 1):
/// `y = x / (bias + alpha/size * sum_window(x^2))^beta`.
#[derive(Debug, Clone, new, Default, Educe)]
#[educe(Hash)]
pub struct Lrn {
    /// Scale factor applied to the windowed sum of squares.
    #[educe(Hash(method = "hash_f32"))]
    pub alpha: f32,
    /// Exponent of the normalizing denominator.
    #[educe(Hash(method = "hash_f32"))]
    pub beta: f32,
    /// Additive constant in the denominator.
    #[educe(Hash(method = "hash_f32"))]
    pub bias: f32,
    /// Width of the channel window summed over.
    pub size: usize,
}
// Glue macro: implements tract's dynamic-hashing trait for Lrn (relies on the
// Hash implementation derived above).
tract_linalg::impl_dyn_hash!(Lrn);
impl Lrn {
    /// Typed evaluation kernel: applies LRN along the channel axis (axis 1)
    /// of `input` for a concrete float type `T`.
    ///
    /// For the element at channel `c`, the window covers channels
    /// `c - floor((size-1)/2) ..= c + ceil((size-1)/2)`, clamped to the valid
    /// channel range, and the output is
    /// `x / (bias + alpha/size * square_sum)^beta`.
    fn eval_t<
        T: Datum + tract_num_traits::Float + tract_num_traits::FromPrimitive + ::std::iter::Sum,
    >(
        &self,
        input: Arc<Tensor>,
    ) -> TractResult<TVec<Arc<Tensor>>> {
        let input = input.to_array_view::<T>()?;
        let channels = input.shape()[1];
        let output = Array::from_shape_fn(input.shape(), |mut coords| {
            let c = coords[1];
            // Read the centre value before `coords` is mutated below.
            let x = input[&coords];
            // Window bounds, clamped to [0, channels - 1]; saturating_sub
            // avoids underflow near the lower edge.
            let c_min = c.saturating_sub((self.size - 1) / 2);
            let c_max = (c + ((self.size - 1).div_ceil(2))).min(channels - 1);
            // Sum of squares over the window, reusing `coords` for indexing by
            // overwriting only the channel coordinate.
            let square_sum: T = (c_min..=c_max)
                .map(|c| {
                    coords[1] = c;
                    input[&coords].powi(2)
                })
                .sum();
            x / (T::from(self.bias).unwrap()
                + T::from(self.alpha).unwrap() / T::from(self.size).unwrap() * square_sum)
                .powf(T::from(self.beta).unwrap())
        });
        Ok(tvec!(output.into_arc_tensor()))
    }
}
impl Op for Lrn {
    /// Operator name reported by tract.
    fn name(&self) -> Cow<str> {
        "Lrn".into()
    }
    // Boilerplate: tag as an ONNX op, expose the typed-op view, and declare
    // that this op has no pulsed (streaming) form.
    op_onnx!();
    op_as_typed_op!();
    not_a_pulsed_op!();
}
impl StatelessOp for Lrn {
    /// Eager evaluation: extracts the single input and dispatches to `eval_t`
    /// for the input's concrete float datum type.
    fn eval(&self, mut inputs: TVec<Arc<Tensor>>) -> TractResult<TVec<Arc<Tensor>>> {
        let input = args_1!(inputs);
        dispatch_floatlike!(Self::eval_t(input.datum_type())(self, input))
    }
}
impl InferenceRulesOp for Lrn {
    /// Inference rules: exactly one input and one output, and the output
    /// shares the input's datum type and shape (LRN is shape-preserving).
    fn rules<'r, 'p: 'r, 's: 'r>(
        &'s self,
        s: &mut Solver<'r>,
        inputs: &'p [TensorProxy],
        outputs: &'p [TensorProxy],
    ) -> InferenceResult {
        check_input_arity(&inputs, 1)?;
        check_output_arity(&outputs, 1)?;
        s.equals(&inputs[0].datum_type, &outputs[0].datum_type)?;
        s.equals(&inputs[0].shape, &outputs[0].shape)?;
        Ok(())
    }
    as_op!();
    to_typed!();
}
impl TypedOp for Lrn {
    as_op!();
    /// The output fact mirrors the input fact (same datum type and shape).
    fn output_facts(&self, inputs: &[&TypedFact]) -> TractResult<TVec<TypedFact>> {
        Ok(tvec!(inputs[0].clone()))
    }
}
|
/**
* Test case for the CertificateSamplerCustomPublisher.
*
* This is a unit test and does not require EJBCA to be running.
*
* @version $Id: CertificateSamplerCustomPublisherUnitTest.java 22121 2015-10-29 13:49:30Z mikekushner $
*/
public class CertificateSamplerCustomPublisherUnitTest {
// Directory the publisher's "outputfolder" property points at in these tests.
private static final File TEMP_DIR = new File(System.getProperty("java.io.tmpdir"));
// Token that is always authorized, so the tests are not blocked by access control.
private static final AuthenticationToken ANY_ADMIN = new AlwaysAllowLocalAuthenticationToken(new UsernamePrincipal("CertificateSamplerCustomPublisherUnitTest"));
// Placeholder arguments whose exact values the publisher under test should not care about.
private static final byte[] ANY_BYTEARRAY = new byte[0];
private static final String ANY_CAFP = "44447777111";
private static final int ANY_NUMBER = 4711;
private static final String ANY_SUBJECTDN = "CN=User";
private static final Certificate NULL_CERTIFICATE = null;
private static final int ANY_PROFILEID = 123123;
private static final int PROBABILISTIC_TRIES = 100;
// Arbitrary certificate profile ids used to exercise per-profile configuration.
private static final int PROFILE_A = 10;
private static final int PROFILE_B = 11;
private static final Properties CONFIG_SAMPLE_ALL;
private static final Properties ANY_GOOD_PROPERTIES;
static {
    // Baseline configuration: write to the temp folder and sample every certificate.
    CONFIG_SAMPLE_ALL = new Properties();
    CONFIG_SAMPLE_ALL.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
    CONFIG_SAMPLE_ALL.setProperty("default.samplingmethod", "SAMPLE_ALL");
    ANY_GOOD_PROPERTIES = CONFIG_SAMPLE_ALL;
}
@Before
public void setUp() throws Exception {
    // Intentionally empty: each test constructs its own publisher and configuration.
}
@After
public void tearDown() throws Exception {
    // Intentionally empty: no shared resources are allocated in setUp.
}
/**
 * Tests that storeCertificate and testConnection throw an exception when the
 * mandatory "outputfolder" property is missing from the configuration.
 */
@Test
public void testNoOutputFolder() throws Exception {
    MockedCertificateSamplerCustomPublisher publisher;
    Properties config = new Properties();
    // Only the sampling method is configured; "outputfolder" is deliberately omitted.
    config.setProperty("default.samplingmethod", "SAMPLE_ALL");
    // Test storeCertificate
    publisher = createMockedPublisher(config);
    try {
        storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID);
        fail("Should have failed as property outputfolder was missing");
    } catch (PublisherException expected) {} // NOPMD
    // Test testConnection
    publisher = createMockedPublisher(config);
    try {
        publisher.testConnection();
        fail("Should have failed as property outputfolder was missing");
    } catch (PublisherConnectionException expected) {} // NOPMD
}
/**
 * Tests that storeCertificate and testConnection throw an exception when the
 * property for the default sampling method is missing.
 */
@Test
public void testNoDefaultSamplingMethod() throws Exception {
    MockedCertificateSamplerCustomPublisher publisher;
    Properties config = new Properties();
    // Only the output folder is configured; "default.samplingmethod" is deliberately omitted.
    config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
    // Test storeCertificate
    publisher = createMockedPublisher(config);
    try {
        storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID);
        // Bug fix: the fail message previously claimed "outputfolder" was the
        // missing property (copy-paste from testNoOutputFolder).
        fail("Should have failed as property default.samplingmethod was missing");
    } catch (PublisherException expected) {} // NOPMD
    // Test testConnection
    publisher = createMockedPublisher(config);
    try {
        publisher.testConnection();
        fail("Should have failed as property default.samplingmethod was missing");
    } catch (PublisherConnectionException expected) {} // NOPMD
}
/**
 * Tests that storeCertificate and testConnection throws Exception as the default pvalue is missing.
 */
@Test
public void testNoPValueForDefaultSamplingMethod() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
// SAMPLE_PROBABILISTIC requires "default.pvalue", which is deliberately omitted.
Properties config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
// Test storeCertificate
publisher = createMockedPublisher(config);
try {
storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID);
fail("Should have failed as no default pvalue was specified");
} catch (PublisherException expected) {} // NOPMD
// Test testConnection
publisher = createMockedPublisher(config);
try {
publisher.testConnection();
fail("Should have failed as no default pvalue was specified");
} catch (PublisherConnectionException expected) {} // NOPMD
}
/**
 * Tests that storeCertificate and testConnection throws Exception as the pvalue for a profile is missing.
 */
@Test
public void testNoPValueForProfileSamplingMethod() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
// Profile A uses SAMPLE_PROBABILISTIC but its "pvalue" property is omitted.
Properties config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_ALL");
config.setProperty("profileid." + PROFILE_A + ".samplingmethod", "SAMPLE_PROBABILISTIC");
// Test storeCertificate
publisher = createMockedPublisher(config);
try {
storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A);
fail("Should have failed as no default pvalue for profile A specified");
} catch (PublisherException expected) {} // NOPMD
// Test testConnection
publisher = createMockedPublisher(config);
try {
publisher.testConnection();
fail("Should have failed as no pvalue for a profile specified");
} catch (PublisherConnectionException expected) {} // NOPMD
}
/**
 * Tests that testConnection gives error if pvalue is invalid.
 * A valid pvalue must lie in the [0, 1] interval; both the profile-specific
 * and the default pvalue are checked with an illegal negative value.
 */
@Test
public void testPvalueNotInInterval() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
Properties config;
// Profile pvalue=-0.5 (illegal)
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_ALL");
config.setProperty("profileid." + PROFILE_A + ".samplingmethod", "SAMPLE_PROBABILISTIC");
config.setProperty("profileid." + PROFILE_A + ".pvalue", "-0.5");
publisher = createMockedPublisher(config);
try {
publisher.testConnection();
fail("Should have failed as pvalue is not in the [0, 1] range");
} catch (PublisherConnectionException expected) {} // NOPMD
// Default pvalue=-0.5 (illegal)
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
config.setProperty("default.pvalue", "-0.5");
publisher = createMockedPublisher(config);
try {
publisher.testConnection();
fail("Should have failed as pvalue is not in the [0, 1] range");
} catch (PublisherConnectionException expected) {} // NOPMD
}
/**
 * Tests that testConnection gives an error when a profile property key has a
 * non-numeric profile id part (e.g. "profileid.INVALID.samplingmethod").
 */
@Test
public void testInvalidProfileKey() throws Exception {
    MockedCertificateSamplerCustomPublisher publisher;
    Properties config;
    // Profile key with a non-numeric id part (illegal).
    // (The old comment here, "Profile pvalue=-0.5", was a copy-paste leftover
    // from testPvalueNotInInterval.)
    config = new Properties();
    config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
    config.setProperty("default.samplingmethod", "SAMPLE_ALL");
    config.setProperty("profileid.INVALID.samplingmethod", "SAMPLE_ALL");
    publisher = createMockedPublisher(config);
    try {
        publisher.testConnection();
        fail("Should have failed as 'INVALID' is not a legal profile id");
    } catch (PublisherConnectionException expected) {} // NOPMD
}
/**
 * Tests that testConnection and storeCertificate gives error if there is an invalid sampling method,
 * both when it is configured as the default and when it is configured per profile.
 */
@Test
public void testInvalidSamplingMethod() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
Properties config;
// Default sampling method: INVALID
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "_INVALID_");
publisher = createMockedPublisher(config);
try {
publisher.testConnection();
fail("Should have failed as '_INVALID_' is not an existing sampling method");
} catch (PublisherConnectionException expected) {} // NOPMD
publisher = createMockedPublisher(config);
try {
storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID);
fail("Should have failed as '_INVALID_' is not an existing sampling method");
} catch (PublisherException expected) {} // NOPMD
// Profile sampling method: INVALID
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_ALL");
config.setProperty("profileid." + PROFILE_A + ".samplingmethod", "_INVALID_");
publisher = createMockedPublisher(config);
try {
publisher.testConnection();
fail("Should have failed as '_INVALID_' is not an existing sampling method");
} catch (PublisherConnectionException expected) {} // NOPMD
publisher = createMockedPublisher(config);
try {
storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A);
fail("Should have failed as '_INVALID_' is not an existing sampling method");
} catch (PublisherException expected) {} // NOPMD
}
/**
 * Tests that the method storeCRL always returns true as publishing/sampling
 * of CRLs is currently not supported.
 */
@Test
public void testStoreCRL() throws Exception {
    final CertificateSamplerCustomPublisher publisher = createPublisher(ANY_GOOD_PROPERTIES);
    final boolean status = publisher.storeCRL(ANY_ADMIN, ANY_BYTEARRAY, ANY_CAFP, ANY_NUMBER, ANY_SUBJECTDN);
    assertTrue("Storing CRL is not supported but return status should be success", status);
}
/**
 * Tests that revoking a certificate does not invoke any sampling.
 * Both CERT_REVOKED and CERT_INACTIVE statuses are checked: the call must
 * still report success, but no certificate may be written.
 */
@Test
public void testStoreCertificateRevoked() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
boolean success;
// Revoked certificate: accepted but not sampled.
publisher = createMockedPublisher(ANY_GOOD_PROPERTIES);
success = storeCertificate(publisher, CertificateConstants.CERT_REVOKED, ANY_PROFILEID);
assertTrue("Status should be success", success);
assertFalse("Certificate should not have been stored", publisher.isWriteCertificateCalled());
// Inactive certificate: accepted but not sampled.
publisher = createMockedPublisher(ANY_GOOD_PROPERTIES);
success = storeCertificate(publisher, CertificateConstants.CERT_INACTIVE, ANY_PROFILEID);
assertTrue("Status should be success", success);
assertFalse("Certificate should not have been stored", publisher.isWriteCertificateCalled());
}
/**
 * Tests that publishing with sampling method ALL stores the certificate.
 */
@Test
public void testSampleAll() throws Exception {
    final MockedCertificateSamplerCustomPublisher publisher = createMockedPublisher(CONFIG_SAMPLE_ALL);
    final boolean success = storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID);
    assertTrue("Status should be success", success);
    assertTrue("Certificate should have been stored", publisher.isWriteCertificateCalled());
}
/**
 * Tests sampling with different probabilities. This method has a chance of false positives but with
 * <code>PROBABILISTIC_TRIES</code> number of tries the probability should be small.
 */
@Test
public void testSampleProbabilistic() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
boolean success;
// Test that with p=0.0 no certificate is stored
Properties default0 = new Properties();
default0.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
default0.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
default0.setProperty("default.pvalue", "0.0");
for (int i = 0; i < PROBABILISTIC_TRIES; i++) {
publisher = createMockedPublisher(default0);
success = storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID);
assertTrue("Status should be success", success);
assertFalse("Certificate should not have been stored, i=" + i, publisher.isWriteCertificateCalled());
}
// Test that with pvalue=1.0 all certificates are stored
Properties default1 = new Properties();
default1.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
default1.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
default1.setProperty("default.pvalue", "1.0");
for (int i = 0; i < PROBABILISTIC_TRIES; i++) {
publisher = createMockedPublisher(default1);
success = storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID);
assertTrue("Status should be success", success);
assertTrue("Certificate should have been stored, i=" + i, publisher.isWriteCertificateCalled());
publisher.reset();
}
// Test that with pvalue=0.5 at least some certificates are stored
Properties default05 = new Properties();
default05.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
default05.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
default05.setProperty("default.pvalue", "0.5");
int stored = 0;
for (int i = 0; i < PROBABILISTIC_TRIES; i++) {
publisher = createMockedPublisher(default05);
success = storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID);
assertTrue("Status should be success", success);
if (publisher.isWriteCertificateCalled()) {
stored++;
}
publisher.reset();
}
// With p=0.5 the chance of zero stores in PROBABILISTIC_TRIES runs is 2^-100.
assertTrue("At least some should have been stored", stored > 0);
}
/**
 * Tests that different profiles can have different values for pvalue.
 * The default sampling method (SAMPLE_PROBABILISTIC) is used for all profiles;
 * only the pvalue differs per profile.
 */
@Test
public void testDifferentProfiles() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
Properties config;
// Default: p=1.0, A: p=0.0, B: p=1.0
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
config.setProperty("default.pvalue", "1.0");
config.setProperty("profileid." + PROFILE_A + ".pvalue", "0.0");
config.setProperty("profileid." + PROFILE_B + ".pvalue", "1.0");
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A));
assertFalse("Certificate in profile A should not be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_B));
assertTrue("Certificate in profile B should have been stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID));
assertTrue("Certificate in any other profile should have been stored", publisher.isWriteCertificateCalled());
}
/**
 * Tests that different profiles can have different sampling methods.
 * Three configurations are checked, gradually adding per-profile overrides
 * on top of a default that never samples (p=0.0).
 */
@Test
public void testDifferentMethodsForProfiles() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
Properties config;
// Default: p=0.0
// Nothing should be stored
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
config.setProperty("default.pvalue", "0.0");
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A));
assertFalse("Certificate in profile A should not be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_B));
assertFalse("Certificate in profile B should not be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID));
assertFalse("Certificate in no profile should not be stored", publisher.isWriteCertificateCalled());
// Default: p=0.0, A: ALL
// Only from profile A should be stored
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
config.setProperty("default.pvalue", "0.0");
config.setProperty("profileid." + PROFILE_A + ".samplingmethod", "SAMPLE_ALL");
// Profile A is checked twice, presumably to verify SAMPLE_ALL stores on every
// attempt rather than just the first one — TODO confirm intent.
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A));
assertTrue("Certificate in profile A should be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A));
assertTrue("Certificate in profile A should be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_B));
assertFalse("Certificate in profile B should not be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID));
assertFalse("Certificate in other profile should not be stored", publisher.isWriteCertificateCalled());
// Default: p=0.0, A: ALL, B: p=1.0
// Only certificates from profiles A and B should be stored
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
config.setProperty("default.pvalue", "0.0");
config.setProperty("profileid." + PROFILE_A + ".samplingmethod", "SAMPLE_ALL");
config.setProperty("profileid." + PROFILE_B + ".samplingmethod", "SAMPLE_PROBABILISTIC");
config.setProperty("profileid." + PROFILE_B + ".pvalue", "1.0");
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A));
assertTrue("Certificate in profile A should be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A));
assertTrue("Certificate in profile A should be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_B));
assertTrue("Certificate in profile B should be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID));
assertFalse("Certificate in other profile should not be stored", publisher.isWriteCertificateCalled());
}
/**
 * Tests the NONE sample method, both as the default and as a per-profile
 * override of a default that samples everything.
 */
@Test
public void testSampleNone() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
Properties config;
// Default: NONE
// Nothing should be stored
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_NONE");
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID));
assertFalse("Certificate in no profile should not be stored", publisher.isWriteCertificateCalled());
// Default: ALL, A: NONE
// Everything except profile A should be stored
config = new Properties();
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_ALL");
config.setProperty("profileid." + PROFILE_A + ".samplingmethod", "SAMPLE_NONE");
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A));
assertFalse("Certificate in profile A should not be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, ANY_PROFILEID));
assertTrue("Certificate in other profile should be stored", publisher.isWriteCertificateCalled());
}
/**
 * Tests that a profile can have a different pvalue than the default but the same method will be used.
 * Profile B only overrides "pvalue"; its sampling method falls back to the default.
 */
@Test
public void testDifferentPvalues() throws Exception {
MockedCertificateSamplerCustomPublisher publisher;
Properties config = new Properties();
// Default: probabilistic(0.0), profile b: (1.0)
config.setProperty("outputfolder", TEMP_DIR.getAbsolutePath());
config.setProperty("default.samplingmethod", "SAMPLE_PROBABILISTIC");
config.setProperty("default.pvalue", "0.0");
config.setProperty("profileid." + PROFILE_B + ".pvalue", "1.0");
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_A));
assertFalse("Certificate in profile A should not be stored", publisher.isWriteCertificateCalled());
publisher = createMockedPublisher(config);
assertTrue(storeCertificate(publisher, CertificateConstants.CERT_ACTIVE, PROFILE_B));
assertTrue("Certificate in profile B should be stored", publisher.isWriteCertificateCalled());
}
/**
 * storeCertificate wrapper: calls the publisher with don't-care values for
 * every argument except the certificate status and the profile id.
 */
private boolean storeCertificate(ICustomPublisher publisher, int status, int profileId) throws PublisherException {
return publisher.storeCertificate(ANY_ADMIN, NULL_CERTIFICATE, null, null, null, null,
status,
0, System.currentTimeMillis(), 0, null, profileId, System.currentTimeMillis(), null);
}
/** Creates a real publisher initialized with the given properties. */
private CertificateSamplerCustomPublisher createPublisher(Properties properties) {
    final CertificateSamplerCustomPublisher publisher = new CertificateSamplerCustomPublisher();
    publisher.init(properties);
    return publisher;
}
/** Creates a mocked publisher initialized with the given properties. */
private MockedCertificateSamplerCustomPublisher createMockedPublisher(Properties properties) {
    final MockedCertificateSamplerCustomPublisher publisher = new MockedCertificateSamplerCustomPublisher();
    publisher.init(properties);
    return publisher;
}
} |
Gender and Media
Gender and media have been topics of academic interest for over half a century.Media
production, content, and consumption have each given rise to vibrant fields of scholarly
research on how to understand them in relation to gender. Specific epistemological
and ontological viewpoints on understanding gender and its relation to media mark
each of these fields. Theoretical and social developments now challenge the concepts
of both gender and media. Specifically, digitization and globalization challenge theoretical
concepts such as media consumption, the sex/gender dichotomy, and so on.
Using three recent cases—SheDecides, the Gender Pay Gap, and #MeToo—as illustrations,
current critical inquiries in the study of gender and media are scrutinized. First,
the traditional categories in the study of media and gender—production, content, and
consumption—are summarized, briefly explicating their major contributions to academic
thought on gender and media. Second, the challenges posed by digitization and
globalization are discussed. |
package com.manywho.services.sql.services;
import com.healthmarketscience.sqlbuilder.*;
import com.manywho.sdk.api.run.elements.type.ListFilter;
import com.manywho.sdk.api.run.elements.type.MObject;
import com.manywho.sdk.api.run.elements.type.ObjectDataType;
import com.manywho.sdk.api.run.elements.type.Property;
import com.manywho.services.sql.ServiceConfiguration;
import com.manywho.services.sql.entities.TableMetadata;
import com.manywho.services.sql.services.filter.QueryFilterConditions;
import com.manywho.services.sql.utilities.ScapeForTablesUtil;
import javax.inject.Inject;
import java.util.Set;
/**
 * Builds parameterized SQL strings (named ":property" placeholders) for the
 * basic CRUD operations and for filtered list queries. Identifier escaping is
 * delegated to {@link ScapeForTablesUtil} and filter translation to
 * {@link QueryFilterConditions}.
 */
public class QueryStrService {
    private QueryFilterConditions queryFilterConditions;
    private ScapeForTablesUtil scapeForTablesUtil;
    private AliasService aliasService;

    @Inject
    public QueryStrService(QueryFilterConditions queryFilterConditions, ScapeForTablesUtil scapeForTablesUtil,
                           AliasService aliasService) {
        this.queryFilterConditions = queryFilterConditions;
        this.scapeForTablesUtil = scapeForTablesUtil;
        this.aliasService = aliasService;
    }

    /**
     * Builds a SELECT of all columns for the single row identified by the
     * given primary key(s); each key becomes a ":key" named parameter.
     */
    public String createQueryWithParametersForSelectByPrimaryKey(TableMetadata tableMetadata, Set<String> primaryKeyNames, ServiceConfiguration configuration) {
        SelectQuery selectQuery = new SelectQuery().addAllColumns()
                .addCustomFromTable(scapeForTablesUtil.scapeTableName(configuration.getDatabaseType(), tableMetadata.getSchemaName(), tableMetadata.getTableName()));
        for (String key : primaryKeyNames) {
            selectQuery.addCondition(BinaryCondition.equalTo(new CustomSql(ScapeForTablesUtil.scapeCollumnName(configuration.getDatabaseType(), key)), new CustomSql(":" + key)));
        }
        return selectQuery.validate().toString();
    }

    /**
     * Builds a DELETE for the single row identified by the given primary
     * key(s); each key becomes a ":key" named parameter.
     */
    public String createQueryWithParametersForDeleteByPrimaryKey(TableMetadata tableMetadata, Set<String> primaryKeyNames, ServiceConfiguration configuration) {
        // Renamed from the misleading "selectQuery": this builds a DELETE.
        DeleteQuery deleteQuery = new DeleteQuery(scapeForTablesUtil.scapeTableName(configuration.getDatabaseType(), tableMetadata.getSchemaName(), tableMetadata.getTableName()));
        for (String key : primaryKeyNames) {
            deleteQuery.addCondition(BinaryCondition.equalTo(new CustomSql(ScapeForTablesUtil.scapeCollumnName(configuration.getDatabaseType(), key)), new CustomSql(":" + key)));
        }
        return deleteQuery.validate().toString();
    }

    /**
     * Builds an UPDATE that sets every non-autoincrement property of the
     * object and matches the row by the given primary key(s).
     */
    public String createQueryWithParametersForUpdate(MObject mObject, TableMetadata tableMetadata, Set<String> primaryKeyNames, ServiceConfiguration configuration) {
        UpdateQuery updateQuery = new UpdateQuery(
                scapeForTablesUtil.scapeTableName(configuration.getDatabaseType(), tableMetadata.getSchemaName(), tableMetadata.getTableName()));
        for (Property p : mObject.getProperties()) {
            // Autoincrement columns are managed by the database and never set.
            if (!tableMetadata.isColumnAutoincrement(p.getDeveloperName())) {
                updateQuery.addCustomSetClause(new CustomSql(ScapeForTablesUtil.scapeCollumnName(configuration.getDatabaseType(), p.getDeveloperName())), new CustomSql(":" + p.getDeveloperName()));
            }
        }
        for (String key : primaryKeyNames) {
            updateQuery.addCondition(BinaryCondition.equalTo(new CustomSql(ScapeForTablesUtil.scapeCollumnName(configuration.getDatabaseType(), key)), new CustomSql(":" + key)));
        }
        return updateQuery.validate().toString();
    }

    /**
     * Builds an INSERT with a column and named parameter for every
     * non-autoincrement property of the object.
     */
    public String createQueryWithParametersForInsert(MObject mObject, TableMetadata tableMetadata, ServiceConfiguration configuration) {
        InsertQuery insertQuery = new InsertQuery(
                scapeForTablesUtil.scapeTableName(configuration.getDatabaseType(), tableMetadata.getSchemaName(), tableMetadata.getTableName()));
        for (Property p : mObject.getProperties()) {
            // Autoincrement columns are managed by the database and never set.
            if (!tableMetadata.isColumnAutoincrement(p.getDeveloperName())) {
                insertQuery.addCustomColumn(new CustomSql(ScapeForTablesUtil.scapeCollumnName(configuration.getDatabaseType(), p.getDeveloperName())), new CustomSql(":" + p.getDeveloperName()));
            }
        }
        return insertQuery.validate().toString();
    }

    /**
     * Builds a SELECT implementing the given list filter: search, where
     * clauses, paging (offset/limit) and ordering. Property aliases are
     * resolved back to their original column names first.
     */
    public String getSqlFromFilter(ServiceConfiguration configuration, ObjectDataType objectDataType, ListFilter filter, TableMetadata tableMetadata) {
        SelectQuery selectQuery = new SelectQuery().addAllColumns()
                .addCustomFromTable(scapeForTablesUtil.scapeTableName(configuration.getDatabaseType(),
                        configuration.getDatabaseSchema(), objectDataType.getDeveloperName()));
        aliasService.setFiltersOriginalNames(tableMetadata, filter);
        objectDataType.setProperties(aliasService.setPropertiesOriginalName(tableMetadata, objectDataType.getProperties()));
        queryFilterConditions.addSearch(selectQuery, filter.getSearch(), objectDataType.getProperties(), tableMetadata.getColumnsDatabaseType(), configuration.getDatabaseType());
        queryFilterConditions.addWhere(selectQuery, filter.getWhere(), filter.getComparisonType(), configuration.getDatabaseType(), tableMetadata);
        queryFilterConditions.addOffset(selectQuery, configuration.getDatabaseType(), filter.getOffset(), filter.getLimit());
        queryFilterConditions.addOrderBy(selectQuery, filter.getOrderByPropertyDeveloperName(),
                filter.getOrderByDirectionType(), tableMetadata, configuration.getDatabaseType());
        return selectQuery.validate().toString();
    }
}
|
def crates_vendor_deps_targets():
    """Declares the platform config_settings and the buildifier alias.

    Defines one public `config_setting` per supported (os, cpu) pair and a
    public `buildifier` alias that selects the matching prebuilt binary.
    """

    # (target name, constraint values) for every supported platform. Windows
    # intentionally matches on the OS only.
    platform_constraints = [
        ("linux_amd64", ["@platforms//os:linux", "@platforms//cpu:x86_64"]),
        ("linux_arm64", ["@platforms//os:linux", "@platforms//cpu:arm64"]),
        ("macos_amd64", ["@platforms//os:macos", "@platforms//cpu:x86_64"]),
        ("macos_arm64", ["@platforms//os:macos", "@platforms//cpu:arm64"]),
        ("windows", ["@platforms//os:windows"]),
    ]

    for setting_name, constraints in platform_constraints:
        native.config_setting(
            name = setting_name,
            constraint_values = constraints,
            visibility = ["//visibility:public"],
        )

    native.alias(
        name = "buildifier",
        actual = select({
            ":linux_amd64": "@cargo_bazel.buildifier-linux-amd64//file",
            ":linux_arm64": "@cargo_bazel.buildifier-linux-arm64//file",
            ":macos_amd64": "@cargo_bazel.buildifier-darwin-amd64//file",
            ":macos_arm64": "@cargo_bazel.buildifier-darwin-arm64//file",
            ":windows": "@cargo_bazel.buildifier-windows-amd64.exe//file",
        }),
        visibility = ["//visibility:public"],
    )
In indigenous communities like Nuyoó, where almost every family has members who have migrated for work, low-cost phone calls are seen as an essential service
Until this month, Celia Pérez could only afford a brief weekly call to her husband, Rubén Martínez, who left their remote rural community in Mexico two years ago to find a job in the United States.
Pérez, 25, was pregnant with their third child when Martínez headed north; he made it to New Jersey and regularly wires home money from his construction job, but the long separation and infrequent calls have been tough on everyone.
Now, a legal triumph by indigenous activists has cracked the monopoly enjoyed by Mexico’s powerful telephone magnates – including the world’s richest man, Carlos Slim – and opened the door to new services which will slash the cost of communication.
Welcome to earthships: an off-the-grid solution to Canada's housing crisis? Read more
Indigenous Communities Telecommunications (TIC) last month won a long battle with the government to become the world’s first not-for-profit group to be granted a mobile phone concession.
The social cooperative has licence to install and operate mobile phone networks in 356 marginalised municipalities in five of the country’s poorest states: Chiapas, Guerrero, Oaxaca, Puebla and Veracruz.
It means couples like Pérez and Martinez will be able to talk and text on their mobiles for a fraction of the cost currently charged by phone booth operators.
“I am so excited, I’ll be able to talk to my husband in private when I want, and the children can have proper conversations with their dad. The calls are so much cheaper that it feels like a gift,” said Pérez, clutching her mobile phone as she queued with excited neighbours to register for the new service.
Globally, 95% of the world’s population live in areas covered by mobile phone networks, according to the UN agency for information and communication technologies. But that leaves at least 400 million people without any mobile coverage, and another 2 billion or so without access to affordable services.
In Mexico, as in the rest of the world, provision is best in towns and cities where dense populations can earn big profits for phone providers.
But rural communities have been marginalized by major telephone operators due to high infrastructure costs and low profit margins. Most are poor and indigenous populations, still relying on exorbitant landline services – or even walkie-talkie radios.
Nuyoó is a remote district in the fertile Mixteca region of Oaxaca, where about 5,000 habitants live in communities amid pine-forested mountains threaded with waterfalls.
The region is renowned for its organic coffee and honey, but families struggle to live off their produce alone. Like Pérez, almost every household has family members working in the US, Canada or cities within Mexico. Opposite the town’s imposing 17th-century church, a Western Union office underlines the importance of remittances to the community’s survival.
A handful of public phone booths are hosted in the village’s few shops. Until recently, Pérez paid 15 pesos ($0.80) a minute to call her husband. Once a month, she would travel two hours to Tlaxiaco – the nearest town with mobile phone signal and 3G internet – to send him photos of their young children.
It was communities like Nuyoó which in 2011 inspired the activist Peter Bloom, a social entrepreneur and founder of NGO Rhizomatica, to petition the Federal Institute of Telecommunication.
Facebook Twitter Pinterest Fortino Rojas, 65, the education councillor, makes the first call from Santiago Nuyoó. Photograph: Nina Lakhani
An experimental concession was awarded in May 2014, allowing affordable, community-owned telephone services to be installed in 16 communities in Oaxaca over the next two years.
In July 2016, TIC – which works alongside Rhizomatica - was granted the first-ever permanent licence.
“Lack of access to communication is part and parcel of the general neglect – like poor health and education services – in many indigenous communities,” said Bloom. “Often, they try to resolve the problem with the government and big telecoms companies, but almost always in vain. Now, there is a legal pathway for communities to own, install and operate their own telephone networks as part of a cooperative.”
Nuyoó is the first community to benefit from the July victory.
In all, it cost 180,000 pesos ($10,000) for the equipment and installation – a third of what one multinational provider wanted to charge.
Subscription is free, but each registered user must pay 40 pesos a month – 15 goes to TIC to cover overheads and serious repairs – and the rest stays in the community to cover the upfront running costs.
Calls within the network – which includes 17 communities so far – are free. International and national calls are cheap: one peso will buy five minutes to the US.
These long-distance calls are transmitted over the internet – avoiding tolls charged by telephone companies. Spectra Telecomunicaciones, based in Tlaxiaco, buys internet from large providers but uses cheap infrastructure and innovative technologies to slash costs for communities like Nuyoó. It means they can now also access low-cost Wi-Fi.
On a recent afternoon, an expectant crowd gathered in the leafy main square to witness the first calls. Lilian Cruz, 19, searched for a socket to charge her phone, eager to get connected. “I can’t wait to be able to speak to my sister in Mexico City and my aunt in Tijuana – she’s going to help me find a job.”
The benefits go way beyond family ties.
Facebook Twitter Pinterest Retired community nurse Agustina Sarabia, 61, in her small pharmacy Santiago Nuyoó: ‘Now, communication will be private and fast.’ Photograph: Nina Lakhani
Agustina Sarabia, 61, a retired community nurse who runs a modest pharmacy two dusty blocks from the square, believes the new phone service will improve patient care.
“I would have to discuss patient information with doctors in Tlaxiaco in the phone booths. This is a small place so everyone knows everyone else. Now, communication will be private and fast,” said Sarabia.
The UN considers reliable and affordable mobile phone and internet services essential to achieving development goals. In 2015, Mexico ranked a disappointing 95th out of 167 countries for phone and internet coverage – overtaken by Iran and Mongolia since 2010. Rhizomatica is currently supporting groups in Brazil, Nicaragua and Somalia to explore similar services.
For the people of Nuyoó, the scheme is a huge step towards self-determination.
Fortino Rojas, 65, the village’s education councillor, was at the head of the line to make the first international call from the community – to his sister in Oregon.
“We want to preserve our culture and tradition but the pueblo can now develop, we can connect with the world, at last,” he said. “We need to communicate with our families; this is as important to us as good transport and health services.” |
<filename>src/main/java/pinacolada/powers/common/ImpairedPower.java
package pinacolada.powers.common;
import com.megacrit.cardcrawl.core.AbstractCreature;
import com.megacrit.cardcrawl.orbs.AbstractOrb;
import pinacolada.interfaces.subscribers.OnOrbApplyFocusSubscriber;
import pinacolada.powers.PCLCombatStats;
import pinacolada.powers.PCLPower;
import pinacolada.utilities.PCLGameUtilities;
public class ImpairedPower extends PCLPower implements OnOrbApplyFocusSubscriber
{
public static final String POWER_ID = CreateFullID(ImpairedPower.class);
public static final int ORB_MULTIPLIER = 50;
public boolean justApplied;
@Override
public void onInitialApplication()
{
super.onInitialApplication();
PCLCombatStats.onOrbApplyFocus.Subscribe(this);
}
@Override
public void onRemove()
{
super.onRemove();
PCLCombatStats.onOrbApplyFocus.Unsubscribe(this);
}
public static int GetOrbMultiplier()
{
return (ORB_MULTIPLIER + PCLCombatStats.GetPlayerEffectBonus(POWER_ID));
}
public ImpairedPower(AbstractCreature owner, int amount) {
this(owner, amount, false);
}
public ImpairedPower(AbstractCreature owner, int amount, boolean isSourceMonster)
{
super(owner, POWER_ID);
justApplied = isSourceMonster;
Initialize(amount, PowerType.DEBUFF, true);
}
@Override
public void atStartOfTurnPostDraw()
{
super.atStartOfTurnPostDraw();
if (justApplied) {
justApplied = false;
}
else {
ReducePower(1);
}
}
@Override
public String GetUpdatedDescription() {
return FormatDescription(0,GetOrbMultiplier(),amount,amount == 1 ? powerStrings.DESCRIPTIONS[1] : powerStrings.DESCRIPTIONS[2]);
}
@Override
public void OnApplyFocus(AbstractOrb orb) {
if (PCLGameUtilities.CanOrbApplyFocus(orb)) {
orb.passiveAmount *= Math.max(0,GetOrbMultiplier() / 100f);
if (PCLGameUtilities.CanOrbApplyFocusToEvoke(orb)) {
orb.evokeAmount *= Math.max(0,GetOrbMultiplier() / 100f);
}
}
}
} |
Vermaelen made just 14 Premier League appearances last season
Arsenal have agreed a fee of about £15m with Barcelona for the sale of captain Thomas Vermaelen.
The 28-year-old Belgium centre-back must now agree personal terms with the Catalan giants and pass a medical.
Vermaelen was a target for Manchester United but they ended their interest after refusing Arsenal's demand to receive a player as part of the deal.
The move to Barcelona is likely to be completed this weekend, ending Vermaelen's five-year stay in London.
Thomas Vermaelen's career Nationality Belgian Age 28 Current club Arsenal (110 Premier League appearances, 13 goals) Previous clubs Ajax, Waalwijk Club honours Dutch league (2004), Dutch cup (2006, 2007), Dutch Super Cup (2006, 2007), FA Cup (2014) International caps 47
Vermaelen joined Arsenal from Ajax for £10m in 2009 and has made 150 appearances in all competitions.
But he was troubled by a series of injuries and lost his place at centre-back last season as Arsenal manager Arsene Wenger favoured Per Mertesacker and Laurent Koscielny as his first-choice pairing.
Vermaelen's form and fitness meant he made just 21 appearances in all competitions last term and Wenger admitted this week that the player needed to move on.
"He could leave, it could happen. I've said that since the start of pre-season," said Wenger.
"He needs to play now. He has shown an outstanding attitude but he is in a position where we wouldn't stand in his way if he finds an interesting opportunity."
Vermaelen would become Barca's second centre-back signing this summer after Valencia's Jeremy Mathieu, who had been their first recruit in that position since 2009. |
def amorphous_carbon(cls):
    """Return the SIO namespace term for amorphous carbon (SIO_010791)."""
    return cls._namespace_SIO('SIO_010791')
/**
 * ANTLR-generated DFA for parser decision 19: disambiguates the two
 * alternatives of the {@code castExpression} rule (a parenthesized cast
 * versus a plain postfix expression).  The {@code DFA19_*} transition
 * tables are generated static arrays declared elsewhere in this file.
 */
class DFA19 extends DFA {

    /**
     * Wires the generated transition tables into the DFA base class.
     *
     * @param recognizer the recognizer this DFA predicts for
     */
    public DFA19(BaseRecognizer recognizer) {
        this.recognizer = recognizer;
        this.decisionNumber = 19;
        this.eot = DFA19_eot;
        this.eof = DFA19_eof;
        this.min = DFA19_min;
        this.max = DFA19_max;
        this.accept = DFA19_accept;
        this.special = DFA19_special;
        this.transition = DFA19_transition;
    }

    /** Human-readable description of the grammar decision (used in error reporting). */
    public String getDescription() {
        return "181:1: castExpression : ( '(' type ')' postfixExpression -> ^( CAST type postfixExpression ) | postfixExpression );";
    }
}
def unnormalized_G(branch_lengths, i, j):
    """Fraction of total branch length carried by branches present in
    mask ``i`` but absent from mask ``j`` (element-wise boolean arrays)."""
    only_in_i = logical_and(i, logical_not(j))
    return (branch_lengths * only_in_i).sum() / branch_lengths.sum()
import './style/index.less';
import Tabs from 'yoshino/lib/Tabs';
export default Tabs |
def floyd_warshall_predecessor_and_distance(self):
    """Compute all-pairs shortest paths with a parallel Floyd-Warshall.

    The distance and predecessor matrices are backed by shared-memory
    buffers so that ``self.num`` worker processes can update them in
    place; the workers synchronize on a barrier inside
    ``floyd_warshall_kernel``.  A second pool of workers then
    reconstructs explicit node paths from the predecessor matrix.

    Returns:
        tuple: ``(nonempty_shortest_path, shortest_path_length)`` — the
        first maps each node to its non-empty shortest paths, the second
        maps each node to the corresponding path lengths.
    """
    # Seed dist/pred matrices from the graph's adjacency information.
    dist, pred = self.floyd_warshall_initialization()

    # Shared float64 buffer backing the distance matrix.
    # NOTE(review): assumes a square matrix (shape[0] == shape[1]).
    shared_d = mp.sharedctypes.RawArray(ctypes.c_double, dist.shape[0]**2)
    dist_shared = np.frombuffer(shared_d, 'float64').reshape(dist.shape)
    dist_shared[:] = dist

    # Same arrangement for the predecessor matrix (stored as float64
    # because the RawArray is typed with c_double).
    shared_p = mp.sharedctypes.RawArray(ctypes.c_double,pred.shape[0]**2)
    pred_shared = np.frombuffer(shared_p, 'float64').reshape(pred.shape)
    pred_shared[:] = pred

    n = len(self.nodes())

    # Split the n nodes into self.num contiguous index ranges; chunk[p]
    # holds the (start, stop) row slice worker p is responsible for.
    chunk = [(0, int(n / self.num))]
    node_chunks = chunk_it(list(self.nodes()), self.num)

    for i in range(1, self.num):
        chunk.append((chunk[i - 1][1],
                      chunk[i - 1][1] + len(node_chunks[i])))

    # Barrier shared by all workers — presumably keeps them in lock-step
    # across Floyd-Warshall iterations (kernel body not shown here).
    barrier = mp.Barrier(self.num)
    processes = [
        mp.Process( target=self.floyd_warshall_kernel,
                    args=(dist_shared, pred_shared, chunk[p][0], chunk[p][1], barrier))
        for p in range(self.num) ]

    for proc in processes:
        proc.start()

    for proc in processes:
        proc.join()

    # Reconstruct explicit paths from the predecessor matrix, again in
    # parallel; results are collected in a manager-backed shared dict.
    all_shortest_path = self.manager.dict()

    processes = [
        mp.Process( target=self.measure_iteration,
                    args=(list(map(self.ids_reversed.get, node_chunks[p])),
                          all_shortest_path, self.construct_path_kernel, pred_shared) )
        for p in range(self.num) ]

    for proc in processes:
        proc.start()

    for proc in processes:
        proc.join()

    # Drop unreachable targets (empty paths).
    nonempty_shortest_path = {}
    for k in all_shortest_path.keys():
        nonempty_shortest_path[k] = {
            key: value
            for key, value in all_shortest_path[k].items() if value
        }

    # Look up each surviving path's length in the shared distance matrix
    # (distance from the path's first node to its last node).
    shortest_path_length = {}
    for i in list(self.H):
        shortest_path_length[self.ids[i]] = {}
        for key, value in nonempty_shortest_path[self.ids[i]].items():
            length_path = dist_shared[self.ids_reversed[value[0]],
                                      self.ids_reversed[value[-1]]]
            shortest_path_length[self.ids[i]][key] = length_path

    return nonempty_shortest_path, shortest_path_length
Polarized Raman Scattering from Small Single Crystals
Polarized Raman scattering from single crystals can provide definitive evidence for assigning symmetry species to vibrational modes. Unfortunately, reflection from crystal faces and surface imperfections scrambles the polarization of the scattered radiation. Reflection problems can be avoided for large crystals by polishing the crystal surfaces. For small crystals this is not possible. Consequently, studies of small crystals have generally utilized 180° viewing, which minimizes reflection problems. A simple and inexpensive technique for studying the polarization of Raman scattering (90° viewing) from small single crystals is presented here. |
<reponame>sbardian/portfolio
import React from "react"
// eslint-disable-next-line
import { render } from "@testing-library/react"
import Article from "./article"
// Static fixture component: a heading plus a lorem-ipsum paragraph, rendered
// as the Article's children in the test below.
const ArticleData: React.FC = () => (
  <div>
    <h1>Mock Article</h1>
    <p>
      Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod
      tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim
      veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea
      commodo consequat. Duis aute irure dolor in reprehenderit in voluptate
      velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat
      cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id
      est laborum.
    </p>
  </div>
)
test("Displays an Article with its children", async () => {
  const { findByText, queryByText, queryByTestId } = render(
    <Article name="test-article-id">
      <ArticleData />
    </Article>
  )

  // The generic article element exists and carries the id passed via `name`.
  const genericArticle = queryByTestId("generic-article")
  expect(genericArticle).toBeTruthy()
  expect(genericArticle).toHaveAttribute("id", "test-article-id")
  expect(queryByTestId("article-wrapper")).toBeTruthy()

  // Children rendered: exact match for the heading text…
  expect(queryByText("Mock Article")).toBeTruthy()
  // …and a substring (regex) match inside the paragraph. The original
  // asserted on the unresolved Promise returned by findByText — a Promise
  // is always truthy, so the assertion was vacuous; await the query so it
  // actually inspects the DOM.
  expect(await findByText(/laborum/)).toBeTruthy()
})
|
// Looks up if the origin matches one of the patterns
// provided in Options.AllowOrigins patterns.
func (o *Options) IsOriginAllowed(origin string) (allowed bool) {
for _, pattern := range o.AllowOrigins {
allowed, _ = regexp.MatchString(pattern, origin)
if allowed {
return
}
}
return
} |
<filename>src/main/java/svenhjol/charm/mixin/core/OverrideBundleTooltipGridsizeMixin.java
package svenhjol.charm.mixin.core;
import net.minecraft.client.gui.screens.inventory.tooltip.ClientBundleTooltip;
import net.minecraft.world.inventory.tooltip.BundleTooltip;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import svenhjol.charm.client.ICustomGridsize;
@Mixin(ClientBundleTooltip.class)
public class OverrideBundleTooltipGridsizeMixin {
    /**
     * CharmItemTooltips have a customizable X and Y gridSize.
     *
     * The constructor hook stashes the BundleTooltip being rendered; the
     * gridSizeX/gridSizeY hooks then override the vanilla return value
     * whenever that tooltip implements ICustomGridsize.
     *
     * NOTE(review): the field is static, so it always reflects the most
     * recently constructed tooltip — confirm tooltips are built and queried
     * one at a time on the render thread.
     */
    private static BundleTooltip storedBundleTooltip;

    // Capture the wrapped tooltip at the end of ClientBundleTooltip's constructor.
    @Inject(
        method = "<init>",
        at = @At("TAIL")
    )
    private void hookInit(BundleTooltip bundleTooltip, CallbackInfo ci) {
        storedBundleTooltip = bundleTooltip;
    }

    // Override the returned grid width for custom-gridsize tooltips.
    @Inject(
        method = "gridSizeX",
        at = @At("RETURN"),
        cancellable = true
    )
    private void hookGridSizeX(CallbackInfoReturnable<Integer> cir) {
        if (storedBundleTooltip instanceof ICustomGridsize) {
            cir.setReturnValue(((ICustomGridsize) storedBundleTooltip).gridSizeX());
        }
    }

    // Override the returned grid height for custom-gridsize tooltips.
    @Inject(
        method = "gridSizeY",
        at = @At("RETURN"),
        cancellable = true
    )
    private void hookGridSizeY(CallbackInfoReturnable<Integer> cir) {
        if (storedBundleTooltip instanceof ICustomGridsize) {
            cir.setReturnValue(((ICustomGridsize) storedBundleTooltip).gridSizeY());
        }
    }
}
|
<filename>src/components/layout/index.tsx
import React, { FunctionComponent } from 'react'
import { Github } from '../icons'
type Props = {
children: React.ReactElement
}
export const Layout: FunctionComponent<Props> = ({ children }) => {
return (
<>
<div className='flex justify-center fixed right-0 left-0 bottom-0 z-10'>
<a href='https://github.com/thousandsofraccoons/router5-breadcrumbs-react'>
<Github />
</a>
</div>
{React.cloneElement(children, { customProp: 'Hello' })}
</>
)
}
|
/**
* Enable or disable the SmsReceiver as appropriate. Pre-KLP we use this receiver for
* receiving incoming SMS messages. For KLP+ this receiver is not used when running as the
* primary user and the SmsDeliverReceiver is used for receiving incoming SMS messages.
* When running as a secondary user, this receiver is still used to trigger the incoming
* notification.
*/
public static void updateSmsReceiveHandler(final Context context) {
boolean smsReceiverEnabled;
boolean mmsWapPushReceiverEnabled;
boolean respondViaMessageEnabled;
boolean broadcastAbortEnabled;
if (OsUtil.isAtLeastKLP()) {
smsReceiverEnabled = OsUtil.isSecondaryUser();
mmsWapPushReceiverEnabled = false;
respondViaMessageEnabled = true;
broadcastAbortEnabled = false;
} else {
final boolean carrierSmsEnabled = PhoneUtils.getDefault().isSmsEnabled();
smsReceiverEnabled = carrierSmsEnabled;
mmsWapPushReceiverEnabled = carrierSmsEnabled;
respondViaMessageEnabled = carrierSmsEnabled;
broadcastAbortEnabled = carrierSmsEnabled;
}
final PackageManager packageManager = context.getPackageManager();
final boolean logv = LogUtil.isLoggable(TAG, LogUtil.VERBOSE);
if (smsReceiverEnabled) {
if (logv) {
LogUtil.v(TAG, "Enabling SMS message receiving");
}
packageManager.setComponentEnabledSetting(
new ComponentName(context, SmsReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP);
} else {
if (logv) {
LogUtil.v(TAG, "Disabling SMS message receiving");
}
packageManager.setComponentEnabledSetting(
new ComponentName(context, SmsReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP);
}
if (mmsWapPushReceiverEnabled) {
if (logv) {
LogUtil.v(TAG, "Enabling MMS message receiving");
}
packageManager.setComponentEnabledSetting(
new ComponentName(context, MmsWapPushReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP);
} else {
if (logv) {
LogUtil.v(TAG, "Disabling MMS message receiving");
}
packageManager.setComponentEnabledSetting(
new ComponentName(context, MmsWapPushReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP);
}
if (broadcastAbortEnabled) {
if (logv) {
LogUtil.v(TAG, "Enabling SMS/MMS broadcast abort");
}
packageManager.setComponentEnabledSetting(
new ComponentName(context, AbortSmsReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP);
packageManager.setComponentEnabledSetting(
new ComponentName(context, AbortMmsWapPushReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP);
} else {
if (logv) {
LogUtil.v(TAG, "Disabling SMS/MMS broadcast abort");
}
packageManager.setComponentEnabledSetting(
new ComponentName(context, AbortSmsReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP);
packageManager.setComponentEnabledSetting(
new ComponentName(context, AbortMmsWapPushReceiver.class),
PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP);
}
if (respondViaMessageEnabled) {
if (logv) {
LogUtil.v(TAG, "Enabling respond via message intent");
}
packageManager.setComponentEnabledSetting(
new ComponentName(context, NoConfirmationSmsSendService.class),
PackageManager.COMPONENT_ENABLED_STATE_ENABLED, PackageManager.DONT_KILL_APP);
} else {
if (logv) {
LogUtil.v(TAG, "Disabling respond via message intent");
}
packageManager.setComponentEnabledSetting(
new ComponentName(context, NoConfirmationSmsSendService.class),
PackageManager.COMPONENT_ENABLED_STATE_DISABLED, PackageManager.DONT_KILL_APP);
}
} |
<reponame>gangadhar-kadam/sapphite_lib
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes import msgprint, _
from webnotes.utils import flt, cint, cstr
from webnotes.model.meta import get_field_precision
# Maps each supported comparison operator to the operator/phrase shown in
# the error message when validation FAILS (i.e. the logical negation of the
# expected condition, from the user's point of view).
error_condition_map = {
    "=": "!=",
    "!=": "=",
    "<": ">=",
    ">": "<=",
    ">=": "<",
    "<=": ">",
    "in": _("not in"),
    "not in": _("in"),
    "^": _("cannot start with"),
}

# Raised by validate_table_has_rows when a mandatory child table is empty.
class EmptyTableError(webnotes.ValidationError): pass
class DocListController(object):
    """Base controller wrapping a parent document (``doc``) and its child
    rows (``doclist``); provides shared validation and float-precision
    helpers for webnotes doctype controllers."""

    def __init__(self, doc, doclist):
        self.doc, self.doclist = doc, doclist
        # Optional subclass hook that runs on construction.
        if hasattr(self, "setup"):
            self.setup()

    @property
    def meta(self):
        # Doctype metadata, loaded lazily and cached on first access.
        if not hasattr(self, "_meta"):
            self._meta = webnotes.get_doctype(self.doc.doctype)
        return self._meta

    def validate_value(self, fieldname, condition, val2, doc=None, raise_exception=None):
        """Check that ``doc``'s value for ``fieldname`` satisfies
        ``condition`` against ``val2`` (e.g. ``"<="``); otherwise show an
        error built from the *negated* condition in ``error_condition_map``."""
        if not doc:
            doc = self.doc

        df = self.meta.get_field(fieldname, parent=doc.doctype)
        val1 = doc.fields.get(fieldname)

        # Coerce both sides to a comparable numeric type, honoring the
        # field's configured precision for float/currency fields.
        if df.fieldtype in ("Currency", "Float"):
            val1 = flt(val1, self.precision(df.fieldname, doc.parentfield or None))
            val2 = flt(val2, self.precision(df.fieldname, doc.parentfield or None))
        elif df.fieldtype in ("Int", "Check"):
            val1 = cint(val1)
            val2 = cint(val2)

        if not webnotes.compare(val1, condition, val2):
            msg = _("Error") + ": "
            if doc.parentfield:
                # Child-table row: point the user at the offending row index.
                msg += _("Row") + (" # %d: " % doc.idx)
            msg += _(self.meta.get_label(fieldname, parent=doc.doctype)) \
                + " " + error_condition_map.get(condition, "") + " " + cstr(val2)

            # raise passed exception or True
            msgprint(msg, raise_exception=raise_exception or True)

    def validate_table_has_rows(self, parentfield, raise_exception=None):
        """Ensure the child table ``parentfield`` has at least one row;
        otherwise raise ``EmptyTableError`` (or ``raise_exception``)."""
        if not self.doclist.get({"parentfield": parentfield}):
            label = self.meta.get_label(parentfield)
            msgprint(_("Error") + ": " + _(label) + " " + _("cannot be empty"),
                raise_exception=raise_exception or EmptyTableError)

    def round_floats_in(self, doc, fieldnames=None):
        """Round ``doc``'s float/currency fields (or just ``fieldnames``)
        in place to their configured precision."""
        if not fieldnames:
            # Default: every Currency/Float field defined on doc's doctype.
            fieldnames = [df.fieldname for df in self.meta.get({"doctype": "DocField", "parent": doc.doctype,
                "fieldtype": ["in", ["Currency", "Float"]]})]

        for fieldname in fieldnames:
            doc.fields[fieldname] = flt(doc.fields.get(fieldname), self.precision(fieldname, doc.parentfield))

    def _process(self, parentfield):
        """Normalize ``parentfield`` to a plain string: accepts a Document,
        a dict with a ``parentfield`` key, or the string itself."""
        from webnotes.model.doc import Document

        if isinstance(parentfield, Document):
            parentfield = parentfield.parentfield
        elif isinstance(parentfield, dict):
            parentfield = parentfield.get("parentfield")

        return parentfield

    def precision(self, fieldname, parentfield=None):
        """Return (and cache) the rounding precision for ``fieldname``,
        optionally scoped to a child table via ``parentfield``.

        Currency fields may derive precision from their currency
        ``options``; Float fields use the system default (3 if unset)."""
        parentfield = self._process(parentfield)

        if not hasattr(self, "_precision"):
            # Cache layout:
            #   "default": system-wide fallback precision
            #   "options": per-currency precision
            #   <parentfield or "main">: {fieldname: resolved precision}
            self._precision = webnotes._dict({
                "default": cint(webnotes.conn.get_default("float_precision")) or 3,
                "options": {}
            })

        if self._precision.setdefault(parentfield or "main", {}).get(fieldname) is None:
            df = self.meta.get_field(fieldname, parentfield=parentfield)

            if df.fieldtype == "Currency" and df.options and not self._precision.options.get(df.options):
                self._precision.options[df.options] = get_field_precision(df, self.doc)

            if df.fieldtype == "Currency":
                self._precision[parentfield or "main"][fieldname] = cint(self._precision.options.get(df.options)) or \
                    self._precision.default
            elif df.fieldtype == "Float":
                self._precision[parentfield or "main"][fieldname] = self._precision.default

        return self._precision[parentfield or "main"][fieldname]
MG has flagged big changes to its Australian operations following an almost non-existent opening 12 months of local trading, believed to stem from setbacks in gaining compliance to sell cars.
The ambitious Chinese brand says it sold “fewer than 50 vehicles” since being introduced to the Australian market last year, something difficult to ascertain as MG (along with some other smaller brands) does not volunteer its sales figures to the Federal Chamber of Automotive Industries. MG (or Morris Garages) re-emerged on the local market under the ownership of one of China's largest car makers, SAIC Motor Corporation Limited.
Fairfax Media has been told by industry sources that MG has been prevented in selling its lone offering, the MG6 sedan, because it cannot meet Australian compliance on three separate issues.
Official documents show the car maker was given conditional approval to import the MG6 in 2012. However, it's understood additional mandatory checks on the cars found issues that prevented their sale.
“SAIC Motor Corporation Ltd received approval for the MG6 variants in 2012, with some amendments to the approval made subsequently as per standard certification processes,” a spokesperson for the Department of Infrastructure and Regional Development said.
“Any queries relating to the supply of vehicles or vehicle specifications should be referred to the manufacturer or distributor.”
MG Australia spokesman Andrew Shaw vehemently denied the MG6 had shortfalls in local compliance, and instead put the lack of sales down to the model’s manual-only transmission and troubles around local franchising.
“Because we’re backed by the Chinese government and we’re not a private company, we are very aware of all of our responsibilities and we are making sure that we are keeping up with compliance all the time … We’re certainly keeping on top of all of it,” he said.
Asked specifically whether the compliance issues had prevented MG from selling vehicles in Australia, Shaw said: “No, not at all. We’re on top of all of our compliance; we don’t have a single problem with compliance in Australia at this very moment.
“I’m not going to be specific, because there’s no need to be, all I’m saying is I think you wouldn’t be able to find us being uncompliant. It’s not been an issue really.”
Millions of dollars worth of MG stock - upwards of 300 cars - is believed to be currently lying dormant in locations including the brand’s lone Australian dealership, a former Rick Damelian site on Sydney’s Parramatta Road.
Shaw said none of those cars would be distributed to new dealers to sell to customers.
“Those cars in Petersham are only going to be used for service vehicles and demonstrators, we’re going to insist that dealers are only buying MY14 [model year 2014] stock to sell to their customers,” he said.
“It’s our problem to get rid of the remaining cars. All I can tell you is we are telling dealers we would like them to order MY14 cars to sell.”
To curtail the problem, Shaw said the local arm is preparing to take order of new vehicles in the coming weeks.
Among the new models to be introduced this year will be an automatic version of the MG6 as well as the diminutive MG3 city car, expected to be priced from about $16,000, plus on-road and dealer costs.
“What MG have done is they’ve now sub-contracted all of the right-hand drive manufacturing from the middle of this year to a manufacturer in Thailand,” he explained. “That will speed everything up tremendously, the current vehicles are built in Shanghai.”
Shaw said plans were afoot to expand MG’s dealership network to more than 10 different sites.
Despite the setbacks, Shaw insisted MG was in Australia for the long haul.
“We’ve got owners and a manufacturer with deep pockets and a lot of patience, and they know they need to bring in more products in line with what Australians want to drive,” he said.
“We’re really waiting for that second car to arrive so that we have more than the one-car offering, which has obviously been difficult to sell.” |
import math
def count(a, b, P):
    """Count points of P lying within (<= 1.0001) distance of the centre of
    the unit circle whose boundary passes through points a and b; returns 0
    when no such circle exists (a == b or the points are more than 2 apart).

    Only one of the two candidate centres is considered; the caller covers
    the other side by also passing the pair in the opposite order."""
    if abs(a - b) > 2 or a == b:
        return 0
    mid = (a + b) / 2
    half = abs(mid - a)
    # Offset the midpoint perpendicular to the chord by sqrt(1 - half^2).
    centre = mid + (mid - a) * (math.sqrt(1 - half ** 2)) * 1j / half
    return sum(1 for p in P if abs(p - centre) <= 1.0001)
# Read datasets until a line containing 0. Each dataset gives N points; for
# every ordered pair of points, count how many points fall inside the unit
# circle through that pair, and report the best count (at least 1, since a
# single point is always coverable).
while True:
    n = int(input())
    if n == 0:
        break
    points = []
    for _ in range(n):
        x, y = map(float, input().split())
        points.append(complex(x, y))
    best = 1
    for a in points:
        for b in points:
            best = max(best, count(a, b, points))
    print(best)
NVIDIA will host a conference call on Thursday, Feb. 9 at 2 p.m. PT (5 p.m. ET) to discuss its financial results for the fourth quarter and fiscal year 2017, ending Jan. 29, 2017.
The call will be webcast live (in listen-only mode) at the following websites: www.nvidia.com and www.streetevents.com. The company's prepared remarks will be followed by a question and answer session, which will be limited to questions from financial analysts and institutional investors.
Ahead of the call, NVIDIA will provide written commentary on its fourth-quarter results from its CFO. This material will be posted to www.nvidia.com/ir immediately after the company's results are publicly announced at approximately 1:20 p.m. PT.
To listen to the conference call, dial (877) 223-3864 or, for those outside the United States, (574) 990-1377, conference ID 52907909.
A replay of the conference call will be available until Feb. 16, 2017, at (855) 859-2056, conference ID 52907909. The webcast will be recorded and available for replay until the company's conference call to discuss financial results for its first quarter of fiscal year 2018.
Keep Current on NVIDIA
Subscribe to the NVIDIA blog, follow us on Facebook, Google+, Twitter, LinkedIn and Instagram, and view NVIDIA videos on YouTube and images on Flickr. |
Effects of a Post-Shock Injection of the Kappa Opioid Receptor Antagonist Norbinaltorphimine (norBNI) on Fear and Anxiety in Rats
Exposure of rats to footshocks leads to an enduring behavioral state involving generalized fear responses and avoidance. Recent evidence suggests that the expression of negative emotional behaviors produced by a stressor is in part mediated by dynorphin and its main receptor, the kappa opioid receptor (KOR). The purpose of this study was to determine if a subcutaneous injection of the long-acting KOR antagonist norbinaltorphimine (norBNI; 15.0 and 30.0 mg/kg) given 2 days after an acute exposure of rats to footshocks (5×2 s episodes of 1.5 mA delivered over 5 min) attenuates the expression of lasting fear and anxiety. We report that exposure of rats to acute footshock produced long-lasting (>4 weeks) fear (freezing) and anxiety (avoidance of an open area in the defensive withdrawal test). The 30 mg dose of norBNI attenuated the fear expressed when shock rats were placed in the shock context at Day 9 but not Day 27 post-shock. The same dose of norBNI had no effect on the expression of generalized fear produced when shock rats were placed in a novel chamber at Days 8 and 24. In contrast, the 30 mg dose of norBNI produced consistent anxiolytic effects in shock and nonshock rats. First, the 30 mg dose was found to decrease the latency to enter the open field in the defensive withdrawal test done 30 days after the shock exposure. Second, the same high dose also had anxiolytic effects in both nonshock and shock rats as evidenced by a decrease in the mean time spent in the withdrawal box. The present study shows that systemic injection of the KOR antagonist norBNI had a mixed effect on fear. In contrast, norBNI had an anxiolytic effect which included the attenuation of the enhanced avoidance of a novel area produced by a prior shock experience.
Introduction
Clinical evidence indicates that many individuals exposed to a severe trauma involving intense fear subsequently exhibit a strong emotional reaction when confronted with reminders of the trauma situation . In addition, these individuals often display fear or anxiety in situations that would not normally elicit this reaction . The generalization of fear to situations not directly related to the trauma can lead to anxiety and avoidance of normal day-today situations which may lead to the diagnosis of posttraumatic stress disorder (PTSD) in individuals in which the symptoms last longer than one month . The discovery of more effective treatments is essential since PTSD symptoms may last for years to decades in some individuals despite these people having received psychological and pharmacological treatment .
Similar to the clinical situation, rodents exposed to a single episode of moderately intense footshocks (1.5 to 2.0 mA) not only show a strong fear response when re-exposed to the shock apparatus associated with the shock experience but also display an increase of fear-like (immobility) response when exposed to novel environment or loud noises . A number of studies have also shown that rodents pre-exposed to electrical shock show enhanced avoidance or anxiety in situations involving novel conspecific, objects, or test areas . The anxiety displayed by rodents previously exposed to shock results from an adaptive response where potentially fearful situations are avoided or approached with caution. As shown recently, generalized fear and learning mechanisms appear to contribute to avoidance of fear-inducing situations .
There has been a surge of recent interest in the possibility that the kappa opioid receptor (KOR) and prodynorphin derived peptides (dynorphins), which act with high specificity at KORs , modulate negative emotional states following exposure to stress . Indeed, there are a number of studies showing that systemic and central injections of KOR agonists produce dysphoria, anxiety and pro-depressive states in humans and rodents while KOR antagonists attenuate the anxietyand depression-like behaviors . There is also ample evidence showing that disrupting the synthesis of prodynorphin derived peptides or blocking KOR with antagonists reduces the negative emotional states associated with a previous exposure of rodents to forced swimming, social defeat, and footshock stress and has anxiolytic effects in non-stressed rodents . In addition, a role for KOR in fear conditioning has been provided by studies showing that central administration of a KOR antagonist interfered with freezing and fear-potentiated startle . Taken together, these findings suggest that blocking KORs before or at the time of the stress episode can attenuate negative emotional behaviors expressed 1 to 2 days after exposure to a stressful situation . In contrast, no studies have reported that blocking of KORs after a stress episode is effective in reducing the negative emotional behaviors that result from the stressor.
As described above, exposure of rats to a brief episode of relatively intense footshocks produces long-lasting fear/anxiety and provides a useful model to examine if a KOR antagonist could have therapeutic effects if given to people after a trauma experience. The present study was done to determine if a single post-shock injection of the long-lasting antagonist norbinaltorphimine (norBNI) prevents the expression of the fear-and anxiety-like behaviors over a 4 week period in rats. This is of special interests because norBNI and most of the more specific KOR antagonists have slow onset latencies and maintain their pharmacological effects for weeks following a single administration .
Ethics Statement
The experimental procedures were in compliance with the Canadian Council on Animal Care and the experimental protocol was approved by Research Ethics Review Board of the University of Manitoba (protocol number 09-057) and every effort was made to minimize the suffering of the animals.
Animals and Housing
Six-week-old male Sprague-Dawley rats (University of Manitoba, Canada) weighing 140-150 g at the time of first arrival to the animal facility were pair-housed in plastic cages in a colony room on a 12 h/12 h light/dark cycle (lights on 06:00) with controlled temperature (20-24 °C) and humidity (40-70%). All rats had free access to food and water in their home cages and were handled for 2 min on alternate days during a 10-day adaptation period. All the behavioral tests were done in the light cycle of the day (09:00-17:00) in a different room from the colony room. Testing during the light phase was done to enhance the expression of fear and anxiety following footshock exposure.
Experimental Design
The parameters for the shock procedure as well as the timing of the behavioral tests were according to work previously described from our laboratory which was based on methods used by a number of laboratories . At 8 weeks of age, rats were randomly assigned to different experimental conditions as follow. On Day 0, rats were transferred to the testing room where some of the rats received a single episode of footshocks (n = 29) while the remainder of the animals were placed in the chamber without receiving footshocks (n = 26). We have previously shown that rats show individual differences in their reaction to the footshock exposure . Accordingly, on Day 1, rats were placed in a small open field to determine the amount of generalized fear expressed early after the footshock exposure and to generate homogeneous groups for the treatment conditions. On Day 2, subgroups of nonshock and shock rats received injections of norBNI at 15.0 mg/kg (n = 8 and 11 for nonshock and shock, respectively), 30.0 mg/kg (n = 9 and 9 for nonshock and shock, respectively) or vehicle (n = 9 and 9 for nonshock and shock, respectively). A series of behavioral tests were done at early (Days 8-9) and later (Days 24-30) time points to measure fear-and avoidance-like behaviors and to determine the effect of norBNI on these measures (Table 1; each rat was exposed to all tests). The order and time of the behavioral tests were counterbalanced among the different drug treatment groups. The novel test chambers, open field, and defensive withdrawal chambers were cleaned with liquinox (0.5%) whereas the shock chamber was cleaned with ethanol (10.0%) after each rat exposure. Analysis of behavioral tests was done by experimenters blind to the treatment conditions.
Generation of Homogeneous Shock and Drug Injection Groups
We have shown previously that shock rats display a range of post-shock fear generalization and that rats showing a higher level of fear generalization exhibit more anxiety 4 weeks after shock exposure . To generate homogeneous treatment groups of shock rats, post-shock fear generalization was assessed on Day 1 by measuring the amount of freezing expressed in rats placed in a small open field (made of black Plexiglas and measuring L65 cm × W40 cm × H50 cm) for 3 min. Freezing, defined as complete lack of body movement except breathing movements , was expressed as the percentage of time the rats spent freezing during the test period. The range of percentage of time spent freezing for nonshock rats was 0 to 2.6% and these rats were randomly assigned to treatment groups. The range of percentage of the time spent freezing for shock rats was 0 to 48.3% and based on these responses, shock rats were assigned to form 3 treatment groups with similar means (saline = 23.33 ± 6.37%; 15 mg = 19.70 ± 3.94%; 30 mg = 24.04 ± 3.98%). The KOR antagonist norBNI (Tocris, U.K.) was dissolved in sterile saline (15.0 and 30.0 mg/ml; with the high dose dissolved by gentle warming in a 60 °C water bath) on the same day as the injections. Treated rats received norBNI injections (15.0 and 30.0 mg/kg, s.c.) in a volume of 1.0 ml/kg body weight on Day 2 following footshock exposure. The antagonist norBNI has slow onset latency and maintains pharmacological effects for weeks . The 15.0 and 30.0 mg/kg doses used in the present study are similar to what have been previously reported to be effective for antidepressive and anxiolytic effects in rodents . While norBNI has a short-lasting antagonist effect on mu opioid receptor (2-4 hours), the long-lasting antagonist effect exists only for the KORs .
Behavioral Procedures
Rats were transferred individually to the behavioral testing room and placed into a shock chamber with a grid floor (MED Associates, St. Albans, VT, USA). After a 2 min acclimation period, rats were exposed to footshocks (562 s of 1.5 mA shock with an inter-shock period of 10-50 s presented randomly over 3 min) administrated by a commercially available scrambled stimulator (MED Associates). The rats were kept in the chamber for another 60 s before they were returned to their home cages. The shock chamber was cleaned with ethanol and the bedding under the grid floor was changed for each animal. The rats in the nonshock group were exposed to the shock chamber in the same way except that the stimulator was turned off. Rats were transported and shocked one at a time so that other rats would not be exposed to auditory and olfactory responses produced during the shock procedure. The post-shock generalized fear response was assessed on Day 8 by placing rats in a clear plastic chamber (L22 cm6W28 cm6H35 cm) for 5 min. The activity of the rats was recorded using a video camera and all of the video was subsequently scored for presence of freezing by two experimenters blind to the experimental conditions. The reliability score between raters ranged between 0.94-0.97 (correlation coefficient) and the data from two observers were averaged for statistical analysis.
Fear generalization was also examined on Day 24 by placing rats in the center area of a large open field (L80 cm × W80 cm × H40 cm) made of black Plexiglas for 5 min. The chamber was illuminated with a light of 8-10 lux. The conditioned fear response was measured on Days 9 and 27 after the footshock exposure by placing the rats in the shock chamber for 5 min (no shock delivered). The behavioral activity in the shock chamber was recorded and analyzed as described above.
The defensive withdrawal test was done as originally described on Day 30 to measure avoidance to novel environment using an apparatus that consisted of an open field (L80 cm6W80 cm6H40 cm with a black floor and green opaque walls) and a smaller movable black box (L25 cm6W20 cm6H15 cm made of black opaque plastic walls and floor) with a sliding door. The movable withdrawal box was placed 20 cm away from one of the corners of the open field. The light level in the test chamber was approximately 7 lux and the test protocol was similar to what was described in previous studies . The rat was placed in the withdrawal box which was then placed into the open field with the door facing one corner. Two minutes later, the sliding door was removed to allow the rat to freely explore the open field. The behaviors of the rat in the open field were recorded for 10 min using a video camera. The latency to enter the open field (four paws into the open arena) and the time spent in the withdrawal box (total time spent in the withdrawal box/number of entries) were quantified.
Statistical Analysis
The study was a 2 (shock vs nonshock) × 3 (saline, 15, and 30 mg/kg) factorial design. Behavioral test day (early vs. late) was not analyzed as a factor and separate statistical tests were done for those data sets. The between-factor data were analyzed using two-way ANOVA for main effects involving ''shock'' (nonshock and shock) and ''norBNI'' treatment (saline, 15 mg and 30 mg) as well as interaction effects between ''shock'' and ''norBNI''. When a main effect or interaction was indicated by the two-way ANOVA, the within-factor data (shock vs. nonshock) were further probed using one-way ANOVA to determine if norBNI had an effect on the behaviors examined. Least significant differences (LSD) post-hoc tests were used to identify which of the drug treatments produced the significant differences indicated by the one-way ANOVA. The statistical analysis was done using SPSS 19 and all data are shown as mean ± SEM.
Generalized Fear
Animals exposed to an intense stressor involving fear will show a fear response when confronted with novel situations that are not directly related to the trauma experience . In this study, fear generalization was assessed by measuring the amount of freezing expressed when rats were placed in a novel environment on Days 8 and 24. The two-way ANOVA revealed that shock rats showed more freezing on Day 8 ( Fig. 1A; F 1,49 = 59.653, p,0.001) but not on Day 24. There was no significant effect of ''norBNI'' on freezing on Days 8 or 24, nor was there an interaction effect between ''shock'' and ''norBNI'' on Days 8 or Day 24. In summary, shock rats showed more freezing when placed in a novel chamber on Day 8 and inactivation of KORs with norBNI did not affect the amount of freezing expressed.
Conditioned Fear Response
The amount of freezing expressed when rats are re-exposed to the shock chamber was used to assess the strength of the conditioned fear response. The two-way ANOVA showed that shock rats expressed more freezing on Day 9 ( Fig. 2A; F 1,49 = 142.001, p,0.001) and Day 27 ( Fig. 2B; F 1,49 = 36.044, p,0.001). For test done on Day 9, there was an interaction effect between ''norBNI'' and ''shock'' (F 2,49 = 11.137, p,0.001). The one-way ANOVA showed that norBNI had an effect on freezing duration in shock rat (F 2,26 = 9.819, p,0.001) and nonshock rats (F 2,23 = 3.564, p,0.05). The post-hoc analysis revealed that the 30.0 mg dose of norBNI significantly decreased freezing duration when compare to the 15.0 mg dose (p,0.001) and saline (p,0.01) in shock rats on Day 9. In contrast, the high dose of norBNI increased freezing in nonshock rats when compare to the low dose of norBNI (p,0.05) but not saline treated rats. There was no interaction effect between ''norBNI'' and ''shock'' for test done on Day 27. The results indicate that the high dose of norBNI attenuated the expression of conditioned fear in shock rats while increasing fear in nonshock rats when tested at an early time point.
Avoidance of Open Spaces
The defensive withdrawal test was done to determine the effect of norBNI on the anxiety produced by footshock exposure. The two-way ANOVA demonstrated that shock rats had a higher latency to enter the open field ( However, there was no interaction between ''norBNI'' and ''shock''. The one-way ANOVA indicated that norBNI had significant effects on the time in both the nonshock (F 2,23 = 3.828, p,0.05) and shock (F 2,26 = 6.190, p,0.01) rats. For nonshock rats, the post-hoc analysis showed that the 30 mg dose significantly decreased the time in the withdrawal box when compared to the saline treated group (p,0.01). For the shock group, post-hoc analysis showed that the 30 mg dose significantly decreased the time when compare to the 15 mg dose (p,0.01) and the saline treated group (p,0.05). The results indicate that the 30 mg dose of norBNI decreased the latency for shock rats to leave the safety of the withdrawal box. In addition, the 30 mg dose decreased the time that nonshock and shock rats spent in the withdrawal box.
Discussion
In this study, we investigated if inactivation of KORs with norBNI attenuated fear and anxiety in a rat model of PTSD. In general, acute exposure to footshocks (1.5 mA) produced longlasting expression of fear-and anxiety-like behavioral responses (freezing and anxiety) in shock rats when they were placed in the same context where shock occurred or when exposed to novel environment. More importantly, the high dose of norBNI was effective in lowering the latency to leave the withdrawal box in shock rats indicating an anxiolytic effect of norBNI specific to shock rats. We found that the high dose of the KOR antagonist norBNI had anxiolytic effects in both nonshock and shock rats when we used the time spent in the withdrawal box as a measure of anxiety. We also report that norBNI slightly reduced the expression of conditioned fear 9 days after shock exposure. These results are consistent with other studies showing that treatment of rodents with a KOR antagonist before or immediately after (0.5 hr) a stress-related event interfered with some of the behavioral changes associated with the stress experience a few days later . However, we show for the first time that a single post-shock injection of the KOR antagonist norBNI can attenuate anxiety produced in rats exposed to footshocks of 1.5 mA.
Evidence for a role for KOR in fear has been provided by a number of studies. First, central administration of norBNI prior to contextual fear conditioning was reported to interfere with the subsequent expression of freezing to the shock context . Second, systemic injections of norBNI or JDTic, another KOR antagonist, prior to discriminative fear conditioning was shown to attenuate the expression of fear-potentiated startle responses . However, the extended inactivation of KOR that results from the administration of these KOR antagonists makes it impossible to know from those studies if the drug treatment interfered with the acquisition or with the expression of the leaned fear response. In a third study, central administrations of norBNI after a discriminative fear conditioning procedure had no effect on subsequent expression of fear . In contrast to this most recent study, we report in this paper that contextual fear expression at Day 9 was weakly attenuated in shock rats that had received a post-shock injection of 30 mg/kg norBNI. In addition, norBNI had inconsistent effects on fear produced when rats were exposed to novel or shock chamber. For example, the high dose of norBNI reduced contextual fear in shock rats at Day 9 while increasing fear in nonshock rats placed in the shock chamber. Second, norBNI did not significantly lower the level of generalized fear in shock rats given the fact that freezing to the novel chamber was nearly as high as that expressed in the shock chamber. Based on our experiments as well as previous studies, we conclude that the role of KOR in fear is complex and that norBNI treatment after a strong fear inducing episode may not be a useful approach for modulating fear expression.
Shock rats displayed more freezing when exposed to novel chambers, a behavioral response which is taken as a sign that fear which is normally associated with novelty is more easily aroused or that the experience of the fear inducing situation generalizes to other situations . However, norBNI did not reduce the amount of freezing expressed in shock rats placed in a novel chamber. We also report the unexpected finding that the high dose of norBNI produced an increase in the freezing expressed by nonshock rats when re-exposed to the shock chamber. While speculative at this point, it is possible that the neural mechanisms that mediate freezing in shock rats may be different than the freezing expressed by nonshock rats. As such, the results of our experiments would suggest that KORs play a different role in innate fear mechanisms than it does in the fear associated with an intense stress experience.
It is well-documented that rodents pre-exposed to a short episode of relatively intense electrical shock show enhanced avoidance to novel situations and objects which is believed to reflect behavioral responses associated with anxiety . The present study used a similar protocol to examine the potential role of KORs in the lasting anxiety that can develop following a severe stressor. In the defensive withdrawal test, we show that the high dose of norBNI attenuated the latency for shock rats to leave the safety of the withdrawal box. Latency appears to be the most sensitive measure of anxiety in the defensive withdrawal test as shown in a previous study using a similar shock protocol to the one used in the present study . In that study, the benzodiazepine anxiolytic drug decreased the latency to enter the open field in shock rats while having no effect on the time spent outside the withdrawal box or the number of entries in the open field. In the present study, there was non-significant tendency for norBNI to decrease latency in nonshock rats but norBNI's anxiolytic effects were specific for shock rats. We also found that norBNI decreased the time shock and nonshock rats spent in the withdrawal box. As previously shown and discussed , it is likely that the time spent in the withdrawal box represents a behavioral pattern that includes more than just anxiety because there is a clear dissociation in the rate and the extent in which latency and the time spent in the withdrawal box habituate over repeated daily tests. This indicates that the time spent in the withdrawal box is more than an avoidance behavior and that norBNI effects on this measure is not entirely dependent on the shock experience. Further work will be needed to completely characterize the role of KORs in the anxiety that is produced by exposure of rats to footshocks and to confirm the anxiolytic effect of KOR antagonists in this model of PTSD. 
Anxiolytic effects of norBNI were also noted in one study in non-stressed rats tested on the elevated plus maze . As such, the results of this study using the defensive withdrawal test are consistent with studies indicating that KOR antagonists have anxiolytic effects in a variety of anxiety tests including the elevated plus maze, novelty-induced hypophagia and defensive burying protocols .
A number of inconsistencies were observed between the results of the fear expression and the defensive withdrawal tests. First, norBNI treated nonshock rats displayed an increase in freezing to the shock chamber while the same animals did not show an increase in avoidance in the defensive withdrawal test. Second, avoidance in the defensive withdrawal test was reduced by norBNI but generalized fear to a novel context was not affected. Since fear and anxiety represent different types of defensive behaviors, the apparent discrepant findings may be due to potential differences in KORs involvement in fear and anxiety. Fear is a defensive response associated with specific context or stimulus which has been associated with a negative emotional state (for example, reexposure to the shock context) . Freezing to the novel chamber following exposure of rodents to footshocks of similar intensity as used in the present experiments appears to result largely from a generalization of fear from the footshocks experienced in the shock box to other chambers . In contrast, anxiety is conceptualized as behavioral response associated with a non-specific threat and is often operationally defined as an increase in avoidance behaviors . The defensive withdrawal test is designed to measure the avoidance tendency of rodents that are naturally motivated to explore new environments and the results of our experiments are consistent with previous studies which have consistently shown that norBNI and other KOR antagonists have anxiolytic effects using similar tests.
Contrary to what might be expected from the published literature on norBNI, it is somewhat surprising that norBNI given at a systemic dose of 15 mg/kg had no anxiolytic effect. For example, a number of studies have shown that systemic injections of norBNI given at 10 mg/kg had antidepressant and anxiolytic effects in mice and rats . However, other studies have reported that the KOR antagonists norBNI and 59guanidinonaltrinodole or GTNI given systemically at a dose of 10 mg/kg had no antidepressant effects in the swim test while the same studies showed that these KOR antagonists given as 20 mg in the cerebral ventricles had the predicted effect . Another recent study has reported that a 10 mg/kg systemic dose had no behavioral effects whereas a 30 mg/kg dose interfered with alcohol self-administration . As discussed previously, a dose of 10-15 mg/kg should be sufficient to occupy KOR in body, but the binding of KOR in the brain following systemic injections of norBNI may be limited by bioavailability of norBNI to brain tissue . For example, the more lipophilic KOR 59acetamidinoethylnaltrindole or ANTI was found to have antidepressant effects following systemic dosing at 10 mg/kg whereas the less lipophilic GTNI did not . Other potential reasons that may explain why the 15 mg/kg dose was ineffective in the present study include factors such as species type, age and weight of the subjects, type of behavioral test, time between the drug administration and the behavioral test, and presence of an intense stress episode. It is important to note that the anxiolytic effects of the 30 mg/kg dose of norBNI were from tests done 4 weeks after the drug treatment and that the design of our experiment does not rule out the possibility that the anxiolytic effects of norBNI might have been through some secondary mechanism.
It is difficult to compare the magnitude of the effects observed in our study to other studies because different studies have used different behavioral tests, KOR antagonists and experimental animals. However, in one study, rats given the 30 mg/kg dose of norBNI had open arm time that reached 30% which is typical for non-anxious rats tested on the elevated plus maze . The anxiolytic effects reported here could be considered comparable to that study in that shock rats treated with the 30 mg/kg dose had latency responses that were similar to nonshock rats. One potential concern in the discussion of the mechanism mediating the anxiolytic effect of norBNI with the 30 mg/kg dose is that this concentration of the antagonist is approaching a concentration that would antagonize other opioid receptors (Endoh et al., 1992;Thomas et al., 2004). As such, one has to think about the possibility that norBNI produces its anxiolytic effect through non-KOR mediated mechanism. This is especially important considering that antagonism of other opioid receptors has also been shown to have anxiolytic properties . However, this does not appear to be the case because antagonism of mu or delta opioid receptors lasts for a few hours after norBNI administration while the anxiolytic profile of norBNI lasts for weeks in the present study. Another caveat is that long-term inactivation of KOR may produce some compensatory changes in other neural mechanisms which in turn could be responsible for the anxiolytic effects of norBNI observed in the present study. This could represent an interesting area of investigation for future studies.
According to the present study, the KOR appears to be involved in avoidance in rats exposed to an acute episode of footshocks. More importantly, systemic injections of the KOR antagonist norBNI two days after the footshock episode were shown to block avoidance-like behaviors 4 weeks after the drug administration. These findings have potential implications for the treatment of humans with PTSD since fear associated with memories of the trauma often becomes generalized in a way that the person feels constantly threatened . This can eventually lead to avoidance of many situations that may not be directly related to the trauma and the development of maladaptive behaviors including social isolation . It is possible that treating PTSD patients with a KOR antagonist after a trauma experience may help reduce the development of the avoidance and anxiety that contribute to the psychological distress associated with the condition. In addition, the results presented here point to the potential of KOR antagonists as a form of prophylactic treatment in the development of symptoms of avoidance in at risk individuals experiencing a severe trauma.
Author Contributions
Conceived and designed the experiments: YL GK. Performed the experiments: BR SL XC. Analyzed the data: SL. Contributed reagents/ materials/analysis tools: GK YL BR. |
/**
* should detect people around the robot
*/
private void findHumansAround() {
Log.i(TAG, "findHumansAround");
Future<List<Human>> humansAroundFuture = humanAwareness.async().getHumansAround();
humansAroundFuture.andThenConsume(humansAround -> {
Log.i(TAG, humansAround.size() + " human(s) around.");
retrieveCharacteristics(humansAround);
});
} |
def load_audio_in_ref_paths(cls, metadata_path):
    """Load the pair of audio file paths recorded in a metadata file.

    The file named by ``cls._AUDIO_IN_REF_FILENAME`` inside
    ``metadata_path`` is expected to contain two lines: the input-audio
    path followed by the reference-audio path.

    Args:
        cls: Class (or object) providing the ``_AUDIO_IN_REF_FILENAME``
            attribute with the metadata file name.
        metadata_path: Directory containing the metadata file.

    Returns:
        tuple: ``(audio_in_filepath, audio_ref_filepath)`` with
        surrounding whitespace stripped from each path.
    """
    ref_file = os.path.join(metadata_path, cls._AUDIO_IN_REF_FILENAME)
    with open(ref_file) as metadata:
        paths = [metadata.readline().strip() for _ in range(2)]
    return paths[0], paths[1]
//SwitchToState implements github.com/insolar/insolar/pulsar.StateSwitcher interface
//
// Generated mock method: it records the invocation, optionally validates
// the received arguments against pre-recorded expectations, and then
// delegates to the user-supplied SwitchToStateFunc implementation.
func (m *StateSwitcherMock) SwitchToState(p context.Context, p1 State, p2 interface{}) {
// PreCounter is bumped on entry and Counter on return, so callers can
// distinguish in-flight invocations from completed ones.
atomic.AddUint64(&m.SwitchToStatePreCounter, 1)
defer atomic.AddUint64(&m.SwitchToStateCounter, 1)
// If expectations were recorded (mockExpectations set), verify the actual
// parameters match them before dispatching to the implementation.
if m.SwitchToStateMock.mockExpectations != nil {
testify_assert.Equal(m.t, *m.SwitchToStateMock.mockExpectations, StateSwitcherMockSwitchToStateParams{p, p1, p2},
"StateSwitcher.SwitchToState got unexpected parameters")
// Expectations without an implementation is a test-setup error.
if m.SwitchToStateFunc == nil {
m.t.Fatal("No results are set for the StateSwitcherMock.SwitchToState")
return
}
}
// A call with no configured implementation at all is unexpected.
if m.SwitchToStateFunc == nil {
m.t.Fatal("Unexpected call to StateSwitcherMock.SwitchToState")
return
}
m.SwitchToStateFunc(p, p1, p2)
}
/*
 * ======== OPT3001_open ========
 * Sets up an OPT3001 light sensor instance and returns an OPT3001_Handle.
 *
 * index     - index into the OPT3001_config table identifying the sensor.
 * i2cHandle - already-opened I2C bus handle used for register access.
 * params    - driver parameters; NULL selects OPT3001_defaultParams.
 *
 * Returns NULL if the instance is already open or if writing the
 * configuration (or limit) register fails.
 */
OPT3001_Handle OPT3001_open(unsigned int index,
I2C_Handle i2cHandle, OPT3001_Params *params)
{
OPT3001_Handle handle = &OPT3001_config[index];
OPT3001_Object *obj = (OPT3001_Object*)(OPT3001_config[index].object);
OPT3001_HWAttrs *hw = (OPT3001_HWAttrs*)(OPT3001_config[index].hwAttrs);
uint16_t data;
/* A non-NULL i2cHandle marks the instance as already in use. */
if (obj->i2cHandle != NULL) {
return (NULL);
}
obj->i2cHandle = i2cHandle;
/* Fall back to the driver defaults when no parameters are supplied. */
if (params == NULL) {
params = (OPT3001_Params *) &OPT3001_defaultParams;
}
/* Pack the mode, timing, fault-count, interrupt-mode and range fields
 * into a single configuration-register value. */
data = (uint16_t)params->conversionMode | (uint16_t)params->conversionTime | (uint16_t)params->faultCount
| (uint16_t)params->interruptMode | params->range;
if (OPT3001_writeRegister(handle, data, OPT3001_CONFIG)) {
if (params->callback != NULL) {
obj->callback = params->callback;
/* conversionReady is programmed into the low-limit register —
 * presumably this enables the device's conversion-ready interrupt
 * mode; confirm against the OPT3001 datasheet. On failure, release
 * the instance before reporting the error. */
if (params->conversionReady) {
if (!OPT3001_writeRegister(handle, params->conversionReady,
OPT3001_LOLIMIT)) {
obj->i2cHandle = NULL;
return (NULL);
}
}
/* Route the interrupt GPIO to the user's callback. */
GPIO_setCallback(hw->gpioIndex, obj->callback);
}
return (handle);
}
/* Configuration write failed; mark the instance closed again. */
obj->i2cHandle = NULL;
return (NULL);
}
UV and temperature effects on chloroacetanilide and triazine herbicides degradation and cytotoxicity
The purpose of this study was to explore the stability and toxicity of the herbicides and their degradation byproduct after exposure to different environmental factors. Triazines (atrazine, propazine, simazine) and chloroacetanilides (acetochlor, alachlor, metolachlor) which are commonly used herbicides were evaluated for cytotoxicity in different UV (254 nm and 365 nm) and temperature (4 °C, 23 °C, and 40 °C) conditions as well as degradation rates. Atrazine with the highest LD50 (4.23 μg mL−1) was less toxic than the other tested triazine herbicides Chloroacetanilides tested were more toxic than tested triazines, with LD50 0.08–1.42 μg mL−1 vs 1.44–4.23 μg mL−1, respectively. Alachlor with LD50 0.08 μg mL−1 showed the strongest toxic response as compared with other tested herbicides. Temperatures only did not alter cytotoxicity of the tested herbicides, except for acetochlor and alachlor showing about 45 % more cell death after exposure to 40 °C for 2 h. At all 3 tested temperatures, 2 h of UV treatments did not affect cytotoxic effects of the tested herbicides, except for acetochlor and alachlor. At 4 °C, acetochlor toxicity was attenuated about 63 % after UV 365 nm exposure; but alachlor toxicity was enhanced after either UV 254 or 365 nm exposure for about 40 % and 24 %, respectively. At 23 °C, acetochlor toxicity was enhanced about 35 % after UV 254 nm exposure, but attenuated about 48 % after UV 365 nm exposure. Alachlor toxicity was enhanced about 34 % after UV 254 nm and 23 °C exposure. In combination of UV 254 nm and 40 °C, acetochlor toxicity was lowered by 63 % and alachlor toxicity was no change as compared with 4 °C, no UV group. After co-treatment with UV 365 nm and 40 °C both acetochlor and alachlor toxicity was enhanced 55 % and 80 %, respectively. Through degradation analysis by LC-MS/MS, alachlor showed the most dramatic degradation (only 0.58 %–10.58 % remaining) after heat and UV treatments.
Introduction
The United States' herbicide use has surged 25.6 % in just a 4-year time span from 540 million pounds in 2008 to 678 million pounds in 2012 . While being beneficial to crop growth, the dramatic increase in herbicides use over recent decades poses a serious threat to the environment, due to residue remains left on crops, posing a direct threat to humans through consumption, as well as collection into water systems through runoff . This, in turn, leads to the consumption of harmful chemicals by organisms in and around the ecosystem. Surface runoff from agricultural areas is the main reason for contamination of surface water by herbicides and causes serious environmental impacts. Transformation of herbicides due to nature environmental factors could produce more hazardous transformed byproducts . The toxicity of herbicides, however, after transformation by natural environmental factors are not well studied.
Triazine and chloroacetanilide herbicides are some of the most commonly used in agriculture in the United States of America. They are frequently found in soil and aquatic systems due to their high usages and the persistence through physiochemical degradation . Atrazine has been found to have the highest concentration of 30 μg L À1 in ground and surface water . Kalkhoff et at. (1998) detected chloroacetanilides in ground and surface water with concentration of 0.05 μg L À1 and 0.13 μg L À1 , respectively . Chloroacetanilides are known to transform rapidly in soil under aerobic conditions . In water with microorganisms, chloroacetanilides are degraded via various pathways to form a large number of degradation byproducts . Triazines, which are relatively more stable than chloroacetanilides, can undergo dealkylation and dechlorination simultaneously with faster rates under UV and ozonation .
The triazine herbicides used in this study specifically are atrazine, propazine, and simazine are regarded as more stable compounds . Atrazine, one of most widely used triazines, has up a half-life of up to 6 months in soil , and its degradation is known to be affected by temperature and moisture. The main photodegradation processes for triazines at the early phases are through dechlorination and dealkylation. Different triazines have different degradation rates depending on chemical structure . Simazine with one less alkyl group and a more symmetric structure is degraded slower as compared with atrazine. Atrazine at low dose exhibits carcinogenic properties and disrupts endocrine system . Studies have demonstrated that some triazine degradation byproducts, such as deisopropyl-atrazine, deethyl-atrazine and deethyldeisopropylatrazine, change toxicity and are persistence as compared to their parent chemicals .
Chloroacetanilide herbicides, another widely used group of herbicides, have been frequently detected in ground and surface waters and are toxic to a wide range of organisms . Even at very low concentration, exposure to chloroacetanilide herbicides via contamination of water or agricultural products would be a great public health and environment risk. Ma et al. showed chloroacetanilides can increase oxidative stress by increasing reactive oxygen species level and trigger apoptosis . Studies also showed that utilization of chloroacetanilides links to caner and Parkinson's disease . To minimize the impact of herbicide residue and its byproducts in the environment and risk it poses to human health, unveiling the physiochemical effect on herbicide degradation will provide the comprehensive information for improving current pesticide management strategies.
It is known that these chemicals, while in the environment, undergo degradation via different environmental factors . There is a plethora of degradation factors on the average farm and neighboring aquatic ecosystems through soil leaching, such as moisture levels, light intensity, temperature, soil pH, and UV rays. Additional factors can include microorganisms, which can alter chemical composition through degradation, mineralization, or conjugation .
Photodegradation is the differentiation (breakdown, derivatization, decomposition) of a substance by exposure to sunlight, usually in tandem with air. This type of degradation is responsible for oxidation and reduction of environmental materials . The degradation factors focused on in this experiment include UV radiation and temperature, both of which individually can cause degradation of organic molecules . When temperature and UV effects are applied in tandem to degrade pesticides and herbicides, the exposure can result in different byproducts, which can alter toxicity in relation to parent compounds. Moreira's study showed that atrazine after radiation could generate 3 main hydroxyl byproducts: atrazine-2-hydroxy, atrazine-desethyl-2-hidroxy, and atrazine-desisopropyl-2-hydroxy .
As previously stated, pesticides have become more popular in agricultural practices worldwide. The adverse effects that these chemicals have on humans and other organisms around the world are dramatic and highly complex. The toxic effects of the herbicides on animals and plants were insufficiently investigated. Moreover, the toxicity of herbicide degradation products is rarely taken into account for its toxicity. Many of the degraded products can exert similar acute and chronic toxicities as the parent compound . The purpose of exposing the herbicides to different environmental factors is to test their stability and to evaluate the toxicity of degraded products together with their parent chemicals. This study was aimed to elucidate the cytotoxicity of the degraded byproducts of 2 classes of herbicides, triazines and chloroacetanilides after exposure of UV and various temperatures. This study is critical to public health as it allows us to build an understanding of the fate and the toxicity of pesticides under the influence of environmental factors. Hopefully it also highlights the need for further experimentation on these and other hazardous chemicals. This study will be beneficial to farmers and other crop growers in order to ensure safer produce.
Cell culture
Human embryonic kidney cells (HEK-293) were purchased from American Type Culture Collection (Manassas, VA). Cells were cultured as described in a previous study with Dulbecco Modified Eagle Media (DMEM) supplemented by 10 % fetal bovine serum, 2 mM L-glutamine, and gentamicin (50 μg mL−1). Incubation occurred at 37 °C in a 5 % CO2 humidified incubator. The cells were cultured into 96-well plates at 50,000 cells per 200 μL on the day before chemical treatments. Cells were at about medium density (~80 % confluence) when chemicals were added.
LD50 determination: neutral red uptake assay
Herbicides (triazines: atrazine, simazine, propazine; chloroacetanilides: acetochlor, alachlor, and metolachlor) were obtained from Sigma Aldrich (St. Louis, USA) at a stock of 1 mg mL À1 in methanol. These herbicides were diluted in phosphate buffered saline (PBS) or fresh media to concentrations as indicated in cytotoxicity study for determining LD 50 values which represent the chemical concentration needed to inhibit 50 % cell proliferation by neutral red uptake assay. After LD 50 was determined for each herbicide, specific LD 50 results were used as the concentration for UV and temperature treatments.
LD50 was determined by the neutral red assay, which is based on the lysosomal uptake of neutral red dye. Briefly, 200 μL of cell suspension (2 × 10⁴ cells per well) were seeded onto 96-well plates on the day before chemical treatments. Chemicals in methanol (1 mg mL−1) were diluted in fresh media to obtain various concentrations for each chemical: acetochlor and alachlor (5-500 ng mL−1), metolachlor and simazine (100-2000 ng mL−1); atrazine and propazine (500-5000 ng mL−1). Each concentration in each experiment was done in at least triplicate. Multiple experiments were done to obtain LD50 values for each herbicide. The viability was determined based on a comparison with untreated cells, which were set as 100 % cell viability. The LD50 values were calculated from the dose-response curve.
UV and temperature treatments
The temperatures chosen for this study are to mimic our real environmental situation. To study temperature effect on herbicide degradation, each herbicide solution at LD 50 was solely placed in a cold room (4 C), laboratory (RT, 23 C), or an incubator (40 C) for up to 2 h and then kept in a -20 C freezer until analysis. Temperature conditions were chosen to be the most relevant representation of real-world situations, in which crops will be grown. The exposure time of UV was chosen based on Mermama's study which showed 2 h of photocatalysis treatment significantly altered IC 50 of S-metolachlor .
To study UV effects on herbicide degradation, each herbicide solution at LD 50 concentration was solely exposed to 2 h of UV radiation from UV lamps (Spectroline, Model ENF-280C). JAZ Spectrometer JAZA 1464 (Ocean Optics Inc) detected the intensity of UV-A (365 nm; 470 μW/cm 2 ) and UV-C (254 nm; 650 μW/cm 2 ) for consistency. UV-A is the main UV ray which can penetrate the atmosphere and reach earth. UV-C is almost all blocked by the atmosphere. When preparing chemicals for cell culture exposure, the herbicides were set in a space with a UV lamp overhead (height of 27 cm) for up to 2 h and then kept in a -20 C freezer until analysis.
Cytotoxicity: neutral red uptake assay and MTT assay
In order to determine the HEK-293 cells' ability to survive exposure to the treated herbicides, two viability assays were performed: neutral red uptake assay and MTT assay as described in previous studies by Cheng . The neutral red uptake assay is to test cell viability through monitoring cellular lysosomal activity. The MTT viability assay is another assay commonly used for quantifying culture viability by measuring metabolic activity and in turn mitochondrial functionality. The cells were treated with herbicides for heat and UV studies at the concentrations based on the LD 50 .
Neutral red uptake assay for monitoring of cell lysosomal activity
The assay was conducted as described in Section 2.2. LD50 determination: Neutral Red Uptake Assay. The relative cell survival rate was calculated by comparing with untreated cells which were set as 100 % cell viability.
MTT assay for the estimation of cell mitochondrial activity
The assay was conducted by following the manufacturer's protocol (Sigma-Aldrich) with slight modification to improve the formazan solubility (ATCC, 2011). Briefly, cells were introduced to 20 μL of MTT reagent (5 mg mL À1 in PBS) after chemical treatments. After 2 h of incubation (37 C and 5% CO 2 ) with MTT solution, MTT reagent was removed and 100 μL dimethyl sulfoxide (DMSO) was added to the wells to dissolve the formazan. The plates were then shaken for 10 min and read in BioTek Synergy Mx microplate reader spectrophotometrically at a wavelength of 540 nm and 690 nm for background. The relative cell survival rate was calculated by comparing with untreated cells which were set as 100 % cell viability.
Degradation rate measurement by LC-MS/MS
To further understanding of the degradation, the samples were subjected to LC-MS/MS analysis. The use of LC-MS/MS becomes vital in verifying degradation rate. To begin, the herbicide samples, after treatments, were dried in Turbovac using N 2 gas for 45 min at 23 C. After drying, the samples were reconstituted with 0.1 % formic acid in water, with 50 ng mL À1 butachlor as the internal standard (IS) based on EPA 535 method which used butachlor-ESA as an internal standard to detect chloroacetanilides in drinking waters by LC-MS/MS . Finally, the samples were injected into ultra-high-performance liquid chromatography-tandem mass spectrometer (UPLC-MSMS) LCMS 8030 (Shimadzu Inc., Columbia, USA). LCMS 8030 with a Phenomenex KinetexTM C18 column (2.1 Â 100 mm, 1.7 um) was used to separate compounds under the following condition: gradient mobile phase system The triple quadrupole mass spectrometer with Dual Ionization Source (DUIS) was operated in the positive ionization mode with spray voltage at 4.5 kV; corona pin voltage at 4.5 kV; desolvation line temperature at 250 C; heat block temperature at 400 C; nebulizing gas flow rate at 1.5 L min À1 and drying gas flow rate at 15 L min À1 . Each compound was monitored by at least two transitions in multiple reaction monitoring (MRM) mode ( Table 1). The chromatograms and mass spectra were shown in the Supplementary data. The percentage of remaining herbicide after treatments was calculated by dividing the ratio of treated herbicide with IS to the ratio of untreated herbicide with IS.
Statistical analysis
All the experiments were performed at least in triplicate. The data was calculated from the mean of at least three separate experiments. The results are reported as means ± SEM. The data were evaluated using the analysis of variance (ANOVA) and the means were analyzed using Student's t-test. Statistical significance was determined using Student's t-test (p < 0.05, with control at 100 %).
LD50 determination by neutral red uptake assay
In order to study whether degradation products have altered cytotoxicity as compared to parent compounds, LD 50 was chosen as the experimental dosage for UV and temperature treatments. Before testing the environmental factors on herbicide degradation on cytotoxicity, the lethal dose which can kill 50 % of tested cells (LD 50 ) was needed to be determined first. To determine the LD 50 for subsequent studies, cells were treated with various concentrations (5-5000 ng mL À1 ) of herbicides for 24 h before the neutral red uptake assay. The concentrations of herbicides which caused 50 % of cell death (LD 50 ) was calculated and shown in Table 2. The LD 50 for triazine herbicides used in this study were from 1.44 μg mL À1 to 4.23 μg mL À1 . Simazine showed stronger toxic effect with the lowest LD 50 (1.44 μg mL À1 ) as compared with atrazine which showed weaker toxicity, LD 50 4.23 μg mL À1 . The LD 50 for chloroacetanilide herbicides used in this study were from 0.08 μg mL À1 to 1.42 μg mL À1 . Alachlor showed stronger toxic effect with the lowest LD 50 (0.08 μg mL À1 ) as compared with metolachlor which showed weaker toxicity, LD 50 1.42 μg mL À1 . >5000 mg kg-1; Simazine >5000 mg kg-1 , except Atrazine. Among of chloroacetanilide herbicides in this study, the order of herbicide toxicity based on neural red study was alachlor > acetochlor > metolachlor. Among of triazine herbicides in this study, the order of triazines toxicity based on neutral red study was simazine > propazine > atrazine.
Atrazine was much less toxic as compared with the other two tested triazines in this study. However, atrazine's results in this study were not consistent with NIH data which atrazine is much more toxic as compared with other tested triazines. Since NIH data is based on in vivo studies, it could be due to different testing systems. The toxicokinetic factors, such as metabolism, must be included in the consideration of chemical toxicity. Jin et al. demonstrated that the metabolites of atrazine by cytochrome P450 can increase oxidative stress and disrupt the endocrine system in mice. Abarikwu and Farombi suggested that atrazine toxicity is cell-type specific. Studies observed that human SH-SY5Y cells shows toxic responses to lower levels of atrazine; but HepG2 liver cells to higher levels of atrazine. Kale et al. suggests that acetochlor and alachlor, but not metolachlor, have hepatotoxicity in rats and dogs at the lowest tested dose 100-200 μmole L-1 for 2-4 h; but all three chloroacetanilides have the same potency in human hepatic cells at 400-800 μmole L-1 for 2 h.
Herbicides after heat and UV exposures exhibit different effects on cell viability
In order to test the cytotoxicity of degraded herbicides after UV and temperature treatments, HEK-293 cells were treated with those herbicides at LD 50 dosage with or without UV and various temperature treatments for 2 h. After 24-h sample exposure, cells were then subjected for neutral red uptake assay for lysosomal activity and MTT assay for mitochondrial activity. The relative cell viability for cells treated with chemicals at LD 50 dosage was set up as 100 %. If the relative cell viability is more than 100 %, it indicates cells have higher survival rate after exposure with chemicals. If the relative cell viability is less than 100 %, it indicates cells have lower survival rate after exposure with chemicals.
Temperature or UV effect on cytotoxicity of triazines and chloroacetanilides
Herbicides after various temperature treatments were used to treat cells to determine cell viability by neutral red assay (Figure 1). In these experiments, the 4 C group was set up as a control. The results showed that acetochlor (Figure 1a) and alachlor (Figure 1b) exhibited higher toxicity after exposing to 40 C for 2 h as compared with 4 C and RT treatment groups with about 45 % more cell death. Temperature has no further effect on metolachlor triggered cytotoxicity (Figure 1c). For triazines, all tested triazine compounds showed no further effect after temperature treatments on cytotoxicity (Figure 1d-f).
For UV effect on herbicide toxicity, cells were first exposed to UV 254 nm or UV 365 nm for 2 h at 4 C and then subjected for neutral red cell viability analysis. In these experiments, the no UV group was set up as a control. The results showed that the cell viability increased significantly for about 63 % after acetochlor exposed to UV 365 nm as compared with no UV treatment and UV 254 nm treated groups (Figure 2a). Alachlor after either UV 254 nm or UV 365 nm treatment showed increased toxicity (40 % and 24 %, respectively) as compared with no UV treatment group (Figure 2b). For metolachlor, the cytotoxicity has no further change between no UV, UV 254 nm, and UV 365 nm groups (Figure 2c). On the other hand, 2-h UV treatments at 4 C showed no further changes on cytotoxicity of all tested triazine compounds (Figure 2d-f). These results are consistent with other studies which found alachlor has shorter half-life than metolachlor has in outdoor aquatic mesocosms, 19.8 days vs 33.8 days at 25 ng mL À1 , respectively .
Temperature and UV combination effect on cytotoxicity of triazines and chloroacetanilides
Two of chloroacetanilide herbicides, acetochlor and alachlor, exposed to UV 254 nm and UV 365 nm for 2 h at 23 C (Figure 3a,b) and 40 C (Figure 4a,b) have triggered slightly different cellular responses as compared 4 C groups did (Figure 2a,b). However, there are no differences in cell viability responses for metolachlor and 3 tested triazine herbicides between 4 C groups (Figure 2c (Figure 3a). This indicated that with the combination of UV 254 nm and 23 C acetochlor has further degraded into higher-toxic byproducts, but into less-toxic byproducts with 23 ºC/UV 365 nm co-exposure. Cells exposed to the 23 ºC/UV365 nm co-treated alachlor showed no further change in cytotoxicity as compared with 23 ºC/No UV group (Figure 3b). This indicated that at 23 ºC/UV 254 nm alachlor was degraded into higher-toxic byproducts, but not with UV 365 nm.
With 2 h of 40 ºC/UV 254 nm exposure, cells in acetochlor group exhibited a significant change in cell viability (~63 % increase) as (Figure 4a). This indicated that with combination of UV 254 nm and heat (40 C) acetochlor has further degraded into less-toxic byproducts, but not with UV 365 nm co-exposure. Cells exposed to the 40 ºC/UV254 nm co-treated alachlor (87.9 %) showed less cytotoxicity as compared with 40 ºC/No UV group (54.1 %) (Figure 4b). Cells exposed to the 40 ºC/UV 365 nm co-treated alachlor (19.2 %) showed higher cytotoxicity as compared with 40 ºC/No UV group (54.1 %) (Figure 4b). This indicated that UV 254 nm further degraded alachlor into less-toxic byproducts, but UV 365 nm further degraded alachlor into more-toxic byproducts.
Changes in mitochondrial activity of HEK-293 cells in response to herbicides and its byproducts after the heat and UV exposures were also monitored. However, there were no significant changes in mitochondrial activity in cells after exposed to the tested herbicides and it byproducts for 2 h ( Figure 5).
Heat and UV exposures trigger different degradation rate
The different cell viability responses to herbicides with heat and UV treatments could be due to the different degradation responses to heat and UV. To quantify the degradation profile, LC-MS/MS was applied to detect the degradation rate of herbicides in response to heat and UV ( Table 3).
The degree of degradation measured by LC-MS/MS (Table 3) has shown no specific patterns or correlation with specific temperature and UV exposures. All of the tested herbicides have exhibited some degree of degradation. Alachlor showed the highest degree of degradation as compared with all other tested herbicides. This signified the instability of alachlor. Since this study was designed to reveal the environmental effects on herbicide toxicity and the mixture of parent compound and degradation products in our environment is what we normally encounter, the formation of specific degradation products was not the focus of this study.
The three tested triazines also showed more resistance to heat and UV treatments (Figures 1d-f and 2d-f). Even in the combination of heat and UV (Figures 3d-f and 4d-f), triazines showed no changes in its cytotoxicity as compared to 4 C/No UV. Triazines are well known about its persistence in biological and chemical degradation. For example, atrazine's half-life is about 150 days in aerobic condition and more than 2 years in anaerobic condition . The combination treatment of heat and UV has triggered the degradation of acetochlor and alachlor. Especially, more toxic byproducts after exposing to UV 254 nm for 2 h at 23 C were formed (Figure 3a,b). Interestingly, more toxic byproducts after exposing to UV 365 nm for 2 h at 40 C were formed (Figure 3,b). In degradation degree study, the results showed alachlor was the most unstable one among the tested herbicides (Table 3). In Kawabata's study , nine pharmaceuticals in solution format also show different degradation profiles after UV irradiation at 254 nm, 302 nm or 365 nm (UV-C, UV-B or UV-A, respectively) which are dependent on both chemical structure and the wavelength of UV exposure. Kawabata et al. suggested that UV-A is less effective as compared with UV-C on degrading pharmaceuticals. Another concerning factor for degradation is that UV-C (254 nm) has a higher energy content determined through calculation ( Figure 6). For UV-C the energy content was 7.82*10-24J. For UV-A the energy content for UV-A was 5.49*10-24J. This is an important point to make as it allows for an understanding of the difference between UV-A (365 nm) and UV-C (254 nm) being 2.33*10-24J. Due to increased energy content, the degradation of samples exposed to UV-C experienced greater degradation. 
Moreover, the toxicity study conducted by Kawabata's group also indicated that UV irradiation can reduce the toxicity of some compounds due to the decrease of the amount and also can increase the toxicity of others due to the generations of toxic byproducts .
Conclusion
In summary, the triazine herbicides used in this study were more stable than their chloroacetanilide counterparts used in this study under these testing conditions. Acetochlor and alachlor were more sensitive to temperature and UV effects as compared to the other four tested herbicides. At 40 C, the toxicity of acetochlor and alachlor was further enhanced. The cytotoxicity of acetochlor and alachlor was also altered after the combination treatments of UV and temperature. Since 365nm UV (UV-A) is the main component (about 95 %) of solar UV radiation to earth, it is important to focus on 365 nm UV and heat effects on herbicide toxicity. The study shows that the toxicity of acetochlor and alachlor after 365 nm UV and 23 C treatments was attenuated, but was enhanced after 365 nm UV and 40 C treatments. This indicates that degradation byproducts of acetochlor and alachlor after heat and UV-A co-exposure could also have detrimental impact for human health. The future study will focus on identifying the specific degradation products of each test herbicide and their effects on cytotoxicity.
Author contribution statement
Johnatan Gideon, Jonathan Mulligan, Christina Hui, Shu-Yuan Cheng: Conceived and designed the experiments; Performed the experiments; Analyzed and interpreted the data; Contributed reagents, materials, analysis tools or data; Wrote the paper.
Funding statement
This research did not receive any specific grant from funding agencies in the public, commercial, or not-for-profit sectors.
Data availability statement
Data will be made available on request.
Declaration of interests statement
The authors declare no conflict of interest.
Additional information
Supplementary content related to this article has been published online at https://doi.org/10.1016/j.heliyon.2021.e08010. |
import random
from sqds.models import Unit, PlayerUnitGear, Gear, Skill
from sqds_seed.factories import CategoryFactory, GearFactory, UnitFactory, GuildFactory, \
PlayerFactory, PlayerUnitFactory, ZetaFactory, ModFactory
def random_sublist(lst, probability=0.5):
"""
Returns an iterator that includes each element of lst with a given probability.
:param lst Starting list
:param probability Probability of inclusion of each of lst elements
"""
return filter(lambda _: random.random() < probability, lst)
def generate_game_data(unit_api_id=None):
    """
    Populate the database with random mock game data.

    Creates 5 categories and 30 gear pieces, then builds units: one unit per
    entry of ``unit_api_id`` when it is given, otherwise 15 units with
    factory-generated api ids. Each unit is tagged with roughly 10 % of the
    created categories.

    :param unit_api_id: optional iterable of api ids to create units for
    :return: list of the created units
    """
    categories = CategoryFactory.create_batch(5)
    GearFactory.create_batch(30)

    if not unit_api_id:
        return [UnitFactory(categories=random_sublist(categories, 0.1))
                for _ in range(15)]
    return [UnitFactory(api_id=api_id, categories=random_sublist(categories, 0.1))
            for api_id in unit_api_id]
def generate_player_unit(unit, player, **kwargs):
    """
    Create a player unit for ``player`` based on ``unit``, with random
    gear, zetas, and mods attached.

    :param unit: Unit instance the player unit is based on
    :param player: owning Player instance
    :param kwargs: extra fields forwarded to PlayerUnitFactory
    :return: the created player unit
    """
    player_unit = PlayerUnitFactory(unit=unit, player=player, **kwargs)

    # Generate some gear.
    # Bug fix: the original instantiated PlayerUnitGear(...) without ever
    # saving it, so no gear row was persisted; objects.create() saves it.
    # The gear pool is also hoisted out of the loop (one query, not one per
    # iteration).
    gear_pool = list(Gear.objects.all())
    for _ in range(random.randint(0, 5)):
        PlayerUnitGear.objects.create(player_unit=player_unit,
                                      gear=random.choice(gear_pool))

    # Zeta some abilities (each zeta-able skill with 50% probability)
    for skill in Skill.objects.filter(unit=unit):
        if skill.is_zeta and random.random() > 0.5:
            ZetaFactory(player_unit=player_unit,
                        skill=skill)

    # Equip some mods (each of the 7 slots with default probability)
    for slot in random_sublist(range(7)):
        ModFactory(player_unit=player_unit, slot=slot)

    return player_unit
def generate_guild(player_count=45):
    """
    Create a mock guild populated with ``player_count`` players, each
    owning a random ~85 % subset of all existing units.

    :param player_count: number of players to generate in the guild
    :return: the created guild
    """
    guild = GuildFactory()

    for _ in range(player_count):
        member = PlayerFactory(guild=guild)

        # Give this member a random selection of player units
        for unit in random_sublist(Unit.objects.all(), 0.85):
            generate_player_unit(unit, member)

    return guild
|
/**
* Increment our version and place ourself in the cache.
*/
public synchronized void processSessionRepl()
{
if (log.isTraceEnabled())
{
log.trace("processSessionRepl(): session is dirty. Will increment " +
"version from: " + getVersion() + " and replicate.");
}
this.incrementVersion();
proxy_.putSession(realId, this);
sessionAttributesDirty = false;
sessionMetadataDirty = false;
updateLastReplicated();
} |
/*
* Copyright 2018, Decawave Limited, All Rights Reserved
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* @file dw1000_dev.c
* @author <NAME>
* @date 2018
* @brief Device file
*
* @details This is the dev base class which utilises the functions to perform initialization and necessary configurations on device.
*
*/
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include <os/os.h>
#include <hal/hal_spi.h>
#include <hal/hal_gpio.h>
#include <stats/stats.h>
#include <dw1000/dw1000_dev.h>
#include <dw1000/dw1000_regs.h>
#include <dw1000/dw1000_hal.h>
#include <dw1000/dw1000_phy.h>
#define DIAGMSG(s,u) printf(s,u)
#ifndef DIAGMSG
#define DIAGMSG(s,u)
#endif
/**
 * Read a block of data from a dw1000 register file.
 *
 * Builds the 1-3 octet SPI transaction header (read operation, optional
 * 15-bit sub-address) and transfers the payload, using the non-blocking
 * hal call for reads of 8 bytes or more.
 *
 * @param inst        Pointer to dw1000_dev_instance_t.
 * @param reg         Register file id (6 bits).
 * @param subaddress  Offset within the register file (15 bits).
 * @param buffer      Destination for the data read.
 * @param length      Number of bytes to read.
 * @return dw1000_dev_status_t
 */
dw1000_dev_status_t
dw1000_read(dw1000_dev_instance_t * inst, uint16_t reg, uint16_t subaddress, uint8_t * buffer, uint16_t length){
    assert(reg <= 0x3F); // Register file ids are limited to 6-bits.
    assert((subaddress <= 0x7FFF) && ((subaddress + length) <= 0x7FFF)); // 15-bit sub-addressable area.

    const uint8_t has_subindex = (subaddress != 0);
    const uint8_t is_extended  = (subaddress > 0x7F);

    /* Header octet 0: operation bit (0x80) clear for read, sub-index
     * flag at bit 6, register id in the low 6 bits. */
    uint8_t header[3];
    uint8_t header_len = 1;
    header[0] = (uint8_t)((has_subindex ? 0x40 : 0x00) | reg);
    if (has_subindex) {
        header[1] = (uint8_t)((is_extended ? 0x80 : 0x00) | (uint8_t) subaddress);
        header_len = 2;
        if (is_extended) {
            header[2] = (uint8_t)(subaddress >> 7);
            header_len = 3;
        }
    }

    if (length < 8) {
        hal_dw1000_read(inst, header, header_len, buffer, length);
    } else {
        hal_dw1000_read_noblock(inst, header, header_len, buffer, length);
    }
    return inst->status;
}
/**
 * Write a block of data into a dw1000 register file.
 *
 * Builds the 1-3 octet SPI transaction header (write operation, optional
 * 15-bit sub-address) and transfers the payload. Small writes go out
 * blocking; larger ones use the non-blocking hal call.
 *
 * @param inst        Pointer to dw1000_dev_instance_t.
 * @param reg         Register file id (6 bits).
 * @param subaddress  Offset within the register file (15 bits).
 * @param buffer      Source data to write.
 * @param length      Number of bytes to write.
 * @return dw1000_dev_status_t
 */
dw1000_dev_status_t
dw1000_write(dw1000_dev_instance_t * inst, uint16_t reg, uint16_t subaddress, uint8_t * buffer, uint16_t length)
{
    assert(reg <= 0x3F); // Register file ids are limited to 6-bits.
    assert((subaddress <= 0x7FFF) && ((subaddress + length) <= 0x7FFF)); // 15-bit sub-addressable area.

    const uint8_t has_subindex = (subaddress != 0);
    const uint8_t is_extended  = (subaddress > 0x7F);

    /* Header octet 0: operation bit (0x80) set for write, sub-index
     * flag at bit 6, register id in the low 6 bits. */
    uint8_t header[3];
    uint8_t header_len = 1;
    header[0] = (uint8_t)(0x80 | (has_subindex ? 0x40 : 0x00) | reg);
    if (has_subindex) {
        header[1] = (uint8_t)((is_extended ? 0x80 : 0x00) | (uint8_t) subaddress);
        header_len = 2;
        if (is_extended) {
            header[2] = (uint8_t)(subaddress >> 7);
            header_len = 3;
        }
    }

    /* Only use the non-blocking write if the total transfer justifies it */
    if (header_len + length < 4) {
        hal_dw1000_write(inst, header, header_len, buffer, length);
    } else {
        hal_dw1000_write_noblock(inst, header, header_len, buffer, length);
    }
    return inst->status;
}
/**
 * Read up to 8 bytes from a dw1000 register and return them as an integer.
 *
 * @param inst          Pointer to dw1000_dev_instance_t.
 * @param reg           Register file id to read from (6 bits).
 * @param subaddress    Offset within the register file (15 bits).
 * @param nbytes        Number of bytes to read (at most sizeof(uint64_t)).
 * @return the bytes read, reinterpreted as a uint64_t through the union.
 *         NOTE(review): the union punning assumes the host byte order
 *         matches the device's register byte order — confirm on
 *         big-endian targets.
 */
uint64_t
dw1000_read_reg(dw1000_dev_instance_t * inst, uint16_t reg, uint16_t subaddress, size_t nbytes)
{
    /* Scratch union: hal fills .array byte-wise, caller gets .value. */
    union _buffer{
        uint8_t array[sizeof(uint64_t)];
        uint64_t value;
    } __attribute__((__packed__,  aligned (8))) buffer;

    assert(reg <= 0x3F); // Record number is limited to 6-bits.
    assert((subaddress <= 0x7FFF) && ((subaddress + nbytes) <= 0x7FFF)); // Index and sub-addressable area are limited to 15-bits.
    assert(nbytes <= sizeof(uint64_t));

    /* Describe the SPI transaction: read, with optional (extended) sub-index. */
    dw1000_cmd_t cmd = {
        .reg = reg,
        .subindex = subaddress != 0,
        .operation = 0, //Read
        .extended = subaddress > 0x7F,
        .subaddress = subaddress
    };

    /* 1-3 octet transaction header; octet 0 carries op/subindex/reg. */
    uint8_t header[] = {
        [0] = cmd.operation << 7 | cmd.subindex << 6 | cmd.reg,
        [1] = cmd.extended << 7 | (uint8_t) (subaddress),
        [2] = (uint8_t) (subaddress >> 7)
    };

    uint8_t len = cmd.subaddress?(cmd.extended?3:2):1;
    hal_dw1000_read(inst, header, len, buffer.array, nbytes);  // result is stored in the buffer
    return buffer.value;
}
/**
 * Write up to 8 bytes of an integer value into a dw1000 register.
 *
 * @param inst          Pointer to dw1000_dev_instance_t.
 * @param reg           Register file id to write to (6 bits).
 * @param subaddress    Offset within the register file (15 bits).
 * @param val           Value whose low nbytes are written.
 * @param nbytes        Number of bytes to write (at most sizeof(uint64_t)).
 * @return void
 *         NOTE(review): the union punning assumes the host byte order
 *         matches the device's register byte order — confirm on
 *         big-endian targets.
 */
void
dw1000_write_reg(dw1000_dev_instance_t * inst, uint16_t reg, uint16_t subaddress, uint64_t val, size_t nbytes)
{
    /* Scratch union: caller's value in, hal reads the raw bytes out. */
    union _buffer{
        uint8_t array[sizeof(uint64_t)];
        uint64_t value;
    } __attribute__((__packed__)) buffer;

    buffer.value = val;
    assert(nbytes <= sizeof(uint64_t));
    assert(reg <= 0x3F); // Record number is limited to 6-bits.
    assert((subaddress <= 0x7FFF) && ((subaddress + nbytes) <= 0x7FFF)); // Index and sub-addressable area are limited to 15-bits.

    /* Describe the SPI transaction: write, with optional (extended) sub-index. */
    dw1000_cmd_t cmd = {
        .reg = reg,
        .subindex = subaddress != 0,
        .operation = 1, //Write
        .extended = subaddress > 0x7F,
        .subaddress = subaddress
    };

    /* 1-3 octet transaction header; octet 0 carries op/subindex/reg. */
    uint8_t header[] = {
        [0] = cmd.operation << 7 | cmd.subindex << 6 | cmd.reg,
        [1] = cmd.extended << 7 | (uint8_t) (subaddress),
        [2] = (uint8_t) (subaddress >> 7)
    };

    uint8_t len = cmd.subaddress?(cmd.extended?3:2):1;
    hal_dw1000_write(inst, header, len, buffer.array, nbytes);
}
/**
 * Soft-reset the dw1000 via the PMSC soft-reset register.
 *
 * Sequence matters here: the system clock is forced to XTI, PMSC clock
 * control and AON wake-up configuration are cleared, the AON array is
 * saved, and only then is the reset pulsed and released.
 *
 * @param inst  Pointer to dw1000_dev_instance_t.
 * @return void
 */
void
dw1000_softreset(dw1000_dev_instance_t * inst)
{
    // Set system clock to XTI
    dw1000_phy_sysclk_XTAL(inst);
    dw1000_write_reg(inst, PMSC_ID, PMSC_CTRL1_OFFSET, PMSC_CTRL1_PKTSEQ_DISABLE, sizeof(uint16_t)); // Disable PMSC ctrl of RF and RX clk blocks
    dw1000_write_reg(inst, AON_ID, AON_WCFG_OFFSET, 0x0, sizeof(uint16_t));   // Clear any AON auto download bits (as reset will trigger AON download)
    dw1000_write_reg(inst, AON_ID, AON_CFG0_OFFSET, 0x0, sizeof(uint8_t));    // Clear the wake-up configuration
    // Uploads always-on (AON) data array and configuration
    dw1000_write_reg(inst, AON_ID, AON_CTRL_OFFSET, 0x0, sizeof(uint8_t));    // Clear the register
    dw1000_write_reg(inst, AON_ID, AON_CTRL_OFFSET, AON_CTRL_SAVE, sizeof(uint8_t));
    dw1000_write_reg(inst, PMSC_ID, PMSC_CTRL0_SOFTRESET_OFFSET, PMSC_CTRL0_RESET_ALL, sizeof(uint8_t));// Reset HIF, TX, RX and PMSC
    // DW1000 needs a 10us sleep to let clk PLL lock after reset - the PLL will automatically lock after the reset
    os_cputime_delay_usecs(10);
    dw1000_write_reg(inst, PMSC_ID, PMSC_CTRL0_SOFTRESET_OFFSET, PMSC_CTRL0_RESET_CLEAR, sizeof(uint8_t)); // Clear reset
}
/**
 * os_dev initialization callback for a dw1000 device instance.
 *
 * Adopts the supplied os_dev as the instance (or heap-allocates one,
 * zeroed and marked selfmalloc, when none is given), copies the SPI
 * configuration from the dw1000_dev_cfg argument, and initializes the
 * instance mutex, semaphores and callback list.
 *
 * @param odev  Pointer to struct os_dev (may double as the instance).
 * @param arg   Pointer to struct dw1000_dev_cfg.
 * @return OS_OK on success
 */
int
dw1000_dev_init(struct os_dev *odev, void *arg)
{
    DIAGMSG("{\"utime\": %lu,\"msg\": \"dw1000_dev_init\"}\n",os_cputime_ticks_to_usecs(os_cputime_get32()));
    struct dw1000_dev_cfg *cfg = (struct dw1000_dev_cfg *)arg;
    dw1000_dev_instance_t *inst = (dw1000_dev_instance_t *)odev;

    if (inst == NULL) {
        /* No device structure supplied: allocate and own one ourselves. */
        inst = (dw1000_dev_instance_t *) malloc(sizeof(dw1000_dev_instance_t));
        assert(inst);
        memset(inst, 0, sizeof(dw1000_dev_instance_t));
        inst->status.selfmalloc = 1;
    }

    inst->spi_sem = cfg->spi_sem;
    inst->spi_num = cfg->spi_num;

    os_error_t rc = os_mutex_init(&inst->mutex);
    assert(rc == OS_OK);
    rc = os_sem_init(&inst->sem, 0x1);
    assert(rc == OS_OK);
    rc = os_sem_init(&inst->spi_nb_sem, 0x1);
    assert(rc == OS_OK);

    SLIST_INIT(&inst->interface_cbs);
    return OS_OK;
}
/**
 * Bring up and configure the dw1000.
 *
 * Resets the chip at a low SPI baudrate, verifies the device id (with up
 * to 3 attempts, waking the chip between attempts in case it was asleep),
 * runs phy init, raises the SPI baudrate, then programs PAN id and
 * addresses and initializes the MAC.
 *
 * @param inst  Pointer to dw1000_dev_instance_t.
 * @return OS_OK on success, OS_TIMEOUT if the device id never matches.
 */
int
dw1000_dev_config(dw1000_dev_instance_t * inst)
{
    int rc;
    int timeout = 3;

retry:
    /* Start slow: the chip must be probed below the high-rate limit
     * until the PLL is up (see comment before the baudrate raise). */
    inst->spi_settings.baudrate = MYNEWT_VAL(DW1000_DEVICE_BAUDRATE_LOW);
    hal_dw1000_reset(inst);
    rc = hal_spi_disable(inst->spi_num);
    assert(rc == 0);
    rc = hal_spi_config(inst->spi_num, &inst->spi_settings);
    assert(rc == 0);
    hal_spi_set_txrx_cb(inst->spi_num, hal_dw1000_spi_txrx_cb, (void*)inst);
    rc = hal_spi_enable(inst->spi_num);
    assert(rc == 0);

    /* Probe the device id to confirm the chip is responsive. */
    inst->device_id = dw1000_read_reg(inst, DEV_ID_ID, 0, sizeof(uint32_t));
    inst->status.initialized = (inst->device_id == DWT_DEVICE_ID);
    if (!inst->status.initialized && --timeout)
    {
        /* In case dw1000 was sleeping */
        dw1000_dev_wakeup(inst);
        goto retry;
    }

    if(!inst->status.initialized)
    {
        return OS_TIMEOUT;
    }

    inst->timestamp = (uint64_t) dw1000_read_reg(inst, SYS_TIME_ID, SYS_TIME_OFFSET, SYS_TIME_LEN);
    dw1000_phy_init(inst, NULL);

    /* It's now safe to increase the SPI baudrate > 4M */
    inst->spi_settings.baudrate = MYNEWT_VAL(DW1000_DEVICE_BAUDRATE_HIGH);
    rc = hal_spi_disable(inst->spi_num);
    assert(rc == 0);
    rc = hal_spi_config(inst->spi_num, &inst->spi_settings);
    assert(rc == 0);
    rc = hal_spi_enable(inst->spi_num);
    assert(rc == 0);

    /* Addressing: short address comes from syscfg, differing by
     * instance when two devices are built in. */
    inst->PANID = MYNEWT_VAL(PANID);
#if MYNEWT_VAL(DW1000_DEVICE_0) && !MYNEWT_VAL(DW1000_DEVICE_1)
    inst->my_short_address = MYNEWT_VAL(DEVICE_ID);
#elif MYNEWT_VAL(DW1000_DEVICE_0) && MYNEWT_VAL(DW1000_DEVICE_1)
    if (inst == hal_dw1000_inst(0))
        inst->my_short_address = MYNEWT_VAL(DEVICE_ID_0);
    else
        inst->my_short_address = MYNEWT_VAL(DEVICE_ID_1);
#endif
    /* NOTE(review): partID is presumably populated during phy init —
     * confirm; it is not set anywhere in this function. */
    inst->my_long_address = ((uint64_t) inst->device_id << 32) + inst->partID;

    dw1000_set_panid(inst,inst->PANID);
    dw1000_mac_init(inst, NULL);

    return OS_OK;
}
/**
 * Release a device instance: disable its SPI bus, then either free the
 * structure (if this module allocated it) or just mark it uninitialized.
 *
 * @param inst  Pointer to dw1000_dev_instance_t.
 * @return void
 */
void
dw1000_dev_free(dw1000_dev_instance_t * inst)
{
    assert(inst);
    hal_spi_disable(inst->spi_num);

    if (inst->status.selfmalloc) {
        free(inst);
    } else {
        inst->status.initialized = 0;
    }
}
/**
 * API to set the sleep counter to new value, this function programs the high 16-bits of the 28-bit counter.
 *
 * NOTE: this function needs to be run before dw1000_dev_configure_sleep, also the SPI freq has to be < 3MHz
 *
 * The counter must be disabled while the new value is written, and the
 * AON array upload pulsed afterwards for the change to take effect.
 *
 * @param inst   Pointer to dw1000_dev_instance_t.
 * @param count  Value of the sleep counter to program.
 *
 * @return void
 */
void
dw1000_dev_set_sleep_timer(dw1000_dev_instance_t * inst, uint16_t count)
{
    dw1000_phy_sysclk_XTAL(inst);  // Force system clock to be the 19.2 MHz XTI clock.
    dw1000_write_reg(inst, AON_ID, AON_CFG1_OFFSET, 0x0, sizeof(uint8_t));            // Disable the sleep counter
    dw1000_write_reg(inst, AON_ID, AON_CFG0_SLEEP_TIM_OFFSET, count, sizeof(uint16_t)); // Write new sleep counter
    dw1000_write_reg(inst, AON_ID, AON_CFG1_OFFSET, AON_CFG1_SLEEP_CEN | AON_CFG1_LPOSC_CAL, sizeof(uint8_t)); // Enable the sleep counter
    dw1000_write_reg(inst, AON_ID, AON_CTRL_OFFSET, AON_CTRL_UPL_CFG, sizeof(uint8_t)); // Upload array
    dw1000_write_reg(inst, AON_ID, AON_CTRL_OFFSET, 0, sizeof(uint8_t));                // Upload array
    dw1000_phy_sysclk_SEQ(inst);   // The system clock will run off the 19.2 MHz XTI clock until the PLL is calibrated and locked
}
/**
 * Configure the device for DEEP_SLEEP/SLEEP and for on-wake behavior.
 *
 * The on-wake bits determine what the chip reloads when it wakes, so TX/RX
 * settings programmed before sleeping are preserved and the device can act
 * immediately after wake-up (e.g. a tag only needs to load the TX buffer
 * and send).
 *
 * @param inst  Pointer to dw1000_dev_instance_t.
 * @return void
 */
void
dw1000_dev_configure_sleep(dw1000_dev_instance_t * inst)
{
    /* On-wake configuration: always set the L64P and LDC load bits, and
     * mirror the LLDE/LLDO/RX bits from the current device state/config. */
    uint16_t wcfg = dw1000_read_reg(inst, AON_ID, AON_WCFG_OFFSET, sizeof(uint16_t));
    wcfg |= AON_WCFG_ONW_L64P | AON_WCFG_ONW_LDC;
    wcfg = (wcfg & ~AON_WCFG_ONW_LLDE) | (inst->status.LDE_enabled ? AON_WCFG_ONW_LLDE : 0);
    wcfg = (wcfg & ~AON_WCFG_ONW_LLDO) | (inst->status.LDO_enabled ? AON_WCFG_ONW_LLDO : 0);
    wcfg = (wcfg & ~AON_WCFG_ONW_RX)   | (inst->config.wakeup_rx_enable ? AON_WCFG_ONW_RX : 0);
    dw1000_write_reg(inst, AON_ID, AON_WCFG_OFFSET, wcfg, sizeof(uint16_t));

    /* Wake sources: always SPI and the wake pin; the sleep counter and
     * sleep enable depend on the sleep_enable configuration. */
    uint16_t cfg0 = dw1000_read_reg(inst, AON_ID, AON_CFG0_OFFSET, sizeof(uint16_t));
    cfg0 |= AON_CFG0_WAKE_SPI | AON_CFG0_WAKE_PIN;

    inst->status.sleep_enabled = inst->config.sleep_enable;
    if (inst->status.sleep_enabled) {
        cfg0 |= AON_CFG0_WAKE_CNT | AON_CFG0_SLEEP_EN;
    } else {
        cfg0 &= ~(AON_CFG0_WAKE_CNT | AON_CFG0_SLEEP_EN);
    }
    dw1000_write_reg(inst, AON_ID, AON_CFG0_OFFSET, cfg0, sizeof(uint16_t));
}
/**
 * Put the device into the (deep) sleep mode previously set up via
 * dw1000_dev_configure_sleep(): uploads the always-on (AON) array and
 * triggers SAVE, then marks the instance as sleeping.
 *
 * @param inst  Pointer to dw1000_dev_instance_t.
 * @return dw1000_dev_status_t
 */
dw1000_dev_status_t
dw1000_dev_enter_sleep(dw1000_dev_instance_t * inst)
{
    // Critical region, atomic lock with mutex
    os_error_t err = os_mutex_pend(&inst->mutex, OS_WAIT_FOREVER);
    assert(err == OS_OK);

    /* Upload always on array configuration and enter sleep.
     * Consistency fix: AON_CTRL is written as a single byte everywhere
     * else in this file (see dw1000_softreset); the previous 16-bit
     * write here would spill into the adjacent AON register. */
    dw1000_write_reg(inst, AON_ID, AON_CTRL_OFFSET, 0x0, sizeof(uint8_t));
    dw1000_write_reg(inst, AON_ID, AON_CTRL_OFFSET, AON_CTRL_SAVE, sizeof(uint8_t));
    inst->status.sleeping = 1;

    // Critical region, unlock mutex
    err = os_mutex_release(&inst->mutex);
    assert(err == OS_OK);

    return inst->status;
}
/**
 * API to wakeup device from sleep to init.
 *
 * Toggles the HAL wake-up mechanism (up to 5 attempts) until the device
 * answers with the expected device ID, then clears the sleep-to-init and
 * RX-error status flags and restores the antenna delays, which are not
 * preserved across deep sleep. The whole sequence is serialized on the
 * instance mutex.
 *
 * @param inst Pointer to dw1000_dev_instance_t.
 * @return dw1000_dev_status_t  status.sleeping is set when the device still
 *                              does not report the expected ID.
 */
dw1000_dev_status_t
dw1000_dev_wakeup(dw1000_dev_instance_t * inst)
{
    int timeout=5;
    uint32_t devid;
    // Critical region, atomic lock with mutex
    os_error_t err = os_mutex_pend(&inst->mutex, OS_WAIT_FOREVER);
    assert(err == OS_OK);
    devid = dw1000_read_reg(inst, DEV_ID_ID, 0, sizeof(uint32_t));
    /* Fixed: use DWT_DEVICE_ID instead of the magic constant 0xDECA0130 so
     * that the retry loop and the status check below agree on the same
     * expected device ID. */
    while (devid != DWT_DEVICE_ID && --timeout)
    {
        hal_dw1000_wakeup(inst);
        devid = dw1000_read_reg(inst, DEV_ID_ID, 0, sizeof(uint32_t));
    }
    inst->status.sleeping = (devid != DWT_DEVICE_ID);
    /* Clear the sleep-to-init transition flag and any stale RX error flags. */
    dw1000_write_reg(inst, SYS_STATUS_ID, 0, SYS_STATUS_SLP2INIT, sizeof(uint32_t));
    dw1000_write_reg(inst, SYS_STATUS_ID, 0, SYS_STATUS_ALL_RX_ERR, sizeof(uint32_t));
    /* Antenna delays are lost in deep sleep; restore them from the instance. */
    dw1000_phy_set_rx_antennadelay(inst, inst->rx_antenna_delay);
    dw1000_phy_set_tx_antennadelay(inst, inst->tx_antenna_delay);
    // Critical region, unlock mutex
    err = os_mutex_release(&inst->mutex);
    assert(err == OS_OK);
    return inst->status;
}
/**
 * API to set the auto TX-to-sleep bit: when enabled, the device enters deep
 * sleep automatically after a frame transmission. dev_configure_sleep() must
 * have been called beforehand to set up the on-wake behaviour.
 *
 * NOTE: the IRQ line has to be low/inactive (i.e. no pending events)
 *
 * @param inst   Pointer to dw1000_dev_instance_t.
 * @param enable 1 to enter deep sleep after TX, 0 to disable the behaviour.
 * @return dw1000_dev_status_t
 */
dw1000_dev_status_t
dw1000_dev_enter_sleep_after_tx(dw1000_dev_instance_t * inst, uint8_t enable)
{
    uint32_t ctrl1;

    inst->control.sleep_after_tx = enable;

    /* Read-modify-write the ATXSLP bit in PMSC_CTRL1. */
    ctrl1 = dw1000_read_reg(inst, PMSC_ID, PMSC_CTRL1_OFFSET, sizeof(uint32_t));
    if (inst->control.sleep_after_tx)
        ctrl1 |= PMSC_CTRL1_ATXSLP;
    else
        ctrl1 &= ~PMSC_CTRL1_ATXSLP;
    dw1000_write_reg(inst, PMSC_ID, PMSC_CTRL1_OFFSET, ctrl1, sizeof(uint32_t));

    return inst->status;
}
/**
 * Sets the auto RX-to-sleep bit: when enabled, the device enters deep sleep
 * automatically after a frame reception. dev_configure_sleep() must have been
 * called beforehand to set up the on-wake behaviour.
 *
 * NOTE: the IRQ line has to be low/inactive (i.e. no pending events).
 * @param inst   Pointer to dw1000_dev_instance_t.
 * @param enable 1 to enter deep sleep after RX, 0 to disable the behaviour.
 * @return dw1000_dev_status_t
 */
dw1000_dev_status_t
dw1000_dev_enter_sleep_after_rx(dw1000_dev_instance_t * inst, uint8_t enable)
{
    uint32_t ctrl1;

    inst->control.sleep_after_rx = enable;

    /* Read-modify-write the ARXSLP bit in PMSC_CTRL1. */
    ctrl1 = dw1000_read_reg(inst, PMSC_ID, PMSC_CTRL1_OFFSET, sizeof(uint32_t));
    if (inst->control.sleep_after_rx)
        ctrl1 |= PMSC_CTRL1_ARXSLP;
    else
        ctrl1 &= ~PMSC_CTRL1_ARXSLP;
    dw1000_write_reg(inst, PMSC_ID, PMSC_CTRL1_OFFSET, ctrl1, sizeof(uint32_t));

    return inst->status;
}
|
/**
* Register is the basic unit of ASM form. Each assignment targets a new, unique register.
* Use the factory method {@link #ofType(Supplier)} to obtain instances of this class.
*/
public final class Register implements ExprNode {
private final String name;
private final Supplier<StaticType> type;
private Register(Supplier<StaticType> type) {
this.name = UniqueNames.next();
this.type = type;
}
@NotNull
@Contract(value = "_ -> new", pure = true)
public static Register ofType(Supplier<StaticType> type) {
return new Register(type);
}
@Override
public <T, X extends Throwable> T accept(ExpressionVisitor<T, X> visitor) throws X {
return visitor.visitRegister(this);
}
@Override
public String toString() {
return name;
}
public String name() {
return name;
}
@Override
public boolean isPure() {
return true;
}
@Override
public StaticType typeInfo() {
return type.get();
}
} |
package com.google.android.gms.internal.ads;
import android.view.View;
public final class tz implements b10 {

    // Wrapped aa0 instance; may be null, in which case this wrapper is "empty".
    private aa0 zza;

    public tz(aa0 wrapped) {
        this.zza = wrapped;
    }

    // True when no aa0 instance is attached.
    @Override // com.google.android.gms.internal.ads.b10
    public final boolean a() {
        return this.zza == null;
    }

    // This wrapper is its own b10 representation.
    @Override // com.google.android.gms.internal.ads.b10
    public final b10 b() {
        return this;
    }

    // Returns the wrapped instance's view, or null when empty.
    @Override // com.google.android.gms.internal.ads.b10
    public final View c() {
        aa0 target = this.zza;
        return target != null ? target.I1() : null;
    }
}
|
package org.greenplum.pxf.plugins.hive;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.greenplum.pxf.api.filter.ColumnIndexOperandNode;
import org.greenplum.pxf.api.filter.Node;
import org.greenplum.pxf.api.filter.Operator;
import org.greenplum.pxf.api.filter.OperatorNode;
import org.greenplum.pxf.api.filter.SupportedOperatorPruner;
import org.greenplum.pxf.api.utilities.ColumnDescriptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
/**
 * Prune the tree based on partition keys and whether or not pushing down
 * of integrals is enabled on the MetaStore.
 */
public class HivePartitionPruner extends SupportedOperatorPruner {

    private static final Logger LOG = LoggerFactory.getLogger(HivePartitionPruner.class);

    private final boolean canPushDownIntegral;
    private final Map<String, String> partitionKeys;
    private final List<ColumnDescriptor> columnDescriptors;

    public HivePartitionPruner(EnumSet<Operator> supportedOperators,
                               boolean canPushDownIntegral,
                               Map<String, String> partitionKeys,
                               List<ColumnDescriptor> columnDescriptors) {
        super(supportedOperators);
        this.canPushDownIntegral = canPushDownIntegral;
        this.partitionKeys = partitionKeys;
        this.columnDescriptors = columnDescriptors;
    }

    @Override
    public Node visit(Node node, final int level) {
        // Drop comparisons that cannot be served by partition pruning;
        // everything else is delegated to the superclass.
        boolean prune = node instanceof OperatorNode
                && !canOperatorBePushedDown((OperatorNode) node);
        return prune ? null : super.visit(node, level);
    }

    /**
     * Decides whether a predicate may be pushed down. Logical operators are
     * always kept; a comparison is kept only when it targets a partition
     * column whose type is string, or an integral type when integral
     * push-down is enabled and the comparison is EQUALS / NOT_EQUALS.
     * <p>
     * Say P is a conforming predicate based on a partition column and a
     * supported comparison operator, and NP is a non-conforming predicate
     * based on either a non-partition column or an unsupported operator.
     * The following rules apply during filter pruning:
     * <pre>
     *   P  op P  -> P op P  (op can be any logical operator)
     *   P  AND NP -> P
     *   P  OR  NP -> null
     *   NP op NP -> null
     * </pre>
     *
     * @param operatorNode the operator node under inspection
     * @return true when the predicate is compatible with push-down
     */
    private boolean canOperatorBePushedDown(OperatorNode operatorNode) {
        Operator operator = operatorNode.getOperator();
        if (operator.isLogical()) {
            // Skip AND / OR
            return true;
        }

        ColumnIndexOperandNode columnOperand = operatorNode.getColumnIndexOperand();
        String columnName = columnDescriptors.get(columnOperand.index()).columnName();
        String partitionType = partitionKeys.get(columnName);

        boolean integralComparison = canPushDownIntegral
                && (operator == Operator.EQUALS || operator == Operator.NOT_EQUALS);
        boolean pushable = partitionType != null
                && (partitionType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)
                    || (integralComparison && serdeConstants.IntegralTypes.contains(partitionType)));

        if (!pushable) {
            LOG.trace("Filter is on a non-partition column or on a partition column that is not supported for push-down, ignore this filter for column: {}", columnName);
        }
        return pushable;
    }
}
|
def parse_object_array(hf, heap, reader):
    """Parse an object-array heap-dump record and defer it for later resolution.

    Reads, in order: object id, stack-trace serial, element count, class id,
    then ``count`` element ids, and appends ``(objid, strace, clsid, elems)``
    to ``heap._deferred_objarrays``. The heap-file handle ``hf`` is unused.
    """
    del hf  # deliberately unused
    obj_id = reader.id()
    stack_trace = reader.u4()
    count = reader.u4()
    class_id = reader.id()
    element_ids = tuple(reader.id() for _ in range(count))
    heap._deferred_objarrays.append((obj_id, stack_trace, class_id, element_ids))
// Returns the length of the given scan record array. We have to calculate this
// based on the maximum possible data length and the TLV data: each field starts
// with a length octet, and a zero length octet terminates the record. See TODO
// above |kScanRecordLength|.
size_t GetScanRecordLength(uint8_t* bytes) {
  size_t offset = 0;
  while (offset < kScanRecordLength) {
    const size_t field_len = bytes[offset];
    CHECK(offset + field_len < kScanRecordLength);
    if (field_len == 0)
      return offset;
    offset += field_len + 1;
  }
  return kScanRecordLength;
}
import { cold } from "jest-marbles";
import { runEffect, withEffect } from "../../util/testing";
import { MetamaskStore } from "./metamask.store";
import { MetamaskQuery } from "./metamask.query";
// Shared store/query fixtures, re-created before every test case.
let store: MetamaskStore;
let query: MetamaskQuery;

beforeEach(() => {
  store = new MetamaskStore({});
  query = new MetamaskQuery(store);
});

// Synchronous snapshot accessor: reflects the latest store value.
it("#isAvailable", () => {
  expect(query.isAvailable).toEqual(undefined);
  store.update({ isAvailable: true });
  expect(query.isAvailable).toEqual(true);
  store.update({ isAvailable: false });
  expect(query.isAvailable).toEqual(false);
});

it("#isEnabled", () => {
  expect(query.isEnabled).toEqual(undefined);
  store.update({ isEnabled: true });
  expect(query.isEnabled).toEqual(true);
  store.update({ isEnabled: false });
  expect(query.isEnabled).toEqual(false);
});

// Stream accessor: the marbles show an initial `undefined` emission, and the
// repeated `f` update produces no extra emission (apparently deduplicated).
it("#isEnabled$", () => {
  const VALUES = {
    t: true,
    f: false,
    u: undefined
  };
  // Fixed: local was misleadingly named `updateAddress$` (copy-paste from
  // another spec) even though it drives `isEnabled` updates.
  const updateIsEnabled$ = runEffect("t-f-f-t|", VALUES, isEnabled => store.update({ isEnabled }));
  const expected$ = cold("u-t-f-t|", VALUES);
  expect(withEffect(updateIsEnabled$, query.isEnabled$)).toBeObservable(expected$);
});
|
<filename>src/main/java/net/optifine/config/GlVersion.java
package net.optifine.config;
/**
 * An OpenGL version triple (major.minor.release) with an optional suffix,
 * convertible to a compact integer form for comparisons.
 */
public class GlVersion
{
    private final int major;
    private final int minor;
    private final int release;
    private final String suffix;

    public GlVersion(int major, int minor)
    {
        this(major, minor, 0);
    }

    public GlVersion(int major, int minor, int release)
    {
        this(major, minor, release, (String)null);
    }

    public GlVersion(int major, int minor, int release, String suffix)
    {
        this.major = major;
        this.minor = minor;
        this.release = release;
        this.suffix = suffix;
    }

    public int getMajor()
    {
        return this.major;
    }

    public int getMinor()
    {
        return this.minor;
    }

    public int getRelease()
    {
        return this.release;
    }

    /**
     * Encodes the version as an integer. With a single-digit minor the result
     * is major*100 + minor*10 + release (release capped at 9); with a
     * multi-digit minor the release is dropped and major*100 + minor is used.
     */
    public int toInt()
    {
        if (this.minor > 9)
        {
            return this.major * 100 + this.minor;
        }
        int cappedRelease = Math.min(this.release, 9);
        return this.major * 100 + this.minor * 10 + cappedRelease;
    }

    /** Renders "major.minor.release" with the suffix appended when present. */
    public String toString()
    {
        String base = "" + this.major + "." + this.minor + "." + this.release;
        return this.suffix == null ? base : base + this.suffix;
    }
}
|
/// <summary>
/// Disables and removes the scene analysis effect, and unregisters the event handler for the SceneAnalyzed event of the effect
/// </summary>
/// <returns>Task that completes once the effect has been removed from the capture pipeline</returns>
task<void> MainPage::CleanSceneAnalysisEffectAsync()
{
    // Disable the analyzer before tearing down.
    _sceneAnalysisEffect->HighDynamicRangeAnalyzer->Enabled = false;
    // Unregister our handler using the token saved at registration time.
    _sceneAnalysisEffect->SceneAnalyzed -= _sceneAnalyzedEventToken;
    // Remove the effect from the MediaCapture pipeline, then drop our reference
    // so the effect can be garbage collected.
    return create_task(_mediaCapture->RemoveEffectAsync(_sceneAnalysisEffect))
        .then([this]()
    {
        WriteLine("SceneAnalysis effect removed from pipeline");
        _sceneAnalysisEffect = nullptr;
    });
}
def collect_protocol_samples(self, n_protocol_samples, protocol_length, marginal="configuration"):
    """Run paired forward/reverse protocols and collect shadow work and trajectories.

    For each of ``n_protocol_samples`` rounds, a forward protocol is started
    from an equilibrium sample, then a reverse protocol is started from the
    forward endpoint. ``marginal`` selects the reverse initial velocity:
    "configuration" resamples v given x, "full" reuses the final velocity.

    Returns four np.arrays: forward works, reverse works, forward (x, v)
    trajectories, reverse (x, v) trajectories.
    """
    forward_works, reverse_works = [], []
    forward_trajs, reverse_trajs = [], []

    for _ in tqdm(range(n_protocol_samples)):
        # Forward protocol: start from an equilibrium configuration sample.
        x_start = self.sample_x_from_equilibrium()
        v_start = self.sample_v_given_x(x_start)
        xs, vs, Q, W_shads = self.integrator(x0=x_start, v0=v_start, n_steps=protocol_length)
        forward_works.append(W_shads)
        forward_trajs.append(np.vstack([xs, vs]).T)

        # Reverse protocol: restart from the forward endpoint.
        x_end = xs[-1]
        if marginal == "configuration":
            v_end = self.sample_v_given_x(x_end)
        elif marginal == "full":
            v_end = vs[-1]
        else:
            raise NotImplementedError("`marginal` must be either 'configuration' or 'full'")
        xs, vs, Q, W_shads = self.integrator(x0=x_end, v0=v_end, n_steps=protocol_length)
        reverse_works.append(W_shads)
        reverse_trajs.append(np.vstack([xs, vs]).T)

    return np.array(forward_works), np.array(reverse_works), np.array(forward_trajs), np.array(reverse_trajs)
// init initializes the command line flags.
func init() {
core.Flags.Var(&flags.pretty, "pretty", "[-pretty]", "Produce output in human readable format")
var def string
if flags.rotate.interval == 0 {
def = " (default do not rotate)"
} else if flags.rotate.interval < time.Second {
flags.rotate.interval = time.Second
}
flags.rotate.Set(flags.rotate.interval.String())
core.Flags.Var(&flags.rotate, "rotate", "[-rotate <interval>]",
"Rotate output file at `interval`, specified in Go time.Duration string format"+def)
} |
import { ImageLoadState } from 'office-ui-fabric-react';
import { IPersonaCoinProps, IPersonaCoinViewProps } from './PersonaCoin.types';
import { BaseState } from '../../utilities/BaseState';
/** View-state slice owned by PersonaCoinState: photo-load status plus its change handler. */
export type IPersonaCoinState = Pick<IPersonaCoinViewProps, 'isPictureLoaded' | 'onPhotoLoadingStateChange'>;

/**
 * Component state for PersonaCoin. Tracks whether the persona photo has
 * finished loading and forwards image load-state changes to the consumer.
 */
export class PersonaCoinState extends BaseState<IPersonaCoinProps, IPersonaCoinViewProps, IPersonaCoinState> {
  constructor(props: PersonaCoinState['props']) {
    super(props, {
      // `isPictureLoaded` can be controlled externally via props.
      controlledProps: ['isPictureLoaded']
    });

    this.state = {
      isPictureLoaded: false,
      onPhotoLoadingStateChange: this._onPhotoLoadingStateChange
    };
  }

  // Invokes the consumer's callback (if provided), then records whether the
  // photo has fully loaded based on the new image load state.
  private _onPhotoLoadingStateChange = (newImageLoadState: ImageLoadState): void => {
    if (this.props.onPhotoLoadingStateChange) {
      this.props.onPhotoLoadingStateChange(newImageLoadState);
    }

    this.setState({
      isPictureLoaded: newImageLoadState === ImageLoadState.loaded
    });
  };
}
|
# Simulates 10 slots: reads a count line (consumed; iteration uses the command
# string itself) and a command string where 'L' occupies the leftmost free
# slot, 'R' occupies the rightmost free slot, and a digit d frees slot d.
# Prints the final occupancy as ten 0/1 characters (no trailing newline).
n1 = int(input())
seats = [0] * 10
commands = input()
for ch in commands:
    if ch == 'L':
        # Occupy the leftmost free slot. (Fixed: original compared with
        # `is 0` — identity comparison with an int literal — instead of `== 0`.)
        for j in range(len(seats)):
            if seats[j] == 0:
                seats[j] = 1
                break
    elif ch == 'R':
        # Occupy the rightmost free slot. (Fixed: `st[i] is 'R'` identity
        # comparison with a string literal replaced by `==` above.)
        for j in reversed(range(len(seats))):
            if seats[j] == 0:
                seats[j] = 1
                break
    elif ch.isdigit():
        # Free the indicated slot. (Fixed: original guard
        # `int(st[i]) >= 0 or int(st[i]) < 10` was always true —
        # `or` where `and` was intended; a digit check is equivalent
        # and also robust to unexpected characters.)
        seats[int(ch)] = 0
for s in seats:
    print(s, end="")
|
Nonlinear absorption in dielectric metamaterials
We solve the nonlinear Maxwell equations in an InP-based dielectric metamaterial, considering both two-photon absorption and photo-induced free-carrier absorption. We obtain the intensity-dependent reflection, absorption, and effective permittivity and permeability of the metamaterial. Our results show that nonlinear absorption dampens both the electric and magnetic Mie resonance, although the magnetic resonance is more affected because it occurs at longer wavelengths where the free-carrier absorption cross section is larger. Owing to field concentration in the metamaterial at resonance, the threshold intensity for nonlinear absorption is smaller by a factor of about 30 compared to a homogeneous layer of the same thickness. Our results have implications on the use of dielectric metamaterials for nonlinear applications such as frequency conversion and optical limiting.
To date, the research on nonlinear phenomena in dielectric metamaterials has focused primarily on the experimental aspects , with relatively few examples of theoretical studies. A few examples employ the recently-developed linear generalized source method for nonlinear materials , which calculates the diffraction of one-and two-dimensional gratings accounting for nonlinear polarization sources. Although these models provide important insights, they do not support threedimensional structures and do not represent full solutions of the nonlinear Maxwell equations. Also, existing models do not account for the nontrivial frequency dependence of the nonlinear parameters, such as the two photon absorption (TPA) coefficient and the free-carrier absorption (FCA) cross section .
In this work, we develop a full-wave model to solve the nonlinear Maxwell equations in a structured, three-dimensional metamaterial accounting for both TPA and photo-induced FCA. The nonlinear absorption coefficients are obtained from full-band structure calculations . We apply the model to study the optical properties and effective parameters of a representative indium phosphide (InP)-based dielectric metamaterial operating in the near infrared spectral band. As expected, we find that nonlinear absorption at high intensities leads to dampening of the electric and magnetic Mie resonances. For continuous wave illumination, the onset of nonlinear absorption occurs at intensities of 1 MW/cm 2 , while the Mie resonances are almost completely diminished for intensities approaching 5 MW/cm 2 . In addition, we find several unexpected results. First, the nonlinear absorption at the magnetic resonance is larger than at the electric resonance, which is explained by the wavelengthdependent FCA. Second, assuming FCA is independent of wavelength, the absorption at the two resonances is found to be nearly equal, despite the electric field being heavily localized at the electric resonance and more uniformly distributed at the magnetic resonance. Third, owing to the enhancement of the electric field at resonance, we find that the intensity threshold of nonlinear absorption in the metamaterial is nearly 30 times lower compared to a homogeneous material of similar thickness.
To clearly illustrate the effects of nonlinearity on Mie resonances, we consider a large index-contrast metamaterial consisting of 360 nm InP spheres with large refractive index (3.3 at 1 µm) in air medium, arranged in a square lattice with a periodicity of 800 nm, as shown in Fig. 1(a). InP is chosen for its transparency in the near-infrared band of interest (0.9-1.3 µm). Note that the conclusions drawn in this article are valid even if the air medium is replaced by a polymer and the metamaterial layer is placed on a low index substrate such as silica. The size and periodicity of the InP spheres are optimized to position the electric and magnetic Mie resonances, identified as narrowband peaks in the reflection spectrum, in the band of interest as shown in Fig. 1(b).
The origin of nonlinear absorption in InP can be understood from the band structure, shown in Fig. 2. The valence bands (VBs) consist of heavy-hole (HH), light-hole (LH), and spin-orbit (SO) bands. The conduction band (CB) is separated from the HH band by the band gap. In the absence of light, states in the VB are filled with electrons and the CB states are empty. Electrons in the VB can absorb photons with energy larger than the band gap and enter the CB. Since the band gap of InP (1.45 eV) is larger than the photon energies in the band of interest (0.95-1.4 eV), at low intensities photons transmit through InP without absorption. However, when the incident intensity is high, the probability for valence electrons to absorb two photons (shown as two stacked vertical arrows) is high, resulting in reduced transmission. In addition, this TPA is followed by FCA in which the holes
left behind in the HH band can be filled by one-photon absorption by electrons in the LH and SO bands, shown by the colored arrows in Fig. 2. Because the strength of both TPA and FCA depend on intensity, they are referred to as nonlinear absorption processes. Figure 3 shows the previously calculated values of the TPA coefficient β and the FCA cross section σ for InP . The value of β is relatively constant with wavelength, which is typical for wide-bandgap materials, while σ increases by an order of magnitude over the band. The FCA increases with increasing wavelength because the corresponding photon energy decreases, and the energy-momentum conservation condition for FCA (colored lines in Fig. 2) is satisfied only near the center of the Brillouin zone, where a larger number of holes are present.
We will now incorporate these nonlinear coefficients into Maxwell's equations. The nonlinear Maxwell equation for the electric field $\mathbf{E}(\mathbf{r}, t)$ is Eq. (1), where $n$ is the refractive index, $c$ is the speed of light, $N$ and $\sigma$ are the free-carrier concentration and absorption cross section, $\epsilon_0$ is the free-space permittivity, and $\mathbf{P}_{NL}(\mathbf{r}, t)$ is the nonlinear polarization. Taking the Fourier transform of Eq. (1), assuming an $\exp(-i\omega t)$ time dependence, and using the relation (2) for the third-order nonlinear polarization, where $\chi^{(3)}$ is the nonlinear susceptibility, we obtain Eq. (3). Relating the imaginary part of $\chi^{(3)}$ to the two-photon absorption coefficient $\beta$ as $\mathrm{Im}(\chi^{(3)}) = \frac{2 n^2 c^2 \epsilon_0}{3\omega}\,\beta$, and neglecting the real part of $\chi^{(3)}$, Eq. (3) can be rewritten as Eq. (4). The free carrier concentration is given by the continuity equation for free electrons, Eq. (5), where $I = \frac{1}{2} n c \epsilon_0 |\mathbf{E}(\mathbf{r}, \omega)|^2$ is the intensity, $\hbar\omega$ is the photon energy and $\tau$ is the photo-carrier relaxation time, which we assume is a constant equal to 1 µs. The first term in Eq. (5) describes free-carrier generation via TPA, and the second term describes free carrier recombination. For continuous-wave illumination, the free-carrier concentration will reach steady state conditions ($dN/dt = 0$), giving Eq. (6). Substituting Eq. (6) into Eq. (4), we obtain the final form of the nonlinear Maxwell equation, Eq. (7). We solve Eq. (7) using the full-wave finite-element frequency domain solver in COMSOL. This was accomplished by assigning the two nonlinear terms in Eq. (7) to the imaginary part of $n^2$. Before solving Eq. (7) in the metamaterial in Fig. 1, we apply it to a homogeneous nonlinear medium and compare the results with the solution to the well-known rate equation, Eq. (8). The transmitted intensity, as a function of thickness for InP at a wavelength of 1 µm for various intensities, calculated by solving Eq. (7) and (8), respectively, are shown by dots and solid lines in Fig. 4.
The two calculations are in excellent agreement, thus validating our full-wave nonlinear model. In this validation the index of InP is set equal to 1 to avoid interference effects, which are not included in Eq. (8). [Figure 5 caption: Wavelength dependence of the reflection (a) and absorption (b) for a 0.8 µm square array of 360-nm diameter InP spheres for different incident intensities.]
We now apply our validated nonlinear full-wave model to understand the role of nonlinear absorption in the dielectric metamaterial shown in Fig. 1(a). First, we studied the reflection and absorption for different incident intensities, shown in Fig. 5. For all intensities, the reflection spectrum contains narrowband peaks near 1.2 and 0.95 µm, corresponding to the magnetic and electric dipole Mie resonances, respectively. For a low intensity of 1 W/cm 2 , the nonlinear processes are negligible, resulting in low absorption and nearly 100% reflection at the two resonances. As the intensity increases, the absorption at the resonances increases and the reflection decreases. Electric field distributions, normalized to the incident field, at the magnetic resonance (a) and electric resonance (b) for an incident intensity of 1 MW/cm 2 . (c) Absorption of the metamaterial for an intensity of 1 MW/cm 2 , assuming a constant free carrier absorption cross section.
We note that for a given intensity, the absorption is larger at the magnetic resonance (1.2 µm) than at the electric resonance (0.95 µm). This is a surprising result considering the electric field distributions at the resonances, shown in Fig. 6(a) and (b) normalized to the incident electric field for an intensity of 1 MW/cm 2 . At the magnetic dipole resonance, the electric field is ring shaped and relatively uniform, while the electric field at the electric dipole resonance is highly concentrated at the center of the sphere. Note the field concentration outside the sphere arises from the boundary condition on the normal component electric field, which is discontinuous by the ratio of the dielectric constants of the sphere and free space . Thus, based on the field distributions, one might expect the absorption at the electric resonance to be larger due to the larger field concentration. However, we find more absorption at the magnetic resonance. We attribute this to the FCA cross section being about 5 times larger at the magnetic resonance than at the electric resonance . To validate this claim, we recalculated the spectral absorption for 1 MW/cm 2 intensity, assuming a constant FCA cross section. The results, shown in Fig. 6(c), show that the absorption is approximately equal at the two resonances, confirming that the wavelength-dependent FCA cross section is responsible for the larger absorption at the magnetic resonance. The fact that the absorption is equal at the two resonances for constant FCA is also counterintuitive, since more absorption is expected at the electric resonance because of the larger field concentration.
Since the electric field is enhanced at both resonances, we expect more absorption per unit length in the metamaterial than in a homogeneous material. To illustrate this, we calculated the reflection and absorption of a 360 nm-thick slab of InP, equal in thickness to the InP sphere metamaterial in Fig. 1(a). We see from Fig. 7 that 100 MW/cm 2 of intensity is needed to obtain 40% absorption near 1.2 µm in the homogenous layer, whereas the metamaterial obtains a similar level of absorption for 3 MW/cm 2 . Thus, the homogenous layer requires much higher intensities to achieve absorption values comparable to the metamaterial. This factor of 30 higher intensity is consistent with the five-fold field enhancement at the magnetic resonance shown in Fig. 6(a).
It is also important to understand the impact of nonlinear absorption on the effective permittivity ( ) and permeability (µ) of the metamaterial, as and µ are often used to obtain unique properties such as negative refraction and perfect reflection . The calculated real parts of and µ, shown in Fig. 8, were obtained using S-parameter inversion, assuming a layer thickness of 1.24 µm. At low intensities, shown as the blue line in Fig. 8(a), we see a strong resonance in µ near 1.2 µm, which arises from the magnetic resonance. The weaker resonance near 0.95 µm is the anti-resonance associated with the strong electric resonance at that wavelength, clearly seen in Fig. 8(b) for the permittivity. We also find a strong anti-resonance in at 1.1 µm, which arises from the magnetic resonance at 1.2 µm. These anti-resonances are an artifact of S-parameter retrieval that arises from approximating a Bloch wave by a plane wave . In the resonance regions, either or µ is negative, resulting in single-negative regions and high reflectivity, as shown in Fig. 5(a). As the intensity increases to 1 MW/cm 2 and 3 MW/cm 2 , we find that the magnetic resonance, near 1.2 µm in Fig. 8(a), begins to dampen while the electric reso- nance, near 1 µm in Fig. 8(b), is largely unchanged. Only when the intensity exceeds 3 MW/cm 2 does the electric resonance begin to dampen. For 5 MW/cm 2 , both resonances are completely dampened. The dampening of the resonances with increasing intensity is also consistent with the decreasing reflection in Fig. 5(a). As the resonance in the real part of and µ broadens, the corresponding imaginary parts of and µ (not shown) also broaden, as per the Kramer-Kronig relationship, which results in broader-band absorption with increasing intensity, as shown in Fig. 5(b).
In summary, we developed a full-wave model to study the effects of two-photon absorption and photo-induced free-carrier absorption on the effective parameters and optical properties of a structured dielectric metamaterial operating in the near infrared spectral band. As expected, we find that nonlinear absorption leads to dampening of the electric and magnetic Mie resonances at high intensities, with an onset around 1 MW/cm 2 for continuous wave illumination. The resonances are almost completely dampened for intensities around 5 MW/cm 2 . Surprisingly, we find that the nonlinear absorption at the magnetic resonance is larger than at the electric resonance, despite the electric field being more concentrated at the electric resonance. We find this is because the free-carrier absorption cross section is considerably larger at the longer wavelengths near the magnetic resonance. We also find that the metamaterial provides absorption comparable to a homogeneous layer of the same thickness at approximately 30 times less intensity. The lower threshold intensity and smaller footprint for nonlinear absorption can be exploited in applications involving optical limiting, frequency conversion, the Kerr effect, and four-wave mixing. |
#ifndef GMAIL_UTILS_HPP_
#define GMAIL_UTILS_HPP_
#include <string>
// Utilities for GmailArchiver
namespace GmailArchiver {
namespace Utils {
namespace Crypto {
// In-place encryption/decryption of a string buffer. Both are noexcept, so
// implementations must not throw.
void encrypt ( std::string& str ) noexcept;
void decrypt ( std::string& str ) noexcept;
} // Crypto
// Utils functions
// Opens `uri` with the system default browser; returns false on failure.
bool openBrowser( const std::string& uri ) noexcept;
// Validates `date` against the archiver's expected date format (the exact
// format is defined by the implementation — confirm in the .cpp).
bool isValidDateFormat( const std::string& date ) noexcept;
} // Utils
} // GmailArchiver
#endif // GMAIL_UTILS_HPP_
package com.xinran.qxviewslib.customview;
import android.content.Context;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.AttributeSet;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.LinearLayout;
import com.xinran.qxviewslib.R;
/**
 * Title-bar widget containing an edit box and an icon that is shown only
 * while the box contains text.
 *
 * Created by qixinh on 16/7/4.
 */
public class CustomTitleBarView extends LinearLayout{

    private EditText edit;
    private ImageView img;

    public CustomTitleBarView(Context context) {
        this(context, null);
    }

    public CustomTitleBarView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public CustomTitleBarView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context);
    }

    // Inflates the layout and wires the text watcher that toggles the icon.
    private void init(Context context) {
        View.inflate(context, R.layout.view_customtitlebar, this);
        edit = (EditText) findViewById(R.id.custom_edit);
        img = (ImageView) findViewById(R.id.custom_img);

        edit.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
                // no-op
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
                // Icon is visible only while there is input.
                img.setVisibility(s.length() > 0 ? VISIBLE : GONE);
            }

            @Override
            public void afterTextChanged(Editable s) {
                // no-op
            }
        });
    }
}
|
/**********************************************************************
 * Description: only used by transport layer.
                Queues the transmit node on the wait-ack list and
                initialises its timeout / resend budgets from the
                ECGP_TRANS_NOACK_* defaults.
 * Input:       unit - transmit node to register for ack waiting
 * Return:      none
 **********************************************************************/
static void transport_setAckWaiting(TransTx_node_t* unit)
{
    /* Queue first, then initialise the retransmission bookkeeping. */
    ECGP_listAddNode(&waitAckList, unit->node);
    unit->timeout = ECGP_TRANS_NOACK_TIMEOUT;  /* no-ack timeout budget */
    unit->resend = ECGP_TRANS_NOACK_RESEND;    /* remaining resend attempts */
}
/**
* Attempts to add a callback for a resource.
*
* @param uri the {@link android.net.Uri} of the resource for which a callback is
* desired.
* @param callback the callback to register.
* @return {@code true} if the callback is guaranteed to be invoked with
* a non-null result (as long as there is no error and the
* callback is not canceled), or {@code false} if the callback
* cannot be registered with this task because the result for
* the desired {@link android.net.Uri} has already been discarded due to
* low-memory.
* @throws NullPointerException if either argument is {@code null}
*/
public boolean addCallback(Uri uri, ItemLoadedCallback callback) {
if (Log.isLoggable(TAG, Log.DEBUG)) {
Log.d(TAG, "Adding image callback " + callback);
}
if (uri == null) {
throw new NullPointerException("uri is null");
}
if (callback == null) {
throw new NullPointerException("callback is null");
}
Set<ItemLoadedCallback> callbacks = mCallbacks.get(uri);
if (callbacks == null) {
callbacks = new HashSet<ItemLoadedCallback>(4);
mCallbacks.put(uri, callbacks);
}
callbacks.add(callback);
return true;
} |
import { css } from 'styled-components';
import { SuomifiTheme } from '../../../../theme';
import { font } from '../../../../theme/reset';
export const baseStyles = (theme: SuomifiTheme) => css`
&.fi-select-item-list {
${font(theme)('bodyText')}
list-style-type: none;
box-sizing: content-box;
max-height: 265px;
background-color: ${theme.colors.whiteBase};
border-width: 0 1px 1px 1px;
border-style: solid;
border-color: ${theme.colors.depthDark3};
border-bottom-left-radius: ${theme.radius.basic};
border-bottom-right-radius: ${theme.radius.basic};
margin: 0;
padding: 4px 0 0 0;
&:focus {
outline: none;
}
& .fi-item-list_top-spacer {
height: 4px;
width: 100%;
}
}
& .fi-select-item-list_content_wrapper {
display: block;
width: 100%;
max-height: inherit;
overflow-y: auto;
overflow-x: hidden;
}
`;
|
// CleanupPod cleans up a pod that was started with the container deployer.
func CleanupPod(ctx context.Context, hostClient client.Client, pod *corev1.Pod, keepPod bool) error {
controllerutil.RemoveFinalizer(pod, container.ContainerDeployerFinalizer)
if err := hostClient.Update(ctx, pod); err != nil {
err = fmt.Errorf("unable to remove finalizer from pod: %w", err)
return lserrors.NewWrappedError(err,
"CleanupPod", "RemoveFinalizer", err.Error())
}
if keepPod {
return nil
}
if err := hostClient.Delete(ctx, pod); err != nil {
err = fmt.Errorf("unable to delete pod: %w", err)
return lserrors.NewWrappedError(err,
"CleanupPod", "DeletePod", err.Error())
}
return nil
} |
package main
import (
"log"
"os"
"path/filepath"
"github.com/hsmtkk/azure-blob-upload/upload"
"github.com/spf13/cobra"
"go.uber.org/zap"
)
// command wires the CLI entry point; exactly two positional arguments are
// required: the source directory and the target blob container.
var command = &cobra.Command{
	Use:  "azure-blob-upload srcDirectory container",
	Args: cobra.ExactArgs(2),
	Run:  run,
}

func init() {
	// No flags to register; positional arguments only.
}

// main runs the root command and aborts the process on any error.
func main() {
	err := command.Execute()
	if err != nil {
		log.Fatal(err)
	}
}
// run uploads every entry of the source directory to the given Azure blob
// container. Credentials are taken from the ACCOUNT_NAME and ACCOUNT_KEY
// environment variables. Individual upload failures are logged and the
// remaining entries are still processed (best effort).
func run(cmd *cobra.Command, args []string) {
	srcDir, containerName := args[0], args[1]
	name := requiredEnv("ACCOUNT_NAME")
	key := requiredEnv("ACCOUNT_KEY")

	logger, err := zap.NewDevelopment()
	if err != nil {
		log.Fatalf("failed to init logger; %s", err)
	}
	defer logger.Sync()
	sugar := logger.Sugar()

	entries, err := os.ReadDir(srcDir)
	if err != nil {
		log.Fatalf("failed to read directory; %s; %s", srcDir, err)
	}

	uploader := upload.NewUploader(sugar, name, key, containerName)
	for _, entry := range entries {
		fileName := entry.Name()
		sugar.Infow("start", "name", fileName)
		if uploadErr := uploader.Upload(filepath.Join(srcDir, fileName)); uploadErr != nil {
			sugar.Errorw("failed to upload file", "name", fileName)
		}
		sugar.Infow("end", "name", fileName)
	}
}
func requiredEnv(key string) string {
val := os.Getenv(key)
if val == "" {
log.Fatalf("you must define %s environment variable", key)
}
return val
}
|
/**
* Dex entry data access object
*
* @author Eduardo Naveda
* @since 0.0.1
*/
public class DexEntry extends Access {
public static enum Version {
XY(24); // Todo add all versions
private Integer version;
private Version(Integer version) {
this.version = version;
}
@Override
public String toString() {
return version.toString();
}
}
private Integer pokemonId;
private Version version;
private DexEntry(Context context, Integer pokemonId, Version version) {
super(context);
this.pokemonId = pokemonId;
this.version = version;
}
/**
* Creates a dex entry.
*
* @param context The application context
* @param pokemonId The Pokémon id
* @return A dex entry object
*/
public static DexEntry create(Context context, Integer pokemonId) {
database = DexDatabase.getInstance(context).getReadableDatabase();
Version version = Version.XY; // Todo from preferences
return new DexEntry(context, pokemonId, version);
}
/**
* Creates a dex entry.
*
* @param context The application context
* @param pokemonId The Pokémon id
* @param version The game version
* @return A dex entry object
*/
public static DexEntry create(Context context, Integer pokemonId, Version version) {
database = DexDatabase.getInstance(context).getReadableDatabase();
return new DexEntry(context, pokemonId, version);
}
/**
* Get the Pokémon information for this dex entry.
*
* @return The {@link com.nav.dexedd.model.Pokemon} for the dex entry
*/
public Pokemon getPokemon() {
String[] args = {Dex.DexType.NATIONAL_DEX.toString(), version.toString(), pokemonId.toString()};
String query = getContext().getString(R.string.get_basic_info);
Cursor cursor = database.rawQuery(query, args);
cursor.moveToFirst();
Pokemon pokemon = new Pokemon();
pokemon.setId(cursor.getInt(0));
pokemon.setSpeciesId(cursor.getInt(1));
pokemon.setDexNumber(cursor.getInt(2));
pokemon.setName(cursor.getString(3));
pokemon.setGenus(cursor.getString(4));
pokemon.setFlavorText(PokemonTextUtil.cleanDexText(cursor.getString(5)));
pokemon.setPrimaryType(new Type(cursor.getInt(6)));
if (cursor.isNull(7)) {
pokemon.setSecondaryType(new Type(cursor.getInt(7)));
}
pokemon.setAbilities(getAbilities(pokemon.getId()));
pokemon.setCatchRate(cursor.getInt(8));
pokemon.setEggGroups(getEggGroups(pokemon.getSpeciesId()));
pokemon.setGenderRatio(cursor.getDouble(9));
// Height from the data source is measured in decameters (dam), thus the conversion to meters
pokemon.setHeight((double) cursor.getInt(10) / 10);
// Weight from the data source is measured in hectograms (hg), thus the conversion to kilograms
pokemon.setWeight((double) cursor.getInt(11) / 10);
pokemon.setBaseStats(getStats(pokemon.getId()));
pokemon.setEvolutionChain(getEvolutionChain(pokemon.getSpeciesId()));
pokemon.setCatched(cursor.getInt(12) == 1);
cursor.close();
return pokemon;
}
/**
* Get a Pokémon's abilities.
*
* @param pokemonId The Pokémon id
* @return The ability list for the Pokémon
*/
public List<Ability> getAbilities(Integer pokemonId) {
String[] args = {pokemonId.toString()};
String query = getContext().getString(R.string.get_abilities);
Cursor cursor = database.rawQuery(query, args);
List<Ability> abilities = new ArrayList<>();
while (cursor.moveToNext()) {
Ability ability = new Ability();
ability.setId(cursor.getInt(0));
ability.setName(cursor.getString(1));
ability.setFlavorText(PokemonTextUtil.cleanDexText(cursor.getString(2)));
ability.setEffect(PokemonTextUtil.cleanDexText(cursor.getString(3)));
ability.setIsHidden(cursor.getInt(4) == 1);
ability.setSlot(cursor.getInt(5));
abilities.add(ability);
}
cursor.close();
return abilities;
}
/**
* Get a Pokémon's egg groups.
*
* @param speciesId The Pokémon species id
* @return The egg group list for the Pokémon species
*/
public List<EggGroup> getEggGroups(Integer speciesId) {
String[] args = {pokemonId.toString()};
String query = getContext().getString(R.string.get_egg_groups);
Cursor cursor = database.rawQuery(query, args);
List<EggGroup> eggGroups = new ArrayList<>();
while (cursor.moveToNext()) {
EggGroup eggGroup = new EggGroup();
eggGroup.setId(cursor.getInt(0));
eggGroup.setName(cursor.getString(1));
eggGroups.add(eggGroup);
}
cursor.close();
return eggGroups;
}
/**
* Get a Pokémon's stats.
*
* @param pokemonId The Pokémon id
* @return The StatSpread object for the Pokémon
*/
public StatSpread getStats(Integer pokemonId) {
String[] args = {pokemonId.toString()};
String query = getContext().getString(R.string.get_stats);
Cursor cursor = database.rawQuery(query, args);
StatSpread statSpread = new StatSpread();
while (cursor.moveToNext()) {
Stat stat = new Stat(cursor.getInt(0), cursor.getInt(1), cursor.getInt(2));
statSpread.setStat(stat, stat.getId());
}
cursor.close();
return statSpread;
}
/**
* Get a Pokemon's evolution chain.
*
* @param speciesId The Pokémon species id
* @return A Pokémon's evolution chain
*/
public Tree<Pokemon> getEvolutionChain(Integer speciesId) {
String[] args = {speciesId.toString()};
String query = getContext().getString(R.string.get_evolution_chain);
Cursor cursor = database.rawQuery(query, args);
Tree<Pokemon> evolutionChain = new Tree<>();
TreeTraverser<Tree<Pokemon>> traverser = new TreeTraverser<Tree<Pokemon>>() {
@Override
public Iterable<Tree<Pokemon>> children(Tree<Pokemon> root) {
return root.getChildren();
}
};
List<Pokemon> evolutions = new ArrayList<>();
// Group the evolution conditions by species
while (cursor.moveToNext()) {
Pokemon newEvolution = new Pokemon(cursor.getInt(0), cursor.getInt(1), cursor.getString(2));
newEvolution.setEvolutionConditions(new ArrayList<EvolutionCondition>());
EvolutionCondition evolutionCondition = prepareEvolutionCondition(cursor);
boolean added = false;
for (Pokemon evolution : evolutions) {
if (evolution.getSpeciesId().equals(newEvolution.getSpeciesId())) {
evolution.getEvolutionConditions().add(evolutionCondition);
added = true;
}
}
if (!added) {
newEvolution.getEvolutionConditions().add(evolutionCondition);
evolutions.add(newEvolution);
}
}
/* WARNING: Getting the root of the evolution tree. For the moment this works as there are no
Pokémon with multiple evolution conditions that have different base for evolution,
this may change in the future so heads up */
for (Pokemon evolution : evolutions) {
if (evolution.getEvolutionConditions().get(0).getFromPokemon() == null) {
evolutionChain.setData(evolution);
evolutionChain.setRank(0);
evolutionChain.setChildren(new ArrayList<Tree<Pokemon>>());
evolutions.remove(evolution);
break;
}
}
while (evolutions.size() > 0) {
for (Iterator<Pokemon> iterator = evolutions.iterator(); iterator.hasNext(); ) {
Pokemon evolution = iterator.next();
FluentIterable<Tree<Pokemon>> evolutionChainNodes = traverser.breadthFirstTraversal(evolutionChain);
/* WARNING: For the moment this works as there are no Pokémon with multiple evolution
conditions that have different base for evolution, this may change in the future
so heads up */
Pokemon fromPokemon = evolution.getEvolutionConditions().get(0).getFromPokemon();
for (Tree<Pokemon> evolutionChainNode : evolutionChainNodes) {
if (fromPokemon != null) {
if (evolutionChainNode.getData().getSpeciesId().equals(fromPokemon.getSpeciesId())) {
Tree<Pokemon> newEvolutionChainNode = new Tree<>();
newEvolutionChainNode.setData(evolution);
newEvolutionChainNode.setRank(evolutionChainNode.getRank() + 1);
newEvolutionChainNode.setChildren(new ArrayList<Tree<Pokemon>>());
evolutionChainNode.getChildren().add(newEvolutionChainNode);
iterator.remove();
break;
}
}
}
}
}
cursor.close();
return evolutionChain;
}
private EvolutionCondition prepareEvolutionCondition(Cursor cursor) {
EvolutionCondition evolutionCondition = new EvolutionCondition();
if (!cursor.isNull(3)) {
evolutionCondition.setFromPokemon(new Pokemon(cursor.getInt(3), cursor.getInt(4), cursor.getString(5)));
}
if (!cursor.isNull(6)) {
evolutionCondition.setTrigger(EvolutionCondition.Trigger.getTriggerByValue(cursor.getInt(6)));
}
if (!cursor.isNull(7)) {
evolutionCondition.setMinimumLevel(cursor.getInt(7));
}
if (!cursor.isNull(8)) {
evolutionCondition.setTriggerItem(new Item(cursor.getInt(8), cursor.getString(9)));
}
if (!cursor.isNull(10)) {
evolutionCondition.setGender(cursor.getInt(10));
}
if (!cursor.isNull(11)) {
evolutionCondition.setLocation(new Location(cursor.getInt(11), cursor.getString(12), new Region(cursor.getInt(13), cursor.getString(14))));
}
if (!cursor.isNull(15)) {
evolutionCondition.setHeldItem(new Item(cursor.getInt(15), cursor.getString(16)));
}
if (!cursor.isNull(17)) {
evolutionCondition.setAtDaytime(cursor.getString(17).equals(EvolutionCondition.DAYTIME_CONDITION));
}
if (!cursor.isNull(18)) {
evolutionCondition.setKnownMove(new Move(cursor.getInt(18), cursor.getString(19)));
}
if (!cursor.isNull(20)) {
evolutionCondition.setKnownMoveType(new Type(cursor.getInt(20)));
}
if (!cursor.isNull(21)) {
evolutionCondition.setMinimumHappiness(cursor.getInt(21));
}
if (!cursor.isNull(22)) {
evolutionCondition.setMinimumBeauty(cursor.getInt(22));
}
if (!cursor.isNull(23)) {
evolutionCondition.setMinimumAffection(cursor.getInt(23));
}
if (!cursor.isNull(24)) {
evolutionCondition.setRelativePhysicalStats(cursor.getInt(24));
}
if (!cursor.isNull(25)) {
evolutionCondition.setInPartyPokemonSpecies(new Pokemon(cursor.getInt(25), cursor.getInt(26), cursor.getString(27)));
}
if (!cursor.isNull(28)) {
evolutionCondition.setInPartyPokemonType(new Type(cursor.getInt(28)));
}
if (!cursor.isNull(29)) {
evolutionCondition.setTradeFor(new Pokemon(cursor.getInt(29), cursor.getInt(30), cursor.getString(31)));
}
if (!cursor.isNull(32)) {
evolutionCondition.setBabyTriggerItem(new Item(cursor.getInt(32), cursor.getString(33)));
}
evolutionCondition.setNeedsOverworldRain(cursor.getInt(34) == 1);
evolutionCondition.setTurnUpsideDown(cursor.getInt(35) == 1);
return evolutionCondition;
}
} |
/**
* Invoke the method with the provided signature (name and parameter types)
* on the given Java {@link Object}.
*
* @param target target {@link Object} whose method we are invoking
* @param methodName method name to invoke
* @param parameters parameters passed to the method call
* @param <T> return value object type
* @return the value return by the method invocation
*/
public static <T> T invokeMethod(Object target, String methodName, Object... parameters) {
try {
Class[] parameterClasses = new Class[parameters.length];
for (int i = 0; i < parameters.length; i++) {
parameterClasses[i] = parameters[i].getClass();
}
Method method = getMethod(target, methodName, parameterClasses);
method.setAccessible(true);
@SuppressWarnings("unchecked")
T returnValue = (T) method.invoke(target, parameters);
return returnValue;
} catch (InvocationTargetException e) {
throw handleException(e);
} catch (IllegalAccessException e) {
throw handleException(e);
}
} |
# Python 2 script (raw_input/xrange/print). Reads one uppercase word and
# tries to lay it out on a 2x13 letter grid: the segment between the two
# occurrences of the first repeated letter is wrapped around the right edge
# of the grid, and the remaining prefix/suffix is written in reverse from
# just before that segment. Assumes some letter repeats — TODO confirm
# against the problem's input constraints.
s = raw_input()
st = 0  # index of the first occurrence of the repeated letter
en = 0  # index of its second occurrence
# 2 rows x 13 columns; '.' marks an unused cell.
ans = [['.' for _ in xrange(13)] for _ in xrange(2)]
# Scan A..Z for the first letter that occurs at least twice in s.
for i in xrange(ord('A'),ord('Z')+1):
    c = chr(i)
    st = s.find(c)
    en = s.find(c,st+1)
    if en != -1:
        break
if st+1 == en:
    # The two occurrences are adjacent: the wrapped segment would have
    # length 1, which cannot be placed, so no layout exists.
    print "Impossible"
else:
    l = (en-st)
    l += (l%2)  # round the segment length up to even so it splits over 2 rows
    ss = 13-(l/2)  # starting column so the segment reaches the right edge
    p = [ss,0]  # write cursor as [column, row]
    dr = 1  # horizontal direction: +1 right, -1 left
    # Write the wrapped segment s[st:en]: rightwards along row 0, then turn
    # at the right edge (column 13) down to row 1 and continue leftwards.
    for i in xrange(st,en):
        ans[p[1]][p[0]] = s[i]
        if (p[0]+dr == 13):
            p[1] += 1
            dr *= -1
        else:
            p[0] += dr
    # Write the leftovers (prefix before st and suffix after en), reversed,
    # leftwards from the cell just before the segment start; once past
    # column 0, continue on row 1 going right.
    p = [ss-1,0]
    dr = -1
    a = s[:st]
    b = s[en+1:]
    bf = a[::-1]+ b[::-1]
    for i in xrange(len(bf)):
        if p[0]<0:
            # Fell off the left edge of row 0: resume at row 1, column 0,
            # now moving right.
            p[0] = 0
            p[1] = 1
            dr = 1
        ans[p[1]][p[0]] = bf[i]
        p[0] += dr
    print "".join(ans[0])
    print "".join(ans[1])
Will there be no more cakes and ale? Sir Toby Belch, Twelfth Night (Act II, Scene 3)
We’d be willing to bet that most of us have over indulged on the Christmas goodies at one time or another, and for those in the past this was just as true. In this post we look at a traditional cure for bloating and discomfort caused by over eating and drinking, and also at a cure for hangover, which might have helped Samuel Pepys at Christmas 1662 when he drank too much wine!
In 1698 Richard Stafford complained that it was such a shame that
people do keep the Feasts of what is commonly called Christmas, Easter, and Whitsuntide, with the Leaven of Gluttony, Drunkenness, Chambering, and Wantonness, in Vanity and Idleness.
It is reasonable to assume Stafford wasn’t a fan of the Christmas excesses. Over indulgence of food and drink was generally known as a surfeiting and was often associated with the Christmas festivities.
Cures to ease the symptoms of a surfeit – a heavy stomach and vomiting – included a medicated drink, known as a surfeit water. Many versions of this drink existed, Elizabeth Okeover’s manuscript recipe book used some extremely expensive ingredients and needed making well in advance. It involved taking two quarts of the best aqua vitae (strong liquor) infused with damask rose water and white sugar. After standing overnight add some red poppy and some raisins. Add to this some musk and some ambergris and leave it to stand for ten days more shaking daily. After this time it was to be strained through a cloth and bottled. It could be kept until required.
Sara had a go at making this cure in a film for Loughborough University’s Christmas Wellness campaign – you can watch how she got on here:
If you’re feeling brave you could try this 300 year old cure for your #ChristmasDay food coma 👀 https://t.co/dYVGIM6Gon #LboroExperts @saralread pic.twitter.com/6xDBBQUTW6 — Loughborough University (@lborouniversity) December 21, 2017
The name ‘hangover’ for the feelings of being unwell after drinking too much alcohol the night before dates from the early twentieth century (first recorded in 1904). This doesn’t mean that the effects of too much alcohol weren’t known about much earlier. As Daniel Dyke wrote in 1614 if ‘the paine of the headach were before the plesure of the wine, none would be drunk’.
Samuel Pepys might wince on reading that since on Saturday 22 December 1660, he went drinking in the Sun Tavern with some pals and had ‘a very fine dinner, good musique, and a great deal of wine’. While the intention was perhaps a couple of swift drinks, they ended up staying out very late, meaning Pepys had to help a staggering Sir William Penn home through the streets. Pepys got home safely but noted that ‘my head [was] troubled with wine, […] akeing all night’. We’ve all been there!
A guide to household medicines, originally from France, The Countrey Farme (1616) gave a cure for wine induced headaches specifically. It advised making a ‘frontlet’ (or plaster applied to the forehead) infused with thyme, maidenhair, and roses. The sufferer may also eat one or two apples and some bitter almonds. We have a feeling this might not be as fortifying as the classic fry-up. If you also had a queasy stomach then the author recommended, ‘thou mayst take of the haire of the Beast that hath made thee ill, and drinke off a good glasse of Wine’ or good old hair of the dog!
This is something to bear in mind as we head into New Years revelry!!
Happy New Year everyone
Share this: Google
Reddit
Facebook
Twitter
Email |
Calotropis procera Latex Extract Affords Protection against Inflammation and Oxidative Stress in Freund's Complete Adjuvant-Induced Monoarthritis in Rats
In view of the well-established anti-inflammatory properties of latex of Calotropis procera (DL), the present study was carried out to evaluate the protective effect of its methanol extract (MeDL) against inflammation and oxidative stress in monoarthritis induced by Freund's complete adjuvant (FCA) in rats. Intra-articular injection of FCA produced inflammation of the joint with a peak effect occurring on day 4 where a maximum increase in the levels of myeloperoxidase and inflammatory mediators like PGE2, TNF-α, and nitric oxide was observed. This was associated with oxidative stress with a marked reduction in the levels of glutathione, catalase, superoxide dismutase and glutathione peroxidase and an increase in the lipid peroxidation as indicated by the higher levels of thiobarbituric acid reactive substances (TBARSs). Subsequently on day 28 the histological analysis of the joint also revealed arthritic changes. Daily treatment of rats with MeDL (50 and 500 mg/kg) and standard anti-inflammatory drug rofecoxib (20 and 100 mg/kg), produced a significant attenuation in the inflammatory response and ameliorated the arthritic changes in the joint. The protection afforded by MeDL and rofecoxib was more pronounced than that of phenylbutazone and was associated with normalization of the levels of inflammatory mediators and biochemical parameters of oxidative stress. However, the overall protection afforded by rofecoxib was better than that of MeDL.
INTRODUCTION
The incidence of degenerative and inflammatory joint diseases, namely osteoarthritis and rheumatoid arthritis, is very high over the world . Typically arthritis is a common inflammatory disorder of the joint characterized by inflammation of the synovial membrane, pain, and restricted joint movement. Experimentally arthritis could be induced by various inflammagens of which Freund's complete adjuvant (FCA) is the most commonly used agent . Intraarticular injection of FCA is known to induce inflammation as well as immune response and to produce features that resemble rheumatoid arthritis in humans. The acute inflammatory response induced by FCA is associated with leukocyte infiltration, mast cell activation, and release of cytokines and free radicals . This process gets aggravated with macrophage activation and secretion of bioactive products that play an important role in tissue destruction, vascular proliferation, and fibrosis over a period of time .
The role of cytokines like IL-1, IL-6, tumor necrosis factor-α (TNF-α), prostaglandins (PGs), and nitric oxide (NO) in arthritis has been well established. The levels of these inflammatory mediators have been reported to be high in both experimental models of arthritis and in patients suffering from arthritis . Besides, generation of reactive oxygen species (ROS) and other free radicals also contribute to the pathogenesis of arthritis . In view of the underlying mechanisms, both nonsteroidal and steroidal antiinflammatory drugs are used for the management of arthritis . However, due to side effects associated with the longterm use of these agents, many patients tend to use alternative therapeutic approaches including herbal therapies that have been considered safe and effective in alleviating chronic pain associated with arthritis .
Calotropis procera (Ait.) R. Br., a wild growing plant of family Asclepiadaceae, is well known for its medicinal properties. Different parts of this plant have been reported to exhibit anti-inflammatory, analgesic, and antioxidant properties . The latex of this plant produces potent antiinflammatory, analgesic, and weak antipyretic effects in various animal models . Both latex and its methanol extract (MeDL) have been shown to inhibit inflammatory cell influx and edema formation induced by various inflammagens . It also improves locomotor functions in experimentally induced monoarthritis in rats (unpublished findings). In view of these properties, the present study was carried out to evaluate the effect of MeDL on the levels of PGE 2 , TNF-α, nitric oxide (NO), myeloperoxidase (MPO), oxidative stress parameters, and joint histology in FCA-induced monoarthritis in rats. The effect of MeDL was compared with rofecoxib, a selective COX-2 (cyclooxygenase-2) inhibitor, and phenylbutazone (PBZ) a nonselective COX inhibitor.
Plant material and drugs
The C. procera plant was identified by the Raw Materials, Herbarium and Museum Division, National Institute of Science and Communication, CSIR, New Delhi, where a voucher specimen is preserved (Voucher no. PID 1739). The latex was collected from the aerial parts of the plant growing in the wild. It was dried under shade at ambient temperature and was soxhlated to obtain methanol extract (MeDL) . The MeDL was triturated with gum acacia used as suspending agent (1 : 1) in normal saline (NS), and administered orally to rats at doses ranging from 50 to 500 mg/kg (MeDL 50 and MeDL 500). Rofecoxib was administered orally at 20 and 100 mg/kg doses (Rofe 20 and Rofe 100) and phenylbutazone at a dose of 100 mg/kg (PBZ). The drugs used in the study were obtained from Arbro Pharmaceuticals (New Delhi, India) (rofecoxib and phenylbutazone). Freund's complete adjuvant was obtained from Sigma-Aldrich Corporation (Bangalore, India).
Animals
The study was carried out on 5-6-month-old Wistar rats of either sex weighing 150-180 g. The rats were obtained from the Experimetal Animal Facility of the Institute, were kept at ambient temperature, and had free access to water and diet. The animal experiments were carried in accordance with the guidelines of Institutional Animal Ethics Committee.
Experimental design
Monoarticular arthritis was induced in rats by injecting 0.1 mL of 0.1% FCA (Sigma Aldrich, USA) into the intraarticular space of right ankle joint (day 0) . The increase in joint diameter was measured daily starting from day 0, using a screw gauge till the time of peak inflammation (day 4), and then it was measured every fourth day for a period of 28 days. The rats were divided into seven groups, consisting of six animals each for analysis of histological and biochemical parameters. Group I: normal control; Group II: FCA control. In Group III to Group VII, drugs were administered orally as suspension with gum acacia in NS, 1 hour before injecting FCA on day 0 and then daily either for 4 days or for 28 days at doses based on our earlier studies where no observable toxic effects were seen , Group III: MeDL (50 mg/kg, MeDL 50); Group IV: MeDL (500 mg/kg, MeDL 500); Group V: rofecoxib (20 mg/kg, Rofe 20); Group VI: rofecoxib (100 mg/kg, Rofe 100); Group VII: phenylbutazone (100 mg/kg, PBZ).
Estimation of protein
The protein concentration of the samples was determined by Bradford's method .
Histological analysis
Rats were sacrificed on day 28, the limbs were removed above the stifle joints, degloved and fixed in 1% formaldehyde in saline. They were decalcified in EDTA, processed for paraffin embedding, sectioned, and stained with hematoxylin-eosin . The sections were examined for arthritic changes in the control as well as in the drug-treated rats.
Statistical analysis
The values are expressed as mean ± SEM of six observations and ANOVA was used to compare the groups. The statistical analysis was carried out by the version 10 of the SPSS program and the values of P < .05 were considered as statistically significant.
Effect of MeDL on joint inflammation
Injection of FCA into right ankle joint of rat produced an increase in joint diameter that was maximum on day 4 (2.17 ± 0.13 mm), and thereafter it gradually declined. Injection of NS on the other hand produced a marginal increase in the joint diameter on day 2 (0.04 ± 0.10 mm) that returned to normal within 4 days (Figure 1). The inhibitory effect of various drugs was evaluated on the day of peak inflammation, that is, day 4. Oral administration of MeDL produced a dose-dependent decrease in joint inflammation and the increase in joint diameter was 1.59 ± 0.09 mm and 1.20 ± 0.08 mm in MeDL 50 and MeDL 500 groups against 2.17 ± 0.13 mm in FCA control (27% and 45% inhibition). COX-2 selective inhibitor, rofecoxib, was more effective in inhibiting joint inflammation as compared to MeDL. The increase in joint diameter in Rofe 20 and Rofe 100 groups was 1.66 ± 0.08 mm and 0.70 ± 0.33 mm (24% and 68% inhibition). PBZ, a nonselective COX inhibitor produced 16% inhibition in joint inflammation with the increase in joint diameter of 1.82 ± 0.12 mm (Table 1).
Effect of MeDL on tissue levels of inflammatory mediators
The inflammation induced by FCA was associated with an increase in the levels of PGE 2 and TNF-α. The tissue levels of PGE 2 and TNF-α were 7.35 ± 0.14 and 71.5 ± 5.00 pg/mg tissue in the FCA control as compared to 1.00 ± 0.01 and 2.50 ± 5.00 pg/mg tissue in normal control rats, respectively. Both MeDL and rofecoxib produced a significant decrease in the levels of PGE 2 and TNF-α (P < .005). The levels of PGE 2 in MeDL 500 group were 0.6 ± 0.05, and in Rofe 100 group were 1.00±0.23, and that of TNF-α in MeDL 500 group were 14.50±15.00, and in Rofe 100 group were 10.50±5.00 pg/mg tissue, respectively. PBZ on the other hand was not effective in reducing the tissue PGE 2 levels and was only marginally effective in reducing the tissue TNF-α levels ( Figure 2). FCA injection produced a significant increase in tissue MPO activity from 0.06 ± 0.01 OD/mg tissue in normal control rats to 1.33±0.11 OD/mg tissue. Treatment with MeDL and rofecoxib significantly reduced the tissue MPO activity and their effect was comparable in this regard. The MPO levels were 0.14 ± 0.02 and 0.09 ± 0 OD/mg tissue in MeDL 500 and Rofe 100 group, respectively. PBZ on the other hand was marginally effective in decreasing the MPO levels as compared to FCA control (1.00 ± 0.03 versus 1.33 ± 0.11 OD/mg tissue) (Figure 2). MeDL and rofecoxib were also equieffective in reducing the tissue NO levels in the arthritic rats (2.0 ± 0.11 and 2.8 ± 0.10 against 5.9 ± 0.50 μM/mg tissue in FCA control). The effect of PBZ in this regard was comparable to that of MeDL and rofecoxib (3.0 ± 0.04 μM/mg tissue) (Figure 2).
Effect of MeDL on tissue levels of GSH, catalase, SOD, GPx, and TBARS
Oxidative stress associated with FCA-induced monoarthritis was evaluated by measuring the levels of GSH, catalase, SOD, GPx, and TBARS in the inflamed joint tissue. FCA injection into the ankle joint markedly decreased the tissue GSH, catalase, SOD, and GPx levels from 18.20 ± 1.10 mg/g tissue, 28.60 ± 0.15 U/mg protein, 277.70 ± 0.15 U/mg protein, and 31.40 ± 0.10 U/mg protein in normal control rats to 4.80 ± 0.40 mg/g tissue, 0.17 ± 0.02 U/mg protein, 79.90 ± 0.10 U/mg protein, and 5.97 ± 0.05 U/mg protein, respectively. Both MeDL and rofecoxib produced a dose-dependent increase in the level of these oxidative stress parameters. On the other hand, FCA produced a marked increase in the levels of TBARS from 3.50 ± 0.50 nmol/g tissue to 103.00 ± 3.00 nmol/g tissue. Both MeDL and rofecoxib produced a dose-dependent decrease in the levels of TBARS and the effect of these drugs was comparable. PBZ, on the other hand, produced a marginal change in the levels of all the oxidative stress parameters as compared to FCA control (Table 2).
Effect of MeDL on joint histology
The inflammation induced by FCA was associated with cellular infiltration, edema, granuloma formation, and bone destruction on day 28 (Figure 3(b)). Both MeDL 500 and Rofe 100 significantly decreased the arthritic changes as compared to FCA control, however, rofecoxib was more effective in this regard (Figures 3(c) and 3(d)).
DISCUSSION
The latex of Calotropis procera is well known for its antiinflammatory properties in various experimental models. It has also been shown to afford protection against functional impairment produced by FCA in rat model of monoarthritis. In the present study, we have evaluated the effect of latex of C. procera on the levels of inflammatory mediators, oxidative stress parameters, and joint histology in FCA-induced monoarthritis model and compared it with rofecoxib. Intraarticular injection of FCA produced a peak inflammatory response in the joint on day 4 that is associated with fluid exudation, neutrophil infiltration, and mast cell activation . This was followed by a slow regression and the joint swelling continued up to day 28 possibly due to oil-based adjuvant and the antigenicity of mycobacterium . The inhibitory effect of drugs was evaluated against FCA-induced inflammation on day 4. MeDL produced a dose-dependent inhibition in joint inflammation that could be attributed to its ability to inhibit cellular influx and vascular permeability . It has earlier been shown to inhibit inflammatory response induced by various mediators and inflammagens like histamine, bradykinin, prostaglandins, carragenin, and compound 48/80 . The role of various inflammatory mediators in adjuvant-induced arthritis has been well established . In our study, rofecoxib, a selective COX-2 inhibitor, was found to be more effective than MeDL and phenylbutazone in inhibiting the FCA-induced joint inflammation as reported earlier by Kumar et al. and Francischi et al. . Rofecoxib acts by inhibiting COX-2 that plays an important role in an inflammatory response. The greater efficacy of rofecoxib could be attributed to its better distribution at the site of inflammation as suggested for other COX-2 inhibitors . Further, rofecoxib was also found to be more effective as compared to MeDL in inhibiting cell influx and bone destruction as revealed by histological analysis. 
The inhibitory effect of MeDL and rofecoxib on cell influx was further substantiated by their ability to decrease tissue MPO activity that has been used as an index of granulocyte infiltration. It is interesting to note that PBZ produced only a marginal decrease in tissue MPO activity. The inability of PBZ to inhibit cellular influx has also been reported by Meacock and Kitchen and Arya and Kumar .
The neutrophilic recruitment at the site of inflammation has been reported to involve TNF-α production that induces the synthesis of LTB4, a well-known chemoattractant and prostaglandins that plays a key role in the pathogenesis of inflammatory diseases. Elevated levels of TNF-α and prostaglandins have been reported in arthritic patients and in experimentally induced arthritis . In our study, both MeDL and rofecoxib produced a marked reduction in the tissue levels of TNF-α and PGE 2 . However, PBZ was ineffective in reducing the levels of PGE 2 though it produced a significant decrease in tissue TNF-α levels. A marked reduction in the levels of PGE 2 brought about by MeDL was comparable to that of rofecoxib and suggests that like rofecoxib, MeDL might be inhibiting COX-2. Earlier, the MeDL was shown to inhibit inflammation induced by PGE 2 . The role of NO has been well established in an inflammatory response. As the inflammatory response progresses, large quantities of NO are generated through the induction of iNOS (inducible nitric oxide synthase) that reacts with superoxide anion to form peroxynitrate, a potent oxidizing molecule capable of eliciting lipid peroxidation. Lipid peroxidation is the oxidative deterioration of polyunsaturated lipids to form radical intermediates that bring about cellular damage. MDA, a major end product of this reaction, is an index of lipid peroxidation and has been estimated as TBARS . In our study, both MeDL and rofecoxib brought down the tissue levels of NO and TBARS. Besides, the infiltrating cells also generate reactive oxygen species and free radicals that bring about destruction of the inflamed joint. As a result, the scavenging enzyme SOD that leads to the formation of hydrogen peroxide is utilized and its activity is reduced in arthritic rats. The hydrogen peroxide thus generated is de-composed by catalase and glutathione peroxidase. 
Excessive production of lipid hydroperoxide may also contribute to decreased activity of GPx in arthritic condition . Beside enzymatic antioxidants, the level of glutathione, a nonenzymatic reducing agent that traps free radicals and prevents oxidative stress, is also decreased in arthritis . Both MeDL and rofecoxib maintained the oxidative homeostasis, and the levels of GSH and activities of catalase, SOD, and GPx were comparable to the control animals. The antioxidant properties of rofecoxib and latex of C. procera have also been reported earlier .
Thus, present study shows that the latex of C. procera markedly reduces cell influx, release of mediators, and oxidative stress associated with arthritic condition, and therefore has the potential to be used as an antiarthritic agent. |
<gh_stars>1-10
import { useLocalStorage, useWebStorage } from "../../src";
import { nextTick } from "../utils";
import { promisedTimeout } from "../../src/utils";
// Test suite for the useLocalStorage composable.
describe("localStorage", () => {
  const setItemSpy = jest.spyOn(Storage.prototype, "setItem");
  const consoleWarnSpy = jest.spyOn(console, "warn");

  beforeEach(() => {
    // Reset both the real storage and the composable's shared cache so
    // every test starts from a clean slate.
    localStorage.clear();
    useWebStorage("localStorage").remove();
    setItemSpy.mockClear();
    consoleWarnSpy.mockClear();
  });

  it("should store object in localStorage if default is passed", async () => {
    const obj = { a: 1 };
    const { storage } = useLocalStorage("test", obj);
    // Persistence is debounced; give it time to flush.
    await promisedTimeout(100);
    expect(storage.value).toMatchObject(obj);
    expect(setItemSpy).toHaveBeenLastCalledWith("test", JSON.stringify(obj));
  });

  it("should update the localStorage if value changes", async () => {
    const obj = { a: 111 };
    const { storage } = useLocalStorage("test", obj);
    expect(storage.value).toMatchObject(obj);
    expect(setItemSpy).toHaveBeenLastCalledWith("test", JSON.stringify(obj));
    storage.value.a = 33;
    await nextTick();
    expect(storage.value).toMatchObject({ a: 33 });
    await promisedTimeout(20);
    expect(setItemSpy).toHaveBeenLastCalledWith(
      "test",
      JSON.stringify({ a: 33 })
    );
  });

  it("should get the same object if the same key is used", () => {
    const key = "test";
    const { storage: storage1 } = useLocalStorage(key, { a: 1 });
    const { storage: storage2 } = useLocalStorage(key, { a: 1 });
    expect(storage1).toMatchObject(storage2);
  });

  it("should remove from localStorage", async () => {
    const key = "test";
    const { remove } = useLocalStorage(key, { a: 1 });
    remove();
    await nextTick();
    expect(localStorage.getItem(key)).toBeFalsy();
  });

  it("should clear all localStorage keys", async () => {
    // Keys not managed by the composable must survive clear().
    localStorage.setItem("_other_", "secret");
    const s1 = useLocalStorage("key", { a: 1 });
    const s2 = useLocalStorage("key2", { a: 2 });
    expect(localStorage.getItem("key")).toBe(JSON.stringify(s1.storage.value));
    expect(localStorage.getItem("key2")).toBe(JSON.stringify(s2.storage.value));
    expect(localStorage.getItem("_other_")).toBe("secret");
    s1.clear();
    await nextTick();
    await promisedTimeout(200);
    expect(s1.storage.value).toBeUndefined();
    expect(s2.storage.value).toBeUndefined();
    expect(localStorage.getItem("_other_")).toBe("secret");
  });

  it("should load from localStorage", () => {
    const key = "hello";
    // A pre-existing stored value wins over the passed default.
    localStorage.setItem(key, JSON.stringify({ k: 1 }));
    const { storage } = useLocalStorage(key, { k: 10 });
    expect(storage.value).toMatchObject({ k: 1 });
  });

  // Renamed from the garbled "should you try to sync".
  it("should forward sync requests to the underlying web storage", () => {
    const key = "hello";
    const { setSync } = useLocalStorage(key, { k: 10 });
    const setSyncSpy = jest.spyOn(
      useWebStorage("localStorage").store!,
      "setSync"
    );
    setSync(true);
    expect(setSyncSpy).toHaveBeenCalledWith(key, true);
  });

  // Fixed test name: this suite exercises localStorage, not sessionStorage.
  it("should warn if localStorage is not supported", () => {
    setItemSpy.mockImplementationOnce(() => {
      throw new Error("random");
    });
    const key = "hello";
    useLocalStorage(key, { k: 10 });
    expect(consoleWarnSpy).toHaveBeenCalledWith(
      "[localStorage] is not available"
    );
  });
});
|
def _install_widevine_arm(self):
    """Download a ChromeOS recovery image and extract the ARM Widevine CDM.

    Walks the user through confirmation + EULA, verifies the host
    environment (Linux, disk space, required binaries, root access),
    downloads the ChromeOS image matching the configured ARM hardware id,
    loop-mounts it, extracts and installs the CDM, then verifies it.

    Returns:
        bool: True when Widevine was installed and verified, False otherwise.
    """
    # System binaries that need root privileges during the install.
    root_cmds = ['mount', 'umount', 'losetup', 'modprobe']
    cos_config = self._chromeos_config()
    # First ChromeOS device entry whose hwidmatch contains the ARM HWID.
    device = [x for x in cos_config if config.CHROMEOS_ARM_HWID in x['hwidmatch']][0]
    # Need room for both the downloaded zip and the unpacked image.
    required_diskspace = int(device['filesize']) + int(device['zipfilesize'])
    dialog = xbmcgui.Dialog()
    if dialog.yesno(LANGUAGE(30001),
                    LANGUAGE(30006).format(self._sizeof_fmt(required_diskspace))) and self._widevine_eula():
        if self._os() != 'Linux':
            # The loop-mount based extraction below only works on Linux.
            dialog.ok(LANGUAGE(30004), LANGUAGE(30019).format(self._os()))
            return False
        if required_diskspace >= self._diskspace():
            dialog.ok(LANGUAGE(30004),
                      LANGUAGE(30018).format(self._sizeof_fmt(required_diskspace)))
            return False
        # Either fdisk or parted is acceptable; mount and losetup are mandatory.
        if not self._cmd_exists('fdisk') and not self._cmd_exists('parted'):
            dialog.ok(LANGUAGE(30004), LANGUAGE(30020).format('fdisk', 'parted'))
            return False
        if not self._cmd_exists('mount'):
            dialog.ok(LANGUAGE(30004), LANGUAGE(30021).format('mount'))
            return False
        if not self._cmd_exists('losetup'):
            dialog.ok(LANGUAGE(30004), LANGUAGE(30021).format('losetup'))
            return False
        if os.getuid() != 0:
            # Not running as root: ask permission to elevate for root_cmds.
            if not dialog.yesno(LANGUAGE(30001), LANGUAGE(30030).format(', '.join(root_cmds)), yeslabel=LANGUAGE(30027), nolabel=LANGUAGE(30028)):
                return False
        self._url = device['url']
        downloaded = self._http_request(download=True, message=LANGUAGE(30022))
        if downloaded:
            dialog.ok(LANGUAGE(30023), LANGUAGE(30024))
            busy_dialog = xbmcgui.DialogBusy()
            busy_dialog.create()
            # The zip contains a single .bin recovery image with the same stem.
            bin_filename = self._url.split('/')[-1].replace('.zip', '')
            bin_path = os.path.join(self._temp_path(), bin_filename)
            # Each step must succeed: unzip, loop module, loop device,
            # attach image, mount the loop device.
            success = [
                self._unzip(self._temp_path(), bin_filename),
                self._check_loop(), self._set_loop_dev(),
                self._losetup(bin_path), self._mnt_loop_dev()
            ]
            if all(success):
                self._extract_widevine_from_img()
                self._install_cdm()
                self._cleanup()
                if self._has_widevine():
                    # Persist the ChromeOS config used, for later update checks.
                    with open(self._widevine_config_path(), 'w') as config_file:
                        config_file.write(json.dumps(cos_config, indent=4))
                    dialog.notification(LANGUAGE(30037), LANGUAGE(30003))
                    busy_dialog.close()
                # NOTE(review): when _has_widevine() and _check_widevine() both
                # succeed, the success notification fires and the busy dialog is
                # closed twice -- looks unintentional; confirm before changing.
                wv_check = self._check_widevine()
                if wv_check:
                    dialog.notification(LANGUAGE(30037), LANGUAGE(30003))
                    busy_dialog.close()
                return wv_check
            else:
                busy_dialog.close()
                dialog.ok(LANGUAGE(30004), LANGUAGE(30005))
        else:
            # Download failed: clean up any partial artifacts.
            self._cleanup()
            busy_dialog.close()
            dialog.ok(LANGUAGE(30004), LANGUAGE(30005))
    return False
/**
* @author Vyacheslav Rusakov
* @since 30.12.2021
*/
public class ContextAccess implements BeforeAllCallback, BeforeEachCallback {

    @Override
    public void beforeAll(ExtensionContext context) throws Exception {
        // class context: record what JUnit exposes before any test instance
        // exists, so tests can assert on the exact strings below.
        ActionHolder.add("class.id: " + context.getUniqueId());
        ActionHolder.add("class.display name: " + context.getDisplayName());
        ActionHolder.add("class.parent: " + context.getParent().isPresent());
        ActionHolder.add("class.root: " + context.getRoot().getDisplayName());
        ActionHolder.add("class.element: " + context.getElement().get());
        ActionHolder.add("class.lifecycle: " + context.getTestInstanceLifecycle().get());
        ActionHolder.add("class.exec mode: " + context.getExecutionMode());
        ActionHolder.add("class.exception: " + context.getExecutionException().isPresent());
        ActionHolder.add("class.test class: " + context.getRequiredTestClass());
        // Only presence flags here: method/instance are not available
        // at the class level (beforeAll runs before instantiation for
        // per-method lifecycle).
        ActionHolder.add("class.test method: " + context.getTestMethod().isPresent());
        ActionHolder.add("class.tags: " + context.getTags());
        ActionHolder.add("class.test instance: " + context.getTestInstance().isPresent());
        ActionHolder.add("class.test instances: " + context.getTestInstances().isPresent());
    }

    @Override
    public void beforeEach(ExtensionContext context) throws Exception {
        // method context: here parent/method/instances are guaranteed, so the
        // values themselves (not just presence) are recorded.
        ActionHolder.add("method.id: " + context.getUniqueId());
        ActionHolder.add("method.display name: " + context.getDisplayName());
        ActionHolder.add("method.parent: " + context.getParent().get().getDisplayName());
        ActionHolder.add("method.root: " + context.getRoot().getDisplayName());
        ActionHolder.add("method.element: " + context.getElement().get());
        ActionHolder.add("method.lifecycle: " + context.getTestInstanceLifecycle().get());
        ActionHolder.add("method.exec mode: " + context.getExecutionMode());
        ActionHolder.add("method.exception: " + context.getExecutionException().isPresent());
        ActionHolder.add("method.test class: " + context.getRequiredTestClass());
        ActionHolder.add("method.test method: " + context.getRequiredTestMethod());
        ActionHolder.add("method.tags: " + context.getTags());
        ActionHolder.add("method.test instance: " + context.getTestInstance().isPresent());
        ActionHolder.add("method.test instances: " + context.getTestInstances().get().getAllInstances().size());
    }
}
} |
def login(self, username=None, password=None, auth=AuthMethod.SESSION):
    """Authenticate against the REST endpoint.

    Args:
        username: Account name; falls back to the previously stored value.
        password: Account password; falls back to the previously stored value.
        auth: AuthMethod.BASIC or AuthMethod.SESSION (default).

    Raises:
        InvalidCredentialsError: If the server rejects the credentials.
    """
    # Keep previously stored credentials unless new ones are supplied.
    self.__username = username if username else self.__username
    self.__password = password if password else self.__password
    if auth == AuthMethod.BASIC:
        # Build an RFC 7617 'Basic <base64(user:pass)>' header and probe the
        # login URL once to validate the credentials.
        auth_key = base64.b64encode(('%s:%s' % (self.__username, \
            self.__password)).encode('utf-8')).decode('utf-8')
        self.__authorization_key = 'Basic %s' % auth_key
        headers = dict()
        headers['Authorization'] = self.__authorization_key
        respvalidate = self._rest_request('%s%s' % (self.__url.path, \
            self.login_url), headers=headers)
        if respvalidate.status == 401:
            delay = 0
            raise InvalidCredentialsError(delay)
    elif auth == AuthMethod.SESSION:
        # Session login: POST the credentials and keep the session token
        # and location returned by the server.
        data = dict()
        data['UserName'] = self.__username
        data['Password'] = self.__password
        headers = dict()
        resp = self._rest_request(self.login_url, method="POST", \
            body=data, headers=headers)
        LOGGER.info(json.loads('%s' % resp.text))
        LOGGER.info('Login returned code %s: %s', resp.status, resp.text)
        self.__session_key = resp.session_key
        self.__session_location = resp.session_location
        # No session key and a non-success status means bad credentials.
        if not self.__session_key and resp.status not in [200, 201, 202, 204]:
            delay = 0
            raise InvalidCredentialsError(delay)
        else:
            # Presumably clears the plain-text credentials from memory once a
            # session token exists -- confirm this is the intended behavior.
            self.set_username(None)
            self.set_password(None)
    else:
        # NOTE(review): unknown auth methods are silently ignored; consider
        # raising ValueError here.
        pass
// findByPath find and returns VM by Inventory Path
func findByPath(ctx context.Context, client *vim25.Client, dcName, path string) (*object.VirtualMachine, error) {
f := find.NewFinder(client, true)
dc, err := f.DatacenterOrDefault(ctx, dcName)
if err != nil {
return nil, err
}
f.SetDatacenter(dc)
return f.VirtualMachine(ctx, path)
} |
import { DateTime } from 'luxon';
import { RRule } from '@mackgevanni/rrule-es6';

// Weekly rule (Mon/Wed/Thu), three occurrences, anchored in America/Denver.
const rule = RRule.fromString(
    "DTSTART;TZID=America/Denver:20181101T190000;\n"
    + "RRULE:FREQ=WEEKLY;BYDAY=MO,WE,TH;INTERVAL=1;COUNT=3"
);

// Fixed: removed the stray double semicolon and compute the occurrence
// list once instead of calling rule.all() twice.
const rule_all = rule.all();
const date = new Date();
const date_utc_date = date.getUTCDate();
const date_utc_hours = date.getUTCHours();

// Re-interpret each occurrence's wall-clock time in the local zone
// (keepLocalTime preserves the clock reading while changing the zone).
const rule_all_luxon = rule_all.map(function (date) {
    return DateTime.fromJSDate(date)
        .toUTC()
        .setZone('local', { keepLocalTime: true })
        .toJSDate();
});

// Logs
console.log(JSON.stringify({
    rule_all,
    date,
    date_utc_date,
    date_utc_hours,
    rule_all_luxon,
}, null, '\t'));

// Sample output captured from a previous run, kept for reference.
const log = {
    "rule_all": [
        "2018-11-01T19:00:00.000Z",
        "2018-11-05T19:00:00.000Z",
        "2018-11-07T19:00:00.000Z"
    ],
    "date": "2022-03-08T23:15:31.999Z",
    "date_utc_date": 8,
    "date_utc_hours": 23,
    "rule_all_luxon": [
        "2018-11-02T00:00:00.000Z",
        "2018-11-06T01:00:00.000Z",
        "2018-11-08T01:00:00.000Z"
    ]
};
// Replaced the no-op statement `(log);` with an explicit void reference so
// linters do not flag the constant as unused.
void log;
|
def generate_function_name(self, function_type, variable, arr_index):
    """Build a sanitized function name (and spec type) from a variable spec.

    Parses specs of the form ``@...::<namescope>::<variable>::<index>`` and
    combines the pieces with ``function_type`` into a unique identifier.

    Args:
        function_type: Kind of generated function (e.g. ``"assign"``).
        variable: Variable spec string to parse.
        arr_index: Optional iterable of array indices appended to the
            variable name to disambiguate per-element functions.

    Returns:
        dict: ``{"name": <sanitized name>, "type": <spec type>}`` where the
        type is ``"lambda"`` for assign/condition/decision functions and the
        string ``"None"`` otherwise.

    Raises:
        AssertionError: If ``variable`` does not match the expected format.
    """
    variable_spec_regex = (
        r'@.*?::(?P<namescope>.*?::.*?)::(?P<variable>.*?)::(?P<index>.*)'
    )
    variable_match = re.match(variable_spec_regex, variable)
    # Fail fast on malformed specs (previously an assert in an else branch).
    assert variable_match, f"Cannot match regex for variable spec: {variable}"
    namespace_scope = variable_match.group("namescope")
    variable_name = variable_match.group("variable")
    if arr_index:
        # Append each array index to make per-element names unique.
        variable_name += "_" + "".join(f"{index}" for index in arr_index)
    variable_index = variable_match.group("index")
    name = namespace_scope + function_type + variable_name + "::" + \
        variable_index
    # Sanitize characters that are not valid in identifiers.
    name = self.replace_multiple(name, ['$', '-', ':'], '_')
    name = name.replace('.', '__')
    if any(x in function_type for x in ["assign", "condition", "decision"]):
        spec_type = "lambda"
    else:
        spec_type = "None"
    return {"name": name, "type": spec_type}
/*
* Copyright 2019 <NAME> (github.com/mP1)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package walkingkooka.net.header;
import org.junit.jupiter.api.Test;
import java.time.LocalDateTime;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertThrows;
public final class IfRangeLastModifiedTest extends IfRangeTestCase<IfRangeLastModified, LocalDateTime, ETag> {

    /** Asking a last-modified If-Range for an etag must fail. */
    @Test
    public void testETag() {
        assertThrows(HeaderException.class, () -> this.createHeader().etag());
    }

    /** lastModified() returns the same instance (no defensive copy). */
    @Test
    public void testLastModified() {
        final IfRangeLastModified ifRange = this.createHeader();
        assertSame(ifRange, ifRange.lastModified());
    }

    // --- fixture plumbing required by the abstract base test case ---

    @Override
    IfRangeLastModified createHeader(final LocalDateTime value) {
        return IfRangeLastModified.lastModified(value);
    }

    @Override
    LocalDateTime value() {
        return this.lastModified();
    }

    @Override
    String headerText() {
        // Serialize via the same handler production code uses for dates.
        return HeaderHandler.localDateTime().toText(this.value(), HttpHeaderName.LAST_MODIFIED);
    }

    @Override
    LocalDateTime differentValue() {
        return this.value().plusYears(1);
    }

    @Override
    ETag otherValue() {
        // The "other" flavour of If-Range holds an ETag instead of a date.
        return this.etag();
    }

    @Override
    boolean isETag() {
        return false;
    }

    @Override
    public Class<IfRangeLastModified> type() {
        return IfRangeLastModified.class;
    }
}
|
// Copyright (c) 2017 <NAME>
// MIT License, see LICENSE file for full terms.
import { Message } from '@phosphor/messaging';
import { Widget } from '@phosphor/widgets';
import * as React from 'react';
import * as ReactDOM from 'react-dom';
import { style } from 'typestyle';
import { AppContainer } from './react-hot-loader';
import { appBackgroundColor } from './style';
/**
 * PhosphorJS widget that wraps a React component.
 *
 * The wrapped component is (re)rendered into the widget's DOM node on every
 * Phosphor update request and unmounted when the widget is detached, keeping
 * the React tree's lifecycle in sync with the widget's lifecycle.
 */
export default class ReactWidget<TProps = {}> extends Widget {
  // Flex-column container so the wrapped component can stretch to fill.
  private _cssClass = style({
    backgroundColor: appBackgroundColor,
    display: 'flex',
    flexDirection: 'column',
    minHeight: '50px',
    minWidth: '50px',
    padding: '4px'
  });

  constructor(private component: React.ComponentClass<TProps>, private props?: TProps) {
    super();
    this.addClass(this._cssClass);
  }

  /** Render once the widget's node has been attached to the document. */
  protected onAfterAttach(_: Message): void {
    this.update();
  }

  /** Tear down the React tree before the node leaves the document. */
  protected onBeforeDetach(_: Message): void {
    ReactDOM.unmountComponentAtNode(this.node);
  }

  /** (Re)render the wrapped component, wrapped in react-hot-loader's AppContainer. */
  protected onUpdateRequest(_: Message): void {
    ReactDOM.render(
      React.createElement(AppContainer, {}, React.createElement(this.component, this.props)),
      this.node
    );
  }
}
|
<reponame>matu3ba/cports
pkgname = "libthai"
pkgver = "0.1.28"
pkgrel = 0
build_style = "gnu_configure"
make_cmd = "gmake"
# Serial install (presumably to avoid a race in the install target) --
# TODO confirm against the package's Makefile.
make_install_args = ["-j1"]
hostmakedepends = ["gmake", "pkgconf"]
makedepends = ["libdatrie-devel"]
pkgdesc = "Thai language support routines"
maintainer = "q66 <<EMAIL>>"
license = "LGPL-2.1-or-later"
url = "https://linux.thai.net/projects/libthai"
source = f"https://linux.thai.net/pub/ThaiLinux/software/{pkgname}/{pkgname}-{pkgver}.tar.xz"
sha256 = "ffe0a17b4b5aa11b153c15986800eca19f6c93a4025ffa5cf2cab2dcdf1ae911"

# Cross builds additionally need the host's libdatrie tools.
# NOTE(review): `self` at module scope is unusual for a cports template --
# confirm the template evaluator actually provides it here.
if self.cross_build:
    hostmakedepends += ["libdatrie"]

@subpackage("libthai-devel")
def _devel(self):
    return self.default_devel()
|
#include <QTest>
#include "loginactiontest.h"
#include "asteriskmanager.h"
#include "packettransport.h"
#include "packettransportmock.h"
#include "loginaction.h"
// Trivial constructor: no fixture state to initialize beyond the QObject parent.
LoginActionTest::LoginActionTest(QObject *parent) : QObject(parent)
{
}
// Verifies that sending a LoginAction produces an AMI "Login" packet that
// carries the configured username and secret.
void LoginActionTest::testLoginMessageSent()
{
    // NOTE(review): these heap objects are never deleted; whether
    // AsteriskManager takes ownership of the transport/action is not visible
    // here -- confirm ownership semantics before adding deletes or moving
    // them onto the stack.
    AsteriskManager *asteriskManager = new AsteriskManager;
    PacketTransportMock *packetTransport = new PacketTransportMock;
    LoginAction *loginAction = new LoginAction;
    asteriskManager->setPacketTransport(packetTransport);
    loginAction->setUsername("test");
    loginAction->setSecret("<PASSWORD>");
    asteriskManager->sendAction(loginAction);
    // The mock transport records the last packet instead of sending it.
    Packet sentPacket = packetTransport->getSentPacket();
    QCOMPARE(sentPacket.field("Action").toString(), QString("Login"));
    QCOMPARE(sentPacket.field("Username").toString(), QString("test"));
    QCOMPARE(sentPacket.field("Secret").toString(), QString("<PASSWORD>"));
}
|
// handleError handles a raw yaml decoder.Decode error, filters it,
// and return the resulting error.
func (p yamlParser) handleError(err error) error {
var typeError *yaml.TypeError
if !errors.As(err, &typeError) {
return err
}
filtered := &yaml.TypeError{}
for _, msg := range typeError.Errors {
if p.isCustomFieldError(msg) {
continue
}
filtered.Errors = append(filtered.Errors, p.prettyErrorMessage(msg))
}
if len(filtered.Errors) != 0 {
return filtered
}
return nil
} |
package repository
import (
"github.com/Tanibox/tania-core/src/user/domain"
"github.com/Tanibox/tania-core/src/user/storage"
uuid "github.com/satori/go.uuid"
)
// RepositoryResult is a struct to wrap a repository result and its error
// together, so it's easy to pass through a channel.
type RepositoryResult struct {
	Result interface{}
	Error  error
}

// UserEventRepository persists the event stream of a User aggregate,
// guarded by the aggregate's latest known version.
type UserEventRepository interface {
	Save(uid uuid.UUID, latestVersion int, events []interface{}) <-chan error
}

// UserReadRepository persists the denormalized user read model.
type UserReadRepository interface {
	Save(userRead *storage.UserRead) <-chan error
}

// UserAuthRepository persists user authentication data.
type UserAuthRepository interface {
	Save(userAuth *storage.UserAuth) <-chan error
}
// NewUserFromHistory rebuilds a User aggregate by replaying its stored
// events in order, bumping the version once per applied event.
func NewUserFromHistory(events []storage.UserEvent) *domain.User {
	user := &domain.User{}
	for _, event := range events {
		user.Transition(event.Event)
		user.Version++
	}
	return user
}
|
# N = number of items, M = required rank; A = per-item vote counts.
N, M = map(int, input().split())
A = list(map(int, input().split()))
def votenum(A):
    """Return the total number of votes (sum of all entries of ``A``)."""
    # Replaces a manual accumulation loop with the built-in sum().
    return sum(A)
# M-th largest vote count (the weakest candidate that would still qualify).
Mlast = sorted(A)[-M]
# Its share of the total vote.
S = Mlast / votenum(A)
# Qualifies only if the share is at least 1/(4M).
if S < 0.25 / M:
    print('No')
else:
    print('Yes')
<filename>src/dynamo/handlers/MongoDbHandler.java<gh_stars>1-10
package dynamo.handlers;
import com.google.gson.Gson;
import com.mongodb.async.client.MongoClient;
import com.mongodb.async.client.MongoClients;
import com.mongodb.async.client.MongoCollection;
import com.mongodb.async.client.MongoDatabase;
import dynamo.serialization.ISerializer;
import dynamo.serialization.JsonSerializer;
import org.bson.Document;
import org.bson.types.ObjectId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FileInputStream;
import java.io.InputStream;
import java.nio.file.Paths;
import java.util.Properties;
/******************************************************************************
* Sends data to MongoDB.
******************************************************************************/
public class MongoDbHandler implements IDataHandler
{
private final Logger _logger = LoggerFactory.getLogger(MongoDbHandler.class);
private final ISerializer _serializer;
private final Gson _gson;
private MongoClient _client;
private MongoCollection _collection;
/****************************************************************************
* Creates an instance of MongoDbHandler.
****************************************************************************/
public MongoDbHandler()
{
_gson = new Gson();
CheckConfiguration();
_serializer = new JsonSerializer();
}
/****************************************************************************
* Serializes the data to a byte array and sends it into MongoDB as a message.
*
* @param data
* The data.
* @throws Throwable
* Any uncaught exception.
****************************************************************************/
public void HandleData(Object data) throws Throwable
{
final Document document = Document.parse(_gson.toJson(data));
document.putIfAbsent("_id", ObjectId.get());
_collection.insertOne(document, (Object result, final Throwable me) ->
{
if (me != null)
_logger.error(me.getMessage(), me);
else
_logger.info("Document written.");
});
}
/****************************************************************************
* Destructors aren't reliable in Java, but it's good to call this here
* anyway.
****************************************************************************/
protected void finalize()
{
try
{
_client.close();
}
catch (Exception e)
{
_logger.error(e.getMessage(), e);
}
}
/****************************************************************************
* Checks for a properties file to load settings.
****************************************************************************/
private void CheckConfiguration()
{
InputStream propertiesStream = null;
try
{
final String propertiesFile = Paths.get("dynamo.properties").toAbsolutePath().toString();
propertiesStream = new FileInputStream(propertiesFile);
final Properties settings = new Properties();
settings.load(propertiesStream);
_client = MongoClients.create(settings.getProperty("Handlers.MongoDB.ConnectionString"));
CreateDatabaseAndCollection(settings);
}
catch (Exception e)
{
_logger.error(e.getMessage(), e);
System.exit(0);
}
finally
{
if (propertiesStream != null)
{
try
{
propertiesStream.close();
}
catch (Exception e)
{
_logger.error(e.getMessage(), e);
}
}
}
}
/****************************************************************************
* Creates the Document DB database and collection.
*
* @param settings
* A reference to the properties.
****************************************************************************/
private void CreateDatabaseAndCollection(final Properties settings)
{
final String databaseName = settings.getProperty("Handlers.MongoDB.Database");
final MongoDatabase database = _client.getDatabase(databaseName);
final String collectionName = settings.getProperty("Handlers.MongoDB.Collection");
_collection = database.getCollection(collectionName);
}
} |
def _svd(self,
points1: torch.Tensor,
points2: torch.Tensor
) -> torch.Tensor:
b, _, c = points1.shape
points1 = torch.transpose(points1, -2, -1)
points2 = torch.transpose(points2, -2, -1)
centroid1 = points1.mean(dim=-1, keepdims=True)
centroid2 = points1.mean(dim=-1, keepdims=True)
centered1 = points1 - centroid1
centered2 = points2 - centroid2
variance = torch.sum(centered1 ** 2, dim=[1, 2])
K = centered1 @ torch.transpose(centered2, -2, -1)
U, s, V = torch.svd(K)
Z = torch.eye(c).to(U).unsqueeze(0).repeat(b, 1, 1)
Z[:,-1, -1] *= torch.sign(torch.det(U @ torch.transpose(V, -2, -1)))
rotation = V @ (Z @ torch.transpose(U, -2, -1))
scale = torch.cat([torch.trace(x).unsqueeze(0) for x in (rotation @ K)]) / variance
scale = scale.unsqueeze(-1).unsqueeze(-1)
translation = centroid2 - (scale * (rotation @ centroid1))
return rotation, translation, scale |
<gh_stars>1-10
// Copyright © 2011-2018 <NAME> <<EMAIL>>
package goryachev.common.util;
import java.util.Arrays;
import java.util.Collection;
/**
 * A {@link CList} of strings with convenience constructors and a typed
 * {@code toStringArray()} accessor.
 */
public class CStringList
	extends CList<String>
{
	/** Creates an empty list with the given initial capacity. */
	public CStringList(int initialCapacity)
	{
		super(initialCapacity);
	}

	/** Creates an empty list with the default capacity. */
	public CStringList()
	{
		super();
	}

	/** Creates a list containing the elements of the given collection. */
	public CStringList(Collection<String> c)
	{
		super(c);
	}

	/** Creates a list containing the elements of the given array. */
	public CStringList(String[] a)
	{
		super(Arrays.asList(a));
	}

	/** @return the contents as a new String[] of exactly size() elements. */
	public String[] toStringArray()
	{
		return toArray(new String[size()]);
	}
}
|
def forward(self, x):
    """Squeeze the incoming feature parts and fuse them according to the
    configured mode (``self.name``); behaviorally identical to the original
    if/elif dispatch (the mode string selects exactly one branch).
    """
    mode = self.name
    if mode == "simple":
        # Two-part input: squeeze the first part, concatenate on dim 1.
        return torch.cat([x[0].squeeze(), x[1]], 1)
    if mode == "image":
        return torch.cat([x[0][0].squeeze(), x[1][0].squeeze(), x[2]], 1)
    pieces = [x[0][0].squeeze(), x[1].squeeze(), x[2].squeeze(), x[3][0].squeeze(), x[4]]
    if mode == "noconcat":
        # Return the individual parts unfused.
        return pieces
    # Any other mode: fuse all five parts along dim 1.
    return torch.cat(pieces, 1)
/**
* @see ClaimDataDAO
*
* @author Derek Hulley
* @since 1.3
*/
@RunWith(JUnit4.class)
public class ClaimDataDAOTest
{
    /** Dedicated Mongo collection so the test data stays isolated. */
    private final static String COLLECTION_BM_CLAIM_DAO_TEST = "BenchmarkClaimDAOTest";
    /** Claim ids seeded before every test. */
    public final static String[] CLAIM_IDS = new String[] {"A-123", "B-234", "C-345", "D-456", "E-567"};

    private static AbstractApplicationContext ctx;
    private static ClaimDataDAO claimDataDAO;

    @Before
    public void setUp()
    {
        // Inject the test collection name into the Spring environment before
        // the context is refreshed, so the DAO binds to it.
        Properties props = new Properties();
        props.put("mongoCollection", COLLECTION_BM_CLAIM_DAO_TEST);
        ctx = new ClassPathXmlApplicationContext(new String[] {"test-MongoClaimDataDAOTest-context.xml"}, false);
        ctx.getEnvironment().getPropertySources().addFirst(new PropertiesPropertySource("TestProps", props));
        ctx.refresh();
        ctx.start();
        claimDataDAO = ctx.getBean(ClaimDataDAO.class);
        // Generate some random users
        for (int i = 0; i < CLAIM_IDS.length; i++)
        {
            String claimId = CLAIM_IDS[i];
            claimDataDAO.createClaim(claimId);
        }
    }

    @After
    public void tearDown()
    {
        ctx.close();
    }

    /** All seeded claims must be retrievable and counted in state Unknown. */
    @Test
    public void testSetUp()
    {
        for (int i = 0; i < CLAIM_IDS.length; i++)
        {
            String claimId = CLAIM_IDS[i];
            ClaimData claimData = claimDataDAO.findClaimById(claimId);
            assertNotNull("Expect to find all the created claims.", claimData);
        }
        assertEquals("Expected the count to match", CLAIM_IDS.length, claimDataDAO.countClaims(null));
        assertEquals("Expected the count to match", CLAIM_IDS.length, claimDataDAO.countClaims(DataCreationState.Unknown));
    }

    /** Creating the same claim id twice must fail the second time. */
    @Test
    public void testDuplicateClaim()
    {
        String randomClaimId = UUID.randomUUID().toString();
        boolean inserted = claimDataDAO.createClaim(randomClaimId);
        assertTrue(inserted);
        // This should fail
        boolean reinserted = claimDataDAO.createClaim(randomClaimId);
        assertFalse(reinserted);
    }

    /** Updates and lookups against a missing claim must be no-ops. */
    @Test
    public void testClaimNotExist()
    {
        boolean updated = claimDataDAO.updateClaimState("Bob", DataCreationState.Created);
        assertFalse(updated);
        ClaimData claimData = claimDataDAO.findClaimById("Bob");
        assertNull("Expected to NOT find this claim.", claimData);
    }

    /** Random selection must respect the requested creation state. */
    @Test
    public void testRandomClaim()
    {
        ClaimData claimData = claimDataDAO.getRandomClaim(DataCreationState.Unknown);
        assertNotNull("Expected to find a random 'Unknown' claim.", claimData);
        claimData = claimDataDAO.getRandomClaim(DataCreationState.Created);
        assertNull("Expected to NOT find a random 'Created' claim.", claimData);
        assertEquals(0, claimDataDAO.countClaims(DataCreationState.Created));
        // Promote one claim and verify it becomes selectable/countable.
        claimDataDAO.updateClaimState("A-123", DataCreationState.Created);
        claimData = claimDataDAO.getRandomClaim(DataCreationState.Created);
        assertNotNull("Expected to find a random 'Created' claim.", claimData);
        assertEquals(1, claimDataDAO.countClaims(DataCreationState.Created));
    }
}
import styled from "styled-components";
/**
 * Decorative two-corner layered-blob SVG sized for a 960x540 viewBox.
 * All props are spread onto the root <svg> element.
 *
 * NOTE(review): `props: any` defeats type checking -- consider
 * React.SVGProps<SVGSVGElement>; left unchanged here.
 */
function TabletBlob(props: any) {
  return (
    <svg
      viewBox="0 0 960 540"
      xmlns="http://www.w3.org/2000/svg"
      xmlnsXlink="http://www.w3.org/1999/xlink"
      version="1.1"
      {...props}
    >
      {/* Top-right blob cluster (largest to smallest layer). */}
      <g transform="translate(960, 0)">
        <path
          d="M0 486C-110.5 488 -221 490 -243 420.9C-265 351.8 -198.5 211.5 -224.3 129.5C-250.1 47.5 -368 23.7 -486 0L0 0Z"
          fill="#e1d0b9"
        ></path>
        <path
          d="M0 324C-73.7 325.3 -147.3 326.7 -162 280.6C-176.7 234.5 -132.4 141 -149.5 86.3C-166.7 31.7 -245.4 15.8 -324 0L0 0Z"
          fill="#a69d66"
        ></path>
        <path
          d="M0 162C-36.8 162.7 -73.7 163.3 -81 140.3C-88.3 117.3 -66.2 70.5 -74.8 43.2C-83.4 15.8 -122.7 7.9 -162 0L0 0Z"
          fill="#828641"
        ></path>
      </g>
      {/* Bottom-left blob cluster (mirrored composition). */}
      <g transform="translate(0, 540)">
        <path
          d="M0 -486C30.1 -379.1 60.3 -272.2 141.5 -245.1C222.7 -217.9 355 -270.6 420.9 -243C486.8 -215.4 486.4 -107.7 486 0L0 0Z"
          fill="#e1d0b9"
        ></path>
        <path
          d="M0 -324C20.1 -252.7 40.2 -181.5 94.3 -163.4C148.5 -145.3 236.6 -180.4 280.6 -162C324.5 -143.6 324.3 -71.8 324 0L0 0Z"
          fill="#a69d66"
        ></path>
        <path
          d="M0 -162C10 -126.4 20.1 -90.7 47.2 -81.7C74.2 -72.6 118.3 -90.2 140.3 -81C162.3 -71.8 162.1 -35.9 162 0L0 0Z"
          fill="#828641"
        ></path>
      </g>
    </svg>
  );
}
/**
 * Fixed, full-viewport background version of TabletBlob, rendered behind all
 * content (z-index -1). Hidden below 500px and again above 1600px, where
 * other background art presumably takes over -- confirm against the layout.
 */
const GreenTablet = styled(TabletBlob)`
  display: none;
  position: fixed;
  top: 0;
  left: 0;
  right: 0;
  bottom: 0;
  z-index: -1;
  @media screen and (min-width: 500px) {
    display: revert;
  }
  @media screen and (min-width: 1600px) {
    display: none;
  }
`;

export default GreenTablet;
|
//
// DynamicLayoutCollectionViewController.h
// CATCalendarProvider
//
// Created by wit on 15/2/19.
// Copyright (c) 2015年 cat. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface DynamicLayoutCollectionViewController : UICollectionViewController
@end
|
/// Add an interval to the store, merging it with every existing interval it
/// can be combined with; if nothing merges, the interval is appended as-is.
pub fn add(&mut self, start: T, end: T) {
    let mut new_interval = Interval { start, end };
    // Index of the first stored interval the new one was merged into.
    let mut first_match = None;
    let mut i = 0;
    while i != self.intervals.len() {
        let interval = self.intervals[i];
        if first_match.is_none() && interval.can_merge(&new_interval) {
            // First overlap: grow that stored interval in place.
            self.intervals[i].merge(&new_interval);
            new_interval = self.intervals[i];
            first_match = Some(i);
            i += 1;
        } else if first_match.is_some() && interval.can_merge(&new_interval) {
            // Later overlap: fold it into the first match and drop it.
            // NOTE(review): `new_interval` is a snapshot taken right after the
            // first merge and is not refreshed here, so an interval that only
            // overlaps the *grown* range could be missed -- confirm whether a
            // store invariant (e.g. disjoint intervals) rules this out.
            let match_idx = first_match.unwrap();
            self.intervals[match_idx].merge(&interval);
            self.intervals.remove(i);
        } else {
            i += 1;
        }
    }
    if first_match.is_none() {
        self.intervals.push(new_interval)
    }
}
<reponame>EmmaNguyen/feature_adversarial_with_topology_signatures
import matplotlib.pyplot as plt
def save_encoded_sample(data, targets, epoch, vis_path="../data/", figsize=(10, 10),
                        x=None, batch=None):
    """Scatter-plot a 2-D latent encoding colored by target class and save it.

    Fix: the previous version unconditionally referenced the undefined names
    ``x``, ``batch`` and ``vutils`` after the scatter plot and therefore
    always raised NameError. Sample/reconstruction saving is now optional and
    only runs when both ``x`` and ``batch`` are supplied (backward-compatible:
    existing callers omit them and just get the scatter plot).

    Args:
        data: Array-like of shape (n, 2) with the encoded coordinates.
        targets: Per-point labels used for coloring.
        epoch: Zero-based epoch number (file names use epoch + 1).
        vis_path: Output directory for the generated images.
        figsize: Matplotlib figure size.
        x: Optional batch of input images to save alongside the plot.
        batch: Optional mapping with a 'decode' tensor of reconstructions.
    """
    plt.figure(figsize=figsize)
    # Y is negated so the plot matches the usual image orientation.
    plt.scatter(data[:, 0], -data[:, 1], c=(10 * targets), cmap=plt.cm.Spectral)
    plt.xlim([-1.5, 1.5])
    plt.ylim([-1.5, 1.5])
    # plt.title('Test Latent Space\nLoss: {:.5f}'.format(test_loss))
    plt.savefig("{}/test_latent_epoch_{}.png".format(vis_path, epoch + 1))
    plt.close()
    if x is not None and batch is not None:
        # Imported lazily so the plotting path has no torchvision dependency.
        import torchvision.utils as vutils
        # save sample input and reconstruction
        vutils.save_image(x,
                          "{}/test_samples_epoch_{}.png".format(vis_path,
                                                                epoch + 1))
        vutils.save_image(batch['decode'].detach(),
                          "{}/test_reconstructions_epoch_{}.png".format(vis_path, epoch + 1),
                          normalize=True)
|
import { AxiosPromise } from '../types/response'
import { AxiosRequestConfig } from '../types/request'
import { processURL } from '../helpers/url'
import { processRequestData } from '../helpers/data'
import { processHeaders } from '../helpers/headers'
import xhr from '../xhr'
/**
 * Normalize the request config in place before dispatch: build the final
 * URL from `url` + `params`, flatten the headers against the body, and
 * serialize the request data.
 *
 * Note: `url!` assumes callers always provide a URL.
 * @param config request configuration (mutated in place)
 */
function processConfig(config: AxiosRequestConfig): void {
  let { url, data, params, headers = {} } = config
  config.url = processURL(url!, params)
  config.headers = processHeaders(headers, data)
  config.data = processRequestData(data)
}
/**
 * Entry point of the request pipeline: normalizes the config, then hands it
 * to the XHR transport.
 * @param config request configuration
 * @returns promise resolving with the response
 */
function dispatchFetch(config: AxiosRequestConfig): AxiosPromise {
  // Prepare the config (URL, headers, data) before sending.
  processConfig(config)
  // Dispatch the actual request.
  return xhr(config)
}
export default dispatchFetch |
/**
* @author byte
*
* Represents the paddle.
*/
public class Paddle
{
private Point2D.Double _position;
private int _pixelWidth;
private int _pixelHeight;
private Rectangle2D.Double _collisionRectangle;
private PlayField _parentPlayField;
/**
* @param parentPlayField
*
* Instantiates a paddle.
*/
public Paddle(PlayField parentPlayField)
{
_parentPlayField = parentPlayField;
_pixelWidth = (int)((getGameOptions().get("fieldPixelWidth") / playFieldSettings.maxBricksHorizontal) * 1.8);
_pixelHeight = (int)((getGameOptions().get("fieldPixelHeight") / playFieldSettings.maxBricksVertical) / 3);
double x = (((getGameOptions().get("fieldPixelWidth")) / 2) - (_pixelWidth / 2));
double y = getGameOptions().get("fieldPixelHeight") - (_pixelHeight * 2);
_position = new Point2D.Double(x,y);
_collisionRectangle = new Rectangle2D.Double(_position.x-2, _position.y-2, _pixelWidth+2, _pixelHeight+2);
}
protected void paddleContacted()
{
double angles[] =
{
Math.toRadians(157.5),
Math.toRadians(142.5),
Math.toRadians(127.5),
Math.toRadians(112.5),
Math.toRadians(97.5),
Math.toRadians(82.5),
Math.toRadians(67.5),
Math.toRadians(52.5),
Math.toRadians(37.5),
Math.toRadians(22.5)
};
int hitPos = (int)(getBall().getPosition().x - _position.x + _pixelWidth/2);
int angle = 0;
for ( angle = angles.length-1; angle > 0; angle--)
if ( hitPos >= _pixelWidth/angles.length * angle )
break;
double movX = getCanvas().getCurrentMovementX();
double movY = getCanvas().getCurrentMovementY();
double speed = Math.sqrt( movX*movX + movY*movY );
double newMovX = speed * Math.cos(angles[angle]);
double newMovY = -speed * Math.sin(angles[angle]);
getCanvas().setCurrentMovementX(newMovX);
getCanvas().setCurrentMovementY(newMovY);
}
private Ball getBall()
{
return getPlayField().getBall();
}
private BasicCanvas getCanvas()
{
return getPlayField().getCanvas();
}
private PlayField getPlayField()
{
return _parentPlayField;
}
protected Point2D.Double getPosition()
{
return _position;
}
protected Rectangle2D.Double getCollisionRectangle()
{
return _collisionRectangle;
}
protected boolean isPointOnSurface(Point testPoint)
{
double upperLeftX = _position.x - (_pixelWidth/2);
double upperRightX = _position.x + (_pixelWidth/2);
if((testPoint.y == _position.y) && (testPoint.x < upperRightX) && (testPoint.x > upperLeftX))
return true;
else
return false;
}
protected Hashtable<String, Integer> getGameOptions()
{
return _parentPlayField.getGameOptions();
}
protected void updatePosition(int x)
{
if(isValidPosition(x) == true)
_position.x = x;
}
private boolean isValidPosition(int x)
{
if((x <= getGameOptions().get("fieldPixelWidth")) && (x >= 0))
return true;
else
return false;
}
    /**
     * Draws the paddle.
     *
     * Recomputes the paddle's pixel size from the current game options on
     * every call, re-anchors its vertical position near the bottom of the
     * field, paints a gradient-filled rectangle with a grey border, and
     * finally refreshes the collision rectangle to match the drawn shape.
     *
     * @param paddleGraphic The graphics object to draw with.
     */
    public void draw(Graphics2D paddleGraphic)
    {
        // Paddle is 1.8x a brick's width and one third of a brick's height.
        _pixelWidth = (int)((getGameOptions().get("fieldPixelWidth") / playFieldSettings.maxBricksHorizontal) * 1.8);
        _pixelHeight = (int)((getGameOptions().get("fieldPixelHeight") / playFieldSettings.maxBricksVertical) / 3);
        // Anchor the paddle two paddle-heights above the bottom edge.
        _position.y = getGameOptions().get("fieldPixelHeight") - (_pixelHeight * 2);
        // _position is the centre; convert to the upper-left drawing corner.
        double upperLeftX = _position.x - (_pixelWidth/2);
        double upperLeftY = _position.y - (_pixelHeight/2);
        //inside fill: vertical black-to-light-grey gradient over the paddle height
        GradientPaint paddleGradient = new GradientPaint((float)_position.x,
                                                         (float)_position.y,
                                                         Color.BLACK,
                                                         (float)_position.x,
                                                         (float)_position.y+_pixelHeight,
                                                         Color.LIGHT_GRAY);
        paddleGraphic.setPaint(paddleGradient);
        paddleGraphic.fillRect((int)upperLeftX, (int)upperLeftY, _pixelWidth, _pixelHeight);
        //border: 1px grey outline on top of the fill
        paddleGraphic.setStroke(new BasicStroke(1));
        paddleGraphic.setPaint(Color.GRAY);
        paddleGraphic.drawRect((int)upperLeftX, (int)upperLeftY, _pixelWidth, _pixelHeight);
        // Keep the collision rectangle in sync with what was just drawn.
        updateCollisionRectangle(upperLeftX, upperLeftY, _pixelWidth, _pixelHeight);
    }
    /**
     * Erases the paddle by repainting its area in white.
     *
     * Both the fill and the border pass use WHITE so the grey outline drawn
     * by {@code draw()} is covered as well. Uses the sizes computed by the
     * last {@code draw()} call.
     *
     * @param paddleGraphic The graphics object to draw with.
     */
    public void erase(Graphics2D paddleGraphic)
    {
        double upperLeftX = _position.x - (_pixelWidth/2);
        double upperLeftY = _position.y - (_pixelHeight/2);
        paddleGraphic.setPaint(Color.WHITE);
        paddleGraphic.fillRect((int)upperLeftX, (int)upperLeftY, _pixelWidth, _pixelHeight);
        //border: repainted in white to cover the grey outline from draw()
        paddleGraphic.setStroke(new BasicStroke(1));
        paddleGraphic.setPaint(Color.WHITE);
        paddleGraphic.drawRect((int)upperLeftX, (int)upperLeftY, _pixelWidth, _pixelHeight);
    }
    /**
     * Updates the collision rectangle from the drawn paddle rectangle.
     *
     * With x-2/y-2 but only width+2/height+2 the rectangle is expanded by
     * 2px on the left and top while the right and bottom edges stay where
     * they were. NOTE(review): if a symmetric 2px margin was intended,
     * width and height would need +4 — confirm against the collision logic.
     *
     * @param x      Upper-left x of the drawn paddle.
     * @param y      Upper-left y of the drawn paddle.
     * @param width  Drawn paddle width in pixels.
     * @param height Drawn paddle height in pixels.
     */
    private void updateCollisionRectangle(double x, double y, int width, int height)
    {
        _collisionRectangle.x = x-2;
        _collisionRectangle.y = y-2;
        _collisionRectangle.width = width+2;
        _collisionRectangle.height = height+2;
    }
} |
/**
* Created by rezkyatinnov on 09/08/2017.
*/
public class AlertUtils {
public static void showDialogDualActions(Context context, String title, String message, String positive, String negative, DialogInterface.OnClickListener positiveCallback, DialogInterface.OnClickListener negativeCallback) {
AlertDialog.Builder builder = new AlertDialog.Builder(context);
builder.setTitle(title)
.setMessage(message)
.setPositiveButton(positive, positiveCallback)
.setNegativeButton(negative, negativeCallback);
builder.create().show();
}
public static void showDialogDualActions(Context context, String title, String message, DialogInterface.OnClickListener positiveCallback, DialogInterface.OnClickListener negativeCallback) {
AlertDialog.Builder builder = new AlertDialog.Builder(context);
builder.setTitle(title)
.setMessage(message)
.setPositiveButton("OK", positiveCallback)
.setNegativeButton("Cancel", negativeCallback);
builder.create().show();
}
public static void showDialogDualActions(Context context, String message, DialogInterface.OnClickListener positiveCallback, DialogInterface.OnClickListener negativeCallback) {
AlertDialog.Builder builder = new AlertDialog.Builder(context);
builder.setMessage(message)
.setPositiveButton("OK", positiveCallback)
.setNegativeButton("Cancel", negativeCallback);
builder.create().show();
}
public static void showDialogSingleAction(Context context, String title, String message, String positive, DialogInterface.OnClickListener positiveCallback) {
AlertDialog.Builder builder = new AlertDialog.Builder(context);
builder.setTitle(title)
.setMessage(message)
.setPositiveButton(positive, positiveCallback);
builder.create().show();
}
public static void showDialogSingleAction(Context context, String title, String message, DialogInterface.OnClickListener positiveCallback) {
AlertDialog.Builder builder = new AlertDialog.Builder(context);
builder.setTitle(title)
.setMessage(message)
.setPositiveButton("OK", positiveCallback);
builder.create().show();
}
public static void showDialogSingleAction(Context context, String message, DialogInterface.OnClickListener positiveCallback) {
AlertDialog.Builder builder = new AlertDialog.Builder(context);
builder.setMessage(message)
.setPositiveButton("OK", positiveCallback);
builder.create().show();
}
} |
module Main where
import Haste
import Haste.DOM
import Haste.Events
import ProcNum
-- | Look up the four DOM elements the calculator needs and wire them up.
main = withElems ["pv","ty","r","result"] calculator
-- | Attach recalculation to the three input fields: present value (pv) and
-- years (ty) recompute on every keystroke, the rate selector (r) on change.
-- The pattern match expects exactly the four elements supplied by 'main';
-- the result is written into the 'result' element's innerHTML.
calculator [pv,ty,r,result] = do
    onEvent pv KeyUp $ \_ -> recalculate
    onEvent ty KeyUp $ \_ -> recalculate
    onEvent r Change $ \_ -> recalculate
  where
    recalculate = do
      vpv <- getValue pv
      vty <- getValue ty
      vr <- getValue r
      -- Only update when all three fields parse as numbers; otherwise
      -- leave the previous result untouched.
      case (str2dbl vpv, str2dbl vty, str2dbl vr) of
        (Just pv', Just ty', Just r') -> setProp result "innerHTML" (prtdbl2 $ calc pv' ty' r')
        _ -> return ()
-- | Future value under compound growth: the present value grown for the
-- given number of periods at the given per-period rate.
calc :: Double -> Double -> Double -> Double
calc presentValue periods rate = presentValue * growthFactor ** periods
  where
    growthFactor = 1.0 + rate
|
/**
* @author Antonio Rabelo
*
* @param <R> Result Type
* @param <T> Entity Type
*/
public abstract class AbstractCriteriaSnippetDecorator<R, T> implements CriteriaSnippet<R, T> {
/**
* CriteriaSnippet to be decorated.
*/
protected final CriteriaSnippet<R, T> snippetToBeDecorated;
/**
* Constructor.
*
* @param snippet CriteriaSnippet to be decorated.
*/
public AbstractCriteriaSnippetDecorator(CriteriaSnippet<R, T> snippet) {
if (snippet == null) {
this.snippetToBeDecorated = new CriteriaSnippet<R, T>() {
public boolean validate() throws IllegalArgumentException {
return true;
}
public CriteriaQuery<R> modify(
CriteriaQuery<R> criteria,
Class<R> resultClass,
Class<T> entity,
EntityManager manager) {
return criteria;
}
public TypedQuery<R> configure(TypedQuery<R> query) {
return query;
}
};
} else {
this.snippetToBeDecorated = snippet;
}
}
/* (non-Javadoc)
* @see com.github.tennaito.entity.service.snippet.CriteriaSnippet#validate()
*/
public boolean validate() throws IllegalArgumentException {
return this.snippetToBeDecorated.validate();
}
/* (non-Javadoc)
* @see com.github.tennaito.entity.service.snippet.CriteriaSnippet#modify(javax.persistence.criteria.CriteriaQuery, java.lang.Class, java.lang.Class, javax.persistence.EntityManager)
*/
public CriteriaQuery<R> modify(CriteriaQuery<R> criteria, Class<R> resultClass, Class<T> entity, EntityManager manager) {
return this.snippetToBeDecorated.modify(criteria, resultClass, entity, manager);
}
/* (non-Javadoc)
* @see com.github.tennaito.entity.service.snippet.CriteriaSnippet#configure(javax.persistence.TypedQuery)
*/
public TypedQuery<R> configure(TypedQuery<R> query) {
return this.snippetToBeDecorated.configure(query);
}
/**
* Find the Root of the criteria query.
*
* @param query criteria query.
* @param clazz Java type of the root you want.
* @return Root of the criteria query.
*/
protected static Root<?> findRoot(CriteriaQuery<?> query, Class<?> clazz) {
Root root = null;
if (query.getRoots().isEmpty()) {
root = query.from(clazz);
} else {
for (Root<?> r : query.getRoots()) {
if (clazz.equals(r.getJavaType())) {
root = (Root)r.as(clazz);
break;
}
}
}
return root;
}
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.